gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package dev.android.example.jobland.others;
import android.os.Handler;
import android.os.Message;
import android.view.View;
import android.view.ViewGroup;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Created by quyen on 10/24/2017.
 *
 * Detects soft-keyboard open/close events by watching the on-screen bottom
 * coordinate of a root layout: when the keyboard appears the layout shrinks,
 * so its bottom edge moves up. A background thread polls that coordinate and
 * fires the {@link SoftKeyboardChanged} callback on show/hide (callbacks run
 * on the watcher thread, not the UI thread).
 *
 * NOTE(review): the polling loops in SoftKeyboardChangesThread.run() spin
 * without sleeping while waiting for the layout to resize, which burns CPU
 * during the keyboard animation window — consider a short sleep there.
 */
public class SoftKeyboard implements View.OnFocusChangeListener
{
    // Message id handled by mHandler: clear focus of the last focused EditText.
    private static final int CLEAR_FOCUS = 0;

    private ViewGroup layout;            // root layout whose bottom edge is observed
    private int layoutBottom;            // layout bottom y-coordinate while the keyboard is hidden
    private InputMethodManager im;
    private int[] coords;                // scratch buffer for getLocationOnScreen
    private boolean isKeyboardShow;
    private SoftKeyboardChangesThread softKeyboardThread;
    private List<EditText> editTextList; // all EditTexts found under layout (recursively)
    private View tempView; // reference to a focused EditText

    /**
     * @param layout root ViewGroup that resizes when the keyboard appears
     * @param im     system InputMethodManager used to toggle the keyboard
     */
    public SoftKeyboard(ViewGroup layout, InputMethodManager im)
    {
        this.layout = layout;
        keyboardHideByDefault();
        initEditTexts(layout);
        this.im = im;
        this.coords = new int[2];
        this.isKeyboardShow = false;
        this.softKeyboardThread = new SoftKeyboardChangesThread();
        this.softKeyboardThread.start();
    }

    /** Requests the keyboard to open (no-op if already shown) and wakes the watcher thread. */
    public void openSoftKeyboard()
    {
        if(!isKeyboardShow)
        {
            // Snapshot the "keyboard hidden" bottom coordinate before toggling.
            layoutBottom = getLayoutCoordinates();
            im.toggleSoftInput(0, InputMethodManager.SHOW_IMPLICIT);
            softKeyboardThread.keyboardOpened();
            isKeyboardShow = true;
        }
    }

    /** Requests the keyboard to close (no-op if already hidden). */
    public void closeSoftKeyboard()
    {
        if(isKeyboardShow)
        {
            // NOTE(review): HIDE_IMPLICIT_ONLY is passed in the showFlags slot —
            // this is the common "toggle" hide trick, but verify against the
            // InputMethodManager docs for the targeted API level.
            im.toggleSoftInput(InputMethodManager.HIDE_IMPLICIT_ONLY, 0);
            isKeyboardShow = false;
        }
    }

    /** Registers the callback notified on keyboard show/hide (invoked on the watcher thread). */
    public void setSoftKeyboardCallback(SoftKeyboardChanged mCallback)
    {
        softKeyboardThread.setCallback(mCallback);
    }

    /** Stops the watcher thread; call when the hosting Activity/Fragment goes away. */
    public void unRegisterSoftKeyboardCallback()
    {
        softKeyboardThread.stopThread();
    }

    /** Observer interface for keyboard visibility changes. */
    public interface SoftKeyboardChanged
    {
        public void onSoftKeyboardHide();
        public void onSoftKeyboardShow();
    }

    // Returns the current on-screen y-coordinate of the layout's bottom edge.
    private int getLayoutCoordinates()
    {
        layout.getLocationOnScreen(coords);
        return coords[1] + layout.getHeight();
    }

    // Make the layout itself focusable so no EditText grabs focus (and pops
    // the keyboard) when the screen first appears.
    private void keyboardHideByDefault()
    {
        layout.setFocusable(true);
        layout.setFocusableInTouchMode(true);
    }

    /*
     * InitEditTexts now handles EditTexts in nested views
     * Thanks to Francesco Verheye (verheye.francesco@gmail.com)
     */
    // Recursively collects every EditText under the given view group and
    // installs this object as its focus-change listener.
    private void initEditTexts(ViewGroup viewgroup)
    {
        if(editTextList == null)
            editTextList = new ArrayList<EditText>();

        int childCount = viewgroup.getChildCount();
        for(int i=0; i<= childCount-1;i++)
        {
            View v = viewgroup.getChildAt(i);

            if(v instanceof ViewGroup)
            {
                initEditTexts((ViewGroup) v);
            }

            if(v instanceof EditText)
            {
                EditText editText = (EditText) v;
                editText.setOnFocusChangeListener(this);
                editText.setCursorVisible(true);
                editTextList.add(editText);
            }
        }
    }

    /*
     * OnFocusChange does update tempView correctly now when keyboard is still shown
     * Thanks to Israel Dominguez (dominguez.israel@gmail.com)
     */
    @Override
    public void onFocusChange(View v, boolean hasFocus)
    {
        if(hasFocus)
        {
            tempView = v;
            // Only signal the watcher on the hidden -> shown transition.
            if(!isKeyboardShow)
            {
                layoutBottom = getLayoutCoordinates();
                softKeyboardThread.keyboardOpened();
                isKeyboardShow = true;
            }
        }
    }

    // This handler will clear focus of selected EditText
    // NOTE(review): Handler() with no explicit Looper binds to the constructing
    // thread's looper — presumably this class is constructed on the UI thread;
    // confirm with the caller.
    private final Handler mHandler = new Handler()
    {
        @Override
        public void handleMessage(Message m)
        {
            switch(m.what)
            {
                case CLEAR_FOCUS:
                    if(tempView != null)
                    {
                        tempView.clearFocus();
                        tempView = null;
                    }
                    break;
            }
        }
    };

    // Background watcher: blocks until a keyboard open is signalled, then polls
    // the layout's bottom coordinate to detect the actual show and later hide.
    private class SoftKeyboardChangesThread extends Thread
    {
        private AtomicBoolean started;            // cleared by stopThread() to end run()
        private SoftKeyboardChanged mCallback;

        public SoftKeyboardChangesThread()
        {
            started = new AtomicBoolean(true);
        }

        public void setCallback(SoftKeyboardChanged mCallback)
        {
            this.mCallback = mCallback;
        }

        @Override
        public void run()
        {
            while(started.get())
            {
                // Wait until keyboard is requested to open
                synchronized(this)
                {
                    try
                    {
                        wait();
                    } catch (InterruptedException e)
                    {
                        e.printStackTrace();
                    }
                }

                int currentBottomLocation = getLayoutCoordinates();

                // There is some lag between open soft-keyboard function and when it really appears.
                // NOTE(review): this loop (and the one below) busy-waits without sleeping.
                while(currentBottomLocation == layoutBottom && started.get())
                {
                    currentBottomLocation = getLayoutCoordinates();
                }

                if(started.get())
                    mCallback.onSoftKeyboardShow();

                // When keyboard is opened from EditText, initial bottom location is greater than layoutBottom
                // and at some moment equals layoutBottom.
                // That broke the previous logic, so I added this new loop to handle this.
                while(currentBottomLocation >= layoutBottom && started.get())
                {
                    currentBottomLocation = getLayoutCoordinates();
                }

                // Now Keyboard is shown, keep checking layout dimensions until keyboard is gone
                while(currentBottomLocation != layoutBottom && started.get())
                {
                    synchronized(this)
                    {
                        try
                        {
                            // Re-check at most every 500 ms while the keyboard stays up.
                            wait(500);
                        } catch (InterruptedException e)
                        {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                    currentBottomLocation = getLayoutCoordinates();
                }

                if(started.get())
                    mCallback.onSoftKeyboardHide();

                // if keyboard has been opened clicking and EditText.
                if(isKeyboardShow && started.get())
                    isKeyboardShow = false;

                // if an EditText is focused, remove its focus (on UI thread)
                if(started.get())
                    mHandler.obtainMessage(CLEAR_FOCUS).sendToTarget();
            }
        }

        // Wakes run() out of its initial wait() when a keyboard open is requested.
        public void keyboardOpened()
        {
            synchronized(this)
            {
                notify();
            }
        }

        // Clears the running flag and wakes the thread so it can exit promptly.
        public void stopThread()
        {
            synchronized(this)
            {
                started.set(false);
                notify();
            }
        }
    }
}
| |
package com.medievallords.carbyne.utils;
import com.medievallords.carbyne.Carbyne;
import com.medievallords.carbyne.gear.types.carbyne.CarbyneWeapon;
import com.medievallords.carbyne.listeners.CombatTagListeners;
import com.medievallords.carbyne.profiles.Profile;
import com.medievallords.carbyne.squads.Squad;
import com.medievallords.carbyne.squads.SquadType;
import com.medievallords.carbyne.staff.StaffManager;
import com.medievallords.carbyne.utils.scoreboard.Board;
import com.medievallords.carbyne.utils.scoreboard.BoardCooldown;
import com.medievallords.carbyne.utils.scoreboard.BoardFormat;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;
import org.bukkit.scheduler.BukkitRunnable;
import java.text.DecimalFormat;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
/**
 * Created by Calvin on 3/13/2017
 * <p>
 * for the Carbyne project.
 *
 * Builds the per-player sidebar scoreboard: health/stamina, PvP protection,
 * squad roster, weapon special charge and the various cooldown timers. Also
 * runs an async task that scrolls the board title back and forth.
 */
public class CarbyneBoardAdapter {

    //private Carbyne main;
    private String title = "&b&lMedieval Lords";
    //private ProfileManager profileManager;
    //private SquadManager squadManager;
    private ColorScrollPlus colorScrollPlus;

    /**
     * Starts a repeating async task (every 3 ticks) that scrolls the board
     * title, bouncing direction at either end of the string.
     */
    public CarbyneBoardAdapter() {
        this.colorScrollPlus = new ColorScrollPlus(ChatColor.AQUA, "Medieval Lords", "&f", "&b", "&f", false, false, ColorScrollPlus.ScrollType.FORWARD);

        new BukkitRunnable() {
            @Override
            public void run() {
                // Reverse scroll direction when either end is reached.
                if (colorScrollPlus.getScrollType() == ColorScrollPlus.ScrollType.FORWARD) {
                    if (colorScrollPlus.getPosition() >= colorScrollPlus.getString().length())
                        colorScrollPlus.setScrollType(ColorScrollPlus.ScrollType.BACKWARD);
                } else if (colorScrollPlus.getPosition() <= -1)
                    colorScrollPlus.setScrollType(ColorScrollPlus.ScrollType.FORWARD);

                setTitle(colorScrollPlus.next());
            }
        }.runTaskTimerAsynchronously(Carbyne.getInstance(), 0L, 3);
    }

    /** Returns the current (scrolling) board title; the player is unused. */
    public String getTitle(Player player) {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    /**
     * Builds the sidebar lines for one player. Staff in staff-mode get the
     * dedicated staff board instead.
     *
     * @param profile the player's profile (PvP protection time)
     * @param player  the player the board is rendered for
     * @param board   per-player board state (cooldown lookup)
     * @param set     active cooldown entries to render
     * @return ordered list of color-coded scoreboard lines
     */
    public List<String> getScoreboard(Profile profile, Player player, Board board, Set<BoardCooldown> set) {
        ArrayList<String> lines = new ArrayList<>();
        // Typed iterator (the previous raw Iterator required an unchecked cast).
        Iterator<BoardCooldown> itr = set.iterator();

        if (StaticClasses.staffManager.getStaffModePlayers().contains(player.getUniqueId()))
            return staffScoreboard(player);

        if (player.hasPermission("carbyne.staff.staffmode"))
            lines.add("&7Vanished&c: " + StaticClasses.staffManager.isVanished(player));

        if (profile.getRemainingPvPTime() > 1) {
            lines.add(" ");
            lines.add("&dProtection&7: " + formatTime(profile.getRemainingPvPTime()));
        }

        PlayerHealth playerHealth = PlayerHealth.getPlayerHealth(player.getUniqueId());
        lines.add(" ");
        lines.add("&aHealth&7: " + (int) playerHealth.getHealth());
        lines.add(" ");
        lines.add("&aStamina&7: " + playerHealth.getStamina());

        if (StaticClasses.squadManager.getSquad(player.getUniqueId()) != null) {
            Squad squad = StaticClasses.squadManager.getSquad(player.getUniqueId());

            if (squad.getMembers().size() > 0) {
                lines.add(" ");
                lines.add("&dSquad [&7" + (squad.getType() == SquadType.PUBLIC ? "&7" : "&c") + squad.getType().toString().toLowerCase().substring(0, 1).toUpperCase() + squad.getType().toString().toLowerCase().substring(1) + "&d]:");

                for (UUID member : squad.getAllPlayers())
                    if (!member.equals(player.getUniqueId())) {
                        // Fix: squad members may be offline, in which case
                        // Bukkit.getPlayer returns null — skip them instead of NPEing.
                        Player memberPlayer = Bukkit.getPlayer(member);
                        if (memberPlayer == null)
                            continue;

                        String memberName = memberPlayer.getName().length() > 7 ? memberPlayer.getName().substring(0, 8) : memberPlayer.getName();
                        // Fix: UUIDs must be compared with equals(), not ==
                        // (reference equality made the leader bold only by accident).
                        lines.add(" &7" + (member.equals(squad.getLeader()) ? "&l" : "") + memberName + " " + formatHealth(memberPlayer.getHealth()));
                    }
            }
        }

        // Once the "target" cooldown expires, clear any stale squad target.
        if (board.getCooldown("target") == null) {
            if (StaticClasses.squadManager.getSquad(player.getUniqueId()) != null) {
                Squad squad = StaticClasses.squadManager.getSquad(player.getUniqueId());

                if (squad.getTargetUUID() != null)
                    squad.setTargetUUID(null);

                if (squad.getTargetSquad() != null)
                    squad.setTargetSquad(null);
            }
        }

        ItemStack hand = player.getInventory().getItemInHand();

        if (hand != null) {
            CarbyneWeapon carbyneWeapon = StaticClasses.gearManager.getCarbyneWeapon(hand);

            if (carbyneWeapon != null && carbyneWeapon.getSpecial() != null) {
                lines.add(" ");
                lines.add("&dCharge&7: " + formatCharge(carbyneWeapon.getSpecialCharge(hand), carbyneWeapon.getSpecial().getRequiredCharge()));
            }
        }

        try {
            while (itr.hasNext()) {
                BoardCooldown cooldown = itr.next();

                if (cooldown.getId().equals("logout")) {
                    lines.add(" ");
                    lines.add("&dLogout&7: " + cooldown.getFormattedString(BoardFormat.SECONDS));
                }

                if (cooldown.getId().equals("target"))
                    if (StaticClasses.squadManager.getSquad(player.getUniqueId()) != null) {
                        Squad squad = StaticClasses.squadManager.getSquad(player.getUniqueId());

                        if (squad.getTargetUUID() != null || squad.getTargetSquad() != null) {
                            lines.add(" ");
                            lines.add("&dTarget&7: " + (squad.getTargetSquad() != null ? Bukkit.getPlayer(squad.getTargetSquad().getLeader()).getName() + "'s Squad" : Bukkit.getPlayer(squad.getTargetUUID()).getName()));
                        }
                    }

                if (cooldown.getId().equals("combattag"))
                    if (CombatTagListeners.isInCombat(player.getUniqueId())) {
                        lines.add(" ");
                        lines.add("&dCombat Timer&7: " + cooldown.getFormattedString(BoardFormat.SECONDS));
                    }

                if (cooldown.getId().equals("potion")) {
                    lines.add(" ");
                    lines.add("&dPotion&7: " + cooldown.getFormattedString(BoardFormat.SECONDS));
                }

                if (cooldown.getId().equals("enderpearl")) {
                    lines.add(" ");
                    lines.add("&dEnderpearl&7: " + cooldown.getFormattedString(BoardFormat.SECONDS));
                }

                if (cooldown.getId().equals("skill")) {
                    lines.add(" ");
                    lines.add("&dSkill&7: " + cooldown.getFormattedString(BoardFormat.SECONDS));
                }

                if (cooldown.getId().equals("special")) {
                    lines.add(" ");
                    lines.add("&dSpecial&7: " + cooldown.getFormattedString(BoardFormat.SECONDS));
                }
            }
        } catch (Exception e) {
            Carbyne.getInstance().getLogger().log(Level.WARNING, e.getMessage());
        }

        if (lines.size() >= 1) {
            lines.add(0, "&7&m-------------------");
            lines.add(" ");
            lines.add("&7&owww.playminecraft.org");
        }

        return lines;
    }

    /** Builds the simplified board shown to staff in staff-mode. */
    private List<String> staffScoreboard(Player player) {
        StaffManager staffManager = StaticClasses.staffManager;
        ArrayList<String> lines = new ArrayList<>();

        lines.add("&7Vanished: &c" + staffManager.isVanished(player));
        lines.add(" ");
        lines.add("&7Chat Muted: &a" + (staffManager.isChatMuted() ? "&a" + staffManager.isChatMuted() : "&c" + staffManager.isChatMuted()));
        lines.add("&7Chat Speed: &a" + staffManager.getSlowChatTime() + "s");
        lines.add(" ");
        lines.add("&7Flying: " + (player.isFlying() ? "&atrue" : "&cfalse"));

        if (lines.size() >= 1) {
            lines.add(0, "&7&m-------------------");
            lines.add(" ");
            lines.add("&7&owww.playminecraft.org");
        }

        return lines;
    }

    /**
     * Renders a 10-segment charge bar: filled segments for accumulated charge,
     * color-coded (&a/&e/&c) by how close the bar is to full, then the
     * remaining segments in &7.
     */
    String formatCharge(int charge, int required) {
        // Fix: "required / 10" was integer division, which truncated the
        // segment size (e.g. required = 15 gave part = 1.0 instead of 1.5 and
        // over-filled the bar).
        double part = required / 10.0;
        double at = part;
        StringBuilder s = new StringBuilder();

        while (at <= charge) {
            s.append("\u2758");
            at += part;
        }

        int length = 10 - s.length();

        if (length <= 0) {
            s.insert(0, "&a");
            return s.toString();
        } else if (length <= 4)
            s.insert(0, "&a");
        else if (length <= 7)
            s.insert(0, "&e");
        else
            s.insert(0, "&c");

        s.append("&7");

        for (int i = 0; i < length; i++)
            s.append("\u2758");

        return s.toString();
    }

    /**
     * Formats a raw health value (0-20) as whole hearts with a color code
     * that degrades from green to red as health drops. Boundary values fall
     * into the first matching range.
     */
    String formatHealth(double health) {
        double hearts = (health / 2);
        DecimalFormat format = new DecimalFormat("#");

        if (hearts <= 10 && hearts >= 7.5)
            return String.format(" &a%s \u2764", format.format(hearts));
        else if (hearts <= 7.5 && hearts >= 5)
            return String.format(" &e%s \u2764", format.format(hearts));
        else if (hearts <= 5 && hearts >= 2.5)
            return String.format(" &6%s \u2764", format.format(hearts));
        else
            return String.format(" &c%s \u2764", format.format(hearts));
    }

    /** Formats a millisecond duration as mm:ss (hours wrap). */
    String formatTime(long millis) {
        return String.format("%02d:%02d",
                TimeUnit.MILLISECONDS.toMinutes(millis) % TimeUnit.HOURS.toMinutes(1),
                TimeUnit.MILLISECONDS.toSeconds(millis) % TimeUnit.MINUTES.toSeconds(1));
    }
}
| |
/*
* Copyright (c) 2012 Joe Rowley
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package com.mobileobservinglog;
import java.util.ArrayList;
import com.mobileobservinglog.R;
import com.mobileobservinglog.support.database.CatalogsDAO;
import com.mobileobservinglog.support.database.DatabaseHelper;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.RelativeLayout;
import android.widget.TextView;
/**
 * Lists the astronomy catalogs that the user has installed. Tapping a catalog
 * opens its object index; when nothing is installed the search button becomes
 * an "Available Catalogs" shortcut instead.
 */
public class CatalogsScreen extends ActivityBase{
    //gather resources
    RelativeLayout body;
    Button searchButton;
    TextView nothingHere;
    ArrayList<Catalog> catalogList;
    int listLocation = -1; // first visible list index saved across pause/mode toggles (-1 = unset)

    @Override
    public void onCreate(Bundle icicle) {
        Log.d("JoeDebug", "CatalogsScreen onCreate. Current session mode is " + settingsRef.getSessionMode());
        super.onCreate(icicle);

        customizeBrightness.setDimButtons(settingsRef.getButtonBrightness());

        //setup the layout
        setContentView(settingsRef.getCatalogsLayout());
        body = (RelativeLayout)findViewById(R.id.catalogs_root);
    }

    @Override
    public void onPause() {
        super.onPause();
        // Remember the scroll position so setLayout() can restore it on resume.
        ListView list = getListView();
        listLocation = list.getFirstVisiblePosition();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
    }

    //When we resume, we need to make sure we have the right layout set, in case the user has changed the session mode.
    @Override
    public void onResume() {
        Log.d("JoeDebug", "CatalogsScreen onResume. Current session mode is " + settingsRef.getSessionMode());
        super.onResume();
        setLayout();
    }

    //Used by the Toggle Mode menu item method in ActivityBase. Reset the layout and force the redraw
    @Override
    public void setLayout(){
        setContentView(settingsRef.getCatalogsLayout());
        super.setLayout();
        findButtonAddListener();
        prepareListView();
        body.postInvalidate();
        // Restore the previously saved scroll position, if still valid.
        if(listLocation > 0) {
            ListView list = getListView();
            if(list.getCount() > listLocation) {
                list.setSelection(listLocation);
            }
        }
    }

    // Wires the search button to launch the search screen.
    private void findButtonAddListener() {
        searchButton = (Button)findViewById(R.id.search_button);
        searchButton.setOnClickListener(searchCatalogs);
    }

    private final Button.OnClickListener searchCatalogs = new Button.OnClickListener() {
        public void onClick(View view){
            Intent intent = new Intent(CatalogsScreen.this.getApplication(), SearchScreen.class);
            startActivity(intent);
        }
    };

    private final Button.OnClickListener addCatalogs = new Button.OnClickListener() {
        public void onClick(View view){
            Intent intent = new Intent(CatalogsScreen.this.getApplication(), AddCatalogsScreen.class);
            startActivity(intent);
        }
    };

    /**
     * Internal method to handle preparation of the list view upon creation or to be called by setLayout when session mode changes or onResume.
     */
    protected void prepareListView()
    {
        catalogList = new ArrayList<Catalog>();
        //Get the list of saved telescopes and populate the list
        CatalogsDAO db = new CatalogsDAO(this);
        Cursor catalogs = db.getAvailableCatalogs();
        // Fix: close the cursor and DAO even if an exception occurs while
        // iterating — previously a failure mid-loop leaked both.
        try {
            catalogs.moveToFirst();

            for (int i = 0; i < catalogs.getCount(); i++)
            {
                Log.d("JoeDebug", "cursor size is " + catalogs.getCount());
                String name = catalogs.getString(0);
                String installed = catalogs.getString(1);
                String count = catalogs.getString(2);

                // Only installed catalogs are listed. "Yes".equals also guards
                // against a null "installed" column.
                if ("Yes".equals(installed)){
                    int logged = db.getNumLogged(name);
                    catalogList.add(new Catalog(name, count, logged));
                }

                catalogs.moveToNext();
            }
        } finally {
            catalogs.close();
            db.close();
        }

        if (catalogList.size() == 0){
            // Nothing installed: show the empty-state text and repurpose the
            // search button to open the catalog-install screen.
            TextView nothingLeft = (TextView)findViewById(R.id.nothing_here);
            nothingLeft.setVisibility(View.VISIBLE);
            searchButton.setText("Available Catalogs");
            searchButton.setOnClickListener(addCatalogs);
        }
        else{
            Log.d("JoeTest", "List size is " + catalogList.size());
            setListAdapter(new CatalogAdapter(this, settingsRef.getCatalogsList(), catalogList));
        }
    }

    /**
     * Take action on each of the list items when clicked. We need to let the user edit or remove their equipment profile
     */
    @Override
    protected void onListItemClick(ListView l, View v, int position, long id)
    {
        String catalog = catalogList.get(position).name;
        Intent intent = new Intent(this.getApplication(), ObjectIndexScreen.class);
        intent.putExtra("com.mobileobservationlog.indexType", "catalog");
        intent.putExtra("com.mobileobservationlog.catalogName", catalog);
        intent.putExtra("com.mobileobservationlog.listName", "None");
        startActivity(intent);
    }

    @Override
    public void toggleMode() {
        // Save scroll position before the mode switch rebuilds the layout.
        ListView list = getListView();
        listLocation = list.getFirstVisiblePosition();
        super.toggleMode();
    }

    //////////////////////////////////////
    // Catalog List Inflation Utilities //
    //////////////////////////////////////

    /** Simple value holder for one catalog row. */
    static class Catalog{
        String name;
        String count;
        int logged;

        Catalog(String catalogName, String objectCount, int numLogged){
            name = catalogName;
            count = objectCount;
            logged = numLogged;
        }
    }

    /** List adapter using the view-holder pattern via CatalogWrapper tags. */
    class CatalogAdapter extends ArrayAdapter<Catalog>{
        int listLayout;

        CatalogAdapter(Context context, int listLayout, ArrayList<Catalog> list){
            super(context, listLayout, R.id.catalog_name, list);
            this.listLayout = listLayout;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent){
            CatalogWrapper wrapper = null;

            if (convertView == null){
                convertView = getLayoutInflater().inflate(listLayout, null);
                wrapper = new CatalogWrapper(convertView);
                convertView.setTag(wrapper);
            }
            else{
                wrapper = (CatalogWrapper)convertView.getTag();
            }

            wrapper.populateFrom(getItem(position));
            return convertView;
        }
    }

    /** View holder that lazily finds and populates one row's child views. */
    class CatalogWrapper{
        private TextView name = null;
        private TextView specs = null;
        private ImageView icon = null;
        private View row = null;

        CatalogWrapper(View row){
            this.row = row;
        }

        TextView getName(){
            if (name == null){
                name = (TextView)row.findViewById(R.id.catalog_name);
            }
            return name;
        }

        TextView getSpecs(){
            if (specs == null){
                specs = (TextView)row.findViewById(R.id.catalog_specs);
            }
            return specs;
        }

        ImageView getIcon(){
            if (icon == null){
                icon = (ImageView)row.findViewById(R.id.catalog_icon);
            }
            return icon;
        }

        void populateFrom(Catalog catalog){
            getName().setText(catalog.name);
            getSpecs().setText(formatStats(catalog));
            getIcon().setImageResource(getIcon(catalog.name));
        }

        // Maps a catalog name to its icon resource; 0 (no icon) if unrecognized.
        private int getIcon(String catalogName){
            int retVal = 0;
            if(catalogName.equals("Messier Catalog")){
                retVal = settingsRef.getMessierIcon();
            }
            else if (catalogName.contains("NGC ")){
                retVal = settingsRef.getNgcIcon();
            }
            else if (catalogName.contains("IC ")){
                retVal = settingsRef.getIcIcon();
            }
            return retVal;
        }

        // Builds e.g. "12 of 110 logged - (10%)" for the row's subtitle.
        private String formatStats(Catalog catalog){
            double countDouble = Double.parseDouble(catalog.count);
            double percentFloor = Math.floor((catalog.logged/countDouble) * 100);
            String retVal = String.format("%d of %s logged - (%d%%)", catalog.logged, catalog.count, (int)percentFloor);
            return retVal;
        }
    }
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.apple;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.DefaultLabelConverter;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.LabelConverter;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.rules.apple.AppleConfiguration.ConfigurationDistinguisher;
import com.google.devtools.build.lib.rules.apple.Platform.PlatformType;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory;
import com.google.devtools.common.options.Converters.CommaSeparatedOptionListConverter;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import java.util.List;
/**
* Command-line options for building for Apple platforms.
*/
public class AppleCommandLineOptions extends FragmentOptions {
@VisibleForTesting
public static final String DEFAULT_MINIMUM_IOS = "7.0";
@Option(
name = "xcode_version",
defaultValue = "null",
category = "build",
converter = DottedVersionConverter.class,
help =
"If specified, uses xcode of the given version for relevant build actions. "
+ "If unspecified, uses the executor default version of xcode."
)
// TODO(bazel-team): This should be of String type, to allow referencing an alias based
// on an xcode_config target.
public DottedVersion xcodeVersion;
@Option(
name = "ios_sdk_version",
defaultValue = "null",
converter = DottedVersionConverter.class,
category = "build",
help = "Specifies the version of the iOS SDK to use to build iOS applications."
)
public DottedVersion iosSdkVersion;
@Option(
name = "watchos_sdk_version",
defaultValue = "null",
converter = DottedVersionConverter.class,
category = "build",
help = "Specifies the version of the WatchOS SDK to use to build WatchOS applications."
)
public DottedVersion watchOsSdkVersion;
@Option(
name = "tvos_sdk_version",
defaultValue = "null",
converter = DottedVersionConverter.class,
category = "build",
help = "Specifies the version of the AppleTVOS SDK to use to build AppleTVOS applications."
)
public DottedVersion tvOsSdkVersion;
@Option(
name = "macosx_sdk_version",
defaultValue = "null",
converter = DottedVersionConverter.class,
category = "build",
help = "Specifies the version of the Mac OS X SDK to use to build Mac OS X applications."
)
public DottedVersion macOsXSdkVersion;
@Option(
name = "ios_minimum_os",
defaultValue = DEFAULT_MINIMUM_IOS,
category = "flags",
converter = DottedVersionConverter.class,
help = "Minimum compatible iOS version for target simulators and devices."
)
public DottedVersion iosMinimumOs;
@Option(
name = "watchos_minimum_os",
defaultValue = "null",
category = "flags",
converter = DottedVersionConverter.class,
help = "Minimum compatible watchOS version for target simulators and devices."
)
public DottedVersion watchosMinimumOs;
@Option(
name = "tvos_minimum_os",
defaultValue = "null",
category = "flags",
converter = DottedVersionConverter.class,
help = "Minimum compatible tvOS version for target simulators and devices."
)
public DottedVersion tvosMinimumOs;
@VisibleForTesting public static final String DEFAULT_IOS_SDK_VERSION = "8.4";
@VisibleForTesting public static final String DEFAULT_WATCHOS_SDK_VERSION = "2.0";
@VisibleForTesting public static final String DEFAULT_MACOSX_SDK_VERSION = "10.10";
@VisibleForTesting public static final String DEFAULT_TVOS_SDK_VERSION = "9.0";
@VisibleForTesting static final String DEFAULT_IOS_CPU = "x86_64";
/**
* The default watchos CPU value.
*/
public static final String DEFAULT_WATCHOS_CPU = "i386";
/**
* The default tvOS CPU value.
*/
public static final String DEFAULT_TVOS_CPU = "x86_64";
@Option(name = "ios_cpu",
defaultValue = DEFAULT_IOS_CPU,
category = "build",
help = "Specifies to target CPU of iOS compilation.")
public String iosCpu;
@Option(
name = "apple_crosstool_top",
defaultValue = "@bazel_tools//tools/cpp:toolchain",
category = "version",
converter = LabelConverter.class,
help = "The label of the crosstool package to be used in Apple and Objc rules and their"
+ " dependencies."
)
public Label appleCrosstoolTop;
@Option(name = "apple_platform_type",
defaultValue = "IOS",
category = "undocumented",
converter = PlatformTypeConverter.class,
help =
"Don't set this value from the command line - it is derived from other flags and "
+ "configuration transitions derived from rule attributes")
public PlatformType applePlatformType;
@Option(name = "apple_split_cpu",
defaultValue = "",
category = "undocumented",
help =
"Don't set this value from the command line - it is derived from other flags and "
+ "configuration transitions derived from rule attributes")
public String appleSplitCpu;
// This option exists because two configurations are not allowed to have the same cache key
// (partially derived from options). Since we have multiple transitions that may result in the
// same configuration values at runtime we need an artificial way to distinguish between them.
// This option must only be set by those transitions for this purpose.
// TODO(bazel-team): Remove this once we have dynamic configurations but make sure that different
// configurations (e.g. by min os version) always use different output paths.
@Option(name = "apple configuration distinguisher",
defaultValue = "UNKNOWN",
converter = ConfigurationDistinguisherConverter.class,
category = "undocumented")
public ConfigurationDistinguisher configurationDistinguisher;
@Option(name = "ios_multi_cpus",
converter = CommaSeparatedOptionListConverter.class,
defaultValue = "",
category = "flags",
help = "Comma-separated list of architectures to build an ios_application with. The result "
+ "is a universal binary containing all specified architectures.")
public List<String> iosMultiCpus;
@Option(name = "watchos_cpus",
converter = CommaSeparatedOptionListConverter.class,
defaultValue = DEFAULT_WATCHOS_CPU,
category = "flags",
help = "Comma-separated list of architectures for which to build Apple watchOS binaries.")
public List<String> watchosCpus;
@Option(name = "tvos_cpus",
converter = CommaSeparatedOptionListConverter.class,
defaultValue = DEFAULT_TVOS_CPU,
category = "flags",
help = "Comma-separated list of architectures for which to build Apple tvOS binaries.")
public List<String> tvosCpus;
@Option(name = "default_ios_provisioning_profile",
defaultValue = "",
category = "undocumented",
converter = DefaultProvisioningProfileConverter.class)
public Label defaultProvisioningProfile;
@Option(
name = "xcode_version_config",
defaultValue = "@local_config_xcode//:host_xcodes",
category = "undocumented",
converter = LabelConverter.class,
help =
"The label of the xcode_config rule to be used for selecting the xcode version "
+ "in the build configuration"
)
public Label xcodeVersionConfig;
/**
* The default label of the build-wide {@code xcode_config} configuration rule. This can be
* changed from the default using the {@code xcode_version_config} build flag.
*/
// TODO(cparsons): Update all callers to reference the actual xcode_version_config flag value.
static final String DEFAULT_XCODE_VERSION_CONFIG_LABEL = "//tools/objc:host_xcodes";
/** Converter for --default_ios_provisioning_profile. */
public static class DefaultProvisioningProfileConverter extends DefaultLabelConverter {
public DefaultProvisioningProfileConverter() {
super("//tools/objc:default_provisioning_profile");
}
}
@Option(
name = "xcode_toolchain",
defaultValue = "null",
category = "flags",
help = "The identifier of an Xcode toolchain to use for builds. Currently only the toolchains "
+ "that ship with Xcode are supported. For example, in addition to the default toolchain"
+ " Xcode 8 has 'com.apple.dt.toolchain.Swift_2_3' which can be used for building legacy"
+ " Swift code."
)
public String xcodeToolchain;
@Option(name = "apple_bitcode",
converter = AppleBitcodeMode.Converter.class,
// TODO(blaze-team): Default to embedded_markers when fully implemented.
defaultValue = "none",
category = "flags",
help = "Specify the Apple bitcode mode for compile steps. "
+ "Values: 'none', 'embedded_markers', 'embedded'.")
public AppleBitcodeMode appleBitcodeMode;
private Platform getPlatform() {
for (String architecture : iosMultiCpus) {
if (Platform.forTarget(PlatformType.IOS, architecture) == Platform.IOS_DEVICE) {
return Platform.IOS_DEVICE;
}
}
return Platform.forTarget(PlatformType.IOS, iosCpu);
}
@Override
public void addAllLabels(Multimap<String, Label> labelMap) {
if (getPlatform() == Platform.IOS_DEVICE) {
labelMap.put("default_provisioning_profile", defaultProvisioningProfile);
}
labelMap.put("xcode_version_config", xcodeVersionConfig);
}
  /**
   * Represents the Apple Bitcode mode for compilation steps.
   *
   * <p>Bitcode is an intermediate representation of a compiled program. For many platforms, Apple
   * requires app submissions to contain bitcode in order to be uploaded to the app store.
   *
   * <p>This is a build-wide value, as bitcode mode needs to be consistent among a target and its
   * compiled dependencies.
   */
  @SkylarkModule(
    name = "apple_bitcode_mode",
    category = SkylarkModuleCategory.NONE,
    doc =
        "Apple Bitcode mode for compilation steps. Possible values are \"none\", "
            + "\"embedded\", and \"embedded_markers\""
  )
  public enum AppleBitcodeMode {
    /** Do not compile bitcode. */
    NONE("none", ImmutableList.<String>of()),
    /**
     * Compile the minimal set of bitcode markers. This is often the best option for developer/debug
     * builds.
     */
    EMBEDDED_MARKERS(
        "embedded_markers", ImmutableList.of("bitcode_embedded_markers"), "-fembed-bitcode-marker"),
    /** Fully embed bitcode in compiled files. This is often the best option for release builds. */
    EMBEDDED("embedded", ImmutableList.of("bitcode_embedded"), "-fembed-bitcode");

    // Flag value as written on the command line; also the toString() form.
    private final String mode;
    // Crosstool feature names activated by this mode (may be empty for NONE).
    private final ImmutableList<String> featureNames;
    // Clang flags added to both compile and link actions for this mode.
    private final ImmutableList<String> clangFlags;

    private AppleBitcodeMode(
        String mode, ImmutableList<String> featureNames, String... clangFlags) {
      this.mode = mode;
      this.featureNames = featureNames;
      this.clangFlags = ImmutableList.copyOf(clangFlags);
    }

    @Override
    public String toString() {
      return mode;
    }

    /** Returns the names of any crosstool features that correspond to this bitcode mode. */
    public ImmutableList<String> getFeatureNames() {
      return featureNames;
    }

    /**
     * Returns the flags that should be added to compile and link actions to use this
     * bitcode setting.
     */
    public ImmutableList<String> getCompileAndLinkFlags() {
      return clangFlags;
    }

    /**
     * Converts to {@link AppleBitcodeMode}.
     */
    public static class Converter extends EnumConverter<AppleBitcodeMode> {
      public Converter() {
        super(AppleBitcodeMode.class, "apple bitcode mode");
      }
    }
  }
  @Override
  public FragmentOptions getHost(boolean fallback) {
    // Start from the superclass's host options, then copy over the Apple
    // options that must match between the host and target configurations.
    AppleCommandLineOptions host = (AppleCommandLineOptions) super.getHost(fallback);
    // Set options needed in the host configuration.
    host.xcodeVersionConfig = xcodeVersionConfig;
    host.xcodeVersion = xcodeVersion;
    host.iosSdkVersion = iosSdkVersion;
    host.watchOsSdkVersion = watchOsSdkVersion;
    host.tvOsSdkVersion = tvOsSdkVersion;
    host.macOsXSdkVersion = macOsXSdkVersion;
    host.appleBitcodeMode = appleBitcodeMode;
    return host;
  }
  /** Converter for the Apple configuration distinguisher. */
  public static final class ConfigurationDistinguisherConverter
      extends EnumConverter<ConfigurationDistinguisher> {
    public ConfigurationDistinguisherConverter() {
      // The second argument is the human-readable name used in error messages.
      super(ConfigurationDistinguisher.class, "Apple rule configuration distinguisher");
    }
  }

  /** Flag converter for {@link PlatformType}. */
  public static final class PlatformTypeConverter
      extends EnumConverter<PlatformType> {
    public PlatformTypeConverter() {
      // The second argument is the human-readable name used in error messages.
      super(PlatformType.class, "Apple platform type");
    }
  }
}
| |
/*
* Copyright (c) 2010-2020 Nathan Rajlich
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
*/
package org.java_websocket.server;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.java_websocket.WebSocket;
import org.java_websocket.drafts.Draft;
import org.java_websocket.drafts.Draft_6455;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.util.SocketUtil;
import org.junit.Test;
public class WebSocketServerTest {

  /**
   * Argument validation in the constructor: a null address, a non-positive
   * decoder count, or a null connection container must be rejected with
   * {@link IllegalArgumentException}; valid combinations (including a null
   * draft list, which means "use defaults") must be accepted.
   */
  @Test
  public void testConstructor() {
    List<Draft> draftCollection = Collections.<Draft>singletonList(new Draft_6455());
    Collection<WebSocket> webSocketCollection = new HashSet<WebSocket>();
    InetSocketAddress inetAddress = new InetSocketAddress(1337);
    try {
      new MyWebSocketServer(null, 1, draftCollection, webSocketCollection);
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      //OK
    }
    try {
      new MyWebSocketServer(inetAddress, 0, draftCollection, webSocketCollection);
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      //OK
    }
    try {
      new MyWebSocketServer(inetAddress, -1, draftCollection, webSocketCollection);
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      //OK
    }
    // Extreme negative value; the previous revision tested this twice.
    try {
      new MyWebSocketServer(inetAddress, Integer.MIN_VALUE, draftCollection, webSocketCollection);
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      //OK
    }
    try {
      new MyWebSocketServer(inetAddress, 1, draftCollection, null);
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      //OK
    }
    try {
      new MyWebSocketServer(inetAddress, 1, draftCollection, webSocketCollection);
      // OK
    } catch (IllegalArgumentException e) {
      fail("Should not fail");
    }
    try {
      // A null draft collection is explicitly permitted.
      new MyWebSocketServer(inetAddress, 1, null, webSocketCollection);
      // OK
    } catch (IllegalArgumentException e) {
      fail("Should not fail");
    }
  }

  /** getAddress() must echo back the address the server was constructed with. */
  @Test
  public void testGetAddress() throws IOException {
    int port = SocketUtil.getAvailablePort();
    InetSocketAddress inetSocketAddress = new InetSocketAddress(port);
    MyWebSocketServer server = new MyWebSocketServer(port);
    assertEquals(inetSocketAddress, server.getAddress());
  }

  /** The drafts passed at construction time must be returned by getDraft(). */
  @Test
  public void testGetDrafts() {
    List<Draft> draftCollection = Collections.<Draft>singletonList(new Draft_6455());
    Collection<WebSocket> webSocketCollection = new HashSet<WebSocket>();
    InetSocketAddress inetAddress = new InetSocketAddress(1337);
    MyWebSocketServer server = new MyWebSocketServer(inetAddress, 1, draftCollection,
        webSocketCollection);
    assertEquals(1, server.getDraft().size());
    assertEquals(draftCollection.get(0), server.getDraft().get(0));
  }

  /**
   * getPort() returns the configured port before binding; for port 0 the
   * actual (non-zero) bound port must be visible once the server has started.
   */
  @Test
  public void testGetPort() throws IOException, InterruptedException {
    int port = SocketUtil.getAvailablePort();
    CountDownLatch countServerDownLatch = new CountDownLatch(1);
    MyWebSocketServer server = new MyWebSocketServer(port);
    assertEquals(port, server.getPort());
    server = new MyWebSocketServer(0, countServerDownLatch);
    assertEquals(0, server.getPort());
    server.start();
    // onStart() counts the latch down, so after await() the socket is bound.
    countServerDownLatch.await();
    assertNotEquals(0, server.getPort());
    // Shut the server down so the selector thread and port are released.
    server.stop();
  }

  /** Default backlog is -1 (system default) and the setter must take effect. */
  @Test
  public void testMaxPendingConnections() {
    MyWebSocketServer server = new MyWebSocketServer(1337);
    // assertEquals takes the expected value first.
    assertEquals(-1, server.getMaxPendingConnections());
    server.setMaxPendingConnections(10);
    assertEquals(10, server.getMaxPendingConnections());
  }

  /**
   * broadcast() must reject a null payload or a null receiver collection and
   * accept an empty receiver collection.
   */
  @Test
  public void testBroadcast() {
    MyWebSocketServer server = new MyWebSocketServer(1337);
    try {
      server.broadcast((byte[]) null, Collections.<WebSocket>emptyList());
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      // OK
    }
    try {
      server.broadcast((ByteBuffer) null, Collections.<WebSocket>emptyList());
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      // OK
    }
    try {
      server.broadcast((String) null, Collections.<WebSocket>emptyList());
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      // OK
    }
    try {
      server.broadcast(new byte[]{(byte) 0xD0}, null);
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      // OK
    }
    try {
      server.broadcast(ByteBuffer.wrap(new byte[]{(byte) 0xD0}), null);
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      // OK
    }
    try {
      server.broadcast("", null);
      fail("Should fail");
    } catch (IllegalArgumentException e) {
      // OK
    }
    try {
      server.broadcast("", Collections.<WebSocket>emptyList());
      // OK
    } catch (IllegalArgumentException e) {
      fail("Should not fail");
    }
  }

  /** Minimal concrete server; optionally counts a latch down in onStart(). */
  private static class MyWebSocketServer extends WebSocketServer {

    private CountDownLatch serverLatch = null;

    public MyWebSocketServer(InetSocketAddress address, int decodercount, List<Draft> drafts,
        Collection<WebSocket> connectionscontainer) {
      super(address, decodercount, drafts, connectionscontainer);
    }

    public MyWebSocketServer(int port, CountDownLatch serverLatch) {
      super(new InetSocketAddress(port));
      this.serverLatch = serverLatch;
    }

    public MyWebSocketServer(int port) {
      this(port, null);
    }

    @Override
    public void onOpen(WebSocket conn, ClientHandshake handshake) {
    }

    @Override
    public void onClose(WebSocket conn, int code, String reason, boolean remote) {
    }

    @Override
    public void onMessage(WebSocket conn, String message) {
    }

    @Override
    public void onError(WebSocket conn, Exception ex) {
      ex.printStackTrace();
    }

    @Override
    public void onStart() {
      if (serverLatch != null) {
        serverLatch.countDown();
      }
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zookeeper.server.quorum;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetSocketAddress;
import java.net.SocketException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.zookeeper.jmx.MBeanRegistry;
import org.apache.zookeeper.jmx.ZKMBeanInfo;
import org.apache.zookeeper.server.ServerCnxnFactory;
import org.apache.zookeeper.server.ZKDatabase;
import org.apache.zookeeper.server.ZooKeeperServer;
import org.apache.zookeeper.server.persistence.FileTxnSnapLog;
import org.apache.zookeeper.server.quorum.flexible.QuorumMaj;
import org.apache.zookeeper.server.quorum.flexible.QuorumVerifier;
import org.apache.zookeeper.server.util.ZxidUtils;
/**
* This class manages the quorum protocol. There are three states this server
* can be in:
* <ol>
* <li>Leader election - each server will elect a leader (proposing itself as a
* leader initially).</li>
* <li>Follower - the server will synchronize with the leader and replicate any
* transactions.</li>
* <li>Leader - the server will process requests and forward them to followers.
* A majority of followers must log the request before it can be accepted.
* </ol>
*
* This class will setup a datagram socket that will always respond with its
* view of the current leader. The response will take the form of:
*
* <pre>
* int xid;
*
* long myid;
*
* long leader_id;
*
* long leader_zxid;
* </pre>
*
* The request for the current leader will consist solely of an xid: int xid;
*/
public class QuorumPeer extends Thread implements QuorumStats.Provider {
    private static final Logger LOG = LoggerFactory.getLogger(QuorumPeer.class);

    // JMX beans; set to null again when registration fails (see run()).
    QuorumBean jmxQuorumBean;
    LocalPeerBean jmxLocalPeerBean;
    LeaderElectionBean jmxLeaderElectionBean;

    // Connection manager created for electionType 3 (FastLeaderElection).
    QuorumCnxManager qcm;

    /* ZKDatabase is a top level member of quorumpeer
     * which will be used in all the zookeeperservers
     * instantiated later. Also, it is created once on
     * bootup and only thrown away in case of a truncate
     * message from the leader
     */
    private ZKDatabase zkDb;
    /**
     * Identity and addressing information for one member of the ensemble:
     * server id, quorum (data) address, optional election address, and
     * whether the peer is a voting participant or an observer.
     */
    public static class QuorumServer {
        public QuorumServer(long id, InetSocketAddress addr,
                InetSocketAddress electionAddr) {
            this.id = id;
            this.addr = addr;
            this.electionAddr = electionAddr;
        }

        // Without an election address (used by UDP-based election modes).
        public QuorumServer(long id, InetSocketAddress addr) {
            this.id = id;
            this.addr = addr;
            this.electionAddr = null;
        }

        public QuorumServer(long id, InetSocketAddress addr,
                InetSocketAddress electionAddr, LearnerType type) {
            this.id = id;
            this.addr = addr;
            this.electionAddr = electionAddr;
            this.type = type;
        }

        // Address used for leader/follower (quorum) traffic.
        public InetSocketAddress addr;

        // Address used for leader election; may be null.
        public InetSocketAddress electionAddr;

        public long id;

        // Defaults to a full voting participant.
        public LearnerType type = LearnerType.PARTICIPANT;
    }
    /** The four states a peer can be in; see the class javadoc. */
    public enum ServerState {
        LOOKING, FOLLOWING, LEADING, OBSERVING;
    }

    /*
     * A peer can either be participating, which implies that it is willing to
     * both vote in instances of consensus and to elect or become a Leader, or
     * it may be observing in which case it isn't.
     *
     * We need this distinction to decide which ServerState to move to when
     * conditions change (e.g. which state to become after LOOKING).
     */
    public enum LearnerType {
        PARTICIPANT, OBSERVER;
    }

    /*
     * To enable observers to have no identifier, we need a generic identifier
     * at least for QuorumCnxManager. We use the following constant to as the
     * value of such a generic identifier.
     */
    static final long OBSERVER_ID = Long.MAX_VALUE;

    /*
     * Record leader election time
     */
    public long start_fle, end_fle;

    /*
     * Default value of peer is participant
     */
    private LearnerType learnerType = LearnerType.PARTICIPANT;

    /** Returns whether this peer is a voting participant or an observer. */
    public LearnerType getLearnerType() {
        return learnerType;
    }
/**
* Sets the LearnerType both in the QuorumPeer and in the peerMap
*/
public void setLearnerType(LearnerType p) {
learnerType = p;
if (quorumPeers.containsKey(this.myid)) {
this.quorumPeers.get(myid).type = p;
} else {
LOG.error("Setting LearnerType to " + p + " but " + myid
+ " not in QuorumPeers. ");
}
}
    /**
     * The servers that make up the cluster
     */
    protected Map<Long, QuorumServer> quorumPeers;

    /** Number of voting members (observers excluded, see getVotingView()). */
    public int getQuorumSize(){
        return getVotingView().size();
    }

    /**
     * QuorumVerifier implementation; default (majority).
     */
    private QuorumVerifier quorumConfig;

    /**
     * My id
     */
    private long myid;

    /**
     * get the id of this quorum peer.
     */
    public long getId() {
        return myid;
    }

    /**
     * This is who I think the leader currently is.
     */
    volatile private Vote currentVote;

    /** Returns the vote this peer currently holds (its view of the leader). */
    public synchronized Vote getCurrentVote(){
        return currentVote;
    }

    public synchronized void setCurrentVote(Vote v){
        currentVote = v;
    }

    // Cleared by shutdown(); controls the main loop in run().
    volatile boolean running = true;

    /**
     * The number of milliseconds of each tick
     */
    protected int tickTime;

    /**
     * Minimum number of milliseconds to allow for session timeout.
     * A value of -1 indicates unset, use default.
     */
    protected int minSessionTimeout = -1;

    /**
     * Maximum number of milliseconds to allow for session timeout.
     * A value of -1 indicates unset, use default.
     */
    protected int maxSessionTimeout = -1;

    /**
     * The number of ticks that the initial synchronization phase can take
     */
    protected int initLimit;

    /**
     * The number of ticks that can pass between sending a request and getting
     * an acknowledgment
     */
    protected int syncLimit;

    /**
     * The current tick
     */
    protected int tick;
    /**
     * @deprecated As of release 3.4.0, this class has been deprecated, since
     * it is used with one of the udp-based versions of leader election, which
     * we are also deprecating.
     *
     * This class simply responds to requests for the current leader of this
     * node.
     * <p>
     * The request contains just an xid generated by the requestor.
     * <p>
     * The response has the xid, the id of this server, the id of the leader,
     * and the zxid of the leader.
     *
     *
     */
    @Deprecated
    class ResponderThread extends Thread {
        ResponderThread() {
            super("ResponderThread");
        }

        // Cleared by stopLeaderElection() to end the loop below.
        volatile boolean running = true;

        @Override
        public void run() {
            try {
                byte b[] = new byte[36];
                ByteBuffer responseBuffer = ByteBuffer.wrap(b);
                DatagramPacket packet = new DatagramPacket(b, b.length);
                while (running) {
                    udpSocket.receive(packet);
                    // A valid request is exactly one 4-byte xid.
                    if (packet.getLength() != 4) {
                        LOG.warn("Got more than just an xid! Len = "
                                + packet.getLength());
                    } else {
                        responseBuffer.clear();
                        responseBuffer.getInt(); // Skip the xid
                        responseBuffer.putLong(myid);
                        Vote current = getCurrentVote();
                        // What follows myid in the response depends on role:
                        // proposed leader id/zxid (LOOKING), our own id and
                        // last proposed zxid (LEADING), or the current vote's
                        // id and the follower's zxid (FOLLOWING).
                        switch (getPeerState()) {
                        case LOOKING:
                            responseBuffer.putLong(current.getId());
                            responseBuffer.putLong(current.getZxid());
                            break;
                        case LEADING:
                            responseBuffer.putLong(myid);
                            try {
                                long proposed;
                                synchronized(leader) {
                                    proposed = leader.lastProposed;
                                }
                                responseBuffer.putLong(proposed);
                            } catch (NullPointerException npe) {
                                // This can happen in state transitions,
                                // just ignore the request
                            }
                            break;
                        case FOLLOWING:
                            responseBuffer.putLong(current.getId());
                            try {
                                responseBuffer.putLong(follower.getZxid());
                            } catch (NullPointerException npe) {
                                // This can happen in state transitions,
                                // just ignore the request
                            }
                            break;
                        case OBSERVING:
                            // Do nothing, Observers keep themselves to
                            // themselves.
                            break;
                        }
                        packet.setData(b);
                        udpSocket.send(packet);
                    }
                    // Reset the length so the next receive can fill the
                    // whole buffer again.
                    packet.setLength(b.length);
                }
            } catch (RuntimeException e) {
                LOG.warn("Unexpected runtime exception in ResponderThread",e);
            } catch (IOException e) {
                LOG.warn("Unexpected IO exception in ResponderThread",e);
            } finally {
                LOG.warn("QuorumPeer responder thread exited");
            }
        }
    }
    private ServerState state = ServerState.LOOKING;

    public synchronized void setPeerState(ServerState newState){
        state=newState;
    }

    public synchronized ServerState getPeerState(){
        return state;
    }

    // Socket used by the deprecated UDP responder (electionType 0 only).
    DatagramSocket udpSocket;

    // This peer's own quorum address, resolved in startLeaderElection().
    private InetSocketAddress myQuorumAddr;

    public InetSocketAddress getQuorumAddress(){
        return myQuorumAddr;
    }

    private int electionType;

    Election electionAlg;

    ServerCnxnFactory cnxnFactory;
    private FileTxnSnapLog logFactory = null;

    private final QuorumStats quorumStats;

    public QuorumPeer() {
        super("QuorumPeer");
        quorumStats = new QuorumStats(this);
    }

    /**
     * For backward compatibility purposes, we instantiate QuorumMaj by default.
     */
    public QuorumPeer(Map<Long, QuorumServer> quorumPeers, File dataDir,
            File dataLogDir, int electionType,
            long myid, int tickTime, int initLimit, int syncLimit,
            ServerCnxnFactory cnxnFactory) throws IOException {
        this(quorumPeers, dataDir, dataLogDir, electionType, myid, tickTime,
                initLimit, syncLimit, cnxnFactory,
                new QuorumMaj(countParticipants(quorumPeers)));
    }

    /** Full constructor; a null quorumConfig falls back to majority quorums. */
    public QuorumPeer(Map<Long, QuorumServer> quorumPeers, File dataDir,
            File dataLogDir, int electionType,
            long myid, int tickTime, int initLimit, int syncLimit,
            ServerCnxnFactory cnxnFactory,
            QuorumVerifier quorumConfig) throws IOException {
        this();
        this.cnxnFactory = cnxnFactory;
        this.quorumPeers = quorumPeers;
        this.electionType = electionType;
        this.myid = myid;
        this.tickTime = tickTime;
        this.initLimit = initLimit;
        this.syncLimit = syncLimit;
        this.logFactory = new FileTxnSnapLog(dataLogDir, dataDir);
        this.zkDb = new ZKDatabase(this.logFactory);
        if(quorumConfig == null)
            this.quorumConfig = new QuorumMaj(countParticipants(quorumPeers));
        else this.quorumConfig = quorumConfig;
    }

    QuorumStats quorumStats() {
        return quorumStats;
    }

    /**
     * Loads the database, starts accepting client connections and kicks off
     * leader election before starting the main thread. Order matters: the
     * database must be loaded before election so the initial vote carries
     * the last logged zxid.
     */
    @Override
    public synchronized void start() {
        loadDataBase();
        cnxnFactory.start();
        startLeaderElection();
        super.start();
    }
private void loadDataBase() {
try {
zkDb.loadDataBase();
// load the epochs
long lastProcessedZxid = zkDb.getDataTree().lastProcessedZxid;
long epochOfZxid = ZxidUtils.getEpochFromZxid(lastProcessedZxid);
try {
currentEpoch = readLongFromFile(CURRENT_EPOCH_FILENAME);
} catch(FileNotFoundException e) {
// pick a reasonable epoch number
// this should only happen once when moving to a
// new code version
LOG.info(CURRENT_EPOCH_FILENAME + " not found! Creating with a reasonable default. This should only happen when you are upgrading your installation");
currentEpoch = epochOfZxid;
writeLongToFile(CURRENT_EPOCH_FILENAME, currentEpoch);
}
if (epochOfZxid > currentEpoch) {
throw new IOException("The current epoch, " + ZxidUtils.zxidToString(currentEpoch) + ", is older than the last zxid, " + lastProcessedZxid);
}
try {
acceptedEpoch = readLongFromFile(ACCEPTED_EPOCH_FILENAME);
} catch(FileNotFoundException e) {
// pick a reasonable epoch number
// this should only happen once when moving to a
// new code version
LOG.info(ACCEPTED_EPOCH_FILENAME + " not found! Creating with a reasonable default. This should only happen when you are upgrading your installation");
acceptedEpoch = epochOfZxid;
writeLongToFile(CURRENT_EPOCH_FILENAME, acceptedEpoch);
}
if (acceptedEpoch < currentEpoch) {
throw new IOException("The current epoch, " + ZxidUtils.zxidToString(currentEpoch) + " is less than the accepted epoch, " + ZxidUtils.zxidToString(acceptedEpoch));
}
} catch(IOException ie) {
LOG.error("Unable to load database on disk", ie);
throw new RuntimeException("Unable to run quorum server ", ie);
}
}
ResponderThread responder;
synchronized public void stopLeaderElection() {
responder.running = false;
responder.interrupt();
}
    /**
     * Initializes leader election: casts an initial vote for ourselves
     * (with our last logged zxid and current epoch), resolves our own
     * quorum address from the view, starts the UDP responder when the
     * legacy election type 0 is configured, and creates the election
     * algorithm instance.
     */
    synchronized public void startLeaderElection() {
        try {
            // Vote for ourselves initially.
            currentVote = new Vote(myid, getLastLoggedZxid(), getCurrentEpoch());
        } catch(IOException e) {
            // Preserve the original stack trace on the unchecked rethrow.
            RuntimeException re = new RuntimeException(e.getMessage());
            re.setStackTrace(e.getStackTrace());
            throw re;
        }
        // Find our own entry in the view to learn our quorum address.
        for (QuorumServer p : getView().values()) {
            if (p.id == myid) {
                myQuorumAddr = p.addr;
                break;
            }
        }
        if (myQuorumAddr == null) {
            throw new RuntimeException("My id " + myid + " not in the peer list");
        }
        // Election type 0 is the deprecated UDP-based election; it needs
        // the datagram socket and responder thread.
        if (electionType == 0) {
            try {
                udpSocket = new DatagramSocket(myQuorumAddr.getPort());
                responder = new ResponderThread();
                responder.start();
            } catch (SocketException e) {
                throw new RuntimeException(e);
            }
        }
        this.electionAlg = createElectionAlgorithm(electionType);
    }
/**
* Count the number of nodes in the map that could be followers.
* @param peers
* @return The number of followers in the map
*/
protected static int countParticipants(Map<Long,QuorumServer> peers) {
int count = 0;
for (QuorumServer q : peers.values()) {
if (q.type == LearnerType.PARTICIPANT) {
count++;
}
}
return count;
}
    /**
     * This constructor is only used by the existing unit test code.
     * It defaults to FileLogProvider persistence provider.
     */
    public QuorumPeer(Map<Long,QuorumServer> quorumPeers, File snapDir,
            File logDir, int clientPort, int electionAlg,
            long myid, int tickTime, int initLimit, int syncLimit)
        throws IOException
    {
        this(quorumPeers, snapDir, logDir, electionAlg,
                myid,tickTime, initLimit,syncLimit,
                ServerCnxnFactory.createFactory(new InetSocketAddress(clientPort), -1),
                new QuorumMaj(countParticipants(quorumPeers)));
    }

    /**
     * This constructor is only used by the existing unit test code.
     * It defaults to FileLogProvider persistence provider.
     */
    public QuorumPeer(Map<Long,QuorumServer> quorumPeers, File snapDir,
            File logDir, int clientPort, int electionAlg,
            long myid, int tickTime, int initLimit, int syncLimit,
            QuorumVerifier quorumConfig)
        throws IOException
    {
        this(quorumPeers, snapDir, logDir, electionAlg,
                myid,tickTime, initLimit,syncLimit,
                ServerCnxnFactory.createFactory(new InetSocketAddress(clientPort), -1),
                quorumConfig);
    }

    /**
     * returns the highest zxid that this host has seen
     *
     * @return the highest zxid for this host
     */
    public long getLastLoggedZxid() {
        // Lazily load the database if start() has not run yet.
        if (!zkDb.isInitialized()) {
            loadDataBase();
        }
        return zkDb.getDataTreeLastProcessedZxid();
    }

    // Exactly one of these is non-null at a time, depending on role.
    public Follower follower;
    public Leader leader;
    public Observer observer;

    /** Creates the Follower role object; overridable for tests. */
    protected Follower makeFollower(FileTxnSnapLog logFactory) throws IOException {
        return new Follower(this, new FollowerZooKeeperServer(logFactory,
                this,new ZooKeeperServer.BasicDataTreeBuilder(), this.zkDb));
    }

    /** Creates the Leader role object; overridable for tests. */
    protected Leader makeLeader(FileTxnSnapLog logFactory) throws IOException {
        return new Leader(this, new LeaderZooKeeperServer(logFactory,
                this,new ZooKeeperServer.BasicDataTreeBuilder(), this.zkDb));
    }

    /** Creates the Observer role object; overridable for tests. */
    protected Observer makeObserver(FileTxnSnapLog logFactory) throws IOException {
        return new Observer(this, new ObserverZooKeeperServer(logFactory,
                this, new ZooKeeperServer.BasicDataTreeBuilder(), this.zkDb));
    }
    /**
     * Instantiates the election algorithm selected by configuration:
     * 0 = deprecated UDP LeaderElection, 1/2 = AuthFastLeaderElection
     * (without/with auth), 3 = FastLeaderElection over QuorumCnxManager.
     *
     * @param electionAlgorithm the configured election type
     * @return the election instance, or null if type 3 could not start
     *         its listener
     */
    protected Election createElectionAlgorithm(int electionAlgorithm){
        Election le=null;

        //TODO: use a factory rather than a switch
        switch (electionAlgorithm) {
        case 0:
            le = new LeaderElection(this);
            break;
        case 1:
            le = new AuthFastLeaderElection(this);
            break;
        case 2:
            le = new AuthFastLeaderElection(this, true);
            break;
        case 3:
            qcm = new QuorumCnxManager(this);
            QuorumCnxManager.Listener listener = qcm.listener;
            if(listener != null){
                listener.start();
                le = new FastLeaderElection(this, qcm);
            } else {
                LOG.error("Null listener when initializing cnx manager");
            }
            break;
        default:
            assert false;
        }
        return le;
    }

    /**
     * Returns the election strategy to use for the next lookForLeader()
     * round. Type 0 gets a fresh LeaderElection instance every time; the
     * other types reuse the instance built by createElectionAlgorithm().
     */
    protected Election makeLEStrategy(){
        LOG.debug("Initializing leader election protocol...");
        if (getElectionType() == 0) {
            electionAlg = new LeaderElection(this);
        }
        return electionAlg;
    }
    synchronized protected void setLeader(Leader newLeader){
        leader=newLeader;
    }

    synchronized protected void setFollower(Follower newFollower){
        follower=newFollower;
    }

    synchronized protected void setObserver(Observer newObserver){
        observer=newObserver;
    }

    /**
     * Returns the ZooKeeperServer for whichever role is currently active
     * (leader, follower or observer), or null during transitions.
     */
    synchronized public ZooKeeperServer getActiveServer(){
        if(leader!=null)
            return leader.zk;
        else if(follower!=null)
            return follower.zk;
        else if (observer != null)
            return observer.zk;
        return null;
    }
    /**
     * Main thread body. Registers JMX beans for the quorum and each peer
     * in the view, then runs the role state machine until shutdown():
     * in LOOKING it runs leader election (starting a read-only server
     * after a grace period for partition tolerance), and in the other
     * states it runs the corresponding role object until it fails, after
     * which the peer drops back to LOOKING.
     */
    @Override
    public void run() {
        setName("QuorumPeer" + "[myid=" + getId() + "]" +
                cnxnFactory.getLocalAddress());

        LOG.debug("Starting quorum peer");
        try {
            jmxQuorumBean = new QuorumBean(this);
            MBeanRegistry.getInstance().register(jmxQuorumBean, null);
            for(QuorumServer s: getView().values()){
                ZKMBeanInfo p;
                if (getId() == s.id) {
                    p = jmxLocalPeerBean = new LocalPeerBean(this);
                    try {
                        MBeanRegistry.getInstance().register(p, jmxQuorumBean);
                    } catch (Exception e) {
                        LOG.warn("Failed to register with JMX", e);
                        jmxLocalPeerBean = null;
                    }
                } else {
                    p = new RemotePeerBean(s);
                    try {
                        MBeanRegistry.getInstance().register(p, jmxQuorumBean);
                    } catch (Exception e) {
                        LOG.warn("Failed to register with JMX", e);
                    }
                }
            }
        } catch (Exception e) {
            // JMX is best-effort; the peer keeps running without it.
            LOG.warn("Failed to register with JMX", e);
            jmxQuorumBean = null;
        }

        try {
            /*
             * Main loop
             */
            while (running) {
                switch (getPeerState()) {
                case LOOKING:
                    LOG.info("LOOKING");

                    // Create read-only server but don't start it immediately
                    final ReadOnlyZooKeeperServer roZk = new ReadOnlyZooKeeperServer(
                            logFactory, this,
                            new ZooKeeperServer.BasicDataTreeBuilder(),
                            this.zkDb);

                    // Instead of starting roZk immediately, wait some grace
                    // period before we decide we're partitioned.
                    //
                    // Thread is used here because otherwise it would require
                    // changes in each of election strategy classes which is
                    // unnecessary code coupling.
                    Thread roZkMgr = new Thread() {
                        public void run() {
                            try {
                                // lower-bound grace period to 2 secs
                                sleep(Math.max(2000, tickTime));
                                if (ServerState.LOOKING.equals(getPeerState())) {
                                    roZk.startup();
                                }
                            } catch (Exception e) {
                                LOG.error("FAILED to start ReadOnlyZooKeeperServer", e);
                            }
                        }
                    };
                    try {
                        roZkMgr.start();
                        setCurrentVote(makeLEStrategy().lookForLeader());
                    } catch (Exception e) {
                        LOG.warn("Unexpected exception",e);
                        setPeerState(ServerState.LOOKING);
                    } finally {
                        // If the thread is in the the grace period, interrupt
                        // to come out of waiting.
                        roZkMgr.interrupt();
                        roZk.shutdown();
                    }
                    break;
                case OBSERVING:
                    try {
                        LOG.info("OBSERVING");
                        setObserver(makeObserver(logFactory));
                        observer.observeLeader();
                    } catch (Exception e) {
                        LOG.warn("Unexpected exception",e );
                    } finally {
                        // On any exit, tear down the role and re-elect.
                        observer.shutdown();
                        setObserver(null);
                        setPeerState(ServerState.LOOKING);
                    }
                    break;
                case FOLLOWING:
                    try {
                        LOG.info("FOLLOWING");
                        setFollower(makeFollower(logFactory));
                        follower.followLeader();
                    } catch (Exception e) {
                        LOG.warn("Unexpected exception",e);
                    } finally {
                        follower.shutdown();
                        setFollower(null);
                        setPeerState(ServerState.LOOKING);
                    }
                    break;
                case LEADING:
                    LOG.info("LEADING");
                    try {
                        setLeader(makeLeader(logFactory));
                        leader.lead();
                        setLeader(null);
                    } catch (Exception e) {
                        LOG.warn("Unexpected exception",e);
                    } finally {
                        if (leader != null) {
                            leader.shutdown("Forcing shutdown");
                            setLeader(null);
                        }
                        setPeerState(ServerState.LOOKING);
                    }
                    break;
                }
            }
        } finally {
            LOG.warn("QuorumPeer main thread exited");
            try {
                MBeanRegistry.getInstance().unregisterAll();
            } catch (Exception e) {
                LOG.warn("Failed to unregister with JMX", e);
            }
            jmxQuorumBean = null;
            jmxLocalPeerBean = null;
        }
    }
    /**
     * Stops the main loop and tears down the active role, the client
     * connection factory, the UDP socket, the election algorithm and
     * finally the database.
     */
    public void shutdown() {
        running = false;
        if (leader != null) {
            leader.shutdown("quorum Peer shutdown");
        }
        if (follower != null) {
            follower.shutdown();
        }
        cnxnFactory.shutdown();
        if(udpSocket != null) {
            udpSocket.close();
        }

        if(getElectionAlg() != null){
            // Interrupt ourselves in case run() is blocked in election.
            this.interrupt();
            getElectionAlg().shutdown();
        }
        try {
            zkDb.close();
        } catch (IOException ie) {
            LOG.warn("Error closing logs ", ie);
        }
    }

    /**
     * A 'view' is a node's current opinion of the membership of the entire
     * ensemble.
     */
    public Map<Long,QuorumPeer.QuorumServer> getView() {
        // Unmodifiable wrapper: callers must not mutate the membership map.
        return Collections.unmodifiableMap(this.quorumPeers);
    }
/**
* Observers are not contained in this view, only nodes with
* PeerType=PARTICIPANT.
*/
public Map<Long,QuorumPeer.QuorumServer> getVotingView() {
Map<Long,QuorumPeer.QuorumServer> ret =
new HashMap<Long, QuorumPeer.QuorumServer>();
Map<Long,QuorumPeer.QuorumServer> view = getView();
for (QuorumServer server : view.values()) {
if (server.type == LearnerType.PARTICIPANT) {
ret.put(server.id, server);
}
}
return ret;
}
/**
* Returns only observers, no followers.
*/
public Map<Long,QuorumPeer.QuorumServer> getObservingView() {
Map<Long,QuorumPeer.QuorumServer> ret =
new HashMap<Long, QuorumPeer.QuorumServer>();
Map<Long,QuorumPeer.QuorumServer> view = getView();
for (QuorumServer server : view.values()) {
if (server.type == LearnerType.OBSERVER) {
ret.put(server.id, server);
}
}
return ret;
}
    /**
     * Check if a node is in the current view. With static membership, the
     * result of this check will never change; only when dynamic membership
     * is introduced will this be more useful.
     */
    public boolean viewContains(Long sid) {
        return this.quorumPeers.containsKey(sid);
    }

    /**
     * Only used by QuorumStats at the moment
     */
    public String[] getQuorumPeers() {
        List<String> l = new ArrayList<String>();
        synchronized (this) {
            if (leader != null) {
                // Leader: list each learner's remote address; a trailing
                // "*" marks learners that are fully synced.
                synchronized (leader.learners) {
                    for (LearnerHandler fh :
                        leader.learners)
                    {
                        if (fh.getSocket() == null)
                            continue;
                        String s = fh.getSocket().getRemoteSocketAddress().toString();
                        if (leader.isLearnerSynced(fh))
                            s += "*";
                        l.add(s);
                    }
                }
            } else if (follower != null) {
                // Follower: the only peer we talk to is the leader.
                l.add(follower.sock.getRemoteSocketAddress().toString());
            }
        }
        return l.toArray(new String[0]);
    }

    /** Maps the peer state to the string constants used by QuorumStats. */
    public String getServerState() {
        switch (getPeerState()) {
        case LOOKING:
            return QuorumStats.Provider.LOOKING_STATE;
        case LEADING:
            return QuorumStats.Provider.LEADING_STATE;
        case FOLLOWING:
            return QuorumStats.Provider.FOLLOWING_STATE;
        case OBSERVING:
            return QuorumStats.Provider.OBSERVING_STATE;
        }
        return QuorumStats.Provider.UNKNOWN_STATE;
    }
/**
* get the id of this quorum peer.
*/
public long getMyid() {
return myid;
}
/**
* set the id of this quorum peer.
*/
public void setMyid(long myid) {
this.myid = myid;
}
/**
* Get the number of milliseconds of each tick
*/
public int getTickTime() {
return tickTime;
}
/**
* Set the number of milliseconds of each tick
*/
public void setTickTime(int tickTime) {
LOG.info("tickTime set to " + tickTime);
this.tickTime = tickTime;
}
/** Maximum number of connections allowed from particular host (ip) */
public int getMaxClientCnxnsPerHost() {
ServerCnxnFactory fac = getCnxnFactory();
if (fac == null) {
return -1;
}
return fac.getMaxClientCnxnsPerHost();
}
/** minimum session timeout in milliseconds */
public int getMinSessionTimeout() {
return minSessionTimeout == -1 ? tickTime * 2 : minSessionTimeout;
}
/** minimum session timeout in milliseconds */
public void setMinSessionTimeout(int min) {
LOG.info("minSessionTimeout set to " + min);
this.minSessionTimeout = min;
}
/** maximum session timeout in milliseconds */
public int getMaxSessionTimeout() {
return maxSessionTimeout == -1 ? tickTime * 20 : maxSessionTimeout;
}
/** minimum session timeout in milliseconds */
public void setMaxSessionTimeout(int max) {
LOG.info("maxSessionTimeout set to " + max);
this.maxSessionTimeout = max;
}
/**
* Get the number of ticks that the initial synchronization phase can take
*/
public int getInitLimit() {
return initLimit;
}
/**
* Set the number of ticks that the initial synchronization phase can take
*/
public void setInitLimit(int initLimit) {
LOG.info("initLimit set to " + initLimit);
this.initLimit = initLimit;
}
/**
* Get the current tick
*/
public int getTick() {
return tick;
}
/**
 * Return QuorumVerifier object.
 */
public QuorumVerifier getQuorumVerifier(){
    return quorumConfig;
}
/** Set the QuorumVerifier for this peer. */
public void setQuorumVerifier(QuorumVerifier quorumConfig){
    this.quorumConfig = quorumConfig;
}
/**
 * Get an instance of LeaderElection.
 */
public Election getElectionAlg(){
    return electionAlg;
}
/**
 * Get the synclimit (ticks allowed between sending a request and getting an acknowledgement —
 * TODO confirm; semantics not visible in this chunk).
 */
public int getSyncLimit() {
    return syncLimit;
}
/**
 * Set the synclimit.
 */
public void setSyncLimit(int syncLimit) {
    this.syncLimit = syncLimit;
}
/**
 * Gets the election type.
 */
public int getElectionType() {
    return electionType;
}
/**
 * Sets the election type.
 */
public void setElectionType(int electionType) {
    this.electionType = electionType;
}
/** @return the server connection factory, or null if it has not been set yet. */
public ServerCnxnFactory getCnxnFactory() {
    return cnxnFactory;
}
public void setCnxnFactory(ServerCnxnFactory cnxnFactory) {
    this.cnxnFactory = cnxnFactory;
}
/** Replaces the whole quorum membership map. Keys are presumably server ids — confirm against callers. */
public void setQuorumPeers(Map<Long,QuorumServer> quorumPeers) {
    this.quorumPeers = quorumPeers;
}
/**
 * Returns the local client port, or -1 if the connection factory has not been set yet —
 * consistent with {@link #getMaxClientCnxnsPerHost()}'s handling of a missing factory.
 */
public int getClientPort() {
    ServerCnxnFactory fac = getCnxnFactory();
    if (fac == null) {
        // Previously this dereferenced cnxnFactory unconditionally and threw an opaque
        // NullPointerException when called before setCnxnFactory().
        return -1;
    }
    return fac.getLocalPort();
}
// NOTE(review): intentionally empty — the client address is presumably managed elsewhere
// (e.g. by the connection factory); confirm before adding behavior.
public void setClientPortAddress(InetSocketAddress addr) {
}
/** Sets the transaction-log/snapshot factory used by readLongFromFile/writeLongToFile below. */
public void setTxnFactory(FileTxnSnapLog factory) {
    this.logFactory = factory;
}
public FileTxnSnapLog getTxnFactory() {
    return this.logFactory;
}
/**
 * Set the ZK database for this node.
 * @param database the in-memory database instance to use
 */
public void setZKDatabase(ZKDatabase database) {
    this.zkDb = database;
}
/** Marks the peer as running/stopped. NOTE(review): presumably polled by the peer's main loop — not visible here. */
public void setRunning(boolean running) {
    this.running = running;
}
public boolean isRunning() {
    return running;
}
/**
 * Get a reference to the QuorumCnxManager.
 */
public QuorumCnxManager getQuorumCnxManager() {
    return qcm;
}
/**
 * Reads a single decimal long from the first line of the file {@code name} under the
 * snapshot directory.
 *
 * @throws IOException if the file cannot be read or its first line is not a parsable long
 */
private long readLongFromFile(String name) throws IOException {
    File file = new File(logFactory.getSnapDir(), name);
    BufferedReader reader = new BufferedReader(new FileReader(file));
    String firstLine = "";
    try {
        firstLine = reader.readLine();
        return Long.parseLong(firstLine);
    } catch (NumberFormatException e) {
        // Translate to the declared IOException, naming the bad content and the file.
        throw new IOException("Found " + firstLine + " in " + file);
    } finally {
        reader.close();
    }
}
// Cached epoch values; -1 means "not yet loaded from disk" (see getCurrentEpoch/getAcceptedEpoch).
private long acceptedEpoch = -1;
private long currentEpoch = -1;
// File names (relative to the snapshot directory) that persist the two epochs across restarts.
public static final String CURRENT_EPOCH_FILENAME = "currentEpoch";
public static final String ACCEPTED_EPOCH_FILENAME = "acceptedEpoch";
/**
 * Writes {@code value} as a decimal string to the file {@code name} under the snapshot
 * directory, flushing and fsyncing before close so the value survives a crash.
 * NOTE(review): the file is overwritten in place (no temp-file-then-rename), so a crash
 * mid-write can leave a truncated file — confirm this is acceptable for epoch files.
 */
private void writeLongToFile(String name, long value) throws IOException {
    File file = new File(logFactory.getSnapDir(), name);
    FileOutputStream out = new FileOutputStream(file);
    BufferedWriter bw = new BufferedWriter(new OutputStreamWriter(out));
    try {
        bw.write(Long.toString(value));
        bw.flush();
        // Force bytes to stable storage, not just the OS page cache.
        out.getFD().sync();
    } finally {
        // Closing the writer also closes the underlying stream.
        bw.close();
    }
}
/** Lazily loads and returns the current epoch from disk on first call; cached afterwards. */
public long getCurrentEpoch() throws IOException {
    if (currentEpoch == -1) {
        currentEpoch = readLongFromFile(CURRENT_EPOCH_FILENAME);
    }
    return currentEpoch;
}
/** Lazily loads and returns the accepted epoch from disk on first call; cached afterwards. */
public long getAcceptedEpoch() throws IOException {
    if (acceptedEpoch == -1) {
        acceptedEpoch = readLongFromFile(ACCEPTED_EPOCH_FILENAME);
    }
    return acceptedEpoch;
}
/** Updates the cached current epoch and persists it to disk immediately. */
public void setCurrentEpoch(long e) throws IOException {
    currentEpoch = e;
    writeLongToFile(CURRENT_EPOCH_FILENAME, e);
}
/** Updates the cached accepted epoch and persists it to disk immediately. */
public void setAcceptedEpoch(long e) throws IOException {
    acceptedEpoch = e;
    writeLongToFile(ACCEPTED_EPOCH_FILENAME, e);
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.threadpool;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.MoreExecutors;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
import org.elasticsearch.common.unit.SizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.*;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.node.settings.NodeSettingsService;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.*;
import static org.elasticsearch.common.collect.MapBuilder.newMapBuilder;
import static org.elasticsearch.common.settings.ImmutableSettings.settingsBuilder;
import static org.elasticsearch.common.unit.SizeValue.parseSizeValue;
import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes;
/**
*
*/
public class ThreadPool extends AbstractComponent {
/**
 * Canonical names of the built-in thread pools. Used as keys into the executors map and as
 * the "threadpool.&lt;name&gt;.*" settings prefix.
 */
public static class Names {
    // SAME is special: tasks run on the calling thread (direct executor), never queued.
    public static final String SAME = "same";
    public static final String GENERIC = "generic";
    public static final String GET = "get";
    public static final String INDEX = "index";
    public static final String BULK = "bulk";
    public static final String SEARCH = "search";
    public static final String SUGGEST = "suggest";
    public static final String PERCOLATE = "percolate";
    public static final String MANAGEMENT = "management";
    public static final String FLUSH = "flush";
    public static final String MERGE = "merge";
    public static final String REFRESH = "refresh";
    public static final String WARMER = "warmer";
    public static final String SNAPSHOT = "snapshot";
    public static final String OPTIMIZE = "optimize";
    public static final String BENCH = "bench";
}
// Settings prefix for per-pool configuration groups.
public static final String THREADPOOL_GROUP = "threadpool.";
// volatile + copy-on-write: updateSettings() replaces the whole map; readers never lock.
private volatile ImmutableMap<String, ExecutorHolder> executors;
private final ImmutableMap<String, Settings> defaultExecutorTypeSettings;
// Executors replaced by a settings update; drained once terminated (see ExecutorShutdownListener).
private final Queue<ExecutorHolder> retiredExecutors = new ConcurrentLinkedQueue<>();
private final ScheduledThreadPoolExecutor scheduler;
private final EstimatedTimeThread estimatedTimeThread;
/** Convenience constructor: builds settings containing only the required node name; no dynamic updates. */
public ThreadPool(String name) {
    this(ImmutableSettings.builder().put("name", name).build(), null);
}
/**
 * Builds all default pools (overridable via "threadpool.&lt;name&gt;.*" settings), registers for
 * dynamic settings updates when {@code nodeSettingsService} is given, and starts the scheduler
 * and the cached-time thread.
 */
@Inject
public ThreadPool(Settings settings, @Nullable NodeSettingsService nodeSettingsService) {
    super(settings);
    assert settings.get("name") != null : "ThreadPool's settings should contain a name";
    Map<String, Settings> groupSettings = settings.getGroups(THREADPOOL_GROUP);
    int availableProcessors = EsExecutors.boundedNumberOfProcessors(settings);
    // Sizing heuristics for the smaller utility pools: half the processors, capped.
    int halfProcMaxAt5 = Math.min(((availableProcessors + 1) / 2), 5);
    int halfProcMaxAt10 = Math.min(((availableProcessors + 1) / 2), 10);
    defaultExecutorTypeSettings = ImmutableMap.<String, Settings>builder()
            .put(Names.GENERIC, settingsBuilder().put("type", "cached").put("keep_alive", "30s").build())
            .put(Names.INDEX, settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 200).build())
            .put(Names.BULK, settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 50).build())
            .put(Names.GET, settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 1000).build())
            .put(Names.SEARCH, settingsBuilder().put("type", "fixed").put("size", availableProcessors * 3).put("queue_size", 1000).build())
            .put(Names.SUGGEST, settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 1000).build())
            .put(Names.PERCOLATE, settingsBuilder().put("type", "fixed").put("size", availableProcessors).put("queue_size", 1000).build())
            .put(Names.MANAGEMENT, settingsBuilder().put("type", "fixed").put("size", halfProcMaxAt5).put("queue_size", 100).build())
            .put(Names.FLUSH, settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt5).build())
            .put(Names.MERGE, settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt5).build())
            .put(Names.REFRESH, settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt10).build())
            .put(Names.WARMER, settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt5).build())
            .put(Names.SNAPSHOT, settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt5).build())
            .put(Names.OPTIMIZE, settingsBuilder().put("type", "fixed").put("size", 1).build())
            .put(Names.BENCH, settingsBuilder().put("type", "scaling").put("keep_alive", "5m").put("size", halfProcMaxAt5).build())
            .build();
    Map<String, ExecutorHolder> executors = Maps.newHashMap();
    for (Map.Entry<String, Settings> executor : defaultExecutorTypeSettings.entrySet()) {
        // User-supplied group settings (may be null) override each pool's defaults.
        executors.put(executor.getKey(), build(executor.getKey(), groupSettings.get(executor.getKey()), executor.getValue()));
    }
    // SAME runs tasks on the calling thread; it is not user-configurable.
    executors.put(Names.SAME, new ExecutorHolder(MoreExecutors.directExecutor(), new Info(Names.SAME, "same")));
    if (!executors.get(Names.GENERIC).info.getType().equals("cached")) {
        throw new ElasticsearchIllegalArgumentException("generic thread pool must be of type cached");
    }
    this.executors = ImmutableMap.copyOf(executors);
    this.scheduler = new ScheduledThreadPoolExecutor(1, EsExecutors.daemonThreadFactory(settings, "scheduler"), new EsAbortPolicy());
    // Do not run leftover scheduled work after shutdown.
    this.scheduler.setExecuteExistingDelayedTasksAfterShutdownPolicy(false);
    this.scheduler.setContinueExistingPeriodicTasksAfterShutdownPolicy(false);
    if (nodeSettingsService != null) {
        nodeSettingsService.addListener(new ApplySettings());
    }
    TimeValue estimatedTimeInterval = componentSettings.getAsTime("estimated_time_interval", TimeValue.timeValueMillis(200));
    this.estimatedTimeThread = new EstimatedTimeThread(EsExecutors.threadName(settings, "[timer]"), estimatedTimeInterval.millis());
    this.estimatedTimeThread.start();
}
/** @return the cached, coarse-grained current time in ms (refreshed every estimated_time_interval). */
public long estimatedTimeInMillis() {
    return estimatedTimeThread.estimatedTimeInMillis();
}
/** @return descriptive info for every pool except the internal "same" pseudo-pool. */
public ThreadPoolInfo info() {
    List<Info> infos = new ArrayList<>();
    for (ExecutorHolder holder : executors.values()) {
        String name = holder.info.getName();
        // no need to have info on "same" thread pool
        if ("same".equals(name)) {
            continue;
        }
        infos.add(holder.info);
    }
    return new ThreadPoolInfo(infos);
}
/**
 * Returns the {@link Info} for the named pool, or null when no pool is registered under
 * that name.
 */
public Info info(String name) {
    ExecutorHolder holder = executors.get(name);
    return holder == null ? null : holder.info;
}
/**
 * Snapshot of per-pool statistics. Fields that do not apply (non-ThreadPoolExecutor-backed
 * pools, or a rejection handler without counters) stay at -1. The "same" pseudo-pool is skipped.
 */
public ThreadPoolStats stats() {
    List<ThreadPoolStats.Stats> stats = new ArrayList<>();
    for (ExecutorHolder holder : executors.values()) {
        String name = holder.info.getName();
        // no need to have info on "same" thread pool
        if ("same".equals(name)) {
            continue;
        }
        int threads = -1;
        int queue = -1;
        int active = -1;
        long rejected = -1;
        int largest = -1;
        long completed = -1;
        if (holder.executor() instanceof ThreadPoolExecutor) {
            ThreadPoolExecutor threadPoolExecutor = (ThreadPoolExecutor) holder.executor();
            threads = threadPoolExecutor.getPoolSize();
            queue = threadPoolExecutor.getQueue().size();
            active = threadPoolExecutor.getActiveCount();
            largest = threadPoolExecutor.getLargestPoolSize();
            completed = threadPoolExecutor.getCompletedTaskCount();
            // Rejection counts are only available from the ES-specific handler.
            RejectedExecutionHandler rejectedExecutionHandler = threadPoolExecutor.getRejectedExecutionHandler();
            if (rejectedExecutionHandler instanceof XRejectedExecutionHandler) {
                rejected = ((XRejectedExecutionHandler) rejectedExecutionHandler).rejected();
            }
        }
        stats.add(new ThreadPoolStats.Stats(name, threads, queue, active, rejected, largest, completed));
    }
    return new ThreadPoolStats(stats);
}
/** Shortcut for {@code executor(Names.GENERIC)}. */
public Executor generic() {
    return executor(Names.GENERIC);
}
/**
 * Returns the executor registered under {@code name}.
 *
 * @throws ElasticsearchIllegalArgumentException if no pool is registered under that name
 */
public Executor executor(String name) {
    ExecutorHolder holder = executors.get(name);
    // Guard the map lookup itself: previously "executors.get(name).executor()" threw an opaque
    // NullPointerException for unknown names, making the null check below dead code.
    if (holder == null) {
        throw new ElasticsearchIllegalArgumentException("No executor found for [" + name + "]");
    }
    return holder.executor();
}
public ScheduledExecutorService scheduler() {
    return this.scheduler;
}
/**
 * Schedules {@code command} at a fixed delay. Wrapped in LoggingRunnable so an exception is
 * logged rather than propagated (an uncaught exception would suppress subsequent runs of a
 * periodic task).
 */
public ScheduledFuture<?> scheduleWithFixedDelay(Runnable command, TimeValue interval) {
    return scheduler.scheduleWithFixedDelay(new LoggingRunnable(command), interval.millis(), interval.millis(), TimeUnit.MILLISECONDS);
}
/**
 * Schedules a one-shot {@code command} after {@code delay}; unless {@code name} is SAME, the
 * command is handed off to that named pool when the delay fires (the scheduler thread only
 * dispatches).
 */
public ScheduledFuture<?> schedule(TimeValue delay, String name, Runnable command) {
    if (!Names.SAME.equals(name)) {
        command = new ThreadedRunnable(command, executor(name));
    }
    return scheduler.schedule(command, delay.millis(), TimeUnit.MILLISECONDS);
}
/** Graceful shutdown: stops the timer thread and scheduler, then each ThreadPoolExecutor-backed pool. */
public void shutdown() {
    estimatedTimeThread.running = false;
    estimatedTimeThread.interrupt();
    scheduler.shutdown();
    for (ExecutorHolder executor : executors.values()) {
        if (executor.executor() instanceof ThreadPoolExecutor) {
            ((ThreadPoolExecutor) executor.executor()).shutdown();
        }
    }
}
/** Forced shutdown: like shutdown() but interrupts running tasks and also kills retired executors. */
public void shutdownNow() {
    estimatedTimeThread.running = false;
    estimatedTimeThread.interrupt();
    scheduler.shutdownNow();
    for (ExecutorHolder executor : executors.values()) {
        if (executor.executor() instanceof ThreadPoolExecutor) {
            ((ThreadPoolExecutor) executor.executor()).shutdownNow();
        }
    }
    // Drain executors retired by updateSettings() that have not yet terminated.
    while (!retiredExecutors.isEmpty()) {
        ((ThreadPoolExecutor) retiredExecutors.remove().executor()).shutdownNow();
    }
}
/**
 * Waits for the scheduler, every pool, and any retired executors to terminate.
 * NOTE(review): the full {@code timeout} is applied to each executor in turn, so the total
 * wait can be a multiple of it — confirm callers expect that.
 *
 * @return true only if everything terminated within its respective wait
 */
public boolean awaitTermination(long timeout, TimeUnit unit) throws InterruptedException {
    boolean result = scheduler.awaitTermination(timeout, unit);
    for (ExecutorHolder executor : executors.values()) {
        if (executor.executor() instanceof ThreadPoolExecutor) {
            result &= ((ThreadPoolExecutor) executor.executor()).awaitTermination(timeout, unit);
        }
    }
    while (!retiredExecutors.isEmpty()) {
        result &= ((ThreadPoolExecutor) retiredExecutors.remove().executor()).awaitTermination(timeout, unit);
    }
    return result;
}
/** Builds a brand-new executor holder for {@code name} (no previous holder to update). */
private ExecutorHolder build(String name, @Nullable Settings settings, Settings defaultSettings) {
    return rebuild(name, null, settings, defaultSettings);
}
/**
 * Creates a new executor holder for {@code name}, or applies {@code settings} on top of
 * {@code previousExecutorHolder} when one exists. Returns the previous holder unchanged when
 * nothing relevant changed, so updateSettings() can detect updates via equality. Where the
 * previous executor is the same type, it is resized/retuned in place and re-wrapped with fresh
 * Info rather than replaced. Supported types: "same", "cached", "fixed", "scaling".
 */
private ExecutorHolder rebuild(String name, ExecutorHolder previousExecutorHolder, @Nullable Settings settings, Settings defaultSettings) {
    if (Names.SAME.equals(name)) {
        // Don't allow to change the "same" thread executor
        return previousExecutorHolder;
    }
    if (settings == null) {
        settings = ImmutableSettings.Builder.EMPTY_SETTINGS;
    }
    Info previousInfo = previousExecutorHolder != null ? previousExecutorHolder.info : null;
    // Type resolution order: explicit setting > previous pool's type > built-in default.
    String type = settings.get("type", previousInfo != null ? previousInfo.getType() : defaultSettings.get("type"));
    ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(this.settings, name);
    if ("same".equals(type)) {
        if (previousExecutorHolder != null) {
            logger.debug("updating thread_pool [{}], type [{}]", name, type);
        } else {
            logger.debug("creating thread_pool [{}], type [{}]", name, type);
        }
        return new ExecutorHolder(MoreExecutors.directExecutor(), new Info(name, type));
    } else if ("cached".equals(type)) {
        TimeValue defaultKeepAlive = defaultSettings.getAsTime("keep_alive", timeValueMinutes(5));
        if (previousExecutorHolder != null) {
            if ("cached".equals(previousInfo.getType())) {
                // Same type as before: only keep_alive can change; apply it in place.
                TimeValue updatedKeepAlive = settings.getAsTime("keep_alive", previousInfo.getKeepAlive());
                if (!previousInfo.getKeepAlive().equals(updatedKeepAlive)) {
                    logger.debug("updating thread_pool [{}], type [{}], keep_alive [{}]", name, type, updatedKeepAlive);
                    ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setKeepAliveTime(updatedKeepAlive.millis(), TimeUnit.MILLISECONDS);
                    return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, type, -1, -1, updatedKeepAlive, null));
                }
                return previousExecutorHolder;
            }
            // Type changed to "cached": carry over the previous keep_alive as the default.
            if (previousInfo.getKeepAlive() != null) {
                defaultKeepAlive = previousInfo.getKeepAlive();
            }
        }
        TimeValue keepAlive = settings.getAsTime("keep_alive", defaultKeepAlive);
        if (previousExecutorHolder != null) {
            logger.debug("updating thread_pool [{}], type [{}], keep_alive [{}]", name, type, keepAlive);
        } else {
            logger.debug("creating thread_pool [{}], type [{}], keep_alive [{}]", name, type, keepAlive);
        }
        Executor executor = EsExecutors.newCached(keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory);
        return new ExecutorHolder(executor, new Info(name, type, -1, -1, keepAlive, null));
    } else if ("fixed".equals(type)) {
        int defaultSize = defaultSettings.getAsInt("size", EsExecutors.boundedNumberOfProcessors(settings));
        // "queue" and "queue_size" are accepted aliases; "-1" means unbounded (null SizeValue).
        SizeValue defaultQueueSize = getAsSizeOrUnbounded(defaultSettings, "queue", getAsSizeOrUnbounded(defaultSettings, "queue_size", null));
        if (previousExecutorHolder != null) {
            if ("fixed".equals(previousInfo.getType())) {
                SizeValue updatedQueueSize = getAsSizeOrUnbounded(settings, "capacity", getAsSizeOrUnbounded(settings, "queue", getAsSizeOrUnbounded(settings, "queue_size", previousInfo.getQueueSize())));
                // Queue size cannot be changed in place; only resize when it is unchanged.
                if (Objects.equal(previousInfo.getQueueSize(), updatedQueueSize)) {
                    int updatedSize = settings.getAsInt("size", previousInfo.getMax());
                    if (previousInfo.getMax() != updatedSize) {
                        logger.debug("updating thread_pool [{}], type [{}], size [{}], queue_size [{}]", name, type, updatedSize, updatedQueueSize);
                        ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setCorePoolSize(updatedSize);
                        ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setMaximumPoolSize(updatedSize);
                        return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, type, updatedSize, updatedSize, null, updatedQueueSize));
                    }
                    return previousExecutorHolder;
                }
            }
            if (previousInfo.getMax() >= 0) {
                defaultSize = previousInfo.getMax();
            }
            defaultQueueSize = previousInfo.getQueueSize();
        }
        int size = settings.getAsInt("size", defaultSize);
        SizeValue queueSize = getAsSizeOrUnbounded(settings, "capacity", getAsSizeOrUnbounded(settings, "queue", getAsSizeOrUnbounded(settings, "queue_size", defaultQueueSize)));
        logger.debug("creating thread_pool [{}], type [{}], size [{}], queue_size [{}]", name, type, size, queueSize);
        Executor executor = EsExecutors.newFixed(size, queueSize == null ? -1 : (int) queueSize.singles(), threadFactory);
        return new ExecutorHolder(executor, new Info(name, type, size, size, null, queueSize));
    } else if ("scaling".equals(type)) {
        TimeValue defaultKeepAlive = defaultSettings.getAsTime("keep_alive", timeValueMinutes(5));
        int defaultMin = defaultSettings.getAsInt("min", 1);
        int defaultSize = defaultSettings.getAsInt("size", EsExecutors.boundedNumberOfProcessors(settings));
        if (previousExecutorHolder != null) {
            if ("scaling".equals(previousInfo.getType())) {
                TimeValue updatedKeepAlive = settings.getAsTime("keep_alive", previousInfo.getKeepAlive());
                int updatedMin = settings.getAsInt("min", previousInfo.getMin());
                int updatedSize = settings.getAsInt("max", settings.getAsInt("size", previousInfo.getMax()));
                if (!previousInfo.getKeepAlive().equals(updatedKeepAlive) || previousInfo.getMin() != updatedMin || previousInfo.getMax() != updatedSize) {
                    logger.debug("updating thread_pool [{}], type [{}], keep_alive [{}]", name, type, updatedKeepAlive);
                    // Apply only the parameters that actually changed, in place.
                    if (!previousInfo.getKeepAlive().equals(updatedKeepAlive)) {
                        ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setKeepAliveTime(updatedKeepAlive.millis(), TimeUnit.MILLISECONDS);
                    }
                    if (previousInfo.getMin() != updatedMin) {
                        ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setCorePoolSize(updatedMin);
                    }
                    if (previousInfo.getMax() != updatedSize) {
                        ((EsThreadPoolExecutor) previousExecutorHolder.executor()).setMaximumPoolSize(updatedSize);
                    }
                    return new ExecutorHolder(previousExecutorHolder.executor(), new Info(name, type, updatedMin, updatedSize, updatedKeepAlive, null));
                }
                return previousExecutorHolder;
            }
            // Type changed to "scaling": carry over whatever previous values still apply.
            if (previousInfo.getKeepAlive() != null) {
                defaultKeepAlive = previousInfo.getKeepAlive();
            }
            if (previousInfo.getMin() >= 0) {
                defaultMin = previousInfo.getMin();
            }
            if (previousInfo.getMax() >= 0) {
                defaultSize = previousInfo.getMax();
            }
        }
        TimeValue keepAlive = settings.getAsTime("keep_alive", defaultKeepAlive);
        int min = settings.getAsInt("min", defaultMin);
        int size = settings.getAsInt("max", settings.getAsInt("size", defaultSize));
        if (previousExecutorHolder != null) {
            logger.debug("updating thread_pool [{}], type [{}], min [{}], size [{}], keep_alive [{}]", name, type, min, size, keepAlive);
        } else {
            logger.debug("creating thread_pool [{}], type [{}], min [{}], size [{}], keep_alive [{}]", name, type, min, size, keepAlive);
        }
        Executor executor = EsExecutors.newScaling(min, size, keepAlive.millis(), TimeUnit.MILLISECONDS, threadFactory);
        return new ExecutorHolder(executor, new Info(name, type, min, size, keepAlive, null));
    }
    throw new ElasticsearchIllegalArgumentException("No type found [" + type + "], for [" + name + "]");
}
/**
 * Applies dynamic "threadpool.*" settings: rebuilds each configured pool and swaps it into the
 * copy-on-write {@link #executors} map; a replaced executor is parked in
 * {@link #retiredExecutors} until it terminates.
 */
public void updateSettings(Settings settings) {
    Map<String, Settings> groupSettings = settings.getGroups("threadpool");
    if (groupSettings.isEmpty()) {
        return;
    }
    for (Map.Entry<String, Settings> executor : defaultExecutorTypeSettings.entrySet()) {
        Settings updatedSettings = groupSettings.get(executor.getKey());
        if (updatedSettings == null) {
            continue;
        }
        ExecutorHolder oldExecutorHolder = executors.get(executor.getKey());
        ExecutorHolder newExecutorHolder = rebuild(executor.getKey(), oldExecutorHolder, updatedSettings, executor.getValue());
        if (!oldExecutorHolder.equals(newExecutorHolder)) {
            executors = newMapBuilder(executors).put(executor.getKey(), newExecutorHolder).immutableMap();
            // A genuinely new executor (not just new Info around the old one): retire the old
            // one and let it drain; ExecutorShutdownListener removes it once terminated.
            if (!oldExecutorHolder.executor().equals(newExecutorHolder.executor()) && oldExecutorHolder.executor() instanceof EsThreadPoolExecutor) {
                retiredExecutors.add(oldExecutorHolder);
                ((EsThreadPoolExecutor) oldExecutorHolder.executor()).shutdown(new ExecutorShutdownListener(oldExecutorHolder));
            }
        }
    }
}
/**
 * Reads {@code setting} as a SizeValue, treating "-1" as "unbounded" and returning null for it
 * (SizeValue itself only supports non-negative numbers).
 */
private SizeValue getAsSizeOrUnbounded(Settings settings, String setting, SizeValue defaultValue) throws SettingsException {
    String raw = settings.get(setting);
    if ("-1".equals(raw)) {
        return null;
    }
    return parseSizeValue(raw, defaultValue);
}
/**
 * Removes a retired executor holder from {@link #retiredExecutors} once its pool has fully
 * terminated, so shutdownNow/awaitTermination stop tracking it.
 */
class ExecutorShutdownListener implements EsThreadPoolExecutor.ShutdownListener {
    // final: the holder is fixed at construction and never reassigned.
    private final ExecutorHolder holder;
    public ExecutorShutdownListener(ExecutorHolder holder) {
        this.holder = holder;
    }
    @Override
    public void onTerminated() {
        retiredExecutors.remove(holder);
    }
}
/**
 * Wraps a Runnable so exceptions are logged instead of propagating (an uncaught exception in
 * a periodic scheduled task would suppress its subsequent runs).
 */
class LoggingRunnable implements Runnable {
    private final Runnable runnable;
    LoggingRunnable(Runnable runnable) {
        this.runnable = runnable;
    }
    @Override
    public void run() {
        try {
            runnable.run();
        } catch (Exception e) {
            logger.warn("failed to run {}", e, runnable.toString());
        }
    }
    // NOTE(review): hashCode/equals delegate to the wrapped runnable so a wrapper can match the
    // raw task (e.g. for removal); equals is intentionally asymmetric — confirm callers rely on it.
    @Override
    public int hashCode() {
        return runnable.hashCode();
    }
    @Override
    public boolean equals(Object obj) {
        return runnable.equals(obj);
    }
    @Override
    public String toString() {
        // NOTE(review): "[threaded]" looks copy-pasted from ThreadedRunnable — confirm before
        // changing the string, as it may appear in logs people grep for.
        return "[threaded] " + runnable.toString();
    }
}
/**
 * A Runnable that, when run, merely submits the wrapped runnable to another executor — used by
 * schedule() so the single scheduler thread only dispatches and never executes pool work.
 */
class ThreadedRunnable implements Runnable {
    private final Runnable runnable;
    private final Executor executor;
    ThreadedRunnable(Runnable runnable, Executor executor) {
        this.runnable = runnable;
        this.executor = executor;
    }
    @Override
    public void run() {
        executor.execute(runnable);
    }
    // NOTE(review): hashCode/equals delegate to the wrapped runnable (asymmetric equals) so a
    // wrapper can match the raw task — same pattern as LoggingRunnable.
    @Override
    public int hashCode() {
        return runnable.hashCode();
    }
    @Override
    public boolean equals(Object obj) {
        return runnable.equals(obj);
    }
    @Override
    public String toString() {
        return "[threaded] " + runnable.toString();
    }
}
/**
 * Daemon thread that samples System.currentTimeMillis() every {@code interval} ms so hot paths
 * can read a coarse clock without a syscall per call. Stopped by setting {@link #running} to
 * false and interrupting the thread (see ThreadPool.shutdown()).
 */
static class EstimatedTimeThread extends Thread {
    final long interval;
    // volatile: written by the stopping thread, read in the loop below.
    volatile boolean running = true;
    // volatile: published to arbitrary reader threads via estimatedTimeInMillis().
    volatile long estimatedTimeInMillis;
    EstimatedTimeThread(String name, long interval) {
        super(name);
        this.interval = interval;
        this.estimatedTimeInMillis = System.currentTimeMillis();
        setDaemon(true);
    }
    /** @return the most recently sampled wall-clock time in milliseconds. */
    public long estimatedTimeInMillis() {
        return this.estimatedTimeInMillis;
    }
    @Override
    public void run() {
        while (running) {
            estimatedTimeInMillis = System.currentTimeMillis();
            try {
                Thread.sleep(interval);
            } catch (InterruptedException e) {
                // Restore the interrupt flag before exiting so any code that later inspects
                // this thread's interrupted status still sees it (previously swallowed).
                Thread.currentThread().interrupt();
                running = false;
                return;
            }
        }
    }
}
/**
 * Pairs an executor with its descriptive Info. The assert pins the only two executor kinds
 * this pool ever holds: EsThreadPoolExecutor or the direct (caller-runs) executor.
 */
static class ExecutorHolder {
    private final Executor executor;
    public final Info info;
    ExecutorHolder(Executor executor, Info info) {
        assert executor instanceof EsThreadPoolExecutor || executor == MoreExecutors.directExecutor();
        this.executor = executor;
        this.info = info;
    }
    Executor executor() {
        return executor;
    }
}
/**
 * Serializable description of one thread pool: name, type, min/max size, keep-alive and queue
 * size (-1 / null where not applicable). The readFrom/writeTo field ORDER is the wire format —
 * do not reorder.
 */
public static class Info implements Streamable, ToXContent {
    private String name;
    private String type;
    private int min;
    private int max;
    private TimeValue keepAlive;
    private SizeValue queueSize;
    // No-arg constructor for Streamable deserialization (readFrom populates the fields).
    Info() {
    }
    public Info(String name, String type) {
        this(name, type, -1);
    }
    public Info(String name, String type, int size) {
        this(name, type, size, size, null, null);
    }
    public Info(String name, String type, int min, int max, @Nullable TimeValue keepAlive, @Nullable SizeValue queueSize) {
        this.name = name;
        this.type = type;
        this.min = min;
        this.max = max;
        this.keepAlive = keepAlive;
        this.queueSize = queueSize;
    }
    public String getName() {
        return this.name;
    }
    public String getType() {
        return this.type;
    }
    public int getMin() {
        return this.min;
    }
    public int getMax() {
        return this.max;
    }
    @Nullable
    public TimeValue getKeepAlive() {
        return this.keepAlive;
    }
    @Nullable
    public SizeValue getQueueSize() {
        return this.queueSize;
    }
    @Override
    public void readFrom(StreamInput in) throws IOException {
        name = in.readString();
        type = in.readString();
        min = in.readInt();
        max = in.readInt();
        // Optional fields are preceded by a presence boolean.
        if (in.readBoolean()) {
            keepAlive = TimeValue.readTimeValue(in);
        }
        if (in.readBoolean()) {
            queueSize = SizeValue.readSizeValue(in);
        }
        // Trailing booleans keep wire compatibility with fields that used to exist.
        in.readBoolean(); // here to conform with removed waitTime
        in.readBoolean(); // here to conform with removed rejected setting
        in.readBoolean(); // here to conform with queue type
    }
    @Override
    public void writeTo(StreamOutput out) throws IOException {
        out.writeString(name);
        out.writeString(type);
        out.writeInt(min);
        out.writeInt(max);
        if (keepAlive == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            keepAlive.writeTo(out);
        }
        if (queueSize == null) {
            out.writeBoolean(false);
        } else {
            out.writeBoolean(true);
            queueSize.writeTo(out);
        }
        // Mirror the placeholder booleans read by readFrom.
        out.writeBoolean(false); // here to conform with removed waitTime
        out.writeBoolean(false); // here to conform with removed rejected setting
        out.writeBoolean(false); // here to conform with queue type
    }
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(name, XContentBuilder.FieldCaseConversion.NONE);
        builder.field(Fields.TYPE, type);
        // -1 means "not applicable" and is omitted for min/max but rendered for queue_size.
        if (min != -1) {
            builder.field(Fields.MIN, min);
        }
        if (max != -1) {
            builder.field(Fields.MAX, max);
        }
        if (keepAlive != null) {
            builder.field(Fields.KEEP_ALIVE, keepAlive.toString());
        }
        if (queueSize == null) {
            builder.field(Fields.QUEUE_SIZE, -1);
        } else {
            builder.field(Fields.QUEUE_SIZE, queueSize.toString());
        }
        builder.endObject();
        return builder;
    }
    // XContent field names, interned once.
    static final class Fields {
        static final XContentBuilderString TYPE = new XContentBuilderString("type");
        static final XContentBuilderString MIN = new XContentBuilderString("min");
        static final XContentBuilderString MAX = new XContentBuilderString("max");
        static final XContentBuilderString KEEP_ALIVE = new XContentBuilderString("keep_alive");
        static final XContentBuilderString QUEUE_SIZE = new XContentBuilderString("queue_size");
    }
}
/** Bridges the node-level dynamic-settings service to updateSettings(). */
class ApplySettings implements NodeSettingsService.Listener {
    @Override
    public void onRefreshSettings(Settings settings) {
        updateSettings(settings);
    }
}
}
| |
package com.gplucky.mybatis.model;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
public class RateTracingTypeMappingExample {
/**
 * SQL "order by" clause appended to generated queries; null means no ordering.
 * (MyBatis Generator, table rate_tracing_type_mapping, @mbggenerated)
 */
protected String orderByClause;
/**
 * Whether SELECT DISTINCT is used.
 * (MyBatis Generator, table rate_tracing_type_mapping, @mbggenerated)
 */
protected boolean distinct;
/**
 * Criteria groups combined with OR (see or()); each group holds its own condition list.
 * (MyBatis Generator, table rate_tracing_type_mapping, @mbggenerated)
 */
protected List<Criteria> oredCriteria;
/**
 * Creates an example with no criteria.
 * (MyBatis Generator, table rate_tracing_type_mapping, @mbggenerated)
 */
public RateTracingTypeMappingExample() {
    oredCriteria = new ArrayList<Criteria>();
}
/** Sets the "order by" clause. (@mbggenerated, table rate_tracing_type_mapping) */
public void setOrderByClause(String orderByClause) {
    this.orderByClause = orderByClause;
}
/** @return the "order by" clause, or null when unset. (@mbggenerated) */
public String getOrderByClause() {
    return orderByClause;
}
/** Enables/disables SELECT DISTINCT. (@mbggenerated) */
public void setDistinct(boolean distinct) {
    this.distinct = distinct;
}
/** @return whether SELECT DISTINCT is enabled. (@mbggenerated) */
public boolean isDistinct() {
    return distinct;
}
/** @return the live list of OR'ed criteria groups (not a defensive copy). (@mbggenerated) */
public List<Criteria> getOredCriteria() {
    return oredCriteria;
}
/** Adds an existing criteria group, OR'ed with any previous groups. (@mbggenerated) */
public void or(Criteria criteria) {
    oredCriteria.add(criteria);
}
/** Creates a new criteria group, registers it (OR'ed with previous groups) and returns it. (@mbggenerated) */
public Criteria or() {
    Criteria criteria = createCriteriaInternal();
    oredCriteria.add(criteria);
    return criteria;
}
/**
 * Creates and returns a new criteria group. It is registered as the first group only when no
 * group exists yet; otherwise the caller must register it via {@link #or(Criteria)}.
 * (MyBatis Generator, table rate_tracing_type_mapping, @mbggenerated)
 */
public Criteria createCriteria() {
    Criteria criteria = createCriteriaInternal();
    if (oredCriteria.isEmpty()) {
        oredCriteria.add(criteria);
    }
    return criteria;
}
/** Factory for a new, unregistered criteria group. (@mbggenerated) */
protected Criteria createCriteriaInternal() {
    Criteria criteria = new Criteria();
    return criteria;
}
/** Resets this example to its freshly constructed state. (@mbggenerated) */
public void clear() {
    oredCriteria.clear();
    orderByClause = null;
    distinct = false;
}
/**
 * Base class holding one list of Criterion conditions; presumably combined with AND by the
 * generated SQL mapper (mapper not visible in this chunk).
 * (MyBatis Generator, table rate_tracing_type_mapping, @mbggenerated)
 */
protected abstract static class GeneratedCriteria {
    // Conditions belonging to this group.
    protected List<Criterion> criteria;
    protected GeneratedCriteria() {
        super();
        criteria = new ArrayList<Criterion>();
    }
/** @return true when at least one condition has been added. */
public boolean isValid() {
    return criteria.size() > 0;
}
public List<Criterion> getAllCriteria() {
    return criteria;
}
/** @return the live condition list (same list instance as getAllCriteria). */
public List<Criterion> getCriteria() {
    return criteria;
}
/** Adds a value-less condition, e.g. "id is null". */
protected void addCriterion(String condition) {
    if (condition == null) {
        throw new RuntimeException("Value for condition cannot be null");
    }
    criteria.add(new Criterion(condition));
}
/** Adds a single-value condition; {@code property} is only used in the error message. */
protected void addCriterion(String condition, Object value, String property) {
    if (value == null) {
        throw new RuntimeException("Value for " + property + " cannot be null");
    }
    criteria.add(new Criterion(condition, value));
}
/** Adds a two-value (between) condition; {@code property} is only used in the error message. */
protected void addCriterion(String condition, Object value1, Object value2, String property) {
    if (value1 == null || value2 == null) {
        throw new RuntimeException("Between values for " + property + " cannot be null");
    }
    criteria.add(new Criterion(condition, value1, value2));
}
// Generated condition builders for the "id" column. Each appends one condition (AND'ed within
// this group) and returns this cast to Criteria for fluent chaining.
public Criteria andIdIsNull() {
    addCriterion("id is null");
    return (Criteria) this;
}
public Criteria andIdIsNotNull() {
    addCriterion("id is not null");
    return (Criteria) this;
}
public Criteria andIdEqualTo(Integer value) {
    addCriterion("id =", value, "id");
    return (Criteria) this;
}
public Criteria andIdNotEqualTo(Integer value) {
    addCriterion("id <>", value, "id");
    return (Criteria) this;
}
public Criteria andIdGreaterThan(Integer value) {
    addCriterion("id >", value, "id");
    return (Criteria) this;
}
public Criteria andIdGreaterThanOrEqualTo(Integer value) {
    addCriterion("id >=", value, "id");
    return (Criteria) this;
}
public Criteria andIdLessThan(Integer value) {
    addCriterion("id <", value, "id");
    return (Criteria) this;
}
public Criteria andIdLessThanOrEqualTo(Integer value) {
    addCriterion("id <=", value, "id");
    return (Criteria) this;
}
public Criteria andIdIn(List<Integer> values) {
    addCriterion("id in", values, "id");
    return (Criteria) this;
}
public Criteria andIdNotIn(List<Integer> values) {
    addCriterion("id not in", values, "id");
    return (Criteria) this;
}
public Criteria andIdBetween(Integer value1, Integer value2) {
    addCriterion("id between", value1, value2, "id");
    return (Criteria) this;
}
public Criteria andIdNotBetween(Integer value1, Integer value2) {
    addCriterion("id not between", value1, value2, "id");
    return (Criteria) this;
}
public Criteria andCodeIsNull() {
addCriterion("code is null");
return (Criteria) this;
}
public Criteria andCodeIsNotNull() {
addCriterion("code is not null");
return (Criteria) this;
}
public Criteria andCodeEqualTo(String value) {
addCriterion("code =", value, "code");
return (Criteria) this;
}
public Criteria andCodeNotEqualTo(String value) {
addCriterion("code <>", value, "code");
return (Criteria) this;
}
public Criteria andCodeGreaterThan(String value) {
addCriterion("code >", value, "code");
return (Criteria) this;
}
public Criteria andCodeGreaterThanOrEqualTo(String value) {
addCriterion("code >=", value, "code");
return (Criteria) this;
}
public Criteria andCodeLessThan(String value) {
addCriterion("code <", value, "code");
return (Criteria) this;
}
public Criteria andCodeLessThanOrEqualTo(String value) {
addCriterion("code <=", value, "code");
return (Criteria) this;
}
public Criteria andCodeLike(String value) {
addCriterion("code like", value, "code");
return (Criteria) this;
}
public Criteria andCodeNotLike(String value) {
addCriterion("code not like", value, "code");
return (Criteria) this;
}
public Criteria andCodeIn(List<String> values) {
addCriterion("code in", values, "code");
return (Criteria) this;
}
public Criteria andCodeNotIn(List<String> values) {
addCriterion("code not in", values, "code");
return (Criteria) this;
}
public Criteria andCodeBetween(String value1, String value2) {
addCriterion("code between", value1, value2, "code");
return (Criteria) this;
}
public Criteria andCodeNotBetween(String value1, String value2) {
addCriterion("code not between", value1, value2, "code");
return (Criteria) this;
}
public Criteria andVersionIsNull() {
addCriterion("version is null");
return (Criteria) this;
}
public Criteria andVersionIsNotNull() {
addCriterion("version is not null");
return (Criteria) this;
}
public Criteria andVersionEqualTo(Integer value) {
addCriterion("version =", value, "version");
return (Criteria) this;
}
public Criteria andVersionNotEqualTo(Integer value) {
addCriterion("version <>", value, "version");
return (Criteria) this;
}
public Criteria andVersionGreaterThan(Integer value) {
addCriterion("version >", value, "version");
return (Criteria) this;
}
public Criteria andVersionGreaterThanOrEqualTo(Integer value) {
addCriterion("version >=", value, "version");
return (Criteria) this;
}
public Criteria andVersionLessThan(Integer value) {
addCriterion("version <", value, "version");
return (Criteria) this;
}
public Criteria andVersionLessThanOrEqualTo(Integer value) {
addCriterion("version <=", value, "version");
return (Criteria) this;
}
public Criteria andVersionIn(List<Integer> values) {
addCriterion("version in", values, "version");
return (Criteria) this;
}
public Criteria andVersionNotIn(List<Integer> values) {
addCriterion("version not in", values, "version");
return (Criteria) this;
}
public Criteria andVersionBetween(Integer value1, Integer value2) {
addCriterion("version between", value1, value2, "version");
return (Criteria) this;
}
public Criteria andVersionNotBetween(Integer value1, Integer value2) {
addCriterion("version not between", value1, value2, "version");
return (Criteria) this;
}
public Criteria andTypeIsNull() {
addCriterion("type is null");
return (Criteria) this;
}
public Criteria andTypeIsNotNull() {
addCriterion("type is not null");
return (Criteria) this;
}
public Criteria andTypeEqualTo(String value) {
addCriterion("type =", value, "type");
return (Criteria) this;
}
public Criteria andTypeNotEqualTo(String value) {
addCriterion("type <>", value, "type");
return (Criteria) this;
}
public Criteria andTypeGreaterThan(String value) {
addCriterion("type >", value, "type");
return (Criteria) this;
}
public Criteria andTypeGreaterThanOrEqualTo(String value) {
addCriterion("type >=", value, "type");
return (Criteria) this;
}
public Criteria andTypeLessThan(String value) {
addCriterion("type <", value, "type");
return (Criteria) this;
}
public Criteria andTypeLessThanOrEqualTo(String value) {
addCriterion("type <=", value, "type");
return (Criteria) this;
}
public Criteria andTypeLike(String value) {
addCriterion("type like", value, "type");
return (Criteria) this;
}
public Criteria andTypeNotLike(String value) {
addCriterion("type not like", value, "type");
return (Criteria) this;
}
public Criteria andTypeIn(List<String> values) {
addCriterion("type in", values, "type");
return (Criteria) this;
}
public Criteria andTypeNotIn(List<String> values) {
addCriterion("type not in", values, "type");
return (Criteria) this;
}
public Criteria andTypeBetween(String value1, String value2) {
addCriterion("type between", value1, value2, "type");
return (Criteria) this;
}
public Criteria andTypeNotBetween(String value1, String value2) {
addCriterion("type not between", value1, value2, "type");
return (Criteria) this;
}
public Criteria andCreateTimeIsNull() {
addCriterion("create_time is null");
return (Criteria) this;
}
public Criteria andCreateTimeIsNotNull() {
addCriterion("create_time is not null");
return (Criteria) this;
}
public Criteria andCreateTimeEqualTo(Date value) {
addCriterion("create_time =", value, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeNotEqualTo(Date value) {
addCriterion("create_time <>", value, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeGreaterThan(Date value) {
addCriterion("create_time >", value, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeGreaterThanOrEqualTo(Date value) {
addCriterion("create_time >=", value, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeLessThan(Date value) {
addCriterion("create_time <", value, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeLessThanOrEqualTo(Date value) {
addCriterion("create_time <=", value, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeIn(List<Date> values) {
addCriterion("create_time in", values, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeNotIn(List<Date> values) {
addCriterion("create_time not in", values, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeBetween(Date value1, Date value2) {
addCriterion("create_time between", value1, value2, "createTime");
return (Criteria) this;
}
public Criteria andCreateTimeNotBetween(Date value1, Date value2) {
addCriterion("create_time not between", value1, value2, "createTime");
return (Criteria) this;
}
public Criteria andUpdateTimeIsNull() {
addCriterion("update_time is null");
return (Criteria) this;
}
public Criteria andUpdateTimeIsNotNull() {
addCriterion("update_time is not null");
return (Criteria) this;
}
public Criteria andUpdateTimeEqualTo(Date value) {
addCriterion("update_time =", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeNotEqualTo(Date value) {
addCriterion("update_time <>", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeGreaterThan(Date value) {
addCriterion("update_time >", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeGreaterThanOrEqualTo(Date value) {
addCriterion("update_time >=", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeLessThan(Date value) {
addCriterion("update_time <", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeLessThanOrEqualTo(Date value) {
addCriterion("update_time <=", value, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeIn(List<Date> values) {
addCriterion("update_time in", values, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeNotIn(List<Date> values) {
addCriterion("update_time not in", values, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeBetween(Date value1, Date value2) {
addCriterion("update_time between", value1, value2, "updateTime");
return (Criteria) this;
}
public Criteria andUpdateTimeNotBetween(Date value1, Date value2) {
addCriterion("update_time not between", value1, value2, "updateTime");
return (Criteria) this;
}
}
/**
* This class was generated by MyBatis Generator.
* This class corresponds to the database table rate_tracing_type_mapping
*
* @mbggenerated do_not_delete_during_merge
*/
public static class Criteria extends GeneratedCriteria {
    // Concrete criteria group.  Construction goes through the enclosing
    // example's createCriteria()/createCriteriaInternal(), hence the
    // protected constructor.  (Implicit super() call; no extra state.)
    protected Criteria() {
    }
}
/**
* This class was generated by MyBatis Generator.
* This class corresponds to the database table rate_tracing_type_mapping
*
* @mbggenerated
*/
/**
 * A single SQL condition plus its bind value(s) for the
 * rate_tracing_type_mapping table.  Exactly one of the four kind flags
 * (noValue / singleValue / betweenValue / listValue) is set by the
 * constructors, telling the generated SQL how many placeholders to render.
 *
 * Generated by MyBatis Generator.
 */
public static class Criterion {
    // Raw SQL fragment, e.g. "id =" or "create_time between".
    private String condition;

    // First (or only) bind value; a List when listValue is set.
    private Object value;

    // Upper bound for BETWEEN conditions; null otherwise.
    private Object secondValue;

    // True for conditions with no bind value ("... is null").
    private boolean noValue;

    // True for one-placeholder conditions.
    private boolean singleValue;

    // True for two-placeholder BETWEEN conditions.
    private boolean betweenValue;

    // True when value is a List rendered as an IN clause.
    private boolean listValue;

    // Optional MyBatis type handler name; null when unset.
    private String typeHandler;

    public String getCondition() {
        return condition;
    }

    public Object getValue() {
        return value;
    }

    public Object getSecondValue() {
        return secondValue;
    }

    public boolean isNoValue() {
        return noValue;
    }

    public boolean isSingleValue() {
        return singleValue;
    }

    public boolean isBetweenValue() {
        return betweenValue;
    }

    public boolean isListValue() {
        return listValue;
    }

    public String getTypeHandler() {
        return typeHandler;
    }

    // Condition with no bind value, e.g. "id is null".
    protected Criterion(String condition) {
        super();
        this.condition = condition;
        this.typeHandler = null;
        this.noValue = true;
    }

    // Single-value condition; a List value becomes an IN clause.
    protected Criterion(String condition, Object value, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.typeHandler = typeHandler;
        if (value instanceof List<?>) {
            this.listValue = true;
        } else {
            this.singleValue = true;
        }
    }

    protected Criterion(String condition, Object value) {
        this(condition, value, null);
    }

    // Two-value BETWEEN condition.
    protected Criterion(String condition, Object value, Object secondValue, String typeHandler) {
        super();
        this.condition = condition;
        this.value = value;
        this.secondValue = secondValue;
        this.typeHandler = typeHandler;
        this.betweenValue = true;
    }

    protected Criterion(String condition, Object value, Object secondValue) {
        this(condition, value, secondValue, null);
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.lang.ref.WeakReference;
import java.nio.file.*;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Function;
import com.google.common.collect.MapMaker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.JsonNodeFactory;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.cassandra.concurrent.ExecutorPlus;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.util.File;
import org.apache.cassandra.utils.ExecutorUtils;
import org.apache.cassandra.utils.NoSpamLogger;
import static org.apache.cassandra.concurrent.ExecutorFactory.Global.executorFactory;
import static org.apache.cassandra.utils.Clock.Global.currentTimeMillis;
/**
 * Emits a JSON event log of compaction strategy activity (enable/disable,
 * flush, compaction, pending-task counts) for one ColumnFamilyStore.
 * Events are handed off to the JVM-wide {@link CompactionLogSerializer},
 * which writes them asynchronously to compaction.log under the
 * "cassandra.logdir" directory.
 */
public class CompactionLogger
{
    /**
     * Per-strategy hook letting a compaction strategy contribute extra JSON
     * detail for an sstable or for its own options; either method may return
     * null to contribute nothing.
     */
    public interface Strategy
    {
        JsonNode sstable(SSTableReader sstable);

        JsonNode options();

        // No-op strategy logger: contributes no details.
        static Strategy none = new Strategy()
        {
            public JsonNode sstable(SSTableReader sstable)
            {
                return null;
            }

            public JsonNode options()
            {
                return null;
            }
        };
    }

    /**
     * This will produce the compaction strategy's starting information.
     */
    public interface StrategySummary
    {
        JsonNode getSummary();
    }

    /**
     * This is an interface to allow writing to a different medium.
     */
    public interface Writer
    {
        /**
         * Writes the opening statement for a log source and records its tag,
         * so later write() calls for the same tag need no summary.
         * @param statement This should be written out to the medium capturing the logs
         * @param tag This is an identifier for a strategy; each strategy should have a distinct Object
         */
        void writeStart(JsonNode statement, Object tag);

        /**
         * @param statement This should be written out to the medium capturing the logs
         * @param summary This can be used when a tag is not recognized by this writer; this can be because the file
         *                has been rolled, or otherwise the writer had to start over
         * @param tag This is an identifier for a strategy; each strategy should have a distinct Object
         */
        void write(JsonNode statement, StrategySummary summary, Object tag);
    }

    // Internal functional shape: render one (strategy, sstable) pair to JSON.
    private interface CompactionStrategyAndTableFunction
    {
        JsonNode apply(AbstractCompactionStrategy strategy, SSTableReader sstable);
    }

    private static final JsonNodeFactory json = JsonNodeFactory.instance;
    private static final Logger logger = LoggerFactory.getLogger(CompactionLogger.class);
    // Single serializer shared by every CompactionLogger in the JVM: all tables
    // append to the same compaction.log file.
    private static final CompactionLogSerializer serializer = new CompactionLogSerializer();
    // Weak references -- presumably so logging does not keep a dropped table or
    // its strategy manager alive; every accessor null-checks the referent.
    private final WeakReference<ColumnFamilyStore> cfsRef;
    private final WeakReference<CompactionStrategyManager> csmRef;
    // Source of the short per-strategy ids used in the log output.
    private final AtomicInteger identifier = new AtomicInteger(0);
    // Weak-keyed so an abandoned strategy's id mapping can be collected.
    private final Map<AbstractCompactionStrategy, String> compactionStrategyMapping = new MapMaker().weakKeys().makeMap();
    private final AtomicBoolean enabled = new AtomicBoolean(false);

    public CompactionLogger(ColumnFamilyStore cfs, CompactionStrategyManager csm)
    {
        csmRef = new WeakReference<>(csm);
        cfsRef = new WeakReference<>(csm == null ? null : cfs);
    }

    // Applies the consumer to every strategy known to the manager; a no-op if
    // the manager has been collected.
    private void forEach(Consumer<AbstractCompactionStrategy> consumer)
    {
        CompactionStrategyManager csm = csmRef.get();
        if (csm == null)
            return;
        csm.getStrategies()
           .forEach(l -> l.forEach(consumer));
    }

    // JSON array built by applying `select` to every strategy.
    private ArrayNode compactionStrategyMap(Function<AbstractCompactionStrategy, JsonNode> select)
    {
        ArrayNode node = json.arrayNode();
        forEach(acs -> node.add(select.apply(acs)));
        return node;
    }

    // JSON array built by applying `csatf` to each sstable with its owning strategy.
    private ArrayNode sstableMap(Collection<SSTableReader> sstables, CompactionStrategyAndTableFunction csatf)
    {
        CompactionStrategyManager csm = csmRef.get();
        ArrayNode node = json.arrayNode();
        if (csm == null)
            return node;
        sstables.forEach(t -> node.add(csatf.apply(csm.getCompactionStrategyFor(t), t)));
        return node;
    }

    // Stable short id for the strategy, assigning the next integer on first sight.
    private String getId(AbstractCompactionStrategy strategy)
    {
        return compactionStrategyMapping.computeIfAbsent(strategy, s -> String.valueOf(identifier.getAndIncrement()));
    }

    // Describes every live sstable currently owned by the given strategy.
    private JsonNode formatSSTables(AbstractCompactionStrategy strategy)
    {
        ArrayNode node = json.arrayNode();
        CompactionStrategyManager csm = csmRef.get();
        ColumnFamilyStore cfs = cfsRef.get();
        if (csm == null || cfs == null)
            return node;
        for (SSTableReader sstable : cfs.getLiveSSTables())
        {
            if (csm.getCompactionStrategyFor(sstable) == strategy)
                node.add(formatSSTable(strategy, sstable));
        }
        return node;
    }

    private JsonNode formatSSTable(AbstractCompactionStrategy strategy, SSTableReader sstable)
    {
        ObjectNode node = json.objectNode();
        node.put("generation", sstable.descriptor.generation);
        node.put("version", sstable.descriptor.version.getVersion());
        node.put("size", sstable.onDiskLength());
        // Let the strategy append its own detail, if it provides any.
        JsonNode logResult = strategy.strategyLogger().sstable(sstable);
        if (logResult != null)
            node.set("details", logResult);
        return node;
    }

    // Full description of one strategy: id, type, tables, repaired flag,
    // data folders, and optional strategy-specific options.
    private JsonNode startStrategy(AbstractCompactionStrategy strategy)
    {
        ObjectNode node = json.objectNode();
        CompactionStrategyManager csm = csmRef.get();
        if (csm == null)
            return node;
        node.put("strategyId", getId(strategy));
        node.put("type", strategy.getName());
        node.set("tables", formatSSTables(strategy));
        node.put("repaired", csm.isRepaired(strategy));
        List<String> folders = csm.getStrategyFolders(strategy);
        ArrayNode folderNode = json.arrayNode();
        for (String folder : folders)
        {
            folderNode.add(folder);
        }
        node.set("folders", folderNode);
        JsonNode logResult = strategy.strategyLogger().options();
        if (logResult != null)
            node.set("options", logResult);
        return node;
    }

    private JsonNode shutdownStrategy(AbstractCompactionStrategy strategy)
    {
        ObjectNode node = json.objectNode();
        node.put("strategyId", getId(strategy));
        return node;
    }

    private JsonNode describeSSTable(AbstractCompactionStrategy strategy, SSTableReader sstable)
    {
        ObjectNode node = json.objectNode();
        node.put("strategyId", getId(strategy));
        node.set("table", formatSSTable(strategy, sstable));
        return node;
    }

    // Common keyspace/table/timestamp envelope added to every event.
    private void describeStrategy(ObjectNode node)
    {
        ColumnFamilyStore cfs = cfsRef.get();
        if (cfs == null)
            return;
        node.put("keyspace", cfs.keyspace.getName());
        node.put("table", cfs.getTableName());
        node.put("time", currentTimeMillis());
    }

    // The "enable" event carrying a full summary of all strategies; also used
    // as the StrategySummary when the serializer must restate context.
    private JsonNode startStrategies()
    {
        ObjectNode node = json.objectNode();
        node.put("type", "enable");
        describeStrategy(node);
        node.set("strategies", compactionStrategyMap(this::startStrategy));
        return node;
    }

    public void enable()
    {
        // compareAndSet guarantees the "enable" record is emitted exactly once
        // per off->on transition.
        if (enabled.compareAndSet(false, true))
        {
            serializer.writeStart(startStrategies(), this);
        }
    }

    public void disable()
    {
        if (enabled.compareAndSet(true, false))
        {
            ObjectNode node = json.objectNode();
            node.put("type", "disable");
            describeStrategy(node);
            node.set("strategies", compactionStrategyMap(this::shutdownStrategy));
            serializer.write(node, this::startStrategies, this);
        }
    }

    public void flush(Collection<SSTableReader> sstables)
    {
        if (enabled.get())
        {
            ObjectNode node = json.objectNode();
            node.put("type", "flush");
            describeStrategy(node);
            node.set("tables", sstableMap(sstables, this::describeSSTable));
            serializer.write(node, this::startStrategies, this);
        }
    }

    public void compaction(long startTime, Collection<SSTableReader> input, long endTime, Collection<SSTableReader> output)
    {
        if (enabled.get())
        {
            ObjectNode node = json.objectNode();
            node.put("type", "compaction");
            describeStrategy(node);
            node.put("start", String.valueOf(startTime));
            node.put("end", String.valueOf(endTime));
            node.set("input", sstableMap(input, this::describeSSTable));
            node.set("output", sstableMap(output, this::describeSSTable));
            serializer.write(node, this::startStrategies, this);
        }
    }

    public void pending(AbstractCompactionStrategy strategy, int remaining)
    {
        if (remaining != 0 && enabled.get())
        {
            ObjectNode node = json.objectNode();
            node.put("type", "pending");
            describeStrategy(node);
            node.put("strategyId", getId(strategy));
            node.put("pending", remaining);
            serializer.write(node, this::startStrategies, this);
        }
    }

    // Writes JSON lines to compaction.log on a dedicated single thread.
    private static class CompactionLogSerializer implements Writer
    {
        private static final String logDirectory = System.getProperty("cassandra.logdir", ".");
        // Single-threaded executor: serializes all writes and confines the
        // mutable state below to one thread.
        private final ExecutorPlus loggerService = executorFactory().sequential("CompactionLogger");
        // This is only accessed on the logger service thread, so it does not need to be thread safe
        private final Set<Object> rolled = new HashSet<>();
        private OutputStreamWriter stream;

        // Rolls any pre-existing compaction.log aside to the first free
        // compaction-N.log name, then opens a fresh file.
        private static OutputStreamWriter createStream() throws IOException
        {
            int count = 0;
            Path compactionLog = new File(logDirectory, "compaction.log").toPath();
            if (Files.exists(compactionLog))
            {
                Path tryPath = compactionLog;
                while (Files.exists(tryPath))
                {
                    tryPath = new File(logDirectory, String.format("compaction-%d.log", count++)).toPath();
                }
                Files.move(compactionLog, tryPath);
            }
            return new OutputStreamWriter(Files.newOutputStream(compactionLog, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE));
        }

        // Lazily opens the stream and appends; on IO failure the record is
        // dropped and the error is rate-limit logged.
        private void writeLocal(String toWrite)
        {
            try
            {
                if (stream == null)
                    stream = createStream();
                stream.write(toWrite);
                stream.flush();
            }
            catch (IOException ioe)
            {
                // We'll drop the change and log the error to the logger.
                NoSpamLogger.log(logger, NoSpamLogger.Level.ERROR, 1, TimeUnit.MINUTES,
                                 "Could not write to the log file: {}", ioe);
            }
        }

        public void writeStart(JsonNode statement, Object tag)
        {
            // Render on the caller's thread; hand the string to the logger thread.
            final String toWrite = statement.toString() + System.lineSeparator();
            loggerService.execute(() -> {
                rolled.add(tag);
                writeLocal(toWrite);
            });
        }

        public void write(JsonNode statement, StrategySummary summary, Object tag)
        {
            final String toWrite = statement.toString() + System.lineSeparator();
            loggerService.execute(() -> {
                // Unknown tag (e.g. the writer started over): emit the summary
                // first so the statement has context.
                if (!rolled.contains(tag))
                {
                    writeLocal(summary.getSummary().toString() + System.lineSeparator());
                    rolled.add(tag);
                }
                writeLocal(toWrite);
            });
        }
    }

    public static void shutdownNowAndWait(long timeout, TimeUnit unit) throws InterruptedException, TimeoutException
    {
        ExecutorUtils.shutdownNowAndWait(timeout, unit, serializer.loggerService);
    }
}
| |
/**
* Copyright (c) 2007-2014 Kaazing Corporation. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.kaazing.gateway.transport.http.bridge.filter;
import static java.lang.String.format;
import static org.kaazing.gateway.transport.BridgeSession.REMOTE_ADDRESS;
import static org.kaazing.gateway.transport.http.HttpHeaders.HEADER_FORWARDED;
import java.net.URI;
import java.security.Principal;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginContext;
import org.apache.mina.core.session.AttributeKey;
import org.apache.mina.core.session.IoSession;
import org.apache.mina.core.write.WriteRequest;
import org.kaazing.gateway.resource.address.ResourceAddress;
import org.kaazing.gateway.resource.address.http.HttpResourceAddress;
import org.kaazing.gateway.security.TypedCallbackHandlerMap;
import org.kaazing.gateway.security.auth.DefaultLoginResult;
import org.kaazing.gateway.security.auth.context.ResultAwareLoginContext;
import org.kaazing.gateway.security.auth.token.DefaultAuthenticationToken;
import org.kaazing.gateway.transport.http.DefaultHttpSession;
import org.kaazing.gateway.transport.http.HttpCookie;
import org.kaazing.gateway.transport.http.HttpProtocol;
import org.kaazing.gateway.transport.http.HttpStatus;
import org.kaazing.gateway.transport.http.bridge.HttpMessage;
import org.kaazing.gateway.transport.http.bridge.HttpRequestMessage;
import org.kaazing.gateway.transport.http.bridge.HttpResponseMessage;
import org.kaazing.gateway.transport.http.security.auth.token.AuthenticationTokenExtractor;
import org.kaazing.gateway.transport.http.security.auth.token.DefaultAuthenticationTokenExtractor;
import org.kaazing.gateway.util.scheduler.SchedulerProvider;
import org.kaazing.mina.core.session.IoSessionEx;
import org.slf4j.Logger;
public class HttpSubjectSecurityFilter extends HttpLoginSecurityFilter {
public static final String NAME = HttpProtocol.NAME + "#security";
public static final String AUTHORIZATION_HEADER = "Authorization";
public static final String WWW_AUTHENTICATE_HEADER = "WWW-Authenticate";
/**
* Prefix to the authentication scheme to indicate that the Kaazing client application will handle the challenge rather than
* delegate to the browser or the native platform.
*/
public static final String AUTH_SCHEME_APPLICATION_PREFIX = "Application ";
public static final String AUTH_SCHEME_BASIC = "Basic";
public static final String AUTH_SCHEME_NEGOTIATE = "Negotiate";
private static final String HEADER_FORWARDED_REMOTE_IP_ADDRESS = "for=%s";
static final AttributeKey NEW_SESSION_COOKIE_KEY = new AttributeKey(HttpSubjectSecurityFilter.class, "sessionCookie");
private final AuthorizationMap authorizationMap;
private ScheduledExecutorService scheduler;
// Convenience constructor: no logger, so trace logging is disabled.
public HttpSubjectSecurityFilter() {
    this(null);
}
public HttpSubjectSecurityFilter(Logger logger) {
    super(logger);
    // Each filter has its own map. There's only one filter though.
    // Reset the map when the filter is constructed to allow an embedded gateway to repeatedly launch
    // (e.g. for integration tests)
    authorizationMap = new AuthorizationMap();
}
public void setSchedulerProvider(SchedulerProvider provider) {
    // Obtain the scheduler registered under "loginmodule" for this filter's use.
    scheduler = provider.getScheduler("loginmodule", false);
}
// --------------------------------------------------------
// Security code for subject-security LEGACY
/**
 * Entry point for inbound HTTP requests.  Ensures a Forwarded header carrying
 * the remote IP, seeds the request's Subject from the transport session, and
 * either bypasses security entirely (no realm configured) or hands off to
 * securityMessageReceived() for authentication.
 */
@Override
public void doMessageReceived(NextFilter nextFilter, IoSession session, Object message) throws Exception {
    if (! httpRequestMessageReceived(nextFilter, session, message)) return;

    HttpRequestMessage httpRequest = (HttpRequestMessage) message;
    final boolean loggerIsEnabled = logger != null && logger.isTraceEnabled();

    // Populate the Forwarded header from the TCP transport address when the
    // client/proxy did not supply one.
    String forwarded = httpRequest.getHeader(HEADER_FORWARDED);
    if ((forwarded == null) || (forwarded.length() == 0)) {
        String remoteIpAddress = null;
        ResourceAddress resourceAddress = REMOTE_ADDRESS.get(session);
        ResourceAddress tcpResourceAddress = resourceAddress.findTransport("tcp");
        if (tcpResourceAddress != null) {
            URI resource = tcpResourceAddress.getResource();
            remoteIpAddress = resource.getHost();
            if (loggerIsEnabled) {
                logger.trace(format("HttpSubjectSecurityFilter: Remote IP Address: '%s'", remoteIpAddress));
            }
        }
        if (remoteIpAddress != null) {
            httpRequest.setHeader(HEADER_FORWARDED, format(HEADER_FORWARDED_REMOTE_IP_ADDRESS, remoteIpAddress));
        }
    }

    // Make sure we start with the subject from the underlying transport session in case it already has an authenticated subject
    // (e.g. we are httpxe and our transport is http or transport is SSL with a client certificate)
    if (httpRequest.getSubject() == null) {
        httpRequest.setSubject( ((IoSessionEx)session).getSubject() );
    }

    ResourceAddress httpAddress = httpRequest.getLocalAddress();
    String realmName = httpAddress.getOption(HttpResourceAddress.REALM_NAME);
    // No realm configured: the service is unprotected, so skip authentication.
    if ( realmName == null ) {
        ResultAwareLoginContext loginContext = null;
        // Make sure we propagate the login context from the layer below in httpxe case
        if (session instanceof DefaultHttpSession) {
            loginContext = ((DefaultHttpSession)session).getLoginContext();
        }
        if (loginContext != null) {
            httpRequest.setLoginContext(loginContext);
        }
        else {
            // KG-3232/KG-3267 style: never leave the login context unset.
            setUnprotectedLoginContext(httpRequest);
        }
        if (loggerIsEnabled) {
            logger.trace("HttpSubjectSecurityFilter skipped because no realm is configured.");
        }
        super.doMessageReceived(nextFilter, session, message);
        return;
    }

    securityMessageReceived(nextFilter, session, httpRequest);
}
/**
 * Attaches the session cookie produced by a successful login to the current
 * request, and stashes it on the session so filterWrite() can copy it onto
 * the outgoing response.
 */
protected void writeSessionCookie(IoSession session, HttpRequestMessage httpRequest, DefaultLoginResult loginResult) {
    // Secure requests always have the cookie accessible, even on first access.
    HttpCookie cookie = (HttpCookie) loginResult.getLoginAuthorizationAttachment();
    session.setAttribute(NEW_SESSION_COOKIE_KEY, cookie);
    httpRequest.addCookie(cookie);
    if (loggerEnabled()) {
        logger.trace("Sending session cookie {}", cookie);
    }
}
/**
 * If writeSessionCookie() stashed a newly issued session cookie on this
 * session, attach it to the outgoing HTTP response.
 */
@Override
public void filterWrite(NextFilter nextFilter, IoSession session, WriteRequest writeRequest) throws Exception {
    // include new session cookie in response
    Object message = writeRequest.getMessage();
    HttpMessage httpMessage = (HttpMessage) message;
    switch (httpMessage.getKind()) {
    case RESPONSE:
        HttpResponseMessage httpResponse = (HttpResponseMessage) httpMessage;
        // removeAttribute: the cookie is consumed so it is sent at most once.
        HttpCookie sessionCookie = (HttpCookie) session.removeAttribute(NEW_SESSION_COOKIE_KEY);
        if (sessionCookie != null) {
            httpResponse.addCookie(sessionCookie);
        }
        break;
    default:
        break;
    }
    super.filterWrite(nextFilter, session, writeRequest);
}
@Override
public void exceptionCaught(NextFilter nextFilter, IoSession session, Throwable cause) throws Exception {
    // Trace for diagnosis, then let the default handling propagate the failure.
    if (loggerEnabled()) {
        logger.trace("Caught exception.", cause);
    }
    super.exceptionCaught(nextFilter, session, cause);
}
@Override
public void destroy() throws Exception {
    // No filter-local resources are released here; delegates to the base class.
    // NOTE(review): the scheduler obtained in setSchedulerProvider() is not shut
    // down here -- presumably owned by the SchedulerProvider; confirm.
    super.destroy();
}
/**
* <strong>For testing only</strong>
*
* Allows for the authorizationMap to be accessed from unit tests.
*
*/
AuthorizationMap getAuthorizationMap() {
    // Package-private accessor, exposed for unit tests only (see javadoc above).
    return authorizationMap;
}
/**
* Captures the notion of a Subject object being valid for a certain time (e.g. inactivity-timeout).
*/
/**
 * A Subject paired with an optional expiration instant.  The timestamp is
 * treated as seconds since the epoch, judging by the *1000L conversion in
 * toString() -- TODO confirm against callers.
 */
public static class TimedCredential {
    private Subject subject;
    private Long expirationTimestamp;

    /**
     * @param subject the authenticated subject; must not be null
     * @param expirationTimestamp expiry instant, or null for no expiry
     */
    public TimedCredential(Subject subject, Long expirationTimestamp) {
        if (subject == null) {
            throw new IllegalArgumentException("subject was null");
        }
        this.subject = subject;
        this.expirationTimestamp = expirationTimestamp;
    }

    public Subject getSubject() {
        return subject;
    }

    public boolean hasExpirationTimestamp() {
        return expirationTimestamp != null;
    }

    public Long getExpirationTimestamp() {
        return expirationTimestamp;
    }

    public void setExpirationTimestamp(Long expirationTimestamp) {
        this.expirationTimestamp = expirationTimestamp;
    }

    public String toString() {
        StringBuilder text = new StringBuilder("[TimedCredential: Subject(");
        // Principal names joined with '/'.
        boolean first = true;
        for (Principal principal : subject.getPrincipals()) {
            if (!first) {
                text.append('/');
            }
            text.append(principal.getName());
            first = false;
        }
        text.append(") ");
        if (expirationTimestamp != null) {
            String expires = new SimpleDateFormat("yyyyMMdd HH:mm:ss").format(new Date(expirationTimestamp * 1000L));
            text.append("; expires on ").append(expires);
        }
        return text.append(" ]").toString();
    }
}
/**
 * Maintains a mapping from authorization key strings to the subject they
 * correspond to, and for how long that mapping remains valid.
 * <p/>
 * In addition, establishes a reverse mapping from Subject to authorization key.
 * <p/>
 * Combined, this allows one to look up a credential and validate its expiration
 * by authorization key, and also to clear the authorization map by Subject.
 */
public static class AuthorizationMap {
    // Forward map: (realmName + key) -> credential.
    private Map<String, TimedCredential> keyToTimedCredentialMap = new ConcurrentHashMap<>();
    // Reverse map: subject -> (realmName + key), enabling eviction by Subject.
    private Map<Subject, String> subjectToKeyMap = new ConcurrentHashMap<>();

    // Raw-key lookup, exposed for unit tests only.
    TimedCredential get(String key) {
        return keyToTimedCredentialMap.get(key);
    }

    public TimedCredential get(String realmName, String key) {
        return keyToTimedCredentialMap.get(realmName + key);
    }

    // Compound updates are synchronized so the two maps stay consistent.
    public void put(String realmName, String key, TimedCredential value) {
        synchronized (this) {
            keyToTimedCredentialMap.put(realmName + key, value);
            subjectToKeyMap.put(value.subject, realmName + key);
        }
    }

    public TimedCredential removeByKey(String realmName, String key) {
        synchronized (this) {
            TimedCredential evicted = keyToTimedCredentialMap.remove(realmName + key);
            if (evicted != null && evicted.subject != null) {
                subjectToKeyMap.remove(evicted.subject);
            }
            return evicted;
        }
    }

    public String removeBySubject(Subject subject) {
        synchronized (this) {
            String evictedKey = subjectToKeyMap.remove(subject);
            if (evictedKey != null) {
                keyToTimedCredentialMap.remove(evictedKey);
            }
            return evictedKey;
        }
    }

    // NOTE(review): callers are expected to pass the composite realmName + key here.
    public boolean containsKey(String key) {
        return keyToTimedCredentialMap.containsKey(key);
    }

    public boolean containsSubject(Subject subject) {
        return subjectToKeyMap.containsKey(subject);
    }

    public String getKey(Subject subject) {
        return subjectToKeyMap.get(subject);
    }

    public int size() {
        return keyToTimedCredentialMap.size();
    }
}
// --------------------------------------------------------
// Security code for subject-security going forward
void securityMessageReceived(NextFilter nextFilter, IoSession session, Object message) throws Exception {
// Authenticates an incoming HTTP request before it continues down the filter chain.
// Fast paths: sessions already logged in, and services with no configured realm.
// Otherwise the auth token is extracted and LoginContext.login() is scheduled off
// the I/O thread (incoming events are suspended until it completes).
final boolean loggerIsEnabled = logger != null && logger.isTraceEnabled();
if (! httpRequestMessageReceived(nextFilter, session, message)) return;
HttpRequestMessage httpRequest = (HttpRequestMessage) message;
ResourceAddress httpAddress = httpRequest.getLocalAddress();
String realmName = httpAddress.getOption(HttpResourceAddress.REALM_NAME);
String realmChallengeScheme = httpAddress.getOption(HttpResourceAddress.REALM_CHALLENGE_SCHEME);
if ( alreadyLoggedIn(session, httpAddress)) {
// KG-3232, KG-3267: we should never leave the login context unset
// for unprotected services.
if (httpRequest.getLoginContext() == null) {
setUnprotectedLoginContext(httpRequest);
}
if (loggerIsEnabled) {
logger.trace("HttpSubjectSecurityFilter skipped because we are already allowed or logged in.");
}
super.doMessageReceived(nextFilter, session, message);
return;
}
if ( realmName == null ) {
// No realm configured: service is unprotected, so attach an unprotected login
// context and continue without authenticating.
setUnprotectedLoginContext(httpRequest);
if (loggerIsEnabled) {
logger.trace("HttpSecurityStandardFilter skipped because no realm is configured.");
}
super.doMessageReceived(nextFilter, session, message);
return;
}
AuthenticationTokenExtractor tokenExtractor = DefaultAuthenticationTokenExtractor.INSTANCE;
//
// Login using the token; if login fails, the appropriate reply has already been sent from this filter
// so stop the filter chain here.
//
DefaultAuthenticationToken authToken = (DefaultAuthenticationToken) tokenExtractor.extract(httpRequest);
// If the client request provided authentication data which has
// a challenge scheme, make sure that the client-sent challenge
// scheme matches what we expect. If not, it's a badly formatted
// request, and the client should be informed of this.
String clientChallengeScheme = authToken.getScheme();
String expectedChallengeScheme = getBaseAuthScheme(realmChallengeScheme);
if (clientChallengeScheme != null &&
clientChallengeScheme.equals(expectedChallengeScheme) == false) {
if (loggerEnabled()) {
logger.trace(String.format("A websocket request used the '%s' challenge scheme when we expected the '%s' challenge scheme", clientChallengeScheme, expectedChallengeScheme));
}
String reason = String.format("Expected challenge scheme '%s' not found", expectedChallengeScheme);
writeResponse(HttpStatus.CLIENT_BAD_REQUEST, reason, nextFilter, session, httpRequest);
return;
}
// Now set the expected challenge scheme on the AuthToken. If the
// client provided a scheme, the above check ensures that the
// provided scheme matches our expected scheme, so calling setScheme()
// does not harm anything. If the client did NOT provide a scheme,
// this properly sets one, for the benefit of login modules which
// check for such things.
authToken.setScheme(expectedChallengeScheme);
// Suspend incoming events into this filter. Will resume after LoginContext.login() completion
suspendIncoming(session);
// Schedule LoginContext.login() execution using a separate thread
LoginContextTask loginContextTask = new LoginContextTask(nextFilter, session, httpRequest, authToken, null);
scheduler.execute(loginContextTask);
}
// Task for running LoginContext.login() in a separate thread (other than the I/O thread),
// so that slow login modules cannot stall I/O processing. On completion it resumes the
// events that were suspended while authentication was in flight.
private final class LoginContextTask implements Runnable {
    private final NextFilter nextFilter;
    private final IoSession session;
    private final HttpRequestMessage httpRequest;
    private final DefaultAuthenticationToken authToken;
    private final TypedCallbackHandlerMap additionalCallbacks;
    // Creation time, used only for trace-level latency reporting.
    private final long createdTime;

    LoginContextTask(NextFilter nextFilter, IoSession session, HttpRequestMessage httpRequest,
                     DefaultAuthenticationToken authToken, TypedCallbackHandlerMap additionalCallbacks) {
        this.nextFilter = nextFilter;
        this.session = session;
        this.httpRequest = httpRequest;
        this.authToken = authToken;
        this.additionalCallbacks = additionalCallbacks;
        this.createdTime = System.currentTimeMillis();
    }

    @Override
    public void run() {
        if (loggerEnabled()) {
            // Fix: logger.trace() does not interpret printf-style %d/%s specifiers;
            // format explicitly, matching the String.format usage elsewhere in this filter.
            logger.trace(String.format("Executing login task %d ms after scheduling for session %s",
                    System.currentTimeMillis() - createdTime, session));
        }
        boolean succeeded = login(nextFilter, session, httpRequest, authToken, additionalCallbacks);
        try {
            if (succeeded) {
                // Complete the rest of the filter chain
                HttpSubjectSecurityFilter.super.doMessageReceived(nextFilter, session, httpRequest);
            }
            // If there are any events buffered during suspension, resume them
            HttpSubjectSecurityFilter.super.resumeIncoming(session);
        } catch (Exception e) {
            session.getFilterChain().fireExceptionCaught(e);
        }
        if (loggerEnabled()) {
            logger.trace(String.format("Finished login task after %d ms for session %s",
                    System.currentTimeMillis() - createdTime, session));
        }
    }
}
}
| |
/***********************************************************************************************
* The MIT License (MIT)
*
* Copyright (c) 2016 Stephen Carlson
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
**********************************************************************************************/
package com.stcarlso.goece.utility;
import java.io.Serializable;
/**
 * Represents a complex value in engineering notation, with the units, suffix, significand, and
 * number of significant figures stored. Optional tolerance is also carried.
 *
 * Instances are immutable. The magnitude is held by the superclass; the phase angle is
 * normalized to [0, 360) degrees, and the rectangular components are cached.
 */
public class ComplexValue extends EngineeringValue implements Serializable {
    private static final long serialVersionUID = -7587317370710347618L;
    /**
     * The phase angle in degrees, normalized to [0, 360).
     */
    protected final double angle;
    /**
     * Cached imaginary part (transient: recomputable from magnitude and angle).
     */
    private final transient double imag;
    /**
     * Cached real part (transient: recomputable from magnitude and angle).
     */
    private final transient double real;
    /**
     * Create an engineering value with no units, no tolerance, and 3 significant figures.
     *
     * @param mag the raw magnitude value
     * @param phase the phase angle in degrees
     */
    public ComplexValue(final double mag, final double phase) {
        this(mag, phase, "");
    }
    /**
     * Creates a new engineering value with the specified units and 3 significant figures.
     *
     * @param mag the raw magnitude value
     * @param phase the phase angle in degrees
     * @param units the units to assign to this value
     */
    public ComplexValue(final double mag, final double phase, final String units) {
        this(mag, phase, 0.0, units);
    }
    /**
     * Creates a new engineering value with the specified tolerance and units. The value will
     * have 3 significant figures.
     *
     * @param mag the raw magnitude value
     * @param phase the phase angle in degrees
     * @param tolerance the tolerance of this value (0.1 = 10%, 0.01 = 1%) or 0 to suppress
     * @param units the units to assign to this value
     */
    public ComplexValue(final double mag, final double phase, final double tolerance,
                        final String units) {
        this(mag, phase, tolerance, 3, units);
    }
    /**
     * Create a new engineering value based on an existing value's tolerance, significant
     * figures, and units, but with a new raw value and phase angle.
     *
     * @param mag the raw magnitude value
     * @param phase the phase angle in degrees
     * @param template the template value where units, tolerance, and significant figures are
     * copied
     */
    public ComplexValue(final double mag, final double phase, final ComplexValue template) {
        this(mag, phase, template.getTolerance(), template.getSigfigs(), template.getUnits());
    }
    /**
     * Creates a new engineering value with the specified tolerance, precision, and units.
     *
     * @param mag the raw magnitude value
     * @param phase the phase angle in degrees
     * @param tolerance the tolerance of this value (0.1 = 10%, 0.01 = 1%) or 0 to suppress
     * @param sigfigs the number of significant figures
     * @param units the units to assign to this value
     */
    public ComplexValue(final double mag, final double phase, final double tolerance,
                        final int sigfigs, final String units) {
        super(Math.abs(mag), tolerance, sigfigs, units);
        final double angleRad = Math.toRadians(phase);
        // Compensate for negative magnitude by rotating the phase 180 degrees
        double phaseNormal = phase;
        if (mag < 0.0)
            phaseNormal += 180.0;
        // Normalize to [0, 360)
        phaseNormal %= 360.0;
        if (phaseNormal < 0.0) phaseNormal += 360.0;
        angle = phaseNormal;
        // Cache rectangular form; the un-normalized angle is used so the sign of mag
        // carries through naturally.
        real = ECECalc.ieeeRound(mag * Math.cos(angleRad));
        imag = ECECalc.ieeeRound(mag * Math.sin(angleRad));
    }
    @Override
    public EngineeringValue add(final EngineeringValue other) {
        // Addition is performed in rectangular coordinates.
        return newRectangularValue(getReal() + other.getReal(),
            getImaginary() + other.getImaginary());
    }
    @Override
    public EngineeringValue divide(final EngineeringValue other) {
        final double divisor = other.getValue(), num = getValue();
        // Only the indeterminate 0/0 case is rejected; x/0 for x != 0 follows
        // IEEE-754 semantics (infinite magnitude).
        if (divisor == 0.0 && num == 0.0)
            throw new ArithmeticException("Complex-valued division by zero");
        return newValue(num / divisor, getAngle() - other.getAngle());
    }
    /**
     * Equality is based on phase angle, units, and magnitude.
     * NOTE(review): tolerance and significant figures are excluded here, while hashCode()
     * delegates to super.hashCode() — verify the two stay consistent with the superclass.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        final ComplexValue value = (ComplexValue)o;
        return Double.compare(value.getAngle(), getAngle()) == 0 && getUnits().equals(
            value.getUnits()) && Double.compare(value.getValue(), getValue()) == 0;
    }
    @Override
    public double getAngle() {
        return angle;
    }
    @Override
    public double getImaginary() {
        return imag;
    }
    @Override
    public double getReal() {
        return real;
    }
    @Override
    public int hashCode() {
        final long temp = Double.doubleToLongBits(getAngle());
        return 31 * (int)(temp ^ (temp >>> 32)) + super.hashCode();
    }
    @Override
    public EngineeringValue multiply(final EngineeringValue other) {
        // Multiplication is performed in polar coordinates: magnitudes multiply, angles add.
        return newValue(getValue() * other.getValue(), getAngle() + other.getAngle());
    }
    /**
     * Convenience method to copy the metadata of this value into a new object.
     *
     * @param newReal the new real component
     * @param newImag the new imaginary component
     * @return a new instance with the specified value, but units and tolerance from this object
     */
    public ComplexValue newRectangularValue(final double newReal, final double newImag) {
        final double newMag = Math.hypot(newReal, newImag);
        double newPhase = Math.toDegrees(Math.atan2(newImag, newReal));
        // atan2 yields [-180, 180); shift into [0, 360)
        if (newPhase < 0.0)
            newPhase += 360.0;
        return new ComplexValue(newMag, newPhase, this);
    }
    /**
     * Convenience method to copy the metadata of this value into a new object.
     *
     * @param newMag the new raw magnitude value
     * @param newPhase the new phase angle in degrees
     * @return a new instance with the specified value, but units and tolerance from this object
     */
    public ComplexValue newValue(final double newMag, final double newPhase) {
        return new ComplexValue(newMag, newPhase, this);
    }
    @Override
    public EngineeringValue pow(final double exponent) {
        // Complex values have more than one of these -- return the first (principal) root
        EngineeringValue ret;
        if (exponent == 0.0)
            ret = newValue(1.0);
        else {
            // Non-trivial case
            final double absExponent = Math.abs(exponent);
            ret = newValue(Math.pow(getValue(), absExponent), getAngle() * absExponent);
            if (exponent < 0.0)
                // Handle negative exponents correctly: x^-n = 1 / x^n
                ret = newValue(1.0, 0.0).divide(ret);
        }
        return ret;
    }
    @Override
    public EngineeringValue subtract(final EngineeringValue other) {
        // Subtraction is performed in rectangular coordinates.
        return newRectangularValue(getReal() - other.getReal(),
            getImaginary() - other.getImaginary());
    }
    @Override
    public String toString() {
        // Builds a format string of the shape: "<significand> %s%s [ ±<tol>%] @ %.1f°"
        final StringBuilder format = new StringBuilder(significandToString());
        final double tol = getTolerance();
        format.append(" %s%s");
        if (tol > 0.0) {
            // value +/- #%
            format.append(' ');
            format.append(P_M_SYMBOL);
            format.append(toleranceToString(tol));
            format.append("%%");
        }
        format.append(" @ %.1f\u00B0");
        return String.format(format.toString(), getSIPrefix(), getUnits(), getAngle());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore;
import java.io.IOException;
import java.util.HashMap;
import javax.security.auth.login.LoginException;
import javax.security.sasl.AuthenticationException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
import org.apache.hadoop.hive.metastore.security.TUGIContainingTransport;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.security.MetastoreDelegationTokenManager;
import org.apache.thrift.transport.layered.TFramedTransport;
import org.apache.thrift.transport.TSaslServerTransport;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.apache.thrift.transport.TTransportFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * This class helps in some aspects of authentication. It creates the proper Thrift classes for the
 * given configuration as well as helps with authenticating requests.
 *
 * This is copied from HiveAuthFactory and modified to be used for HMS. But we should see if we can
 * use same code for both Hive and HMS.
 */
public class AuthFactory {
  private static final Logger LOG = LoggerFactory.getLogger(AuthFactory.class);
  // SASL server bridge; non-null only when running SASL with kerberized Hadoop.
  private HadoopThriftAuthBridge.Server saslServer;
  // Configured authentication type (KERBEROS, LDAP, NOSASL, ...).
  private String authTypeStr;
  private final String transportMode;
  // "kerberos" or "simple", mirroring the underlying Hadoop security mode.
  private String hadoopAuth;
  private MetastoreDelegationTokenManager delegationTokenManager = null;
  private boolean useFramedTransport;
  private boolean executeSetUGI;
  // NOTE(review): assigned but never read within this class — retained for compatibility.
  private Configuration conf;

  /**
   * Reads the authentication configuration and, when SASL with kerberized Hadoop is enabled,
   * creates the SASL server and starts the delegation token manager.
   *
   * @throws HiveMetaException if framed transport is combined with SASL (unsupported)
   * @throws TTransportException if the delegation token manager fails to start
   */
  public AuthFactory(HadoopThriftAuthBridge bridge, Configuration conf, Object baseHandler)
      throws HiveMetaException, TTransportException {
    // For now metastore only operates in binary mode. It would be good if we could model an HMS
    // as a ThriftCliService, but right now that's too much tied with HiveServer2.
    this.conf = conf;
    transportMode = "binary";
    authTypeStr = MetastoreConf.getVar(conf,
        MetastoreConf.ConfVars.THRIFT_METASTORE_AUTHENTICATION);
    useFramedTransport = MetastoreConf.getBoolVar(conf, ConfVars.USE_THRIFT_FRAMED_TRANSPORT);
    executeSetUGI = MetastoreConf.getBoolVar(conf, ConfVars.EXECUTE_SET_UGI);
    // Secured mode communication with Hadoop
    // Blend this with THRIFT_METASTORE_AUTHENTICATION, for now they are separate. useSasl
    // should be set to true when authentication is anything other than NONE. Or we should use a
    // separate configuration for that?
    // In case of HS2, this is defined by configuration HADOOP_SECURITY_AUTHENTICATION, which
    // indicates the authentication used by underlying HDFS. In case of metastore SASL and hadoop
    // authentication seem to be tied up. But with password based SASL we might have to break
    // this coupling. Should we provide HADOOP_SECURITY_AUTHENTICATION for hadoop too, or use
    // USE_THRIFT_SASL itself to indicate that the underlying Hadoop is kerberized.
    if (StringUtils.isBlank(authTypeStr)) {
      authTypeStr = AuthConstants.AuthTypes.NOSASL.getAuthName();
    }
    if (MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.USE_THRIFT_SASL)) {
      hadoopAuth = "kerberos";
      // If SASL is enabled but no authentication method is specified, we use only kerberos as an
      // authentication mechanism.
      if (authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.NOSASL.getAuthName())) {
        authTypeStr = AuthConstants.AuthTypes.KERBEROS.getAuthName();
      }
    } else {
      hadoopAuth = "simple";
    }
    LOG.info("Using authentication " + authTypeStr +
        " with kerberos authentication " + (isSASLWithKerberizedHadoop() ? "enabled." : "disabled"));
    if (isSASLWithKerberizedHadoop()) {
      // we are in secure mode.
      if (useFramedTransport) {
        throw new HiveMetaException("Framed transport is not supported with SASL enabled.");
      }
      saslServer =
          bridge.createServer(MetastoreConf.getVar(conf, MetastoreConf.ConfVars.KERBEROS_KEYTAB_FILE),
              MetastoreConf.getVar(conf, ConfVars.KERBEROS_PRINCIPAL),
              MetastoreConf.getVar(conf, ConfVars.CLIENT_KERBEROS_PRINCIPAL));
      // Start delegation token manager
      delegationTokenManager = new MetastoreDelegationTokenManager();
      try {
        delegationTokenManager.startDelegationTokenSecretManager(conf, baseHandler,
            HadoopThriftAuthBridge.Server.ServerMode.METASTORE);
        saslServer.setSecretManager(delegationTokenManager.getSecretManager());
      } catch (IOException e) {
        throw new TTransportException("Failed to start token manager", e);
      }
    }
  }

  /**
   * Builds the Thrift transport factory matching the configured authentication type.
   *
   * @param useSSL whether SSL is in use (affects SASL properties)
   * @param conf the configuration to read SASL properties from
   * @throws LoginException on unsupported configurations or transport setup failures
   */
  TTransportFactory getAuthTransFactory(boolean useSSL, Configuration conf) throws LoginException {
    TTransportFactory transportFactory;
    TSaslServerTransport.Factory serverTransportFactory;
    if (isSASLWithKerberizedHadoop()) {
      try {
        if (useFramedTransport) {
          throw new LoginException("Framed transport is not supported with SASL enabled.");
        }
        serverTransportFactory = saslServer.createSaslServerTransportFactory(
            MetaStoreUtils.getMetaStoreSaslProperties(conf, useSSL));
        transportFactory = saslServer.wrapTransportFactoryInClientUGI(serverTransportFactory);
      } catch (TTransportException e) {
        // Fix: preserve the underlying cause instead of discarding the stack trace.
        LoginException le = new LoginException(e.getMessage());
        le.initCause(e);
        throw le;
      }
      if (authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.KERBEROS.getAuthName())) {
        // no-op: the kerberos SASL definition is already in place
      } else if (authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.NONE.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.LDAP.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.PAM.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.CUSTOM.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.CONFIG.getAuthName())) {
        // Password-based authentication alongside kerberos: register a PLAIN mechanism too.
        try {
          MetaStorePlainSaslHelper.init();
          LOG.debug("Adding server definition for PLAIN SaSL with authentication "+ authTypeStr +
              " to transport factory " + serverTransportFactory);
          serverTransportFactory.addServerDefinition("PLAIN",
              authTypeStr, null, new HashMap<>(),
              new MetaStorePlainSaslHelper.PlainServerCallbackHandler(authTypeStr, conf));
        } catch (AuthenticationException e) {
          // Fix: keep the cause, and separate the message from the exception text
          // (the original concatenated them with no separator).
          LoginException le = new LoginException("Error setting callback handler: " + e.getMessage());
          le.initCause(e);
          throw le;
        }
      } else {
        throw new LoginException("Unsupported authentication type " + authTypeStr);
      }
    } else {
      if (authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.NONE.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.LDAP.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.PAM.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.CUSTOM.getAuthName()) ||
          authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.CONFIG.getAuthName())) {
        if (useFramedTransport) {
          throw new LoginException("Framed transport is not supported with password based " +
              "authentication enabled.");
        }
        if (executeSetUGI) {
          throw new LoginException("Setting " + ConfVars.EXECUTE_SET_UGI + " is not supported " +
              "with password based authentication enabled.");
        }
        LOG.info("Using plain SASL transport factory with " + authTypeStr + " authentication");
        transportFactory = MetaStorePlainSaslHelper.getPlainTransportFactory(authTypeStr, conf);
      } else if (authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.NOSASL.getAuthName())) {
        // No SASL at all: optionally wrap in a framed and/or UGI-carrying transport.
        if (executeSetUGI) {
          transportFactory = useFramedTransport ?
              new ChainedTTransportFactory(new TFramedTransport.Factory(),
                  new TUGIContainingTransport.Factory())
              : new TUGIContainingTransport.Factory();
        } else {
          transportFactory = useFramedTransport ?
              new TFramedTransport.Factory() : new TTransportFactory();
        }
      } else {
        throw new LoginException("Unsupported authentication type " + authTypeStr);
      }
    }
    return transportFactory;
  }

  /** @throws IllegalStateException when SASL with kerberized Hadoop is not configured */
  public HadoopThriftAuthBridge.Server getSaslServer() throws IllegalStateException {
    if (!isSASLWithKerberizedHadoop() || null == saslServer) {
      throw new IllegalStateException("SASL server is not setup");
    }
    return saslServer;
  }

  /** @throws IllegalStateException when SASL with kerberized Hadoop is not configured */
  public MetastoreDelegationTokenManager getDelegationTokenManager() throws IllegalStateException {
    if (!isSASLWithKerberizedHadoop() || null == saslServer) {
      throw new IllegalStateException("SASL server is not setup");
    }
    return delegationTokenManager;
  }

  /** @return true when the underlying Hadoop is kerberized and SASL is in use */
  public boolean isSASLWithKerberizedHadoop() {
    return "kerberos".equalsIgnoreCase(hadoopAuth)
        && !authTypeStr.equalsIgnoreCase(AuthConstants.AuthTypes.NOSASL.getAuthName());
  }

  // Wraps one transport factory in another (e.g. framed + UGI-containing).
  private static final class ChainedTTransportFactory extends TTransportFactory {
    private final TTransportFactory parentTransFactory;
    private final TTransportFactory childTransFactory;

    private ChainedTTransportFactory(
        TTransportFactory parentTransFactory,
        TTransportFactory childTransFactory) {
      this.parentTransFactory = parentTransFactory;
      this.childTransFactory = childTransFactory;
    }

    @Override
    public TTransport getTransport(TTransport trans) throws TTransportException {
      return childTransFactory.getTransport(parentTransFactory.getTransport(trans));
    }
  }
}
| |
/** Notice of modification as required by the LGPL
* This file was modified by Gemstone Systems Inc. on
* $Date$
**/
// $Id: FRAG2.java,v 1.20 2005/08/11 12:43:47 belaban Exp $
package com.gemstone.org.jgroups.protocols;
import java.util.HashMap;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Vector;
import com.gemstone.org.jgroups.Address;
import com.gemstone.org.jgroups.Event;
import com.gemstone.org.jgroups.Message;
import com.gemstone.org.jgroups.View;
import com.gemstone.org.jgroups.ViewId;
import com.gemstone.org.jgroups.stack.Protocol;
import com.gemstone.org.jgroups.util.ExternalStrings;
import com.gemstone.org.jgroups.util.Range;
import com.gemstone.org.jgroups.util.Util;
/**
* Fragmentation layer. Fragments messages larger than frag_size into smaller packets.
* Reassembles fragmented packets into bigger ones. The fragmentation number is prepended
* to the messages as a header (and removed at the receiving side).<p>
* Each fragment is identified by (a) the sender (part of the message to which the header is appended),
* (b) the fragmentation ID (which is unique per FRAG2 layer (monotonically increasing) and (c) the
* fragment ID which ranges from 0 to number_of_fragments-1.<p>
* Requirement: lossless delivery (e.g. NAK, ACK). No requirement on ordering. Works for both unicast and
* multicast messages.<br/>
* Compared to FRAG, this protocol does <em>not</em> need to serialize the message in order to break it into
* smaller fragments: it looks only at the message's buffer, which is a byte[] array anyway. We assume that the
* size addition for headers and src and dest address is minimal when the transport finally has to serialize the
* message, so we add a constant (1000 bytes).
* @author Bela Ban
* @version $Id: FRAG2.java,v 1.20 2005/08/11 12:43:47 belaban Exp $
*/
public class FRAG2 extends Protocol {
public static boolean DEBUG_FRAG2 = Boolean.getBoolean("gemfire.debug-frag2");
/** The max number of bytes in a message. If a message's buffer is bigger, it will be fragmented */
int frag_size=1500;
/** Number of bytes that we think the headers plus src and dest will take up when
message is serialized by transport. This will be subtracted from frag_size */
int overhead=50;
/*the fragmentation list contains a fragmentation table per sender
*this way it becomes easier to clean up if a sender (member) leaves or crashes
*/
private final FragmentationList fragment_list=new FragmentationList();
private int curr_id=1;
private final Vector members=new Vector(11);
static String name="FRAG2"; // GemStone - increased visibility for DirAck
long num_sent_msgs=0;
long num_sent_frags=0;
long num_received_msgs=0;
long num_received_frags=0;
// DirAck dirAckProtocol;
/** GemStoneAddition - birth view id */
private long initialViewId;
/** GemStoneAddition - current view id */
private long currentViewId;
/** GemStoneAddition - messages that should be queued before getting a view id */
private List preJoinMessages = new LinkedList();
@Override // GemStoneAddition
public String getName() {
// Protocol name, used as the key for this layer's message headers.
return name;
}
// start GemStoneAddition
@Override // GemStoneAddition
public int getProtocolEnum() {
// GemStoneAddition - identifies this protocol layer by its enum constant.
return com.gemstone.org.jgroups.stack.Protocol.enumFRAG2;
}
// end GemStone addition
// Accessors for the fragmentation configuration (frag_size/overhead, in bytes)
// and for the sent/received message and fragment statistics counters.
public int getFragSize() {return frag_size;}
public void setFragSize(int s) {frag_size=s;}
public int getOverhead() {return overhead;}
public void setOverhead(int o) {overhead=o;}
public long getNumberOfSentMessages() {return num_sent_msgs;}
public long getNumberOfSentFragments() {return num_sent_frags;}
public long getNumberOfReceivedMessages() {return num_received_msgs;}
public long getNumberOfReceivedFragments() {return num_received_frags;}
/** Returns the next fragmentation id; synchronized so concurrent senders get unique ids. */
synchronized int getNextId() {
    final int id = curr_id;
    curr_id = id + 1;
    return id;
}
/**
 * Setup the Protocol instance according to the configuration string.
 * Reads (and consumes) the optional "frag_size" and "overhead" properties, then
 * derives the effective fragment size as frag_size - overhead.
 *
 * @return false when the derived fragment size is invalid or unknown properties remain
 */
@Override // GemStoneAddition
public boolean setProperties(Properties props) {
    super.setProperties(props);

    String value = props.getProperty("frag_size");
    if (value != null) {
        frag_size = Integer.parseInt(value);
        props.remove("frag_size");
    }

    value = props.getProperty("overhead");
    if (value != null) {
        overhead = Integer.parseInt(value);
        props.remove("overhead");
    }

    final int configured_frag_size = frag_size;
    frag_size -= overhead;
    if (frag_size <= 0) {
        log.error("frag_size=" + configured_frag_size + ", overhead=" + overhead +
                  ", new frag_size=" + frag_size + ": new frag_size is invalid");
        return false;
    }
    if (log.isInfoEnabled())
        log.info(ExternalStrings.FRAG2_FRAG_SIZE_0__OVERHEAD_1__NEW_FRAG_SIZE_2,
                 new Object[] {configured_frag_size, overhead, frag_size});

    if (props.size() > 0) {
        log.error(ExternalStrings.FRAG2_FRAG2SETPROPERTIES_THE_FOLLOWING_PROPERTIES_ARE_NOT_RECOGNIZED__0, props);
        return false;
    }
    return true;
}
@Override // GemStoneAddition
public void start() throws Exception {
// No startup work required; the DirAck protocol lookup below was disabled.
// dirAckProtocol = (DirAck)stack.findProtocol(DirAck.name); // GemStoneAddition
}
/** Resets superclass statistics, then zeroes this layer's message/fragment counters. */
@Override // GemStoneAddition
public void resetStats() {
    super.resetStats();
    num_sent_msgs = 0;
    num_sent_frags = 0;
    num_received_msgs = 0;
    num_received_frags = 0;
}
/**
* Fragment a packet if larger than frag_size (add a header). Otherwise just pass down. Only
* add a header if fragmentation is needed !
*/
@Override // GemStoneAddition
public void down(Event evt) {
switch(evt.getType()) {
case Event.MSG:
Message msg=(Message)evt.getArg();
long size=msg.getLength();
synchronized(this) {
num_sent_msgs++;
}
if(size > frag_size) {
if(trace) {
StringBuffer sb=new StringBuffer("message's buffer size is ");
sb.append(size).append(", will fragment ").append("(frag_size=");
sb.append(frag_size).append(')');
log.trace(sb.toString());
}
// Message exceeds the fragment size: split it and send the pieces instead.
gf_fragment(msg); // GemStoneAddition
//fragment(msg); // Fragment and pass down
recordSize();
return;
}
break;
case Event.VIEW_CHANGE:
// GemStoneAddition - once the first view id becomes known, flush any messages
// that were queued before joining so their fragments can be reassembled.
long initId = this.initialViewId;
viewChanged(evt);
synchronized(this.preJoinMessages) { // GemStoneAddition - preJoinMessages
if (initId == 0 && this.initialViewId != 0) {
for (Iterator it=this.preJoinMessages.iterator(); it.hasNext(); /**/) {
Message m = (Message)it.next();
unfragment(m);
recordSize();
}
this.preJoinMessages.clear();
}
}
break;
case Event.CONFIG:
// CONFIG events are passed down first, then applied locally (may carry frag_size).
passDown(evt);
if(log.isDebugEnabled()) log.debug("received CONFIG event: " + evt.getArg());
handleConfigEvent((HashMap)evt.getArg());
return;
}
passDown(evt); // Pass on to the layer below us
}
/**
* If event is a message, if it is fragmented, re-assemble fragments into big message and pass up
* the stack.
*/
@Override // GemStoneAddition
public void up(Event evt) {
switch(evt.getType()) {
case Event.MSG:
Message msg=(Message)evt.getArg();
Object obj=msg.getHeader(getName());
if(obj != null && obj instanceof FragHeader) { // needs to be defragmented
// Fragmented message: buffer/reassemble; the complete message (if any) is
// passed up from within unfragment().
unfragment(msg); // Unfragment and possibly pass up
recordSize();
return;
}
else {
num_received_msgs++;
}
break;
case Event.CONFIG:
passUp(evt);
if(log.isInfoEnabled()) log.info(ExternalStrings.FRAG2_RECEIVED_CONFIG_EVENT__0, (Object)evt.getArg());
handleConfigEvent((HashMap)evt.getArg());
return;
// GemStoneAddition - track the birth and current view ids
case Event.VIEW_CHANGE:
// members anymore !
// Same pre-join flush as in down(): once the first view id is known, process
// any messages that were queued before joining.
long initId = this.initialViewId;
viewChanged(evt);
synchronized(this.preJoinMessages) {
if (initId == 0 && this.initialViewId != 0) {
for (Iterator it=this.preJoinMessages.iterator(); it.hasNext(); /**/) {
Message m = (Message)it.next();
unfragment(m);
recordSize();
}
this.preJoinMessages.clear();
}
}
break;
}
passUp(evt); // Pass up to the layer above us by default
}
private void viewChanged(Event evt) {
    // GemStoneAddition - track the initial view id and the current
    // view id. Some fragments of early messages may be thrown away
    // or queued by DirAck, and FRAG2 may hold corresponding fragments,
    // so it needs to do the same thing.
    ViewId vid = ((View)evt.getArg()).getVid();
    if (this.initialViewId == 0) {
        this.initialViewId = vid.getId();
    }
    this.currentViewId = vid.getId();
    /* gemstoneaddition - moved this code out of down() so it could be used
     * in up(), where view changes are actually seen
     */
    View view = (View)evt.getArg();
    Vector new_mbrs = view.getMembers();
    Vector left_mbrs;
    // Fix: compute the departed members and swap in the new membership under the
    // members lock. The original called determineLeftMembers() on 'members' outside
    // the synchronized block, contradicting the stated "synchronize all access"
    // invariant and risking a read concurrent with another thread's update.
    synchronized (members) { // GemStoneAddition - synchronize all access to members collection
        left_mbrs = Util.determineLeftMembers(members, new_mbrs);
        members.clear();
        members.addAll(new_mbrs);
    }
    for (int i = 0; i < left_mbrs.size(); i++) {
        Address mbr = (Address)left_mbrs.elementAt(i);
        // the new view doesn't contain the sender, he must have left,
        // hence we will clear all his fragmentation tables
        fragment_list.remove(mbr);
        if (trace) log.trace("removed " + mbr + " from fragmentation table");
    }
    recordSize();
}
private void recordSize() { // GemstoneAddition - memory debugging
// Debug hook that once recorded the fragment-table size into gemfire stats;
// the implementation is currently disabled.
// if (DEBUG_FRAG2) {
// stack.gemfireStats.setJg3(this.fragment_list.size());
// }
}
  /**
   * GemStoneAddition - [obsolete] check for dirAck multiple destinations. If found,
   * copy the message for each dest. An assertion in DirAck will check to
   * make sure this has happened.
   * Now simply delegates to {@link #fragment}.
   */
  void gf_fragment(Message msg) {
    fragment(msg);
  }
  /** Send all fragments as separate messages (with same ID !).
      Example:
      <pre>
      Given the generated ID is 2344, number of fragments=3, message {dst,src,buf}
      would be fragmented into:
      [2344,3,0]{dst,src,buf1},
      [2344,3,1]{dst,src,buf2} and
      [2344,3,2]{dst,src,buf3}
      </pre>
   */
  void fragment(Message msg) {
    // long start; // GemStoneAddition
    byte[] buffer;
    List fragments;
    Event evt;
    FragHeader hdr;
    Message frag_msg;
    Address dest=msg.getDest();
    long id=0; // GemStoneAddition getNextId(); // used as seqnos
    int num_frags;
    StringBuffer sb;
    Range r;
    try {
      hdr = (FragHeader)msg.getHeader(getName());
      id = getNextId();
      buffer=msg.getBuffer();
      // split the payload into frag_size-bounded [low, high) ranges
      fragments=Util.computeFragOffsets(buffer, frag_size);
      num_frags=fragments.size();
      if (stack != null) {
        stack.gfPeerFunctions.incJgFragmentsCreated(num_frags);
        stack.gfPeerFunctions.incJgFragmentationsPerformed();
      }
      //synchronized(this) {
      //  num_sent_frags+=num_frags;
      //}
      if(trace) {
        sb=new StringBuffer("fragmenting packet to ");
        sb.append((dest != null ? dest.toString() : "<all members>")).append(" (size=").append(buffer.length);
        sb.append(") into ").append(num_frags).append(" fragment(s) [frag_size=").append(frag_size).append(']');
        log.trace(sb.toString());
      }
      int fsize = fragments.size(); // GemStoneAddition
      for(int i=0; i < fsize; i++) {
        r=(Range)fragments.get(i);
        // Copy the original msg (needed because we need to copy the headers too)
        frag_msg=msg.copy(false); // don't copy the buffer, only src, dest and headers
        frag_msg.bundleable = false; // GemStoneAddition
        frag_msg.setBuffer(buffer, (int)r.low, (int)r.high);
        // GemStoneAddition:
        // send the view id with the message
        hdr=new FragHeader(id, i, num_frags, this.currentViewId);
        frag_msg.putHeader(getName(), hdr);
        // GemStoneAddition - only the last fragment is processed in DirAck
        evt=new Event(Event.MSG, frag_msg);
        passDown(evt);
      }
      // NOTE(review): at this point hdr is the header of the LAST fragment sent
      // above (or the header read at the top if no fragments were produced) —
      // confirm the rexmit path expects exactly that.
      msg.putHeader(getName(), hdr); // GemStoneAddition - save the frag header for rexmits
    }
    catch(Exception e) {
      if(log.isErrorEnabled()) log.error(ExternalStrings.FRAG2_EXCEPTION_IS__0, e);
    }
  }
/**
1. Get all the fragment buffers
2. When all are received -> Assemble them into one big buffer
3. Read headers and byte buffer from big buffer
4. Set headers and buffer in msg
5. Pass msg up the stack
*/
void unfragment(Message msg) {
FragmentationTable frag_table;
Address sender=msg.getSrc();
Message assembled_msg;
FragHeader hdr=(FragHeader)msg.removeHeader(getName());
synchronized(preJoinMessages) {
if (this.initialViewId == 0) {
preJoinMessages.add(msg);
return;
}
}
frag_table=fragment_list.get(sender);
if(frag_table == null) {
frag_table=new FragmentationTable(sender);
try {
fragment_list.add(sender, frag_table);
}
catch(IllegalArgumentException x) { // the entry has already been added, probably in parallel from another thread
frag_table=fragment_list.get(sender);
}
}
num_received_frags++;
assembled_msg=frag_table.add(hdr.id, hdr.frag_id, hdr.num_frags, msg);
if(assembled_msg != null) {
try {
if(trace) log.trace("assembled_msg is " + assembled_msg);
assembled_msg.setSrc(sender); // needed ? YES, because fragments have a null src !!
num_received_msgs++;
//this.log.getLogWriter().info("fg size = " + this.fragment_list.size());
passUp(new Event(Event.MSG, assembled_msg));
}
catch(Exception e) {
if(log.isErrorEnabled()) log.error(ExternalStrings.FRAG2_EXCEPTION_IS__0, e);
}
}
}
void handleConfigEvent(HashMap map) {
if(map == null) return;
if(map.containsKey("frag_size")) {
frag_size=((Integer)map.get("frag_size")).intValue();
if(log.isDebugEnabled()) log.debug("setting frag_size=" + frag_size);
}
}
/**
* A fragmentation list keeps a list of fragmentation tables
* sorted by an Address ( the sender ).
* This way, if the sender disappears or leaves the group half way
* sending the content, we can simply remove this members fragmentation
* table and clean up the memory of the receiver.
* We do not have to do the same for the sender, since the sender doesn't keep a fragmentation table
*/
static class FragmentationList {
/* * HashMap key=Address, value=FragmentationTable, initialize the hashtable to hold all the fragmentation
* tables (11 is the best growth capacity to start with)
*/
private final HashMap frag_tables=new HashMap(11);
/**
* Adds a fragmentation table for this particular sender
* If this sender already has a fragmentation table, an IllegalArgumentException
* will be thrown.
* @param sender - the address of the sender, cannot be null
* @param table - the fragmentation table of this sender, cannot be null
* @exception IllegalArgumentException if an entry for this sender already exist
*/
public void add(Address sender, FragmentationTable table) throws IllegalArgumentException {
FragmentationTable healthCheck;
synchronized(frag_tables) {
healthCheck=(FragmentationTable)frag_tables.get(sender);
if(healthCheck == null) {
frag_tables.put(sender, table);
}
else {
throw new IllegalArgumentException("Sender <" + sender + "> already exists in the fragementation list.");
}
}
}
/**
* returns a fragmentation table for this sender
* returns null if the sender doesn't have a fragmentation table
* @return the fragmentation table for this sender, or null if no table exist
*/
public FragmentationTable get(Address sender) {
synchronized(frag_tables) {
return (FragmentationTable)frag_tables.get(sender);
}
}
/**
* returns true if this sender already holds a
* fragmentation for this sender, false otherwise
* @param sender - the sender, cannot be null
* @return true if this sender already has a fragmentation table
*/
public boolean containsSender(Address sender) {
synchronized(frag_tables) {
return frag_tables.containsKey(sender);
}
}
/**
* removes the fragmentation table from the list.
* after this operation, the fragementation list will no longer
* hold a reference to this sender's fragmentation table
* @param sender - the sender who's fragmentation table you wish to remove, cannot be null
* @return true if the table was removed, false if the sender doesn't have an entry
*/
public boolean remove(Address sender) {
synchronized(frag_tables) {
boolean result=containsSender(sender);
frag_tables.remove(sender);
return result;
}
}
/**
* returns a list of all the senders that have fragmentation tables
* opened.
* @return an array of all the senders in the fragmentation list
*/
public Address[] getSenders() {
Address[] result;
int index=0;
synchronized(frag_tables) {
result=new Address[frag_tables.size()];
for(Iterator it=frag_tables.keySet().iterator(); it.hasNext();) {
result[index++]=(Address)it.next();
}
}
return result;
}
@Override // GemStoneAddition
public String toString() {
Map.Entry entry;
StringBuffer buf=new StringBuffer("Fragmentation list contains ");
synchronized(frag_tables) {
buf.append(frag_tables.size()).append(" tables\n");
for(Iterator it=frag_tables.entrySet().iterator(); it.hasNext();) {
entry=(Map.Entry)it.next();
buf.append(entry.getKey()).append(": " ).append(entry.getValue()).append("\n");
}
}
return buf.toString();
}
/** GemStoneAddition - sanity check */
public int size() {
int result= 0;
synchronized(frag_tables) {
for (Iterator it=frag_tables.values().iterator(); it.hasNext(); ) {
FragmentationTable f = (FragmentationTable)it.next();
result += f.size();
}
}
return result;
}
}
  /**
   * Keeps track of the fragments that are received.
   * Reassembles fragments into entire messages when all fragments have been received.
   * The fragmentation table holds an array of message fragments per message id
   * for a unique sender; array index = fragment order, so fragments may
   * arrive out of order.
   */
  static class FragmentationTable {
    private final Address sender;
    /* the hashtable that holds the fragmentation entries for this sender*/
    private final Hashtable h=new Hashtable(11); // keys: frag_ids, vals: Entrys

    FragmentationTable(Address sender) {
      this.sender=sender;
    }

    /**
     * inner class represents an entry for a message
     * each entry holds an array of byte arrays sorted
     * once all the byte buffer entries have been filled
     * the fragmentation is considered complete.
     */
    static class Entry {
      //the total number of fragments in this message
      int tot_frags=0;
      // each fragment is a byte buffer
      Message fragments[]=null;
      //the number of fragments we have received
      int number_of_frags_recvd=0;
      // the message ID
      long msg_id=-1;

      /**
       * Creates a new entry
       * @param tot_frags the number of fragments to expect for this message
       */
      Entry(long msg_id, int tot_frags) {
        this.msg_id=msg_id;
        this.tot_frags=tot_frags;
        fragments=new Message[tot_frags];
        // explicit null-fill; redundant in Java (arrays start null) but harmless
        for(int i=0; i < tot_frags; i++)
          fragments[i]=null;
      }

      /**
       * adds one fragment to the message
       * @param frag_id the number of the fragment being added 0..(tot_num_of_frags - 1)
       * @param frag the message containing the data for this fragment, should not be null
       */
      public void set(int frag_id, Message frag) {
        // don't count an already received fragment (should not happen though because the
        // reliable transmission protocol(s) below should weed out duplicates
        if(fragments[frag_id] == null) {
          fragments[frag_id]=frag;
          number_of_frags_recvd++;
        }
      }

      /** returns true if this fragmentation is complete
       *  ie, all fragments have been received for this buffer
       *
       */
      public boolean isComplete() {
        /*first make the simple check*/
        if(number_of_frags_recvd < tot_frags) {
          return false;
        }
        /*then double check just in case*/
        for(int i=0; i < fragments.length; i++) {
          if(fragments[i] == null)
            return false;
        }
        /*all fragmentations have been received*/
        return true;
      }

      /**
       * Assembles all the fragments into one buffer. Takes all Messages, and combines their buffers into one
       * buffer.
       * This method does not check if the fragmentation is complete (use {@link #isComplete()} to verify
       * before calling this method)
       * @return the complete message in one buffer
       *
       */
      public Message assembleMessage() {
        Message retval;
        byte[] combined_buffer, tmp;
        int combined_length=0, length, offset;
        Message fragment;
        int index=0;
        // first pass: total size of the reassembled payload
        for(int i=0; i < fragments.length; i++) {
          fragment=fragments[i];
          combined_length+=fragment.getLength();
        }
        combined_buffer=new byte[combined_length];
        // second pass: copy each fragment's bytes in order
        for(int i=0; i < fragments.length; i++) {
          fragment=fragments[i];
          tmp=fragment.getRawBuffer();
          length=fragment.getLength();
          offset=fragment.getOffset();
          System.arraycopy(tmp, offset, combined_buffer, index, length);
          index+=length;
        }
        retval=fragments[fragments.length-1].copy(false); // GemStoneAddition - for DirAck support we copy the final fragment
        retval.setBuffer(combined_buffer);
        return retval;
      }

      /**
       * debug only
       */
      @Override // GemStoneAddition
      public String toString() {
        StringBuffer ret=new StringBuffer();
        ret.append("[tot_frags=" + tot_frags + ", number_of_frags_recvd=" + number_of_frags_recvd + ']');
        return ret.toString();
      }

      // NOTE(review): delegates to identity hashCode even though entries are
      // value-like — confirm Entry is never used as a hash key.
      @Override // GemStoneAddition
      public int hashCode() {
        return super.hashCode();
      }

      /** GemStoneAddition - debugging memory: total bytes held by received fragments */
      public int size() {
        int result = 0;
        for (int i=0; i<fragments.length; i++) {
          Message m = fragments[i];
          if (m != null) {
            result += m.getLength();
          }
        }
        return result;
      }
    }

    /**
     * Creates a new entry if not yet present. Adds the fragment.
     * If all fragments for a given message have been received,
     * an entire message is reassembled and returned.
     * Otherwise null is returned.
     * @param id - the message ID, unique for a sender
     * @param frag_id the index of this fragmentation (0..tot_frags-1)
     * @param tot_frags the total number of fragmentations expected
     * @param fragment - the message carrying this fragment
     */
    public synchronized Message add(long id, int frag_id, int tot_frags, Message fragment) {
      Message retval=null;
      Entry e=(Entry)h.get(Long.valueOf(id));
      if(e == null) { // Create new entry if not yet present
        e=new Entry(id, tot_frags);
        h.put(Long.valueOf(id), e);
      }
      e.set(frag_id, fragment);
      if(e.isComplete()) {
        // all fragments present: reassemble and drop the bookkeeping entry
        retval=e.assembleMessage();
        h.remove(Long.valueOf(id));
      }
      return retval;
    }

    // intentionally a no-op
    public void reset() {
    }

    @Override // GemStoneAddition
    public String toString() {
      StringBuffer buf=new StringBuffer("Fragmentation Table Sender:").append(sender).append("\n\t");
      java.util.Enumeration e=this.h.elements();
      while(e.hasMoreElements()) {
        Entry entry=(Entry)e.nextElement();
        int count=0;
        for(int i=0; i < entry.fragments.length; i++) {
          if(entry.fragments[i] != null) {
            count++;
          }
        }
        buf.append("Message ID:").append(entry.msg_id).append("\n\t");
        buf.append("Total Frags:").append(entry.tot_frags).append("\n\t");
        buf.append("Frags Received:").append(count).append("\n\n");
      }
      return buf.toString();
    }

    /** GemStoneAddition - memory debugging: total bytes held across all entries */
    public int size() {
      int result = 0;
      java.util.Enumeration e = h.elements();
      while (e.hasMoreElements()) {
        Entry entry = (Entry)e.nextElement();
        result += entry.size();
      }
      return result;
    }
  }
}
| |
//============================================================================
//
// Copyright (C) 2006-2022 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
//============================================================================
package org.talend.components.marklogic.runtime;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.IndexedRecord;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.talend.components.api.component.runtime.Result;
import org.talend.components.api.component.runtime.WriteOperation;
import org.talend.components.api.component.runtime.WriterWithFeedback;
import org.talend.components.api.container.RuntimeContainer;
import org.talend.components.marklogic.exceptions.MarkLogicErrorCode;
import org.talend.components.marklogic.exceptions.MarkLogicException;
import org.talend.components.marklogic.tmarklogicoutput.MarkLogicOutputProperties;
import org.talend.daikon.i18n.GlobalI18N;
import org.talend.daikon.i18n.I18nMessages;
import com.marklogic.client.DatabaseClient;
import com.marklogic.client.document.DocumentDescriptor;
import com.marklogic.client.document.DocumentManager;
import com.marklogic.client.document.DocumentUriTemplate;
import com.marklogic.client.io.BytesHandle;
import com.marklogic.client.io.FileHandle;
import com.marklogic.client.io.Format;
import com.marklogic.client.io.StringHandle;
import com.marklogic.client.io.marker.AbstractWriteHandle;
/**
 * Writer for the tMarkLogicOutput component. Depending on the configured
 * action it upserts, patches or deletes documents in MarkLogic, and exposes
 * per-record success/reject feedback plus legacy NB_LINE_* counters.
 */
public class MarkLogicWriter implements WriterWithFeedback<Result, IndexedRecord, IndexedRecord> {

    // NOTE: 'transient' removed from the original declaration; it has no effect
    // on a static field.
    private static final Logger LOGGER = LoggerFactory.getLogger(MarkLogicWriter.class);

    protected static final I18nMessages MESSAGES = GlobalI18N.getI18nMessageProvider().getI18nMessages(MarkLogicWriter.class);

    // Names of the legacy after-variables published to the container on close().
    private static final String LEGACY_NB_LINE_UPSERTED_NAME = "NB_LINE_UPSERTED";

    private static final String LEGACY_NB_LINE_DELETED_NAME = "NB_LINE_DELETED";

    private static final String LEGACY_NB_LINE_PATCHED_NAME = "NB_LINE_PATCHED";

    private static final String LEGACY_NB_LINE_REJECTED_NAME = "NB_LINE_REJECTED";

    private MarkLogicOutputProperties properties;

    private MarkLogicWriteOperation writeOperation;

    RuntimeContainer container;

    private DatabaseClient connectionClient;

    // Concrete manager flavor depends on properties.docType; see initializeDocManager().
    DocumentManager docMgr = null;

    private Schema rejectSchema;

    private String docIdPrefix;

    private MarkLogicOutputProperties.DocType docType;

    // Extension appended to auto-generated document URIs ("xml", "json", ...).
    private String docIdSuffix;

    private Result result;

    // Feedback buffers, cleared at the start of every write().
    private List<IndexedRecord> successWrites;

    private List<IndexedRecord> rejectWrites;

    private boolean autoGenerateId;

    /**
     * Connects to MarkLogic and prepares the document manager.
     *
     * @param uId unique id for the {@link Result} of this writer
     */
    @Override
    public void open(String uId) throws IOException {
        this.result = new Result(uId);
        connectionClient = writeOperation.getSink().connect(container);
        rejectSchema = properties.schemaReject.schema.getValue();
        docIdPrefix = properties.docIdPrefix.getStringValue();
        docType = properties.docType.getValue();
        initializeDocManager();
    }

    /** Chooses the DocumentManager flavor and URI suffix for the configured doc type. */
    private void initializeDocManager() {
        if (connectionClient == null) {
            return;
        }
        switch (properties.docType.getValue()) {
        case MIXED:
            docMgr = connectionClient.newDocumentManager();
            docIdSuffix = "";
            break;
        case XML:
            docMgr = connectionClient.newXMLDocumentManager();
            docIdSuffix = "xml";
            break;
        case JSON:
            docMgr = connectionClient.newJSONDocumentManager();
            docIdSuffix = "json";
            break;
        case PLAIN_TEXT:
            docMgr = connectionClient.newTextDocumentManager();
            docIdSuffix = "txt";
            break;
        case BINARY:
            docMgr = connectionClient.newBinaryDocumentManager();
            docIdSuffix = "bin";
            break;
        }
    }

    /**
     * Writes one record: field 0 is the document id, field 1 the content.
     * A failing row is converted into a reject-schema feedback record instead
     * of aborting the run; totalCount is incremented either way.
     */
    @Override
    public void write(Object indexedRecordDatum) throws IOException {
        // instanceof is false for null, so no separate null check is needed
        if (!(indexedRecordDatum instanceof IndexedRecord)) {
            return;
        }
        cleanWrites();
        IndexedRecord indexedRecord = (IndexedRecord) indexedRecordDatum;
        String docId = (String) indexedRecord.get(0);
        Object docContent = indexedRecord.get(1);
        try {
            switch (properties.action.getValue()) {
            case DELETE:
                deleteRecord(docId);
                break;
            case PATCH:
                patchRecord(docId, (String) docContent);
                break;
            case UPSERT:
                AbstractWriteHandle genericHandle = prepareWriteHandle(docContent);
                if (autoGenerateId) {
                    docId = generateDocId(genericHandle);
                    // expose the generated id to downstream components
                    indexedRecord.put(0, docId);
                }
                upsertRecord(docId, genericHandle);
                break;
            }
            handleSuccessRecord(indexedRecord);
        } catch (Exception e) {
            handleRejectRecord(indexedRecord, e);
        }
        result.totalCount++;
    }

    /**
     * Wraps the record content into the write handle matching its runtime type.
     *
     * @throws MarkLogicException for BINARY documents whose content is neither
     *         byte[], File nor String
     * @return the handle, or null for non-binary content that is not a String
     */
    private AbstractWriteHandle prepareWriteHandle(Object docContent) {
        AbstractWriteHandle genericHandle = null;
        if (MarkLogicOutputProperties.DocType.BINARY == properties.docType.getValue()) {
            if (docContent instanceof byte[]) {
                genericHandle = new BytesHandle((byte[]) docContent);
            } else if (docContent instanceof File) {
                genericHandle = new FileHandle((File) docContent);
            } else if (docContent instanceof String) {
                genericHandle = new StringHandle((String) docContent);
            } else {
                throw new MarkLogicException(new MarkLogicErrorCode("Unsupported Content Represent in " + docContent.getClass()));
            }
        } else {
            if (docContent instanceof String) {
                genericHandle = new StringHandle((String) docContent);
            }
        }
        return genericHandle;
    }

    /**
     * Creates the document from a server-side URI template and returns the
     * generated URI. An optional docIdPrefix (normalized to end with '/') is
     * used as the directory part.
     */
    private String generateDocId(AbstractWriteHandle genericHandle) {
        DocumentUriTemplate template = docMgr.newDocumentUriTemplate(docIdSuffix);
        if (StringUtils.isNotEmpty(properties.docIdPrefix.getStringValue())
                && !"\"\"".equals(properties.docIdPrefix.getStringValue())) {
            String realPrefix = properties.docIdPrefix.getStringValue();
            if (!(realPrefix.endsWith("/") || realPrefix.endsWith("\\"))) {
                realPrefix = realPrefix + "/";
            }
            template.setDirectory(realPrefix.replaceAll("\\\\", "/"));
        }
        DocumentDescriptor docDesc = docMgr.create(template, genericHandle);
        return docDesc.getUri();
    }

    /** Writes (creates or replaces) the document under the given URI. */
    private void upsertRecord(String docId, AbstractWriteHandle genericHandle) {
        docMgr.write(docId, genericHandle);
    }

    /** Deletes the document with the given URI. */
    private void deleteRecord(String docId) {
        docMgr.delete(docId);
    }

    /**
     * Applies a raw patch to the document; only JSON and XML doc types support
     * patching.
     *
     * @throws MarkLogicException for any other doc type
     */
    private void patchRecord(String docId, String docContent) {
        StringHandle patchHandle = new StringHandle(docContent);
        if (MarkLogicOutputProperties.DocType.JSON == docType) {
            patchHandle.withFormat(Format.JSON);
        } else if (MarkLogicOutputProperties.DocType.XML == docType) {
            patchHandle.withFormat(Format.XML);
        } else {
            throw new MarkLogicException(new MarkLogicErrorCode("Cant patch for docType " + docType));
        }
        docMgr.patch(docId, patchHandle);
    }

    /** Counts the record as successful and buffers it for the main flow. */
    private void handleSuccessRecord(IndexedRecord record) {
        result.successCount++;
        successWrites.add(record);
    }

    /** Counts the record as rejected and buffers a reject-schema record ("<docId> <error>"). */
    private void handleRejectRecord(IndexedRecord record, Exception e) {
        result.rejectCount++;
        IndexedRecord errorIndexedRecord = new GenericData.Record(rejectSchema);
        errorIndexedRecord.put(0, record.get(0) + " " + e.getMessage());
        rejectWrites.add(errorIndexedRecord);
    }

    /**
     * Releases the connection (unless a shared/referenced connection is used),
     * publishes the legacy NB_LINE_* counters and returns the result.
     */
    @Override
    public Result close() throws IOException {
        // Guard against close() being called when open() never connected (fix
        // for a potential NullPointerException).
        if (connectionClient != null && !properties.connection.isReferencedConnectionUsed()) {
            connectionClient.release();
            LOGGER.info(MESSAGES.getMessage("info.connectionClosed"));
        }
        writeLegacyNBLineResult();
        return result;
    }

    /** Publishes the legacy NB_LINE_* after-variables into the runtime container. */
    private void writeLegacyNBLineResult() {
        int linesUpserted = 0;
        int linesPatched = 0;
        int linesDeleted = 0;
        switch (properties.action.getValue()) {
        case UPSERT:
            linesUpserted = result.successCount;
            break;
        case PATCH:
            linesPatched = result.successCount;
            break;
        case DELETE:
            linesDeleted = result.successCount;
            break;
        }
        int linesRejected = result.rejectCount;
        // hoist the component id instead of resolving it four times
        String componentId = container.getCurrentComponentId();
        this.container.setComponentData(componentId, LEGACY_NB_LINE_UPSERTED_NAME, linesUpserted);
        this.container.setComponentData(componentId, LEGACY_NB_LINE_PATCHED_NAME, linesPatched);
        this.container.setComponentData(componentId, LEGACY_NB_LINE_DELETED_NAME, linesDeleted);
        this.container.setComponentData(componentId, LEGACY_NB_LINE_REJECTED_NAME, linesRejected);
    }

    @Override
    public WriteOperation<Result> getWriteOperation() {
        return writeOperation;
    }

    /** @return an unmodifiable view of the records written since the last write() started */
    @Override
    public Iterable<IndexedRecord> getSuccessfulWrites() {
        return Collections.unmodifiableCollection(successWrites);
    }

    /** @return an unmodifiable view of the reject records produced since the last write() started */
    @Override
    public Iterable<IndexedRecord> getRejectedWrites() {
        return Collections.unmodifiableCollection(rejectWrites);
    }

    /** Clears both feedback buffers; called at the start of every write(). */
    @Override
    public void cleanWrites() {
        successWrites.clear();
        rejectWrites.clear();
    }

    public MarkLogicWriter(MarkLogicWriteOperation writeOperation, RuntimeContainer container,
            MarkLogicOutputProperties properties) {
        this.writeOperation = writeOperation;
        this.container = container;
        this.properties = properties;
        this.autoGenerateId = properties.autoGenerateDocId.getValue();
        successWrites = new ArrayList<>();
        rejectWrites = new ArrayList<>();
    }
}
| |
package com.intellij.refactoring.typeMigration.intentions;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.intention.HighPriorityAction;
import com.intellij.codeInsight.intention.LowPriorityAction;
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleSettings;
import com.intellij.psi.impl.AllowedApiFilterExtension;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.typeMigration.TypeMigrationVariableTypeFixProvider;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.ObjectUtils;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Map;
import java.util.concurrent.atomic.*;
import static com.intellij.util.ObjectUtils.assertNotNull;
/**
 * Intention that converts a field or local variable of a primitive, array or
 * reference type into the corresponding java.util.concurrent.atomic wrapper
 * (AtomicInteger, AtomicLong, AtomicBoolean, Atomic*Array, AtomicReference)
 * and migrates all usages via the type-migration refactoring.
 *
 * @author anna
 * @since 26-Aug-2009
 */
public class ConvertFieldToAtomicIntention extends PsiElementBaseIntentionAction implements LowPriorityAction {
  private static final Logger LOG = Logger.getInstance(ConvertFieldToAtomicIntention.class);

  // primitive / primitive-array source type -> atomic wrapper class name
  private final Map<PsiType, String> myFromToMap = ContainerUtil.newHashMap();
  {
    myFromToMap.put(PsiType.INT, AtomicInteger.class.getName());
    myFromToMap.put(PsiType.LONG, AtomicLong.class.getName());
    myFromToMap.put(PsiType.BOOLEAN, AtomicBoolean.class.getName());
    myFromToMap.put(PsiType.INT.createArrayType(), AtomicIntegerArray.class.getName());
    myFromToMap.put(PsiType.LONG.createArrayType(), AtomicLongArray.class.getName());
  }

  @NotNull
  @Override
  public String getText() {
    return "Convert to atomic";
  }

  @NotNull
  @Override
  public String getFamilyName() {
    return getText();
  }

  @Override
  public boolean isAvailable(@NotNull Project project, Editor editor, @NotNull PsiElement element) {
    PsiVariable psiVariable = getVariable(element);
    if (psiVariable == null || psiVariable instanceof PsiResourceVariable) return false;
    if (psiVariable.getLanguage() != JavaLanguage.INSTANCE) return false;
    if (psiVariable.getTypeElement() == null) return false;
    if (!PsiUtil.isLanguageLevel5OrHigher(psiVariable)) return false;
    final PsiType psiType = psiVariable.getType();
    final PsiClass psiTypeClass = PsiUtil.resolveClassInType(psiType);
    if (psiTypeClass != null) {
      final String qualifiedName = psiTypeClass.getQualifiedName();
      if (qualifiedName != null) { //is already atomic
        // containsValue() instead of values().contains() — same semantics, clearer
        if (myFromToMap.containsValue(qualifiedName) ||
            qualifiedName.equals(AtomicReference.class.getName()) ||
            qualifiedName.equals(AtomicReferenceArray.class.getName())) {
          return false;
        }
      }
    }
    else if (!myFromToMap.containsKey(psiType)) {
      return false;
    }
    return AllowedApiFilterExtension.isClassAllowed(AtomicReference.class.getName(), element);
  }

  /** Returns the local variable or field named by the identifier under the caret, else null. */
  PsiVariable getVariable(PsiElement element) {
    if (element instanceof PsiIdentifier) {
      final PsiElement parent = element.getParent();
      if (parent instanceof PsiLocalVariable || parent instanceof PsiField) {
        return (PsiVariable)parent;
      }
    }
    return null;
  }

  @Override
  public void invoke(@NotNull Project project, Editor editor, @NotNull PsiElement element) throws IncorrectOperationException {
    final PsiVariable var = getVariable(element);
    LOG.assertTrue(var != null);
    final PsiType fromType = var.getType();
    PsiClassType toType = getMigrationTargetType(project, element, fromType);
    if (toType == null) return;
    if (!FileModificationService.getInstance().preparePsiElementsForWrite(var)) return;
    // give primitives an explicit initializer so the migrated atomic is never unset
    addExplicitInitializer(var);
    String toTypeCanonicalText = toType.getCanonicalText();
    TypeMigrationVariableTypeFixProvider.runTypeMigrationOnVariable(var, toType, editor, false, false);
    postProcessVariable(var, toTypeCanonicalText);
  }

  /** Adds a default initializer ("false"/"0") to an uninitialized primitive variable. */
  static void addExplicitInitializer(@NotNull PsiVariable var) {
    PsiExpression currentInitializer = var.getInitializer();
    if (currentInitializer != null) return;
    final PsiType type = var.getType();
    String initializerText = null;
    if (PsiType.BOOLEAN.equals(type)) {
      initializerText = "false";
    }
    else if (type instanceof PsiPrimitiveType) {
      initializerText = "0";
    }
    if (initializerText != null) {
      String finalInitializerText = initializerText;
      WriteAction.run(() -> {
        PsiExpression initializer = JavaPsiFacade.getElementFactory(var.getProject()).createExpressionFromText(finalInitializerText, var);
        if (var instanceof PsiLocalVariable) {
          ((PsiLocalVariable)var).setInitializer(initializer);
        }
        else if (var instanceof PsiField) {
          ((PsiField)var).setInitializer(initializer);
        }
      });
    }
  }

  /**
   * After migration: ensure a "new <toType>()" initializer exists, make the
   * variable final (fields always; locals per code style), drop volatile,
   * shorten references and reformat.
   */
  static void postProcessVariable(@NotNull PsiVariable var, @NotNull String toType) {
    Project project = var.getProject();
    if (var instanceof PsiField || CodeStyleSettingsManager.getSettings(project).getCustomSettings(JavaCodeStyleSettings.class).GENERATE_FINAL_LOCALS) {
      PsiModifierList modifierList = assertNotNull(var.getModifierList());
      WriteAction.run(() -> {
        if (var.getInitializer() == null) {
          final PsiExpression newInitializer = JavaPsiFacade.getElementFactory(project).createExpressionFromText("new " + toType + "()", var);
          if (var instanceof PsiLocalVariable) {
            ((PsiLocalVariable)var).setInitializer(newInitializer);
          }
          else if (var instanceof PsiField) {
            ((PsiField)var).setInitializer(newInitializer);
          }
          JavaCodeStyleManager.getInstance(var.getProject()).shortenClassReferences(var.getInitializer());
        }
        modifierList.setModifierProperty(PsiModifier.FINAL, true);
        modifierList.setModifierProperty(PsiModifier.VOLATILE, false);
        JavaCodeStyleManager.getInstance(project).shortenClassReferences(var);
        CodeStyleManager.getInstance(project).reformat(var);
      });
    }
  }

  /**
   * Maps the source type to its atomic counterpart:
   * mapped primitives -> their Atomic* class; other arrays -> AtomicReferenceArray<Boxed>;
   * anything else -> AtomicReference<Boxed/T>. Returns null when the target
   * class cannot be resolved in the project scope.
   */
  @Nullable
  private PsiClassType getMigrationTargetType(@NotNull Project project,
                                              @NotNull PsiElement element,
                                              @NotNull PsiType fromType) {
    JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
    PsiElementFactory factory = psiFacade.getElementFactory();
    final String atomicQualifiedName = myFromToMap.get(fromType);
    if (atomicQualifiedName != null) {
      final PsiClass atomicClass = psiFacade.findClass(atomicQualifiedName, GlobalSearchScope.allScope(project));
      if (atomicClass == null) {//show warning
        return null;
      }
      return factory.createType(atomicClass);
    }
    else if (fromType instanceof PsiArrayType) {
      final PsiClass atomicReferenceArrayClass =
        psiFacade.findClass(AtomicReferenceArray.class.getName(), GlobalSearchScope.allScope(project));
      if (atomicReferenceArrayClass == null) {//show warning
        return null;
      }
      final Map<PsiTypeParameter, PsiType> substitutor = ContainerUtil.newHashMap();
      final PsiTypeParameter[] typeParameters = atomicReferenceArrayClass.getTypeParameters();
      if (typeParameters.length == 1) {
        PsiType componentType = ((PsiArrayType)fromType).getComponentType();
        if (componentType instanceof PsiPrimitiveType) componentType = ((PsiPrimitiveType)componentType).getBoxedType(element);
        substitutor.put(typeParameters[0], componentType);
      }
      return factory.createType(atomicReferenceArrayClass, factory.createSubstitutor(substitutor));
    }
    else {
      final PsiClass atomicReferenceClass = psiFacade.findClass(AtomicReference.class.getName(), GlobalSearchScope.allScope(project));
      if (atomicReferenceClass == null) {//show warning
        return null;
      }
      final Map<PsiTypeParameter, PsiType> substitutor = ContainerUtil.newHashMap();
      final PsiTypeParameter[] typeParameters = atomicReferenceClass.getTypeParameters();
      if (typeParameters.length == 1) {
        PsiType type = fromType;
        if (type instanceof PsiPrimitiveType) type = ((PsiPrimitiveType)fromType).getBoxedType(element);
        substitutor.put(typeParameters[0], type);
      }
      return factory.createType(atomicReferenceClass, factory.createSubstitutor(substitutor));
    }
  }

  @Override
  public boolean startInWriteAction() {
    return false;
  }

  /**
   * Quick-fix variant bound to a specific write-accessed reference expression;
   * high priority because it targets a concrete problem location.
   */
  public static class ConvertNonFinalLocalToAtomicFix extends ConvertFieldToAtomicIntention implements HighPriorityAction {
    // made final: assigned once in the constructor and never reassigned
    private final PsiElement myContext;

    public ConvertNonFinalLocalToAtomicFix(PsiElement context) {
      myContext = context;
    }

    @Override
    public boolean isAvailable(@NotNull Project project, Editor editor, @NotNull PsiElement element) {
      return myContext.isValid();
    }

    @Override
    PsiVariable getVariable(PsiElement element) {
      if(myContext instanceof PsiReferenceExpression && myContext.isValid()) {
        PsiReferenceExpression ref = (PsiReferenceExpression)myContext;
        if(PsiUtil.isAccessedForWriting(ref)) {
          return ObjectUtils.tryCast(ref.resolve(), PsiLocalVariable.class);
        }
      }
      return null;
    }
  }
}
| |
package edu.isi.wubble.util;
import static java.lang.Math.max;
import static java.lang.Math.min;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Collections;
import java.util.Comparator;
import java.util.LinkedList;
import javax.imageio.ImageIO;
import com.jme.math.FastMath;
import com.jme.renderer.ColorRGBA;
public class ColorHSB {
    // NOTE(review): serialVersionUID is declared but no 'implements Serializable'
    // is visible on this class — confirm whether it is still needed.
    private static final long serialVersionUID = 1L;
    // Hue in degrees [0, 360]; saturation and brightness in [0, 1] (see clamp()).
    public float _hue;
    public float _sat;
    public float _bri;
public ColorHSB() {
_hue = _sat = _bri = 1.0f;
}
public ColorHSB(ColorRGBA color) {
fromColorRGBA(color);
clamp();
}
public ColorHSB(float hue, float saturation, float brightness) {
_hue = hue;
_sat = saturation;
_bri = brightness;
clamp();
}
public ColorHSB(ColorHSB hsb) {
this._hue = hsb._hue;
this._sat = hsb._sat;
this._bri = hsb._bri;
clamp();
}
public void set(float hue, float saturation, float brightness) {
this._hue = hue;
this._sat = saturation;
this._bri = brightness;
clamp();
}
public ColorHSB set(ColorHSB hsb) {
if (hsb == null) {
_hue = 0;
_sat = 0;
_bri = 0;
} else {
_hue = hsb._hue;
_sat = hsb._sat;
_bri = hsb._bri;
}
return this;
}
/**
* <code>clamp</code> insures that all values are between 0 and 1. If any
* are less than 0 they are set to zero. If any are more than 1 they are
* set to one.
*
*/
public void clamp() {
if (_hue < 0) _hue += 360;
if (_hue > 360) _hue -= 360;
_sat = FastMath.clamp(_sat, 0f, 1f);
_bri = FastMath.clamp(_bri, 0f, 1f);
}
/**
* <code>toString</code> returns the string representation of this color.
* The format of the string is:<br>
* @return the string representation of this color.
*/
public String toString() {
return "edu.isi.wubble.util: [H="+_hue+", S="+_sat+", B="+_bri+"]";
}
public ColorHSB clone() {
return new ColorHSB(_hue,_sat,_bri);
}
/**
* <code>equals</code> returns true if this color is logically equivalent
* to a given color. That is, if the values of the two colors are the same.
* False is returned otherwise.
* @param o the object to compare againts.
* @return true if the colors are equal, false otherwise.
*/
public boolean equals(Object o) {
if( !(o instanceof ColorHSB) ) {
return false;
}
if(this == o) {
return true;
}
ColorHSB comp = (ColorHSB)o;
if (Float.compare(_hue, comp._hue) != 0) return false;
if (Float.compare(_sat, comp._sat) != 0) return false;
if (Float.compare(_bri, comp._bri) != 0) return false;
return true;
}
public Class getClassTag() {
return this.getClass();
}
public void fromColorRGBA(ColorRGBA color) {
float max = max(color.r, max(color.g, color.b));
float min = min(color.r, min(color.g, color.b));
float delta = max - min;
_bri = max;
if (Float.compare(max,0f) != 0) {
_sat = delta / max;
}
if (Float.compare(_sat, 0) != 0) {
float addAngle = 0;
if (Float.compare(max, color.r) == 0 && color.g >= color.b) {
_hue = (color.g - color.b) / delta;
} else if (Float.compare(max, color.r) == 0 && color.g < color.b) {
_hue = (color.g - color.b) / delta;
addAngle = 360;
} else if (Float.compare(max, color.g) == 0) {
_hue = (color.b - color.r) / delta;
addAngle = 120;
} else if (Float.compare(max, color.b) == 0) {
_hue = (color.r - color.g) / delta;
addAngle = 240;
}
_hue = (60f * _hue) + addAngle;
} else {
_hue = 0;
}
}
public ColorRGBA toRGBA() {
ColorRGBA color = new ColorRGBA();
int h = ((int) Math.floor(_hue / 60.0f)) % 6;
float f = (_hue / 60.0f) - (float) h;
float p = _bri * (1f - _sat);
float q = _bri * (1f - (f*_sat));
float t = _bri * (1f - (1f - f)*_sat);
if (h == 0)
return new ColorRGBA(_bri, t, p, 0);
if (h == 1)
return new ColorRGBA(q, _bri, p, 0);
if (h == 2)
return new ColorRGBA(p, _bri, t, 0);
if (h == 3)
return new ColorRGBA(p, q, _bri, 0);
if (h == 4)
return new ColorRGBA(t, p, _bri, 0);
if (h == 5)
return new ColorRGBA(_bri, p, q, 0);
return color;
}
public static void main(String[] args) {
Comparator<ColorHSB> hue = new Comparator<ColorHSB>() {
public int compare(ColorHSB arg0, ColorHSB arg1) {
if (arg0._hue < arg1._hue)
return -1;
else if (arg0._hue > arg1._hue)
return 1;
return 0;
}
};
Comparator<ColorHSB> bri = new Comparator<ColorHSB>() {
public int compare(ColorHSB o1, ColorHSB o2) {
if (o1._bri < o2._bri)
return -1;
else if (o1._bri > o2._bri)
return 1;
if (o1._hue < o2._hue)
return -1;
else if (o1._hue > o2._hue)
return 1;
if (o1._sat < o2._sat)
return -1;
else if (o1._sat > o2._sat)
return 1;
return 0;
}
};
LinkedList<ColorHSB> hueColors = new LinkedList<ColorHSB>();
LinkedList<ColorHSB> briColors = new LinkedList<ColorHSB>();
for (float r = 0; r < 1.01f; r += 0.25f) {
for (float g = 0; g < 1.01f; g += 0.25f) {
for (float b = 0; b < 1.01f; b += 0.25f) {
ColorRGBA blah = new ColorRGBA(r,g,b,0);
ColorHSB hsb = new ColorHSB(blah);
hueColors.add(hsb);
briColors.add(hsb);
}
}
}
BufferedImage bi = new BufferedImage(50, 125*20, BufferedImage.TYPE_INT_RGB);
Graphics graphics = bi.getGraphics();
Collections.sort(hueColors, hue);
int count = 0;
for (ColorHSB color : hueColors) {
ColorRGBA blah = color.toRGBA();
System.out.println("color: " + color);
graphics.setColor(new Color(blah.r, blah.g, blah.b));
graphics.fillRect(0, count*20, 50, 20);
++count;
}
try {
ImageIO.write(bi, "png", new File("hue.png"));
} catch (Exception e) {
e.printStackTrace();
}
bi = new BufferedImage(50, 125*20, BufferedImage.TYPE_INT_RGB);
graphics = bi.getGraphics();
Collections.sort(briColors, bri);
count = 0;
for (ColorHSB color : briColors) {
ColorRGBA blah = color.toRGBA();
System.out.println("color: " + color);
graphics.setColor(new Color(blah.r, blah.g, blah.b));
graphics.fillRect(0, count*20, 50, 20);
++count;
}
try {
ImageIO.write(bi, "png", new File("bri.png"));
} catch (Exception e) {
e.printStackTrace();
}
}
}
| |
/**
* Copyright (c) 2013-2020 Nikita Koksharov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.redisson;
import io.netty.buffer.ByteBuf;
import org.redisson.api.*;
import org.redisson.api.mapreduce.RCollectionMapReduce;
import org.redisson.client.codec.Codec;
import org.redisson.client.codec.StringCodec;
import org.redisson.client.protocol.RedisCommands;
import org.redisson.command.CommandExecutor;
import org.redisson.mapreduce.RedissonCollectionMapReduce;
import org.redisson.misc.RPromise;
import org.redisson.misc.RedissonPromise;
import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.util.*;
/**
*
* @author Nikita Koksharov
*
* @param <V> value type
*/
public class RedissonSortedSet<V> extends RedissonObject implements RSortedSet<V> {

    /**
     * Outcome of {@link #binarySearch(Object, Codec)}. Mirrors
     * {@link java.util.Collections#binarySearch}: a non-negative index means
     * the value was found; a negative index encodes the insertion point as
     * {@code -(insertionPoint + 1)}.
     */
    public static class BinarySearchResult<V> {

        private V value;
        // NOTE(review): stays null when the probed list slot read back null
        // (see binarySearch); callers doing getIndex() >= 0 would then NPE on
        // unboxing — verify this cannot occur while the set's lock is held.
        private Integer index;

        public BinarySearchResult(V value) {
            super();
            this.value = value;
        }

        public BinarySearchResult() {
        }

        public void setIndex(Integer index) {
            this.index = index;
        }

        public Integer getIndex() {
            return index;
        }

        public V getValue() {
            return value;
        }

    }

    // Client-side ordering used by binarySearch; lazily replaced by the
    // comparator class recorded in Redis (see checkComparator()).
    private Comparator comparator = Comparator.naturalOrder();

    CommandExecutor commandExecutor;

    // Distributed lock serializing mutations so the backing list stays sorted.
    private RLock lock;
    // The Redis list that physically stores the elements in sorted order.
    private RedissonList<V> list;
    // Holds "<comparatorClassName>:<classSignature>" for cross-client checks.
    private RBucket<String> comparatorHolder;
    private RedissonClient redisson;

    protected RedissonSortedSet(CommandExecutor commandExecutor, String name, RedissonClient redisson) {
        super(commandExecutor, name);
        this.commandExecutor = commandExecutor;
        this.redisson = redisson;

        comparatorHolder = redisson.getBucket(getComparatorKeyName(), StringCodec.INSTANCE);
        lock = redisson.getLock("redisson_sortedset_lock:{" + getName() + "}");
        list = (RedissonList<V>) redisson.<V>getList(getName());
    }

    public RedissonSortedSet(Codec codec, CommandExecutor commandExecutor, String name, Redisson redisson) {
        super(codec, commandExecutor, name);
        this.commandExecutor = commandExecutor;
        // FIX: this constructor never assigned this.redisson, leaving the
        // field null and making mapReduce() pass a null RedissonClient.
        this.redisson = redisson;

        comparatorHolder = redisson.getBucket(getComparatorKeyName(), StringCodec.INSTANCE);
        lock = redisson.getLock("redisson_sortedset_lock:{" + getName() + "}");
        list = (RedissonList<V>) redisson.<V>getList(getName(), codec);
    }

    @Override
    public <KOut, VOut> RCollectionMapReduce<V, KOut, VOut> mapReduce() {
        return new RedissonCollectionMapReduce<V, KOut, VOut>(this, redisson, commandExecutor.getConnectionManager());
    }

    /**
     * Instantiates the comparator class recorded in Redis after verifying
     * that the local class signature matches the recorded one (guards against
     * clients sorting with different comparator versions).
     *
     * @throws IllegalStateException on signature mismatch or load failure
     */
    private void loadComparator() {
        try {
            String comparatorSign = comparatorHolder.get();
            if (comparatorSign != null) {
                String[] parts = comparatorSign.split(":");
                String className = parts[0];
                String sign = parts[1];

                String result = calcClassSign(className);
                if (!result.equals(sign)) {
                    throw new IllegalStateException("Local class signature of " + className + " differs from used by this SortedSet!");
                }

                Class<?> clazz = Class.forName(className);
                comparator = (Comparator<V>) clazz.newInstance();
            }
        } catch (IllegalStateException e) {
            throw e;
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
    }

    // TODO cache result
    /**
     * SHA-1 hex signature of the serialized Class object for {@code name};
     * used to detect mismatched comparator implementations between clients.
     */
    private static String calcClassSign(String name) {
        try {
            Class<?> clazz = Class.forName(name);

            ByteArrayOutputStream result = new ByteArrayOutputStream();
            ObjectOutputStream outputStream = new ObjectOutputStream(result);
            outputStream.writeObject(clazz);
            outputStream.close();

            MessageDigest crypt = MessageDigest.getInstance("SHA-1");
            crypt.reset();
            crypt.update(result.toByteArray());

            return new BigInteger(1, crypt.digest()).toString(16);
        } catch (Exception e) {
            throw new IllegalStateException("Can't calculate sign of " + name, e);
        }
    }

    @Override
    public Collection<V> readAll() {
        return get(readAllAsync());
    }

    @Override
    public RFuture<Collection<V>> readAllAsync() {
        // LRANGE 0 -1: fetch the whole backing list in one round trip.
        return commandExecutor.readAsync(getName(), codec, RedisCommands.LRANGE_SET, getName(), 0, -1);
    }

    @Override
    public int size() {
        return list.size();
    }

    @Override
    public boolean isEmpty() {
        return list.isEmpty();
    }

    @Override
    public boolean contains(final Object o) {
        // Client-side binary search over the remote list (O(log n) round trips).
        return binarySearch((V) o, codec).getIndex() >= 0;
    }

    @Override
    public Iterator<V> iterator() {
        return list.iterator();
    }

    @Override
    public Object[] toArray() {
        return list.toArray();
    }

    @Override
    public <T> T[] toArray(T[] a) {
        return list.toArray(a);
    }

    /**
     * Adds the value at its sorted position. Guarded by the distributed lock
     * so concurrent clients cannot interleave search and insert.
     *
     * @return true if the value was inserted, false if already present
     */
    @Override
    public boolean add(V value) {
        lock.lock();

        try {
            checkComparator();

            BinarySearchResult<V> res = binarySearch(value, codec);
            if (res.getIndex() < 0) {
                // Negative result encodes the insertion point.
                int index = -(res.getIndex() + 1);

                ByteBuf encodedValue = encode(value);

                // Lua: insert before the element currently at `index`, or
                // append when the insertion point is past the tail.
                commandExecutor.evalWrite(getName(), RedisCommands.EVAL_VOID,
                   "local len = redis.call('llen', KEYS[1]);"
                    + "if tonumber(ARGV[1]) < len then "
                        + "local pivot = redis.call('lindex', KEYS[1], ARGV[1]);"
                        + "redis.call('linsert', KEYS[1], 'before', pivot, ARGV[2]);"
                        + "return;"
                    + "end;"
                    + "redis.call('rpush', KEYS[1], ARGV[2]);", Arrays.<Object>asList(getName()), index, encodedValue);
                return true;
            } else {
                return false;
            }
        } finally {
            lock.unlock();
        }
    }

    /**
     * Reloads the comparator if another client registered a different
     * comparator class since this one was loaded.
     */
    private void checkComparator() {
        String comparatorSign = comparatorHolder.get();
        if (comparatorSign != null) {
            String[] vals = comparatorSign.split(":");
            String className = vals[0];
            if (!comparator.getClass().getName().equals(className)) {
                loadComparator();
            }
        }
    }

    // Async façade: the blocking add() (lock + binary search) is offloaded to
    // the connection manager's executor.
    public RFuture<Boolean> addAsync(final V value) {
        final RPromise<Boolean> promise = new RedissonPromise<Boolean>();
        commandExecutor.getConnectionManager().getExecutor().execute(new Runnable() {
            public void run() {
                try {
                    boolean res = add(value);
                    promise.trySuccess(res);
                } catch (Exception e) {
                    promise.tryFailure(e);
                }
            }
        });
        return promise;
    }

    // Async façade for remove(); same offloading pattern as addAsync().
    @Override
    public RFuture<Boolean> removeAsync(final Object value) {
        final RPromise<Boolean> promise = new RedissonPromise<Boolean>();
        commandExecutor.getConnectionManager().getExecutor().execute(new Runnable() {
            @Override
            public void run() {
                try {
                    boolean result = remove(value);
                    promise.trySuccess(result);
                } catch (Exception e) {
                    promise.tryFailure(e);
                }
            }
        });
        return promise;
    }

    /**
     * Removes the value if present; lock-guarded like add().
     *
     * @return true if the value was found and removed
     */
    @Override
    public boolean remove(Object value) {
        lock.lock();

        try {
            checkComparator();

            BinarySearchResult<V> res = binarySearch((V) value, codec);
            if (res.getIndex() < 0) {
                return false;
            }

            list.remove((int) res.getIndex());
            return true;
        } finally {
            lock.unlock();
        }
    }

    @Override
    public boolean containsAll(Collection<?> c) {
        for (Object object : c) {
            if (!contains(object)) {
                return false;
            }
        }
        return true;
    }

    @Override
    public boolean addAll(Collection<? extends V> c) {
        boolean changed = false;
        for (V v : c) {
            if (add(v)) {
                changed = true;
            }
        }
        return changed;
    }

    @Override
    public boolean retainAll(Collection<?> c) {
        boolean changed = false;
        // Iterator.remove is required here: removing through the iterator is
        // the only safe way to mutate while iterating.
        for (Iterator<?> iterator = iterator(); iterator.hasNext();) {
            Object object = (Object) iterator.next();
            if (!c.contains(object)) {
                iterator.remove();
                changed = true;
            }
        }
        return changed;
    }

    @Override
    public boolean removeAll(Collection<?> c) {
        boolean changed = false;
        for (Object obj : c) {
            if (remove(obj)) {
                changed = true;
            }
        }
        return changed;
    }

    @Override
    public void clear() {
        delete();
    }

    @Override
    public Comparator<? super V> comparator() {
        return comparator;
    }

    @Override
    public SortedSet<V> subSet(V fromElement, V toElement) {
        throw new UnsupportedOperationException();
//        return new RedissonSubSortedSet<V>(this, connectionManager, fromElement, toElement);
    }

    @Override
    public SortedSet<V> headSet(V toElement) {
        // NOTE: delegates to subSet, which is unsupported — always throws.
        return subSet(null, toElement);
    }

    @Override
    public SortedSet<V> tailSet(V fromElement) {
        // NOTE: delegates to subSet, which is unsupported — always throws.
        return subSet(fromElement, null);
    }

    @Override
    public V first() {
        V res = list.getValue(0);
        if (res == null) {
            throw new NoSuchElementException();
        }
        return res;
    }

    @Override
    public V last() {
        // Redis index -1 addresses the last element of the list.
        V res = list.getValue(-1);
        if (res == null) {
            throw new NoSuchElementException();
        }
        return res;
    }

    private String getComparatorKeyName() {
        return "redisson_sortedset_comparator:{" + getName() + "}";
    }

    /**
     * Atomically registers the comparator for this set, but only while the
     * backing list is still empty (changing the order of a populated set
     * would corrupt it).
     *
     * @return true if the comparator was registered
     */
    @Override
    public boolean trySetComparator(Comparator<? super V> comparator) {
        String className = comparator.getClass().getName();
        final String comparatorSign = className + ":" + calcClassSign(className);

        Boolean res = commandExecutor.evalWrite(getName(), StringCodec.INSTANCE, RedisCommands.EVAL_BOOLEAN,
                "if redis.call('llen', KEYS[1]) == 0 then "
                + "redis.call('set', KEYS[2], ARGV[1]); "
                + "return 1; "
                + "else "
                + "return 0; "
                + "end",
                Arrays.<Object>asList(getName(), getComparatorKeyName()), comparatorSign);
        if (res) {
            this.comparator = comparator;
        }
        return res;
    }

    // TODO optimize: get three values each time instead of single
    /**
     * Classic binary search against the remote list, one LINDEX per probe.
     * Returns the found index, or -(insertionPoint + 1) when absent; an
     * index-less result is returned if a probed slot reads back null.
     */
    public BinarySearchResult<V> binarySearch(V value, Codec codec) {
        int size = list.size();
        int upperIndex = size - 1;
        int lowerIndex = 0;
        while (lowerIndex <= upperIndex) {
            // Overflow-safe midpoint.
            int index = lowerIndex + (upperIndex - lowerIndex) / 2;

            V res = list.getValue(index);
            if (res == null) {
                return new BinarySearchResult<V>();
            }
            int cmp = comparator.compare(value, res);

            if (cmp == 0) {
                BinarySearchResult<V> indexRes = new BinarySearchResult<V>();
                indexRes.setIndex(index);
                return indexRes;
            } else if (cmp < 0) {
                upperIndex = index - 1;
            } else {
                lowerIndex = index + 1;
            }
        }

        BinarySearchResult<V> indexRes = new BinarySearchResult<V>();
        indexRes.setIndex(-(lowerIndex + 1));
        return indexRes;
    }

    /** AbstractCollection-style rendering: "[a, b, c]" or "[]" when empty. */
    @SuppressWarnings("AvoidInlineConditionals")
    public String toString() {
        Iterator<V> it = iterator();
        if (! it.hasNext())
            return "[]";

        StringBuilder sb = new StringBuilder();
        sb.append('[');
        for (;;) {
            V e = it.next();
            sb.append(e == this ? "(this Collection)" : e);
            if (! it.hasNext())
                return sb.append(']').toString();
            sb.append(',').append(' ');
        }
    }
}
| |
/*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Strings.nullToEmpty;
import com.google.common.base.Preconditions;
import com.google.javascript.rhino.InputId;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Set;
/**
* NodeTraversal allows an iteration through the nodes in the parse tree,
* and facilitates the optimizations on the parse tree.
*
*/
public class NodeTraversal {
private final AbstractCompiler compiler;
private final Callback callback;
/** Contains the current node*/
private Node curNode;
public static final DiagnosticType NODE_TRAVERSAL_ERROR =
DiagnosticType.error("JSC_NODE_TRAVERSAL_ERROR", "{0}");
/**
* Stack containing the Scopes that have been created. The Scope objects
* are lazily created; so the {@code scopeRoots} stack contains the
* Nodes for all Scopes that have not been created yet.
*/
private final Deque<Scope> scopes = new ArrayDeque<>();
/**
* A stack of scope roots. All scopes that have not been created
* are represented in this Deque.
*/
private final Deque<Node> scopeRoots = new ArrayDeque<>();
/**
* A stack of scope roots that are valid cfg roots. All cfg roots that have not been created
* are represented in this Deque.
*/
private final Deque<Node> cfgRoots = new ArrayDeque<>();
/**
* Stack of control flow graphs (CFG). There is one CFG per scope. CFGs
* are lazily populated: elements are {@code null} until requested by
* {@link #getControlFlowGraph()}. Note that {@link ArrayDeque} does not allow
* {@code null} elements, so {@link LinkedList} is used instead.
*/
Deque<ControlFlowGraph<Node>> cfgs = new LinkedList<>();
/** The current source file name */
private String sourceName;
/** The current input */
private InputId inputId;
/** The scope creator */
private final ScopeCreator scopeCreator;
private final boolean useBlockScope;
/** Possible callback for scope entry and exist **/
private ScopedCallback scopeCallback;
/** Callback for passes that iterate over a list of functions */
public interface FunctionCallback {
  // Invoked once per visited root; fnRoot is either a FUNCTION node or the
  // JS tree root (see traverseChangedFunctions below).
  void enterFunction(AbstractCompiler compiler, Node fnRoot);
}
/**
 * Callback for tree-based traversals.
 */
public interface Callback {
  /**
   * <p>Visits a node in pre order (before visiting its children) and decides
   * whether this node's children should be traversed. If children are
   * traversed, they will be visited by
   * {@link #visit(NodeTraversal, Node, Node)} in postorder.</p>
   * <p>Implementations can have side effects (e.g. modifying the parse
   * tree).</p>
   * @param parent the parent of {@code n}; null for the traversal root
   * @return whether the children of this node should be visited
   */
  boolean shouldTraverse(NodeTraversal nodeTraversal, Node n, Node parent);

  /**
   * <p>Visits a node in postorder (after its children have been visited).
   * A node is visited only if all its parents should be traversed
   * ({@link #shouldTraverse(NodeTraversal, Node, Node)}).</p>
   * <p>Implementations can have side effects (e.g. modifying the parse
   * tree).</p>
   */
  void visit(NodeTraversal t, Node n, Node parent);
}

/**
 * Callback that also knows about scope changes.
 */
public interface ScopedCallback extends Callback {
  /**
   * Called immediately after entering a new scope. The new scope can
   * be accessed through t.getScope()
   */
  void enterScope(NodeTraversal t);

  /**
   * Called immediately before exiting a scope. The ending scope can
   * be accessed through t.getScope()
   */
  void exitScope(NodeTraversal t);
}
/**
 * Abstract callback to visit all nodes in postorder.
 */
public abstract static class AbstractPostOrderCallback implements Callback {
  @Override
  public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n,
      Node parent) {
    // Always descend; subclasses only implement visit() (postorder).
    return true;
  }
}

/** Abstract callback to visit all nodes in preorder. */
public abstract static class AbstractPreOrderCallback implements Callback {
  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {}
}

/**
 * Abstract scoped callback to visit all nodes in postorder, with no-op
 * scope hooks that subclasses may selectively override.
 */
public abstract static class AbstractScopedCallback
    implements ScopedCallback {
  @Override
  public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n,
      Node parent) {
    return true;
  }

  @Override
  public void enterScope(NodeTraversal t) {}

  @Override
  public void exitScope(NodeTraversal t) {}
}
/**
 * Abstract callback to visit all nodes but not traverse into function
 * bodies.
 */
public abstract static class AbstractShallowCallback implements Callback {
  @Override
  public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n,
      Node parent) {
    // We do want to traverse the name of a named function, but we don't
    // want to traverse the arguments or body (the name is always the
    // function node's first child).
    return parent == null || !parent.isFunction() ||
        n == parent.getFirstChild();
  }
}

/**
 * Abstract callback to visit all structure and statement nodes but doesn't
 * traverse into functions or expressions.
 */
public abstract static class AbstractShallowStatementCallback
    implements Callback {
  @Override
  public final boolean shouldTraverse(NodeTraversal nodeTraversal, Node n,
      Node parent) {
    // Descend only through control structures and statement blocks; stop at
    // everything else (expressions, functions, ...).
    return parent == null || NodeUtil.isControlStructure(parent)
        || NodeUtil.isStatementBlock(parent);
  }
}
/**
 * Abstract callback to visit a pruned set of nodes, selected by token type.
 */
public abstract static class AbstractNodeTypePruningCallback
    implements Callback {
  // Token types the filter applies to.
  private final Set<Integer> nodeTypes;
  // true: nodeTypes is a whitelist; false: it is a blacklist.
  private final boolean include;

  /**
   * Creates an abstract pruned callback.
   * @param nodeTypes the nodes to include in the traversal
   */
  public AbstractNodeTypePruningCallback(Set<Integer> nodeTypes) {
    this(nodeTypes, true);
  }

  /**
   * Creates an abstract pruned callback.
   * @param nodeTypes the nodes to include/exclude in the traversal
   * @param include whether to include or exclude the nodes in the traversal
   */
  public AbstractNodeTypePruningCallback(Set<Integer> nodeTypes,
      boolean include) {
    this.nodeTypes = nodeTypes;
    this.include = include;
  }

  @Override
  public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n,
      Node parent) {
    // Traverse a whitelisted node, or a node absent from a blacklist.
    boolean listed = nodeTypes.contains(n.getType());
    return listed == include;
  }
}
/**
 * Creates a node traversal using the specified callback interface.
 */
public NodeTraversal(AbstractCompiler compiler, Callback cb) {
  // Pick a scope creator matching the language level: ES6+ needs the
  // block-scoping creator; otherwise use the untyped syntactic one.
  this(compiler, cb, compiler.getLanguageMode().isEs6OrHigher()
      ? new Es6SyntacticScopeCreator(compiler)
      : SyntacticScopeCreator.makeUntyped(compiler));
}

/**
 * Creates a node traversal using the specified callback interface
 * and the scope creator.
 */
public NodeTraversal(AbstractCompiler compiler, Callback cb,
    ScopeCreator scopeCreator) {
  this.callback = cb;
  // Cache the scoped view of the callback so enter/exit-scope notifications
  // can be fired without repeated instanceof checks during traversal.
  if (cb instanceof ScopedCallback) {
    this.scopeCallback = (ScopedCallback) cb;
  }
  this.compiler = compiler;
  this.inputId = null;
  this.sourceName = "";
  this.scopeCreator = scopeCreator;
  // Block-scope handling in traverseBranch is keyed off this flag.
  this.useBlockScope = scopeCreator.hasBlockScope();
}
/**
 * Rethrows an exception raised mid-traversal as a compiler internal error,
 * augmenting the message with the current node's position when known.
 */
private void throwUnexpectedException(Exception unexpectedException) {
  // If there's an unexpected exception, try to get the
  // line number of the code that caused it.
  String message = unexpectedException.getMessage();

  // TODO(user): It is possible to get more information if curNode or
  // its parent is missing. We still have the scope stack in which it is still
  // very useful to find out at least which function caused the exception.
  if (inputId != null) {
    message =
        unexpectedException.getMessage() + "\n" +
        formatNodeContext("Node", curNode) +
        (curNode == null ?
            "" :
            formatNodeContext("Parent", curNode.getParent()));
  }
  // Never returns: delegates the throw to the compiler.
  compiler.throwInternalError(message, unexpectedException);
}

/**
 * Renders "  <label>(<node>): <position>" for an error message, or a NULL
 * placeholder when the node is absent.
 */
private String formatNodeContext(String label, Node n) {
  if (n == null) {
    return "  " + label + ": NULL";
  }
  return "  " + label + "(" + n.toString(false, false, false) + "): "
      + formatNodePosition(n);
}
/**
 * Traverses a parse tree recursively.
 */
public void traverse(Node root) {
  try {
    // Seed per-file bookkeeping from the root before descending.
    inputId = NodeUtil.getInputId(root);
    sourceName = "";
    curNode = root;
    pushScope(root);
    // null parent ensures that the shallow callbacks will traverse root
    traverseBranch(root, null);
    popScope();
  } catch (Exception unexpectedException) {
    throwUnexpectedException(unexpectedException);
  }
}

/**
 * Traverses both the externs tree and the JS source tree under their shared
 * parent scope (externs first, then sources).
 */
void traverseRoots(Node externs, Node root) {
  try {
    Node scopeRoot = externs.getParent();
    Preconditions.checkState(scopeRoot != null);

    inputId = NodeUtil.getInputId(scopeRoot);
    sourceName = "";
    curNode = scopeRoot;
    pushScope(scopeRoot);

    traverseBranch(externs, scopeRoot);
    // Both trees must hang off the same synthetic root.
    Preconditions.checkState(root.getParent() == scopeRoot);
    traverseBranch(root, scopeRoot);

    popScope();
  } catch (Exception unexpectedException) {
    throwUnexpectedException(unexpectedException);
  }
}
private static final String MISSING_SOURCE = "[source unknown]";
/**
 * Renders "file:line:column\n<source line>\n" for a node, substituting the
 * MISSING_SOURCE placeholder wherever file or line text is unavailable.
 */
private String formatNodePosition(Node n) {
  String file = getBestSourceFileName(n);
  if (file == null) {
    return MISSING_SOURCE + "\n";
  }

  int line = n.getLineno();
  int column = n.getCharno();
  String sourceLine = compiler.getSourceLine(file, line);
  String shownLine = (sourceLine == null) ? MISSING_SOURCE : sourceLine;
  return file + ":" + line + ":" + column + "\n" + shownLine + "\n";
}
/**
 * Traverses a parse tree recursively with a scope, starting with the given
 * root. This should only be used in the global scope. Otherwise, use
 * {@link #traverseAtScope}.
 */
void traverseWithScope(Node root, Scope s) {
  Preconditions.checkState(s.isGlobal());
  try {
    inputId = null;
    sourceName = "";
    curNode = root;
    // Reuse the caller-provided Scope instead of lazily creating one.
    pushScope(s);
    traverseBranch(root, null);
    popScope();
  } catch (Exception unexpectedException) {
    throwUnexpectedException(unexpectedException);
  }
}

/**
 * Traverses a parse tree recursively with a scope, starting at that scope's
 * root.
 */
void traverseAtScope(Scope s) {
  Node n = s.getRootNode();
  if (n.isFunction()) {
    // We need to do some extra magic to make sure that the scope doesn't
    // get re-created when we dive into the function.
    if (inputId == null) {
      inputId = NodeUtil.getInputId(n);
    }
    sourceName = getSourceName(n);
    curNode = n;
    pushScope(s);

    // Traverse params and body directly (skipping the name) so
    // traverseBranch does not push a second scope for the function.
    Node args = n.getSecondChild();
    Node body = args.getNext();
    traverseBranch(args, n);
    traverseBranch(body, n);

    popScope();
  } else if (n.isBlock()) {
    if (inputId == null) {
      inputId = NodeUtil.getInputId(n);
    }
    sourceName = getSourceName(n);
    curNode = n;
    pushScope(s);

    // traverseBranch is not called here to avoid re-creating the block scope.
    for (Node child = n.getFirstChild(); child != null; ) {
      // Capture next first: child could be replaced during traversal.
      Node next = child.getNext();
      traverseBranch(child, n);
      child = next;
    }

    popScope();
  } else {
    Preconditions.checkState(s.isGlobal(), "Expected global scope. Got:", s);
    traverseWithScope(n, s);
  }
}
/**
 * Traverse a function out-of-band of normal traversal.
 *
 * @param node The function node.
 * @param scope The scope the function is contained in. Does not fire enter/exit
 *     callback events for this scope.
 */
public void traverseFunctionOutOfBand(Node node, Scope scope) {
  Preconditions.checkNotNull(scope);
  Preconditions.checkState(node.isFunction());
  Preconditions.checkState(scope.getRootNode() != null);
  if (inputId == null) {
    inputId = NodeUtil.getInputId(node);
  }
  curNode = node.getParent();
  // "quietly": suppress the ScopedCallback enter/exit notifications for the
  // containing scope.
  pushScope(scope, true /* quietly */);
  traverseBranch(node, curNode);
  popScope(true /* quietly */);
}

/**
 * Traverses an inner node recursively with a refined scope. An inner node may
 * be any node with a non {@code null} parent (i.e. all nodes except the
 * root).
 *
 * @param node the node to traverse
 * @param parent the node's parent, it may not be {@code null}
 * @param refinedScope the refined scope of the scope currently at the top of
 *     the scope stack or in trivial cases that very scope or {@code null}
 */
void traverseInnerNode(Node node, Node parent, Scope refinedScope) {
  Preconditions.checkNotNull(parent);
  if (inputId == null) {
    inputId = NodeUtil.getInputId(node);
  }
  // Only push when the refinement differs from the current scope; otherwise
  // traverse in place.
  if (refinedScope != null && getScope() != refinedScope) {
    curNode = node;
    pushScope(refinedScope);
    traverseBranch(node, parent);
    popScope();
  } else {
    traverseBranch(node, parent);
  }
}

/** @return the compiler this traversal reports to. */
public AbstractCompiler getCompiler() {
  return compiler;
}
/**
 * Gets the current line number, or zero if it cannot be determined. The line
 * number is retrieved lazily as a running time optimization: we walk up from
 * the current node until an ancestor carries a non-negative line number.
 */
public int getLineNumber() {
  for (Node n = curNode; n != null; n = n.getParent()) {
    int line = n.getLineno();
    if (line >= 0) {
      return line;
    }
  }
  return 0;
}
/**
 * Gets the current character offset (column), or zero if it cannot be
 * determined. The value is retrieved lazily as a running time optimization.
 * (Javadoc previously said "line number" — a copy-paste from getLineNumber.)
 */
public int getCharno() {
  Node cur = curNode;
  while (cur != null) {
    // FIX: local was misleadingly named "line" (copy-paste from
    // getLineNumber); it holds a character offset.
    int charno = cur.getCharno();
    if (charno >= 0) {
      return charno;
    }
    cur = cur.getParent();
  }
  return 0;
}
/**
 * Gets the current input source name.
 *
 * @return A string that may be empty, but not null
 */
public String getSourceName() {
  return sourceName;
}

/**
 * Gets the current input source.
 */
public CompilerInput getInput() {
  return compiler.getInput(inputId);
}

/**
 * Gets the current input module.
 *
 * @return the module of the current input, or null when there is no input
 */
public JSModule getModule() {
  CompilerInput input = getInput();
  return input == null ? null : input.getModule();
}

/** Returns the node currently being traversed. */
public Node getCurrentNode() {
  return curNode;
}
/**
 * Traversal for passes that work only on changed functions.
 * Suppose a loopable pass P1 uses this traversal.
 * Then, if a function doesn't change between two runs of P1, it won't look at
 * the function the second time.
 * (We're assuming that P1 runs to a fixpoint, o/w we may miss optimizations.)
 *
 * <p>Most changes are reported with calls to Compiler.reportCodeChange(), which
 * doesn't know which scope changed. We keep track of the current scope by
 * calling Compiler.setScope inside pushScope and popScope.
 * The automatic tracking can be wrong in rare cases when a pass changes scope
 * w/out causing a call to pushScope or popScope. It's very hard to find the
 * places where this happens unless a bug is triggered.
 * Passes that do cross-scope modifications call
 * Compiler.reportChangeToEnclosingScope(Node n).
 */
public static void traverseChangedFunctions(
    AbstractCompiler compiler, FunctionCallback callback) {
  final AbstractCompiler comp = compiler;
  final FunctionCallback cb = callback;
  final Node jsRoot = comp.getJsRoot();
  NodeTraversal.traverseEs6(comp, jsRoot,
      new AbstractPreOrderCallback() {
        @Override
        public final boolean shouldTraverse(NodeTraversal t, Node n, Node p) {
          // Fire the callback for the root itself and for any function whose
          // scope the compiler reports as changed.
          if ((n == jsRoot || n.isFunction()) && comp.hasScopeChanged(n)) {
            cb.enterFunction(comp, n);
          }
          return true;
        }
      });
}
/**
 * Traverses a node recursively.
 * @deprecated Use traverseEs6 whenever possible.
 */
@Deprecated
public static void traverse(AbstractCompiler compiler, Node root, Callback cb) {
  NodeTraversal t = new NodeTraversal(compiler, cb);
  t.traverse(root);
}

/**
 * Traverses using the ES6SyntacticScopeCreator (block-scope aware).
 */
// TODO (stephshi): rename to "traverse" when the old traverse method is no longer used
public static void traverseEs6(AbstractCompiler compiler, Node root, Callback cb) {
  NodeTraversal t = new NodeTraversal(compiler, cb, new Es6SyntacticScopeCreator(compiler));
  t.traverse(root);
}

/** Traverses using the typed syntactic scope creator. */
public static void traverseTyped(AbstractCompiler compiler, Node root, Callback cb) {
  NodeTraversal t = new NodeTraversal(compiler, cb, SyntacticScopeCreator.makeTyped(compiler));
  t.traverse(root);
}

/**
 * Traverses externs and sources with the default scope creator.
 * @deprecated mirrors {@link #traverse(AbstractCompiler, Node, Callback)}.
 */
@Deprecated
public static void traverseRoots(
    AbstractCompiler compiler, Callback cb, Node externs, Node root) {
  NodeTraversal t = new NodeTraversal(compiler, cb);
  t.traverseRoots(externs, root);
}

/** Traverses externs and sources with the ES6 (block-scope aware) creator. */
public static void traverseRootsEs6(
    AbstractCompiler compiler, Callback cb, Node externs, Node root) {
  NodeTraversal t = new NodeTraversal(compiler, cb, new Es6SyntacticScopeCreator(compiler));
  t.traverseRoots(externs, root);
}

/** Traverses externs and sources with the typed scope creator. */
public static void traverseRootsTyped(
    AbstractCompiler compiler, Callback cb, Node externs, Node root) {
  NodeTraversal t = new NodeTraversal(compiler, cb, SyntacticScopeCreator.makeTyped(compiler));
  t.traverseRoots(externs, root);
}
/**
* Traverses a branch.
*/
private void traverseBranch(Node n, Node parent) {
int type = n.getType();
if (type == Token.SCRIPT) {
inputId = n.getInputId();
sourceName = getSourceName(n);
}
curNode = n;
if (!callback.shouldTraverse(this, n, parent)) {
return;
}
if (type == Token.FUNCTION) {
traverseFunction(n, parent);
} else if (type == Token.CLASS) {
traverseClass(n, parent);
} else if (useBlockScope && NodeUtil.createsBlockScope(n)) {
traverseBlockScope(n);
} else {
for (Node child = n.getFirstChild(); child != null; ) {
// child could be replaced, in which case our child node
// would no longer point to the true next
Node next = child.getNext();
traverseBranch(child, n);
child = next;
}
}
curNode = n;
callback.visit(this, n, parent);
}
/**
 * Traverses a FUNCTION node: name, parameter list, then body, with a new
 * scope pushed around everything except a declaration's name.
 */
private void traverseFunction(Node n, Node parent) {
  Preconditions.checkState(n.getChildCount() == 3, n);
  Preconditions.checkState(n.isFunction());
  final Node fnName = n.getFirstChild();
  boolean isFunctionExpression = (parent != null)
      && NodeUtil.isFunctionExpression(n);
  if (!isFunctionExpression) {
    // Function declarations are in the scope containing the declaration.
    traverseBranch(fnName, n);
  }
  curNode = n;
  pushScope(n);
  if (isFunctionExpression) {
    // Function expression names are only accessible within the function
    // scope.
    traverseBranch(fnName, n);
  }
  final Node args = fnName.getNext();
  final Node body = args.getNext();
  // Args
  traverseBranch(args, n);
  // Body
  // ES6 "arrow" function may not have a block as a body.
  traverseBranch(body, n);
  popScope();
}
/**
 * Traverses a CLASS node: name, extends clause, then body, with a new
 * scope pushed around everything except a declaration's name.
 */
private void traverseClass(Node n, Node parent) {
  Preconditions.checkState(n.getChildCount() == 3, n);
  Preconditions.checkState(n.isClass());
  final Node className = n.getFirstChild();
  boolean isClassExpression = NodeUtil.isClassExpression(n);
  if (!isClassExpression) {
    // Class declarations are in the scope containing the declaration.
    traverseBranch(className, n);
  }
  curNode = n;
  pushScope(n);
  if (isClassExpression) {
    // Class expression names are only accessible within the function
    // scope.
    traverseBranch(className, n);
  }
  final Node extendsClause = className.getNext();
  final Node body = extendsClause.getNext();
  // Extends
  traverseBranch(extendsClause, n);
  // Body
  traverseBranch(body, n);
  popScope();
}
/** Traverses a non-function block that creates its own (block) scope. */
private void traverseBlockScope(Node n) {
  pushScope(n);
  for (Node child : n.children()) {
    // NOTE(review): unlike traverseBranch, this loop does not pre-cache the
    // next sibling, so a callback that replaces a child here could skip or
    // re-visit nodes — confirm whether block-scope callbacks are allowed to
    // mutate siblings.
    traverseBranch(child, n);
  }
  popScope();
}
/**
 * Returns the innermost enclosing function node, or null when the current
 * CFG root is not a function. When possible, prefer this method over
 * NodeUtil.getEnclosingFunction() because this in general looks at fewer
 * nodes.
 */
public Node getEnclosingFunction() {
  Node cfgRoot = getCfgRoot();
  if (cfgRoot.isFunction()) {
    return cfgRoot;
  }
  return null;
}
/**
 * Creates a new scope rooted at {@code node} (e.g. when entering a
 * function). The Scope object itself is materialized lazily by getScope().
 */
private void pushScope(Node node) {
  Preconditions.checkState(curNode != null);
  Preconditions.checkState(node != null);
  compiler.setScope(node);
  scopeRoots.push(node);
  if (NodeUtil.isValidCfgRoot(node)) {
    // This node also starts a new control-flow graph; its CFG is built
    // lazily in getControlFlowGraph().
    cfgRoots.push(node);
    cfgs.push(null);
  }
  if (scopeCallback != null) {
    scopeCallback.enterScope(this);
  }
}
/** Pushes an already-materialized scope, firing the enterScope callback. */
private void pushScope(Scope s) {
  pushScope(s, false);
}
/**
 * Pushes an already-materialized scope (e.g. when entering a function).
 * @param quietly Don't fire an enterScope callback.
 */
private void pushScope(Scope s, boolean quietly) {
  Preconditions.checkState(curNode != null);
  compiler.setScope(s.getRootNode());
  scopes.push(s);
  if (NodeUtil.isValidCfgRoot(s.getRootNode())) {
    // The scope root also begins a new control-flow graph, built lazily.
    cfgRoots.push(s.getRootNode());
    cfgs.push(null);
  }
  if (!quietly && scopeCallback != null) {
    scopeCallback.enterScope(this);
  }
}
/** Pops back to the previous scope, firing the exitScope callback. */
private void popScope() {
  popScope(false);
}
/**
 * Pops back to the previous scope (e.g. when leaving a function).
 * @param quietly Don't fire the exitScope callback.
 */
private void popScope(boolean quietly) {
  if (!quietly && scopeCallback != null) {
    scopeCallback.exitScope(this);
  }
  // The top of the scope stack lives either in scopeRoots (pushed lazily,
  // not yet materialized) or in scopes (materialized Scope objects).
  Node scopeRoot;
  if (scopeRoots.isEmpty()) {
    scopeRoot = scopes.pop().getRootNode();
  } else {
    scopeRoot = scopeRoots.pop();
  }
  if (NodeUtil.isValidCfgRoot(scopeRoot)) {
    // Keep the CFG stacks in lock-step with the scope stacks.
    Preconditions.checkState(!cfgRoots.isEmpty());
    Preconditions.checkState(cfgRoots.pop() == scopeRoot);
    cfgs.pop();
  }
  if (hasScope()) {
    compiler.setScope(getScopeRoot());
  }
}
/**
 * Gets the current scope, materializing any lazily-pushed scope roots
 * (outermost first) into real Scope objects.
 */
public Scope getScope() {
  Scope scope = scopes.isEmpty() ? null : scopes.peek();
  if (scopeRoots.isEmpty()) {
    return scope;
  }
  // Build scopes oldest-first so each new scope's parent already exists.
  Iterator<Node> it = scopeRoots.descendingIterator();
  while (it.hasNext()) {
    scope = scopeCreator.createScope(it.next(), scope);
    scopes.push(scope);
  }
  scopeRoots.clear();
  // No need to call compiler.setScope; the top scopeRoot is now the top scope
  return scope;
}
/** Returns the closest scope that can hoist declarations. */
public Scope getClosestHoistScope() {
  // TODO(moz): This should not call getScope(). We should find the root of the closest hoist
  // scope and effectively getScope() from there, which avoids scanning inner scopes that might
  // not be needed.
  return getScope().getClosestHoistScope();
}
/**
 * Returns the current scope as a TypedScope. Only valid during typed
 * traversals; fails fast otherwise.
 */
public TypedScope getTypedScope() {
  Scope s = getScope();
  Preconditions.checkState(s instanceof TypedScope,
      "getTypedScope called for untyped traversal");
  return (TypedScope) s;
}
/** Gets the control flow graph for the current JS scope, building it lazily. */
public ControlFlowGraph<Node> getControlFlowGraph() {
  if (cfgs.peek() == null) {
    // First request for this cfg root: compute the CFG and cache it by
    // replacing the null placeholder pushed in pushScope.
    ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false, true);
    cfa.process(null, getCfgRoot());
    cfgs.pop();
    cfgs.push(cfa.getCfg());
  }
  return cfgs.peek();
}
/** Returns the root node of the current (innermost) scope. */
public Node getScopeRoot() {
  return scopeRoots.isEmpty()
      ? scopes.peek().getRootNode()
      : scopeRoots.peek();
}
/** Returns the root node of the innermost control-flow graph. */
private Node getCfgRoot() {
  return cfgRoots.peek();
}
/**
 * Determines whether the traversal is currently in the global scope. Note that this returns false
 * in a global block scope.
 */
public boolean inGlobalScope() {
  // Depth 0 is reserved for the global scope itself.
  return getScopeDepth() == 0;
}
/**
 * Determines whether the hoist scope of the current traversal is global.
 */
public boolean inGlobalHoistScope() {
  // A non-function cfg root means we are at (or block-nested inside) the
  // global scope.
  return !getCfgRoot().isFunction();
}
/** Returns the 0-based depth of the current scope (0 == global scope). */
int getScopeDepth() {
  // Both stacks together represent the full scope chain: materialized
  // scopes plus lazily-pushed scope roots.
  int sum = scopes.size() + scopeRoots.size();
  Preconditions.checkState(sum > 0);
  return sum - 1; // Use 0-based scope depth to be consistent within the compiler
}
/** Whether the traversal currently has any scope on its stacks. */
public boolean hasScope() {
  return !scopes.isEmpty() || !scopeRoots.isEmpty();
}
/** Reports a diagnostic (error or warning) at the given node. */
public void report(Node n, DiagnosticType diagnosticType,
    String... arguments) {
  compiler.report(JSError.make(n, diagnosticType, arguments));
}
/** Returns the node's source file name, or the empty string if absent. */
private static String getSourceName(Node n) {
  return nullToEmpty(n.getSourceFileName());
}
/** Returns the ID of the input (script) currently being traversed. */
InputId getInputId() {
  return inputId;
}
/**
 * Creates a JSError during NodeTraversal.
 *
 * @param n Determines the line and char position within the source file name
 * @param level The severity level to report the error at
 * @param type The DiagnosticType
 * @param arguments Arguments to be incorporated into the message
 */
public JSError makeError(Node n, CheckLevel level, DiagnosticType type,
    String... arguments) {
  return JSError.make(n, level, type, arguments);
}
/**
 * Creates a JSError during NodeTraversal, at the diagnostic's default level.
 *
 * @param n Determines the line and char position within the source file name
 * @param type The DiagnosticType
 * @param arguments Arguments to be incorporated into the message
 */
public JSError makeError(Node n, DiagnosticType type, String... arguments) {
  return JSError.make(n, type, arguments);
}
/**
 * Returns n's source file name, falling back to the current script's
 * source name when n is null.
 */
private String getBestSourceFileName(Node n) {
  if (n == null) {
    return sourceName;
  }
  return n.getSourceFileName();
}
}
| |
package org.hl7.fhir.instance.model;
/*
Copyright (c) 2011-2013, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Wed, Jul 10, 2013 05:58+1000 for FHIR v0.09
import java.util.*;
/**
* A description of a query with a set of parameters
*/
/**
 * A description of a query with a set of parameters.
 */
public class Query extends Resource {

    /** Outcome of a server processing a query. */
    public enum QueryOutcome {
        ok, // The query was processed successfully
        limited, // The query was processed successfully, but some additional limitations were added
        refused, // The server refused to process the query
        error, // The server tried to process the query, but some error occurred
        Null; // added to help the parsers

        /**
         * Converts a wire-format code into the corresponding outcome.
         *
         * @param codeString the code; null or empty yields null
         * @throws Exception if the code is not recognized
         */
        public static QueryOutcome fromCode(String codeString) throws Exception {
            if (codeString == null || "".equals(codeString))
                return null;
            if ("ok".equals(codeString))
                return ok;
            if ("limited".equals(codeString))
                return limited;
            if ("refused".equals(codeString))
                return refused;
            if ("error".equals(codeString))
                return error;
            throw new Exception("Unknown QueryOutcome code '"+codeString+"'");
        }

        /** Returns the wire-format code for this outcome ("?" for Null). */
        public String toCode() {
            switch (this) {
                case ok: return "ok";
                case limited: return "limited";
                case refused: return "refused";
                case error: return "error";
                default: return "?";
            }
        }
    }

    /** {@link EnumFactory} adapter so parsers can handle QueryOutcome generically. */
    public class QueryOutcomeEnumFactory implements EnumFactory {
        public Enum<?> fromCode(String codeString) throws Exception {
            // Fixed: this null/empty guard was accidentally written twice, which
            // left the null-return nested under a redundant duplicate check.
            if (codeString == null || "".equals(codeString))
                return null;
            if ("ok".equals(codeString))
                return QueryOutcome.ok;
            if ("limited".equals(codeString))
                return QueryOutcome.limited;
            if ("refused".equals(codeString))
                return QueryOutcome.refused;
            if ("error".equals(codeString))
                return QueryOutcome.error;
            throw new Exception("Unknown QueryOutcome code '"+codeString+"'");
        }

        public String toCode(Enum<?> code) throws Exception {
            if (code == QueryOutcome.ok)
                return "ok";
            if (code == QueryOutcome.limited)
                return "limited";
            if (code == QueryOutcome.refused)
                return "refused";
            if (code == QueryOutcome.error)
                return "error";
            return "?";
        }
    }

    /** The response portion of a query interaction. */
    public class QueryResponseComponent extends Element {
        /**
         * Links response to source query
         */
        protected Uri identifier;

        /**
         * Outcome of processing the query
         */
        protected Enumeration<QueryOutcome> outcome;

        /**
         * Total number of matching records
         */
        protected Integer total;

        /**
         * Parameters server used
         */
        protected List<Extension> parameter = new ArrayList<Extension>();

        /**
         * To get first page (if paged)
         */
        protected List<Extension> first = new ArrayList<Extension>();

        /**
         * To get previous page (if paged)
         */
        protected List<Extension> previous = new ArrayList<Extension>();

        /**
         * To get next page (if paged)
         */
        protected List<Extension> next = new ArrayList<Extension>();

        /**
         * To get last page (if paged)
         */
        protected List<Extension> last = new ArrayList<Extension>();

        /**
         * Resources that are the results of the search
         */
        protected List<ResourceReference> reference = new ArrayList<ResourceReference>();

        public Uri getIdentifier() {
            return this.identifier;
        }

        public void setIdentifier(Uri value) {
            this.identifier = value;
        }

        /** Returns the identifier as a plain string, or null when absent. */
        public String getIdentifierSimple() {
            return this.identifier == null ? null : this.identifier.getValue();
        }

        public void setIdentifierSimple(String value) {
            if (this.identifier == null)
                this.identifier = new Uri();
            this.identifier.setValue(value);
        }

        public Enumeration<QueryOutcome> getOutcome() {
            return this.outcome;
        }

        public void setOutcome(Enumeration<QueryOutcome> value) {
            this.outcome = value;
        }

        /** Returns the outcome as a plain enum value, or null when absent. */
        public QueryOutcome getOutcomeSimple() {
            return this.outcome == null ? null : this.outcome.getValue();
        }

        public void setOutcomeSimple(QueryOutcome value) {
            if (this.outcome == null)
                this.outcome = new Enumeration<QueryOutcome>();
            this.outcome.setValue(value);
        }

        public Integer getTotal() {
            return this.total;
        }

        public void setTotal(Integer value) {
            this.total = value;
        }

        /**
         * Returns the total as a plain int, or -1 when no total is present
         * (mirroring the -1 "absent" sentinel accepted by setTotalSimple).
         */
        public int getTotalSimple() {
            // Fixed: the previous "? null :" branch auto-unboxed null into the
            // int return and threw NullPointerException whenever total was absent.
            return this.total == null ? -1 : this.total.getValue();
        }

        public void setTotalSimple(int value) {
            // -1 is the sentinel for "no total".
            if (value == -1)
                this.total = null;
            else {
                if (this.total == null)
                    this.total = new Integer();
                this.total.setValue(value);
            }
        }

        public List<Extension> getParameter() {
            return this.parameter;
        }

        public List<Extension> getFirst() {
            return this.first;
        }

        public List<Extension> getPrevious() {
            return this.previous;
        }

        public List<Extension> getNext() {
            return this.next;
        }

        public List<Extension> getLast() {
            return this.last;
        }

        public List<ResourceReference> getReference() {
            return this.reference;
        }

        /**
         * Deep-copies this response as an inner instance of the given Query.
         *
         * @param e the Query that will own the copy
         */
        public QueryResponseComponent copy(Query e) {
            QueryResponseComponent dst = e.new QueryResponseComponent();
            dst.identifier = identifier == null ? null : identifier.copy();
            dst.outcome = outcome == null ? null : outcome.copy();
            dst.total = total == null ? null : total.copy();
            dst.parameter = new ArrayList<Extension>();
            for (Extension i : parameter)
                dst.parameter.add(i.copy());
            dst.first = new ArrayList<Extension>();
            for (Extension i : first)
                dst.first.add(i.copy());
            dst.previous = new ArrayList<Extension>();
            for (Extension i : previous)
                dst.previous.add(i.copy());
            dst.next = new ArrayList<Extension>();
            for (Extension i : next)
                dst.next.add(i.copy());
            dst.last = new ArrayList<Extension>();
            for (Extension i : last)
                dst.last.add(i.copy());
            dst.reference = new ArrayList<ResourceReference>();
            for (ResourceReference i : reference)
                dst.reference.add(i.copy());
            return dst;
        }
    }

    /**
     * Links query and its response(s)
     */
    protected Uri identifier;

    /**
     * Set of query parameters with values
     */
    protected List<Extension> parameter = new ArrayList<Extension>();

    /**
     * If this is a response to a query
     */
    protected QueryResponseComponent response;

    public Uri getIdentifier() {
        return this.identifier;
    }

    public void setIdentifier(Uri value) {
        this.identifier = value;
    }

    /** Returns the identifier as a plain string, or null when absent. */
    public String getIdentifierSimple() {
        return this.identifier == null ? null : this.identifier.getValue();
    }

    public void setIdentifierSimple(String value) {
        if (this.identifier == null)
            this.identifier = new Uri();
        this.identifier.setValue(value);
    }

    public List<Extension> getParameter() {
        return this.parameter;
    }

    public QueryResponseComponent getResponse() {
        return this.response;
    }

    public void setResponse(QueryResponseComponent value) {
        this.response = value;
    }

    /** Deep-copies this Query, including its response component. */
    public Query copy() {
        Query dst = new Query();
        dst.identifier = identifier == null ? null : identifier.copy();
        dst.parameter = new ArrayList<Extension>();
        for (Extension i : parameter)
            dst.parameter.add(i.copy());
        dst.response = response == null ? null : response.copy(dst);
        return dst;
    }

    protected Query typedCopy() {
        return copy();
    }

    @Override
    public ResourceType getResourceType() {
        return ResourceType.Query;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.tools.dynamometer;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.primitives.Ints;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Random;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.ContainerManagementProtocol;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.api.records.UpdatedContainer;
import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
import org.apache.hadoop.yarn.client.api.async.AMRMClientAsync;
import org.apache.hadoop.yarn.client.api.async.NMClientAsync;
import org.apache.hadoop.yarn.client.api.async.impl.NMClientAsyncImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.security.AMRMTokenIdentifier;
import org.apache.hadoop.yarn.util.Records;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* The ApplicationMaster for Dynamometer. This will launch DataNodes in YARN
* containers. If the RPC address of a NameNode is specified, it will configure
* the DataNodes to talk to that NameNode. Else, a NameNode will be launched as
* part of this YARN application. This does not implement any retry/failure
* handling.
* TODO: Add proper retry/failure handling
* <p>
* The AM will persist until it has run for a period of time equal to the
* timeout specified or until the application is killed.
* <p>
* If the NameNode is launched internally, it will upload some information
* onto the remote HDFS instance (i.e., the default FileSystem) about its
* hostname and ports. This is in the location determined by the
* {@link DynoConstants#DYNAMOMETER_STORAGE_DIR} and
* {@link DynoConstants#NN_INFO_FILE_NAME} constants and is in the
* {@link Properties} file format. This is consumed by this AM as well as the
* {@link Client} to determine how to contact the NameNode.
* <p>
* Information about the location of the DataNodes is logged by the AM.
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class ApplicationMaster {
private static final Logger LOG =
LoggerFactory.getLogger(ApplicationMaster.class);
private static final Random RAND = new Random();
// Configuration
private Configuration conf;
// Handle to communicate with the Resource Manager
private AMRMClientAsync<ContainerRequest> amRMClient;
// Handle to communicate with the Node Manager
private NMClientAsync nmClientAsync;
// Listen to process the response from the Node Manager
private NMCallbackHandler containerListener;
// The collection of options passed in via the Client
private AMOptions amOptions;
private List<LocalResource> blockListFiles;
private int numTotalDataNodes;
private int numTotalDataNodeContainers;
// Counter for completed datanodes (complete denotes successful or failed )
private AtomicInteger numCompletedDataNodeContainers = new AtomicInteger();
// Allocated datanode count so that we know how many datanodes has the RM
// allocated to us
private AtomicInteger numAllocatedDataNodeContainers = new AtomicInteger();
// Count of failed datanodes
private AtomicInteger numFailedDataNodeContainers = new AtomicInteger();
// True iff the application has completed and is ready for cleanup
// Once true, will never be false. This variable should not be accessed
// directly but rather through the isComplete, waitForCompletion, and
// markCompleted methods.
private boolean completed = false;
private final Object completionLock = new Object();
private ByteBuffer allTokens;
// Launch threads
private List<Thread> launchThreads = new ArrayList<>();
// True iff this AM should launch and manage a NameNode
private boolean launchNameNode;
// The service RPC address of a remote NameNode to be contacted by the
// launched DataNodes
private String namenodeServiceRpcAddress = "";
// Directory to use for remote storage (a location on the remote FS which
// can be accessed by all components)
private Path remoteStoragePath;
// The ACLs to view the launched containers
private Map<ApplicationAccessType, String> applicationAcls;
// The container the NameNode is running within
private volatile Container namenodeContainer;
// Map of the containers that the DataNodes are running within
private ConcurrentMap<ContainerId, Container> datanodeContainers =
new ConcurrentHashMap<>();
// Username of the user who launched this application.
private String launchingUser;
/**
 * Program entry point. Exits with 0 on success (or help requested), 1 on an
 * initialization/run error, and 2 when the application itself failed.
 *
 * @param args Command line args
 */
public static void main(String[] args) {
  boolean succeeded = false;
  try {
    ApplicationMaster appMaster = new ApplicationMaster();
    LOG.info("Initializing ApplicationMaster");
    if (!appMaster.init(args)) {
      System.exit(0);
    }
    succeeded = appMaster.run();
  } catch (Throwable t) {
    LOG.error("Error running ApplicationMaster", t);
    System.exit(1);
  }
  if (succeeded) {
    LOG.info("Application Master completed successfully. exiting");
    System.exit(0);
  } else {
    LOG.info("Application Master failed. exiting");
    System.exit(2);
  }
}
/** Creates the AM with a fresh YARN configuration. */
public ApplicationMaster() {
  // Set up the configuration
  conf = new YarnConfiguration();
}
/**
 * Parse command line options and the environment variables the {@link Client}
 * passes in.
 *
 * @param args Command line args
 * @return Whether init successful and run should be invoked
 * @throws ParseException on error while parsing options
 */
public boolean init(String[] args) throws ParseException {
  Options opts = new Options();
  AMOptions.setOptions(opts);
  CommandLine cliParser = new GnuParser().parse(opts, args);
  if (args.length == 0) {
    printUsage(opts);
    throw new IllegalArgumentException(
        "No args specified for application master to initialize");
  }
  if (cliParser.hasOption("help")) {
    printUsage(opts);
    return false;
  }
  // Cluster-level settings arrive via environment variables set by the Client.
  Map<String, String> envs = System.getenv();
  remoteStoragePath = new Path(
      envs.get(DynoConstants.REMOTE_STORAGE_PATH_ENV));
  applicationAcls = new HashMap<>();
  applicationAcls.put(ApplicationAccessType.VIEW_APP,
      envs.get(DynoConstants.JOB_ACL_VIEW_ENV));
  launchingUser = envs.get(Environment.USER.name());
  if (envs.containsKey(DynoConstants.REMOTE_NN_RPC_ADDR_ENV)) {
    // An external NameNode address was supplied: do not launch one ourselves.
    launchNameNode = false;
    namenodeServiceRpcAddress = envs
        .get(DynoConstants.REMOTE_NN_RPC_ADDR_ENV);
  } else {
    launchNameNode = true;
    // namenodeServiceRpcAddress will be set in run() once properties are
    // available
  }
  ContainerId containerId =
      ContainerId.fromString(envs.get(Environment.CONTAINER_ID.name()));
  ApplicationAttemptId appAttemptID = containerId.getApplicationAttemptId();
  LOG.info("Application master for app: appId={}, clusterTimestamp={}, "
      + "attemptId={}", appAttemptID.getApplicationId().getId(),
      appAttemptID.getApplicationId().getClusterTimestamp(),
      appAttemptID.getAttemptId());
  amOptions = AMOptions.initFromParser(cliParser);
  return true;
}
/**
 * Helper function to print usage.
 *
 * @param opts parsed command line options
 */
private void printUsage(Options opts) {
  new HelpFormatter().printHelp("ApplicationMaster", opts);
}
/**
 * Main run function for the application master.
 *
 * @return True if the application completed successfully; false if it exited
 *         unexpectedly, failed, was killed, etc.
 * @throws YarnException for issues while contacting YARN daemons
 * @throws IOException for other issues
 * @throws InterruptedException when the thread is interrupted
 */
public boolean run() throws YarnException, IOException, InterruptedException {
  LOG.info("Starting ApplicationMaster");
  Credentials credentials = UserGroupInformation.getCurrentUser()
      .getCredentials();
  DataOutputBuffer dob = new DataOutputBuffer();
  credentials.writeTokenStorageToStream(dob);
  // Now remove the AM->RM token so that containers cannot access it.
  credentials.getAllTokens().removeIf((token) ->
      token.getKind().equals(AMRMTokenIdentifier.KIND_NAME));
  allTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
  // Start the async RM and NM clients; their callback handlers drive the
  // rest of the application's lifecycle.
  AMRMClientAsync.AbstractCallbackHandler allocListener =
      new RMCallbackHandler();
  amRMClient = AMRMClientAsync.createAMRMClientAsync(1000, allocListener);
  amRMClient.init(conf);
  amRMClient.start();
  containerListener = createNMCallbackHandler();
  nmClientAsync = new NMClientAsyncImpl(containerListener);
  nmClientAsync.init(conf);
  nmClientAsync.start();
  // Register self with ResourceManager
  // This will start heartbeating to the RM
  String appMasterHostname = NetUtils.getHostname();
  amRMClient.registerApplicationMaster(appMasterHostname, -1, "");
  // Supplier to use to indicate to wait-loops to stop waiting
  Supplier<Boolean> exitCritera = this::isComplete;
  Optional<Properties> namenodeProperties = Optional.empty();
  if (launchNameNode) {
    ContainerRequest nnContainerRequest = setupContainerAskForRM(
        amOptions.getNameNodeMemoryMB(), amOptions.getNameNodeVirtualCores(),
        0, amOptions.getNameNodeNodeLabelExpression());
    LOG.info("Requested NameNode ask: " + nnContainerRequest.toString());
    amRMClient.addContainerRequest(nnContainerRequest);
    // Wait for the NN container to make its information available on the
    // shared
    // remote file storage
    Path namenodeInfoPath = new Path(remoteStoragePath,
        DynoConstants.NN_INFO_FILE_NAME);
    LOG.info("Waiting on availability of NameNode information at "
        + namenodeInfoPath);
    namenodeProperties = DynoInfraUtils.waitForAndGetNameNodeProperties(
        exitCritera, conf, namenodeInfoPath, LOG);
    if (!namenodeProperties.isPresent()) {
      // The wait was aborted (application marked complete) before the
      // NameNode published its info; give up.
      cleanup();
      return false;
    }
    namenodeServiceRpcAddress = DynoInfraUtils
        .getNameNodeServiceRpcAddr(namenodeProperties.get()).toString();
    LOG.info("NameNode information: " + namenodeProperties.get());
    LOG.info("NameNode can be reached at: " + DynoInfraUtils
        .getNameNodeHdfsUri(namenodeProperties.get()).toString());
    DynoInfraUtils.waitForNameNodeStartup(namenodeProperties.get(),
        exitCritera, LOG);
  } else {
    LOG.info("Using remote NameNode with RPC address: "
        + namenodeServiceRpcAddress);
  }
  blockListFiles = Collections
      .synchronizedList(getDataNodeBlockListingFiles());
  numTotalDataNodes = blockListFiles.size();
  if (numTotalDataNodes == 0) {
    LOG.error(
        "No block listing files were found! Cannot run with 0 DataNodes.");
    markCompleted();
    return false;
  }
  // Multiple DataNodes may share one container, so container count can be
  // smaller than the DataNode count.
  numTotalDataNodeContainers = (int) Math.ceil(((double) numTotalDataNodes)
      / Math.max(1, amOptions.getDataNodesPerCluster()));
  LOG.info("Requesting {} DataNode containers with {} MB memory, {} vcores",
      numTotalDataNodeContainers, amOptions.getDataNodeMemoryMB(),
      amOptions.getDataNodeVirtualCores());
  for (int i = 0; i < numTotalDataNodeContainers; ++i) {
    ContainerRequest datanodeAsk = setupContainerAskForRM(
        amOptions.getDataNodeMemoryMB(), amOptions.getDataNodeVirtualCores(),
        1, amOptions.getDataNodeNodeLabelExpression());
    amRMClient.addContainerRequest(datanodeAsk);
    LOG.debug("Requested datanode ask: " + datanodeAsk.toString());
  }
  LOG.info("Finished requesting datanode containers");
  if (launchNameNode) {
    DynoInfraUtils.waitForNameNodeReadiness(namenodeProperties.get(),
        numTotalDataNodes, true, exitCritera, conf, LOG);
  }
  // Block until markCompleted() is called (timeout, kill, or NN exit).
  waitForCompletion();
  return cleanup();
}
/** Factory for the NodeManager callback handler; overridable in tests. */
private NMCallbackHandler createNMCallbackHandler() {
  return new NMCallbackHandler();
}
/**
 * Wait until the application has finished and is ready for cleanup.
 */
private void waitForCompletion() throws InterruptedException {
  synchronized (completionLock) {
    // Loop (rather than a single wait) to guard against spurious wakeups.
    while (!completed) {
      completionLock.wait();
    }
  }
}
/**
 * Check completion status of the application.
 *
 * @return True iff it has completed.
 */
private boolean isComplete() {
  // Synchronized for visibility of the completed flag across threads.
  synchronized (completionLock) {
    return completed;
  }
}
/**
 * Mark that this application should begin cleaning up and exit, waking all
 * threads blocked in {@link #waitForCompletion()}.
 */
private void markCompleted() {
  synchronized (completionLock) {
    completed = true;
    // notifyAll() rather than notify(): if more than one thread ever waits
    // on completionLock, a single notify() could wake only one of them and
    // leave the others blocked forever. Since completed never reverts to
    // false, waking every waiter is always correct.
    completionLock.notifyAll();
  }
}
/**
 * Stops containers, unregisters from the RM, and computes the final status.
 *
 * @return True iff the application successfully completed
 */
private boolean cleanup() {
  // Join all launched threads
  // needed for when we time out
  // and we need to release containers
  for (Thread launchThread : launchThreads) {
    try {
      // Bounded join so a hung launcher cannot stall shutdown indefinitely.
      launchThread.join(10000);
    } catch (InterruptedException e) {
      LOG.info("Exception thrown in thread join: " + e.getMessage());
      e.printStackTrace();
    }
  }
  // When the application completes, it should stop all running containers
  LOG.info("Application completed. Stopping running containers");
  nmClientAsync.stop();
  // When the application completes, it should send a finish application
  // signal to the RM
  LOG.info("Application completed. Signalling finish to RM");
  FinalApplicationStatus appStatus;
  String appMessage = null;
  boolean success;
  // NOTE(review): success is judged against numTotalDataNodes here, while
  // the all-done check in onContainersCompleted uses
  // numTotalDataNodeContainers; these differ when multiple DataNodes share
  // a container — confirm which count is intended.
  if (numFailedDataNodeContainers.get() == 0
      && numCompletedDataNodeContainers.get() == numTotalDataNodes) {
    appStatus = FinalApplicationStatus.SUCCEEDED;
    success = true;
  } else {
    appStatus = FinalApplicationStatus.FAILED;
    appMessage = "Diagnostics: total=" + numTotalDataNodeContainers
        + ", completed=" + numCompletedDataNodeContainers.get()
        + ", allocated=" + numAllocatedDataNodeContainers.get()
        + ", failed=" + numFailedDataNodeContainers.get();
    success = false;
  }
  try {
    amRMClient.unregisterApplicationMaster(appStatus, appMessage, null);
  } catch (YarnException|IOException ex) {
    LOG.error("Failed to unregister application", ex);
  }
  amRMClient.stop();
  return success;
}
private class RMCallbackHandler
extends AMRMClientAsync.AbstractCallbackHandler {
/**
 * Handles container completion events from the RM: classifies each
 * completed container as NameNode or DataNode, updates the completion and
 * failure counters, and marks the application done when appropriate.
 */
@Override
public void onContainersCompleted(
    List<ContainerStatus> completedContainers) {
  LOG.info("Got response from RM for container ask, completedCnt="
      + completedContainers.size());
  for (ContainerStatus containerStatus : completedContainers) {
    String containerInfo = "containerID=" + containerStatus.getContainerId()
        + ", state=" + containerStatus.getState() + ", exitStatus="
        + containerStatus.getExitStatus() + ", diagnostics="
        + StringUtils.abbreviate(containerStatus.getDiagnostics(), 1000);
    String component;
    if (isNameNode(containerStatus.getContainerId())) {
      component = "NAMENODE";
    } else if (isDataNode(containerStatus.getContainerId())) {
      component = "DATANODE";
    } else {
      LOG.error("Received container status for unknown container: "
          + containerInfo);
      continue;
    }
    LOG.info(
        "Got container status for " + component + ": " + containerInfo);
    // non complete containers should not be here
    assert (containerStatus.getState() == ContainerState.COMPLETE);
    if (component.equals("NAMENODE")) {
      // Without a NameNode the cluster is useless, so shut everything down.
      LOG.info("NameNode container completed; marking application as done");
      markCompleted();
    }
    // increment counters for completed/failed containers
    int exitStatus = containerStatus.getExitStatus();
    int completedIdx = numCompletedDataNodeContainers.incrementAndGet();
    if (0 != exitStatus) {
      numFailedDataNodeContainers.incrementAndGet();
    } else {
      LOG.info("DataNode {} completed successfully, containerId={}",
          completedIdx, containerStatus.getContainerId());
    }
  }
  if (numCompletedDataNodeContainers.get() == numTotalDataNodeContainers) {
    LOG.info(
        "All datanode containers completed; marking application as done");
    markCompleted();
  }
}
@Override
public void onContainersAllocated(List<Container> allocatedContainers) {
LOG.info("Got response from RM for container ask, allocatedCnt="
+ allocatedContainers.size());
for (Container container : allocatedContainers) {
LaunchContainerRunnable containerLauncher;
String componentType;
Resource rsrc = container.getResource();
if (launchNameNode
&& rsrc.getMemorySize() >= amOptions.getNameNodeMemoryMB()
&& rsrc.getVirtualCores() >= amOptions.getNameNodeVirtualCores()
&& namenodeContainer == null) {
namenodeContainer = container;
componentType = "NAMENODE";
containerLauncher = new LaunchContainerRunnable(container, true);
} else if (rsrc.getMemorySize() >= amOptions.getDataNodeMemoryMB()
&& rsrc.getVirtualCores() >= amOptions.getDataNodeVirtualCores()
&& numAllocatedDataNodeContainers.get() < numTotalDataNodes) {
if (launchNameNode && namenodeContainer == null) {
LOG.error("Received a container with following resources suited "
+ "for a DataNode but no NameNode container exists: "
+ "containerMem=" + rsrc.getMemorySize() + ", containerVcores="
+ rsrc.getVirtualCores());
continue;
}
numAllocatedDataNodeContainers.getAndIncrement();
datanodeContainers.put(container.getId(), container);
componentType = "DATANODE";
containerLauncher = new LaunchContainerRunnable(container, false);
} else {
LOG.warn("Received unwanted container allocation: " + container);
nmClientAsync.stopContainerAsync(container.getId(),
container.getNodeId());
continue;
}
LOG.info("Launching " + componentType + " on a new container."
+ ", containerId=" + container.getId() + ", containerNode="
+ container.getNodeId().getHost() + ":"
+ container.getNodeId().getPort() + ", containerNodeURI="
+ container.getNodeHttpAddress() + ", containerResourceMemory="
+ rsrc.getMemorySize() + ", containerResourceVirtualCores="
+ rsrc.getVirtualCores());
Thread launchThread = new Thread(containerLauncher);
// launch and start the container on a separate thread to keep
// the main thread unblocked
// as all containers may not be allocated at one go.
launchThreads.add(launchThread);
launchThread.start();
}
}
@Override
public void onShutdownRequest() {
markCompleted();
}
@Override
public void onNodesUpdated(List<NodeReport> updatedNodes) {
LOG.info("onNodesUpdated: " + Joiner.on(",").join(updatedNodes));
}
@Override
public float getProgress() {
return 0.0f;
}
@Override
public void onError(Throwable e) {
markCompleted();
amRMClient.stop();
}
@Override
public void onContainersUpdated(List<UpdatedContainer> containers) {
LOG.info("onContainersUpdated: " + Joiner.on(",").join(containers));
}
}
private class NMCallbackHandler
extends NMClientAsync.AbstractCallbackHandler {
@Override
public void onContainerStopped(ContainerId containerId) {
if (isNameNode(containerId)) {
LOG.info("NameNode container stopped: " + containerId);
namenodeContainer = null;
markCompleted();
} else if (isDataNode(containerId)) {
LOG.debug("DataNode container stopped: " + containerId);
datanodeContainers.remove(containerId);
} else {
LOG.error(
"onContainerStopped received unknown container ID: " + containerId);
}
}
@Override
public void onContainerStatusReceived(ContainerId containerId,
ContainerStatus containerStatus) {
if (LOG.isDebugEnabled()) {
LOG.debug("Container Status: id=" + containerId + ", status="
+ containerStatus);
}
}
@Override
public void onContainerStarted(ContainerId containerId,
Map<String, ByteBuffer> allServiceResponse) {
if (isNameNode(containerId)) {
LOG.info("NameNode container started at ID " + containerId);
} else if (isDataNode(containerId)) {
if (LOG.isDebugEnabled()) {
LOG.debug("Succeeded to start DataNode Container " + containerId);
}
nmClientAsync.getContainerStatusAsync(containerId,
datanodeContainers.get(containerId).getNodeId());
} else {
LOG.error(
"onContainerStarted received unknown container ID: " + containerId);
}
}
@Override
public void onStartContainerError(ContainerId containerId, Throwable t) {
if (isNameNode(containerId)) {
LOG.error("Failed to start namenode container ID " + containerId, t);
namenodeContainer = null;
markCompleted();
} else if (isDataNode(containerId)) {
LOG.error("Failed to start DataNode Container " + containerId);
datanodeContainers.remove(containerId);
numCompletedDataNodeContainers.incrementAndGet();
numFailedDataNodeContainers.incrementAndGet();
} else {
LOG.error("onStartContainerError received unknown container ID: "
+ containerId);
}
}
@Override
public void onGetContainerStatusError(ContainerId containerId,
Throwable t) {
LOG.error("Failed to query the status of Container " + containerId);
}
@Override
public void onStopContainerError(ContainerId containerId, Throwable t) {
if (isNameNode(containerId)) {
LOG.error("Failed to stop NameNode container ID " + containerId);
namenodeContainer = null;
} else if (isDataNode(containerId)) {
LOG.error("Failed to stop DataNode Container " + containerId);
datanodeContainers.remove(containerId);
} else {
LOG.error("onStopContainerError received unknown containerID: "
+ containerId);
}
}
@Override
@Deprecated
public void onContainerResourceIncreased(ContainerId containerId,
Resource resource) {
LOG.info("onContainerResourceIncreased: {}, {}", containerId, resource);
}
@Override
public void onContainerResourceUpdated(ContainerId containerId,
Resource resource) {
LOG.info("onContainerResourceUpdated: {}, {}", containerId, resource);
}
@Override
@Deprecated
public void onIncreaseContainerResourceError(ContainerId containerId,
Throwable t) {
LOG.info("onIncreaseContainerResourceError: {}", containerId, t);
}
@Override
public void onUpdateContainerResourceError(ContainerId containerId,
Throwable t) {
LOG.info("onUpdateContainerResourceError: {}", containerId, t);
}
}
  /**
   * Thread to connect to the {@link ContainerManagementProtocol} and launch the
   * container that will execute the shell command.
   */
  private class LaunchContainerRunnable implements Runnable {

    // Allocated container
    private Container container;
    // True iff this launcher starts the NameNode; false for a DataNode.
    private boolean isNameNodeLauncher;

    /**
     * @param lcontainer Allocated container
     * @param isNameNode True iff this should launch a NameNode
     */
    LaunchContainerRunnable(Container lcontainer, boolean isNameNode) {
      this.container = lcontainer;
      this.isNameNodeLauncher = isNameNode;
    }

    /**
     * Get the map of local resources to be used for launching this container.
     * Resource locations are taken from environment variables supplied by the
     * submitting client (see {@link DynoConstants}). A NameNode additionally
     * localizes the fsimage and its MD5 file; a DataNode localizes up to
     * {@code amOptions.getDataNodesPerCluster()} block listing files drawn
     * from the shared {@code blockListFiles} list.
     */
    private Map<String, LocalResource> getLocalResources() {
      Map<String, LocalResource> localResources = new HashMap<>();
      Map<String, String> envs = System.getenv();
      // Resources common to both component types.
      addAsLocalResourceFromEnv(DynoConstants.CONF_ZIP, localResources, envs);
      addAsLocalResourceFromEnv(DynoConstants.START_SCRIPT, localResources,
          envs);
      addAsLocalResourceFromEnv(DynoConstants.HADOOP_BINARY, localResources,
          envs);
      addAsLocalResourceFromEnv(DynoConstants.VERSION, localResources, envs);
      addAsLocalResourceFromEnv(DynoConstants.DYNO_DEPENDENCIES, localResources,
          envs);
      if (isNameNodeLauncher) {
        addAsLocalResourceFromEnv(DynoConstants.FS_IMAGE, localResources, envs);
        addAsLocalResourceFromEnv(DynoConstants.FS_IMAGE_MD5, localResources,
            envs);
      } else {
        int blockFilesToLocalize = Math.max(1,
            amOptions.getDataNodesPerCluster());
        for (int i = 0; i < blockFilesToLocalize; i++) {
          try {
            localResources.put(
                DynoConstants.BLOCK_LIST_RESOURCE_PATH_PREFIX + i,
                blockListFiles.remove(0));
          } catch (IndexOutOfBoundsException e) {
            // Best-effort: stop once no block listing files remain.
            // NOTE(review): blockListFiles is shared across the launcher
            // threads spawned from onContainersAllocated; remove(0) here is
            // not visibly synchronized -- confirm the list implementation is
            // safe for concurrent removal.
            break;
          }
        }
      }
      return localResources;
    }

    /**
     * Connects to CM, sets up container launch context for shell command and
     * eventually dispatches the container start request to the CM.
     */
    @Override
    public void run() {
      LOG.info("Setting up container launch context for containerid="
          + container.getId() + ", isNameNode=" + isNameNodeLauncher);
      ContainerLaunchContext ctx = Records
          .newRecord(ContainerLaunchContext.class);
      // Set the environment
      ctx.setEnvironment(amOptions.getShellEnv());
      ctx.setApplicationACLs(applicationAcls);
      try {
        ctx.setLocalResources(getLocalResources());
        ctx.setCommands(getContainerStartCommand());
      } catch (IOException e) {
        // Without resources/commands the container cannot be launched; give
        // up on this container.
        LOG.error("Error while configuring container!", e);
        return;
      }
      // Set up tokens for the container
      ctx.setTokens(allTokens.duplicate());
      nmClientAsync.startContainerAsync(container, ctx);
      LOG.info("Starting {}; track at: http://{}/node/containerlogs/{}/{}/",
          isNameNodeLauncher ? "NAMENODE" : "DATANODE",
          container.getNodeHttpAddress(), container.getId(), launchingUser);
    }

    /**
     * Return the command used to start this container.
     * Invokes the localized start script with the component type ("namenode"
     * or "datanode") and component-specific arguments, redirecting
     * stdout/stderr into the container's log directory.
     */
    private List<String> getContainerStartCommand() throws IOException {
      // Set the necessary command to execute on the allocated container
      List<String> vargs = new ArrayList<>();
      // Set executable command
      vargs.add("./" + DynoConstants.START_SCRIPT.getResourcePath());
      String component = isNameNodeLauncher ? "namenode" : "datanode";
      vargs.add(component);
      if (isNameNodeLauncher) {
        // The NameNode is told where the remote storage directory lives.
        vargs.add(remoteStoragePath.getFileSystem(conf)
            .makeQualified(remoteStoragePath).toString());
      } else {
        // DataNodes are told the NameNode's service RPC address plus a random
        // launch delay (seconds) to stagger their startups.
        vargs.add(namenodeServiceRpcAddress);
        vargs.add(String.valueOf(amOptions.getDataNodeLaunchDelaySec() < 1 ? 0
            : RAND.nextInt(
                Ints.checkedCast(amOptions.getDataNodeLaunchDelaySec()))));
      }
      // Add log redirect params
      vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout");
      vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr");
      LOG.info("Completed setting up command for " + component + ": " + vargs);
      // The whole command is joined into a single shell command line.
      return Lists.newArrayList(Joiner.on(" ").join(vargs));
    }

    /**
     * Add the given resource into the map of resources, using information from
     * the supplied environment variables.
     *
     * @param resource The resource to add.
     * @param localResources Map of local resources to insert into.
     * @param env Map of environment variables.
     */
    public void addAsLocalResourceFromEnv(DynoResource resource,
        Map<String, LocalResource> localResources, Map<String, String> env) {
      LOG.debug("Adding resource to localResources: " + resource);
      String resourcePath = resource.getResourcePath();
      if (resourcePath == null) {
        // Default to using the file name in the path
        resourcePath = resource.getPath(env).getName();
      }
      localResources.put(resourcePath,
          LocalResource.newInstance(URL.fromPath(resource.getPath(env)),
              resource.getType(), LocalResourceVisibility.APPLICATION,
              resource.getLength(env), resource.getTimestamp(env)));
    }
  }
private List<LocalResource> getDataNodeBlockListingFiles()
throws IOException {
Path blockListDirPath = new Path(
System.getenv().get(DynoConstants.BLOCK_LIST_PATH_ENV));
LOG.info("Looking for block listing files in " + blockListDirPath);
FileSystem blockZipFS = blockListDirPath.getFileSystem(conf);
List<LocalResource> files = new LinkedList<>();
for (FileStatus stat : blockZipFS.listStatus(blockListDirPath,
DynoConstants.BLOCK_LIST_FILE_FILTER)) {
LocalResource blockListResource = LocalResource.newInstance(
URL.fromPath(stat.getPath()),
LocalResourceType.FILE, LocalResourceVisibility.APPLICATION,
stat.getLen(), stat.getModificationTime());
files.add(blockListResource);
}
return files;
}
/**
* Return true iff {@code containerId} represents the NameNode container.
*/
private boolean isNameNode(ContainerId containerId) {
return namenodeContainer != null
&& namenodeContainer.getId().equals(containerId);
}
/**
* Return true iff {@code containerId} represents a DataNode container.
*/
private boolean isDataNode(ContainerId containerId) {
return datanodeContainers.containsKey(containerId);
}
/**
* Setup the request that will be sent to the RM for the container ask.
*
* @return the setup ResourceRequest to be sent to RM
*/
private ContainerRequest setupContainerAskForRM(int memory, int vcores,
int priority, String nodeLabel) {
Priority pri = Records.newRecord(Priority.class);
pri.setPriority(priority);
// Set up resource type requirements
// For now, memory and CPU are supported so we set memory and cpu
// requirements
Resource capability = Records.newRecord(Resource.class);
capability.setMemorySize(memory);
capability.setVirtualCores(vcores);
return new ContainerRequest(capability, null, null, pri, true, nodeLabel);
}
}
| |
/*
* Copyright (c) 2016 Uber Technologies, Inc. (hoodie-dev-group@uber.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.uber.hoodie;
import static com.uber.hoodie.common.HoodieTestDataGenerator.DEFAULT_FIRST_PARTITION_PATH;
import static com.uber.hoodie.common.HoodieTestDataGenerator.DEFAULT_SECOND_PARTITION_PATH;
import static com.uber.hoodie.common.HoodieTestDataGenerator.DEFAULT_THIRD_PARTITION_PATH;
import static com.uber.hoodie.common.table.HoodieTimeline.COMPACTION_ACTION;
import static com.uber.hoodie.common.table.HoodieTimeline.GREATER;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import com.google.common.collect.Iterables;
import com.uber.hoodie.avro.model.HoodieCompactionPlan;
import com.uber.hoodie.common.HoodieCleanStat;
import com.uber.hoodie.common.model.FileSlice;
import com.uber.hoodie.common.model.HoodieCleaningPolicy;
import com.uber.hoodie.common.model.HoodieCommitMetadata;
import com.uber.hoodie.common.model.HoodieDataFile;
import com.uber.hoodie.common.model.HoodieFileGroup;
import com.uber.hoodie.common.model.HoodieFileGroupId;
import com.uber.hoodie.common.model.HoodieRecord;
import com.uber.hoodie.common.model.HoodieTableType;
import com.uber.hoodie.common.model.HoodieTestUtils;
import com.uber.hoodie.common.model.HoodieWriteStat;
import com.uber.hoodie.common.table.HoodieTableMetaClient;
import com.uber.hoodie.common.table.HoodieTimeline;
import com.uber.hoodie.common.table.TableFileSystemView;
import com.uber.hoodie.common.table.timeline.HoodieActiveTimeline;
import com.uber.hoodie.common.table.timeline.HoodieInstant;
import com.uber.hoodie.common.table.timeline.HoodieInstant.State;
import com.uber.hoodie.common.util.AvroUtils;
import com.uber.hoodie.common.util.CompactionUtils;
import com.uber.hoodie.common.util.FSUtils;
import com.uber.hoodie.common.util.collection.Pair;
import com.uber.hoodie.config.HoodieCompactionConfig;
import com.uber.hoodie.config.HoodieWriteConfig;
import com.uber.hoodie.index.HoodieIndex;
import com.uber.hoodie.table.HoodieTable;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import org.apache.hadoop.fs.Path;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.scheduler.SparkListener;
import org.apache.spark.scheduler.SparkListenerTaskEnd;
import org.apache.spark.util.AccumulatorV2;
import org.junit.Assert;
import org.junit.Test;
import scala.Option;
import scala.collection.Iterator;
/**
* Test Cleaning related logic
*/
public class TestCleaner extends TestHoodieClientBase {
private static final int BIG_BATCH_INSERT_SIZE = 500;
private static Logger logger = LogManager.getLogger(TestHoodieClientBase.class);
  /**
   * Delegates all per-test cleanup to the base class.
   */
  @Override
  public void tearDown() throws IOException {
    super.tearDown();
  }
  /**
   * Helper method to do first batch of insert for clean by versions/commits tests
   *
   * @param cfg Hoodie Write Config
   * @param client Hoodie Client
   * @param recordGenFunction Function to generate records for insertion
   * @param insertFn Insertion API for testing
   * @return the commit time of the inserted batch
   * @throws Exception in case of error
   */
  private String insertFirstBigBatchForClientCleanerTest(
      HoodieWriteConfig cfg,
      HoodieWriteClient client,
      Function2<List<HoodieRecord>, String, Integer> recordGenFunction,
      Function3<JavaRDD<WriteStatus>, HoodieWriteClient, JavaRDD<HoodieRecord>, String> insertFn) throws Exception {
    /**
     * do a big insert
     * (this is basically same as insert part of upsert, just adding it here so we can
     * catch breakages in insert(), if the implementation diverges.)
     */
    String newCommitTime = client.startCommit();
    List<HoodieRecord> records = recordGenFunction.apply(newCommitTime, BIG_BATCH_INSERT_SIZE);
    JavaRDD<HoodieRecord> writeRecords = jsc.parallelize(records, 5);

    List<WriteStatus> statuses = insertFn.apply(client, writeRecords, newCommitTime).collect();
    // Verify there are no errors
    assertNoWriteErrors(statuses);

    // verify that there is a commit
    HoodieTableMetaClient metaClient = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
    HoodieTimeline timeline = new HoodieActiveTimeline(metaClient).getCommitTimeline();
    assertEquals("Expecting a single commit.", 1, timeline.findInstantsAfter("000", Integer.MAX_VALUE).countInstants());
    // Should have 100 records in table (check using Index), all in locations marked at commit
    HoodieTable table = HoodieTable.getHoodieTable(metaClient, getConfig(), jsc);
    assertFalse(table.getCompletedCommitsTimeline().empty());
    String commitTime = table.getCompletedCommitsTimeline().getInstants().findFirst().get().getTimestamp();
    // Inline cleaning is expected to have run alongside the commit above, at
    // the same instant time.
    assertFalse(table.getCompletedCleanTimeline().empty());
    assertEquals("The clean instant should be the same as the commit instant", commitTime,
        table.getCompletedCleanTimeline().getInstants().findFirst().get().getTimestamp());

    // Tag the inserted records against the index to confirm their locations
    // were recorded at this commit.
    HoodieIndex index = HoodieIndex.createIndex(cfg, jsc);
    List<HoodieRecord> taggedRecords = index.tagLocation(jsc.parallelize(records, 1), jsc, table).collect();
    checkTaggedRecords(taggedRecords, newCommitTime);
    return newCommitTime;
  }
/**
* Test Clean-By-Versions using insert/upsert API
*/
@Test
public void testInsertAndCleanByVersions() throws Exception {
testInsertAndCleanByVersions(HoodieWriteClient::insert, HoodieWriteClient::upsert, false);
}
/**
* Test Clean-By-Versions using prepped versions of insert/upsert API
*/
@Test
public void testInsertPreppedAndCleanByVersions() throws Exception {
testInsertAndCleanByVersions(HoodieWriteClient::insertPreppedRecords,
HoodieWriteClient::upsertPreppedRecords, true);
}
/**
* Test Clean-By-Versions using bulk-insert/upsert API
*/
@Test
public void testBulkInsertAndCleanByVersions() throws Exception {
testInsertAndCleanByVersions(HoodieWriteClient::bulkInsert, HoodieWriteClient::upsert, false);
}
/**
* Test Clean-By-Versions using prepped versions of bulk-insert/upsert API
*/
@Test
public void testBulkInsertPreppedAndCleanByVersions() throws Exception {
testInsertAndCleanByVersions(
(client, recordRDD, commitTime) -> client.bulkInsertPreppedRecords(recordRDD, commitTime, Option.empty()),
HoodieWriteClient::upsertPreppedRecords, true);
}
  /**
   * Test Helper for Cleaning by versions logic from HoodieWriteClient API perspective
   *
   * @param insertFn Insert API to be tested
   * @param upsertFn Upsert API to be tested
   * @param isPreppedAPI Flag to indicate if a prepped-version is used. If true, a wrapper function will be used during
   * record generation to also tag the records (de-dupe is implicit as we use uniq record-gen APIs)
   * @throws Exception in case of errors
   */
  private void testInsertAndCleanByVersions(
      Function3<JavaRDD<WriteStatus>, HoodieWriteClient, JavaRDD<HoodieRecord>, String> insertFn,
      Function3<JavaRDD<WriteStatus>, HoodieWriteClient, JavaRDD<HoodieRecord>, String> upsertFn,
      boolean isPreppedAPI
  ) throws Exception {
    int maxVersions = 2; // keep upto 2 versions for each file
    HoodieWriteConfig cfg = getConfigBuilder().withCompactionConfig(
        HoodieCompactionConfig.newBuilder().withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS)
            .retainFileVersions(maxVersions).build())
        .withParallelism(1, 1).withBulkInsertParallelism(1)
        .withFinalizeWriteParallelism(1).withConsistencyCheckEnabled(true)
        .build();
    HoodieWriteClient client = new HoodieWriteClient(jsc, cfg);

    // Record generators, optionally wrapped to pre-tag records when testing
    // the prepped APIs.
    final Function2<List<HoodieRecord>, String, Integer> recordInsertGenWrappedFunction =
        generateWrapRecordsFn(isPreppedAPI, cfg, dataGen::generateInserts);
    final Function2<List<HoodieRecord>, String, Integer> recordUpsertGenWrappedFunction =
        generateWrapRecordsFn(isPreppedAPI, cfg, dataGen::generateUniqueUpdates);

    insertFirstBigBatchForClientCleanerTest(cfg, client, recordInsertGenWrappedFunction, insertFn);

    // Pick the first file-group found (with its latest file-slice) and mark
    // it for compaction; files selected for compaction must survive cleaning
    // regardless of the version limit.
    Map<HoodieFileGroupId, FileSlice> compactionFileIdToLatestFileSlice = new HashMap<>();
    HoodieTableMetaClient metadata = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
    HoodieTable table = HoodieTable.getHoodieTable(metadata, getConfig(), jsc);
    for (String partitionPath : dataGen.getPartitionPaths()) {
      TableFileSystemView fsView = table.getFileSystemView();
      Optional<Boolean> added = fsView.getAllFileGroups(partitionPath).findFirst()
          .map(fg -> {
            fg.getLatestFileSlice().map(fs -> compactionFileIdToLatestFileSlice.put(fg.getFileGroupId(), fs));
            return true;
          });
      if (added.isPresent()) {
        // Select only one file-group for compaction
        break;
      }
    }

    // Create workload with selected file-slices
    List<Pair<String, FileSlice>> partitionFileSlicePairs = compactionFileIdToLatestFileSlice.entrySet().stream()
        .map(e -> Pair.of(e.getKey().getPartitionPath(), e.getValue())).collect(Collectors.toList());
    HoodieCompactionPlan compactionPlan =
        CompactionUtils.buildFromFileSlices(partitionFileSlicePairs, Optional.empty(), Optional.empty());
    List<String> instantTimes = HoodieTestUtils.monotonicIncreasingCommitTimestamps(9, 1);
    // The first instant is consumed by the compaction request; the remaining
    // instants drive the upsert commits below.
    String compactionTime = instantTimes.get(0);
    table.getActiveTimeline().saveToCompactionRequested(
        new HoodieInstant(State.REQUESTED, COMPACTION_ACTION, compactionTime),
        AvroUtils.serializeCompactionPlan(compactionPlan));
    instantTimes = instantTimes.subList(1, instantTimes.size());
    // Keep doing some writes and clean inline. Make sure we have expected number of files
    // remaining.
    for (String newInstantTime : instantTimes) {
      try {
        client.startCommitWithTime(newInstantTime);
        List<HoodieRecord> records = recordUpsertGenWrappedFunction.apply(newInstantTime, 100);

        List<WriteStatus> statuses =
            upsertFn.apply(client, jsc.parallelize(records, 1), newInstantTime).collect();
        // Verify there are no errors
        assertNoWriteErrors(statuses);

        metadata = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
        table = HoodieTable.getHoodieTable(metadata, getConfig(), jsc);
        HoodieTimeline timeline = table.getMetaClient().getCommitsTimeline();

        TableFileSystemView fsView = table.getFileSystemView();
        // Need to ensure the following
        for (String partitionPath : dataGen.getPartitionPaths()) {
          // compute all the versions of all files, from time 0
          HashMap<String, TreeSet<String>> fileIdToVersions = new HashMap<>();
          for (HoodieInstant entry : timeline.getInstants().collect(Collectors.toList())) {
            HoodieCommitMetadata commitMetadata = HoodieCommitMetadata
                .fromBytes(timeline.getInstantDetails(entry).get(), HoodieCommitMetadata.class);

            for (HoodieWriteStat wstat : commitMetadata.getWriteStats(partitionPath)) {
              if (!fileIdToVersions.containsKey(wstat.getFileId())) {
                fileIdToVersions.put(wstat.getFileId(), new TreeSet<>());
              }
              fileIdToVersions.get(wstat.getFileId()).add(FSUtils.getCommitTime(new Path(wstat.getPath()).getName()));
            }
          }

          List<HoodieFileGroup> fileGroups = fsView.getAllFileGroups(partitionPath).collect(Collectors.toList());

          for (HoodieFileGroup fileGroup : fileGroups) {
            if (compactionFileIdToLatestFileSlice.containsKey(fileGroup.getFileGroupId())) {
              // Ensure latest file-slice selected for compaction is retained
              Optional<HoodieDataFile> dataFileForCompactionPresent =
                  fileGroup.getAllDataFiles().filter(df -> {
                    return compactionFileIdToLatestFileSlice.get(fileGroup.getFileGroupId())
                        .getBaseInstantTime().equals(df.getCommitTime());
                  }).findAny();
              Assert.assertTrue("Data File selected for compaction is retained",
                  dataFileForCompactionPresent.isPresent());
            } else {
              // file has no more than max versions
              String fileId = fileGroup.getFileGroupId().getFileId();
              List<HoodieDataFile> dataFiles = fileGroup.getAllDataFiles().collect(Collectors.toList());

              assertTrue("fileId " + fileId + " has more than " + maxVersions + " versions",
                  dataFiles.size() <= maxVersions);

              // Each file, has the latest N versions (i.e cleaning gets rid of older versions)
              List<String> commitedVersions = new ArrayList<>(fileIdToVersions.get(fileId));
              for (int i = 0; i < dataFiles.size(); i++) {
                assertEquals("File " + fileId + " does not have latest versions on commits" + commitedVersions,
                    Iterables.get(dataFiles, i).getCommitTime(), commitedVersions.get(commitedVersions.size() - 1 - i));
              }
            }
          }
        }
      } catch (IOException ioe) {
        throw new RuntimeException(ioe);
      }
    }
  }
/**
* Test Clean-By-Versions using insert/upsert API
*/
@Test
public void testInsertAndCleanByCommits() throws Exception {
testInsertAndCleanByCommits(HoodieWriteClient::insert, HoodieWriteClient::upsert, false);
}
/**
* Test Clean-By-Versions using prepped version of insert/upsert API
*/
@Test
public void testInsertPreppedAndCleanByCommits() throws Exception {
testInsertAndCleanByCommits(HoodieWriteClient::insertPreppedRecords,
HoodieWriteClient::upsertPreppedRecords, true);
}
/**
* Test Clean-By-Versions using prepped versions of bulk-insert/upsert API
*/
@Test
public void testBulkInsertPreppedAndCleanByCommits() throws Exception {
testInsertAndCleanByCommits(
(client, recordRDD, commitTime) -> client.bulkInsertPreppedRecords(recordRDD, commitTime, Option.empty()),
HoodieWriteClient::upsertPreppedRecords, true);
}
/**
* Test Clean-By-Versions using bulk-insert/upsert API
*/
@Test
public void testBulkInsertAndCleanByCommits() throws Exception {
testInsertAndCleanByCommits(HoodieWriteClient::bulkInsert, HoodieWriteClient::upsert, false);
}
/**
* Test Helper for Cleaning by versions logic from HoodieWriteClient API perspective
*
* @param insertFn Insert API to be tested
* @param upsertFn Upsert API to be tested
* @param isPreppedAPI Flag to indicate if a prepped-version is used. If true, a wrapper function will be used during
* record generation to also tag the regards (de-dupe is implicit as we use uniq record-gen APIs)
* @throws Exception in case of errors
*/
private void testInsertAndCleanByCommits(
Function3<JavaRDD<WriteStatus>, HoodieWriteClient, JavaRDD<HoodieRecord>, String> insertFn,
Function3<JavaRDD<WriteStatus>, HoodieWriteClient, JavaRDD<HoodieRecord>, String> upsertFn,
boolean isPreppedAPI
) throws Exception {
int maxCommits = 3; // keep upto 3 commits from the past
HoodieWriteConfig cfg = getConfigBuilder().withCompactionConfig(
HoodieCompactionConfig.newBuilder()
.withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainCommits(maxCommits).build())
.withParallelism(1, 1).withBulkInsertParallelism(1)
.withFinalizeWriteParallelism(1).withConsistencyCheckEnabled(true).build();
HoodieWriteClient client = new HoodieWriteClient(jsc, cfg);
final Function2<List<HoodieRecord>, String, Integer> recordInsertGenWrappedFunction =
generateWrapRecordsFn(isPreppedAPI, cfg, dataGen::generateInserts);
final Function2<List<HoodieRecord>, String, Integer> recordUpsertGenWrappedFunction =
generateWrapRecordsFn(isPreppedAPI, cfg, dataGen::generateUniqueUpdates);
insertFirstBigBatchForClientCleanerTest(cfg, client, recordInsertGenWrappedFunction, insertFn);
// Keep doing some writes and clean inline. Make sure we have expected number of files remaining.
HoodieTestUtils.monotonicIncreasingCommitTimestamps(8, 1).stream().forEach(newCommitTime -> {
try {
client.startCommitWithTime(newCommitTime);
List<HoodieRecord> records = recordUpsertGenWrappedFunction.apply(newCommitTime, 100);
List<WriteStatus> statuses =
upsertFn.apply(client, jsc.parallelize(records, 1), newCommitTime).collect();
// Verify there are no errors
assertNoWriteErrors(statuses);
HoodieTableMetaClient metadata = new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath);
HoodieTable table1 = HoodieTable.getHoodieTable(metadata, cfg, jsc);
HoodieTimeline activeTimeline = table1.getCompletedCommitsTimeline();
Optional<HoodieInstant> earliestRetainedCommit = activeTimeline.nthFromLastInstant(maxCommits - 1);
Set<HoodieInstant> acceptableCommits = activeTimeline.getInstants().collect(Collectors.toSet());
if (earliestRetainedCommit.isPresent()) {
acceptableCommits.removeAll(
activeTimeline.findInstantsInRange("000", earliestRetainedCommit.get().getTimestamp()).getInstants()
.collect(Collectors.toSet()));
acceptableCommits.add(earliestRetainedCommit.get());
}
TableFileSystemView fsView = table1.getFileSystemView();
// Need to ensure the following
for (String partitionPath : dataGen.getPartitionPaths()) {
List<HoodieFileGroup> fileGroups = fsView.getAllFileGroups(partitionPath).collect(Collectors.toList());
for (HoodieFileGroup fileGroup : fileGroups) {
Set<String> commitTimes = new HashSet<>();
fileGroup.getAllDataFiles().forEach(value -> {
logger.debug("Data File - " + value);
commitTimes.add(value.getCommitTime());
});
assertEquals("Only contain acceptable versions of file should be present",
acceptableCommits.stream().map(HoodieInstant::getTimestamp).collect(Collectors.toSet()), commitTimes);
}
}
} catch (IOException ioe) {
throw new RuntimeException(ioe);
}
});
}
  /**
   * Test HoodieTable.clean() Cleaning by versions logic:
   * with retainFileVersions(1), only the single latest committed version of
   * each file should survive a clean; uncommitted (partial) files are left
   * alone.
   */
  @Test
  public void testKeepLatestFileVersions() throws IOException {
    HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath).withAssumeDatePartitioning(true)
        .withCompactionConfig(HoodieCompactionConfig.newBuilder().withCleanerPolicy(
            HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(1).build())
        .build();

    // make 1 commit, with 1 file per partition
    HoodieTestUtils.createCommitFiles(basePath, "000");

    String file1P0C0 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "000");
    String file1P1C0 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_SECOND_PARTITION_PATH, "000");

    HoodieTable table = HoodieTable.getHoodieTable(
        new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true), config,
        jsc);

    // Only one version of each file exists, so nothing should be cleaned.
    List<HoodieCleanStat> hoodieCleanStatsOne = table.clean(jsc);
    assertEquals("Must not clean any files", 0,
        getCleanStat(hoodieCleanStatsOne, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
    assertEquals("Must not clean any files", 0,
        getCleanStat(hoodieCleanStatsOne, DEFAULT_SECOND_PARTITION_PATH).getSuccessDeleteFiles().size());
    assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0C0));
    assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_SECOND_PARTITION_PATH, "000", file1P1C0));

    // make next commit, with 1 insert & 1 update per partition
    HoodieTestUtils.createCommitFiles(basePath, "001");
    table = HoodieTable.getHoodieTable(new HoodieTableMetaClient(jsc.hadoopConfiguration(), basePath, true), config,
        jsc);

    String file2P0C1 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "001"); // insert
    String file2P1C1 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_SECOND_PARTITION_PATH, "001"); // insert
    HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0C0); // update
    HoodieTestUtils.createDataFile(basePath, DEFAULT_SECOND_PARTITION_PATH, "001", file1P1C0); // update

    // The "000" versions of the updated files are now stale and must be
    // removed; the new inserts and latest updates must survive.
    List<HoodieCleanStat> hoodieCleanStatsTwo = table.clean(jsc);
    assertEquals("Must clean 1 file", 1,
        getCleanStat(hoodieCleanStatsTwo, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
    assertEquals("Must clean 1 file", 1,
        getCleanStat(hoodieCleanStatsTwo, DEFAULT_SECOND_PARTITION_PATH).getSuccessDeleteFiles().size());
    assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file2P0C1));
    assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_SECOND_PARTITION_PATH, "001", file2P1C1));
    assertFalse(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0C0));
    assertFalse(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_SECOND_PARTITION_PATH, "000", file1P1C0));

    // make next commit, with 2 updates to existing files, and 1 insert
    HoodieTestUtils.createCommitFiles(basePath, "002");
    table = HoodieTable.getHoodieTable(new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true),
        config, jsc);

    HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file1P0C0); // update
    HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file2P0C1); // update
    String file3P0C2 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "002");

    // Both "001" versions in the first partition are superseded by "002".
    List<HoodieCleanStat> hoodieCleanStatsThree = table.clean(jsc);
    assertEquals("Must clean two files", 2,
        getCleanStat(hoodieCleanStatsThree, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
    assertFalse(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0C0));
    assertFalse(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file2P0C1));
    assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file3P0C2));

    // No cleaning on partially written file, with no commit.
    HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "003", file3P0C2); // update
    List<HoodieCleanStat> hoodieCleanStatsFour = table.clean(jsc);
    assertEquals("Must not clean any files", 0,
        getCleanStat(hoodieCleanStatsFour, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
    assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file3P0C2));
  }
/**
 * Test HoodieTable.clean() Cleaning by versions logic for MOR table with Log files.
 * With retainFileVersions(1), only the latest file slice of each file group survives.
 */
@Test
public void testKeepLatestFileVersionsMOR() throws IOException {
  HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath).withAssumeDatePartitioning(true)
      .withCompactionConfig(HoodieCompactionConfig.newBuilder().withCleanerPolicy(
          HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS).retainFileVersions(1).build())
      .build();
  HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath,
      HoodieTableType.MERGE_ON_READ);
  // Make 3 files, one base file and 2 log files associated with base file
  String file1P0 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "000");
  String file2P0L0 = HoodieTestUtils
      .createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0, Optional.empty());
  // Second log version; the original stored the result in a never-read local (file2P0L1).
  HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0, Optional.of(2));
  // make 1 compaction commit
  HoodieTestUtils.createCompactionCommitFiles(fs, basePath, "000");
  // Make 4 files, one base file and 3 log files associated with base file
  HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0);
  // The first two "001" log-file assignments in the original were dead stores
  // (overwritten before any read); only the last one is kept for the asserts below.
  HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0, Optional.empty());
  HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0, Optional.of(2));
  file2P0L0 = HoodieTestUtils
      .createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0, Optional.of(3));
  // make 1 compaction commit
  HoodieTestUtils.createCompactionCommitFiles(fs, basePath, "001");
  HoodieTable table = HoodieTable.getHoodieTable(metaClient, config, jsc);
  List<HoodieCleanStat> hoodieCleanStats = table.clean(jsc);
  // The whole "000" slice (1 parquet + 2 logs) must be removed; only "001" is retained.
  assertEquals("Must clean three files, one parquet and 2 log files", 3,
      getCleanStat(hoodieCleanStats, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertFalse(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0));
  assertFalse(
      HoodieTestUtils.doesLogFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file2P0L0, Optional.empty()));
  assertFalse(
      HoodieTestUtils.doesLogFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file2P0L0, Optional.of(2)));
}
/**
 * Test HoodieTable.clean() cleaning-by-commits logic (KEEP_LATEST_COMMITS).
 * <p>
 * NOTE(review): the original javadoc said "for MOR table with Log files", but this test
 * never initializes a MOR table nor creates log files; it exercises plain data files.
 */
@Test
public void testKeepLatestCommits() throws IOException {
  // Retain file versions needed to serve the 2 most recent commits.
  HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath).withAssumeDatePartitioning(true)
      .withCompactionConfig(HoodieCompactionConfig.newBuilder().withCleanerPolicy(
          HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(2).build()).build();
  // make 1 commit, with 1 file per partition
  HoodieTestUtils.createCommitFiles(basePath, "000");
  String file1P0C0 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "000");
  String file1P1C0 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_SECOND_PARTITION_PATH, "000");
  HoodieTable table = HoodieTable.getHoodieTable(
      new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true), config,
      jsc);
  List<HoodieCleanStat> hoodieCleanStatsOne = table.clean(jsc);
  assertEquals("Must not clean any files", 0,
      getCleanStat(hoodieCleanStatsOne, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertEquals("Must not clean any files", 0,
      getCleanStat(hoodieCleanStatsOne, DEFAULT_SECOND_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0C0));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_SECOND_PARTITION_PATH, "000", file1P1C0));
  // make next commit, with 1 insert & 1 update per partition
  HoodieTestUtils.createCommitFiles(basePath, "001");
  table = HoodieTable.getHoodieTable(new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true),
      config, jsc);
  String file2P0C1 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "001"); // insert
  String file2P1C1 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_SECOND_PARTITION_PATH, "001"); // insert
  HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0C0); // update
  HoodieTestUtils.createDataFile(basePath, DEFAULT_SECOND_PARTITION_PATH, "001", file1P1C0); // update
  List<HoodieCleanStat> hoodieCleanStatsTwo = table.clean(jsc);
  assertEquals("Must not clean any files", 0,
      getCleanStat(hoodieCleanStatsTwo, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertEquals("Must not clean any files", 0,
      getCleanStat(hoodieCleanStatsTwo, DEFAULT_SECOND_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file2P0C1));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_SECOND_PARTITION_PATH, "001", file2P1C1));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0C0));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_SECOND_PARTITION_PATH, "000", file1P1C0));
  // make next commit, with 2 updates to existing files, and 1 insert
  HoodieTestUtils.createCommitFiles(basePath, "002");
  table = HoodieTable.getHoodieTable(new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true),
      config, jsc);
  HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file1P0C0); // update
  HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file2P0C1); // update
  String file3P0C2 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "002");
  List<HoodieCleanStat> hoodieCleanStatsThree = table.clean(jsc);
  assertEquals("Must not clean any file. We have to keep 1 version before the latest commit time to keep", 0,
      getCleanStat(hoodieCleanStatsThree, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0C0));
  // make next commit, with 2 updates to existing files, and 1 insert
  HoodieTestUtils.createCommitFiles(basePath, "003");
  table = HoodieTable.getHoodieTable(new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true),
      config, jsc);
  HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "003", file1P0C0); // update
  HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "003", file2P0C1); // update
  String file4P0C3 = HoodieTestUtils.createNewDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "003");
  List<HoodieCleanStat> hoodieCleanStatsFour = table.clean(jsc);
  // Fixed assertion message: the original said "Must not clean one old file" while
  // asserting that exactly one file WAS cleaned.
  assertEquals("Must clean one old file", 1,
      getCleanStat(hoodieCleanStatsFour, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertFalse(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "000", file1P0C0));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0C0));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file1P0C0));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file2P0C1));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file2P0C1));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "002", file3P0C2));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "003", file4P0C3));
  // No cleaning on partially written file, with no commit.
  HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, "004", file3P0C2); // update
  List<HoodieCleanStat> hoodieCleanStatsFive = table.clean(jsc);
  assertEquals("Must not clean any files", 0,
      getCleanStat(hoodieCleanStatsFive, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file1P0C0));
  assertTrue(HoodieTestUtils.doesDataFileExist(basePath, DEFAULT_FIRST_PARTITION_PATH, "001", file2P0C1));
}
/**
 * Test Cleaning functionality of table.rollback() API.
 *
 * Verifies that rollback removes files under the temp folder only when the
 * write config enables the temp-folder copy-on-write path for creates.
 */
@Test
public void testCleanTemporaryDataFilesOnRollback() throws IOException {
// Seed a commit and 10 files in the table's temp folder.
HoodieTestUtils.createCommitFiles(basePath, "000");
List<String> tempFiles = createTempFiles("000", 10);
assertEquals("Some temp files are created.", 10, tempFiles.size());
assertEquals("Some temp files are created.", tempFiles.size(), getTotalTempFiles());
// First pass: temp-folder usage disabled for both create and merge.
HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath)
.withUseTempFolderCopyOnWriteForCreate(false)
.withUseTempFolderCopyOnWriteForMerge(false).build();
HoodieTable table = HoodieTable.getHoodieTable(
new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true), config,
jsc);
table.rollback(jsc, Collections.emptyList(), true);
// With temp-folder usage disabled, rollback must leave the temp files alone.
assertEquals("Some temp files are created.", tempFiles.size(), getTotalTempFiles());
// Second pass: temp-folder usage enabled for creates.
config = HoodieWriteConfig.newBuilder().withPath(basePath).withUseTempFolderCopyOnWriteForCreate(true)
.withUseTempFolderCopyOnWriteForMerge(false).build();
table = HoodieTable.getHoodieTable(new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true),
config, jsc);
table.rollback(jsc, Collections.emptyList(), true);
// Now rollback is expected to purge the temp folder completely.
assertEquals("All temp files are deleted.", 0, getTotalTempFiles());
}
/**
 * Test cleaner stats when there are no partition paths.
 *
 * A table may legitimately contain only commit metadata and no data or
 * partition paths; cleaning such a table must yield an empty stats list.
 */
@Test
public void testCleaningWithZeroPartitonPaths() throws IOException {
  HoodieWriteConfig writeConfig = HoodieWriteConfig.newBuilder()
      .withPath(basePath)
      .withAssumeDatePartitioning(true)
      .withCompactionConfig(
          HoodieCompactionConfig.newBuilder()
              .withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS)
              .retainCommits(2)
              .build())
      .build();
  // Make a commit, although there are no partitionPaths. Example use-case of this is
  // when a client wants to create a table with just some commit metadata, but no
  // data/partitionPaths.
  HoodieTestUtils.createCommitFiles(basePath, "000");
  HoodieTable hoodieTable = HoodieTable.getHoodieTable(
      new HoodieTableMetaClient(jsc.hadoopConfiguration(), writeConfig.getBasePath(), true), writeConfig, jsc);
  List<HoodieCleanStat> cleanStats = hoodieTable.clean(jsc);
  assertTrue("HoodieCleanStats should be empty for a table with empty partitionPaths", cleanStats.isEmpty());
}
/**
 * Test Clean-by-commits behavior in the presence of skewed partitions.
 *
 * One partition has 10x the files of the other two; the cleaner's repartitioning
 * should still spread the delete work evenly across Spark tasks.
 */
@Test
public void testCleaningSkewedPartitons() throws IOException {
  HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath).withAssumeDatePartitioning(true)
      .withCompactionConfig(HoodieCompactionConfig.newBuilder().withCleanerPolicy(
          HoodieCleaningPolicy.KEEP_LATEST_COMMITS).retainCommits(2).build()).build();
  Map<Long, Long> stageOneShuffleReadTaskRecordsCountMap = new HashMap<>();
  // Since clean involves repartition in order to uniformly distribute data,
  // we can inspect the number of records read by various tasks in stage 1.
  // There should not be skew in the number of records read in the task.
  // SparkListener below listens to the stage end events and captures number of
  // records read by various tasks in stage-1.
  jsc.sc().addSparkListener(new SparkListener() {
    @Override
    public void onTaskEnd(SparkListenerTaskEnd taskEnd) {
      Iterator<AccumulatorV2<?, ?>> iterator = taskEnd.taskMetrics().accumulators().iterator();
      while (iterator.hasNext()) {
        // Wildcard type instead of the raw AccumulatorV2 the original used.
        AccumulatorV2<?, ?> accumulator = iterator.next();
        if (taskEnd.stageId() == 1 && accumulator.isRegistered() && accumulator.name().isDefined()
            && accumulator.name().get().equals("internal.metrics.shuffle.read.recordsRead")) {
          stageOneShuffleReadTaskRecordsCountMap.put(taskEnd.taskInfo().taskId(), (Long) accumulator.value());
        }
      }
    }
  });
  // make 1 commit, with 100 files in one partition and 10 in other two
  HoodieTestUtils.createCommitFiles(basePath, "000");
  List<String> filesP0C0 = createFilesInPartition(DEFAULT_FIRST_PARTITION_PATH, "000", 100);
  List<String> filesP1C0 = createFilesInPartition(DEFAULT_SECOND_PARTITION_PATH, "000", 10);
  List<String> filesP2C0 = createFilesInPartition(DEFAULT_THIRD_PARTITION_PATH, "000", 10);
  // Three more commits, each updating every file in all partitions.
  HoodieTestUtils.createCommitFiles(basePath, "001");
  updateAllFilesInPartition(filesP0C0, DEFAULT_FIRST_PARTITION_PATH, "001");
  updateAllFilesInPartition(filesP1C0, DEFAULT_SECOND_PARTITION_PATH, "001");
  updateAllFilesInPartition(filesP2C0, DEFAULT_THIRD_PARTITION_PATH, "001");
  HoodieTestUtils.createCommitFiles(basePath, "002");
  updateAllFilesInPartition(filesP0C0, DEFAULT_FIRST_PARTITION_PATH, "002");
  updateAllFilesInPartition(filesP1C0, DEFAULT_SECOND_PARTITION_PATH, "002");
  updateAllFilesInPartition(filesP2C0, DEFAULT_THIRD_PARTITION_PATH, "002");
  HoodieTestUtils.createCommitFiles(basePath, "003");
  updateAllFilesInPartition(filesP0C0, DEFAULT_FIRST_PARTITION_PATH, "003");
  updateAllFilesInPartition(filesP1C0, DEFAULT_SECOND_PARTITION_PATH, "003");
  updateAllFilesInPartition(filesP2C0, DEFAULT_THIRD_PARTITION_PATH, "003");
  HoodieTable table = HoodieTable.getHoodieTable(
      new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true), config,
      jsc);
  List<HoodieCleanStat> hoodieCleanStats = table.clean(jsc);
  assertEquals(100, getCleanStat(hoodieCleanStats, DEFAULT_FIRST_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertEquals(10, getCleanStat(hoodieCleanStats, DEFAULT_SECOND_PARTITION_PATH).getSuccessDeleteFiles().size());
  assertEquals(10, getCleanStat(hoodieCleanStats, DEFAULT_THIRD_PARTITION_PATH).getSuccessDeleteFiles().size());
  // 3 tasks are expected since the number of partitions is 3
  assertEquals(3, stageOneShuffleReadTaskRecordsCountMap.keySet().size());
  // Sum of all records processed = total number of files to clean.
  // mapToLong().sum() replaces the original reduce(...).get().intValue(), avoiding the
  // unchecked Optional.get() on a possibly-empty stream.
  assertEquals(120,
      stageOneShuffleReadTaskRecordsCountMap.values().stream().mapToLong(Long::longValue).sum());
  assertTrue("The skew in handling files to clean is not removed. "
      + "Each task should handle more records than the partitionPath with least files "
      + "and less records than the partitionPath with most files.",
      stageOneShuffleReadTaskRecordsCountMap.values().stream().filter(a -> a > 10 && a < 100).count() == 3);
}
/**
 * Test Keep Latest Commits cleaning policy when there are pending compactions.
 */
@Test
public void testKeepLatestCommitsWithPendingCompactions() throws IOException {
  // Clean by commits, retaining the 2 most recent ones.
  HoodieWriteConfig writeConfig = HoodieWriteConfig.newBuilder()
      .withPath(basePath)
      .withAssumeDatePartitioning(true)
      .withCompactionConfig(
          HoodieCompactionConfig.newBuilder()
              .withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_COMMITS)
              .retainCommits(2)
              .build())
      .build();
  // Expected deletions per file group:
  // . FileId Parquet Logs Total Retained Commits
  // FileId7 5 10 15 009, 011
  // FileId6 5 10 15 009
  // FileId5 3 6 9 005
  // FileId4 2 4 6 003
  // FileId3 1 2 3 001
  // FileId2 0 0 0 000
  // FileId1 0 0 0 000
  testPendingCompactions(writeConfig, 48, 18);
}
/**
 * Test Keep Latest Versions cleaning policy when there are pending compactions.
 */
@Test
public void testKeepLatestVersionsWithPendingCompactions() throws IOException {
  // Clean by file versions, retaining the 2 most recent versions per file group.
  HoodieWriteConfig writeConfig = HoodieWriteConfig.newBuilder()
      .withPath(basePath)
      .withAssumeDatePartitioning(true)
      .withCompactionConfig(
          HoodieCompactionConfig.newBuilder()
              .withCleanerPolicy(HoodieCleaningPolicy.KEEP_LATEST_FILE_VERSIONS)
              .retainFileVersions(2)
              .build())
      .build();
  // Expected deletions per file group:
  // . FileId Parquet Logs Total Retained Commits
  // FileId7 5 10 15 009, 011
  // FileId6 4 8 12 007, 009
  // FileId5 2 4 6 003 005
  // FileId4 1 2 3 001, 003
  // FileId3 0 0 0 000, 001
  // FileId2 0 0 0 000
  // FileId1 0 0 0 000
  testPendingCompactions(writeConfig, 36, 9);
}
/**
 * Common test method for validating that the cleaner respects pending compactions.
 *
 * Builds 7 file groups with varying numbers of file slices; file groups 2-5 are put
 * under pending compactions. Runs a clean, then checks that every file slice scheduled
 * for compaction survives, and that the expected number of files was deleted overall.
 *
 * @param config Hoodie Write Config
 * @param expNumFilesDeleted Number of files deleted
 * @param expNumFilesUnderCompactionDeleted Number of deleted files belonging to file
 *        groups that have a pending compaction
 */
public void testPendingCompactions(HoodieWriteConfig config, int expNumFilesDeleted,
int expNumFilesUnderCompactionDeleted) throws IOException {
HoodieTableMetaClient metaClient = HoodieTestUtils.initTableType(jsc.hadoopConfiguration(), basePath,
HoodieTableType.MERGE_ON_READ);
// Delta-commit instants; compaction instants interleave between them.
String[] instants = new String[]{"000", "001", "003", "005", "007", "009", "011", "013"};
String[] compactionInstants = new String[]{"002", "004", "006", "008", "010"};
// fileId -> compaction instant it is expected to be pending under.
Map<String, String> expFileIdToPendingCompaction = new HashMap<>();
// fileId -> latest commit instant written for that file group before its compaction.
Map<String, String> fileIdToLatestInstantBeforeCompaction = new HashMap<>();
// compaction instant -> file slices scheduled for that compaction.
Map<String, List<FileSlice>> compactionInstantsToFileSlices = new HashMap<>();
for (String instant : instants) {
HoodieTestUtils.createCommitFiles(basePath, instant);
}
// Generate 7 file-groups. First one has only one slice and no pending compaction. File Slices (2 - 5) has
// multiple versions with pending compaction. File Slices (6 - 7) have multiple file-slices but not under
// compactions
// FileIds 2-5 will be under compaction
int maxNumFileIds = 7;
String[] fileIds = new String[]
{"fileId1", "fileId2", "fileId3", "fileId4", "fileId5", "fileId6", "fileId7"};
int maxNumFileIdsForCompaction = 4;
for (int i = 0; i < maxNumFileIds; i++) {
// Base slice at the first instant: one data file plus two log files per file group.
final String fileId = HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, instants[0],
fileIds[i]);
HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, instants[0],
fileId, Optional.empty());
HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, instants[0],
fileId, Optional.of(2));
fileIdToLatestInstantBeforeCompaction.put(fileId, instants[0]);
for (int j = 1; j <= i; j++) {
// The i-th (last) slice of file groups 2-5 is marked as pending compaction.
if (j == i && j <= maxNumFileIdsForCompaction) {
expFileIdToPendingCompaction.put(fileId, compactionInstants[j]);
HoodieTable table = HoodieTable.getHoodieTable(
new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true), config,
jsc);
// Capture the latest file slice of this group so it can be put into a compaction plan.
FileSlice slice = table.getRTFileSystemView().getLatestFileSlices(DEFAULT_FIRST_PARTITION_PATH)
.filter(fs -> fs.getFileId().equals(fileId)).findFirst().get();
List<FileSlice> slices = new ArrayList<>();
if (compactionInstantsToFileSlices.containsKey(compactionInstants[j])) {
slices = compactionInstantsToFileSlices.get(compactionInstants[j]);
}
slices.add(slice);
compactionInstantsToFileSlices.put(compactionInstants[j], slices);
// Add log-files to simulate delta-commits after pending compaction
HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, compactionInstants[j],
fileId, Optional.empty());
HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, compactionInstants[j],
fileId, Optional.of(2));
} else {
// Ordinary committed slice: a new data file version plus two log files.
HoodieTestUtils.createDataFile(basePath, DEFAULT_FIRST_PARTITION_PATH, instants[j], fileId);
HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, instants[j], fileId,
Optional.empty());
HoodieTestUtils.createNewLogFile(fs, basePath, DEFAULT_FIRST_PARTITION_PATH, instants[j], fileId,
Optional.of(2));
fileIdToLatestInstantBeforeCompaction.put(fileId, instants[j]);
}
}
}
// Setup pending compaction plans
for (String instant : compactionInstants) {
List<FileSlice> fileSliceList = compactionInstantsToFileSlices.get(instant);
if (null != fileSliceList) {
HoodieTestUtils.createCompactionRequest(metaClient, instant,
fileSliceList.stream().map(fs -> Pair.of(DEFAULT_FIRST_PARTITION_PATH, fs)).collect(Collectors.toList()));
}
}
// Clean now
HoodieTable table = HoodieTable.getHoodieTable(
new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true), config,
jsc);
List<HoodieCleanStat> hoodieCleanStats = table.clean(jsc);
// Test for safety
// Every file slice scheduled for compaction (its data file and both log files) must
// still be present after the clean.
final HoodieTable hoodieTable = HoodieTable.getHoodieTable(
new HoodieTableMetaClient(jsc.hadoopConfiguration(), config.getBasePath(), true), config,
jsc);
expFileIdToPendingCompaction.entrySet().stream().forEach(entry -> {
String fileId = entry.getKey();
String baseInstantForCompaction = fileIdToLatestInstantBeforeCompaction.get(fileId);
Optional<FileSlice> fileSliceForCompaction =
hoodieTable.getRTFileSystemView().getLatestFileSlicesBeforeOrOn(DEFAULT_FIRST_PARTITION_PATH,
baseInstantForCompaction).filter(fs -> fs.getFileId().equals(fileId)).findFirst();
Assert.assertTrue("Base Instant for Compaction must be preserved", fileSliceForCompaction.isPresent());
Assert.assertTrue("FileSlice has data-file", fileSliceForCompaction.get().getDataFile().isPresent());
Assert.assertEquals("FileSlice has log-files", 2,
fileSliceForCompaction.get().getLogFiles().count());
});
// Test for progress (Did we clean some files ?)
// Count deletions that hit file groups with pending compactions; each such deleted
// file's commit time must be older than that group's latest instant before compaction.
long numFilesUnderCompactionDeleted =
hoodieCleanStats.stream().flatMap(cleanStat -> {
return convertPathToFileIdWithCommitTime(metaClient, cleanStat.getDeletePathPatterns()).map(
fileIdWithCommitTime -> {
if (expFileIdToPendingCompaction.containsKey(fileIdWithCommitTime.getKey())) {
Assert.assertTrue("Deleted instant time must be less than pending compaction",
HoodieTimeline.compareTimestamps(
fileIdToLatestInstantBeforeCompaction.get(fileIdWithCommitTime.getKey()),
fileIdWithCommitTime.getValue(), GREATER));
return true;
}
return false;
});
}).filter(x -> x).count();
long numDeleted = hoodieCleanStats.stream()
.flatMap(cleanStat -> cleanStat.getDeletePathPatterns().stream()).count();
// Tighter check for regression
Assert.assertEquals("Correct number of files deleted", expNumFilesDeleted, numDeleted);
Assert.assertEquals("Correct number of files under compaction deleted",
expNumFilesUnderCompactionDeleted, numFilesUnderCompactionDeleted);
}
/**
 * Utility method to create temporary data files under the table's temp folder.
 *
 * @param commitTime Commit Timestamp
 * @param numFiles Number of files to be generated
 * @return names of the generated files
 * @throws IOException in case of error
 */
private List<String> createTempFiles(String commitTime, int numFiles) throws IOException {
  List<String> created = new ArrayList<>(numFiles);
  int remaining = numFiles;
  while (remaining-- > 0) {
    created.add(HoodieTestUtils.createNewDataFile(basePath, HoodieTableMetaClient.TEMPFOLDER_NAME, commitTime));
  }
  return created;
}
/**
 * Helper method to return the temporary-file count.
 *
 * @return Number of files currently present in the table's temp folder
 * @throws IOException in case of error
 */
private int getTotalTempFiles() throws IOException {
  Path tempFolder = new Path(basePath, HoodieTableMetaClient.TEMPFOLDER_NAME);
  return fs.listStatus(tempFolder).length;
}
/**
 * Maps each deleted path to a (fileId, commitTime) pair, handling both
 * read-optimized (data) files and realtime log files.
 *
 * @param metaClient meta client used to look up the table's file extensions
 * @param paths file paths reported as deleted by the cleaner
 * @return a stream of (fileId, commitTime) pairs, data files first, then log files
 */
private Stream<Pair<String, String>> convertPathToFileIdWithCommitTime(
    final HoodieTableMetaClient metaClient, List<String> paths) {
  // The two file extensions distinguish data files from log files.
  String roExtension = metaClient.getTableConfig().getROFileFormat().getFileExtension();
  String rtExtension = metaClient.getTableConfig().getRTFileFormat().getFileExtension();
  Stream<Pair<String, String>> dataFilePairs = paths.stream()
      .filter(p -> p.contains(roExtension))
      .map(fullPath -> {
        String fileName = Paths.get(fullPath).getFileName().toString();
        return Pair.of(FSUtils.getFileId(fileName), FSUtils.getCommitTime(fileName));
      });
  Stream<Pair<String, String>> logFilePairs = paths.stream()
      .filter(p -> p.contains(rtExtension))
      .map(p -> Pair.of(FSUtils.getFileIdFromLogPath(new Path(p)),
          FSUtils.getBaseCommitTimeFromLogPath(new Path(p))));
  return Stream.concat(dataFilePairs, logFilePairs);
}
}
| |
// -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the MIT License https://raw.github.com/mit-cml/app-inventor/master/mitlicense.txt
package com.google.appinventor.components.scripts;
import com.google.appinventor.components.annotations.DesignerComponent;
import com.google.appinventor.components.annotations.DesignerProperty;
import com.google.appinventor.components.annotations.PropertyCategory;
import com.google.appinventor.components.annotations.SimpleEvent;
import com.google.appinventor.components.annotations.SimpleFunction;
import com.google.appinventor.components.annotations.SimpleObject;
import com.google.appinventor.components.annotations.SimpleProperty;
import com.google.appinventor.components.annotations.UsesLibraries;
import com.google.appinventor.components.annotations.UsesPermissions;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.io.Writer;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import javax.annotation.processing.AbstractProcessor;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.annotation.processing.RoundEnvironment;
import javax.lang.model.element.AnnotationMirror;
import javax.lang.model.element.Element;
import javax.lang.model.element.ElementKind;
import javax.lang.model.element.ExecutableElement;
import javax.lang.model.element.Modifier;
import javax.lang.model.element.TypeElement;
import javax.lang.model.element.VariableElement;
import javax.lang.model.type.DeclaredType;
import javax.lang.model.type.ExecutableType;
import javax.lang.model.type.TypeKind;
import javax.lang.model.type.TypeMirror;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
import javax.tools.Diagnostic;
import javax.tools.FileObject;
import javax.tools.StandardLocation;
/**
* Processor for generating output files based on the annotations and
* javadoc in the component source code.
* <p>
* Specifically, this reads over the source files, building up a representation
* of components and their designer properties, properties, methods, and
* events. Concrete subclasses implement the method {@link #outputResults()}
* to generate output.
* <p>
* Currently, the following annotations are used:
* <ul>
* <li> {@link DesignerComponent} and {@link SimpleObject} to identify
* components. Subclasses can distinguish between the two through
* the boolean fields
* {@link ComponentProcessor.ComponentInfo#designerComponent} and
* {@link ComponentProcessor.ComponentInfo#simpleObject}.
* <li> {@link DesignerProperty} to identify designer properties.
* <li> {@link SimpleProperty} to identify properties.
* <li> {@link SimpleFunction} to identify methods.
* <li> {@link SimpleEvent} to identify events.
* </ul>
*
* @author spertus@google.com (Ellen Spertus)
*/
public abstract class ComponentProcessor extends AbstractProcessor {
// NOTE(review): presumably the package for generated output files ("" = unnamed
// package) — confirm against the outputResults() implementations.
private static final String OUTPUT_PACKAGE = "";
// Returned by getSupportedAnnotationTypes()
private static final Set<String> SUPPORTED_ANNOTATION_TYPES = ImmutableSet.of(
"com.google.appinventor.components.annotations.DesignerComponent",
"com.google.appinventor.components.annotations.DesignerProperty",
"com.google.appinventor.components.annotations.SimpleEvent",
"com.google.appinventor.components.annotations.SimpleFunction",
"com.google.appinventor.components.annotations.SimpleObject",
"com.google.appinventor.components.annotations.SimpleProperty");
// Returned by getRwString()
private static final String READ_WRITE = "read-write";
private static final String READ_ONLY = "read-only";
private static final String WRITE_ONLY = "write-only";
// The next two fields are set in init().
/**
 * A handle allowing access to facilities provided by the annotation
 * processing tool framework
 */
private Elements elementUtils;
// Utility methods for operating on types; also set in init().
private Types typeUtils;
/**
 * Produced through {@link ProcessingEnvironment#getMessager()} and
 * used for outputting errors and warnings.
 */
// Set in process()
protected Messager messager;
/**
 * Indicates which pass is being performed by the Java annotation processor
 */
private int pass = 0;
/**
 * Information about every App Inventor component. Keys are fully-qualified names
 * (such as "com.google.appinventor.components.runtime.components.android.Label"), and
 * values are the corresponding {@link ComponentProcessor.ComponentInfo} objects.
 * This is constructed by {@link #process} for use in {@link #outputResults()}.
 */
protected final SortedMap<String, ComponentInfo> components = Maps.newTreeMap();
// Names of the component types seen by the processor; populated elsewhere in this class.
private final List<String> componentTypes = Lists.newArrayList();
/**
 * Represents a parameter consisting of a name and a type. The type is a
 * String representation of the java type, such as "int", "double", or
 * "java.lang.String".
 */
protected final class Parameter {
  /**
   * The parameter name
   */
  protected final String name;

  /**
   * The parameter's Java type, such as "int" or "java.lang.String".
   */
  protected final String type;

  /**
   * Constructs a Parameter.
   *
   * @param name the parameter name
   * @param type the parameter's Java type (such as "int" or "java.lang.String")
   */
  protected Parameter(String name, String type) {
    this.name = name;
    this.type = type;
  }

  /**
   * Provides a Yail type for a given parameter type. This is useful because
   * the parameter types used for {@link Event} are Simple types (e.g.,
   * "Single"), while the parameter types used for {@link Method} are
   * Java types (e.g., "int").
   *
   * @param parameter a parameter
   * @return the string representation of the corresponding Yail type
   * @throws RuntimeException if {@code parameter} does not have a
   *         corresponding Yail type
   */
  protected String parameterToYailType(Parameter parameter) {
    // Convert the argument's type, as this method's javadoc documents. The previous
    // implementation silently ignored {@code parameter} and always used this
    // instance's own type, which only matched the contract when callers passed the
    // receiver itself (as toParameterString() does).
    return javaTypeToYailType(parameter.type);
  }
}
/**
 * Represents a component feature that has a name and a description.
 */
protected abstract static class Feature {
  protected final String name;
  protected String description;

  protected Feature(String name, String description, String featureType) {
    this.name = name;
    if (description != null && !description.isEmpty()) {
      // Keep only the text before the first @ or { so Javadoc tags such as
      // @param, @author and {@link ...} do not leak into the description.
      this.description = description.split("[@{]")[0].trim();
    } else {
      // No documentation supplied: synthesize a generic description.
      this.description = featureType + " for " + name;
    }
  }
}
/**
 * Represents a component feature that has a name, description, and
 * parameters.
 */
protected abstract class ParameterizedFeature extends Feature {
  // Inherits name, description
  protected final List<Parameter> parameters;
  protected final boolean userVisible;

  protected ParameterizedFeature(String name, String description, String feature,
      boolean userVisible) {
    super(name, description, feature);
    this.userVisible = userVisible;
    parameters = Lists.newArrayList();
  }

  protected void addParameter(String name, String type) {
    parameters.add(new Parameter(name, type));
  }

  /**
   * Generates a comma-separated string corresponding to the parameter list,
   * using Yail types (e.g., "number n, text t1").
   *
   * @return a string representation of the parameter list
   * @throws RuntimeException if the parameter type cannot be mapped to any
   *         of the legal return values
   */
  protected String toParameterString() {
    StringBuilder sb = new StringBuilder();
    int count = 0;
    for (Parameter param : parameters) {
      sb.append(param.parameterToYailType(param));
      sb.append(" ");
      sb.append(param.name);
      if (++count != parameters.size()) {
        sb.append(", ");
      }
    }
    // sb.toString() is the idiomatic, copy-free way to materialize the result;
    // the original's new String(sb) allocated an unnecessary extra copy.
    return sb.toString();
  }
}
/**
* Represents an App Inventor event (annotated with {@link SimpleEvent}).
*/
protected final class Event extends ParameterizedFeature
implements Cloneable, Comparable<Event> {
// Inherits name, description, and parameters
protected Event(String name, String description, boolean userVisible) {
super(name, description, "Event", userVisible);
}
@Override
public Event clone() {
Event that = new Event(name, description, userVisible);
for (Parameter p : parameters) {
that.addParameter(p.name, p.type);
}
return that;
}
@Override
public int compareTo(Event e) {
return name.compareTo(e.name);
}
}
/**
* Represents an App Inventor component method (annotated with
* {@link SimpleFunction}).
*/
protected final class Method extends ParameterizedFeature
implements Cloneable, Comparable<Method> {
// Inherits name, description, and parameters
private String returnType;
protected Method(String name, String description, boolean userVisible) {
super(name, description, "Method", userVisible);
// returnType defaults to null
}
protected String getReturnType() {
return returnType;
}
@Override
public Method clone() {
Method that = new Method(name, description, userVisible);
for (Parameter p : parameters) {
that.addParameter(p.name, p.type);
}
that.returnType = returnType;
return that;
}
@Override
public int compareTo(Method f) {
return name.compareTo(f.name);
}
}
/**
* Represents an App Inventor component property (annotated with
* {@link SimpleProperty}).
*/
protected static final class Property implements Cloneable {
protected final String name;
private String description;
private PropertyCategory propertyCategory;
private boolean userVisible;
private String type;
private boolean readable;
private boolean writable;
private String componentInfoName;
protected Property(String name, String description,
PropertyCategory category, boolean userVisible) {
this.name = name;
this.description = description;
this.propertyCategory = category;
this.userVisible = userVisible;
// type defaults to null
// readable and writable default to false
}
@Override
public Property clone() {
Property that = new Property(name, description, propertyCategory, userVisible);
that.type = type;
that.readable = readable;
that.writable = writable;
that.componentInfoName = componentInfoName;
return that;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("<Property name: ");
sb.append(name);
sb.append(", type: ");
sb.append(type);
if (readable) {
sb.append(" readable");
}
if (writable) {
sb.append(" writable");
}
sb.append(">");
return sb.toString();
}
/**
* Returns the description of this property, as retrieved by
* {@link SimpleProperty#description()}.
*
* @return the description of this property
*/
protected String getDescription() {
return description;
}
/**
* Returns whether this property is visible in the Blocks Editor, as retrieved
* from {@link SimpleProperty#userVisible()}.
*
* @return whether the property is visible in the Blocks Editor
*/
protected boolean isUserVisible() {
return userVisible;
}
/**
* Returns this property's Java type (e.g., "int", "double", or "java.lang.String").
*
* @return the feature's Java type
*/
protected String getType() {
return type;
}
/**
* Returns whether this property is readable (has a getter).
*
* @return whether this property is readable
*/
protected boolean isReadable() {
return readable;
}
/**
* Returns whether this property is writable (has a setter).
*
* @return whether this property is writable
*/
protected boolean isWritable() {
return writable;
}
/**
* Returns a string indicating whether this property is readable and/or
* writable.
*
* @return one of "read-write", "read-only", or "write-only"
* @throws {@link RuntimeException} if the property is neither readable nor
* writable
*/
protected String getRwString() {
if (readable) {
if (writable) {
return READ_WRITE;
} else {
return READ_ONLY;
}
} else {
if (!writable) {
throw new RuntimeException("Property " + name +
" is neither readable nor writable");
}
return WRITE_ONLY;
}
}
}
/**
* Represents an App Inventor component, including its designer properties,
* Simple properties, methods, and events.
*/
protected final class ComponentInfo extends Feature {
// Inherits name and description
/**
* Permissions required by this component.
* @see android.Manifest.permission
*/
protected final Set<String> permissions;
/**
* Libraries required by this component.
*/
protected final Set<String> libraries;
/**
* Properties of this component that are visible in the Designer.
* @see DesignerProperty
*/
protected final SortedMap<String, DesignerProperty> designerProperties;
/**
* Properties of this component, whether or not they are visible in
* the Designer. The keys of this map are a superset of the keys of
* {@link #designerProperties}.
*/
protected final SortedMap<String, Property> properties;
/**
* Methods provided by this component.
*/
protected final SortedMap<String, Method> methods;
/**
* Events provided by this component.
*/
protected final SortedMap<String, Event> events;
/**
* Whether this component is abstract (such as
* {@link com.google.appinventor.components.runtime.Sprite}) or concrete.
*/
protected final boolean abstractClass;
/**
* The displayed name of this component. This is usually the same as the
* {@link Class#getSimpleName()}. The exception is for the component
* {@link com.google.appinventor.components.runtime.Form}, for which the
* name "Screen" is used.
*/
protected final String displayName;
private String helpDescription; // Shorter popup description
private String category;
private String categoryString;
private boolean simpleObject;
private boolean designerComponent;
private int version;
private boolean showOnPalette;
private boolean nonVisible;
private String iconName;
protected ComponentInfo(Element element) {
super(element.getSimpleName().toString(), // Short name
elementUtils.getDocComment(element),
"Component");
displayName = getDisplayNameForComponentType(name);
permissions = Sets.newHashSet();
libraries = Sets.newHashSet();
designerProperties = Maps.newTreeMap();
properties = Maps.newTreeMap();
methods = Maps.newTreeMap();
events = Maps.newTreeMap();
abstractClass = element.getModifiers().contains(Modifier.ABSTRACT);
for (AnnotationMirror am : element.getAnnotationMirrors()) {
DeclaredType dt = am.getAnnotationType();
String annotationName = am.getAnnotationType().toString();
if (annotationName.equals(SimpleObject.class.getName())) {
simpleObject = true;
}
if (annotationName.equals(DesignerComponent.class.getName())) {
designerComponent = true;
DesignerComponent designerComponentAnnotation =
element.getAnnotation(DesignerComponent.class);
// Override javadoc description with explicit description
// if provided.
String explicitDescription = designerComponentAnnotation.description();
if (!explicitDescription.isEmpty()) {
description = explicitDescription;
}
// Set helpDescription to the designerHelpDescription field if
// provided; otherwise, use description
helpDescription = designerComponentAnnotation.designerHelpDescription();
if (helpDescription.isEmpty()) {
helpDescription = description;
}
category = designerComponentAnnotation.category().getName();
categoryString = designerComponentAnnotation.category().toString();
version = designerComponentAnnotation.version();
showOnPalette = designerComponentAnnotation.showOnPalette();
nonVisible = designerComponentAnnotation.nonVisible();
iconName = designerComponentAnnotation.iconName();
}
}
}
/**
* A brief description of this component to be shown when the user requests
* help in the Designer. This is obtained from the first of the following that
* was provided in the source code for the component:
* <ol>
* <li> {@link DesignerComponent#designerHelpDescription()}</li>
* <li> {@link DesignerComponent#description()}</li>
* <li> the Javadoc preceding the beginning of the class corresponding to the component</li>
* </ol>
*/
protected String getHelpDescription() {
return helpDescription;
}
/**
* Returns the name of this component's category within the Designer, as displayed
* (for example, "Screen Arrangement").
*
* @return the name of this component's Designer category
*/
protected String getCategory() {
return category;
}
/**
* Returns the String representation of the EnumConstant corresponding to this
* component's category within the Designer (for example, "ARRANGEMENTS").
* Usually, you should use {@link #getCategory()} instead.
*
* @return the EnumConstant representing this component's Designer category
*/
protected String getCategoryString() {
return categoryString;
}
/**
* Returns the version number of this component, as specified by
* {@link DesignerComponent#version()}.
*
* @return the version number of this component
*/
protected int getVersion() {
return version;
}
/**
* Returns whether this component is shown on the palette in the Designer, as
* specified by {@link DesignerComponent#showOnPalette()}.
*
* @return whether this component is shown on the Designer palette
*/
protected boolean getShowOnPalette() {
return showOnPalette;
}
/**
* Returns whether this component is non-visible on the device's screen, as
* specified by {@link DesignerComponent#nonVisible()}. Examples of non-visible
* components are {@link com.google.appinventor.components.runtime.LocationSensor}
* and {@link com.google.appinventor.components.runtime.Clock}.
*
* @return {@code true} if the component is non-visible, {@code false} otherwise
*/
protected boolean getNonVisible() {
return nonVisible;
}
/**
* Returns the name of the icon file used on the Designer palette, as specified in
* {@link DesignerComponent#iconName()}.
*
* @return the name of the icon file
*/
protected String getIconName() {
return iconName;
}
private String getDisplayNameForComponentType(String componentTypeName) {
// Users don't know what a 'Form' is. They know it as a 'Screen'.
return "Form".equals(componentTypeName) ? "Screen" : componentTypeName;
}
}
  /**
   * Returns the annotations supported by this {@code ComponentProcessor}, namely those related
   * to components ({@link com.google.appinventor.components.annotations}).
   *
   * @return the supported annotations
   */
  @Override
  public Set<String> getSupportedAnnotationTypes() {
    // The set is a shared constant; callers must not modify it.
    return SUPPORTED_ANNOTATION_TYPES;
  }
  /**
   * Initializes this processor, caching the element and type utility objects
   * from the processing environment for use during later rounds.
   *
   * @param processingEnv environment to access facilities the tool framework
   *        provides to the processor
   */
  @Override
  public void init(ProcessingEnvironment processingEnv) {
    super.init(processingEnv);
    elementUtils = processingEnv.getElementUtils();
    typeUtils = processingEnv.getTypeUtils();
  }
/**
* Processes the component-related annotations ({@link
* com.google.appinventor.components.annotations}),
* populating {@link #components} and initializing {@link #messager} for use within
* {@link #outputResults()}, which is called at the end of this method and must be overriden by
* concrete subclasses.
*
* @param annotations the annotation types requested to be processed
* @param roundEnv environment for information about the current and prior round
* @return {@code true}, indicating that the annotations have been claimed by this processor.
* @see AbstractProcessor#process
*/
@Override
public boolean process(Set<? extends TypeElement> annotations, RoundEnvironment roundEnv) {
// This method will be called many times for the source code.
// Only do something on the first pass.
pass++;
if (pass > 1) {
return true;
}
messager = processingEnv.getMessager();
for (TypeElement te : annotations) {
if (te.getSimpleName().toString().equals("DesignerComponent")
|| te.getSimpleName().toString().equals("SimpleObject")) {
for (Element element : roundEnv.getElementsAnnotatedWith(te)) {
processComponent(element);
}
}
}
// Put the component class names (including abstract classes)
componentTypes.addAll(components.keySet());
// Remove non-components before calling outputResults.
List<String> removeList = Lists.newArrayList();
for (Map.Entry<String, ComponentInfo> entry : components.entrySet()) {
ComponentInfo component = entry.getValue();
if (component.abstractClass || !component.designerComponent) {
removeList.add(entry.getKey());
}
}
components.keySet().removeAll(removeList);
try {
// This is an abstract method implemented in concrete subclasses.
outputResults();
} catch (IOException e) {
throw new RuntimeException(e);
}
// Indicate that we have successfully handled the annotations.
return true;
}
  /*
   * This processes an element if it represents a component, reading in its
   * information and adding it to components. If this component is a
   * subclass of another component, this method recursively calls itself on the
   * superclass so the parent's features can be copied into the child.
   */
  private void processComponent(Element element) {
    // If the element is not a component (e.g., Float), return early.
    if (element.getAnnotation(SimpleObject.class) == null &&
        element.getAnnotation(DesignerComponent.class) == null) {
      return;
    }
    // If we already processed this component, return early.
    String longComponentName = element.asType().toString();
    if (components.containsKey(longComponentName)) {
      return;
    }
    // Create new ComponentInfo.
    ComponentInfo componentInfo = new ComponentInfo(element);
    // Check if this extends another component (DesignerComponent or SimpleObject).
    List<? extends TypeMirror> directSupertypes = typeUtils.directSupertypes(element.asType());
    if (!directSupertypes.isEmpty()) {
      // Only look at the first one. Later ones would be interfaces,
      // which we don't care about.
      String parentName = directSupertypes.get(0).toString();
      ComponentInfo parentComponent = components.get(parentName);
      if (parentComponent == null) {
        // Try to process the parent component now (recursion bottoms out at
        // java.lang.Object, which carries neither annotation).
        Element parentElement = elementUtils.getTypeElement(parentName);
        if (parentElement != null) {
          processComponent(parentElement);
          parentComponent = components.get(parentName);
        }
      }
      // If we still can't find the parent class, we don't care about it,
      // since it's not a component (but something like java.lang.Object).
      // Otherwise, we need to copy its designer properties, properties, methods, and events.
      if (parentComponent != null) {
        // Copy its permissions, designer properties, properties, methods, and events.
        componentInfo.permissions.addAll(parentComponent.permissions);
        // Since we don't modify DesignerProperties, we can just call Map.putAll to copy the
        // designer properties from parentComponent to componentInfo.
        componentInfo.designerProperties.putAll(parentComponent.designerProperties);
        // NOTE(lizlooney) We can't just call Map.putAll to copy the events/properties/methods from
        // parentComponent to componentInfo because then each component will share a single
        // Event/Property/Method and if one component overrides something about an
        // Event/Property/Method, then it will affect all the other components that are sharing
        // that Event/Property/Method.
        for (Map.Entry<String, Event> entry : parentComponent.events.entrySet()) {
          componentInfo.events.put(entry.getKey(), entry.getValue().clone());
        }
        for (Map.Entry<String, Property> entry : parentComponent.properties.entrySet()) {
          componentInfo.properties.put(entry.getKey(), entry.getValue().clone());
        }
        for (Map.Entry<String, Method> entry : parentComponent.methods.entrySet()) {
          componentInfo.methods.put(entry.getKey(), entry.getValue().clone());
        }
      }
    }
    // Gather permissions (annotation value is a comma-separated list).
    UsesPermissions up = element.getAnnotation(UsesPermissions.class);
    if (up != null) {
      for (String permission : up.permissionNames().split(",")) {
        componentInfo.permissions.add(permission.trim());
      }
    }
    // Gather library names (also comma-separated).
    UsesLibraries ulib = element.getAnnotation(UsesLibraries.class);
    if (ulib != null) {
      for (String library : ulib.libraries().split(",")) {
        componentInfo.libraries.add(library.trim());
      }
    }
    // Build up event information.
    processEvents(componentInfo, element);
    // Build up property information.
    processProperties(componentInfo, element);
    // Build up method information.
    processMethods(componentInfo, element);
    // Add it to our components map.
    components.put(longComponentName, componentInfo);
  }
private boolean isPublicMethod(Element element) {
return element.getModifiers().contains(Modifier.PUBLIC)
&& element.getKind() == ElementKind.METHOD;
}
  /**
   * Builds a {@link Property} from a method annotated with
   * {@link SimpleProperty}. A method with no parameters is treated as a
   * getter; a method with exactly one parameter is treated as a setter.
   *
   * @param element the annotated method element
   * @param componentInfoName the name of the component declaring the method
   * @return the resulting Property, with type, readable, and writable set
   * @throws RuntimeException if the element is not executable, is a void
   *         getter, or is a setter with more than one parameter
   */
  private Property executableElementToProperty(Element element, String componentInfoName) {
    String propertyName = element.getSimpleName().toString();
    SimpleProperty simpleProperty = element.getAnnotation(SimpleProperty.class);
    if (!(element.asType() instanceof ExecutableType)) {
      throw new RuntimeException("element.asType() is not an ExecutableType for " +
          propertyName);
    }
    Property property = new Property(propertyName,
        simpleProperty.description(),
        simpleProperty.category(),
        simpleProperty.userVisible());
    // Get parameters to tell if this is a getter or setter.
    ExecutableType executableType = (ExecutableType) element.asType();
    List<? extends TypeMirror> parameters = executableType.getParameterTypes();
    // Check if it is a setter or getter, and set the property's readable, writable,
    // and type fields appropriately.
    TypeMirror typeMirror;
    if (parameters.size() == 0) {
      // It is a getter.
      property.readable = true;
      typeMirror = executableType.getReturnType();
      if (typeMirror.getKind().equals(TypeKind.VOID)) {
        throw new RuntimeException("Property method is void and has no parameters: "
            + propertyName);
      }
    } else {
      // It is a setter.
      property.writable = true;
      if (parameters.size() != 1) {
        throw new RuntimeException("Too many parameters for setter for " +
            propertyName);
      }
      typeMirror = parameters.get(0);
    }
    // Use typeMirror to set the property's type.
    if (!typeMirror.getKind().equals(TypeKind.VOID)) {
      property.type = typeMirror.toString();
    }
    property.componentInfoName = componentInfoName;
    return property;
  }
  /**
   * Gathers property information from the public methods of
   * {@code componentElement}, recording {@link DesignerProperty} annotations
   * and merging {@link SimpleProperty} getter/setter pairs into single
   * {@link Property} entries on {@code componentInfo}.
   */
  private void processProperties(ComponentInfo componentInfo,
      Element componentElement) {
    // We no longer support properties that use the variant type.
    for (Element element : componentElement.getEnclosedElements()) {
      if (!isPublicMethod(element)) {
        continue;
      }
      // Get the name of the prospective property.
      String propertyName = element.getSimpleName().toString();
      // Designer property information
      DesignerProperty designerProperty = element.getAnnotation(DesignerProperty.class);
      if (designerProperty != null) {
        componentInfo.designerProperties.put(propertyName, designerProperty);
      }
      // If property is overridden without again using SimpleProperty, remove
      // it. For example, this is done for Ball.Width(), which overrides the
      // inherited property Width() because Ball uses Radius() instead.
      if (element.getAnnotation(SimpleProperty.class) == null) {
        if (componentInfo.properties.containsKey(propertyName)) {
          // Look at the prior property's componentInfoName.
          Property priorProperty = componentInfo.properties.get(propertyName);
          if (priorProperty.componentInfoName.equals(componentInfo.name)) {
            // The prior property's componentInfoName is the same as this componentInfo's name.
            // This is just a read-only or write-only property. We don't need to do anything
            // special here.
          } else {
            // The prior property's componentInfoName is different than this componentInfo's
            // name. This is an overridden property without the SimpleProperty annotation and we
            // need to remove it.
            componentInfo.properties.remove(propertyName);
          }
        }
      } else {
        // Create a new Property element, then compare and combine it with any
        // prior Property element with the same property name, verifying that
        // they are consistent.
        Property newProperty = executableElementToProperty(element, componentInfo.name);
        if (componentInfo.properties.containsKey(propertyName)) {
          Property priorProperty = componentInfo.properties.get(propertyName);
          if (!priorProperty.type.equals(newProperty.type)) {
            // The 'real' type of a property is determined by its getter, if
            // it has one. In theory there can be multiple setters which
            // take different types and those types can differ from the
            // getter.
            if (newProperty.readable) {
              priorProperty.type = newProperty.type;
            } else if (priorProperty.writable) {
              // TODO(user): handle lang_def and document generation for multiple setters.
              throw new RuntimeException("Inconsistent types " + priorProperty.type +
                  " and " + newProperty.type + " for property " +
                  propertyName + " in component " + componentInfo.name);
            }
          }
          // Merge newProperty into priorProperty, which is already in the properties map.
          if (priorProperty.description.isEmpty() && !newProperty.description.isEmpty()) {
            priorProperty.description = newProperty.description;
          }
          if (priorProperty.propertyCategory == PropertyCategory.UNSET) {
            priorProperty.propertyCategory = newProperty.propertyCategory;
          } else if (newProperty.propertyCategory != priorProperty.propertyCategory &&
                     newProperty.propertyCategory != PropertyCategory.UNSET) {
            throw new RuntimeException(
                "Property " + propertyName + " has inconsistent categories " +
                priorProperty.propertyCategory + " and " +
                newProperty.propertyCategory + " in component " +
                componentInfo.name);
          }
          // A property is readable/writable if any of its accessors is;
          // it is user-visible only if all of its accessors are.
          priorProperty.readable = priorProperty.readable || newProperty.readable;
          priorProperty.writable = priorProperty.writable || newProperty.writable;
          priorProperty.userVisible = priorProperty.userVisible && newProperty.userVisible;
          priorProperty.componentInfoName = componentInfo.name;
        } else {
          // Add the new property to the properties map.
          componentInfo.properties.put(propertyName, newProperty);
        }
      }
    }
  }
  // Note: The top halves of the bodies of processEvent() and processMethods()
  // are very similar. I tried refactoring in several ways but it just made
  // things more complex.
  /**
   * Gathers {@link SimpleEvent} information from the public methods of
   * {@code componentElement} and records each event, with its parameters,
   * on {@code componentInfo}.
   */
  private void processEvents(ComponentInfo componentInfo,
      Element componentElement) {
    for (Element element : componentElement.getEnclosedElements()) {
      if (!isPublicMethod(element)) {
        continue;
      }
      // Get the name of the prospective event.
      String eventName = element.getSimpleName().toString();
      SimpleEvent simpleEventAnnotation = element.getAnnotation(SimpleEvent.class);
      // Remove overridden events unless SimpleEvent is again specified.
      // See comment in processProperties for an example.
      if (simpleEventAnnotation == null) {
        if (componentInfo.events.containsKey(eventName)) {
          componentInfo.events.remove(eventName);
        }
      } else {
        // Prefer the annotation's description, falling back to the Javadoc.
        String eventDescription = simpleEventAnnotation.description();
        if (eventDescription.isEmpty()) {
          eventDescription = elementUtils.getDocComment(element);
          if (eventDescription == null) {
            messager.printMessage(Diagnostic.Kind.WARNING,
                "In component " + componentInfo.name +
                ", event " + eventName +
                " is missing a description.");
            eventDescription = "";
          }
        }
        boolean userVisible = simpleEventAnnotation.userVisible();
        Event event = new Event(eventName, eventDescription, userVisible);
        componentInfo.events.put(event.name, event);
        // Verify that this element has an ExecutableType.
        if (!(element instanceof ExecutableElement)) {
          throw new RuntimeException("In component " + componentInfo.name +
              ", the representation of SimpleEvent " + eventName +
              " does not implement ExecutableElement.");
        }
        ExecutableElement e = (ExecutableElement) element;
        // Extract the parameters.
        for (VariableElement ve : e.getParameters()) {
          event.addParameter(ve.getSimpleName().toString(),
              ve.asType().toString());
        }
      }
    }
  }
  /**
   * Gathers {@link SimpleFunction} information from the public methods of
   * {@code componentElement} and records each method, with its parameters
   * and return type, on {@code componentInfo}.
   */
  private void processMethods(ComponentInfo componentInfo,
      Element componentElement) {
    for (Element element : componentElement.getEnclosedElements()) {
      if (!isPublicMethod(element)) {
        continue;
      }
      // Get the name of the prospective method.
      String methodName = element.getSimpleName().toString();
      SimpleFunction simpleFunctionAnnotation = element.getAnnotation(SimpleFunction.class);
      // Remove overridden methods unless SimpleFunction is again specified.
      // See comment in processProperties for an example.
      if (simpleFunctionAnnotation == null) {
        if (componentInfo.methods.containsKey(methodName)) {
          componentInfo.methods.remove(methodName);
        }
      } else {
        // Prefer the annotation's description, falling back to the Javadoc.
        String methodDescription = simpleFunctionAnnotation.description();
        if (methodDescription.isEmpty()) {
          methodDescription = elementUtils.getDocComment(element);
          if (methodDescription == null) {
            messager.printMessage(Diagnostic.Kind.WARNING,
                "In component " + componentInfo.name +
                ", method " + methodName +
                " is missing a description.");
            methodDescription = "";
          }
        }
        boolean userVisible = simpleFunctionAnnotation.userVisible();
        Method method = new Method(methodName, methodDescription, userVisible);
        componentInfo.methods.put(method.name, method);
        // Verify that this element has an ExecutableType.
        if (!(element instanceof ExecutableElement)) {
          throw new RuntimeException("In component " + componentInfo.name +
              ", the representation of SimpleFunction " + methodName +
              " does not implement ExecutableElement.");
        }
        ExecutableElement e = (ExecutableElement) element;
        // Extract the parameters.
        for (VariableElement ve : e.getParameters()) {
          method.addParameter(ve.getSimpleName().toString(),
              ve.asType().toString());
        }
        // Extract the return type; a void method leaves returnType null.
        if (e.getReturnType().getKind() != TypeKind.VOID) {
          method.returnType = e.getReturnType().toString();
        }
      }
    }
  }
  /**
   * <p>Outputs the required component information in the desired format. It is called by
   * {@link #process} after the fields {@link #components} and {@link #messager}
   * have been populated.</p>
   *
   * <p>Implementations of this method should call {@link #getOutputWriter(String)} to obtain a
   * {@link Writer} for their output. Diagnostic messages should be written
   * using {@link #messager}.</p>
   *
   * @throws IOException if the output cannot be written
   */
  protected abstract void outputResults() throws IOException;
/**
* Returns the appropriate Yail type (e.g., "number" or "text") for a
* given Java type (e.g., "float" or "java.lang.String"). All component
* names are converted to "component".
*
* @param type a type name, as returned by {@link TypeMirror#toString()}
* @return one of "boolean", "text", "number", "list", or "component".
* @throws RuntimeException if the parameter cannot be mapped to any of the
* legal return values
*/
protected final String javaTypeToYailType(String type) {
// boolean -> boolean
if (type.equals("boolean")) {
return type;
}
// String -> text
if (type.equals("java.lang.String")) {
return "text";
}
// {float, double, int, short, long} -> number
if (type.equals("float") || type.equals("double") || type.equals("int") ||
type.equals("short") || type.equals("long")) {
return "number";
}
// YailList -> list
if (type.equals("com.google.appinventor.components.runtime.util.YailList")) {
return "list";
}
// List<?> -> list
if (type.startsWith("java.util.List")) {
return "list";
}
// Calendar -> InstantInTime
if (type.equals("java.util.Calendar")) {
return "InstantInTime";
}
if (type.equals("java.lang.Object")) {
return "any";
}
// Check if it's a component.
if (componentTypes.contains(type)) {
return "component";
}
throw new RuntimeException("Cannot convert Java type '" + type +
"' to Yail type");
}
  /**
   * Creates and returns a {@link FileObject} for output.
   *
   * @param fileName the name of the output file
   * @return the {@code FileObject}
   * @throws IOException if the file cannot be created
   */
  protected FileObject createOutputFileObject(String fileName) throws IOException {
    // Output goes to the generated-source tree under OUTPUT_PACKAGE.
    return processingEnv.getFiler().
        createResource(StandardLocation.SOURCE_OUTPUT, OUTPUT_PACKAGE, fileName);
  }
/**
* Returns a {@link Writer} to which output should be written. As with any
* {@code Writer}, the methods {@link Writer#flush()} and {@link Writer#close()}
* should be called when output is complete.
*
* @param fileName the name of the output file
* @return the {@code Writer}
* @throws IOException if the {@code Writer} or underlying {@link FileObject}
* cannot be created
*/
protected Writer getOutputWriter(String fileName) throws IOException {
return createOutputFileObject(fileName).openWriter();
}
}
| |
/* Copyright 2017 Alfa Financial Software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.alfasoftware.morf.jdbc.h2;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import org.alfasoftware.morf.jdbc.AbstractSqlDialectTest;
import org.alfasoftware.morf.jdbc.SqlDialect;
import org.apache.commons.lang.StringUtils;
import com.google.common.collect.ImmutableList;
/**
 * Tests SQL statements generated for H2.
*
* @author Copyright (c) Alfa Financial Software 2010
*/
public class TestH2Dialect extends AbstractSqlDialectTest {
  /**
   * Returns the H2 dialect under test.
   *
   * @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#createTestDialect()
   */
  @Override
  protected SqlDialect createTestDialect() {
    return new H2Dialect();
  }
  /**
   * Expected H2 DDL for the standard test tables; the strings must match the
   * dialect's output exactly, so do not reformat them.
   *
   * @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedCreateTableStatements()
   */
  @Override
  protected List<String> expectedCreateTableStatements() {
    return Arrays
        .asList(
          "CREATE TABLE Test (id BIGINT NOT NULL, version INTEGER DEFAULT 0, stringField VARCHAR(3), intField DECIMAL(8,0), floatField DECIMAL(13,2) NOT NULL, dateField DATE, booleanField BIT, charField VARCHAR(1), blobField LONGVARBINARY, bigIntegerField BIGINT DEFAULT 12345, clobField NCLOB, CONSTRAINT Test_PK PRIMARY KEY (id))",
          "CREATE UNIQUE INDEX Test_NK ON Test (stringField)",
          "CREATE INDEX Test_1 ON Test (intField,floatField)",
          "CREATE TABLE Alternate (id BIGINT NOT NULL, version INTEGER DEFAULT 0, stringField VARCHAR(3), CONSTRAINT Alternate_PK PRIMARY KEY (id))",
          "CREATE INDEX Alternate_1 ON Alternate (stringField)",
          "CREATE TABLE NonNull (id BIGINT NOT NULL, version INTEGER DEFAULT 0, stringField VARCHAR(3) NOT NULL, intField DECIMAL(8,0) NOT NULL, booleanField BIT NOT NULL, dateField DATE NOT NULL, blobField LONGVARBINARY NOT NULL, CONSTRAINT NonNull_PK PRIMARY KEY (id))",
          "CREATE TABLE CompositePrimaryKey (id BIGINT NOT NULL, version INTEGER DEFAULT 0, stringField VARCHAR(3) NOT NULL, secondPrimaryKey VARCHAR(3) NOT NULL, CONSTRAINT CompositePrimaryKey_PK PRIMARY KEY (id, secondPrimaryKey))",
          "CREATE TABLE AutoNumber (intField BIGINT AUTO_INCREMENT(5) COMMENT 'AUTONUMSTART:[5]', CONSTRAINT AutoNumber_PK PRIMARY KEY (intField))"
        );
  }
  /**
   * Expected H2 DDL for the temporary test tables; the strings must match the
   * dialect's output exactly, so do not reformat them.
   *
   * @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedCreateTemporaryTableStatements()
   */
  @Override
  protected List<String> expectedCreateTemporaryTableStatements() {
    return Arrays
        .asList(
          "CREATE TEMPORARY TABLE TEMP_TempTest (id BIGINT NOT NULL, version INTEGER DEFAULT 0, stringField VARCHAR(3), intField DECIMAL(8,0), floatField DECIMAL(13,2) NOT NULL, dateField DATE, booleanField BIT, charField VARCHAR(1), blobField LONGVARBINARY, bigIntegerField BIGINT DEFAULT 12345, clobField NCLOB, CONSTRAINT TEMP_TempTest_PK PRIMARY KEY (id))",
          "CREATE UNIQUE INDEX TempTest_NK ON TEMP_TempTest (stringField)",
          "CREATE INDEX TempTest_1 ON TEMP_TempTest (intField,floatField)",
          "CREATE TEMPORARY TABLE TEMP_TempAlternate (id BIGINT NOT NULL, version INTEGER DEFAULT 0, stringField VARCHAR(3), CONSTRAINT TEMP_TempAlternate_PK PRIMARY KEY (id))",
          "CREATE INDEX TempAlternate_1 ON TEMP_TempAlternate (stringField)",
          "CREATE TEMPORARY TABLE TEMP_TempNonNull (id BIGINT NOT NULL, version INTEGER DEFAULT 0, stringField VARCHAR(3) NOT NULL, intField DECIMAL(8,0) NOT NULL, booleanField BIT NOT NULL, dateField DATE NOT NULL, blobField LONGVARBINARY NOT NULL, CONSTRAINT TEMP_TempNonNull_PK PRIMARY KEY (id))");
  }
  /**
   * Expected H2 DDL for a table whose name exceeds typical identifier-length
   * limits.
   *
   * @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedCreateTableStatementsWithLongTableName()
   */
  // NOTE(review): the index statements below hard-code the long table name
  // instead of using TABLE_WITH_VERY_LONG_NAME as the CREATE TABLE statement
  // does; presumably they are the same string -- confirm and consider using
  // the constant for consistency.
  @Override
  protected List<String> expectedCreateTableStatementsWithLongTableName() {
    return Arrays
        .asList("CREATE TABLE "
            + TABLE_WITH_VERY_LONG_NAME
            + " (id BIGINT NOT NULL, version INTEGER DEFAULT 0, stringField VARCHAR(3), intField DECIMAL(8,0), floatField DECIMAL(13,2) NOT NULL, dateField DATE, booleanField BIT, charField VARCHAR(1), CONSTRAINT "
            + TABLE_WITH_VERY_LONG_NAME + "_PK PRIMARY KEY (id))",
          "CREATE UNIQUE INDEX Test_NK ON tableWithANameThatExceedsTwentySevenCharactersToMakeSureSchemaNameDoesNotGetFactoredIntoOracleNameTruncation (stringField)",
          "CREATE INDEX Test_1 ON tableWithANameThatExceedsTwentySevenCharactersToMakeSureSchemaNameDoesNotGetFactoredIntoOracleNameTruncation (intField,floatField)"
        );
  }
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDropTableStatements()
*/
@Override
protected List<String> expectedDropTableStatements() {
return Arrays.asList("drop table Test");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDropTempTableStatements()
*/
@Override
protected List<String> expectedDropTempTableStatements() {
return Arrays.asList("drop table TEMP_TempTest");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedTruncateTableStatements()
*/
@Override
protected List<String> expectedTruncateTableStatements() {
return Arrays.asList("truncate table Test");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedTruncateTempTableStatements()
*/
@Override
protected List<String> expectedTruncateTempTableStatements() {
return Arrays.asList("truncate table TEMP_TempTest");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDeleteAllFromTableStatements()
*/
@Override
protected List<String> expectedDeleteAllFromTableStatements() {
return Arrays.asList("delete from Test");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedParameterisedInsertStatement()
*/
@Override
protected String expectedParameterisedInsertStatement() {
return "INSERT INTO Test (id, version, stringField, intField, floatField, dateField, booleanField, charField, blobField, bigIntegerField, clobField) VALUES (5, :version, CAST('Escap''d' AS VARCHAR(7)), 7, :floatField, 20100405, 1, :charField, :blobField, :bigIntegerField, :clobField)";
}
// --- Expected-SQL fixtures: schema-qualified inserts and id-generation via the idvalues table ---
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedParameterisedInsertStatementWithTableInDifferentSchema()
*/
@Override
protected String expectedParameterisedInsertStatementWithTableInDifferentSchema() {
return "INSERT INTO MYSCHEMA.Test (id, version, stringField, intField, floatField, dateField, booleanField, charField, blobField, bigIntegerField, clobField) VALUES (5, :version, CAST('Escap''d' AS VARCHAR(7)), 7, :floatField, 20100405, 1, :charField, :blobField, :bigIntegerField, :clobField)";
}
/**
* Expected id auto-generation: the current max id is snapshotted into the
* idvalues table, then used as an offset for ids selected from the source table.
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAutoGenerateIdStatement()
*/
@Override
protected List<String> expectedAutoGenerateIdStatement() {
return Arrays.asList(
"DELETE FROM idvalues where name = 'Test'",
"INSERT INTO idvalues (name, value) VALUES('Test', (SELECT COALESCE(MAX(id) + 1, 1) AS CurrentValue FROM Test))",
"INSERT INTO Test (version, stringField, id) SELECT version, stringField, (SELECT COALESCE(value, 0) FROM idvalues WHERE (name = CAST('Test' AS VARCHAR(4)))) + Other.id FROM Other"
);
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedInsertWithIdAndVersion()
*/
@Override
protected List<String> expectedInsertWithIdAndVersion() {
return Arrays.asList(
"DELETE FROM idvalues where name = 'Test'",
"INSERT INTO idvalues (name, value) VALUES('Test', (SELECT COALESCE(MAX(id) + 1, 1) AS CurrentValue FROM Test))",
"INSERT INTO Test (stringField, id, version) SELECT stringField, (SELECT COALESCE(value, 0) FROM idvalues WHERE (name = CAST('Test' AS VARCHAR(4)))) + Other.id, 0 AS version FROM Other"
);
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedSpecifiedValueInsert()
*/
@Override
protected List<String> expectedSpecifiedValueInsert() {
return Arrays.asList(
"DELETE FROM idvalues where name = 'Test'",
"INSERT INTO idvalues (name, value) VALUES('Test', (SELECT COALESCE(MAX(id) + 1, 1) AS CurrentValue FROM Test))",
"INSERT INTO Test (stringField, intField, floatField, dateField, booleanField, charField, id, version, blobField, bigIntegerField, clobField) VALUES (CAST('Escap''d' AS VARCHAR(7)), 7, 11.25, 20100405, 1, CAST('X' AS VARCHAR(1)), (SELECT COALESCE(value, 1) FROM idvalues WHERE (name = CAST('Test' AS VARCHAR(4)))), 0, null, 12345, null)"
);
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedSpecifiedValueInsertWithTableInDifferentSchema()
*/
@Override
protected List<String> expectedSpecifiedValueInsertWithTableInDifferentSchema() {
return Arrays.asList(
"DELETE FROM idvalues where name = 'Test'",
"INSERT INTO idvalues (name, value) VALUES('Test', (SELECT COALESCE(MAX(id) + 1, 1) AS CurrentValue FROM MYSCHEMA.Test))",
"INSERT INTO MYSCHEMA.Test (stringField, intField, floatField, dateField, booleanField, charField, id, version, blobField, bigIntegerField, clobField) VALUES (CAST('Escap''d' AS VARCHAR(7)), 7, 11.25, 20100405, 1, CAST('X' AS VARCHAR(1)), (SELECT COALESCE(value, 1) FROM idvalues WHERE (name = CAST('Test' AS VARCHAR(4)))), 0, null, 12345, null)"
);
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedParameterisedInsertStatementWithNoColumnValues()
*/
@Override
protected String expectedParameterisedInsertStatementWithNoColumnValues() {
return "INSERT INTO Test (id, version, stringField, intField, floatField, dateField, booleanField, charField, blobField, bigIntegerField, clobField) VALUES (:id, :version, :stringField, :intField, :floatField, :dateField, :booleanField, :charField, :blobField, :bigIntegerField, :clobField)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedEmptyStringInsertStatement()
*/
@Override
protected String expectedEmptyStringInsertStatement() {
return "INSERT INTO Test (stringField, id, version, intField, floatField, dateField, booleanField, charField, blobField, bigIntegerField, clobField) VALUES (NULL, (SELECT COALESCE(value, 1) FROM idvalues WHERE (name = CAST('Test' AS VARCHAR(4)))), 0, 0, 0, null, 0, NULL, null, 12345, null)";
}
// --- Expected-SQL fixtures: concatenation (null-safe via COALESCE(x,'') || ...), arithmetic, simple casts ---
/**
* Expected concatenation: each operand is wrapped in COALESCE(x,'') so a NULL
* operand does not null the whole || expression.
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedConcatenationWithCase()
*/
@Override
protected String expectedConcatenationWithCase() {
return "SELECT COALESCE(assetDescriptionLine1,'') || COALESCE(CASE WHEN (taxVariationIndicator = CAST('Y' AS VARCHAR(1))) THEN exposureCustomerNumber ELSE invoicingCustomerNumber END,'') AS test FROM schedule";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedConcatenationWithFunction()
*/
@Override
protected String expectedConcatenationWithFunction() {
return "SELECT COALESCE(assetDescriptionLine1,'') || COALESCE(MAX(scheduleStartDate),'') AS test FROM schedule";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedConcatenationWithMultipleFieldLiterals()
*/
@Override
protected String expectedConcatenationWithMultipleFieldLiterals() {
return "SELECT COALESCE(CAST('ABC' AS VARCHAR(3)),'') || COALESCE(CAST(' ' AS VARCHAR(1)),'') || COALESCE(CAST('DEF' AS VARCHAR(3)),'') AS assetDescription FROM schedule";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedNestedConcatenations()
*/
@Override
protected String expectedNestedConcatenations() {
return "SELECT COALESCE(field1,'') || COALESCE(COALESCE(field2,'') || COALESCE(CAST('XYZ' AS VARCHAR(3)),''),'') AS test FROM schedule";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedSelectWithConcatenation1()
*/
@Override
protected String expectedSelectWithConcatenation1() {
return "SELECT COALESCE(assetDescriptionLine1,'') || COALESCE(CAST(' ' AS VARCHAR(1)),'') || COALESCE(assetDescriptionLine2,'') AS assetDescription FROM schedule";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedSelectWithConcatenation2()
*/
@Override
protected String expectedSelectWithConcatenation2() {
return "SELECT COALESCE(assetDescriptionLine1,'') || COALESCE(CAST('XYZ' AS VARCHAR(3)),'') || COALESCE(assetDescriptionLine2,'') AS assetDescription FROM schedule";
}
/**
* Note the expected string carries a trailing space after the closing parenthesis.
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedIsNull()
*/
@Override
protected String expectedIsNull() {
return "COALESCE(CAST('A' AS VARCHAR(1)), CAST('B' AS VARCHAR(1))) ";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMathsPlus()
*/
@Override
protected String expectedMathsPlus() {
return "1 + 1";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMathsMinus()
*/
@Override
protected String expectedMathsMinus() {
return "1 - 1";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMathsDivide()
*/
@Override
protected String expectedMathsDivide() {
return "1 / 1";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMathsMultiply()
*/
@Override
protected String expectedMathsMultiply() {
return "1 * 1";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedStringCast()
*/
@Override
protected String expectedStringCast() {
return "CAST(value AS VARCHAR(10))";
}
// --- Expected-SQL fixtures: type casts (booleans map to BIT in this dialect), union and LPAD ---
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedBigIntCast()
*/
@Override
protected String expectedBigIntCast() {
return "CAST(value AS BIGINT)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedBigIntFunctionCast()
*/
@Override
protected String expectedBigIntFunctionCast() {
return "CAST(MIN(value) AS BIGINT)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedBooleanCast()
*/
@Override
protected String expectedBooleanCast() {
return "CAST(value AS BIT)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDateCast()
*/
@Override
protected String expectedDateCast() {
return "CAST(value AS DATE)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDecimalCast()
*/
@Override
protected String expectedDecimalCast() {
return "CAST(value AS DECIMAL(10,2))";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedIntegerCast()
*/
@Override
protected String expectedIntegerCast() {
return "CAST(value AS INTEGER)";
}
/**
* {@inheritDoc}
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedSelectWithUnion()
*/
@Override
protected String expectedSelectWithUnion() {
return "SELECT stringField FROM Other UNION SELECT stringField FROM Test UNION ALL SELECT stringField FROM Alternate ORDER BY stringField";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedLeftPad()
*/
@Override
protected String expectedLeftPad() {
return "SELECT LPAD(stringField, 10, CAST('j' AS VARCHAR(1))) FROM Test";
}
// --- Expected-SQL fixtures: ALTER TABLE add/alter column statements.
// Pattern: nullability changes are emitted as a separate "SET NULL"/"SET NOT NULL"
// statement before the type-change statement; added columns default to NULL-able. ---
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddBlobColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAddBlobColumnStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN blobField_new LONGVARBINARY NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterBlobColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAlterBlobColumnStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN blobField LONGVARBINARY");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterBooleanColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAlterBooleanColumnStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN booleanField BIT");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddBooleanColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAddBooleanColumnStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN booleanField_new BIT NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddStringColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAddStringColumnStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN stringField_new VARCHAR(6) NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterStringColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAlterStringColumnStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN stringField VARCHAR(6)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddIntegerColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAddIntegerColumnStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN intField_new INTEGER NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterIntegerColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAlterIntegerColumnStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN intField DECIMAL(10,0)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddDateColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAddDateColumnStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN dateField_new DATE NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterDateColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAlterDateColumnStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN dateField DATE");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddDecimalColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAddDecimalColumnStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN floatField_new DECIMAL(6,3) NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterDecimalColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAlterDecimalColumnStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN floatField SET NULL",
"ALTER TABLE Test ALTER COLUMN floatField DECIMAL(14,3)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddBigIntegerColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAddBigIntegerColumnStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN bigIntegerField_new BIGINT NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterBigIntegerColumnStatement()
*/
@Override
protected List<String> expectedAlterTableAlterBigIntegerColumnStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN bigIntegerField BIGINT");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddColumnNotNullableStatement()
*/
@Override
protected List<String> expectedAlterTableAddColumnNotNullableStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN dateField_new DATE DEFAULT DATE '2010-01-01' NOT NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterColumnFromNullableToNotNullableStatement()
*/
@Override
protected List<String> expectedAlterTableAlterColumnFromNullableToNotNullableStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN dateField SET NOT NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterColumnFromNotNullableToNotNullableStatement()
*/
@Override
protected List<String> expectedAlterTableAlterColumnFromNotNullableToNotNullableStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN floatField DECIMAL(20,3)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterColumnFromNotNullableToNullableStatement()
*/
@Override
protected List<String> expectedAlterTableAlterColumnFromNotNullableToNullableStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN floatField SET NULL",
"ALTER TABLE Test ALTER COLUMN floatField DECIMAL(20,3)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddColumnWithDefaultStatement()
*/
@Override
protected List<String> expectedAlterTableAddColumnWithDefaultStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN floatField_new DECIMAL(6,3) DEFAULT 20.33 NULL");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAlterColumnWithDefaultStatement()
*/
@Override
protected List<String> expectedAlterTableAlterColumnWithDefaultStatement() {
return Arrays.asList("ALTER TABLE Test ALTER COLUMN bigIntegerField SET DEFAULT 54321",
"ALTER TABLE Test ALTER COLUMN bigIntegerField BIGINT");
}
/**
* Index rebuild is expected to precede the column type change.
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedChangeIndexFollowedByChangeOfAssociatedColumnStatement()
*/
@Override
protected List<String> expectedChangeIndexFollowedByChangeOfAssociatedColumnStatement() {
return Arrays.asList(
// dropIndexStatements & addIndexStatements
"DROP INDEX Test_1",
"CREATE INDEX Test_1 ON Test (intField)",
// changeColumnStatements
"ALTER TABLE Test ALTER COLUMN intField DECIMAL(11,0)");
}
// --- Expected-SQL fixtures: index maintenance, primary-key changes and column renames ---
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAddIndexStatementsOnSingleColumn()
*/
@Override
protected List<String> expectedAddIndexStatementsOnSingleColumn() {
return Arrays.asList("CREATE INDEX indexName ON Test (id)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAddIndexStatementsOnMultipleColumns()
*/
@Override
protected List<String> expectedAddIndexStatementsOnMultipleColumns() {
return Arrays.asList("CREATE INDEX indexName ON Test (id,version)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAddIndexStatementsUnique()
*/
@Override
protected List<String> expectedAddIndexStatementsUnique() {
return Arrays.asList("CREATE UNIQUE INDEX indexName ON Test (id)");
}
/**
* Note the dialect drops indexes by name alone (no ON <table> clause).
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedIndexDropStatements()
*/
@Override
protected List<String> expectedIndexDropStatements() {
return Arrays.asList("DROP INDEX indexName");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterColumnMakePrimaryStatements()
*/
@Override
protected List<String> expectedAlterColumnMakePrimaryStatements() {
return Arrays.asList("ALTER TABLE Test ADD CONSTRAINT Test_PK PRIMARY KEY (id, dateField)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterPrimaryKeyColumnCompositeKeyStatements()
*/
@Override
protected List<String> expectedAlterPrimaryKeyColumnCompositeKeyStatements() {
return Arrays.asList("ALTER TABLE CompositePrimaryKey ALTER COLUMN secondPrimaryKey VARCHAR(5)");
}
/**
* Removing a column from a composite key: drop the PK constraint, relax and
* retype the departing column, then re-add the PK over the remaining column.
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterRemoveColumnFromCompositeKeyStatements()
*/
@Override
protected List<String> expectedAlterRemoveColumnFromCompositeKeyStatements() {
return ImmutableList.of(
"ALTER TABLE CompositePrimaryKey DROP CONSTRAINT CompositePrimaryKey_PK",
"ALTER TABLE CompositePrimaryKey ALTER COLUMN secondPrimaryKey SET NULL",
"ALTER TABLE CompositePrimaryKey ALTER COLUMN secondPrimaryKey VARCHAR(5)",
"ALTER TABLE CompositePrimaryKey ADD CONSTRAINT CompositePrimaryKey_PK PRIMARY KEY (id)"
);
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterPrimaryKeyColumnStatements()
*/
@Override
protected List<String> expectedAlterPrimaryKeyColumnStatements() {
return Arrays.asList(
"ALTER TABLE Test ALTER COLUMN id RENAME TO renamedId",
"ALTER TABLE Test ALTER COLUMN renamedId BIGINT"
);
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterColumnRenamingAndChangingNullability()
*/
@Override
protected List<String> expectedAlterColumnRenamingAndChangingNullability() {
return Arrays.asList("ALTER TABLE Other ALTER COLUMN floatField RENAME TO blahField",
"ALTER TABLE Other ALTER COLUMN blahField SET NULL", "ALTER TABLE Other ALTER COLUMN blahField DECIMAL(20,3)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterColumnChangingLengthAndCase()
*/
@Override
protected List<String> expectedAlterColumnChangingLengthAndCase() {
return Arrays.asList("ALTER TABLE Other ALTER COLUMN floatField RENAME TO FloatField",
"ALTER TABLE Other ALTER COLUMN FloatField DECIMAL(20,3)");
}
/**
* Wraps a SQL literal in a CAST to VARCHAR, sizing the VARCHAR to the
* literal's content length with the surrounding quote characters excluded
* (so {@code 'blah'} becomes {@code CAST('blah' AS VARCHAR(4))}).
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#varCharCast(java.lang.String)
*/
@Override
protected String varCharCast(String value) {
// Strip the quote characters so only the payload contributes to the length.
final String unquoted = StringUtils.replace(value, "'", "");
return "CAST(" + value + " AS VARCHAR(" + unquoted.length() + "))";
}
// --- Expected-SQL fixtures: defaults, autonumber merge, correlated updates, views and date conversions ---
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterTableAddStringColumnWithDefaultStatement()
*/
@Override
protected List<String> expectedAlterTableAddStringColumnWithDefaultStatement() {
return Arrays.asList("ALTER TABLE Test ADD COLUMN stringField_with_default VARCHAR(6) DEFAULT CAST('N' AS VARCHAR(1)) NOT NULL");
}
/**
* {@inheritDoc}
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAutonumberUpdate()
*/
@Override
protected List<String> expectedAutonumberUpdate() {
return Arrays.asList("MERGE INTO Autonumber (id, value) SELECT 'TestTable', (SELECT GREATEST((SELECT COALESCE(MAX(id) + 1, 1) AS CurrentValue FROM TestTable), (SELECT value from Autonumber WHERE name='TestTable'), 1))");
}
/**
* {@inheritDoc}
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedUpdateWithSelectMinimum()
*/
@Override
protected String expectedUpdateWithSelectMinimum() {
String value1 = varCharCast("'S'");
String value2 = varCharCast("'Y'");
return "UPDATE " + tableName("Other") + " O SET intField = (SELECT MIN(intField) FROM " + tableName("Test") + " T WHERE ((T.charField = " + stringLiteralPrefix() + value1 + ") AND (T.stringField = O.stringField) AND (T.intField = O.intField))) WHERE (stringField = " + stringLiteralPrefix() + value2 + ")";
}
/**
* {@inheritDoc}
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedUpdateUsingAliasedDestinationTable()
*/
@Override
protected String expectedUpdateUsingAliasedDestinationTable() {
return "UPDATE " + tableName("FloatingRateRate") + " A SET settlementFrequency = (SELECT settlementFrequency FROM " + tableName("FloatingRateDetail") + " B WHERE (A.floatingRateDetailId = B.id))";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedCreateViewStatement()
*/
@Override
protected String expectedCreateViewStatement() {
return "CREATE VIEW " + tableName("TestView") + " AS (SELECT stringField FROM " + tableName("Test") + " WHERE (stringField = " + varCharCast("'blah'") + "))";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedYYYYMMDDToDate()
*/
@Override
protected String expectedYYYYMMDDToDate() {
return "CAST(SUBSTRING(CAST('20100101' AS VARCHAR(8)), 1, 4)||'-'||SUBSTRING(CAST('20100101' AS VARCHAR(8)), 5, 2)||'-'||SUBSTRING(CAST('20100101' AS VARCHAR(8)), 7, 2) AS DATE)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDateToYyyymmdd()
*/
@Override
protected String expectedDateToYyyymmdd() {
return "CAST(SUBSTRING(testField, 1, 4)||SUBSTRING(testField, 6, 2)||SUBSTRING(testField, 9, 2) AS INT)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDateToYyyymmddHHmmss()
*/
@Override
protected String expectedDateToYyyymmddHHmmss() {
return "CAST(SUBSTRING(testField, 1, 4)||SUBSTRING(testField, 6, 2)||SUBSTRING(testField, 9, 2)||SUBSTRING(testField, 12, 2)||SUBSTRING(testField, 15, 2)||SUBSTRING(testField, 18, 2) AS BIGINT)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedNow()
*/
@Override
protected String expectedNow() {
return "CURRENT_TIMESTAMP()";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDropViewStatements()
*/
@Override
protected List<String> expectedDropViewStatements() {
return Arrays.asList("DROP VIEW " + tableName("TestView") + " IF EXISTS CASCADE");
}
// --- Expected-SQL fixtures: literal casts, MERGE statements, date arithmetic ---
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedStringLiteralToIntegerCast()
*/
@Override
protected String expectedStringLiteralToIntegerCast() {
return "CAST(" + varCharCast("'1234567890'") + " AS INTEGER)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedSubstring()
*/
@Override
protected String expectedSubstring() {
return "SELECT SUBSTRING(field1, 1, 3) FROM " + tableName("schedule");
}
/**
* {@inheritDoc}
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAutonumberUpdateForNonIdColumn()
*/
@Override
protected List<String> expectedAutonumberUpdateForNonIdColumn() {
return Arrays.asList("MERGE INTO Autonumber (id, value) SELECT 'TestTable', (SELECT GREATEST((SELECT COALESCE(MAX(generatedColumn) + 1, 1) AS CurrentValue FROM TestTable), (SELECT value from Autonumber WHERE name='TestTable'), 1))");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedStringFunctionCast()
*/
@Override
protected String expectedStringFunctionCast() {
return "CAST(MIN(field) AS VARCHAR(8))";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedDaysBetween()
*/
@Override
protected String expectedDaysBetween() {
return "SELECT DATEDIFF('DAY',dateOne, dateTwo) FROM MyTable";
}
/**
* Merges use the MERGE INTO ... KEY(...) SELECT form.
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMergeSimple()
*/
@Override
protected String expectedMergeSimple() {
return "MERGE INTO foo(id, bar) KEY(id) SELECT somewhere.newId AS id, somewhere.newBar AS bar FROM somewhere";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMergeComplex()
*/
@Override
protected String expectedMergeComplex() {
return "MERGE INTO foo(id, bar) KEY(id) SELECT somewhere.newId AS id, join.joinBar AS bar FROM somewhere INNER JOIN join ON (somewhere.newId = join.joinId)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMergeSourceInDifferentSchema()
*/
@Override
protected String expectedMergeSourceInDifferentSchema() {
return "MERGE INTO foo(id, bar) KEY(id) SELECT somewhere.newId AS id, somewhere.newBar AS bar FROM MYSCHEMA.somewhere";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMergeTargetInDifferentSchema()
*/
@Override
protected String expectedMergeTargetInDifferentSchema() {
return "MERGE INTO MYSCHEMA.foo(id, bar) KEY(id) SELECT somewhere.newId AS id, somewhere.newBar AS bar FROM somewhere";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAddDays()
*/
@Override
protected String expectedAddDays() {
return "DATEADD('DAY', -20, testField)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAddMonths()
*/
@Override
protected String expectedAddMonths() {
return "DATEADD('MONTH', -3, testField)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAlterRemoveColumnFromSimpleKeyStatements()
*/
@Override
protected List<String> expectedAlterRemoveColumnFromSimpleKeyStatements() {
return Collections.singletonList("ALTER TABLE Test DROP COLUMN id");
}
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedRenameTableStatements()
*/
@Override
protected List<String> expectedRenameTableStatements() {
return ImmutableList.of(
"ALTER TABLE Test DROP CONSTRAINT Test_PK",
"ALTER TABLE Test RENAME TO Renamed",
"ALTER TABLE Renamed ADD CONSTRAINT Renamed_PK PRIMARY KEY (id)"
);
}
/**
* @return the expected statements for renaming a table with a long name.
*/
@Override
protected List<String> getRenamingTableWithLongNameStatements() {
return ImmutableList.of(
"ALTER TABLE 123456789012345678901234567890XXX DROP CONSTRAINT 123456789012345678901234567890XXX_PK",
"ALTER TABLE 123456789012345678901234567890XXX RENAME TO Blah",
"ALTER TABLE Blah ADD CONSTRAINT Blah_PK PRIMARY KEY (id)");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedRenameIndexStatements()
*/
@Override
protected List<String> expectedRenameIndexStatements() {
return ImmutableList.of("ALTER INDEX TempTest_1 RENAME TO TempTest_2");
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedMergeForAllPrimaryKeys()
*/
@Override
protected String expectedMergeForAllPrimaryKeys() {
return "MERGE INTO foo(id) KEY(id) SELECT somewhere.newId AS id FROM somewhere";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedRandomString()
*/
@Override
protected String expectedRandomString() {
return "SUBSTRING(REPLACE(RANDOM_UUID(),'-'), 1, 10)";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedSelectLiteralWithWhereClauseString()
*/
@Override
protected String expectedSelectLiteralWithWhereClauseString() {
return "SELECT CAST('LITERAL' AS VARCHAR(7)) FROM dual WHERE (CAST('ONE' AS VARCHAR(3)) = CAST('ONE' AS VARCHAR(3)))";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAddTableFromStatements()
*/
@Override
protected List<String> expectedAddTableFromStatements() {
return ImmutableList.of(
"CREATE TABLE SomeTable (someField VARCHAR(3) NOT NULL, otherField DECIMAL(3,0) NOT NULL, CONSTRAINT SomeTable_PK PRIMARY KEY (someField))",
"CREATE INDEX SomeTable_1 ON SomeTable (otherField)",
"INSERT INTO SomeTable SELECT someField, otherField FROM OtherTable"
);
}
/**
* No hints are supported.
*
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedHints1(int)
*/
@Override
protected String expectedHints1(int rowCount) {
return "SELECT * FROM SCHEMA2.Foo INNER JOIN Bar ON (a = b) LEFT OUTER JOIN Fo ON (a = b) INNER JOIN Fum Fumble ON (a = b) ORDER BY a";
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#supportsWindowFunctions()
*/
@Override
protected boolean supportsWindowFunctions() {
return false;
}
/**
* @see org.alfasoftware.morf.jdbc.AbstractSqlDialectTest#expectedAnalyseTableSql()
*/
@Override
protected Collection<String> expectedAnalyseTableSql() {
return SqlDialect.NO_STATEMENTS;
}
/**
* @return The expected SQL for a delete statement with a limit and where criterion.
*/
@Override
protected String expectedDeleteWithLimitAndWhere(String value) {
return "DELETE FROM " + tableName(TEST_TABLE) + " WHERE (Test.stringField = " + stringLiteralPrefix() + value + ") LIMIT 1000";
}
/**
* @return The expected SQL for a delete statement with a limit and where criterion.
*/
@Override
protected String expectedDeleteWithLimitAndComplexWhere(String value1, String value2) {
return "DELETE FROM " + tableName(TEST_TABLE) + " WHERE ((Test.stringField = " + stringLiteralPrefix() + value1 + ") OR (Test.stringField = " + stringLiteralPrefix() + value2 + ")) LIMIT 1000";
}
/**
* @return The expected SQL for a delete statement with a limit and where criterion.
*/
@Override
protected String expectedDeleteWithLimitWithoutWhere() {
return "DELETE FROM " + tableName(TEST_TABLE) + " LIMIT 1000";
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Boris V. Kuznetsov
* @version $Revision$
*/
package java.security;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Enumeration;
import java.net.URL;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.harmony.security.fortress.Engine;
import org.apache.harmony.security.fortress.PolicyUtils;
import org.apache.harmony.security.fortress.SecurityAccess;
import org.apache.harmony.security.fortress.Services;
import org.apache.harmony.security.internal.nls.Messages;
/**
* For access to security providers and properties.
*/
public final class Security {

    // Security properties, populated by the static initializer below from the
    // java.security properties file(s), or from registerDefaultProviders() as
    // a fallback.
    private static Properties secprops = new Properties();

    // static initialization
    // - load security properties files
    // - load statically registered providers
    // - if no provider description file found then load default providers
    static {
        AccessController.doPrivileged(new java.security.PrivilegedAction<Void>() {
            public Void run() {
                boolean loaded = false;
                // Step 1: load the master file ${java.home}/lib/security/java.security.
                File f = new File(System.getProperty("java.home") //$NON-NLS-1$
                        + File.separator + "lib" + File.separator //$NON-NLS-1$
                        + "security" + File.separator + "java.security"); //$NON-NLS-1$ //$NON-NLS-2$
                if (f.exists()) {
                    try {
                        FileInputStream fis = new FileInputStream(f);
                        InputStream is = new BufferedInputStream(fis);
                        secprops.load(is);
                        loaded = true;
                        is.close();
                        // NOTE(review): if load() throws, is.close() is skipped and
                        // the stream leaks — confirm whether a finally block is wanted.
                    } catch (IOException e) {
                        // Best-effort load: an unreadable file silently leaves
                        // the defaults in place.
                        // System.err.println("Could not load Security properties file: "
                        //                        + e);
                    }
                }
                // Step 2: optionally merge (or, with a leading '=', replace) the
                // properties with the file named by -Djava.security.properties,
                // unless the master file disabled this via
                // security.allowCustomPropertiesFile=false.
                if ("true".equalsIgnoreCase(secprops.getProperty("security.allowCustomPropertiesFile", "true"))) { //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
                    String securityFile = System.getProperty("java.security.properties"); //$NON-NLS-1$
                    if (securityFile != null) {
                        if (securityFile.startsWith("=")) { // overwrite //$NON-NLS-1$
                            // '=' prefix means "replace", so discard everything
                            // loaded in step 1.
                            secprops = new Properties();
                            loaded = false;
                            securityFile = securityFile.substring(1);
                        }
                        try {
                            // Expand ${...} system-property references in the path.
                            securityFile = PolicyUtils.expand(securityFile, System.getProperties());
                        } catch (PolicyUtils.ExpansionFailedException e) {
                            // Fall through with the unexpanded name; the load
                            // below will then fail quietly.
                            // System.err.println("Could not load custom Security properties file "
                            //                        + securityFile +": " + e);
                        }
                        f = new File(securityFile);
                        InputStream is;
                        try {
                            // The value may name either a local file or a URL.
                            if (f.exists()) {
                                FileInputStream fis = new FileInputStream(f);
                                is = new BufferedInputStream(fis);
                            } else {
                                URL url = new URL(securityFile);
                                is = new BufferedInputStream(url.openStream());
                            }
                            secprops.load(is);
                            loaded = true;
                            is.close();
                        } catch (IOException e) {
                            // Best-effort, as above.
                            // System.err.println("Could not load custom Security properties file "
                            //                        + securityFile +": " + e);
                        }
                    }
                }
                // Step 3: if nothing at all was loaded, fall back to the
                // hard-coded default provider list.
                if (!loaded) {
                    registerDefaultProviders();
                }
                // Give the fortress Engine access back into this class.
                Engine.door = new SecurityDoor();
                return null;
            }
        });
    }

    /**
     * This class can't be instantiated.
     */
    private Security() {
    }

    // Register default providers used when no properties file could be read.
    private static void registerDefaultProviders() {
        secprops.put("security.provider.1", "org.apache.harmony.security.provider.cert.DRLCertFactory"); //$NON-NLS-1$ //$NON-NLS-2$
        secprops.put("security.provider.2", "org.apache.harmony.security.provider.crypto.CryptoProvider"); //$NON-NLS-1$ //$NON-NLS-2$
        secprops.put("security.provider.3", "org.apache.harmony.xnet.provider.jsse.JSSEProvider"); //$NON-NLS-1$ //$NON-NLS-2$
        secprops.put("security.provider.4", "org.bouncycastle.jce.provider.BouncyCastleProvider"); //$NON-NLS-1$ //$NON-NLS-2$
    }

    /**
     * Searches every installed provider for a property named
     * <code>propName.algName</code> (case-insensitive) and returns the first
     * matching value, or <code>null</code> if no provider defines it or either
     * argument is <code>null</code>.
     *
     * @param algName the algorithm name
     * @param propName the property name
     * @return the first matching provider property value, or <code>null</code>
     *
     * @deprecated Use AlgorithmParameters and KeyFactory instead
     */
    public static String getAlgorithmProperty(String algName, String propName) {
        if (algName == null || propName == null) {
            return null;
        }
        String prop = propName + "." + algName; //$NON-NLS-1$
        Provider[] providers = getProviders();
        for (int i = 0; i < providers.length; i++) {
            for (Enumeration e = providers[i].propertyNames(); e
                    .hasMoreElements();) {
                String pname = (String) e.nextElement();
                if (prop.equalsIgnoreCase(pname)) {
                    return providers[i].getProperty(pname);
                }
            }
        }
        return null;
    }

    /**
     * Inserts the given provider at the requested 1-based position, shifting
     * later providers down. Requires the
     * <code>"insertProvider.&lt;name&gt;"</code> security permission when a
     * SecurityManager is installed.
     *
     * @return the actual position the provider was installed at, or -1 if a
     *         provider with the same name is already installed
     */
    public static synchronized int insertProviderAt(Provider provider,
            int position) {
        // check security access; check that provider is not already
        // installed, else return -1; if (position <1) or (position > max
        // position) position = max position + 1; insert provider, shift up
        // one position for next providers; Note: The position is 1-based
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkSecurityAccess("insertProvider." + provider.getName()); //$NON-NLS-1$
        }
        if (getProvider(provider.getName()) != null) {
            return -1;
        }
        int result = Services.insertProviderAt(provider, position);
        renumProviders();
        return result;
    }

    /**
     * Adds the extra provider to the collection of providers.
     * @param provider
     *
     * @return int The priority/position of the provider added.
     * @exception SecurityException
     *                If there is a SecurityManager installed and it denies
     *                adding a new provider.
     */
    public static int addProvider(Provider provider) {
        // Position 0 is out of the 1-based range; presumably
        // Services.insertProviderAt treats it as "append at lowest
        // priority", per the addProvider contract — TODO confirm.
        return insertProviderAt(provider, 0);
    }

    /**
     * Removes the named provider, if installed, and renumbers the remaining
     * providers. A <code>null</code> or empty name, or an unknown provider,
     * is silently ignored (no security check is performed in those cases).
     * Requires the <code>"removeProvider.&lt;name&gt;"</code> security
     * permission when a SecurityManager is installed.
     */
    public static synchronized void removeProvider(String name) {
        // It is not clear from spec.:
        // 1. if name is null, should we checkSecurityAccess or not?
        //     throw SecurityException or not?
        // 2. as 1 but provider is not installed
        // 3. behavior if name is empty string?
        Provider p;
        if ((name == null) || (name.length() == 0)) {
            return;
        }
        p = getProvider(name);
        if (p == null) {
            return;
        }
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkSecurityAccess("removeProvider." + name); //$NON-NLS-1$
        }
        Services.removeProvider(p.getProviderNumber());
        renumProviders();
        // Mark the removed provider as uninstalled.
        p.setProviderNumber(-1);
    }

    /**
     * Returns all currently installed providers, in priority order.
     */
    public static synchronized Provider[] getProviders() {
        return Services.getProviders();
    }

    /**
     * Returns the installed provider with the given name, or
     * <code>null</code> if none matches.
     */
    public static synchronized Provider getProvider(String name) {
        return Services.getProvider(name);
    }

    /**
     * Returns the collection of providers which meet the user supplied string
     * filter.
     *
     * @param filter
     *            case-insensitive filter
     * @return the providers which meet the user supplied string filter
     *         <code>filter</code>. A <code>null</code> value signifies
     *         that none of the installed providers meets the filter
     *         specification
     * @exception InvalidParameterException
     *                if an unusable filter is supplied
     */
    public static Provider[] getProviders(String filter) {
        if (filter == null) {
            throw new NullPointerException(Messages.getString("security.2A")); //$NON-NLS-1$
        }
        if (filter.length() == 0) {
            throw new InvalidParameterException(
                    Messages.getString("security.2B")); //$NON-NLS-1$
        }
        // Split "key:value" into a single-entry map and delegate to the
        // Map-based overload; a filter without ':' maps to an empty value.
        HashMap<String, String> hm = new HashMap<String, String>();
        int i = filter.indexOf(":"); //$NON-NLS-1$
        if ((i == filter.length() - 1) || (i == 0)) {
            // ':' at either end leaves one side empty — invalid.
            throw new InvalidParameterException(
                    Messages.getString("security.2B")); //$NON-NLS-1$
        }
        if (i < 1) {
            hm.put(filter, ""); //$NON-NLS-1$
        } else {
            hm.put(filter.substring(0, i), filter.substring(i + 1));
        }
        return getProviders(hm);
    }

    /**
     * Returns the installed providers satisfying every filter entry. Each key
     * has the form <code>&lt;service&gt;.&lt;algorithm&gt;</code> (value must
     * then be empty) or
     * <code>&lt;service&gt;.&lt;algorithm&gt; &lt;attribute&gt;</code> (value
     * must then be non-empty). Returns <code>null</code> when the filter is
     * empty or no provider matches.
     *
     * @exception InvalidParameterException if a filter entry is malformed
     */
    public static synchronized Provider[] getProviders(Map<String,String> filter) {
        if (filter == null) {
            throw new NullPointerException(Messages.getString("security.2A")); //$NON-NLS-1$
        }
        if (filter.isEmpty()) {
            return null;
        }
        java.util.List<Provider> result = Services.getProvidersList();
        Set keys = filter.entrySet();
        Map.Entry entry;
        for (Iterator it = keys.iterator(); it.hasNext();) {
            entry = (Map.Entry) it.next();
            String key = (String) entry.getKey();
            String val = (String) entry.getValue();
            String attribute = null;
            // i: start of the optional " <attribute>" suffix; j: the mandatory
            // '.' between service and algorithm.
            int i = key.indexOf(" "); //$NON-NLS-1$
            int j = key.indexOf("."); //$NON-NLS-1$
            if (j == -1) {
                throw new InvalidParameterException(
                        Messages.getString("security.2B")); //$NON-NLS-1$
            }
            if (i == -1) { // <crypto_service>.<algorithm_or_type>
                if (val.length() != 0) {
                    throw new InvalidParameterException(
                            Messages.getString("security.2B")); //$NON-NLS-1$
                }
            } else { // <crypto_service>.<algorithm_or_type> <attribute_name>
                if (val.length() == 0) {
                    throw new InvalidParameterException(
                            Messages.getString("security.2B")); //$NON-NLS-1$
                }
                attribute = key.substring(i + 1);
                if (attribute.trim().length() == 0) {
                    throw new InvalidParameterException(
                            Messages.getString("security.2B")); //$NON-NLS-1$
                }
                key = key.substring(0, i);
            }
            String serv = key.substring(0, j);
            String alg = key.substring(j + 1);
            if (serv.length() == 0 || alg.length() == 0) {
                throw new InvalidParameterException(
                        Messages.getString("security.2B")); //$NON-NLS-1$
            }
            // Filter the candidate list in place, removing providers that do
            // not implement this service/algorithm/attribute combination.
            Provider p;
            for (int k = 0; k < result.size(); k++) {
                try {
                    p = (Provider) result.get(k);
                } catch (IndexOutOfBoundsException e) {
                    break;
                }
                if (!p.implementsAlg(serv, alg, attribute, val)) {
                    result.remove(p);
                    k--;
                }
            }
        }
        if (result.size() > 0) {
            return result.toArray(new Provider[result.size()]);
        } else {
            return null;
        }
    }

    /**
     * Answers the value of the security property named by the argument.
     *
     *
     * @param key
     *            String The property name
     * @return String The property value
     *
     * @exception SecurityException
     *                If there is a SecurityManager installed and it will not
     *                allow the property to be fetched from the current access
     *                control context.
     */
    public static String getProperty(String key) {
        if (key == null) {
            throw new NullPointerException(Messages.getString("security.2C")); //$NON-NLS-1$
        }
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkSecurityAccess("getProperty." + key); //$NON-NLS-1$
        }
        return secprops.getProperty(key);
    }

    /**
     * Sets a given security property.
     *
     *
     * @param key
     *            String The property name.
     * @param datnum
     *            String The property value.
     * @exception SecurityException
     *                If there is a SecurityManager installed and it will not
     *                allow the property to be set from the current access
     *                control context.
     */
    public static void setProperty(String key, String datnum) {
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            sm.checkSecurityAccess("setProperty." + key); //$NON-NLS-1$
        }
        secprops.put(key, datnum);
    }

    /**
     * Returns the set of algorithm names offered by any installed provider
     * for the given service type (case-insensitive), e.g. "MessageDigest".
     */
    public static Set<String> getAlgorithms(String serviceName) {
        Set<String> result = new HashSet<String>();
        Provider[] p = getProviders();
        for (int i = 0; i < p.length; i++) {
            for (Iterator it = p[i].getServices().iterator(); it.hasNext();) {
                Provider.Service s = (Provider.Service) it.next();
                if (s.getType().equalsIgnoreCase(serviceName)) {
                    result.add(s.getAlgorithm());
                }
            }
        }
        return result;
    }

    /**
     *
     * Update sequence numbers of all providers so they are contiguous and
     * 1-based after an insert or remove.
     *
     */
    private static void renumProviders() {
        Provider[] p = Services.getProviders();
        for (int i = 0; i < p.length; i++) {
            p[i].setProviderNumber(i + 1);
        }
    }

    // Package-private bridge handed to Engine.door in the static initializer,
    // giving the fortress framework access to non-public operations.
    private static class SecurityDoor implements SecurityAccess {
        // Access to Security.renumProviders()
        public void renumProviders() {
            Security.renumProviders();
        }

        //  Access to Security.getAliases()
        public Iterator<String> getAliases(Provider.Service s) {
            return s.getAliases();
        }

        // Access to Provider.getService()
        public Provider.Service getService(Provider p, String type) {
            return p.getService(type);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.google.common.base.StandardSystemProperty;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.airlift.units.MinDataSize;
import io.airlift.units.MinDuration;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import java.io.File;
import java.util.concurrent.TimeUnit;
import static io.airlift.units.DataSize.Unit.MEGABYTE;
/**
 * Configuration bean for Presto's Hive connector access to Amazon S3.
 * Values are bound from {@code hive.s3.*} configuration properties via the
 * airlift {@link Config} annotations on the setters; each setter returns
 * {@code this} for fluent use in tests.
 */
public class HiveS3Config {
    // Credentials and endpoint.
    private String s3AwsAccessKey;
    private String s3AwsSecretKey;
    private String s3Endpoint;
    private PrestoS3SignerType s3SignerType;
    private boolean s3UseInstanceCredentials = true;
    private boolean s3SslEnabled = true;
    // Server-side / client-side encryption settings.
    private boolean s3SseEnabled;
    private PrestoS3SseType s3SseType = PrestoS3SseType.S3;
    private String s3EncryptionMaterialsProvider;
    private String s3KmsKeyId;
    private String s3SseKmsKeyId;
    // Retry, timeout and connection tuning.
    private int s3MaxClientRetries = 3;
    private int s3MaxErrorRetries = 10;
    private Duration s3MaxBackoffTime = new Duration(10, TimeUnit.MINUTES);
    private Duration s3MaxRetryTime = new Duration(10, TimeUnit.MINUTES);
    private Duration s3ConnectTimeout = new Duration(5, TimeUnit.SECONDS);
    private Duration s3SocketTimeout = new Duration(5, TimeUnit.SECONDS);
    private int s3MaxConnections = 500;
    // Upload staging and multipart thresholds.
    private File s3StagingDirectory = new File(StandardSystemProperty.JAVA_IO_TMPDIR.value());
    private DataSize s3MultipartMinFileSize = new DataSize(16, MEGABYTE);
    private DataSize s3MultipartMinPartSize = new DataSize(5, MEGABYTE);
    private boolean pinS3ClientToCurrentRegion;
    private String s3UserAgentPrefix = "";

    /** AWS access key, or {@code null} to use another credentials source. */
    public String getS3AwsAccessKey() {
        return s3AwsAccessKey;
    }

    @Config("hive.s3.aws-access-key")
    public HiveS3Config setS3AwsAccessKey(String s3AwsAccessKey) {
        this.s3AwsAccessKey = s3AwsAccessKey;
        return this;
    }

    /** AWS secret key paired with {@link #getS3AwsAccessKey()}. */
    public String getS3AwsSecretKey() {
        return s3AwsSecretKey;
    }

    @Config("hive.s3.aws-secret-key")
    public HiveS3Config setS3AwsSecretKey(String s3AwsSecretKey) {
        this.s3AwsSecretKey = s3AwsSecretKey;
        return this;
    }

    /** Custom S3 endpoint, or {@code null} for the AWS default. */
    public String getS3Endpoint() {
        return s3Endpoint;
    }

    @Config("hive.s3.endpoint")
    public HiveS3Config setS3Endpoint(String s3Endpoint) {
        this.s3Endpoint = s3Endpoint;
        return this;
    }

    /** Explicit request signer type, or {@code null} for the SDK default. */
    public PrestoS3SignerType getS3SignerType() {
        return s3SignerType;
    }

    @Config("hive.s3.signer-type")
    public HiveS3Config setS3SignerType(PrestoS3SignerType s3SignerType) {
        this.s3SignerType = s3SignerType;
        return this;
    }

    /** Whether to use EC2 instance credentials (default {@code true}). */
    public boolean isS3UseInstanceCredentials() {
        return s3UseInstanceCredentials;
    }

    @Config("hive.s3.use-instance-credentials")
    public HiveS3Config setS3UseInstanceCredentials(boolean s3UseInstanceCredentials) {
        this.s3UseInstanceCredentials = s3UseInstanceCredentials;
        return this;
    }

    /** Whether to talk to S3 over HTTPS (default {@code true}). */
    public boolean isS3SslEnabled() {
        return s3SslEnabled;
    }

    @Config("hive.s3.ssl.enabled")
    public HiveS3Config setS3SslEnabled(boolean s3SslEnabled) {
        this.s3SslEnabled = s3SslEnabled;
        return this;
    }

    /** Class name of a custom client-side encryption materials provider. */
    public String getS3EncryptionMaterialsProvider() {
        return s3EncryptionMaterialsProvider;
    }

    @Config("hive.s3.encryption-materials-provider")
    @ConfigDescription("Use a custom encryption materials provider for S3 data encryption")
    public HiveS3Config setS3EncryptionMaterialsProvider(String s3EncryptionMaterialsProvider) {
        this.s3EncryptionMaterialsProvider = s3EncryptionMaterialsProvider;
        return this;
    }

    /** KMS key id for client-side encryption, or {@code null}. */
    public String getS3KmsKeyId() {
        return s3KmsKeyId;
    }

    @Config("hive.s3.kms-key-id")
    @ConfigDescription("Use an AWS KMS key for S3 data encryption")
    public HiveS3Config setS3KmsKeyId(String s3KmsKeyId) {
        this.s3KmsKeyId = s3KmsKeyId;
        return this;
    }

    /** KMS key id for SSE-KMS server-side encryption, or {@code null}. */
    public String getS3SseKmsKeyId() {
        return s3SseKmsKeyId;
    }

    @Config("hive.s3.sse.kms-key-id")
    @ConfigDescription("KMS Key ID to use for S3 server-side encryption with KMS-managed key")
    public HiveS3Config setS3SseKmsKeyId(String s3SseKmsKeyId) {
        this.s3SseKmsKeyId = s3SseKmsKeyId;
        return this;
    }

    /** Whether server-side encryption is requested on uploads. */
    public boolean isS3SseEnabled() {
        return s3SseEnabled;
    }

    @Config("hive.s3.sse.enabled")
    @ConfigDescription("Enable S3 server side encryption")
    public HiveS3Config setS3SseEnabled(boolean s3SseEnabled) {
        this.s3SseEnabled = s3SseEnabled;
        return this;
    }

    /** Server-side encryption key management type (default S3-managed). */
    @NotNull
    public PrestoS3SseType getS3SseType() {
        return s3SseType;
    }

    @Config("hive.s3.sse.type")
    @ConfigDescription("Key management type for S3 server-side encryption (S3 or KMS)")
    public HiveS3Config setS3SseType(PrestoS3SseType s3SseType) {
        this.s3SseType = s3SseType;
        return this;
    }

    /** Retries performed inside the AWS SDK client (default 3). */
    @Min(0)
    public int getS3MaxClientRetries() {
        return s3MaxClientRetries;
    }

    @Config("hive.s3.max-client-retries")
    public HiveS3Config setS3MaxClientRetries(int s3MaxClientRetries) {
        this.s3MaxClientRetries = s3MaxClientRetries;
        return this;
    }

    /** Retries performed by Presto on top of the SDK (default 10). */
    @Min(0)
    public int getS3MaxErrorRetries() {
        return s3MaxErrorRetries;
    }

    @Config("hive.s3.max-error-retries")
    public HiveS3Config setS3MaxErrorRetries(int s3MaxErrorRetries) {
        this.s3MaxErrorRetries = s3MaxErrorRetries;
        return this;
    }

    /** Upper bound on exponential-backoff delay between retries. */
    @MinDuration("1s")
    @NotNull
    public Duration getS3MaxBackoffTime() {
        return s3MaxBackoffTime;
    }

    @Config("hive.s3.max-backoff-time")
    public HiveS3Config setS3MaxBackoffTime(Duration s3MaxBackoffTime) {
        this.s3MaxBackoffTime = s3MaxBackoffTime;
        return this;
    }

    /** Total time budget for retrying a single request. */
    @MinDuration("1ms")
    @NotNull
    public Duration getS3MaxRetryTime() {
        return s3MaxRetryTime;
    }

    @Config("hive.s3.max-retry-time")
    public HiveS3Config setS3MaxRetryTime(Duration s3MaxRetryTime) {
        this.s3MaxRetryTime = s3MaxRetryTime;
        return this;
    }

    /** TCP connect timeout (default 5s). */
    @MinDuration("1ms")
    @NotNull
    public Duration getS3ConnectTimeout() {
        return s3ConnectTimeout;
    }

    @Config("hive.s3.connect-timeout")
    public HiveS3Config setS3ConnectTimeout(Duration s3ConnectTimeout) {
        this.s3ConnectTimeout = s3ConnectTimeout;
        return this;
    }

    /** Socket read timeout (default 5s). */
    @MinDuration("1ms")
    @NotNull
    public Duration getS3SocketTimeout() {
        return s3SocketTimeout;
    }

    @Config("hive.s3.socket-timeout")
    public HiveS3Config setS3SocketTimeout(Duration s3SocketTimeout) {
        this.s3SocketTimeout = s3SocketTimeout;
        return this;
    }

    /** Maximum simultaneous connections to S3 (default 500). */
    @Min(1)
    public int getS3MaxConnections() {
        return s3MaxConnections;
    }

    @Config("hive.s3.max-connections")
    public HiveS3Config setS3MaxConnections(int s3MaxConnections) {
        this.s3MaxConnections = s3MaxConnections;
        return this;
    }

    /** Local directory where files are staged before upload (default tmpdir). */
    @NotNull
    public File getS3StagingDirectory() {
        return s3StagingDirectory;
    }

    @Config("hive.s3.staging-directory")
    @ConfigDescription("Temporary directory for staging files before uploading to S3")
    public HiveS3Config setS3StagingDirectory(File s3StagingDirectory) {
        this.s3StagingDirectory = s3StagingDirectory;
        return this;
    }

    /** Smallest file that triggers a multipart upload (default 16MB). */
    @NotNull
    @MinDataSize("16MB")
    public DataSize getS3MultipartMinFileSize() {
        return s3MultipartMinFileSize;
    }

    @Config("hive.s3.multipart.min-file-size")
    @ConfigDescription("Minimum file size for an S3 multipart upload")
    public HiveS3Config setS3MultipartMinFileSize(DataSize size) {
        this.s3MultipartMinFileSize = size;
        return this;
    }

    /** Smallest individual part in a multipart upload (default 5MB). */
    @NotNull
    @MinDataSize("5MB")
    public DataSize getS3MultipartMinPartSize() {
        return s3MultipartMinPartSize;
    }

    @Config("hive.s3.multipart.min-part-size")
    @ConfigDescription("Minimum part size for an S3 multipart upload")
    public HiveS3Config setS3MultipartMinPartSize(DataSize size) {
        this.s3MultipartMinPartSize = size;
        return this;
    }

    /** Whether the client is pinned to the current EC2 region. */
    public boolean isPinS3ClientToCurrentRegion() {
        return pinS3ClientToCurrentRegion;
    }

    @Config("hive.s3.pin-client-to-current-region")
    @ConfigDescription("Should the S3 client be pinned to the current EC2 region")
    public HiveS3Config setPinS3ClientToCurrentRegion(boolean pinS3ClientToCurrentRegion) {
        this.pinS3ClientToCurrentRegion = pinS3ClientToCurrentRegion;
        return this;
    }

    /** Prefix prepended to the HTTP User-Agent header on S3 calls. */
    @NotNull
    public String getS3UserAgentPrefix() {
        return s3UserAgentPrefix;
    }

    @Config("hive.s3.user-agent-prefix")
    @ConfigDescription("The user agent prefix to use for S3 calls")
    public HiveS3Config setS3UserAgentPrefix(String s3UserAgentPrefix) {
        this.s3UserAgentPrefix = s3UserAgentPrefix;
        return this;
    }
}
| |
package ibm;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JCheckBoxMenuItem;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JRadioButtonMenuItem;
import javax.swing.KeyStroke;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
public class MenuBarView {
public MenuBarView() {
    // Pre-size the check-box arrays for the Options menu (10 entries) and the
    // Deploy menu (18 entries); the items themselves are created later by
    // buildOptionMenu()/buildDeployMenu() via setSubMenu().
    optionMenu = new JCheckBoxMenuItem[10];
    deployMenu = new JCheckBoxMenuItem[18];
    // The configuration is supplied later through buildMenuBar().
    cfg = null;
}
/**
 * Assembles the application menu bar with the File, Options, Deploy and Help
 * menus, storing the configuration and option/deploy code tables for use by
 * the menu item listeners.
 *
 * @return the fully populated menu bar
 */
JMenuBar buildMenuBar(Settings settings, IBMExtractConfig cfg,
        String optionCodes[][], String deployCodes[][],
        ActionListener helpActionListener,
        ActionListener aboutActionListener,
        ActionListener getNewVersionActionListener,
        ActionListener executeAllListener, ActionListener executeListener,
        ActionListener revalidateListener,
        ActionListener db2FileOpenActionListener,
        ActionListener revalidateAllListener,
        ActionListener refreshListener, ActionListener discardListener) {
    this.cfg = cfg;
    this.optionCodes = optionCodes;
    this.deployCodes = deployCodes;

    JMenuBar menuBar = new JMenuBar();
    // Look-and-feel hints consumed by the JGoodies / Plastic L&Fs.
    menuBar.putClientProperty("jgoodies.headerStyle",
            settings.getMenuBarHeaderStyle());
    menuBar.putClientProperty("Plastic.borderStyle",
            settings.getMenuBarPlasticBorderStyle());
    menuBar.putClientProperty("jgoodies.windows.borderStyle",
            settings.getMenuBarWindowsBorderStyle());
    menuBar.putClientProperty("Plastic.is3D", settings.getMenuBar3DHint());

    menuBar.add(buildFileMenu(executeAllListener, executeListener,
            revalidateListener, db2FileOpenActionListener,
            revalidateAllListener, refreshListener, discardListener));
    menuBar.add(buildOptionMenu());
    menuBar.add(buildDeployMenu());
    menuBar.add(buildHelpMenu(helpActionListener, aboutActionListener,
            getNewVersionActionListener));
    return menuBar;
}
/**
 * Builds the File menu: directory selection, refresh, execute, revalidate
 * and discard actions, plus an Exit item when the host OS does not supply
 * one. Action-dependent items start disabled and are enabled elsewhere once
 * a DB2 objects directory is loaded.
 *
 * <p>NOTE(review): the accelerators "ctrl A" and "ctrl C" shadow the
 * standard select-all/copy shortcuts in focused text components — confirm
 * this is intended.
 */
private JMenu buildFileMenu(ActionListener executeAllListener,
        ActionListener executeListener, ActionListener revalidateListener,
        ActionListener db2FileOpenActionListener,
        ActionListener revalidateAllListener,
        ActionListener refreshListener, ActionListener discardListener) {
    JMenu menu = createMenu("File", 'F');
    JMenuItem item = createMenuItem("Select DB2 Objects Directory",
            readImageIcon("open.gif"), 'O', KeyStroke
                    .getKeyStroke("ctrl O"));
    if (db2FileOpenActionListener != null)
        item.addActionListener(db2FileOpenActionListener);
    menu.add(item);
    menuRefresh = createMenuItem("Refresh objects\u2026",
            readImageIcon("valid.gif"), 'L', KeyStroke
                    .getKeyStroke("ctrl L"));
    if (refreshListener != null)
        menuRefresh.addActionListener(refreshListener);
    menuRefresh.setEnabled(false);
    menu.add(menuRefresh);
    menu.addSeparator();
    menuExecuteAll = createMenuItem("Execute All Statements\u2026",
            readImageIcon("srcdb.png"), 'A', KeyStroke
                    .getKeyStroke("ctrl A"));
    if (executeAllListener != null)
        menuExecuteAll.addActionListener(executeAllListener);
    menuExecuteAll.setEnabled(false);
    menu.add(menuExecuteAll);
    menuExecute = createMenuItem("Execute Selected Statements\u2026",
            readImageIcon("dstdb.png"), 'E', KeyStroke
                    .getKeyStroke("ctrl E"));
    if (executeListener != null)
        menuExecute.addActionListener(executeListener);
    menuExecute.setEnabled(false);
    menu.add(menuExecute);
    menu.addSeparator();
    menuRevalidateAll = createMenuItem("Revalidate All Statements\u2026",
            readImageIcon("revalidate.png"), 'R', KeyStroke
                    .getKeyStroke("ctrl R"));
    if (revalidateAllListener != null)
        menuRevalidateAll.addActionListener(revalidateAllListener);
    menuRevalidateAll.setEnabled(false);
    menu.add(menuRevalidateAll);
    menuRevalidate = createMenuItem("Revalidate Selected Statements\u2026",
            readImageIcon("valid.gif"), 'H', KeyStroke
                    .getKeyStroke("ctrl H"));
    if (revalidateListener != null)
        menuRevalidate.addActionListener(revalidateListener);
    menuRevalidate.setEnabled(false);
    menu.add(menuRevalidate);
    menu.addSeparator();
    menuDiscard = createMenuItem("Do not deploy these objects\u2026",
            readImageIcon("valid.gif"), 'C', KeyStroke
                    .getKeyStroke("ctrl C"));
    if (discardListener != null)
        menuDiscard.addActionListener(discardListener);
    menuDiscard.setEnabled(false);
    menu.add(menuDiscard);
    if (!isQuitInOSMenu()) {
        menu.addSeparator();
        item = createMenuItem("Exit", 'x');
        menu.add(item);
        item.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                // BUG FIX: was System.exit(1). A user-initiated Exit is a
                // normal termination and must report status 0; a non-zero
                // status conventionally signals failure to the OS/launcher.
                System.exit(0);
            }
        });
    }
    return menu;
}
/**
 * Creates one check-box menu item, initializes its checked state from the
 * extract configuration, and wires a listener that writes the user's choice
 * back to both the configuration and the codes table.
 *
 * @param cbName      logical option/deploy code name, used to dispatch which
 *                    configuration field backs this item
 * @param idx         index of the item in {@code subMenu} and {@code codes}
 * @param menuHeading visible menu text
 * @param menu        menu the new item is appended to
 * @param subMenu     array the created item is stored into
 * @param codes       table whose {@code [idx][1]} cell mirrors the state
 */
private void setSubMenu(String cbName, final int idx,
        final String menuHeading, JMenu menu,
        final JCheckBoxMenuItem subMenu[], final String codes[][]) {
    subMenu[idx] = createCheckBoxMenuItem(menuHeading, false);
    subMenu[idx].setName(cbName);
    subMenu[idx].setEnabled(true);
    // Keep the label stable while the item is enabled (the check-box L&F
    // may otherwise alter it on state changes).
    subMenu[idx].addChangeListener(new ChangeListener() {
        public void stateChanged(ChangeEvent e) {
            JCheckBoxMenuItem source = (JCheckBoxMenuItem) e.getSource();
            if (source.isEnabled())
                source.setText(menuHeading);
        }
    });
    subMenu[idx].setIcon(readImageIcon("check.gif"));
    subMenu[idx].setSelectedIcon(readImageIcon("check_selected.gif"));
    // Seed the initial checked state from the configuration. Note that
    // OPTION_GENERATE_CONS_NAMES is the inverse of retainConstraintsName.
    if (cbName.equals("OPTION_TRAILING_BLANKS"))
        subMenu[idx].setSelected(Boolean.valueOf(
                cfg.getTrimTrailingSpaces()).booleanValue());
    else if (cbName.equals("OPTION_DBCLOBS"))
        subMenu[idx].setSelected(Boolean.valueOf(cfg.getDbclob())
                .booleanValue());
    else if (cbName.equals("OPTION_GRAPHICS"))
        subMenu[idx].setSelected(Boolean.valueOf(cfg.getGraphic())
                .booleanValue());
    else if (cbName.equals("OPTION_SPLIT_TRIGGER"))
        subMenu[idx].setSelected(Boolean.valueOf(
                cfg.getRegenerateTriggers()).booleanValue());
    else if (cbName.equals("OPTION_COMPRESS_TABLE"))
        subMenu[idx].setSelected(Boolean.valueOf(cfg.getCompressTable())
                .booleanValue());
    else if (cbName.equals("OPTION_COMPRESS_INDEX"))
        subMenu[idx].setSelected(Boolean.valueOf(cfg.getCompressIndex())
                .booleanValue());
    else if (cbName.equals("OPTION_EXTRACT_PARTITIONS"))
        subMenu[idx].setSelected(Boolean
                .valueOf(cfg.getExtractPartitions()).booleanValue());
    else if (cbName.equals("OPTION_EXTRACT_HASH_PARTITIONS"))
        subMenu[idx].setSelected(Boolean.valueOf(
                cfg.getExtractHashPartitions()).booleanValue());
    else if (cbName.equals("OPTION_GENERATE_CONS_NAMES"))
        subMenu[idx].setSelected(!Boolean.valueOf(
                cfg.getRetainConstraintsName()).booleanValue());
    else if (cbName.equals("OPTION_USE_BESTPRACTICE_TSNAMES"))
        subMenu[idx].setSelected(Boolean.valueOf(
                cfg.getUseBestPracticeTSNames()).booleanValue());
    // Propagate user toggles back into the configuration and codes table.
    subMenu[idx].addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            boolean value = subMenu[idx].isSelected();
            codes[idx][1] = Boolean.toString(value);
            JCheckBoxMenuItem o = (JCheckBoxMenuItem) e.getSource();
            if (o.getName().equals("OPTION_TRAILING_BLANKS")) {
                cfg.setTrimTrailingSpaces(Boolean.toString(value));
                codes[MenuBarView.OPTION_TRAILING_BLANKS.intValue()][1] = Boolean
                        .toString(value);
            } else if (o.getName().equals("OPTION_DBCLOBS")) {
                cfg.setDbclob(Boolean.toString(value));
                codes[MenuBarView.OPTION_DBCLOBS.intValue()][1] = Boolean
                        .toString(value);
            } else if (o.getName().equals("OPTION_GRAPHICS")) {
                cfg.setGraphic(Boolean.toString(value));
                codes[MenuBarView.OPTION_GRAPHICS.intValue()][1] = Boolean
                        .toString(value);
            } else if (o.getName().equals("OPTION_SPLIT_TRIGGER")) {
                cfg.setRegenerateTriggers(Boolean.toString(value));
                codes[MenuBarView.OPTION_SPLIT_TRIGGER.intValue()][1] = Boolean
                        .toString(value);
            } else if (o.getName().equals("OPTION_COMPRESS_TABLE")) {
                cfg.setCompressTable(Boolean.toString(value));
                codes[MenuBarView.OPTION_COMPRESS_TABLE.intValue()][1] = Boolean
                        .toString(value);
            } else if (o.getName().equals("OPTION_COMPRESS_INDEX")) {
                // BUG FIX: this branch previously called
                // cfg.setCompressTable(...) (copy-paste from the branch
                // above), so toggling "Compress Index" silently changed the
                // table-compression setting instead.
                cfg.setCompressIndex(Boolean.toString(value));
                codes[MenuBarView.OPTION_COMPRESS_INDEX.intValue()][1] = Boolean
                        .toString(value);
            } else if (o.getName().equals("OPTION_EXTRACT_PARTITIONS")) {
                cfg.setExtractPartitions(Boolean.toString(value));
                codes[MenuBarView.OPTION_EXTRACT_PARTITIONS.intValue()][1] = Boolean
                        .toString(value);
            } else if (o.getName().equals("OPTION_EXTRACT_HASH_PARTITIONS")) {
                cfg.setExtractHashPartitions(Boolean.toString(value));
                codes[MenuBarView.OPTION_EXTRACT_HASH_PARTITIONS.intValue()][1] = Boolean
                        .toString(value);
            } else if (o.getName().equals("OPTION_GENERATE_CONS_NAMES")) {
                // Inverted: the config stores "retain", the menu shows "generate".
                cfg.setRetainConstraintsName(Boolean.toString(!value));
                codes[MenuBarView.OPTION_GENERATE_CONS_NAMES.intValue()][1] = Boolean
                        .toString(!value);
            } else if (o.getName()
                    .equals("OPTION_USE_BESTPRACTICE_TSNAMES")) {
                cfg.setUseBestPracticeTSNames(Boolean.toString(value));
                codes[MenuBarView.OPTION_USE_BESTPRACTICE_TSNAMES
                        .intValue()][1] = Boolean.toString(value);
            }
        }
    });
    menu.add(subMenu[idx]);
}
/**
 * Builds the Options menu: one check-box item per extract option, each
 * created and wired by setSubMenu() against the optionMenu/optionCodes
 * arrays (indices come from the OPTION_* constants).
 */
private JMenu buildOptionMenu() {
    JMenu menu = createMenu("Options", 'O');
    setSubMenu("OPTION_TRAILING_BLANKS", OPTION_TRAILING_BLANKS.intValue(),
            "Trim trailing blanks during unload", menu, optionMenu,
            optionCodes);
    setSubMenu("OPTION_DBCLOBS", OPTION_DBCLOBS.intValue(),
            "Turn DB CLOB to varchar during unload", menu, optionMenu,
            optionCodes);
    setSubMenu("OPTION_GRAPHICS", OPTION_GRAPHICS.intValue(),
            "Turn graphics char to normal char", menu, optionMenu,
            optionCodes);
    setSubMenu("OPTION_SPLIT_TRIGGER", OPTION_SPLIT_TRIGGER.intValue(),
            "Split multiple action Triggers", menu, optionMenu, optionCodes);
    setSubMenu("OPTION_COMPRESS_TABLE", OPTION_COMPRESS_TABLE.intValue(),
            "Compress Tables", menu, optionMenu, optionCodes);
    setSubMenu("OPTION_COMPRESS_INDEX", OPTION_COMPRESS_INDEX.intValue(),
            "Compress Index", menu, optionMenu, optionCodes);
    setSubMenu("OPTION_EXTRACT_PARTITIONS", OPTION_EXTRACT_PARTITIONS
            .intValue(), "Extract Partitions", menu, optionMenu,
            optionCodes);
    setSubMenu("OPTION_EXTRACT_HASH_PARTITIONS",
            OPTION_EXTRACT_HASH_PARTITIONS.intValue(),
            "Extract Hash Partitions", menu, optionMenu, optionCodes);
    setSubMenu("OPTION_GENERATE_CONS_NAMES", OPTION_GENERATE_CONS_NAMES
            .intValue(), "Use Generated Constraints Names", menu,
            optionMenu, optionCodes);
    setSubMenu("OPTION_USE_BESTPRACTICE_TSNAMES",
            OPTION_USE_BESTPRACTICE_TSNAMES.intValue(),
            "Use Best Practice Tablespace Definitions", menu, optionMenu,
            optionCodes);
    return menu;
}
/**
 * Builds the Deploy menu: one check-box item per DB2 object category that
 * can be included in an interactive deploy, each created and wired by
 * setSubMenu() against the deployMenu/deployCodes arrays (indices come from
 * the DEPLOY_* constants).
 */
private JMenu buildDeployMenu() {
    JMenu menu = createMenu("Deploy", 'Y');
    setSubMenu("DEPLOY_TSBP", DEPLOY_TSBP.intValue(),
            "Include BUFFER POOL/TABLE SPACE in interactive Deploy", menu,
            deployMenu, deployCodes);
    setSubMenu("DEPLOY_ROLE", DEPLOY_ROLE.intValue(),
            "Include ROLE in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_SEQUENCE", DEPLOY_SEQUENCE.intValue(),
            "Include SEQUENCES in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_TABLE", DEPLOY_TABLE.intValue(),
            "Include TABLES in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_DEFAULT", DEPLOY_DEFAULT.intValue(),
            "Include DEFAULTS in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_CHECK_CONSTRAINTS", DEPLOY_CHECK_CONSTRAINTS
            .intValue(), "Include CHECK CONSTRAINTS in interactive Deploy",
            menu, deployMenu, deployCodes);
    setSubMenu("DEPLOY_PRIMARY_KEY", DEPLOY_PRIMARY_KEY.intValue(),
            "Include PRIMARY KEYS in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_UNIQUE_INDEX", DEPLOY_UNIQUE_INDEX.intValue(),
            "Include UNIQUE INDEXES in interactive Deploy", menu,
            deployMenu, deployCodes);
    setSubMenu("DEPLOY_INDEX", DEPLOY_INDEX.intValue(),
            "Include INDEXES in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_FOREIGN_KEYS", DEPLOY_FOREIGN_KEYS.intValue(),
            "Include FOREIGN KEYS in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_TYPE", DEPLOY_TYPE.intValue(),
            "Include TYPE in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_FUNCTION", DEPLOY_FUNCTION.intValue(),
            "Include FUNCTIONS in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_VIEW", DEPLOY_VIEW.intValue(),
            "Include VIEWS in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_MQT", DEPLOY_MQT.intValue(),
            "Include MQT in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_TRIGGER", DEPLOY_TRIGGER.intValue(),
            "Include TRIGGERS in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_PROCEDURE", DEPLOY_PROCEDURE.intValue(),
            "Include PROCEDURES in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_PACKAGE", DEPLOY_PACKAGE.intValue(),
            "Include PACKAGES in interactive Deploy", menu, deployMenu,
            deployCodes);
    setSubMenu("DEPLOY_PACKAGE_BODY", DEPLOY_PACKAGE_BODY.intValue(),
            "Include PACKAGE BODIES in interactive Deploy", menu,
            deployMenu, deployCodes);
    return menu;
}
/**
 * Builds the Help menu: Help Contents, Check New Version, and (when the host
 * OS does not provide one) an About item.
 */
private JMenu buildHelpMenu(ActionListener helpActionListener,
        ActionListener aboutActionListener,
        ActionListener getNewVersionActionListener) {
    JMenu menu = createMenu("Help", 'H');
    JMenuItem item = createMenuItem("Help Contents",
            readImageIcon("help.gif"), 'H');
    if (helpActionListener != null)
        item.addActionListener(helpActionListener);
    // BUG FIX: the "Help Contents" item was created and wired but never
    // added to the menu before the local variable was reused below, making
    // the help action unreachable from the UI.
    menu.add(item);
    item = createMenuItem("Check New Version",
            readImageIcon("check_selected.gif"), 'C');
    if (getNewVersionActionListener != null)
        item.addActionListener(getNewVersionActionListener);
    menu.add(item);
    if (!isAboutInOSMenu()) {
        menu.addSeparator();
        item = createMenuItem("About", 'a');
        // Null-guard added for consistency with the other listeners above.
        if (aboutActionListener != null)
            item.addActionListener(aboutActionListener);
        menu.add(item);
    }
    return menu;
}
/** Creates a top-level menu with the given label and mnemonic character. */
protected JMenu createMenu(String text, char mnemonic) {
    JMenu newMenu = new JMenu(text);
    newMenu.setMnemonic(mnemonic);
    return newMenu;
}
/** Creates a plain menu item carrying only a text label. */
protected JMenuItem createMenuItem(String text) {
    JMenuItem newItem = new JMenuItem(text);
    return newItem;
}
/** Creates a menu item with a text label and a mnemonic character. */
protected JMenuItem createMenuItem(String text, char mnemonic) {
    JMenuItem newItem = new JMenuItem(text, mnemonic);
    return newItem;
}
/** Creates a menu item with a label, a mnemonic and a keyboard accelerator. */
protected JMenuItem createMenuItem(String text, char mnemonic, KeyStroke key) {
    JMenuItem newItem = new JMenuItem(text, mnemonic);
    newItem.setAccelerator(key);
    return newItem;
}
/** Creates a menu item with a text label and an icon. */
protected JMenuItem createMenuItem(String text, Icon icon) {
    JMenuItem newItem = new JMenuItem(text, icon);
    return newItem;
}
/** Creates a menu item with a label, an icon and a mnemonic character. */
protected JMenuItem createMenuItem(String text, Icon icon, char mnemonic) {
    JMenuItem newItem = new JMenuItem(text, icon);
    newItem.setMnemonic(mnemonic);
    return newItem;
}
/** Creates a menu item with a label, icon, mnemonic and keyboard accelerator. */
protected JMenuItem createMenuItem(String text, Icon icon, char mnemonic,
        KeyStroke key) {
    // Delegate label/icon/mnemonic handling to the three-argument overload.
    JMenuItem newItem = createMenuItem(text, icon, mnemonic);
    newItem.setAccelerator(key);
    return newItem;
}
/** Creates a radio-button menu item with the given label and initial selection. */
protected JRadioButtonMenuItem createRadioButtonMenuItem(String text,
        boolean selected) {
    JRadioButtonMenuItem newItem = new JRadioButtonMenuItem(text, selected);
    return newItem;
}
/** Creates a check-box menu item with the given label and initial selection. */
protected JCheckBoxMenuItem createCheckBoxMenuItem(String text,
        boolean selected) {
    JCheckBoxMenuItem newItem = new JCheckBoxMenuItem(text, selected);
    return newItem;
}
/**
 * Whether the host OS provides the Quit action in its own application menu
 * (so this app should not add its own). Default is false; subclasses for
 * such platforms are expected to override.
 */
protected boolean isQuitInOSMenu() {
    return false;
}
/**
 * Whether the host OS provides the About action in its own application menu.
 * Default is false; when true, buildHelpMenu() omits the About item.
 */
protected boolean isAboutInOSMenu() {
    return false;
}
/**
 * Creates a check-box menu item whose label always mirrors its current
 * enabled/selected state (e.g. "Enabled and Selected").
 */
private JCheckBoxMenuItem createCheckItem(boolean enabled, boolean selected) {
    String initialLabel = getToggleLabel(enabled, selected);
    JCheckBoxMenuItem checkItem = createCheckBoxMenuItem(initialLabel, selected);
    checkItem.setEnabled(enabled);
    // Refresh the label whenever the item's state changes.
    checkItem.addChangeListener(new ChangeListener() {
        public void stateChanged(ChangeEvent event) {
            JCheckBoxMenuItem changed = (JCheckBoxMenuItem) event.getSource();
            changed.setText(getToggleLabel(changed.isEnabled(), changed.isSelected()));
        }
    });
    return checkItem;
}
/** Returns a state label such as "Enabled and Selected" for the two toggles. */
protected String getToggleLabel(boolean enabled, boolean selected) {
    String state = enabled ? "Enabled" : "Disabled";
    String selection = selected ? "Selected" : "Deselected";
    return state + " and " + selection;
}
/**
 * Loads an icon from resources/images resolved relative to this class.
 *
 * @param filename image file name, e.g. "help.gif"
 * @return the icon, or null when the resource is missing (previously a
 *         missing resource produced a NullPointerException from
 *         new ImageIcon((URL) null))
 */
private ImageIcon readImageIcon(String filename) {
    java.net.URL url = getClass().getResource("resources/images/" + filename);
    if (url == null) {
        // Degrade gracefully: JMenuItem accepts a null icon.
        return null;
    }
    return new ImageIcon(url);
}
// Menu items exposed publicly so the owning window can enable/disable them
// and attach listeners after the menus are built.
public JMenuItem menuExecuteAll;
public JMenuItem menuExecute;
public JMenuItem menuRevalidate;
public JMenuItem menuRevalidateAll;
public JMenuItem menuRefresh;
public JMenuItem menuDiscard;
// Indexes into optionMenu/optionCodes for the extract-option check items.
// NOTE(review): new Integer(...) is deprecated in modern Java; Integer.valueOf
// (or plain int constants) would be preferred — left unchanged here.
public static final Integer OPTION_TRAILING_BLANKS = new Integer(0);
public static final Integer OPTION_DBCLOBS = new Integer(1);
public static final Integer OPTION_GRAPHICS = new Integer(2);
public static final Integer OPTION_SPLIT_TRIGGER = new Integer(3);
public static final Integer OPTION_COMPRESS_TABLE = new Integer(4);
public static final Integer OPTION_COMPRESS_INDEX = new Integer(5);
public static final Integer OPTION_EXTRACT_PARTITIONS = new Integer(6);
public static final Integer OPTION_EXTRACT_HASH_PARTITIONS = new Integer(7);
public static final Integer OPTION_GENERATE_CONS_NAMES = new Integer(8);
public static final Integer OPTION_USE_BESTPRACTICE_TSNAMES = new Integer(9);
// Indexes into deployMenu/deployCodes for the interactive-deploy check items.
public static final Integer DEPLOY_TSBP = new Integer(0);
public static final Integer DEPLOY_ROLE = new Integer(1);
public static final Integer DEPLOY_SEQUENCE = new Integer(2);
public static final Integer DEPLOY_TABLE = new Integer(3);
public static final Integer DEPLOY_DEFAULT = new Integer(4);
public static final Integer DEPLOY_CHECK_CONSTRAINTS = new Integer(5);
public static final Integer DEPLOY_PRIMARY_KEY = new Integer(6);
public static final Integer DEPLOY_UNIQUE_INDEX = new Integer(7);
public static final Integer DEPLOY_INDEX = new Integer(8);
public static final Integer DEPLOY_FOREIGN_KEYS = new Integer(9);
public static final Integer DEPLOY_TYPE = new Integer(10);
public static final Integer DEPLOY_FUNCTION = new Integer(11);
public static final Integer DEPLOY_VIEW = new Integer(12);
public static final Integer DEPLOY_MQT = new Integer(13);
public static final Integer DEPLOY_TRIGGER = new Integer(14);
public static final Integer DEPLOY_PROCEDURE = new Integer(15);
public static final Integer DEPLOY_PACKAGE = new Integer(16);
public static final Integer DEPLOY_PACKAGE_BODY = new Integer(17);
// Check items created per option/deploy flag, indexed by the constants above.
public JCheckBoxMenuItem optionMenu[];
public JCheckBoxMenuItem deployMenu[];
// Code tables backing the deploy/option check items (populated elsewhere).
private String deployCodes[][];
private String optionCodes[][];
// Extract configuration the menus read from and write to.
private IBMExtractConfig cfg;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache;
import static org.apache.geode.distributed.ConfigurationProperties.HTTP_SERVICE_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER;
import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_PORT;
import static org.apache.geode.distributed.ConfigurationProperties.JMX_MANAGER_START;
import static org.apache.geode.distributed.ConfigurationProperties.LOCATORS;
import static org.apache.geode.distributed.ConfigurationProperties.LOG_FILE;
import static org.apache.geode.distributed.ConfigurationProperties.MAX_WAIT_TIME_RECONNECT;
import static org.apache.geode.distributed.ConfigurationProperties.MEMBER_TIMEOUT;
import static org.apache.geode.internal.AvailablePortHelper.getRandomAvailableTCPPorts;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.apache.geode.test.dunit.Disconnect.disconnectAllFromDS;
import static org.apache.geode.test.dunit.Invoke.invokeInEveryVM;
import static org.apache.geode.test.dunit.VM.getVM;
import static org.apache.geode.test.dunit.VM.getVMId;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import java.io.File;
import java.io.Serializable;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.IntStream;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.client.ClientCache;
import org.apache.geode.cache.client.ClientCacheFactory;
import org.apache.geode.cache.client.ClientRegionShortcut;
import org.apache.geode.distributed.LocatorLauncher;
import org.apache.geode.distributed.ServerLauncher;
import org.apache.geode.distributed.internal.InternalLocator;
import org.apache.geode.internal.lang.SystemProperty;
import org.apache.geode.internal.lang.SystemPropertyHelper;
import org.apache.geode.management.internal.cli.util.CommandStringBuilder;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.rules.DistributedRule;
import org.apache.geode.test.junit.rules.GfshCommandRule;
import org.apache.geode.test.junit.rules.serializable.SerializableTemporaryFolder;
/**
 * Distributed test verifying that a disk store compacted offline remains usable
 * after the server restarts: the region keeps all its entries and the disk
 * store ends up with exactly two oplogs (the compacted one plus the one created
 * on restart).
 */
public class CompactOfflineDiskStoreDUnitTest implements Serializable {
  @Rule
  public transient GfshCommandRule gfsh = new GfshCommandRule();
  @Rule
  public SerializableTemporaryFolder temporaryFolder = new SerializableTemporaryFolder();
  @Rule
  public DistributedRule distributedRule = new DistributedRule(2);

  // Locator coordinates, assigned in setUp().
  private String locatorName;
  private File locatorDir;
  private int locatorPort;
  private int locatorJmxPort;

  // Dummy mocks let tearDown() call stop() unconditionally without null checks.
  private static final LocatorLauncher DUMMY_LOCATOR = mock(LocatorLauncher.class);
  private static final AtomicReference<LocatorLauncher> LOCATOR =
      new AtomicReference<>(DUMMY_LOCATOR);

  // Server coordinates, assigned in setUp().
  private VM server;
  private String serverName;
  private File serverDir;
  private int serverPort;
  private String locators;

  private static final ServerLauncher DUMMY_SERVER = mock(ServerLauncher.class);
  private static final AtomicReference<ServerLauncher> SERVER =
      new AtomicReference<>(DUMMY_SERVER);

  // FIX: was a non-static instance field; made a constant for consistency with
  // the other test constants below.
  private static final int NUM_ENTRIES = 1000;
  private static final String DISK_STORE_NAME = "testDiskStore";
  private static final String REGION_NAME = "testRegion";

  @Before
  public void setUp() throws Exception {
    VM locator = getVM(0);
    server = getVM(1);
    locatorName = "locator";
    serverName = "server";
    locatorDir = temporaryFolder.newFolder(locatorName);
    serverDir = temporaryFolder.newFolder(serverName);

    int[] port = getRandomAvailableTCPPorts(3);
    locatorPort = port[0];
    locatorJmxPort = port[1];
    serverPort = port[2];
    locators = "localhost[" + locatorPort + "]";

    locator.invoke(() -> startLocator(locatorName, locatorDir, locatorPort, locatorJmxPort));
    gfsh.connectAndVerify(locatorJmxPort, GfshCommandRule.PortType.jmxManager);
    // Initial start uses parallel disk store recovery.
    server.invoke(() -> startServer(serverName, serverDir, serverPort, locators, true));
  }

  @After
  public void tearDown() {
    invokeInEveryVM(() -> {
      // Swapping the dummies back in makes stop() a no-op in VMs that never
      // started a launcher.
      SERVER.getAndSet(DUMMY_SERVER).stop();
      LOCATOR.getAndSet(DUMMY_LOCATOR).stop();
    });
    disconnectAllFromDS();
  }

  @Test
  public void testDuplicateDiskStoreCompaction() {
    createDiskStore();
    createRegion();
    populateRegions();
    assertRegionSizeAndDiskStore();
    // Stop the server, compact its disk store offline, restart (this time with
    // sequential recovery) and verify both the data and the oplog layout.
    server.invoke(CompactOfflineDiskStoreDUnitTest::stopServer);
    server.invoke(this::compactOfflineDiskStore);
    server.invoke(() -> startServer(serverName, serverDir, serverPort, locators, false));
    server.invoke(CompactOfflineDiskStoreDUnitTest::verifyDiskStoreOplogs);
    assertRegionSizeAndDiskStore();
  }

  /** Runs offline compaction directly against the server's disk store files. */
  private void compactOfflineDiskStore() throws Exception {
    DiskStoreImpl.offlineCompact(DISK_STORE_NAME, new File[] {serverDir}, false/* upgrade */, -1);
  }

  /** Starts a locator (with an embedded JMX manager) and waits for its shared configuration. */
  private static void startLocator(String name, File workingDirectory, int locatorPort,
      int jmxPort) {
    LOCATOR.set(new LocatorLauncher.Builder()
        .setMemberName(name)
        .setPort(locatorPort)
        .setWorkingDirectory(workingDirectory.getAbsolutePath())
        .set(JMX_MANAGER, "true")
        .set(JMX_MANAGER_PORT, String.valueOf(jmxPort))
        .set(JMX_MANAGER_START, "true")
        .set(LOG_FILE, new File(workingDirectory, name + ".log").getAbsolutePath())
        .set(MAX_WAIT_TIME_RECONNECT, "1000")
        .set(MEMBER_TIMEOUT, "2000")
        .build());
    LOCATOR.get().start();
    await().untilAsserted(() -> {
      InternalLocator locator = (InternalLocator) LOCATOR.get().getLocator();
      assertThat(locator.isSharedConfigurationRunning())
          .as("Locator shared configuration is running on locator" + getVMId())
          .isTrue();
    });
  }

  /**
   * Starts a cache server joined to the given locators.
   *
   * @param parallelDiskStoreRecovery whether disk store recovery runs in parallel
   */
  private static void startServer(String name, File workingDirectory, int serverPort,
      String locators, boolean parallelDiskStoreRecovery) {
    System.setProperty(
        SystemProperty.DEFAULT_PREFIX + SystemPropertyHelper.PARALLEL_DISK_STORE_RECOVERY,
        String.valueOf(parallelDiskStoreRecovery));
    SERVER.set(new ServerLauncher.Builder()
        .setDeletePidFileOnStop(Boolean.TRUE)
        .setMemberName(name)
        .setWorkingDirectory(workingDirectory.getAbsolutePath())
        .setServerPort(serverPort)
        .set(HTTP_SERVICE_PORT, "0")
        .set(LOCATORS, locators)
        .set(LOG_FILE, new File(workingDirectory, name + ".log").getAbsolutePath())
        .set(MAX_WAIT_TIME_RECONNECT, "1000")
        .set(MEMBER_TIMEOUT, "2000")
        .build());
    SERVER.get().start();
  }

  private static void stopServer() {
    SERVER.get().stop();
  }

  /** Asserts every disk store holds exactly the compacted oplog plus the restart oplog. */
  private static void verifyDiskStoreOplogs() {
    ((InternalCache) SERVER.get().getCache()).listDiskStores().forEach(diskStore -> {
      Oplog[] oplogs = ((DiskStoreImpl) diskStore).getPersistentOplogs().getAllOplogs();
      // There should be two Oplogs in the array.
      // One is the offline compacted Oplog.
      // The other is the new Oplog created during server restart.
      assertThat(oplogs.length).isEqualTo(2);
    });
  }

  private void assertRegionSizeAndDiskStore() {
    assertRegionSize();
    assertDiskStore(serverName);
  }

  /** Verifies via gfsh that the disk store exists on the member and backs the region. */
  private void assertDiskStore(String serverName) {
    String command = new CommandStringBuilder("describe disk-store")
        .addOption("name", DISK_STORE_NAME)
        .addOption("member", serverName)
        .getCommandString();
    gfsh.executeAndAssertThat(command).statusIsSuccess().containsOutput(REGION_NAME);
  }

  /** Verifies via gfsh that the region still reports NUM_ENTRIES entries. */
  private void assertRegionSize() {
    String command = new CommandStringBuilder("describe region")
        .addOption("name", REGION_NAME)
        .getCommandString();
    gfsh.executeAndAssertThat(command).statusIsSuccess()
        .containsOutput(String.valueOf(NUM_ENTRIES));
  }

  /**
   * Populates the region through a client, writing every key twice so the disk
   * store accumulates garbage for the later compaction.
   */
  private void populateRegions() {
    ClientCache clientCache =
        new ClientCacheFactory().addPoolLocator("localhost", locatorPort).create();
    try {
      Region<Object, Object> clientRegion = clientCache
          .createClientRegionFactory(ClientRegionShortcut.CACHING_PROXY).create(REGION_NAME);
      IntStream.range(0, NUM_ENTRIES).forEach(i -> {
        clientRegion.put("key-" + i, "value-" + i);
        // Update again so compaction has something to reclaim.
        // FIX: was "value-" + i + 1, which string-concatenated (e.g. "value-31"
        // for i=3) rather than producing the intended incremented value.
        clientRegion.put("key-" + i, "value-" + (i + 1));
      });
    } finally {
      // FIX: the client cache was never closed (resource leak across tests).
      clientCache.close();
    }
  }

  /** Creates a persistent partitioned region backed by the test disk store. */
  private void createRegion() {
    String command = new CommandStringBuilder("create region")
        .addOption("name", REGION_NAME)
        .addOption("type", "PARTITION_PERSISTENT")
        .addOption("disk-store", DISK_STORE_NAME)
        .getCommandString();
    gfsh.executeAndAssertThat(command).statusIsSuccess();
  }

  /** Creates the disk store with auto and forced compaction enabled. */
  private void createDiskStore() {
    String command = new CommandStringBuilder("create disk-store")
        .addOption("name", DISK_STORE_NAME)
        .addOption("dir", serverDir.getAbsolutePath())
        .addOption("auto-compact", "true")
        .addOption("allow-force-compaction", "true")
        .getCommandString();
    gfsh.executeAndAssertThat(command).statusIsSuccess();
  }
}
| |
/**
* Copyright (c) 2010 Source Auditor Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.spdx.tag;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import org.spdx.rdfparser.DOAPProject;
import org.spdx.rdfparser.InvalidSPDXAnalysisException;
import org.spdx.rdfparser.SPDXReview;
import org.spdx.rdfparser.SpdxDocumentContainer;
import org.spdx.rdfparser.license.AnyLicenseInfo;
import org.spdx.rdfparser.license.ExtractedLicenseInfo;
import org.spdx.rdfparser.license.SimpleLicensingInfo;
import org.spdx.rdfparser.model.Annotation;
import org.spdx.rdfparser.model.Checksum;
import org.spdx.rdfparser.model.DoapProject;
import org.spdx.rdfparser.model.ExternalDocumentRef;
import org.spdx.rdfparser.model.Relationship;
import org.spdx.rdfparser.model.SpdxDocument;
import org.spdx.rdfparser.model.SpdxElement;
import org.spdx.rdfparser.model.SpdxFile;
import org.spdx.rdfparser.model.SpdxFile.FileType;
import org.spdx.rdfparser.model.SpdxItem;
import org.spdx.rdfparser.model.SpdxPackage;
import org.spdx.tools.RdfToTag;
import com.google.common.collect.Lists;
/**
* Define Common methods used by Tag-Value and SPDXViewer to print the SPDX
* document.
*
* @author Rana Rahal, Protecode Inc.
*/
public class CommonCode {
/**
 * Prints a complete SPDX document in tag/value form: version and data license,
 * document namespace and properties, external document references, creation
 * info, described files and packages, remaining packages and files (sorted),
 * extracted license infos, and reviewers.
 *
 * @param doc document to print; a warning line is printed when null
 * @param out destination writer
 * @param constants tag strings keyed by property name
 * @throws InvalidSPDXAnalysisException on model access errors
 */
public static void printDoc(SpdxDocument doc, PrintWriter out,
        Properties constants) throws InvalidSPDXAnalysisException {
    if (doc == null) {
        println(out, "Warning: No document to print");
        return;
    }
    // version
    String spdxVersion = "";
    // NOTE(review): the second operand guards on getCreated() rather than on
    // the spec version itself — looks like a copy/paste slip; confirm intent.
    // Also assumes getCreationInfo() is non-null here and below.
    if (doc.getSpecVersion() != null
            && doc.getCreationInfo().getCreated() != null) {
        spdxVersion = doc.getSpecVersion();
        println(out, constants.getProperty("PROP_SPDX_VERSION") + spdxVersion);
    }
    // Data license (skipped for the 0.8/0.9 pre-release formats)
    if (!spdxVersion.equals(SpdxDocumentContainer.POINT_EIGHT_SPDX_VERSION)
            && !spdxVersion.equals(SpdxDocumentContainer.POINT_NINE_SPDX_VERSION)) {
        AnyLicenseInfo dataLicense = doc.getDataLicense();
        if (dataLicense != null) {
            if (dataLicense instanceof SimpleLicensingInfo) {
                println(out, constants.getProperty("PROP_SPDX_DATA_LICENSE")
                        + ((SimpleLicensingInfo)dataLicense).getLicenseId());
            } else {
                println(out, constants.getProperty("PROP_SPDX_DATA_LICENSE")
                        + dataLicense.toString());
            }
        }
    }
    // Document Uri
    String docNamespace = doc.getDocumentNamespace();
    if (docNamespace != null && !docNamespace.isEmpty()) {
        out.println(constants.getProperty("PROP_DOCUMENT_NAMESPACE") + docNamespace);
    }
    // element properties (name, id, comment)
    printElementProperties(doc, out, constants, "PROP_DOCUMENT_NAME", "PROP_SPDX_COMMENT");
    println(out, "");
    // External References
    ExternalDocumentRef[] externalRefs = doc.getExternalDocumentRefs();
    if (externalRefs != null && externalRefs.length > 0) {
        String externalDocRefHedr = constants.getProperty("EXTERNAL_DOC_REFS_HEADER");
        if (externalDocRefHedr != null && !externalDocRefHedr.isEmpty()) {
            println(out, externalDocRefHedr);
        }
        for (int i = 0; i < externalRefs.length; i++) {
            printExternalRef(externalRefs[i], out, constants);
        }
    }
    // Creators
    if (doc.getCreationInfo().getCreators() != null
            && doc.getCreationInfo().getCreators().length > 0) {
        println(out, constants.getProperty("CREATION_INFO_HEADER"));
        String[] creators = doc.getCreationInfo().getCreators();
        for (int i = 0; i < creators.length; i++) {
            println(out, constants.getProperty("PROP_CREATION_CREATOR")
                    + creators[i]);
        }
    }
    // Creation Date
    if (doc.getCreationInfo().getCreated() != null
            && !doc.getCreationInfo().getCreated().isEmpty()) {
        println(out, constants.getProperty("PROP_CREATION_CREATED")
                + doc.getCreationInfo().getCreated());
    }
    // Creator Comment (wrapped in begin/end text markers)
    if (doc.getCreationInfo().getComment() != null
            && !doc.getCreationInfo().getComment().isEmpty()) {
        println(out, constants.getProperty("PROP_CREATION_COMMENT")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + doc.getCreationInfo().getComment()
                + constants.getProperty("PROP_END_TEXT"));
    }
    // License list version
    if (doc.getCreationInfo().getLicenseListVersion() != null &&
            !doc.getCreationInfo().getLicenseListVersion().isEmpty()) {
        println(out, constants.getProperty("PROP_LICENSE_LIST_VERSION") +
                doc.getCreationInfo().getLicenseListVersion());
    }
    printElementAnnotationsRelationships(doc, out, constants, "PROP_DOCUMENT_NAME", "PROP_SPDX_COMMENT");
    println(out, "");
    // Print the actual files
    List<SpdxPackage> allPackages = doc.getDocumentContainer().findAllPackages();
    List<SpdxFile> allFiles = doc.getDocumentContainer().findAllFiles();
    // first print out any described files; remove each from allFiles so the
    // sorted pass below does not print it twice
    SpdxItem[] items = doc.getDocumentDescribes();
    if (items != null && items.length > 0) {
        for (int i = 0; i < items.length; i++) {
            if (items[i] instanceof SpdxFile) {
                printFile((SpdxFile)items[i], out, constants);
                allFiles.remove(items[i]);
            }
        }
    }
    // print any described packages (same de-duplication via allPackages)
    if (items != null && items.length > 0) {
        for (int i = 0; i < items.length; i++) {
            if (items[i] instanceof SpdxPackage) {
                printPackage((SpdxPackage)items[i], out, constants, allFiles);
                allPackages.remove(items[i]);
            }
        }
    }
    // print remaining packages, sorted for stable output
    Collections.sort(allPackages);
    Iterator<SpdxPackage> pkgIter = allPackages.iterator();
    while (pkgIter.hasNext()) {
        printPackage(pkgIter.next(), out, constants, allFiles);
    }
    // print remaining files, sorted for stable output
    Collections.sort(allFiles);
    Iterator<SpdxFile> fileIter = allFiles.iterator();
    while (fileIter.hasNext()) {
        printFile(fileIter.next(), out, constants);
    }
    // Extracted license infos
    println(out, "");
    if (doc.getExtractedLicenseInfos() != null
            && doc.getExtractedLicenseInfos().length > 0) {
        ExtractedLicenseInfo[] nonStandardLic = doc
                .getExtractedLicenseInfos();
        println(out, constants.getProperty("LICENSE_INFO_HEADER"));
        for (int i = 0; i < nonStandardLic.length; i++) {
            printLicense(nonStandardLic[i], out, constants);
        }
    }
    // Reviewers
    SPDXReview[] reviewedBy = doc.getReviewers();
    if (reviewedBy != null && reviewedBy.length > 0) {
        println(out, constants.getProperty("REVIEW_INFO_HEADER"));
        for (int i = 0; i < reviewedBy.length; i++) {
            println(out, constants.getProperty("PROP_REVIEW_REVIEWER")
                    + reviewedBy[i].getReviewer());
            println(out, constants.getProperty("PROP_REVIEW_DATE")
                    + reviewedBy[i].getReviewDate());
            if (reviewedBy[i].getComment() != null
                    && !reviewedBy[i].getComment().isEmpty()) {
                println(out, constants.getProperty("PROP_REVIEW_COMMENT")
                        + constants.getProperty("PROP_BEGIN_TEXT")
                        + reviewedBy[i].getComment()
                        + constants.getProperty("PROP_END_TEXT"));
            }
            println(out, "");
        }
    }
}
/**
 * Writes one external document reference line in the form
 * "id namespace SHA1: checksum", substituting "[UNSPECIFIED]" for any field
 * that is null or empty.
 *
 * @param externalDocumentRef reference to print
 * @param out destination writer
 * @param constants tag strings keyed by property name
 * @throws InvalidSPDXAnalysisException on model access errors
 */
private static void printExternalRef(
        ExternalDocumentRef externalDocumentRef, PrintWriter out,
        Properties constants) throws InvalidSPDXAnalysisException {
    String namespace = externalDocumentRef.getSpdxDocumentNamespace();
    if (namespace == null || namespace.isEmpty()) {
        namespace = "[UNSPECIFIED]";
    }
    Checksum docChecksum = externalDocumentRef.getChecksum();
    String sha1;
    if (docChecksum != null && docChecksum.getValue() != null
            && !docChecksum.getValue().isEmpty()) {
        sha1 = docChecksum.getValue();
    } else {
        sha1 = "[UNSPECIFIED]";
    }
    String docId = externalDocumentRef.getExternalDocumentId();
    if (docId == null || docId.isEmpty()) {
        docId = "[UNSPECIFIED]";
    }
    println(out, constants.getProperty("PROP_EXTERNAL_DOC_URI") +
            docId + " " + namespace + " SHA1: " + sha1);
}
/**
 * Prints the common element properties — name, id and comment — each only
 * when present and non-empty. The comment is wrapped in begin/end text markers.
 *
 * @param element element whose properties are printed
 * @param out destination writer
 * @param constants tag strings keyed by property name
 * @param nameProperty property key for the element's name tag
 * @param commentProperty property key for the element's comment tag
 */
private static void printElementProperties(SpdxElement element,
        PrintWriter out, Properties constants, String nameProperty,
        String commentProperty) {
    String name = element.getName();
    if (name != null && !name.isEmpty()) {
        println(out, constants.getProperty(nameProperty) + name);
    }
    String id = element.getId();
    if (id != null && !id.isEmpty()) {
        println(out, constants.getProperty("PROP_ELEMENT_ID") + id);
    }
    String comment = element.getComment();
    if (comment != null && !comment.isEmpty()) {
        println(out, constants.getProperty(commentProperty)
                + constants.getProperty("PROP_BEGIN_TEXT")
                + comment
                + constants.getProperty("PROP_END_TEXT"));
    }
}
/**
 * Prints an element's annotations and then its relationships, each section
 * preceded by its header and emitted only when non-empty.
 *
 * @param element element whose annotations/relationships are printed
 * @param out destination writer
 * @param constants tag strings keyed by property name
 * @param nameProperty unused here; kept for signature parity with
 *        printElementProperties
 * @param commentProperty unused here; kept for signature parity
 */
private static void printElementAnnotationsRelationships(SpdxElement element,
        PrintWriter out, Properties constants, String nameProperty,
        String commentProperty) {
    Annotation[] annotations = element.getAnnotations();
    if (annotations != null && annotations.length > 0) {
        println(out, constants.getProperty("ANNOTATION_HEADER"));
        for (Annotation annotation : annotations) {
            printAnnotation(annotation, element.getId(), out, constants);
        }
    }
    Relationship[] relationships = element.getRelationships();
    if (relationships != null && relationships.length > 0) {
        println(out, constants.getProperty("RELATIONSHIP_HEADER"));
        for (Relationship relationship : relationships) {
            printRelationship(relationship, element.getId(), out, constants);
        }
    }
}
/**
 * Prints one relationship line: "elementId TYPE_TAG relatedElementId".
 *
 * @param relationship relationship to print
 * @param elementId id of the element owning the relationship
 * @param out destination writer
 * @param constants tag strings keyed by property name
 */
private static void printRelationship(Relationship relationship,
        String elementId, PrintWriter out, Properties constants) {
    String line = constants.getProperty("PROP_RELATIONSHIP")
            + elementId + " "
            + relationship.getRelationshipType().toTag()
            + " " + relationship.getRelatedSpdxElement().getId();
    out.println(line);
}
/**
 * Prints one annotation: annotator, date, comment (wrapped in begin/end text
 * markers), type tag, and the id of the annotated element.
 *
 * @param annotation annotation to print
 * @param id id of the annotated element
 * @param out destination writer
 * @param constants tag strings keyed by property name
 */
private static void printAnnotation(Annotation annotation, String id,
        PrintWriter out, Properties constants) {
    out.println(constants.getProperty("PROP_ANNOTATOR") + annotation.getAnnotator());
    out.println(constants.getProperty("PROP_ANNOTATION_DATE") + annotation.getAnnotationDate());
    String wrappedComment = constants.getProperty("PROP_BEGIN_TEXT")
            + annotation.getComment()
            + constants.getProperty("PROP_END_TEXT");
    out.println(constants.getProperty("PROP_ANNOTATION_COMMENT") + wrappedComment);
    out.println(constants.getProperty("PROP_ANNOTATION_TYPE")
            + annotation.getAnnotationType().getTag());
    out.println(constants.getProperty("PROP_ANNOTATION_ID") + id);
}
/**
 * Prints one extracted (non-listed) license: id, extracted text, name,
 * see-also URLs and comment, each only when present.
 *
 * @param license license to print
 * @param out destination writer
 * @param constants tag strings keyed by property name
 */
private static void printLicense(ExtractedLicenseInfo license,
        PrintWriter out, Properties constants) {
    // id
    if (license.getLicenseId() != null && !license.getLicenseId().isEmpty()) {
        println(out,
                constants.getProperty("PROP_LICENSE_ID") + license.getLicenseId());
    }
    if (license.getExtractedText() != null && !license.getExtractedText().isEmpty()) {
        println(out, constants.getProperty("PROP_EXTRACTED_TEXT")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + license.getExtractedText() + constants.getProperty("PROP_END_TEXT"));
    }
    if (license.getName() != null && !license.getName().isEmpty()) {
        println(out, constants.getProperty("PROP_LICENSE_NAME") + license.getName());
    }
    if (license.getSeeAlso() != null && license.getSeeAlso().length > 0) {
        StringBuilder sb = new StringBuilder();
        sb.append(license.getSeeAlso()[0]);
        for (int i = 1; i < license.getSeeAlso().length; i++) {
            sb.append(", ");
            sb.append(license.getSeeAlso()[i]);
        }
        println(out, constants.getProperty("PROP_SOURCE_URLS") + sb.toString());
    }
    // FIX: the license comment was previously printed only when getSeeAlso()
    // was non-null — an unrelated condition (copy/paste slip). Print it
    // whenever a non-empty comment exists.
    if (license.getComment() != null && !license.getComment().isEmpty()) {
        println(out, constants.getProperty("PROP_LICENSE_COMMENT")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + license.getComment()
                + constants.getProperty("PROP_END_TEXT"));
    }
    println(out, "");
}
/**
 * Prints all tag/value properties of a package, followed by its files (sorted).
 * Each file printed here is removed from {@code remainingFilesToPrint} so the
 * caller does not print it a second time.
 *
 * @param pkg package to print
 * @param out destination writer
 * @param constants tag strings keyed by property name
 * @param remainingFilesToPrint files not yet printed; mutated by this method
 * @throws InvalidSPDXAnalysisException on model access errors
 */
private static void printPackage(SpdxPackage pkg, PrintWriter out,
        Properties constants, List<SpdxFile> remainingFilesToPrint) throws InvalidSPDXAnalysisException {
    println(out, constants.getProperty("PACKAGE_INFO_HEADER"));
    printElementProperties(pkg, out, constants, "PROP_PACKAGE_DECLARED_NAME",
            "PROP_PACKAGE_COMMENT");
    // Version
    if (pkg.getVersionInfo() != null && !pkg.getVersionInfo().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_VERSION_INFO")
                + pkg.getVersionInfo());
    }
    // File name
    if (pkg.getPackageFileName() != null && !pkg.getPackageFileName().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_FILE_NAME")
                + pkg.getPackageFileName());
    }
    // Supplier
    if (pkg.getSupplier() != null && !pkg.getSupplier().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_SUPPLIER")
                + pkg.getSupplier());
    }
    // Originator
    if (pkg.getOriginator() != null && !pkg.getOriginator().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_ORIGINATOR")
                + pkg.getOriginator());
    }
    // Download location
    if (pkg.getDownloadLocation() != null && !pkg.getDownloadLocation().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_DOWNLOAD_URL")
                + pkg.getDownloadLocation());
    }
    // Package verification code, with any excluded files listed in parentheses.
    if (pkg.getPackageVerificationCode() != null
            && pkg.getPackageVerificationCode().getValue() != null
            && !pkg.getPackageVerificationCode().getValue().isEmpty()) {
        StringBuilder code = new StringBuilder(
                constants.getProperty("PROP_PACKAGE_VERIFICATION_CODE"))
                .append(pkg.getPackageVerificationCode().getValue());
        String[] excludedFiles = pkg.getPackageVerificationCode().getExcludedFileNames();
        if (excludedFiles.length != 0) {
            // FIX: the old separator check tested a builder that already held
            // "(", so it was always true and a spurious ", " was emitted before
            // the first excluded file name. Separate only between entries.
            code.append('(');
            for (int i = 0; i < excludedFiles.length; i++) {
                if (i > 0) {
                    code.append(", ");
                }
                code.append(excludedFiles[i]);
            }
            code.append(')');
        }
        println(out, code.toString());
    }
    // Checksums
    Checksum[] checksums = pkg.getChecksums();
    if (checksums != null && checksums.length > 0) {
        for (int i = 0; i < checksums.length; i++) {
            printChecksum(checksums[i], out, constants, "PROP_PACKAGE_CHECKSUM");
        }
    }
    // Home page
    if (pkg.getHomepage() != null && !pkg.getHomepage().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_HOMEPAGE_URL") +
                pkg.getHomepage());
    }
    // Source info (wrapped in begin/end text markers)
    if (pkg.getSourceInfo() != null && !pkg.getSourceInfo().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_SOURCE_INFO")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + pkg.getSourceInfo()
                + constants.getProperty("PROP_END_TEXT"));
    }
    // concluded license
    if (pkg.getLicenseConcluded() != null) {
        println(out, constants.getProperty("PROP_PACKAGE_CONCLUDED_LICENSE")
                + pkg.getLicenseConcluded());
    }
    // License information from files
    if (pkg.getLicenseInfoFromFiles() != null
            && pkg.getLicenseInfoFromFiles().length > 0) {
        AnyLicenseInfo[] licenses = pkg.getLicenseInfoFromFiles();
        println(out, constants.getProperty("LICENSE_FROM_FILES_INFO_HEADER"));
        for (int i = 0; i < licenses.length; i++) {
            println(out, constants.getProperty("PROP_PACKAGE_LICENSE_INFO_FROM_FILES")
                    + licenses[i].toString());
        }
    }
    // Declared license
    if (pkg.getLicenseDeclared() != null) {
        println(out, constants.getProperty("PROP_PACKAGE_DECLARED_LICENSE")
                + pkg.getLicenseDeclared());
    }
    // License comments
    if (pkg.getLicenseComments() != null
            && !pkg.getLicenseComments().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_LICENSE_COMMENT")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + pkg.getLicenseComments() +
                constants.getProperty("PROP_END_TEXT"));
    }
    // Declared copyright
    if (pkg.getCopyrightText() != null
            && !pkg.getCopyrightText().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_DECLARED_COPYRIGHT")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + pkg.getCopyrightText() + constants.getProperty("PROP_END_TEXT"));
    }
    // Short description
    if (pkg.getSummary() != null
            && !pkg.getSummary().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_SHORT_DESC")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + pkg.getSummary() + constants.getProperty("PROP_END_TEXT"));
    }
    // Description
    if (pkg.getDescription() != null && !pkg.getDescription().isEmpty()) {
        println(out, constants.getProperty("PROP_PACKAGE_DESCRIPTION")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + pkg.getDescription() + constants.getProperty("PROP_END_TEXT"));
    }
    printElementAnnotationsRelationships(pkg, out, constants, "PROP_PACKAGE_DECLARED_NAME",
            "PROP_PACKAGE_COMMENT");
    // Files
    if (pkg.getFiles() != null && pkg.getFiles().length > 0) {
        // Copy before sorting: the previous Arrays.asList wrapper sorted the
        // array returned by getFiles() in place, which may be internal state.
        // (Also removes a dead Lists.newArrayList() assignment.)
        List<SpdxFile> sortedFileList = Lists.newArrayList(pkg.getFiles());
        Collections.sort(sortedFileList);
        println(out, "");
        println(out, constants.getProperty("FILE_INFO_HEADER"));
        /* Print out sorted files */
        for (SpdxFile file : sortedFileList) {
            printFile(file, out, constants);
            remainingFilesToPrint.remove(file);
            println(out, "");
        }
    }
}
/**
 * Prints one checksum line: "tag ALGORITHM value".
 *
 * @param checksum checksum to print
 * @param out destination writer
 * @param constants tag strings keyed by property name
 * @param checksumProperty property key selecting the file/package checksum tag
 */
private static void printChecksum(Checksum checksum, PrintWriter out,
        Properties constants, String checksumProperty) {
    String algorithmTag = Checksum.CHECKSUM_ALGORITHM_TO_TAG.get(checksum.getAlgorithm());
    out.println(constants.getProperty(checksumProperty)
            + algorithmTag + " " + checksum.getValue());
}
/**
 * Prints all tag/value properties of a single file: name/id/comment, file
 * types, checksums, license fields, copyright, artifact-of projects, notice
 * text, contributors, dependencies, and finally annotations/relationships.
 *
 * @param file file to print
 * @param out destination writer
 * @param constants tag strings keyed by property name
 */
private static void printFile(SpdxFile file, PrintWriter out,
        Properties constants) {
    printElementProperties(file, out, constants, "PROP_FILE_NAME",
            "PROP_FILE_COMMENT");
    // type (one line per file type)
    FileType[] fileTypes = file.getFileTypes();
    if (fileTypes != null && fileTypes.length > 0) {
        for (int i = 0; i < fileTypes.length; i++) {
            println(out, constants.getProperty("PROP_FILE_TYPE") + SpdxFile.FILE_TYPE_TO_TAG.get(fileTypes[i]));
        }
    }
    // checksums (one line per checksum)
    Checksum[] checksums = file.getChecksums();
    if (checksums != null) {
        for (int i = 0; i < checksums.length; i++) {
            printChecksum(checksums[i], out, constants, "PROP_FILE_CHECKSUM");
        }
    }
    // concluded license
    if (file.getLicenseConcluded() != null) {
        println(out, constants.getProperty("PROP_FILE_LICENSE")
                + file.getLicenseConcluded().toString());
    }
    // License info in file
    if (file.getLicenseInfoFromFiles() != null && file.getLicenseInfoFromFiles().length > 0) {
        for (int i = 0; i < file.getLicenseInfoFromFiles().length; i++) {
            println(out, constants.getProperty("PROP_FILE_SEEN_LICENSE")
                    + file.getLicenseInfoFromFiles()[i].toString());
        }
    }
    // license comments
    if (file.getLicenseComments() != null
            && !file.getLicenseComments().isEmpty()) {
        println(out,
                constants.getProperty("PROP_FILE_LIC_COMMENTS")
                + file.getLicenseComments());
    }
    // file copyright (wrapped in begin/end text markers)
    if (file.getCopyrightText() != null && !file.getCopyrightText().isEmpty()) {
        println(out, constants.getProperty("PROP_FILE_COPYRIGHT")
                + constants.getProperty("PROP_BEGIN_TEXT")
                + file.getCopyrightText() + constants.getProperty("PROP_END_TEXT"));
    }
    // artifact of (deprecated DOAP project references)
    if (file.getArtifactOf() != null && file.getArtifactOf().length > 0) {
        for (int i = 0; i < file.getArtifactOf().length; i++) {
            printProject(file.getArtifactOf()[i], out, constants);
        }
    }
    // File notice
    if (file.getNoticeText() != null && !file.getNoticeText().isEmpty()) {
        println(out, constants.getProperty("PROP_FILE_NOTICE_TEXT") +
                constants.getProperty("PROP_BEGIN_TEXT") +
                file.getNoticeText() +
                constants.getProperty("PROP_END_TEXT"));
    }
    // file contributors (one line per contributor)
    if (file.getFileContributors() != null && file.getFileContributors().length > 0) {
        for (int i = 0; i < file.getFileContributors().length; i++) {
            println(out, constants.getProperty("PROP_FILE_CONTRIBUTOR")+
                    file.getFileContributors()[i]);
        }
    }
    // file dependencies (one line per dependency, by name)
    SpdxFile[] fileDependencies = file.getFileDependencies();
    if (fileDependencies != null && fileDependencies.length > 0) {
        for (SpdxFile fileDepdency : fileDependencies) {
            println(out, constants.getProperty("PROP_FILE_DEPENDENCY") + fileDepdency.getName());
        }
    }
    printElementAnnotationsRelationships(file, out, constants, "PROP_FILE_NAME",
            "PROP_FILE_COMMENT");
}
/**
 * Prints the tag/value properties of a DOAP project referenced by a file:
 * project name, home page, and project URI (when known).
 *
 * @param doapProject project to print
 * @param out target writer; may be {@code null} to print to System.out
 * @param constants tag/value format strings loaded from the properties file
 */
private static void printProject(DoapProject doapProject, PrintWriter out,
        Properties constants) {
    // Project name
    String name = doapProject.getName();
    if (name != null && !name.isEmpty()) {
        println(out, constants.getProperty("PROP_PROJECT_NAME") + name);
    }
    // Project homepage
    String homePage = doapProject.getHomePage();
    if (homePage != null && !homePage.isEmpty()) {
        println(out, constants.getProperty("PROP_PROJECT_HOMEPAGE") + homePage);
    }
    // DOAP file URL; the "unknown" sentinel URI is deliberately not printed.
    // NOTE(review): constant lives on DOAPProject while the parameter type is
    // DoapProject -- presumably two spellings of the same library class; confirm.
    String projectUri = doapProject.getProjectUri();
    if (projectUri != null && !projectUri.isEmpty()
            && !projectUri.equals(DOAPProject.UNKNOWN_URI)) {
        println(out, constants.getProperty("PROP_PROJECT_URI") + projectUri);
    }
}
/**
 * Writes one line to {@code out}, falling back to System.out when no writer
 * was supplied.
 *
 * @param out target writer, or {@code null} to print to standard output
 * @param output line to print
 */
private static void println(PrintWriter out, String output) {
    if (out == null) {
        System.out.println(output);
        return;
    }
    out.println(output);
}
/**
 * Loads a properties file from the classpath.
 *
 * @param path classpath-relative location of the properties resource
 * @return the loaded properties
 * @throws IOException if the resource is missing or cannot be read
 */
public static Properties getTextFromProperties(final String path)
    throws IOException {
    Properties prop = new Properties();
    // try-with-resources closes the stream even when load() fails, replacing
    // the old try/finally that silently swallowed close() errors.
    try (InputStream is = RdfToTag.class.getClassLoader().getResourceAsStream(path)) {
        if (is == null) {
            // Previously this fell through to prop.load(null) and threw an
            // undiagnosable NullPointerException.
            throw new IOException("Properties resource not found on classpath: " + path);
        }
        prop.load(is);
    }
    return prop;
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
 * Message containing information of one individual backend.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Backend extends com.google.api.client.json.GenericJson {

  /**
   * Specifies the balancing mode for the backend: CONNECTION, RATE, or UTILIZATION. Which mode is
   * valid depends on the backend service's protocol and loadBalancingScheme and on the backend
   * type (instance group or NEG):
   *
   * - CONNECTION spreads load by how many concurrent connections the backend can handle. Valid
   * when the backend service protocol is SSL, TCP, or UDP. If the loadBalancingScheme is EXTERNAL
   * (SSL Proxy and TCP Proxy load balancers), exactly one of maxConnections,
   * maxConnectionsPerInstance, or maxConnectionsPerEndpoint must also be specified; if it is
   * INTERNAL (internal TCP/UDP load balancers), no additional parameters may be specified.
   *
   * - RATE spreads load by the rate of HTTP requests per second (RPS). Valid when the protocol is
   * HTTP or HTTPS. Exactly one of maxRate, maxRatePerInstance, or maxRatePerEndpoint must be
   * specified.
   *
   * - UTILIZATION spreads load by the CPU utilization of instances in an instance group. Valid
   * for instance-group backends when the loadBalancingScheme is EXTERNAL, INTERNAL_SELF_MANAGED,
   * or INTERNAL_MANAGED; there are no restrictions on the backend service protocol.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String balancingMode;

  /**
   * A multiplier applied to the group's maximum servicing capacity (based on UTILIZATION, RATE or
   * CONNECTION). The default of 1 serves up to 100% of the configured capacity (depending on
   * balancingMode); 0 means the group is completely drained. Valid range is [0.0,1.0]. Cannot be
   * used for internal load balancing.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Float capacityScaler;

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String description;

  /**
   * Designates whether this is a failover backend. More than one failover backend can be
   * configured for a given BackendService.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Boolean failover;

  /**
   * The fully-qualified URL (starting with https://www.googleapis.com/) of an instance group or
   * network endpoint group (NEG); partial URLs are not supported. When the backend service's
   * loadBalancingScheme is EXTERNAL, INTERNAL_SELF_MANAGED, or INTERNAL_MANAGED, the backend can
   * be either an instance group or a NEG, but a single backend service must not mix the two
   * kinds. When the loadBalancingScheme is INTERNAL, the backend must be an instance group in the
   * same region as the backend service; NEGs are not supported.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private String group;

  /**
   * Maximum target for simultaneous connections for the entire backend (instance group or NEG).
   * Optional when the balancingMode is UTILIZATION. With CONNECTION mode and an EXTERNAL
   * loadBalancingScheme, one of maxConnections, maxConnectionsPerInstance, or
   * maxConnectionsPerEndpoint must be specified. Not available with RATE mode, nor with an
   * INTERNAL loadBalancingScheme (even though CONNECTION mode is required there).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Integer maxConnections;

  /**
   * Maximum target for simultaneous connections for one endpoint of a NEG; multiplied by the
   * number of endpoints to implicitly derive the NEG's target. With CONNECTION mode and an
   * EXTERNAL loadBalancingScheme, one of maxConnections, maxConnectionsPerInstance, or
   * maxConnectionsPerEndpoint must be specified. Not available with RATE mode or with internal
   * TCP/UDP load balancing.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Integer maxConnectionsPerEndpoint;

  /**
   * Maximum target for simultaneous connections for a single VM in a backend instance group;
   * multiplied by the number of instances to implicitly derive the group's target. Optional when
   * the balancingMode is UTILIZATION. With CONNECTION mode and an EXTERNAL loadBalancingScheme,
   * one of maxConnections, maxConnectionsPerInstance, or maxConnectionsPerEndpoint must be
   * specified. Not available with RATE mode or with internal TCP/UDP load balancing.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Integer maxConnectionsPerInstance;

  /**
   * The max requests per second (RPS) of the group. Usable with RATE or UTILIZATION balancing
   * modes; in RATE mode either maxRate or maxRatePerInstance must be set. Cannot be used for
   * internal load balancing.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Integer maxRate;

  /**
   * Maximum target requests per second (RPS) for one endpoint of a NEG; multiplied by the number
   * of endpoints to implicitly derive the NEG's target rate. With RATE mode, one of maxRate,
   * maxRatePerInstance, or maxRatePerEndpoint must be specified. Not available with CONNECTION
   * mode.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Float maxRatePerEndpoint;

  /**
   * Maximum target requests per second (RPS) for a single VM in a backend instance group;
   * multiplied by the number of instances to implicitly derive the group's target rate. Optional
   * when the balancingMode is UTILIZATION. With RATE mode, one of maxRate, maxRatePerInstance, or
   * maxRatePerEndpoint must be specified. Not available with CONNECTION mode.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Float maxRatePerInstance;

  /**
   * Maximum average CPU utilization of a backend VM in an instance group, in the range
   * [0.0, 1.0]. Optional when the balancingMode is UTILIZATION; may be used together with
   * maxRate, maxRatePerInstance, maxConnections, or maxConnectionsPerInstance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Float maxUtilization;

  /**
   * Returns the balancing mode (CONNECTION, RATE, or UTILIZATION) for the backend; see the
   * {@code balancingMode} field documentation for the constraints each mode imposes.
   * @return value or {@code null} for none
   */
  public String getBalancingMode() {
    return balancingMode;
  }

  /**
   * Sets the balancing mode (CONNECTION, RATE, or UTILIZATION) for the backend; see the
   * {@code balancingMode} field documentation for the constraints each mode imposes.
   * @param balancingMode balancingMode or {@code null} for none
   */
  public Backend setBalancingMode(String balancingMode) {
    this.balancingMode = balancingMode;
    return this;
  }

  /**
   * Returns the capacity multiplier in [0.0,1.0] applied to the group's maximum servicing
   * capacity; not usable for internal load balancing.
   * @return value or {@code null} for none
   */
  public Float getCapacityScaler() {
    return capacityScaler;
  }

  /**
   * Sets the capacity multiplier in [0.0,1.0] applied to the group's maximum servicing capacity
   * (default 1 = 100%, 0 = fully drained); not usable for internal load balancing.
   * @param capacityScaler capacityScaler or {@code null} for none
   */
  public Backend setCapacityScaler(Float capacityScaler) {
    this.capacityScaler = capacityScaler;
    return this;
  }

  /**
   * Returns the optional description of this resource.
   * @return value or {@code null} for none
   */
  public String getDescription() {
    return description;
  }

  /**
   * Sets the optional description of this resource. Provide this property when you create the
   * resource.
   * @param description description or {@code null} for none
   */
  public Backend setDescription(String description) {
    this.description = description;
    return this;
  }

  /**
   * Returns whether this is a failover backend.
   * @return value or {@code null} for none
   */
  public Boolean getFailover() {
    return failover;
  }

  /**
   * Sets whether this is a failover backend; more than one failover backend can be configured for
   * a given BackendService.
   * @param failover failover or {@code null} for none
   */
  public Backend setFailover(Boolean failover) {
    this.failover = failover;
    return this;
  }

  /**
   * Returns the fully-qualified URL of the instance group or network endpoint group (NEG) backing
   * this backend; see the {@code group} field documentation for which kinds are allowed per
   * loadBalancingScheme.
   * @return value or {@code null} for none
   */
  public String getGroup() {
    return group;
  }

  /**
   * Sets the fully-qualified URL (starting with https://www.googleapis.com/) of the instance
   * group or NEG; partial URLs are not supported, and the allowed kind depends on the backend
   * service's loadBalancingScheme (see the {@code group} field documentation).
   * @param group group or {@code null} for none
   */
  public Backend setGroup(String group) {
    this.group = group;
    return this;
  }

  /**
   * Returns the maximum target for simultaneous connections for the entire backend; see the
   * {@code maxConnections} field documentation for mode/scheme constraints.
   * @return value or {@code null} for none
   */
  public Integer getMaxConnections() {
    return maxConnections;
  }

  /**
   * Sets the maximum target for simultaneous connections for the entire backend (instance group
   * or NEG); see the {@code maxConnections} field documentation for mode/scheme constraints.
   * @param maxConnections maxConnections or {@code null} for none
   */
  public Backend setMaxConnections(Integer maxConnections) {
    this.maxConnections = maxConnections;
    return this;
  }

  /**
   * Returns the maximum target for simultaneous connections per NEG endpoint; see the
   * {@code maxConnectionsPerEndpoint} field documentation for mode/scheme constraints.
   * @return value or {@code null} for none
   */
  public Integer getMaxConnectionsPerEndpoint() {
    return maxConnectionsPerEndpoint;
  }

  /**
   * Sets the maximum target for simultaneous connections per NEG endpoint (multiplied by the
   * endpoint count to derive the NEG target); see the {@code maxConnectionsPerEndpoint} field
   * documentation for mode/scheme constraints.
   * @param maxConnectionsPerEndpoint maxConnectionsPerEndpoint or {@code null} for none
   */
  public Backend setMaxConnectionsPerEndpoint(Integer maxConnectionsPerEndpoint) {
    this.maxConnectionsPerEndpoint = maxConnectionsPerEndpoint;
    return this;
  }

  /**
   * Returns the maximum target for simultaneous connections per instance-group VM; see the
   * {@code maxConnectionsPerInstance} field documentation for mode/scheme constraints.
   * @return value or {@code null} for none
   */
  public Integer getMaxConnectionsPerInstance() {
    return maxConnectionsPerInstance;
  }

  /**
   * Sets the maximum target for simultaneous connections per instance-group VM (multiplied by the
   * instance count to derive the group target); see the {@code maxConnectionsPerInstance} field
   * documentation for mode/scheme constraints.
   * @param maxConnectionsPerInstance maxConnectionsPerInstance or {@code null} for none
   */
  public Backend setMaxConnectionsPerInstance(Integer maxConnectionsPerInstance) {
    this.maxConnectionsPerInstance = maxConnectionsPerInstance;
    return this;
  }

  /**
   * Returns the max requests per second (RPS) of the group; usable with RATE or UTILIZATION
   * balancing modes, not for internal load balancing.
   * @return value or {@code null} for none
   */
  public Integer getMaxRate() {
    return maxRate;
  }

  /**
   * Sets the max requests per second (RPS) of the group; in RATE mode either maxRate or
   * maxRatePerInstance must be set. Cannot be used for internal load balancing.
   * @param maxRate maxRate or {@code null} for none
   */
  public Backend setMaxRate(Integer maxRate) {
    this.maxRate = maxRate;
    return this;
  }

  /**
   * Returns the maximum target RPS per NEG endpoint; see the {@code maxRatePerEndpoint} field
   * documentation for mode constraints.
   * @return value or {@code null} for none
   */
  public Float getMaxRatePerEndpoint() {
    return maxRatePerEndpoint;
  }

  /**
   * Sets the maximum target RPS per NEG endpoint (multiplied by the endpoint count to derive the
   * NEG target rate); see the {@code maxRatePerEndpoint} field documentation for mode
   * constraints.
   * @param maxRatePerEndpoint maxRatePerEndpoint or {@code null} for none
   */
  public Backend setMaxRatePerEndpoint(Float maxRatePerEndpoint) {
    this.maxRatePerEndpoint = maxRatePerEndpoint;
    return this;
  }

  /**
   * Returns the maximum target RPS per instance-group VM; see the {@code maxRatePerInstance}
   * field documentation for mode constraints.
   * @return value or {@code null} for none
   */
  public Float getMaxRatePerInstance() {
    return maxRatePerInstance;
  }

  /**
   * Sets the maximum target RPS per instance-group VM (multiplied by the instance count to derive
   * the group target rate); see the {@code maxRatePerInstance} field documentation for mode
   * constraints.
   * @param maxRatePerInstance maxRatePerInstance or {@code null} for none
   */
  public Backend setMaxRatePerInstance(Float maxRatePerInstance) {
    this.maxRatePerInstance = maxRatePerInstance;
    return this;
  }

  /**
   * Returns the maximum average CPU utilization of a backend VM, in [0.0, 1.0]; optional when the
   * balancingMode is UTILIZATION.
   * @return value or {@code null} for none
   */
  public Float getMaxUtilization() {
    return maxUtilization;
  }

  /**
   * Sets the maximum average CPU utilization of a backend VM, in [0.0, 1.0]; may be combined with
   * maxRate, maxRatePerInstance, maxConnections, or maxConnectionsPerInstance.
   * @param maxUtilization maxUtilization or {@code null} for none
   */
  public Backend setMaxUtilization(Float maxUtilization) {
    this.maxUtilization = maxUtilization;
    return this;
  }

  @Override
  public Backend set(String fieldName, Object value) {
    return (Backend) super.set(fieldName, value);
  }

  @Override
  public Backend clone() {
    return (Backend) super.clone();
  }
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.hamcrest.MatcherAssert.assertThat;
import com.facebook.buck.cli.BuildTargetNodeToBuildRuleTransformer;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.args.StringArg;
import com.google.common.base.Predicates;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import org.hamcrest.Matchers;
import org.junit.Test;
public class NativeLinkablesTest {
/**
 * Minimal {@link NativeLinkable} for tests: reports exactly the deps, exported
 * deps, linkage preference, linker input, and shared libraries it was
 * constructed with, for every platform.
 */
private static class FakeNativeLinkable extends FakeBuildRule implements NativeLinkable {

  private final Iterable<NativeLinkable> declaredDeps;
  private final Iterable<NativeLinkable> declaredExportedDeps;
  private final NativeLinkable.Linkage linkage;
  private final NativeLinkableInput linkerInput;
  private final ImmutableMap<String, SourcePath> sharedLibs;

  public FakeNativeLinkable(
      String target,
      Iterable<? extends NativeLinkable> deps,
      Iterable<? extends NativeLinkable> exportedDeps,
      NativeLinkable.Linkage preferredLinkage,
      NativeLinkableInput nativeLinkableInput,
      ImmutableMap<String, SourcePath> sharedLibraries,
      BuildRule... ruleDeps) {
    super(
        BuildTargetFactory.newInstance(target),
        new SourcePathResolver(
            new BuildRuleResolver(
                TargetGraph.EMPTY,
                new BuildTargetNodeToBuildRuleTransformer())),
        ImmutableSortedSet.copyOf(ruleDeps));
    // Snapshot the dep iterables so later mutation by the caller cannot
    // change what this fake reports.
    this.declaredDeps = ImmutableList.copyOf(deps);
    this.declaredExportedDeps = ImmutableList.copyOf(exportedDeps);
    this.linkage = preferredLinkage;
    this.linkerInput = nativeLinkableInput;
    this.sharedLibs = sharedLibraries;
  }

  @Override
  public Iterable<NativeLinkable> getNativeLinkableDeps(CxxPlatform cxxPlatform) {
    return declaredDeps;
  }

  @Override
  public Iterable<NativeLinkable> getNativeLinkableExportedDeps(CxxPlatform cxxPlatform) {
    return declaredExportedDeps;
  }

  @Override
  public NativeLinkableInput getNativeLinkableInput(
      CxxPlatform cxxPlatform,
      Linker.LinkableDepType type) {
    return linkerInput;
  }

  @Override
  public NativeLinkable.Linkage getPreferredLinkage(CxxPlatform cxxPlatform) {
    return linkage;
  }

  @Override
  public ImmutableMap<String, SourcePath> getSharedLibraries(
      CxxPlatform cxxPlatform) {
    return sharedLibs;
  }
}
@Test
public void regularDepsUsingSharedLinkageAreNotTransitive() {
FakeNativeLinkable b =
new FakeNativeLinkable(
"//:b",
ImmutableList.<NativeLinkable>of(),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("b"))
.build(),
ImmutableMap.<String, SourcePath>of());
FakeNativeLinkable a =
new FakeNativeLinkable(
"//:a",
ImmutableList.of(b),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("a"))
.build(),
ImmutableMap.<String, SourcePath>of());
assertThat(
NativeLinkables.getNativeLinkables(
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableList.of(a),
Linker.LinkableDepType.SHARED).keySet(),
Matchers.not(Matchers.hasItem(b.getBuildTarget())));
}
@Test
public void exportedDepsUsingSharedLinkageAreTransitive() {
FakeNativeLinkable b =
new FakeNativeLinkable(
"//:b",
ImmutableList.<NativeLinkable>of(),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("b"))
.build(),
ImmutableMap.<String, SourcePath>of());
FakeNativeLinkable a =
new FakeNativeLinkable(
"//:a",
ImmutableList.<NativeLinkable>of(),
ImmutableList.of(b),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("a"))
.build(),
ImmutableMap.<String, SourcePath>of());
assertThat(
NativeLinkables.getNativeLinkables(
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableList.of(a),
Linker.LinkableDepType.SHARED).keySet(),
Matchers.hasItem(b.getBuildTarget()));
}
@Test
public void regularDepsFromStaticLibsUsingSharedLinkageAreTransitive() {
FakeNativeLinkable b =
new FakeNativeLinkable(
"//:b",
ImmutableList.<NativeLinkable>of(),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("b"))
.build(),
ImmutableMap.<String, SourcePath>of());
FakeNativeLinkable a =
new FakeNativeLinkable(
"//:a",
ImmutableList.of(b),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.STATIC,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("a"))
.build(),
ImmutableMap.<String, SourcePath>of());
assertThat(
NativeLinkables.getNativeLinkables(
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableList.of(a),
Linker.LinkableDepType.SHARED).keySet(),
Matchers.hasItem(b.getBuildTarget()));
}
@Test
public void regularDepsUsingStaticLinkageAreTransitive() {
FakeNativeLinkable b =
new FakeNativeLinkable(
"//:b",
ImmutableList.<NativeLinkable>of(),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("b"))
.build(),
ImmutableMap.<String, SourcePath>of());
FakeNativeLinkable a =
new FakeNativeLinkable(
"//:a",
ImmutableList.of(b),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("a"))
.build(),
ImmutableMap.<String, SourcePath>of());
assertThat(
NativeLinkables.getNativeLinkables(
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableList.of(a),
Linker.LinkableDepType.STATIC).keySet(),
Matchers.hasItem(b.getBuildTarget()));
}
@Test
public void gatherTransitiveSharedLibraries() throws Exception {
FakeNativeLinkable c =
new FakeNativeLinkable(
"//:c",
ImmutableList.<NativeLinkable>of(),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder().build(),
ImmutableMap.<String, SourcePath>of("libc.so", new FakeSourcePath("libc.so")));
FakeNativeLinkable b =
new FakeNativeLinkable(
"//:b",
ImmutableList.of(c),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.STATIC,
NativeLinkableInput.builder().build(),
ImmutableMap.<String, SourcePath>of("libb.so", new FakeSourcePath("libb.so")));
FakeNativeLinkable a =
new FakeNativeLinkable(
"//:a",
ImmutableList.of(b),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder().build(),
ImmutableMap.<String, SourcePath>of("liba.so", new FakeSourcePath("liba.so")));
ImmutableSortedMap<String, SourcePath> sharedLibs =
NativeLinkables.getTransitiveSharedLibraries(
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableList.of(a),
Predicates.instanceOf(NativeLinkable.class));
assertThat(
sharedLibs,
Matchers.equalTo(
ImmutableSortedMap.<String, SourcePath>of(
"liba.so", new FakeSourcePath("liba.so"),
"libc.so", new FakeSourcePath("libc.so"))));
}
@Test
public void nonNativeLinkableDepsAreIgnored() {
BuildRuleResolver resolver =
new BuildRuleResolver(TargetGraph.EMPTY, new BuildTargetNodeToBuildRuleTransformer());
SourcePathResolver pathResolver = new SourcePathResolver(resolver);
FakeNativeLinkable c =
new FakeNativeLinkable(
"//:c",
ImmutableList.<NativeLinkable>of(),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("c"))
.build(),
ImmutableMap.<String, SourcePath>of());
FakeBuildRule b =
new FakeBuildRule(
"//:b",
pathResolver,
c);
FakeNativeLinkable a =
new FakeNativeLinkable(
"//:a",
ImmutableList.<NativeLinkable>of(),
ImmutableList.<NativeLinkable>of(),
NativeLinkable.Linkage.ANY,
NativeLinkableInput.builder()
.addAllArgs(StringArg.from("a"))
.build(),
ImmutableMap.<String, SourcePath>of(),
b);
assertThat(a.getDeps(), Matchers.hasItem(b));
assertThat(
NativeLinkables.getNativeLinkables(
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableList.of(a),
Linker.LinkableDepType.STATIC).keySet(),
Matchers.not(Matchers.hasItem(c.getBuildTarget())));
}
@Test
public void getLinkStyle() {
assertThat(
NativeLinkables.getLinkStyle(
NativeLinkable.Linkage.STATIC,
Linker.LinkableDepType.SHARED),
Matchers.equalTo(Linker.LinkableDepType.STATIC_PIC));
assertThat(
NativeLinkables.getLinkStyle(
NativeLinkable.Linkage.SHARED,
Linker.LinkableDepType.STATIC),
Matchers.equalTo(Linker.LinkableDepType.SHARED));
assertThat(
NativeLinkables.getLinkStyle(
NativeLinkable.Linkage.ANY,
Linker.LinkableDepType.STATIC),
Matchers.equalTo(Linker.LinkableDepType.STATIC));
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vJAXB 2.1.10 in JDK 6
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2012.06.29 at 10:15:17 AM BST
//
package org.w3._1999.xhtml;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
/**
* <p>Java class for anonymous complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element ref="{http://www.w3.org/1999/xhtml}tr" maxOccurs="unbounded"/>
* </sequence>
* <attGroup ref="{http://www.w3.org/1999/xhtml}cellhalign"/>
* <attGroup ref="{http://www.w3.org/1999/xhtml}attrs"/>
* <attGroup ref="{http://www.w3.org/1999/xhtml}cellvalign"/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE(review): JAXB-generated binding for the XHTML <thead> element (see the
// generator banner above). Do not hand-edit substantive code here — it will be
// lost on the next regeneration from the schema.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "tr"
})
@XmlRootElement(name = "thead")
public class Thead {

    // Required content model: one or more <tr> rows.
    @XmlElement(required = true)
    protected List<Tr> tr;
    // cellhalign attribute group: horizontal cell alignment.
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String align;
    // "char" is a Java keyword, hence the leading underscore on the field.
    @XmlAttribute(name = "char")
    protected String _char;
    @XmlAttribute
    protected String charoff;
    // Intrinsic event attributes (scripting hooks) from the attrs group.
    @XmlAttribute
    protected String onclick;
    @XmlAttribute
    protected String ondblclick;
    @XmlAttribute
    protected String onmousedown;
    @XmlAttribute
    protected String onmouseup;
    @XmlAttribute
    protected String onmouseover;
    @XmlAttribute
    protected String onmousemove;
    @XmlAttribute
    protected String onmouseout;
    @XmlAttribute
    protected String onkeypress;
    @XmlAttribute
    protected String onkeydown;
    @XmlAttribute
    protected String onkeyup;
    // Unqualified XHTML "lang" attribute; bound as langCode so it does not
    // collide with the namespace-qualified xml:lang attribute mapped below.
    @XmlAttribute(name = "lang")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String langCode;
    // The xml:lang attribute (XML namespace).
    @XmlAttribute(namespace = "http://www.w3.org/XML/1998/namespace")
    protected String lang;
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String dir;
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    @XmlSchemaType(name = "ID")
    protected String id;
    // The HTML "class" attribute; renamed because "class" is a Java keyword.
    @XmlAttribute(name = "class")
    @XmlSchemaType(name = "NMTOKENS")
    protected List<String> clazz;
    @XmlAttribute
    protected String style;
    @XmlAttribute
    protected String title;
    // cellvalign attribute group: vertical cell alignment.
    @XmlAttribute
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String valign;

    /**
     * Gets the value of the tr property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the tr property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getTr().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link Tr }
     *
     *
     */
    public List<Tr> getTr() {
        // Lazily initialized, as is standard for JAXB-generated list accessors.
        if (tr == null) {
            tr = new ArrayList<Tr>();
        }
        return this.tr;
    }

    /**
     * Gets the value of the align property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getAlign() {
        return align;
    }

    /**
     * Sets the value of the align property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setAlign(String value) {
        this.align = value;
    }

    /**
     * Gets the value of the char property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getChar() {
        return _char;
    }

    /**
     * Sets the value of the char property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setChar(String value) {
        this._char = value;
    }

    /**
     * Gets the value of the charoff property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getCharoff() {
        return charoff;
    }

    /**
     * Sets the value of the charoff property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setCharoff(String value) {
        this.charoff = value;
    }

    /**
     * Gets the value of the onclick property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnclick() {
        return onclick;
    }

    /**
     * Sets the value of the onclick property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnclick(String value) {
        this.onclick = value;
    }

    /**
     * Gets the value of the ondblclick property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOndblclick() {
        return ondblclick;
    }

    /**
     * Sets the value of the ondblclick property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOndblclick(String value) {
        this.ondblclick = value;
    }

    /**
     * Gets the value of the onmousedown property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnmousedown() {
        return onmousedown;
    }

    /**
     * Sets the value of the onmousedown property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnmousedown(String value) {
        this.onmousedown = value;
    }

    /**
     * Gets the value of the onmouseup property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnmouseup() {
        return onmouseup;
    }

    /**
     * Sets the value of the onmouseup property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnmouseup(String value) {
        this.onmouseup = value;
    }

    /**
     * Gets the value of the onmouseover property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnmouseover() {
        return onmouseover;
    }

    /**
     * Sets the value of the onmouseover property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnmouseover(String value) {
        this.onmouseover = value;
    }

    /**
     * Gets the value of the onmousemove property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnmousemove() {
        return onmousemove;
    }

    /**
     * Sets the value of the onmousemove property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnmousemove(String value) {
        this.onmousemove = value;
    }

    /**
     * Gets the value of the onmouseout property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnmouseout() {
        return onmouseout;
    }

    /**
     * Sets the value of the onmouseout property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnmouseout(String value) {
        this.onmouseout = value;
    }

    /**
     * Gets the value of the onkeypress property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnkeypress() {
        return onkeypress;
    }

    /**
     * Sets the value of the onkeypress property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnkeypress(String value) {
        this.onkeypress = value;
    }

    /**
     * Gets the value of the onkeydown property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnkeydown() {
        return onkeydown;
    }

    /**
     * Sets the value of the onkeydown property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnkeydown(String value) {
        this.onkeydown = value;
    }

    /**
     * Gets the value of the onkeyup property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getOnkeyup() {
        return onkeyup;
    }

    /**
     * Sets the value of the onkeyup property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setOnkeyup(String value) {
        this.onkeyup = value;
    }

    /**
     * Gets the value of the langCode property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getLangCode() {
        return langCode;
    }

    /**
     * Sets the value of the langCode property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setLangCode(String value) {
        this.langCode = value;
    }

    /**
     * Gets the value of the lang property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getLang() {
        return lang;
    }

    /**
     * Sets the value of the lang property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setLang(String value) {
        this.lang = value;
    }

    /**
     * Gets the value of the dir property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getDir() {
        return dir;
    }

    /**
     * Sets the value of the dir property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setDir(String value) {
        this.dir = value;
    }

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Gets the value of the clazz property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the clazz property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getClazz().add(newItem);
     * </pre>
     *
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     *
     *
     */
    public List<String> getClazz() {
        if (clazz == null) {
            clazz = new ArrayList<String>();
        }
        return this.clazz;
    }

    /**
     * Gets the value of the style property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getStyle() {
        return style;
    }

    /**
     * Sets the value of the style property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setStyle(String value) {
        this.style = value;
    }

    /**
     * Gets the value of the title property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getTitle() {
        return title;
    }

    /**
     * Sets the value of the title property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setTitle(String value) {
        this.title = value;
    }

    /**
     * Gets the value of the valign property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getValign() {
        return valign;
    }

    /**
     * Sets the value of the valign property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setValign(String value) {
        this.valign = value;
    }

}
| |
/*
* Schematica (http://www.schematica.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.schematica.json.impl;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import javax.json.JsonArrayBuilder;
import javax.json.JsonNumber;
import javax.json.JsonObjectBuilder;
import javax.json.JsonString;
import javax.json.JsonValue;
import org.schematica.json.EditableJsonArray;
import org.schematica.json.Json;
import org.schematica.json.JsonArray;
import org.schematica.json.JsonObject;
import org.schematica.json.impl.util.JsonValueConverter;
/**
* @author Horia Chiorean (hchiorea@redhat.com)
* @NotThreadSafe
*/
public class SchematicaEditableArray extends AbstractList<JsonValue> implements EditableJsonArray {

    /** The underlying (unmodified) array being edited. */
    private final JsonArray array;
    /** Size of {@link #array} at edit time; logical indexes >= this refer to {@link #changes}. */
    private final int arraySize;
    /** Values appended after the underlying array; stored as raw Java objects until converted. */
    private final List<Object> changes;

    protected SchematicaEditableArray( JsonArray array ) {
        this.array = array;
        this.arraySize = array.size();
        this.changes = new ArrayList<>();
    }

    /**
     * Returns the pending change stored at the given <em>logical</em> index
     * (an index into this editable view), translating it into an index into
     * {@link #changes}.
     * <p>
     * Bug fix: the original getters indexed {@code changes} with the logical
     * index directly, which contradicts {@link #get(int)} and {@link #size()}
     * and reads the wrong slot (or throws) for any appended element.
     */
    private Object changeAt( int index ) {
        return changes.get(index - arraySize);
    }

    /** True when the logical index (already known to be >= arraySize) has a pending change. */
    private boolean hasChangeAt( int index ) {
        return index - arraySize < changes.size();
    }

    @Override
    public EditableJsonArray add( String value ) {
        changes.add(value);
        return this;
    }

    @Override
    public EditableJsonArray add( BigDecimal value ) {
        changes.add(value);
        return this;
    }

    @Override
    public EditableJsonArray add( BigInteger value ) {
        changes.add(value);
        return this;
    }

    @Override
    public EditableJsonArray add( int value ) {
        changes.add(value);
        return this;
    }

    @Override
    public EditableJsonArray add( long value ) {
        changes.add(value);
        return this;
    }

    @Override
    public EditableJsonArray add( double value ) {
        changes.add(value);
        return this;
    }

    @Override
    public EditableJsonArray add( boolean value ) {
        changes.add(value);
        return this;
    }

    @Override
    public EditableJsonArray addNull() {
        // NULL is the JsonValue.NULL sentinel inherited via the JsonValue interface.
        changes.add(NULL);
        return this;
    }

    @Override
    public EditableJsonArray add( JsonObjectBuilder builder ) {
        // Built eagerly, so later mutations of the builder are not reflected here.
        changes.add(builder.build());
        return this;
    }

    @Override
    public EditableJsonArray add( JsonArrayBuilder builder ) {
        changes.add(builder.build());
        return this;
    }

    @Override
    public EditableJsonArray add( Date value ) {
        changes.add(value);
        return this;
    }

    @Override
    public EditableJsonArray add( byte[] value ) {
        changes.add(value);
        return this;
    }

    @Override
    public JsonObject getJsonObject( int index ) {
        // Objects are stored pre-built (see add(JsonObjectBuilder)), so a cast suffices.
        return index < arraySize ? array.getJsonObject(index) : (JsonObject)changeAt(index);
    }

    @Override
    public JsonArray getJsonArray( int index ) {
        return index < arraySize ? array.getJsonArray(index) : (JsonArray)changeAt(index);
    }

    @Override
    public long getLong( int index ) {
        // Number widening instead of a bare (long) cast: add(int) stores an Integer,
        // which a direct cast to long would reject with a ClassCastException.
        return index < arraySize ? array.getLong(index) : ((Number)changeAt(index)).longValue();
    }

    @Override
    public long getLong( int index, long defaultValue ) {
        if (index < arraySize) {
            return array.getLong(index, defaultValue);
        }
        return hasChangeAt(index) ? ((Number)changeAt(index)).longValue() : defaultValue;
    }

    @Override
    public double getDouble( int index ) {
        return index < arraySize ? array.getDouble(index) : ((Number)changeAt(index)).doubleValue();
    }

    @Override
    public double getDouble( int index, double defaultValue ) {
        if (index < arraySize) {
            return array.getDouble(index, defaultValue);
        }
        return hasChangeAt(index) ? ((Number)changeAt(index)).doubleValue() : defaultValue;
    }

    @Override
    public BigInteger getBigInteger( int index ) {
        return index < arraySize ? array.getBigInteger(index) : (BigInteger)changeAt(index);
    }

    @Override
    public BigInteger getBigInteger( int index, BigInteger defaultValue ) {
        if (index < arraySize) {
            return array.getBigInteger(index, defaultValue);
        }
        return hasChangeAt(index) ? (BigInteger)changeAt(index) : defaultValue;
    }

    @Override
    public BigDecimal getBigDecimal( int index ) {
        return index < arraySize ? array.getBigDecimal(index) : (BigDecimal)changeAt(index);
    }

    @Override
    public BigDecimal getBigDecimal( int index, BigDecimal defaultValue ) {
        if (index < arraySize) {
            return array.getBigDecimal(index, defaultValue);
        }
        return hasChangeAt(index) ? (BigDecimal)changeAt(index) : defaultValue;
    }

    @Override
    public Date getDate( int index ) {
        return index < arraySize ? array.getDate(index) : (Date)changeAt(index);
    }

    @Override
    public Date getDate( int index, Date defaultValue ) {
        if (index < arraySize) {
            return array.getDate(index, defaultValue);
        }
        return hasChangeAt(index) ? (Date)changeAt(index) : defaultValue;
    }

    @Override
    public byte[] getBinary( int index ) {
        return index < arraySize ? array.getBinary(index) : (byte[])changeAt(index);
    }

    @Override
    public EditableJsonArray edit() {
        // Already editable; editing an editable view is a no-op.
        return this;
    }

    @Override
    public JsonArray merge( javax.json.JsonArray other ) {
        // Fold the pending changes into the underlying array first, then merge the other.
        return array.merge(changesToJson()).merge(other);
    }

    /** Converts the raw pending changes into a proper {@link JsonArray}. */
    protected JsonArray changesToJson() {
        org.schematica.json.JsonArrayBuilder localChanges = Json.createArrayBuilder();
        for (Object value : changes) {
            localChanges.add(JsonValueConverter.jsonValueFrom(value));
        }
        return localChanges.build();
    }

    @Override
    public JsonNumber getJsonNumber( int index ) {
        // Changes hold raw numbers, so convert to a JsonValue before casting,
        // mirroring get(int).
        return index < arraySize ?
               array.getJsonNumber(index) :
               (JsonNumber)JsonValueConverter.jsonValueFrom(changeAt(index));
    }

    @Override
    public JsonString getJsonString( int index ) {
        return index < arraySize ?
               array.getJsonString(index) :
               (JsonString)JsonValueConverter.jsonValueFrom(changeAt(index));
    }

    @Override
    public <T extends JsonValue> List<T> getValuesAs( Class<T> clazz ) {
        List<T> values = new ArrayList<>(arraySize + changes.size());
        values.addAll(array.getValuesAs(clazz));
        // Bug fix: the original iterated 'values' while appending to it
        // (ConcurrentModificationException and the changes were never added).
        // Iterate the pending changes instead, converting raw values to
        // JsonValue as get(int) does before the cast.
        for (Object value : changes) {
            values.add(clazz.cast(JsonValueConverter.jsonValueFrom(value)));
        }
        return Collections.unmodifiableList(values);
    }

    @Override
    public String getString( int index ) {
        return index < arraySize ? array.getString(index) : (String)changeAt(index);
    }

    @Override
    public String getString( int index, String defaultValue ) {
        if (index < arraySize) {
            return array.getString(index, defaultValue);
        }
        return hasChangeAt(index) ? (String)changeAt(index) : defaultValue;
    }

    @Override
    public int getInt( int index ) {
        return index < arraySize ? array.getInt(index) : ((Number)changeAt(index)).intValue();
    }

    @Override
    public int getInt( int index, int defaultValue ) {
        if (index < arraySize) {
            return array.getInt(index, defaultValue);
        }
        return hasChangeAt(index) ? ((Number)changeAt(index)).intValue() : defaultValue;
    }

    @Override
    public boolean getBoolean( int index ) {
        return index < arraySize ? array.getBoolean(index) : (boolean)changeAt(index);
    }

    @Override
    public boolean getBoolean( int index, boolean defaultValue ) {
        if (index < arraySize) {
            return array.getBoolean(index, defaultValue);
        }
        return hasChangeAt(index) ? (boolean)changeAt(index) : defaultValue;
    }

    @Override
    public boolean isNull( int index ) {
        // Appended nulls are stored as the NULL sentinel (see addNull()).
        return index < arraySize ? array.isNull(index) : changeAt(index) == NULL;
    }

    @Override
    public ValueType getValueType() {
        return ValueType.ARRAY;
    }

    @Override
    public int size() {
        return arraySize + changes.size();
    }

    @Override
    public JsonValue get( int index ) {
        return index < arraySize ? array.get(index) : JsonValueConverter.jsonValueFrom(changes.get(index - arraySize));
    }

    @Override
    public JsonArray unwrap() {
        return array.merge(this);
    }

    protected JsonArray getArray() {
        return array;
    }

    @Override
    public boolean equals( Object o ) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        // AbstractList.equals compares elements pairwise; then require the same
        // underlying array and the same pending changes.
        if (!super.equals(o)) {
            return false;
        }
        SchematicaEditableArray that = (SchematicaEditableArray)o;
        if (!array.equals(that.array)) {
            return false;
        }
        if (!changes.equals(that.changes)) {
            return false;
        }
        return true;
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + array.hashCode();
        result = 31 * result + changes.hashCode();
        return result;
    }

    @Override
    public String toString() {
        final StringBuilder sb = new StringBuilder("SchematicaEditableArray{");
        sb.append("array=").append(array);
        sb.append(", changes=").append(changes);
        sb.append('}');
        return sb.toString();
    }
}
| |
package org.hisp.dhis.dataelement;
/*
* Copyright (c) 2004-2015, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.analytics.AggregationType;
import org.hisp.dhis.common.ListMap;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataset.DataSet;
import org.hisp.dhis.hierarchy.HierarchyViolationException;
import org.hisp.dhis.period.PeriodType;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* Defines service functionality for DataElements and DataElementGroups.
*
* @author Kristian Nordal
* @version $Id: DataElementService.java 6289 2008-11-14 17:53:24Z larshelg $
*/
public interface DataElementService
{
String ID = DataElementService.class.getName();
// -------------------------------------------------------------------------
// DataElement
// -------------------------------------------------------------------------
/**
* Adds a DataElement.
*
* @param dataElement the DataElement to add.
* @return a generated unique id of the added DataElement.
*/
int addDataElement( DataElement dataElement );
/**
* Updates a DataElement.
*
* @param dataElement the DataElement to update.
*/
void updateDataElement( DataElement dataElement );
/**
* Deletes a DataElement. The DataElement is also removed from any
* DataElementGroups it is a member of. It is not possible to delete a
* DataElement with children.
*
* @param dataElement the DataElement to delete.
* @throws HierarchyViolationException if the DataElement has children.
*/
void deleteDataElement( DataElement dataElement );
/**
* Returns a DataElement.
*
* @param id the id of the DataElement to return.
* @return the DataElement with the given id, or null if no match.
*/
DataElement getDataElement( int id );
/**
* Returns the DataElement with the given UID.
*
* @param uid the UID.
* @return the DataElement with the given UID, or null if no match.
*/
DataElement getDataElement( String uid );
/**
* Returns the DataElement with the given code.
*
* @param code the code.
* @return the DataElement with the given code, or null if no match.
*/
DataElement getDataElementByCode( String code );
/**
* Returns a DataElement with a given name.
*
* @param name the name of the DataElement to return.
* @return the DataElement with the given name, or null if no match.
*/
DataElement getDataElementByName( String name );
/**
* Returns List of DataElements with a given key.
*
* @param key the name of the DataElement to return.
* @return List of DataElements with a given key, or all dataelements if no
* match.
*/
List<DataElement> searchDataElementsByName( String key );
/**
* Returns a DataElement with a given short name.
*
* @param shortName the short name of the DataElement to return.
* @return the DataElement with the given short name, or null if no match.
*/
DataElement getDataElementByShortName( String shortName );
/**
* Returns all DataElements.
*
* @return a list of all DataElements, or an empty list if there
* are no DataElements.
*/
List<DataElement> getAllDataElements();
/**
* Returns all DataElements with corresponding identifiers. Returns all
* DataElements if the given argument is null.
*
* @param uids the collection of uids.
* @return a list of DataElements.
*/
List<DataElement> getDataElementsByUid( Collection<String> uids );
/**
* Returns all DataElements with types that are possible to aggregate. The
* types are currently INT and BOOL.
*
* @return all DataElements with types that are possible to aggregate.
*/
List<DataElement> getAggregateableDataElements();
/**
* Returns all DataElements with a given aggregation operator.
*
* @param aggregationType the aggregation type of the DataElements
* to return.
* @return a list of all DataElements with the given aggregation
* operator, or an empty collection if no DataElements have the
* aggregation operator.
*/
List<DataElement> getDataElementsByAggregationType( AggregationType aggregationType );
/**
* Returns all DataElements with the given domain type.
*
* @param domainType the DataElementDomainType.
* @return all DataElements with the given domainType.
*/
List<DataElement> getDataElementsByDomainType( DataElementDomain domainType );
/**
* Returns all DataElements with the given domain type.
*
* @param domainType the DataElementDomainType.
* @return all DataElements with the given domainType.
*/
List<DataElement> getDataElementsByDomainType( DataElementDomain domainType, int first, int max );
/**
* Returns all DataElements with the given value types.
*
* @param valueTypes The value types.
* @return all DataElements with the given value types.
*/
List<DataElement> getDataElementsByValueTypes( Collection<ValueType> valueTypes );
/**
* Returns all DataElements with the given type.
*
* @param valueType The value type.
* @return all DataElements with the given value type.
*/
List<DataElement> getDataElementsByValueType( ValueType valueType );
/**
* Returns the DataElements with the given PeriodType.
*
* @param periodType the PeriodType.
* @return a list of DataElements.
*/
List<DataElement> getDataElementsByPeriodType( PeriodType periodType );
/**
* Returns all DataElements with the given category combo.
*
* @param categoryCombo the DataElementCategoryCombo.
* @return all DataElements with the given category combo.
*/
List<DataElement> getDataElementByCategoryCombo( DataElementCategoryCombo categoryCombo );
/**
* Returns a Map with DataElementCategoryCombo as key and a Collection of
* the DataElements belonging to the DataElementCategoryCombo from the given
* argument List of DataElements as value.
*
* @param dataElements the DataElements to include.
* @return grouped DataElements based on their DataElementCategoryCombo.
*/
Map<DataElementCategoryCombo, List<DataElement>> getGroupedDataElementsByCategoryCombo(
List<DataElement> dataElements );
/**
 * Returns the DataElementCategoryCombos associated with the given argument
 * list of DataElements.
 *
 * @param dataElements the DataElements.
 * @return a list of DataElementCategoryCombos.
 */
List<DataElementCategoryCombo> getDataElementCategoryCombos( List<DataElement> dataElements );
/**
 * Returns all DataElements which are associated with one or more
 * DataElementGroupSets.
 *
 * @return all DataElements which are associated with one or more
 * DataElementGroupSets.
 */
List<DataElement> getDataElementsWithGroupSets();
/**
 * Returns all DataElements which are not member of any DataElementGroups.
 *
 * @return all DataElements which are not member of any DataElementGroups.
 */
List<DataElement> getDataElementsWithoutGroups();
/**
 * Returns all DataElements which are not assigned to any DataSets.
 *
 * @return all DataElements which are not assigned to any DataSets.
 */
List<DataElement> getDataElementsWithoutDataSets();
/**
 * Returns all DataElements which are assigned to at least one DataSet.
 *
 * @return all DataElements which are assigned to at least one DataSet.
 */
List<DataElement> getDataElementsWithDataSets();
/**
 * Returns all DataElements which are assigned to any of the given DataSets.
 *
 * @param dataSets the collection of DataSets.
 * @return all DataElements which are assigned to any of the given DataSets.
 */
List<DataElement> getDataElementsByDataSets( Collection<DataSet> dataSets );
/**
 * Returns all DataElements which have the given aggregation level assigned.
 *
 * @param aggregationLevel the aggregation level.
 * @return all DataElements which have the given aggregation level assigned.
 */
List<DataElement> getDataElementsByAggregationLevel( int aggregationLevel );
/**
 * Returns all DataElements whose name matches the given name argument
 * (like-match).
 *
 * @param name the name to match.
 * @return a list of DataElements.
 */
List<DataElement> getDataElementsLikeName( String name );
/**
 * Returns the DataElements within the given result range.
 *
 * @param first the index of the first result to return.
 * @param max the maximum number of results to return.
 * @return a list of DataElements.
 */
List<DataElement> getDataElementsBetween( int first, int max );
/**
 * Returns the DataElements matching the given name, within the given
 * result range.
 *
 * @param name the name to match.
 * @param first the index of the first result to return.
 * @param max the maximum number of results to return.
 * @return a list of DataElements.
 */
List<DataElement> getDataElementsBetweenByName( String name, int first, int max );
/**
 * Returns the total number of DataElements.
 *
 * @return the number of DataElements.
 */
int getDataElementCount();
/**
 * Returns the number of DataElements matching the given name.
 *
 * @param name the name to match.
 * @return the number of matching DataElements.
 */
int getDataElementCountByName( String name );
/**
 * Returns the number of DataElements with the given domain type.
 *
 * @param domainType the data element domain type.
 * @return the number of matching DataElements.
 */
int getDataElementCountByDomainType( DataElementDomain domainType );
/**
 * Returns a mapping of data element uid and associated category option combo
 * uids.
 *
 * @param dataElementUids the uids of the data elements to include in the map.
 * @return a ListMap.
 */
ListMap<String, String> getDataElementCategoryOptionComboMap( Set<String> dataElementUids );
/**
 * Returns a mapping from data element uid to internal identifier.
 *
 * @return a map from uid to internal identifier.
 */
Map<String, Integer> getDataElementUidIdMap();
// -------------------------------------------------------------------------
// DataElementGroup
// -------------------------------------------------------------------------
/**
 * Adds a DataElementGroup.
 *
 * @param dataElementGroup the DataElementGroup to add.
 * @return a generated unique id of the added DataElementGroup.
 */
int addDataElementGroup( DataElementGroup dataElementGroup );
/**
 * Updates a DataElementGroup.
 *
 * @param dataElementGroup the DataElementGroup to update.
 */
void updateDataElementGroup( DataElementGroup dataElementGroup );
/**
 * Deletes a DataElementGroup.
 *
 * @param dataElementGroup the DataElementGroup to delete.
 */
void deleteDataElementGroup( DataElementGroup dataElementGroup );
/**
 * Returns a DataElementGroup.
 *
 * @param id the id of the DataElementGroup to return.
 * @return the DataElementGroup with the given id, or null if no match.
 */
DataElementGroup getDataElementGroup( int id );
/**
 * Returns a DataElementGroup.
 *
 * @param id the id of the DataElementGroup to return.
 * @param i18nDataElements whether to i18n the data elements of this group.
 * @return the DataElementGroup with the given id, or null if no match.
 */
DataElementGroup getDataElementGroup( int id, boolean i18nDataElements );
/**
 * Returns the data element groups with the given uids.
 *
 * @param uids the uid collection.
 * @return the data element groups with the given uids.
 */
List<DataElementGroup> getDataElementGroupsByUid( Collection<String> uids );
/**
 * Returns the DataElementGroup with the given UID.
 *
 * @param uid the UID of the DataElementGroup to return.
 * @return the DataElementGroup with the given UID, or null if no match.
 */
DataElementGroup getDataElementGroup( String uid );
/**
 * Returns a DataElementGroup with a given name.
 *
 * @param name the name of the DataElementGroup to return.
 * @return the DataElementGroup with the given name, or null if no match.
 */
DataElementGroup getDataElementGroupByName( String name );
/**
 * Returns all DataElementGroups.
 *
 * @return a collection of all DataElementGroups, or an empty collection if
 * no DataElementGroups exist.
 */
List<DataElementGroup> getAllDataElementGroups();
/**
 * Returns a DataElementGroup with a given short name.
 *
 * @param shortName the short name of the DataElementGroup to return.
 * @return the DataElementGroup with the given short name, or null if no match.
 */
DataElementGroup getDataElementGroupByShortName( String shortName );
/**
 * Returns a DataElementGroup with a given code.
 *
 * @param code the code of the DataElementGroup to return.
 * @return the DataElementGroup with the given code, or null if no match.
 */
DataElementGroup getDataElementGroupByCode( String code );
/**
 * Returns data elements with identifier in the given id.
 *
 * @param groupId is the id of data element group.
 * @return data elements with identifier in the given id.
 */
Set<DataElement> getDataElementsByGroupId( int groupId );
/**
 * Defines the given data elements as zero is significant. All other data
 * elements are defined as zero is in-significant.
 *
 * @param dataElementIds identifiers of data elements where zero is
 * significant.
 */
void setZeroIsSignificantForDataElements( Collection<Integer> dataElementIds );
/**
 * Returns all DataElement which zeroIsSignificant property is true or false
 *
 * @param zeroIsSignificant the zeroIsSignificant property value to match.
 * @return a collection of all DataElement
 */
List<DataElement> getDataElementsByZeroIsSignificant( boolean zeroIsSignificant );
/**
 * Returns all DataElement which zeroIsSignificant property is true or false
 *
 * @param zeroIsSignificant is zeroIsSignificant property
 * @param dataElementGroup is group contain data elements
 * @return a set of data elements.
 */
Set<DataElement> getDataElementsByZeroIsSignificantAndGroup( boolean zeroIsSignificant,
DataElementGroup dataElementGroup );
/**
 * Returns the DataElementGroups within the given result range.
 *
 * @param first the index of the first result to return.
 * @param max the maximum number of results to return.
 * @return a list of DataElementGroups.
 */
List<DataElementGroup> getDataElementGroupsBetween( int first, int max );
/**
 * Returns the DataElementGroups matching the given name, within the given
 * result range.
 *
 * @param name the name to match.
 * @param first the index of the first result to return.
 * @param max the maximum number of results to return.
 * @return a list of DataElementGroups.
 */
List<DataElementGroup> getDataElementGroupsBetweenByName( String name, int first, int max );
/**
 * Returns the total number of DataElementGroups.
 *
 * @return the number of DataElementGroups.
 */
int getDataElementGroupCount();
/**
 * Returns the number of DataElementGroups matching the given name.
 *
 * @param name the name to match.
 * @return the number of matching DataElementGroups.
 */
int getDataElementGroupCountByName( String name );
/**
 * Returns data elements for the given data set, filtered by the given key.
 *
 * @param dataSet the DataSet.
 * @param key a search key (presumably a name filter - confirm with the
 * implementation).
 * @param max the maximum number of results to return, or null for no limit.
 * @return a list of DataElements.
 */
List<DataElement> getDataElements( DataSet dataSet, String key, Integer max );
// -------------------------------------------------------------------------
// DataElementGroupSet
// -------------------------------------------------------------------------
/**
 * Adds a DataElementGroupSet.
 *
 * @param groupSet the DataElementGroupSet to add.
 * @return a generated unique id of the added DataElementGroupSet.
 */
int addDataElementGroupSet( DataElementGroupSet groupSet );
/**
 * Updates a DataElementGroupSet.
 *
 * @param groupSet the DataElementGroupSet to update.
 */
void updateDataElementGroupSet( DataElementGroupSet groupSet );
/**
 * Deletes a DataElementGroupSet.
 *
 * @param groupSet the DataElementGroupSet to delete.
 */
void deleteDataElementGroupSet( DataElementGroupSet groupSet );
/**
 * Returns the DataElementGroupSet with the given id, or null if no match.
 *
 * @param id the id of the DataElementGroupSet.
 * @return the matching DataElementGroupSet, or null.
 */
DataElementGroupSet getDataElementGroupSet( int id );
/**
 * Returns the DataElementGroupSet with the given id, or null if no match.
 *
 * @param id the id of the DataElementGroupSet.
 * @param i18nGroups whether to i18n the groups of this group set.
 * @return the matching DataElementGroupSet, or null.
 */
DataElementGroupSet getDataElementGroupSet( int id, boolean i18nGroups );
/**
 * Returns the DataElementGroupSet with the given UID, or null if no match.
 *
 * @param uid the UID of the DataElementGroupSet.
 * @return the matching DataElementGroupSet, or null.
 */
DataElementGroupSet getDataElementGroupSet( String uid );
/**
 * Returns the DataElementGroupSet with the given name, or null if no match.
 *
 * @param name the name of the DataElementGroupSet.
 * @return the matching DataElementGroupSet, or null.
 */
DataElementGroupSet getDataElementGroupSetByName( String name );
/**
 * Returns all compulsory DataElementGroupSets which have one or more members.
 *
 * @return a list of DataElementGroupSets.
 */
List<DataElementGroupSet> getCompulsoryDataElementGroupSetsWithMembers();
/**
 * Returns all DataElementGroupSets.
 *
 * @return a list of all DataElementGroupSets.
 */
List<DataElementGroupSet> getAllDataElementGroupSets();
/**
 * Returns the DataElementGroupSets with the given uids.
 *
 * @param uids the uid collection.
 * @return a list of DataElementGroupSets.
 */
List<DataElementGroupSet> getDataElementGroupSetsByUid( Collection<String> uids );
/**
 * Returns the DataElementGroupSets within the given result range.
 *
 * @param first the index of the first result to return.
 * @param max the maximum number of results to return.
 * @return a list of DataElementGroupSets.
 */
List<DataElementGroupSet> getDataElementGroupSetsBetween( int first, int max );
/**
 * Returns the DataElementGroupSets matching the given name, within the given
 * result range.
 *
 * @param name the name to match.
 * @param first the index of the first result to return.
 * @param max the maximum number of results to return.
 * @return a list of DataElementGroupSets.
 */
List<DataElementGroupSet> getDataElementGroupSetsBetweenByName( String name, int first, int max );
/**
 * Returns the total number of DataElementGroupSets.
 *
 * @return the number of DataElementGroupSets.
 */
int getDataElementGroupSetCount();
/**
 * Returns the number of DataElementGroupSets matching the given name.
 *
 * @param name the name to match.
 * @return the number of matching DataElementGroupSets.
 */
int getDataElementGroupSetCountByName( String name );
}
| |
/* This file is generated by TestGenerator, any edits will be overwritten by the next generation. */
package org.antlr.v4.test.runtime.python2;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
import org.antlr.v4.test.runtime.java.ErrorQueue;
import org.antlr.v4.tool.Grammar;
@SuppressWarnings("unused")
// Generated composite-grammar (grammar import) tests for the Python2 runtime
// target. Each test writes one or more delegate grammars plus a delegator
// grammar M, runs the generated parser on a small input, and compares the
// output of the embedded print actions. Do not hand-edit the test bodies:
// the generator will overwrite them; the capacities passed to StringBuilder
// and the exact grammar strings are emitted by the generator.
public class TestCompositeParsers extends BasePython2Test {
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// Delegator M matches "=a" via the imported rule 'a' and prints "S.a".
@Test
public void testBringInLiteralsFromDelegate() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"a : '=' 'a' {print(\"S.a\",end='')};";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(54);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("s : a ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="=a";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("S.a\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// M imports S whose INT/WS lexer rules must carry over; the tool must
// report no errors and the imported rule x must fire.
@Test
public void testCombinedImportsCombined() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"tokens { A, B, C }\n" +
"x : 'x' INT {print(\"S.x\")};\n" +
"INT : '0'..'9'+ ;\n" +
"WS : (' '|'\\n') -> skip ;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(31);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("s : x INT;");
String grammar = grammarBuilder.toString();
writeFile(tmpdir, "M.g4", grammar);
ErrorQueue equeue = new ErrorQueue();
new Grammar(tmpdir+"/M.g4", grammar, equeue);
assertEquals("unexpected errors: " + equeue, 0, equeue.errors.size());
String input ="x 34 9";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("S.x\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// Token types for A, B, C must be assigned identically in M and in both
// delegates, regardless of the order they are declared in.
@Test
public void testDelegatesSeeSameTokenType() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"tokens { A, B, C }\n" +
"x : A {print(\"S.x\")};";
writeFile(tmpdir, "S.g4", slave_S);
String slave_T =
"parser grammar T;\n" +
"tokens { C, B, A } // reverse order\n" +
"y : A {print(\"T.y\")};";
writeFile(tmpdir, "T.g4", slave_T);
StringBuilder grammarBuilder = new StringBuilder(598);
grammarBuilder.append("// The lexer will create rules to match letters a, b, c.\n");
grammarBuilder.append("// The associated token types A, B, C must have the same value\n");
grammarBuilder.append("// and all import'd parsers. Since ANTLR regenerates all imports\n");
grammarBuilder.append("// for use with the delegator M, it can generate the same token type\n");
grammarBuilder.append("// mapping in each parser:\n");
grammarBuilder.append("// public static final int C=6;\n");
grammarBuilder.append("// public static final int EOF=-1;\n");
grammarBuilder.append("// public static final int B=5;\n");
grammarBuilder.append("// public static final int WS=7;\n");
grammarBuilder.append("// public static final int A=4;\n");
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S,T;\n");
grammarBuilder.append("s : x y ; // matches AA, which should be 'aa'\n");
grammarBuilder.append("B : 'b' ; // another order: B, A, C\n");
grammarBuilder.append("A : 'a' ; \n");
grammarBuilder.append("C : 'c' ; \n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
writeFile(tmpdir, "M.g4", grammar);
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(tmpdir+"/M.g4", grammar, equeue);
String expectedTokenIDToTypeMap = "{EOF=-1, B=1, A=2, C=3, WS=4}";
String expectedStringLiteralToTypeMap = "{'a'=2, 'b'=1, 'c'=3}";
String expectedTypeToTokenList = "[B, A, C, WS]";
assertEquals(expectedTokenIDToTypeMap, g.tokenNameToTypeMap.toString());
assertEquals(expectedStringLiteralToTypeMap, sort(g.stringLiteralToTypeMap).toString());
assertEquals(expectedTypeToTokenList, realElements(g.typeToTokenList).toString());
assertEquals("unexpected errors: "+equeue, 0, equeue.errors.size());
String input ="aa";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals(
"S.x\n" +
"T.y\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// An action in M calls foo() defined in the imported grammar's @members.
@Test
public void testDelegatorAccessesDelegateMembers() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"@parser::members {\n" +
"def foo(self):\n" +
" print('foo')\n" +
"}\n" +
"a : B;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(126);
grammarBuilder.append("grammar M; // uses no rules from the import\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("s : 'b' {self.foo()} ; // gS is import pointer\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="b";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("foo\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// M's start rule invokes imported rule 'a'; B is defined by M.
@Test
public void testDelegatorInvokesDelegateRule() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"a : B {print(\"S.a\")};";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(104);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("s : a ;\n");
grammarBuilder.append("B : 'b' ; // defines B from inherited token space\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="b";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("S.a\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// Imported rule takes an argument and sets return value $y = 1000,
// which M reads via the rule label.
@Test
public void testDelegatorInvokesDelegateRuleWithArgs() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"a[int x] returns [int y] : B {print(\"S.a\",end='')} {$y=1000;} ;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(131);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("s : label=a[3] {print($label.y)} ;\n");
grammarBuilder.append("B : 'b' ; // defines B from inherited token space\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="b";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("S.a1000\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// M reads $a.text from the imported rule's return context.
@Test
public void testDelegatorInvokesDelegateRuleWithReturnStruct() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"a : B {print(\"S.a\",end='')} ;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(128);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("s : a {print($a.text,end='')} ;\n");
grammarBuilder.append("B : 'b' ; // defines B from inherited token space\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="b";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("S.ab\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// When both S and T define rule 'a', the first import (S) must win.
@Test
public void testDelegatorInvokesFirstVersionOfDelegateRule() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"a : b {print(\"S.a\")};\n" +
"b : B;";
writeFile(tmpdir, "S.g4", slave_S);
String slave_T =
"parser grammar T;\n" +
"a : B {print(\"T.a\")};";
writeFile(tmpdir, "T.g4", slave_T);
StringBuilder grammarBuilder = new StringBuilder(106);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S,T;\n");
grammarBuilder.append("s : a ;\n");
grammarBuilder.append("B : 'b' ; // defines B from inherited token space\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="b";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("S.a\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// M's 'b' overrides S's; the imported S.a must call M's override so
// input 'c' (only matched by the override) still parses.
@Test
public void testDelegatorRuleOverridesDelegate() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"a : b {print(\"S.a\",end='')};\n" +
"b : B ;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(59);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("b : 'b'|'c';\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="c";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "a", input, false);
assertEquals("S.a\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// M's 'b' overrides the versions in both delegates S and T.
@Test
public void testDelegatorRuleOverridesDelegates() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"a : b {print(\"S.a\")};\n" +
"b : 'b' ;";
writeFile(tmpdir, "S.g4", slave_S);
String slave_T =
"parser grammar T;\n" +
"tokens { A }\n" +
"b : 'b' {print(\"T.b\")};";
writeFile(tmpdir, "T.g4", slave_T);
StringBuilder grammarBuilder = new StringBuilder(81);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S, T;\n");
grammarBuilder.append("b : 'b'|'c' {print(\"M.b\")}|B|A;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="c";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "a", input, false);
assertEquals(
"M.b\n" +
"S.a\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// M's type_ adds 'float'; the imported decl rule's lookahead must use
// the override so "float x = 3;" matches the init alternative.
@Test
public void testDelegatorRuleOverridesLookaheadInDelegate() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"type_ : 'int' ;\n" +
"decl : type_ ID ';'\n" +
" | type_ ID init ';' {print(\"JavaDecl: \" + $text,end='')};\n" +
"init : '=' INT;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(121);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("prog : decl ;\n");
grammarBuilder.append("type_ : 'int' | 'float' ;\n");
grammarBuilder.append("ID : 'a'..'z'+ ;\n");
grammarBuilder.append("INT : '0'..'9'+ ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip;");
String grammar = grammarBuilder.toString();
String input ="float x = 3;";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "prog", input, false);
assertEquals("JavaDecl: floatx=3;\n", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// Fragment rules from the imported lexer grammar are usable in the
// importing grammar's WS rule; parse must be silent on success.
@Test
public void testImportLexerWithOnlyFragmentRules() throws Exception {
mkdir(tmpdir);
String slave_Unicode =
"lexer grammar Unicode;\n" +
"\n" +
"fragment\n" +
"UNICODE_CLASS_Zs : '\\u0020' | '\\u00A0' | '\\u1680' | '\\u180E'\n" +
" | '\\u2000'..'\\u200A'\n" +
" | '\\u202F' | '\\u205F' | '\\u3000'\n" +
" ;\n";
writeFile(tmpdir, "Unicode.g4", slave_Unicode);
StringBuilder grammarBuilder = new StringBuilder(91);
grammarBuilder.append("grammar Test;\n");
grammarBuilder.append("import Unicode;\n");
grammarBuilder.append("\n");
grammarBuilder.append("program : 'test' 'test';\n");
grammarBuilder.append("\n");
grammarBuilder.append("WS : (UNICODE_CLASS_Zs)+ -> skip;\n");
String grammar = grammarBuilder.toString();
String input ="test test";
String found = execParser("Test.g4", grammar, "TestParser", "TestLexer", "TestListener", "TestVisitor", "program", input, false);
assertEquals("", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// An empty options{} block in the imported grammar must not break import.
@Test
public void testImportedGrammarWithEmptyOptions() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"options {}\n" +
"a : B ;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(64);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("s : a ;\n");
grammarBuilder.append("B : 'b' ;\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="b";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// An imported rule carrying an @after action must generate and run cleanly.
@Test
public void testImportedRuleWithAction() throws Exception {
mkdir(tmpdir);
String slave_S =
"parser grammar S;\n" +
"a @after {x = 0} : B;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(62);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("s : a;\n");
grammarBuilder.append("B : 'b';\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="b";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "s", input, false);
assertEquals("", found);
assertNull(this.stderrDuringParse);
}
/* This file and method are generated by TestGenerator, any edits will be overwritten by the next generation. */
// M's keyword rule A must win over the imported ID rule for input "abc".
@Test
public void testKeywordVSIDOrder() throws Exception {
mkdir(tmpdir);
String slave_S =
"lexer grammar S;\n" +
"ID : 'a'..'z'+;";
writeFile(tmpdir, "S.g4", slave_S);
StringBuilder grammarBuilder = new StringBuilder(106);
grammarBuilder.append("grammar M;\n");
grammarBuilder.append("import S;\n");
grammarBuilder.append("a : A {print(\"M.a: \" + str($A))};\n");
grammarBuilder.append("A : 'abc' {print(\"M.A\")};\n");
grammarBuilder.append("WS : (' '|'\\n') -> skip ;");
String grammar = grammarBuilder.toString();
String input ="abc";
String found = execParser("M.g4", grammar, "MParser", "MLexer", "MListener", "MVisitor", "a", input, false);
assertEquals(
"M.A\n" +
"M.a: [@0,0:2='abc',<1>,1:0]\n", found);
assertNull(this.stderrDuringParse);
}
}
| |
package ru.lanbilling.webservice.wsdl;
import javax.annotation.Generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for soapSbssStatus complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="soapSbssStatus">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="group" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="type" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="active" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="archive" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="displaydefault" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="clientmodifyallow" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="defaultnew" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="defaultanswer" type="{http://www.w3.org/2001/XMLSchema}long" minOccurs="0"/>
* <element name="descr" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="color" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated bean: field names, @XmlElement defaults and propOrder are
// part of the wire contract; regenerate via xjc rather than editing by hand.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "soapSbssStatus", propOrder = {
"id",
"group",
"type",
"active",
"archive",
"displaydefault",
"clientmodifyallow",
"defaultnew",
"defaultanswer",
"descr",
"color"
})
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public class SoapSbssStatus {
// XML element "id"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long id;
// XML element "group"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long group;
// XML element "type"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long type;
// XML element "active"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long active;
// XML element "archive"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long archive;
// XML element "displaydefault"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long displaydefault;
// XML element "clientmodifyallow"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long clientmodifyallow;
// XML element "defaultnew"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long defaultnew;
// XML element "defaultanswer"; schema default "0".
@XmlElement(defaultValue = "0")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected Long defaultanswer;
// XML element "descr"; schema default "" (empty string).
@XmlElement(defaultValue = "")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected String descr;
// XML element "color"; schema default "" (empty string).
@XmlElement(defaultValue = "")
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
protected String color;
/**
 * Gets the value of the id property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getId() {
return id;
}
/**
 * Sets the value of the id property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setId(Long value) {
this.id = value;
}
/**
 * Gets the value of the group property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getGroup() {
return group;
}
/**
 * Sets the value of the group property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setGroup(Long value) {
this.group = value;
}
/**
 * Gets the value of the type property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getType() {
return type;
}
/**
 * Sets the value of the type property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setType(Long value) {
this.type = value;
}
/**
 * Gets the value of the active property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getActive() {
return active;
}
/**
 * Sets the value of the active property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setActive(Long value) {
this.active = value;
}
/**
 * Gets the value of the archive property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getArchive() {
return archive;
}
/**
 * Sets the value of the archive property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setArchive(Long value) {
this.archive = value;
}
/**
 * Gets the value of the displaydefault property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getDisplaydefault() {
return displaydefault;
}
/**
 * Sets the value of the displaydefault property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setDisplaydefault(Long value) {
this.displaydefault = value;
}
/**
 * Gets the value of the clientmodifyallow property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getClientmodifyallow() {
return clientmodifyallow;
}
/**
 * Sets the value of the clientmodifyallow property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setClientmodifyallow(Long value) {
this.clientmodifyallow = value;
}
/**
 * Gets the value of the defaultnew property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getDefaultnew() {
return defaultnew;
}
/**
 * Sets the value of the defaultnew property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setDefaultnew(Long value) {
this.defaultnew = value;
}
/**
 * Gets the value of the defaultanswer property.
 *
 * @return
 *     possible object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public Long getDefaultanswer() {
return defaultanswer;
}
/**
 * Sets the value of the defaultanswer property.
 *
 * @param value
 *     allowed object is
 *     {@link Long }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setDefaultanswer(Long value) {
this.defaultanswer = value;
}
/**
 * Gets the value of the descr property.
 *
 * @return
 *     possible object is
 *     {@link String }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public String getDescr() {
return descr;
}
/**
 * Sets the value of the descr property.
 *
 * @param value
 *     allowed object is
 *     {@link String }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setDescr(String value) {
this.descr = value;
}
/**
 * Gets the value of the color property.
 *
 * @return
 *     possible object is
 *     {@link String }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public String getColor() {
return color;
}
/**
 * Sets the value of the color property.
 *
 * @param value
 *     allowed object is
 *     {@link String }
 *
 */
@Generated(value = "com.sun.tools.xjc.Driver", date = "2015-10-25T05:29:34+06:00", comments = "JAXB RI v2.2.11")
public void setColor(String value) {
this.color = value;
}
}
| |
/*
* RegistrationTable.java
*
* Created on October 10, 2002, 11:19 PM
*/
package gov.nist.sip.proxy.registrar;
import javax.sip.*;
import javax.sip.message.*;
import javax.sip.header.*;
import javax.sip.address.*;
import java.util.*;
//ifndef SIMULATION
//
import java.rmi.*;
import java.rmi.server.*;
//endif
//
import gov.nist.sip.proxy.*;
//ifdef SIMULATION
/*
import sim.java.util.*;
//endif
*/
/**
*
* @author olivier
* @version 1.0
*/
public class RegistrationsTable{
protected Registrar registrar;
protected Hashtable registrations;
protected Hashtable expiresTaskTable;
/** Creates a new, empty registrations table bound to the given registrar. */
public RegistrationsTable(Registrar registrar) {
this.registrar = registrar;
this.registrations = new Hashtable();
this.expiresTaskTable = new Hashtable();
}
/** Returns the live table of registrations (no defensive copy is made). */
public Hashtable getRegistrations() {
return this.registrations;
}
/** Returns the live table of expiry tasks (no defensive copy is made). */
public Hashtable getExpiresTaskTable() {
return this.expiresTaskTable;
}
/*******************************************************************/
/*************************** RMI REGISTRY ***********************/
public synchronized String getRegistryXMLTags()
//ifndef SIMULATION
//
throws RemoteException
//endif
//
{
StringBuffer retval = new StringBuffer("<REGISTRATIONS>");
Collection values = registrations.values();
Iterator it = values.iterator();
while (it.hasNext()) {
Registration registration= (Registration) it.next();
retval.append(registration.getXMLTags());
}
retval.append("</REGISTRATIONS>");
return retval.toString();
}
public synchronized Vector getRegistryBindings()
//ifndef SIMULATION
//
throws RemoteException
//endif
//
{
Vector retval = new Vector();
Collection values = registrations.values();
Iterator it = values.iterator();
while (it.hasNext()) {
Registration registration= (Registration) it.next();
ExportedBinding be = registration.exportBinding();
System.out.println("adding a binding " + be);
if (be!=null)
retval.add(be);
}
return retval;
}
public synchronized int getRegistrySize()
//ifndef SIMULATION
//
throws RemoteException
//endif
//
{
Vector retval = new Vector();
Collection values = registrations.values();
return values.size();
}
/*************************************************************************/
/*************************************************************************/
public synchronized boolean hasRegistration(String key) {
boolean res=registrations.containsKey(key.toLowerCase());
if (res)
ProxyDebug.println
("RegistrationsTable, hasRegistration(), Checking registration for \""
+key.toLowerCase()+"\" : registered");
else {
ProxyDebug.println
("RegistrationsTable, hasRegistration(), Checking registration for \""
+key.toLowerCase()+"\" : not registered");
}
return res;
}
protected void addRegistration(String key,Request request) throws Exception{
Vector contacts=Registrar.getContactHeaders(request);
int expiresTimeHeader=-1;
Registration registration=new Registration();
registration.key=key;
ExpiresHeader expiresHeader=
(ExpiresHeader)request.getHeader(ExpiresHeader.NAME);
if (expiresHeader!=null) {
expiresTimeHeader=expiresHeader.getExpires();
if (expiresTimeHeader > registrar.EXPIRES_TIME_MAX ||
expiresTimeHeader < registrar.EXPIRES_TIME_MIN )
expiresTimeHeader=registrar.EXPIRES_TIME_MAX;
}
else expiresTimeHeader=registrar.EXPIRES_TIME_MAX;
for( int i=0; i<contacts.size();i++) {
ContactHeader contactHeader=(ContactHeader)contacts.elementAt(i);
if (contactHeader.getExpires()==-1 ) {
contactHeader.setExpires(expiresTimeHeader);
}
registration.addContactHeader(contactHeader);
startTimer(key,contactHeader.getExpires(),contactHeader);
}
ToHeader toHeader=(ToHeader)request.getHeader(ToHeader.NAME);
Address toAddress=toHeader.getAddress();
String displayName=toAddress.getDisplayName();
if (displayName !=null) registration.setDisplayName(displayName);
// Store the to and from headers for binding to the responder.
registration.toHeader = toHeader;
FromHeader fromHeader = (FromHeader)request.getHeader(FromHeader.NAME);
registration.fromHeader = fromHeader;
registrations.put(key,registration);
ProxyDebug.println
("RegistrationsTable, addRegistration(), registration "+
" added for the key: "+key);
printRegistrations();
updateGUI(registration,false);
}
protected void addRegistration(Registration registration) throws Exception{
Vector contacts=registration.getContactsList();
// ok to have empty contact list. This just means that the
// registration is known to the registrar but contact info
// is not available.
if (contacts==null ) {
throw new Exception
("contact list is empty, registration not added!");
}
String key=registration.getKey();
if (key==null)
throw new Exception("key is null, registration not added!");
for( int i=0; i<contacts.size();i++) {
ContactHeader contactHeader=(ContactHeader)contacts.elementAt(i);
if (contactHeader.getExpires()==-1 ) {
contactHeader.setExpires(registrar.EXPIRES_TIME_MAX);
}
startTimer(key,contactHeader.getExpires(),contactHeader);
}
registrations.put(key,registration);
ProxyDebug.println
("RegistrationsTable, addRegistration(), registration "+
" added for the key: "+key);
printRegistrations();
updateGUI(registration,false);
}
public synchronized void removeRegistration(String key) {
ProxyDebug.println("RegistrationsTable, removeRegistration(), "+
" registration removed"+
" for the key: "+key);
Registration registration=(Registration)registrations.get(key);
updateGUI(registration,true);
registrations.remove(key);
printRegistrations();
//updateGUI(registration,true);
}
public void removeContact(String key,ContactHeader contactHeader) {
ProxyDebug.println("RegistrationsTable, removeContact(), "+
" contact removed for the key: "+key);
Registration registration=(Registration)registrations.get(key);
if (registration!=null) {
registration.removeContactHeader(contactHeader);
printRegistrations();
if ( !registration.hasContacts()) {
ProxyDebug.println("RegistrationsTable, removeContact(), the registration: "+
key+
" does not contain any contacts, we remove it");
removeRegistration(key);
}
}
}
public void updateRegistration(String key,Request request) throws Exception {
ProxyDebug.println("RegistrationsTable, updateRegistration(), registration updated"+
" for the key: "+key);
Vector contacts=Registrar.getContactHeaders(request);
Registration registration=(Registration)registrations.get(key);
int expiresTime=registrar.EXPIRES_TIME_MAX;
for( int i=0; i<contacts.size();i++) {
ContactHeader contactHeader=(ContactHeader)contacts.elementAt(i);
if (contactHeader.getExpires()!=-1 ) {
expiresTime=contactHeader.getExpires();
}
else {
ExpiresHeader expiresHeader=(ExpiresHeader)request.getHeader(ExpiresHeader.NAME);
if (expiresHeader!=null) {
expiresTime=expiresHeader.getExpires();
}
}
if (expiresTime==0) {
removeContact(key,contactHeader);
}
else {
if (expiresTime > registrar.EXPIRES_TIME_MAX ||
expiresTime < registrar.EXPIRES_TIME_MIN)
expiresTime=registrar.EXPIRES_TIME_MAX;
contactHeader.setExpires(expiresTime);
if (registration.hasContactHeader(contactHeader))
registration.updateContactHeader(contactHeader);
else
registration.addContactHeader(contactHeader);
startTimer(key,expiresTime,contactHeader);
expiresTime=registrar.EXPIRES_TIME_MAX;
}
}
printRegistrations();
}
public Vector getContactHeaders(String key) {
Registration registration=(Registration)registrations.get(key);
if (registration==null) return null;
else return registration.getContactsList();
}
public void startTimer
(String key,int expiresTime,ContactHeader contactHeader) {
// we kill the precedent timer related to this key if there is one:
Address address=contactHeader.getAddress();
javax.sip.address.URI cleanedUri=Registrar.getCleanUri(address.getURI() );
String contactURI=cleanedUri.toString();
//ifdef SIMULATION
/*
SimTimer oldTimer;
//else
*/
Timer oldTimer;
//endif
//
//ifndef SIMULATION
//
synchronized(expiresTaskTable) {
oldTimer=(Timer)expiresTaskTable.get(contactURI);
}
//else
/*
synchronized(expiresTaskTable) {
oldTimer = (SimTimer)expiresTaskTable.get(contactURI);
}
//endif
*/
if (oldTimer !=null) {
ProxyDebug.println
("RegistrationsTable, startTimer(), An old timer has "+
" been stopped for the contact: "+contactURI);
oldTimer.cancel();
}
// Let's start a timer for this contact...
ExpiresTask expiresTask=new ExpiresTask(key,contactHeader,this);
//ifndef SIMULATION
//
Timer timer=new Timer();
timer.schedule(expiresTask,expiresTime*1000);
//else
/*
SimTimer timer = new SimTimer();
timer.schedule(expiresTask,expiresTime*1000);
//endif
*/
synchronized (expiresTaskTable) {
expiresTaskTable.put(contactURI,timer);
}
ProxyDebug.println("RegistrationsTable, startTimer(), timer started "+
" for the contact: "+contactURI+" , expiresTime:"+expiresTime);
}
protected void printRegistrations() {
ProxyDebug.println("********* Registration record *****************");
ProxyDebug.println();
for (Enumeration e = registrations.keys() ; e.hasMoreElements() ;) {
String keyTable=(String)e.nextElement();
Registration registration=(Registration)registrations.get(keyTable);
ProxyDebug.println("registered user: \""+keyTable+"\"");
registration.print();
ProxyDebug.println();
}
ProxyDebug.println("************************************************");
ProxyDebug.println();
}
public String getXMLTags() {
StringBuffer retval = new StringBuffer();
retval.append("<?xml version='1.0' encoding='us-ascii'?> \n");
retval.append("<REGISTRATIONS> \n");
for (Enumeration e = registrations.keys() ; e.hasMoreElements() ;) {
String keyTable=(String)e.nextElement();
Registration registration=(Registration)registrations.get(keyTable);
retval.append(registration.getXMLTags());
}
retval.append("</REGISTRATIONS> \n");
return retval.toString();
}
public void updateGUI(Registration registration,boolean toRemove) {
if (registrar.gui!=null) {
registrar.gui.updateRegistration(registration,toRemove);
}
else {
ProxyDebug.println("DEBUG, not gui to update");
}
}
}
| |
/*
* Copyright 2011 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.jboss.netty.handler.codec.http;
import java.util.Date;
import java.util.Set;
import java.util.TreeSet;
/**
 * Encodes {@link Cookie}s into an HTTP header value. This encoder can encode
 * the HTTP cookie version 0, 1, and 2.
 * <p>
 * This encoder is stateful: cookies registered via
 * {@link #addCookie(String, String)} or {@link #addCookie(Cookie)} accumulate
 * internally until {@link #encode()} is called, which produces the header
 * value and resets the internal state so the encoder can start over.
 * <pre>
 * // Client-side example
 * {@link HttpRequest} req = ...;
 * {@link CookieEncoder} encoder = new {@link CookieEncoder}(false);
 * encoder.addCookie("JSESSIONID", "1234");
 * res.setHeader("Cookie", encoder.encode());
 *
 * // Server-side example
 * {@link HttpResponse} res = ...;
 * {@link CookieEncoder} encoder = new {@link CookieEncoder}(true);
 * encoder.addCookie("JSESSIONID", "1234");
 * res.setHeader("Set-Cookie", encoder.encode());
 * </pre>
 *
 * @see CookieDecoder
 *
 * @apiviz.stereotype utility
 * @apiviz.has org.jboss.netty.handler.codec.http.Cookie oneway - - encodes
 */
public class CookieEncoder {

    // Any of these characters inside a value forces the quoted form.
    private static final String NEEDS_QUOTING = "\t \"(),/:;<=>?@[\\]{}";

    // Cookies accumulated since the last encode(); the TreeSet keeps them in
    // the natural order defined by Cookie.
    private final Set<Cookie> pending = new TreeSet<Cookie>();

    // true -> Set-Cookie (server) syntax; false -> Cookie (client) syntax.
    private final boolean server;

    /**
     * Creates a new encoder.
     *
     * @param server {@code true} if and only if this encoder is supposed to
     *               encode server-side cookies. {@code false} if and only if
     *               this encoder is supposed to encode client-side cookies.
     */
    public CookieEncoder(boolean server) {
        this.server = server;
    }

    /**
     * Adds a new {@link Cookie} created with the specified name and value to
     * this encoder.
     */
    public void addCookie(String name, String value) {
        pending.add(new DefaultCookie(name, value));
    }

    /**
     * Adds the specified {@link Cookie} to this encoder.
     */
    public void addCookie(Cookie cookie) {
        pending.add(cookie);
    }

    /**
     * Encodes the {@link Cookie}s added so far into an HTTP header value and
     * clears the internal state. Returns an empty string if no cookie was
     * added.
     */
    public String encode() {
        String header = server ? encodeServerSide() : encodeClientSide();
        pending.clear();
        return header;
    }

    // Builds a Set-Cookie style header value.
    private String encodeServerSide() {
        StringBuilder buf = new StringBuilder();
        for (Cookie cookie: pending) {
            add(buf, cookie.getName(), cookie.getValue());

            if (cookie.getMaxAge() >= 0) {
                if (cookie.getVersion() == 0) {
                    // Version 0 has no Max-Age attribute: emit an absolute
                    // Expires date derived from the max age instead.
                    addUnquoted(buf, CookieHeaderNames.EXPIRES,
                            new CookieDateFormat().format(
                                    new Date(System.currentTimeMillis() +
                                            cookie.getMaxAge() * 1000L)));
                } else {
                    add(buf, CookieHeaderNames.MAX_AGE, cookie.getMaxAge());
                }
            }

            if (cookie.getPath() != null) {
                if (cookie.getVersion() > 0) {
                    add(buf, CookieHeaderNames.PATH, cookie.getPath());
                } else {
                    addUnquoted(buf, CookieHeaderNames.PATH, cookie.getPath());
                }
            }

            if (cookie.getDomain() != null) {
                if (cookie.getVersion() > 0) {
                    add(buf, CookieHeaderNames.DOMAIN, cookie.getDomain());
                } else {
                    addUnquoted(buf, CookieHeaderNames.DOMAIN, cookie.getDomain());
                }
            }

            if (cookie.isSecure()) {
                buf.append(CookieHeaderNames.SECURE);
                buf.append((char) HttpCodecUtil.SEMICOLON);
            }
            if (cookie.isHttpOnly()) {
                buf.append(CookieHeaderNames.HTTPONLY);
                buf.append((char) HttpCodecUtil.SEMICOLON);
            }

            // Version 1+ attributes.
            if (cookie.getVersion() >= 1) {
                if (cookie.getComment() != null) {
                    add(buf, CookieHeaderNames.COMMENT, cookie.getComment());
                }
                add(buf, CookieHeaderNames.VERSION, 1);
                if (cookie.getCommentUrl() != null) {
                    addQuoted(buf, CookieHeaderNames.COMMENTURL, cookie.getCommentUrl());
                }
                if (!cookie.getPorts().isEmpty()) {
                    buf.append(CookieHeaderNames.PORT);
                    appendPortList(buf, cookie);
                }
                if (cookie.isDiscard()) {
                    buf.append(CookieHeaderNames.DISCARD);
                    buf.append((char) HttpCodecUtil.SEMICOLON);
                }
            }
        }
        stripTrailingSeparator(buf);
        return buf.toString();
    }

    // Builds a Cookie style header value ($-prefixed attribute names).
    private String encodeClientSide() {
        StringBuilder buf = new StringBuilder();
        for (Cookie cookie: pending) {
            if (cookie.getVersion() >= 1) {
                add(buf, '$' + CookieHeaderNames.VERSION, 1);
            }
            add(buf, cookie.getName(), cookie.getValue());
            if (cookie.getPath() != null) {
                add(buf, '$' + CookieHeaderNames.PATH, cookie.getPath());
            }
            if (cookie.getDomain() != null) {
                add(buf, '$' + CookieHeaderNames.DOMAIN, cookie.getDomain());
            }
            if (cookie.getVersion() >= 1 && !cookie.getPorts().isEmpty()) {
                buf.append('$');
                buf.append(CookieHeaderNames.PORT);
                appendPortList(buf, cookie);
            }
        }
        stripTrailingSeparator(buf);
        return buf.toString();
    }

    // Appends ="p1,p2,..."; after the caller has written the attribute name.
    // The trailing comma of the port list is overwritten by the closing quote.
    private static void appendPortList(StringBuilder buf, Cookie cookie) {
        buf.append((char) HttpCodecUtil.EQUALS);
        buf.append((char) HttpCodecUtil.DOUBLE_QUOTE);
        for (int port: cookie.getPorts()) {
            buf.append(port);
            buf.append((char) HttpCodecUtil.COMMA);
        }
        buf.setCharAt(buf.length() - 1, (char) HttpCodecUtil.DOUBLE_QUOTE);
        buf.append((char) HttpCodecUtil.SEMICOLON);
    }

    // Removes the separator left after the last attribute, if any.
    private static void stripTrailingSeparator(StringBuilder buf) {
        if (buf.length() > 0) {
            buf.setLength(buf.length() - 1);
        }
    }

    // Emits name=value; quoting the value when it contains a character that
    // is not allowed in an unquoted token.
    private static void add(StringBuilder buf, String name, String val) {
        if (val == null) {
            addQuoted(buf, name, "");
            return;
        }
        for (int i = 0; i < val.length(); i ++) {
            if (NEEDS_QUOTING.indexOf(val.charAt(i)) >= 0) {
                addQuoted(buf, name, val);
                return;
            }
        }
        addUnquoted(buf, name, val);
    }

    // Emits name=value; with the value written verbatim.
    private static void addUnquoted(StringBuilder buf, String name, String val) {
        buf.append(name);
        buf.append((char) HttpCodecUtil.EQUALS);
        buf.append(val);
        buf.append((char) HttpCodecUtil.SEMICOLON);
    }

    // Emits name="value"; escaping backslashes and double quotes.
    private static void addQuoted(StringBuilder buf, String name, String val) {
        String body = val == null ? "" : val;
        buf.append(name);
        buf.append((char) HttpCodecUtil.EQUALS);
        buf.append((char) HttpCodecUtil.DOUBLE_QUOTE);
        buf.append(body.replace("\\", "\\\\").replace("\"", "\\\""));
        buf.append((char) HttpCodecUtil.DOUBLE_QUOTE);
        buf.append((char) HttpCodecUtil.SEMICOLON);
    }

    // Emits name=value; for an integer-valued attribute.
    private static void add(StringBuilder buf, String name, int val) {
        buf.append(name);
        buf.append((char) HttpCodecUtil.EQUALS);
        buf.append(val);
        buf.append((char) HttpCodecUtil.SEMICOLON);
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.GroupByHashPageIndexerFactory;
import com.facebook.presto.hive.AbstractTestHiveClient.HiveTransaction;
import com.facebook.presto.hive.AbstractTestHiveClient.Transaction;
import com.facebook.presto.hive.authentication.NoHdfsAuthentication;
import com.facebook.presto.hive.metastore.BridgingHiveMetastore;
import com.facebook.presto.hive.metastore.CachingHiveMetastore;
import com.facebook.presto.hive.metastore.Database;
import com.facebook.presto.hive.metastore.ExtendedHiveMetastore;
import com.facebook.presto.hive.metastore.PrincipalPrivileges;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.hive.metastore.ThriftHiveMetastore;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ColumnMetadata;
import com.facebook.presto.spi.ConnectorOutputTableHandle;
import com.facebook.presto.spi.ConnectorPageSink;
import com.facebook.presto.spi.ConnectorPageSource;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorSplit;
import com.facebook.presto.spi.ConnectorSplitSource;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.ConnectorTableLayoutResult;
import com.facebook.presto.spi.ConnectorTableMetadata;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
import com.facebook.presto.spi.connector.ConnectorMetadata;
import com.facebook.presto.spi.connector.ConnectorPageSinkProvider;
import com.facebook.presto.spi.connector.ConnectorPageSourceProvider;
import com.facebook.presto.spi.connector.ConnectorSplitManager;
import com.facebook.presto.spi.predicate.TupleDomain;
import com.facebook.presto.sql.gen.JoinCompiler;
import com.facebook.presto.testing.MaterializedResult;
import com.facebook.presto.testing.MaterializedRow;
import com.facebook.presto.testing.TestingConnectorSession;
import com.facebook.presto.testing.TestingNodeManager;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.net.HostAndPort;
import io.airlift.concurrent.BoundedExecutor;
import io.airlift.json.JsonCodec;
import io.airlift.slice.Slice;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import static com.facebook.presto.hadoop.HadoopFileStatus.isDirectory;
import static com.facebook.presto.hive.AbstractTestHiveClient.createTableProperties;
import static com.facebook.presto.hive.AbstractTestHiveClient.filterNonHiddenColumnHandles;
import static com.facebook.presto.hive.AbstractTestHiveClient.filterNonHiddenColumnMetadata;
import static com.facebook.presto.hive.AbstractTestHiveClient.getAllSplits;
import static com.facebook.presto.hive.HiveTestUtils.TYPE_MANAGER;
import static com.facebook.presto.hive.HiveTestUtils.getDefaultHiveDataStreamFactories;
import static com.facebook.presto.hive.HiveTestUtils.getDefaultHiveFileWriterFactories;
import static com.facebook.presto.hive.HiveTestUtils.getDefaultHiveRecordCursorProvider;
import static com.facebook.presto.hive.HiveTestUtils.getTypes;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.testing.MaterializedResult.materializeSourceDataStream;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Iterables.getOnlyElement;
import static com.google.common.util.concurrent.MoreExecutors.newDirectExecutorService;
import static io.airlift.concurrent.MoreFutures.getFutureValue;
import static io.airlift.concurrent.Threads.daemonThreadsNamed;
import static io.airlift.testing.Assertions.assertEqualsIgnoreOrder;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static java.util.concurrent.Executors.newCachedThreadPool;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
@Test(groups = "hive-s3")
public abstract class AbstractTestHiveClientS3
{
protected String writableBucket;
protected String database;
protected SchemaTableName tableS3;
protected SchemaTableName temporaryCreateTable;
protected HdfsEnvironment hdfsEnvironment;
protected LocationService locationService;
protected TestingHiveMetastore metastoreClient;
protected HiveMetadataFactory metadataFactory;
protected HiveTransactionManager transactionManager;
protected ConnectorSplitManager splitManager;
protected ConnectorPageSinkProvider pageSinkProvider;
protected ConnectorPageSourceProvider pageSourceProvider;
private ExecutorService executor;
@BeforeClass
public void setUp()
        throws Exception
{
    // One shared executor for the whole test class; released in tearDown().
    this.executor = newCachedThreadPool(daemonThreadsNamed("hive-%s"));
}
@AfterClass
public void tearDown()
        throws Exception
{
    // Release the shared executor created in setUp(); idempotent.
    ExecutorService current = executor;
    executor = null;
    if (current != null) {
        current.shutdownNow();
    }
}
protected void setupHive(String databaseName)
{
    // Fixed S3 test table, plus a uniquely named scratch table for creation tests.
    this.database = databaseName;
    this.tableS3 = new SchemaTableName(databaseName, "presto_test_s3");
    String suffix = UUID.randomUUID().toString().toLowerCase(ENGLISH).replace("-", "");
    this.temporaryCreateTable = new SchemaTableName(databaseName, "tmp_presto_test_create_s3_" + suffix);
}
/**
 * Wires up a complete Hive connector stack (metastore client, HDFS/S3
 * environment, metadata factory, split manager, page sink/source providers)
 * against the given metastore host/port and S3 credentials. Called once by
 * concrete subclasses before the tests run.
 *
 * @param writableBucket S3 bucket the tests are allowed to write into
 */
protected void setup(String host, int port, String databaseName, String awsAccessKey, String awsSecretKey, String writableBucket)
{
    this.writableBucket = writableBucket;
    setupHive(databaseName);
    // S3 credentials consumed by the HDFS configuration below.
    HiveS3Config s3Config = new HiveS3Config()
            .setS3AwsAccessKey(awsAccessKey)
            .setS3AwsSecretKey(awsSecretKey);
    HiveClientConfig hiveClientConfig = new HiveClientConfig();
    // Optional SOCKS proxy for reaching the metastore, taken from a system property.
    String proxy = System.getProperty("hive.metastore.thrift.client.socks-proxy");
    if (proxy != null) {
        hiveClientConfig.setMetastoreSocksProxy(HostAndPort.fromString(proxy));
    }
    HiveConnectorId connectorId = new HiveConnectorId("hive-test");
    HiveCluster hiveCluster = new TestingHiveCluster(hiveClientConfig, host, port);
    // NOTE(review): this local executor is never shut down; it appears to be
    // kept alive deliberately by the metastore cache and split manager for
    // the lifetime of the test class — confirm the leak is intentional.
    ExecutorService executor = newCachedThreadPool(daemonThreadsNamed("hive-s3-%s"));
    HdfsConfiguration hdfsConfiguration = new HiveHdfsConfiguration(new HdfsConfigurationUpdater(hiveClientConfig, s3Config));
    HivePartitionManager hivePartitionManager = new HivePartitionManager(connectorId, TYPE_MANAGER, hiveClientConfig);
    hdfsEnvironment = new HdfsEnvironment(hdfsConfiguration, hiveClientConfig, new NoHdfsAuthentication());
    // Metastore wrapper that redirects database/table locations to the S3 bucket.
    metastoreClient = new TestingHiveMetastore(
            new BridgingHiveMetastore(new ThriftHiveMetastore(hiveCluster)),
            executor,
            hiveClientConfig,
            writableBucket,
            hdfsEnvironment);
    locationService = new HiveLocationService(hdfsEnvironment);
    JsonCodec<PartitionUpdate> partitionUpdateCodec = JsonCodec.jsonCodec(PartitionUpdate.class);
    metadataFactory = new HiveMetadataFactory(
            connectorId,
            hiveClientConfig,
            metastoreClient,
            hdfsEnvironment,
            hivePartitionManager,
            newDirectExecutorService(),
            TYPE_MANAGER,
            locationService,
            new TableParameterCodec(),
            partitionUpdateCodec,
            new HiveTypeTranslator(),
            new NodeVersion("test_version"));
    transactionManager = new HiveTransactionManager();
    splitManager = new HiveSplitManager(
            connectorId,
            transactionHandle -> ((HiveMetadata) transactionManager.get(transactionHandle)).getMetastore(),
            new NamenodeStats(),
            hdfsEnvironment,
            new HadoopDirectoryLister(),
            new BoundedExecutor(executor, hiveClientConfig.getMaxSplitIteratorThreads()),
            new HiveCoercionPolicy(TYPE_MANAGER),
            hiveClientConfig.getMaxOutstandingSplits(),
            hiveClientConfig.getMinPartitionBatchSize(),
            hiveClientConfig.getMaxPartitionBatchSize(),
            hiveClientConfig.getMaxInitialSplits(),
            hiveClientConfig.getRecursiveDirWalkerEnabled());
    pageSinkProvider = new HivePageSinkProvider(
            getDefaultHiveFileWriterFactories(hiveClientConfig),
            hdfsEnvironment,
            metastoreClient,
            new GroupByHashPageIndexerFactory(new JoinCompiler()),
            TYPE_MANAGER,
            new HiveClientConfig(),
            locationService,
            partitionUpdateCodec,
            new TestingNodeManager("fake-environment"),
            new HiveEventClient(),
            new HiveSessionProperties(hiveClientConfig));
    pageSourceProvider = new HivePageSourceProvider(hiveClientConfig, hdfsEnvironment, getDefaultHiveRecordCursorProvider(hiveClientConfig), getDefaultHiveDataStreamFactories(hiveClientConfig), TYPE_MANAGER);
}
protected ConnectorSession newSession()
{
    // Fresh session backed by default Hive session properties.
    HiveSessionProperties properties = new HiveSessionProperties(new HiveClientConfig());
    return new TestingConnectorSession(properties.getSessionProperties());
}
protected Transaction newTransaction()
{
    // Each transaction gets its own metadata instance from the factory.
    ConnectorMetadata metadata = metadataFactory.create();
    return new HiveTransaction(transactionManager, metadata);
}
/**
 * Reads every split of the S3-backed test table and verifies the sum of the
 * t_bigint column.
 */
@Test
public void testGetRecordsS3()
        throws Exception
{
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();

        // Resolve the table and its (hidden + visible) columns.
        ConnectorTableHandle table = getTableHandle(metadata, tableS3);
        List<ColumnHandle> columnHandles = ImmutableList.copyOf(metadata.getColumnHandles(session, table).values());
        Map<String, Integer> columnIndex = indexColumns(columnHandles);

        // Unconstrained layout: the table is expected to have exactly one partition.
        List<ConnectorTableLayoutResult> layouts = metadata.getTableLayouts(session, table, new Constraint<>(TupleDomain.all(), bindings -> true), Optional.empty());
        HiveTableLayoutHandle layout = (HiveTableLayoutHandle) getOnlyElement(layouts).getTableLayout().getHandle();
        assertEquals(layout.getPartitions().get().size(), 1);

        // Scan every split, accumulating the t_bigint column.
        ConnectorSplitSource splitSource = splitManager.getSplits(transaction.getTransactionHandle(), session, layout);
        long total = 0;
        for (ConnectorSplit split : getAllSplits(splitSource)) {
            try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, columnHandles)) {
                MaterializedResult result = materializeSourceDataStream(session, pageSource, getTypes(columnHandles));
                for (MaterializedRow row : result) {
                    total += (Long) row.getField(columnIndex.get("t_bigint"));
                }
            }
        }
        assertEquals(total, 78300);
    }
}
/**
 * Verifies directory/file classification and existence checks against the
 * S3 file system binding.
 */
@Test
public void testGetFileStatus()
        throws Exception
{
    Path basePath = new Path("s3://presto-test-hive/");
    Path tablePath = new Path(basePath, "presto_test_s3");
    Path filePath = new Path(tablePath, "test1.csv");
    FileSystem fileSystem = hdfsEnvironment.getFileSystem("user", basePath);

    // Bucket root and table prefix behave as directories; the data file does not.
    assertTrue(isDirectory(fileSystem.getFileStatus(basePath)));
    assertTrue(isDirectory(fileSystem.getFileStatus(tablePath)));
    assertFalse(isDirectory(fileSystem.getFileStatus(filePath)));
    // A missing key must not be reported as existing.
    assertFalse(fileSystem.exists(new Path(basePath, "foo")));
}
/**
 * Exercises rename/delete semantics of the S3 file system binding:
 * file-to-new-name, rename onto an existing file, self-rename, and
 * directory renames onto both missing and existing targets. The assertions
 * form a strict state-machine over the bucket contents, so statement order
 * matters throughout.
 */
@Test
public void testRename()
        throws Exception
{
    // Each run works in a unique prefix so concurrent runs cannot collide.
    Path basePath = new Path(format("s3://%s/rename/%s/", writableBucket, UUID.randomUUID()));
    FileSystem fs = hdfsEnvironment.getFileSystem("user", basePath);
    assertFalse(fs.exists(basePath));
    // create file foo.txt
    Path path = new Path(basePath, "foo.txt");
    assertTrue(fs.createNewFile(path));
    assertTrue(fs.exists(path));
    // rename foo.txt to bar.txt
    Path newPath = new Path(basePath, "bar.txt");
    assertFalse(fs.exists(newPath));
    assertTrue(fs.rename(path, newPath));
    assertFalse(fs.exists(path));
    assertTrue(fs.exists(newPath));
    // create file foo.txt and rename to bar.txt
    // (renaming onto an existing file must fail and leave the source in place)
    assertTrue(fs.createNewFile(path));
    assertFalse(fs.rename(path, newPath));
    assertTrue(fs.exists(path));
    // rename foo.txt to foo.txt
    // (renaming a file onto itself succeeds and keeps the file)
    assertTrue(fs.rename(path, path));
    assertTrue(fs.exists(path));
    // delete foo.txt
    assertTrue(fs.delete(path, false));
    assertFalse(fs.exists(path));
    // create directory source with file
    Path source = new Path(basePath, "source");
    assertTrue(fs.createNewFile(new Path(source, "test.txt")));
    // rename source to non-existing target
    Path target = new Path(basePath, "target");
    assertFalse(fs.exists(target));
    assertTrue(fs.rename(source, target));
    assertFalse(fs.exists(source));
    assertTrue(fs.exists(target));
    // create directory source with file
    assertTrue(fs.createNewFile(new Path(source, "test.txt")));
    // rename source to existing target
    // (the source directory is moved *inside* the existing target)
    assertTrue(fs.rename(source, target));
    assertFalse(fs.exists(source));
    target = new Path(target, "source");
    assertTrue(fs.exists(target));
    assertTrue(fs.exists(new Path(target, "test.txt")));
    // delete target
    target = new Path(basePath, "target");
    assertTrue(fs.exists(target));
    assertTrue(fs.delete(target, true));
    assertFalse(fs.exists(target));
    // cleanup
    fs.delete(basePath, true);
}
/**
 * Creates (and drops) the temporary S3 table once per Hive storage format.
 */
@Test
public void testTableCreation()
        throws Exception
{
    for (HiveStorageFormat format : HiveStorageFormat.values()) {
        try {
            doCreateTable(temporaryCreateTable, format);
        }
        finally {
            // Always clean up, even when creation fails midway.
            dropTable(temporaryCreateTable);
        }
    }
}
/**
 * Creates a table in the given storage format on S3, writes three rows
 * through the page sink, then reads the table back and verifies metadata
 * and data. Two transactions are used: one to create and write, a second
 * to read back.
 */
private void doCreateTable(SchemaTableName tableName, HiveStorageFormat storageFormat)
        throws Exception
{
    List<ColumnMetadata> columns = ImmutableList.<ColumnMetadata>builder()
            .add(new ColumnMetadata("id", BIGINT))
            .build();
    MaterializedResult data = MaterializedResult.resultBuilder(newSession(), BIGINT)
            .row(1L)
            .row(3L)
            .row(2L)
            .build();
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        // begin creating the table
        ConnectorTableMetadata tableMetadata = new ConnectorTableMetadata(tableName, columns, createTableProperties(storageFormat));
        ConnectorOutputTableHandle outputHandle = metadata.beginCreateTable(session, tableMetadata, Optional.empty());
        // write the records
        ConnectorPageSink sink = pageSinkProvider.createPageSink(transaction.getTransactionHandle(), session, outputHandle);
        sink.appendPage(data.toPage());
        Collection<Slice> fragments = getFutureValue(sink.finish());
        // commit the table
        metadata.finishCreateTable(session, outputHandle, fragments);
        transaction.commit();
        // Hack to work around the metastore not being configured for S3.
        // The metastore tries to validate the location when creating the
        // table, which fails without explicit configuration for S3.
        // We work around that by using a dummy location when creating the
        // table and update it here to the correct S3 location.
        metastoreClient.updateTableLocation(
                database,
                tableName.getTableName(),
                locationService.writePathRoot(((HiveOutputTableHandle) outputHandle).getLocationHandle()).get().toString());
    }
    try (Transaction transaction = newTransaction()) {
        ConnectorMetadata metadata = transaction.getMetadata();
        ConnectorSession session = newSession();
        // load the new table
        ConnectorTableHandle tableHandle = getTableHandle(metadata, tableName);
        List<ColumnHandle> columnHandles = filterNonHiddenColumnHandles(metadata.getColumnHandles(session, tableHandle).values());
        // verify the metadata
        ConnectorTableMetadata tableMetadata = metadata.getTableMetadata(session, getTableHandle(metadata, tableName));
        assertEquals(filterNonHiddenColumnMetadata(tableMetadata.getColumns()), columns);
        // verify the data
        // (the unpartitioned table is expected to produce exactly one split)
        List<ConnectorTableLayoutResult> tableLayoutResults = metadata.getTableLayouts(session, tableHandle, new Constraint<>(TupleDomain.all(), bindings -> true), Optional.empty());
        HiveTableLayoutHandle layoutHandle = (HiveTableLayoutHandle) getOnlyElement(tableLayoutResults).getTableLayout().getHandle();
        assertEquals(layoutHandle.getPartitions().get().size(), 1);
        ConnectorSplitSource splitSource = splitManager.getSplits(transaction.getTransactionHandle(), session, layoutHandle);
        ConnectorSplit split = getOnlyElement(getAllSplits(splitSource));
        try (ConnectorPageSource pageSource = pageSourceProvider.createPageSource(transaction.getTransactionHandle(), session, split, columnHandles)) {
            MaterializedResult result = materializeSourceDataStream(session, pageSource, getTypes(columnHandles));
            // row order is not guaranteed, so compare ignoring order
            assertEqualsIgnoreOrder(result.getMaterializedRows(), data.getMaterializedRows());
        }
    }
}
private void dropTable(SchemaTableName table)
{
    try (Transaction transaction = newTransaction()) {
        String schema = table.getSchemaName();
        transaction.getMetastore(schema).dropTable(newSession(), schema, table.getTableName());
        transaction.commit();
    }
    catch (RuntimeException ignored) {
        // Best-effort cleanup: the table may never have been created.
    }
}
/**
 * Looks up the connector handle for {@code tableName} in a fresh session.
 *
 * @throws IllegalArgumentException if the table does not exist
 */
private ConnectorTableHandle getTableHandle(ConnectorMetadata metadata, SchemaTableName tableName)
{
    ConnectorTableHandle tableHandle = metadata.getTableHandle(newSession(), tableName);
    checkArgument(tableHandle != null, "table not found: %s", tableName);
    return tableHandle;
}
/**
 * Maps each column's name to its position in {@code columnHandles}.
 *
 * <p>All handles are expected to be {@link HiveColumnHandle}s; a different
 * implementation type fails with a {@code ClassCastException}.
 */
private static ImmutableMap<String, Integer> indexColumns(List<ColumnHandle> columnHandles)
{
    ImmutableMap.Builder<String, Integer> byName = ImmutableMap.builder();
    for (int position = 0; position < columnHandles.size(); position++) {
        HiveColumnHandle column = (HiveColumnHandle) columnHandles.get(position);
        byName.put(column.getName(), position);
    }
    return byName.build();
}
/**
 * Metastore wrapper used by the S3 tests.
 *
 * <p>The backing metastore is not configured for S3, so this class rewrites the
 * database location to point at the writable S3 bucket, substitutes a dummy "/"
 * location whenever the real metastore would have to validate a path (create/drop),
 * and exposes {@link #updateTableLocation} so tests can patch in the real S3
 * location after creation.
 */
private static class TestingHiveMetastore
        extends CachingHiveMetastore
{
    // name of the S3 bucket tests are allowed to write to
    private final String writableBucket;
    // used to obtain a FileSystem for deleting table data on drop
    private final HdfsEnvironment hdfsEnvironment;

    public TestingHiveMetastore(ExtendedHiveMetastore delegate, ExecutorService executor, HiveClientConfig hiveClientConfig, String writableBucket, HdfsEnvironment hdfsEnvironment)
    {
        super(delegate, executor, hiveClientConfig);
        this.writableBucket = writableBucket;
        this.hdfsEnvironment = hdfsEnvironment;
    }

    /**
     * Returns the database with its location rewritten to the writable S3 bucket,
     * so new tables default to an S3 path.
     */
    @Override
    public Optional<Database> getDatabase(String databaseName)
    {
        return super.getDatabase(databaseName)
                .map(database -> Database.builder(database)
                        .setLocation(Optional.of("s3://" + writableBucket + "/"))
                        .build());
    }

    /**
     * Creates the table with a dummy "/" location; the caller later fixes it up
     * via {@link #updateTableLocation}.
     */
    @Override
    public void createTable(Table table, PrincipalPrivileges privileges)
    {
        // hack to work around the metastore not being configured for S3
        Table.Builder tableBuilder = Table.builder(table);
        tableBuilder.getStorageBuilder().setLocation("/");
        super.createTable(tableBuilder.build(), privileges);
    }

    /**
     * Drops the table, first resetting its location to "/" so the metastore does
     * not try to touch the S3 path, then (optionally) deleting the table's data
     * directly through HDFS/S3.
     *
     * @throws TableNotFoundException if the table does not exist
     */
    @Override
    public void dropTable(String databaseName, String tableName, boolean deleteData)
    {
        try {
            Optional<Table> table = getTable(databaseName, tableName);
            if (!table.isPresent()) {
                throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
            }
            // hack to work around the metastore not being configured for S3
            // collect data paths BEFORE the location is clobbered below
            List<String> locations = listAllDataPaths(databaseName, tableName);
            Table.Builder tableBuilder = Table.builder(table.get());
            tableBuilder.getStorageBuilder().setLocation("/");
            // drop table
            replaceTable(databaseName, tableName, tableBuilder.build(), new PrincipalPrivileges(ImmutableMultimap.of(), ImmutableMultimap.of()));
            delegate.dropTable(databaseName, tableName, false);
            // drop data
            if (deleteData) {
                for (String location : locations) {
                    Path path = new Path(location);
                    hdfsEnvironment.getFileSystem("user", path).delete(path, true);
                }
            }
        }
        catch (Exception e) {
            throw Throwables.propagate(e);
        }
        finally {
            // always evict the cached entry, even if the drop failed partway
            invalidateTable(databaseName, tableName);
        }
    }

    /**
     * Replaces the table's storage location (used to swap the dummy "/" location
     * for the real S3 path after creation).
     *
     * @throws TableNotFoundException if the table does not exist
     */
    public void updateTableLocation(String databaseName, String tableName, String location)
    {
        Optional<Table> table = getTable(databaseName, tableName);
        if (!table.isPresent()) {
            throw new TableNotFoundException(new SchemaTableName(databaseName, tableName));
        }
        Table.Builder tableBuilder = Table.builder(table.get());
        tableBuilder.getStorageBuilder().setLocation(location);
        // NOTE: this clears the permissions
        replaceTable(databaseName, tableName, tableBuilder.build(), new PrincipalPrivileges(ImmutableMultimap.of(), ImmutableMultimap.of()));
    }

    /**
     * Returns the table location plus every partition location that is not
     * nested under the table location.
     */
    private List<String> listAllDataPaths(String schemaName, String tableName)
    {
        ImmutableList.Builder<String> locations = ImmutableList.builder();
        Table table = getTable(schemaName, tableName).get();
        if (table.getStorage().getLocation() != null) {
            // For partitioned table, there should be nothing directly under this directory.
            // But including this location in the set makes the directory content assert more
            // extensive, which is desirable.
            locations.add(table.getStorage().getLocation());
        }
        Optional<List<String>> partitionNames = getPartitionNames(schemaName, tableName);
        if (partitionNames.isPresent()) {
            getPartitionsByNames(schemaName, tableName, partitionNames.get()).values().stream()
                    .map(Optional::get)
                    .map(partition -> partition.getStorage().getLocation())
                    .filter(location -> !location.startsWith(table.getStorage().getLocation()))
                    .forEach(locations::add);
        }
        return locations.build();
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package groovy.lang;
import groovy.test.GroovyTestCase;
import org.codehaus.groovy.runtime.InvokerHelper;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
 * Exercises {@link MetaClass} lookup and dynamic property/method dispatch via
 * {@link InvokerHelper} for a variety of receiver types (arrays, strings, plain
 * objects, and the {@code DymmyClass} fixture).
 */
public class MetaClassTest extends GroovyTestCase {

    public void testMetaClass() {
        Class arrayType = String[].class;
        System.out.println(arrayType + " name: " + arrayType.getName());
        MetaClass mc = InvokerHelper.getMetaClass(this);
        assertTrue("got metaclass", mc != null);
        mc.invokeMethod(this, "doSomething", new Object[0]);
    }

    public void testArray() {
        String[] target = new String[]{"hello"};
        MetaClass mc = InvokerHelper.getMetaClass(target);
        assertTrue("got metaclass", mc != null);
        mc.invokeMethod(target, "toString", new Object[0]);
    }

    public void testString() {
        String target = "hello";
        MetaClass mc = InvokerHelper.getMetaClass(target);
        assertTrue("got metaclass", mc != null);
        Object answer = mc.invokeMethod(target, "toString", new Object[0]);
        assertEquals("hello", answer);
    }

    public void testObject() {
        Object target = new Object();
        MetaClass mc = InvokerHelper.getMetaClass(target);
        assertTrue("got metaclass", mc != null);
        mc.invokeMethod(target, "toString", new Object[0]);
    }

    public void testPublicField() {
        DymmyClass bean = new DymmyClass();
        MetaClass mc = InvokerHelper.getMetaClass(bean);
        // read the public fields through the metaclass
        assertEquals(mc.getProperty(bean, "x"), Integer.valueOf(0));
        assertEquals(mc.getProperty(bean, "y"), "none");
        // write them back through the metaclass and verify directly
        mc.setProperty(bean, "x", Integer.valueOf(25));
        assertEquals(bean.x, 25);
        mc.setProperty(bean, "y", "newvalue");
        assertEquals(bean.y, "newvalue");
    }

    public void testSetPropertyWithInt() {
        DymmyClass bean = new DymmyClass();
        MetaClass mc = InvokerHelper.getMetaClass(bean);
        mc.setProperty(bean, "anInt", Integer.valueOf(10));
    }

    public void testSetPropertyWithDoubleArray() {
        DymmyClass bean = new DymmyClass();
        MetaClass mc = InvokerHelper.getMetaClass(bean);
        Double[][] sample = {
                {Double.valueOf(35), Double.valueOf(50), Double.valueOf(120)},
                {Double.valueOf(75), Double.valueOf(80), Double.valueOf(150)}
        };
        // one property is Double[][], the other double[][] - both must accept it
        mc.setProperty(bean, "matrix", sample);
        mc.setProperty(bean, "matrix2", sample);
    }

    public void testSetPropertyWithArray() {
        DymmyClass bean = new DymmyClass();
        MetaClass mc = InvokerHelper.getMetaClass(bean);
        // test int[]
        int[] primitives = new int[]{0, 1, 2, 3};
        mc.setProperty(bean, "ints", primitives);
        assertEquals(primitives, mc.getProperty(bean, "ints"));
        // test Integer[]
        Integer[] boxed = new Integer[]{
                Integer.valueOf(0), Integer.valueOf(1), Integer.valueOf(2), Integer.valueOf(3)
        };
        mc.setProperty(bean, "integers", boxed);
        assertEquals(boxed, mc.getProperty(bean, "integers"));
    }

    public void testSetPropertyWithList() {
        DymmyClass bean = new DymmyClass();
        MetaClass mc = InvokerHelper.getMetaClass(bean);
        // a list should coerce onto both array-typed properties
        ArrayList values = new ArrayList();
        values.add(Integer.valueOf(120));
        values.add(Integer.valueOf(150));
        // test int[]
        mc.setProperty(bean, "ints", values);
        // test Integer[]
        mc.setProperty(bean, "integers", values);
    }

    public void testMetaMethodsOnlyAddedOnce() {
        MetaClass mc = InvokerHelper.getMetaClass("some String");
        List methods = mc.getMetaMethods();
        for (Object outer : methods) {
            MetaMethod method = (MetaMethod) outer;
            int count = 0;
            for (Object inner : methods) {
                MetaMethod candidate = (MetaMethod) inner;
                if (method.equals(candidate)) {
                    System.out.println("runner = " + candidate);
                    System.out.println("method = " + method);
                    count++;
                }
            }
            // each meta method must appear exactly once
            assertEquals("count of Method " + method.getName(), 1, count);
        }
    }

    /** Invoked reflectively by {@code testMetaClass()}. */
    public void doSomething() {
        System.out.println("Called doSomething()");
    }
}
/**
 * Plain fixture bean for the metaclass tests: exposes two public fields, bean
 * properties of primitive/array types, and two write-only matrix properties.
 */
class DymmyClass {
    // public fields, read and written directly (not via accessors)
    public int x = 0;
    public String y = "none";

    // bean properties with accessors below
    private int anInt;
    private int[] ints;
    private Integer[] integers;

    // write-only properties (setters only)
    double[][] matrix2;
    Double[][] matrix;

    public int getAnInt() {
        return anInt;
    }

    public void setAnInt(int value) {
        this.anInt = value;
    }

    public int[] getInts() {
        return ints;
    }

    public void setInts(int[] value) {
        this.ints = value;
    }

    public Integer[] getIntegers() {
        return integers;
    }

    public void setIntegers(Integer[] value) {
        this.integers = value;
    }

    public void setMatrix(Double[][] value) {
        this.matrix = value;
    }

    public void setMatrix2(double[][] value) {
        this.matrix2 = value;
    }
}
| |
/**
* Container for a Warc Record of type "response"
*
* (C) 2009 - Carnegie Mellon University
*
* 1. Redistributions of this source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. The names "Lemur", "Indri", "University of Massachusetts",
* "Carnegie Mellon", and "lemurproject" must not be used to
* endorse or promote products derived from this software without
* prior written permission. To obtain permission, contact
* license@lemurproject.org.
*
* 4. Products derived from this software may not be called "Lemur" or "Indri"
* nor may "Lemur" or "Indri" appear in their names without prior written
* permission of The Lemur Project. To obtain permission,
* contact license@lemurproject.org.
*
* THIS SOFTWARE IS PROVIDED BY THE LEMUR PROJECT AS PART OF THE CLUEWEB09
* PROJECT AND OTHER CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN
* NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* @author mhoy@cs.cmu.edu (Mark J. Hoy)
*/
package edu.cmu.lemurproject;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URISyntaxException;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Vector;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Container for a WARC record of type "response". Wraps a generic
 * {@link WarcRecord} and adds HTML-aware helpers, most notably
 * {@link #getURLOutlinks()} which extracts normalized outgoing links.
 */
public class WarcHTMLResponseRecord {
  // Underlying generic record; only ever set from a record whose type is "response".
  private WarcRecord warcRecord = new WarcRecord();

  // Compiled regexes are immutable and shared, so they are declared final
  // (previously these were mutable non-final statics).
  private static final Pattern ALL_HTML_TAGS = Pattern.compile("<(.*?)>");
  private static final Pattern A_HREF_PATTERN = Pattern.compile("[aA].+?[hH][rR][eE][fF]=['\"](.+?)['\"].*?");
  private static final Pattern AREA_HREF_PATTERN = Pattern.compile("[aA][rR][eE][aA].+?[hH][rR][eE][fF]=['\"](.*?)['\"].*?");
  private static final Pattern FRAME_SRC_PATTERN = Pattern.compile("[fF][rR][aA][mM][eE].+?[sS][rR][cC]=['\"](.*?)['\"].*?");
  private static final Pattern IFRAME_SRC_PATTERN = Pattern.compile("[iI][fF][rR][aA][mM][eE].+?[sS][rR][cC]=['\"](.*?)['\"].*?");
  private static final Pattern HTTP_START_PATTERN = Pattern.compile("^[hH][tT][tT][pP][sS]?://.*");

  // link-bearing patterns checked against each HTML tag (populated once per instance)
  private final Vector<Pattern> patternSet = new Vector<Pattern>();

  /**
   * Default constructor
   */
  public WarcHTMLResponseRecord() {
    createPatternSet();
  }

  /**
   * Copy constructor
   * @param o
   */
  public WarcHTMLResponseRecord(WarcHTMLResponseRecord o) {
    this.warcRecord.set(o.warcRecord);
    createPatternSet();
  }

  /**
   * Constructor creation from a generic WARC record. Records whose type is not
   * "response" are silently ignored, leaving this record empty.
   * @param o
   */
  public WarcHTMLResponseRecord(WarcRecord o) {
    if (o.getHeaderRecordType().compareToIgnoreCase("response")==0) {
      this.warcRecord.set(o);
    }
    createPatternSet();
  }

  // registers every tag pattern that can carry an outgoing link
  private void createPatternSet() {
    patternSet.add(A_HREF_PATTERN);
    patternSet.add(AREA_HREF_PATTERN);
    patternSet.add(FRAME_SRC_PATTERN);
    patternSet.add(IFRAME_SRC_PATTERN);
  }

  /** Replaces the wrapped record; non-"response" records are silently ignored. */
  public void setRecord(WarcRecord o) {
    if (o.getHeaderRecordType().compareToIgnoreCase("response")==0) {
      this.warcRecord.set(o);
    }
  }

  /** Returns the wrapped generic WARC record. */
  public WarcRecord getRawRecord() {
    return warcRecord;
  }

  /** Returns the WARC-Target-URI header value, or null if absent. */
  public String getTargetURI() {
    return warcRecord.getHeaderMetadataItem("WARC-Target-URI");
  }

  /** Returns the WARC-TREC-ID header value, or null if absent. */
  public String getTargetTrecID() {
    return warcRecord.getHeaderMetadataItem("WARC-TREC-ID");
  }

  /**
   * Resolves a (possibly relative) content URL against the page URL, stripping
   * any query string from the content URL first.
   *
   * @return the normalized absolute URL; the stripped URL if resolution fails;
   *         or the empty string if the page URL itself is malformed
   */
  private String getNormalizedContentURL(String pageURL, String contentURL) {
    String fixedContentURL = contentURL;
    try {
      // resolve any potentially relative paths to the full URL based on the page
      java.net.URI baseURI = new java.net.URI(pageURL);
      // ensure that the content doesn't have query parameters - if so, strip them.
      // (>= 0 so a URL consisting only of a query string is stripped as well;
      // the old "> 0" check missed that case)
      int contentParamIndex = contentURL.indexOf("?");
      if (contentParamIndex >= 0) {
        fixedContentURL = contentURL.substring(0, contentParamIndex);
      }
      java.net.URI resolvedURI = baseURI.resolve(fixedContentURL);
      return resolvedURI.toString();
    } catch (URISyntaxException ex) {
      // malformed page URL - treat as "no link" and fall through to return ""
    } catch (IllegalArgumentException iaEx) {
      // resolution failed - best effort: return the (stripped) content URL as-is
      return fixedContentURL;
    } catch (Exception gEx) {
      // any other failure is treated as "no link"
    }
    return "";
  }

  /**
   * Scans the given HTML tag bodies for link attributes, normalizes each match
   * against {@code baseURL}, and returns the unique set of absolute http(s)
   * links (excluding the base URL itself).
   */
  private HashSet<String> getMatchesOutputSet(Vector<String> tagSet, String baseURL) {
    HashSet<String> retSet=new HashSet<String>();
    Iterator<String> vIter=tagSet.iterator();
    while (vIter.hasNext()) {
      String thisCheckPiece=vIter.next();
      Iterator<Pattern> pIter=patternSet.iterator();
      boolean hasAdded=false;
      while (!hasAdded && pIter.hasNext()) {
        Pattern thisPattern=pIter.next();
        Matcher matcher=thisPattern.matcher(thisCheckPiece);
        if (matcher.find() && (matcher.groupCount() > 0)) {
          String thisMatch=getNormalizedContentURL(baseURL, matcher.group(1));
          // keep only absolute http/https links that differ from the page itself
          if (HTTP_START_PATTERN.matcher(thisMatch).matches()) {
            if (!retSet.contains(thisMatch) && !baseURL.equals(thisMatch)) {
              retSet.add(thisMatch);
              hasAdded=true;
            } // end if (!retSet.contains(thisMatch))
          } // end if (HTTP_START_PATTERN.matcher(thisMatch).matches())
        } // end if (matcher.find() && (matcher.groupCount() > 0))
        matcher.reset();
      } // end while (!hasAdded && pIter.hasNext())
    } // end while (vIter.hasNext())
    return retSet;
  }

  /**
   * Gets a vector of normalized URLs (normalized to this target URI)
   * of the outlinks of the page
   * @return
   */
  public Vector<String> getURLOutlinks() {
    Vector<String> retVec = new Vector<String>();
    String baseURL = getTargetURI();
    if ((baseURL == null) || (baseURL.length() == 0)) {
      return retVec;
    }
    byte[] contentBytes=warcRecord.getContent();
    ByteArrayInputStream contentStream=new ByteArrayInputStream(contentBytes);
    BufferedReader inReader=new BufferedReader(new InputStreamReader(contentStream));
    // forward to the first \n\n (skip the HTTP response headers)
    try {
      boolean inHeader=true;
      String line=null;
      while (inHeader && ((line=inReader.readLine())!=null)) {
        if (line.trim().length()==0) {
          inHeader=false;
        }
      }
      // now we have the rest of the lines
      // read them all into a string buffer
      // to remove all new lines
      Vector<String> htmlTags=new Vector<String>();
      while ((line=inReader.readLine())!=null) {
        // get all HTML tags from the line...
        Matcher HTMLMatcher=ALL_HTML_TAGS.matcher(line);
        while (HTMLMatcher.find()) {
          htmlTags.add(HTMLMatcher.group(1));
        }
      }
      HashSet<String> retSet=getMatchesOutputSet(htmlTags, baseURL);
      Iterator<String> oIter=retSet.iterator();
      while (oIter.hasNext()) {
        String thisValue=oIter.next();
        if (!thisValue.equals(baseURL)) {
          retVec.add(thisValue);
        }
      }
    } catch (IOException ioEx) {
      // unreadable content: return no outlinks rather than a partial list
      retVec.clear();
    }
    return retVec;
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.search.facet;
import static org.apache.solr.search.facet.FacetRequest.RefineMethod.NONE;
import com.google.common.collect.Sets;
import java.io.IOException;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import org.apache.lucene.search.Query;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.JoinQParserPlugin;
import org.apache.solr.search.QueryContext;
import org.apache.solr.search.SyntaxError;
import org.apache.solr.search.WrappedQuery;
import org.apache.solr.search.join.GraphQuery;
import org.apache.solr.search.join.GraphQueryParser;
import org.apache.solr.util.RTimer;
/**
* A request to do facets/stats that might itself be composed of sub-FacetRequests. This is a
* cornerstone of the facet module.
*
* @see #parse(SolrQueryRequest, Map)
*/
public abstract class FacetRequest {

  /** Simple structure for encapsulating a sort variable and a direction */
  public static final class FacetSort {
    final String sortVariable;
    final SortDirection sortDirection;

    public FacetSort(final String sortVariable, final SortDirection sortDirection) {
      assert null != sortVariable;
      assert null != sortDirection;
      this.sortVariable = sortVariable;
      this.sortDirection = sortDirection;
    }

    @Override
    public boolean equals(Object other) {
      if (other instanceof FacetSort) {
        final FacetSort that = (FacetSort) other;
        return this.sortVariable.equals(that.sortVariable)
            && this.sortDirection.equals(that.sortDirection);
      }
      return false;
    }

    @Override
    public int hashCode() {
      return Objects.hash(sortVariable, sortDirection);
    }

    @Override
    public String toString() {
      return sortVariable + " " + sortDirection;
    }

    /** Commonly Re-used "count desc" (default) */
    public static final FacetSort COUNT_DESC = new FacetSort("count", SortDirection.desc);

    /** Commonly Re-used "index asc" (index order / streaming) */
    public static final FacetSort INDEX_ASC = new FacetSort("index", SortDirection.asc);
  }

  /** Direction of a {@link FacetSort}; the multiplier is applied when comparing buckets. */
  public enum SortDirection {
    asc(-1),
    desc(1);

    private final int multiplier;

    SortDirection(int multiplier) {
      this.multiplier = multiplier;
    }

    /** Parses "asc"/"desc" from the given object's string form; null or anything else is an error. */
    public static SortDirection fromObj(Object direction) {
      if (direction == null) {
        // should we just default either to desc/asc??
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Missing Sort direction");
      }
      switch (direction.toString()) {
        case "asc":
          return asc;
        case "desc":
          return desc;
        default:
          throw new SolrException(
              SolrException.ErrorCode.BAD_REQUEST, "Unknown Sort direction '" + direction + "'");
      }
    }

    // asc==-1, desc==1
    public int getMultiplier() {
      return multiplier;
    }
  }

  /** How (or whether) this facet participates in distributed refinement. */
  public enum RefineMethod {
    NONE,
    SIMPLE;

    // NONE is distinct from null since we may want to know if refinement was explicitly turned off.
    public static FacetRequest.RefineMethod fromObj(Object method) {
      if (method == null) return null;
      if (method instanceof Boolean) {
        return ((Boolean) method) ? SIMPLE : NONE;
      }
      if ("simple".equals(method)) {
        return SIMPLE;
      } else if ("none".equals(method)) {
        return NONE;
      } else {
        throw new SolrException(
            SolrException.ErrorCode.BAD_REQUEST, "Unknown RefineMethod method " + method);
      }
    }
  }

  protected Map<String, AggValueSource> facetStats; // per-bucket statistics
  protected Map<String, FacetRequest> subFacets; // per-bucket sub-facets
  protected boolean processEmpty;
  protected Domain domain;

  // domain changes
  public static class Domain {
    /**
     * An explicit query domain, <em>ignoring all parent context</em>, expressed in JSON query
     * format. Mutually exclusive to {@link #excludeTags}
     */
    public List<Object> explicitQueries; // list of symbolic filters (JSON query format)

    /**
     * Specifies query/filter tags that should be excluded to re-compute the domain from the parent
     * context. Mutually exclusive to {@link #explicitQueries}
     */
    public List<String> excludeTags;

    public JoinField joinField;
    public GraphField graphField;
    public boolean toParent;
    public boolean toChildren;

    // identifies the parent filter... the full set of parent documents for any block join operation
    public String parents;

    public List<Object> filters; // list of symbolic filters (JSON query format)

    // True if a starting set of documents can be mapped onto a different set of documents not
    // originally in the starting set.
    public boolean canTransformDomain() {
      return toParent
          || toChildren
          || (explicitQueries != null)
          || (excludeTags != null)
          || (joinField != null);
    }

    // Can this domain become non-empty if the input domain is empty? This does not check any
    // sub-facets (see canProduceFromEmpty for that)
    public boolean canBecomeNonEmpty() {
      return (explicitQueries != null) || (excludeTags != null);
    }

    /** Are we doing a query time join across other documents */
    public static class JoinField {
      private static final String FROM_PARAM = "from";
      private static final String TO_PARAM = "to";
      private static final String METHOD_PARAM = "method";
      private static final Set<String> SUPPORTED_JOIN_PROPERTIES =
          Sets.newHashSet(FROM_PARAM, TO_PARAM, METHOD_PARAM);

      public final String from;
      public final String to;
      public final String method;

      private JoinField(String from, String to, String method) {
        assert null != from;
        assert null != to;
        assert null != method;
        this.from = from;
        this.to = to;
        this.method = method;
      }

      /**
       * Given a <code>Domain</code>, and a (JSON) map specifying the configuration for that Domain,
       * validates if a '<code>join</code>' is specified, and if so creates a <code>JoinField</code>
       * and sets it on the <code>Domain</code>.
       *
       * <p>(params must not be null)
       */
      public static void createJoinField(
          FacetRequest.Domain domain, Map<String, Object> domainMap) {
        assert null != domain;
        assert null != domainMap;

        final Object queryJoin = domainMap.get("join");
        if (null != queryJoin) {
          // TODO: maybe allow simple string (instead of map) to mean "self join on this field name"
          if (!(queryJoin instanceof Map)) {
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST,
                "'join' domain change requires a map containing the 'from' and 'to' fields");
          }
          @SuppressWarnings({"unchecked"})
          final Map<String, String> join = (Map<String, String>) queryJoin;
          if (!(join.containsKey(FROM_PARAM)
              && join.containsKey(TO_PARAM)
              && null != join.get(FROM_PARAM)
              && null != join.get(TO_PARAM))) {
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST,
                "'join' domain change requires non-null 'from' and 'to' field names");
          }
          // reject any keys beyond from/to/method so typos fail loudly
          for (String providedKey : join.keySet()) {
            if (!SUPPORTED_JOIN_PROPERTIES.contains(providedKey)) {
              final String supportedPropsStr = String.join(", ", SUPPORTED_JOIN_PROPERTIES);
              final String message =
                  String.format(
                      Locale.ROOT,
                      "'join' domain change contains unexpected key [%s], only %s supported",
                      providedKey,
                      supportedPropsStr);
              throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, message);
            }
          }
          final String method = join.containsKey(METHOD_PARAM) ? join.get(METHOD_PARAM) : "index";
          domain.joinField = new JoinField(join.get(FROM_PARAM), join.get(TO_PARAM), method);
        }
      }

      /**
       * Creates a Query that can be used to recompute the new "base" for this domain, relative to
       * the current base of the FacetContext.
       */
      public Query createDomainQuery(FacetContext fcontext) {
        // NOTE: this code lives here, instead of in FacetProcessor.handleJoin, in order to minimize
        // the number of classes that have to know about the number of possible settings on the join
        // (ie: if we add a score mode, or some other modifier to how the joins are done)
        final Query fromQuery = fcontext.base.makeQuery();
        WrappedQuery wrappedFromQuery = new WrappedQuery(fromQuery);
        // this shouldn't matter once we're wrapped in a join query, but just in case it ever
        // does...
        wrappedFromQuery.setCache(false);
        return JoinQParserPlugin.createJoinQuery(wrappedFromQuery, this.from, this.to, this.method);
      }
    }

    /** Are we doing a query time graph across other documents */
    public static class GraphField {
      public final SolrParams localParams;

      private GraphField(SolrParams localParams) {
        assert null != localParams;
        this.localParams = localParams;
      }

      /**
       * Given a <code>Domain</code>, and a (JSON) map specifying the configuration for that Domain,
       * validates if a '<code>graph</code>' is specified, and if so creates a <code>GraphField
       * </code> and sets it on the <code>Domain</code>.
       *
       * <p>(params must not be null)
       */
      public static void createGraphField(
          FacetRequest.Domain domain, Map<String, Object> domainMap) {
        assert null != domain;
        assert null != domainMap;

        final Object queryGraph = domainMap.get("graph");
        if (null != queryGraph) {
          if (!(queryGraph instanceof Map)) {
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST,
                "'graph' domain change requires a map containing the 'from' and 'to' fields");
          }
          @SuppressWarnings({"unchecked"})
          final Map<String, String> graph = (Map<String, String>) queryGraph;
          if (!(graph.containsKey("from")
              && graph.containsKey("to")
              && null != graph.get("from")
              && null != graph.get("to"))) {
            throw new SolrException(
                SolrException.ErrorCode.BAD_REQUEST,
                "'graph' domain change requires non-null 'from' and 'to' field names");
          }
          domain.graphField = new GraphField(FacetParser.jsonToSolrParams(graph));
        }
      }

      /**
       * Creates a Query that can be used to recompute the new "base" for this domain, relative to
       * the current base of the FacetContext.
       */
      public Query createDomainQuery(FacetContext fcontext) {
        final Query fromQuery = fcontext.base.makeQuery();
        WrappedQuery wrappedFromQuery = new WrappedQuery(fromQuery);
        // this shouldn't matter once we're wrapped in a join query, but just in case it ever
        // does...
        wrappedFromQuery.setCache(false);
        GraphQueryParser graphParser = new GraphQueryParser(null, localParams, null, fcontext.req);
        try {
          GraphQuery graphQuery = (GraphQuery) graphParser.parse();
          graphQuery.setQ(wrappedFromQuery);
          return graphQuery;
        } catch (SyntaxError syntaxError) {
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, syntaxError);
        }
      }
    }
  }

  /**
   * Factory method to parse a facet request tree. The outer keys are arbitrary labels and their
   * values are facet request specifications. Will throw a {@link SolrException} if it fails to
   * parse.
   *
   * @param req the overall request
   * @param params a typed parameter structure (unlike SolrParams which are all string values).
   */
  public static FacetRequest parse(SolrQueryRequest req, Map<String, Object> params) {
    FacetParser<?> parser = new FacetParser.FacetTopParser(req);
    try {
      return parser.parse(params);
    } catch (SyntaxError syntaxError) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, syntaxError);
    }
  }

  // TODO it would be nice if there was no distinction. If the top level request had "type" as
  // special then there wouldn't be a need.

  /**
   * Factory method to parse out a rooted facet request tree that would normally go one level below
   * a label. The params must contain a "type". This is intended to be useful externally, such as by
   * {@link org.apache.solr.request.SimpleFacets}.
   *
   * @param req the overall request
   * @param params a typed parameter structure (unlike SolrParams which are all string values).
   */
  public static FacetRequest parseOneFacetReq(SolrQueryRequest req, Map<String, Object> params) {
    @SuppressWarnings("rawtypes")
    FacetParser parser = new FacetParser.FacetTopParser(req);
    try {
      return (FacetRequest) parser.parseFacetOrStat("", params);
    } catch (SyntaxError syntaxError) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, syntaxError);
    }
  }

  public FacetRequest() {
    facetStats = new LinkedHashMap<>();
    subFacets = new LinkedHashMap<>();
  }

  /** Per-bucket statistics, keyed by label. */
  public Map<String, AggValueSource> getFacetStats() {
    return facetStats;
  }

  /** Per-bucket sub-facets, keyed by label. */
  public Map<String, FacetRequest> getSubFacets() {
    return subFacets;
  }

  /** Returns null if unset */
  public RefineMethod getRefineMethod() {
    return null;
  }

  /** True when refinement was requested (i.e. the method is set and is not {@code NONE}). */
  public boolean doRefine() {
    return !(getRefineMethod() == null || getRefineMethod() == NONE);
  }

  /**
   * Returns true if this facet can return just some of the facet buckets that match all the
   * criteria. This is normally true only for facets with a limit.
   */
  public boolean returnsPartial() {
    // TODO: should the default impl check processEmpty ?
    return false;
  }

  /** Returns true if this facet, or any sub-facets can produce results from an empty domain. */
  public boolean canProduceFromEmpty() {
    if (domain != null && domain.canBecomeNonEmpty()) return true;
    for (FacetRequest freq : subFacets.values()) {
      if (freq.canProduceFromEmpty()) return true;
    }
    return false;
  }

  public void addStat(String key, AggValueSource stat) {
    facetStats.put(key, stat);
  }

  public void addSubFacet(String key, FacetRequest facetRequest) {
    subFacets.put(key, facetRequest);
  }

  @Override
  public String toString() {
    Map<String, Object> descr = getFacetDescription();
    StringBuilder s = new StringBuilder("facet request: { ");
    for (Map.Entry<String, Object> entry : descr.entrySet()) {
      s.append(entry.getKey()).append(':').append(entry.getValue()).append(',');
    }
    s.append('}');
    return s.toString();
  }

  /**
   * Process this facet request against the given domain of docs. Note: this is currently used
   * externally by {@link org.apache.solr.request.SimpleFacets}.
   */
  public final Object process(SolrQueryRequest req, DocSet domain) throws IOException {
    // TODO check for FacetDebugInfo? and if so set on fcontext
    // rb.req.getContext().get("FacetDebugInfo");
    // TODO should the SolrQueryRequest be held on the FacetRequest? It was created from
    // parse(req,...) so is known.
    FacetContext fcontext = new FacetContext();
    fcontext.base = domain;
    fcontext.req = req;
    fcontext.searcher = req.getSearcher();
    fcontext.qcontext = QueryContext.newContext(fcontext.searcher);
    return process(fcontext);
  }

  /** Process the request with the facet context settings, a parameter-object. */
  final Object process(FacetContext fcontext) throws IOException {
    FacetProcessor<?> facetProcessor = createFacetProcessor(fcontext);
    FacetDebugInfo debugInfo = fcontext.getDebugInfo();
    if (debugInfo == null) {
      facetProcessor.process();
    } else {
      // debug mode: record the filter, request description, processor, domain size and timing
      if (fcontext.filter != null) {
        debugInfo.setFilter(fcontext.filter.toString());
      }
      debugInfo.setReqDescription(getFacetDescription());
      debugInfo.setProcessor(facetProcessor.getClass().getSimpleName());
      debugInfo.putInfoItem("domainSize", (long) fcontext.base.size());
      RTimer timer = new RTimer();
      try {
        facetProcessor.process();
      } finally {
        debugInfo.setElapse((long) timer.getTime());
      }
    }
    return facetProcessor.getResponse();
  }

  /** Creates the processor that executes this request against a given context. */
  public abstract FacetProcessor<? extends FacetRequest> createFacetProcessor(
      FacetContext fcontext);

  /** Creates the merger that combines per-shard responses for this request. */
  public abstract FacetMerger createFacetMerger(Object prototype);

  /** A (JSON-like) description of this facet request, used for debugging and toString. */
  public abstract Map<String, Object> getFacetDescription();
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.opengl;
import java.nio.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryStack.*;
import static org.lwjgl.system.MemoryUtil.*;
/**
* Native bindings to the <a target="_blank" href="https://www.khronos.org/registry/OpenGL/extensions/NV/NV_gpu_shader5.txt">NV_gpu_shader5</a> extension.
*
* <p>This extension provides a set of new features to the OpenGL Shading Language and related APIs to support capabilities of new GPUs. Shaders using the
* new functionality provided by this extension should enable this functionality via the construct</p>
*
* <pre><code>
* \#extension GL_NV_gpu_shader5 : require (or enable)</code></pre>
*
* <p>This extension was developed concurrently with the ARB_gpu_shader5 extension, and provides a superset of the features provided there. The features
 * common to both extensions are documented in the ARB_gpu_shader5 specification; this document describes only the additional language features not
* available via ARB_gpu_shader5. A shader that enables this extension via an \#extension directive also implicitly enables the common capabilities
* provided by ARB_gpu_shader5.</p>
*
* <p>In addition to the capabilities of ARB_gpu_shader5, this extension provides a variety of new features for all shader types, including:</p>
*
* <ul>
* <li>support for a full set of 8-, 16-, 32-, and 64-bit scalar and vector data types, including uniform API, uniform buffer object, and shader input and
* output support;</li>
* <li>the ability to aggregate samplers into arrays, index these arrays with arbitrary expressions, and not require that non-constant indices be uniform
* across all shader invocations;</li>
* <li>new built-in functions to pack and unpack 64-bit integer types into a two-component 32-bit integer vector;</li>
* <li>new built-in functions to pack and unpack 32-bit unsigned integer types into a two-component 16-bit floating-point vector;</li>
* <li>new built-in functions to convert double-precision floating-point values to or from their 64-bit integer bit encodings;</li>
 * <li>new built-in functions to compute the composite of a set of boolean conditions across a group of shader threads;</li>
* <li>vector relational functions supporting comparisons of vectors of 8-, 16-, and 64-bit integer types or 16-bit floating-point types; and</li>
* <li>extending texel offset support to allow loading texel offsets from regular integer operands computed at run-time, except for lookups with gradients
* (textureGrad*).</li>
* </ul>
*
* <p>This extension also provides additional support for processing patch primitives (introduced by ARB_tessellation_shader). ARB_tessellation_shader
* requires the use of a tessellation evaluation shader when processing patches, which means that patches will never survive past the tessellation
* pipeline stage. This extension lifts that restriction, and allows patches to proceed further in the pipeline and be used</p>
*
* <ul>
* <li>as input to a geometry shader, using a new "patches" layout qualifier;</li>
* <li>as input to transform feedback;</li>
* <li>by fixed-function rasterization stages, in which case the patches are drawn as independent points.</li>
* </ul>
*
* <p>Additionally, it allows geometry shaders to read per-patch attributes written by a tessellation control shader using input variables declared with
* "patch in".</p>
*
* <p>Requires {@link GL32 OpenGL 3.2}, GLSL 1.50 and {@link ARBGPUShader5 ARB_gpu_shader5}.</p>
*/
public class NVGPUShader5 {

    static { GL.initialize(); }

    /** Returned by the {@code type} parameter of GetActiveAttrib, GetActiveUniform, and GetTransformFeedbackVarying. */
    public static final int
        GL_INT64_NV               = 0x140E,
        GL_UNSIGNED_INT64_NV      = 0x140F,
        GL_INT8_NV                = 0x8FE0,
        GL_INT8_VEC2_NV           = 0x8FE1,
        GL_INT8_VEC3_NV           = 0x8FE2,
        GL_INT8_VEC4_NV           = 0x8FE3,
        GL_INT16_NV               = 0x8FE4,
        GL_INT16_VEC2_NV          = 0x8FE5,
        GL_INT16_VEC3_NV          = 0x8FE6,
        GL_INT16_VEC4_NV          = 0x8FE7,
        GL_INT64_VEC2_NV          = 0x8FE9,
        GL_INT64_VEC3_NV          = 0x8FEA,
        GL_INT64_VEC4_NV          = 0x8FEB,
        GL_UNSIGNED_INT8_NV       = 0x8FEC,
        GL_UNSIGNED_INT8_VEC2_NV  = 0x8FED,
        GL_UNSIGNED_INT8_VEC3_NV  = 0x8FEE,
        GL_UNSIGNED_INT8_VEC4_NV  = 0x8FEF,
        GL_UNSIGNED_INT16_NV      = 0x8FF0,
        GL_UNSIGNED_INT16_VEC2_NV = 0x8FF1,
        GL_UNSIGNED_INT16_VEC3_NV = 0x8FF2,
        GL_UNSIGNED_INT16_VEC4_NV = 0x8FF3,
        GL_UNSIGNED_INT64_VEC2_NV = 0x8FF5,
        GL_UNSIGNED_INT64_VEC3_NV = 0x8FF6,
        GL_UNSIGNED_INT64_VEC4_NV = 0x8FF7,
        GL_FLOAT16_NV             = 0x8FF8,
        GL_FLOAT16_VEC2_NV        = 0x8FF9,
        GL_FLOAT16_VEC3_NV        = 0x8FFA,
        GL_FLOAT16_VEC4_NV        = 0x8FFB;

    /** Static-only bindings class; not instantiable. */
    protected NVGPUShader5() {
        throw new UnsupportedOperationException();
    }

    // --- [ glUniform1i64NV ] ---

    public static native void glUniform1i64NV(@NativeType("GLint") int location, @NativeType("GLint64EXT") long x);

    // --- [ glUniform2i64NV ] ---

    public static native void glUniform2i64NV(@NativeType("GLint") int location, @NativeType("GLint64EXT") long x, @NativeType("GLint64EXT") long y);

    // --- [ glUniform3i64NV ] ---

    public static native void glUniform3i64NV(@NativeType("GLint") int location, @NativeType("GLint64EXT") long x, @NativeType("GLint64EXT") long y, @NativeType("GLint64EXT") long z);

    // --- [ glUniform4i64NV ] ---

    public static native void glUniform4i64NV(@NativeType("GLint") int location, @NativeType("GLint64EXT") long x, @NativeType("GLint64EXT") long y, @NativeType("GLint64EXT") long z, @NativeType("GLint64EXT") long w);

    // --- [ glUniform1i64vNV ] ---

    public static native void nglUniform1i64vNV(int location, int count, long value);

    // NOTE: for all *v wrappers below, the GL "count" argument is derived from the buffer/array
    // length divided by the vector width (vec2 -> >> 1, vec3 -> / 3, vec4 -> >> 2).
    public static void glUniform1i64vNV(@NativeType("GLint") int location, @NativeType("GLint64EXT const *") LongBuffer value) {
        nglUniform1i64vNV(location, value.remaining(), memAddress(value));
    }

    // --- [ glUniform2i64vNV ] ---

    public static native void nglUniform2i64vNV(int location, int count, long value);

    public static void glUniform2i64vNV(@NativeType("GLint") int location, @NativeType("GLint64EXT const *") LongBuffer value) {
        nglUniform2i64vNV(location, value.remaining() >> 1, memAddress(value));
    }

    // --- [ glUniform3i64vNV ] ---

    public static native void nglUniform3i64vNV(int location, int count, long value);

    public static void glUniform3i64vNV(@NativeType("GLint") int location, @NativeType("GLint64EXT const *") LongBuffer value) {
        nglUniform3i64vNV(location, value.remaining() / 3, memAddress(value));
    }

    // --- [ glUniform4i64vNV ] ---

    public static native void nglUniform4i64vNV(int location, int count, long value);

    public static void glUniform4i64vNV(@NativeType("GLint") int location, @NativeType("GLint64EXT const *") LongBuffer value) {
        nglUniform4i64vNV(location, value.remaining() >> 2, memAddress(value));
    }

    // --- [ glUniform1ui64NV ] ---

    public static native void glUniform1ui64NV(@NativeType("GLint") int location, @NativeType("GLuint64EXT") long x);

    // --- [ glUniform2ui64NV ] ---

    public static native void glUniform2ui64NV(@NativeType("GLint") int location, @NativeType("GLuint64EXT") long x, @NativeType("GLuint64EXT") long y);

    // --- [ glUniform3ui64NV ] ---

    public static native void glUniform3ui64NV(@NativeType("GLint") int location, @NativeType("GLuint64EXT") long x, @NativeType("GLuint64EXT") long y, @NativeType("GLuint64EXT") long z);

    // --- [ glUniform4ui64NV ] ---

    public static native void glUniform4ui64NV(@NativeType("GLint") int location, @NativeType("GLuint64EXT") long x, @NativeType("GLuint64EXT") long y, @NativeType("GLuint64EXT") long z, @NativeType("GLuint64EXT") long w);

    // --- [ glUniform1ui64vNV ] ---

    public static native void nglUniform1ui64vNV(int location, int count, long value);

    public static void glUniform1ui64vNV(@NativeType("GLint") int location, @NativeType("GLuint64EXT const *") LongBuffer value) {
        nglUniform1ui64vNV(location, value.remaining(), memAddress(value));
    }

    // --- [ glUniform2ui64vNV ] ---

    public static native void nglUniform2ui64vNV(int location, int count, long value);

    // NOTE(review): annotation normalized to "GLuint64EXT const *" for consistency with every
    // sibling overload; this file is machine generated, so the same fix belongs in the generator template.
    public static void glUniform2ui64vNV(@NativeType("GLint") int location, @NativeType("GLuint64EXT const *") LongBuffer value) {
        nglUniform2ui64vNV(location, value.remaining() >> 1, memAddress(value));
    }

    // --- [ glUniform3ui64vNV ] ---

    public static native void nglUniform3ui64vNV(int location, int count, long value);

    public static void glUniform3ui64vNV(@NativeType("GLint") int location, @NativeType("GLuint64EXT const *") LongBuffer value) {
        nglUniform3ui64vNV(location, value.remaining() / 3, memAddress(value));
    }

    // --- [ glUniform4ui64vNV ] ---

    public static native void nglUniform4ui64vNV(int location, int count, long value);

    public static void glUniform4ui64vNV(@NativeType("GLint") int location, @NativeType("GLuint64EXT const *") LongBuffer value) {
        nglUniform4ui64vNV(location, value.remaining() >> 2, memAddress(value));
    }

    // --- [ glGetUniformi64vNV ] ---

    public static native void nglGetUniformi64vNV(int program, int location, long params);

    public static void glGetUniformi64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT *") LongBuffer params) {
        if (CHECKS) {
            check(params, 1);
        }
        nglGetUniformi64vNV(program, location, memAddress(params));
    }

    /** Single-value convenience variant: returns the uniform value via a stack-allocated buffer. */
    @NativeType("void")
    public static long glGetUniformi64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location) {
        MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
        try {
            LongBuffer params = stack.callocLong(1);
            nglGetUniformi64vNV(program, location, memAddress(params));
            return params.get(0);
        } finally {
            stack.setPointer(stackPointer);
        }
    }

    // --- [ glGetUniformui64vNV ] ---

    // Shared with NV_shader_buffer_load; delegate so both extension classes expose the entry point.
    public static void nglGetUniformui64vNV(int program, int location, long params) {
        NVShaderBufferLoad.nglGetUniformui64vNV(program, location, params);
    }

    public static void glGetUniformui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT *") LongBuffer params) {
        NVShaderBufferLoad.glGetUniformui64vNV(program, location, params);
    }

    @NativeType("void")
    public static long glGetUniformui64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location) {
        return NVShaderBufferLoad.glGetUniformui64NV(program, location);
    }

    // --- [ glProgramUniform1i64NV ] ---

    public static native void glProgramUniform1i64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT") long x);

    // --- [ glProgramUniform2i64NV ] ---

    public static native void glProgramUniform2i64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT") long x, @NativeType("GLint64EXT") long y);

    // --- [ glProgramUniform3i64NV ] ---

    public static native void glProgramUniform3i64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT") long x, @NativeType("GLint64EXT") long y, @NativeType("GLint64EXT") long z);

    // --- [ glProgramUniform4i64NV ] ---

    public static native void glProgramUniform4i64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT") long x, @NativeType("GLint64EXT") long y, @NativeType("GLint64EXT") long z, @NativeType("GLint64EXT") long w);

    // --- [ glProgramUniform1i64vNV ] ---

    public static native void nglProgramUniform1i64vNV(int program, int location, int count, long value);

    public static void glProgramUniform1i64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT const *") LongBuffer value) {
        nglProgramUniform1i64vNV(program, location, value.remaining(), memAddress(value));
    }

    // --- [ glProgramUniform2i64vNV ] ---

    public static native void nglProgramUniform2i64vNV(int program, int location, int count, long value);

    public static void glProgramUniform2i64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT const *") LongBuffer value) {
        nglProgramUniform2i64vNV(program, location, value.remaining() >> 1, memAddress(value));
    }

    // --- [ glProgramUniform3i64vNV ] ---

    public static native void nglProgramUniform3i64vNV(int program, int location, int count, long value);

    public static void glProgramUniform3i64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT const *") LongBuffer value) {
        nglProgramUniform3i64vNV(program, location, value.remaining() / 3, memAddress(value));
    }

    // --- [ glProgramUniform4i64vNV ] ---

    public static native void nglProgramUniform4i64vNV(int program, int location, int count, long value);

    public static void glProgramUniform4i64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT const *") LongBuffer value) {
        nglProgramUniform4i64vNV(program, location, value.remaining() >> 2, memAddress(value));
    }

    // --- [ glProgramUniform1ui64NV ] ---

    public static native void glProgramUniform1ui64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT") long x);

    // --- [ glProgramUniform2ui64NV ] ---

    public static native void glProgramUniform2ui64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT") long x, @NativeType("GLuint64EXT") long y);

    // --- [ glProgramUniform3ui64NV ] ---

    public static native void glProgramUniform3ui64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT") long x, @NativeType("GLuint64EXT") long y, @NativeType("GLuint64EXT") long z);

    // --- [ glProgramUniform4ui64NV ] ---

    public static native void glProgramUniform4ui64NV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT") long x, @NativeType("GLuint64EXT") long y, @NativeType("GLuint64EXT") long z, @NativeType("GLuint64EXT") long w);

    // --- [ glProgramUniform1ui64vNV ] ---

    public static native void nglProgramUniform1ui64vNV(int program, int location, int count, long value);

    public static void glProgramUniform1ui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT const *") LongBuffer value) {
        nglProgramUniform1ui64vNV(program, location, value.remaining(), memAddress(value));
    }

    // --- [ glProgramUniform2ui64vNV ] ---

    public static native void nglProgramUniform2ui64vNV(int program, int location, int count, long value);

    public static void glProgramUniform2ui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT const *") LongBuffer value) {
        nglProgramUniform2ui64vNV(program, location, value.remaining() >> 1, memAddress(value));
    }

    // --- [ glProgramUniform3ui64vNV ] ---

    public static native void nglProgramUniform3ui64vNV(int program, int location, int count, long value);

    public static void glProgramUniform3ui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT const *") LongBuffer value) {
        nglProgramUniform3ui64vNV(program, location, value.remaining() / 3, memAddress(value));
    }

    // --- [ glProgramUniform4ui64vNV ] ---

    public static native void nglProgramUniform4ui64vNV(int program, int location, int count, long value);

    public static void glProgramUniform4ui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT const *") LongBuffer value) {
        nglProgramUniform4ui64vNV(program, location, value.remaining() >> 2, memAddress(value));
    }

    /** Array version of: {@link #glUniform1i64vNV Uniform1i64vNV} */
    public static void glUniform1i64vNV(@NativeType("GLint") int location, @NativeType("GLint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glUniform1i64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(location, value.length, value, __functionAddress);
    }

    /** Array version of: {@link #glUniform2i64vNV Uniform2i64vNV} */
    public static void glUniform2i64vNV(@NativeType("GLint") int location, @NativeType("GLint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glUniform2i64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(location, value.length >> 1, value, __functionAddress);
    }

    /** Array version of: {@link #glUniform3i64vNV Uniform3i64vNV} */
    public static void glUniform3i64vNV(@NativeType("GLint") int location, @NativeType("GLint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glUniform3i64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(location, value.length / 3, value, __functionAddress);
    }

    /** Array version of: {@link #glUniform4i64vNV Uniform4i64vNV} */
    public static void glUniform4i64vNV(@NativeType("GLint") int location, @NativeType("GLint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glUniform4i64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(location, value.length >> 2, value, __functionAddress);
    }

    /** Array version of: {@link #glUniform1ui64vNV Uniform1ui64vNV} */
    public static void glUniform1ui64vNV(@NativeType("GLint") int location, @NativeType("GLuint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glUniform1ui64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(location, value.length, value, __functionAddress);
    }

    /** Array version of: {@link #glUniform2ui64vNV Uniform2ui64vNV} */
    // NOTE(review): annotation normalized to "GLuint64EXT const *" to match the sibling overloads.
    public static void glUniform2ui64vNV(@NativeType("GLint") int location, @NativeType("GLuint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glUniform2ui64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(location, value.length >> 1, value, __functionAddress);
    }

    /** Array version of: {@link #glUniform3ui64vNV Uniform3ui64vNV} */
    public static void glUniform3ui64vNV(@NativeType("GLint") int location, @NativeType("GLuint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glUniform3ui64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(location, value.length / 3, value, __functionAddress);
    }

    /** Array version of: {@link #glUniform4ui64vNV Uniform4ui64vNV} */
    public static void glUniform4ui64vNV(@NativeType("GLint") int location, @NativeType("GLuint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glUniform4ui64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(location, value.length >> 2, value, __functionAddress);
    }

    /** Array version of: {@link #glGetUniformi64vNV GetUniformi64vNV} */
    public static void glGetUniformi64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT *") long[] params) {
        long __functionAddress = GL.getICD().glGetUniformi64vNV;
        if (CHECKS) {
            check(__functionAddress);
            check(params, 1);
        }
        callPV(program, location, params, __functionAddress);
    }

    /** Array version of: {@link #glGetUniformui64vNV GetUniformui64vNV} */
    public static void glGetUniformui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT *") long[] params) {
        NVShaderBufferLoad.glGetUniformui64vNV(program, location, params);
    }

    /** Array version of: {@link #glProgramUniform1i64vNV ProgramUniform1i64vNV} */
    public static void glProgramUniform1i64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glProgramUniform1i64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(program, location, value.length, value, __functionAddress);
    }

    /** Array version of: {@link #glProgramUniform2i64vNV ProgramUniform2i64vNV} */
    public static void glProgramUniform2i64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glProgramUniform2i64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(program, location, value.length >> 1, value, __functionAddress);
    }

    /** Array version of: {@link #glProgramUniform3i64vNV ProgramUniform3i64vNV} */
    public static void glProgramUniform3i64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glProgramUniform3i64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(program, location, value.length / 3, value, __functionAddress);
    }

    /** Array version of: {@link #glProgramUniform4i64vNV ProgramUniform4i64vNV} */
    public static void glProgramUniform4i64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glProgramUniform4i64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(program, location, value.length >> 2, value, __functionAddress);
    }

    /** Array version of: {@link #glProgramUniform1ui64vNV ProgramUniform1ui64vNV} */
    public static void glProgramUniform1ui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glProgramUniform1ui64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(program, location, value.length, value, __functionAddress);
    }

    /** Array version of: {@link #glProgramUniform2ui64vNV ProgramUniform2ui64vNV} */
    public static void glProgramUniform2ui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glProgramUniform2ui64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(program, location, value.length >> 1, value, __functionAddress);
    }

    /** Array version of: {@link #glProgramUniform3ui64vNV ProgramUniform3ui64vNV} */
    public static void glProgramUniform3ui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glProgramUniform3ui64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(program, location, value.length / 3, value, __functionAddress);
    }

    /** Array version of: {@link #glProgramUniform4ui64vNV ProgramUniform4ui64vNV} */
    public static void glProgramUniform4ui64vNV(@NativeType("GLuint") int program, @NativeType("GLint") int location, @NativeType("GLuint64EXT const *") long[] value) {
        long __functionAddress = GL.getICD().glProgramUniform4ui64vNV;
        if (CHECKS) {
            check(__functionAddress);
        }
        callPV(program, location, value.length >> 2, value, __functionAddress);
    }

}
| |
/*
* Copyright 2005 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.rule.builder.dialect.mvel;
import org.antlr.runtime.RecognitionException;
import org.drools.compiler.compiler.BoundIdentifiers;
import org.drools.compiler.compiler.DescrBuildError;
import org.drools.compiler.lang.descr.BaseDescr;
import org.drools.compiler.rule.builder.PackageBuildContext;
import org.drools.compiler.rule.builder.RuleBuildContext;
import org.drools.compiler.rule.builder.dialect.DialectUtil;
import org.drools.core.base.EvaluatorWrapper;
import org.drools.core.rule.MVELDialectRuntimeData;
import org.kie.api.definition.rule.Rule;
import org.mvel2.MVEL;
import org.mvel2.ParserConfiguration;
import org.mvel2.ParserContext;
import org.mvel2.optimizers.OptimizerFactory;
import org.mvel2.util.PropertyTools;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* Expression analyzer.
*/
public class MVELExprAnalyzer {

    static {
        // always use mvel reflective optimizer
        OptimizerFactory.setDefaultOptimizer(OptimizerFactory.SAFE_REFLECTIVE);
    }

    public MVELExprAnalyzer() {
        // intentionally left blank.
    }

    // ------------------------------------------------------------
    // Instance methods
    // ------------------------------------------------------------

    /**
     * Analyze an expression.
     * <p>
     * The expression is compiled twice. A first, loosely-typed pass discovers which inputs the
     * expression references; a second, strongly-typed pass (fed with those inputs) verifies the
     * expression and determines its return type. If the strict pass fails and the build context
     * is not type-safe, the analyzer falls back to the loose-pass results.
     *
     * @param context
     *            The package build context (provides dialect data and error collection).
     * @param expr
     *            The expression to analyze.
     * @param availableIdentifiers
     *            Total set of declarations available.
     * @param localTypes
     *            Types of known local variables, may be null.
     * @param contextIdentifier
     *            Name under which the kcontext object is exposed, may be null.
     * @param kcontextClass
     *            Type bound to {@code contextIdentifier} and "kcontext".
     *
     * @return The analysis result with the declarations used by the expression, or null if the
     *         expression could not be analyzed (an error is added to the context in that case).
     */
    @SuppressWarnings("unchecked")
    public MVELAnalysisResult analyzeExpression(final PackageBuildContext context,
                                                final String expr,
                                                final BoundIdentifiers availableIdentifiers,
                                                final Map<String, Class< ? >> localTypes,
                                                String contextIdentifier,
                                                Class kcontextClass) {
        if ( expr.trim().isEmpty() ) {
            // empty expression: no identifiers, no variables, trivially type-safe
            MVELAnalysisResult result = analyze( Collections.<String>emptySet(), availableIdentifiers );
            result.setMvelVariables( new HashMap<String, Class< ? >>() );
            result.setTypesafe( true );
            return result;
        }

        // global MVEL compiler options required by the drools dialect
        MVEL.COMPILER_OPT_ALLOW_NAKED_METH_CALL = true;
        MVEL.COMPILER_OPT_ALLOW_OVERRIDE_ALL_PROPHANDLING = true;
        MVEL.COMPILER_OPT_ALLOW_RESOLVE_INNERCLASSES_WITH_DOTNOTATION = true;
        MVEL.COMPILER_OPT_SUPPORT_JAVA_STYLE_CLASS_LITERALS = true;

        MVELDialect dialect = (MVELDialect) context.getDialect( "mvel" );
        MVELDialectRuntimeData data = ( MVELDialectRuntimeData) context.getPkg().getDialectRuntimeRegistry().getDialectData( "mvel" );
        ParserConfiguration conf = data.getParserConfiguration();
        conf.setClassLoader( context.getKnowledgeBuilder().getRootClassLoader() );

        // first compilation is for verification only
        // @todo proper source file name
        final ParserContext parserContext1 = new ParserContext( conf );
        if ( localTypes != null ) {
            for ( Entry<String, Class< ? >> entry : localTypes.entrySet() ) {
                parserContext1.addInput( entry.getKey(),
                                         entry.getValue() );
            }
        }
        if ( availableIdentifiers.getThisClass() != null ) {
            parserContext1.addInput( "this",
                                     availableIdentifiers.getThisClass() );
        }
        if ( availableIdentifiers.getOperators() != null ) {
            for ( Entry<String, EvaluatorWrapper> opEntry : availableIdentifiers.getOperators().entrySet() ) {
                parserContext1.addInput( opEntry.getKey(), opEntry.getValue().getClass() );
            }
        }

        parserContext1.setStrictTypeEnforcement( false );
        parserContext1.setStrongTyping( false );
        parserContext1.setInterceptors( dialect.getInterceptors() );

        Class< ? > returnType;
        try {
            returnType = MVEL.analyze( expr,
                                       parserContext1 );
        } catch ( Exception e ) {
            BaseDescr base = (context instanceof RuleBuildContext) ? ((RuleBuildContext)context).getRuleDescr() : context.getParentDescr();
            DialectUtil.copyErrorLocation(e, context.getParentDescr());
            context.addError( new DescrBuildError( base,
                                                   context.getParentDescr(),
                                                   null,
                                                   "Unable to Analyse Expression " + expr + ":\n" + e.getMessage() ) );
            return null;
        }

        Set<String> requiredInputs = new HashSet<String>();
        requiredInputs.addAll( parserContext1.getInputs().keySet() );
        HashMap<String, Class< ? >> variables = (HashMap<String, Class< ? >>) ((Map) parserContext1.getVariables());
        if ( localTypes != null ) {
            for ( String str : localTypes.keySet() ) {
                // we have to do this due to mvel regressions on detecting true local vars
                variables.remove( str );
            }
        }

        // MVEL includes direct fields of context object in non-strict mode. so we need to strip those
        if ( availableIdentifiers.getThisClass() != null ) {
            for ( Iterator<String> it = requiredInputs.iterator(); it.hasNext(); ) {
                if ( PropertyTools.getFieldOrAccessor( availableIdentifiers.getThisClass(),
                                                       it.next() ) != null ) {
                    it.remove();
                }
            }
        }

        // now, set the required input types and compile again
        final ParserContext parserContext2 = new ParserContext( conf );
        parserContext2.setStrictTypeEnforcement( true );
        parserContext2.setStrongTyping( true );
        parserContext2.setInterceptors( dialect.getInterceptors() );

        for ( String str : requiredInputs ) {
            // resolution order: declarations, then globals, then operators,
            // then the kcontext/rule pseudo-variables, then known local types
            Class< ? > cls = availableIdentifiers.getDeclrClasses().get( str );
            if ( cls != null ) {
                parserContext2.addInput( str, cls );
                continue;
            }

            cls = availableIdentifiers.getGlobals().get( str );
            if ( cls != null ) {
                parserContext2.addInput( str, cls );
                continue;
            }

            cls = availableIdentifiers.getOperators().containsKey( str ) ?
                  context.getConfiguration().getComponentFactory().getExpressionProcessor().getEvaluatorWrapperClass() :
                  null;
            if ( cls != null ) {
                parserContext2.addInput( str, cls );
                continue;
            }

            if ( str.equals( contextIdentifier ) ) {
                parserContext2.addInput( contextIdentifier, kcontextClass );
            } else if ( str.equals( "kcontext" ) ) {
                parserContext2.addInput( "kcontext", kcontextClass );
            } else if ( str.equals( "rule" ) ) {
                parserContext2.addInput( "rule", Rule.class );
            }

            if ( localTypes != null ) {
                cls = localTypes.get( str );
                if ( cls != null ) {
                    parserContext2.addInput( str, cls );
                }
            }
        }

        if ( availableIdentifiers.getThisClass() != null ) {
            parserContext2.addInput( "this", availableIdentifiers.getThisClass() );
        }

        boolean typesafe = context.isTypesafe();

        try {
            returnType = MVEL.analyze( expr,
                                       parserContext2 );
            typesafe = true;
        } catch ( Exception e ) {
            // is this an error, or can we fall back to non-typesafe mode?
            if ( typesafe ) {
                BaseDescr base = (context instanceof RuleBuildContext) ? ((RuleBuildContext)context).getRuleDescr() : context.getParentDescr();
                DialectUtil.copyErrorLocation(e, context.getParentDescr());
                context.addError( new DescrBuildError( base,
                                                       context.getParentDescr(),
                                                       null,
                                                       "Unable to Analyse Expression " + expr + ":\n" + e.getMessage() ) );
                return null;
            }
        }

        if ( typesafe ) {
            // strict pass succeeded (or context allowed it): use its inputs and variables
            requiredInputs = new HashSet<String>();
            requiredInputs.addAll( parserContext2.getInputs().keySet() );
            requiredInputs.addAll( variables.keySet() );
            variables = (HashMap<String, Class< ? >>) ((Map) parserContext2.getVariables());
            if ( localTypes != null ) {
                for ( String str : localTypes.keySet() ) {
                    // we have to do this due to mvel regressions on detecting true local vars
                    variables.remove( str );
                }
            }
        }

        MVELAnalysisResult result = analyze( requiredInputs, availableIdentifiers );
        result.setReturnType( returnType );
        result.setMvelVariables( variables );
        result.setTypesafe( typesafe );
        return result;
    }

    /**
     * Analyse an expression: partitions the given identifiers into used declarations, globals
     * and operators, and records the remainder as not-bound identifiers.
     *
     * @throws RecognitionException
     *             If an error occurs in the parser.
     */
    private MVELAnalysisResult analyze(final Set<String> identifiers,
                                       final BoundIdentifiers availableIdentifiers) {
        MVELAnalysisResult result = new MVELAnalysisResult();
        result.setIdentifiers( identifiers );

        final Set<String> notBound = new HashSet<String>( identifiers );
        notBound.remove( "this" );
        Map<String, Class< ? >> usedDecls = new HashMap<String, Class< ? >>();
        Map<String, Class< ? >> usedGlobals = new HashMap<String, Class< ? >>();
        Map<String, EvaluatorWrapper> usedOperators = new HashMap<String, EvaluatorWrapper>();

        for ( Entry<String, Class< ? >> entry : availableIdentifiers.getDeclrClasses().entrySet() ) {
            if ( identifiers.contains( entry.getKey() ) ) {
                usedDecls.put( entry.getKey(),
                               entry.getValue() );
                notBound.remove( entry.getKey() );
            }
        }

        for ( Entry<String, Class< ? >> entry : availableIdentifiers.getGlobals().entrySet() ) {
            if ( identifiers.contains( entry.getKey() ) ) {
                usedGlobals.put( entry.getKey(),
                                 entry.getValue() );
                notBound.remove( entry.getKey() );
            }
        }

        for ( Map.Entry<String, EvaluatorWrapper> op : availableIdentifiers.getOperators().entrySet() ) {
            if ( identifiers.contains( op.getKey() ) ) {
                usedOperators.put( op.getKey(),
                                   op.getValue() );
                notBound.remove( op.getKey() );
            }
        }

        result.setBoundIdentifiers( new BoundIdentifiers( usedDecls,
                                                          usedGlobals,
                                                          usedOperators,
                                                          availableIdentifiers.getThisClass() ) );
        result.setNotBoundedIdentifiers( notBound );
        return result;
    }
}
| |
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.loader.tools;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import java.util.jar.Attributes;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.stream.Collectors;
import org.apache.commons.compress.archivers.jar.JarArchiveEntry;
import org.springframework.boot.loader.tools.AbstractJarWriter.EntryTransformer;
import org.springframework.boot.loader.tools.AbstractJarWriter.UnpackHandler;
import org.springframework.core.io.support.SpringFactoriesLoader;
import org.springframework.util.Assert;
import org.springframework.util.StringUtils;
/**
* Abstract base class for packagers.
*
* @author Phillip Webb
* @author Andy Wilkinson
* @author Stephane Nicoll
* @author Madhura Bhave
* @author Scott Frederick
* @since 2.3.0
*/
public abstract class Packager {

	// Manifest attribute names written into (or read from) the repackaged archive.
	private static final String MAIN_CLASS_ATTRIBUTE = "Main-Class";

	private static final String START_CLASS_ATTRIBUTE = "Start-Class";

	// Presence of this attribute is how isAlreadyPackaged() detects a repackaged archive.
	private static final String BOOT_VERSION_ATTRIBUTE = "Spring-Boot-Version";

	private static final String BOOT_CLASSES_ATTRIBUTE = "Spring-Boot-Classes";

	private static final String BOOT_LIB_ATTRIBUTE = "Spring-Boot-Lib";

	private static final String BOOT_CLASSPATH_INDEX_ATTRIBUTE = "Spring-Boot-Classpath-Index";

	private static final String BOOT_LAYERS_INDEX_ATTRIBUTE = "Spring-Boot-Layers-Index";

	// Magic bytes at the start of every ZIP-format file ("PK\3\4"); used by isZip().
	private static final byte[] ZIP_FILE_HEADER = new byte[] { 'P', 'K', 3, 4 };

	// If scanning for the main class takes longer than this, registered listeners are warned.
	private static final long FIND_WARNING_TIMEOUT = TimeUnit.SECONDS.toMillis(10);

	private static final String SPRING_BOOT_APPLICATION_CLASS_NAME = "org.springframework.boot.autoconfigure.SpringBootApplication";

	private final List<MainClassTimeoutWarningListener> mainClassTimeoutListeners = new ArrayList<>();

	// Explicit main class; when null it is resolved from the manifest or by scanning.
	private String mainClass;

	private final File source;

	// Optional backup location; getBackupFile() falls back to "<source>.original".
	private File backupFile;

	// Lazily resolved via getLayout()/getLayoutFactory() when not set explicitly.
	private Layout layout;

	private LayoutFactory layoutFactory;

	// Non-null only when layered packaging was requested via setLayers().
	private Layers layers;

	private LayersIndex layersIndex;

	private boolean includeRelevantJarModeJars = true;

	/**
	 * Create a new {@link Packager} instance.
	 * @param source the source archive file to package
	 */
	protected Packager(File source) {
		this(source, null);
	}

	/**
	 * Create a new {@link Packager} instance.
	 * @param source the source archive file to package
	 * @param layoutFactory the layout factory to use or {@code null}
	 * @deprecated since 2.3.10 for removal in 2.5 in favor of {@link #Packager(File)} and
	 * {@link #setLayoutFactory(LayoutFactory)}
	 */
	@Deprecated
	protected Packager(File source, LayoutFactory layoutFactory) {
		Assert.notNull(source, "Source file must not be null");
		Assert.isTrue(source.exists() && source.isFile(),
				() -> "Source must refer to an existing file, got " + source.getAbsolutePath());
		this.source = source.getAbsoluteFile();
		this.layoutFactory = layoutFactory;
	}

	/**
	 * Add a listener that will be triggered to display a warning if searching for the
	 * main class takes too long.
	 * @param listener the listener to add
	 */
	public void addMainClassTimeoutWarningListener(MainClassTimeoutWarningListener listener) {
		this.mainClassTimeoutListeners.add(listener);
	}

	/**
	 * Sets the main class that should be run. If not specified the value from the
	 * MANIFEST will be used, or if no manifest entry is found the archive will be
	 * searched for a suitable class.
	 * @param mainClass the main class name
	 */
	public void setMainClass(String mainClass) {
		this.mainClass = mainClass;
	}

	/**
	 * Sets the layout to use for the jar. Defaults to {@link Layouts#forFile(File)}.
	 * @param layout the layout
	 */
	public void setLayout(Layout layout) {
		Assert.notNull(layout, "Layout must not be null");
		this.layout = layout;
	}

	/**
	 * Sets the layout factory for the jar. The factory can be used when no specific
	 * layout is specified.
	 * @param layoutFactory the layout factory to set
	 */
	public void setLayoutFactory(LayoutFactory layoutFactory) {
		this.layoutFactory = layoutFactory;
	}

	/**
	 * Sets the layers that should be used in the jar.
	 * @param layers the jar layers
	 */
	public void setLayers(Layers layers) {
		Assert.notNull(layers, "Layers must not be null");
		this.layers = layers;
		// A fresh index is built whenever the layers change.
		this.layersIndex = new LayersIndex(layers);
	}

	/**
	 * Sets the {@link File} to use to backup the original source.
	 * @param backupFile the file to use to backup the original source
	 */
	protected void setBackupFile(File backupFile) {
		this.backupFile = backupFile;
	}

	/**
	 * Sets if jarmode jars relevant for the packaging should be automatically included.
	 * @param includeRelevantJarModeJars if relevant jars are included
	 */
	public void setIncludeRelevantJarModeJars(boolean includeRelevantJarModeJars) {
		this.includeRelevantJarModeJars = includeRelevantJarModeJars;
	}

	/**
	 * Returns whether the source archive has already been repackaged.
	 */
	protected final boolean isAlreadyPackaged() {
		return isAlreadyPackaged(this.source);
	}

	/**
	 * Returns whether the given file is an archive that has already been repackaged,
	 * detected by the presence of the Spring-Boot-Version manifest attribute.
	 * @param file the file to check
	 */
	protected final boolean isAlreadyPackaged(File file) {
		try (JarFile jarFile = new JarFile(file)) {
			Manifest manifest = jarFile.getManifest();
			return (manifest != null && manifest.getMainAttributes().getValue(BOOT_VERSION_ATTRIBUTE) != null);
		}
		catch (IOException ex) {
			throw new IllegalStateException("Error reading archive file", ex);
		}
	}

	protected final void write(JarFile sourceJar, Libraries libraries, AbstractJarWriter writer) throws IOException {
		write(sourceJar, libraries, writer, false);
	}

	protected final void write(JarFile sourceJar, Libraries libraries, AbstractJarWriter writer,
			boolean ensureReproducibleBuild) throws IOException {
		Assert.notNull(libraries, "Libraries must not be null");
		write(sourceJar, writer, new PackagedLibraries(libraries, ensureReproducibleBuild));
	}

	// Core write sequence: manifest, loader classes, transformed source entries,
	// nested libraries and (when layered) the layer index.
	private void write(JarFile sourceJar, AbstractJarWriter writer, PackagedLibraries libraries) throws IOException {
		if (isLayered()) {
			writer.useLayers(this.layers, this.layersIndex);
		}
		writer.writeManifest(buildManifest(sourceJar));
		writeLoaderClasses(writer);
		writer.writeEntries(sourceJar, getEntityTransformer(), libraries.getUnpackHandler(),
				libraries.getLibraryLookup());
		libraries.write(writer);
		if (isLayered()) {
			writeLayerIndex(writer);
		}
	}

	// Custom layouts may write their own loader classes; otherwise loader classes are
	// only needed for executable layouts.
	private void writeLoaderClasses(AbstractJarWriter writer) throws IOException {
		Layout layout = getLayout();
		if (layout instanceof CustomLoaderLayout) {
			((CustomLoaderLayout) getLayout()).writeLoadedClasses(writer);
		}
		else if (layout.isExecutable()) {
			writer.writeLoaderClasses();
		}
	}

	// Writes the layers index file; the index entry itself is also assigned a layer.
	private void writeLayerIndex(AbstractJarWriter writer) throws IOException {
		String name = this.layout.getLayersIndexFileLocation();
		if (StringUtils.hasLength(name)) {
			Layer layer = this.layers.getLayer(name);
			this.layersIndex.add(layer, name);
			writer.writeEntry(name, this.layersIndex::writeTo);
		}
	}

	// Repackaging layouts relocate application classes; other layouts keep entries as-is.
	private EntryTransformer getEntityTransformer() {
		if (getLayout() instanceof RepackagingLayout) {
			return new RepackagingEntryTransformer((RepackagingLayout) getLayout());
		}
		return EntryTransformer.NONE;
	}

	// Treats any stream that fails to open as "not a zip" rather than propagating.
	private boolean isZip(InputStreamSupplier supplier) {
		try {
			try (InputStream inputStream = supplier.openStream()) {
				return isZip(inputStream);
			}
		}
		catch (IOException ex) {
			return false;
		}
	}

	// Checks the first four bytes of the stream against the ZIP magic header.
	private boolean isZip(InputStream inputStream) throws IOException {
		for (byte magicByte : ZIP_FILE_HEADER) {
			if (inputStream.read() != magicByte) {
				return false;
			}
		}
		return true;
	}

	// Builds the final manifest: copy (or create) the source manifest, then add
	// Main-Class/Start-Class and the Spring-Boot-* attributes.
	private Manifest buildManifest(JarFile source) throws IOException {
		Manifest manifest = createInitialManifest(source);
		addMainAndStartAttributes(source, manifest);
		addBootAttributes(manifest.getMainAttributes());
		return manifest;
	}

	private Manifest createInitialManifest(JarFile source) throws IOException {
		if (source.getManifest() != null) {
			return new Manifest(source.getManifest());
		}
		Manifest manifest = new Manifest();
		manifest.getMainAttributes().putValue("Manifest-Version", "1.0");
		return manifest;
	}

	// With a launcher, the launcher becomes Main-Class and the application main class is
	// recorded as Start-Class; without one the application class is Main-Class directly.
	private void addMainAndStartAttributes(JarFile source, Manifest manifest) throws IOException {
		String mainClass = getMainClass(source, manifest);
		String launcherClass = getLayout().getLauncherClassName();
		if (launcherClass != null) {
			Assert.state(mainClass != null, "Unable to find main class");
			manifest.getMainAttributes().putValue(MAIN_CLASS_ATTRIBUTE, launcherClass);
			manifest.getMainAttributes().putValue(START_CLASS_ATTRIBUTE, mainClass);
		}
		else if (mainClass != null) {
			manifest.getMainAttributes().putValue(MAIN_CLASS_ATTRIBUTE, mainClass);
		}
	}

	// Resolution order: explicit setMainClass() value, existing manifest attribute,
	// then a (possibly slow) archive scan.
	private String getMainClass(JarFile source, Manifest manifest) throws IOException {
		if (this.mainClass != null) {
			return this.mainClass;
		}
		String attributeValue = manifest.getMainAttributes().getValue(MAIN_CLASS_ATTRIBUTE);
		if (attributeValue != null) {
			return attributeValue;
		}
		return findMainMethodWithTimeoutWarning(source);
	}

	// Notifies listeners (after the fact) when the scan exceeded FIND_WARNING_TIMEOUT.
	private String findMainMethodWithTimeoutWarning(JarFile source) throws IOException {
		long startTime = System.currentTimeMillis();
		String mainMethod = findMainMethod(source);
		long duration = System.currentTimeMillis() - startTime;
		if (duration > FIND_WARNING_TIMEOUT) {
			for (MainClassTimeoutWarningListener listener : this.mainClassTimeoutListeners) {
				listener.handleTimeoutWarning(duration, mainMethod);
			}
		}
		return mainMethod;
	}

	// Scans the classes location, preferring a class annotated @SpringBootApplication.
	protected String findMainMethod(JarFile source) throws IOException {
		return MainClassFinder.findSingleMainClass(source, getLayout().getClassesLocation(),
				SPRING_BOOT_APPLICATION_CLASS_NAME);
	}

	/**
	 * Return the {@link File} to use to backup the original source.
	 * @return the file to use to backup the original source
	 */
	public final File getBackupFile() {
		if (this.backupFile != null) {
			return this.backupFile;
		}
		return new File(this.source.getParentFile(), this.source.getName() + ".original");
	}

	protected final File getSource() {
		return this.source;
	}

	// Lazily detects the layout from the source file via the layout factory.
	protected final Layout getLayout() {
		if (this.layout == null) {
			Layout createdLayout = getLayoutFactory().getLayout(this.source);
			Assert.state(createdLayout != null, "Unable to detect layout");
			this.layout = createdLayout;
		}
		return this.layout;
	}

	// Uses the explicit factory, then a single spring.factories entry, then the default.
	private LayoutFactory getLayoutFactory() {
		if (this.layoutFactory != null) {
			return this.layoutFactory;
		}
		List<LayoutFactory> factories = SpringFactoriesLoader.loadFactories(LayoutFactory.class, null);
		if (factories.isEmpty()) {
			return new DefaultLayoutFactory();
		}
		Assert.state(factories.size() == 1, "No unique LayoutFactory found");
		return factories.get(0);
	}

	// Records the Spring Boot version (from this class's package metadata) plus the
	// layout-specific location attributes.
	private void addBootAttributes(Attributes attributes) {
		attributes.putValue(BOOT_VERSION_ATTRIBUTE, getClass().getPackage().getImplementationVersion());
		addBootAttributesForLayout(attributes);
	}

	private void addBootAttributesForLayout(Attributes attributes) {
		Layout layout = getLayout();
		if (layout instanceof RepackagingLayout) {
			attributes.putValue(BOOT_CLASSES_ATTRIBUTE, ((RepackagingLayout) layout).getRepackagedClassesLocation());
		}
		else {
			attributes.putValue(BOOT_CLASSES_ATTRIBUTE, layout.getClassesLocation());
		}
		putIfHasLength(attributes, BOOT_LIB_ATTRIBUTE, getLayout().getLibraryLocation("", LibraryScope.COMPILE));
		putIfHasLength(attributes, BOOT_CLASSPATH_INDEX_ATTRIBUTE, layout.getClasspathIndexFileLocation());
		if (isLayered()) {
			putIfHasLength(attributes, BOOT_LAYERS_INDEX_ATTRIBUTE, layout.getLayersIndexFileLocation());
		}
	}

	private void putIfHasLength(Attributes attributes, String name, String value) {
		if (StringUtils.hasLength(value)) {
			attributes.putValue(name, value);
		}
	}

	private boolean isLayered() {
		return this.layers != null;
	}

	/**
	 * Callback interface used to present a warning when finding the main class takes too
	 * long.
	 */
	@FunctionalInterface
	public interface MainClassTimeoutWarningListener {

		/**
		 * Handle a timeout warning.
		 * @param duration the amount of time it took to find the main method
		 * @param mainMethod the main method that was actually found
		 */
		void handleTimeoutWarning(long duration, String mainMethod);

	}

	/**
	 * An {@code EntryTransformer} that renames entries by applying a prefix.
	 */
	private static final class RepackagingEntryTransformer implements EntryTransformer {

		private final RepackagingLayout layout;

		private RepackagingEntryTransformer(RepackagingLayout layout) {
			this.layout = layout;
		}

		@Override
		public JarArchiveEntry transform(JarArchiveEntry entry) {
			// The class index is dropped entirely (returning null skips the entry).
			if (entry.getName().equals("META-INF/INDEX.LIST")) {
				return null;
			}
			if (!isTransformable(entry)) {
				return entry;
			}
			// Rebuild the entry under the relocated name, copying all metadata that is
			// present on the original.
			String transformedName = transformName(entry.getName());
			JarArchiveEntry transformedEntry = new JarArchiveEntry(transformedName);
			transformedEntry.setTime(entry.getTime());
			transformedEntry.setSize(entry.getSize());
			transformedEntry.setMethod(entry.getMethod());
			if (entry.getComment() != null) {
				transformedEntry.setComment(entry.getComment());
			}
			transformedEntry.setCompressedSize(entry.getCompressedSize());
			transformedEntry.setCrc(entry.getCrc());
			if (entry.getCreationTime() != null) {
				transformedEntry.setCreationTime(entry.getCreationTime());
			}
			if (entry.getExtra() != null) {
				transformedEntry.setExtra(entry.getExtra());
			}
			if (entry.getLastAccessTime() != null) {
				transformedEntry.setLastAccessTime(entry.getLastAccessTime());
			}
			if (entry.getLastModifiedTime() != null) {
				transformedEntry.setLastModifiedTime(entry.getLastModifiedTime());
			}
			return transformedEntry;
		}

		private String transformName(String name) {
			return this.layout.getRepackagedClassesLocation() + name;
		}

		// Most META-INF content, already-relocated BOOT-INF content and module-info stay
		// in place; everything else is moved under the repackaged classes location.
		private boolean isTransformable(JarArchiveEntry entry) {
			String name = entry.getName();
			if (name.startsWith("META-INF/")) {
				return name.equals("META-INF/aop.xml") || name.endsWith(".kotlin_module");
			}
			return !name.startsWith("BOOT-INF/") && !name.equals("module-info.class");
		}

	}

	/**
	 * Libraries that should be packaged into the archive.
	 */
	private final class PackagedLibraries {

		// Keyed by in-archive path; TreeMap gives a stable order for reproducible builds.
		private final Map<String, Library> libraries;

		private final UnpackHandler unpackHandler;

		private final Function<JarEntry, Library> libraryLookup;

		PackagedLibraries(Libraries libraries, boolean ensureReproducibleBuild) throws IOException {
			this.libraries = (ensureReproducibleBuild) ? new TreeMap<>() : new LinkedHashMap<>();
			libraries.doWithLibraries((library) -> {
				// Only genuine zip/jar files are nested; anything else is ignored.
				if (isZip(library::openStream)) {
					addLibrary(library);
				}
			});
			if (isLayered() && Packager.this.includeRelevantJarModeJars) {
				addLibrary(JarModeLibrary.LAYER_TOOLS);
			}
			this.unpackHandler = new PackagedLibrariesUnpackHandler();
			this.libraryLookup = this::lookup;
		}

		// Registers the library under its layout-determined path, rejecting duplicates.
		private void addLibrary(Library library) {
			String location = getLayout().getLibraryLocation(library.getName(), library.getScope());
			if (location != null) {
				String path = location + library.getName();
				Library existing = this.libraries.putIfAbsent(path, library);
				Assert.state(existing == null, () -> "Duplicate library " + library.getName());
			}
		}

		private Library lookup(JarEntry entry) {
			return this.libraries.get(entry.getName());
		}

		UnpackHandler getUnpackHandler() {
			return this.unpackHandler;
		}

		Function<JarEntry, Library> getLibraryLookup() {
			return this.libraryLookup;
		}

		// Writes each included library into the archive and, for repackaging layouts,
		// records the written paths in the classpath index.
		void write(AbstractJarWriter writer) throws IOException {
			List<String> writtenPaths = new ArrayList<>();
			for (Entry<String, Library> entry : this.libraries.entrySet()) {
				String path = entry.getKey();
				Library library = entry.getValue();
				if (library.isIncluded()) {
					String location = path.substring(0, path.lastIndexOf('/') + 1);
					writer.writeNestedLibrary(location, library);
					writtenPaths.add(path);
				}
			}
			if (getLayout() instanceof RepackagingLayout) {
				writeClasspathIndex(writtenPaths, (RepackagingLayout) getLayout(), writer);
			}
		}

		// Index entries are quoted YAML list items ("- \"path\"").
		private void writeClasspathIndex(List<String> paths, RepackagingLayout layout, AbstractJarWriter writer)
				throws IOException {
			List<String> names = paths.stream().map((path) -> "- \"" + path + "\"").collect(Collectors.toList());
			writer.writeIndexFile(layout.getClasspathIndexFileLocation(), names);
		}

		/**
		 * An {@link UnpackHandler} that determines that an entry needs to be unpacked if
		 * a library that requires unpacking has a matching entry name.
		 */
		private class PackagedLibrariesUnpackHandler implements UnpackHandler {

			@Override
			public boolean requiresUnpack(String name) {
				Library library = PackagedLibraries.this.libraries.get(name);
				return library != null && library.isUnpackRequired();
			}

			@Override
			public String sha1Hash(String name) throws IOException {
				Library library = PackagedLibraries.this.libraries.get(name);
				Assert.notNull(library, () -> "No library found for entry name '" + name + "'");
				return Digest.sha1(library::openStream);
			}

		}

	}

}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.RandomAccessFile;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.Arrays;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Properties;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import javax.tools.JavaCompiler;
import javax.tools.JavaCompiler.CompilationTask;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.Job;
import org.apache.pig.ExecType;
import org.apache.pig.FuncSpec;
import org.apache.pig.LoadFunc;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.JobControlCompiler;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceOper;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.plans.MROperPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POLoad;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileSpec;
import org.apache.pig.impl.plan.OperatorKey;
import org.junit.Assert;
import org.junit.Test;
/**
 * Tests for {@link JobControlCompiler}: distributed-cache handling of registered
 * jars and reducer-count estimation.
 */
public class TestJobControlCompiler {

    private static final Configuration CONF = new Configuration();

    /**
     * specifically tests that REGISTERED jars get added to distributed cache instead of merged into
     * the job jar
     * @throws Exception
     */
    @Test
    public void testJarAddedToDistributedCache() throws Exception {
        // creating a jar with a UDF *not* in the current classloader
        File tmpFile = File.createTempFile("Some_", ".jar");
        tmpFile.deleteOnExit();
        String className = createTestJar(tmpFile);
        final String testUDFFileName = className+".class";
        // creating a hadoop-site.xml and making it visible to Pig
        // making sure it is at the same location as for other tests to not pick up a
        // conf from a previous test
        File conf_dir = new File("build/classes");
        File hadoopSite = new File(conf_dir, "hadoop-site.xml");
        hadoopSite.deleteOnExit();
        FileWriter fw = new FileWriter(hadoopSite);
        try {
            fw.write("<?xml version=\"1.0\"?>\n");
            fw.write("<?xml-stylesheet type=\"text/xsl\" href=\"nutch-conf.xsl\"?>\n");
            fw.write("<configuration>\n");
            fw.write("</configuration>\n");
        } finally {
            fw.close();
        }
        // making hadoop-site.xml visible to Pig as it REQUIRES!!! one when running in mapred mode
        Thread.currentThread().setContextClassLoader(
                new URLClassLoader(new URL[] {conf_dir.toURI().toURL()}));
        // JobControlCompiler setup
        PigContext pigContext = new PigContext(ExecType.MAPREDUCE, new Properties());
        pigContext.connect();
        pigContext.addJar(tmpFile.getAbsolutePath());
        JobControlCompiler jobControlCompiler = new JobControlCompiler(pigContext, CONF);
        MROperPlan plan = new MROperPlan();
        MapReduceOper mro = new MapReduceOper(new OperatorKey());
        mro.UDFs = new HashSet<String>();
        mro.UDFs.add(className+"()");
        plan.add(mro);
        // compiling the job
        JobControl jobControl = jobControlCompiler.compile(plan , "test");
        JobConf jobConf = jobControl.getWaitingJobs().get(0).getJobConf();
        // verifying the jar gets on distributed cache
        Path[] fileClassPaths = DistributedCache.getFileClassPaths(jobConf);
        Assert.assertEquals("size 1 for "+Arrays.toString(fileClassPaths), 1, fileClassPaths.length);
        Path distributedCachePath = fileClassPaths[0];
        Assert.assertEquals("ends with jar name: "+distributedCachePath, distributedCachePath.getName(), tmpFile.getName());
        // hadoop bug requires path to not contain hdfs://hotname in front
        Assert.assertTrue("starts with /: "+distributedCachePath,
                distributedCachePath.toString().startsWith("/"));
        Assert.assertTrue("jar pushed to distributed cache should contain testUDF",
                jarContainsFileNamed(new File(fileClassPaths[0].toUri().getPath()), testUDFFileName));
        // verifying the job jar does not contain the UDF
        // jobConf.writeXml(System.out);
        File submitJarFile = new File(jobConf.get("mapred.jar"));
        Assert.assertFalse("the mapred.jar should *not* contain the testUDF", jarContainsFileNamed(submitJarFile, testUDFFileName));
    }

    @Test
    public void testEstimateNumberOfReducers() throws Exception {
        // Estimates are based on input size: one reducer per GB (rounded up), so the
        // three cases straddle the 2GB boundary.
        Assert.assertEquals(2, JobControlCompiler.estimateNumberOfReducers(
                new Job(CONF), createMockPOLoadMapReduceOper(2L * 1000 * 1000 * 999)));
        Assert.assertEquals(2, JobControlCompiler.estimateNumberOfReducers(
                new Job(CONF), createMockPOLoadMapReduceOper(2L * 1000 * 1000 * 1000)));
        Assert.assertEquals(3, JobControlCompiler.estimateNumberOfReducers(
                new Job(CONF), createMockPOLoadMapReduceOper(2L * 1000 * 1000 * 1001)));
    }

    /**
     * Builds a {@link MapReduceOper} whose map plan loads a file of the given size.
     */
    private static MapReduceOper createMockPOLoadMapReduceOper(long size) throws Exception {
        MapReduceOper mro = new MapReduceOper(new OperatorKey());
        mro.mapPlan.add(createPOLoadWithSize(size, new PigStorage()));
        return mro;
    }

    /**
     * Creates a {@link POLoad} over a sparse temp file of exactly {@code size} bytes.
     * @param size the length to give the backing file
     * @param loadFunc the load function to use
     */
    public static POLoad createPOLoadWithSize(long size, LoadFunc loadFunc) throws Exception {
        File file = File.createTempFile("tempFile", ".tmp");
        file.deleteOnExit();
        // setLength creates a sparse file, so even huge sizes are cheap to make
        RandomAccessFile f = new RandomAccessFile(file, "rw");
        try {
            f.setLength(size);
        } finally {
            f.close();
        }
        loadFunc.setLocation(file.getAbsolutePath(), new org.apache.hadoop.mapreduce.Job(CONF));
        FuncSpec funcSpec = new FuncSpec(loadFunc.getClass().getCanonicalName());
        POLoad poLoad = new POLoad(new OperatorKey(), loadFunc);
        poLoad.setLFile(new FileSpec(file.getAbsolutePath(), funcSpec));
        poLoad.setPc(new PigContext());
        poLoad.setUp();
        return poLoad;
    }

    /**
     * checks if the given file name is in the jar
     * @param jarFile the jar to check
     * @param name the name to find (full path in the jar)
     * @return true if the name was found
     * @throws IOException
     */
    private boolean jarContainsFileNamed(File jarFile, String name) throws IOException {
        // JarFile holds an open file handle; close it even on early return (the
        // original implementation leaked it).
        JarFile jar = new JarFile(jarFile);
        try {
            Enumeration<JarEntry> entries = jar.entries();
            while (entries.hasMoreElements()) {
                JarEntry entry = entries.nextElement();
                if (entry.getName().equals(name)) {
                    return true;
                }
            }
            return false;
        } finally {
            jar.close();
        }
    }

    /**
     * creates a jar containing a UDF not in the current classloader
     * @param jarFile the jar to create
     * @return the name of the class created (in the default package)
     * @throws IOException
     * @throws FileNotFoundException
     */
    private String createTestJar(File jarFile) throws IOException, FileNotFoundException {
        // creating the source .java file
        File javaFile = File.createTempFile("TestUDF", ".java");
        javaFile.deleteOnExit();
        String className = javaFile.getName().substring(0, javaFile.getName().lastIndexOf('.'));
        FileWriter fw = new FileWriter(javaFile);
        try {
            fw.write("import org.apache.pig.EvalFunc;\n");
            fw.write("import org.apache.pig.data.Tuple;\n");
            fw.write("import java.io.IOException;\n");
            fw.write("public class "+className+" extends EvalFunc<String> {\n");
            fw.write("  public String exec(Tuple input) throws IOException {\n");
            fw.write("    return \"test\";\n");
            fw.write("  }\n");
            fw.write("}\n");
        } finally {
            fw.close();
        }
        // compiling it
        JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
        StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
        try {
            Iterable<? extends JavaFileObject> compilationUnits1 = fileManager.getJavaFileObjects(javaFile);
            CompilationTask task = compiler.getTask(null, fileManager, null, null, null, compilationUnits1);
            task.call();
        } finally {
            // the file manager is Closeable; release its handles (previously leaked)
            fileManager.close();
        }
        // here is the compiled file
        File classFile = new File(javaFile.getParentFile(), className+".class");
        Assert.assertTrue(classFile.exists());
        // putting it in the jar
        JarOutputStream jos = new JarOutputStream(new FileOutputStream(jarFile));
        try {
            jos.putNextEntry(new ZipEntry(classFile.getName()));
            try {
                InputStream testClassContentIS = new FileInputStream(classFile);
                try {
                    byte[] buffer = new byte[64000];
                    int n;
                    while ((n = testClassContentIS.read(buffer)) != -1) {
                        jos.write(buffer, 0, n);
                    }
                } finally {
                    testClassContentIS.close();
                }
            } finally {
                jos.closeEntry();
            }
        } finally {
            jos.close();
        }
        return className;
    }
}
| |
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.block.stream;
import alluxio.Constants;
import alluxio.EmbeddedChannels;
import alluxio.client.file.FileSystemContext;
import alluxio.client.file.options.InStreamOptions;
import alluxio.network.protocol.RPCProtoMessage;
import alluxio.network.protocol.databuffer.DataBuffer;
import alluxio.network.protocol.databuffer.DataNettyBufferV2;
import alluxio.proto.dataserver.Protocol;
import alluxio.util.CommonUtils;
import alluxio.util.WaitForOptions;
import alluxio.util.io.BufferUtils;
import alluxio.util.proto.ProtoMessage;
import alluxio.wire.WorkerNetAddress;
import com.google.common.base.Function;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
@RunWith(PowerMockRunner.class)
@PrepareForTest({FileSystemContext.class, WorkerNetAddress.class})
public final class NettyPacketReaderTest {
private static final int PACKET_SIZE = 1024;
private static final ExecutorService EXECUTOR = Executors.newFixedThreadPool(4);
private static final Random RANDOM = new Random();
private static final long BLOCK_ID = 1L;
private FileSystemContext mContext;
private WorkerNetAddress mAddress;
private EmbeddedChannels.EmbeddedEmptyCtorChannel mChannel;
private NettyPacketReader.Factory mFactory;
/**
 * Wires mocks and an embedded Netty channel so tests run without real network I/O:
 * the mocked {@code FileSystemContext} hands out the embedded channel, and channel
 * release is stubbed to a no-op.
 */
@Before
public void before() throws Exception {
  mContext = PowerMockito.mock(FileSystemContext.class);
  mAddress = Mockito.mock(WorkerNetAddress.class);
  // Base read request for BLOCK_ID; offset/length are set per test via the factory.
  Protocol.ReadRequest readRequest =
      Protocol.ReadRequest.newBuilder().setBlockId(BLOCK_ID).setPacketSize(PACKET_SIZE).build();
  mFactory =
      new NettyPacketReader.Factory(mContext, mAddress, readRequest, InStreamOptions.defaults());
  mChannel = new EmbeddedChannels.EmbeddedEmptyCtorChannel();
  PowerMockito.when(mContext.acquireNettyChannel(mAddress)).thenReturn(mChannel);
  PowerMockito.doNothing().when(mContext).releaseNettyChannel(mAddress, mChannel);
}
/**
 * Closes the embedded channel after each test.
 */
@After
public void after() throws Exception {
  mChannel.close();
}
/**
 * Reads an empty file: the first {@code readPacket()} must return {@code null} (EOF),
 * and exactly one (non-cancel) read request must have been sent.
 */
@Test
public void readEmptyFile() throws Exception {
  try (PacketReader reader = create(0, 10)) {
    // length 0 means only the EOF response is written to the channel
    sendReadResponses(mChannel, 0, 0, 0);
    Assert.assertEquals(null, reader.readPacket());
  }
  validateReadRequestSent(mChannel, 0, 10, false, PACKET_SIZE);
}
/**
 * Reads all contents in a file and checks the checksum of everything read against the
 * checksum of everything sent.
 */
@Test(timeout = 1000 * 60)
public void readFullFile() throws Exception {
  // deliberately not a multiple of PACKET_SIZE so the final packet is partial
  long length = PACKET_SIZE * 1024 + PACKET_SIZE / 3;
  try (PacketReader reader = create(0, length)) {
    // responses are produced asynchronously while the reader consumes them
    Future<Long> checksum = sendReadResponses(mChannel, length, 0, length - 1);
    long checksumActual = checkPackets(reader, 0, length);
    Assert.assertEquals(checksum.get().longValue(), checksumActual);
  }
  validateReadRequestSent(mChannel, 0, length, false, PACKET_SIZE);
}
/**
 * Reads part of a file and checks the checksum of the part that is read. Because the
 * reader is closed before all data is consumed, a cancel request is expected after the
 * initial read request.
 */
@Test(timeout = 1000 * 60)
public void readPartialFile() throws Exception {
  long length = PACKET_SIZE * 1024 + PACKET_SIZE / 3;
  long offset = 10;
  long checksumStart = 100;
  // only a third of the data is consumed before the reader closes
  long bytesToRead = length / 3;
  try (PacketReader reader = create(offset, length)) {
    Future<Long> checksum = sendReadResponses(mChannel, length, checksumStart, bytesToRead - 1);
    long checksumActual = checkPackets(reader, checksumStart, bytesToRead);
    Assert.assertEquals(checksum.get().longValue(), checksumActual);
  }
  // first the normal read request, then the cancel triggered by early close
  validateReadRequestSent(mChannel, offset, length, false, PACKET_SIZE);
  validateReadRequestSent(mChannel, offset, length, true, PACKET_SIZE);
}
/**
 * Reads a file with unknown length: the request is made with {@code Long.MAX_VALUE} as
 * the length while the responder sends a finite amount of data. Early close again
 * implies a trailing cancel request.
 */
@Test(timeout = 1000 * 60)
public void fileLengthUnknown() throws Exception {
  long lengthActual = PACKET_SIZE * 1024 + PACKET_SIZE / 3;
  long checksumStart = 100;
  long bytesToRead = lengthActual / 3;
  try (PacketReader reader = create(0, Long.MAX_VALUE)) {
    Future<Long> checksum =
        sendReadResponses(mChannel, lengthActual, checksumStart, bytesToRead - 1);
    long checksumActual = checkPackets(reader, checksumStart, bytesToRead);
    Assert.assertEquals(checksum.get().longValue(), checksumActual);
  }
  validateReadRequestSent(mChannel, 0, Long.MAX_VALUE, false, PACKET_SIZE);
  validateReadRequestSent(mChannel, 0, Long.MAX_VALUE, true, PACKET_SIZE);
}
/**
 * Creates a {@link PacketReader} via the factory and completes the embedded channel's
 * deferred construction so it can carry traffic.
 *
 * @param offset the offset
 * @param length the length
 * @return the packet reader instance
 */
private PacketReader create(long offset, long length) throws Exception {
  PacketReader reader = mFactory.create(offset, length);
  mChannel.finishChannelCreation();
  return reader;
}
/**
 * Drains packets from the given {@link PacketReader}, summing the byte values (via
 * {@link BufferUtils#byteToInt}) of every byte at position {@code checksumStart} or
 * later. Returns as soon as {@code bytesToRead} bytes have been seen, or when the
 * reader signals EOF with a {@code null} packet. Every packet is released, even on
 * early return.
 *
 * @param reader the packet reader
 * @param checksumStart the start position to calculate the checksum
 * @param bytesToRead bytes to read
 * @return the checksum of the data read starting from checksumStart
 */
private long checkPackets(PacketReader reader, long checksumStart, long bytesToRead)
    throws Exception {
  long bytesSeen = 0;
  long sum = 0;
  for (DataBuffer packet = reader.readPacket(); packet != null; packet = reader.readPacket()) {
    try {
      Assert.assertTrue(packet instanceof DataNettyBufferV2);
      ByteBuf nettyBuf = (ByteBuf) packet.getNettyOutput();
      byte[] payload = new byte[nettyBuf.readableBytes()];
      nettyBuf.readBytes(payload);
      for (byte b : payload) {
        if (bytesSeen >= checksumStart) {
          sum += BufferUtils.byteToInt(b);
        }
        bytesSeen++;
        if (bytesSeen >= bytesToRead) {
          // the finally block still releases the current packet
          return sum;
        }
      }
    } finally {
      packet.release();
    }
  }
  return sum;
}
/**
 * Validates the next read request written to the channel: polls the outbound queue
 * (with a one-minute timeout), then asserts the request is an {@link RPCProtoMessage}
 * with no payload whose fields match the expected block id, offset, length, cancel
 * flag and packet size.
 *
 * @param channel the channel
 * @param offset the offset
 * @param length the length
 * @param cancel whether it is a cancel request
 * @param packetSize the packet size
 */
private void validateReadRequestSent(final EmbeddedChannel channel, long offset, long length,
    boolean cancel, long packetSize) {
  // readOutbound() returns null until the reader has flushed the request, so wait
  Object request = CommonUtils.waitForResult("read request", new Function<Void, Object>() {
    @Override
    public Object apply(Void v) {
      return channel.readOutbound();
    }
  }, WaitForOptions.defaults().setTimeout(Constants.MINUTE_MS));
  Assert.assertTrue(request != null);
  Assert.assertTrue(request instanceof RPCProtoMessage);
  Assert.assertEquals(null, ((RPCProtoMessage) request).getPayloadDataBuffer());
  Protocol.ReadRequest readRequest = ((RPCProtoMessage) request).getMessage().asReadRequest();
  Assert.assertEquals(BLOCK_ID, readRequest.getBlockId());
  Assert.assertEquals(offset, readRequest.getOffset());
  Assert.assertEquals(length, readRequest.getLength());
  Assert.assertEquals(cancel, readRequest.getCancel());
  Assert.assertEquals(packetSize, readRequest.getPacketSize());
}
/**
* Sends read responses to the channel.
*
* @param channel the channel
* @param length the length
* @param start the start position to calculate the checksum
* @param end the end position to calculate the checksum
* @return the checksum
*/
private Future<Long> sendReadResponses(final EmbeddedChannel channel, final long length,
    final long start, final long end) {
  // UFS read heartbeat message; the reader under test must tolerate these
  // arriving before any real data.
  ProtoMessage heartbeat = new ProtoMessage(
      Protocol.ReadResponse.newBuilder().setType(Protocol.ReadResponse.Type.UFS_READ_HEARTBEAT)
          .build());
  // Send some heartbeats first.
  for (int i = 0; i < 3; ++i) {
    channel.writeInbound(new RPCProtoMessage(heartbeat));
  }
  // Produce the data asynchronously so the test thread can read concurrently.
  return EXECUTOR.submit(new Callable<Long>() {
    @Override
    public Long call() {
      long checksum = 0;
      long pos = 0;
      long remaining = length;
      while (remaining > 0) {
        // Feed the stream in packet-sized chunks of random bytes.
        int bytesToSend = (int) Math.min(remaining, PACKET_SIZE);
        byte[] data = new byte[bytesToSend];
        RANDOM.nextBytes(data);
        ByteBuf buf = Unpooled.wrappedBuffer(data);
        RPCProtoMessage message = RPCProtoMessage.createOkResponse(new DataNettyBufferV2(buf));
        channel.writeInbound(message);
        remaining -= bytesToSend;
        // Checksum covers the inclusive byte range [start, end].
        for (int i = 0; i < data.length; i++) {
          if (pos >= start && pos <= end) {
            checksum += BufferUtils.byteToInt(data[i]);
          }
          pos++;
        }
      }
      // send EOF.
      channel.writeInbound(RPCProtoMessage.createOkResponse(null));
      return checksum;
    }
  });
}
}
| |
/*
* This file is part of Jkop
* Copyright (c) 2016 Job and Esther Technologies, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package eq.gui.sysdep.android;
import eq.gui.*;
public class AndroidTextLayout extends eq.api.Object implements TextLayout, Size
{
  // Text being laid out; never null after create() (empty string is substituted).
  private eq.api.String str = null;
  // Android layout used to draw the filled text.
  private android.text.StaticLayout layout = null;
  // Optional stroke-only layout drawn to produce an outline around the text.
  private android.text.StaticLayout outline_layout = null;
  private TextProperties props = null;
  private int dpi = 0;

  // Converts a framework Color (components in 0.0..1.0) to a packed Android
  // ARGB int; null maps to fully transparent black (0).
  private static int to_android_color(Color c) {
    if(c == null) {
      return(0);
    }
    return(android.graphics.Color.argb((int)(c.get_a() * 255), (int)(c.get_r() * 255), (int)(c.get_g() * 255), (int)(c.get_b() * 255)));
  }

  // Builds a text layout for the given properties at the given screen density.
  public static AndroidTextLayout create(TextProperties props, int dpi) {
    AndroidTextLayout v = new AndroidTextLayout();
    v.dpi = dpi;
    v.str = props.get_text();
    if(v.str == null) {
      v.str = eq.api.String.Static.for_strptr("");
    }
    android.text.TextPaint tp = new android.text.TextPaint();
    tp.setAntiAlias(true);
    tp.setColor(to_android_color(props.get_color()));
    font_to_paint(props.get_font(), tp, dpi);
    // Alignment codes: 0 = normal/left (default), 1 = center, 2 = opposite.
    android.text.Layout.Alignment al = android.text.Layout.Alignment.ALIGN_NORMAL;
    if(props.get_alignment() == 1) {
      al = android.text.Layout.Alignment.ALIGN_CENTER;
    }
    if(props.get_alignment() == 2) {
      al = android.text.Layout.Alignment.ALIGN_OPPOSITE;
    }
    java.lang.String sp = v.str.to_strptr();
    int wrapwidth = props.get_wrap_width();
    if(wrapwidth < 1) {
      // No wrap width requested: size the layout to the text itself
      // (+1 pixel to avoid clipping from float-to-int rounding).
      wrapwidth = (int)android.text.Layout.getDesiredWidth(sp, tp) + 1;
    }
    v.layout = new android.text.StaticLayout(sp, tp, wrapwidth, al, (float)1.0, (float)0.0, false);
    Color outlinecolor = props.get_outline_color();
    // NOTE(review): outline width is hard-coded to 3 instead of reading
    // props.get_outline_width() (see commented-out code) -- confirm intended.
    int outlinewidth = 3; //(int)props.get_outline_width();
    if(outlinecolor != null && outlinewidth > 0) {
      // Second, stroke-style paint/layout pair used to draw the outline.
      android.text.TextPaint tpo = new android.text.TextPaint();
      tpo.setAntiAlias(true);
      font_to_paint(props.get_font(), tpo, dpi);
      tpo.setColor(to_android_color(outlinecolor));
      tpo.setStrokeWidth(outlinewidth);
      tpo.setStyle(android.graphics.Paint.Style.STROKE);
      v.outline_layout = new android.text.StaticLayout(sp, tpo, wrapwidth, al, (float)1.0, (float)0.0, false);
    }
    v.props = props;
    return(v);
  }

  // Cache of typefaces loaded from the app's assets, keyed by asset path.
  // NOTE(review): lazily initialized without synchronization -- presumably only
  // touched from the UI thread; confirm before using from background threads.
  private static java.util.Hashtable<String, android.graphics.Typeface> typefacecache = null;

  // Loads a typeface from the given asset path, caching it for reuse.
  // Returns null if the asset cannot be loaded.
  static android.graphics.Typeface get_cached_typeface(android.content.Context c, String assetPath) {
    if(typefacecache == null) {
      typefacecache = new java.util.Hashtable<String, android.graphics.Typeface>();
    }
    if(typefacecache.containsKey(assetPath) == false) {
      try {
        android.graphics.Typeface t = android.graphics.Typeface.createFromAsset(c.getAssets(), assetPath);
        typefacecache.put(assetPath, t);
      }
      catch(Exception e) {
        e.printStackTrace();
        return null;
      }
    }
    return(typefacecache.get(assetPath));
  }

  // Maps a framework Font to an Android Typeface. A font name containing a dot
  // is treated as an asset file path (e.g. "myfont.ttf"); otherwise it is
  // resolved as a system font family name.
  public static android.graphics.Typeface font_to_typeface(Font font) {
    String fontname = null;
    int style = android.graphics.Typeface.NORMAL;
    if(font != null) {
      if(font.is_bold() && font.is_italic()) {
        style = android.graphics.Typeface.BOLD_ITALIC;
      }
      else if(font.is_bold()) {
        style = android.graphics.Typeface.BOLD;
      }
      else if(font.is_italic()) {
        style = android.graphics.Typeface.ITALIC;
      }
      if(font.get_name() != null) {
        fontname = font.get_name().to_strptr();
      }
    }
    if(fontname == null) {
      fontname = "";
    }
    android.graphics.Typeface typeface;
    if(fontname.indexOf('.') < 0) {
      typeface = android.graphics.Typeface.create(fontname, style);
    }
    else if(eq.api.Android.context != null) {
      typeface = get_cached_typeface(eq.api.Android.context, fontname);
    }
    else {
      // Asset font requested but no context available; fall back to a default.
      typeface = android.graphics.Typeface.create("Arial", style);
    }
    return(typeface);
  }

  // Applies the font's typeface and pixel size to the given paint.
  private static void font_to_paint(Font font, android.text.TextPaint pt, int dpi) {
    if(font == null) {
      return;
    }
    if(pt != null) {
      String fontname = null;
      eq.api.String fnn = font.get_name();
      if(fnn != null) {
        fontname = fnn.to_strptr();
      }
      pt.setTypeface(font_to_typeface(font));
      pt.setTextSize((float)Length.Static.to_pixels(font.get_size(), dpi));
      if(fontname != null && (fontname.contains(".ttf") || fontname.contains(".otf"))) {
        // Asset fonts carry no italic variant; fake the slant with a skew.
        if(font.is_italic()) {
          pt.setTextSkewX(-0.25f);
        }
      }
    }
  }

  public TextProperties get_text_properties() {
    return(props);
  }

  public eq.api.String get_text() {
    return(str);
  }

  // Width of the laid-out text in pixels (0 if no layout was created).
  public double get_width() {
    int v = 0;
    if(layout != null) {
      v = layout.getWidth();
    }
    return((double)v);
  }

  // Height of the laid-out text in pixels (0 if no layout was created).
  public double get_height() {
    int v = 0;
    if(layout != null) {
      v = layout.getHeight();
    }
    return((double)v);
  }

  // Returns the bounding rectangle of the text cursor at the given character
  // index, computed from the layout's cursor path.
  public Rectangle get_cursor_position(int index) {
    int x=0, y=0, w=0, h=0;
    if(layout != null) {
      android.graphics.Path mypath = new android.graphics.Path();
      layout.getCursorPath(index, mypath, str.to_strptr());
      android.graphics.RectF rf = new android.graphics.RectF();
      mypath.computeBounds(rf, true);
      x = (int)rf.left;
      y = (int)rf.top;
      w = (int)rf.right - (int)rf.left + 1;
      h = (int)rf.bottom - (int)rf.top + 1;
    }
    return(Rectangle.Static.instance(x,y,w,h));
  }

  // Maps an x coordinate to the nearest character index by measuring each
  // character's width individually (y is ignored).
  // NOTE(review): builds a StaticLayout per character and ignores kerning and
  // line wrapping, so this is only an approximation for multi-line text.
  public int xy_to_index(double x, double y) {
    java.lang.String txt = str.to_strptr();
    int w = 0;
    android.text.TextPaint tp = new android.text.TextPaint();
    font_to_paint(props.get_font(), tp, dpi);
    for(int i = 0; i < txt.length(); i++) {
      java.lang.String ss = txt.substring(i, i+1);
      int wrapwidth = props.get_wrap_width();
      if(wrapwidth < 1) {
        wrapwidth = (int)android.text.Layout.getDesiredWidth(ss, tp);
      }
      int cw = new android.text.StaticLayout(ss,
        tp,
        wrapwidth,
        layout.getAlignment(),
        (float)1.0,
        (float)0.0,
        false).getWidth();
      w += cw;
      // Snap to this index if x falls within the first half of the character.
      if((w - (cw/2)) >= x) {
        return(i);
      }
    }
    return(txt.length());
  }

  public android.text.StaticLayout get_layout() {
    return(layout);
  }

  public android.text.StaticLayout get_outline_layout() {
    return(outline_layout);
  }
}
| |
/*
Derby - Class com.pivotal.gemfirexd.internal.iapi.types.ReaderToUTF8Stream
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.iapi.types;
import java.io.InputStream;
import java.io.IOException;
import java.io.EOFException;
import java.io.Reader;
import java.io.UTFDataFormatException;
import com.pivotal.gemfirexd.internal.iapi.reference.MessageId;
import com.pivotal.gemfirexd.internal.iapi.reference.SQLState;
import com.pivotal.gemfirexd.internal.iapi.services.i18n.MessageService;
import com.pivotal.gemfirexd.internal.iapi.services.io.DerbyIOException;
import com.pivotal.gemfirexd.internal.iapi.services.io.LimitReader;
import com.pivotal.gemfirexd.internal.iapi.types.TypeId;
/**
Converts a java.io.Reader to the on-disk UTF8 format used by Derby
for character types.
*/
public final class ReaderToUTF8Stream
    extends InputStream
{
    /**
     * Application's reader wrapped in a LimitReader.
     */
    private LimitReader reader;

    // Holds data read from the reader and converted to the on-disk UTF8 form.
    private byte[] buffer;
    // Offset of the next byte to hand out from buffer.
    private int boff;
    // Number of valid bytes in buffer; -1 until the first fill.
    private int blen;
    // Set once the underlying reader has been exhausted.
    private boolean eof;
    // True when the value spans more than one buffer fill. In that case the
    // two-byte length header cannot be patched afterwards, so an end-of-stream
    // marker is appended instead (see checkSufficientData()).
    private boolean multipleBuffer;
    // buffer to hold the data read from stream
    // and converted to UTF8 format
    private final static int BUFSIZE = 32768;

    /** Number of characters to truncate from this stream
        The SQL standard allows for truncation of trailing spaces
        for clobs,varchar,char.
        If zero, no characters are truncated.
    */
    private final int charsToTruncate;
    private static final char SPACE = ' ';

    /**
     * Length of the final value, after truncation if any,
     * in characters.
       this stream needs to fit into a column of colWidth
       if truncation error happens ,then the error message includes
       information about the column width.
    */
    private final int valueLength;
    /** The maximum allowed length of the stream. */
    private final int maximumLength;
    /** The type name for the column data is inserted into. */
    private final String typeName;

    /**
     * Create a stream that will truncate trailing blanks if required/allowed.
     *
     * If the stream must be truncated, the number of blanks to truncate
     * is specified to allow the stream to be checked for exact length, as
     * required by JDBC 3.0. If the stream is shorter or longer than specified,
     * an exception is thrown during read.
     *
     * @param appReader application reader
     * @param valueLength the length of the reader in characters
     * @param numCharsToTruncate the number of trailing blanks to truncate
     * @param typeName type name of the column data is inserted into
     */
    public ReaderToUTF8Stream(Reader appReader,
                              int valueLength,
                              int numCharsToTruncate,
                              String typeName) {
        this.reader = new LimitReader(appReader);
        reader.setLimit(valueLength);
        buffer = new byte[BUFSIZE];
        blen = -1; // forces fillBuffer on the first read
        this.charsToTruncate = numCharsToTruncate;
        this.valueLength = valueLength;
        this.maximumLength = -1;
        this.typeName = typeName;
    }

    /**
     * Create a UTF-8 stream for a length less application reader.
     *
     * A limit is placed on the length of the reader. If the reader exceeds
     * the maximum length, truncation of trailing blanks is attempted. If
     * truncation fails, an exception is thrown.
     *
     * @param appReader application reader
     * @param maximumLength maximum allowed length in number of characters for
     *      the reader
     * @param typeName type name of the column data is inserted into
     * @throws IllegalArgumentException if maximum length is negative, or type
     *      name is <code>null<code>
     */
    public ReaderToUTF8Stream(Reader appReader,
                              int maximumLength,
                              String typeName) {
        if (maximumLength < 0) {
            throw new IllegalArgumentException("Maximum length for a capped " +
                    "stream cannot be negative: " + maximumLength);
        }
        if (typeName == null) {
            throw new IllegalArgumentException("Type name cannot be null");
        }
        this.reader = new LimitReader(appReader);
        reader.setLimit(maximumLength);
        buffer = new byte[BUFSIZE];
        blen = -1; // forces fillBuffer on the first read
        this.maximumLength = maximumLength;
        this.typeName = typeName;
        // -1 marks "length less" mode for the truncation/length checks.
        this.charsToTruncate = -1;
        this.valueLength = -1;
    }

    /**
     * read from stream; characters converted to utf-8 derby specific encoding.
     * If stream has been read, and eof reached, in that case any subsequent
     * read will throw an EOFException
     * @see java.io.InputStream#read()
     */
    public int read() throws IOException {

        // when stream has been read and eof reached, stream is closed
        // and buffer is set to null ( see close() method)
        // since stream cannot be re-used, check if stream is closed and
        // if so throw an EOFException
        if ( buffer == null)
            throw new EOFException(MessageService.getTextMessage(SQLState.STREAM_EOF));

        // first read: offset 2 reserves room for the two-byte length header
        // that checkSufficientData() patches in for single-buffer values.
        if (blen < 0)
            fillBuffer(2);

        while (boff == blen)
        {
            // reached end of buffer, read more?
            if (eof)
            {
                // we have reached the end of this stream
                // cleanup here and return -1 indicating
                // eof of stream
                close();
                return -1;
            }

            fillBuffer(0);
        }

        return buffer[boff++] & 0xff;
    }

    // Bulk read; same closed-stream and refill semantics as read().
    public int read(byte b[], int off, int len) throws IOException {

        // when stream has been read and eof reached, stream is closed
        // and buffer is set to null ( see close() method)
        // since stream cannot be re-used, check if stream is closed and
        // if so throw an EOFException
        if ( buffer == null )
            throw new EOFException(MessageService.getTextMessage
                    (SQLState.STREAM_EOF));

        // first read
        if (blen < 0)
            fillBuffer(2);

        int readCount = 0;

        while (len > 0)
        {

            int copyBytes = blen - boff;

            // buffer empty?
            if (copyBytes == 0)
            {
                if (eof)
                {
                    if (readCount > 0)
                    {
                        return readCount;
                    }
                    else
                    {
                        // we have reached the eof, so close the stream
                        close();
                        return -1;
                    }

                }
                fillBuffer(0);
                continue;
            }

            if (len < copyBytes)
                copyBytes = len;

            System.arraycopy(buffer, boff, b, off, copyBytes);
            boff += copyBytes;
            len -= copyBytes;
            readCount += copyBytes;
            off += copyBytes;

        }
        return readCount;
    }

    // Refills the buffer starting at the given offset, encoding characters
    // from the reader in modified UTF-8 (as java.io.DataOutput.writeUTF does:
    // U+0000 and U+0080..U+07FF take two bytes, U+0800 and above take three).
    private void fillBuffer(int startingOffset) throws IOException
    {
        int off = boff = startingOffset;

        // Offset 0 means this is a refill, i.e. the value spans buffers.
        if (off == 0)
            multipleBuffer = true;

        // 6! need to leave room for a three byte UTF8 encoding
        // and 3 bytes for our special end of file marker.
        for (; off <= buffer.length - 6; )
        {
            int c = reader.read();
            if (c < 0) {
                eof = true;
                break;
            }

            if ((c >= 0x0001) && (c <= 0x007F))
            {
                buffer[off++] = (byte) c;
            }
            else if (c > 0x07FF)
            {
                buffer[off++] = (byte) (0xE0 | ((c >> 12) & 0x0F));
                buffer[off++] = (byte) (0x80 | ((c >> 6) & 0x3F));
                buffer[off++] = (byte) (0x80 | ((c >> 0) & 0x3F));
            }
            else
            {
                buffer[off++] = (byte) (0xC0 | ((c >> 6) & 0x1F));
                buffer[off++] = (byte) (0x80 | ((c >> 0) & 0x3F));
            }
        }

        blen = off;
        boff = 0;

        if (eof)
            checkSufficientData();
    }

    /**
     * Validate the length of the stream, take corrective action if allowed.
     *
     * JDBC 3.0 (from tutorial book) requires that an input stream has the
     * correct number of bytes in the stream.
     * If the stream is too long, trailing blank truncation is attempted if
     * allowed. If truncation fails, or is disallowed, an exception is thrown.
     *
     * @throws IOException if an errors occurs in the application stream
     * @throws DerbyIOException if Derby finds a problem with the stream;
     *      stream is too long and cannot be truncated, or the stream length
     *      does not match the specified length
     */
    private void checkSufficientData() throws IOException
    {
        // now that we finished reading from the stream; the amount
        // of data that we can insert,start check for trailing spaces
        if (charsToTruncate > 0)
        {
            reader.setLimit(charsToTruncate);
            truncate();
        }

        // A length less stream that is capped, will return 0 even if there
        // are more bytes in the application stream.
        int remainingBytes = reader.clearLimit();
        if (remainingBytes > 0 && valueLength > 0) {
            // If we had a specified length, throw exception.
            throw new DerbyIOException(
                    MessageService.getTextMessage(
                        SQLState.SET_STREAM_INEXACT_LENGTH_DATA),
                    SQLState.SET_STREAM_INEXACT_LENGTH_DATA);
        }

        // if we had a limit try reading one more character.
        // JDBC 3.0 states the stream must have the correct number of
        // characters in it.
        if (remainingBytes == 0 && reader.read() >= 0) {
            if (valueLength > -1) {
                throw new DerbyIOException(
                        MessageService.getTextMessage(
                            SQLState.SET_STREAM_INEXACT_LENGTH_DATA),
                        SQLState.SET_STREAM_INEXACT_LENGTH_DATA);
            } else {
                // Stream was capped (length less) and too long.
                // Try to truncate if allowed, or else throw exception.
                if (canTruncate()) {
                    truncate();
                } else {
                    throw new DerbyIOException(
                            MessageService.getTextMessage(
// GemStone changes BEGIN
                                SQLState.LANG_STRING_TRUNCATION,
                                typeName,
                                MessageService.getTextMessage(
                                    MessageId.BINARY_DATA_HIDDEN,
                                    String.valueOf(valueLength)),
                                String.valueOf(maximumLength)),
                            /* (original code)
                                SQLState.LANG_STRING_TRUNCATION),
                            */
// GemStone changes END
                            SQLState.LANG_STRING_TRUNCATION);
                }
            }
        }

        // can put the correct length into the stream.
        if (!multipleBuffer)
        {
            // Value fit in one buffer: patch the actual encoded length into
            // the two header bytes reserved by the first fillBuffer(2) call.
            int utflen = blen - 2;

            buffer[0] = (byte) ((utflen >>> 8) & 0xFF);
            buffer[1] = (byte) ((utflen >>> 0) & 0xFF);

        }
        else
        {
            // Multi-buffer value: header bytes were already handed out, so
            // append Derby's special end-of-string marker instead.
            buffer[blen++] = (byte) 0xE0;
            buffer[blen++] = (byte) 0x00;
            buffer[blen++] = (byte) 0x00;
        }
    }

    /**
     * Determine if trailing blank truncation is allowed.
     */
    private boolean canTruncate() {
        // Only a few types can be truncated, default is to not allow.
        if (typeName.equals(TypeId.CLOB_NAME)) {
            return true;
        } else if (typeName.equals(TypeId.VARCHAR_NAME)) {
            return true;
        }
        return false;
    }

    /**
     * Attempt to truncate the stream by removing trailing blanks.
     */
    private void truncate()
            throws IOException {
        int c = 0;
        for (;;) {
            c = reader.read();

            if (c < 0) {
                break;
            } else if (c != SPACE) {
                // Excess characters are only discardable if they are blanks.
                throw new DerbyIOException(
                    MessageService.getTextMessage(
                        SQLState.LANG_STRING_TRUNCATION,
                        typeName,
// GemStone changes BEGIN
                        MessageService.getTextMessage(
                            MessageId.BINARY_DATA_HIDDEN,
                            String.valueOf(valueLength)),
                        String.valueOf(maximumLength)),
                    /* (original code)
                        "XXXX",
                        String.valueOf(valueLength)),
                    */
// GemStone changes END
                    SQLState.LANG_STRING_TRUNCATION);
            }
        }
    }

    /**
     * return resources
     */
    public void close() throws IOException
    {
        // since stream has been read and eof reached, return buffer back to
        // the vm.
        // Instead of using another variable to indicate stream is closed
        // a check on (buffer==null) is used instead.
        buffer = null;
    }

    /**
     * Return an optimized version of bytes available to read from
     * the stream
     * Note, it is not exactly per java.io.InputStream#available()
     */
    public final int available()
    {
        int remainingBytes = reader.getLimit();
        // this object buffers BUFSIZE bytes that can be read
        // and when that is finished it reads the next available bytes
        // from the reader object
        // reader.getLimit() returns the remaining bytes available
        // on this stream
        return (BUFSIZE > remainingBytes ? remainingBytes : BUFSIZE);
    }
}
| |
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package com.android.tools.build.bundletool.splitters;
import static com.android.bundle.Targeting.Abi.AbiAlias.ARMEABI_V7A;
import static com.android.bundle.Targeting.Abi.AbiAlias.X86;
import static com.android.bundle.Targeting.Abi.AbiAlias.X86_64;
import static com.android.tools.build.bundletool.testing.ManifestProtoUtils.androidManifest;
import static com.android.tools.build.bundletool.testing.TargetingUtils.apexImageTargeting;
import static com.android.tools.build.bundletool.testing.TargetingUtils.apexImages;
import static com.android.tools.build.bundletool.testing.TargetingUtils.apkMultiAbiTargeting;
import static com.android.tools.build.bundletool.testing.TargetingUtils.apkMultiAbiTargetingFromAllTargeting;
import static com.android.tools.build.bundletool.testing.TargetingUtils.targetedApexImage;
import static com.android.tools.build.bundletool.testing.TargetingUtils.targetedApexImageWithBuildInfo;
import static com.android.tools.build.bundletool.testing.TestUtils.extractPaths;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.extensions.proto.ProtoTruth.assertThat;
import com.android.bundle.Files.ApexImages;
import com.android.bundle.Targeting.Abi.AbiAlias;
import com.android.bundle.Targeting.ApkTargeting;
import com.android.tools.build.bundletool.model.BundleModule;
import com.android.tools.build.bundletool.model.ModuleSplit;
import com.android.tools.build.bundletool.testing.BundleModuleBuilder;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for the AbiApexImagesSplitter class. */
@RunWith(JUnit4.class)
public class AbiApexImagesSplitterTest {

  @Test
  public void splittingBySingleAbi_oneImageFile() throws Exception {
    AbiApexImagesSplitter abiApexImagesSplitter = new AbiApexImagesSplitter();
    ImmutableCollection<ModuleSplit> splits =
        abiApexImagesSplitter.split(
            ModuleSplit.forApex(createSingleImageModule("testModule", "x86")));

    // A single image yields a single split targeting exactly that ABI.
    ModuleSplit x86Split = Iterables.getOnlyElement(splits.asList());
    assertThat(x86Split.getApkTargeting()).isEqualTo(apkMultiAbiTargeting(X86));
    assertThat(extractPaths(x86Split.getEntries())).containsExactly("apex/x86.img");
  }

  @Test
  public void splittingBySingleAbi_twoImageFiles() throws Exception {
    ApexImages apexConfig =
        apexImages(
            targetedApexImage("apex/x86.img", apexImageTargeting("x86")),
            targetedApexImage("apex/x86_64.img", apexImageTargeting("x86_64")));
    BundleModule bundleModule =
        new BundleModuleBuilder("testModule")
            .addFile("apex/x86.img")
            .addFile("apex/x86_64.img")
            .setApexConfig(apexConfig)
            .setManifest(androidManifest("com.test.app"))
            .build();

    AbiApexImagesSplitter abiApexImagesSplitter = new AbiApexImagesSplitter();
    ImmutableCollection<ModuleSplit> splits =
        abiApexImagesSplitter.split(ModuleSplit.forApex(bundleModule));

    // One split per image; each split's targeting also lists the other ABI
    // as an alternative.
    assertThat(splits).hasSize(2);
    ApkTargeting x86Targeting = apkMultiAbiTargeting(X86, ImmutableSet.of(X86_64));
    ApkTargeting x64Targeting = apkMultiAbiTargeting(X86_64, ImmutableSet.of(X86));
    ImmutableMap<ApkTargeting, ModuleSplit> splitsByTargeting =
        Maps.uniqueIndex(splits, ModuleSplit::getApkTargeting);
    assertThat(splitsByTargeting.keySet()).containsExactly(x86Targeting, x64Targeting);
    assertThat(extractPaths(splitsByTargeting.get(x86Targeting).getEntries()))
        .containsExactly("apex/x86.img");
    assertThat(extractPaths(splitsByTargeting.get(x64Targeting).getEntries()))
        .containsExactly("apex/x86_64.img");
  }

  @Test
  public void splittingBySingleAbi_twoImageFilesWithBuildInfo() throws Exception {
    ApexImages apexConfig =
        apexImages(
            targetedApexImageWithBuildInfo(
                "apex/x86.img", "apex/x86.build_info.pb", apexImageTargeting("x86")),
            targetedApexImageWithBuildInfo(
                "apex/x86_64.img", "apex/x86_64.build_info.pb", apexImageTargeting("x86_64")));
    BundleModule bundleModule =
        new BundleModuleBuilder("testModule")
            .addFile("apex/x86.img")
            .addFile("apex/x86.build_info.pb")
            .addFile("apex/x86_64.img")
            .addFile("apex/x86_64.build_info.pb")
            .setApexConfig(apexConfig)
            .setManifest(androidManifest("com.test.app"))
            .build();

    AbiApexImagesSplitter abiApexImagesSplitter = new AbiApexImagesSplitter();
    ImmutableCollection<ModuleSplit> splits =
        abiApexImagesSplitter.split(ModuleSplit.forApex(bundleModule));

    // The build-info file must travel with its matching image in each split.
    assertThat(splits).hasSize(2);
    ApkTargeting x86Targeting = apkMultiAbiTargeting(X86, ImmutableSet.of(X86_64));
    ApkTargeting x64Targeting = apkMultiAbiTargeting(X86_64, ImmutableSet.of(X86));
    ImmutableMap<ApkTargeting, ModuleSplit> splitsByTargeting =
        Maps.uniqueIndex(splits, ModuleSplit::getApkTargeting);
    assertThat(splitsByTargeting.keySet()).containsExactly(x86Targeting, x64Targeting);
    assertThat(extractPaths(splitsByTargeting.get(x86Targeting).getEntries()))
        .containsExactly("apex/x86.img", "apex/x86.build_info.pb");
    assertThat(extractPaths(splitsByTargeting.get(x64Targeting).getEntries()))
        .containsExactly("apex/x86_64.img", "apex/x86_64.build_info.pb");
  }

  @Test
  public void splittingByMultipleAbi_multipleImageFiles() throws Exception {
    ApexImages apexConfig =
        apexImages(
            targetedApexImage("apex/x86_64.x86.img", apexImageTargeting("x86_64", "x86")),
            targetedApexImage(
                "apex/x86_64.armeabi-v7a.img", apexImageTargeting("x86_64", "armeabi-v7a")),
            targetedApexImage("apex/x86_64.img", apexImageTargeting("x86_64")),
            targetedApexImage("apex/x86.armeabi-v7a.img", apexImageTargeting("x86", "armeabi-v7a")),
            targetedApexImage("apex/x86.img", apexImageTargeting("x86")),
            targetedApexImage("apex/armeabi-v7a.img", apexImageTargeting("armeabi-v7a")));
    BundleModule bundleModule =
        new BundleModuleBuilder("testModule")
            .addFile("apex/x86_64.x86.img")
            .addFile("apex/x86_64.armeabi-v7a.img")
            .addFile("apex/x86_64.img")
            .addFile("apex/x86.armeabi-v7a.img")
            .addFile("apex/x86.img")
            .addFile("apex/armeabi-v7a.img")
            .setApexConfig(apexConfig)
            .setManifest(androidManifest("com.test.app"))
            .build();

    AbiApexImagesSplitter abiApexImagesSplitter = new AbiApexImagesSplitter();
    ImmutableCollection<ModuleSplit> splits =
        abiApexImagesSplitter.split(ModuleSplit.forApex(bundleModule));

    // One split per multi-ABI image; each split's targeting carries the full
    // set of alternative ABI combinations.
    assertThat(splits).hasSize(6);
    ImmutableSet<AbiAlias> x64X86Set = ImmutableSet.of(X86, X86_64);
    ImmutableSet<AbiAlias> x64ArmSet = ImmutableSet.of(ARMEABI_V7A, X86_64);
    ImmutableSet<AbiAlias> x64Set = ImmutableSet.of(X86_64);
    ImmutableSet<AbiAlias> x86ArmSet = ImmutableSet.of(ARMEABI_V7A, X86);
    ImmutableSet<AbiAlias> x86Set = ImmutableSet.of(X86);
    ImmutableSet<AbiAlias> armSet = ImmutableSet.of(ARMEABI_V7A);
    ImmutableSet<ImmutableSet<AbiAlias>> allTargeting =
        ImmutableSet.of(armSet, x86ArmSet, x64ArmSet, x86Set, x64X86Set, x64Set);
    ApkTargeting x64X86Targeting = apkMultiAbiTargetingFromAllTargeting(x64X86Set, allTargeting);
    ApkTargeting x64ArmTargeting = apkMultiAbiTargetingFromAllTargeting(x64ArmSet, allTargeting);
    ApkTargeting a64Targeting = apkMultiAbiTargetingFromAllTargeting(x64Set, allTargeting);
    ApkTargeting x86ArmTargeting = apkMultiAbiTargetingFromAllTargeting(x86ArmSet, allTargeting);
    ApkTargeting x86Targeting = apkMultiAbiTargetingFromAllTargeting(x86Set, allTargeting);
    ApkTargeting armTargeting = apkMultiAbiTargetingFromAllTargeting(armSet, allTargeting);
    ImmutableMap<ApkTargeting, ModuleSplit> splitsByTargeting =
        Maps.uniqueIndex(splits, ModuleSplit::getApkTargeting);
    assertThat(splitsByTargeting.keySet())
        .containsExactly(
            x64X86Targeting,
            x64ArmTargeting,
            a64Targeting,
            x86ArmTargeting,
            x86Targeting,
            armTargeting);
    assertThat(extractPaths(splitsByTargeting.get(x64X86Targeting).getEntries()))
        .containsExactly("apex/x86_64.x86.img");
    assertThat(extractPaths(splitsByTargeting.get(x64ArmTargeting).getEntries()))
        .containsExactly("apex/x86_64.armeabi-v7a.img");
    assertThat(extractPaths(splitsByTargeting.get(a64Targeting).getEntries()))
        .containsExactly("apex/x86_64.img");
    assertThat(extractPaths(splitsByTargeting.get(x86ArmTargeting).getEntries()))
        .containsExactly("apex/x86.armeabi-v7a.img");
    assertThat(extractPaths(splitsByTargeting.get(x86Targeting).getEntries()))
        .containsExactly("apex/x86.img");
    assertThat(extractPaths(splitsByTargeting.get(armTargeting).getEntries()))
        .containsExactly("apex/armeabi-v7a.img");
  }

  /** Creates a minimal module with one apex image file targeted at the given cpu architecture. */
  private static BundleModule createSingleImageModule(String moduleName, String architecture)
      throws Exception {
    String relativeFilePath = "apex/" + architecture + ".img";
    ApexImages apexConfig =
        apexImages(targetedApexImage(relativeFilePath, apexImageTargeting(architecture)));
    return new BundleModuleBuilder(moduleName)
        .addFile(relativeFilePath)
        .setApexConfig(apexConfig)
        .setManifest(androidManifest("com.test.app"))
        .build();
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.cep.nfa;
import com.google.common.collect.LinkedHashMultimap;
import org.apache.commons.lang3.StringUtils;
import org.apache.flink.annotation.Internal;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.core.memory.DataInputViewStreamWrapper;
import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.Stack;
/**
* A shared buffer implementation which stores values under a key. Additionally, the values can be
* versioned such that it is possible to retrieve their predecessor element in the buffer.
* <p>
* The idea of the implementation is to have for each key a dedicated {@link SharedBufferPage}. Each
* buffer page maintains a collection of the inserted values.
*
* The values are wrapped in a {@link SharedBufferEntry}. The shared buffer entry allows to store
* relations between different entries. A dewey versioning scheme allows to discriminate between
* different relations (e.g. preceding element).
*
* The implementation is strongly based on the paper "Efficient Pattern Matching over Event Streams".
*
* @see <a href="https://people.cs.umass.edu/~yanlei/publications/sase-sigmod08.pdf">
* https://people.cs.umass.edu/~yanlei/publications/sase-sigmod08.pdf</a>
*
* @param <K> Type of the keys
* @param <V> Type of the values
*/
public class SharedBuffer<K extends Serializable, V> implements Serializable {

	private static final long serialVersionUID = 9213251042562206495L;

	/** Serializer used by the custom (de)serialization below to write/read the buffered values. */
	private final TypeSerializer<V> valueSerializer;

	/**
	 * Pages of entries, keyed by the page key. Declared transient because the entry graph can
	 * contain shared references (several edges pointing at the same entry), so it is serialized
	 * manually in {@link #writeObject(ObjectOutputStream)} instead of by default serialization.
	 */
	private transient Map<K, SharedBufferPage<K, V>> pages;

	public SharedBuffer(final TypeSerializer<V> valueSerializer) {
		this.valueSerializer = valueSerializer;
		this.pages = new HashMap<>();
	}

	/**
	 * Stores given value (value + timestamp) under the given key. It assigns a preceding element
	 * relation to the entry which is defined by the previous key, value (value + timestamp).
	 *
	 * @param key Key of the current value
	 * @param value Current value
	 * @param timestamp Timestamp of the current value (a value requires always a timestamp to make it uniquely referable))
	 * @param previousKey Key of the value for the previous relation
	 * @param previousValue Value for the previous relation
	 * @param previousTimestamp Timestamp of the value for the previous relation
	 * @param version Version of the previous relation
	 * @throws IllegalStateException if a non-null previous value was given but the corresponding
	 *         entry cannot be found (e.g. because it has already been pruned)
	 */
	public void put(
			final K key,
			final V value,
			final long timestamp,
			final K previousKey,
			final V previousValue,
			final long previousTimestamp,
			final DeweyNumber version) {
		final SharedBufferEntry<K, V> previousSharedBufferEntry = get(previousKey, previousValue, previousTimestamp);

		// sanity check whether we've found the previous element
		if (previousSharedBufferEntry == null && previousValue != null) {
			throw new IllegalStateException("Could not find previous shared buffer entry with " +
				"key: " + previousKey + ", value: " + previousValue + " and timestamp: " +
				previousTimestamp + ". This can indicate that the element belonging to the previous " +
				"relation has been already pruned, even though you expect it to be still there.");
		}

		put(key, value, timestamp, previousSharedBufferEntry, version);
	}

	/**
	 * Stores given value (value + timestamp) under the given key. It assigns no preceding element
	 * relation to the entry.
	 *
	 * @param key Key of the current value
	 * @param value Current value
	 * @param timestamp Timestamp of the current value (a value requires always a timestamp to make it uniquely referable))
	 * @param version Version of the previous relation
	 */
	public void put(
			final K key,
			final V value,
			final long timestamp,
			final DeweyNumber version) {
		put(key, value, timestamp, null, version);
	}

	/**
	 * Internal insertion: looks up (lazily creating) the page for the given key and adds the
	 * value/timestamp pair to it, linked to the (possibly null) previous entry under the given
	 * version.
	 */
	private void put(
			final K key,
			final V value,
			final long timestamp,
			final SharedBufferEntry<K, V> previousSharedBufferEntry,
			final DeweyNumber version) {
		SharedBufferPage<K, V> page = pages.get(key);

		if (page == null) {
			page = new SharedBufferPage<>(key);
			pages.put(key, page);
		}

		page.add(new ValueTimeWrapper<>(value, timestamp), previousSharedBufferEntry, version);
	}

	/**
	 * Checks whether the buffer holds any entry at all.
	 *
	 * @return true iff every page of this buffer is empty
	 */
	public boolean isEmpty() {
		for (SharedBufferPage<K, V> page: pages.values()) {
			if (!page.isEmpty()) {
				return false;
			}
		}

		return true;
	}

	/**
	 * Deletes all entries in each page which have expired with respect to given pruning timestamp.
	 * Pages that become empty as a result are removed from the buffer entirely.
	 *
	 * @param pruningTimestamp The time which is used for pruning. All elements whose timestamp is
	 * lower than the pruning timestamp will be removed.
	 */
	public void prune(long pruningTimestamp) {
		Iterator<Map.Entry<K, SharedBufferPage<K, V>>> iter = pages.entrySet().iterator();

		while (iter.hasNext()) {
			SharedBufferPage<K, V> page = iter.next().getValue();

			page.prune(pruningTimestamp);

			if (page.isEmpty()) {
				// delete page if it is empty
				iter.remove();
			}
		}
	}

	/**
	 * Returns all elements from the previous relation starting at the given value with the
	 * given key and timestamp.
	 *
	 * <p>The traversal is an explicit-stack depth-first search over the versioned edges; a path is
	 * complete once it reaches an extraction state whose entry is null (i.e. an edge without a
	 * target), at which point the accumulated path is drained into a result multimap.
	 *
	 * @param key Key of the starting value
	 * @param value Value of the starting element
	 * @param timestamp Timestamp of the starting value
	 * @param version Version of the previous relation which shall be extracted
	 * @return Collection of previous relations starting with the given value
	 */
	public Collection<LinkedHashMultimap<K, V>> extractPatterns(
			final K key,
			final V value,
			final long timestamp,
			final DeweyNumber version) {
		Collection<LinkedHashMultimap<K, V>> result = new ArrayList<>();

		// stack to remember the current extraction states
		Stack<ExtractionState<K, V>> extractionStates = new Stack<>();

		// get the starting shared buffer entry for the previous relation
		SharedBufferEntry<K, V> entry = get(key, value, timestamp);

		if (entry != null) {
			extractionStates.add(new ExtractionState<>(entry, version, new Stack<SharedBufferEntry<K, V>>()));

			// use a depth first search to reconstruct the previous relations
			while (!extractionStates.isEmpty()) {
				final ExtractionState<K, V> extractionState = extractionStates.pop();
				// current path of the depth first search
				final Stack<SharedBufferEntry<K, V>> currentPath = extractionState.getPath();
				final SharedBufferEntry<K, V> currentEntry = extractionState.getEntry();

				// termination criterion
				if (currentEntry == null) {
					final LinkedHashMultimap<K, V> completePath = LinkedHashMultimap.create();

					// the path stack holds the relation newest-first; popping reverses it
					while(!currentPath.isEmpty()) {
						final SharedBufferEntry<K, V> currentPathEntry = currentPath.pop();

						completePath.put(currentPathEntry.getKey(), currentPathEntry.getValueTime().getValue());
					}

					result.add(completePath);
				} else {
					// append state to the path
					currentPath.push(currentEntry);

					boolean firstMatch = true;
					for (SharedBufferEdge<K, V> edge : currentEntry.getEdges()) {
						// we can only proceed if the current version is compatible to the version
						// of this previous relation
						final DeweyNumber currentVersion = extractionState.getVersion();
						if (currentVersion.isCompatibleWith(edge.getVersion())) {
							if (firstMatch) {
								// for the first match we don't have to copy the current path
								extractionStates.push(new ExtractionState<>(edge.getTarget(), edge.getVersion(), currentPath));
								firstMatch = false;
							} else {
								// further matches branch off with their own copy of the path
								final Stack<SharedBufferEntry<K, V>> copy = new Stack<>();
								copy.addAll(currentPath);

								extractionStates.push(
									new ExtractionState<>(
										edge.getTarget(),
										edge.getVersion(),
										copy));
							}
						}
					}
				}
			}
		}

		return result;
	}

	/**
	 * Increases the reference counter for the given value, key, timestamp entry so that it is not
	 * accidentally removed.
	 *
	 * @param key Key of the value to lock
	 * @param value Value to lock
	 * @param timestamp Timestamp of the value to lock
	 */
	public void lock(final K key, final V value, final long timestamp) {
		SharedBufferEntry<K, V> entry = get(key, value, timestamp);

		if (entry != null) {
			entry.increaseReferenceCounter();
		}
	}

	/**
	 * Decreases the reference counter for the given value, key, timestamp entry so that it can be
	 * removed once the reference counter reaches 0. If the counter does reach 0 the entry is
	 * removed immediately (including cascading removal of its now-unreferenced targets, see
	 * {@link #internalRemove(SharedBufferEntry)}).
	 *
	 * @param key Key of the value to release
	 * @param value Value to release
	 * @param timestamp Timestamp of the value to release
	 */
	public void release(final K key, final V value, final long timestamp) {
		SharedBufferEntry<K, V> entry = get(key, value, timestamp);

		if (entry != null) {
			entry.decreaseReferenceCounter();
			internalRemove(entry);
		}
	}

	/**
	 * Custom serialization. Entries are written first and implicitly assigned dense integer ids
	 * (in iteration order); afterwards all edges are written as (sourceId, targetId, version)
	 * triples, with a targetId of -1 encoding an edge without a target entry. The value itself is
	 * written via the Flink {@code valueSerializer}, everything else via plain Java I/O.
	 * Must stay in lock step with {@link #readObject(ObjectInputStream)}.
	 */
	private void writeObject(ObjectOutputStream oos) throws IOException {
		DataOutputViewStreamWrapper target = new DataOutputViewStreamWrapper(oos);
		Map<SharedBufferEntry<K, V>, Integer> entryIDs = new HashMap<>();

		int totalEdges = 0;
		int entryCounter = 0;

		oos.defaultWriteObject();

		// number of pages
		oos.writeInt(pages.size());

		for (Map.Entry<K, SharedBufferPage<K, V>> pageEntry: pages.entrySet()) {
			SharedBufferPage<K, V> page = pageEntry.getValue();

			// key for the current page
			oos.writeObject(page.getKey());

			// number of page entries
			oos.writeInt(page.entries.size());

			for (Map.Entry<ValueTimeWrapper<V>, SharedBufferEntry<K, V>> sharedBufferEntry: page.entries.entrySet()) {
				// serialize the sharedBufferEntry
				SharedBufferEntry<K, V> sharedBuffer = sharedBufferEntry.getValue();

				// assign id to the sharedBufferEntry for the future serialization of the previous
				// relation
				entryIDs.put(sharedBuffer, entryCounter++);

				ValueTimeWrapper<V> valueTimeWrapper = sharedBuffer.getValueTime();

				valueSerializer.serialize(valueTimeWrapper.value, target);
				oos.writeLong(valueTimeWrapper.getTimestamp());

				// per-entry edge counts are not written; edges are restored from the global
				// (sourceId, targetId) list below
				int edges = sharedBuffer.edges.size();
				totalEdges += edges;

				oos.writeInt(sharedBuffer.referenceCounter);
			}
		}

		// write the edges between the shared buffer entries
		oos.writeInt(totalEdges);

		for (Map.Entry<K, SharedBufferPage<K, V>> pageEntry: pages.entrySet()) {
			SharedBufferPage<K, V> page = pageEntry.getValue();

			for (Map.Entry<ValueTimeWrapper<V>, SharedBufferEntry<K, V>> sharedBufferEntry: page.entries.entrySet()) {
				SharedBufferEntry<K, V> sharedBuffer = sharedBufferEntry.getValue();

				if (!entryIDs.containsKey(sharedBuffer)) {
					throw new RuntimeException("Could not find id for entry: " + sharedBuffer);
				} else {
					int id = entryIDs.get(sharedBuffer);

					for (SharedBufferEdge<K, V> edge: sharedBuffer.edges) {
						// in order to serialize the previous relation we simply serialize the ids
						// of the source and target SharedBufferEntry
						if (edge.target != null) {
							if (!entryIDs.containsKey(edge.getTarget())) {
								throw new RuntimeException("Could not find id for entry: " + edge.getTarget());
							} else {
								int targetId = entryIDs.get(edge.getTarget());

								oos.writeInt(id);
								oos.writeInt(targetId);
								oos.writeObject(edge.version);
							}
						} else {
							oos.writeInt(id);
							oos.writeInt(-1);
							oos.writeObject(edge.version);
						}
					}
				}
			}
		}
	}

	/**
	 * Custom deserialization mirroring {@link #writeObject(ObjectOutputStream)}: first all pages
	 * and their entries are restored (collecting entries into {@code entryList} so that the list
	 * index equals the id assigned during serialization), then the edges are re-attached by id.
	 *
	 * @throws RuntimeException if a serialized edge references an id outside the restored entry
	 *         list, which indicates corrupted state
	 */
	private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
		DataInputViewStreamWrapper source = new DataInputViewStreamWrapper(ois);
		ArrayList<SharedBufferEntry<K, V>> entryList = new ArrayList<>();
		ois.defaultReadObject();

		this.pages = new HashMap<>();

		int numberPages = ois.readInt();

		for (int i = 0; i < numberPages; i++) {
			// key of the page
			@SuppressWarnings("unchecked")
			K key = (K)ois.readObject();

			SharedBufferPage<K, V> page = new SharedBufferPage<>(key);

			pages.put(key, page);

			int numberEntries = ois.readInt();

			for (int j = 0; j < numberEntries; j++) {
				// restore the SharedBufferEntries for the given page
				V value = valueSerializer.deserialize(source);
				long timestamp = ois.readLong();

				ValueTimeWrapper<V> valueTimeWrapper = new ValueTimeWrapper<>(value, timestamp);
				SharedBufferEntry<K, V> sharedBufferEntry = new SharedBufferEntry<K, V>(valueTimeWrapper, page);

				sharedBufferEntry.referenceCounter = ois.readInt();

				page.entries.put(valueTimeWrapper, sharedBufferEntry);

				entryList.add(sharedBufferEntry);
			}
		}

		// read the edges of the shared buffer entries
		int numberEdges = ois.readInt();

		for (int j = 0; j < numberEdges; j++) {
			int sourceIndex = ois.readInt();
			int targetIndex = ois.readInt();

			if (sourceIndex >= entryList.size() || sourceIndex < 0) {
				throw new RuntimeException("Could not find source entry with index " + sourceIndex +
					". This indicates a corrupted state.");
			} else {
				// We've already deserialized the shared buffer entry. Simply read its ID and
				// retrieve the buffer entry from the list of entries
				SharedBufferEntry<K, V> sourceEntry = entryList.get(sourceIndex);

				final DeweyNumber version = (DeweyNumber) ois.readObject();
				final SharedBufferEntry<K, V> target;

				// -1 encodes an edge without target (see writeObject)
				if (targetIndex >= 0) {
					if (targetIndex >= entryList.size()) {
						throw new RuntimeException("Could not find target entry with index " + targetIndex +
							". This indicates a corrupted state.");
					} else {
						target = entryList.get(targetIndex);
					}
				} else {
					target = null;
				}

				sourceEntry.edges.add(new SharedBufferEdge<K, V>(target, version));
			}
		}
	}

	// Internal constructor used by migrateSharedBuffer to install a pre-built page map.
	private SharedBuffer(
			TypeSerializer<V> valueSerializer,
			Map<K, SharedBufferPage<K, V>> pages) {
		this.valueSerializer = valueSerializer;
		this.pages = pages;
	}

	/**
	 * For backward compatibility only. Previously the key in {@link SharedBuffer} was {@link State}.
	 * Now it is {@link String}.
	 *
	 * <p>Two passes over the old buffer: the first clones every page/entry (mapping old entries to
	 * new ones), the second re-creates the edges using that mapping so shared targets stay shared.
	 */
	@Internal
	static <T> SharedBuffer<String, T> migrateSharedBuffer(SharedBuffer<State<T>, T> buffer) {
		final Map<String, SharedBufferPage<String, T>> pageMap = new HashMap<>();
		final Map<SharedBufferEntry<State<T>, T>, SharedBufferEntry<String, T>> entries = new HashMap<>();

		// pass 1: copy pages and entries, keyed by the state's name instead of the State itself
		for (Map.Entry<State<T>, SharedBufferPage<State<T>, T>> page : buffer.pages.entrySet()) {
			final SharedBufferPage<String, T> newPage = new SharedBufferPage<>(page.getKey().getName());
			pageMap.put(newPage.getKey(), newPage);

			for (Map.Entry<ValueTimeWrapper<T>, SharedBufferEntry<State<T>, T>> pageEntry : page.getValue().entries.entrySet()) {
				final SharedBufferEntry<String, T> newSharedBufferEntry = new SharedBufferEntry<>(
					pageEntry.getKey(),
					newPage);
				newSharedBufferEntry.referenceCounter = pageEntry.getValue().referenceCounter;
				entries.put(pageEntry.getValue(), newSharedBufferEntry);
				newPage.entries.put(pageEntry.getKey(), newSharedBufferEntry);
			}
		}

		// pass 2: translate the edges via the old-entry -> new-entry mapping
		for (Map.Entry<State<T>, SharedBufferPage<State<T>, T>> page : buffer.pages.entrySet()) {
			for (Map.Entry<ValueTimeWrapper<T>, SharedBufferEntry<State<T>, T>> pageEntry : page.getValue().entries.entrySet()) {
				final SharedBufferEntry<String, T> newEntry = entries.get(pageEntry.getValue());

				for (SharedBufferEdge<State<T>, T> edge : pageEntry.getValue().edges) {
					final SharedBufferEntry<String, T> targetNewEntry = entries.get(edge.getTarget());

					final SharedBufferEdge<String, T> newEdge = new SharedBufferEdge<>(
						targetNewEntry,
						edge.getVersion());
					newEntry.edges.add(newEdge);
				}
			}
		}

		return new SharedBuffer<>(buffer.valueSerializer, pageMap);
	}

	/**
	 * Looks up the entry stored for the given (key, value, timestamp) triple.
	 *
	 * @return the matching entry, or null if no page for the key or no such entry exists
	 */
	private SharedBufferEntry<K, V> get(
			final K key,
			final V value,
			final long timestamp) {
		if (pages.containsKey(key)) {
			return pages
				.get(key)
				.get(new ValueTimeWrapper<V>(value, timestamp));
		} else {
			return null;
		}
	}

	/**
	 * Removes the given entry if its reference count has reached 0 and cascades: removing an entry
	 * releases one reference on each of its edge targets, which may in turn become removable.
	 * Implemented iteratively with an explicit stack to avoid recursion.
	 */
	private void internalRemove(final SharedBufferEntry<K, V> entry) {
		Stack<SharedBufferEntry<K, V>> entriesToRemove = new Stack<>();
		entriesToRemove.add(entry);

		while (!entriesToRemove.isEmpty()) {
			SharedBufferEntry<K, V> currentEntry = entriesToRemove.pop();

			if (currentEntry.getReferenceCounter() == 0) {
				currentEntry.remove();

				for (SharedBufferEdge<K, V> edge: currentEntry.getEdges()) {
					if (edge.getTarget() != null) {
						edge.getTarget().decreaseReferenceCounter();
						entriesToRemove.push(edge.getTarget());
					}
				}
			}
		}
	}

	@Override
	public String toString() {
		StringBuilder builder = new StringBuilder();

		for(Map.Entry<K, SharedBufferPage<K, V>> entry :pages.entrySet()){
			builder.append("Key: ").append(entry.getKey()).append("\n");
			builder.append("Value: ").append(entry.getValue()).append("\n");
		}

		return builder.toString();
	}

	@Override
	public boolean equals(Object obj) {
		if (obj instanceof SharedBuffer) {
			@SuppressWarnings("unchecked")
			SharedBuffer<K, V> other = (SharedBuffer<K, V>) obj;

			return pages.equals(other.pages) && valueSerializer.equals(other.valueSerializer);
		} else {
			return false;
		}
	}

	@Override
	public int hashCode() {
		return Objects.hash(pages, valueSerializer);
	}

	/**
	 * The SharedBufferPage represents a set of elements which have been stored under the same key.
	 *
	 * @param <K> Type of the key
	 * @param <V> Type of the value
	 */
	private static class SharedBufferPage<K, V> {

		// key of the page
		private final K key;

		// Map of entries which are stored in this page
		private final HashMap<ValueTimeWrapper<V>, SharedBufferEntry<K, V>> entries;

		public SharedBufferPage(final K key) {
			this.key = key;
			entries = new HashMap<>();
		}

		public K getKey() {
			return key;
		}

		/**
		 * Adds a new value time pair to the page. The new entry is linked to the previous entry
		 * with the given version.
		 *
		 * <p>Linking to a previous entry increments that entry's reference counter so it cannot be
		 * removed while this entry still points at it.
		 *
		 * @param valueTime Value time pair to be stored
		 * @param previous Previous shared buffer entry to which the new entry shall be linked
		 * @param version Version of the relation between the new and the previous entry
		 */
		public void add(final ValueTimeWrapper<V> valueTime, final SharedBufferEntry<K, V> previous, final DeweyNumber version) {
			SharedBufferEntry<K, V> sharedBufferEntry = entries.get(valueTime);

			if (sharedBufferEntry == null) {
				sharedBufferEntry = new SharedBufferEntry<K, V>(valueTime, this);

				entries.put(valueTime, sharedBufferEntry);
			}

			SharedBufferEdge<K, V> newEdge;

			if (previous != null) {
				newEdge = new SharedBufferEdge<>(previous, version);
				previous.increaseReferenceCounter();
			} else {
				newEdge = new SharedBufferEdge<>(null, version);
			}

			sharedBufferEntry.addEdge(newEdge);
		}

		public SharedBufferEntry<K, V> get(final ValueTimeWrapper<V> valueTime) {
			return entries.get(valueTime);
		}

		/**
		 * Removes all entries from the map whose timestamp is smaller than the pruning timestamp.
		 *
		 * <p>NOTE(review): iteration stops at the first non-expired entry, but {@code entries} is a
		 * plain {@link HashMap} whose iteration order is unrelated to timestamps — so expired
		 * entries appearing after a non-expired one in iteration order are left unpruned. Verify
		 * whether an insertion/timestamp ordering was assumed here.
		 *
		 * @param pruningTimestamp Timestamp for the pruning
		 */
		public void prune(long pruningTimestamp) {
			Iterator<Map.Entry<ValueTimeWrapper<V>, SharedBufferEntry<K, V>>> iterator = entries.entrySet().iterator();
			boolean continuePruning = true;

			while (iterator.hasNext() && continuePruning) {
				SharedBufferEntry<K, V> entry = iterator.next().getValue();

				if (entry.getValueTime().getTimestamp() <= pruningTimestamp) {
					iterator.remove();
				} else {
					continuePruning = false;
				}
			}
		}

		public boolean isEmpty() {
			return entries.isEmpty();
		}

		public SharedBufferEntry<K, V> remove(final ValueTimeWrapper<V> valueTime) {
			return entries.remove(valueTime);
		}

		@Override
		public String toString() {
			StringBuilder builder = new StringBuilder();
			builder.append("SharedBufferPage(\n");

			for (SharedBufferEntry<K, V> entry: entries.values()) {
				builder.append(entry.toString()).append("\n");
			}

			builder.append(")");

			return builder.toString();
		}

		@Override
		public boolean equals(Object obj) {
			if (obj instanceof SharedBufferPage) {
				@SuppressWarnings("unchecked")
				SharedBufferPage<K, V> other = (SharedBufferPage<K, V>) obj;

				return key.equals(other.key) && entries.equals(other.entries);
			} else {
				return false;
			}
		}

		@Override
		public int hashCode() {
			return Objects.hash(key, entries);
		}
	}

	/**
	 * Entry of a {@link SharedBufferPage}. The entry contains the value timestamp pair, a set of
	 * edges to other shared buffer entries denoting a relation, a reference to the owning page and
	 * a reference counter. The reference counter counts how many references are kept to this entry.
	 */
	private static class SharedBufferEntry<K, V> {

		private final ValueTimeWrapper<V> valueTime;
		private final Set<SharedBufferEdge<K, V>> edges;
		private final SharedBufferPage<K, V> page;
		// number of live references to this entry; 0 makes the entry eligible for removal
		private int referenceCounter;

		public SharedBufferEntry(
				final ValueTimeWrapper<V> valueTime,
				final SharedBufferPage<K, V> page) {
			this(valueTime, null, page);
		}

		public SharedBufferEntry(
				final ValueTimeWrapper<V> valueTime,
				final SharedBufferEdge<K, V> edge,
				final SharedBufferPage<K, V> page) {
			this.valueTime = valueTime;
			edges = new HashSet<>();

			if (edge != null) {
				edges.add(edge);
			}

			referenceCounter = 0;

			this.page = page;
		}

		public ValueTimeWrapper<V> getValueTime() {
			return valueTime;
		}

		public Collection<SharedBufferEdge<K, V>> getEdges() {
			return edges;
		}

		public K getKey() {
			return page.getKey();
		}

		public void addEdge(SharedBufferEdge<K, V> edge) {
			edges.add(edge);
		}

		/** Removes this entry from its owning page; returns false if there is no page. */
		public boolean remove() {
			if (page != null) {
				page.remove(valueTime);

				return true;
			} else {
				return false;
			}
		}

		public void increaseReferenceCounter() {
			referenceCounter++;
		}

		// Decrement clamped at zero, so over-releasing never drives the counter negative.
		public void decreaseReferenceCounter() {
			if (referenceCounter > 0) {
				referenceCounter--;
			}
		}

		public int getReferenceCounter() {
			return referenceCounter;
		}

		@Override
		public String toString() {
			return "SharedBufferEntry(" + valueTime + ", [" + StringUtils.join(edges, ", ") + "], " + referenceCounter + ")";
		}

		// NOTE(review): equals/hashCode depend on the mutable referenceCounter and edge set;
		// safe for the HashMap use in writeObject only because entries are not mutated there.
		@Override
		public boolean equals(Object obj) {
			if (obj instanceof SharedBufferEntry) {
				@SuppressWarnings("unchecked")
				SharedBufferEntry<K, V> other = (SharedBufferEntry<K, V>) obj;

				return valueTime.equals(other.valueTime) &&
					getKey().equals(other.getKey()) &&
					referenceCounter == other.referenceCounter &&
					edges.equals(other.edges);
			} else {
				return false;
			}
		}

		@Override
		public int hashCode() {
			return Objects.hash(valueTime, getKey(), referenceCounter, edges);
		}
	}

	/**
	 * Versioned edge between two shared buffer entries
	 *
	 * @param <K> Type of the key
	 * @param <V> Type of the value
	 */
	public static class SharedBufferEdge<K, V> {

		// target may be null, denoting an edge without a target entry
		private final SharedBufferEntry<K, V> target;
		private final DeweyNumber version;

		public SharedBufferEdge(final SharedBufferEntry<K, V> target, final DeweyNumber version) {
			this.target = target;
			this.version = version;
		}

		public SharedBufferEntry<K, V> getTarget() {
			return target;
		}

		public DeweyNumber getVersion() {
			return version;
		}

		@Override
		public String toString() {
			return "SharedBufferEdge(" + target + ", " + version + ")";
		}

		// Compares targets only by key and value/time (not full entry equality) to avoid
		// recursing through the entry graph.
		@Override
		public boolean equals(Object obj) {
			if (obj instanceof SharedBufferEdge) {
				@SuppressWarnings("unchecked")
				SharedBufferEdge<K, V> other = (SharedBufferEdge<K, V>) obj;

				if (version.equals(other.version)) {
					if (target == null && other.target == null) {
						return true;
					} else if (target != null && other.target != null) {
						return target.getKey().equals(other.target.getKey()) &&
							target.getValueTime().equals(other.target.getValueTime());
					} else {
						return false;
					}
				} else {
					return false;
				}
			} else {
				return false;
			}
		}

		@Override
		public int hashCode() {
			if (target != null) {
				return Objects.hash(target.getKey(), target.getValueTime(), version);
			} else {
				return version.hashCode();
			}
		}
	}

	/**
	 * Wrapper for a value timestamp pair.
	 *
	 * @param <V> Type of the value
	 */
	static class ValueTimeWrapper<V> {

		private final V value;
		private final long timestamp;

		public ValueTimeWrapper(final V value, final long timestamp) {
			this.value = value;
			this.timestamp = timestamp;
		}

		public V getValue() {
			return value;
		}

		public long getTimestamp() {
			return timestamp;
		}

		@Override
		public String toString() {
			return "ValueTimeWrapper(" + value + ", " + timestamp + ")";
		}

		@Override
		public boolean equals(Object obj) {
			if (obj instanceof ValueTimeWrapper) {
				@SuppressWarnings("unchecked")
				ValueTimeWrapper<V> other = (ValueTimeWrapper<V>)obj;

				return timestamp == other.getTimestamp() && value.equals(other.getValue());
			} else {
				return false;
			}
		}

		@Override
		public int hashCode() {
			// fold both halves of the timestamp into the hash alongside the value's hash
			return (int) (this.timestamp ^ this.timestamp >>> 32) + 31 * value.hashCode();
		}
	}

	/**
	 * Helper class to store the extraction state while extracting a sequence of values following
	 * the versioned entry edges.
	 *
	 * <p>An entry of null marks a terminal state of the depth first search in extractPatterns.
	 *
	 * @param <K> Type of the key
	 * @param <V> Type of the value
	 */
	private static class ExtractionState<K, V> {

		private final SharedBufferEntry<K, V> entry;
		private final DeweyNumber version;
		// path accumulated so far; may be shared with the parent state (first match) or a copy
		private final Stack<SharedBufferEntry<K, V>> path;

		public ExtractionState(
				final SharedBufferEntry<K, V> entry,
				final DeweyNumber version,
				final Stack<SharedBufferEntry<K, V>> path) {
			this.entry = entry;
			this.version = version;
			this.path = path;
		}

		public SharedBufferEntry<K, V> getEntry() {
			return entry;
		}

		public DeweyNumber getVersion() {
			return version;
		}

		public Stack<SharedBufferEntry<K, V>> getPath() {
			return path;
		}

		@Override
		public String toString() {
			return "ExtractionState(" + entry + ", " + version + ", [" + StringUtils.join(path, ", ") + "])";
		}
	}
}
| |
/*
* Copyright 2014 - 2015 Real Logic Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.co.real_logic.aeron;
import org.junit.After;
import org.junit.Assume;
import org.junit.Test;
import org.junit.experimental.theories.DataPoint;
import org.junit.experimental.theories.Theories;
import org.junit.experimental.theories.Theory;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import uk.co.real_logic.aeron.driver.MediaDriver;
import uk.co.real_logic.aeron.driver.ThreadingMode;
import uk.co.real_logic.aeron.logbuffer.FileBlockHandler;
import uk.co.real_logic.aeron.logbuffer.FragmentHandler;
import uk.co.real_logic.aeron.logbuffer.Header;
import uk.co.real_logic.agrona.BitUtil;
import uk.co.real_logic.agrona.concurrent.UnsafeBuffer;
import java.nio.channels.FileChannel;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import static uk.co.real_logic.aeron.logbuffer.FrameDescriptor.FRAME_ALIGNMENT;
import static uk.co.real_logic.aeron.protocol.DataHeaderFlyweight.HEADER_LENGTH;
import static uk.co.real_logic.agrona.BitUtil.SIZE_OF_INT;
/**
* Test that has a publisher and subscriber and single media driver for unicast and multicast cases
*/
@RunWith(Theories.class)
public class PubAndSubTest
{
@DataPoint
public static final String UNICAST_URI = "udp://localhost:54325";
@DataPoint
public static final String MULTICAST_URI = "udp://localhost@224.20.30.39:54326";
@DataPoint
public static final String IPC_URI = "aeron:ipc";
private static final int STREAM_ID = 1;
private static final ThreadingMode THREADING_MODE = ThreadingMode.SHARED;
private final MediaDriver.Context context = new MediaDriver.Context();
private final Aeron.Context publishingAeronContext = new Aeron.Context();
private final Aeron.Context subscribingAeronContext = new Aeron.Context();
private Aeron publishingClient;
private Aeron subscribingClient;
private MediaDriver driver;
private Subscription subscription;
private Publication publication;
private UnsafeBuffer buffer = new UnsafeBuffer(new byte[8192]);
private FragmentHandler fragmentHandler = mock(FragmentHandler.class);
private FileBlockHandler fileBlockHandler = mock(FileBlockHandler.class);
/**
 * Boots the full test fixture for the given channel: an embedded media driver, one
 * publishing and one subscribing Aeron client, and a publication/subscription pair
 * on STREAM_ID. The order is significant — the driver must be running before the
 * clients connect, and the clients before publication/subscription are added.
 */
private void launch(final String channel) throws Exception
{
    context.threadingMode(THREADING_MODE);
    driver = MediaDriver.launch(context);
    publishingClient = Aeron.connect(publishingAeronContext);
    subscribingClient = Aeron.connect(subscribingAeronContext);
    publication = publishingClient.addPublication(channel, STREAM_ID);
    subscription = subscribingClient.addSubscription(channel, STREAM_ID);
}
/**
 * Tears the fixture down in dependency order: publication/subscription first, then the
 * clients, then the media driver, and finally the driver's directory. Each resource is
 * null-checked so a partially launched (or never launched) fixture still cleans up.
 */
@After
public void closeEverything() throws Exception
{
    if (null != publication)
    {
        publication.close();
    }

    if (null != subscription)
    {
        subscription.close();
    }

    if (null != subscribingClient)
    {
        subscribingClient.close();
    }

    if (null != publishingClient)
    {
        publishingClient.close();
    }

    if (null != driver)
    {
        driver.close();
    }

    context.deleteAeronDirectory();
}
/**
 * Smoke test: merely launching the driver, clients, publication and subscription for
 * every channel theory must succeed; teardown is exercised by {@code closeEverything()}.
 */
@Theory
@Test(timeout = 10000)
public void shouldSpinUpAndShutdown(final String channel) throws Exception
{
    launch(channel);
}
/**
 * Publishes a single int-sized message and polls the subscription until the fragment
 * arrives, then verifies the handler saw exactly that fragment (int payload after the
 * data header).
 */
@Theory
@Test(timeout = 10000)
public void shouldReceivePublishedMessage(final String channel) throws Exception
{
    launch(channel);

    publishMessage();

    // Poll until the published fragment shows up, bounded by (slightly less than) the test timeout.
    final int[] receivedFragments = new int[1];

    SystemTestHelper.executeUntil(
        () -> receivedFragments[0] > 0,
        (i) ->
        {
            receivedFragments[0] += subscription.poll(fragmentHandler, 10);
            Thread.yield();
        },
        Integer.MAX_VALUE,
        TimeUnit.MILLISECONDS.toNanos(9900));

    verify(fragmentHandler).onFragment(
        any(UnsafeBuffer.class),
        eq(HEADER_LENGTH),
        eq(SIZE_OF_INT),
        any(Header.class));
}
/**
 * Publishes a single message and drains it via the file-based poll path, verifying the
 * block handler receives the frame-aligned block at offset 0 and that the file channel
 * handed to the callback is still open.
 */
@Theory
@Test(timeout = 10000)
public void shouldReceivePublishedMessageViaPollFile(final String channel) throws Exception
{
    launch(channel);

    publishMessage();

    // Spin on filePoll until something has been consumed.
    final int[] polledCount = new int[1];

    SystemTestHelper.executeUntil(
        () -> polledCount[0] > 0,
        (i) ->
        {
            polledCount[0] += subscription.filePoll(fileBlockHandler, Integer.MAX_VALUE);
            Thread.yield();
        },
        Integer.MAX_VALUE,
        TimeUnit.MILLISECONDS.toNanos(9900));

    final long expectedOffset = 0L;
    final int messageSize = SIZE_OF_INT;
    final int expectedLength = BitUtil.align(HEADER_LENGTH + messageSize, FRAME_ALIGNMENT);
    final ArgumentCaptor<FileChannel> channelCaptor = ArgumentCaptor.forClass(FileChannel.class);

    verify(fileBlockHandler).onBlock(
        channelCaptor.capture(),
        eq(expectedOffset),
        eq(expectedLength),
        anyInt(),
        anyInt());

    assertTrue("File Channel is closed", channelCaptor.getValue().isOpen());
}
/**
 * Writes a single int payload into the scratch buffer and spins (yielding between
 * attempts) until the publication accepts the offer.
 */
private void publishMessage()
{
    buffer.putInt(0, 1);

    long position;
    do
    {
        position = publication.offer(buffer, 0, SIZE_OF_INT);
        if (position < 0L)
        {
            Thread.yield();
        }
    }
    while (position < 0L);
}
/**
 * Sends one message more than fits in a term buffer, receiving each message in lock step,
 * to verify that delivery continues cleanly across the term-buffer rollover.
 */
@Theory
@Test(timeout = 10000)
public void shouldContinueAfterBufferRollover(final String channel) throws Exception
{
    final int termBufferLength = 64 * 1024;
    final int numMessagesInTermBuffer = 64;
    final int messageLength = (termBufferLength / numMessagesInTermBuffer) - HEADER_LENGTH;
    final int numMessagesToSend = numMessagesInTermBuffer + 1;

    context.termBufferLength(termBufferLength);

    launch(channel);

    // lock-step: each message is received before the next is offered
    for (int msgNum = 0; msgNum < numMessagesToSend; msgNum++)
    {
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            Thread.yield();
        }

        final int[] received = new int[1];

        SystemTestHelper.executeUntil(
            () -> received[0] > 0,
            (j) ->
            {
                received[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(500));
    }

    verify(fragmentHandler, times(numMessagesToSend)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Fills the term buffer so that the space left at the end is exactly one header, forcing
 * the smallest possible padding frame at rollover, then checks that the message after the
 * rollover is still delivered. The send pattern is deliberate: lock-step reception for
 * most of the term, then a final burst of 7 full messages, one short message sized to
 * leave header-only padding, and one post-rollover message.
 */
@Theory
@Test(timeout = 10000)
public void shouldContinueAfterRolloverWithMinimalPaddingHeader(final String channel) throws Exception
{
    final int termBufferLength = 64 * 1024;
    final int termBufferLengthMinusPaddingHeader = termBufferLength - HEADER_LENGTH;
    final int num1kMessagesInTermBuffer = 63;
    // sized so that after 63 x 1KiB frames plus this frame, exactly HEADER_LENGTH bytes remain
    final int lastMessageLength =
        termBufferLengthMinusPaddingHeader - (num1kMessagesInTermBuffer * 1024) - HEADER_LENGTH;
    final int messageLength = 1024 - HEADER_LENGTH;

    context.termBufferLength(termBufferLength);

    launch(channel);

    // lock step reception until we get to within 8 messages of the end
    for (int i = 0; i < num1kMessagesInTermBuffer - 7; i++)
    {
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            Thread.yield();
        }

        final int fragmentsRead[] = new int[1];
        SystemTestHelper.executeUntil(
            () -> fragmentsRead[0] > 0,
            (j) ->
            {
                fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(500));
    }

    // burst the remaining 7 full-size messages without polling in between
    for (int i = 7; i > 0; i--)
    {
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            Thread.yield();
        }
    }

    // small enough to leave room for padding that is just a header
    while (publication.offer(buffer, 0, lastMessageLength) < 0L)
    {
        Thread.yield();
    }

    // no roll over
    while (publication.offer(buffer, 0, messageLength) < 0L)
    {
        Thread.yield();
    }

    // drain the 9 outstanding fragments (7 burst + short + post-rollover)
    final int fragmentsRead[] = new int[1];
    SystemTestHelper.executeUntil(
        () -> fragmentsRead[0] == 9,
        (j) ->
        {
            fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
            Thread.yield();
        },
        Integer.MAX_VALUE,
        TimeUnit.MILLISECONDS.toNanos(500));

    // verify ordering: all full-size messages, then the short one, then the post-rollover one
    final InOrder inOrder = inOrder(fragmentHandler);
    inOrder.verify(fragmentHandler, times(num1kMessagesInTermBuffer)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
    inOrder.verify(fragmentHandler, times(1)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(lastMessageLength),
        any(Header.class));
    inOrder.verify(fragmentHandler, times(1)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Publishes two term buffers' worth of messages one-for-one (each received before the
 * next is sent) under a simulated, seeded 10% data-loss rate and verifies that every
 * message is still delivered. Skipped for the IPC channel via the Assume below.
 */
@Theory
@Test(timeout = 10000)
public void shouldReceivePublishedMessageOneForOneWithDataLoss(final String channel) throws Exception
{
    // Check the assumption before paying for the fixture: previously launch(channel) ran
    // first, so skipped IPC theories booted and tore down a full driver + two clients for
    // nothing. The assumption only reads the theory parameter, so it is safe to hoist.
    Assume.assumeThat(channel, not(IPC_URI));

    final int termBufferLength = 64 * 1024;
    final int numMessagesInTermBuffer = 64;
    final int messageLength = (termBufferLength / numMessagesInTermBuffer) - HEADER_LENGTH;
    final int numMessagesToSend = 2 * numMessagesInTermBuffer;

    context.termBufferLength(termBufferLength);
    context.dataLossRate(0.10); // 10% data loss
    context.dataLossSeed(0xdeadbeefL); // predictable seed

    launch(channel);

    for (int i = 0; i < numMessagesToSend; i++)
    {
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            Thread.yield();
        }

        // lock-step: wait for this message (including any loss repair) before the next send
        final int[] fragmentsRead = new int[1];

        SystemTestHelper.executeUntil(
            () -> fragmentsRead[0] > 0,
            (j) ->
            {
                fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(900));
    }

    verify(fragmentHandler, times(numMessagesToSend)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Publishes two term buffers' worth of messages in four batches under a simulated,
 * seeded 10% data-loss rate, draining each batch before sending the next, and verifies
 * that every message is delivered. Skipped for the IPC channel via the Assume below.
 */
@Theory
@Test(timeout = 10000)
public void shouldReceivePublishedMessageBatchedWithDataLoss(final String channel) throws Exception
{
    // Check the assumption before paying for the fixture: previously launch(channel) ran
    // first, so skipped IPC theories booted and tore down a full driver + two clients for
    // nothing. The assumption only reads the theory parameter, so it is safe to hoist.
    Assume.assumeThat(channel, not(IPC_URI));

    final int termBufferLength = 64 * 1024;
    final int numMessagesInTermBuffer = 64;
    final int messageLength = (termBufferLength / numMessagesInTermBuffer) - HEADER_LENGTH;
    final int numMessagesToSend = 2 * numMessagesInTermBuffer;
    final int numBatches = 4;
    final int numMessagesPerBatch = numMessagesToSend / numBatches;

    context.termBufferLength(termBufferLength);
    context.dataLossRate(0.10); // 10% data loss
    context.dataLossSeed(0xcafebabeL); // predictable seed

    launch(channel);

    for (int i = 0; i < numBatches; i++)
    {
        // send the whole batch without polling in between
        for (int j = 0; j < numMessagesPerBatch; j++)
        {
            while (publication.offer(buffer, 0, messageLength) < 0L)
            {
                Thread.yield();
            }
        }

        // drain the batch (including any loss repair) before the next one
        final int[] fragmentsRead = new int[1];

        SystemTestHelper.executeUntil(
            () -> fragmentsRead[0] >= numMessagesPerBatch,
            (j) ->
            {
                fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(900));
    }

    verify(fragmentHandler, times(numMessagesToSend)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Publishes slightly more than one full term of messages in batches and verifies
 * that delivery continues correctly across the term-buffer rollover.
 */
@Theory
@Test(timeout = 10000)
public void shouldContinueAfterBufferRolloverBatched(final String channel) throws Exception
{
    final int termBufferLength = 64 * 1024;
    final int numBatchesPerTerm = 4;
    final int numMessagesPerBatch = 16;
    final int numMessagesInTermBuffer = numMessagesPerBatch * numBatchesPerTerm;
    final int messageLength = (termBufferLength / numMessagesInTermBuffer) - HEADER_LENGTH;
    // One extra message forces the publication to roll over into the next term.
    final int numMessagesToSend = numMessagesInTermBuffer + 1;

    context.termBufferLength(termBufferLength);

    launch(channel);

    for (int i = 0; i < numBatchesPerTerm; i++)
    {
        for (int j = 0; j < numMessagesPerBatch; j++)
        {
            while (publication.offer(buffer, 0, messageLength) < 0L)
            {
                Thread.yield();
            }
        }

        final int[] fragmentsRead = new int[1]; // single-element array so the lambda can mutate it
        SystemTestHelper.executeUntil(
            () -> fragmentsRead[0] >= numMessagesPerBatch,
            (j) ->
            {
                fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(900));
    }

    // The single rollover message that lands in the next term.
    while (publication.offer(buffer, 0, messageLength) < 0L)
    {
        Thread.yield();
    }

    final int[] fragmentsRead = new int[1];
    SystemTestHelper.executeUntil(
        () -> fragmentsRead[0] > 0,
        (j) ->
        {
            fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
            Thread.yield();
        },
        Integer.MAX_VALUE,
        TimeUnit.MILLISECONDS.toNanos(900));

    verify(fragmentHandler, times(numMessagesToSend)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Verifies delivery across a term rollover where the term ends in a padding frame:
 * 1032-byte frames do not divide 64 KiB evenly, so the last message cannot fit and
 * forces padding followed by rollover into the next term.
 */
@Theory
@Test(timeout = 10000)
public void shouldContinueAfterBufferRolloverWithPadding(final String channel) throws Exception
{
    /*
     * 65536 bytes in the buffer
     * 63 * 1032 = 65016
     * 65536 - 65016 = 520 bytes padding at the end
     * so, sending 64 messages causes the last one to overflow into the next term
     */
    final int termBufferLength = 64 * 1024;
    final int messageLength = 1032 - HEADER_LENGTH;
    final int numMessagesToSend = 64;

    context.termBufferLength(termBufferLength);

    launch(channel);

    for (int i = 0; i < numMessagesToSend; i++)
    {
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            Thread.yield();
        }

        final int[] fragmentsRead = new int[1]; // single-element array so the lambda can mutate it
        SystemTestHelper.executeUntil(
            () -> fragmentsRead[0] > 0,
            (j) ->
            {
                fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(500));
    }

    verify(fragmentHandler, times(numMessagesToSend)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Batched variant of the padding-rollover test: sends the same 64 messages in four
 * batches, draining each batch before sending the next, across the padded rollover.
 */
@Theory
@Test(timeout = 10000)
public void shouldContinueAfterBufferRolloverWithPaddingBatched(final String channel) throws Exception
{
    /*
     * 65536 bytes in the buffer
     * 63 * 1032 = 65016
     * 65536 - 65016 = 520 bytes padding at the end
     * so, sending 64 messages causes the last one to overflow into the next term
     */
    final int termBufferLength = 64 * 1024;
    final int messageLength = 1032 - HEADER_LENGTH;
    final int numMessagesToSend = 64;
    final int numBatchesPerTerm = 4;
    final int numMessagesPerBatch = numMessagesToSend / numBatchesPerTerm;

    context.termBufferLength(termBufferLength);

    launch(channel);

    for (int i = 0; i < numBatchesPerTerm; i++)
    {
        for (int j = 0; j < numMessagesPerBatch; j++)
        {
            while (publication.offer(buffer, 0, messageLength) < 0L)
            {
                Thread.yield();
            }
        }

        final int[] fragmentsRead = new int[1]; // single-element array so the lambda can mutate it
        SystemTestHelper.executeUntil(
            () -> fragmentsRead[0] >= numMessagesPerBatch,
            (j) ->
            {
                fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(900));
    }

    verify(fragmentHandler, times(numMessagesToSend)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Verifies flow control: with the subscriber not polling, the publisher may only get
 * ahead up to the flow-control window. Sends until offers stop succeeding, then
 * drains the subscription and checks every successfully offered message arrives.
 */
@Theory
@Test(timeout = 10000)
public void shouldReceiveOnlyAfterSendingUpToFlowControlLimit(final String channel) throws Exception
{
    /*
     * The subscriber will apply flow control before an entire term buffer is sent.
     * So, send until offers stop succeeding, then start polling the subscriber to drain.
     */
    final int termBufferLength = 64 * 1024;
    final int numMessagesPerTerm = 64;
    final int messageLength = (termBufferLength / numMessagesPerTerm) - HEADER_LENGTH;
    final int maxFails = 10000; // cap on consecutive failed offers before concluding the window is full
    int messagesSent = 0;

    context.termBufferLength(termBufferLength);

    launch(channel);

    for (int i = 0; i < numMessagesPerTerm; i++)
    {
        int offerFails = 0;
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            if (++offerFails > maxFails)
            {
                break; // flow-control window is full; this message was NOT sent
            }
            Thread.yield();
        }

        if (offerFails > maxFails)
        {
            break;
        }

        messagesSent++;
    }

    final int[] fragmentsRead = new int[1]; // single-element array so the lambda can mutate it
    final int messagesToReceive = messagesSent; // effectively-final copy for the lambda

    SystemTestHelper.executeUntil(
        () -> fragmentsRead[0] >= messagesToReceive,
        (j) ->
        {
            fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
            Thread.yield();
        },
        Integer.MAX_VALUE,
        TimeUnit.MILLISECONDS.toNanos(500));

    verify(fragmentHandler, times(messagesToReceive)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Verifies that a subscriber can close its subscription mid-stream, re-subscribe to
 * the same channel/stream, and continue receiving messages once the new image
 * becomes available.
 */
@Theory
@Test(timeout = 10000)
public void shouldReceivePublishedMessageOneForOneWithReSubscription(final String channel) throws Exception
{
    final int termBufferLength = 64 * 1024;
    final int numMessagesInTermBuffer = 64;
    final int messageLength = (termBufferLength / numMessagesInTermBuffer) - HEADER_LENGTH;
    final int numMessagesToSendStageOne = numMessagesInTermBuffer / 2;
    final int numMessagesToSendStageTwo = numMessagesInTermBuffer;
    final CountDownLatch newImageLatch = new CountDownLatch(1);
    // Single-element array so the availableImageHandler lambda can observe stage changes.
    final int[] stage = { 1 };

    context.termBufferLength(termBufferLength);
    subscribingAeronContext.availableImageHandler(
        (image, subscription, position, info) ->
        {
            // Only count down for the image of the second (re-)subscription.
            if (2 == stage[0])
            {
                newImageLatch.countDown();
            }
        });

    launch(channel);

    // Stage one: send and drain half a term on the original subscription.
    for (int i = 0; i < numMessagesToSendStageOne; i++)
    {
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            Thread.yield();
        }

        final int[] fragmentsRead = new int[1];
        SystemTestHelper.executeUntil(
            () -> fragmentsRead[0] > 0,
            (j) ->
            {
                fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(900));
    }

    // Re-subscribe and wait until the new image is available before resuming.
    subscription.close();
    stage[0] = 2;
    subscription = subscribingClient.addSubscription(channel, STREAM_ID);
    newImageLatch.await();

    // Stage two: a full term's worth of messages on the new subscription.
    for (int i = 0; i < numMessagesToSendStageTwo; i++)
    {
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            Thread.yield();
        }

        final int[] fragmentsRead = new int[1];
        SystemTestHelper.executeUntil(
            () -> fragmentsRead[0] > 0,
            (j) ->
            {
                fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
                Thread.yield();
            },
            Integer.MAX_VALUE,
            TimeUnit.MILLISECONDS.toNanos(900));
    }

    verify(fragmentHandler, times(numMessagesToSendStageOne + numMessagesToSendStageTwo)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(messageLength),
        any(Header.class));
}
/**
 * Verifies that a message whose length is an exact multiple of the per-fragment max
 * payload fragments into exactly that many full frames, with no remainder fragment.
 */
@Theory
@Test(timeout = 10000)
public void shouldFragmentExactMessageLengthsCorrectly(final String channel) throws Exception
{
    final int termBufferLength = 64 * 1024;
    final int numFragmentsPerMessage = 2;
    final int mtuLength = 4096;
    final int frameLength = mtuLength - HEADER_LENGTH;
    // Each message is exactly two max-size payloads, so it should yield two full frames.
    final int messageLength = frameLength * numFragmentsPerMessage;
    final int numMessagesToSend = 2;
    final int numFramesToExpect = numMessagesToSend * numFragmentsPerMessage;

    context.termBufferLength(termBufferLength)
        .mtuLength(mtuLength);

    launch(channel);
    // MTU-based fragmentation only applies to network transports, so skip the IPC case.
    // NOTE(review): the assumption runs after launch(); presumably tearDown copes with
    // a launched-then-skipped test -- confirm before reordering.
    Assume.assumeThat(channel, not(IPC_URI));

    for (int i = 0; i < numMessagesToSend; i++)
    {
        while (publication.offer(buffer, 0, messageLength) < 0L)
        {
            Thread.yield();
        }
    }

    final int[] fragmentsRead = new int[1];
    SystemTestHelper.executeUntil(
        // Fixed: was "> numFramesToExpect", which can never be satisfied because at most
        // numFramesToExpect fragments arrive, so the loop always ran to the full timeout.
        () -> fragmentsRead[0] >= numFramesToExpect,
        (j) ->
        {
            fragmentsRead[0] += subscription.poll(fragmentHandler, 10);
            Thread.yield();
        },
        Integer.MAX_VALUE,
        TimeUnit.MILLISECONDS.toNanos(500));

    verify(fragmentHandler, times(numFramesToExpect)).onFragment(
        any(UnsafeBuffer.class),
        anyInt(),
        eq(frameLength),
        any(Header.class));
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.scopeView;
import com.intellij.ProjectTopics;
import com.intellij.history.LocalHistory;
import com.intellij.history.LocalHistoryAction;
import com.intellij.ide.*;
import com.intellij.ide.dnd.aware.DnDAwareTree;
import com.intellij.ide.projectView.ProjectView;
import com.intellij.ide.projectView.ProjectViewNodeDecorator;
import com.intellij.ide.projectView.impl.AbstractProjectViewPane;
import com.intellij.ide.projectView.impl.ModuleGroup;
import com.intellij.ide.projectView.impl.ProjectViewPane;
import com.intellij.ide.projectView.impl.ProjectViewTree;
import com.intellij.ide.scopeView.nodes.BasePsiNode;
import com.intellij.ide.ui.customization.CustomizationUtil;
import com.intellij.ide.util.DeleteHandler;
import com.intellij.ide.util.DirectoryChooserUtil;
import com.intellij.ide.util.EditorHelper;
import com.intellij.ide.util.treeView.TreeState;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.colors.CodeInsightColors;
import com.intellij.openapi.editor.colors.EditorColorsManager;
import com.intellij.openapi.editor.markup.EffectType;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.ide.CopyPasteManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtil;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.IndexNotReadyException;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootAdapter;
import com.intellij.openapi.roots.ModuleRootEvent;
import com.intellij.openapi.roots.ui.configuration.actions.ModuleDeleteProvider;
import com.intellij.openapi.util.ActionCallback;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vcs.FileStatusListener;
import com.intellij.openapi.vcs.FileStatusManager;
import com.intellij.openapi.vcs.changes.*;
import com.intellij.openapi.vfs.VfsUtilCore;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.packageDependencies.DefaultScopesProvider;
import com.intellij.packageDependencies.DependencyValidationManager;
import com.intellij.packageDependencies.ui.*;
import com.intellij.problems.WolfTheProblemSolver;
import com.intellij.psi.*;
import com.intellij.psi.search.scope.packageSet.*;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.ui.*;
import com.intellij.util.EditSourceOnDoubleClickHandler;
import com.intellij.util.Function;
import com.intellij.util.FunctionUtil;
import com.intellij.util.OpenSourceUtil;
import com.intellij.util.containers.HashSet;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import com.intellij.util.ui.update.MergingUpdateQueue;
import com.intellij.util.ui.update.UiNotifyConnector;
import com.intellij.util.ui.update.Update;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.TreeExpansionEvent;
import javax.swing.event.TreeWillExpandListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.ExpandVetoException;
import javax.swing.tree.TreePath;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.List;
/**
* User: anna
* Date: 25-Jan-2006
*/
/**
 * Panel behind the "Scope" project-view pane: shows the files matched by the
 * currently selected {@link NamedScope} as a directory/package tree and keeps that
 * tree in sync with PSI, VCS file-status and problem-highlighting events.
 */
public class ScopeTreeViewPanel extends JPanel implements Disposable {
  private static final Logger LOG = Logger.getInstance("com.intellij.ide.scopeView.ScopeTreeViewPanel");

  // Receives IDE "create/select" requests (e.g. New File) targeted at this view.
  private final IdeView myIdeView = new MyIdeView();
  // Keeps the tree in sync with PSI changes (files/directories added, removed, moved).
  private final MyPsiTreeChangeAdapter myPsiTreeChangeAdapter = new MyPsiTreeChangeAdapter();

  // Drag-and-drop aware tree; file-color support delegates to the project-view settings.
  private final DnDAwareTree myTree = new DnDAwareTree(){
    @Override
    public boolean isFileColorsEnabled() {
      return ProjectViewTree.isFileColorsEnabledFor(this);
    }

    @Nullable
    @Override
    public Color getFileColorFor(DefaultMutableTreeNode node) {
      if (!(node instanceof PackageDependenciesNode)) {
        return null;
      }
      return ProjectViewTree.getColorForObject(((PackageDependenciesNode)node).getPsiElement(), myProject,
                                               FunctionUtil.<PsiElement>id());
    }
  };

  @NotNull
  private final Project myProject;
  // Rebuilt on every refreshScope(); produces the tree model for the current scope.
  private FileTreeModelBuilder myBuilder;
  // Name of the scope currently shown; an unknown/null name falls back to the "All" scope.
  private String CURRENT_SCOPE_NAME;

  private TreeExpansionMonitor<PackageDependenciesNode> myTreeExpansionMonitor;
  private CopyPasteDelegator myCopyPasteDelegator;
  private final MyDeletePSIElementProvider myDeletePSIElementProvider = new MyDeletePSIElementProvider();
  private final ModuleDeleteProvider myDeleteModuleProvider = new ModuleDeleteProvider();
  private final DependencyValidationManager myDependencyValidationManager;
  // Updates problem (red wave) markup as files gain or lose highlighted problems.
  private final WolfTheProblemSolver.ProblemListener myProblemListener = new MyProblemListener();

  // Recolors nodes when VCS file statuses change.
  private final FileStatusListener myFileStatusListener = new FileStatusListener() {
    @Override
    public void fileStatusesChanged() {
      // Bulk change: recolor every expanded node and its direct children.
      final List<TreePath> treePaths = TreeUtil.collectExpandedPaths(myTree);
      for (TreePath treePath : treePaths) {
        final Object component = treePath.getLastPathComponent();
        if (component instanceof PackageDependenciesNode) {
          ((PackageDependenciesNode)component).updateColor();
          for (int i = 0; i< ((PackageDependenciesNode)component).getChildCount(); i++) {
            ((PackageDependenciesNode)((PackageDependenciesNode)component).getChildAt(i)).updateColor();
          }
        }
      }
    }

    @Override
    public void fileStatusChanged(@NotNull VirtualFile virtualFile) {
      if (!virtualFile.isValid()) return;
      final PsiFile file = PsiManager.getInstance(myProject).findFile(virtualFile);
      if (file != null) {
        final NamedScope currentScope = getCurrentScope();
        final PackageSet value = currentScope.getValue();
        // Only recolor files that belong to the scope currently on screen.
        if (value != null && value.contains(file, NamedScopesHolder.getHolder(myProject, currentScope.getName(), myDependencyValidationManager))) {
          if (!myBuilder.hasFileNode(virtualFile)) return;
          final PackageDependenciesNode node = myBuilder.getFileParentNode(virtualFile);
          final PackageDependenciesNode[] nodes = FileTreeModelBuilder.findNodeForPsiElement(node, file);
          if (nodes != null) {
            for (PackageDependenciesNode dependenciesNode : nodes) {
              dependenciesNode.updateColor();
            }
          }
        }
      }
    }
  };

  // Batches tree refreshes with a 300 ms quiet period; active only while the tree is showing.
  private final MergingUpdateQueue myUpdateQueue = new MergingUpdateQueue("ScopeViewUpdate", 300, isTreeShowing(), myTree);
  private final ScopeTreeViewPanel.MyChangesListListener myChangesListListener = new MyChangesListListener();
  // Set done when the current model rebuild finishes; selection requests are deferred on it.
  protected ActionCallback myActionCallback;
/**
 * Builds the panel: a scroll pane wrapping the scope tree, plus the merging update
 * queue that batches tree refreshes while the tree is visible.
 */
public ScopeTreeViewPanel(@NotNull Project project) {
  super(new BorderLayout());
  myUpdateQueue.setPassThrough(false); // we don't want passthrough mode, even in unit tests
  myProject = project;
  initTree();
  add(ScrollPaneFactory.createScrollPane(myTree), BorderLayout.CENTER);
  myDependencyValidationManager = DependencyValidationManager.getInstance(myProject);
  // Suspends/resumes the update queue as the tree is hidden/shown.
  final UiNotifyConnector uiNotifyConnector = new UiNotifyConnector(myTree, myUpdateQueue);
  Disposer.register(this, myUpdateQueue);
  Disposer.register(this, uiNotifyConnector);
  if (isTreeShowing()) {
    myUpdateQueue.showNotify();
  }
}
/**
 * Subscribes to project-level events that require the tree to update: root changes,
 * PSI changes, problem highlighting, change lists and VCS file statuses.
 */
public void initListeners() {
  final MessageBusConnection connection = myProject.getMessageBus().connect(this);
  connection.subscribe(ProjectTopics.PROJECT_ROOTS, new MyModuleRootListener());
  PsiManager.getInstance(myProject).addPsiTreeChangeListener(myPsiTreeChangeAdapter);
  WolfTheProblemSolver.getInstance(myProject).addProblemListener(myProblemListener);
  ChangeListManager.getInstance(myProject).addChangeListListener(myChangesListListener);
  // Registered with the project as parent disposable, hence no explicit removal in dispose().
  FileStatusManager.getInstance(myProject).addFileStatusListener(myFileStatusListener, myProject);
}
/**
 * Unregisters the listeners added in {@link #initListeners()} and drops the builder
 * caches. The file-status listener is project-scoped and cleaned up via Disposer.
 */
@Override
public void dispose() {
  FileTreeModelBuilder.clearCaches(myProject);
  PsiManager.getInstance(myProject).removePsiTreeChangeListener(myPsiTreeChangeAdapter);
  WolfTheProblemSolver.getInstance(myProject).removeProblemListener(myProblemListener);
  ChangeListManager.getInstance(myProject).removeChangeListListener(myChangesListListener);
}
/**
 * Selects (and optionally focuses) the node for {@code element} inside {@code file},
 * expanding its parent path first when it is collapsed. The work is queued on the
 * merging update queue and deferred until the current model rebuild completes.
 */
public void selectNode(final PsiElement element, final PsiFileSystemItem file, final boolean requestFocus) {
  final Runnable runnable = new Runnable() {
    @Override
    public void run() {
      myUpdateQueue.queue(new Update("Select") {
        @Override
        public void run() {
          if (myProject.isDisposed()) return;
          PackageDependenciesNode node = myBuilder.findNode(file, element);
          // Expanding may build children lazily, so the node is looked up again below.
          if (node != null && node.getPsiElement() != element) {
            final TreePath path = new TreePath(node.getPath());
            if (myTree.isCollapsed(path)) {
              myTree.expandPath(path);
              myTree.makeVisible(path);
            }
          }
          node = myBuilder.findNode(file, element);
          if (node != null) {
            TreeUtil.selectPath(myTree, new TreePath(node.getPath()));
            if (requestFocus) {
              myTree.requestFocus();
            }
          }
        }
      });
    }
  };
  doWhenDone(runnable);
}
/**
 * Runs {@code runnable} once the current tree-model rebuild has finished. Executes
 * immediately when no rebuild callback exists or when running in unit-test mode.
 */
private void doWhenDone(Runnable runnable) {
  final ActionCallback callback = myActionCallback;
  final boolean runImmediately = callback == null || ApplicationManager.getApplication().isUnitTestMode();
  if (runImmediately) {
    runnable.run();
    return;
  }
  callback.doWhenDone(runnable);
}
/**
 * Shows the given scope in the tree and remembers its name, unless it is the
 * implicit "All" scope (or null), which is never persisted as the current scope.
 */
public void selectScope(final NamedScope scope) {
  refreshScope(scope);
  final boolean rememberName = scope != null && scope != DefaultScopesProvider.getAllScope();
  if (rememberName) {
    CURRENT_SCOPE_NAME = scope.getName();
  }
}
/** @return this panel itself, typed as {@link JPanel} for embedding in the tool window. */
public JPanel getPanel() {
  return this;
}
/**
 * One-time tree setup: renderer, speed search, copy/paste delegation, expansion
 * handling, Enter-to-open-source, and the project-view context menu.
 */
private void initTree() {
  myTree.setCellRenderer(new MyTreeCellRenderer());
  myTree.setRootVisible(false);
  myTree.setShowsRootHandles(true);
  UIUtil.setLineStyleAngled(myTree);
  TreeUtil.installActions(myTree);
  EditSourceOnDoubleClickHandler.install(myTree);
  new TreeSpeedSearch(myTree);
  myCopyPasteDelegator = new CopyPasteDelegator(myProject, this) {
    @Override
    @NotNull
    protected PsiElement[] getSelectedElements() {
      return getSelectedPsiElements();
    }
  };
  myTreeExpansionMonitor = PackageTreeExpansionMonitor.install(myTree, myProject);
  // Plugins may contribute expansion listeners; fall back to plain sort-on-expand.
  final ScopeTreeStructureExpander[] extensions = Extensions.getExtensions(ScopeTreeStructureExpander.EP_NAME, myProject);
  for (ScopeTreeStructureExpander expander : extensions) {
    myTree.addTreeWillExpandListener(expander);
  }
  if (extensions.length == 0) {
    myTree.addTreeWillExpandListener(new SortingExpandListener());
  }
  // Enter on a leaf node opens the corresponding source file.
  myTree.addKeyListener(new KeyAdapter() {
    @Override
    public void keyPressed(KeyEvent e) {
      if (KeyEvent.VK_ENTER == e.getKeyCode()) {
        final Object component = myTree.getLastSelectedPathComponent();
        if (component instanceof DefaultMutableTreeNode) {
          final DefaultMutableTreeNode selectedNode = (DefaultMutableTreeNode)component;
          if (selectedNode.isLeaf()) {
            OpenSourceUtil.openSourcesFrom(DataManager.getInstance().getDataContext(myTree), false);
          }
        }
      }
    }
  });
  CustomizationUtil.installPopupHandler(myTree, IdeActions.GROUP_PROJECT_VIEW_POPUP, ActionPlaces.PROJECT_VIEW_POPUP);
}
/**
 * Collects the valid PSI elements behind the current tree selection.
 * Duplicates are removed; the result is empty (never null) when nothing qualifies.
 */
@NotNull
private PsiElement[] getSelectedPsiElements() {
  final TreePath[] paths = myTree.getSelectionPaths();
  if (paths == null) {
    return PsiElement.EMPTY_ARRAY;
  }
  final Set<PsiElement> elements = new HashSet<PsiElement>();
  for (TreePath selectionPath : paths) {
    final PackageDependenciesNode treeNode = (PackageDependenciesNode)selectionPath.getLastPathComponent();
    final PsiElement element = treeNode.getPsiElement();
    if (element != null && element.isValid()) {
      elements.add(element);
    }
  }
  return PsiUtilCore.toPsiElementArray(elements);
}
/**
 * Rebuilds the whole tree model for {@code scope} via {@link FileTreeModelBuilder},
 * mirroring the project view's flatten/compact/module settings.
 * {@code myActionCallback} is set done when the (asynchronous) build completes.
 *
 * @param scope scope to show; {@code null} (e.g. the scope was deleted) falls back to the "All" scope
 */
public void refreshScope(@Nullable NamedScope scope) {
  FileTreeModelBuilder.clearCaches(myProject);
  if (scope == null) { //was deleted
    scope = DefaultScopesProvider.getAllScope();
  }
  LOG.assertTrue(scope != null);
  final NamedScopesHolder holder = NamedScopesHolder.getHolder(myProject, scope.getName(), myDependencyValidationManager);
  final PackageSet packageSet = scope.getValue() != null ? scope.getValue() : new InvalidPackageSet("");
  final DependenciesPanel.DependencyPanelSettings settings = new DependenciesPanel.DependencyPanelSettings();
  settings.UI_FILTER_LEGALS = true;
  settings.UI_GROUP_BY_SCOPE_TYPE = false;
  settings.UI_SHOW_FILES = true;
  final ProjectView projectView = ProjectView.getInstance(myProject);
  settings.UI_FLATTEN_PACKAGES = projectView.isFlattenPackages(ScopeViewPane.ID);
  settings.UI_COMPACT_EMPTY_MIDDLE_PACKAGES = projectView.isHideEmptyMiddlePackages(ScopeViewPane.ID);
  settings.UI_SHOW_MODULES = projectView.isShowModules(ScopeViewPane.ID);
  settings.UI_SHOW_MODULE_GROUPS = projectView.isShowModules(ScopeViewPane.ID);
  myBuilder = new FileTreeModelBuilder(myProject, new Marker() {
    @Override
    public boolean isMarked(VirtualFile file) {
      // PackageSetBase can test a VirtualFile directly; otherwise go through the PsiFile.
      return packageSet != null && (packageSet instanceof PackageSetBase ? ((PackageSetBase)packageSet).contains(file, myProject, holder) : packageSet.contains(PackageSetBase.getPsiFile(file, myProject), holder));
    }
  }, settings);
  myTree.setPaintBusy(true);
  myBuilder.setTree(myTree);
  myTree.getEmptyText().setText("Loading...");
  myActionCallback = new ActionCallback();
  // Weak reference so TreeState does not keep the callback (and thus this panel) alive.
  myTree.putClientProperty(TreeState.CALLBACK, new WeakReference<ActionCallback>(myActionCallback));
  myTree.setModel(myBuilder.build(myProject, true, new Runnable(){
    @Override
    public void run() {
      myTree.setPaintBusy(false);
      myTree.getEmptyText().setText(UIBundle.message("message.nothingToShow"));
      myActionCallback.setDone();
    }
  }));
  ((PackageDependenciesNode)myTree.getModel().getRoot()).sortChildren();
  ((DefaultTreeModel)myTree.getModel()).reload();
  FileTreeModelBuilder.clearCaches(myProject);
}
/**
 * Resolves the scope currently shown by name; unknown or unset names fall back
 * to the "All" scope, so the result is never null.
 */
protected NamedScope getCurrentScope() {
  final NamedScope stored = NamedScopesHolder.getScope(myProject, CURRENT_SCOPE_NAME);
  final NamedScope scope = stored != null ? stored : DefaultScopesProvider.getAllScope();
  LOG.assertTrue(scope != null);
  return scope;
}
/**
 * DataProvider-style lookup used by IDE actions invoked on this panel
 * (module context, PSI element(s), cut/copy/paste, delete, paste target).
 *
 * @return the value for {@code dataId}, or {@code null} when the selection does not provide it
 */
@Nullable
public Object getData(String dataId) {
  if (LangDataKeys.MODULE_CONTEXT.is(dataId)) {
    final TreePath selectionPath = myTree.getSelectionPath();
    if (selectionPath != null) {
      PackageDependenciesNode node = (PackageDependenciesNode)selectionPath.getLastPathComponent();
      if (node instanceof ModuleNode) {
        return ((ModuleNode)node).getModule();
      }
    }
  }
  if (LangDataKeys.PSI_ELEMENT.is(dataId)) {
    final TreePath selectionPath = myTree.getSelectionPath();
    if (selectionPath != null) {
      PackageDependenciesNode node = (PackageDependenciesNode)selectionPath.getLastPathComponent();
      return node != null && node.isValid() ? node.getPsiElement() : null;
    }
  }
  final TreePath[] treePaths = myTree.getSelectionPaths();
  if (treePaths != null) {
    if (LangDataKeys.PSI_ELEMENT_ARRAY.is(dataId)) {
      Set<PsiElement> psiElements = new HashSet<PsiElement>();
      for (TreePath treePath : treePaths) {
        final PackageDependenciesNode node = (PackageDependenciesNode)treePath.getLastPathComponent();
        if (node.isValid()) {
          final PsiElement psiElement = node.getPsiElement();
          if (psiElement != null) {
            psiElements.add(psiElement);
          }
        }
      }
      return psiElements.isEmpty() ? null : PsiUtilCore.toPsiElementArray(psiElements);
    }
  }
  if (LangDataKeys.IDE_VIEW.is(dataId)) {
    return myIdeView;
  }
  if (PlatformDataKeys.CUT_PROVIDER.is(dataId)) {
    return myCopyPasteDelegator.getCutProvider();
  }
  if (PlatformDataKeys.COPY_PROVIDER.is(dataId)) {
    return myCopyPasteDelegator.getCopyProvider();
  }
  if (PlatformDataKeys.PASTE_PROVIDER.is(dataId)) {
    return myCopyPasteDelegator.getPasteProvider();
  }
  if (PlatformDataKeys.DELETE_ELEMENT_PROVIDER.is(dataId)) {
    // Module selections delete through the module provider; everything else through PSI.
    if (getSelectedModules() != null) {
      return myDeleteModuleProvider;
    }
    return myDeletePSIElementProvider;
  }
  if (LangDataKeys.PASTE_TARGET_PSI_ELEMENT.is(dataId)) {
    final TreePath selectionPath = myTree.getSelectionPath();
    if (selectionPath != null) {
      final Object pathComponent = selectionPath.getLastPathComponent();
      if (pathComponent instanceof DirectoryNode) {
        return ((DirectoryNode)pathComponent).getTargetDirectory();
      }
    }
  }
  return null;
}
/**
 * Collects the modules behind the current tree selection. Module-group nodes
 * contribute every module in the group (recursively).
 *
 * @return the selected modules, or {@code null} when the selection holds none
 */
@Nullable
private Module[] getSelectedModules() {
  final TreePath[] paths = myTree.getSelectionPaths();
  if (paths == null) {
    return null;
  }
  final Set<Module> modules = new HashSet<Module>();
  for (TreePath selectionPath : paths) {
    final PackageDependenciesNode treeNode = (PackageDependenciesNode)selectionPath.getLastPathComponent();
    if (treeNode instanceof ModuleNode) {
      modules.add(((ModuleNode)treeNode).getModule());
    }
    else if (treeNode instanceof ModuleGroupNode) {
      final ModuleGroup group = ((ModuleGroupNode)treeNode).getModuleGroup();
      modules.addAll(group.modulesInGroup(myProject, true));
    }
  }
  if (modules.isEmpty()) {
    return null;
  }
  return modules.toArray(new Module[modules.size()]);
}
/**
 * Re-sorts and reloads the tree model. For a non-root subtree, the expansion and
 * selection state is captured first and restored after the reload, which happens
 * on the EDT unless running under unit tests.
 *
 * @param rootToReload subtree to reload, or {@code null} to sort and reload the whole model
 */
private void reload(@Nullable final DefaultMutableTreeNode rootToReload) {
  final DefaultTreeModel treeModel = (DefaultTreeModel)myTree.getModel();
  if (rootToReload != null && rootToReload != treeModel.getRoot()) {
    final List<TreePath> treePaths = TreeUtil.collectExpandedPaths(myTree, new TreePath(rootToReload.getPath()));
    final List<TreePath> selectionPaths = TreeUtil.collectSelectedPaths(myTree, new TreePath(rootToReload.getPath()));
    final TreePath path = new TreePath(rootToReload.getPath());
    final boolean wasCollapsed = myTree.isCollapsed(path);
    final Runnable runnable = new Runnable() {
      @Override
      public void run() {
        // The node may have been detached from the model while this was queued.
        if (!isTreeShowing() || rootToReload.getParent() == null) return;
        TreeUtil.sort(rootToReload, getNodeComparator());
        treeModel.reload(rootToReload);
        if (!wasCollapsed) {
          // Collapse then re-expand to restore the previously expanded paths.
          myTree.collapsePath(path);
          for (TreePath treePath : treePaths) {
            myTree.expandPath(treePath);
          }
          for (TreePath selectionPath : selectionPaths) {
            TreeUtil.selectPath(myTree, selectionPath);
          }
        }
      }
    };
    if (ApplicationManager.getApplication().isUnitTestMode()) {
      runnable.run();
    } else {
      SwingUtilities.invokeLater(runnable);
    }
  }
  else {
    TreeUtil.sort(treeModel, getNodeComparator());
    treeModel.reload();
  }
}
/** Builds a node comparator that honors the pane's current "sort by type" setting. */
private DependencyNodeComparator getNodeComparator() {
  final boolean sortByType = ProjectView.getInstance(myProject).isSortByType(ScopeViewPane.ID);
  return new DependencyNodeComparator(sortByType);
}
/** Re-sorts the whole tree after the sort mode changed, preserving the expansion state. */
public void setSortByType() {
  myTreeExpansionMonitor.freeze();
  reload(null);
  myTreeExpansionMonitor.restore();
}
/** @return the callback that is set done when the current model rebuild finishes (may be null before the first build). */
ActionCallback getActionCallback() {
  return myActionCallback;
}
/**
 * Renders scope-tree nodes: icon, text colored by cut/deprecation/VCS state, a red
 * wave underline for nodes that contain problem files, plus decorations from
 * {@link ProjectViewNodeDecorator} extensions.
 */
private class MyTreeCellRenderer extends ColoredTreeCellRenderer {
  private WolfTheProblemSolver myWolfTheProblemSolver = WolfTheProblemSolver.getInstance(myProject);

  @Override
  public void customizeCellRenderer(JTree tree,
                                    Object value,
                                    boolean selected,
                                    boolean expanded,
                                    boolean leaf,
                                    int row,
                                    boolean hasFocus) {
    if (value instanceof PackageDependenciesNode) {
      PackageDependenciesNode node = (PackageDependenciesNode)value;
      try {
        setIcon(node.getIcon());
      }
      catch (IndexNotReadyException ignore) {
        // Icon lookup may need indices; skip the icon while indexing ("dumb mode").
      }
      final SimpleTextAttributes regularAttributes = SimpleTextAttributes.REGULAR_ATTRIBUTES;
      TextAttributes textAttributes = regularAttributes.toTextAttributes();
      if (node instanceof BasePsiNode && ((BasePsiNode)node).isDeprecated()) {
        textAttributes =
          EditorColorsManager.getInstance().getGlobalScheme().getAttributes(CodeInsightColors.DEPRECATED_ATTRIBUTES).clone();
      }
      final PsiElement psiElement = node.getPsiElement();
      // Cut elements render in the clipboard "cut" color; otherwise use the node's own color.
      textAttributes.setForegroundColor(CopyPasteManager.getInstance().isCutElement(psiElement) ? CopyPasteManager.CUT_COLOR : node.getColor());
      // Skip problem markup when the dedicated "Problems" scope is already shown.
      if (getCurrentScope() != DefaultScopesProvider.getInstance(myProject).getProblemsScope()) {
        final PsiFile containingFile = psiElement != null ? psiElement.getContainingFile() : null;
        final VirtualFile virtualFile = PsiUtilCore.getVirtualFile(psiElement);
        boolean isProblem;
        if (containingFile != null) {
          isProblem = myWolfTheProblemSolver.isProblemFile(virtualFile);
        }
        else if (virtualFile != null) {
          // Directory-like node: flag it when any problem file lives beneath it.
          isProblem = myWolfTheProblemSolver.hasProblemFilesBeneath(new Condition<VirtualFile>() {
            @Override
            public boolean value(VirtualFile file) {
              return VfsUtilCore.isAncestor(virtualFile, file, false);
            }
          });
        }
        else {
          final Module module = node instanceof ModuleNode ? ((ModuleNode)node).getModule() : null;
          isProblem = module != null && myWolfTheProblemSolver.hasProblemFilesBeneath(module);
        }
        if (isProblem) {
          textAttributes.setEffectColor(JBColor.RED);
          textAttributes.setEffectType(EffectType.WAVE_UNDERSCORE);
        }
      }
      append(node.toString(), SimpleTextAttributes.fromTextAttributes(textAttributes));
      String oldToString = toString();
      if (!myProject.isDisposed()) {
        for(ProjectViewNodeDecorator decorator: Extensions.getExtensions(ProjectViewNodeDecorator.EP_NAME, myProject)) {
          decorator.decorate(node, this);
        }
      }
      if (toString().equals(oldToString)) { // nothing was decorated
        final String locationString = node.getComment();
        if (locationString != null && locationString.length() > 0) {
          append(" (" + locationString + ")", SimpleTextAttributes.GRAY_ATTRIBUTES);
        }
      }
    }
  }
}
private class MyPsiTreeChangeAdapter extends PsiTreeChangeAdapter {
/**
 * Reacts to PSI additions. Only file-system level changes (new files/directories)
 * are of interest here; in-file edits are handled elsewhere.
 */
@Override
public void childAdded(@NotNull final PsiTreeChangeEvent event) {
  final PsiElement element = event.getParent();
  final PsiElement child = event.getChild();
  if (child == null) return;
  // A parent without a containing file indicates a directory-level change.
  if (element.getContainingFile() == null) {
    queueUpdate(new Runnable() {
      @Override
      public void run() {
        if (!child.isValid()) return;
        processNodeCreation(child);
      }
    }, false);
  }
}
// Adds a node for a newly created file or directory and reloads the affected subtree.
private void processNodeCreation(final PsiElement psiElement) {
  if (psiElement instanceof PsiFile && !isInjected((PsiFile)psiElement)) {
    final PackageDependenciesNode rootToReload = myBuilder.addFileNode((PsiFile)psiElement);
    if (rootToReload != null) {
      reload(rootToReload);
    }
  }
  else if (psiElement instanceof PsiDirectory) {
    final PsiElement[] children = psiElement.getChildren();
    if (children.length > 0) {
      // Directory arrived already populated (e.g. copied in) -- rebuild the scope wholesale.
      queueRefreshScope(getCurrentScope(), (PsiDirectory)psiElement);
    } else {
      final PackageDependenciesNode node = myBuilder.addDirNode((PsiDirectory)psiElement);
      if (node != null) {
        reload((DefaultMutableTreeNode)node.getParent());
      }
    }
  }
}
/** Removes the tree node of a file/directory that is about to be deleted from its parent directory. */
@Override
public void beforeChildRemoval(@NotNull final PsiTreeChangeEvent event) {
  final PsiElement child = event.getChild();
  final PsiElement parent = event.getParent();
  if (parent instanceof PsiDirectory && (child instanceof PsiFile && !isInjected((PsiFile)child) || child instanceof PsiDirectory)) {
    queueUpdate(new Runnable() {
      @Override
      public void run() {
        final DefaultMutableTreeNode rootToReload = myBuilder.removeNode(child, (PsiDirectory)parent);
        if (rootToReload != null) {
          reload(rootToReload);
        }
      }
    }, true);
  }
}
/** Before a move: drops the node from its old parent; childMoved() re-adds it at the new location. */
@Override
public void beforeChildMovement(@NotNull PsiTreeChangeEvent event) {
  final PsiElement oldParent = event.getOldParent();
  final PsiElement child = event.getChild();
  if (oldParent instanceof PsiDirectory) {
    if (child instanceof PsiFileSystemItem && (!(child instanceof PsiFile) || !isInjected((PsiFile)child))) {
      queueUpdate(new Runnable() {
        @Override
        public void run() {
          final DefaultMutableTreeNode rootToReload =
            myBuilder.removeNode(child, child instanceof PsiDirectory ? (PsiDirectory)child : (PsiDirectory)oldParent);
          if (rootToReload != null) {
            reload(rootToReload);
          }
        }
      }, true);
    }
  }
}
@Override
public void childMoved(@NotNull PsiTreeChangeEvent event) {
final PsiElement newParent = event.getNewParent();
final PsiElement child = event.getChild();
if (newParent instanceof PsiDirectory) {
if (child instanceof PsiFileSystemItem && (!(child instanceof PsiFile) || !isInjected((PsiFile)child))) {
final PsiFileSystemItem file = (PsiFileSystemItem)child;
queueUpdate(new Runnable() {
@Override
public void run() {
final VirtualFile virtualFile = file.getVirtualFile();
if (virtualFile != null && virtualFile.isValid()) {
final PsiFileSystemItem newFile = file.isValid() ? file :
(file.isDirectory() ? PsiManager.getInstance(myProject).findDirectory(virtualFile)
: PsiManager.getInstance(myProject).findFile(virtualFile));
if (newFile != null) {
final PackageDependenciesNode rootToReload = newFile.isDirectory() ? myBuilder.addDirNode((PsiDirectory)newFile)
: myBuilder.addFileNode((PsiFile)newFile);
if (rootToReload != null) {
reload(rootToReload);
}
}
}
}
}, true);
}
}
}
@Override
public void childrenChanged(@NotNull PsiTreeChangeEvent event) {
final PsiElement parent = event.getParent();
final PsiFile file = parent.getContainingFile();
if (file != null && file.getFileType() == StdFileTypes.JAVA) {
if (!file.getViewProvider().isPhysical() && !isInjected(file)) return;
queueUpdate(new Runnable() {
@Override
public void run() {
if (file.isValid() && file.getViewProvider().isPhysical()) {
final NamedScope scope = getCurrentScope();
final PackageSet packageSet = scope.getValue();
if (packageSet == null) return; //invalid scope selected
if (packageSet.contains(file, NamedScopesHolder.getHolder(myProject, scope.getName(), myDependencyValidationManager))){
reload(myBuilder.getFileParentNode(file.getVirtualFile()));
}
}
}
}, false);
}
}
@Override
public final void propertyChanged(@NotNull PsiTreeChangeEvent event) {
String propertyName = event.getPropertyName();
final PsiElement element = event.getElement();
if (element != null) {
final NamedScope scope = getCurrentScope();
if (propertyName.equals(PsiTreeChangeEvent.PROP_FILE_NAME) || propertyName.equals(PsiTreeChangeEvent.PROP_FILE_TYPES)) {
queueUpdate(new Runnable() {
@Override
public void run() {
if (element.isValid()) {
processRenamed(scope, element.getContainingFile());
}
}
}, false);
}
else if (propertyName.equals(PsiTreeChangeEvent.PROP_DIRECTORY_NAME)) {
final PackageSet value = getCurrentScope().getValue();
if (!(value instanceof PackageSetBase) || ((PackageSetBase)value).contains(((PsiDirectory)element).getVirtualFile(), myProject, myDependencyValidationManager)) {
queueRefreshScope(scope, (PsiDirectory)element);
}
}
}
}
@Override
public void childReplaced(@NotNull final PsiTreeChangeEvent event) {
final NamedScope scope = getCurrentScope();
final PsiElement element = event.getNewChild();
final PsiFile psiFile = event.getFile();
if (psiFile != null && !isInjected(psiFile)) {
if (psiFile.getLanguage() == psiFile.getViewProvider().getBaseLanguage()) {
queueUpdate(new Runnable() {
@Override
public void run() {
processRenamed(scope, psiFile);
}
}, false);
}
}
else if (element instanceof PsiDirectory && element.isValid()) {
queueRefreshScope(scope, (PsiDirectory)element);
}
}
private boolean isInjected(PsiFile psiFile) {
return InjectedLanguageManager.getInstance(myProject).isInjectedFragment(psiFile);
}
private void queueRefreshScope(final NamedScope scope, final PsiDirectory dir) {
myUpdateQueue.cancelAllUpdates();
queueUpdate(new Runnable() {
@Override
public void run() {
myTreeExpansionMonitor.freeze();
refreshScope(scope);
doWhenDone(new Runnable() {
@Override
public void run() {
myTreeExpansionMonitor.restore();
final PackageDependenciesNode dirNode = myBuilder.findNode(dir, dir);
if (dirNode != null) {
TreeUtil.selectPath(myTree, new TreePath(dirNode.getPath()));
}
}
});
}
}, false);
}
private void processRenamed(final NamedScope scope, final PsiFile file) {
if (!file.isValid() || !file.getViewProvider().isPhysical()) return;
final PackageSet packageSet = scope.getValue();
if (packageSet == null) return; //invalid scope selected
if (packageSet.contains(file, NamedScopesHolder.getHolder(myProject, scope.getName(), myDependencyValidationManager))) {
reload(myBuilder.addFileNode(file));
}
else {
final DefaultMutableTreeNode rootToReload = myBuilder.removeNode(file, file.getParent());
if (rootToReload != null) {
reload(rootToReload);
}
}
}
//expand/collapse state should be restored in actual request if needed
private void queueUpdate(final Runnable request, boolean updateImmediately) {
final Runnable wrapped = new Runnable() {
@Override
public void run() {
if (myProject.isDisposed()) return;
request.run();
}
};
if (updateImmediately && isTreeShowing()) {
myUpdateQueue.run(new Update(request) {
@Override
public void run() {
wrapped.run();
}
});
}
else {
myUpdateQueue.queue(new Update(request) {
@Override
public void run() {
wrapped.run();
}
@Override
public boolean isExpired() {
return !isTreeShowing();
}
});
}
}
}
/** Schedules a full refresh of the current scope whenever the project roots change. */
private class MyModuleRootListener extends ModuleRootAdapter {
  @Override
  public void rootsChanged(ModuleRootEvent event) {
    // Pending fine-grained updates are obsolete after a roots change:
    // drop them and queue a single full refresh instead.
    myUpdateQueue.cancelAllUpdates();
    final Update fullRefresh = new Update("RootsChanged") {
      @Override
      public void run() {
        refreshScope(getCurrentScope());
      }

      @Override
      public boolean isExpired() {
        // Skip the refresh entirely if the tree is no longer visible.
        return !isTreeShowing();
      }
    };
    myUpdateQueue.queue(fullRefresh);
  }
}
/**
 * {@link IdeView} implementation backed by the scope tree: selects elements in the
 * project view (switching to the plain project pane when the element is outside the
 * current scope) and derives target directories from the tree selection.
 */
private class MyIdeView implements IdeView {
  @Override
  public void selectElement(final PsiElement element) {
    if (element == null) return;
    final PackageSet scopeSet = getCurrentScope().getValue();
    final PsiFile containingFile = element.getContainingFile();
    if (scopeSet == null) return;

    // Resolve the virtual file: the containing file's, or the directory's own.
    VirtualFile vFile = null;
    if (containingFile != null) {
      vFile = containingFile.getVirtualFile();
    }
    else if (element instanceof PsiDirectory) {
      vFile = ((PsiDirectory)element).getVirtualFile();
    }

    if (vFile != null) {
      final ProjectView view = ProjectView.getInstance(myProject);
      final NamedScopesHolder holder = NamedScopesHolder.getHolder(myProject, CURRENT_SCOPE_NAME, myDependencyValidationManager);
      // Element outside the current scope cannot be shown here: fall back to
      // the regular project pane before selecting.
      if (scopeSet instanceof PackageSetBase && !((PackageSetBase)scopeSet).contains(vFile, myProject, holder) ||
          containingFile != null && !scopeSet.contains(containingFile, holder)) {
        view.changeView(ProjectViewPane.ID);
      }
      view.select(element, vFile, false);
    }

    final Editor editor = EditorHelper.openInEditor(element);
    if (editor != null) {
      ToolWindowManager.getInstance(myProject).activateEditorComponent();
    }
  }

  /**
   * Maps the single selected tree node to its directory, or returns null when the
   * selection is empty, multiple, or invalid.
   */
  @Nullable
  private PsiDirectory getDirectory() {
    final TreePath[] selection = myTree.getSelectionPaths();
    if (selection == null || selection.length != 1) return null;

    final PackageDependenciesNode selectedNode = (PackageDependenciesNode)selection[0].getLastPathComponent();
    if (!selectedNode.isValid()) return null;

    if (selectedNode instanceof DirectoryNode) {
      return (PsiDirectory)selectedNode.getPsiElement();
    }
    if (selectedNode instanceof BasePsiNode) {
      final PsiElement psiElement = selectedNode.getPsiElement();
      LOG.assertTrue(psiElement != null);
      final PsiFile psiFile = psiElement.getContainingFile();
      LOG.assertTrue(psiFile != null);
      return psiFile.getContainingDirectory();
    }
    if (selectedNode instanceof FileNode) {
      final PsiFile psiFile = (PsiFile)selectedNode.getPsiElement();
      LOG.assertTrue(psiFile != null);
      return psiFile.getContainingDirectory();
    }
    return null;
  }

  @Override
  public PsiDirectory[] getDirectories() {
    final PsiDirectory directory = getDirectory();
    if (directory == null) {
      return PsiDirectory.EMPTY_ARRAY;
    }
    return new PsiDirectory[]{directory};
  }

  @Override
  @Nullable
  public PsiDirectory getOrChooseDirectory() {
    return DirectoryChooserUtil.getOrChooseDirectory(this);
  }
}
/** Delete support for the scope tree selection, recorded as a single local-history action. */
private final class MyDeletePSIElementProvider implements DeleteProvider {
  @Override
  public boolean canDeleteElement(@NotNull DataContext dataContext) {
    return DeleteHandler.shouldEnableDeleteAction(getSelectedPsiElements());
  }

  @Override
  public void deleteElement(@NotNull DataContext dataContext) {
    // Filter out nulls and elements invalidated since selection.
    final List<PsiElement> validElements = new ArrayList<PsiElement>();
    for (PsiElement candidate : getSelectedPsiElements()) {
      if (candidate != null && candidate.isValid()) {
        validElements.add(candidate);
      }
    }
    final PsiElement[] toDelete = PsiUtilCore.toPsiElementArray(validElements);
    // Group the whole deletion under one named local-history action.
    final LocalHistoryAction action = LocalHistory.getInstance().startAction(IdeBundle.message("progress.deleting"));
    try {
      DeleteHandler.deletePsiElement(toDelete, myProject);
    }
    finally {
      action.finish();
    }
  }
}
/** @return the scope tree component displayed by this panel. */
public DnDAwareTree getTree() {
  return myTree;
}
/**
 * Mirrors WolfTheProblemSolver notifications into the "problems" scope:
 * files gaining problems are added to it, files losing problems are removed.
 */
private class MyProblemListener extends WolfTheProblemSolver.ProblemListener {
  @Override
  public void problemsAppeared(@NotNull VirtualFile file) {
    addNode(file, problemsScopeName());
  }

  @Override
  public void problemsDisappeared(@NotNull VirtualFile file) {
    removeNode(file, problemsScopeName());
  }

  // Name of the predefined "problems" scope for this project.
  private String problemsScopeName() {
    return DefaultScopesProvider.getInstance(myProject).getProblemsScope().getName();
  }
}
/**
 * Queues an update that adds the node for {@code file} to the builder, provided the
 * scope named {@code scopeName} is the currently shown scope pane.
 */
private void addNode(VirtualFile file, final String scopeName) {
  final Function<PsiFile, DefaultMutableTreeNode> addToBuilder = new Function<PsiFile, DefaultMutableTreeNode>() {
    @Override
    @Nullable
    public DefaultMutableTreeNode fun(final PsiFile psiFile) {
      return myBuilder.addFileNode(psiFile);
    }
  };
  queueUpdate(file, addToBuilder, scopeName);
}
/**
 * Queues an update that removes the node for {@code file} from the builder, provided
 * the scope named {@code scopeName} is the currently shown scope pane.
 */
private void removeNode(VirtualFile file, final String scopeName) {
  final Function<PsiFile, DefaultMutableTreeNode> removeFromBuilder = new Function<PsiFile, DefaultMutableTreeNode>() {
    @Override
    @Nullable
    public DefaultMutableTreeNode fun(final PsiFile psiFile) {
      return myBuilder.removeNode(psiFile, psiFile.getContainingDirectory());
    }
  };
  queueUpdate(file, removeFromBuilder, scopeName);
}
/**
 * Enqueues a tree update for {@code fileToRefresh}: resolves its PsiFile, asks
 * {@code rootToReloadGetter} for the subtree root to reload, and reloads it.
 * Does nothing unless the scope view pane for {@code scopeName} is the one
 * currently displayed; queued updates expire when the tree is hidden.
 */
private void queueUpdate(final VirtualFile fileToRefresh,
                         final Function<PsiFile, DefaultMutableTreeNode> rootToReloadGetter, final String scopeName) {
  if (myProject.isDisposed()) return;
  final AbstractProjectViewPane currentPane = ProjectView.getInstance(myProject).getCurrentProjectViewPane();
  final boolean thisScopeIsShown = currentPane != null &&
                                   ScopeViewPane.ID.equals(currentPane.getId()) &&
                                   scopeName.equals(currentPane.getSubId());
  if (!thisScopeIsShown) return;

  myUpdateQueue.queue(new Update(fileToRefresh) {
    @Override
    public void run() {
      if (myProject.isDisposed() || !fileToRefresh.isValid()) return;
      final PsiFile psiFile = PsiManager.getInstance(myProject).findFile(fileToRefresh);
      if (psiFile == null) return;
      reload(rootToReloadGetter.fun(psiFile));
    }

    @Override
    public boolean isExpired() {
      return !isTreeShowing();
    }
  });
}
// The tree counts as visible when it is actually showing on screen; unit-test mode
// is treated as always visible so queued updates are not expired during tests.
private boolean isTreeShowing() {
  return myTree.isShowing() || ApplicationManager.getApplication().isUnitTestMode();
}
/**
 * Reacts to VCS change-list events: list add/remove/rename re-fires scope listeners
 * when the affected list backs the currently shown scope pane, and moving changes
 * between lists adds/removes the corresponding file nodes.
 */
private class MyChangesListListener extends ChangeListAdapter {
  @Override
  public void changeListAdded(ChangeList list) {
    fireListeners(list, null);
  }

  @Override
  public void changeListRemoved(ChangeList list) {
    fireListeners(list, null);
  }

  @Override
  public void changeListRenamed(ChangeList list, String oldName) {
    fireListeners(list, oldName);
  }

  private void fireListeners(ChangeList list, @Nullable String oldName) {
    final AbstractProjectViewPane pane = ProjectView.getInstance(myProject).getCurrentProjectViewPane();
    if (pane == null || !ScopeViewPane.ID.equals(pane.getId())) {
      return;
    }
    // Only relevant when the shown sub-pane is backed by this list,
    // under either its current or (for renames) its previous name.
    final String subId = pane.getSubId();
    final boolean matchesCurrentName = list.getName().equals(subId);
    final boolean matchesOldName = oldName != null && oldName.equals(subId);
    if (!matchesCurrentName && !matchesOldName) {
      return;
    }
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      @Override
      public void run() {
        myDependencyValidationManager.fireScopeListeners();
      }
    }, myProject.getDisposed());
  }

  @Override
  public void changesRemoved(Collection<Change> changes, ChangeList fromList) {
    final String name = fromList.getName();
    final Set<VirtualFile> files = new HashSet<VirtualFile>();
    collectFiles(changes, files);
    for (VirtualFile file : files) {
      removeNode(file, name);
    }
  }

  @Override
  public void changesAdded(Collection<Change> changes, ChangeList toList) {
    final String name = toList.getName();
    final Set<VirtualFile> files = new HashSet<VirtualFile>();
    collectFiles(changes, files);
    for (VirtualFile file : files) {
      addNode(file, name);
    }
  }

  // Gathers the after-revision virtual files of the given changes into {@code files}.
  private void collectFiles(Collection<Change> changes, Set<VirtualFile> files) {
    for (Change change : changes) {
      final ContentRevision afterRevision = change.getAfterRevision();
      if (afterRevision == null) continue;
      final VirtualFile virtualFile = afterRevision.getFile().getVirtualFile();
      if (virtualFile != null) {
        files.add(virtualFile);
      }
    }
  }
}
/** Sorts a node's children lazily, right before the node is expanded. */
private class SortingExpandListener implements TreeWillExpandListener {
  @Override
  public void treeWillExpand(TreeExpansionEvent event) throws ExpandVetoException {
    final TreePath expandingPath = event.getPath();
    if (expandingPath == null) return;
    final PackageDependenciesNode expandingNode = (PackageDependenciesNode)expandingPath.getLastPathComponent();
    expandingNode.sortChildren();
    // Reload so the view picks up the new child order.
    ((DefaultTreeModel)myTree.getModel()).reload(expandingNode);
  }

  @Override
  public void treeWillCollapse(TreeExpansionEvent event) throws ExpandVetoException {}
}
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.daemon;
import com.intellij.codeHighlighting.Pass;
import com.intellij.codeInsight.CodeInsightTestCase;
import com.intellij.codeInsight.daemon.impl.DaemonCodeAnalyzerEx;
import com.intellij.codeInsight.daemon.impl.DaemonCodeAnalyzerImpl;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInsight.daemon.quickFix.LightQuickFixTestCase;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInsight.intention.IntentionManager;
import com.intellij.codeInsight.intention.impl.ShowIntentionActionsHandler;
import com.intellij.codeInspection.InspectionProfile;
import com.intellij.codeInspection.InspectionProfileEntry;
import com.intellij.codeInspection.InspectionToolProvider;
import com.intellij.codeInspection.LocalInspectionTool;
import com.intellij.codeInspection.ex.InspectionProfileImpl;
import com.intellij.codeInspection.ex.InspectionToolRegistrar;
import com.intellij.codeInspection.ex.InspectionToolWrapper;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.ide.startup.StartupManagerEx;
import com.intellij.ide.startup.impl.StartupManagerImpl;
import com.intellij.lang.ExternalAnnotatorsFilter;
import com.intellij.lang.LanguageAnnotators;
import com.intellij.lang.StdLanguages;
import com.intellij.lang.annotation.HighlightSeverity;
import com.intellij.lang.injection.InjectedLanguageManager;
import com.intellij.lang.java.JavaLanguage;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.ex.PathManagerEx;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.startup.StartupManager;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.profile.codeInspection.InspectionProjectProfileManager;
import com.intellij.psi.*;
import com.intellij.psi.impl.JavaPsiFacadeEx;
import com.intellij.psi.impl.cache.CacheManager;
import com.intellij.psi.impl.search.IndexPatternBuilder;
import com.intellij.psi.impl.source.resolve.reference.ReferenceProvidersRegistry;
import com.intellij.psi.impl.source.tree.TreeElement;
import com.intellij.psi.impl.source.tree.TreeUtil;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.UsageSearchContext;
import com.intellij.psi.xml.XmlFileNSInfoProvider;
import com.intellij.testFramework.ExpectedHighlightingData;
import com.intellij.testFramework.FileTreeAccessFilter;
import com.intellij.testFramework.HighlightTestInfo;
import com.intellij.testFramework.LightPlatformTestCase;
import com.intellij.testFramework.fixtures.impl.CodeInsightTestFixtureImpl;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.UIUtil;
import com.intellij.xml.XmlSchemaProvider;
import gnu.trove.TIntArrayList;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
 * Base class for tests that run the daemon highlighting passes over configured files
 * and compare the produced {@link HighlightInfo}s against expectations embedded in
 * the test data ({@link ExpectedHighlightingData}).
 *
 * Lifecycle: {@link #setUp()} configures inspections and runs startup activities;
 * {@link #tearDown()} restores daemon settings and verifies cleanup.
 */
public abstract class DaemonAnalyzerTestCase extends CodeInsightTestCase {
  // Guards against loading file trees the test did not explicitly allow;
  // replaceable via setVirtualFileFilter().
  private VirtualFileFilter myVirtualFileFilter = new FileTreeAccessFilter();

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    final LocalInspectionTool[] tools = configureLocalInspectionTools();
    CodeInsightTestFixtureImpl.configureInspections(tools, getProject(), Collections.<String>emptyList(),
                                                    getTestRootDisposable());
    DaemonCodeAnalyzerImpl daemonCodeAnalyzer = (DaemonCodeAnalyzerImpl)DaemonCodeAnalyzer.getInstance(getProject());
    daemonCodeAnalyzer.prepareForTest();
    final StartupManagerImpl startupManager = (StartupManagerImpl)StartupManagerEx.getInstanceEx(getProject());
    startupManager.runStartupActivities();
    startupManager.startCacheUpdate();
    startupManager.runPostStartupActivities();
    // Import hints are disabled so they don't interfere with expected highlighting.
    DaemonCodeAnalyzerSettings.getInstance().setImportHintEnabled(false);
    if (isPerformanceTest()) {
      IntentionManager.getInstance().getAvailableIntentionActions();  // hack to avoid slowdowns in PyExtensionFactory
      PathManagerEx.getTestDataPath(); // to cache stuff
      ReferenceProvidersRegistry.getInstance(); // pre-load tons of classes
      InjectedLanguageManager.getInstance(getProject()); // zillion of Dom Sem classes
      LanguageAnnotators.INSTANCE.allForLanguage(JavaLanguage.INSTANCE); // pile of annotator classes loads
      LanguageAnnotators.INSTANCE.allForLanguage(StdLanguages.XML);
      ProblemHighlightFilter.EP_NAME.getExtensions();
      Extensions.getExtensions(ImplicitUsageProvider.EP_NAME);
      Extensions.getExtensions(XmlSchemaProvider.EP_NAME);
      Extensions.getExtensions(XmlFileNSInfoProvider.EP_NAME);
      Extensions.getExtensions(ExternalAnnotatorsFilter.EXTENSION_POINT_NAME);
      Extensions.getExtensions(IndexPatternBuilder.EP_NAME);
    }
  }

  @Override
  protected void tearDown() throws Exception {
    try {
      DaemonCodeAnalyzerSettings.getInstance().setImportHintEnabled(true); // return default value to avoid unnecessary save
      final Project project = getProject();
      if (project != null) {
        ((StartupManagerImpl)StartupManager.getInstance(project)).checkCleared();
        ((DaemonCodeAnalyzerImpl)DaemonCodeAnalyzer.getInstance(project)).cleanupAfterTest();
      }
    }
    finally {
      // Always run the superclass teardown, even if daemon cleanup failed.
      super.tearDown();
    }
    //((VirtualFilePointerManagerImpl)VirtualFilePointerManager.getInstance()).assertPointersDisposed();
  }

  /** Registers and enables a single inspection for the test project. */
  protected void enableInspectionTool(@NotNull InspectionProfileEntry tool) {
    InspectionToolWrapper toolWrapper = InspectionToolRegistrar.wrapTool(tool);
    LightPlatformTestCase.enableInspectionTool(getProject(), toolWrapper);
  }

  protected void enableInspectionTools(@NotNull InspectionProfileEntry... tools) {
    for (InspectionProfileEntry tool : tools) {
      enableInspectionTool(tool);
    }
  }

  /** Instantiates and enables every inspection class the provider declares. */
  protected void enableInspectionToolsFromProvider(InspectionToolProvider toolProvider){
    try {
      for (Class c : toolProvider.getInspectionClasses()) {
        enableInspectionTool((InspectionProfileEntry)c.newInstance());
      }
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  protected void disableInspectionTool(@NotNull String shortName){
    InspectionProfile profile = InspectionProjectProfileManager.getInstance(getProject()).getInspectionProfile();
    if (profile.getInspectionTool(shortName, getProject()) != null) {
      ((InspectionProfileImpl)profile).disableTool(shortName, getProject());
    }
  }

  /** Override to supply the local inspections active during the test. Default: none. */
  protected LocalInspectionTool[] configureLocalInspectionTools() {
    return LocalInspectionTool.EMPTY_ARRAY;
  }

  // Instantiates all inspection classes from the providers, keeping only
  // LocalInspectionTool instances; instantiation failures are logged, not fatal.
  protected static LocalInspectionTool[] createLocalInspectionTools(final InspectionToolProvider... provider) {
    final ArrayList<LocalInspectionTool> result = new ArrayList<LocalInspectionTool>();
    for (InspectionToolProvider toolProvider : provider) {
      for (Class aClass : toolProvider.getInspectionClasses()) {
        try {
          final Object tool = aClass.newInstance();
          assertTrue(tool instanceof LocalInspectionTool);
          result.add((LocalInspectionTool)tool);
        }
        catch (Exception e) {
          LOG.error(e);
        }
      }
    }
    return result.toArray(new LocalInspectionTool[result.size()]);
  }

  /** Configures the editor from {@code filePath} and checks its highlighting. */
  protected void doTest(@NonNls @NotNull String filePath, boolean checkWarnings, boolean checkInfos, boolean checkWeakWarnings) throws Exception {
    configureByFile(filePath);
    doDoTest(checkWarnings, checkInfos, checkWeakWarnings);
  }

  protected void doTest(@NonNls @NotNull String filePath, boolean checkWarnings, boolean checkInfos) throws Exception {
    doTest(filePath, checkWarnings, checkInfos, false);
  }

  protected void doTest(@NonNls @NotNull String filePath, @NonNls String projectRoot, boolean checkWarnings, boolean checkInfos) throws Exception {
    configureByFile(filePath, projectRoot);
    doDoTest(checkWarnings, checkInfos);
  }

  /**
   * Fluent alternative to the doTest() overloads: returns a HighlightTestInfo
   * that carries the check flags and runs the comparison in its doTest().
   */
  @NotNull
  @SuppressWarnings("TestMethodWithIncorrectSignature")
  protected HighlightTestInfo testFile(@NonNls @NotNull String... filePath) {
    return new HighlightTestInfo(getTestRootDisposable(), filePath) {
      @Override
      public HighlightTestInfo doTest() {
        try { configureByFiles(projectRoot, filePaths); }
        catch (Exception e) { throw new RuntimeException(e); }
        ExpectedHighlightingData data = new JavaExpectedHighlightingData(myEditor.getDocument(), checkWarnings, checkWeakWarnings, checkInfos, myFile);
        if (checkSymbolNames) data.checkSymbolNames();
        checkHighlighting(data);
        return this;
      }
    };
  }

  protected void doTest(@NotNull VirtualFile vFile, boolean checkWarnings, boolean checkInfos) throws Exception {
    doTest(new VirtualFile[] { vFile }, checkWarnings, checkInfos );
  }

  protected void doTest(@NotNull VirtualFile[] vFile, boolean checkWarnings, boolean checkInfos) throws Exception {
    configureByFiles(null, vFile);
    doDoTest(checkWarnings, checkInfos);
  }

  protected void doTest(boolean checkWarnings, boolean checkInfos, @NotNull String ... files) throws Exception {
    configureByFiles(null, files);
    doDoTest(checkWarnings, checkInfos);
  }

  @NotNull
  protected Collection<HighlightInfo> doDoTest(boolean checkWarnings, boolean checkInfos) {
    return doDoTest(checkWarnings, checkInfos, false);
  }

  /**
   * Runs the highlighting comparison and returns only the infos at severities the
   * caller asked to check (errors are always included).
   */
  protected Collection<HighlightInfo> doDoTest(final boolean checkWarnings, final boolean checkInfos, final boolean checkWeakWarnings) {
    return ContainerUtil.filter(
      checkHighlighting(new ExpectedHighlightingData(myEditor.getDocument(), checkWarnings, checkWeakWarnings, checkInfos, myFile)),
      new Condition<HighlightInfo>() {
        @Override
        public boolean value(HighlightInfo info) {
          return info.getSeverity() == HighlightSeverity.INFORMATION && checkInfos ||
                 info.getSeverity() == HighlightSeverity.WARNING && checkWarnings ||
                 info.getSeverity() == HighlightSeverity.WEAK_WARNING && checkWeakWarnings ||
                 info.getSeverity().compareTo(HighlightSeverity.WARNING) > 0;
        }
      });
  }

  /**
   * Runs highlighting over the current editor and checks both line markers and
   * highlight infos against {@code data}. While highlighting runs, file-tree
   * loading is restricted by the configured virtual-file filter.
   */
  @NotNull
  protected Collection<HighlightInfo> checkHighlighting(@NotNull final ExpectedHighlightingData data) {
    data.init();
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    //to load text
    ApplicationManager.getApplication().runWriteAction(new Runnable() {
      @Override
      public void run() {
        TreeUtil.clearCaches((TreeElement)myFile.getNode());
      }
    });

    //to initialize caches
    if (!DumbService.isDumb(getProject())) {
      CacheManager.SERVICE.getInstance(myProject).getFilesWithWord("XXX", UsageSearchContext.IN_COMMENTS, GlobalSearchScope.allScope(myProject), true);
    }

    final JavaPsiFacadeEx facade = getJavaFacade();
    if (facade != null) {
      facade.setAssertOnFileLoadingFilter(myVirtualFileFilter, myTestRootDisposable); // check repository work
    }
    try {
      Collection<HighlightInfo> infos = doHighlighting();
      String text = myEditor.getDocument().getText();
      data.checkLineMarkers(DaemonCodeAnalyzerImpl.getLineMarkers(getDocument(getFile()), getProject()), text);
      data.checkResult(infos, text);
      return infos;
    }
    finally {
      // Always lift the file-loading restriction, even when the check fails.
      if (facade != null) {
        facade.setAssertOnFileLoadingFilter(VirtualFileFilter.NONE, myTestRootDisposable);
      }
    }
  }

  @Override
  protected Editor createEditor(@NotNull VirtualFile file) {
    // The file being edited must itself be loadable under the access filter.
    if (myVirtualFileFilter instanceof FileTreeAccessFilter) {
      allowTreeAccessForFile(file);
    }
    return super.createEditor(file);
  }

  protected void setVirtualFileFilter(@NotNull VirtualFileFilter filter) {
    myVirtualFileFilter = filter;
  }

  protected void allowTreeAccessForFile(@NotNull VirtualFile file) {
    assert myVirtualFileFilter instanceof FileTreeAccessFilter : "configured filter does not support this method";
    ((FileTreeAccessFilter)myVirtualFileFilter).allowTreeAccessForFile(file);
  }

  protected void allowTreeAccessForAllFiles() {
    assert myVirtualFileFilter instanceof FileTreeAccessFilter : "configured filter does not support this method";
    ((FileTreeAccessFilter)myVirtualFileFilter).allowTreeAccessForAllFiles();
  }

  /** Runs highlighting and returns only ERROR-severity infos. */
  @NotNull
  protected List<HighlightInfo> highlightErrors() {
    return doHighlighting(HighlightSeverity.ERROR);
  }

  @NotNull
  protected List<HighlightInfo> doHighlighting(@NotNull HighlightSeverity minSeverity) {
    return filter(doHighlighting(), minSeverity);
  }

  /**
   * Runs the daemon passes over the current file/editor, skipping passes disabled
   * by doTestLineMarkers()/doExternalValidation()/forceExternalValidation(), and
   * returns the produced infos.
   */
  @NotNull
  protected List<HighlightInfo> doHighlighting() {
    PsiDocumentManager.getInstance(myProject).commitAllDocuments();

    TIntArrayList toIgnore = new TIntArrayList();
    if (!doTestLineMarkers()) {
      toIgnore.add(Pass.UPDATE_OVERRIDDEN_MARKERS);
      toIgnore.add(Pass.VISIBLE_LINE_MARKERS);
      toIgnore.add(Pass.LINE_MARKERS);
    }

    if (!doExternalValidation()) {
      toIgnore.add(Pass.EXTERNAL_TOOLS);
    }
    if (forceExternalValidation()) {
      // External validation only: disable every other pass.
      toIgnore.add(Pass.LINE_MARKERS);
      toIgnore.add(Pass.LOCAL_INSPECTIONS);
      toIgnore.add(Pass.WHOLE_FILE_LOCAL_INSPECTIONS);
      toIgnore.add(Pass.POPUP_HINTS);
      toIgnore.add(Pass.UPDATE_ALL);
      toIgnore.add(Pass.UPDATE_OVERRIDDEN_MARKERS);
      toIgnore.add(Pass.VISIBLE_LINE_MARKERS);
    }

    boolean canChange = canChangeDocumentDuringHighlighting();
    List<HighlightInfo> infos = CodeInsightTestFixtureImpl.instantiateAndRun(getFile(), getEditor(), toIgnore.toNativeArray(), canChange);

    if (!canChange) {
      // If the document must stay unchanged, verify no dirty scopes remain.
      Document document = getDocument(getFile());
      DaemonCodeAnalyzerEx daemonCodeAnalyzer = DaemonCodeAnalyzerEx.getInstanceEx(myProject);
      daemonCodeAnalyzer.getFileStatusMap().assertAllDirtyScopesAreNull(document);
    }

    return infos;
  }

  /** Marks a test (method or class) as allowed to modify the document while highlighting runs. */
  @Retention(RetentionPolicy.RUNTIME)
  @Target({ElementType.METHOD, ElementType.TYPE})
  public @interface CanChangeDocumentDuringHighlighting {}

  private boolean canChangeDocumentDuringHighlighting() {
    return annotatedWith(CanChangeDocumentDuringHighlighting.class);
  }

  /** @return the infos whose severity is at least {@code minSeverity}. */
  @NotNull
  public static List<HighlightInfo> filter(@NotNull List<HighlightInfo> infos, @NotNull HighlightSeverity minSeverity) {
    ArrayList<HighlightInfo> result = new ArrayList<HighlightInfo>();
    for (final HighlightInfo info : infos) {
      if (info.getSeverity().compareTo(minSeverity) >= 0) result.add(info);
    }
    return result;
  }

  protected boolean doTestLineMarkers() {
    return false;
  }

  protected boolean doExternalValidation() {
    return true;
  }

  protected boolean forceExternalValidation() {
    return false;
  }

  /** Finds the named intention among those available for {@code infos} and invokes it; fails the test if absent. */
  protected static void findAndInvokeIntentionAction(@NotNull Collection<HighlightInfo> infos, @NotNull String intentionActionName, @NotNull Editor editor,
                                                     @NotNull PsiFile file) throws IncorrectOperationException {
    IntentionAction intentionAction = findIntentionAction(infos, intentionActionName, editor, file);

    assertNotNull(intentionActionName, intentionAction);
    assertTrue(ShowIntentionActionsHandler.chooseActionAndInvoke(file, editor, intentionAction, intentionActionName));
    UIUtil.dispatchAllInvocationEvents();
  }

  // Looks up the intention first among generally available actions, then among
  // the quick fixes attached to the given highlight infos.
  protected static IntentionAction findIntentionAction(@NotNull Collection<HighlightInfo> infos, @NotNull String intentionActionName, @NotNull Editor editor,
                                                       @NotNull PsiFile file) {
    List<IntentionAction> actions = LightQuickFixTestCase.getAvailableActions(editor, file);
    IntentionAction intentionAction = LightQuickFixTestCase.findActionWithText(actions, intentionActionName);

    if (intentionAction == null) {
      final List<IntentionAction> availableActions = new ArrayList<IntentionAction>();

      for (HighlightInfo info :infos) {
        if (info.quickFixActionRanges != null) {
          for (Pair<HighlightInfo.IntentionActionDescriptor, TextRange> pair : info.quickFixActionRanges) {
            IntentionAction action = pair.first.getAction();
            if (action.isAvailable(file.getProject(), editor, file)) availableActions.add(action);
          }
        }
      }

      intentionAction = LightQuickFixTestCase.findActionWithText(
        availableActions,
        intentionActionName
      );
    }

    return intentionAction;
  }

  public void checkHighlighting(Editor editor, boolean checkWarnings, boolean checkInfos) {
    setActiveEditor(editor);
    doDoTest(checkWarnings, checkInfos);
  }

  public PsiClass createClass(String text) throws IOException {
    return createClass(myModule, text);
  }

  /**
   * Creates a class on disk under the module's first source root (creating a temp
   * source root if the module has none), named after the class's qualified name,
   * and returns the resulting PsiClass.
   */
  protected PsiClass createClass(final Module module, final String text) throws IOException {
    return new WriteCommandAction<PsiClass>(getProject()) {
      @Override
      protected void run(@NotNull Result<PsiClass> result) throws Throwable {
        // Parse the text once in-memory only to discover the qualified name.
        final PsiFileFactory factory = PsiFileFactory.getInstance(getProject());
        final PsiJavaFile javaFile = (PsiJavaFile)factory.createFileFromText("a.java", JavaFileType.INSTANCE, text);
        final String qname = javaFile.getClasses()[0].getQualifiedName();
        assertNotNull(qname);
        final VirtualFile[] files = ModuleRootManager.getInstance(module).getSourceRoots();
        File dir;
        if (files.length > 0) {
          dir = VfsUtilCore.virtualToIoFile(files[0]);
        }
        else {
          dir = createTempDirectory();
          VirtualFile vDir =
            LocalFileSystem.getInstance().refreshAndFindFileByPath(dir.getCanonicalPath().replace(File.separatorChar, '/'));
          addSourceContentToRoots(module, vDir);
        }

        File file = new File(dir, qname.replace('.', '/') + ".java");
        FileUtil.createIfDoesntExist(file);
        VirtualFile vFile = LocalFileSystem.getInstance().refreshAndFindFileByPath(file.getCanonicalPath().replace(File.separatorChar, '/'));
        assertNotNull(vFile);
        VfsUtil.saveText(vFile, text);
        PsiJavaFile psiFile = (PsiJavaFile)myPsiManager.findFile(vFile);
        assertNotNull(psiFile);
        PsiClass psiClass = psiFile.getClasses()[0];
        result.setResult(psiClass);
      }
    }.execute().throwException().getResultObject();
  }
}
| |
package org.apache.helix.zookeeper.impl.client;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.concurrent.TimeUnit;
import org.apache.helix.msdcommon.constant.MetadataStoreRoutingConstants;
import org.apache.helix.msdcommon.datamodel.MetadataStoreRoutingData;
import org.apache.helix.msdcommon.exception.InvalidRoutingDataException;
import org.apache.helix.msdcommon.mock.MockMetadataStoreDirectoryServer;
import org.apache.helix.zookeeper.api.client.RealmAwareZkClient;
import org.apache.helix.zookeeper.constant.RoutingDataReaderType;
import org.apache.helix.zookeeper.constant.RoutingSystemPropertyKeys;
import org.apache.helix.zookeeper.constant.TestConstants;
import org.apache.helix.zookeeper.datamodel.ZNRecord;
import org.apache.helix.zookeeper.datamodel.serializer.ZNRecordSerializer;
import org.apache.helix.zookeeper.routing.RoutingDataManager;
import org.apache.helix.zookeeper.zkclient.IZkStateListener;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.Op;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooDefs;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
/**
 * Tests for {@code FederatedZkClient}, a realm-aware ZK client that routes each request to the
 * ZooKeeper realm owning the request path's sharding key.
 *
 * <p>The test methods are chained with {@code dependsOnMethods} because they share the client
 * created in {@link #beforeClass()} and the ZooKeeper state written by earlier tests.
 */
public class TestFederatedZkClient extends RealmAwareZkClientTestBase {
  private static final String TEST_SHARDING_KEY_PREFIX = ZK_SHARDING_KEY_PREFIX;
  private static final String TEST_REALM_ONE_VALID_PATH = TEST_SHARDING_KEY_PREFIX + "/1/a/b/c";
  private static final String TEST_REALM_TWO_VALID_PATH = TEST_SHARDING_KEY_PREFIX + "/2/x/y/z";
  // Intentionally malformed: no "/" after the prefix, so no sharding key can be resolved.
  private static final String TEST_INVALID_PATH = TEST_SHARDING_KEY_PREFIX + "invalid/a/b/c";
  private static final String UNSUPPORTED_OPERATION_MESSAGE =
      "Session-aware operation is not supported by FederatedZkClient.";

  // Shared across all tests; created in beforeClass(), closed in afterClass().
  private RealmAwareZkClient _realmAwareZkClient;

  @BeforeClass
  public void beforeClass() throws IOException, InvalidRoutingDataException {
    System.out.println("Starting " + TestFederatedZkClient.class.getSimpleName());
    super.beforeClass();

    // Feed the raw routing data into TrieRoutingData to construct an in-memory representation
    // of routing information.
    _realmAwareZkClient =
        new FederatedZkClient(new RealmAwareZkClient.RealmAwareZkConnectionConfig.Builder().build(),
            new RealmAwareZkClient.RealmAwareZkClientConfig());
  }

  @AfterClass
  public void afterClass() {
    super.afterClass();
    // Close it as it is created in beforeClass().
    _realmAwareZkClient.close();
    System.out.println("Ending " + TestFederatedZkClient.class.getSimpleName());
  }

  /*
   * Tests that an unsupported (session-aware) operation throws UnsupportedOperationException.
   */
  @Test
  public void testUnsupportedOperations() throws IOException, InvalidRoutingDataException {
    // Test creating ephemeral.
    try {
      _realmAwareZkClient.create(TEST_REALM_ONE_VALID_PATH, "Hello", CreateMode.EPHEMERAL);
      Assert.fail("Ephemeral node should not be created.");
    } catch (UnsupportedOperationException ex) {
      Assert.assertTrue(ex.getMessage().startsWith(UNSUPPORTED_OPERATION_MESSAGE));
    }

    // Test creating ephemeral sequential.
    try {
      _realmAwareZkClient
          .create(TEST_REALM_ONE_VALID_PATH, "Hello", CreateMode.EPHEMERAL_SEQUENTIAL);
      Assert.fail("Ephemeral node should not be created.");
    } catch (UnsupportedOperationException ex) {
      Assert.assertTrue(ex.getMessage().startsWith(UNSUPPORTED_OPERATION_MESSAGE));
    }

    // multi() is session-aware, so it is unsupported as well.
    List<Op> ops = Arrays.asList(
        Op.create(TEST_REALM_ONE_VALID_PATH, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE,
            CreateMode.PERSISTENT), Op.delete(TEST_REALM_ONE_VALID_PATH, -1));
    try {
      _realmAwareZkClient.multi(ops);
      Assert.fail("multi() should not be supported.");
    } catch (UnsupportedOperationException ex) {
      Assert.assertTrue(ex.getMessage().startsWith(UNSUPPORTED_OPERATION_MESSAGE));
    }

    try {
      _realmAwareZkClient.getSessionId();
      Assert.fail("getSessionId() should not be supported.");
    } catch (UnsupportedOperationException ex) {
      Assert.assertTrue(ex.getMessage().startsWith(UNSUPPORTED_OPERATION_MESSAGE));
    }

    try {
      _realmAwareZkClient.getServers();
      Assert.fail("getServers() should not be supported.");
    } catch (UnsupportedOperationException ex) {
      Assert.assertTrue(ex.getMessage().startsWith(UNSUPPORTED_OPERATION_MESSAGE));
    }

    try {
      _realmAwareZkClient.waitUntilConnected(5L, TimeUnit.SECONDS);
      // Fixed copy-pasted failure message (used to say "getServers()").
      Assert.fail("waitUntilConnected() should not be supported.");
    } catch (UnsupportedOperationException ex) {
      Assert.assertTrue(ex.getMessage().startsWith(UNSUPPORTED_OPERATION_MESSAGE));
    }

    // Test state change subscription.
    IZkStateListener listener = new IZkStateListener() {
      @Override
      public void handleStateChanged(Watcher.Event.KeeperState state) {
        System.out.println("Handle new state: " + state);
      }

      @Override
      public void handleNewSession(String sessionId) {
        System.out.println("Handle new session: " + sessionId);
      }

      @Override
      public void handleSessionEstablishmentError(Throwable error) {
        System.out.println("Handle session establishment error: " + error);
      }
    };
    try {
      _realmAwareZkClient.subscribeStateChanges(listener);
      // Fixed copy-pasted failure message (used to say "getServers()").
      Assert.fail("subscribeStateChanges() should not be supported.");
    } catch (UnsupportedOperationException ex) {
      Assert.assertTrue(ex.getMessage().startsWith(UNSUPPORTED_OPERATION_MESSAGE));
    }
    try {
      _realmAwareZkClient.unsubscribeStateChanges(listener);
      // Fixed copy-pasted failure message (used to say "getServers()").
      Assert.fail("unsubscribeStateChanges() should not be supported.");
    } catch (UnsupportedOperationException ex) {
      Assert.assertTrue(ex.getMessage().startsWith(UNSUPPORTED_OPERATION_MESSAGE));
    }
  }

  /*
   * Tests the persistent create() call against a valid path and an invalid path.
   * Valid path is one that belongs to the realm designated by the sharding key.
   * Invalid path is one that does not belong to the realm designated by the sharding key.
   */
  @Test(dependsOnMethods = "testUnsupportedOperations")
  public void testCreatePersistent() {
    _realmAwareZkClient.setZkSerializer(new ZNRecordSerializer());

    // Create a dummy ZNRecord
    ZNRecord znRecord = new ZNRecord("DummyRecord");
    znRecord.setSimpleField("Dummy", "Value");

    // Test writing and reading against the validPath
    _realmAwareZkClient.createPersistent(TEST_REALM_ONE_VALID_PATH, true);
    _realmAwareZkClient.writeData(TEST_REALM_ONE_VALID_PATH, znRecord);
    Assert.assertEquals(_realmAwareZkClient.readData(TEST_REALM_ONE_VALID_PATH), znRecord);

    // Test writing and reading against the invalid path
    try {
      _realmAwareZkClient.createPersistent(TEST_INVALID_PATH, true);
      Assert.fail("Create() should not succeed on an invalid path!");
    } catch (NoSuchElementException ex) {
      Assert.assertEquals(ex.getMessage(),
          "No sharding key found within the provided path. Path: " + TEST_INVALID_PATH);
    }
  }

  /*
   * Tests that exists() works on valid path and fails on invalid path.
   */
  @Test(dependsOnMethods = "testCreatePersistent")
  public void testExists() {
    Assert.assertTrue(_realmAwareZkClient.exists(TEST_REALM_ONE_VALID_PATH));

    try {
      _realmAwareZkClient.exists(TEST_INVALID_PATH);
      Assert.fail("Exists() should not succeed on an invalid path!");
    } catch (NoSuchElementException ex) {
      Assert.assertEquals(ex.getMessage(),
          "No sharding key found within the provided path. Path: " + TEST_INVALID_PATH);
    }
  }

  /*
   * Tests that delete() works on valid path and fails on invalid path.
   */
  @Test(dependsOnMethods = "testExists")
  public void testDelete() {
    try {
      _realmAwareZkClient.delete(TEST_INVALID_PATH);
      // Fixed copy-pasted failure message (used to say "Exists()").
      Assert.fail("delete() should not succeed on an invalid path!");
    } catch (NoSuchElementException ex) {
      Assert.assertEquals(ex.getMessage(),
          "No sharding key found within the provided path. Path: " + TEST_INVALID_PATH);
    }

    Assert.assertTrue(_realmAwareZkClient.delete(TEST_REALM_ONE_VALID_PATH));
    Assert.assertFalse(_realmAwareZkClient.exists(TEST_REALM_ONE_VALID_PATH));
  }

  /*
   * Tests the multi-realm feature: operations against one realm must not leak into another.
   */
  @Test(dependsOnMethods = "testDelete")
  public void testMultiRealmCRUD() {
    ZNRecord realmOneZnRecord = new ZNRecord("realmOne");
    realmOneZnRecord.setSimpleField("realmOne", "Value");
    ZNRecord realmTwoZnRecord = new ZNRecord("realmTwo");
    realmTwoZnRecord.setSimpleField("realmTwo", "Value");

    // Writing on realmOne.
    _realmAwareZkClient.createPersistent(TEST_REALM_ONE_VALID_PATH, true);
    _realmAwareZkClient.writeData(TEST_REALM_ONE_VALID_PATH, realmOneZnRecord);

    // RealmOne path is created but realmTwo path is not.
    Assert.assertTrue(_realmAwareZkClient.exists(TEST_REALM_ONE_VALID_PATH));
    Assert.assertFalse(_realmAwareZkClient.exists(TEST_REALM_TWO_VALID_PATH));

    // Writing on realmTwo.
    _realmAwareZkClient.createPersistent(TEST_REALM_TWO_VALID_PATH, true);
    _realmAwareZkClient.writeData(TEST_REALM_TWO_VALID_PATH, realmTwoZnRecord);

    // RealmTwo path is created.
    Assert.assertTrue(_realmAwareZkClient.exists(TEST_REALM_TWO_VALID_PATH));

    // Reading on both realms.
    Assert.assertEquals(_realmAwareZkClient.readData(TEST_REALM_ONE_VALID_PATH), realmOneZnRecord);
    Assert.assertEquals(_realmAwareZkClient.readData(TEST_REALM_TWO_VALID_PATH), realmTwoZnRecord);

    Assert.assertTrue(_realmAwareZkClient.delete(TEST_REALM_ONE_VALID_PATH));
    Assert.assertFalse(_realmAwareZkClient.exists(TEST_REALM_ONE_VALID_PATH));

    // Deleting on realmOne does not delete on realmTwo.
    Assert.assertTrue(_realmAwareZkClient.exists(TEST_REALM_TWO_VALID_PATH));

    // Deleting on realmTwo.
    Assert.assertTrue(_realmAwareZkClient.delete(TEST_REALM_TWO_VALID_PATH));
    Assert.assertFalse(_realmAwareZkClient.exists(TEST_REALM_TWO_VALID_PATH));
  }

  /**
   * This tests the routing data update feature only enabled when
   * RoutingSystemPropertyKeys.UPDATE_ROUTING_DATA_ON_CACHE_MISS is set to true.
   * Routing data source is MSDS.
   */
  @Test(dependsOnMethods = "testMultiRealmCRUD")
  public void testUpdateRoutingDataOnCacheMissMSDS()
      throws IOException, InvalidRoutingDataException {
    // Enable routing data update upon cache miss
    System.setProperty(RoutingSystemPropertyKeys.UPDATE_ROUTING_DATA_ON_CACHE_MISS, "true");
    // Set the routing data update interval to 0 so there's no delay in testing
    System.setProperty(RoutingSystemPropertyKeys.ROUTING_DATA_UPDATE_INTERVAL_MS, "0");

    RoutingDataManager.getInstance().getMetadataStoreRoutingData();
    _msdsServer.stopServer();
    /*
     * Test is 2-tiered because cache update is 2-tiered
     * Case 1:
     * - RoutingDataManager (in-memory) does not have the key
     * - MSDS has the key
     * This simulates a case where FederatedZkClient must do a I/O based update.
     */
    // Start MSDS with a new key
    String newShardingKey = "/sharding-key-9";
    String zkRealm = "localhost:2127";
    Map<String, Collection<String>> rawRoutingData = new HashMap<>();
    rawRoutingData.put(zkRealm, new ArrayList<>());
    rawRoutingData.get(zkRealm).add(newShardingKey); // Add a new key
    _msdsServer = new MockMetadataStoreDirectoryServer(MSDS_HOSTNAME, MSDS_PORT, MSDS_NAMESPACE,
        rawRoutingData);
    _msdsServer.startServer();

    // Verify that RoutingDataManager does not have the key
    MetadataStoreRoutingData routingData =
        RoutingDataManager.getInstance().getMetadataStoreRoutingData();
    try {
      routingData.getMetadataStoreRealm(newShardingKey);
      Assert.fail("Must throw NoSuchElementException!");
    } catch (NoSuchElementException e) {
      // Expected
    }

    // Create a new FederatedZkClient
    FederatedZkClient federatedZkClient = new FederatedZkClient(
        new RealmAwareZkClient.RealmAwareZkConnectionConfig.Builder()
            .setRoutingDataSourceType(RoutingDataReaderType.HTTP.name())
            .setRoutingDataSourceEndpoint(
                "http://" + MSDS_HOSTNAME + ":" + MSDS_PORT + "/admin/v2/namespaces/"
                    + MSDS_NAMESPACE).build(), new RealmAwareZkClient.RealmAwareZkClientConfig());

    // exists() must succeed and RoutingDataManager should now have the key (cache update must have
    // happened)
    // False expected for the following call because the znode does not exist and we are checking
    // whether the call succeeds or not
    Assert.assertFalse(federatedZkClient.exists(newShardingKey));
    // Note: TestNG's assertEquals takes (actual, expected) — arguments were swapped before.
    Assert.assertEquals(RoutingDataManager.getInstance().getMetadataStoreRoutingData()
        .getMetadataStoreRealm(newShardingKey), zkRealm);

    /*
     * Case 2:
     * - RoutingDataManager has the key
     * - MSDS does not have the key
     * - continue using the same ZkClient because we want an existing federated client that does
     * not have the key
     */
    _msdsServer.stopServer();
    // Create an MSDS with the key and reset MSDS so it doesn't contain the key
    String newShardingKey2 = "/sharding-key-10";
    rawRoutingData.get(zkRealm).add(newShardingKey2);
    _msdsServer = new MockMetadataStoreDirectoryServer(MSDS_HOSTNAME, MSDS_PORT, MSDS_NAMESPACE,
        rawRoutingData);
    _msdsServer.startServer();

    // Make sure RoutingDataManager has the key
    RoutingDataManager.getInstance().reset();
    Assert.assertEquals(RoutingDataManager.getInstance().getMetadataStoreRoutingData()
        .getMetadataStoreRealm(newShardingKey2), zkRealm);

    // Reset MSDS so it doesn't contain the key
    _msdsServer.stopServer();
    _msdsServer = new MockMetadataStoreDirectoryServer(MSDS_HOSTNAME, MSDS_PORT, MSDS_NAMESPACE,
        TestConstants.FAKE_ROUTING_DATA); // FAKE_ROUTING_DATA doesn't contain the key
    _msdsServer.startServer();

    // exists() must succeed and RoutingDataManager should still have the key
    // This means that we do not do a hard update (I/O based update) because in-memory cache already
    // has the key
    // False expected for the following call because the znode does not exist and we are checking
    // whether the call succeeds or not
    Assert.assertFalse(federatedZkClient.exists(newShardingKey2));
    Assert.assertEquals(RoutingDataManager.getInstance().getMetadataStoreRoutingData()
        .getMetadataStoreRealm(newShardingKey2), zkRealm);

    // Also check that MSDS does not have the new sharding key through resetting RoutingDataManager
    // and re-reading from MSDS
    RoutingDataManager.getInstance().reset();
    try {
      RoutingDataManager.getInstance().getMetadataStoreRoutingData()
          .getMetadataStoreRealm(newShardingKey2);
      Assert.fail("NoSuchElementException expected!");
    } catch (NoSuchElementException e) {
      // Expected because MSDS does not contain the key
    }

    // Clean up federatedZkClient
    federatedZkClient.close();
    // Shut down MSDS
    _msdsServer.stopServer();
    // Disable System property
    System.clearProperty(RoutingSystemPropertyKeys.UPDATE_ROUTING_DATA_ON_CACHE_MISS);
    System.clearProperty(RoutingSystemPropertyKeys.ROUTING_DATA_UPDATE_INTERVAL_MS);
  }

  /**
   * This tests the routing data update feature only enabled when
   * RoutingSystemPropertyKeys.UPDATE_ROUTING_DATA_ON_CACHE_MISS is set to true.
   * Routing data source is ZK.
   */
  @Test(dependsOnMethods = "testUpdateRoutingDataOnCacheMissMSDS")
  public void testUpdateRoutingDataOnCacheMissZK() throws IOException, InvalidRoutingDataException {
    // Set up routing data in ZK with empty sharding key list
    String zkRealm = "localhost:2127";
    String newShardingKey = "/sharding-key-9";
    String newShardingKey2 = "/sharding-key-10";
    ZkClient zkClient =
        new ZkClient.Builder().setZkServer(zkRealm).setZkSerializer(new ZNRecordSerializer())
            .build();
    zkClient.create(MetadataStoreRoutingConstants.ROUTING_DATA_PATH, null, CreateMode.PERSISTENT);
    ZNRecord zkRealmRecord = new ZNRecord(zkRealm);
    List<String> keyList =
        new ArrayList<>(TestConstants.TEST_KEY_LIST_1); // Need a non-empty keyList
    zkRealmRecord.setListField(MetadataStoreRoutingConstants.ZNRECORD_LIST_FIELD_KEY, keyList);
    zkClient.create(MetadataStoreRoutingConstants.ROUTING_DATA_PATH + "/" + zkRealm, zkRealmRecord,
        CreateMode.PERSISTENT);

    // Enable routing data update upon cache miss
    System.setProperty(RoutingSystemPropertyKeys.UPDATE_ROUTING_DATA_ON_CACHE_MISS, "true");
    // Set the routing data update interval to 0 so there's no delay in testing
    System.setProperty(RoutingSystemPropertyKeys.ROUTING_DATA_UPDATE_INTERVAL_MS, "0");

    RoutingDataManager.getInstance().reset();
    RoutingDataManager.getInstance().getMetadataStoreRoutingData(RoutingDataReaderType.ZK, zkRealm);
    /*
     * Test is 2-tiered because cache update is 2-tiered
     * Case 1:
     * - RoutingDataManager does not have the key
     * - ZK has the key
     * This simulates a case where FederatedZkClient must do a I/O based update (must read from ZK).
     */
    // Add the key to ZK
    zkRealmRecord.getListField(MetadataStoreRoutingConstants.ZNRECORD_LIST_FIELD_KEY)
        .add(newShardingKey);
    zkClient
        .writeData(MetadataStoreRoutingConstants.ROUTING_DATA_PATH + "/" + zkRealm, zkRealmRecord);

    // Verify that RoutingDataManager does not have the key
    MetadataStoreRoutingData routingData = RoutingDataManager.getInstance()
        .getMetadataStoreRoutingData(RoutingDataReaderType.ZK, zkRealm);
    try {
      routingData.getMetadataStoreRealm(newShardingKey);
      Assert.fail("Must throw NoSuchElementException!");
    } catch (NoSuchElementException e) {
      // Expected
    }

    // Create a new FederatedZkClient
    FederatedZkClient federatedZkClient = new FederatedZkClient(
        new RealmAwareZkClient.RealmAwareZkConnectionConfig.Builder()
            .setRoutingDataSourceType(RoutingDataReaderType.ZK.name())
            .setRoutingDataSourceEndpoint(zkRealm).build(),
        new RealmAwareZkClient.RealmAwareZkClientConfig());

    // exists() must succeed and RoutingDataManager should now have the key (cache update must
    // have happened)
    // False expected for the following call because the znode does not exist and we are checking
    // whether the call succeeds or not
    Assert.assertFalse(federatedZkClient.exists(newShardingKey));
    // Note: TestNG's assertEquals takes (actual, expected) — arguments were swapped before.
    Assert.assertEquals(RoutingDataManager.getInstance()
        .getMetadataStoreRoutingData(RoutingDataReaderType.ZK, zkRealm)
        .getMetadataStoreRealm(newShardingKey), zkRealm);

    /*
     * Case 2:
     * - RoutingDataManager has the key
     * - ZK does not have the key
     * - continue using the same ZkClient because we want an existing federated client that does
     * not have the key
     */
    // Add newShardingKey2 to ZK's routing data (in order to give RoutingDataManager the key)
    zkRealmRecord.getListField(MetadataStoreRoutingConstants.ZNRECORD_LIST_FIELD_KEY)
        .add(newShardingKey2);
    zkClient
        .writeData(MetadataStoreRoutingConstants.ROUTING_DATA_PATH + "/" + zkRealm, zkRealmRecord);

    // Update RoutingDataManager so it has the key
    RoutingDataManager.getInstance().reset();
    Assert.assertEquals(RoutingDataManager.getInstance()
        .getMetadataStoreRoutingData(RoutingDataReaderType.ZK, zkRealm)
        .getMetadataStoreRealm(newShardingKey2), zkRealm);

    // Remove newShardingKey2 from ZK
    zkRealmRecord.getListField(MetadataStoreRoutingConstants.ZNRECORD_LIST_FIELD_KEY)
        .remove(newShardingKey2);
    zkClient
        .writeData(MetadataStoreRoutingConstants.ROUTING_DATA_PATH + "/" + zkRealm, zkRealmRecord);

    // exists() must succeed and RoutingDataManager should still have the key
    // This means that we do not do a hard update (I/O based update) because in-memory cache already
    // has the key
    // False expected for the following call because the znode does not exist and we are checking
    // whether the call succeeds or not
    Assert.assertFalse(federatedZkClient.exists(newShardingKey2));
    Assert.assertEquals(RoutingDataManager.getInstance()
        .getMetadataStoreRoutingData(RoutingDataReaderType.ZK, zkRealm)
        .getMetadataStoreRealm(newShardingKey2), zkRealm);

    // Also check that ZK does not have the new sharding key through resetting RoutingDataManager
    // and re-reading from ZK
    RoutingDataManager.getInstance().reset();
    try {
      RoutingDataManager.getInstance()
          .getMetadataStoreRoutingData(RoutingDataReaderType.ZK, zkRealm)
          .getMetadataStoreRealm(newShardingKey2);
      Assert.fail("NoSuchElementException expected!");
    } catch (NoSuchElementException e) {
      // Expected because ZK does not contain the key
    }

    // Clean up federatedZkClient
    federatedZkClient.close();
    // Clean up ZK writes and ZkClient
    zkClient.deleteRecursively(MetadataStoreRoutingConstants.ROUTING_DATA_PATH);
    zkClient.close();
    // Disable System property
    System.clearProperty(RoutingSystemPropertyKeys.UPDATE_ROUTING_DATA_ON_CACHE_MISS);
    System.clearProperty(RoutingSystemPropertyKeys.ROUTING_DATA_UPDATE_INTERVAL_MS);
  }

  /**
   * Test that throttle based on last reset timestamp works correctly. Here, we use ZK as the
   * routing data source.
   * Test scenario: set the throttle value to a high value and check that routing data update from
   * the routing data source does NOT happen (because it would be throttled).
   */
  @Test(dependsOnMethods = "testUpdateRoutingDataOnCacheMissZK")
  public void testRoutingDataUpdateThrottle() throws InvalidRoutingDataException {
    // Call reset to set the last reset() timestamp in RoutingDataManager
    RoutingDataManager.getInstance().reset();

    // Set up routing data in ZK with empty sharding key list
    String zkRealm = "localhost:2127";
    String newShardingKey = "/throttle";
    ZkClient zkClient =
        new ZkClient.Builder().setZkServer(zkRealm).setZkSerializer(new ZNRecordSerializer())
            .build();
    zkClient.create(MetadataStoreRoutingConstants.ROUTING_DATA_PATH, null, CreateMode.PERSISTENT);
    ZNRecord zkRealmRecord = new ZNRecord(zkRealm);
    zkRealmRecord.setListField(MetadataStoreRoutingConstants.ZNRECORD_LIST_FIELD_KEY,
        new ArrayList<>(TestConstants.TEST_KEY_LIST_1));
    zkClient.create(MetadataStoreRoutingConstants.ROUTING_DATA_PATH + "/" + zkRealm, zkRealmRecord,
        CreateMode.PERSISTENT);

    // Enable routing data update upon cache miss
    System.setProperty(RoutingSystemPropertyKeys.UPDATE_ROUTING_DATA_ON_CACHE_MISS, "true");
    // Set the throttle value to a very long value
    System.setProperty(RoutingSystemPropertyKeys.ROUTING_DATA_UPDATE_INTERVAL_MS,
        String.valueOf(Integer.MAX_VALUE));

    // Create a new FederatedZkClient, whose _routingDataUpdateInterval should be MAX_VALUE
    FederatedZkClient federatedZkClient = new FederatedZkClient(
        new RealmAwareZkClient.RealmAwareZkConnectionConfig.Builder()
            .setRoutingDataSourceType(RoutingDataReaderType.ZK.name())
            .setRoutingDataSourceEndpoint(zkRealm).build(),
        new RealmAwareZkClient.RealmAwareZkClientConfig());

    // Add newShardingKey to ZK's routing data
    zkRealmRecord.getListField(MetadataStoreRoutingConstants.ZNRECORD_LIST_FIELD_KEY)
        .add(newShardingKey);
    zkClient
        .writeData(MetadataStoreRoutingConstants.ROUTING_DATA_PATH + "/" + zkRealm, zkRealmRecord);

    try {
      Assert.assertFalse(federatedZkClient.exists(newShardingKey));
      Assert.fail("NoSuchElementException expected!");
    } catch (NoSuchElementException e) {
      // Expected because it should not read from the routing data source because of the throttle
    }

    // Clean up
    zkClient.deleteRecursively(MetadataStoreRoutingConstants.ROUTING_DATA_PATH);
    zkClient.close();
    federatedZkClient.close();
    System.clearProperty(RoutingSystemPropertyKeys.UPDATE_ROUTING_DATA_ON_CACHE_MISS);
    System.clearProperty(RoutingSystemPropertyKeys.ROUTING_DATA_UPDATE_INTERVAL_MS);
  }

  /*
   * Tests that close() works.
   * TODO: test that all raw zkClients are closed after FederatedZkClient close() is called. This
   * could help avoid ZkClient leakage.
   */
  @Test(dependsOnMethods = "testRoutingDataUpdateThrottle")
  public void testClose() {
    Assert.assertFalse(_realmAwareZkClient.isClosed());

    _realmAwareZkClient.close();

    Assert.assertTrue(_realmAwareZkClient.isClosed());

    // Client is closed, so operation should not be executed.
    try {
      _realmAwareZkClient.createPersistent(TEST_REALM_ONE_VALID_PATH);
      Assert
          .fail("createPersistent() should not be executed because RealmAwareZkClient is closed.");
    } catch (IllegalStateException ex) {
      Assert.assertEquals(ex.getMessage(), "FederatedZkClient is closed!");
    }
  }
}
| |
package basementbobs.queencitybuschaser.ui;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import android.app.AlertDialog;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.Paint.Style;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBarActivity;
import android.util.SparseArray;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.ScrollView;
import android.widget.TextView;
import android.widget.Toast;
import basementbobs.queencitybuschaser.api.TransitLiveApi;
import basementbobs.queencitybuschaser.api.TransitLiveDetour;
import basementbobs.queencitybuschaser.api.TransitLiveLocationData;
import basementbobs.queencitybuschaser.api.TransitLiveRouteData;
import basementbobs.queencitybuschaser.api.TransitLiveStopData;
import basementbobs.queencitybuschaser.ui.AlertDialogFragment.AlertDialogListener;
import basementbobs.queencitybuschaser.ui.MapMenuRouteSelector.OnRouteSelectChangedListener;
import basementbobs.queencitybuschaser.utils.BitmapUtils;
import basementbobs.queencitybuschaser.utils.DataRetainer;
import basementbobs.queencitybuschaser.utils.ManagedAsyncTask;
import basementbobs.transitdemo.R;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.GoogleMap.OnCameraChangeListener;
import com.google.android.gms.maps.GoogleMap.OnInfoWindowClickListener;
import com.google.android.gms.maps.GoogleMap.OnMarkerClickListener;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.BitmapDescriptor;
import com.google.android.gms.maps.model.BitmapDescriptorFactory;
import com.google.android.gms.maps.model.CameraPosition;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.LatLngBounds;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.gms.maps.model.Polyline;
import com.google.android.gms.maps.model.PolylineOptions;
public class MainActivity extends ActionBarActivity implements OnRouteSelectChangedListener, AlertDialogListener {
public static final String TAG_NETWORK_ERROR = "networkerror";
public static final String TAG_DATARETAINER = "MainActivityRetainer";
private static final int UPDATE_RATE = 2000;
private static final int STOP_MARKER_WIDTH = 12;
private static final int STOP_POLYLINE_WIDTH = 4;
private static final double ROUTE_ANIMATE_ZOOM_THRESHOLD = 15;
/**
 * Non-UI state retained across configuration changes (e.g. rotation) via a retained
 * fragment looked up by tag. All collections are keyed by ids used elsewhere in this
 * activity (bus ids or route ids — see field comments).
 */
public static class MainDataRetainer extends DataRetainer {
public static MainDataRetainer getInstance(FragmentManager fm, String tag) {
return DataRetainer.getInstance(MainDataRetainer.class, fm, tag);
}
// Latest bus locations, keyed by route id (see updateMapWithBusLocations()).
public SparseArray<List<TransitLiveLocationData>> busLocations = new SparseArray<List<TransitLiveLocationData>>();
// Map markers for buses, keyed by bus id.
public SparseArray<Marker> busMarkers = new SparseArray<Marker>();
// Known route metadata, keyed by route id.
public SparseArray<TransitLiveRouteData> routes = new SparseArray<TransitLiveRouteData>();
// Pre-rendered marker icons, keyed by route id.
public SparseArray<BitmapDescriptor> routeBitmaps = new SparseArray<BitmapDescriptor>();
// Route ids the user currently has selected in the drawer.
public HashSet<Integer> selectedRoutes = new HashSet<Integer>();
// Stop metadata, keyed by stop id.
public SparseArray<TransitLiveStopData> stops = new SparseArray<TransitLiveStopData>();
// Stop ids belonging to each route, keyed by route id.
public SparseArray<List<Integer>> routeStopIds = new SparseArray<List<Integer>>();
// Stop markers placed for each route, keyed by route id.
public SparseArray<List<Marker>> stopMarkers = new SparseArray<List<Marker>>();
// Active detours, keyed by id — presumably route id; verify against the API layer.
public SparseArray<TransitLiveDetour> detours = new SparseArray<TransitLiveDetour>();
// Route path points, keyed by route id.
public SparseArray<List<LatLng>> paths = new SparseArray<List<LatLng>>();
// Polylines drawn for each route path, keyed by route id.
public SparseArray<Polyline> pathPolyLines = new SparseArray<Polyline>();
// Bus currently being followed by the camera; null when none.
public Integer followBusId;
// Whether stop markers should be visible (driven by camera zoom level).
public boolean enableStopMarkers;
}
// Navigation drawer and the action-bar toggle that animates the home icon.
DrawerLayout drawer;
ActionBarDrawerToggle drawerToggle;
// Container inside the drawer that holds the per-route selector rows.
LinearLayout drawerRouteList;
// "Clear all" control that deselects every route.
TextView clearAllButton;
// Button to stop following the currently-followed bus.
Button cancelFollowButton;
// Google Map hosted in a support fragment; `map` is resolved lazily after layout.
SupportMapFragment mapFragment;
GoogleMap map;
// Retained non-UI state (survives configuration changes).
MainDataRetainer dr;
UpdateRouteListTask updateRouteListTask;
// Main-thread handler driving the periodic bus-location poll.
Handler handler;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // Non-UI state (markers, routes, selections) survives rotation via a retained fragment.
    dr = MainDataRetainer.getInstance(getSupportFragmentManager(), TAG_DATARETAINER);

    // Navigation drawer plus the action-bar toggle that animates the home icon.
    drawer = (DrawerLayout) findViewById(R.id.main_container);
    drawerToggle = new ActionBarDrawerToggle(
            this,
            drawer,
            R.drawable.ic_drawer,
            R.string.drawer_open,
            R.string.drawer_close);
    drawer.setScrimColor(Color.TRANSPARENT);
    // BUG FIX: DrawerLayout.setDrawerListener() holds a single listener, so the original
    // second setDrawerListener() call silently replaced drawerToggle and broke the home-icon
    // open/close animation. A single listener now delegates every event to the toggle first.
    drawer.setDrawerListener(new DrawerLayout.SimpleDrawerListener() {
        @Override
        public void onDrawerSlide(View drawerView, float slideOffset) {
            drawerToggle.onDrawerSlide(drawerView, slideOffset);
        }

        @Override
        public void onDrawerStateChanged(int newState) {
            drawerToggle.onDrawerStateChanged(newState);
            if (newState == DrawerLayout.STATE_SETTLING) {
                // Kill any in-flight fling so the route list stops scrolling while the
                // drawer settles.
                ((ScrollView) drawerRouteList.getParent()).fling(0);
            }
        }

        @Override
        public void onDrawerClosed(View drawerView) {
            drawerToggle.onDrawerClosed(drawerView);
            updateClearAllButton();
        }

        @Override
        public void onDrawerOpened(View drawerView) {
            drawerToggle.onDrawerOpened(drawerView);
            updateClearAllButton();
        }
    });
    drawerRouteList = (LinearLayout) findViewById(R.id.main_drawer_container);

    // Action bar: home button opens/closes the drawer.
    getSupportActionBar().setDisplayHomeAsUpEnabled(true);
    getSupportActionBar().setHomeButtonEnabled(true);

    // "Clear all" deselects every route and removes its map overlays.
    clearAllButton = (TextView) findViewById(R.id.main_drawer_clearall);
    clearAllButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View arg0) {
            for (int routeId : dr.selectedRoutes) {
                removePath(routeId);
                removeStopMarkers(routeId);
            }
            dr.selectedRoutes.clear();
            updateRouteSelector();
            refreshMarkers();
        }
    });
    cancelFollowButton = (Button) findViewById(R.id.main_button_cancelfollow);
    cancelFollowButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View arg0) {
            cancelFollowing();
        }
    });
    handler = new Handler();

    // Map setup is deferred with post() so it runs after the map view finishes inflating.
    mapFragment = (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.main_map);
    mapFragment.getView().post(new Runnable() {
        @Override
        public void run() {
            if (mapFragment == null) {
                mapFragment = (SupportMapFragment) getSupportFragmentManager().findFragmentById(R.id.main_map);
            }
            map = mapFragment.getMap();
            map.setMyLocationEnabled(true);
            map.getUiSettings().setRotateGesturesEnabled(false);
            map.getUiSettings().setTiltGesturesEnabled(false);
            map.getUiSettings().setCompassEnabled(false);
            QcInfoWindow infoWindow = new QcInfoWindow(MainActivity.this, dr);
            map.setInfoWindowAdapter(infoWindow);
            map.setOnMarkerClickListener(new OnMarkerClickListener() {
                @Override
                public boolean onMarkerClick(Marker marker) {
                    try {
                        // Bus markers are matched by snippet text; tapping a bus follows it.
                        int busId;
                        for (int i = 0, size = dr.busMarkers.size(); i < size; i++) {
                            busId = dr.busMarkers.keyAt(i);
                            if (dr.busMarkers.get(busId).getSnippet().equals(marker.getSnippet())) {
                                beginFollowingBus(busId);
                                return false;
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    // false keeps the default behavior (center camera + show info window).
                    return false;
                }
            });
            map.setOnInfoWindowClickListener(new OnInfoWindowClickListener() {
                @Override
                public void onInfoWindowClick(Marker marker) {
                    try {
                        // Assumes the snippet's second comma field is " Route: <id>" —
                        // TODO confirm against the code that builds stop-marker snippets.
                        int routeId = Integer.parseInt(marker.getSnippet().split(",")[1].replace(" Route: ", ""));
                        toggleRoute(routeId);
                        updateRouteSelector();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            });
            map.setOnCameraChangeListener(new OnCameraChangeListener() {
                @Override
                public void onCameraChange(CameraPosition camera) {
                    // Stop markers are only shown when zoomed in past level 14.
                    boolean enable = camera.zoom > 14;
                    dr.enableStopMarkers = enable;
                    updateMarkerVisibility();
                }
            });
        }
    });

    if (savedInstanceState == null) {
        // Fresh launch: frame the city bounds, then fetch route data.
        // (Merged the two identical savedInstanceState == null checks from the original.)
        mapFragment.setRetainInstance(true);
        mapFragment.getView().post(new Runnable() {
            @Override
            public void run() {
                map.moveCamera(CameraUpdateFactory.newLatLngBounds(new LatLngBounds(
                        new LatLng(50.430174, -104.664688),
                        new LatLng(50.486566, -104.543495)), 0));
            }
        });
        startUpdateRoutesTask();
    }
    else if (dr != null) {
        // Recreated after a configuration change: restore the UI from retained data.
        updateRouteSelector();
        // BUG FIX: followBusId is a nullable Integer (null when no bus is followed), so the
        // unconditional call could auto-unbox null and NPE after rotation.
        // TODO confirm beginFollowingBus's parameter type.
        if (dr.followBusId != null) {
            beginFollowingBus(dr.followBusId);
        }
    }
}
/** Re-renders bus markers and, for each selected route, its stop markers and path. */
private void refreshMarkers() {
    final Runnable redraw = new Runnable() {
        @Override
        public void run() {
            // Buses first, then the per-route overlays.
            updateMapWithBusLocations(dr.busLocations);
            for (final Integer selectedRouteId : dr.selectedRoutes) {
                placeStopMarkers(selectedRouteId);
                placePath(selectedRouteId, false);
            }
        }
    };
    // Defer until the map view has completed layout.
    mapFragment.getView().post(redraw);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Populate the action bar from the menu resource; true = show the menu.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
@Override
protected void onStart() {
// NOTE(review): redundant override (only calls super) — could be removed.
super.onStart();
}
@Override
protected void onResume() {
super.onResume();
// Start the bus-location polling loop; the runnable reposts itself every UPDATE_RATE ms.
handler.post(updateBusLocationsRunnable);
}
@Override
protected void onPause() {
super.onPause();
// Stop polling while the activity is not visible (paired with onResume()).
handler.removeCallbacks(updateBusLocationsRunnable);
}
@Override
protected void onStop() {
// NOTE(review): redundant override (only calls super) — could be removed.
super.onStop();
}
@Override
protected void onPostCreate(Bundle savedInstanceState) {
super.onPostCreate(savedInstanceState);
// Sync the toggle's icon with the drawer's restored open/closed state.
drawerToggle.syncState();
}
@Override
public void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
// Let the drawer toggle adapt to the new configuration (e.g. orientation change).
drawerToggle.onConfigurationChanged(newConfig);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
    // Give the drawer toggle first crack at the event (hamburger icon taps).
    if (drawerToggle.onOptionsItemSelected(item)) {
        return true;
    }
    // Manual refresh of the static route data.
    if (item.getItemId() == R.id.action_updateroutedata) {
        startUpdateRoutesTask();
        return true;
    }
    return super.onOptionsItemSelected(item);
}
/** Kicks off a one-shot background fetch of live bus positions. */
public void updateBusLocations() {
    new UpdateBusLocationsTask(this).execute();
}
// Self-rescheduling poller: fetches bus locations every UPDATE_RATE ms.
// Posted in onResume() and removed in onPause(), so it only runs while visible.
private final Runnable updateBusLocationsRunnable = new Runnable() {
    @Override
    public void run() {
        updateBusLocations();
        handler.postDelayed(this, UPDATE_RATE);
    }
};
/**
 * Applies a fresh snapshot of live bus positions to the map.
 *
 * For every bus on a known route (restricted to selected routes when any are
 * selected), moves its existing marker or creates a new one, and pans the
 * camera when that bus is being followed. Markers for buses no longer present
 * in this snapshot are removed afterwards.
 */
public void updateMapWithBusLocations(SparseArray<List<TransitLiveLocationData>> locations) {
    if (map == null || locations == null || dr.routes.size() == 0) {
        return;
    }
    dr.busLocations = locations;
    // Bus ids seen in this update; any marker not in this set is stale.
    HashSet<Integer> newBusIds = new HashSet<Integer>();
    for (int i = 0, size = locations.size(); i < size; i++) {
        int routeId = locations.keyAt(i);
        // only update with selected routes
        if (dr.selectedRoutes.size() > 0 && dr.selectedRoutes.contains(routeId) == false) {
            continue;
        }
        // skip unknown routes
        if (dr.routes.get(routeId) == null) {
            continue;
        }
        for (TransitLiveLocationData bus : locations.get(routeId)) {
            Marker marker = dr.busMarkers.get(bus.getBus_id());
            LatLng latlng = new LatLng(bus.getLatitude(), bus.getLongitude());
            // list of markers that will not be deleted
            newBusIds.add(bus.getBus_id());
            if (marker != null) {
                // update old marker
                marker.setPosition(latlng);
            }
            else {
                // create a new marker
                dr.busMarkers.put(bus.getBus_id(),
                        map.addMarker(new MarkerOptions()
                                .position(latlng)
                                .anchor(0.5f, 0.7f)
                                .snippet(String.format("bus:%s", bus.getBus_id()))
                                .icon(dr.routeBitmaps.get(bus.getRoute_id()))));
            }
            // focus map if marker is set to be followed
            if (bus.getBus_id().equals(dr.followBusId)) {
                map.animateCamera(CameraUpdateFactory.newLatLng(latlng));
            }
        }
    }
    // delete old markers: collect ids first, then remove, to avoid mutating
    // busMarkers while indexing over it.
    ArrayList<Integer> idsToDelete = new ArrayList<Integer>();
    for (int i = 0, size = dr.busMarkers.size(); i < size; i++) {
        int busId = dr.busMarkers.keyAt(i);
        if (newBusIds.contains(busId) == false) {
            idsToDelete.add(busId);
        }
    }
    for (Integer id : idsToDelete) {
        dr.busMarkers.get(id).remove();
        dr.busMarkers.remove(id);
    }
}
/** One-shot background fetch of live bus locations for all routes. */
private class UpdateBusLocationsTask extends ManagedAsyncTask<Void, Void, SparseArray<List<TransitLiveLocationData>>>{
    public UpdateBusLocationsTask(FragmentActivity activity) {
        super(activity);
    }
    @Override
    protected SparseArray<List<TransitLiveLocationData>> doInBackground(Void... params) {
        // No point polling until the static route list has been downloaded.
        if (dr.routes == null || dr.routes.size() == 0) {
            return null;
        }
        try {
            return TransitLiveApi.getAllBusLocations();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null; // network failure is treated as "no update this tick"
    }
    @Override
    protected void onPostExecute(SparseArray<List<TransitLiveLocationData>> locations) {
        if (locations == null) {
            return;
        }
        updateMapWithBusLocations(locations);
    }
}
/**
 * Downloads the static route list off the UI thread.
 * On success the drawer route selector is rebuilt; on failure a retryable
 * error dialog is shown.
 */
private class UpdateRouteListTask extends ManagedAsyncTask<Void, Void, SparseArray<TransitLiveRouteData>>{
    public UpdateRouteListTask(FragmentActivity activity) {
        super(activity);
    }
    @Override
    protected void onPreExecute() {
        Toast.makeText(getApplicationContext(),
                "Updating route data",
                Toast.LENGTH_SHORT).show();
    }
    @Override
    protected SparseArray<TransitLiveRouteData> doInBackground(Void... params) {
        try {
            return TransitLiveApi.getAllRouteData();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null; // signals failure to onPostExecute
    }
    @Override
    protected void onPostExecute(SparseArray<TransitLiveRouteData> routes) {
        if (routes == null) {
            AlertDialogFragment fragment = new AlertDialogFragment.Builder()
                    .setTitle("Error")
                    .setMessage("Unable to update routes. Please check your network settings and try again.")
                    .setPositiveText("Retry")
                    .build();
            // BUG FIX: use the shared tag constant. onClick() compares against
            // TAG_NETWORK_ERROR, so a diverging literal here would silently
            // disable the Retry button.
            fragment.show(getSupportFragmentManager(), TAG_NETWORK_ERROR);
            return;
        }
        dr.routes = routes;
        updateRouteSelector();
    }
}
/** Background fetch of the current detour list, re-indexed by stop id. */
private class UpdateDetoursTask extends ManagedAsyncTask<Void, Void, List<TransitLiveDetour>>{
    public UpdateDetoursTask(FragmentActivity activity) {
        super(activity);
    }
    @Override
    protected List<TransitLiveDetour> doInBackground(Void... unused) {
        try {
            return TransitLiveApi.getDetours().getDetours();
        } catch (Exception e) {
            e.printStackTrace();
            return null;
        }
    }
    @Override
    protected void onPostExecute(List<TransitLiveDetour> fetched) {
        // Download failed: keep whatever detour data we already had.
        if (fetched == null) {
            return;
        }
        dr.detours.clear();
        for (TransitLiveDetour entry : fetched) {
            dr.detours.put(entry.getStopId(), entry);
        }
    }
}
/**
 * Resets all cached transit data and kicks off a fresh download of the route
 * list (plus detours). UI state (selector, buttons, markers) is cleared
 * before the map itself is wiped, then the download task is started.
 */
private void startUpdateRoutesTask() {
    // also download detours
    new UpdateDetoursTask(this).execute();
    // Cancel an in-flight route download so its result can't race this reset.
    if (updateRouteListTask != null) {
        updateRouteListTask.cancel(true);
    }
    dr.busLocations.clear();
    dr.detours.clear();
    dr.enableStopMarkers = false;
    dr.followBusId = null;
    dr.routeStopIds.clear();
    dr.selectedRoutes.clear();
    dr.stops.clear();
    dr.routes.clear();
    dr.busMarkers.clear();
    dr.pathPolyLines.clear();
    dr.routeBitmaps.clear();
    dr.stopMarkers.clear();
    updateRouteSelector();
    updateClearAllButton();
    cancelFollowing();
    refreshMarkers();
    // Posted so the clear runs after the view is attached, like refreshMarkers().
    mapFragment.getView().post(new Runnable() {
        @Override
        public void run() {
            map.clear();
        }
    });
    updateRouteListTask = new UpdateRouteListTask(this);
    updateRouteListTask.execute();
}
/**
 * Rebuilds the drawer's route list and the per-route tinted bus icon bitmaps.
 * Called after the route list is (re)downloaded or the selection changes.
 */
private void updateRouteSelector() {
    drawerRouteList.removeAllViews();
    int height = (int) BitmapUtils.convertDpToPixel(25, MainActivity.this);
    int width = (int) BitmapUtils.convertDpToPixel(26, MainActivity.this);
    // Base bus icon, decoded and scaled once; tinted per route inside the loop.
    Bitmap bmap = BitmapFactory.decodeResource(getResources(), R.drawable.busicon);
    bmap = Bitmap.createScaledBitmap(bmap.copy(Bitmap.Config.ARGB_8888, true), width, height, false);
    for (int i = 0; i < dr.routes.size(); i++) {
        int routeId = dr.routes.keyAt(i);
        TransitLiveRouteData route = dr.routes.get(routeId);
        // create marker icon: multiply-tint a mutable copy with the route colour.
        int colour = route.getColourInt();
        Bitmap copy = bmap.copy(Bitmap.Config.ARGB_8888, true);
        Canvas canvas = new Canvas(copy);
        canvas.drawColor(BitmapUtils.addAlpha(colour, 210), PorterDuff.Mode.MULTIPLY);
        dr.routeBitmaps.put(routeId, BitmapDescriptorFactory.fromBitmap(copy));
        // create route menu item
        MapMenuRouteSelector selector = new MapMenuRouteSelector(MainActivity.this, route);
        selector.setChecked(dr.selectedRoutes.contains(routeId)); // do this before the listener
        selector.setOnRouteSelectChangedListener(MainActivity.this);
        drawerRouteList.addView(selector);
    }
    updateClearAllButton();
}
private void updateClearAllButton() {
    // Show the "clear all" control only while at least one route is selected.
    boolean hasSelection = !dr.selectedRoutes.isEmpty();
    clearAllButton.setVisibility(hasSelection ? View.VISIBLE : View.GONE);
}
/**
 * Downloads the stop list for one route and indexes it into dr.stops and
 * dr.routeStopIds, then places the markers if the route is still selected.
 */
private class UpdateStopsTask extends ManagedAsyncTask<Void, Void, List<TransitLiveStopData>>{
    int routeId;
    public UpdateStopsTask(FragmentActivity activity, int routeId) {
        super(activity);
        this.routeId = routeId;
    }
    @Override
    protected void onPreExecute() {
        Toast.makeText(getActivity(),
                "Downloading route #" + routeId,
                Toast.LENGTH_SHORT).show();
    }
    @Override
    protected List<TransitLiveStopData> doInBackground(Void... params) {
        try {
            return TransitLiveApi.getStopsData(routeId).getStops();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null; // network failure: silently dropped
    }
    @Override
    protected void onPostExecute(List<TransitLiveStopData> result) {
        if (result == null) {
            return;
        }
        // routeId -> ordered list of its stop ids; stopId -> stop data.
        ArrayList<Integer> routeMapping = new ArrayList<Integer>();
        dr.routeStopIds.put(routeId, routeMapping);
        for(TransitLiveStopData stop : result) {
            dr.stops.put(stop.getStopId(), stop);
            routeMapping.add(stop.getStopId());
        }
        // The user may have deselected the route while the download ran.
        if (dr.selectedRoutes.contains(routeId)) {
            placeStopMarkers(routeId);
        }
    }
}
/**
 * Places one marker per stop of the given route, colour-coded by route and
 * highlighted when the stop currently has a detour. Markers start with the
 * zoom-based visibility flag (dr.enableStopMarkers).
 */
private void placeStopMarkers(final int routeId) {
    List<Marker> markers = dr.stopMarkers.get(routeId);
    List<Integer> stopIds = dr.routeStopIds.get(routeId);
    TransitLiveRouteData route = dr.routes.get(routeId);
    // Also bail out when the route itself is unknown (e.g. cleared mid-refresh)
    // instead of crashing on route.getColourInt() below.
    if (map == null || route == null || stopIds == null || stopIds.size() == 0) {
        return;
    }
    // Clear any markers already placed for this route (re-placement after a
    // detour refresh, etc.).
    if (markers != null && markers.size() > 0) {
        for (Marker marker : markers) {
            marker.remove();
        }
        // BUG FIX: drop the stale Marker objects too; previously they stayed
        // in the list forever and were re-iterated on every later removal.
        markers.clear();
    } else {
        markers = new ArrayList<Marker>();
        dr.stopMarkers.put(routeId, markers);
    }
    // Build the two icon variants once and reuse them for every stop.
    Bitmap regularBmp = createStopMarker(route.getColourInt(), this, false);
    Bitmap detourBmp = createStopMarker(route.getColourInt(), this, true);
    BitmapDescriptor regularBmpd = BitmapDescriptorFactory.fromBitmap(regularBmp);
    BitmapDescriptor detourBmpd = BitmapDescriptorFactory.fromBitmap(detourBmp);
    for (int stopId : stopIds) {
        TransitLiveStopData stop = dr.stops.get(stopId);
        LatLng latlng = new LatLng(stop.getLatitude(), stop.getLongitude());
        boolean isDetour = dr.detours.get(stopId) != null;
        markers.add(map.addMarker(new MarkerOptions()
                .position(latlng)
                .title(stop.getName())
                .anchor(0.5f, 0.5f)
                .snippet(String.format("stop:%s", stopId))
                .visible(dr.enableStopMarkers)
                .icon(isDetour ? detourBmpd : regularBmpd)));
    }
}
/**
 * Renders a filled circle bitmap used as a stop marker icon.
 *
 * @param color    base route colour; drawn at alpha 200
 * @param context  used for dp-to-pixel conversion
 * @param isDetour when true the colour is XORed with 0x11FFFFFF (inverts the
 *                 RGB channels and nudges alpha) — presumably to make detoured
 *                 stops visually distinct; TODO confirm intended palette
 * @return a square ARGB_8888 bitmap of STOP_MARKER_WIDTH dp per side
 */
public static Bitmap createStopMarker(int color, Context context, boolean isDetour) {
    int width = BitmapUtils.convertDpToPixel(STOP_MARKER_WIDTH, context);
    Paint paint = new Paint();
    paint.setColor(BitmapUtils.addAlpha(color, 200));
    if (isDetour) {
        paint.setColor(paint.getColor() ^ 0x11FFFFFF);
    }
    Bitmap bmp = Bitmap.createBitmap(width, width, Bitmap.Config.ARGB_8888);
    Canvas canvas = new Canvas(bmp);
    // Circle inscribed in the square bitmap (integer division is fine here).
    canvas.drawCircle(width/2, width/2, width/2, paint);
    return bmp;
}
private void removeStopMarkers(int routeId) {
    // Nothing to do when no markers were ever placed for this route.
    final List<Marker> routeMarkers = dr.stopMarkers.get(routeId);
    if (routeMarkers == null) {
        return;
    }
    for (Marker stopMarker : routeMarkers) {
        stopMarker.remove();
    }
    dr.stopMarkers.remove(routeId);
}
/**
 * Downloads the polyline path for one route, caches it in dr.paths, and draws
 * it if the route is still selected when the download completes.
 */
private class UpdatePathTask extends ManagedAsyncTask<Void, Void, List<LatLng>>{
    int routeId;
    public UpdatePathTask(FragmentActivity activity, int routeId) {
        super(activity);
        this.routeId = routeId;
    }
    @Override
    protected List<LatLng> doInBackground(Void... params) {
        try {
            return TransitLiveApi.getPathData(routeId);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null; // network failure: silently dropped
    }
    @Override
    protected void onPostExecute(List<LatLng> result) {
        if (result == null) {
            return;
        }
        dr.paths.put(routeId, result);
        // The user may have deselected the route while the download ran.
        if (dr.selectedRoutes.contains(routeId)) {
            placePath(routeId, true);
        }
    }
}
/**
 * Draws (or redraws) the polyline for a route, closing the loop back to its
 * first point. When cameraEffects is set and the camera is zoomed out past
 * ROUTE_ANIMATE_ZOOM_THRESHOLD, animates the camera to frame the whole route.
 */
private void placePath(int routeId, boolean cameraEffects) {
    List<LatLng> path = dr.paths.get(routeId);
    // Guard before doing any work: the map may not be ready, the path may not
    // be downloaded yet, and an empty path would crash on path.get(0) below
    // (BUG FIX: the old code only checked for null, not emptiness).
    if (map == null || path == null || path.isEmpty()) {
        return;
    }
    // Replace any previously drawn polyline for this route.
    Polyline existing = dr.pathPolyLines.get(routeId);
    if (existing != null) {
        existing.remove();
    }
    TransitLiveRouteData route = dr.routes.get(routeId);
    PolylineOptions polyline = new PolylineOptions();
    LatLngBounds.Builder bounds = new LatLngBounds.Builder();
    polyline.color(BitmapUtils.addAlpha(route.getColourInt(), 100))
            .width(BitmapUtils.convertDpToPixel(STOP_POLYLINE_WIDTH, this));
    for (LatLng latlng : path) {
        polyline.add(latlng);
        bounds.include(latlng);
    }
    // Close the loop back to the start of the route.
    polyline.add(path.get(0));
    dr.pathPolyLines.put(routeId,
            map.addPolyline(polyline));
    if (cameraEffects && map.getCameraPosition().zoom < ROUTE_ANIMATE_ZOOM_THRESHOLD) {
        map.animateCamera(CameraUpdateFactory.newLatLngBounds(bounds.build(), 4));
    }
}
private void removePath(int routeId) {
    // Detach the route's polyline from the map (if one was drawn), then forget it.
    final Polyline drawnLine = dr.pathPolyLines.get(routeId);
    if (drawnLine != null) {
        drawnLine.remove();
    }
    dr.pathPolyLines.remove(routeId);
}
@Override
public void onRouteSelectChangedListener(MapMenuRouteSelector selector, boolean isChecked) {
    // A drawer checkbox changed: mirror the new state onto the map.
    enableRoute(selector.getRouteData().getRouteId(), isChecked);
}
private void toggleRoute(int routeId) {
    // Flip the route's current selection state.
    boolean currentlySelected = dr.selectedRoutes.contains(routeId);
    enableRoute(routeId, !currentlySelected);
}
/**
 * Selects or deselects a route on the map.
 *
 * Selecting triggers stop/path downloads on first use (or places the cached
 * data immediately), auto-closes the drawer shortly afterwards, and warns
 * once when a third route becomes active. Deselecting removes the route's
 * stop markers and path. Bus markers are refreshed either way.
 */
private void enableRoute(int routeId, boolean enable) {
    if (enable) {
        // add() returns false when the route was already selected; in that
        // case skip all the setup below.
        if (dr.selectedRoutes.add(routeId)) {
            if (dr.routeStopIds.get(routeId) == null) {
                new UpdateStopsTask(this, routeId).execute();
            } else {
                placeStopMarkers(routeId);
            }
            if (dr.paths.get(routeId) == null) {
                new UpdatePathTask(this, routeId).execute();
            } else {
                placePath(routeId, true);
            }
            // Close the drawer after a short delay so the checkbox change is visible.
            handler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    if (drawer != null) {
                        drawer.closeDrawers();
                    }
                }
            }, 250);
            // Exactly-three check: the warning fires once as the count crosses 3.
            if (dr.selectedRoutes.size() == 3) {
                Toast.makeText(this,
                        "Note: Selecting too many routes may result in a poorly performing map",
                        Toast.LENGTH_LONG).show();
            }
        }
    } else {
        dr.selectedRoutes.remove(routeId);
        removeStopMarkers(routeId);
        removePath(routeId);
    }
    updateMapWithBusLocations(dr.busLocations);
}
private void updateMarkerVisibility() {
    // Sync every stop marker with the current zoom-based visibility flag,
    // only touching markers whose state actually changed.
    final boolean shouldShow = dr.enableStopMarkers;
    for (int i = 0, size = dr.stopMarkers.size(); i < size; i++) {
        List<Marker> routeMarkers = dr.stopMarkers.get(dr.stopMarkers.keyAt(i));
        for (Marker stopMarker : routeMarkers) {
            if (stopMarker.isVisible() != shouldShow) {
                stopMarker.setVisible(shouldShow);
            }
        }
    }
}
private void beginFollowingBus(Integer busId) {
    // A null id means "nothing to follow" — leave the current state untouched.
    if (busId == null) {
        return;
    }
    dr.followBusId = busId;
    cancelFollowButton.setVisibility(View.VISIBLE);
}
private void cancelFollowing() {
    // Close the info window of the bus we were tracking, if it still exists.
    Integer followedId = dr.followBusId;
    if (followedId != null) {
        Marker followedMarker = dr.busMarkers.get(followedId);
        if (followedMarker != null) {
            followedMarker.hideInfoWindow();
        }
    }
    dr.followBusId = null;
    cancelFollowButton.setVisibility(View.GONE);
}
// ALERT DIALOG LISTENER
@Override
public void onClick(String tag, int button) {
    // Only the positive ("Retry") button of the network-error dialog does
    // anything; negative/neutral presses are deliberately ignored.
    if (button == AlertDialog.BUTTON_POSITIVE && TAG_NETWORK_ERROR.equals(tag)) {
        startUpdateRoutesTask();
    }
}
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.impl.smartPointers;
import com.intellij.openapi.project.Project;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.impl.PsiSubstitutorImpl;
import com.intellij.psi.impl.source.PsiImmediateClassType;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.NullableFunction;
import com.intellij.util.containers.ContainerUtil;
import java.util.HashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Stream;
/**
* @author max
*/
/**
 * Creates {@link SmartTypePointer}s: pointers that survive PSI reparses by
 * re-resolving the underlying {@link PsiType} on demand from smart PSI
 * element pointers.
 *
 * @author max
 */
public class SmartTypePointerManagerImpl extends SmartTypePointerManager {
  // Shared fallback returned when a type cannot be "smartened"; always resolves to null.
  private static final SmartTypePointer NULL_POINTER = new SmartTypePointer() {
    @Override
    public PsiType getType() { return null; }
  };

  private final SmartPointerManager myPsiPointerManager;
  private final Project myProject;

  public SmartTypePointerManagerImpl(final SmartPointerManager psiPointerManager, final Project project) {
    myPsiPointerManager = psiPointerManager;
    myProject = project;
  }

  @Override
  @NotNull
  public SmartTypePointer createSmartTypePointer(@NotNull PsiType type) {
    final SmartTypePointer pointer = type.accept(new SmartTypeCreatingVisitor());
    return pointer != null ? pointer : NULL_POINTER;
  }

  /** Trivial pointer that just holds the type (used for primitives and unresolved class types). */
  private static class SimpleTypePointer implements SmartTypePointer {
    private final PsiType myType;

    private SimpleTypePointer(@NotNull PsiType type) {
      myType = type;
    }

    @Override
    public PsiType getType() {
      return myType;
    }
  }

  /** Rebuilds an array type from a smart pointer to its component type. */
  private static class ArrayTypePointer extends TypePointerBase<PsiArrayType> {
    private final SmartTypePointer myComponentTypePointer;

    public ArrayTypePointer(@NotNull PsiArrayType type, @NotNull SmartTypePointer componentTypePointer) {
      super(type);
      myComponentTypePointer = componentTypePointer;
    }

    @Nullable
    @Override
    protected PsiArrayType calcType() {
      final PsiType type = myComponentTypePointer.getType();
      return type == null ? null : new PsiArrayType(type);
    }
  }

  /** Rebuilds a wildcard type ({@code ?}, {@code ? extends T} or {@code ? super T}). */
  private static class WildcardTypePointer extends TypePointerBase<PsiWildcardType> {
    private final PsiManager myManager;
    private final SmartTypePointer myBoundPointer;
    private final boolean myIsExtending;

    public WildcardTypePointer(@NotNull PsiWildcardType type, @Nullable SmartTypePointer boundPointer) {
      super(type);
      myManager = type.getManager();
      myBoundPointer = boundPointer;
      myIsExtending = type.isExtends();
    }

    @Override
    protected PsiWildcardType calcType() {
      if (myBoundPointer == null) {
        // No bound pointer means the original wildcard was unbounded.
        return PsiWildcardType.createUnbounded(myManager);
      }
      else {
        final PsiType type = myBoundPointer.getType();
        assert type != null : myBoundPointer;
        if (myIsExtending) {
          return PsiWildcardType.createExtends(myManager, type);
        }
        return PsiWildcardType.createSuper(myManager, type);
      }
    }
  }

  /**
   * Rebuilds a class type from a pointer to the class, per-type-parameter
   * substitution pointers, and pointers to the type's annotations.
   */
  private static class ClassTypePointer extends TypePointerBase<PsiClassType> {
    private final SmartPsiElementPointer myClass;
    private final LanguageLevel myLevel;
    private final Map<SmartPsiElementPointer<PsiTypeParameter>, SmartTypePointer> myMap;
    private final SmartPsiElementPointer[] myAnnotations;

    public ClassTypePointer(@NotNull PsiClassType type,
                            @NotNull SmartPsiElementPointer aClass,
                            @NotNull LanguageLevel languageLevel,
                            @NotNull Map<SmartPsiElementPointer<PsiTypeParameter>, SmartTypePointer> map,
                            @NotNull SmartPsiElementPointer[] annotations) {
      super(type);
      myClass = aClass;
      myLevel = languageLevel;
      myMap = map;
      myAnnotations = annotations;
    }

    @Override
    protected PsiClassType calcType() {
      final PsiElement classElement = myClass.getElement();
      if (!(classElement instanceof PsiClass)) return null;
      // Re-resolve each type parameter's substitution; null values are allowed
      // (they mean "raw"/unsubstituted for that parameter).
      Map<PsiTypeParameter, PsiType> resurrected = new HashMap<>();
      final Set<Map.Entry<SmartPsiElementPointer<PsiTypeParameter>, SmartTypePointer>> set = myMap.entrySet();
      for (Map.Entry<SmartPsiElementPointer<PsiTypeParameter>, SmartTypePointer> entry : set) {
        PsiElement element = entry.getKey().getElement();
        if (element != null) {
          SmartTypePointer typePointer = entry.getValue();
          resurrected.put((PsiTypeParameter)element, typePointer == null ? null : typePointer.getType());
        }
      }
      // Parameters that gained no entry (e.g. new type parameters after an edit)
      // are mapped to null as well, keeping the substitutor complete.
      for (PsiTypeParameter typeParameter : PsiUtil.typeParametersIterable((PsiClass)classElement)) {
        if (!resurrected.containsKey(typeParameter)) {
          resurrected.put(typeParameter, null);
        }
      }
      final PsiSubstitutor resurrectedSubstitutor = PsiSubstitutorImpl.createSubstitutor(resurrected);
      PsiAnnotation[] resurrectedAnnotations = Stream.of(myAnnotations).map(SmartPsiElementPointer::getElement).filter(Objects::nonNull).toArray(PsiAnnotation[]::new);
      return new PsiImmediateClassType((PsiClass)classElement, resurrectedSubstitutor, myLevel, resurrectedAnnotations);
    }
  }

  /** Rebuilds a multi-catch disjunction type from pointers to each disjunct. */
  private class DisjunctionTypePointer extends TypePointerBase<PsiDisjunctionType> {
    private final List<SmartTypePointer> myPointers;

    private DisjunctionTypePointer(@NotNull PsiDisjunctionType type) {
      super(type);
      myPointers = ContainerUtil.map(type.getDisjunctions(), psiType -> createSmartTypePointer(psiType));
    }

    @Override
    protected PsiDisjunctionType calcType() {
      final List<PsiType> types = ContainerUtil.map(myPointers,
                                                    (NullableFunction<SmartTypePointer, PsiType>)typePointer -> typePointer.getType());
      return new PsiDisjunctionType(types, PsiManager.getInstance(myProject));
    }
  }

  /** Dispatches a PsiType to the matching pointer implementation. May return null for unsupported types. */
  private class SmartTypeCreatingVisitor extends PsiTypeVisitor<SmartTypePointer> {
    @Override
    public SmartTypePointer visitPrimitiveType(PsiPrimitiveType primitiveType) {
      return new SimpleTypePointer(primitiveType);
    }

    @Override
    public SmartTypePointer visitArrayType(PsiArrayType arrayType) {
      final SmartTypePointer componentTypePointer = arrayType.getComponentType().accept(this);
      return componentTypePointer != null ? new ArrayTypePointer(arrayType, componentTypePointer) : null;
    }

    @Override
    public SmartTypePointer visitWildcardType(PsiWildcardType wildcardType) {
      final PsiType bound = wildcardType.getBound();
      final SmartTypePointer boundPointer = bound == null ? null : bound.accept(this);
      return new WildcardTypePointer(wildcardType, boundPointer);
    }

    @Override
    public SmartTypePointer visitClassType(PsiClassType classType) {
      final PsiClassType.ClassResolveResult resolveResult = classType.resolveGenerics();
      final PsiClass aClass = resolveResult.getElement();
      if (aClass == null) {
        // Unresolved class type: delegate to registered factories / simple pointer.
        return createClassReferenceTypePointer(classType);
      }
      final PsiSubstitutor substitutor = resolveResult.getSubstitutor();
      final HashMap<SmartPsiElementPointer<PsiTypeParameter>, SmartTypePointer> pointerMap = new HashMap<>();
      final Map<PsiTypeParameter, PsiType> map = new HashMap<>();
      for (PsiTypeParameter typeParameter : PsiUtil.typeParametersIterable(aClass)) {
        final PsiType substitutionResult = substitutor.substitute(typeParameter);
        if (substitutionResult != null) {
          final SmartPsiElementPointer<PsiTypeParameter> pointer = myPsiPointerManager.createSmartPsiElementPointer(typeParameter);
          SmartTypePointer typePointer = substitutionResult.accept(this);
          pointerMap.put(pointer, typePointer);
          // BUG FIX: accept() may return null (this visitor returns null for
          // unsupported/unresolvable component types). Tolerate it the same way
          // ClassTypePointer.calcType() does instead of throwing an NPE here.
          map.put(typeParameter, typePointer == null ? null : typePointer.getType());
        } else {
          map.put(typeParameter, null);
        }
      }
      SmartPsiElementPointer[] annotationPointers =
        Stream
          .of(classType.getAnnotations())
          .map(myPsiPointerManager::createSmartPsiElementPointer)
          .toArray(SmartPsiElementPointer[]::new);
      LanguageLevel languageLevel = classType.getLanguageLevel();
      return new ClassTypePointer(new PsiImmediateClassType(aClass,
                                                            PsiSubstitutorImpl.createSubstitutor(map),
                                                            languageLevel,
                                                            classType.getAnnotations()),
                                  myPsiPointerManager.createSmartPsiElementPointer(aClass),
                                  languageLevel,
                                  pointerMap,
                                  annotationPointers);
    }

    @Override
    public SmartTypePointer visitDisjunctionType(PsiDisjunctionType disjunctionType) {
      return new DisjunctionTypePointer(disjunctionType);
    }
  }

  /**
   * Pointer for class types whose class did not resolve: tries the registered
   * {@link ClassTypePointerFactory} extensions first, then falls back to a
   * simple value-holding pointer.
   */
  @NotNull
  private SmartTypePointer createClassReferenceTypePointer(@NotNull PsiClassType classType) {
    for (ClassTypePointerFactory factory : ClassTypePointerFactory.EP_NAME.getExtensions()) {
      SmartTypePointer pointer = factory.createClassTypePointer(classType, myProject);
      if (pointer != null) {
        return pointer;
      }
    }
    return new SimpleTypePointer(classType);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.omid.benchmarks.tso;
import com.google.common.util.concurrent.RateLimiter;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.google.inject.Binder;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Module;
import org.apache.omid.benchmarks.utils.IntegerGenerator;
import org.apache.omid.committable.CommitTable;
import org.apache.omid.metrics.Counter;
import org.apache.omid.metrics.MetricsRegistry;
import org.apache.omid.metrics.Timer;
import org.apache.omid.tso.util.DummyCellIdImpl;
import org.apache.omid.tso.client.AbortException;
import org.apache.omid.tso.client.CellId;
import org.apache.omid.tso.client.OmidClientConfiguration;
import org.apache.omid.tso.client.TSOClient;
import org.apache.omid.tso.client.TSOFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static com.codahale.metrics.MetricRegistry.name;
class RawTxRunner implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(RawTxRunner.class);
private static volatile int txRunnerCounter = 0;
private int txRunnerId = txRunnerCounter++;
// Config params
private final int writesetSize;
private final boolean fixedWriteSetSize;
private final long commitDelayInMs;
private final int percentageOfReadOnlyTxs;
private final IntegerGenerator cellIdGenerator;
private final Random randomGen;
// Main elements
private final TSOClient tsoClient;
private final CommitTable.Client commitTableClient;
// Asynchronous executor for tx post begin sequence: TimestampListener -> Committer -> CommitListener
private final ScheduledExecutorService callbackExec =
Executors.newScheduledThreadPool(1, new ThreadFactoryBuilder()
.setNameFormat("tx-runner-" + txRunnerId + "-callback")
.setUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
@Override
public void uncaughtException(Thread t, Throwable e) {
LOG.error("Thread {} threw exception", t, e);
}
}).build());
// Statistics to save
private final Timer timestampTimer;
private final Timer commitTimer;
private final Timer abortTimer;
private final Counter errorCounter;
// Allows to setup a maximum rate for the client in req/sec
private final RateLimiter rateLimiter;
// Is this TxRunner still running?
private volatile boolean isRunning = false;
RawTxRunner(final TSOServerBenchmarkConfig expConfig) throws IOException, InterruptedException {
// Injector configuration
List<Module> guiceModules = new ArrayList<>();
guiceModules.add(new Module() {
@Override
public void configure(Binder binder) {
binder.bind(MetricsRegistry.class).toInstance(expConfig.getMetrics());
}
});
guiceModules.add(expConfig.getCommitTableStoreModule());
Injector injector = Guice.createInjector(guiceModules);
// Tx Runner config
this.writesetSize = expConfig.getWritesetSize();
this.fixedWriteSetSize = expConfig.isFixedWritesetSize();
this.commitDelayInMs = expConfig.getCommitDelayInMs();
this.percentageOfReadOnlyTxs = expConfig.getPercentageOfReadOnlyTxs();
this.cellIdGenerator = expConfig.getCellIdGenerator();
this.randomGen = new Random(System.currentTimeMillis() * txRunnerId); // to make it channel dependent
int txRateInReqPerSec = expConfig.getTxRateInRequestPerSecond();
long warmUpPeriodInSecs = expConfig.getWarmUpPeriodInSecs();
LOG.info("TxRunner-{} [ Tx Rate (Req per Sec) -> {} ]", txRunnerId, txRateInReqPerSec);
LOG.info("TxRunner-{} [ Warm Up Period -> {} Secs ]", txRunnerId, warmUpPeriodInSecs);
LOG.info("TxRunner-{} [ Cell Id Distribution Generator -> {} ]", txRunnerId, expConfig.getCellIdGenerator().getClass());
LOG.info("TxRunner-{} [ Max Tx Size -> {} Fixed: {} ]", txRunnerId, writesetSize, fixedWriteSetSize);
LOG.info("TxRunner-{} [ Commit delay -> {} Ms ]", txRunnerId, commitDelayInMs);
LOG.info("TxRunner-{} [ % of Read-Only Tx -> {} % ]", txRunnerId, percentageOfReadOnlyTxs);
// Commit table client initialization
CommitTable commitTable = injector.getInstance(CommitTable.class);
this.commitTableClient = commitTable.getClient();
// Stat initialization
MetricsRegistry metrics = injector.getInstance(MetricsRegistry.class);
String hostName = InetAddress.getLocalHost().getHostName();
this.timestampTimer = metrics.timer(name("tx_runner", Integer.toString(txRunnerId), hostName, "timestamp"));
this.commitTimer = metrics.timer(name("tx_runner", Integer.toString(txRunnerId), hostName, "commit"));
this.abortTimer = metrics.timer(name("tx_runner", Integer.toString(txRunnerId), hostName, "abort"));
this.errorCounter = metrics.counter(name("tx_runner", Integer.toString(txRunnerId), hostName, "errors"));
LOG.info("TxRunner-{} [ Metrics provider module -> {} ]", txRunnerId, expConfig.getMetrics().getClass());
// TSO Client initialization
OmidClientConfiguration tsoClientConf = expConfig.getOmidClientConfiguration();
this.tsoClient = TSOClient.newInstance(tsoClientConf);
LOG.info("TxRunner-{} [ Connection Type {}/Connection String {} ]", txRunnerId,
tsoClientConf.getConnectionType(), tsoClientConf.getConnectionString());
// Limiter for configured request per second
this.rateLimiter = RateLimiter.create((double) txRateInReqPerSec, warmUpPeriodInSecs, TimeUnit.SECONDS);
}
@Override
public void run() {
isRunning = true;
while (isRunning) {
rateLimiter.acquire();
long tsRequestTime = System.nanoTime();
final TSOFuture<Long> tsFuture = tsoClient.getNewStartTimestamp();
tsFuture.addListener(new TimestampListener(tsFuture, tsRequestTime), callbackExec);
}
shutdown();
}
void stop() {
isRunning = false;
}
private void shutdown() {
try {
LOG.info("Finishing TxRunner in 3 secs", txRunnerId);
boolean wasSuccess = callbackExec.awaitTermination(3, TimeUnit.SECONDS);
if (!wasSuccess) {
callbackExec.shutdownNow();
}
commitTableClient.close();
tsoClient.close().get();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
// ignore
} catch (ExecutionException | IOException e) {
// ignore
} finally {
LOG.info("TxRunner {} finished", txRunnerId);
}
}
private class TimestampListener implements Runnable {
final TSOFuture<Long> tsFuture;
final long tsRequestTime;
TimestampListener(TSOFuture<Long> tsFuture, long tsRequestTime) {
this.tsFuture = tsFuture;
this.tsRequestTime = tsRequestTime;
}
@Override
public void run() {
try {
long txId = tsFuture.get();
timestampTimer.update(System.nanoTime() - tsRequestTime);
if (commitDelayInMs <= 0) {
callbackExec.execute(new Committer(txId));
} else {
callbackExec.schedule(new Committer(txId), commitDelayInMs, TimeUnit.MILLISECONDS);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
errorCounter.inc();
} catch (ExecutionException e) {
errorCounter.inc();
}
}
}
private class Committer implements Runnable {
final long txId;
Committer(long txId) {
this.txId = txId;
}
@Override
public void run() {
int txWritesetSize = calculateTxWritesetSize();
if (txWritesetSize == 0) {
return; // Read only tx, no need to commit
}
// Otherwise, we create the writeset...
final Set<CellId> cells = new HashSet<>();
for (byte i = 0; i < txWritesetSize; i++) {
long cellId = cellIdGenerator.nextInt();
cells.add(new DummyCellIdImpl(cellId));
}
// ... and we commit the transaction
long startCommitTimeInNs = System.nanoTime();
final TSOFuture<Long> commitFuture = tsoClient.commit(txId, cells);
commitFuture.addListener(new CommitListener(txId, commitFuture, startCommitTimeInNs), callbackExec);
}
private int calculateTxWritesetSize() {
int txSize = 0;
boolean readOnly = (randomGen.nextFloat() * 100) < percentageOfReadOnlyTxs;
if (!readOnly) {
if (fixedWriteSetSize) {
txSize = writesetSize;
} else {
txSize = randomGen.nextInt(writesetSize) + 1;
}
}
return txSize;
}
}
private class CommitListener implements Runnable {
final long txId;
final long commitRequestTime;
final TSOFuture<Long> commitFuture;
CommitListener(long txId, TSOFuture<Long> commitFuture, long commitRequestTime) {
this.txId = txId;
this.commitFuture = commitFuture;
this.commitRequestTime = commitRequestTime;
}
@Override
public void run() {
try {
commitFuture.get();
commitTableClient.completeTransaction(txId).get();
commitTimer.update(System.nanoTime() - commitRequestTime);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
errorCounter.inc();
} catch (ExecutionException e) {
if (e.getCause() instanceof AbortException) {
abortTimer.update(System.nanoTime() - commitRequestTime);
} else {
errorCounter.inc();
}
}
}
}
}
| |
package com.aspose.words.maven;
import com.aspose.words.maven.artifacts.Metadata;
import com.aspose.words.maven.utils.AsposeConstants;
import com.aspose.words.maven.utils.AsposeJavaAPI;
import com.aspose.words.maven.utils.AsposeMavenProjectManager;
import static com.aspose.words.maven.utils.AsposeMavenProjectManager.getAsposeProjectMavenDependencies;
import com.aspose.words.maven.utils.AsposeWordsJavaAPI;
import com.aspose.words.maven.utils.TasksExecutor;
import java.awt.Component;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javax.swing.JComponent;
import javax.swing.event.ChangeListener;
import org.netbeans.api.progress.ProgressHandle;
import org.netbeans.api.project.ProjectManager;
import org.netbeans.api.templates.TemplateRegistration;
import org.netbeans.spi.project.ui.support.ProjectChooser;
import org.netbeans.spi.project.ui.templates.support.Templates;
import org.openide.WizardDescriptor;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.openide.util.NbBundle.Messages;
import org.openide.xml.XMLUtil;
import org.w3c.dom.DOMException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* @author Adeel Ilyas
*/
@TemplateRegistration(
folder = "Project/Maven2",
displayName = "#Aspose_displayName",
description = "AsposeWordsMavenDescription.html",
iconBase = "com/aspose/words/maven/Aspose.png",
position = 1,
content = "AsposeMavenProject.zip")
@Messages("Aspose_displayName=Aspose.Words Maven Project")
public class AsposeMavenProjectWizardIterator implements WizardDescriptor.ProgressInstantiatingIterator<WizardDescriptor> {

    /** Index of the currently shown wizard panel. */
    private int index;
    /** Wizard panels, created lazily in {@link #initialize(WizardDescriptor)}. */
    private WizardDescriptor.Panel[] panels;
    /** Wizard descriptor supplied by the IDE; carries the user-entered properties. */
    private WizardDescriptor wiz;
    // NOTE(review): this field appears unused inside this class; kept because it is
    // package-visible and may be referenced elsewhere — confirm before removing.
    List<String> list = new ArrayList<>();

    /**
     * Default constructor. Panels are built when {@link #initialize(WizardDescriptor)} runs.
     */
    public AsposeMavenProjectWizardIterator() {
    }

    /**
     * Factory used by the project template machinery.
     *
     * @return a fresh wizard iterator
     */
    public static AsposeMavenProjectWizardIterator createIterator() {
        return new AsposeMavenProjectWizardIterator();
    }

    /** Builds the single basic-settings panel shown by this wizard. */
    private WizardDescriptor.Panel[] createPanels() {
        return new WizardDescriptor.Panel[]{
            new AsposeMavenBasicWizardPanel()
        };
    }

    /**
     * @return localized step names, one per wizard panel
     */
    private String[] createSteps() {
        return new String[]{
            NbBundle.getMessage(AsposeMavenProjectWizardIterator.class, "LBL_CreateProjectStep"),
        };
    }

    /**
     * Not supported: the IDE is expected to call {@link #instantiate(ProgressHandle)}.
     *
     * @return never returns normally
     * @throws IOException never thrown
     */
    @Override
    public Set<?> instantiate() throws IOException {
        throw new AssertionError("instantiate(ProgressHandle) " //NOI18N
                + "should have been called"); //NOI18N
    }

    /**
     * Creates the Aspose.Words Maven project: optionally downloads the examples,
     * retrieves the Aspose.Words Maven artifact, unpacks the project template into
     * the chosen directory and collects the created project folder(s).
     *
     * @param ph progress handle used to report wizard progress
     * @return set of created project directories the IDE should open
     * @throws IOException if the template cannot be unpacked or files cannot be written
     */
    @Override
    public Set instantiate(ProgressHandle ph) throws IOException {
        ph.start();
        ph.switchToIndeterminate();
        ph.progress("Processing...");
        final AsposeMavenProjectManager asposeMavenProjectManager = AsposeMavenProjectManager.initialize(wiz);
        final AsposeJavaAPI asposeWordsJavaAPI = AsposeWordsJavaAPI.initialize(asposeMavenProjectManager);
        boolean isDownloadExamplesSelected = (boolean) wiz.getProperty("downloadExamples");
        // Downloading Aspose.Words Java (mvn based) examples...
        if (isDownloadExamplesSelected) {
            TasksExecutor tasksExecutionDownloadExamples = new TasksExecutor(NbBundle.getMessage(AsposeMavenProjectWizardIterator.class, "AsposeManager.progressExamplesTitle"));
            // Downloading Aspose API mvn based examples
            tasksExecutionDownloadExamples.addNewTask(asposeMavenProjectManager.createDownloadExamplesTask(asposeWordsJavaAPI));
            // Execute the tasks
            tasksExecutionDownloadExamples.processTasks();
        }
        TasksExecutor tasksExecutionRetrieve = new TasksExecutor(NbBundle.getMessage(AsposeMavenProjectWizardIterator.class, "AsposeManager.progressTitle"));
        // Retrieving Aspose.Words Java Maven artifact...
        tasksExecutionRetrieve.addNewTask(asposeMavenProjectManager.retrieveAsposeAPIMavenTask(asposeWordsJavaAPI));
        // Execute the tasks
        tasksExecutionRetrieve.processTasks();
        // Creating Maven project
        ph.progress(NbBundle.getMessage(AsposeMavenProjectWizardIterator.class, "AsposeManager.projectMessage"));
        Set<FileObject> resultSet = new LinkedHashSet<>();
        File projectDir = FileUtil.normalizeFile((File) wiz.getProperty("projdir"));
        projectDir.mkdirs();
        FileObject template = Templates.getTemplate(wiz);
        FileObject projectRoot = FileUtil.toFileObject(projectDir);
        createAsposeMavenProject(template.getInputStream(), projectRoot);
        createStartupPackage(projectRoot);
        resultSet.add(projectRoot);
        // Look for nested projects to open as well:
        Enumeration<? extends FileObject> e = projectRoot.getFolders(true);
        while (e.hasMoreElements()) {
            FileObject subfolder = e.nextElement();
            if (ProjectManager.getDefault().isProject(subfolder)) {
                resultSet.add(subfolder);
            }
        }
        File parent = projectDir.getParentFile();
        if (parent != null && parent.exists()) {
            // Remember the parent folder as the default projects folder.
            ProjectChooser.setProjectsFolder(parent);
        }
        ph.finish();
        return resultSet;
    }

    /**
     * Stores the wizard descriptor, builds the panels and wires the standard
     * step-list client properties onto each panel component.
     *
     * @param wiz wizard descriptor provided by the IDE
     */
    @Override
    public void initialize(WizardDescriptor wiz) {
        this.wiz = wiz;
        index = 0;
        panels = createPanels();
        // Make sure list of steps is accurate.
        String[] steps = createSteps();
        for (int i = 0; i < panels.length; i++) {
            Component c = panels[i].getComponent();
            if (steps[i] == null) {
                // Default step name to component name of panel.
                // Mainly useful for getting the name of the target
                // chooser to appear in the list of steps.
                steps[i] = c.getName();
            }
            if (c instanceof JComponent) { // assume Swing components
                JComponent jc = (JComponent) c;
                // Step #.
                // TODO if using org.openide.dialogs >= 7.8, can use WizardDescriptor.PROP_*:
                jc.putClientProperty("WizardPanel_contentSelectedIndex", i);
                // Step name (actually the whole list for reference).
                jc.putClientProperty("WizardPanel_contentData", steps);
            }
        }
    }

    /**
     * Clears wizard state so the iterator and descriptor can be garbage collected.
     *
     * @param wiz the wizard descriptor being torn down (unused; the stored one is cleared)
     */
    @Override
    public void uninitialize(WizardDescriptor wiz) {
        this.wiz.putProperty("projdir", null);
        this.wiz.putProperty("name", null);
        this.wiz = null;
        panels = null;
    }

    /**
     * @return human-readable position of the current panel, e.g. "1 of 1"
     */
    @Override
    public String name() {
        return MessageFormat.format("{0} of {1}",
                new Object[]{
                    index + 1, panels.length
                });
    }

    /** @return true when a panel follows the current one */
    @Override
    public boolean hasNext() {
        return index < panels.length - 1;
    }

    /** @return true when a panel precedes the current one */
    @Override
    public boolean hasPrevious() {
        return index > 0;
    }

    /** Advances to the next panel. @throws NoSuchElementException at the last panel */
    @Override
    public void nextPanel() {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        index++;
    }

    /** Steps back to the previous panel. @throws NoSuchElementException at the first panel */
    @Override
    public void previousPanel() {
        if (!hasPrevious()) {
            throw new NoSuchElementException();
        }
        index--;
    }

    /** @return the currently displayed wizard panel */
    @Override
    public WizardDescriptor.Panel current() {
        return panels[index];
    }

    /**
     * No-op: panel transitions never change validity, so listeners are not tracked.
     *
     * @param l ignored
     */
    @Override
    public final void addChangeListener(ChangeListener l) {
    }

    /**
     * No-op counterpart of {@link #addChangeListener(ChangeListener)}.
     *
     * @param l ignored
     */
    @Override
    public final void removeChangeListener(ChangeListener l) {
    }

    /**
     * Unpacks the zipped project template into the project root, giving the
     * Maven pom.xml special treatment (artifact coordinates + dependencies).
     *
     * @param source      zipped template stream (closed by this method)
     * @param projectRoot root folder of the new project
     * @throws IOException if an entry cannot be created or written
     */
    private void createAsposeMavenProject(InputStream source, FileObject projectRoot) throws IOException {
        // FIX: use try-with-resources on the ZipInputStream itself. The original
        // code only closed the underlying stream, leaking the ZipInputStream's
        // native Inflater until GC. Closing the ZipInputStream also closes source.
        try (ZipInputStream str = new ZipInputStream(source)) {
            ZipEntry entry;
            while ((entry = str.getNextEntry()) != null) {
                if (entry.isDirectory()) {
                    FileUtil.createFolder(projectRoot, entry.getName());
                } else {
                    FileObject fo = FileUtil.createData(projectRoot, entry.getName());
                    if (AsposeConstants.MAVEN_POM_XML.equals(entry.getName())) {
                        /*
                        Special handling for maven pom.xml:
                        1. Defining / creating project artifacts
                        2. Adding latest Aspose.Words Maven Dependency reference into pom.xml
                        */
                        configureProjectMavenPOM(fo, str);
                    } else {
                        writeFile(str, fo);
                    }
                }
            }
        }
    }

    /**
     * Creates the src/main/java folder hierarchy for the package the user chose.
     *
     * @param projectRoot root folder of the new project
     * @throws IOException if the folders cannot be created
     */
    private void createStartupPackage(FileObject projectRoot) throws IOException {
        String startupPackage = wiz.getProperty("package").toString().replace(".", File.separator);
        FileUtil.createFolder(projectRoot, "src" + File.separator + "main" + File.separator + "java" + File.separator + startupPackage);
    }

    /** Copies the current zip entry's bytes into the given file object. */
    private static void writeFile(ZipInputStream str, FileObject fo) throws IOException {
        try (OutputStream out = fo.getOutputStream()) {
            FileUtil.copy(str, out);
        }
    }

    /**
     * Rewrites the template pom.xml with the user's Maven coordinates and the
     * Aspose dependencies, falling back to a verbatim copy on parse failure.
     * NOTE(review): in the fallback path {@code str} has already been consumed
     * into the byte buffer, so {@code writeFile} will likely write an empty
     * pom.xml — confirm whether a buffered fallback is wanted.
     *
     * @param fo  destination pom.xml file object
     * @param str zip stream positioned at the pom.xml entry
     * @throws IOException if the destination cannot be written
     */
    private void configureProjectMavenPOM(FileObject fo, ZipInputStream str) throws IOException {
        String groupId = (String) wiz.getProperty("groupId");
        String artifactId = (String) wiz.getProperty("artifactId");
        String version = (String) wiz.getProperty("version");
        try {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            FileUtil.copy(str, baos);
            Document doc = XMLUtil.parse(new InputSource(new ByteArrayInputStream(baos.toByteArray())), false, false, null, null);
            Element root = doc.getDocumentElement();
            Node node = root.getElementsByTagName("groupId").item(0);
            node.setTextContent(groupId);
            node = root.getElementsByTagName("artifactId").item(0);
            node.setTextContent(artifactId);
            node = root.getElementsByTagName("version").item(0);
            node.setTextContent(version);
            updateProjectPom(doc);
            try (OutputStream out = fo.getOutputStream()) {
                XMLUtil.write(doc, out, "UTF-8");
            }
        } catch (IOException | SAXException | DOMException ex) {
            Exceptions.printStackTrace(ex);
            writeFile(str, fo);
        }
    }

    /**
     * Appends a {@code <dependencies>} section with the configured Aspose
     * Maven dependencies to the pom document.
     *
     * @param pomDocument parsed pom.xml document
     */
    private void updateProjectPom(Document pomDocument) {
        // Get the root element
        Node projectNode = pomDocument.getFirstChild();
        // Adding Dependencies here
        Element dependenciesTag = pomDocument.createElement("dependencies");
        projectNode.appendChild(dependenciesTag);
        for (Metadata dependency : getAsposeProjectMavenDependencies()) {
            addAsposeMavenDependency(pomDocument, dependenciesTag, dependency);
        }
    }

    /**
     * Appends one {@code <dependency>} element (groupId/artifactId/latest
     * version and optional classifier) to the given dependencies element.
     *
     * @param doc             owning document
     * @param dependenciesTag the {@code <dependencies>} element to append to
     * @param dependency      resolved dependency metadata
     */
    private void addAsposeMavenDependency(Document doc, Element dependenciesTag, Metadata dependency) {
        Element dependencyTag = doc.createElement("dependency");
        dependenciesTag.appendChild(dependencyTag);
        Element groupIdTag = doc.createElement("groupId");
        groupIdTag.appendChild(doc.createTextNode(dependency.getGroupId()));
        dependencyTag.appendChild(groupIdTag);
        Element artifactId = doc.createElement("artifactId");
        artifactId.appendChild(doc.createTextNode(dependency.getArtifactId()));
        dependencyTag.appendChild(artifactId);
        Element version = doc.createElement("version");
        version.appendChild(doc.createTextNode(dependency.getVersioning().getLatest()));
        dependencyTag.appendChild(version);
        if (dependency.getClassifier() != null) {
            Element classifer = doc.createElement("classifier");
            classifer.appendChild(doc.createTextNode(dependency.getClassifier()));
            dependencyTag.appendChild(classifer);
        }
    }
}
| |
package com.admin.tool.entity;
import java.util.Date;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.JoinTable;
import javax.persistence.ManyToMany;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.validation.constraints.Size;
import org.hibernate.validator.constraints.Email;
import org.springframework.format.annotation.DateTimeFormat;
import com.admin.tool.annotation.UniqueUsername;
import com.fasterxml.jackson.annotation.JsonFormat;
@Entity
@Table(name = "user")
/**
 * JPA entity representing an application user account: credentials, granted
 * roles, owned blogs and uploads, timestamps and two-factor-authentication state.
 */
public class User {
    @Id
    @GeneratedValue
    private Integer id;

    /** Unique login name, validated for minimum length and uniqueness. */
    @Size(min = 3, message = "Name must be at least 3 characters!")
    @Column(unique = true)
    @UniqueUsername(message = "Such username already exists!")
    private String name;

    @Size(min = 1, message = "Invalid email address!")
    @Email(message = "Invalid email address!")
    private String email;

    // FIX: message previously read "Name must be at least 5 characters!" —
    // a copy-paste error, since this constraint validates the password.
    @Size(min = 5, message = "Password must be at least 5 characters!")
    private String password;

    /** Whether the account is active (Spring Security enabled flag). */
    private boolean enabled;

    @ManyToMany(fetch = FetchType.EAGER)
    @JoinTable
    private List<Role> roles;

    /** Blogs owned by this user; removed in cascade with the user. */
    @OneToMany(mappedBy = "user",cascade = CascadeType.REMOVE)
    private List<Blog> blogs;

    /** Files uploaded by this user; removed in cascade with the user. */
    @OneToMany(mappedBy = "user",cascade = CascadeType.REMOVE, fetch = FetchType.EAGER)
    private List<UploadedFile> uploadedFiles;

    @JsonFormat(shape=JsonFormat.Shape.STRING, pattern="EEE, dd MMM yyyy HH:mm:ss zzz")
    private Date createdDate;

    @JsonFormat(shape=JsonFormat.Shape.STRING, pattern="EEE, dd MMM yyyy HH:mm:ss zzz")
    private Date lastLoginDate;

    /** Shared secret used to generate TOTP codes for two-factor auth. */
    private String secretKey;
    private Boolean twoFactorAuthInitialised;
    private boolean isAuthenticated;
    private boolean isVerified;
    private boolean isVerifiedError;
    private boolean isResetTwoFactorAuth;

    /** Default constructor required by JPA. */
    public User(){
    }

    public Date getCreatedDate() {
        return createdDate;
    }

    public void setCreatedDate(Date createdDate) {
        this.createdDate = createdDate;
    }

    public Date getLastLoginDate() {
        return lastLoginDate;
    }

    public void setLastLoginDate(Date lastLoginDate) {
        this.lastLoginDate = lastLoginDate;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public void setEnabled(boolean enabled) {
        this.enabled = enabled;
    }

    public List<Blog> getBlogs() {
        return blogs;
    }

    public void setBlogs(List<Blog> blogs) {
        this.blogs = blogs;
    }

    public List<Role> getRoles() {
        return roles;
    }

    public void setRoles(List<Role> roles) {
        this.roles = roles;
    }

    public Integer getId() {
        return id;
    }

    public void setId(Integer id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getPassword() {
        return password;
    }

    public void setPassword(String password) {
        this.password = password;
    }

    public String getSecretKey() {
        return secretKey;
    }

    public void setSecretKey(String secretKey) {
        this.secretKey = secretKey;
    }

    public Boolean getTwoFactorAuthInitialised() {
        return twoFactorAuthInitialised;
    }

    public void setTwoFactorAuthInitialised(Boolean twoFactorAuthInitialised) {
        this.twoFactorAuthInitialised = twoFactorAuthInitialised;
    }

    public boolean isAuthenticated() {
        return isAuthenticated;
    }

    public void setAuthenticated(boolean isAuthenticated) {
        this.isAuthenticated = isAuthenticated;
    }

    public boolean isVerified() {
        return isVerified;
    }

    public void setVerified(boolean isVerified) {
        this.isVerified = isVerified;
    }

    public boolean isVerifiedError() {
        return isVerifiedError;
    }

    public void setVerifiedError(boolean isVerifiedError) {
        this.isVerifiedError = isVerifiedError;
    }

    public boolean isResetTwoFactorAuth() {
        return isResetTwoFactorAuth;
    }

    public void setResetTwoFactorAuth(boolean isResetTwoFactorAuth) {
        this.isResetTwoFactorAuth = isResetTwoFactorAuth;
    }

    // Accessor names kept in singular ("UploadedFile") for caller compatibility,
    // although the property holds a list.
    public List<UploadedFile> getUploadedFile() {
        return uploadedFiles;
    }

    public void setUploadedFile(List<UploadedFile> uploadedFiles) {
        this.uploadedFiles = uploadedFiles;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.marshaller.optimized;
import java.io.IOException;
import java.io.ObjectStreamClass;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ConcurrentMap;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteException;
import org.apache.ignite.internal.util.GridUnsafe;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.marshaller.MarshallerContext;
import org.apache.ignite.marshaller.jdk.JdkMarshaller;
import static org.apache.ignite.internal.MarshallerPlatformIds.JAVA_ID;
/**
* Miscellaneous utility methods to facilitate {@link OptimizedMarshaller}.
*/
class OptimizedMarshallerUtils {
    /** Unsafe offset of {@code HashSet.map} ({@code backingMap} on legacy IBM JREs). */
    static final long HASH_SET_MAP_OFF;

    /** Type code: object written with the fallback JDK marshaller. */
    static final byte JDK = -2;

    /** Type code: handle referencing a previously written object. */
    static final byte HANDLE = -1;

    /** Type code: {@code null} reference. */
    static final byte NULL = 0;

    /** Type code: {@code byte}. */
    static final byte BYTE = 1;

    /** Type code: {@code short}. */
    static final byte SHORT = 2;

    /** Type code: {@code int}. */
    static final byte INT = 3;

    /** Type code: {@code long}. */
    static final byte LONG = 4;

    /** Type code: {@code float}. */
    static final byte FLOAT = 5;

    /** Type code: {@code double}. */
    static final byte DOUBLE = 6;

    /** Type code: {@code char}. */
    static final byte CHAR = 7;

    /** Type code: {@code boolean}. */
    static final byte BOOLEAN = 8;

    /** Type code: {@code byte[]}. */
    static final byte BYTE_ARR = 9;

    /** Type code: {@code short[]}. */
    static final byte SHORT_ARR = 10;

    /** Type code: {@code int[]}. */
    static final byte INT_ARR = 11;

    /** Type code: {@code long[]}. */
    static final byte LONG_ARR = 12;

    /** Type code: {@code float[]}. */
    static final byte FLOAT_ARR = 13;

    /** Type code: {@code double[]}. */
    static final byte DOUBLE_ARR = 14;

    /** Type code: {@code char[]}. */
    static final byte CHAR_ARR = 15;

    /** Type code: {@code boolean[]}. */
    static final byte BOOLEAN_ARR = 16;

    /** Type code: object array. */
    static final byte OBJ_ARR = 17;

    /** Type code: {@link String}. */
    static final byte STR = 18;

    /** Type code: {@link java.util.UUID}. */
    static final byte UUID = 19;

    /** Type code: {@link java.util.Properties}. */
    static final byte PROPS = 20;

    /** Type code: {@link java.util.ArrayList}. */
    static final byte ARRAY_LIST = 21;

    /** Type code: {@link java.util.HashMap}. */
    static final byte HASH_MAP = 22;

    /** Type code: {@link java.util.HashSet}. */
    static final byte HASH_SET = 23;

    /** Type code: {@link java.util.LinkedList}. */
    static final byte LINKED_LIST = 24;

    /** Type code: {@link java.util.LinkedHashMap}. */
    static final byte LINKED_HASH_MAP = 25;

    /** Type code: {@link java.util.LinkedHashSet}. */
    static final byte LINKED_HASH_SET = 26;

    /** Type code: {@link java.util.Date}. */
    static final byte DATE = 27;

    /** Type code: {@link Class}. */
    static final byte CLS = 28;

    /** Type code: dynamic proxy. */
    static final byte PROXY = 29;

    /** Type code: enum constant. */
    static final byte ENUM = 100;

    /** Type code: {@link java.io.Externalizable} object. */
    static final byte EXTERNALIZABLE = 101;

    /** Type code: {@link Serializable} object. */
    static final byte SERIALIZABLE = 102;

    /** UTF-8 charset used for digesting class and field names. */
    static final Charset UTF_8 = StandardCharsets.UTF_8;

    /** JDK marshaller. */
    static final JdkMarshaller JDK_MARSH = new JdkMarshaller();

    static {
        long mapOff;

        try {
            mapOff = GridUnsafe.objectFieldOffset(HashSet.class.getDeclaredField("map"));
        }
        catch (NoSuchFieldException ignored) {
            try {
                // Workaround for legacy IBM JRE.
                mapOff = GridUnsafe.objectFieldOffset(HashSet.class.getDeclaredField("backingMap"));
            }
            catch (NoSuchFieldException e2) {
                throw new IgniteException("Initialization failure.", e2);
            }
        }

        HASH_SET_MAP_OFF = mapOff;
    }

    /**
     * Utility class: not instantiable.
     */
    private OptimizedMarshallerUtils() {
        // No-op.
    }

    /**
     * Gets descriptor for provided class.
     *
     * @param clsMap Class descriptors by class map.
     * @param cls Class.
     * @param ctx Context.
     * @param mapper ID mapper.
     * @return Descriptor.
     * @throws IOException In case of error.
     */
    static OptimizedClassDescriptor classDescriptor(
        ConcurrentMap<Class, OptimizedClassDescriptor> clsMap,
        Class cls,
        MarshallerContext ctx,
        OptimizedMarshallerIdMapper mapper)
        throws IOException
    {
        OptimizedClassDescriptor desc = clsMap.get(cls);

        if (desc == null) {
            int typeId = resolveTypeId(cls.getName(), mapper);

            boolean registered;

            try {
                registered = ctx.registerClassName(JAVA_ID, typeId, cls.getName());
            }
            catch (IgniteCheckedException e) {
                throw new IOException("Failed to register class: " + cls.getName(), e);
            }

            desc = new OptimizedClassDescriptor(cls, registered ? typeId : 0, clsMap, ctx, mapper);

            // Only cache the descriptor once the type id is registered; unregistered
            // descriptors (typeId 0) are rebuilt on the next call.
            if (registered) {
                OptimizedClassDescriptor old = clsMap.putIfAbsent(cls, desc);

                if (old != null)
                    desc = old;
            }
        }

        return desc;
    }

    /**
     * @param clsName Class name.
     * @param mapper Mapper.
     * @return Type ID (mapper-provided id, or the class-name hash when absent/0).
     */
    private static int resolveTypeId(String clsName, OptimizedMarshallerIdMapper mapper) {
        int typeId;

        if (mapper != null) {
            typeId = mapper.typeId(clsName);

            if (typeId == 0)
                typeId = clsName.hashCode();
        }
        else
            typeId = clsName.hashCode();

        return typeId;
    }

    /**
     * Gets descriptor for provided ID.
     *
     * @param clsMap Class descriptors by class map.
     * @param typeId ID.
     * @param ldr Class loader.
     * @param ctx Context.
     * @param mapper ID mapper.
     * @return Descriptor.
     * @throws IOException In case of error.
     * @throws ClassNotFoundException If class was not found.
     */
    static OptimizedClassDescriptor classDescriptor(
        ConcurrentMap<Class, OptimizedClassDescriptor> clsMap,
        int typeId,
        ClassLoader ldr,
        MarshallerContext ctx,
        OptimizedMarshallerIdMapper mapper) throws IOException, ClassNotFoundException {
        Class cls;

        try {
            cls = ctx.getClass(typeId, ldr);
        }
        catch (IgniteCheckedException e) {
            throw new IOException("Failed to resolve class for ID: " + typeId, e);
        }

        OptimizedClassDescriptor desc = clsMap.get(cls);

        if (desc == null) {
            OptimizedClassDescriptor old = clsMap.putIfAbsent(cls, desc =
                new OptimizedClassDescriptor(cls, resolveTypeId(cls.getName(), mapper), clsMap, ctx, mapper));

            if (old != null)
                desc = old;
        }

        return desc;
    }

    /**
     * Computes the serial version UID value for the given class. The code is taken from {@link
     * ObjectStreamClass#computeDefaultSUID(Class)}.
     *
     * @param cls A class.
     * @param fields Fields.
     * @return A serial version UID.
     * @throws IOException If failed.
     */
    @SuppressWarnings("ForLoopReplaceableByForEach")
    static short computeSerialVersionUid(Class cls, List<Field> fields) throws IOException {
        if (Serializable.class.isAssignableFrom(cls) && !Enum.class.isAssignableFrom(cls)) {
            try {
                // An explicit serialVersionUID constant takes precedence.
                Field field = cls.getDeclaredField("serialVersionUID");

                if (field.getType() == long.class) {
                    int mod = field.getModifiers();

                    if (Modifier.isStatic(mod) && Modifier.isFinal(mod)) {
                        field.setAccessible(true);

                        return (short)field.getLong(null);
                    }
                }
            }
            catch (NoSuchFieldException ignored) {
                // No-op.
            }
            catch (IllegalAccessException e) {
                throw new IOException(e);
            }

            if (OptimizedMarshaller.USE_DFLT_SUID)
                return (short)ObjectStreamClass.lookup(cls).getSerialVersionUID();
        }

        MessageDigest md;

        try {
            md = MessageDigest.getInstance("SHA");
        }
        catch (NoSuchAlgorithmException e) {
            throw new IOException("Failed to get digest for SHA.", e);
        }

        md.update(cls.getName().getBytes(UTF_8));

        if (!F.isEmpty(fields)) {
            for (int i = 0; i < fields.size(); i++) {
                Field f = fields.get(i);

                md.update(f.getName().getBytes(UTF_8));
                md.update(f.getType().getName().getBytes(UTF_8));
            }
        }

        byte[] hashBytes = md.digest();

        long hash = 0;

        // Composes a single-long hash from the byte[] hash.
        for (int i = Math.min(hashBytes.length, 8) - 1; i >= 0; i--)
            hash = (hash << 8) | (hashBytes[i] & 0xFF);

        return (short)hash;
    }

    /**
     * Gets byte field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Byte value.
     */
    static byte getByte(Object obj, long off) {
        return GridUnsafe.getByteField(obj, off);
    }

    /**
     * Sets byte field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setByte(Object obj, long off, byte val) {
        GridUnsafe.putByteField(obj, off, val);
    }

    /**
     * Gets short field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Short value.
     */
    static short getShort(Object obj, long off) {
        return GridUnsafe.getShortField(obj, off);
    }

    /**
     * Sets short field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setShort(Object obj, long off, short val) {
        GridUnsafe.putShortField(obj, off, val);
    }

    /**
     * Gets integer field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Integer value.
     */
    static int getInt(Object obj, long off) {
        return GridUnsafe.getIntField(obj, off);
    }

    /**
     * Sets integer field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setInt(Object obj, long off, int val) {
        GridUnsafe.putIntField(obj, off, val);
    }

    /**
     * Gets long field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Long value.
     */
    static long getLong(Object obj, long off) {
        return GridUnsafe.getLongField(obj, off);
    }

    /**
     * Sets long field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setLong(Object obj, long off, long val) {
        GridUnsafe.putLongField(obj, off, val);
    }

    /**
     * Gets float field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Float value.
     */
    static float getFloat(Object obj, long off) {
        return GridUnsafe.getFloatField(obj, off);
    }

    /**
     * Sets float field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setFloat(Object obj, long off, float val) {
        GridUnsafe.putFloatField(obj, off, val);
    }

    /**
     * Gets double field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Double value.
     */
    static double getDouble(Object obj, long off) {
        return GridUnsafe.getDoubleField(obj, off);
    }

    /**
     * Sets double field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setDouble(Object obj, long off, double val) {
        GridUnsafe.putDoubleField(obj, off, val);
    }

    /**
     * Gets char field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Char value.
     */
    static char getChar(Object obj, long off) {
        return GridUnsafe.getCharField(obj, off);
    }

    /**
     * Sets char field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setChar(Object obj, long off, char val) {
        GridUnsafe.putCharField(obj, off, val);
    }

    /**
     * Gets boolean field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Boolean value.
     */
    static boolean getBoolean(Object obj, long off) {
        return GridUnsafe.getBooleanField(obj, off);
    }

    /**
     * Sets boolean field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setBoolean(Object obj, long off, boolean val) {
        GridUnsafe.putBooleanField(obj, off, val);
    }

    /**
     * Gets field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @return Value.
     */
    static Object getObject(Object obj, long off) {
        return GridUnsafe.getObjectField(obj, off);
    }

    /**
     * Sets field value.
     *
     * @param obj Object.
     * @param off Field offset.
     * @param val Value.
     */
    static void setObject(Object obj, long off, Object val) {
        GridUnsafe.putObjectField(obj, off, val);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.txn.compactor;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.ServerUtils;
import org.apache.hadoop.hive.common.ValidCompactorWriteIdList;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.common.ValidWriteIdList;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.TransactionalValidationListener;
import org.apache.hadoop.hive.metastore.api.AbortTxnRequest;
import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsRequest;
import org.apache.hadoop.hive.metastore.api.AllocateTableWriteIdsResponse;
import org.apache.hadoop.hive.metastore.api.CommitTxnRequest;
import org.apache.hadoop.hive.metastore.api.CompactionRequest;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.GetValidWriteIdsRequest;
import org.apache.hadoop.hive.metastore.api.LockRequest;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchTxnException;
import org.apache.hadoop.hive.metastore.api.OpenTxnRequest;
import org.apache.hadoop.hive.metastore.api.OpenTxnsResponse;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.TxnAbortedException;
import org.apache.hadoop.hive.metastore.api.TxnType;
import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.metrics.AcidMetricService;
import org.apache.hadoop.hive.metastore.txn.CompactionInfo;
import org.apache.hadoop.hive.metastore.txn.TxnCommonUtils;
import org.apache.hadoop.hive.metastore.utils.TestTxnDbUtil;
import org.apache.hadoop.hive.metastore.txn.TxnStore;
import org.apache.hadoop.hive.metastore.txn.TxnUtils;
import org.apache.hadoop.hive.ql.io.AcidInputFormat;
import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
import org.apache.hadoop.hive.ql.io.AcidUtils;
import org.apache.hadoop.hive.ql.io.RecordIdentifier;
import org.apache.hadoop.hive.ql.io.RecordUpdater;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Progressable;
import org.apache.thrift.TException;
import org.junit.Before;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Stack;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.hadoop.hive.ql.txn.compactor.CompactorTestUtilities.CompactorThreadType;
/**
* Super class for all of the compactor test modules.
*/
public abstract class CompactorTest {
static final private String CLASS_NAME = CompactorTest.class.getName();
static final private Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
public static final String WORKER_VERSION = "4.0.0";
protected TxnStore txnHandler;
protected IMetaStoreClient ms;
protected HiveConf conf;
private final AtomicBoolean stop = new AtomicBoolean();
protected File tmpdir;
/**
 * Resets the metastore test database and creates a fresh metastore client,
 * txn handler and per-test scratch directory before each test.
 */
@Before
public void setup() throws Exception {
    conf = new HiveConf();
    // Order matters: point the config at the test DB, then wipe and re-create its schema.
    TestTxnDbUtil.setConfValues(conf);
    TestTxnDbUtil.cleanDb(conf);
    TestTxnDbUtil.prepDb(conf);
    ms = new HiveMetaStoreClient(conf);
    txnHandler = TxnUtils.getTxnStore(conf);
    // Scratch directory used as table/partition location by the tests.
    tmpdir = new File(Files.createTempDirectory("compactor_test_table_").toString());
}
/** Recursively deletes the per-test scratch directory created in {@code setup()}. */
protected void compactorTestCleanup() throws IOException {
    FileUtils.deleteDirectory(tmpdir);
}
/**
 * Runs the compaction initiator thread; the boolean flag is forwarded to
 * startThread — presumably "stop after one cycle"; confirm against startThread.
 */
protected void startInitiator() throws Exception {
    startThread(CompactorThreadType.INITIATOR, true);
}
/**
 * Runs the compaction worker thread; the boolean flag is forwarded to
 * startThread — presumably "stop after one cycle"; confirm against startThread.
 */
protected void startWorker() throws Exception {
    startThread(CompactorThreadType.WORKER, true);
}
/**
 * Runs the compaction cleaner thread; the boolean flag is forwarded to
 * startThread — presumably "stop after one cycle"; confirm against startThread.
 */
protected void startCleaner() throws Exception {
    startThread(CompactorThreadType.CLEANER, true);
}
/**
 * Executes one pass of the {@code AcidMetricService} against the test metastore
 * configuration so tests can assert on the metrics it publishes.
 */
protected void runAcidMetricService() throws Exception {
    TestTxnDbUtil.setConfValues(conf);
    AcidMetricService metricService = new AcidMetricService();
    metricService.setConf(conf);
    metricService.run();
}
/**
 * Convenience overload: creates a managed transactional table with no extra
 * parameters, no sort columns and a non-temporary lifetime.
 */
protected Table newTable(String dbName, String tableName, boolean partitioned) throws TException {
    return newTable(dbName, tableName, partitioned, new HashMap<>(), null, false);
}
/**
 * Convenience overload: creates a managed transactional table with the given
 * parameters, no sort columns and a non-temporary lifetime.
 */
protected Table newTable(String dbName, String tableName, boolean partitioned,
                         Map<String, String> parameters) throws TException {
    return newTable(dbName, tableName, partitioned, parameters, null, false);
}
/**
 * Creates (drop-then-create) a managed, transactional metastore table.
 *
 * @param dbName      database to create the table in
 * @param tableName   table name
 * @param partitioned when true a single string partition column "ds" is added
 * @param parameters  extra table parameters; the given map is mutated to mark
 *                    the table transactional (a fresh map is used when null)
 * @param sortCols    optional sort columns; when present the table is created
 *                    insert-only, since full ACID tables forbid sort columns
 * @param isTemporary whether to mark the table temporary
 * @return the created table
 */
protected Table newTable(String dbName, String tableName, boolean partitioned,
                         Map<String, String> parameters, List<Order> sortCols,
                         boolean isTemporary)
        throws TException {
    // Set the table as transactional for compaction to work.
    if (parameters == null) {
        parameters = new HashMap<>();
    }
    parameters.put(hive_metastoreConstants.TABLE_IS_TRANSACTIONAL, "true");
    if (sortCols != null) {
        // Sort columns are not allowed for full ACID table. So, change it to insert-only table.
        parameters.put(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES,
                TransactionalValidationListener.INSERTONLY_TRANSACTIONAL_PROPERTY);
    }

    Table table = new Table();
    table.setDbName(dbName);
    table.setTableName(tableName);
    table.setOwner("me");
    table.setTableType(TableType.MANAGED_TABLE.name());
    table.setSd(newStorageDescriptor(getLocation(tableName, null), sortCols));
    if (partitioned) {
        List<FieldSchema> partKeys = new ArrayList<>(1);
        partKeys.add(new FieldSchema("ds", "string", "no comment"));
        table.setPartitionKeys(partKeys);
    }
    table.setParameters(parameters);
    if (isTemporary) {
        table.setTemporary(true);
    }

    // Drop any leftover table from a previous test run before creating the new one.
    ms.dropTable(dbName, tableName);
    ms.createTable(table);
    return table;
}
/** Adds a single-value partition (no sort columns) to the given table. */
protected Partition newPartition(Table t, String value) throws Exception {
  return newPartition(t, value, null);
}

/**
 * Adds a partition with the given "ds" value to the table and registers it in the
 * metastore; sortCols, when non-null, flow into the storage descriptor.
 */
protected Partition newPartition(Table t, String value, List<Order> sortCols) throws Exception {
  Partition partition = new Partition();
  partition.setDbName(t.getDbName());
  partition.setTableName(t.getTableName());
  partition.addToValues(value);
  partition.setSd(newStorageDescriptor(getLocation(t.getTableName(), value), sortCols));
  partition.setParameters(new HashMap<String, String>());
  ms.add_partition(partition);
  return partition;
}
// Opens a single transaction of the default type and returns its id.
protected long openTxn() throws MetaException {
return openTxn(TxnType.DEFAULT);
}
// Opens a single transaction of the given type. REPL_CREATED txns additionally get
// a replication policy and a source txn id so the txn store accepts them.
protected long openTxn(TxnType txnType) throws MetaException {
OpenTxnRequest rqst = new OpenTxnRequest(1, System.getProperty("user.name"), ServerUtils.hostname());
rqst.setTxn_type(txnType);
if (txnType == TxnType.REPL_CREATED) {
rqst.setReplPolicy("default.*");
rqst.setReplSrcTxnIds(Arrays.asList(1L));
}
List<Long> txns = txnHandler.openTxns(rqst).getTxn_ids();
return txns.get(0);
}
// Allocates a table write id for the given transaction and returns it.
protected long allocateWriteId(String dbName, String tblName, long txnid)
throws MetaException, TxnAbortedException, NoSuchTxnException {
AllocateTableWriteIdsRequest awiRqst
= new AllocateTableWriteIdsRequest(dbName, tblName);
awiRqst.setTxnIds(Collections.singletonList(txnid));
AllocateTableWriteIdsResponse awiResp = txnHandler.allocateTableWriteIds(awiRqst);
// Only one txn was requested, so the first mapping is ours.
return awiResp.getTxnToWriteIds().get(0).getWriteId();
}
// Convenience wrappers around addFile() that fabricate the various ACID file
// layouts (delta, base, side/length file, pre-ACID "legacy" files) with 2 buckets
// unless stated otherwise.
protected void addDeltaFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords)
throws Exception {
addFile(t, p, minTxn, maxTxn, numRecords, FileType.DELTA, 2, true);
}
protected void addLengthFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords)
throws Exception {
addFile(t, p, minTxn, maxTxn, numRecords, FileType.LENGTH_FILE, 2, true);
}
protected void addBaseFile(Table t, Partition p, long maxTxn, int numRecords) throws Exception {
addFile(t, p, 0, maxTxn, numRecords, FileType.BASE, 2, true);
}
// Base file stamped with a compactor visibility txn id (base_N_vM).
protected void addBaseFile(Table t, Partition p, long maxTxn, int numRecords, long visibilityId) throws Exception {
addFile(t, p, 0, maxTxn, numRecords, FileType.BASE, 2, true, visibilityId);
}
protected void addLegacyFile(Table t, Partition p, int numRecords) throws Exception {
addFile(t, p, 0, 0, numRecords, FileType.LEGACY, 2, true);
}
protected void addDeltaFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords,
int numBuckets, boolean allBucketsPresent) throws Exception {
addFile(t, p, minTxn, maxTxn, numRecords, FileType.DELTA, numBuckets, allBucketsPresent);
}
protected void addBaseFile(Table t, Partition p, long maxTxn, int numRecords, int numBuckets,
boolean allBucketsPresent) throws Exception {
addFile(t, p, 0, maxTxn, numRecords, FileType.BASE, numBuckets, allBucketsPresent);
}
/**
 * Lists the immediate children of the table (or partition, when p is non-null)
 * directory and returns their paths.
 */
protected List<Path> getDirectories(HiveConf conf, Table t, Partition p) throws Exception {
  String partValue = (p == null) ? null : p.getValues().get(0);
  Path dir = new Path(getLocation(t.getTableName(), partValue));
  FileStatus[] entries = FileSystem.get(conf).listStatus(dir);
  List<Path> result = new ArrayList<Path>(entries.length);
  for (FileStatus entry : entries) {
    result.add(entry.getPath());
  }
  return result;
}
// Opens and commits `num` transactions against the table; see the full overload.
protected void burnThroughTransactions(String dbName, String tblName, int num)
throws MetaException, NoSuchTxnException, TxnAbortedException {
burnThroughTransactions(dbName, tblName, num, null, null);
}
// Variant that can leave some txns open and abort others; no lock is taken.
protected void burnThroughTransactions(String dbName, String tblName, int num, Set<Long> open, Set<Long> aborted)
throws NoSuchTxnException, TxnAbortedException, MetaException {
burnThroughTransactions(dbName, tblName, num, open, aborted, null);
}
/**
 * Opens {@code num} transactions against the table, allocates write ids for all of
 * them, optionally acquires {@code lockReq} in each, then resolves every txn:
 * aborted if its id is in {@code aborted}, left open if in {@code open},
 * committed otherwise.
 */
protected void burnThroughTransactions(String dbName, String tblName, int num, Set<Long> open, Set<Long> aborted, LockRequest lockReq)
    throws MetaException, NoSuchTxnException, TxnAbortedException {
  OpenTxnsResponse rsp = txnHandler.openTxns(new OpenTxnRequest(num, "me", "localhost"));
  AllocateTableWriteIdsRequest awiRqst = new AllocateTableWriteIdsRequest(dbName, tblName);
  awiRqst.setTxnIds(rsp.getTxn_ids());
  AllocateTableWriteIdsResponse awiResp = txnHandler.allocateTableWriteIds(awiRqst);
  int i = 0;
  for (long tid : rsp.getTxn_ids()) {
    // Write ids are expected to come back in the same order the txns were opened.
    assert (awiResp.getTxnToWriteIds().get(i++).getTxnId() == tid);
    if (lockReq != null) {
      lockReq.setTxnid(tid);
      txnHandler.lock(lockReq);
    }
    if (aborted != null && aborted.contains(tid)) {
      txnHandler.abortTxn(new AbortTxnRequest(tid));
    } else if (open == null || !open.contains(tid)) {
      // (Simplified: the original's "open != null &&" inside the second operand
      // was redundant given the short-circuited null check.)
      txnHandler.commitTxn(new CommitTxnRequest(tid));
    }
  }
}
// Signals any compactor thread started via startThread(..., false) to stop.
protected void stopThread() {
stop.set(true);
}
/**
 * Builds the storage descriptor shared by test tables and partitions: two columns
 * (a varchar, b int), one "a" bucket column, the mock input/output formats, and
 * optional sort columns.
 */
private StorageDescriptor newStorageDescriptor(String location, List<Order> sortCols) {
  StorageDescriptor descriptor = new StorageDescriptor();

  List<FieldSchema> columns = new ArrayList<FieldSchema>(2);
  columns.add(new FieldSchema("a", "varchar(25)", "still no comment"));
  columns.add(new FieldSchema("b", "int", "comment"));
  descriptor.setCols(columns);

  descriptor.setLocation(location);
  descriptor.setInputFormat(MockInputFormat.class.getName());
  descriptor.setOutputFormat(MockOutputFormat.class.getName());
  descriptor.setNumBuckets(1);

  SerDeInfo serdeInfo = new SerDeInfo();
  serdeInfo.setSerializationLib(LazySimpleSerDe.class.getName());
  descriptor.setSerdeInfo(serdeInfo);

  List<String> bucketColumns = new ArrayList<String>(1);
  bucketColumns.add("a");
  descriptor.setBucketCols(bucketColumns);

  if (sortCols != null) {
    descriptor.setSortCols(sortCols);
  }
  return descriptor;
}
// I can't do this with @Before because I want to be able to control when the thread starts.
// Creates the requested compactor thread; with stopAfterOne it runs a single cycle
// synchronously on the caller's thread, otherwise it starts as a background thread
// that keeps running until stopThread() is called.
private void startThread(CompactorThreadType type, boolean stopAfterOne) throws Exception {
TestTxnDbUtil.setConfValues(conf);
CompactorThread t;
switch (type) {
case INITIATOR: t = new Initiator(); break;
case WORKER: t = new Worker(); break;
case CLEANER: t = new Cleaner(); break;
default: throw new RuntimeException("Huh? Unknown thread type.");
}
t.setThreadId((int) t.getId());
t.setConf(conf);
// The shared stop flag doubles as the "run once" switch.
stop.set(stopAfterOne);
t.init(stop);
if (stopAfterOne) t.run();
else t.start();
}
/**
 * Builds the filesystem location for a test table (and optional "ds" partition)
 * under the per-test temporary directory.
 *
 * @param tableName table directory name
 * @param partValue partition value, or null for the unpartitioned table location
 * @return absolute path of the table or partition directory
 */
private String getLocation(String tableName, String partValue) {
  // File.separator is defined from the "file.separator" system property, so this is
  // behaviorally identical to the previous System.getProperty() calls, just idiomatic.
  String location = tmpdir.getAbsolutePath() + File.separator + tableName;
  if (partValue != null) {
    location += File.separator + "ds=" + partValue;
  }
  return location;
}
// Kinds of on-disk artifacts addFile() can fabricate.
private enum FileType {BASE, DELTA, LEGACY, LENGTH_FILE}
// Overload without a visibility suffix (visibilityId = 0 means "none").
private void addFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords, FileType type, int numBuckets,
boolean allBucketsPresent) throws Exception {
addFile(t, p, minTxn, maxTxn, numRecords, type, numBuckets, allBucketsPresent, 0);
}
// Fabricates an ACID directory/file layout directly on the filesystem so compactor
// tests don't need a real write path. For BASE/DELTA/LENGTH_FILE types a
// base_N[_vM] or delta_x_y directory is created containing one bucket file per
// bucket; LEGACY writes flat pre-ACID bucket files. LENGTH_FILE writes a delta
// side-file instead of data.
private void addFile(Table t, Partition p, long minTxn, long maxTxn, int numRecords, FileType type, int numBuckets,
boolean allBucketsPresent, long visibilityId) throws Exception {
String partValue = (p == null) ? null : p.getValues().get(0);
Path location = new Path(getLocation(t.getTableName(), partValue));
String filename = null;
switch (type) {
case BASE: filename = AcidUtils.BASE_PREFIX + maxTxn + (visibilityId > 0 ? AcidUtils.VISIBILITY_PREFIX + visibilityId : ""); break;
case LENGTH_FILE: // Fall through to delta
case DELTA: filename = makeDeltaDirName(minTxn, maxTxn); break;
case LEGACY: break; // handled below
}
FileSystem fs = FileSystem.get(conf);
for (int bucket = 0; bucket < numBuckets; bucket++) {
// allBucketsPresent=false simulates a writer that skipped bucket 0.
if (bucket == 0 && !allBucketsPresent) continue; // skip one
Path partFile = null;
if (type == FileType.LEGACY) {
partFile = new Path(location, String.format(AcidUtils.LEGACY_FILE_BUCKET_DIGITS, bucket) + "_0");
} else {
Path dir = new Path(location, filename);
fs.mkdirs(dir);
partFile = AcidUtils.createBucketFile(dir, bucket);
if (type == FileType.LENGTH_FILE) {
partFile = new Path(partFile.toString() + AcidUtils.DELTA_SIDE_FILE_SUFFIX);
}
}
FSDataOutputStream out = fs.create(partFile);
if (type == FileType.LENGTH_FILE) {
out.writeInt(numRecords);//hmm - length files should store length in bytes...
} else {
// Each record is a RecordIdentifier header followed by a fixed text payload
// that MockRawReader knows how to read back.
for (int i = 0; i < numRecords; i++) {
RecordIdentifier ri = new RecordIdentifier(maxTxn - 1, bucket, i);
ri.write(out);
out.writeBytes("mary had a little lamb its fleece was white as snow\n");
}
}
out.close();
}
}
// Minimal AcidInputFormat whose only real behavior is getRawReader(): it collects
// the existing bucket files for the base and delta directories and hands them to a
// MockRawReader. Everything else is stubbed out.
static class MockInputFormat implements AcidInputFormat<WritableComparable,Text> {
@Override
public AcidInputFormat.RowReader<Text> getReader(InputSplit split,
Options options) throws
IOException {
return null;
}
@Override
public RawReader<Text> getRawReader(Configuration conf, boolean collapseEvents, int bucket,
ValidWriteIdList validWriteIdList,
Path baseDirectory, Path[] deltaDirectory, Map<String, Integer> deltaToAttemptId) throws IOException {
List<Path> filesToRead = new ArrayList<Path>();
if (baseDirectory != null) {
if (baseDirectory.getName().startsWith(AcidUtils.BASE_PREFIX)) {
Path p = AcidUtils.createBucketFile(baseDirectory, bucket);
FileSystem fs = p.getFileSystem(conf);
if (fs.exists(p)) filesToRead.add(p);
} else {
// A non-base-prefixed directory is a pre-ACID layout with flat files.
filesToRead.add(new Path(baseDirectory, "000000_0"));
}
}
for (int i = 0; i < deltaDirectory.length; i++) {
Path p = AcidUtils.createBucketFile(deltaDirectory[i], bucket);
FileSystem fs = p.getFileSystem(conf);
if (fs.exists(p)) filesToRead.add(p);
}
return new MockRawReader(conf, filesToRead);
}
@Override
public InputSplit[] getSplits(JobConf entries, int i) throws IOException {
return new InputSplit[0];
}
@Override
public RecordReader<WritableComparable, Text> getRecordReader(InputSplit inputSplit, JobConf entries,
Reporter reporter) throws IOException {
return null;
}
@Override
public boolean validateInput(FileSystem fs, HiveConf conf, List<FileStatus> files) throws
IOException {
return false;
}
}
// Reads back the files written by addFile(): a stack of bucket files, each holding
// RecordIdentifier-prefixed text lines. Files are consumed one at a time; when one
// is exhausted next() recurses to open the next file on the stack.
static class MockRawReader implements AcidInputFormat.RawReader<Text> {
private final Stack<Path> filesToRead;
private final Configuration conf;
// Currently open file, or null when the next file still has to be opened.
private FSDataInputStream is = null;
private final FileSystem fs;
private boolean lastWasDelete = true;
MockRawReader(Configuration conf, List<Path> files) throws IOException {
filesToRead = new Stack<Path>();
for (Path file : files) filesToRead.push(file);
this.conf = conf;
fs = FileSystem.get(conf);
}
@Override
public ObjectInspector getObjectInspector() {
return null;
}
/**
* This is bogus especially with split update acid tables. This causes compaction to create
* delete_delta_x_y where none existed before. Makes the data layout such as would never be
* created by 'real' code path.
*/
@Override
public boolean isDelete(Text value) {
// Alternate between returning deleted and not. This is easier than actually
// tracking operations. We test that this is getting properly called by checking that only
// half the records show up in base files after major compactions.
lastWasDelete = !lastWasDelete;
return lastWasDelete;
}
@Override
public boolean next(RecordIdentifier identifier, Text text) throws IOException {
if (is == null) {
// Open the next file
if (filesToRead.empty()) return false;
Path p = filesToRead.pop();
LOG.debug("Reading records from " + p.toString());
is = fs.open(p);
}
String line = null;
try {
identifier.readFields(is);
line = is.readLine();
} catch (EOFException e) {
// Expected at end of a file: leave line == null so we advance to the next file.
}
if (line == null) {
// Set our current entry to null (since it's done) and try again.
is = null;
return next(identifier, text);
}
text.set(line);
return true;
}
@Override
public RecordIdentifier createKey() {
return new RecordIdentifier();
}
@Override
public Text createValue() {
return new Text();
}
@Override
public long getPos() throws IOException {
return 0;
}
@Override
public void close() throws IOException {
}
@Override
public float getProgress() throws IOException {
return 0;
}
}
// This class isn't used for real writes and likely does the wrong thing; it only
// exists so the test tables and partitions have an output format to declare. The
// tests actually write to the tables' directories directly (see addFile()).
static class MockOutputFormat implements AcidOutputFormat<WritableComparable, Text> {
@Override
public RecordUpdater getRecordUpdater(Path path, Options options) throws
IOException {
return null;
}
@Override
public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getRawRecordWriter(Path path, Options options) throws IOException {
return new MockRecordWriter(path, options);
}
@Override
public org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath,
Class<? extends Writable> valueClass,
boolean isCompressed, Properties tableProperties,
Progressable progress) throws IOException {
return null;
}
@Override
public RecordWriter<WritableComparable, Text> getRecordWriter(FileSystem fileSystem, JobConf entries,
String s,
Progressable progressable) throws
IOException {
return null;
}
@Override
public void checkOutputSpecs(FileSystem fileSystem, JobConf entries) throws IOException {
}
}
// Companion to MockOutputFormat: writes each Text record as a plain line to the
// AcidUtils-derived filename. Not exercised by the real test write path.
static class MockRecordWriter implements org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter {
private final FSDataOutputStream os;
MockRecordWriter(Path basedir, AcidOutputFormat.Options options) throws IOException {
FileSystem fs = FileSystem.get(options.getConfiguration());
Path p = AcidUtils.createFilename(basedir, options);
os = fs.create(p);
}
@Override
public void write(Writable w) throws IOException {
Text t = (Text)w;
os.writeBytes(t.toString());
os.writeBytes("\n");
}
@Override
public void close(boolean abort) throws IOException {
// Note: the stream is closed even on abort; partial output is left in place.
os.close();
}
}
/**
* in Hive 1.3.0 delta file names changed to delta_xxxx_yyyy_zzzz; prior to that
* the name was delta_xxxx_yyyy. We want to run compaction tests such that both formats
* are used since new (1.3) code has to be able to read old files.
*/
abstract boolean useHive130DeltaDirName();
// Picks the delta dir name format: compacted style when the txn range spans more
// than one txn, otherwise old vs. new (statement-suffixed) style per the subclass.
String makeDeltaDirName(long minTxnId, long maxTxnId) {
if(minTxnId != maxTxnId) {
//covers both streaming api and post compaction style.
return makeDeltaDirNameCompacted(minTxnId, maxTxnId);
}
return useHive130DeltaDirName() ?
AcidUtils.deltaSubdir(minTxnId, maxTxnId, 0) : AcidUtils.deltaSubdir(minTxnId, maxTxnId);
}
/**
* delta dir name after compaction
*/
String makeDeltaDirNameCompacted(long minTxnId, long maxTxnId) {
return AcidUtils.deltaSubdir(minTxnId, maxTxnId);
}
// delete_delta dir name after compaction
String makeDeleteDeltaDirNameCompacted(long minTxnId, long maxTxnId) {
return AcidUtils.deleteDeltaSubdir(minTxnId, maxTxnId);
}
// Simulates a full worker compaction cycle without actually rewriting data:
// enqueues the request, claims it as worker "fred", opens a compaction txn,
// records the highest write id, marks the compaction done and commits.
// Returns the compactor txn id.
protected long compactInTxn(CompactionRequest rqst) throws Exception {
txnHandler.compact(rqst);
CompactionInfo ci = txnHandler.findNextToCompact("fred", WORKER_VERSION);
ci.runAs = System.getProperty("user.name");
long compactorTxnId = openTxn(TxnType.COMPACTION);
// Need to create a valid writeIdList to set the highestWriteId in ci
ValidTxnList validTxnList = TxnCommonUtils.createValidReadTxnList(txnHandler.getOpenTxns(), compactorTxnId);
GetValidWriteIdsRequest writeIdsRequest = new GetValidWriteIdsRequest();
writeIdsRequest.setValidTxnList(validTxnList.writeToString());
writeIdsRequest
.setFullTableNames(Collections.singletonList(TxnUtils.getFullTableName(rqst.getDbname(), rqst.getTablename())));
// with this ValidWriteIdList is capped at whatever HWM validTxnList has
ValidCompactorWriteIdList tblValidWriteIds = TxnUtils
.createValidCompactWriteIdList(txnHandler.getValidWriteIds(writeIdsRequest).getTblValidWriteIds().get(0));
ci.highestWriteId = tblValidWriteIds.getHighWatermark();
txnHandler.updateCompactorState(ci, compactorTxnId);
txnHandler.markCompacted(ci);
txnHandler.commitTxn(new CommitTxnRequest(compactorTxnId));
// Wait out the open-txn timeout so the committed compactor txn becomes visible.
Thread.sleep(MetastoreConf.getTimeVar(conf, MetastoreConf.ConfVars.TXN_OPENTXN_TIMEOUT, TimeUnit.MILLISECONDS));
return compactorTxnId;
}
}
| |
package org.javagems.core.mail;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.mail.EmailAttachment;
import org.apache.commons.mail.HtmlEmail;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.activation.FileDataSource;
import javax.mail.internet.InternetAddress;
/**
* Fluent builder that assembles an Apache Commons {@link HtmlEmail} from a
* Configuration (SMTP host, from/reply-to addresses) plus recipients, subject,
* plain-text and HTML bodies, attachments, embedded images and placeholder
* replacements applied to subject/text/html.
*/
public class EmailFactory {
// Value object pairing an image file with its display name for embedding.
private class EmbbededImg {
String description;
File image;
public EmbbededImg(File image, String description) {
this.description = description;
this.image = image;
}
public String getDescription() {
return description;
}
public File getImage() {
return image;
}
}
// Configuration keys read in make().
protected static final String SMTP_HOST = "mail.smtp.host";
protected static final String FROM = "mail.from";
protected static final String FROM_ALIAS = "mail.from.alias";
protected static final String REPLYTO = "mail.replyto";
private List<InternetAddress> to;
private List<InternetAddress> cc;
private List<InternetAddress> bcc;
private String subject;
private String text;
private String html;
private List<EmailAttachment> attachments;
// Embedded images keyed by content id (cid) referenced from the HTML body.
private Map<String, EmbbededImg> embedded;
// Placeholder -> value pairs substituted into subject/text/html in make().
private Map<String, String> replacements;
private Configuration configuration;
// Static factory; each call returns an independent builder.
public static EmailFactory getInstance(Configuration configuration) {
return new EmailFactory(configuration);
}
private EmailFactory(Configuration configuration) {
this.configuration = configuration;
this.to = new ArrayList<InternetAddress>();
this.cc = new ArrayList<InternetAddress>();
this.bcc = new ArrayList<InternetAddress>();
this.attachments = new ArrayList<EmailAttachment>();
this.embedded = new HashMap<String, EmbbededImg>();
this.replacements = new HashMap<String, String>();
}
// Replaces the "to" recipient list (defensive copy).
public EmailFactory to(Collection<InternetAddress> to) {
this.to = new ArrayList<InternetAddress>(to);
return this;
}
public EmailFactory addTo(Collection<InternetAddress> to) {
this.to.addAll(to);
return this;
}
public EmailFactory addTo(InternetAddress... to) {
this.to.addAll(Arrays.asList(to));
return this;
}
// Replaces the "cc" recipient list (defensive copy).
public EmailFactory cc(Collection<InternetAddress> cc) {
this.cc = new ArrayList<InternetAddress>(cc);
return this;
}
public EmailFactory addCc(Collection<InternetAddress> cc) {
this.cc.addAll(cc);
return this;
}
public EmailFactory addCc(InternetAddress... cc) {
this.cc.addAll(Arrays.asList(cc));
return this;
}
// Replaces the "bcc" recipient list (defensive copy).
public EmailFactory bcc(Collection<InternetAddress> bcc) {
this.bcc = new ArrayList<InternetAddress>(bcc);
return this;
}
public EmailFactory addBcc(Collection<InternetAddress> bcc) {
this.bcc.addAll(bcc);
return this;
}
public EmailFactory addBcc(InternetAddress... bcc) {
this.bcc.addAll(Arrays.asList(bcc));
return this;
}
public EmailFactory subject(String subject) {
this.subject = subject;
return this;
}
// Plain-text body (used as the text alternative of the HTML message).
public EmailFactory text(String text) {
this.text = text;
return this;
}
public EmailFactory html(String html) {
this.html = html;
return this;
}
public EmailFactory setAttachments(Collection<EmailAttachment> attachments) {
this.attachments = new ArrayList<EmailAttachment>(attachments);
return this;
}
public EmailFactory addAttachments(Collection<EmailAttachment> attachments) {
this.attachments.addAll(attachments);
return this;
}
public EmailFactory addAttachments(EmailAttachment... attachments) {
this.attachments.addAll(Arrays.asList(attachments));
return this;
}
// Attaches a file with default name/description.
public EmailFactory attach(File file) {
this.attach(file, null);
return this;
}
public EmailFactory attach(File file, String name) {
this.attach(file, name, null);
return this;
}
public EmailFactory attach(File file, String name, String description) {
EmailAttachment attachment = new EmailAttachment();
attachment.setPath(file.getAbsolutePath());
attachment.setDisposition(EmailAttachment.ATTACHMENT);
attachment.setName(name);
attachment.setDescription(description);
this.addAttachments(attachment);
return this;
}
// Registers an image to embed under the given content id; a later embed() with
// the same cid overwrites the earlier one.
public EmailFactory embed(File image, String description, String cid) {
this.embedded.put(cid, new EmbbededImg(image, description));
return this;
}
/**
* Builds the HtmlEmail: configures host/from/reply-to from the Configuration,
* embeds images, applies the registered replacements to subject/text/html,
* attaches files and sets the recipient lists. The email is NOT sent here.
*
* NOTE(review): replacements use String.replaceAll, so each key is interpreted
* as a regular expression and each value as a regex replacement string ($ and \
* are special) — presumably intentional; confirm with callers before changing.
*/
public HtmlEmail make() throws Exception {
HtmlEmail email = new HtmlEmail();
email.setHostName(this.configuration.getString(SMTP_HOST));
email.setFrom(this.configuration.getString(FROM), this.configuration.getString(FROM_ALIAS));
email.setReplyTo(Arrays.asList(new InternetAddress(this.configuration.getString(REPLYTO))));
for (Entry<String, EmbbededImg> entry : this.embedded.entrySet()) {
email.embed(new FileDataSource(entry.getValue().getImage()), entry.getValue().getDescription(), entry.getKey());
}
if (!StringUtils.isEmpty(this.subject)) {
String subject = this.subject;
for (Entry<String, String> entry : this.replacements.entrySet()) {
// Null replacement values are substituted as empty strings.
String value = StringUtils.isEmpty(entry.getValue()) ? "" : entry.getValue();
subject = subject.replaceAll(entry.getKey(), value);
}
email.setSubject(subject);
}
if (!StringUtils.isEmpty(this.text)) {
String text = this.text;
for (Entry<String, String> entry : this.replacements.entrySet()) {
String value = StringUtils.isEmpty(entry.getValue()) ? "" : entry.getValue();
text = text.replaceAll(entry.getKey(), value);
}
email.setTextMsg(text);
}
if (!StringUtils.isEmpty(this.html)) {
String html = this.html;
for (Entry<String, String> entry : this.replacements.entrySet()) {
String value = StringUtils.isEmpty(entry.getValue()) ? "" : entry.getValue();
html = html.replaceAll(entry.getKey(), value);
}
email.setHtmlMsg(html);
}
if (!CollectionUtils.isEmpty(this.attachments)) {
for (EmailAttachment attachment : this.attachments) {
email.attach(attachment);
}
}
if (!CollectionUtils.isEmpty(this.to)) {
email.setTo(this.to);
}
if (!CollectionUtils.isEmpty(this.cc)) {
email.setCc(this.cc);
}
if (!CollectionUtils.isEmpty(this.bcc)) {
email.setBcc(this.bcc);
}
return email;
}
// Registers a placeholder substitution; see the make() note on regex semantics.
public EmailFactory replace(String key, String value) {
this.replacements.put(key, value);
return this;
}
}
| |
/**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.camel.facade;
import org.apache.camel.api.management.mbean.ManagedBacklogTracerMBean;
import org.apache.camel.component.seda.SedaEndpoint;
import org.apache.camel.management.mbean.*;
import io.fabric8.camel.facade.mbean.*;
import javax.management.*;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* Common facade support for both local and remote.
* <p/>
* This implementation will provide most implementation supports as it turns out
* that both the local and remote {@link org.apache.camel.CamelContext} will use the JMX API to
* gather information.
*/
public abstract class CamelFacadeSupport implements CamelFacade {
// Default context management name used when a method's managementName arg is null.
protected String camelContextManagementName;
// Connection to the (local or remote) MBean server all queries go through.
protected final MBeanServerConnection mBeanServer;
protected CamelFacadeSupport(String camelContextManagementName, MBeanServerConnection mBeanServer) throws Exception {
this.mBeanServer = mBeanServer;
this.camelContextManagementName = camelContextManagementName;
}
protected MBeanServerConnection getMBeanServerConnection() throws Exception {
return mBeanServer;
}
// NOTE(review): despite the name this queries MBeans (queryMBeans) and returns
// ObjectInstances, not ObjectNames — callers rely on getClassName() from them.
protected Set<ObjectInstance> queryNames(ObjectName name, QueryExp query) throws Exception {
return getMBeanServerConnection().queryMBeans(name, query);
}
/**
 * Wraps {@code target} in a dynamic proxy implementing {@code ic} whose zero-arg
 * {@code getId()} returns the supplied id; every other invocation is delegated to
 * the target unchanged.
 *
 * @param ic the interface the proxy implements
 * @param target the object real calls are forwarded to
 * @param id the value the proxy's getId() returns
 */
static public <T> T addGetId(Class<T> ic, final Object target, final String id) throws Exception {
  return ic.cast(Proxy.newProxyInstance(ic.getClassLoader(), new Class[]{ic}, new InvocationHandler() {
    @Override
    public Object invoke(Object o, Method method, Object[] objects) throws Throwable {
      // Fix: compare method names with equals(); the original used == on Strings,
      // which is reference equality and only works for interned instances.
      if ("getId".equals(method.getName()) && method.getParameterTypes().length == 0) {
        return id;
      }
      return method.invoke(target, objects);
    }
  }));
}
/**
 * Creates an MBean proxy for the given ObjectName and decorates it so that
 * getId() returns the canonical object name.
 */
@SuppressWarnings("unchecked")
protected Object newProxyInstance(ObjectName objectName, Class interfaceClass, boolean notificationBroadcaster) throws Exception {
  Object proxy = MBeanServerInvocationHandler.newProxyInstance(
      getMBeanServerConnection(), objectName, interfaceClass, notificationBroadcaster);
  return addGetId(interfaceClass, proxy, objectName.getCanonicalName());
}
/**
 * Finds all CamelContexts registered on the JMX server, or only the context with
 * the given management name (falling back to the configured
 * {@code camelContextManagementName} when the argument is null).
 *
 * @param connection not <code>null</code>
 * @param managementName to find a specific context by its management name
 * @return Set with ObjectName-elements
 */
protected Set<ObjectName> findCamelContexts(MBeanServerConnection connection, String managementName) throws Exception {
  String id = managementName != null ? managementName : camelContextManagementName;
  ObjectName name;
  if (id != null) {
    // Fix: build the query from the resolved id. The original concatenated the raw
    // managementName argument, which is null whenever the configured fallback was
    // used — producing a bogus "context=null" pattern.
    name = new ObjectName("org.apache.camel:context=" + id + ",type=context,*");
  } else {
    name = new ObjectName("org.apache.camel:context=*,type=context,*");
  }
  return connection.queryNames(name, null);
}
// CamelFacade
//---------------------------------------------------------------
// Returns a proxy for every CamelContext registered on the MBean server.
@Override
public List<CamelContextMBean> getCamelContexts() throws Exception {
MBeanServerConnection connection = getMBeanServerConnection();
Set<ObjectName> names = findCamelContexts(connection, null);
List<CamelContextMBean> answer = new ArrayList<CamelContextMBean>();
for (ObjectName on : names) {
CamelContextMBean context = (CamelContextMBean) newProxyInstance(on, CamelContextMBean.class, true);
answer.add(context);
}
return answer;
}
/**
 * Returns a proxy for the CamelContext with the given management name.
 *
 * @throws IOException when no matching CamelContext is registered in JMX
 */
@Override
public CamelContextMBean getCamelContext(String managementName) throws Exception {
  MBeanServerConnection connection = getMBeanServerConnection();
  // Typed instead of the original raw Set; findCamelContexts already returns Set<ObjectName>.
  Set<ObjectName> contexts = findCamelContexts(connection, managementName);
  if (contexts.isEmpty()) {
    throw new IOException("No CamelContext could be found in the JMX.");
  }
  // We just take the first CamelContext as it matches the context id.
  ObjectName name = contexts.iterator().next();
  return (CamelContextMBean) newProxyInstance(name, CamelContextMBean.class, true);
}
// Looks up the Fabric tracer MBean for the context (type=fabric domain), matching
// by implementation class name; returns null when no tracer is registered.
@Override
public CamelFabricTracerMBean getFabricTracer(String managementName) throws Exception {
String id = managementName != null ? managementName : camelContextManagementName;
ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=fabric,*");
Set<ObjectInstance> names = queryNames(query, null);
for (ObjectInstance on : names) {
if (on.getClassName().equals("org.apache.camel.fabric.FabricTracer")) {
CamelFabricTracerMBean tracer = (CamelFabricTracerMBean) newProxyInstance(on.getObjectName(), CamelFabricTracerMBean.class, true);
return tracer;
}
}
// tracer not found
return null;
}
// Looks up Camel's backlog tracer MBean (type=tracer domain) the same way;
// returns null when absent.
@Override
public ManagedBacklogTracerMBean getCamelTracer(String managementName) throws Exception {
String id = managementName != null ? managementName : camelContextManagementName;
ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=tracer,*");
Set<ObjectInstance> names = queryNames(query, null);
for (ObjectInstance on : names) {
if (on.getClassName().equals("org.apache.camel.management.mbean.ManagedBacklogTracer")) {
ManagedBacklogTracerMBean tracer = (ManagedBacklogTracerMBean) newProxyInstance(on.getObjectName(), ManagedBacklogTracerMBean.class, true);
return tracer;
}
}
// tracer not found
return null;
}
/** Returns proxies for all component MBeans of the given (or default) context. */
@Override
public List<CamelComponentMBean> getComponents(String managementName) throws Exception {
  String contextId = managementName != null ? managementName : camelContextManagementName;
  ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + contextId + ",type=components,*");
  List<CamelComponentMBean> components = new ArrayList<CamelComponentMBean>();
  for (ObjectInstance instance : queryNames(query, null)) {
    components.add((CamelComponentMBean) newProxyInstance(instance.getObjectName(), CamelComponentMBean.class, true));
  }
  return components;
}
// Returns proxies for all route MBeans of the given (or default) context;
// suspendable routes get the richer CamelSuspendableRouteMBean interface.
@Override
public List<CamelRouteMBean> getRoutes(String managementName) throws Exception {
String id = managementName != null ? managementName : camelContextManagementName;
ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=routes,*");
Set<ObjectInstance> names = queryNames(query, null);
List<CamelRouteMBean> answer = new ArrayList<CamelRouteMBean>();
for (ObjectInstance on : names) {
CamelRouteMBean route;
if (ManagedSuspendableRoute.class.getName().equals(on.getClassName())) {
route = (CamelRouteMBean) newProxyInstance(on.getObjectName(), CamelSuspendableRouteMBean.class, true);
} else {
route = (CamelRouteMBean) newProxyInstance(on.getObjectName(), CamelRouteMBean.class, true);
}
answer.add(route);
}
return answer;
}
/**
 * Returns proxies for all endpoint MBeans of the given (or default) context.
 * Browsable endpoints — ManagedBrowsableEndpoint, SEDA, and all JMS endpoints —
 * get the richer CamelBrowsableEndpointMBean interface.
 */
@Override
public List<CamelEndpointMBean> getEndpoints(String managementName) throws Exception {
  String contextId = managementName != null ? managementName : camelContextManagementName;
  ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + contextId + ",type=endpoints,*");
  List<CamelEndpointMBean> endpoints = new ArrayList<CamelEndpointMBean>();
  for (ObjectInstance instance : queryNames(query, null)) {
    String className = instance.getClassName();
    // The original's first two branches were identical; they are merged here.
    boolean browsable = ManagedBrowsableEndpoint.class.getName().equals(className)
        || SedaEndpoint.class.getName().equals(className)
        || className.startsWith("org.apache.camel.component.jms");
    Class mbeanType = browsable ? CamelBrowsableEndpointMBean.class : CamelEndpointMBean.class;
    endpoints.add((CamelEndpointMBean) newProxyInstance(instance.getObjectName(), mbeanType, true));
  }
  return endpoints;
}
// Returns proxies for all consumer MBeans; scheduled-poll consumers get the
// CamelScheduledPollConsumerMBean interface.
@Override
public List<CamelConsumerMBean> getConsumers(String managementName) throws Exception {
String id = managementName != null ? managementName : camelContextManagementName;
ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=consumers,*");
Set<ObjectInstance> names = queryNames(query, null);
List<CamelConsumerMBean> answer = new ArrayList<CamelConsumerMBean>();
for (ObjectInstance on : names) {
CamelConsumerMBean consumer;
if (ManagedScheduledPollConsumer.class.getName().equals(on.getClassName())) {
consumer = (CamelConsumerMBean) newProxyInstance(on.getObjectName(), CamelScheduledPollConsumerMBean.class, true);
} else {
consumer = (CamelConsumerMBean) newProxyInstance(on.getObjectName(), CamelConsumerMBean.class, true);
}
answer.add(consumer);
}
return answer;
}
// Returns proxies for all processor MBeans, choosing a specialized interface for
// send/delay/throttle processors based on the registered implementation class.
@Override
public List<CamelProcessorMBean> getProcessors(String managementName) throws Exception {
String id = managementName != null ? managementName : camelContextManagementName;
ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + id + ",type=processors,*");
Set<ObjectInstance> names = queryNames(query, null);
List<CamelProcessorMBean> answer = new ArrayList<CamelProcessorMBean>();
for (ObjectInstance on : names) {
CamelProcessorMBean processor;
if (ManagedSendProcessor.class.getName().equals(on.getClassName())) {
processor = (CamelProcessorMBean) newProxyInstance(on.getObjectName(), CamelSendProcessorMBean.class, true);
} else if (ManagedDelayer.class.getName().equals(on.getClassName())) {
processor = (CamelProcessorMBean) newProxyInstance(on.getObjectName(), CamelDelayProcessorMBean.class, true);
} else if (ManagedThrottler.class.getName().equals(on.getClassName())) {
processor = (CamelProcessorMBean) newProxyInstance(on.getObjectName(), CamelThrottleProcessorMBean.class, true);
} else {
processor = (CamelProcessorMBean) newProxyInstance(on.getObjectName(), CamelProcessorMBean.class, true);
}
answer.add(processor);
}
return answer;
}
@Override
public List<CamelThreadPoolMBean> getThreadPools(String managementName) throws Exception {
    // Fall back to the default context when no explicit management name is given.
    String contextName = managementName != null ? managementName : camelContextManagementName;
    ObjectName query = ObjectName.getInstance("org.apache.camel:context=" + contextName + ",type=threadpools,*");
    List<CamelThreadPoolMBean> pools = new ArrayList<CamelThreadPoolMBean>();
    for (ObjectInstance instance : queryNames(query, null)) {
        pools.add((CamelThreadPoolMBean) newProxyInstance(instance.getObjectName(),
                CamelThreadPoolMBean.class, true));
    }
    return pools;
}
@Override
public String dumpRoutesStatsAsXml(String managementName) throws Exception {
    // Resolve the context proxy (null managementName falls back to the default context).
    CamelContextMBean context = getCamelContext(managementName);
    try {
        // Preferred path: let the Camel context produce the stats XML itself.
        return context.dumpRoutesStatsAsXml(false, true);
    } catch (Exception e) {
        // Deliberately swallowed: the JMX operation only exists in newer Camel
        // releases; on older brokers we fall through to the shim below.
        // ignore as the method may not be available in older Camel releases
    }
    // fallback and use backwards compatible which is slower
    return CamelBackwardsCompatibleSupport.dumpRoutesStatsAsXml(this, managementName);
}
}
| |
/**
* Copyright (c) 2013 Oracle and/or its affiliates. All rights reserved.
*
* You may not modify, use, reproduce, or distribute this software except in
* compliance with the terms of the License at:
* http://java.net/projects/javaeetutorial/pages/BerkeleyLicense
*/
package javaeetutorial.roster.request;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import javaeetutorial.roster.entity.League;
import javaeetutorial.roster.entity.League_;
import javaeetutorial.roster.entity.Player;
import javaeetutorial.roster.entity.Player_;
import javaeetutorial.roster.entity.SummerLeague;
import javaeetutorial.roster.entity.Team;
import javaeetutorial.roster.entity.Team_;
import javaeetutorial.roster.entity.WinterLeague;
import javaeetutorial.roster.util.IncorrectSportException;
import javaeetutorial.roster.util.LeagueDetails;
import javaeetutorial.roster.util.PlayerDetails;
import javaeetutorial.roster.util.TeamDetails;
import javax.annotation.PostConstruct;
import javax.ejb.EJBException;
import javax.ejb.Stateful;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.Join;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
/**
* This is the bean class for the RequestBean enterprise bean.
*
* @author ian
*/
@Stateful
public class RequestBean implements Request, Serializable {
    // Class-wide logger for tracing bean method entry points.
    private static final Logger logger = Logger.getLogger("roster.request.RequestBean");
    // Container-injected, transaction-scoped entity manager.
    @PersistenceContext
    private EntityManager em;
    // Criteria API entry point; cached once the EntityManager has been injected.
    private CriteriaBuilder cb;
    @PostConstruct
    private void init() {
        // The EntityManager is not usable in the constructor, so the
        // CriteriaBuilder is looked up after dependency injection completes.
        cb = em.getCriteriaBuilder();
    }
@Override
public void createPlayer(String id, String name, String position, double salary) {
    logger.info("createPlayer");
    try {
        // Persist a brand-new player entity in one step.
        em.persist(new Player(id, name, position, salary));
    } catch (Exception ex) {
        // Bean contract: surface any persistence failure as a system exception.
        throw new EJBException(ex);
    }
}
@Override
public void addPlayer(String playerId, String teamId) {
    logger.info("addPlayer");
    try {
        // Look up both sides and wire the relationship in both directions.
        Player recruit = em.find(Player.class, playerId);
        Team roster = em.find(Team.class, teamId);
        roster.addPlayer(recruit);
        recruit.addTeam(roster);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public void removePlayer(String playerId) {
    logger.info("removePlayer");
    try {
        Player doomed = em.find(Player.class, playerId);
        // Detach the player from every team before deleting the entity itself.
        for (Team team : doomed.getTeams()) {
            team.dropPlayer(doomed);
        }
        em.remove(doomed);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public void dropPlayer(String playerId, String teamId) {
    logger.info("dropPlayer");
    try {
        // Sever the relationship from both sides; neither entity is deleted.
        Player traded = em.find(Player.class, playerId);
        Team roster = em.find(Team.class, teamId);
        roster.dropPlayer(traded);
        traded.dropTeam(roster);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public PlayerDetails getPlayer(String playerId) {
    logger.info("getPlayerDetails");
    try {
        // Flatten the entity into a serializable transfer object.
        Player found = em.find(Player.class, playerId);
        return new PlayerDetails(found.getId(),
                found.getName(),
                found.getPosition(),
                found.getSalary());
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
/**
 * Returns transfer objects for every player on the given team.
 *
 * @param teamId primary key of the team
 * @return list of PlayerDetails (never null on success)
 * @throws EJBException if the team cannot be loaded
 */
@Override
public List<PlayerDetails> getPlayersOfTeam(String teamId) {
    logger.info("getPlayersOfTeam");
    List<PlayerDetails> playerList = null;
    try {
        Team team = em.find(Team.class, teamId);
        // BUGFIX: team.getPlayers() is used as a Collection elsewhere in this file
        // (see removeTeam); blindly casting it to List could throw
        // ClassCastException for Set-backed associations. Copy into a List instead.
        playerList = this.copyPlayersToDetails(new ArrayList<>(team.getPlayers()));
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
    return playerList;
}
@Override
public List<TeamDetails> getTeamsOfLeague(String leagueId) {
    logger.info("getTeamsOfLeague");
    List<TeamDetails> detailsList = new ArrayList<>();
    Collection<Team> teams = null;
    try {
        League league = em.find(League.class, leagueId);
        teams = league.getTeams();
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
    // Convert each entity into its transfer object.
    for (Team team : teams) {
        detailsList.add(new TeamDetails(team.getId(),
                team.getName(),
                team.getCity()));
    }
    return detailsList;
}
@Override
public List<PlayerDetails> getPlayersByPosition(String position) {
    logger.info("getPlayersByPosition");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // SELECT p FROM Player p WHERE p.position = :position
            Root<Player> root = cq.from(Player.class);
            cq.select(root).where(cb.equal(root.get(Player_.position), position));
            players = em.createQuery(cq).getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public List<PlayerDetails> getPlayersByHigherSalary(String name) {
    logger.info("getPlayersByHigherSalary");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // Self-join: "candidate" players earning more than the "reference"
            // player identified by the name argument.
            Root<Player> candidate = cq.from(Player.class);
            Root<Player> reference = cq.from(Player.class);
            cq.select(candidate)
                    .distinct(true)
                    .where(cb.greaterThan(candidate.get(Player_.salary),
                                    reference.get(Player_.salary)),
                            cb.equal(reference.get(Player_.name), name));
            players = em.createQuery(cq).getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public List<PlayerDetails> getPlayersBySalaryRange(double low, double high) {
    logger.info("getPlayersBySalaryRange");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // SELECT DISTINCT p FROM Player p WHERE p.salary BETWEEN :low AND :high
            Root<Player> root = cq.from(Player.class);
            cq.select(root)
                    .distinct(true)
                    .where(cb.between(root.get(Player_.salary), low, high));
            players = em.createQuery(cq).getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public List<PlayerDetails> getPlayersByLeagueId(String leagueId) {
    logger.info("getPlayersByLeagueId");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // Navigate Player -> Team -> League and match on the league's id.
            Root<Player> root = cq.from(Player.class);
            Join<Team, League> league = root.join(Player_.teams).join(Team_.league);
            cq.select(root)
                    .distinct(true)
                    .where(cb.equal(league.get(League_.id), leagueId));
            players = em.createQuery(cq).getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
/**
 * Returns transfer objects for every player in a league of the given sport.
 *
 * @param sport sport name to match (League.sport)
 * @return distinct list of PlayerDetails
 * @throws EJBException on any query failure
 */
@Override
public List<PlayerDetails> getPlayersBySport(String sport) {
    // BUGFIX: previously logged "getPlayersByLeagueId" (copy-paste slip).
    logger.info("getPlayersBySport");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // Navigate Player -> Team -> League and match on the league's sport.
            Root<Player> player = cq.from(Player.class);
            Join<Player, Team> team = player.join(Player_.teams);
            Join<Team, League> league = team.join(Team_.league);
            cq.where(cb.equal(league.get(League_.sport), sport));
            cq.select(player).distinct(true);
            TypedQuery<Player> q = em.createQuery(cq);
            players = q.getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public List<PlayerDetails> getPlayersByCity(String city) {
    logger.info("getPlayersByCity");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // Match players whose team is located in the given city.
            Root<Player> root = cq.from(Player.class);
            Join<Player, Team> team = root.join(Player_.teams);
            cq.select(root)
                    .distinct(true)
                    .where(cb.equal(team.get(Team_.city), city));
            players = em.createQuery(cq).getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public List<PlayerDetails> getAllPlayers() {
    logger.info("getAllPlayers");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // SELECT p FROM Player p (no restriction).
            cq.select(cq.from(Player.class));
            players = em.createQuery(cq).getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public List<PlayerDetails> getPlayersNotOnTeam() {
    logger.info("getPlayersNotOnTeam");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // Players whose teams collection is empty are free agents.
            Root<Player> root = cq.from(Player.class);
            cq.select(root)
                    .distinct(true)
                    .where(cb.isEmpty(root.get(Player_.teams)));
            players = em.createQuery(cq).getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public List<PlayerDetails> getPlayersByPositionAndName(String position, String name) {
    logger.info("getPlayersByPositionAndName");
    List<Player> players = null;
    try {
        CriteriaQuery<Player> cq = cb.createQuery(Player.class);
        if (cq != null) {
            // Both predicates must hold (implicit AND).
            Root<Player> root = cq.from(Player.class);
            cq.select(root)
                    .distinct(true)
                    .where(cb.equal(root.get(Player_.position), position),
                            cb.equal(root.get(Player_.name), name));
            players = em.createQuery(cq).getResultList();
        }
        return copyPlayersToDetails(players);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public List<LeagueDetails> getLeaguesOfPlayer(String playerId) {
    logger.info("getLeaguesOfPlayer");
    List<LeagueDetails> detailsList = new ArrayList<>();
    List<League> leagues = null;
    try {
        CriteriaQuery<League> cq = cb.createQuery(League.class);
        if (cq != null) {
            // Navigate League -> Team -> Player and match on the player's id.
            Root<League> league = cq.from(League.class);
            Join<Team, Player> player = league.join(League_.teams).join(Team_.players);
            cq.select(league)
                    .distinct(true)
                    .where(cb.equal(player.get(Player_.id), playerId));
            leagues = em.createQuery(cq).getResultList();
        }
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
    if (leagues == null) {
        // Only reachable when the criteria query could not be created.
        logger.log(Level.WARNING, "No leagues found for player with ID {0}.", playerId);
        return null;
    }
    for (League found : leagues) {
        detailsList.add(new LeagueDetails(found.getId(),
                found.getName(),
                found.getSport()));
    }
    return detailsList;
}
/**
 * Returns the distinct sport names of every league the player belongs to.
 *
 * @param playerId primary key of the player
 * @return distinct sport names (possibly empty, never null)
 * @throws EJBException on any query failure
 */
@Override
public List<String> getSportsOfPlayer(String playerId) {
    logger.info("getSportsOfPlayer");
    List<String> sports = new ArrayList<>();
    try {
        CriteriaQuery<String> cq = cb.createQuery(String.class);
        if (cq != null) {
            // Navigate Player -> Team -> League and project the sport column.
            Root<Player> player = cq.from(Player.class);
            Join<Player, Team> team = player.join(Player_.teams);
            Join<Team, League> league = team.join(Team_.league);
            cq.where(cb.equal(player.get(Player_.id), playerId));
            cq.select(league.get(League_.sport)).distinct(true);
            TypedQuery<String> q = em.createQuery(cq);
            sports = q.getResultList();
        }
        // Dead commented-out navigation code (the pre-Criteria implementation)
        // removed; the criteria query above is the single implementation.
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
    return sports;
}
@Override
public void createTeamInLeague(TeamDetails teamDetails, String leagueId) {
    logger.info("createTeamInLeague");
    try {
        League league = em.find(League.class, leagueId);
        Team team = new Team(teamDetails.getId(),
                teamDetails.getName(),
                teamDetails.getCity());
        em.persist(team);
        // Maintain both sides of the bidirectional relationship.
        team.setLeague(league);
        league.addTeam(team);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public void removeTeam(String teamId) {
    logger.info("removeTeam");
    try {
        Team doomed = em.find(Team.class, teamId);
        // Unlink every player from the team before deleting it.
        for (Player player : doomed.getPlayers()) {
            player.dropTeam(doomed);
        }
        em.remove(doomed);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public TeamDetails getTeam(String teamId) {
    logger.info("getTeam");
    TeamDetails teamDetails = null;
    try {
        // Flatten the entity into a serializable transfer object.
        Team found = em.find(Team.class, teamId);
        teamDetails = new TeamDetails(found.getId(), found.getName(), found.getCity());
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
    return teamDetails;
}
@Override
public void createLeague(LeagueDetails leagueDetails) {
    logger.info("createLeague");
    try {
        String sport = leagueDetails.getSport();
        // Summer sports and winter sports map to different League subclasses.
        if (sport.equalsIgnoreCase("soccer")
                || sport.equalsIgnoreCase("swimming")
                || sport.equalsIgnoreCase("basketball")
                || sport.equalsIgnoreCase("baseball")) {
            em.persist(new SummerLeague(leagueDetails.getId(),
                    leagueDetails.getName(),
                    sport));
        } else if (sport.equalsIgnoreCase("hockey")
                || sport.equalsIgnoreCase("skiing")
                || sport.equalsIgnoreCase("snowboarding")) {
            em.persist(new WinterLeague(leagueDetails.getId(),
                    leagueDetails.getName(),
                    sport));
        } else {
            // Unknown sport: signalled here, then wrapped by the catch below.
            throw new IncorrectSportException("The specified sport is not valid.");
        }
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public void removeLeague(String leagueId) {
    logger.info("removeLeague");
    try {
        // Look up and delete the league entity.
        League doomed = em.find(League.class, leagueId);
        em.remove(doomed);
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
}
@Override
public LeagueDetails getLeague(String leagueId) {
    logger.info("getLeague");
    LeagueDetails leagueDetails = null;
    try {
        // Flatten the entity into a serializable transfer object.
        League found = em.find(League.class, leagueId);
        leagueDetails = new LeagueDetails(found.getId(),
                found.getName(),
                found.getSport());
    } catch (Exception ex) {
        throw new EJBException(ex);
    }
    return leagueDetails;
}
/**
 * Converts a list of Player entities into serializable PlayerDetails objects.
 *
 * @param players entities to convert; may be null (several callers pass null
 *                when the criteria query could not be created)
 * @return a new list of PlayerDetails; empty when players is null or empty
 */
private List<PlayerDetails> copyPlayersToDetails(List<Player> players) {
    List<PlayerDetails> detailsList = new ArrayList<>();
    // ROBUSTNESS: previously a null argument caused a NullPointerException on
    // players.iterator(); treat null the same as "no players".
    if (players == null) {
        return detailsList;
    }
    for (Player player : players) {
        detailsList.add(new PlayerDetails(player.getId(),
                player.getName(),
                player.getPosition(),
                player.getSalary()));
    }
    return detailsList;
}
}
| |
/*
* Copyright (C) 2015 iWedia S.A. Licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package com.iwedia.example.tvinput.engine.utils;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.media.tv.TvContentRating;
import android.media.tv.TvContract;
import android.net.Uri;
import com.iwedia.dtv.epg.EpgEvent;
import com.iwedia.dtv.epg.IEpgControl;
import com.iwedia.example.tvinput.TvService;
import com.iwedia.example.tvinput.data.ChannelDescriptor;
import com.iwedia.example.tvinput.data.EpgProgram;
import com.iwedia.example.tvinput.engine.ChannelManager;
import com.iwedia.example.tvinput.engine.DtvManager;
import com.iwedia.example.tvinput.utils.Logger;
import java.util.ArrayList;
/**
* Abstract class that contains mutual methods for EPG runnable classes
*/
public abstract class EpgRunnable implements Runnable {
    /** Object used to write to logcat output */
    private final Logger mLog = new Logger(TvService.APP_NAME + EpgRunnable.class.getSimpleName(),
            99);
    /** Domain used for content rating */
    private static final String DOMAIN = "com.android.tv";
    /** Content rating system */
    private static final String RATING_SYSTEM = "DVB";
    /** Projection for DB filling; only the title column is fetched for existence checks */
    private static final String[] projection = {
            TvContract.Programs.COLUMN_TITLE
    };
    // Middleware index of the service this runnable works on.
    protected int mServiceIndex;
    // Transport frequency of the service (unit not visible here — TODO confirm).
    protected Long mFrequency;
    /** Application context */
    protected final Context mContext;
    /** DvbManager for accessing middleware API */
    protected DtvManager mDtvManager;
    /** Channel Manager */
    private ChannelManager mChannelManager;
    /**
     * Constructor.
     *
     * @param context Application context
     */
    protected EpgRunnable(Context context) {
        mContext = context;
        // Grab the middleware facade and its channel manager up front.
        mDtvManager = DtvManager.getInstance();
        mChannelManager = mDtvManager.getChannelManager();
    }
/**
 * Convert DVB rating from middleware values to predefined String constants.
 *
 * @param rate DVB rate of the current program
 * @return Converted rate as a String constant ("DVB_4" .. "DVB_18")
 */
public static String convertDVBRating(int rate) {
    // Rates 5..18 map one-to-one onto "DVB_<rate>"; everything else
    // (0..4 and out-of-range values) clamps to the minimum "DVB_4".
    if (rate >= 5 && rate <= 18) {
        return "DVB_" + rate;
    }
    return "DVB_4";
}
/**
 * Convert program genre from middleware values to predefined constants.
 *
 * @param genre Genre of the program
 * @return String value of the program genre
 */
public static String convertDVBGenre(int genre) {
    switch (genre) {
        case 0x1:
            return "MOVIES";
        case 0x2:
        case 0x8: // social / political issues share the NEWS bucket
            return "NEWS";
        case 0x3: // show / game show
            return "GAMING";
        case 0x4:
            return "SPORTS";
        case 0x5:
            return "FAMILY_KIDS";
        case 0x6: // music / ballet / dance
            return "DRAMA";
        case 0x7: // arts / culture
        case 0x9:
            return "EDUCATION";
        case 0xA: // leisure / hobbies
            return "TRAVEL";
        default:
            return "ANIMAL_WILDLIFE";
    }
}
/**
 * Builds the ContentValues row for one EPG event, or returns null when the
 * event cannot (or need not) be inserted.
 */
private ContentValues makeProgramContentValues(EpgEvent event, int channelIndex) {
    mLog.d("[makeProgramContentValues] " + channelIndex);
    mLog.d("[makeProgramContentValues] " + event);
    IEpgControl epgControl = mDtvManager.getEpgControl();
    // channelIndex is 1-based here (note the -1 when indexing the channel list).
    ChannelDescriptor channel = mChannelManager.getChannelByIndex(channelIndex - 1);
    if (channel == null) {
        mLog.e("[makeProgramContentValues][channel not found]");
        return null;
    }
    if (event == null) {
        mLog.e("[makeProgramContentValues][event is null]");
        return null;
    }
    long startTimeMillis = event.getStartTime().getCalendar().getTimeInMillis();
    long endTimeMillis = event.getEndTime().getCalendar().getTimeInMillis();
    long durationMillis = endTimeMillis - startTimeMillis;
    if (durationMillis <= 0) {
        // Zero or negative duration means corrupt event data.
        mLog.e("[makeProgramContentValues][duration value is invalid]");
        return null;
    }
    if (checkifExist(channel.getChannelId(), startTimeMillis, endTimeMillis)) {
        // Row already present; skip the duplicate.
        mLog.w("[makeProgramContentValues][program exist]");
        return null;
    }
    TvContentRating rating = TvContentRating.createRating(
            DOMAIN, RATING_SYSTEM, convertDVBRating(event.getParentalRate()));
    String longDescription = epgControl.getEventExtendedDescription(
            mDtvManager.getEpgManager().getEpgFilterID(),
            event.getEventId(), channelIndex);
    EpgProgram program = new EpgProgram.Builder()
            .setChannelId(channel.getChannelId())
            .setTitle(event.getName())
            .setCanonicalGenres(convertDVBGenre(event.getGenre()))
            .setDescription(event.getDescription())
            .setLongDescription(longDescription)
            .setStartTimeUtcMillis(startTimeMillis)
            .setEndTimeUtcMillis(endTimeMillis)
            .setContentRatings(new TvContentRating[] {
                    rating
            })
            .build();
    return program.toContentValues();
}
/**
 * Inserts a single EPG event into the TV provider.
 *
 * @return true when a row was inserted, false when the event was skipped.
 */
protected boolean addProgram(EpgEvent event, int channelIndex) {
    ContentValues values = makeProgramContentValues(event, channelIndex);
    if (values == null) {
        // Nothing to insert (invalid event, unknown channel, or duplicate row).
        return false;
    }
    mLog.d("[addProgram][begin]");
    Uri inserted = mContext.getContentResolver().insert(TvContract.Programs.CONTENT_URI, values);
    mLog.d("[addProgram][end] " + inserted);
    return true;
}
/**
 * Bulk-inserts a batch of EPG events for one channel, silently skipping
 * events that cannot be converted into rows.
 */
protected void addPrograms(ArrayList<EpgEvent> events, int channelIndex) {
    ArrayList<ContentValues> rows = new ArrayList<ContentValues>();
    for (EpgEvent event : events) {
        ContentValues row = makeProgramContentValues(event, channelIndex);
        if (row != null) {
            rows.add(row);
        }
    }
    mLog.d("[addPrograms][begin]");
    mContext.getContentResolver().bulkInsert(TvContract.Programs.CONTENT_URI,
            rows.toArray(new ContentValues[rows.size()]));
    mLog.d("[addPrograms][end]");
}
/**
 * This method is used to check if the current event is already present in the DB.
 *
 * @param channelID DB id of the channel the program belongs to
 * @param startTime program start time in milliseconds
 * @param endTime   program end time in milliseconds
 * @return True if the program is present in the DB, false otherwise
 */
protected boolean checkifExist(long channelID, long startTime, long endTime) {
    Uri uri = TvContract.buildProgramsUriForChannel(channelID, startTime, endTime);
    Cursor cursor = mContext.getContentResolver().query(uri, projection, null, null, null);
    // BUGFIX: ContentResolver.query() may return null; the previous code then
    // called cursor.close() on the null reference and crashed with an NPE.
    if (cursor == null) {
        mLog.w("[checkifExist][item does not exist in DB][[uri: " + uri.toString() + "]");
        return false;
    }
    try {
        if (cursor.getCount() == 0) {
            mLog.w("[checkifExist][item does not exist in DB][[uri: " + uri.toString() + "]");
            return false;
        }
        return true;
    } finally {
        // Always release the cursor, whichever branch was taken.
        cursor.close();
    }
    /**
     * TODO Implement program update
     */
}
// Debug helper: dump the identifying fields of one EPG event to logcat.
protected void dumpEvent(EpgEvent event) {
    mLog.d("Event ID: " + event.getEventId());
    mLog.d("Event Desc: " + event.getDescription());
    mLog.d("Event StartTime: " + event.getStartTime().toString());
    mLog.d("Event EndTime: " + event.getEndTime().toString());
}
}
| |
// https://github.com/gwenn/sqlite-dialect/blob/master/src/main/java/org/hibernate/dialect/SQLiteDialect.java
package org.hibernate.dialect;
import java.sql.SQLException;
import java.sql.Types;
import org.hibernate.JDBCException;
import org.hibernate.ScrollMode;
import org.hibernate.dialect.function.AbstractAnsiTrimEmulationFunction;
import org.hibernate.dialect.function.NoArgSQLFunction;
import org.hibernate.dialect.function.SQLFunction;
import org.hibernate.dialect.function.SQLFunctionTemplate;
import org.hibernate.dialect.function.StandardSQLFunction;
import org.hibernate.dialect.function.VarArgsSQLFunction;
import org.hibernate.dialect.identity.IdentityColumnSupport;
import org.hibernate.dialect.identity.SQLiteDialectIdentityColumnSupport;
import org.hibernate.dialect.pagination.AbstractLimitHandler;
import org.hibernate.dialect.pagination.LimitHandler;
import org.hibernate.dialect.pagination.LimitHelper;
import org.hibernate.dialect.unique.DefaultUniqueDelegate;
import org.hibernate.dialect.unique.UniqueDelegate;
import org.hibernate.engine.spi.RowSelection;
import org.hibernate.exception.DataException;
import org.hibernate.exception.JDBCConnectionException;
import org.hibernate.exception.LockAcquisitionException;
import org.hibernate.exception.spi.SQLExceptionConversionDelegate;
import org.hibernate.exception.spi.TemplatedViolatedConstraintNameExtracter;
import org.hibernate.exception.spi.ViolatedConstraintNameExtracter;
import org.hibernate.internal.util.JdbcExceptionHelper;
import org.hibernate.mapping.Column;
import org.hibernate.type.StandardBasicTypes;
/**
* An SQL dialect for SQLite 3.
*/
public class SQLiteDialect extends Dialect {
private final UniqueDelegate uniqueDelegate;
public SQLiteDialect() {
registerColumnType( Types.BIT, "boolean" );
//registerColumnType(Types.FLOAT, "float");
//registerColumnType(Types.DOUBLE, "double");
registerColumnType( Types.DECIMAL, "decimal" );
registerColumnType( Types.CHAR, "char" );
registerColumnType( Types.LONGVARCHAR, "longvarchar" );
registerColumnType( Types.TIMESTAMP, "datetime" );
registerColumnType( Types.BINARY, "blob" );
registerColumnType( Types.VARBINARY, "blob" );
registerColumnType( Types.LONGVARBINARY, "blob" );
registerFunction( "concat", new VarArgsSQLFunction( StandardBasicTypes.STRING, "", "||", "" ) );
registerFunction( "mod", new SQLFunctionTemplate( StandardBasicTypes.INTEGER, "?1 % ?2" ) );
registerFunction( "quote", new StandardSQLFunction( "quote", StandardBasicTypes.STRING ) );
registerFunction( "random", new NoArgSQLFunction( "random", StandardBasicTypes.INTEGER ) );
registerFunction( "round", new StandardSQLFunction( "round" ) );
registerFunction( "substr", new StandardSQLFunction( "substr", StandardBasicTypes.STRING ) );
registerFunction( "trim", new AbstractAnsiTrimEmulationFunction() {
protected SQLFunction resolveBothSpaceTrimFunction() {
return new SQLFunctionTemplate( StandardBasicTypes.STRING, "trim(?1)" );
}
protected SQLFunction resolveBothSpaceTrimFromFunction() {
return new SQLFunctionTemplate( StandardBasicTypes.STRING, "trim(?2)" );
}
protected SQLFunction resolveLeadingSpaceTrimFunction() {
return new SQLFunctionTemplate( StandardBasicTypes.STRING, "ltrim(?1)" );
}
protected SQLFunction resolveTrailingSpaceTrimFunction() {
return new SQLFunctionTemplate( StandardBasicTypes.STRING, "rtrim(?1)" );
}
protected SQLFunction resolveBothTrimFunction() {
return new SQLFunctionTemplate( StandardBasicTypes.STRING, "trim(?1, ?2)" );
}
protected SQLFunction resolveLeadingTrimFunction() {
return new SQLFunctionTemplate( StandardBasicTypes.STRING, "ltrim(?1, ?2)" );
}
protected SQLFunction resolveTrailingTrimFunction() {
return new SQLFunctionTemplate( StandardBasicTypes.STRING, "rtrim(?1, ?2)" );
}
} );
uniqueDelegate = new SQLiteUniqueDelegate( this );
}
// database type mapping support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public String getCastTypeName(int code) {
// FIXME
return super.getCastTypeName( code );
}
// IDENTITY support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private static final SQLiteDialectIdentityColumnSupport IDENTITY_COLUMN_SUPPORT = new SQLiteDialectIdentityColumnSupport();
@Override
public IdentityColumnSupport getIdentityColumnSupport() {
return IDENTITY_COLUMN_SUPPORT;
}
// limit/offset support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private static final AbstractLimitHandler LIMIT_HANDLER = new AbstractLimitHandler() {
@Override
public String processSql(String sql, RowSelection selection) {
final boolean hasOffset = LimitHelper.hasFirstRow( selection );
return sql + (hasOffset ? " limit ? offset ?" : " limit ?");
}
@Override
public boolean supportsLimit() {
return true;
}
@Override
public boolean bindLimitParametersInReverseOrder() {
return true;
}
};
@Override
public LimitHandler getLimitHandler() {
return LIMIT_HANDLER;
}
// lock acquisition support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public boolean supportsLockTimeouts() {
// may be http://sqlite.org/c3ref/db_mutex.html ?
return false;
}
@Override
public String getForUpdateString() {
return "";
}
@Override
public boolean supportsOuterJoinForUpdate() {
return false;
}
/*
@Override
public boolean dropTemporaryTableAfterUse() {
return true; // temporary tables are only dropped when the connection is closed. If the connection is pooled...
}
*/
// current timestamp support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@Override
public boolean supportsCurrentTimestampSelection() {
return true;
}
public boolean isCurrentTimestampSelectStringCallable() {
return false;
}
@Override
public String getCurrentTimestampSelectString() {
return "select current_timestamp";
}
// SQLException support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

// SQLite primary result codes (see https://sqlite.org/rescode.html).
private static final int SQLITE_BUSY = 5;
private static final int SQLITE_LOCKED = 6;
private static final int SQLITE_IOERR = 10;
private static final int SQLITE_CORRUPT = 11;
private static final int SQLITE_NOTFOUND = 12;
private static final int SQLITE_FULL = 13;
private static final int SQLITE_CANTOPEN = 14;
private static final int SQLITE_PROTOCOL = 15;
private static final int SQLITE_TOOBIG = 18;
private static final int SQLITE_CONSTRAINT = 19;
private static final int SQLITE_MISMATCH = 20;
private static final int SQLITE_NOTADB = 26;

/**
 * Maps native SQLite result codes onto Hibernate's JDBCException hierarchy.
 * Unrecognized codes yield {@code null} so that other delegates may convert.
 */
@Override
public SQLExceptionConversionDelegate buildSQLExceptionConversionDelegate() {
    return new SQLExceptionConversionDelegate() {
        @Override
        public JDBCException convert(SQLException sqlException, String message, String sql) {
            final int errorCode = JdbcExceptionHelper.extractErrorCode( sqlException );
            // TOOBIG / MISMATCH: oversized or type-mismatched data.
            if (errorCode == SQLITE_TOOBIG || errorCode == SQLITE_MISMATCH) {
                return new DataException( message, sqlException, sql );
            }
            // BUSY / LOCKED: another connection or statement holds a lock.
            else if (errorCode == SQLITE_BUSY || errorCode == SQLITE_LOCKED) {
                return new LockAcquisitionException( message, sqlException, sql );
            }
            // IOERR..PROTOCOL (codes 10-15) and NOTADB: the database file or
            // connection itself is unusable.
            else if ((errorCode >= SQLITE_IOERR && errorCode <= SQLITE_PROTOCOL) || errorCode == SQLITE_NOTADB) {
                return new JDBCConnectionException( message, sqlException, sql );
            }
            // returning null allows other delegates to operate
            return null;
        }
    };
}
// NOTE(review): missing @Override unlike sibling methods — confirm the
// signature still matches the Dialect base class.
public ViolatedConstraintNameExtracter getViolatedConstraintNameExtracter() {
    return EXTRACTER;
}

/**
 * Pulls the violated constraint name out of SQLite's
 * "constraint &lt;name&gt; failed" message, but only when the error code is
 * SQLITE_CONSTRAINT (19); otherwise yields {@code null}.
 */
private static final ViolatedConstraintNameExtracter EXTRACTER = new TemplatedViolatedConstraintNameExtracter() {
    @Override
    protected String doExtractConstraintName(SQLException sqle) throws NumberFormatException {
        final int errorCode = JdbcExceptionHelper.extractErrorCode( sqle );
        if (errorCode == SQLITE_CONSTRAINT) {
            return extractUsingTemplate( "constraint ", " failed", sqle.getMessage() );
        }
        return null;
    }
};
// union subclass support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@Override
public boolean supportsUnionAll() {
    return true;
}

// DDL support ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

@Override
public boolean canCreateSchema() {
    // SQLite has no CREATE SCHEMA statement.
    return false;
}

@Override
public boolean hasAlterTable() {
    // As specified in NHibernate dialect
    return false;
}

@Override
public boolean dropConstraints() {
    return false;
}

@Override
public boolean qualifyIndexName() {
    return false;
}

@Override
public String getAddColumnString() {
    return "add column";
}

@Override
public String getDropForeignKeyString() {
    throw new UnsupportedOperationException( "No drop foreign key syntax supported by SQLiteDialect" );
}

@Override
public String getAddForeignKeyConstraintString(String constraintName,
        String[] foreignKey, String referencedTable, String[] primaryKey,
        boolean referencesPrimaryKey) {
    throw new UnsupportedOperationException( "No add foreign key syntax supported by SQLiteDialect" );
}

@Override
public String getAddPrimaryKeyConstraintString(String constraintName) {
    throw new UnsupportedOperationException( "No add primary key syntax supported by SQLiteDialect" );
}

@Override
public boolean supportsCommentOn() {
    return true;
}

@Override
public boolean supportsIfExistsBeforeTableName() {
    return true;
}

/* not case insensitive for unicode characters by default (ICU extension needed)
public boolean supportsCaseInsensitiveLike() {
    return true;
}
*/

@Override
public boolean doesReadCommittedCauseWritersToBlockReaders() {
    // TODO Validate (WAL mode...)
    return true;
}

// NOTE(review): missing @Override unlike sibling methods — confirm signature.
public boolean doesRepeatableReadCauseReadersToBlockWriters() {
    return true;
}

@Override
public boolean supportsTupleDistinctCounts() {
    return false;
}

// NOTE(review): missing @Override unlike sibling methods — confirm signature.
public int getInExpressionCountLimit() {
    // Compile/runtime time option: http://sqlite.org/limits.html#max_variable_number
    return 1000;
}

@Override
public UniqueDelegate getUniqueDelegate() {
    return uniqueDelegate;
}
/**
 * Emits a column-level " unique" fragment, since SQLite supports declaring
 * uniqueness inline in the column definition.
 */
private static class SQLiteUniqueDelegate extends DefaultUniqueDelegate {
    public SQLiteUniqueDelegate(Dialect dialect) {
        super( dialect );
    }
    @Override
    public String getColumnDefinitionUniquenessFragment(Column column) {
        return " unique";
    }
}
@Override
public String getSelectGUIDString() {
    // 16 random bytes rendered as a 32-character hex string — random, but
    // NOTE(review): not a formatted RFC 4122 UUID; confirm callers only need
    // uniqueness, not UUID shape.
    return "select hex(randomblob(16))";
}

@Override
public ScrollMode defaultScrollMode() {
    return ScrollMode.FORWARD_ONLY;
}
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
/**
* <p>
* Describes the configuration of a Spot fleet request.
* </p>
*/
public class SpotFleetRequestConfigData implements Serializable, Cloneable {
/**
 * A unique, case-sensitive identifier you provide to ensure idempotency of
 * your listings. This helps avoid duplicate listings. See <a href=
 * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html"
 * >Ensuring Idempotency</a>.
 */
private String clientToken;

/** The bid price per unit hour. */
private String spotPrice;

/**
 * The number of units to request, in terms of instances or a performance
 * characteristic important to the workload (vCPUs, memory, I/O, ...).
 */
private Integer targetCapacity;

/**
 * Start of the request, in UTC format
 * (<i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z).
 * The default is to start fulfilling the request immediately.
 */
private java.util.Date validFrom;

/**
 * End of the request, in UTC format. At this point, no new Spot instance
 * requests are placed or enabled to fulfill the request.
 */
private java.util.Date validUntil;

/**
 * Whether running Spot instances should be terminated when the Spot fleet
 * request expires.
 */
private Boolean terminateInstancesWithExpiration;

/**
 * Grants the Spot fleet permission to terminate Spot instances on your
 * behalf when you cancel the request via <a>CancelSpotFleetRequests</a> or
 * when it expires, if <code>terminateInstancesWithExpiration</code> is set.
 */
private String iamFleetRole;

/** The launch specifications for the Spot fleet request. */
private com.amazonaws.internal.SdkInternalList<SpotFleetLaunchSpecification> launchSpecifications;

/**
 * Whether running Spot instances should be terminated if the target capacity
 * is decreased below the current size of the Spot fleet.
 */
private String excessCapacityTerminationPolicy;

/**
 * How to allocate the target capacity across the Spot pools. The default is
 * <code>lowestPrice</code>.
 */
private String allocationStrategy;
/**
 * Sets the unique, case-sensitive identifier used to ensure idempotency of
 * your listings. See <a href=
 * "http://docs.aws.amazon.com/AWSEC2/latest/APIReference/Run_Instance_Idempotency.html"
 * >Ensuring Idempotency</a>.
 *
 * @param clientToken the idempotency token
 */
public void setClientToken(String clientToken) {
    this.clientToken = clientToken;
}

/** @return the idempotency client token */
public String getClientToken() {
    return this.clientToken;
}

/**
 * Fluent variant of {@link #setClientToken(String)}.
 *
 * @param clientToken the idempotency token
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withClientToken(String clientToken) {
    setClientToken(clientToken);
    return this;
}
/**
 * Sets the bid price per unit hour.
 *
 * @param spotPrice the bid price per unit hour
 */
public void setSpotPrice(String spotPrice) {
    this.spotPrice = spotPrice;
}

/** @return the bid price per unit hour */
public String getSpotPrice() {
    return this.spotPrice;
}

/**
 * Fluent variant of {@link #setSpotPrice(String)}.
 *
 * @param spotPrice the bid price per unit hour
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withSpotPrice(String spotPrice) {
    setSpotPrice(spotPrice);
    return this;
}
/**
 * Sets the number of units to request — instances, or a performance
 * characteristic important to the workload (vCPUs, memory, I/O, ...).
 *
 * @param targetCapacity the number of units to request
 */
public void setTargetCapacity(Integer targetCapacity) {
    this.targetCapacity = targetCapacity;
}

/** @return the number of units requested */
public Integer getTargetCapacity() {
    return this.targetCapacity;
}

/**
 * Fluent variant of {@link #setTargetCapacity(Integer)}.
 *
 * @param targetCapacity the number of units to request
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withTargetCapacity(Integer targetCapacity) {
    setTargetCapacity(targetCapacity);
    return this;
}
/**
 * Sets the start date and time of the request, in UTC format
 * (<i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z).
 * The default is to start fulfilling the request immediately.
 *
 * @param validFrom the start date and time, UTC
 */
public void setValidFrom(java.util.Date validFrom) {
    this.validFrom = validFrom;
}

/** @return the start date and time of the request, UTC */
public java.util.Date getValidFrom() {
    return this.validFrom;
}

/**
 * Fluent variant of {@link #setValidFrom(java.util.Date)}.
 *
 * @param validFrom the start date and time, UTC
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withValidFrom(java.util.Date validFrom) {
    setValidFrom(validFrom);
    return this;
}
/**
 * Sets the end date and time of the request, in UTC format
 * (<i>YYYY</i>-<i>MM</i>-<i>DD</i>T<i>HH</i>:<i>MM</i>:<i>SS</i>Z).
 * At this point, no new Spot instance requests are placed or enabled to
 * fulfill the request.
 *
 * @param validUntil the end date and time, UTC
 */
public void setValidUntil(java.util.Date validUntil) {
    this.validUntil = validUntil;
}

/** @return the end date and time of the request, UTC */
public java.util.Date getValidUntil() {
    return this.validUntil;
}

/**
 * Fluent variant of {@link #setValidUntil(java.util.Date)}.
 *
 * @param validUntil the end date and time, UTC
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withValidUntil(java.util.Date validUntil) {
    setValidUntil(validUntil);
    return this;
}
/**
 * Sets whether running Spot instances should be terminated when the Spot
 * fleet request expires.
 *
 * @param terminateInstancesWithExpiration the flag value
 */
public void setTerminateInstancesWithExpiration(
        Boolean terminateInstancesWithExpiration) {
    this.terminateInstancesWithExpiration = terminateInstancesWithExpiration;
}

/** @return whether running Spot instances are terminated on expiration */
public Boolean getTerminateInstancesWithExpiration() {
    return this.terminateInstancesWithExpiration;
}

/**
 * Fluent variant of {@link #setTerminateInstancesWithExpiration(Boolean)}.
 *
 * @param terminateInstancesWithExpiration the flag value
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withTerminateInstancesWithExpiration(
        Boolean terminateInstancesWithExpiration) {
    setTerminateInstancesWithExpiration(terminateInstancesWithExpiration);
    return this;
}

/**
 * Boolean-convention alias for
 * {@link #getTerminateInstancesWithExpiration()}.
 *
 * @return whether running Spot instances are terminated on expiration
 */
public Boolean isTerminateInstancesWithExpiration() {
    return this.terminateInstancesWithExpiration;
}
/**
 * Sets the IAM role granting the Spot fleet permission to terminate Spot
 * instances on your behalf when you cancel the request via
 * <a>CancelSpotFleetRequests</a> or when it expires, if you set
 * <code>terminateInstancesWithExpiration</code>.
 *
 * @param iamFleetRole the IAM fleet role
 */
public void setIamFleetRole(String iamFleetRole) {
    this.iamFleetRole = iamFleetRole;
}

/** @return the IAM fleet role */
public String getIamFleetRole() {
    return this.iamFleetRole;
}

/**
 * Fluent variant of {@link #setIamFleetRole(String)}.
 *
 * @param iamFleetRole the IAM fleet role
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withIamFleetRole(String iamFleetRole) {
    setIamFleetRole(iamFleetRole);
    return this;
}
/**
 * Returns the launch specifications for the Spot fleet request, lazily
 * creating an empty internal list on first access if none has been set.
 *
 * @return the (possibly empty) list of launch specifications
 */
public java.util.List<SpotFleetLaunchSpecification> getLaunchSpecifications() {
    if (launchSpecifications == null) {
        launchSpecifications = new com.amazonaws.internal.SdkInternalList<SpotFleetLaunchSpecification>();
    }
    return launchSpecifications;
}

/**
 * Replaces the launch specifications with a copy of the given collection,
 * or clears them when <code>null</code> is passed.
 *
 * @param launchSpecifications the launch specifications, or null to clear
 */
public void setLaunchSpecifications(
        java.util.Collection<SpotFleetLaunchSpecification> launchSpecifications) {
    if (launchSpecifications == null) {
        this.launchSpecifications = null;
        return;
    }
    this.launchSpecifications = new com.amazonaws.internal.SdkInternalList<SpotFleetLaunchSpecification>(
            launchSpecifications);
}

/**
 * <b>Appends</b> the given launch specifications to the existing list (if
 * any). Use {@link #setLaunchSpecifications(java.util.Collection)} or
 * {@link #withLaunchSpecifications(java.util.Collection)} to replace the
 * existing values instead.
 *
 * @param launchSpecifications launch specifications to append
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withLaunchSpecifications(
        SpotFleetLaunchSpecification... launchSpecifications) {
    if (this.launchSpecifications == null) {
        // Presize to the number of elements being appended.
        setLaunchSpecifications(new com.amazonaws.internal.SdkInternalList<SpotFleetLaunchSpecification>(
                launchSpecifications.length));
    }
    for (SpotFleetLaunchSpecification ele : launchSpecifications) {
        this.launchSpecifications.add(ele);
    }
    return this;
}

/**
 * Fluent variant of {@link #setLaunchSpecifications(java.util.Collection)};
 * replaces (does not append to) the existing values.
 *
 * @param launchSpecifications the launch specifications, or null to clear
 * @return this object, so that method calls can be chained
 */
public SpotFleetRequestConfigData withLaunchSpecifications(
        java.util.Collection<SpotFleetLaunchSpecification> launchSpecifications) {
    setLaunchSpecifications(launchSpecifications);
    return this;
}
/**
 * Sets whether running Spot instances should be terminated if the target
 * capacity of the Spot fleet request is decreased below the current size of
 * the Spot fleet.
 *
 * @param excessCapacityTerminationPolicy the policy value
 * @see ExcessCapacityTerminationPolicy
 */
public void setExcessCapacityTerminationPolicy(
        String excessCapacityTerminationPolicy) {
    this.excessCapacityTerminationPolicy = excessCapacityTerminationPolicy;
}

/**
 * @return the excess-capacity termination policy
 * @see ExcessCapacityTerminationPolicy
 */
public String getExcessCapacityTerminationPolicy() {
    return this.excessCapacityTerminationPolicy;
}

/**
 * Fluent variant of {@link #setExcessCapacityTerminationPolicy(String)}.
 *
 * @param excessCapacityTerminationPolicy the policy value
 * @return this object, so that method calls can be chained
 * @see ExcessCapacityTerminationPolicy
 */
public SpotFleetRequestConfigData withExcessCapacityTerminationPolicy(
        String excessCapacityTerminationPolicy) {
    setExcessCapacityTerminationPolicy(excessCapacityTerminationPolicy);
    return this;
}

/**
 * Enum overload of {@link #setExcessCapacityTerminationPolicy(String)}.
 * (The original Javadoc claimed a chained return value, but this method is
 * void — corrected here.)
 *
 * @param excessCapacityTerminationPolicy the policy value; must not be null
 *        — a null argument throws NullPointerException, unlike the
 *        null-tolerant String overload
 * @see ExcessCapacityTerminationPolicy
 */
public void setExcessCapacityTerminationPolicy(
        ExcessCapacityTerminationPolicy excessCapacityTerminationPolicy) {
    this.excessCapacityTerminationPolicy = excessCapacityTerminationPolicy
            .toString();
}

/**
 * Fluent enum variant of
 * {@link #setExcessCapacityTerminationPolicy(ExcessCapacityTerminationPolicy)}.
 *
 * @param excessCapacityTerminationPolicy the policy value; must not be null
 * @return this object, so that method calls can be chained
 * @see ExcessCapacityTerminationPolicy
 */
public SpotFleetRequestConfigData withExcessCapacityTerminationPolicy(
        ExcessCapacityTerminationPolicy excessCapacityTerminationPolicy) {
    setExcessCapacityTerminationPolicy(excessCapacityTerminationPolicy);
    return this;
}
/**
 * Sets how to allocate the target capacity across the Spot pools specified
 * by the Spot fleet request. The default is <code>lowestPrice</code>.
 *
 * @param allocationStrategy the allocation strategy
 * @see AllocationStrategy
 */
public void setAllocationStrategy(String allocationStrategy) {
    this.allocationStrategy = allocationStrategy;
}

/**
 * @return the allocation strategy (default <code>lowestPrice</code>)
 * @see AllocationStrategy
 */
public String getAllocationStrategy() {
    return this.allocationStrategy;
}

/**
 * Fluent variant of {@link #setAllocationStrategy(String)}.
 *
 * @param allocationStrategy the allocation strategy
 * @return this object, so that method calls can be chained
 * @see AllocationStrategy
 */
public SpotFleetRequestConfigData withAllocationStrategy(
        String allocationStrategy) {
    setAllocationStrategy(allocationStrategy);
    return this;
}

/**
 * Enum overload of {@link #setAllocationStrategy(String)}.
 * (The original Javadoc claimed a chained return value, but this method is
 * void — corrected here.)
 *
 * @param allocationStrategy the allocation strategy; must not be null — a
 *        null argument throws NullPointerException, unlike the
 *        null-tolerant String overload
 * @see AllocationStrategy
 */
public void setAllocationStrategy(AllocationStrategy allocationStrategy) {
    this.allocationStrategy = allocationStrategy.toString();
}

/**
 * Fluent enum variant of
 * {@link #setAllocationStrategy(AllocationStrategy)}.
 *
 * @param allocationStrategy the allocation strategy; must not be null
 * @return this object, so that method calls can be chained
 * @see AllocationStrategy
 */
public SpotFleetRequestConfigData withAllocationStrategy(
        AllocationStrategy allocationStrategy) {
    setAllocationStrategy(allocationStrategy);
    return this;
}
/**
 * Renders the non-null members, in declaration order, as a brace-delimited
 * comma-separated list; useful for testing and debugging.
 *
 * @return a string representation of this object
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder buf = new StringBuilder("{");
    if (getClientToken() != null) {
        buf.append("ClientToken: " + getClientToken() + ",");
    }
    if (getSpotPrice() != null) {
        buf.append("SpotPrice: " + getSpotPrice() + ",");
    }
    if (getTargetCapacity() != null) {
        buf.append("TargetCapacity: " + getTargetCapacity() + ",");
    }
    if (getValidFrom() != null) {
        buf.append("ValidFrom: " + getValidFrom() + ",");
    }
    if (getValidUntil() != null) {
        buf.append("ValidUntil: " + getValidUntil() + ",");
    }
    if (getTerminateInstancesWithExpiration() != null) {
        buf.append("TerminateInstancesWithExpiration: "
                + getTerminateInstancesWithExpiration() + ",");
    }
    if (getIamFleetRole() != null) {
        buf.append("IamFleetRole: " + getIamFleetRole() + ",");
    }
    if (getLaunchSpecifications() != null) {
        buf.append("LaunchSpecifications: " + getLaunchSpecifications()
                + ",");
    }
    if (getExcessCapacityTerminationPolicy() != null) {
        buf.append("ExcessCapacityTerminationPolicy: "
                + getExcessCapacityTerminationPolicy() + ",");
    }
    if (getAllocationStrategy() != null) {
        // Last member: no trailing comma.
        buf.append("AllocationStrategy: " + getAllocationStrategy());
    }
    return buf.append("}").toString();
}
/**
 * Two instances are equal when every member matches under a null-safe
 * comparison; equal members in both-null or equal-value form both pass.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof SpotFleetRequestConfigData)) {
        // Also covers obj == null.
        return false;
    }
    SpotFleetRequestConfigData that = (SpotFleetRequestConfigData) obj;
    // java.util.Objects.equals reproduces the original null-xor / equals
    // pair for each member: both null -> true, one null -> false, else
    // delegate to equals.
    return java.util.Objects.equals(that.getClientToken(), this.getClientToken())
            && java.util.Objects.equals(that.getSpotPrice(), this.getSpotPrice())
            && java.util.Objects.equals(that.getTargetCapacity(), this.getTargetCapacity())
            && java.util.Objects.equals(that.getValidFrom(), this.getValidFrom())
            && java.util.Objects.equals(that.getValidUntil(), this.getValidUntil())
            && java.util.Objects.equals(that.getTerminateInstancesWithExpiration(),
                    this.getTerminateInstancesWithExpiration())
            && java.util.Objects.equals(that.getIamFleetRole(), this.getIamFleetRole())
            && java.util.Objects.equals(that.getLaunchSpecifications(),
                    this.getLaunchSpecifications())
            && java.util.Objects.equals(that.getExcessCapacityTerminationPolicy(),
                    this.getExcessCapacityTerminationPolicy())
            && java.util.Objects.equals(that.getAllocationStrategy(),
                    this.getAllocationStrategy());
}
/**
 * Hash over all members, consistent with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
    // java.util.Objects.hash implements exactly the original accumulation:
    // start at 1, then h = 31*h + (member == null ? 0 : member.hashCode())
    // for each member, in declaration order.
    return java.util.Objects.hash(
            getClientToken(),
            getSpotPrice(),
            getTargetCapacity(),
            getValidFrom(),
            getValidUntil(),
            getTerminateInstancesWithExpiration(),
            getIamFleetRole(),
            getLaunchSpecifications(),
            getExcessCapacityTerminationPolicy(),
            getAllocationStrategy());
}
/**
 * Returns a shallow member-wise copy via {@link Object#clone()}.
 *
 * @return the cloned SpotFleetRequestConfigData
 */
@Override
public SpotFleetRequestConfigData clone() {
    try {
        return (SpotFleetRequestConfigData) super.clone();
    } catch (CloneNotSupportedException e) {
        // Unreachable in practice: the class declares Cloneable.
        throw new IllegalStateException(
                "Got a CloneNotSupportedException from Object.clone() "
                        + "even though we're Cloneable!", e);
    }
}
}
| |
// Copyright 2000-2022 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.execution.services;
import com.intellij.execution.ExecutionBundle;
import com.intellij.execution.services.ServiceEventListener.ServiceEvent;
import com.intellij.execution.services.ServiceModel.ServiceViewItem;
import com.intellij.execution.services.ServiceModelFilter.ServiceViewFilter;
import com.intellij.execution.services.ServiceViewDragHelper.ServiceViewDragBean;
import com.intellij.execution.services.ServiceViewModel.*;
import com.intellij.icons.AllIcons;
import com.intellij.ide.lightEdit.LightEditUtil;
import com.intellij.ide.projectView.PresentationData;
import com.intellij.ide.util.treeView.TreeState;
import com.intellij.navigation.ItemPresentation;
import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.Presentation;
import com.intellij.openapi.application.AppUIExecutor;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import com.intellij.openapi.components.StoragePathMacros;
import com.intellij.openapi.extensions.ExtensionPointListener;
import com.intellij.openapi.extensions.PluginDescriptor;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.NlsSafe;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.openapi.wm.ex.ToolWindowEx;
import com.intellij.ui.AppUIUtil;
import com.intellij.ui.AutoScrollToSourceHandler;
import com.intellij.ui.content.*;
import com.intellij.util.SmartList;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.FactoryMap;
import com.intellij.util.containers.SmartHashSet;
import kotlin.Unit;
import org.jdom.Element;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.concurrency.AsyncPromise;
import org.jetbrains.concurrency.Promise;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Consumer;
import java.util.function.Function;
import static com.intellij.execution.services.ServiceViewContributor.CONTRIBUTOR_EP_NAME;
@State(name = "ServiceViewManager", storages = @Storage(StoragePathMacros.PRODUCT_WORKSPACE_FILE))
public final class ServiceViewManagerImpl implements ServiceViewManager, PersistentStateComponent<ServiceViewManagerImpl.State> {
  private static final @NonNls String HELP_ID = "services.tool.window";
  private final Project myProject;
  // Persisted layout/selection state; replaced wholesale in loadState().
  private State myState = new State();
  private final ServiceModel myModel;
  private final ServiceModelFilter myModelFilter;
  // Tool window id -> contributors shown in that tool window.
  private final Map<String, Collection<ServiceViewContributor<?>>> myGroups = new ConcurrentHashMap<>();
  // One holder per created tool window content (main view + content manager).
  private final List<ServiceViewContentHolder> myContentHolders = new SmartList<>();
  private boolean myActivationActionsRegistered;
  private AutoScrollToSourceHandler myAutoScrollToSourceHandler;
  // Ids of tool windows that currently have at least one active service root.
  private final Set<String> myActiveToolWindowIds = new SmartHashSet<>();
  // True only while registerToolWindow() is running; read by shouldBeAvailable().
  private boolean myRegisteringToolWindowAvailable;
  public ServiceViewManagerImpl(@NotNull Project project) {
    myProject = project;
    // The Services tool window is not supported in LightEdit mode.
    LightEditUtil.forbidServiceInLightEditMode(project, getClass());
    myModel = new ServiceModel(myProject);
    Disposer.register(myProject, myModel);
    myModelFilter = new ServiceModelFilter();
    // Group contributors by tool window id before any events can arrive.
    loadGroups(CONTRIBUTOR_EP_NAME.getExtensionList());
    // Model updates are asynchronous; react to each event only after it is handled.
    myProject.getMessageBus().connect(myModel).subscribe(ServiceEventListener.TOPIC, e -> {
      myModel.handle(e).onSuccess(o -> eventHandled(e));
    });
    initRoots();
    CONTRIBUTOR_EP_NAME.addExtensionPointListener(new ServiceViewExtensionPointListener(), myProject);
  }
  // Updates the owning tool window's visibility/activity after a service event was applied to the model.
  private void eventHandled(@NotNull ServiceEvent e) {
    String toolWindowId = getToolWindowId(e.contributorClass);
    if (toolWindowId == null) {
      return;
    }
    ServiceViewItem eventRoot = ContainerUtil.find(myModel.getRoots(), root -> e.contributorClass.isInstance(root.getRootContributor()));
    if (eventRoot != null) {
      // A root for this contributor exists: activate, and show unless the descriptor opts out of activation.
      boolean show = !(eventRoot.getViewDescriptor() instanceof ServiceViewNonActivatingDescriptor);
      updateToolWindow(toolWindowId, true, show);
    }
    else {
      // No root for this contributor: the tool window stays active only if some other grouped contributor is active.
      Set<? extends ServiceViewContributor<?>> activeContributors = getActiveContributors();
      Collection<ServiceViewContributor<?>> toolWindowContributors = myGroups.get(toolWindowId);
      updateToolWindow(toolWindowId, ContainerUtil.intersects(activeContributors, toolWindowContributors), false);
    }
  }
  // Initializes model roots, then registers one tool window per group,
  // marking it active when any of its contributors has a root.
  private void initRoots() {
    myModel.getInvoker().invokeLater(() -> {
      myModel.initRoots().onSuccess(o -> {
        Set<? extends ServiceViewContributor<?>> activeContributors = getActiveContributors();
        Map<String, Boolean> toolWindowIds = new HashMap<>();
        for (ServiceViewContributor<?> contributor : CONTRIBUTOR_EP_NAME.getExtensionList()) {
          String toolWindowId = getToolWindowId(contributor.getClass());
          if (toolWindowId != null) {
            Boolean active = toolWindowIds.putIfAbsent(toolWindowId, activeContributors.contains(contributor));
            // Upgrade an already-recorded FALSE to TRUE if a later contributor of the same group is active.
            if (active == Boolean.FALSE && activeContributors.contains(contributor)) {
              toolWindowIds.put(toolWindowId, Boolean.TRUE);
            }
          }
        }
        for (Map.Entry<String, Boolean> entry : toolWindowIds.entrySet()) {
          registerToolWindow(entry.getKey(), entry.getValue());
        }
      });
    });
  }
private Set<? extends ServiceViewContributor<?>> getActiveContributors() {
return ContainerUtil.map2Set(myModel.getRoots(), ServiceViewItem::getRootContributor);
}
private @Nullable ServiceViewContentHolder getContentHolder(@NotNull Class<?> contributorClass) {
for (ServiceViewContentHolder holder : myContentHolders) {
for (ServiceViewContributor<?> rootContributor : holder.rootContributors) {
if (contributorClass.isInstance(rootContributor)) {
return holder;
}
}
}
return null;
}
  // Registers the tool window for a contributor group; `active` controls initial stripe-button visibility.
  private void registerToolWindow(@NotNull String toolWindowId, boolean active) {
    if (myProject.isDefault()) {
      return;
    }
    ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
    toolWindowManager.invokeLater(() -> {
      // Lazily register "activate by contributor" actions once, for the Services group only.
      if (!myActivationActionsRegistered) {
        myActivationActionsRegistered = true;
        Collection<ServiceViewContributor<?>> contributors = myGroups.get(ToolWindowId.SERVICES);
        if (contributors != null) {
          registerActivateByContributorActions(myProject, contributors);
        }
      }
      // shouldBeAvailable() reads this flag while registerToolWindow() below runs.
      myRegisteringToolWindowAvailable = active;
      try {
        ToolWindow toolWindow = toolWindowManager.registerToolWindow(toolWindowId, builder -> {
          builder.contentFactory = new ServiceViewToolWindowFactory();
          builder.icon = AllIcons.Toolwindows.ToolWindowServices;
          if (toolWindowId.equals(ToolWindowId.SERVICES)) {
            builder.stripeTitle = () -> {
              @NlsSafe String title = toolWindowId;
              return title;
            };
          }
          return Unit.INSTANCE;
        });
        if (active) {
          myActiveToolWindowIds.add(toolWindowId);
        }
        else {
          // No active services yet: keep the stripe button hidden until something appears.
          toolWindow.setShowStripeButton(false);
        }
      }
      finally {
        myRegisteringToolWindowAvailable = false;
      }
    });
  }
  // Activates/deactivates a tool window; optionally shows it on the first transition to active.
  private void updateToolWindow(@NotNull String toolWindowId, boolean active, boolean show) {
    if (myProject.isDisposed() || myProject.isDefault()) {
      return;
    }
    ApplicationManager.getApplication().invokeLater(() -> {
      ToolWindow toolWindow = ToolWindowManager.getInstance(myProject).getToolWindow(toolWindowId);
      if (toolWindow == null) {
        return;
      }
      if (active) {
        // Show only when newly activated and the stripe button is still hidden, to avoid re-popping.
        boolean doShow = show && !myActiveToolWindowIds.contains(toolWindowId) && !toolWindow.isShowStripeButton();
        myActiveToolWindowIds.add(toolWindowId);
        if (doShow) {
          toolWindow.show();
        }
      }
      else if (myActiveToolWindowIds.remove(toolWindowId)) {
        // Hide tool window only if model roots became empty and there were some services shown before update.
        toolWindow.hide();
        toolWindow.setShowStripeButton(false);
      }
    }, ModalityState.NON_MODAL, myProject.getDisposed());
  }
  // Queried by the tool window factory during registration; see registerToolWindow().
  boolean shouldBeAvailable() {
    return myRegisteringToolWindowAvailable;
  }
  // Populates a freshly created tool window: main "All Services" view, saved extracted views, DnD support.
  void createToolWindowContent(@NotNull ToolWindow toolWindow) {
    String toolWindowId = toolWindow.getId();
    Collection<ServiceViewContributor<?>> contributors = myGroups.get(toolWindowId);
    if (contributors == null) return;
    if (myAutoScrollToSourceHandler == null) {
      myAutoScrollToSourceHandler = ServiceViewSourceScrollHelper.createAutoScrollToSourceHandler(myProject);
    }
    ToolWindowEx toolWindowEx = (ToolWindowEx)toolWindow;
    ServiceViewSourceScrollHelper.installAutoScrollSupport(myProject, toolWindowEx, myAutoScrollToSourceHandler);
    // first = persisted state of the main view, second = states of extracted views.
    Pair<ServiceViewState, List<ServiceViewState>> states = getServiceViewStates(toolWindowId);
    AllServicesModel mainModel = new AllServicesModel(myModel, myModelFilter, contributors);
    ServiceView mainView = ServiceView.createView(myProject, mainModel, prepareViewState(states.first));
    mainView.setAutoScrollToSourceHandler(myAutoScrollToSourceHandler);
    ContentManager contentManager = toolWindow.getContentManager();
    ServiceViewContentHolder holder = new ServiceViewContentHolder(mainView, contentManager, contributors, toolWindowId);
    myContentHolders.add(holder);
    contentManager.addContentManagerListener(new ServiceViewContentMangerListener(myModelFilter, myAutoScrollToSourceHandler, holder));
    addMainContent(toolWindow.getContentManager(), mainView);
    loadViews(contentManager, mainView, contributors, states.second);
    ServiceViewDragHelper.installDnDSupport(myProject, toolWindowEx.getDecorator(), contentManager);
  }
  // Adds the non-closeable "All Services" tab and keeps it present/absent depending on whether
  // the main model is empty and other tabs exist.
  private void addMainContent(ContentManager contentManager, ServiceView mainView) {
    Content mainContent = ContentFactory.SERVICE.getInstance().createContent(mainView, null, false);
    mainContent.putUserData(ToolWindow.SHOW_CONTENT_ICON, Boolean.TRUE);
    mainContent.setHelpId(getToolWindowContextHelpId());
    mainContent.setCloseable(false);
    Disposer.register(mainContent, mainView);
    Disposer.register(mainContent, mainView.getModel());
    contentManager.addContent(mainContent);
    mainView.getModel().addModelListener(() -> {
      // Capture emptiness on the model thread; apply the content change on the EDT.
      boolean isEmpty = mainView.getModel().getRoots().isEmpty();
      AppUIExecutor.onUiThread().expireWith(myProject).submit(() -> {
        if (contentManager.isDisposed()) return;
        if (isEmpty) {
          if (contentManager.getIndexOfContent(mainContent) < 0) {
            // Keep the main tab as the only tab when nothing else is shown.
            if (contentManager.getContentCount() == 0) {
              contentManager.addContent(mainContent, 0);
            }
          }
          else if (contentManager.getContentCount() > 1) {
            // Empty main view is redundant while extracted tabs exist.
            contentManager.removeContent(mainContent, false);
          }
        }
        else {
          // Non-empty model: make sure the main tab is back at the first position.
          if (contentManager.getIndexOfContent(mainContent) < 0) {
            contentManager.addContent(mainContent, 0);
          }
        }
      });
    });
  }
  // Restores previously extracted views from persisted states; parent/child filter links are
  // reconstructed via the positional `parentView` index into the state list.
  private void loadViews(ContentManager contentManager,
                         ServiceView mainView,
                         Collection<? extends ServiceViewContributor<?>> contributors,
                         List<ServiceViewState> viewStates) {
    myModel.getInvoker().invokeLater(() -> {
      // Lazy class-name -> contributor lookup restricted to this tool window's contributors.
      Map<String, ServiceViewContributor<?>> contributorsMap = FactoryMap.create(className -> {
        for (ServiceViewContributor<?> contributor : contributors) {
          if (className.equals(contributor.getClass().getName())) {
            return contributor;
          }
        }
        return null;
      });
      List<ServiceViewFilter> filters = new ArrayList<>();
      List<Pair<ServiceViewModel, ServiceViewState>> loadedModels = new ArrayList<>();
      ServiceViewModel toSelect = null;
      for (ServiceViewState viewState : viewStates) {
        ServiceViewFilter parentFilter = mainView.getModel().getFilter();
        if (viewState.parentView >= 0 && viewState.parentView < filters.size()) {
          parentFilter = filters.get(viewState.parentView);
        }
        ServiceViewFilter filter = parentFilter;
        ServiceViewModel viewModel = ServiceViewModel.loadModel(viewState, myModel, myModelFilter, parentFilter, contributorsMap);
        if (viewModel != null) {
          loadedModels.add(Pair.create(viewModel, viewState));
          if (viewState.isSelected) {
            toSelect = viewModel;
          }
          filter = viewModel.getFilter();
        }
        // Record a filter per state (even for failed loads) so parentView indices keep lining up.
        filters.add(filter);
      }
      if (!loadedModels.isEmpty()) {
        ServiceViewModel modelToSelect = toSelect;
        AppUIExecutor.onUiThread().expireWith(myProject).submit(() -> {
          for (Pair<ServiceViewModel, ServiceViewState> pair : loadedModels) {
            extract(contentManager, pair.first, pair.second, false);
          }
          selectContentByModel(contentManager, modelToSelect);
        });
      }
    });
  }
  /**
   * Selects the given service in its tool window, optionally activating the window and focusing the view.
   */
  @Override
  public @NotNull Promise<Void> select(@NotNull Object service, @NotNull Class<?> contributorClass, boolean activate, boolean focus) {
    AsyncPromise<Void> result = new AsyncPromise<>();
    // Ensure model is updated, then iterate over service views on EDT in order to find view with service and select it.
    myModel.getInvoker().invoke(() -> AppUIUtil.invokeLaterIfProjectAlive(myProject, () -> {
      String toolWindowId = getToolWindowId(contributorClass);
      if (toolWindowId == null) {
        result.setError("Contributor group not found");
        return;
      }
      Runnable runnable = () -> promiseFindView(contributorClass, result,
                                                serviceView -> serviceView.select(service, contributorClass),
                                                content -> selectContent(content, focus, myProject));
      ToolWindow toolWindow = activate ? ToolWindowManager.getInstance(myProject).getToolWindow(toolWindowId) : null;
      if (toolWindow != null) {
        // Run the selection after the tool window is activated.
        toolWindow.activate(runnable, focus, focus);
      }
      else {
        runnable.run();
      }
    }));
    return result;
  }
  // Applies `action` to candidate views of the contributor's holder, most recently added content first;
  // `onSuccess` receives the content whose view succeeded.
  private void promiseFindView(Class<?> contributorClass, AsyncPromise<Void> result,
                               Function<? super ServiceView, ? extends Promise<?>> action, Consumer<? super Content> onSuccess) {
    ServiceViewContentHolder holder = getContentHolder(contributorClass);
    if (holder == null) {
      result.setError("Content manager not initialized");
      return;
    }
    List<Content> contents = new SmartList<>(holder.contentManager.getContents());
    if (contents.isEmpty()) {
      result.setError("Content not initialized");
      return;
    }
    // Try newest contents first.
    Collections.reverse(contents);
    promiseFindView(contents.iterator(), result, action, onSuccess);
  }
  // Recursively tries `action` on each content's service view; resolves `result` on the first success
  // or with the last error when all candidates fail.
  private static void promiseFindView(Iterator<? extends Content> iterator, AsyncPromise<Void> result,
                                      Function<? super ServiceView, ? extends Promise<?>> action, Consumer<? super Content> onSuccess) {
    Content content = iterator.next();
    ServiceView serviceView = getServiceView(content);
    if (serviceView == null) {
      if (iterator.hasNext()) {
        promiseFindView(iterator, result, action, onSuccess);
      }
      else {
        result.setError("Not services content");
      }
      return;
    }
    action.apply(serviceView)
      .onSuccess(v -> {
        if (onSuccess != null) {
          onSuccess.accept(content);
        }
        result.setResult(null);
      })
      .onError(e -> {
        // This view could not handle the request; fall through to the next candidate.
        if (iterator.hasNext()) {
          promiseFindView(iterator, result, action, onSuccess);
        }
        else {
          result.setError(e);
        }
      });
  }
private static void selectContent(Content content, boolean focus, Project project) {
AppUIExecutor.onUiThread().expireWith(project).submit(() -> {
ContentManager contentManager = content.getManager();
if (contentManager == null) return;
if (contentManager.getSelectedContent() != content && contentManager.getIndexOfContent(content) >= 0) {
contentManager.setSelectedContent(content, focus);
}
});
}
  /**
   * Expands the node for the given service in the view that contains it.
   */
  @Override
  public @NotNull Promise<Void> expand(@NotNull Object service, @NotNull Class<?> contributorClass) {
    AsyncPromise<Void> result = new AsyncPromise<>();
    // Ensure model is updated, then iterate over service views on EDT in order to find view with service and select it.
    myModel.getInvoker().invoke(() -> AppUIUtil.invokeLaterIfProjectAlive(myProject, () ->
      promiseFindView(contributorClass, result,
                      serviceView -> serviceView.expand(service, contributorClass),
                      null)));
    return result;
  }
  /**
   * Extracts the given service into its own tab in the owning tool window.
   */
  @Override
  public @NotNull Promise<Void> extract(@NotNull Object service, @NotNull Class<?> contributorClass) {
    AsyncPromise<Void> result = new AsyncPromise<>();
    myModel.getInvoker().invoke(() -> AppUIUtil.invokeLaterIfProjectAlive(myProject, () ->
      promiseFindView(contributorClass, result,
                      serviceView -> serviceView.extract(service, contributorClass),
                      null)));
    return result;
  }
  // Selects the service that targets the given virtual file (e.g. when its editor tab gets focused).
  @NotNull
  Promise<Void> select(@NotNull VirtualFile virtualFile) {
    // Snapshot the single-selection of each visible view on the calling (UI) thread.
    List<ServiceViewItem> selectedItems = new SmartList<>();
    for (ServiceViewContentHolder contentHolder : myContentHolders) {
      Content content = contentHolder.contentManager.getSelectedContent();
      if (content == null) continue;
      ServiceView serviceView = getServiceView(content);
      if (serviceView == null) continue;
      List<ServiceViewItem> items = serviceView.getSelectedItems();
      ContainerUtil.addIfNotNull(selectedItems, ContainerUtil.getOnlyItem(items));
    }
    AsyncPromise<Void> result = new AsyncPromise<>();
    myModel.getInvoker().invoke(() -> {
      Condition<? super ServiceViewItem> fileCondition = item -> {
        ServiceViewDescriptor descriptor = item.getViewDescriptor();
        return descriptor instanceof ServiceViewLocatableDescriptor &&
               virtualFile.equals(((ServiceViewLocatableDescriptor)descriptor).getVirtualFile());
      };
      // Multiple services may target to one virtual file.
      // Do nothing if service, targeting to the given virtual file, is selected,
      // otherwise it may lead to jumping selection,
      // if editor have just been selected due to some service selection.
      if (ContainerUtil.find(selectedItems, fileCondition) != null) {
        result.setResult(null);
        return;
      }
      ServiceViewItem fileItem = myModel.findItem(
        fileCondition,
        item -> !(item instanceof ServiceModel.ServiceNode) ||
                item.getViewDescriptor() instanceof ServiceViewLocatableDescriptor
      );
      if (fileItem != null) {
        Promise<Void> promise = select(fileItem.getValue(), fileItem.getRootContributor().getClass(), false, false);
        promise.processed(result);
      }
    });
    return result;
  }
  // Extracts the dragged items into a new tab, inheriting the source view's filter and state.
  void extract(@NotNull ServiceViewDragBean dragBean) {
    List<ServiceViewItem> items = dragBean.getItems();
    if (items.isEmpty()) return;
    ServiceView serviceView = dragBean.getServiceView();
    ServiceViewContentHolder holder = getContentHolder(serviceView);
    if (holder == null) return;
    ServiceViewFilter parentFilter = serviceView.getModel().getFilter();
    ServiceViewModel viewModel = ServiceViewModel.createModel(items, dragBean.getContributor(), myModel, myModelFilter, parentFilter);
    ServiceViewState state = new ServiceViewState();
    serviceView.saveState(state);
    extract(holder.contentManager, viewModel, state, true);
  }
  // Creates a view+tab for the model and wires model-specific tab-title updates.
  private void extract(ContentManager contentManager, ServiceViewModel viewModel, ServiceViewState viewState, boolean select) {
    ServiceView serviceView = ServiceView.createView(myProject, viewModel, prepareViewState(viewState));
    ItemPresentation presentation = getContentPresentation(myProject, viewModel, viewState);
    // null presentation means the user cancelled naming a grouped tab; abort the extraction.
    if (presentation == null) return;
    Content content = addServiceContent(contentManager, serviceView, presentation, select);
    if (viewModel instanceof GroupModel) {
      extractGroup((GroupModel)viewModel, content);
    }
    else if (viewModel instanceof SingeServiceModel) {
      extractService((SingeServiceModel)viewModel, content);
    }
    else if (viewModel instanceof ServiceListModel) {
      extractList((ServiceListModel)viewModel, content);
    }
  }
private static void extractGroup(GroupModel viewModel, Content content) {
viewModel.addModelListener(() -> updateContentTab(viewModel.getGroup(), content));
updateContentTab(viewModel.getGroup(), content);
}
  // Keeps a single-service tab up to date; if the service gains children, the tab is replaced
  // in place by a list-model tab rooted at that service.
  private void extractService(SingeServiceModel viewModel, Content content) {
    ContentManager contentManager = content.getManager();
    viewModel.addModelListener(() -> {
      ServiceViewItem item = viewModel.getService();
      if (item != null && !viewModel.getChildren(item).isEmpty() && contentManager != null) {
        AppUIExecutor.onUiThread().expireWith(myProject).submit(() -> {
          // Re-read the service on the EDT; it may have disappeared meanwhile.
          ServiceViewItem viewItem = viewModel.getService();
          if (viewItem == null) return;
          int index = contentManager.getIndexOfContent(content);
          if (index < 0) return;
          contentManager.removeContent(content, true);
          ServiceListModel listModel = new ServiceListModel(myModel, myModelFilter, new SmartList<>(viewItem),
                                                            viewModel.getFilter().getParent());
          ServiceView listView = ServiceView.createView(myProject, listModel, prepareViewState(new ServiceViewState()));
          Content listContent =
            addServiceContent(contentManager, listView, viewItem.getViewDescriptor().getContentPresentation(), true, index);
          extractList(listModel, listContent);
        });
      }
      else {
        updateContentTab(item, content);
      }
    });
    updateContentTab(viewModel.getService(), content);
  }
private static void extractList(ServiceListModel viewModel, Content content) {
viewModel.addModelListener(() -> updateContentTab(ContainerUtil.getOnlyItem(viewModel.getRoots()), content));
updateContentTab(ContainerUtil.getOnlyItem(viewModel.getRoots()), content);
}
  // Derives the tab presentation for an extracted model; for an unnamed multi-item list the user
  // is prompted for a group name (null return = cancelled).
  private static ItemPresentation getContentPresentation(Project project, ServiceViewModel viewModel, ServiceViewState viewState) {
    if (viewModel instanceof ContributorModel) {
      return ((ContributorModel)viewModel).getContributor().getViewDescriptor(project).getContentPresentation();
    }
    else if (viewModel instanceof GroupModel) {
      return ((GroupModel)viewModel).getGroup().getViewDescriptor().getContentPresentation();
    }
    else if (viewModel instanceof SingeServiceModel) {
      return ((SingeServiceModel)viewModel).getService().getViewDescriptor().getContentPresentation();
    }
    else if (viewModel instanceof ServiceListModel) {
      List<ServiceViewItem> items = ((ServiceListModel)viewModel).getItems();
      if (items.size() == 1) {
        return items.get(0).getViewDescriptor().getContentPresentation();
      }
      String name = viewState.id;
      if (StringUtil.isEmpty(name)) {
        name = Messages.showInputDialog(project,
                                        ExecutionBundle.message("service.view.group.label"),
                                        ExecutionBundle.message("service.view.group.title"),
                                        null, null, null);
        if (StringUtil.isEmpty(name)) return null;
      }
      return new PresentationData(name, null, AllIcons.Nodes.Folder, null);
    }
    return null;
  }
  // Convenience overload: append the content at the end (index -1).
  private static Content addServiceContent(ContentManager contentManager, ServiceView serviceView, ItemPresentation presentation,
                                           boolean select) {
    return addServiceContent(contentManager, serviceView, presentation, select, -1);
  }
  // Creates a closeable tab for the view at the given index, tying the view's lifetime to the content.
  private static Content addServiceContent(ContentManager contentManager, ServiceView serviceView, ItemPresentation presentation,
                                           boolean select, int index) {
    Content content =
      ContentFactory.SERVICE.getInstance().createContent(serviceView, ServiceViewDragHelper.getDisplayName(presentation), false);
    content.putUserData(ToolWindow.SHOW_CONTENT_ICON, Boolean.TRUE);
    content.setHelpId(getToolWindowContextHelpId());
    content.setCloseable(true);
    content.setIcon(presentation.getIcon(false));
    Disposer.register(content, serviceView);
    Disposer.register(content, serviceView.getModel());
    contentManager.addContent(content, index);
    if (select) {
      contentManager.setSelectedContent(content);
    }
    return content;
  }
  // Refreshes tab title/icon/color from the item's presentation on the EDT.
  private static void updateContentTab(ServiceViewItem item, Content content) {
    if (item != null) {
      // Hold the item weakly so a pending EDT task does not keep a removed service alive.
      WeakReference<ServiceViewItem> itemRef = new WeakReference<>(item);
      AppUIExecutor.onUiThread().expireWith(content).submit(() -> {
        ServiceViewItem viewItem = itemRef.get();
        if (viewItem == null) return;
        ItemPresentation itemPresentation = viewItem.getViewDescriptor().getContentPresentation();
        content.setDisplayName(ServiceViewDragHelper.getDisplayName(itemPresentation));
        content.setIcon(itemPresentation.getIcon(false));
        content.setTabColor(viewItem.getColor());
      });
    }
  }
  // Builds the tool-window-id -> contributors mapping: one window per contributor when the
  // "split" registry flag is on, otherwise everything in the single Services tool window.
  private void loadGroups(Collection<? extends ServiceViewContributor<?>> contributors) {
    if (Registry.is("ide.service.view.split")) {
      for (ServiceViewContributor<?> contributor : contributors) {
        myGroups.put(contributor.getViewDescriptor(myProject).getId(), new SmartList<>(contributor));
      }
    }
    else if (!contributors.isEmpty()) {
      String servicesToolWindowId = ToolWindowId.SERVICES;
      Collection<ServiceViewContributor<?>> servicesContributors =
        myGroups.computeIfAbsent(servicesToolWindowId, __ -> ContainerUtil.newConcurrentSet());
      servicesContributors.addAll(contributors);
    }
  }
  // Splits the persisted states of one group into (main view state, extracted view states).
  // The main view's state is the one with an empty viewType.
  private @NotNull Pair<ServiceViewState, List<ServiceViewState>> getServiceViewStates(@NotNull String groupId) {
    List<ServiceViewState> states = ContainerUtil.filter(myState.viewStates, state -> groupId.equals(state.groupId));
    ServiceViewState mainState = ContainerUtil.find(states, state -> StringUtil.isEmpty(state.viewType));
    if (mainState == null) {
      mainState = new ServiceViewState();
    }
    else {
      states.remove(mainState);
    }
    return Pair.create(mainState, states);
  }
  /**
   * Serializes the current layout: for each holder, drops that group's stale states and re-saves
   * the main view plus every extracted view, recording parent-filter links positionally.
   */
  @Override
  public @NotNull State getState() {
    // Drop states of groups that no longer exist.
    ContainerUtil.retainAll(myState.viewStates, state -> myGroups.containsKey(state.groupId));
    for (ServiceViewContentHolder holder : myContentHolders) {
      // Replace this group's states with freshly saved ones.
      ContainerUtil.retainAll(myState.viewStates, state -> !holder.toolWindowId.equals(state.groupId));
      ServiceViewFilter mainFilter = holder.mainView.getModel().getFilter();
      ServiceViewState mainState = new ServiceViewState();
      myState.viewStates.add(mainState);
      holder.mainView.saveState(mainState);
      mainState.groupId = holder.toolWindowId;
      mainState.treeStateElement = new Element("root");
      mainState.treeState.writeExternal(mainState.treeStateElement);
      mainState.clearTreeState();
      List<ServiceView> processedViews = new SmartList<>();
      for (Content content : holder.contentManager.getContents()) {
        ServiceView serviceView = getServiceView(content);
        if (serviceView == null || isMainView(serviceView)) continue;
        ServiceViewState viewState = new ServiceViewState();
        processedViews.add(serviceView);
        myState.viewStates.add(viewState);
        serviceView.saveState(viewState);
        viewState.groupId = holder.toolWindowId;
        viewState.isSelected = holder.contentManager.isSelected(content);
        ServiceViewModel viewModel = serviceView.getModel();
        if (viewModel instanceof ServiceListModel) {
          // For named list tabs the tab title is the persisted id.
          viewState.id = content.getDisplayName();
        }
        // Persist the parent link as an index into the already-processed views of this group.
        ServiceViewFilter parentFilter = viewModel.getFilter().getParent();
        if (parentFilter != null && !parentFilter.equals(mainFilter)) {
          for (int i = 0; i < processedViews.size(); i++) {
            ServiceView parentView = processedViews.get(i);
            if (parentView.getModel().getFilter().equals(parentFilter)) {
              viewState.parentView = i;
              break;
            }
          }
        }
        viewState.treeStateElement = new Element("root");
        viewState.treeState.writeExternal(viewState.treeStateElement);
        viewState.clearTreeState();
      }
    }
    return myState;
  }
  /**
   * Restores the persisted layout; tree states are materialized from their serialized XML elements.
   */
  @Override
  public void loadState(@NotNull State state) {
    myState = state;
    for (ServiceViewState viewState : myState.viewStates) {
      viewState.treeState = TreeState.createFrom(viewState.treeStateElement);
    }
  }
  // Persisted component state (serialized to the product workspace file).
  static final class State {
    // Per-view layout/selection states across all service tool windows.
    public List<ServiceViewState> viewStates = new ArrayList<>();
    // Whether the master (services tree) component is visible.
    public boolean showServicesTree = true;
  }
  // Context help id used by all service contents.
  static String getToolWindowContextHelpId() {
    return HELP_ID;
  }
  // Propagates the persisted master-component visibility into a freshly loaded view state.
  private ServiceViewState prepareViewState(ServiceViewState state) {
    state.showServicesTree = myState.showServicesTree;
    return state;
  }
  boolean isShowServicesTree() {
    return myState.showServicesTree;
  }
void setShowServicesTree(boolean value) {
myState.showServicesTree = value;
for (ServiceViewContentHolder holder : myContentHolders) {
for (ServiceView serviceView : holder.getServiceViews()) {
serviceView.getUi().setMasterComponentVisible(value);
}
}
}
  // "Split by type" is offered only from a non-empty main view whose sibling tabs
  // (other than itself) are all contributor tabs already.
  boolean isSplitByTypeEnabled(@NotNull ServiceView selectedView) {
    if (!isMainView(selectedView) ||
        selectedView.getModel().getVisibleRoots().isEmpty()) {
      return false;
    }
    ServiceViewContentHolder holder = getContentHolder(selectedView);
    if (holder == null) return false;
    for (Content content : holder.contentManager.getContents()) {
      ServiceView serviceView = getServiceView(content);
      if (serviceView != null && serviceView != selectedView && !(serviceView.getModel() instanceof ContributorModel)) return false;
    }
    return true;
  }
  // Extracts one contributor tab per root contributor of the model (reading roots on the model
  // thread, creating tabs on the EDT).
  void splitByType(@NotNull ServiceView selectedView) {
    ServiceViewContentHolder holder = getContentHolder(selectedView);
    if (holder == null) return;
    myModel.getInvoker().invokeLater(() -> {
      List<ServiceViewContributor<?>> contributors = ContainerUtil.map(myModel.getRoots(), ServiceViewItem::getRootContributor);
      AppUIUtil.invokeOnEdt(() -> {
        for (ServiceViewContributor<?> contributor : contributors) {
          splitByType(holder.contentManager, contributor);
        }
      });
    });
  }
private ServiceViewContentHolder getContentHolder(ServiceView serviceView) {
for (ServiceViewContentHolder holder : myContentHolders) {
if (holder.getServiceViews().contains(serviceView)) {
return holder;
}
}
return null;
}
  // Creates a contributor tab unless one for this contributor already exists.
  private void splitByType(ContentManager contentManager, ServiceViewContributor<?> contributor) {
    for (Content content : contentManager.getContents()) {
      ServiceView serviceView = getServiceView(content);
      if (serviceView != null) {
        ServiceViewModel viewModel = serviceView.getModel();
        if (viewModel instanceof ContributorModel && contributor.equals(((ContributorModel)viewModel).getContributor())) {
          return;
        }
      }
    }
    ContributorModel contributorModel = new ContributorModel(myModel, myModelFilter, contributor, null);
    extract(contentManager, contributorModel, prepareViewState(new ServiceViewState()), true);
  }
public @NotNull List<Object> getChildrenSafe(@NotNull AnActionEvent e, @NotNull List<Object> valueSubPath, @NotNull Class<?> contributorClass) {
ServiceView serviceView = ServiceViewActionProvider.getSelectedView(e);
return serviceView != null ? serviceView.getChildrenSafe(valueSubPath, contributorClass) : Collections.emptyList();
}
public @Nullable String getToolWindowId(@NotNull Class<?> contributorClass) {
for (Map.Entry<String, Collection<ServiceViewContributor<?>>> entry : myGroups.entrySet()) {
if (entry.getValue().stream().anyMatch(contributorClass::isInstance)) {
return entry.getKey();
}
}
return null;
}
  // The main ("All Services") view is the one backed by an AllServicesModel.
  private static boolean isMainView(@NotNull ServiceView serviceView) {
    return serviceView.getModel() instanceof AllServicesModel;
  }
private static @Nullable Content getMainContent(@NotNull ContentManager contentManager) {
for (Content content : contentManager.getContents()) {
ServiceView serviceView = getServiceView(content);
if (serviceView != null && isMainView(serviceView)) {
return content;
}
}
return null;
}
private static @Nullable ServiceView getServiceView(Content content) {
Object component = content.getComponent();
return component instanceof ServiceView ? (ServiceView)component : null;
}
  // Selects the tab backed by the given model, or falls back to the main tab when model is null.
  private static void selectContentByModel(@NotNull ContentManager contentManager, @Nullable ServiceViewModel modelToSelect) {
    if (modelToSelect != null) {
      for (Content content : contentManager.getContents()) {
        ServiceView serviceView = getServiceView(content);
        if (serviceView != null && serviceView.getModel() == modelToSelect) {
          contentManager.setSelectedContent(content);
          break;
        }
      }
    }
    else {
      Content content = getMainContent(contentManager);
      if (content != null) {
        contentManager.setSelectedContent(content);
      }
    }
  }
  // Prefers the contributor's own tab; falls back to the main tab when no dedicated tab exists.
  private static void selectContentByContributor(@NotNull ContentManager contentManager, @NotNull ServiceViewContributor<?> contributor) {
    Content mainContent = null;
    for (Content content : contentManager.getContents()) {
      ServiceView serviceView = getServiceView(content);
      if (serviceView != null) {
        if (serviceView.getModel() instanceof ContributorModel &&
            contributor.equals(((ContributorModel)serviceView.getModel()).getContributor())) {
          contentManager.setSelectedContent(content, true);
          return;
        }
        if (isMainView(serviceView)) {
          mainContent = content;
        }
      }
    }
    if (mainContent != null) {
      contentManager.setSelectedContent(mainContent, true);
    }
  }
  // Reacts to tab lifecycle: registers/unregisters view filters, auto-closes emptied tabs,
  // and toggles the main tab's "All Services" title depending on the tab count.
  // NOTE(review): class name has a typo ("Manger"); kept because it is referenced by name elsewhere.
  private static final class ServiceViewContentMangerListener implements ContentManagerListener {
    private final ServiceModelFilter myModelFilter;
    private final AutoScrollToSourceHandler myAutoScrollToSourceHandler;
    private final ServiceViewContentHolder myContentHolder;
    private final ContentManager myContentManager;
    ServiceViewContentMangerListener(@NotNull ServiceModelFilter modelFilter,
                                     @NotNull AutoScrollToSourceHandler toSourceHandler,
                                     @NotNull ServiceViewContentHolder contentHolder) {
      myModelFilter = modelFilter;
      myAutoScrollToSourceHandler = toSourceHandler;
      myContentHolder = contentHolder;
      myContentManager = contentHolder.contentManager;
    }
    @Override
    public void contentAdded(@NotNull ContentManagerEvent event) {
      Content content = event.getContent();
      ServiceView serviceView = getServiceView(content);
      if (serviceView != null && !isMainView(serviceView)) {
        serviceView.setAutoScrollToSourceHandler(myAutoScrollToSourceHandler);
        // A new extracted view's filter now hides its items from sibling views.
        myModelFilter.addFilter(serviceView.getModel().getFilter());
        myContentHolder.processAllModels(ServiceViewModel::filtersChanged);
        // Auto-close the tab once its model runs out of roots.
        serviceView.getModel().addModelListener(() -> {
          if (serviceView.getModel().getRoots().isEmpty()) {
            AppUIExecutor.onUiThread().expireWith(myContentManager).submit(() -> myContentManager.removeContent(content, true));
          }
        });
      }
      // With multiple tabs the main tab gets an explicit "All Services" title.
      if (myContentManager.getContentCount() > 1) {
        Content mainContent = getMainContent(myContentManager);
        if (mainContent != null) {
          mainContent.setDisplayName(ExecutionBundle.message("service.view.all.services"));
        }
      }
    }
    @Override
    public void contentRemoved(@NotNull ContentManagerEvent event) {
      ServiceView serviceView = getServiceView(event.getContent());
      if (serviceView != null && !isMainView(serviceView)) {
        myModelFilter.removeFilter(serviceView.getModel().getFilter());
        myContentHolder.processAllModels(ServiceViewModel::filtersChanged);
      }
      // Back to a single tab: the main tab needs no title.
      if (myContentManager.getContentCount() == 1) {
        Content mainContent = getMainContent(myContentManager);
        if (mainContent != null) {
          mainContent.setDisplayName(null);
        }
      }
    }
    @Override
    public void selectionChanged(@NotNull ContentManagerEvent event) {
      ServiceView serviceView = getServiceView(event.getContent());
      if (serviceView == null) return;
      // `add` means the content became selected; anything else means it was deselected.
      if (event.getOperation() == ContentManagerEvent.ContentOperation.add) {
        serviceView.onViewSelected();
      }
      else {
        serviceView.onViewUnselected();
      }
    }
  }
private static void registerActivateByContributorActions(Project project, Collection<? extends ServiceViewContributor<?>> contributors) {
  // Make sure every contributor has a registered "activate tool window" action.
  for (ServiceViewContributor<?> contributor : contributors) {
    String actionId = getActivateContributorActionId(contributor);
    if (actionId == null) {
      continue; // no stable id can be derived for this contributor
    }
    ActionManager actionManager = ActionManager.getInstance();
    if (actionManager.getAction(actionId) != null) {
      continue; // already registered
    }
    AnAction action =
      new ActivateToolWindowByContributorAction(contributor, contributor.getViewDescriptor(project).getPresentation());
    actionManager.registerAction(actionId, action);
  }
}
private static String getActivateContributorActionId(ServiceViewContributor<?> contributor) {
  // Anonymous contributor classes have an empty simple name, so no stable id exists.
  String simpleName = contributor.getClass().getSimpleName();
  if (simpleName.isEmpty()) {
    return null;
  }
  return "ServiceView.Activate" + simpleName;
}
/**
 * Handles ServiceViewContributor extensions being loaded/unloaded at runtime:
 * reloads contributor groups, registers or removes the backing tool window,
 * and keeps the activate-by-contributor actions in sync.
 */
private final class ServiceViewExtensionPointListener implements ExtensionPointListener<ServiceViewContributor<?>> {
  @Override
  public void extensionAdded(@NotNull ServiceViewContributor<?> extension, @NotNull PluginDescriptor pluginDescriptor) {
    List<ServiceViewContributor<?>> contributors = new SmartList<>(extension);
    loadGroups(contributors);
    String toolWindowId = getToolWindowId(extension.getClass());
    // First contributor in its group means the tool window itself is not yet registered.
    // NOTE(review): assumes loadGroups() created the group entry for toolWindowId — verify.
    boolean register = myGroups.get(toolWindowId).size() == 1;
    ServiceEvent e = ServiceEvent.createResetEvent(extension.getClass());
    myModel.handle(e).onSuccess(o -> {
      if (register) {
        ServiceViewItem eventRoot = ContainerUtil.find(myModel.getRoots(), root -> {
          return extension.getClass().isInstance(root.getRootContributor());
        });
        assert toolWindowId != null;
        // Second arg: whether the contributor already has roots to show.
        registerToolWindow(toolWindowId, eventRoot != null);
      }
      else {
        eventHandled(e);
      }
      if (ToolWindowId.SERVICES.equals(toolWindowId)) {
        AppUIExecutor.onUiThread().expireWith(myProject).submit(() -> registerActivateByContributorActions(myProject, contributors));
      }
    });
  }

  @Override
  public void extensionRemoved(@NotNull ServiceViewContributor<?> extension, @NotNull PluginDescriptor pluginDescriptor) {
    ServiceEvent e = ServiceEvent.createSyncResetEvent(extension.getClass());
    myModel.handle(e).onProcessed(o -> {
      eventHandled(e);
      // Drop the contributor from its group; the last one also removes the tool window.
      // Mutating myGroups inside unregisterToolWindow() is safe only because we
      // break out of the iteration immediately afterwards.
      for (Map.Entry<String, Collection<ServiceViewContributor<?>>> entry : myGroups.entrySet()) {
        if (entry.getValue().remove(extension)) {
          if (entry.getValue().isEmpty()) {
            unregisterToolWindow(entry.getKey());
          }
          break;
        }
      }
      unregisterActivateByContributorActions(extension);
    });
  }

  // Removes all bookkeeping for the tool window, then removes the window itself
  // on the UI thread (skipped if the project is already disposed/default).
  private void unregisterToolWindow(String toolWindowId) {
    myActiveToolWindowIds.remove(toolWindowId);
    myGroups.remove(toolWindowId);
    for (ServiceViewContentHolder holder : myContentHolders) {
      if (holder.toolWindowId.equals(toolWindowId)) {
        myContentHolders.remove(holder);
        break;
      }
    }
    ToolWindowManager toolWindowManager = ToolWindowManager.getInstance(myProject);
    toolWindowManager.invokeLater(() -> {
      if (myProject.isDisposed() || myProject.isDefault()) return;
      ToolWindow toolWindow = toolWindowManager.getToolWindow(toolWindowId);
      if (toolWindow != null) {
        toolWindow.remove();
      }
    });
  }

  // Unregisters the activate action created for this contributor, if present.
  private void unregisterActivateByContributorActions(ServiceViewContributor<?> extension) {
    String actionId = getActivateContributorActionId(extension);
    if (actionId != null) {
      ActionManager actionManager = ActionManager.getInstance();
      AnAction action = actionManager.getAction(actionId);
      if (action != null) {
        actionManager.unregisterAction(actionId);
      }
    }
  }
}
/**
 * Action that activates the tool window currently hosting the given
 * contributor's services and then selects that contributor's content tab.
 */
private static class ActivateToolWindowByContributorAction extends DumbAwareAction {
  private final ServiceViewContributor<?> myContributor;

  ActivateToolWindowByContributorAction(ServiceViewContributor<?> contributor, ItemPresentation contributorPresentation) {
    myContributor = contributor;
    // Build the template presentation from the contributor's own presentation.
    Presentation templatePresentation = getTemplatePresentation();
    templatePresentation.setText(ExecutionBundle.messagePointer("service.view.activate.tool.window.action.name",
                                                                ServiceViewDragHelper.getDisplayName(contributorPresentation)));
    templatePresentation.setIcon(contributorPresentation.getIcon(false));
    templatePresentation.setDescription(ExecutionBundle.messagePointer("service.view.activate.tool.window.action.description"));
  }

  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    Project project = e.getProject();
    if (project == null) return;

    // Resolve which tool window hosts this contributor right now (can change at runtime).
    String toolWindowId =
      ((ServiceViewManagerImpl)ServiceViewManager.getInstance(project)).getToolWindowId(myContributor.getClass());
    if (toolWindowId == null) return;

    ToolWindow toolWindow = ToolWindowManager.getInstance(project).getToolWindow(toolWindowId);
    if (toolWindow != null) {
      // Select the contributor's tab only after the tool window has been shown.
      toolWindow.activate(() -> {
        ServiceViewContentHolder holder =
          ((ServiceViewManagerImpl)ServiceViewManager.getInstance(project)).getContentHolder(myContributor.getClass());
        if (holder != null) {
          selectContentByContributor(holder.contentManager, myContributor);
        }
      });
    }
  }
}
/**
 * Bundles everything belonging to one service-view tool window: the main
 * (all-services) view, the tab container, the root contributors shown in it,
 * and the hosting tool window's id.
 */
private static class ServiceViewContentHolder {
  final ServiceView mainView;
  final ContentManager contentManager;
  final Collection<ServiceViewContributor<?>> rootContributors;
  final String toolWindowId;

  ServiceViewContentHolder(ServiceView mainView,
                           ContentManager contentManager,
                           Collection<ServiceViewContributor<?>> rootContributors,
                           String toolWindowId) {
    this.mainView = mainView;
    this.contentManager = contentManager;
    this.rootContributors = rootContributors;
    this.toolWindowId = toolWindowId;
  }

  // All views hosted in this holder; the main view is always included and
  // kept first even when it has no content of its own.
  List<ServiceView> getServiceViews() {
    List<ServiceView> result = ContainerUtil.mapNotNull(contentManager.getContents(), ServiceViewManagerImpl::getServiceView);
    if (result.isEmpty()) {
      return new SmartList<>(mainView);
    }
    if (!result.contains(mainView)) {
      result.add(0, mainView);
    }
    return result;
  }

  // Applies the consumer to every view's model on the models' invoker thread.
  private void processAllModels(Consumer<? super ServiceViewModel> consumer) {
    List<ServiceViewModel> models = ContainerUtil.map(getServiceViews(), ServiceView::getModel);
    ServiceViewModel first = ContainerUtil.getFirstItem(models);
    if (first == null) {
      return;
    }
    first.getInvoker().invokeLater(() -> models.forEach(consumer));
  }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.lockmgr;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.common.ValidReadTxnList;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.util.ReflectionUtils;
import java.util.*;
/**
* An implementation of {@link HiveTxnManager} that does not support
* transactions. This provides default Hive behavior.
*/
class DummyTxnManager extends HiveTxnManagerImpl {
  static final private Log LOG =
      LogFactory.getLog(DummyTxnManager.class.getName());

  // Lazily created; stays null while concurrency support is disabled.
  private HiveLockManager lockMgr;

  /**
   * Transactions are unsupported; this is a no-op returning a dummy txn id.
   */
  @Override
  public long openTxn(String user) throws LockException {
    // No-op
    return 0L;
  }

  /**
   * Returns the lock manager, creating it on first use from the class named by
   * hive.lock.manager; returns null when concurrency support is disabled.
   *
   * @throws LockException if no lock manager class is configured or the
   *           configured class cannot be instantiated/initialized
   */
  @Override
  public HiveLockManager getLockManager() throws LockException {
    if (lockMgr == null) {
      boolean supportConcurrency =
          conf.getBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY);
      if (supportConcurrency) {
        String lockMgrName =
            conf.getVar(HiveConf.ConfVars.HIVE_LOCK_MANAGER);
        if ((lockMgrName == null) || (lockMgrName.isEmpty())) {
          throw new LockException(ErrorMsg.LOCKMGR_NOT_SPECIFIED.getMsg());
        }
        try {
          LOG.info("Creating lock manager of type " + lockMgrName);
          // Instantiated reflectively so any HiveLockManager implementation
          // can be plugged in through configuration.
          lockMgr = (HiveLockManager)ReflectionUtils.newInstance(
              conf.getClassByName(lockMgrName), conf);
          lockMgr.setContext(new HiveLockManagerCtx(conf));
        } catch (Exception e) {
          // set hiveLockMgr to null just in case this invalid manager got set to
          // next query's ctx.
          if (lockMgr != null) {
            try {
              lockMgr.close();
            } catch (LockException e1) {
              //nothing can do here
            }
            lockMgr = null;
          }
          throw new LockException(ErrorMsg.LOCKMGR_NOT_INITIALIZED.getMsg() +
              e.getMessage());
        }
      } else {
        LOG.info("Concurrency mode is disabled, not creating a lock manager");
        return null;
      }
    }
    // Force a re-read of the configuration file.  This is done because
    // different queries in the session may be using the same lock manager.
    lockMgr.refresh();
    return lockMgr;
  }

  /**
   * Acquires locks for the plan's inputs (shared) and outputs (mode derived
   * from the write type) via the configured lock manager. No-op when no lock
   * manager is in use.
   *
   * @throws LockException if the locks cannot be acquired
   */
  @Override
  public void acquireLocks(QueryPlan plan, Context ctx, String username) throws LockException {
    // Make sure we've built the lock manager
    getLockManager();
    // If the lock manager is still null, then it means we aren't using a
    // lock manager
    if (lockMgr == null) return;

    List<HiveLockObj> lockObjects = new ArrayList<HiveLockObj>();

    // Sort all the inputs, outputs.
    // If a lock needs to be acquired on any partition, a read lock needs to be acquired on all
    // its parents also
    for (ReadEntity input : plan.getInputs()) {
      if (!input.needsLock()) {
        continue;
      }
      LOG.debug("Adding " + input.getName() + " to list of lock inputs");
      if (input.getType() == ReadEntity.Type.DATABASE) {
        lockObjects.addAll(getLockObjects(plan, input.getDatabase(), null,
            null, HiveLockMode.SHARED));
      } else if (input.getType() == ReadEntity.Type.TABLE) {
        lockObjects.addAll(getLockObjects(plan, null, input.getTable(), null,
            HiveLockMode.SHARED));
      } else {
        lockObjects.addAll(getLockObjects(plan, null, null,
            input.getPartition(),
            HiveLockMode.SHARED));
      }
    }

    for (WriteEntity output : plan.getOutputs()) {
      HiveLockMode lockMode = getWriteEntityLockMode(output);
      if (lockMode == null) {
        // DDL_NO_LOCK write type: nothing to lock for this output.
        continue;
      }
      LOG.debug("Adding " + output.getName() + " to list of lock outputs");
      List<HiveLockObj> lockObj = null;
      if (output.getType() == WriteEntity.Type.DATABASE) {
        lockObjects.addAll(getLockObjects(plan, output.getDatabase(), null, null, lockMode));
      } else if (output.getTyp() == WriteEntity.Type.TABLE) {
        lockObj = getLockObjects(plan, null, output.getTable(), null, lockMode);
      } else if (output.getTyp() == WriteEntity.Type.PARTITION) {
        lockObj = getLockObjects(plan, null, null, output.getPartition(), lockMode);
      }
      // In case of dynamic queries, it is possible to have incomplete dummy partitions
      else if (output.getTyp() == WriteEntity.Type.DUMMYPARTITION) {
        lockObj = getLockObjects(plan, null, null, output.getPartition(),
            HiveLockMode.SHARED);
      }

      if(lockObj != null) {
        lockObjects.addAll(lockObj);
        // Remember which locks belong to this output so callers can unlock them later.
        ctx.getOutputLockObjects().put(output, lockObj);
      }
    }

    if (lockObjects.isEmpty() && !ctx.isNeedLockMgr()) {
      return;
    }

    dedupLockObjects(lockObjects);
    // NOTE(review): second argument appears to be keepAlive=false — confirm
    // against the HiveLockManager.lock contract.
    List<HiveLock> hiveLocks = lockMgr.lock(lockObjects, false);

    if (hiveLocks == null) {
      throw new LockException(ErrorMsg.LOCK_CANNOT_BE_ACQUIRED.getMsg());
    } else {
      ctx.setHiveLocks(hiveLocks);
    }
  }

  @Override
  public void commitTxn() throws LockException {
    // No-op
  }

  @Override
  public void rollbackTxn() throws LockException {
    // No-op
  }

  @Override
  public void heartbeat() throws LockException {
    // No-op
  }

  /**
   * No transactions are tracked, so a default ValidReadTxnList is returned.
   */
  @Override
  public ValidTxnList getValidTxns() throws LockException {
    return new ValidReadTxnList();
  }

  // Explicit LOCK TABLE/UNLOCK TABLE statements are allowed with this manager.
  @Override
  public boolean supportsExplicitLock() {
    return true;
  }

  @Override
  public boolean useNewShowLocksFormat() {
    return false;
  }

  @Override
  public boolean supportsAcid() {
    return false;
  }

  // Best-effort cleanup of the lock manager on shutdown.
  protected void destruct() {
    if (lockMgr != null) {
      try {
        lockMgr.close();
      } catch (LockException e) {
        // Not much I can do about it.
        LOG.warn("Got exception when closing lock manager " + e.getMessage());
      }
    }
  }

  /**
   * Dedup the list of lock objects so that there is only one lock per table/partition.
   * If there is both a shared and exclusive lock for the same object, this will deduped
   * to just a single exclusive lock.  Package level so that the unit tests
   * can access it.  Not intended for use outside this class.
   * @param lockObjects
   */
  static void dedupLockObjects(List<HiveLockObj> lockObjects) {
    Map<String, HiveLockObj> lockMap = new HashMap<String, HiveLockObj>();
    for (HiveLockObj lockObj : lockObjects) {
      String lockName = lockObj.getName();
      HiveLockObj foundLock = lockMap.get(lockName);
      // Last writer wins per name, except an EXCLUSIVE lock always replaces
      // whatever is already stored.
      if (foundLock == null || lockObj.getMode() == HiveLockMode.EXCLUSIVE) {
        lockMap.put(lockName, lockObj);
      }
    }
    // copy set of deduped locks back to original list
    lockObjects.clear();
    for (HiveLockObj lockObj : lockMap.values()) {
      lockObjects.add(lockObj);
    }
  }

  /**
   * Maps a write entity to the lock mode to take on it, or null when no lock
   * is required (DDL_NO_LOCK).
   */
  private HiveLockMode getWriteEntityLockMode (WriteEntity we) {
    HiveLockMode lockMode = we.isComplete() ? HiveLockMode.EXCLUSIVE : HiveLockMode.SHARED;
    //but the writeEntity is complete in DDL operations, instead DDL sets the writeType, so
    //we use it to determine its lockMode, and first we check if the writeType was set
    WriteEntity.WriteType writeType = we.getWriteType();
    if (writeType == null) {
      return lockMode;
    }
    switch (writeType) {
      case DDL_EXCLUSIVE:
        return HiveLockMode.EXCLUSIVE;
      case DDL_SHARED:
        return HiveLockMode.SHARED;
      case DDL_NO_LOCK:
        return null;
      default: //other writeTypes related to DMLs
        return lockMode;
    }
  }

  /**
   * Builds lock objects for exactly one of db/table/partition (first non-null
   * argument wins), adding SHARED locks on all parents: the database for a
   * table, and every partial partition spec, the table and the database for
   * a partition.
   */
  private List<HiveLockObj> getLockObjects(QueryPlan plan, Database db,
                                           Table t, Partition p,
                                           HiveLockMode mode)
      throws LockException {
    List<HiveLockObj> locks = new LinkedList<HiveLockObj>();

    HiveLockObject.HiveLockObjectData lockData =
        new HiveLockObject.HiveLockObjectData(plan.getQueryId(),
            String.valueOf(System.currentTimeMillis()),
            "IMPLICIT",
            plan.getQueryStr());

    if (db != null) {
      locks.add(new HiveLockObj(new HiveLockObject(db.getName(), lockData),
          mode));
      return locks;
    }

    if (t != null) {
      locks.add(new HiveLockObj(new HiveLockObject(t, lockData), mode));
      // The parent database is always locked in shared mode.
      mode = HiveLockMode.SHARED;
      locks.add(new HiveLockObj(new HiveLockObject(t.getDbName(), lockData), mode));
      return locks;
    }

    if (p != null) {
      if (!(p instanceof DummyPartition)) {
        locks.add(new HiveLockObj(new HiveLockObject(p, lockData), mode));
      }

      // All the parents are locked in shared mode
      mode = HiveLockMode.SHARED;

      // For dummy partitions, only partition name is needed
      String name = p.getName();
      if (p instanceof DummyPartition) {
        // NOTE(review): assumes dummy partition names have the form
        // "db@table@partName" — confirm against DummyPartition naming.
        name = p.getName().split("@")[2];
      }

      String partialName = "";
      String[] partns = name.split("/");
      // For real partitions skip the leaf spec (already locked above).
      int len = p instanceof DummyPartition ? partns.length : partns.length - 1;
      Map<String, String> partialSpec = new LinkedHashMap<String, String>();
      for (int idx = 0; idx < len; idx++) {
        String partn = partns[idx];
        partialName += partn;
        String[] nameValue = partn.split("=");
        assert(nameValue.length == 2);
        partialSpec.put(nameValue[0], nameValue[1]);
        try {
          // Lock each partial (ancestor) partition spec in shared mode.
          locks.add(new HiveLockObj(
              new HiveLockObject(new DummyPartition(p.getTable(), p.getTable().getDbName()
                  + "/" + p.getTable().getTableName()
                  + "/" + partialName,
                  partialSpec), lockData), mode));
          partialName += "/";
        } catch (HiveException e) {
          throw new LockException(e.getMessage());
        }
      }

      locks.add(new HiveLockObj(new HiveLockObject(p.getTable(), lockData), mode));
      locks.add(new HiveLockObj(new HiveLockObject(p.getTable().getDbName(), lockData), mode));
    }
    return locks;
  }
}
| |
/**********************************************************************************
* $URL: https://source.sakaiproject.org/svn/common/tags/sakai-10.6/common-composite-component/src/java/org/sakaiproject/component/common/type/TypeManagerImpl.java $
* $Id: TypeManagerImpl.java 125281 2013-05-31 03:42:46Z nbotimer@unicon.net $
***********************************************************************************
*
* Copyright (c) 2003, 2004, 2005, 2006 The Sakai Foundation.
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
**********************************************************************************/
package org.sakaiproject.component.common.type;
import java.sql.SQLException;
import org.hibernate.HibernateException;
import org.hibernate.Query;
import org.hibernate.Session;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.api.common.type.Type;
import org.sakaiproject.api.common.type.TypeManager;
import org.sakaiproject.component.common.manager.PersistableHelper;
import org.sakaiproject.id.cover.IdManager;
import org.springframework.orm.hibernate3.HibernateCallback;
import org.springframework.orm.hibernate3.support.HibernateDaoSupport;
/**
* @author <a href="mailto:lance@indiana.edu">Lance Speelmon </a>
*/
/**
 * Hibernate-backed implementation of {@link TypeManager}. Types are created,
 * saved and looked up through named queries; deletion is explicitly
 * unsupported.
 *
 * @author <a href="mailto:lance@indiana.edu">Lance Speelmon </a>
 */
public class TypeManagerImpl extends HibernateDaoSupport implements TypeManager
{
	private static final Log LOG = LogFactory.getLog(TypeManagerImpl.class);

	// Named-query names and parameter names as declared in the Hibernate
	// mapping. ID/FINDTYPEBYID are retained for the id-based finder that may
	// be reinstated; they are currently unused.
	private static final String ID = "id";

	private static final String FINDTYPEBYID = "findTypeById";

	private static final String UUID = "uuid";

	private static final String FINDTYPEBYUUID = "findTypeByUuid";

	private static final String AUTHORITY = "authority";

	private static final String DOMAIN = "domain";

	private static final String KEYWORD = "keyword";

	private static final String FINDTYPEBYTUPLE = "findTypeByTuple";

	// Per-finder toggles for Hibernate's query cache, injectable via setters.
	private boolean cacheFindTypeByTuple = true;

	private boolean cacheFindTypeByUuid = true;

	private boolean cacheFindTypeById = true;

	private PersistableHelper persistableHelper; // dep inj

	/**
	 * Creates and persists a new {@link Type} with a freshly generated UUID.
	 *
	 * @param authority required, non-empty.
	 * @param domain required, non-empty.
	 * @param keyword required, non-empty.
	 * @param displayName required, non-empty.
	 * @param description optional; may be null.
	 * @return the persisted Type.
	 * @throws IllegalArgumentException if any required argument is null or empty.
	 * @see org.sakaiproject.api.common.type.TypeManager#createType(java.lang.String, java.lang.String, java.lang.String, java.lang.String, java.lang.String)
	 */
	public Type createType(String authority, String domain, String keyword, String displayName, String description)
	{
		if (LOG.isDebugEnabled())
		{
			LOG.debug("createType(String " + authority + ", String " + domain + ", String " + keyword + ", String " + displayName
					+ ", String " + description + ")");
		}
		// validation
		if (authority == null || authority.length() < 1) throw new IllegalArgumentException("authority");
		if (domain == null || domain.length() < 1) throw new IllegalArgumentException("domain");
		if (keyword == null || keyword.length() < 1) throw new IllegalArgumentException("keyword");
		if (displayName == null || displayName.length() < 1) throw new IllegalArgumentException("displayName");

		TypeImpl ti = new TypeImpl();
		persistableHelper.createPersistableFields(ti);
		ti.setUuid(IdManager.createUuid());
		ti.setAuthority(authority);
		ti.setDomain(domain);
		ti.setKeyword(keyword);
		ti.setDisplayName(displayName);
		ti.setDescription(description);
		getHibernateTemplate().save(ti);
		return ti;
	}

	/**
	 * Saves or updates a Type previously created by this manager.
	 *
	 * @param type required; must be this implementation's {@link TypeImpl}.
	 * @throws IllegalArgumentException if type is null.
	 * @throws IllegalAccessError if type is a foreign Type implementation.
	 */
	public void saveType(Type type)
	{
		if (LOG.isDebugEnabled())
		{
			LOG.debug("saveType(Type " + type + ")");
		}
		if (type == null) throw new IllegalArgumentException("type");

		if (type instanceof TypeImpl)
		{ // found well known Type
			TypeImpl ti = (TypeImpl) type;
			persistableHelper.modifyPersistableFields(ti);
			getHibernateTemplate().saveOrUpdate(ti);
		}
		else
		{ // found external Type
			throw new IllegalAccessError("Alternate Type implementations not supported yet.");
		}
	}

	/**
	 * Looks up a Type by its UUID via the findTypeByUuid named query.
	 *
	 * @param uuid required, non-empty.
	 * @return the matching Type, or null if none exists.
	 * @see org.sakaiproject.api.common.type.TypeManager#getType(java.lang.String)
	 */
	public Type getType(final String uuid)
	{
		if (LOG.isDebugEnabled())
		{
			LOG.debug("getType(String " + uuid + ")");
		}
		if (uuid == null || uuid.length() < 1)
		{
			throw new IllegalArgumentException("uuid");
		}

		final HibernateCallback hcb = new HibernateCallback()
		{
			public Object doInHibernate(Session session) throws HibernateException, SQLException
			{
				Query q = session.getNamedQuery(FINDTYPEBYUUID);
				q.setString(UUID, uuid);
				q.setCacheable(cacheFindTypeByUuid);
				q.setCacheRegion(Type.class.getCanonicalName());
				return q.uniqueResult();
			}
		};
		Type type = (Type) getHibernateTemplate().execute(hcb);
		return type;
	}

	/**
	 * Looks up a Type by its (authority, domain, keyword) tuple via the
	 * findTypeByTuple named query.
	 *
	 * @param authority required, non-empty.
	 * @param domain required, non-empty.
	 * @param keyword required, non-empty.
	 * @return the matching Type, or null if none exists.
	 * @see org.sakaiproject.api.common.type.TypeManager#getType(java.lang.String, java.lang.String, java.lang.String)
	 */
	public Type getType(final String authority, final String domain, final String keyword)
	{
		if (LOG.isDebugEnabled())
		{
			LOG.debug("getType(String " + authority + ", String " + domain + ", String " + keyword + ")");
		}
		// validation
		if (authority == null || authority.length() < 1) throw new IllegalArgumentException("authority");
		if (domain == null || domain.length() < 1) throw new IllegalArgumentException("domain");
		if (keyword == null || keyword.length() < 1) throw new IllegalArgumentException("keyword");

		final HibernateCallback hcb = new HibernateCallback()
		{
			public Object doInHibernate(Session session) throws HibernateException, SQLException
			{
				Query q = session.getNamedQuery(FINDTYPEBYTUPLE);
				q.setString(AUTHORITY, authority);
				q.setString(DOMAIN, domain);
				q.setString(KEYWORD, keyword);
				q.setCacheable(cacheFindTypeByTuple);
				q.setCacheRegion(Type.class.getCanonicalName());
				return q.uniqueResult();
			}
		};
		Type type = (Type) getHibernateTemplate().execute(hcb);
		return type;
	}

	/**
	 * @param cacheFindTypeByTuple
	 *        The cacheFindTypeByTuple to set.
	 */
	public void setCacheFindTypeByTuple(boolean cacheFindTypeByTuple)
	{
		if (LOG.isInfoEnabled())
		{
			LOG.info("setCacheFindTypeByTuple(boolean " + cacheFindTypeByTuple + ")");
		}
		this.cacheFindTypeByTuple = cacheFindTypeByTuple;
	}

	/**
	 * @param cacheFindTypeByUuid
	 *        The cacheFindTypeByUuid to set.
	 */
	public void setCacheFindTypeByUuid(boolean cacheFindTypeByUuid)
	{
		if (LOG.isInfoEnabled())
		{
			LOG.info("setCacheFindTypeByUuid(boolean " + cacheFindTypeByUuid + ")");
		}
		this.cacheFindTypeByUuid = cacheFindTypeByUuid;
	}

	/**
	 * @param cacheFindTypeById
	 *        The cacheFindTypeById to set.
	 */
	public void setCacheFindTypeById(boolean cacheFindTypeById)
	{
		if (LOG.isInfoEnabled())
		{
			LOG.info("setCacheFindTypeById(boolean " + cacheFindTypeById + ")");
		}
		this.cacheFindTypeById = cacheFindTypeById;
	}

	/**
	 * Deletion is deliberately unsupported: existing data may reference any Type.
	 *
	 * @throws UnsupportedOperationException always.
	 */
	public void deleteType(Type type)
	{
		if (LOG.isDebugEnabled())
		{
			LOG.debug("deleteType(Type " + type + ")");
		}
		throw new UnsupportedOperationException("Types should never be deleted!");
	}

	/**
	 * @param persistableHelper
	 *        The persistableHelper to set.
	 */
	public void setPersistableHelper(PersistableHelper persistableHelper)
	{
		this.persistableHelper = persistableHelper;
	}
}
| |
// Copyright (C) 2013 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.rest.project;
import static com.google.gerrit.acceptance.rest.project.ProjectAssert.assertProjectInfo;
import static com.google.gerrit.acceptance.rest.project.ProjectAssert.assertProjectOwners;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.RestResponse;
import com.google.gerrit.extensions.api.GerritApi;
import com.google.gerrit.extensions.api.projects.ProjectApi;
import com.google.gerrit.extensions.api.projects.ProjectInput;
import com.google.gerrit.extensions.common.InheritableBoolean;
import com.google.gerrit.extensions.common.ProjectInfo;
import com.google.gerrit.extensions.common.SubmitType;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.reviewdb.client.AccountGroup;
import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.account.GroupCache;
import com.google.gerrit.server.git.GitRepositoryManager;
import com.google.gerrit.server.group.SystemGroupBackend;
import com.google.gerrit.server.project.ProjectCache;
import com.google.gerrit.server.project.ProjectState;
import com.google.gwtorm.server.OrmException;
import com.google.inject.Inject;
import com.jcraft.jsch.JSchException;
import org.apache.http.HttpStatus;
import org.eclipse.jgit.errors.RepositoryNotFoundException;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.treewalk.TreeWalk;
import org.junit.Test;
import java.io.IOException;
import java.util.Collections;
import java.util.Set;
public class CreateProjectIT extends AbstractDaemonTest {
@Inject
private ProjectCache projectCache;
@Inject
private GroupCache groupCache;
@Inject
private GitRepositoryManager git;
@Inject
private GerritApi gApi;
@Test
public void testCreateProjectApi() throws RestApiException, IOException {
  // Create a project through the extension API and verify cache state and HEAD.
  String projectName = "newProject";
  ProjectApi projectApi = gApi.projects().name(projectName).create();
  ProjectInfo info = projectApi.get();
  assertEquals(projectName, info.name);
  ProjectState state = projectCache.get(new Project.NameKey(projectName));
  assertNotNull(state);
  assertProjectInfo(state.getProject(), info);
  assertHead(projectName, "refs/heads/master");
}
@Test
public void testCreateProject() throws IOException {
  // Create a project over REST and verify status, cache state and default HEAD.
  String projectName = "newProject";
  RestResponse response = adminSession.put("/projects/" + projectName);
  assertEquals(HttpStatus.SC_CREATED, response.getStatusCode());
  ProjectInfo info = newGson().fromJson(response.getReader(), ProjectInfo.class);
  assertEquals(projectName, info.name);
  ProjectState state = projectCache.get(new Project.NameKey(projectName));
  assertNotNull(state);
  assertProjectInfo(state.getProject(), info);
  assertHead(projectName, "refs/heads/master");
}
@Test
public void testCreateProjectWithNameMismatch_BadRequest() throws IOException {
  // A body whose name differs from the URL's project name must be rejected.
  ProjectInput input = new ProjectInput();
  input.name = "otherName";
  RestResponse response = adminSession.put("/projects/someName", input);
  assertEquals(HttpStatus.SC_BAD_REQUEST, response.getStatusCode());
}
@Test
public void testCreateProjectWithProperties() throws IOException {
  // Creating a project with explicit settings must persist each of them.
  final String newProjectName = "newProject";
  ProjectInput in = new ProjectInput();
  in.description = "Test description";
  in.submitType = SubmitType.CHERRY_PICK;
  in.useContributorAgreements = InheritableBoolean.TRUE;
  in.useSignedOffBy = InheritableBoolean.TRUE;
  in.useContentMerge = InheritableBoolean.TRUE;
  in.requireChangeId = InheritableBoolean.TRUE;
  RestResponse r = adminSession.put("/projects/" + newProjectName, in);
  // Fix: assert the creation succeeded before parsing the body (consistent
  // with testCreateProject); previously a failed PUT surfaced as a confusing
  // JSON parse error instead of a status mismatch.
  assertEquals(HttpStatus.SC_CREATED, r.getStatusCode());
  ProjectInfo p = newGson().fromJson(r.getReader(), ProjectInfo.class);
  assertEquals(newProjectName, p.name);
  Project project = projectCache.get(new Project.NameKey(newProjectName)).getProject();
  assertProjectInfo(project, p);
  assertEquals(in.description, project.getDescription());
  assertEquals(in.submitType, project.getSubmitType());
  assertEquals(in.useContributorAgreements, project.getUseContributorAgreements());
  assertEquals(in.useSignedOffBy, project.getUseSignedOffBy());
  assertEquals(in.useContentMerge, project.getUseContentMerge());
  assertEquals(in.requireChangeId, project.getRequireChangeID());
}
@Test
public void testCreateChildProject() throws IOException {
  // A project created with "parent" set must be linked under that parent.
  String parentName = "parent";
  RestResponse response = adminSession.put("/projects/" + parentName);
  response.consume();
  String childName = "child";
  ProjectInput input = new ProjectInput();
  input.parent = parentName;
  response = adminSession.put("/projects/" + childName, input);
  Project childProject = projectCache.get(new Project.NameKey(childName)).getProject();
  assertEquals(input.parent, childProject.getParentName());
}
@Test
public void testCreateChildProjectUnderNonExistingParent_UnprocessableEntity()
    throws IOException {
  // Referencing a missing parent project must be rejected with 422.
  ProjectInput input = new ProjectInput();
  input.parent = "non-existing-project";
  RestResponse response = adminSession.put("/projects/child", input);
  assertEquals(HttpStatus.SC_UNPROCESSABLE_ENTITY, response.getStatusCode());
}
@Test
public void testCreateProjectWithOwner() throws IOException {
  // Owners may be specified by name, by UUID or by legacy numeric id; all
  // three must resolve to owner group UUIDs on the created project.
  final String newProjectName = "newProject";
  ProjectInput in = new ProjectInput();
  in.owners = Lists.newArrayListWithCapacity(3);
  in.owners.add("Anonymous Users"); // by name
  in.owners.add(SystemGroupBackend.REGISTERED_USERS.get()); // by UUID
  in.owners.add(Integer.toString(groupCache.get(
      new AccountGroup.NameKey("Administrators")).getId().get())); // by ID
  adminSession.put("/projects/" + newProjectName, in);
  ProjectState projectState = projectCache.get(new Project.NameKey(newProjectName));
  Set<AccountGroup.UUID> expectedOwnerIds = Sets.newHashSetWithExpectedSize(3);
  expectedOwnerIds.add(SystemGroupBackend.ANONYMOUS_USERS);
  expectedOwnerIds.add(SystemGroupBackend.REGISTERED_USERS);
  expectedOwnerIds.add(groupUuid("Administrators"));
  assertProjectOwners(expectedOwnerIds, projectState);
}
@Test
public void testCreateProjectWithNonExistingOwner_UnprocessableEntity()
    throws IOException {
  // An owner group that cannot be resolved must be rejected with 422.
  ProjectInput input = new ProjectInput();
  input.owners = Collections.singletonList("non-existing-group");
  RestResponse response = adminSession.put("/projects/newProject", input);
  assertEquals(HttpStatus.SC_UNPROCESSABLE_ENTITY, response.getStatusCode());
}
@Test
public void testCreatePermissionOnlyProject() throws IOException {
  // A permissions-only project carries no code: its HEAD points at the
  // project config branch rather than at a regular branch.
  final String name = "newProject";
  ProjectInput input = new ProjectInput();
  input.permissionsOnly = true;
  adminSession.put("/projects/" + name, input);
  assertHead(name, RefNames.REFS_CONFIG);
}
@Test
public void testCreateProjectWithEmptyCommit() throws IOException {
  // With createEmptyCommit the new repository's master branch must contain
  // a single commit with an empty tree.
  final String name = "newProject";
  ProjectInput input = new ProjectInput();
  input.createEmptyCommit = true;
  adminSession.put("/projects/" + name, input);
  assertEmptyCommit(name, "refs/heads/master");
}
@Test
public void testCreateProjectWithBranches() throws IOException {
  final String name = "newProject";

  // Branch names may be given with or without the 'refs/heads/' prefix;
  // the first listed branch becomes HEAD.
  ProjectInput input = new ProjectInput();
  input.createEmptyCommit = true;
  input.branches = Lists.newArrayListWithCapacity(3);
  input.branches.add("refs/heads/test");
  input.branches.add("refs/heads/master");
  input.branches.add("release"); // without 'refs/heads' prefix
  adminSession.put("/projects/" + name, input);

  assertHead(name, "refs/heads/test");
  assertEmptyCommit(name, "refs/heads/test", "refs/heads/master",
      "refs/heads/release");
}
@Test
public void testCreateProjectWithoutCapability_Forbidden() throws OrmException,
    JSchException, IOException {
  // A regular user lacks the create-project capability and must get 403.
  RestResponse response = userSession.put("/projects/newProject");
  assertEquals(HttpStatus.SC_FORBIDDEN, response.getStatusCode());
}
@Test
public void testCreateProjectWhenProjectAlreadyExists_Conflict()
    throws OrmException, JSchException, IOException {
  // All-Projects always exists, so re-creating it must yield 409 Conflict.
  RestResponse response = adminSession.put("/projects/All-Projects");
  assertEquals(HttpStatus.SC_CONFLICT, response.getStatusCode());
}
/** Resolves a group name to its UUID via the group cache. */
private AccountGroup.UUID groupUuid(String groupName) {
  AccountGroup group = groupCache.get(new AccountGroup.NameKey(groupName));
  return group.getGroupUUID();
}
/**
 * Asserts that HEAD of the named project's repository is a symbolic ref
 * pointing at {@code expectedRef}.
 */
private void assertHead(String projectName, String expectedRef)
    throws RepositoryNotFoundException, IOException {
  Repository repo = git.openRepository(new Project.NameKey(projectName));
  try {
    String headTarget = repo.getRef(Constants.HEAD).getTarget().getName();
    assertEquals(expectedRef, headTarget);
  } finally {
    repo.close();
  }
}
/**
 * Asserts that each of the given refs in the named project points at a
 * commit whose tree is empty.
 */
private void assertEmptyCommit(String projectName, String... refs)
    throws RepositoryNotFoundException, IOException {
  Repository repo = git.openRepository(new Project.NameKey(projectName));
  RevWalk revWalk = new RevWalk(repo);
  TreeWalk treeWalk = new TreeWalk(repo);
  try {
    for (String ref : refs) {
      RevCommit commit = revWalk.lookupCommit(repo.getRef(ref).getObjectId());
      revWalk.parseBody(commit);
      // An empty tree yields no entries, so next() must return false.
      treeWalk.addTree(commit.getTree());
      assertFalse("ref " + ref + " has non empty commit", treeWalk.next());
      treeWalk.reset();
    }
  } finally {
    revWalk.release();
    repo.close();
  }
}
}
| |
/*
* Copyright 2009 Perforce Software Inc., All Rights Reserved.
*/
package com.perforce.p4java.impl.mapbased.rpc.func.helper;
import java.util.Map;
import com.perforce.p4java.impl.mapbased.MapKeys;
/**
 * Provides unmapping services to the P4Java RPC implementation.
 * These are not what they probably sound like -- they're basically
 * a way to serialise an input map for something like a changelist
 * or job onto a single byte buffer to be sent to the server as a
 * single data argument with newlines, tabs, etc.<p>
 *
 * The need for this will probably go away when we refactor the upper
 * levels of P4Java to optimise and rationalise the use of maps overall.<p>
 *
 * Implementation note: the per-spec methods below all share the same small
 * set of private append helpers; the previous copy-pasted per-field code had
 * already drifted in places (e.g. the job036074 null-Host fix existed only
 * for one field), so the common logic is now defined exactly once.
 */
public class MapUnmapper {

    /**
     * Unmap a change list. Absolutely no sanity or other checks are done on
     * the passed-in map...
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapChangelistMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            // Change is emitted unconditionally; a missing key is rendered as
            // the string "null" (historical behaviour, preserved).
            appendPair(strBuf, MapKeys.CHANGE_KEY, inMap.get(MapKeys.CHANGE_KEY));
            appendPairIfNonNull(strBuf, inMap, MapKeys.CLIENT_KEY);
            appendPairIfNonNull(strBuf, inMap, MapKeys.USER_KEY);
            appendPairIfNonNull(strBuf, inMap, MapKeys.TYPE_KEY);
            appendPairIfNonNull(strBuf, inMap, MapKeys.STATUS_KEY);
            appendPairIfNonNull(strBuf, inMap, MapKeys.DATE_KEY);
            appendDescription(strBuf, inMap);
            // The Files: header is always emitted, even for an empty list;
            // the Jobs: header only when at least one job entry exists.
            appendList(strBuf, inMap, MapKeys.FILES_KEY, true);
            strBuf.append(MapKeys.LF);
            appendList(strBuf, inMap, MapKeys.JOBS_KEY, false);
            strBuf.append(MapKeys.LF);
        }
    }

    /**
     * Unmap a job. Jobs basically have free format defined by the associated
     * jobspec (which we don't have access to here), so we have to try to the
     * best we can with what we've got -- which is to dump the map to the strbuf
     * while guessing at things like string formats, etc. This may prove
     * error-prone in the long run.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapJobMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            for (Map.Entry<String, Object> entry : inMap.entrySet()) {
                // Every value is treated as a (possibly multi-line) string.
                appendPair(strBuf, entry.getKey(),
                        replaceNewlines((String) entry.getValue()));
            }
        }
    }

    /**
     * Unmap a client map. Similar in intent and execution to unmapJobMap.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapClientMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendPair(strBuf, MapKeys.CLIENT_KEY, inMap.get(MapKeys.CLIENT_KEY));
            appendPairIfNonNull(strBuf, inMap, MapKeys.OWNER_KEY);
            // Fix for job036074: a null Host must not be turned into the
            // "null" string but omitted from the map string entirely.
            appendPairIfNonNull(strBuf, inMap, MapKeys.HOST_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.UPDATE_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.ACCESS_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.OPTIONS_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.SUBMITOPTIONS_KEY);
            appendPair(strBuf, MapKeys.ROOT_KEY, inMap.get(MapKeys.ROOT_KEY));
            appendPairIfPresent(strBuf, inMap, MapKeys.LINEEND_KEY);
            appendDescription(strBuf, inMap);
            appendList(strBuf, inMap, MapKeys.VIEW_KEY, true);
            strBuf.append(MapKeys.LF);
            appendList(strBuf, inMap, MapKeys.ALTROOTS_KEY, false);
            strBuf.append(MapKeys.LF);
            appendPairIfPresent(strBuf, inMap, MapKeys.STREAM_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.SERVERID_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.STREAMATCHANGE_KEY);
        }
    }

    /**
     * Unmap a Perforce user map.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapUserMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendPair(strBuf, MapKeys.USER_KEY, inMap.get(MapKeys.USER_KEY));
            appendPairIfPresent(strBuf, inMap, MapKeys.EMAIL_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.FULLNAME_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.JOBVIEW_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.PASSWORD_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.TYPE_KEY);
            appendList(strBuf, inMap, MapKeys.REVIEWS_KEY, false);
        }
    }

    /**
     * Unmap a Perforce user group map.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapUserGroupMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendPair(strBuf, MapKeys.GROUP_KEY, inMap.get(MapKeys.GROUP_KEY));
            appendPairIfPresent(strBuf, inMap, MapKeys.MAXRESULTS_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.MAXSCANROWS_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.MAXLOCKTIME_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.TIMEOUT_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.PASSWORD_TIMEOUT_KEY);
            appendList(strBuf, inMap, MapKeys.SUBGROUPS_KEY, false);
            appendList(strBuf, inMap, MapKeys.OWNERS_KEY, false);
            appendList(strBuf, inMap, MapKeys.USERS_KEY, false);
        }
    }

    /**
     * Unmap a Label Perforce label.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapLabelMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendPair(strBuf, MapKeys.LABEL_KEY, inMap.get(MapKeys.LABEL_KEY));
            appendPair(strBuf, MapKeys.OWNER_KEY, inMap.get(MapKeys.OWNER_KEY));
            appendPairIfPresent(strBuf, inMap, MapKeys.UPDATE_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.ACCESS_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.REVISION_KEY);
            appendDescription(strBuf, inMap);
            appendPairIfPresent(strBuf, inMap, MapKeys.OPTIONS_KEY);
            appendList(strBuf, inMap, MapKeys.VIEW_KEY, false);
        }
    }

    /**
     * Unmap a BranchSpec spec.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapBranchMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendPair(strBuf, MapKeys.BRANCH_KEY, inMap.get(MapKeys.BRANCH_KEY));
            appendPairIfPresent(strBuf, inMap, MapKeys.OWNER_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.UPDATE_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.ACCESS_KEY);
            appendDescription(strBuf, inMap);
            appendPairIfPresent(strBuf, inMap, MapKeys.OPTIONS_KEY);
            appendList(strBuf, inMap, MapKeys.VIEW_KEY, false);
        }
    }

    /**
     * Unmap a depot map.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapDepotMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendPair(strBuf, MapKeys.DEPOT_KEY, inMap.get(MapKeys.DEPOT_KEY));
            appendPairIfPresent(strBuf, inMap, MapKeys.OWNER_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.DATE_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.TYPE_KEY);
            appendDescription(strBuf, inMap);
            appendPairIfPresent(strBuf, inMap, MapKeys.ADDRESS_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.SUFFIX_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.MAP_KEY);
        }
    }

    /**
     * Unmap a list of protection entries.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapProtectionEntriesMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendList(strBuf, inMap, MapKeys.PROTECTIONS_KEY, false);
        }
    }

    /**
     * Unmap a stream map.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapStreamMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendPairIfPresent(strBuf, inMap, MapKeys.STREAM_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.TYPE_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.PARENT_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.NAME_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.OWNER_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.UPDATE_KEY);
            appendPairIfPresent(strBuf, inMap, MapKeys.ACCESS_KEY);
            appendDescription(strBuf, inMap);
            appendPairIfPresent(strBuf, inMap, MapKeys.OPTIONS_KEY);
            appendList(strBuf, inMap, MapKeys.PATHS_KEY, false);
            appendList(strBuf, inMap, MapKeys.REMAPPED_KEY, false);
            appendList(strBuf, inMap, MapKeys.IGNORED_KEY, false);
        }
    }

    /**
     * Unmap a list of trigger entries.
     *
     * @param inMap map to be serialised; silently ignored if null
     * @param strBuf buffer the form text is appended to; silently ignored if null
     */
    public static void unmapTriggerEntriesMap(Map<String, Object> inMap,
            StringBuffer strBuf) {
        if ((inMap != null) && (strBuf != null)) {
            appendList(strBuf, inMap, MapKeys.TRIGGERS_KEY, false);
        }
    }

    /**
     * Prepare a (possibly multi-line) string for use in a form field: each
     * line is prefixed with a tab and terminated with a newline. Useful for
     * various multi-line form inputs.
     *
     * @param str candidate string; may be null
     * @return the tab-indented, newline-terminated string, or null if str was null
     */
    public static String replaceNewlines(String str) {
        if (str != null) {
            String[] strs = str.split(MapKeys.LF);
            if (strs.length == 1) {
                return MapKeys.TAB + str + MapKeys.LF;
            } else {
                StringBuilder retStr = new StringBuilder();
                for (String s : strs) {
                    retStr.append(MapKeys.TAB);
                    retStr.append(s);
                    retStr.append(MapKeys.LF);
                }
                return retStr.toString();
            }
        }
        return null;
    }

    /**
     * Append "key: value" followed by a blank line. A null value is rendered
     * as the string "null" (matching the historical concatenation behaviour).
     */
    private static void appendPair(StringBuffer strBuf, String key, Object value) {
        strBuf.append(key + MapKeys.COLON_SPACE + value + MapKeys.DOUBLE_LF);
    }

    /**
     * Append "key: value" only when the map yields a non-null value for the
     * key (i.e. a null or absent value omits the field entirely).
     */
    private static void appendPairIfNonNull(StringBuffer strBuf,
            Map<String, Object> inMap, String key) {
        Object value = inMap.get(key);
        if (value != null) {
            appendPair(strBuf, key, value);
        }
    }

    /**
     * Append "key: value" only when the key is present in the map. Note this
     * deliberately differs from appendPairIfNonNull: a present-but-null value
     * is still rendered as "null", as the original per-field code did.
     */
    private static void appendPairIfPresent(StringBuffer strBuf,
            Map<String, Object> inMap, String key) {
        if (inMap.containsKey(key)) {
            appendPair(strBuf, key, inMap.get(key));
        }
    }

    /**
     * Append the multi-line Description field: "Description:" on its own
     * line, then the tab-indented description (empty when absent), then a
     * newline.
     */
    private static void appendDescription(StringBuffer strBuf,
            Map<String, Object> inMap) {
        String descr = replaceNewlines((String) inMap.get(MapKeys.DESCRIPTION_KEY));
        strBuf.append(MapKeys.DESCRIPTION_KEY + MapKeys.COLON_LF
                + (descr == null ? MapKeys.EMPTY : descr) + MapKeys.LF);
    }

    /**
     * Append a tab-indented list section whose entries are stored under the
     * keys key0, key1, key2, ... in order, stopping at the first missing
     * index. When headerAlways is false the "key:" header line is only
     * emitted if at least one entry exists; no trailing blank line is
     * appended (callers add one where the original format requires it).
     */
    private static void appendList(StringBuffer strBuf,
            Map<String, Object> inMap, String key, boolean headerAlways) {
        if (headerAlways) {
            strBuf.append(key + MapKeys.COLON_LF);
        }
        for (int i = 0; ; i++) {
            String entry = (String) inMap.get(key + i);
            if (entry == null) {
                break;
            }
            if (!headerAlways && i == 0) {
                strBuf.append(key + MapKeys.COLON_LF);
            }
            strBuf.append(MapKeys.TAB + entry + MapKeys.LF);
        }
    }
}
| |
/*
* Copyright 2013 Proofpoint, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.proofpoint.reporting;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Table;
import com.proofpoint.http.client.HttpClient;
import com.proofpoint.http.client.HttpStatus;
import com.proofpoint.http.client.Request;
import com.proofpoint.http.client.Response;
import com.proofpoint.http.client.testing.TestingHttpClient;
import com.proofpoint.http.client.testing.TestingHttpClient.Processor;
import com.proofpoint.json.ObjectMapperProvider;
import com.proofpoint.node.NodeConfig;
import com.proofpoint.node.NodeInfo;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.zip.GZIPInputStream;
import static com.google.common.base.Throwables.throwIfUnchecked;
import static com.proofpoint.http.client.testing.BodySourceTester.writeBodySourceTo;
import static com.proofpoint.http.client.testing.TestingResponse.mockResponse;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNull;
public class TestReportClient
{
    private static final int TEST_TIME = 1234567890;

    private NodeInfo nodeInfo;
    private Table<String, Map<String, String>, Object> collectedData;
    private HttpClient httpClient;
    // Populated by TestingResponseFunction with the (decompressed, parsed,
    // name-sorted) JSON body of the request the client sent; null until then.
    private List<Map<String, Object>> sentJson;
    private final ObjectMapper objectMapper = new ObjectMapperProvider().get();

    @BeforeMethod
    public void setup()
    {
        // Fresh node metadata, sample datapoints, and capturing HTTP client
        // before every test method.
        nodeInfo = new NodeInfo("test-application", new NodeConfig()
                .setEnvironment("test_environment")
                .setNodeInternalHostname("test.hostname")
                .setPool("test_pool")
        );
        collectedData = HashBasedTable.create();
        collectedData.put("Foo.Size", ImmutableMap.of(), 1.1);
        collectedData.put("Foo.Ba:r.Size", ImmutableMap.of("tag1", "B\\a\"z"), 1.2);
        httpClient = new TestingHttpClient(new TestingResponseFunction());
        sentJson = null;
    }

    @Test
    public void testReportingDisabled()
    {
        // A processor-less TestingHttpClient fails any request, so this test
        // passes only if the disabled client never performs one.
        httpClient = new TestingHttpClient();
        ReportClient client = new ReportClient(nodeInfo, httpClient, new ReportClientConfig().setEnabled(false), new ReportTagConfig(), objectMapper);
        client.report(System.currentTimeMillis(), collectedData);
    }

    @Test
    public void testReportData()
    {
        ReportClient client = new ReportClient(nodeInfo, httpClient, new ReportClientConfig(), new ReportTagConfig(), objectMapper);
        client.report(TEST_TIME, collectedData);

        // Two collected datapoints -> two JSON objects in the request body.
        assertEquals(sentJson.size(), 2);
        for (Map<String, Object> datapoint : sentJson) {
            assertEquals(datapoint.keySet(), ImmutableSet.of("name", "timestamp", "value", "tags"));
            assertEquals(datapoint.get("timestamp"), TEST_TIME);
            Map<String, String> tagMap = (Map<String, String>) datapoint.get("tags");
            assertEquals(tagMap.get("application"), "test-application");
            assertEquals(tagMap.get("host"), "test.hostname");
            assertEquals(tagMap.get("environment"), "test_environment");
            assertEquals(tagMap.get("pool"), "test_pool");
        }
        // sentJson is sorted by name by the fake server; special characters
        // in names and tag values are expected to be replaced with '_'.
        assertEquals(sentJson.get(0).get("name"), "Foo.Ba_r.Size");
        assertEquals(sentJson.get(1).get("name"), "Foo.Size");
        assertEquals(sentJson.get(0).get("value"), 1.2);
        assertEquals(sentJson.get(1).get("value"), 1.1);
        Map<String, String> tagMap = (Map<String, String>) sentJson.get(0).get("tags");
        assertEquals(tagMap.keySet(), ImmutableSet.of("application", "host", "environment", "pool", "tag1"));
        assertEquals(tagMap.get("tag1"), "B_a_z"); // "B\\a\"z");
        tagMap = (Map<String, String>) sentJson.get(1).get("tags");
        assertEquals(tagMap.keySet(), ImmutableSet.of("application", "host", "environment", "pool"));
    }

    @Test
    public void testReportString()
    {
        ReportClient client = new ReportClient(nodeInfo, httpClient, new ReportClientConfig(), new ReportTagConfig(), objectMapper);

        // A string-valued datapoint must be tagged with type "string".
        collectedData = HashBasedTable.create();
        collectedData.put("Foo.String", ImmutableMap.of(), "test value");
        client.report(TEST_TIME, collectedData);

        assertEquals(sentJson, ImmutableList.of(
                ImmutableMap.of(
                        "name", "Foo.String",
                        "timestamp", TEST_TIME,
                        "type", "string",
                        "value", "test value",
                        "tags", ImmutableMap.of(
                                "application", "test-application",
                                "host", "test.hostname",
                                "environment", "test_environment",
                                "pool", "test_pool"
                        )
                )
        ));
    }

    @Test
    public void testConfiguredTags()
    {
        // Extra tags from ReportTagConfig must appear on every datapoint.
        ReportClient client = new ReportClient(nodeInfo, httpClient,
                new ReportClientConfig(), new ReportTagConfig()
                .setTags(ImmutableMap.of("foo", "ba:r", "baz", "quux")), objectMapper);
        client.report(TEST_TIME, collectedData);

        assertEquals(sentJson.size(), 2);
        for (Map<String, Object> datapoint : sentJson) {
            assertEquals(datapoint.keySet(), ImmutableSet.of("name", "timestamp", "value", "tags"));
            Map<String, String> tagMap = (Map<String, String>) datapoint.get("tags");
            assertEquals(tagMap.get("foo"), "ba:r");
            assertEquals(tagMap.get("baz"), "quux");
        }
        Map<String, String> tagMap = (Map<String, String>) sentJson.get(0).get("tags");
        assertEquals(tagMap.keySet(), ImmutableSet.of("application", "host", "environment", "pool", "foo", "baz", "tag1"));
        tagMap = (Map<String, String>) sentJson.get(1).get("tags");
        assertEquals(tagMap.keySet(), ImmutableSet.of("application", "host", "environment", "pool", "foo", "baz"));
    }

    @Test
    public void testNoReportHost()
    {
        // With the host tag disabled, "host" must be absent from every tag set.
        ReportClient client = new ReportClient(nodeInfo, httpClient,
                new ReportClientConfig().setPulseIncludeHostTag(false), new ReportTagConfig()
                .setTags(ImmutableMap.of("foo", "ba:r", "baz", "quux")), objectMapper);
        client.report(TEST_TIME, collectedData);

        assertEquals(sentJson.size(), 2);
        for (Map<String, Object> datapoint : sentJson) {
            assertEquals(datapoint.keySet(), ImmutableSet.of("name", "timestamp", "value", "tags"));
            Map<String, String> tagMap = (Map<String, String>) datapoint.get("tags");
            assertEquals(tagMap.get("foo"), "ba:r");
            assertEquals(tagMap.get("baz"), "quux");
        }
        Map<String, String> tagMap = (Map<String, String>) sentJson.get(0).get("tags");
        assertEquals(tagMap.keySet(), ImmutableSet.of("application", "environment", "pool", "foo", "baz", "tag1"));
        tagMap = (Map<String, String>) sentJson.get(1).get("tags");
        assertEquals(tagMap.keySet(), ImmutableSet.of("application", "environment", "pool", "foo", "baz"));
    }

    /**
     * Fake server endpoint: verifies the request shape, then decompresses and
     * parses the gzipped JSON body into sentJson (sorted by datapoint name so
     * assertions are order-stable).
     */
    private class TestingResponseFunction
            implements Processor
    {
        @Override
        public Response handle(Request input)
        {
            // Each test expects exactly one request.
            assertNull(sentJson);
            assertEquals(input.getMethod(), "POST");
            assertEquals(input.getUri().toString(), "api/v1/datapoints");
            assertEquals(input.getHeader("Content-Type"), "application/gzip");

            ByteArrayOutputStream requestBody = new ByteArrayOutputStream();
            try {
                writeBodySourceTo(input.getBodySource(), requestBody);
                GZIPInputStream unzipped = new GZIPInputStream(new ByteArrayInputStream(requestBody.toByteArray()));
                sentJson = new ObjectMapper().readValue(unzipped, new TypeReference<List<Map<String, Object>>>()
                {
                });
                // Jackson's list may be immutable; take a mutable copy to sort.
                sentJson = Lists.newArrayList(sentJson);
                sentJson.sort(Comparator.comparing(o -> ((String) o.get("name"))));
            }
            catch (Exception e) {
                throwIfUnchecked(e);
                throw new RuntimeException(e);
            }
            return mockResponse(HttpStatus.NO_CONTENT);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.aggregation.bloom;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Lists;
import com.google.inject.Guice;
import com.google.inject.Key;
import org.apache.druid.data.input.MapBasedRow;
import org.apache.druid.guice.BloomFilterExtensionModule;
import org.apache.druid.guice.annotations.Json;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.query.aggregation.AggregationTestHelper;
import org.apache.druid.query.filter.BloomKFilter;
import org.apache.druid.query.groupby.GroupByQuery;
import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.query.groupby.GroupByQueryRunnerTest;
import org.apache.druid.query.groupby.ResultRow;
import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
import org.apache.druid.segment.TestHelper;
import org.junit.After;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
@RunWith(Parameterized.class)
public class BloomFilterGroupByQueryTest
{
// Extension module providing the bloom-filter aggregator; shared by all
// parameterized test instances.
private static final BloomFilterExtensionModule MODULE = new BloomFilterExtensionModule();

static {
    // throwaway injector, just using it to properly initialize the module's
    // jackson modules (the injector instance itself is never used)
    Guice.createInjector(
        binder -> binder.bind(Key.get(ObjectMapper.class, Json.class)).toInstance(TestHelper.makeJsonMapper()),
        MODULE
    );
}
// Runs ingestion and groupBy queries for the tests; closed in teardown().
private AggregationTestHelper helper;
// True when this parameterization uses the v2 groupBy strategy; the
// nested-query tests below are skipped for other strategies.
private boolean isV2;

@Rule
public final TemporaryFolder tempFolder = new TemporaryFolder();
public BloomFilterGroupByQueryTest(final GroupByQueryConfig config)
{
    // Remember which groupBy strategy this parameterization exercises, and
    // build the aggregation helper with the extension's Jackson modules
    // registered so bloom aggregators (de)serialize correctly.
    isV2 = config.getDefaultStrategy().equals(GroupByStrategySelector.STRATEGY_V2);
    helper = AggregationTestHelper.createGroupByQueryAggregationTestHelper(
        Lists.newArrayList(MODULE.getJacksonModules()),
        config,
        tempFolder
    );
}
@Parameterized.Parameters(name = "{0}")
public static Collection<?> constructorFeeder()
{
    // One test instance per groupBy configuration under test.
    final List<Object[]> params = new ArrayList<>();
    for (GroupByQueryConfig config : GroupByQueryRunnerTest.testConfigs()) {
        params.add(new Object[]{config});
    }
    return params;
}
@After
public void teardown() throws IOException
{
    // Release the temp segments and resources held by the helper.
    helper.close();
}
@Test
public void testQuery() throws Exception
{
    // Bloom-aggregate the "quality" dimension over rows filtered to
    // market=upfront; the filter should contain only qualities seen there.
    String queryJson = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"filter\":{ \"type\":\"selector\", \"dimension\":\"market\", \"value\":\"upfront\"},"
        + "\"aggregations\": ["
        + "  { \"type\": \"bloom\", \"name\": \"blooming_quality\", \"field\": \"quality\" }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";

    MapBasedRow resultRow = ingestAndQuery(queryJson);
    BloomKFilter qualityFilter = BloomKFilter.deserialize((ByteBuffer) resultRow.getRaw("blooming_quality"));
    Assert.assertTrue(qualityFilter.testString("mezzanine"));
    Assert.assertTrue(qualityFilter.testString("premium"));
    Assert.assertFalse(qualityFilter.testString("entertainment"));
}
@Test
public void testNestedQuery() throws Exception
{
    // Nested (query datasource) groupBy is only exercised on the v2 strategy.
    if (!isV2) {
        return;
    }

    // Outer query bloom-aggregates the inner query's "innerSum" long metric.
    String queryJson = "{"
        + "\"queryType\": \"groupBy\","
        + "\"dataSource\": {"
        + "\"type\": \"query\","
        + "\"query\": {"
        + "\"queryType\":\"groupBy\","
        + "\"dataSource\": \"test_datasource\","
        + "\"intervals\": [ \"1970/2050\" ],"
        + "\"granularity\":\"ALL\","
        + "\"dimensions\":[],"
        + "\"aggregations\": [{ \"type\":\"longSum\", \"name\":\"innerSum\", \"fieldName\":\"count\"}]"
        + "}"
        + "},"
        + "\"granularity\": \"ALL\","
        + "\"dimensions\": [],"
        + "\"aggregations\": ["
        + "  { \"type\": \"bloom\", \"name\": \"bloom\", \"field\": \"innerSum\" }"
        + "],"
        + "\"intervals\": [ \"1970/2050\" ]"
        + "}";

    MapBasedRow resultRow = ingestAndQuery(queryJson);
    BloomKFilter sumFilter = BloomKFilter.deserialize((ByteBuffer) resultRow.getRaw("bloom"));
    Assert.assertTrue(sumFilter.testLong(13L));
    Assert.assertFalse(sumFilter.testLong(5L));
}
@Test
public void testNestedQueryComplex() throws Exception
{
if (!isV2) {
return;
}
String query = "{"
+ "\"queryType\": \"groupBy\","
+ "\"dataSource\": {"
+ "\"type\": \"query\","
+ "\"query\": {"
+ "\"queryType\":\"groupBy\","
+ "\"dataSource\": \"test_datasource\","
+ "\"intervals\": [ \"1970/2050\" ],"
+ "\"granularity\":\"ALL\","
+ "\"dimensions\":[],"
+ "\"filter\":{ \"type\":\"selector\", \"dimension\":\"market\", \"value\":\"upfront\"},"
+ "\"aggregations\": [{ \"type\":\"bloom\", \"name\":\"innerBloom\", \"field\":\"quality\"}]"
+ "}"
+ "},"
+ "\"granularity\": \"ALL\","
+ "\"dimensions\": [],"
+ "\"aggregations\": ["
+ " { \"type\": \"bloom\", \"name\": \"innerBloom\", \"field\": \"innerBloom\" }"
+ "],"
+ "\"intervals\": [ \"1970/2050\" ]"
+ "}";
MapBasedRow row = ingestAndQuery(query);
BloomKFilter filter = BloomKFilter.deserialize((ByteBuffer) row.getRaw("innerBloom"));
Assert.assertTrue(filter.testString("mezzanine"));
Assert.assertTrue(filter.testString("premium"));
Assert.assertFalse(filter.testString("entertainment"));
}
@Test
public void testQueryFakeDimension() throws Exception
{
String query = "{"
+ "\"queryType\": \"groupBy\","
+ "\"dataSource\": \"test_datasource\","
+ "\"granularity\": \"ALL\","
+ "\"dimensions\": [],"
+ "\"filter\":{ \"type\":\"selector\", \"dimension\":\"market\", \"value\":\"upfront\"},"
+ "\"aggregations\": ["
+ " { \"type\": \"bloom\", \"name\": \"blooming_quality\", \"field\": \"nope\" }"
+ "],"
+ "\"intervals\": [ \"1970/2050\" ]"
+ "}";
MapBasedRow row = ingestAndQuery(query);
// a nil column results in a totally empty bloom filter
BloomKFilter filter = new BloomKFilter(1500);
Object val = row.getRaw("blooming_quality");
String serialized = BloomFilterAggregatorTest.filterToString(BloomKFilter.deserialize((ByteBuffer) val));
String empty = BloomFilterAggregatorTest.filterToString(filter);
Assert.assertEquals(empty, serialized);
}
private MapBasedRow ingestAndQuery(String query) throws Exception
{
String metricSpec = "[{ \"type\": \"count\", \"name\": \"count\"}]";
String parseSpec = "{"
+ "\"type\" : \"string\","
+ "\"parseSpec\" : {"
+ " \"format\" : \"tsv\","
+ " \"timestampSpec\" : {"
+ " \"column\" : \"timestamp\","
+ " \"format\" : \"auto\""
+ "},"
+ " \"dimensionsSpec\" : {"
+ " \"dimensions\": [],"
+ " \"dimensionExclusions\" : [],"
+ " \"spatialDimensions\" : []"
+ " },"
+ " \"columns\": [\"timestamp\", \"market\", \"quality\", \"placement\", \"placementish\", \"index\"]"
+ " }"
+ "}";
Sequence<ResultRow> seq = helper.createIndexAndRunQueryOnSegment(
this.getClass().getClassLoader().getResourceAsStream("sample.data.tsv"),
parseSpec,
metricSpec,
0,
Granularities.NONE,
50000,
query
);
return seq.toList().get(0).toMapBasedRow((GroupByQuery) helper.readQuery(query));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.security.basic.authentication.db.updater;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.inject.Inject;
import org.apache.druid.common.config.ConfigManager;
import org.apache.druid.concurrent.LifecycleLock;
import org.apache.druid.guice.ManageLifecycle;
import org.apache.druid.guice.annotations.Smile;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.apache.druid.java.util.common.concurrent.ScheduledExecutors;
import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
import org.apache.druid.java.util.common.lifecycle.LifecycleStop;
import org.apache.druid.java.util.emitter.EmittingLogger;
import org.apache.druid.metadata.MetadataCASUpdate;
import org.apache.druid.metadata.MetadataStorageConnector;
import org.apache.druid.metadata.MetadataStorageTablesConfig;
import org.apache.druid.security.basic.BasicAuthCommonCacheConfig;
import org.apache.druid.security.basic.BasicAuthDBConfig;
import org.apache.druid.security.basic.BasicAuthUtils;
import org.apache.druid.security.basic.BasicSecurityDBResourceException;
import org.apache.druid.security.basic.authentication.BasicHTTPAuthenticator;
import org.apache.druid.security.basic.authentication.db.cache.BasicAuthenticatorCacheNotifier;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentialUpdate;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentials;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorUser;
import org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorUserMapBundle;
import org.apache.druid.server.security.Authenticator;
import org.apache.druid.server.security.AuthenticatorMapper;
import org.joda.time.Duration;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
/**
 * Coordinator-side updater for basic-authenticator user metadata.
 *
 * <p>User maps are stored per authenticator prefix in the metadata-store config table
 * under the key {@code basic_authentication_<prefix>_users} and written with
 * compare-and-swap; writes are retried a bounded number of times under contention.
 * A scheduled poll task periodically refreshes the in-memory {@code cachedUserMaps},
 * and successful writes push updates through the {@link BasicAuthenticatorCacheNotifier}.
 */
@ManageLifecycle
public class CoordinatorBasicAuthenticatorMetadataStorageUpdater implements BasicAuthenticatorMetadataStorageUpdater
{
  private static final EmittingLogger LOG =
      new EmittingLogger(CoordinatorBasicAuthenticatorMetadataStorageUpdater.class);

  // Key-name suffix under which each authenticator's user map is stored.
  private static final String USERS = "users";
  // Upper bound (ms) for the randomized sleep between CAS retry attempts.
  private static final long UPDATE_RETRY_DELAY = 1000;

  private final AuthenticatorMapper authenticatorMapper;
  private final MetadataStorageConnector connector;
  private final MetadataStorageTablesConfig connectorConfig;
  private final BasicAuthCommonCacheConfig commonCacheConfig;
  private final ObjectMapper objectMapper;
  private final BasicAuthenticatorCacheNotifier cacheNotifier;
  private final int numRetries = 5;

  // Latest known user map per authenticator prefix; refreshed by the poll task and
  // replaced on every successful CAS write.
  private final Map<String, BasicAuthenticatorUserMapBundle> cachedUserMaps;
  // Names of the BasicHTTPAuthenticator instances discovered at start().
  private final Set<String> authenticatorPrefixes;
  private final LifecycleLock lifecycleLock = new LifecycleLock();
  private final ScheduledExecutorService exec;
  // Signals the poll task to stop; volatile because it is read from the exec thread.
  private volatile boolean stopped = false;

  @Inject
  public CoordinatorBasicAuthenticatorMetadataStorageUpdater(
      AuthenticatorMapper authenticatorMapper,
      MetadataStorageConnector connector,
      MetadataStorageTablesConfig connectorConfig,
      BasicAuthCommonCacheConfig commonCacheConfig,
      @Smile ObjectMapper objectMapper,
      BasicAuthenticatorCacheNotifier cacheNotifier,
      ConfigManager configManager // -V6022 (unused parameter): ConfigManager creates the db table we need,
                                  // set a dependency here
  )
  {
    this.exec = Execs.scheduledSingleThreaded("CoordinatorBasicAuthenticatorMetadataStorageUpdater-Exec--%d");
    this.authenticatorMapper = authenticatorMapper;
    this.connector = connector;
    this.connectorConfig = connectorConfig;
    this.commonCacheConfig = commonCacheConfig;
    this.objectMapper = objectMapper;
    this.cacheNotifier = cacheNotifier;
    this.cachedUserMaps = new ConcurrentHashMap<>();
    this.authenticatorPrefixes = new HashSet<>();
  }

  /**
   * Seeds the cached user map for each BasicHTTPAuthenticator, creates the configured
   * initial admin/internal users if absent, and starts the periodic db poll.
   */
  @LifecycleStart
  public void start()
  {
    if (!lifecycleLock.canStart()) {
      throw new ISE("can't start.");
    }

    if (authenticatorMapper == null || authenticatorMapper.getAuthenticatorMap() == null) {
      // Nothing to manage; NOTE(review): this leaves lifecycleLock without a matching
      // started()/exitStart() — confirm whether LifecycleLock tolerates this path.
      return;
    }

    try {
      LOG.info("Starting CoordinatorBasicAuthenticatorMetadataStorageUpdater.");
      for (Map.Entry<String, Authenticator> entry : authenticatorMapper.getAuthenticatorMap().entrySet()) {
        Authenticator authenticator = entry.getValue();
        if (authenticator instanceof BasicHTTPAuthenticator) {
          String authenticatorName = entry.getKey();
          authenticatorPrefixes.add(authenticatorName);
          BasicHTTPAuthenticator basicHTTPAuthenticator = (BasicHTTPAuthenticator) authenticator;
          BasicAuthDBConfig dbConfig = basicHTTPAuthenticator.getDbConfig();
          byte[] userMapBytes = getCurrentUserMapBytes(authenticatorName);
          Map<String, BasicAuthenticatorUser> userMap = BasicAuthUtils.deserializeAuthenticatorUserMap(
              objectMapper,
              userMapBytes
          );
          cachedUserMaps.put(authenticatorName, new BasicAuthenticatorUserMapBundle(userMap, userMapBytes));

          // Bootstrap the admin user with its configured initial password, once.
          if (dbConfig.getInitialAdminPassword() != null && !userMap.containsKey(BasicAuthUtils.ADMIN_NAME)) {
            createUserInternal(authenticatorName, BasicAuthUtils.ADMIN_NAME);
            setUserCredentialsInternal(
                authenticatorName,
                BasicAuthUtils.ADMIN_NAME,
                new BasicAuthenticatorCredentialUpdate(
                    dbConfig.getInitialAdminPassword().getPassword(),
                    BasicAuthUtils.DEFAULT_KEY_ITERATIONS
                )
            );
          }

          // Bootstrap the internal (cluster-communication) user, once.
          if (dbConfig.getInitialInternalClientPassword() != null
              && !userMap.containsKey(BasicAuthUtils.INTERNAL_USER_NAME)) {
            createUserInternal(authenticatorName, BasicAuthUtils.INTERNAL_USER_NAME);
            setUserCredentialsInternal(
                authenticatorName,
                BasicAuthUtils.INTERNAL_USER_NAME,
                new BasicAuthenticatorCredentialUpdate(
                    dbConfig.getInitialInternalClientPassword().getPassword(),
                    BasicAuthUtils.DEFAULT_KEY_ITERATIONS
                )
            );
          }
        }
      }

      ScheduledExecutors.scheduleWithFixedDelay(
          exec,
          new Duration(commonCacheConfig.getPollingPeriod()),
          new Duration(commonCacheConfig.getPollingPeriod()),
          new Callable<ScheduledExecutors.Signal>()
          {
            @Override
            public ScheduledExecutors.Signal call()
            {
              if (stopped) {
                return ScheduledExecutors.Signal.STOP;
              }
              try {
                LOG.debug("Scheduled db poll is running");
                for (String authenticatorPrefix : authenticatorPrefixes) {
                  byte[] userMapBytes = getCurrentUserMapBytes(authenticatorPrefix);
                  // Only deserialize and refresh the cache when the lookup returned data;
                  // on a null result the previously cached bundle is kept. (Previously the
                  // bytes were deserialized unconditionally and discarded when null.)
                  if (userMapBytes != null) {
                    Map<String, BasicAuthenticatorUser> userMap = BasicAuthUtils.deserializeAuthenticatorUserMap(
                        objectMapper,
                        userMapBytes
                    );
                    cachedUserMaps.put(authenticatorPrefix, new BasicAuthenticatorUserMapBundle(userMap, userMapBytes));
                  }
                }
                LOG.debug("Scheduled db poll is done");
              }
              catch (Throwable t) {
                LOG.makeAlert(t, "Error occurred while polling for cachedUserMaps.").emit();
              }
              return ScheduledExecutors.Signal.REPEAT;
            }
          }
      );

      lifecycleLock.started();
    }
    finally {
      lifecycleLock.exitStart();
    }
  }

  /**
   * Flags the poll task to stop at its next wakeup.
   */
  @LifecycleStop
  public void stop()
  {
    if (!lifecycleLock.canStop()) {
      throw new ISE("can't stop.");
    }
    LOG.info("CoordinatorBasicAuthenticatorMetadataStorageUpdater is stopping.");
    stopped = true;
    LOG.info("CoordinatorBasicAuthenticatorMetadataStorageUpdater is stopped.");
  }

  @Override
  public void createUser(String prefix, String userName)
  {
    Preconditions.checkState(lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS));
    createUserInternal(prefix, userName);
  }

  @Override
  public void deleteUser(String prefix, String userName)
  {
    Preconditions.checkState(lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS));
    deleteUserInternal(prefix, userName);
  }

  @Override
  public void setUserCredentials(String prefix, String userName, BasicAuthenticatorCredentialUpdate update)
  {
    Preconditions.checkState(lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS));
    setUserCredentialsInternal(prefix, userName, update);
  }

  /**
   * Returns the cached (possibly slightly stale) user map for the prefix, or null if unknown.
   */
  @Override
  public Map<String, BasicAuthenticatorUser> getCachedUserMap(String prefix)
  {
    Preconditions.checkState(lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS));
    BasicAuthenticatorUserMapBundle bundle = cachedUserMaps.get(prefix);
    if (bundle == null) {
      return null;
    } else {
      return bundle.getUserMap();
    }
  }

  /**
   * Returns the cached serialized user map for the prefix, or null if unknown.
   */
  @Override
  public byte[] getCachedSerializedUserMap(String prefix)
  {
    Preconditions.checkState(lifecycleLock.awaitStarted(1, TimeUnit.MILLISECONDS));
    BasicAuthenticatorUserMapBundle bundle = cachedUserMaps.get(prefix);
    if (bundle == null) {
      return null;
    } else {
      return bundle.getSerializedUserMap();
    }
  }

  /**
   * Reads the current serialized user map for the prefix directly from the metadata store.
   */
  @Override
  public byte[] getCurrentUserMapBytes(String prefix)
  {
    return connector.lookup(
        connectorConfig.getConfigTable(),
        MetadataStorageConnector.CONFIG_TABLE_KEY_COLUMN,
        MetadataStorageConnector.CONFIG_TABLE_VALUE_COLUMN,
        getPrefixedKeyColumn(prefix, USERS)
    );
  }

  /**
   * Pushes the currently cached serialized user maps through the cache notifier.
   */
  @Override
  public void refreshAllNotification()
  {
    cachedUserMaps.forEach(
        (authenticatorName, userMapBundle) -> {
          cacheNotifier.addUpdate(authenticatorName, userMapBundle.getSerializedUserMap());
        }
    );
  }

  private static String getPrefixedKeyColumn(String keyPrefix, String keyName)
  {
    return StringUtils.format("basic_authentication_%s_%s", keyPrefix, keyName);
  }

  /**
   * Attempts a single compare-and-swap of the stored user map. On success, updates the
   * local cache and notifies listeners; returns false on CAS contention.
   */
  private boolean tryUpdateUserMap(
      String prefix,
      Map<String, BasicAuthenticatorUser> userMap,
      byte[] oldValue,
      byte[] newValue
  )
  {
    try {
      MetadataCASUpdate update = new MetadataCASUpdate(
          connectorConfig.getConfigTable(),
          MetadataStorageConnector.CONFIG_TABLE_KEY_COLUMN,
          MetadataStorageConnector.CONFIG_TABLE_VALUE_COLUMN,
          getPrefixedKeyColumn(prefix, USERS),
          oldValue,
          newValue
      );

      boolean succeeded = connector.compareAndSwap(
          Collections.singletonList(update)
      );

      if (succeeded) {
        cachedUserMaps.put(prefix, new BasicAuthenticatorUserMapBundle(userMap, newValue));
        cacheNotifier.addUpdate(prefix, newValue);
        return true;
      } else {
        return false;
      }
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  private void createUserInternal(String prefix, String userName)
  {
    int attempts = 0;
    while (attempts < numRetries) {
      if (createUserOnce(prefix, userName)) {
        return;
      } else {
        attempts++;
      }
      try {
        // Randomized backoff to reduce repeated CAS collisions between coordinators.
        Thread.sleep(ThreadLocalRandom.current().nextLong(UPDATE_RETRY_DELAY));
      }
      catch (InterruptedException ie) {
        // Restore the interrupt flag before propagating so callers can observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(ie);
      }
    }
    throw new ISE("Could not create user[%s] due to concurrent update contention.", userName);
  }

  private void deleteUserInternal(String prefix, String userName)
  {
    int attempts = 0;
    while (attempts < numRetries) {
      if (deleteUserOnce(prefix, userName)) {
        return;
      } else {
        attempts++;
      }
      try {
        // Randomized backoff to reduce repeated CAS collisions between coordinators.
        Thread.sleep(ThreadLocalRandom.current().nextLong(UPDATE_RETRY_DELAY));
      }
      catch (InterruptedException ie) {
        // Restore the interrupt flag before propagating so callers can observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(ie);
      }
    }
    throw new ISE("Could not delete user[%s] due to concurrent update contention.", userName);
  }

  private void setUserCredentialsInternal(String prefix, String userName, BasicAuthenticatorCredentialUpdate update)
  {
    BasicAuthenticatorCredentials credentials;

    // use default iteration count from Authenticator if not specified in request
    if (update.getIterations() == -1) {
      BasicHTTPAuthenticator authenticator = (BasicHTTPAuthenticator) authenticatorMapper.getAuthenticatorMap().get(
          prefix
      );
      credentials = new BasicAuthenticatorCredentials(
          new BasicAuthenticatorCredentialUpdate(
              update.getPassword(),
              authenticator.getDbConfig().getIterations()
          )
      );
    } else {
      credentials = new BasicAuthenticatorCredentials(update);
    }

    int attempts = 0;
    while (attempts < numRetries) {
      if (setUserCredentialOnce(prefix, userName, credentials)) {
        return;
      } else {
        attempts++;
      }
      try {
        // Randomized backoff to reduce repeated CAS collisions between coordinators.
        Thread.sleep(ThreadLocalRandom.current().nextLong(UPDATE_RETRY_DELAY));
      }
      catch (InterruptedException ie) {
        // Restore the interrupt flag before propagating so callers can observe it.
        Thread.currentThread().interrupt();
        throw new RuntimeException(ie);
      }
    }
    throw new ISE("Could not set credentials for user[%s] due to concurrent update contention.", userName);
  }

  /**
   * One create attempt: read-modify-CAS. Throws if the user already exists.
   */
  private boolean createUserOnce(String prefix, String userName)
  {
    byte[] oldValue = getCurrentUserMapBytes(prefix);
    Map<String, BasicAuthenticatorUser> userMap = BasicAuthUtils.deserializeAuthenticatorUserMap(
        objectMapper,
        oldValue
    );
    if (userMap.get(userName) != null) {
      throw new BasicSecurityDBResourceException("User [%s] already exists.", userName);
    } else {
      userMap.put(userName, new BasicAuthenticatorUser(userName, null));
    }
    byte[] newValue = BasicAuthUtils.serializeAuthenticatorUserMap(objectMapper, userMap);
    return tryUpdateUserMap(prefix, userMap, oldValue, newValue);
  }

  /**
   * One delete attempt: read-modify-CAS. Throws if the user does not exist.
   */
  private boolean deleteUserOnce(String prefix, String userName)
  {
    byte[] oldValue = getCurrentUserMapBytes(prefix);
    Map<String, BasicAuthenticatorUser> userMap = BasicAuthUtils.deserializeAuthenticatorUserMap(
        objectMapper,
        oldValue
    );
    if (userMap.get(userName) == null) {
      throw new BasicSecurityDBResourceException("User [%s] does not exist.", userName);
    } else {
      userMap.remove(userName);
    }
    byte[] newValue = BasicAuthUtils.serializeAuthenticatorUserMap(objectMapper, userMap);
    return tryUpdateUserMap(prefix, userMap, oldValue, newValue);
  }

  /**
   * One credential-update attempt: read-modify-CAS. Throws if the user does not exist.
   */
  private boolean setUserCredentialOnce(String prefix, String userName, BasicAuthenticatorCredentials credentials)
  {
    byte[] oldValue = getCurrentUserMapBytes(prefix);
    Map<String, BasicAuthenticatorUser> userMap = BasicAuthUtils.deserializeAuthenticatorUserMap(
        objectMapper,
        oldValue
    );
    if (userMap.get(userName) == null) {
      throw new BasicSecurityDBResourceException("User [%s] does not exist.", userName);
    } else {
      userMap.put(userName, new BasicAuthenticatorUser(userName, credentials));
    }
    byte[] newValue = BasicAuthUtils.serializeAuthenticatorUserMap(objectMapper, userMap);
    return tryUpdateUserMap(prefix, userMap, oldValue, newValue);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.wan;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.logging.log4j.Logger;
import org.apache.geode.DataSerializer;
import org.apache.geode.cache.Cache;
import org.apache.geode.cache.wan.GatewayQueueEvent;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.distributed.internal.ClusterDistributionManager;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.DistributionMessage;
import org.apache.geode.distributed.internal.MessageWithReply;
import org.apache.geode.distributed.internal.PooledDistributionMessage;
import org.apache.geode.distributed.internal.ReplyException;
import org.apache.geode.distributed.internal.ReplyMessage;
import org.apache.geode.distributed.internal.ReplyProcessor21;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.GemFireCacheImpl;
import org.apache.geode.internal.cache.InitialImageOperation;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.versions.VersionTag;
import org.apache.geode.internal.serialization.DataSerializableFixedID;
import org.apache.geode.internal.serialization.DeserializationContext;
import org.apache.geode.internal.serialization.KnownVersion;
import org.apache.geode.internal.serialization.SerializationContext;
import org.apache.geode.logging.internal.log4j.api.LogService;
/**
 * Asks a remote member for the gateway-sender queue events corresponding to a set of
 * region entries (discovered during GII) and replays the returned events into the
 * matching local {@link AbstractGatewaySender}s.
 *
 * <p>Fixes in this revision:
 * <ul>
 * <li>The reply processor previously tested {@code events.isEmpty()} (the outer list,
 * which is never empty inside its own iteration) instead of the per-entry map, so the
 * "empty reply" log branch was unreachable and empty maps were passed on.</li>
 * <li>The reply debug log mislabeled its placeholders ("key=" received the entry list,
 * "entriesToSynchronize=" the return value).</li>
 * </ul>
 */
public class GatewaySenderQueueEntrySynchronizationOperation {

  // Member that holds the queue events we need to synchronize from.
  private final InternalDistributedMember recipient;

  private final InternalRegion region;

  private List<GatewaySenderQueueEntrySynchronizationEntry> entriesToSynchronize;

  private static final Logger logger = LogService.getLogger();

  protected GatewaySenderQueueEntrySynchronizationOperation(InternalDistributedMember recipient,
      InternalRegion internalRegion, List<InitialImageOperation.Entry> giiEntriesToSynchronize) {
    this.recipient = recipient;
    region = internalRegion;
    initializeEntriesToSynchronize(giiEntriesToSynchronize);
  }

  /**
   * Sends the synchronization request to the recipient and blocks until replies are
   * processed (or the request fails).
   */
  protected void synchronizeEntries() {
    if (logger.isDebugEnabled()) {
      logger.debug(
          "{}: Requesting synchronization from member={}; regionPath={}; entriesToSynchronize={}",
          getClass().getSimpleName(), recipient, region.getFullPath(),
          entriesToSynchronize);
    }
    // Create and send message
    DistributionManager dm = region.getDistributionManager();
    GatewaySenderQueueEntrySynchronizationReplyProcessor processor =
        new GatewaySenderQueueEntrySynchronizationReplyProcessor(dm, recipient, this);
    GatewaySenderQueueEntrySynchronizationMessage message =
        new GatewaySenderQueueEntrySynchronizationMessage(recipient,
            processor.getProcessorId(), this);
    dm.putOutgoing(message);

    // Wait for replies
    try {
      processor.waitForReplies();
    } catch (ReplyException e) {
      e.handleCause();
    } catch (InterruptedException e) {
      // Check for cancellation first, then preserve the interrupt for callers.
      dm.getCancelCriterion().checkCancelInProgress(e);
      Thread.currentThread().interrupt();
    }
  }

  protected GemFireCacheImpl getCache() {
    return (GemFireCacheImpl) region.getDistributionManager().getCache();
  }

  /**
   * Converts the GII entries into serializable (key, versionTag) pairs.
   */
  private void initializeEntriesToSynchronize(
      List<InitialImageOperation.Entry> giiEntriesToSynchronize) {
    entriesToSynchronize = new ArrayList<>();
    for (InitialImageOperation.Entry entry : giiEntriesToSynchronize) {
      entriesToSynchronize.add(
          new GatewaySenderQueueEntrySynchronizationEntry(entry.getKey(), entry.getVersionTag()));
    }
  }

  /**
   * Processes the reply: for each requested entry, puts the returned (senderId -> event)
   * map into the corresponding local gateway senders.
   */
  public static class GatewaySenderQueueEntrySynchronizationReplyProcessor
      extends ReplyProcessor21 {

    private final GatewaySenderQueueEntrySynchronizationOperation operation;

    public GatewaySenderQueueEntrySynchronizationReplyProcessor(DistributionManager dm,
        InternalDistributedMember recipient,
        GatewaySenderQueueEntrySynchronizationOperation operation) {
      super(dm, recipient);
      this.operation = operation;
    }

    @Override
    public void process(DistributionMessage msg) {
      try {
        if (msg instanceof ReplyMessage) {
          ReplyMessage reply = (ReplyMessage) msg;
          if (reply.getException() == null) {
            if (logger.isDebugEnabled()) {
              logger.debug(
                  "{}: Processing reply from member={}; regionPath={}; entriesToSynchronize={}; returnValue={}",
                  getClass().getSimpleName(), reply.getSender(),
                  operation.region.getFullPath(), operation.entriesToSynchronize,
                  reply.getReturnValue());
            }
            // The return value is positionally aligned with entriesToSynchronize:
            // element i holds the events found for entry i.
            List<Map<String, GatewayQueueEvent>> events =
                (List<Map<String, GatewayQueueEvent>>) reply.getReturnValue();
            for (int i = 0; i < events.size(); i++) {
              Map<String, GatewayQueueEvent> eventsForOneEntry = events.get(i);
              // Check the per-entry map, not the outer list: an empty map means the
              // remote member had no queued event for this entry on any sender.
              if (eventsForOneEntry.isEmpty()) {
                GatewaySenderQueueEntrySynchronizationEntry entry =
                    operation.entriesToSynchronize.get(i);
                logger.info(
                    "Synchronization event reply from member={}; regionPath={}; key={}; entryVersion={} is empty",
                    new Object[] {reply.getSender(), operation.region.getFullPath(),
                        entry.key,
                        entry.entryVersion});
              } else {
                putSynchronizationEvents(eventsForOneEntry);
              }
            }
          }
        }
      } finally {
        super.process(msg);
      }
    }

    private void putSynchronizationEvents(Map<String, GatewayQueueEvent> senderIdsAndEvents) {
      for (Map.Entry<String, GatewayQueueEvent> senderIdAndEvent : senderIdsAndEvents.entrySet()) {
        AbstractGatewaySender sender =
            (AbstractGatewaySender) getCache().getGatewaySender(senderIdAndEvent.getKey());
        sender.putSynchronizationEvent(senderIdAndEvent.getValue());
      }
    }

    Cache getCache() {
      return dmgr.getCache();
    }
  }

  /**
   * Request message processed on the remote member: looks up the queued events for each
   * requested entry and replies with a positionally aligned list of (senderId -> event) maps.
   */
  public static class GatewaySenderQueueEntrySynchronizationMessage
      extends PooledDistributionMessage implements MessageWithReply {

    private int processorId;

    private String regionPath;

    private List<GatewaySenderQueueEntrySynchronizationEntry> entriesToSynchronize;

    /* For serialization */
    public GatewaySenderQueueEntrySynchronizationMessage() {}

    protected GatewaySenderQueueEntrySynchronizationMessage(InternalDistributedMember recipient,
        int processorId, GatewaySenderQueueEntrySynchronizationOperation operation) {
      super();
      setRecipient(recipient);
      this.processorId = processorId;
      regionPath = operation.region.getFullPath();
      entriesToSynchronize = operation.entriesToSynchronize;
    }

    @Override
    protected void process(ClusterDistributionManager dm) {
      Object result = null;
      ReplyException replyException = null;
      try {
        if (logger.isDebugEnabled()) {
          logger.debug("{}: Providing synchronization region={}; entriesToSynchronize={}",
              getClass().getSimpleName(), regionPath, entriesToSynchronize);
        }
        result = getSynchronizationEvents(dm.getCache());
      } catch (Throwable t) {
        // Any failure is packaged into the reply rather than propagated locally.
        replyException = new ReplyException(t);
      } finally {
        ReplyMessage replyMsg = new ReplyMessage();
        replyMsg.setRecipient(getSender());
        replyMsg.setProcessorId(processorId);
        if (replyException == null) {
          replyMsg.setReturnValue(result);
        } else {
          replyMsg.setException(replyException);
        }
        if (logger.isDebugEnabled()) {
          logger.debug("{}: Sending synchronization reply returnValue={}; exception={}",
              getClass().getSimpleName(), replyMsg.getReturnValue(), replyMsg.getException());
        }
        dm.putOutgoing(replyMsg);
      }
    }

    private Object getSynchronizationEvents(InternalCache cache) {
      List<Map<String, GatewayQueueEvent>> results = new ArrayList<>();
      // Get the region
      LocalRegion region = (LocalRegion) cache.getRegion(regionPath);
      // Add the appropriate GatewaySenderEventImpl from each GatewaySender for each entry
      Set<String> allGatewaySenderIds = region.getAllGatewaySenderIds();
      for (GatewaySender sender : cache.getAllGatewaySenders()) {
        if (allGatewaySenderIds.contains(sender.getId())) {
          for (GatewaySenderQueueEntrySynchronizationEntry entry : entriesToSynchronize) {
            Map<String, GatewayQueueEvent> resultForOneEntry = new HashMap<>();
            GatewayQueueEvent event = ((AbstractGatewaySender) sender)
                .getSynchronizationEvent(entry.key, entry.entryVersion.getVersionTimeStamp());
            if (event != null) {
              resultForOneEntry.put(sender.getId(), event);
            }
            results.add(resultForOneEntry);
          }
        }
      }
      return results;
    }

    @Override
    public int getDSFID() {
      return GATEWAY_SENDER_QUEUE_ENTRY_SYNCHRONIZATION_MESSAGE;
    }

    @Override
    public void toData(DataOutput out,
        SerializationContext context) throws IOException {
      super.toData(out, context);
      out.writeInt(processorId);
      DataSerializer.writeString(regionPath, out);
      DataSerializer.writeArrayList((ArrayList) entriesToSynchronize, out);
    }

    @Override
    public void fromData(DataInput in,
        DeserializationContext context) throws IOException, ClassNotFoundException {
      super.fromData(in, context);
      processorId = in.readInt();
      regionPath = DataSerializer.readString(in);
      entriesToSynchronize = DataSerializer.readArrayList(in);
    }
  }

  /**
   * Serializable (key, versionTag) pair identifying one region entry to synchronize.
   */
  public static class GatewaySenderQueueEntrySynchronizationEntry
      implements DataSerializableFixedID {

    private Object key;

    private VersionTag entryVersion;

    /* For serialization */
    public GatewaySenderQueueEntrySynchronizationEntry() {}

    public GatewaySenderQueueEntrySynchronizationEntry(Object key, VersionTag entryVersion) {
      this.key = key;
      this.entryVersion = entryVersion;
    }

    @Override
    public int getDSFID() {
      return GATEWAY_SENDER_QUEUE_ENTRY_SYNCHRONIZATION_ENTRY;
    }

    @Override
    public KnownVersion[] getSerializationVersions() {
      return null;
    }

    @Override
    public void toData(DataOutput out,
        SerializationContext context) throws IOException {
      context.getSerializer().writeObject(key, out);
      context.getSerializer().writeObject(entryVersion, out);
    }

    @Override
    public void fromData(DataInput in,
        DeserializationContext context) throws IOException, ClassNotFoundException {
      key = context.getDeserializer().readObject(in);
      entryVersion = context.getDeserializer().readObject(in);
    }

    @Override
    public String toString() {
      return getClass().getSimpleName() + "[" + "key="
          + key + "; entryVersion=" + entryVersion + "]";
    }
  }
}
| |
/*******************************************************************************
* Copyright (c) 2015
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*******************************************************************************/
package jsettlers.main.swing;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Locale;
import java.util.Properties;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import jsettlers.common.CommonConstants;
import jsettlers.common.ai.EPlayerType;
import jsettlers.common.map.MapLoadException;
import jsettlers.common.menu.IMapInterfaceConnector;
import jsettlers.common.menu.IStartedGame;
import jsettlers.common.menu.IStartingGame;
import jsettlers.common.resources.ResourceManager;
import jsettlers.common.utils.MainUtils;
import jsettlers.common.utils.OptionableProperties;
import jsettlers.graphics.localization.AbstractLabels;
import jsettlers.graphics.localization.Labels;
import jsettlers.graphics.swing.resources.ConfigurationPropertiesFile;
import jsettlers.graphics.swing.resources.SwingResourceLoader;
import jsettlers.logic.constants.MatchConstants;
import jsettlers.logic.map.MapLoader;
import jsettlers.logic.map.save.DirectoryMapLister;
import jsettlers.logic.player.PlayerSetting;
import jsettlers.main.JSettlersGame;
import jsettlers.main.ReplayStartInformation;
import jsettlers.main.swing.foldertree.SelectSettlersFolderDialog;
import jsettlers.main.swing.lookandfeel.JSettlersLookAndFeel;
import jsettlers.main.swing.lookandfeel.JSettlersLookAndFeelExecption;
import jsettlers.network.client.OfflineNetworkConnector;
/**
* @author codingberlin
* @author Andreas Eberle
*/
public class SwingManagedJSettlers {
static {
CommonConstants.USE_SAVEGAME_COMPRESSION = true;
}
/**
 * Application entry point: reads the command-line options, applies the optional
 * settings, sets up the resource managers from "config.prp" (or a configured
 * override), creates the main frame, and hands control to the start handler.
 */
public static void main(String[] args) throws IOException, MapLoadException, JSettlersLookAndFeelExecption {
	OptionableProperties options = MainUtils.loadOptions(args);

	loadOptionalSettings(options);
	SwingManagedJSettlers.setupResourceManagers(options, "config.prp");

	JSettlersFrame mainFrame = createJSettlersFrame();
	handleStartOptions(options, mainFrame);
}
public static void loadOptionalSettings(OptionableProperties options) {
CommonConstants.CONTROL_ALL = options.isOptionSet("control-all");
CommonConstants.ACTIVATE_ALL_PLAYERS = options.isOptionSet("activate-all-players");
CommonConstants.ENABLE_CONSOLE_LOGGING = options.isOptionSet("console-output");
CommonConstants.ENABLE_AI = !options.isOptionSet("disable-ai");
CommonConstants.ALL_AI = options.isOptionSet("all-ai");
CommonConstants.DISABLE_ORIGINAL_MAPS = options.isOptionSet("disable-original-maps");
if (options.containsKey("fixed-ai-type")) {
CommonConstants.FIXED_AI_TYPE = EPlayerType.valueOf(options.getProperty("fixed-ai-type"));
}
if (options.isOptionSet("localhost")) {
CommonConstants.DEFAULT_SERVER_ADDRESS = "localhost";
}
if (options.containsKey("locale")) {
String localeString = options.getProperty("locale");
String[] localeParts = localeString.split("_");
if (localeParts.length == 2) {
AbstractLabels.setPreferredLocale(new Locale(localeParts[0], localeParts[1]));
} else {
System.err.println("Please specify the locale with language and country. (For example: de_de or en_us)");
}
}
}
/**
* Sets up the {@link ResourceManager} by using a configuration file. <br>
* First it is checked, if the given argsMap contains a "configFile" parameter. If so, the path specified for this parameter is used to get the
* file. <br>
* If the parameter is not given, the defaultConfigFile is used.
*
* @param argsMap
* @param defaultConfigFileName
* @throws FileNotFoundException
* @throws IOException
*/
public static void setupResourceManagers(OptionableProperties options, String defaultConfigFileName) throws FileNotFoundException, IOException {
ConfigurationPropertiesFile configFile = SwingManagedJSettlers.getConfigFile(options, defaultConfigFileName);
SwingResourceLoader.setupResourcesManager(configFile);
boolean firstRun = true;
while (!configFile.isValidSettlersFolderSet() || !SwingManagedJSettlers.trySettingUpResources(configFile)) {
if (!firstRun) {
JOptionPane.showMessageDialog(null, Labels.getString("settlers-folder-still-invalid"));
}
firstRun = false;
final SelectSettlersFolderDialog folderChooser = new SelectSettlersFolderDialog();
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
folderChooser.setVisible(true);
}
});
File selectedFolder = folderChooser.waitForUserInput();
if (selectedFolder == null) {
String noFolderSelctedMessage = Labels.getString("error-no-settlers-3-folder-selected");
JOptionPane.showMessageDialog(null, noFolderSelctedMessage);
System.err.println(noFolderSelctedMessage);
System.exit(1);
}
System.out.println(selectedFolder);
try {
configFile.setSettlersFolder(selectedFolder);
} catch (IOException ex) {
String errorSavingSettingsMessage = Labels.getString("error-settings-not-saveable");
System.err.println(errorSavingSettingsMessage);
JOptionPane.showMessageDialog(null, errorSavingSettingsMessage);
ex.printStackTrace();
}
}
if (!firstRun) { // the dialog was shown => settlers folder might have changed
SwingResourceLoader.setupResourcesManager(configFile);
}
}
private static boolean trySettingUpResources(ConfigurationPropertiesFile configFile) {
try {
SwingResourceLoader.setupGraphicsAndSoundResources(configFile);
return true;
} catch (IOException e) {
e.printStackTrace();
return false;
}
}
public static ConfigurationPropertiesFile getConfigFile(Properties options, String defaultConfigFileName) throws IOException {
String configFileName = defaultConfigFileName;
if (options.containsKey("config")) {
configFileName = options.getProperty("config");
}
return new ConfigurationPropertiesFile(new File(configFileName));
}
private static void handleStartOptions(OptionableProperties options, JSettlersFrame settlersFrame) throws IOException, MapLoadException {
String mapfile = null;
long randomSeed = 0;
File loadableReplayFile = null;
int targetGameTime = 0;
mapfile = options.getProperty("mapfile");
if (options.containsKey("random")) {
randomSeed = Long.parseLong(options.getProperty("random"));
}
if (options.containsKey("replayFile")) {
String loadableReplayFileString = options.getProperty("replayFile");
File replayFile = new File(loadableReplayFileString);
if (replayFile.exists()) {
loadableReplayFile = replayFile;
System.out.println("Found loadable replay file and loading it: " + loadableReplayFile);
} else {
System.err.println("Found replayFile parameter, but file can not be found!");
}
}
if (options.containsKey("targetTime")) {
targetGameTime = Integer.valueOf(options.getProperty("targetTime")) * 60 * 1000;
}
if (mapfile != null || loadableReplayFile != null) {
IStartingGame game;
if (loadableReplayFile == null) {
MapLoader mapLoader = MapLoader.getLoaderForListedMap(new DirectoryMapLister.ListedMapFile(new File(mapfile)));
byte playerId = 0;
PlayerSetting[] playerSettings = PlayerSetting.createDefaultSettings(playerId, (byte) mapLoader.getMaxPlayers());
game = new JSettlersGame(mapLoader, randomSeed, playerId, playerSettings).start();
} else {
game = JSettlersGame.loadFromReplayFile(loadableReplayFile, new OfflineNetworkConnector(), new ReplayStartInformation()).start();
}
settlersFrame.showStartingGamePanel(game);
if (targetGameTime > 0) {
while (!game.isStartupFinished()) {
try {
Thread.sleep(100);
} catch (InterruptedException e) {
}
}
MatchConstants.clock().fastForwardTo(targetGameTime);
}
}
}
public static IMapInterfaceConnector showJSettlers(IStartedGame startedGame) throws JSettlersLookAndFeelExecption {
JSettlersFrame jSettlersFrame = createJSettlersFrame();
return jSettlersFrame.showStartedGame(startedGame);
}
private static JSettlersFrame createJSettlersFrame() throws JSettlersLookAndFeelExecption {
JSettlersLookAndFeel.install();
return new JSettlersFrame();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.parquet.hadoop;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.ParquetReadOptions;
import org.apache.parquet.Version;
import org.apache.parquet.bytes.BytesUtils;
import org.apache.parquet.hadoop.ParquetOutputFormat.JobSummaryLevel;
import org.junit.Assume;
import org.junit.Rule;
import org.junit.Test;
import org.apache.parquet.bytes.BytesInput;
import org.apache.parquet.column.ColumnDescriptor;
import org.apache.parquet.column.Encoding;
import org.apache.parquet.column.page.DataPage;
import org.apache.parquet.column.page.DataPageV1;
import org.apache.parquet.column.page.PageReadStore;
import org.apache.parquet.column.page.PageReader;
import org.apache.parquet.column.statistics.BinaryStatistics;
import org.apache.parquet.column.statistics.LongStatistics;
import org.apache.parquet.format.Statistics;
import org.apache.parquet.hadoop.metadata.*;
import org.apache.parquet.hadoop.util.HadoopInputFile;
import org.apache.parquet.hadoop.util.HiddenFileFilter;
import org.apache.parquet.internal.column.columnindex.BoundaryOrder;
import org.apache.parquet.internal.column.columnindex.ColumnIndex;
import org.apache.parquet.internal.column.columnindex.OffsetIndex;
import org.apache.parquet.io.api.Binary;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.MessageTypeParser;
import org.apache.parquet.schema.PrimitiveType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Types;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.*;
import static org.apache.parquet.CorruptStatistics.shouldIgnoreStatistics;
import static org.apache.parquet.hadoop.ParquetFileWriter.Mode.OVERWRITE;
import static org.junit.Assert.*;
import static org.apache.parquet.column.Encoding.BIT_PACKED;
import static org.apache.parquet.column.Encoding.PLAIN;
import static org.apache.parquet.format.converter.ParquetMetadataConverter.MAX_STATS_SIZE;
import static org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName.BINARY;
import static org.apache.parquet.schema.Type.Repetition.*;
import static org.apache.parquet.hadoop.TestUtils.enforceEmptyDir;
import org.apache.parquet.example.data.Group;
import org.apache.parquet.example.data.simple.SimpleGroup;
import org.apache.parquet.hadoop.example.GroupWriteSupport;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class TestParquetFileWriter {
// Logger used for debug output while validating footers.
private static final Logger LOG = LoggerFactory.getLogger(TestParquetFileWriter.class);
// Schema shared by most tests: two required leaf columns, binary "a.b" and int64 "c.d".
private static final MessageType SCHEMA = MessageTypeParser.parseMessageType("" +
"message m {" +
" required group a {" +
" required binary b;" +
" }" +
" required group c {" +
" required int64 d;" +
" }" +
"}");
// Dotted paths and column descriptors for the two leaf columns of SCHEMA.
private static final String[] PATH1 = {"a", "b"};
private static final ColumnDescriptor C1 = SCHEMA.getColumnDescription(PATH1);
private static final String[] PATH2 = {"c", "d"};
private static final ColumnDescriptor C2 = SCHEMA.getColumnDescription(PATH2);
// Fixed page payloads written by the tests and asserted byte-for-byte on read-back.
private static final byte[] BYTES1 = { 0, 1, 2, 3 };
private static final byte[] BYTES2 = { 1, 2, 3, 4 };
private static final byte[] BYTES3 = { 2, 3, 4, 5 };
private static final byte[] BYTES4 = { 3, 4, 5, 6 };
// Tests use no compression so byte offsets/sizes are deterministic.
private static final CompressionCodecName CODEC = CompressionCodecName.UNCOMPRESSED;
// Empty statistics placeholder for pages whose stats content is irrelevant to the test.
private static final org.apache.parquet.column.statistics.Statistics<?> EMPTY_STATS = org.apache.parquet.column.statistics.Statistics
.getBuilderForReading(Types.required(PrimitiveTypeName.BINARY).named("test_binary")).build();
// Schema string assigned in testWriteReadStatisticsAllNulls.
private String writeSchema;
// Supplies fresh files/folders per test and removes them afterwards.
@Rule
public final TemporaryFolder temp = new TemporaryFolder();
@Test
public void testWriteMode() throws Exception {
// Verifies ParquetFileWriter's write modes against an already existing file:
// CREATE must fail, OVERWRITE must succeed.
File testFile = temp.newFile();
MessageType schema = MessageTypeParser.parseMessageType(
"message m { required group a {required binary b;} required group "
+ "c { required int64 d; }}");
Configuration conf = new Configuration();
ParquetFileWriter writer = null;
boolean exceptionThrown = false;
Path path = new Path(testFile.toURI());
// temp.newFile() already created the file, so CREATE mode is expected to throw.
try {
writer = new ParquetFileWriter(conf, schema, path,
ParquetFileWriter.Mode.CREATE);
} catch(IOException ioe1) {
exceptionThrown = true;
}
assertTrue(exceptionThrown);
exceptionThrown = false;
// OVERWRITE mode must succeed on the same pre-existing file.
try {
writer = new ParquetFileWriter(conf, schema, path,
OVERWRITE);
} catch(IOException ioe2) {
exceptionThrown = true;
}
assertTrue(!exceptionThrown);
// NOTE(review): the writer created in OVERWRITE mode is never started or closed,
// so its underlying output stream stays open until GC — confirm whether
// ParquetFileWriter offers a way to release it without start()/end().
testFile.delete();
}
@Test
public void testWriteRead() throws Exception {
// Writes a two-row-group file with the low level ParquetFileWriter API, then
// verifies footer metadata (sizes computed from getPos() bookkeeping, encodings)
// and reads the pages back.
File testFile = temp.newFile();
testFile.delete();
Path path = new Path(testFile.toURI());
Configuration configuration = new Configuration();
ParquetFileWriter w = new ParquetFileWriter(configuration, SCHEMA, path);
w.start();
// First row group: 3 records; two pages for column a.b, three pages for c.d.
// Byte positions are captured around each column chunk for the size assertions.
w.startBlock(3);
w.startColumn(C1, 5, CODEC);
long c1Starts = w.getPos();
w.writeDataPage(2, 4, BytesInput.from(BYTES1), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
w.writeDataPage(3, 4, BytesInput.from(BYTES1), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
w.endColumn();
long c1Ends = w.getPos();
w.startColumn(C2, 6, CODEC);
long c2Starts = w.getPos();
w.writeDataPage(2, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
w.writeDataPage(3, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
w.writeDataPage(1, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
w.endColumn();
long c2Ends = w.getPos();
w.endBlock();
// Second row group: 4 records, one page per column.
w.startBlock(4);
w.startColumn(C1, 7, CODEC);
w.writeDataPage(7, 4, BytesInput.from(BYTES3), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
w.endColumn();
w.startColumn(C2, 8, CODEC);
w.writeDataPage(8, 4, BytesInput.from(BYTES4), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
w.endColumn();
w.endBlock();
w.end(new HashMap<String, String>());
// The footer must describe both row groups; chunk sizes must match the byte
// ranges recorded during writing.
ParquetMetadata readFooter = ParquetFileReader.readFooter(configuration, path);
assertEquals("footer: "+ readFooter, 2, readFooter.getBlocks().size());
assertEquals(c1Ends - c1Starts, readFooter.getBlocks().get(0).getColumns().get(0).getTotalSize());
assertEquals(c2Ends - c2Starts, readFooter.getBlocks().get(0).getColumns().get(1).getTotalSize());
assertEquals(c2Ends - c1Starts, readFooter.getBlocks().get(0).getTotalByteSize());
HashSet<Encoding> expectedEncoding=new HashSet<Encoding>();
expectedEncoding.add(PLAIN);
expectedEncoding.add(BIT_PACKED);
assertEquals(expectedEncoding,readFooter.getBlocks().get(0).getColumns().get(0).getEncodings());
{ // read first block of col #1
ParquetFileReader r = new ParquetFileReader(configuration, readFooter.getFileMetaData(), path,
Arrays.asList(readFooter.getBlocks().get(0)), Arrays.asList(SCHEMA.getColumnDescription(PATH1)));
PageReadStore pages = r.readNextRowGroup();
assertEquals(3, pages.getRowCount());
validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
validateContains(SCHEMA, pages, PATH1, 3, BytesInput.from(BYTES1));
assertNull(r.readNextRowGroup());
}
{ // read all blocks of col #1 and #2
ParquetFileReader r = new ParquetFileReader(configuration, readFooter.getFileMetaData(), path,
readFooter.getBlocks(), Arrays.asList(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)));
PageReadStore pages = r.readNextRowGroup();
assertEquals(3, pages.getRowCount());
validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
validateContains(SCHEMA, pages, PATH1, 3, BytesInput.from(BYTES1));
validateContains(SCHEMA, pages, PATH2, 2, BytesInput.from(BYTES2));
validateContains(SCHEMA, pages, PATH2, 3, BytesInput.from(BYTES2));
validateContains(SCHEMA, pages, PATH2, 1, BytesInput.from(BYTES2));
pages = r.readNextRowGroup();
assertEquals(4, pages.getRowCount());
validateContains(SCHEMA, pages, PATH1, 7, BytesInput.from(BYTES3));
validateContains(SCHEMA, pages, PATH2, 8, BytesInput.from(BYTES4));
assertNull(r.readNextRowGroup());
}
// Smoke-test the footer printing tool on the generated file.
PrintFooter.main(new String[] {path.toString()});
}
@Test
public void testAlignmentWithPadding() throws Exception {
  // Writes two row groups with a row-group size of 120 bytes and a max padding of
  // 60 bytes (test-only constructor) and verifies that the second row group is
  // padded to start exactly at the 120-byte boundary.
  File testFile = temp.newFile();
  Path path = new Path(testFile.toURI());
  Configuration conf = new Configuration();
  // uses the test constructor: (conf, schema, path, rowGroupSize, maxPadding)
  ParquetFileWriter w = new ParquetFileWriter(conf, SCHEMA, path, 120, 60);
  w.start();
  // First row group: 3 records; byte positions are captured for size assertions.
  w.startBlock(3);
  w.startColumn(C1, 5, CODEC);
  long c1Starts = w.getPos();
  w.writeDataPage(2, 4, BytesInput.from(BYTES1), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(3, 4, BytesInput.from(BYTES1), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  long c1Ends = w.getPos();
  w.startColumn(C2, 6, CODEC);
  long c2Starts = w.getPos();
  w.writeDataPage(2, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(3, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(1, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  long c2Ends = w.getPos();
  w.endBlock();
  long firstRowGroupEnds = w.getPos(); // should be 109
  // Second row group: 4 records, one page per column.
  w.startBlock(4);
  w.startColumn(C1, 7, CODEC);
  w.writeDataPage(7, 4, BytesInput.from(BYTES3), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.startColumn(C2, 8, CODEC);
  w.writeDataPage(8, 4, BytesInput.from(BYTES4), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.endBlock();
  long secondRowGroupEnds = w.getPos();
  w.end(new HashMap<String, String>());
  FileSystem fs = path.getFileSystem(conf);
  long fileLen = fs.getFileStatus(path).getLen();
  long footerLen;
  // Read the footer length from the file tail; try-with-resources closes the
  // stream (the original code leaked this FSDataInputStream).
  try (FSDataInputStream data = fs.open(path)) {
    data.seek(fileLen - 8); // 4-byte footer length + 4-byte "PAR1" magic
    footerLen = BytesUtils.readIntLittleEndian(data);
  }
  long startFooter = fileLen - footerLen - 8;
  assertEquals("Footer should start after second row group without padding",
      secondRowGroupEnds, startFooter);
  // Footer metadata checks, identical to testWriteRead.
  ParquetMetadata readFooter = ParquetFileReader.readFooter(conf, path);
  assertEquals("footer: "+ readFooter, 2, readFooter.getBlocks().size());
  assertEquals(c1Ends - c1Starts, readFooter.getBlocks().get(0).getColumns().get(0).getTotalSize());
  assertEquals(c2Ends - c2Starts, readFooter.getBlocks().get(0).getColumns().get(1).getTotalSize());
  assertEquals(c2Ends - c1Starts, readFooter.getBlocks().get(0).getTotalByteSize());
  HashSet<Encoding> expectedEncoding=new HashSet<Encoding>();
  expectedEncoding.add(PLAIN);
  expectedEncoding.add(BIT_PACKED);
  assertEquals(expectedEncoding,readFooter.getBlocks().get(0).getColumns().get(0).getEncodings());
  // verify block starting positions with padding
  assertEquals("First row group should start after magic",
      4, readFooter.getBlocks().get(0).getStartingPos());
  assertTrue("First row group should end before the block size (120)",
      firstRowGroupEnds < 120);
  assertEquals("Second row group should start at the block size",
      120, readFooter.getBlocks().get(1).getStartingPos());
  { // read first block of col #1
    ParquetFileReader r = new ParquetFileReader(conf, readFooter.getFileMetaData(), path,
        Arrays.asList(readFooter.getBlocks().get(0)), Arrays.asList(SCHEMA.getColumnDescription(PATH1)));
    PageReadStore pages = r.readNextRowGroup();
    assertEquals(3, pages.getRowCount());
    validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
    validateContains(SCHEMA, pages, PATH1, 3, BytesInput.from(BYTES1));
    assertNull(r.readNextRowGroup());
  }
  { // read all blocks of col #1 and #2
    ParquetFileReader r = new ParquetFileReader(conf, readFooter.getFileMetaData(), path,
        readFooter.getBlocks(), Arrays.asList(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)));
    PageReadStore pages = r.readNextRowGroup();
    assertEquals(3, pages.getRowCount());
    validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
    validateContains(SCHEMA, pages, PATH1, 3, BytesInput.from(BYTES1));
    validateContains(SCHEMA, pages, PATH2, 2, BytesInput.from(BYTES2));
    validateContains(SCHEMA, pages, PATH2, 3, BytesInput.from(BYTES2));
    validateContains(SCHEMA, pages, PATH2, 1, BytesInput.from(BYTES2));
    pages = r.readNextRowGroup();
    assertEquals(4, pages.getRowCount());
    validateContains(SCHEMA, pages, PATH1, 7, BytesInput.from(BYTES3));
    validateContains(SCHEMA, pages, PATH2, 8, BytesInput.from(BYTES4));
    assertNull(r.readNextRowGroup());
  }
  // Smoke-test the footer printing tool on the generated file.
  PrintFooter.main(new String[] {path.toString()});
}
@Test
public void testAlignmentWithNoPaddingNeeded() throws Exception {
  // Writes two row groups with a row-group size of 100 bytes and a max padding of
  // 50 bytes: the first row group already ends past the 100-byte boundary, so no
  // padding is inserted and the second row group starts immediately after it.
  File testFile = temp.newFile();
  Path path = new Path(testFile.toURI());
  Configuration conf = new Configuration();
  // uses the test constructor: (conf, schema, path, rowGroupSize, maxPadding)
  ParquetFileWriter w = new ParquetFileWriter(conf, SCHEMA, path, 100, 50);
  w.start();
  // First row group: 3 records; byte positions are captured for size assertions.
  w.startBlock(3);
  w.startColumn(C1, 5, CODEC);
  long c1Starts = w.getPos();
  w.writeDataPage(2, 4, BytesInput.from(BYTES1), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(3, 4, BytesInput.from(BYTES1), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  long c1Ends = w.getPos();
  w.startColumn(C2, 6, CODEC);
  long c2Starts = w.getPos();
  w.writeDataPage(2, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(3, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(1, 4, BytesInput.from(BYTES2), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  long c2Ends = w.getPos();
  w.endBlock();
  long firstRowGroupEnds = w.getPos(); // should be 109
  // Second row group: 4 records, one page per column.
  w.startBlock(4);
  w.startColumn(C1, 7, CODEC);
  w.writeDataPage(7, 4, BytesInput.from(BYTES3), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.startColumn(C2, 8, CODEC);
  w.writeDataPage(8, 4, BytesInput.from(BYTES4), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.endBlock();
  long secondRowGroupEnds = w.getPos();
  w.end(new HashMap<String, String>());
  FileSystem fs = path.getFileSystem(conf);
  long fileLen = fs.getFileStatus(path).getLen();
  long footerLen;
  // Read the footer length from the file tail; try-with-resources closes the
  // stream (the original code leaked this FSDataInputStream).
  try (FSDataInputStream data = fs.open(path)) {
    data.seek(fileLen - 8); // 4-byte footer length + 4-byte "PAR1" magic
    footerLen = BytesUtils.readIntLittleEndian(data);
  }
  long startFooter = fileLen - footerLen - 8;
  assertEquals("Footer should start after second row group without padding",
      secondRowGroupEnds, startFooter);
  // Footer metadata checks, identical to testWriteRead.
  ParquetMetadata readFooter = ParquetFileReader.readFooter(conf, path);
  assertEquals("footer: "+ readFooter, 2, readFooter.getBlocks().size());
  assertEquals(c1Ends - c1Starts, readFooter.getBlocks().get(0).getColumns().get(0).getTotalSize());
  assertEquals(c2Ends - c2Starts, readFooter.getBlocks().get(0).getColumns().get(1).getTotalSize());
  assertEquals(c2Ends - c1Starts, readFooter.getBlocks().get(0).getTotalByteSize());
  HashSet<Encoding> expectedEncoding=new HashSet<Encoding>();
  expectedEncoding.add(PLAIN);
  expectedEncoding.add(BIT_PACKED);
  assertEquals(expectedEncoding,readFooter.getBlocks().get(0).getColumns().get(0).getEncodings());
  // verify block starting positions with no padding
  assertEquals("First row group should start after magic",
      4, readFooter.getBlocks().get(0).getStartingPos());
  // Message fixed: the assertion checks the group ends PAST the 100-byte row
  // group size (the old message claimed "end before the block size (120)").
  assertTrue("First row group should end after the row group size (100)",
      firstRowGroupEnds > 100);
  assertEquals("Second row group should start after no padding",
      109, readFooter.getBlocks().get(1).getStartingPos());
  { // read first block of col #1
    ParquetFileReader r = new ParquetFileReader(conf, readFooter.getFileMetaData(), path,
        Arrays.asList(readFooter.getBlocks().get(0)), Arrays.asList(SCHEMA.getColumnDescription(PATH1)));
    PageReadStore pages = r.readNextRowGroup();
    assertEquals(3, pages.getRowCount());
    validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
    validateContains(SCHEMA, pages, PATH1, 3, BytesInput.from(BYTES1));
    assertNull(r.readNextRowGroup());
  }
  { // read all blocks of col #1 and #2
    ParquetFileReader r = new ParquetFileReader(conf, readFooter.getFileMetaData(), path,
        readFooter.getBlocks(), Arrays.asList(SCHEMA.getColumnDescription(PATH1), SCHEMA.getColumnDescription(PATH2)));
    PageReadStore pages = r.readNextRowGroup();
    assertEquals(3, pages.getRowCount());
    validateContains(SCHEMA, pages, PATH1, 2, BytesInput.from(BYTES1));
    validateContains(SCHEMA, pages, PATH1, 3, BytesInput.from(BYTES1));
    validateContains(SCHEMA, pages, PATH2, 2, BytesInput.from(BYTES2));
    validateContains(SCHEMA, pages, PATH2, 3, BytesInput.from(BYTES2));
    validateContains(SCHEMA, pages, PATH2, 1, BytesInput.from(BYTES2));
    pages = r.readNextRowGroup();
    assertEquals(4, pages.getRowCount());
    validateContains(SCHEMA, pages, PATH1, 7, BytesInput.from(BYTES3));
    validateContains(SCHEMA, pages, PATH2, 8, BytesInput.from(BYTES4));
    assertNull(r.readNextRowGroup());
  }
  // Smoke-test the footer printing tool on the generated file.
  PrintFooter.main(new String[] {path.toString()});
}
@Test
public void testConvertToThriftStatistics() throws Exception {
  // Round-trips long statistics (including extreme values) through the thrift
  // representation and checks min/max/null-count survive unchanged.
  long[] values = {39L, 99L, 12L, 1000L, 65L, 542L, 2533461316L, -253346131996L, Long.MAX_VALUE, Long.MIN_VALUE};
  LongStatistics original = new LongStatistics();
  for (long value : values) {
    original.updateStats(value);
  }
  String writerVersion =
      "parquet-mr version 1.8.0 (build d4d5a07ec9bd262ca1e93c309f1d7d4a74ebda4c)";
  Statistics thriftStats =
      org.apache.parquet.format.converter.ParquetMetadataConverter.toParquetStatistics(original);
  LongStatistics roundTripped =
      (LongStatistics) org.apache.parquet.format.converter.ParquetMetadataConverter.fromParquetStatistics(
          writerVersion, thriftStats, PrimitiveTypeName.INT64);
  assertEquals(original.getMax(), roundTripped.getMax());
  assertEquals(original.getMin(), roundTripped.getMin());
  assertEquals(original.getNumNulls(), roundTripped.getNumNulls());
}
@Test
public void testWriteReadStatistics() throws Exception {
  // Writes per-page statistics for a binary and a long column across two row
  // groups and asserts that the footer carries the correctly merged per-chunk
  // statistics.
  // this test assumes statistics will be read
  Assume.assumeTrue(!shouldIgnoreStatistics(Version.FULL_VERSION, BINARY));
  File testFile = temp.newFile();
  testFile.delete();
  Path path = new Path(testFile.toURI());
  Configuration configuration = new Configuration();
  configuration.setBoolean("parquet.strings.signed-min-max.enabled", true);
  MessageType schema = MessageTypeParser.parseMessageType("message m { required group a {required binary b (UTF8);} required group c { required int64 d; }}");
  String[] path1 = {"a", "b"};
  ColumnDescriptor c1 = schema.getColumnDescription(path1);
  String[] path2 = {"c", "d"};
  ColumnDescriptor c2 = schema.getColumnDescription(path2);
  byte[] bytes1 = { 0, 1, 2, 3};
  byte[] bytes2 = { 1, 2, 3, 4};
  byte[] bytes3 = { 2, 3, 4, 5};
  byte[] bytes4 = { 3, 4, 5, 6};
  CompressionCodecName codec = CompressionCodecName.UNCOMPRESSED;
  // Per-page statistics: block B, column C, page P.
  BinaryStatistics statsB1C1P1 = new BinaryStatistics();
  BinaryStatistics statsB1C1P2 = new BinaryStatistics();
  LongStatistics statsB1C2P1 = new LongStatistics();
  LongStatistics statsB1C2P2 = new LongStatistics();
  BinaryStatistics statsB2C1P1 = new BinaryStatistics();
  LongStatistics statsB2C2P1 = new LongStatistics();
  statsB1C1P1.setMinMax(Binary.fromString("s"), Binary.fromString("z"));
  statsB1C1P2.setMinMax(Binary.fromString("a"), Binary.fromString("b"));
  statsB1C2P1.setMinMax(2l, 10l);
  statsB1C2P2.setMinMax(-6l, 4l);
  statsB2C1P1.setMinMax(Binary.fromString("d"), Binary.fromString("e"));
  statsB2C2P1.setMinMax(11l, 122l);
  ParquetFileWriter w = new ParquetFileWriter(configuration, schema, path);
  w.start();
  // First row group: two pages per column, each with its own stats.
  w.startBlock(3);
  w.startColumn(c1, 5, codec);
  w.writeDataPage(2, 4, BytesInput.from(bytes1), statsB1C1P1, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(3, 4, BytesInput.from(bytes1), statsB1C1P2, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.startColumn(c2, 6, codec);
  w.writeDataPage(3, 4, BytesInput.from(bytes2), statsB1C2P1, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(1, 4, BytesInput.from(bytes2), statsB1C2P2, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.endBlock();
  // Second row group: one page per column.
  w.startBlock(4);
  w.startColumn(c1, 7, codec);
  w.writeDataPage(7, 4, BytesInput.from(bytes3), statsB2C1P1, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.startColumn(c2, 8, codec);
  w.writeDataPage(8, 4, BytesInput.from(bytes4), statsB2C2P1, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.endBlock();
  w.end(new HashMap<String, String>());
  ParquetMetadata readFooter = ParquetFileReader.readFooter(configuration, path);
  // (Removed a no-op loop that iterated the footer columns calling getPath()
  // and discarding the result, and two unused String locals built from the
  // stats bytes without a charset.)
  // Expected per-chunk statistics: the union of the per-page stats above.
  BinaryStatistics bs1 = new BinaryStatistics();
  bs1.setMinMax(Binary.fromString("a"), Binary.fromString("z"));
  LongStatistics ls1 = new LongStatistics();
  ls1.setMinMax(-6l, 10l);
  BinaryStatistics bs2 = new BinaryStatistics();
  bs2.setMinMax(Binary.fromString("d"), Binary.fromString("e"));
  LongStatistics ls2 = new LongStatistics();
  ls2.setMinMax(11l, 122l);
  { // assert stats are correct for the first block
    TestUtils.assertStatsValuesEqual(bs1, readFooter.getBlocks().get(0).getColumns().get(0).getStatistics());
    TestUtils.assertStatsValuesEqual(ls1, readFooter.getBlocks().get(0).getColumns().get(1).getStatistics());
  }
  { // assert stats are correct for the second block
    TestUtils.assertStatsValuesEqual(bs2, readFooter.getBlocks().get(1).getColumns().get(0).getStatistics());
    TestUtils.assertStatsValuesEqual(ls2, readFooter.getBlocks().get(1).getColumns().get(1).getStatistics());
  }
}
@Test
public void testMetaDataFile() throws Exception {
  // Writes three part files, generates the summary metadata files, and verifies
  // the footers can be read back through every supported path (per-file, summary
  // file, parallel with and without summaries).
  File testDir = temp.newFolder();
  Path testDirPath = new Path(testDir.toURI());
  Configuration conf = new Configuration();
  final FileSystem fs = testDirPath.getFileSystem(conf);
  enforceEmptyDir(conf, testDirPath);
  MessageType schema = MessageTypeParser.parseMessageType("message m { required group a {required binary b;} required group c { required int64 d; }}");
  for (String part : new String[] { "part0", "part1", "part2" }) {
    createFile(conf, new Path(testDirPath, part), schema);
  }
  FileStatus outputStatus = fs.getFileStatus(testDirPath);
  List<Footer> footers = ParquetFileReader.readFooters(conf, outputStatus, false);
  validateFooters(footers);
  ParquetFileWriter.writeMetadataFile(conf, testDirPath, footers, JobSummaryLevel.ALL);
  // Re-reading the directory after the summary was written must still work.
  footers = ParquetFileReader.readFooters(conf, outputStatus, false);
  validateFooters(footers);
  // Reading a single part file yields exactly one footer.
  footers = ParquetFileReader.readFooters(conf, fs.getFileStatus(new Path(testDirPath, "part0")), false);
  assertEquals(1, footers.size());
  final FileStatus summaryStatus = fs.getFileStatus(new Path(testDirPath, ParquetFileWriter.PARQUET_METADATA_FILE));
  final FileStatus commonSummaryStatus = fs.getFileStatus(new Path(testDirPath, ParquetFileWriter.PARQUET_COMMON_METADATA_FILE));
  // The summary file itself must contain all three footers.
  validateFooters(ParquetFileReader.readSummaryFile(conf, summaryStatus));
  footers = ParquetFileReader.readAllFootersInParallelUsingSummaryFiles(conf, Arrays.asList(fs.listStatus(testDirPath, HiddenFileFilter.INSTANCE)), false);
  validateFooters(footers);
  // After deleting both summary files, parallel reading must fall back to the
  // individual file footers.
  fs.delete(summaryStatus.getPath(), false);
  fs.delete(commonSummaryStatus.getPath(), false);
  footers = ParquetFileReader.readAllFootersInParallelUsingSummaryFiles(conf, Arrays.asList(fs.listStatus(testDirPath)), false);
  validateFooters(footers);
}
@Test
public void testWriteReadStatisticsAllNulls() throws Exception {
  // Writes a single record whose only (required-in-schema, but unset) field is
  // null and verifies the footer statistics are non-empty and count one null.
  // this test assumes statistics will be read
  Assume.assumeTrue(!shouldIgnoreStatistics(Version.FULL_VERSION, BINARY));
  File testFile = temp.newFile();
  testFile.delete();
  writeSchema = "message example {\n" +
  "required binary content (UTF8);\n" +
  "}";
  Path path = new Path(testFile.toURI());
  MessageType schema = MessageTypeParser.parseMessageType(writeSchema);
  Configuration configuration = new Configuration();
  configuration.setBoolean("parquet.strings.signed-min-max.enabled", true);
  GroupWriteSupport.setSchema(schema, configuration);
  // try-with-resources guarantees the writer is closed even if write() throws.
  try (ParquetWriter<Group> writer = new ParquetWriter<Group>(path, configuration, new GroupWriteSupport())) {
    Group r1 = new SimpleGroup(schema);
    writer.write(r1);
  }
  ParquetMetadata readFooter = ParquetFileReader.readFooter(configuration, path);
  // assert the statistics object is not empty (wildcard instead of raw type)
  org.apache.parquet.column.statistics.Statistics<?> stats = readFooter.getBlocks().get(0).getColumns().get(0).getStatistics();
  assertFalse("is empty: " + stats, stats.isEmpty());
  // assert the number of nulls are correct for the first block
  assertEquals("nulls: " + stats, 1, stats.getNumNulls());
}
/**
 * Asserts that exactly three "part*" footers are present, that each backing file
 * exists, holds two row groups, and carries the expected key/value metadata
 * ("foo" -> "bar" plus fileName -> fileName, as written by createFile()).
 */
private void validateFooters(final List<Footer> metadata) {
  LOG.debug("{}", metadata);
  assertEquals(String.valueOf(metadata), 3, metadata.size());
  for (Footer footer : metadata) {
    File partFile = new File(footer.getFile().toUri());
    assertTrue(partFile.getName(), partFile.getName().startsWith("part"));
    assertTrue(partFile.getPath(), partFile.exists());
    ParquetMetadata parquetMetadata = footer.getParquetMetadata();
    assertEquals(2, parquetMetadata.getBlocks().size());
    Map<String, String> keyValues = parquetMetadata.getFileMetaData().getKeyValueMetaData();
    assertEquals("bar", keyValues.get("foo"));
    String fileName = footer.getFile().getName();
    assertEquals(fileName, keyValues.get(fileName));
  }
}
/**
 * Writes a two-row-group Parquet file at {@code path}: the first block holds 3 records
 * (two pages for column {a,b}, three pages for column {c,d}), the second holds 4 records
 * (one page per column). Also stores "foo"->"bar" and the file's own name in the
 * key/value metadata, matching what {@code validateFooters} asserts.
 *
 * @param configuration Hadoop configuration used to create the writer.
 * @param path destination file path.
 * @param schema message schema containing columns {a,b} and {c,d}.
 * @throws IOException if writing fails.
 */
private void createFile(Configuration configuration, Path path, MessageType schema) throws IOException {
  String[] path1 = {"a", "b"};
  ColumnDescriptor c1 = schema.getColumnDescription(path1);
  String[] path2 = {"c", "d"};
  ColumnDescriptor c2 = schema.getColumnDescription(path2);
  byte[] bytes1 = { 0, 1, 2, 3};
  byte[] bytes2 = { 1, 2, 3, 4};
  byte[] bytes3 = { 2, 3, 4, 5};
  byte[] bytes4 = { 3, 4, 5, 6};
  CompressionCodecName codec = CompressionCodecName.UNCOMPRESSED;
  BinaryStatistics stats1 = new BinaryStatistics();
  BinaryStatistics stats2 = new BinaryStatistics();
  ParquetFileWriter w = new ParquetFileWriter(configuration, schema, path);
  w.start();
  // First row group: 3 records.
  w.startBlock(3);
  w.startColumn(c1, 5, codec);
  w.writeDataPage(2, 4, BytesInput.from(bytes1), stats1, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(3, 4, BytesInput.from(bytes1), stats1, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.startColumn(c2, 6, codec);
  w.writeDataPage(2, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(3, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN);
  w.writeDataPage(1, 4, BytesInput.from(bytes2), stats2, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.endBlock();
  // Second row group: 4 records, a single page per column.
  w.startBlock(4);
  w.startColumn(c1, 7, codec);
  w.writeDataPage(7, 4, BytesInput.from(bytes3), stats1, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.startColumn(c2, 8, codec);
  w.writeDataPage(8, 4, BytesInput.from(bytes4), stats2, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.endBlock();
  // Key/value metadata checked by validateFooters().
  final HashMap<String, String> extraMetaData = new HashMap<String, String>();
  extraMetaData.put("foo", "bar");
  extraMetaData.put(path.getName(), path.getName());
  w.end(extraMetaData);
}
/**
 * Reads the next page of the column at {@code path} and asserts its value count
 * and raw page bytes match the expected values.
 */
private void validateContains(MessageType schema, PageReadStore pages, String[] path, int values, BytesInput bytes) throws IOException {
  ColumnDescriptor column = schema.getColumnDescription(path);
  DataPage page = pages.getPageReader(column).readPage();
  assertEquals(values, page.getValueCount());
  byte[] expectedBytes = bytes.toByteArray();
  byte[] actualBytes = ((DataPageV1) page).getBytes().toByteArray();
  assertArrayEquals(expectedBytes, actualBytes);
}
@Test
public void testMergeMetadata() {
  FileMetaData md1 = new FileMetaData(
      new MessageType("root1",
          new PrimitiveType(REPEATED, BINARY, "a"),
          new PrimitiveType(OPTIONAL, BINARY, "b")),
      new HashMap<String, String>(), "test");
  FileMetaData md2 = new FileMetaData(
      new MessageType("root2",
          new PrimitiveType(REQUIRED, BINARY, "c")),
      new HashMap<String, String>(), "test2");
  GlobalMetaData merged = ParquetFileWriter.mergeInto(md2, ParquetFileWriter.mergeInto(md1, null));
  // assertEquals takes (expected, actual) — the original call had them swapped, which
  // produces a misleading failure message. The merged schema is the union of both
  // inputs, keeping the first file's root name (matches testMergeFooters' ordering).
  assertEquals(
      new MessageType("root1",
          new PrimitiveType(REPEATED, BINARY, "a"),
          new PrimitiveType(OPTIONAL, BINARY, "b"),
          new PrimitiveType(REQUIRED, BINARY, "c")),
      merged.getSchema());
}
@Test
public void testMergeFooters() {
  // Two footers with 2 + 1 blocks; the merged metadata must contain all 3 blocks in
  // order and the union of the two schemas (root name taken from the first footer).
  List<BlockMetaData> oneBlocks = new ArrayList<BlockMetaData>();
  oneBlocks.add(new BlockMetaData());
  oneBlocks.add(new BlockMetaData());
  List<BlockMetaData> twoBlocks = new ArrayList<BlockMetaData>();
  twoBlocks.add(new BlockMetaData());
  List<BlockMetaData> expected = new ArrayList<BlockMetaData>();
  expected.addAll(oneBlocks);
  expected.addAll(twoBlocks);
  // One footer with a qualified ("file:") path, one with a bare path.
  Footer one = new Footer(new Path("file:/tmp/output/one.parquet"),
      new ParquetMetadata(new FileMetaData(
          new MessageType("root1",
              new PrimitiveType(REPEATED, BINARY, "a"),
              new PrimitiveType(OPTIONAL, BINARY, "b")),
          new HashMap<String, String>(), "test"),
          oneBlocks));
  Footer two = new Footer(new Path("/tmp/output/two.parquet"),
      new ParquetMetadata(new FileMetaData(
          new MessageType("root2",
              new PrimitiveType(REQUIRED, BINARY, "c")),
          new HashMap<String, String>(), "test2"),
          twoBlocks));
  List<Footer> footers = new ArrayList<Footer>();
  footers.add(one);
  footers.add(two);
  ParquetMetadata merged = ParquetFileWriter.mergeFooters(
      new Path("/tmp"), footers);
  assertEquals(
      new MessageType("root1",
          new PrimitiveType(REPEATED, BINARY, "a"),
          new PrimitiveType(OPTIONAL, BINARY, "b"),
          new PrimitiveType(REQUIRED, BINARY, "c")),
      merged.getFileMetaData().getSchema());
  assertEquals("Should have all blocks", expected, merged.getBlocks());
}
/**
 * {@link ParquetFileWriter#mergeFooters(Path, List)} expects a fully-qualified
 * path for the root and crashes if a relative one is provided.
 */
@Test
public void testWriteMetadataFileWithRelativeOutputPath() throws IOException {
  Configuration conf = new Configuration();
  FileSystem fs = FileSystem.get(conf);
  Path relativeRoot = new Path("target/_test_relative");
  Path qualifiedRoot = fs.makeQualified(relativeRoot);

  // Only the file metadata of the footer is consulted, so a mock suffices.
  FileMetaData fileMetaData = new FileMetaData(
      new MessageType("root1",
          new PrimitiveType(REPEATED, BINARY, "a")),
      new HashMap<String, String>(), "test");
  ParquetMetadata metadataMock = Mockito.mock(ParquetMetadata.class);
  Mockito.when(metadataMock.getFileMetaData()).thenReturn(fileMetaData);

  List<Footer> footers = new ArrayList<Footer>();
  footers.add(new Footer(new Path(qualifiedRoot, "one"), metadataMock));

  // Passing the *relative* root must not throw an exception.
  ParquetFileWriter.writeMetadataFile(conf, relativeRoot, footers, JobSummaryLevel.ALL);
}
@Test
public void testColumnIndexWriteRead() throws Exception {
  File testFile = temp.newFile();
  testFile.delete();
  Path path = new Path(testFile.toURI());
  Configuration configuration = new Configuration();
  ParquetFileWriter w = new ParquetFileWriter(configuration, SCHEMA, path);
  w.start();
  // Block 0: pages written via the overload without a first-row index.
  w.startBlock(4);
  w.startColumn(C1, 7, CODEC);
  w.writeDataPage(7, 4, BytesInput.from(BYTES3), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.startColumn(C2, 8, CODEC);
  w.writeDataPage(8, 4, BytesInput.from(BYTES4), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.endBlock();
  // Block 1: pages with statistics and first-row indexes; page start/end positions are
  // recorded so the offset index can be validated against them below.
  w.startBlock(4);
  w.startColumn(C1, 5, CODEC);
  long c1p1Starts = w.getPos();
  w.writeDataPage(2, 4, BytesInput.from(BYTES1), statsC1(null, Binary.fromString("aaa")), 1, BIT_PACKED, BIT_PACKED,
      PLAIN);
  long c1p2Starts = w.getPos();
  w.writeDataPage(3, 4, BytesInput.from(BYTES1), statsC1(Binary.fromString("bbb"), Binary.fromString("ccc")), 3,
      BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  long c1Ends = w.getPos();
  w.startColumn(C2, 6, CODEC);
  long c2p1Starts = w.getPos();
  w.writeDataPage(2, 4, BytesInput.from(BYTES2), statsC2(117l, 100l), 1, BIT_PACKED, BIT_PACKED, PLAIN);
  long c2p2Starts = w.getPos();
  // Middle page contains only nulls.
  w.writeDataPage(3, 4, BytesInput.from(BYTES2), statsC2(null, null, null), 2, BIT_PACKED, BIT_PACKED, PLAIN);
  long c2p3Starts = w.getPos();
  w.writeDataPage(1, 4, BytesInput.from(BYTES2), statsC2(0l), 1, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  long c2Ends = w.getPos();
  w.endBlock();
  // Block 2: oversized stats for C1 so its column index exceeds the limit and is dropped.
  w.startBlock(4);
  w.startColumn(C1, 7, CODEC);
  w.writeDataPage(7, 4, BytesInput.from(BYTES3),
      // Creating huge stats so the column index will reach the limit and won't be written
      statsC1(
          Binary.fromConstantByteArray(new byte[(int) MAX_STATS_SIZE]),
          Binary.fromConstantByteArray(new byte[1])),
      4, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.startColumn(C2, 8, CODEC);
  w.writeDataPage(8, 4, BytesInput.from(BYTES4), EMPTY_STATS, BIT_PACKED, BIT_PACKED, PLAIN);
  w.endColumn();
  w.endBlock();
  w.end(new HashMap<String, String>());
  try (ParquetFileReader reader = new ParquetFileReader(HadoopInputFile.fromPath(path, configuration),
      ParquetReadOptions.builder().build())) {
    ParquetMetadata footer = reader.getFooter();
    assertEquals(3, footer.getBlocks().size());
    BlockMetaData blockMeta = footer.getBlocks().get(1);
    assertEquals(2, blockMeta.getColumns().size());
    // Column index of C1 in block 1: ascending min/max, one null in the first page.
    ColumnIndex columnIndex = reader.readColumnIndex(blockMeta.getColumns().get(0));
    assertEquals(BoundaryOrder.ASCENDING, columnIndex.getBoundaryOrder());
    assertTrue(Arrays.asList(1l, 0l).equals(columnIndex.getNullCounts()));
    assertTrue(Arrays.asList(false, false).equals(columnIndex.getNullPages()));
    List<ByteBuffer> minValues = columnIndex.getMinValues();
    assertEquals(2, minValues.size());
    List<ByteBuffer> maxValues = columnIndex.getMaxValues();
    assertEquals(2, maxValues.size());
    assertEquals("aaa", new String(minValues.get(0).array(), StandardCharsets.UTF_8));
    assertEquals("aaa", new String(maxValues.get(0).array(), StandardCharsets.UTF_8));
    assertEquals("bbb", new String(minValues.get(1).array(), StandardCharsets.UTF_8));
    assertEquals("ccc", new String(maxValues.get(1).array(), StandardCharsets.UTF_8));
    // Column index of C2 in block 1: descending values, all-null middle page
    // (empty min/max arrays for that page).
    columnIndex = reader.readColumnIndex(blockMeta.getColumns().get(1));
    assertEquals(BoundaryOrder.DESCENDING, columnIndex.getBoundaryOrder());
    assertTrue(Arrays.asList(0l, 3l, 0l).equals(columnIndex.getNullCounts()));
    assertTrue(Arrays.asList(false, true, false).equals(columnIndex.getNullPages()));
    minValues = columnIndex.getMinValues();
    assertEquals(3, minValues.size());
    maxValues = columnIndex.getMaxValues();
    assertEquals(3, maxValues.size());
    assertEquals(100, BytesUtils.bytesToLong(minValues.get(0).array()));
    assertEquals(117, BytesUtils.bytesToLong(maxValues.get(0).array()));
    assertEquals(0, minValues.get(1).array().length);
    assertEquals(0, maxValues.get(1).array().length);
    assertEquals(0, BytesUtils.bytesToLong(minValues.get(2).array()));
    assertEquals(0, BytesUtils.bytesToLong(maxValues.get(2).array()));
    // Offset index of C1 in block 1: offsets/sizes must match the recorded positions.
    OffsetIndex offsetIndex = reader.readOffsetIndex(blockMeta.getColumns().get(0));
    assertEquals(2, offsetIndex.getPageCount());
    assertEquals(c1p1Starts, offsetIndex.getOffset(0));
    assertEquals(c1p2Starts, offsetIndex.getOffset(1));
    assertEquals(c1p2Starts - c1p1Starts, offsetIndex.getCompressedPageSize(0));
    assertEquals(c1Ends - c1p2Starts, offsetIndex.getCompressedPageSize(1));
    assertEquals(0, offsetIndex.getFirstRowIndex(0));
    assertEquals(1, offsetIndex.getFirstRowIndex(1));
    // Offset index of C2 in block 1.
    offsetIndex = reader.readOffsetIndex(blockMeta.getColumns().get(1));
    assertEquals(3, offsetIndex.getPageCount());
    assertEquals(c2p1Starts, offsetIndex.getOffset(0));
    assertEquals(c2p2Starts, offsetIndex.getOffset(1));
    assertEquals(c2p3Starts, offsetIndex.getOffset(2));
    assertEquals(c2p2Starts - c2p1Starts, offsetIndex.getCompressedPageSize(0));
    assertEquals(c2p3Starts - c2p2Starts, offsetIndex.getCompressedPageSize(1));
    assertEquals(c2Ends - c2p3Starts, offsetIndex.getCompressedPageSize(2));
    assertEquals(0, offsetIndex.getFirstRowIndex(0));
    assertEquals(1, offsetIndex.getFirstRowIndex(1));
    assertEquals(3, offsetIndex.getFirstRowIndex(2));
    // The oversized column index of block 2 / column 0 must not have been written.
    assertNull(reader.readColumnIndex(footer.getBlocks().get(2).getColumns().get(0)));
  }
}
/** Builds binary statistics for column C1; a {@code null} entry increments the null count. */
private org.apache.parquet.column.statistics.Statistics<?> statsC1(Binary... values) {
  org.apache.parquet.column.statistics.Statistics<?> stats =
      org.apache.parquet.column.statistics.Statistics.createStats(C1.getPrimitiveType());
  for (Binary value : values) {
    if (value != null) {
      stats.updateStats(value);
    } else {
      stats.incrementNumNulls();
    }
  }
  return stats;
}
/** Builds long statistics for column C2; a {@code null} entry increments the null count. */
private org.apache.parquet.column.statistics.Statistics<?> statsC2(Long... values) {
  org.apache.parquet.column.statistics.Statistics<?> stats =
      org.apache.parquet.column.statistics.Statistics.createStats(C2.getPrimitiveType());
  for (Long value : values) {
    if (value != null) {
      stats.updateStats(value);
    } else {
      stats.incrementNumNulls();
    }
  }
  return stats;
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.cxx.CompilerProvider;
import com.facebook.buck.cxx.CxxBuckConfig;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxToolProvider;
import com.facebook.buck.cxx.DefaultLinkerProvider;
import com.facebook.buck.cxx.ElfSharedLibraryInterfaceFactory;
import com.facebook.buck.cxx.GnuArchiver;
import com.facebook.buck.cxx.GnuLinker;
import com.facebook.buck.cxx.HeaderVerification;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.cxx.LinkerProvider;
import com.facebook.buck.cxx.MungingDebugPathSanitizer;
import com.facebook.buck.cxx.PosixNmSymbolNameTool;
import com.facebook.buck.cxx.PrefixMapDebugPathSanitizer;
import com.facebook.buck.cxx.PreprocessorProvider;
import com.facebook.buck.io.ExecutableFinder;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.log.Logger;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.ImmutableFlavor;
import com.facebook.buck.rules.ConstantToolProvider;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.ToolProvider;
import com.facebook.buck.rules.VersionedTool;
import com.facebook.buck.util.environment.Platform;
import com.facebook.infer.annotation.Assertions;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableBiMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.regex.Pattern;
public class NdkCxxPlatforms {
// Logger for diagnostics (NDK root resolution, header whitelist, etc.).
private static final Logger LOG = Logger.get(NdkCxxPlatforms.class);

/**
 * Magic path prefix we use to denote the machine-specific location of the Android NDK. Why "@"?
 * It's uncommon enough to mark that path element as special while not being a metacharacter in
 * either make, shell, or regular expression syntax.
 * <p/>
 * We also have prefixes for tool specific paths, even though they're sub-paths of
 * `@ANDROID_NDK_ROOT@`. This is to sanitize host-specific sub-directories in the toolchain
 * (e.g. darwin-x86_64) which would otherwise break determinism and caching when using
 * cross-compilation.
 */
public static final String ANDROID_NDK_ROOT = "@ANDROID_NDK_ROOT@";

/**
 * Magic string we substitute into debug paths in place of the build-host name, erasing the
 * difference between say, building on Darwin and building on Linux.
 */
public static final String BUILD_HOST_SUBST = "@BUILD_HOST@";

// Defaults used when the corresponding configuration settings are absent.
public static final NdkCxxPlatformCompiler.Type DEFAULT_COMPILER_TYPE =
    NdkCxxPlatformCompiler.Type.GCC;
public static final String DEFAULT_TARGET_APP_PLATFORM = "android-9";
public static final ImmutableSet<String> DEFAULT_CPU_ABIS =
    ImmutableSet.of("arm", "armv7", "x86");
public static final NdkCxxPlatforms.CxxRuntime DEFAULT_CXX_RUNTIME =
    NdkCxxPlatforms.CxxRuntime.GNUSTL;

// Maps the host build platform to the NDK's Host entry for it (used to locate the
// host-specific toolchain sub-directory).
private static final ImmutableMap<Platform, Host> BUILD_PLATFORMS =
    ImmutableMap.of(
        Platform.LINUX, Host.LINUX_X86_64,
        Platform.MACOS, Host.DARWIN_X86_64,
        Platform.WINDOWS, Host.WINDOWS_X86_64);

// Utility class, do not instantiate.
private NdkCxxPlatforms() { }
/**
 * Parses the major version out of an NDK version string.
 *
 * @param ndkVersion the raw NDK version string (e.g. "r10e" or "13.1.3345770").
 * @return the major version (9-14), or -1 if the string is not recognized.
 */
static int getNdkMajorVersion(String ndkVersion) {
  // Releases up to r10 use an "rN" prefix; r11 onward use a "N." numeric prefix.
  if (ndkVersion.startsWith("r9")) {
    return 9;
  }
  if (ndkVersion.startsWith("r10")) {
    return 10;
  }
  if (ndkVersion.startsWith("11.")) {
    return 11;
  }
  if (ndkVersion.startsWith("12.")) {
    return 12;
  }
  if (ndkVersion.startsWith("13.")) {
    return 13;
  }
  if (ndkVersion.startsWith("14.")) {
    return 14;
  }
  return -1;
}
/**
 * @return the default GCC version for the given NDK: 4.8 for NDKs older than r11
 *     (including unrecognized version strings), otherwise 4.9.
 */
public static String getDefaultGccVersionForNdk(Optional<String> ndkVersion) {
  return ndkVersion
      .filter(version -> getNdkMajorVersion(version) < 11)
      .map(version -> "4.8")
      .orElse("4.9");
}
/**
 * @return the default Clang version for the given NDK: 3.5 for NDKs older than r11
 *     (including unrecognized version strings), otherwise 3.8.
 */
public static String getDefaultClangVersionForNdk(Optional<String> ndkVersion) {
  return ndkVersion
      .filter(version -> getNdkMajorVersion(version) < 11)
      .map(version -> "3.5")
      .orElse("3.8");
}
/**
 * @return whether the given NDK/runtime combination is supported by Buck.
 */
public static boolean isSupportedConfiguration(
    Path ndkRoot,
    CxxRuntime cxxRuntime) {
  // TODO(12846101): With ndk r12, Android has started to use libc++abi. Buck
  // needs to figure out how to support that.
  // Everything is supported except libc++ on NDK r12 and newer.
  String ndkVersion = readVersion(ndkRoot);
  return cxxRuntime != NdkCxxPlatforms.CxxRuntime.LIBCXX
      || getNdkMajorVersion(ndkVersion) < 12;
}
/**
 * Convenience overload of the ten-argument {@code getPlatforms} that uses a fresh
 * {@link ExecutableFinder} and strict toolchain path resolution.
 *
 * @return the map holding the available {@link NdkCxxPlatform}s.
 */
public static ImmutableMap<TargetCpuType, NdkCxxPlatform> getPlatforms(
    CxxBuckConfig config,
    ProjectFilesystem filesystem,
    Path ndkRoot,
    NdkCxxPlatformCompiler compiler,
    CxxRuntime cxxRuntime,
    String androidPlatform,
    Set<String> cpuAbis,
    Platform platform) {
  return getPlatforms(
      config,
      filesystem,
      ndkRoot,
      compiler,
      cxxRuntime,
      androidPlatform,
      cpuAbis,
      platform,
      new ExecutableFinder(),
      /* strictToolchainPaths */ true);
}
/**
 * Builds an {@link NdkCxxPlatform} for every requested CPU ABI.
 *
 * @param cpuAbis the set of ABI names (e.g. "arm", "x86_64") to build platforms for;
 *     unrecognized names are ignored.
 * @return the map holding the available {@link NdkCxxPlatform}s, in the fixed order
 *     arm, armv7, arm64, x86, x86_64 (restricted to the requested ABIs).
 */
public static ImmutableMap<TargetCpuType, NdkCxxPlatform> getPlatforms(
    CxxBuckConfig config,
    ProjectFilesystem filesystem,
    Path ndkRoot,
    NdkCxxPlatformCompiler compiler,
    CxxRuntime cxxRuntime,
    String androidPlatform,
    Set<String> cpuAbis,
    Platform platform,
    ExecutableFinder executableFinder,
    boolean strictToolchainPaths) {
  // ABI name (as requested in cpuAbis) -> target CPU type. ImmutableMap preserves
  // insertion order, so iteration matches the historical arm, armv7, arm64, x86,
  // x86_64 ordering of the previous copy-pasted per-ABI blocks.
  ImmutableMap<String, TargetCpuType> abiToCpuType =
      ImmutableMap.of(
          "arm", TargetCpuType.ARM,
          "armv7", TargetCpuType.ARMV7,
          "arm64", TargetCpuType.ARM64,
          "x86", TargetCpuType.X86,
          "x86_64", TargetCpuType.X86_64);
  ImmutableMap.Builder<TargetCpuType, NdkCxxPlatform> ndkCxxPlatformBuilder =
      ImmutableMap.builder();
  for (Map.Entry<String, TargetCpuType> entry : abiToCpuType.entrySet()) {
    String abi = entry.getKey();
    if (!cpuAbis.contains(abi)) {
      continue;
    }
    TargetCpuType cpuType = entry.getValue();
    NdkCxxPlatformTargetConfiguration targetConfiguration = getTargetConfiguration(
        cpuType,
        compiler,
        androidPlatform);
    // The platform flavor is derived from the ABI name, e.g. "android-armv7".
    NdkCxxPlatform ndkCxxPlatform =
        build(
            config,
            filesystem,
            ImmutableFlavor.of("android-" + abi),
            platform,
            ndkRoot,
            targetConfiguration,
            cxxRuntime,
            executableFinder,
            strictToolchainPaths);
    ndkCxxPlatformBuilder.put(cpuType, ndkCxxPlatform);
  }
  return ndkCxxPlatformBuilder.build();
}
/**
 * Returns the target-specific configuration (toolchain, arch/ABI, target app platform,
 * and per-compiler-type assembler/compiler/linker flags) for the given CPU type.
 *
 * @param targetCpuType the CPU architecture to configure.
 * @param compiler the compiler description the flag tables are keyed by (GCC or Clang).
 * @param androidPlatform the target Android app platform (e.g. "android-9").
 * @throws AssertionError for CPU types with no configuration (currently MIPS).
 */
private static NdkCxxPlatformTargetConfiguration getTargetConfiguration(
    TargetCpuType targetCpuType,
    NdkCxxPlatformCompiler compiler,
    String androidPlatform) {
  switch (targetCpuType) {
    case ARM:
      // armv5, soft-float, thumb: the most broadly compatible ARM variant.
      ImmutableList<String> armeabiArchFlags =
          ImmutableList.of(
              "-march=armv5te",
              "-mtune=xscale",
              "-msoft-float",
              "-mthumb");
      return NdkCxxPlatformTargetConfiguration.builder()
          .setToolchain(Toolchain.ARM_LINUX_ANDROIDEABI)
          .setTargetArch(TargetArch.ARM)
          .setTargetArchAbi(TargetArchAbi.ARMEABI)
          .setTargetAppPlatform(androidPlatform)
          .setCompiler(compiler)
          .setToolchainTarget(ToolchainTarget.ARM_LINUX_ANDROIDEABI)
          // Clang needs an explicit -target triple; GCC's toolchain implies it.
          .putAssemblerFlags(NdkCxxPlatformCompiler.Type.GCC, armeabiArchFlags)
          .putAssemblerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.<String>builder()
                  .add("-target", "armv5te-none-linux-androideabi")
                  .addAll(armeabiArchFlags)
                  .build())
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.<String>builder()
                  .add("-Os")
                  .addAll(armeabiArchFlags)
                  .build())
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.<String>builder()
                  .add("-target", "armv5te-none-linux-androideabi", "-Os")
                  .addAll(armeabiArchFlags)
                  .build())
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.of(
                  "-march=armv5te",
                  "-Wl,--fix-cortex-a8"))
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.of(
                  "-target", "armv5te-none-linux-androideabi",
                  "-march=armv5te",
                  "-Wl,--fix-cortex-a8"))
          .build();
    case ARMV7:
      // armv7 with VFP, softfp float ABI, thumb.
      ImmutableList<String> armeabiv7ArchFlags =
          ImmutableList.of(
              "-march=armv7-a",
              "-mfpu=vfpv3-d16",
              "-mfloat-abi=softfp",
              "-mthumb");
      return NdkCxxPlatformTargetConfiguration.builder()
          .setToolchain(Toolchain.ARM_LINUX_ANDROIDEABI)
          .setTargetArch(TargetArch.ARM)
          .setTargetArchAbi(TargetArchAbi.ARMEABI_V7A)
          .setTargetAppPlatform(androidPlatform)
          .setCompiler(compiler)
          .setToolchainTarget(ToolchainTarget.ARM_LINUX_ANDROIDEABI)
          .putAssemblerFlags(NdkCxxPlatformCompiler.Type.GCC, armeabiv7ArchFlags)
          .putAssemblerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.<String>builder()
                  .add("-target", "armv7-none-linux-androideabi")
                  .addAll(armeabiv7ArchFlags)
                  .build())
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.<String>builder()
                  .add("-finline-limit=64", "-Os")
                  .addAll(armeabiv7ArchFlags)
                  .build())
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.<String>builder()
                  .add("-target", "armv7-none-linux-androideabi", "-Os")
                  .addAll(armeabiv7ArchFlags)
                  .build())
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.<String>of())
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.of("-target", "armv7-none-linux-androideabi"))
          .build();
    case ARM64:
      ImmutableList<String> arm64ArchFlags =
          ImmutableList.of(
              "-march=armv8-a"
          );
      return NdkCxxPlatformTargetConfiguration.builder()
          .setToolchain(Toolchain.AARCH64_LINUX_ANDROID)
          .setTargetArch(TargetArch.ARM64)
          .setTargetArchAbi(TargetArchAbi.ARM64_V8A)
          .setTargetAppPlatform(androidPlatform)
          .setCompiler(compiler)
          .setToolchainTarget(ToolchainTarget.AARCH64_LINUX_ANDROID)
          .putAssemblerFlags(NdkCxxPlatformCompiler.Type.GCC, arm64ArchFlags)
          .putAssemblerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.<String>builder()
                  .add("-target", "aarch64-none-linux-android")
                  .addAll(arm64ArchFlags)
                  .build())
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.<String>builder()
                  .add("-O2")
                  .add("-fomit-frame-pointer")
                  .add("-fstrict-aliasing")
                  .add("-funswitch-loops")
                  .add("-finline-limit=300")
                  .addAll(arm64ArchFlags)
                  .build())
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.<String>builder()
                  .add("-target", "aarch64-none-linux-android")
                  .add("-O2")
                  .add("-fomit-frame-pointer")
                  .add("-fstrict-aliasing")
                  .addAll(arm64ArchFlags)
                  .build())
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.<String>of())
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.of("-target", "aarch64-none-linux-android"))
          .build();
    case X86:
      return NdkCxxPlatformTargetConfiguration.builder()
          .setToolchain(Toolchain.X86)
          .setTargetArch(TargetArch.X86)
          .setTargetArchAbi(TargetArchAbi.X86)
          .setTargetAppPlatform(androidPlatform)
          .setCompiler(compiler)
          .setToolchainTarget(ToolchainTarget.I686_LINUX_ANDROID)
          .putAssemblerFlags(NdkCxxPlatformCompiler.Type.GCC, ImmutableList.<String>of())
          .putAssemblerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.of(
                  "-target", "i686-none-linux-android"))
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.of(
                  "-funswitch-loops",
                  "-finline-limit=300",
                  "-O2"))
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.of(
                  "-target", "i686-none-linux-android",
                  "-O2"))
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.<String>of())
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.of(
                  "-target", "i686-none-linux-android"))
          .build();
    case X86_64:
      return NdkCxxPlatformTargetConfiguration.builder()
          .setToolchain(Toolchain.X86_64)
          .setTargetArch(TargetArch.X86_64)
          .setTargetArchAbi(TargetArchAbi.X86_64)
          .setTargetAppPlatform(androidPlatform)
          .setCompiler(compiler)
          .setToolchainTarget(ToolchainTarget.X86_64_LINUX_ANDROID)
          .putAssemblerFlags(NdkCxxPlatformCompiler.Type.GCC, ImmutableList.<String>of())
          .putAssemblerFlags(NdkCxxPlatformCompiler.Type.CLANG, ImmutableList.<String>of())
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.of(
                  "-funswitch-loops",
                  "-finline-limit=300",
                  "-O2"))
          .putCompilerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.of(
                  "-target", "x86_64-none-linux-android",
                  "-O2"))
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.GCC,
              ImmutableList.<String>of())
          .putLinkerFlags(
              NdkCxxPlatformCompiler.Type.CLANG,
              ImmutableList.of(
                  "-target", "x86_64-none-linux-android"))
          .build();
    case MIPS:
      // MIPS is not supported; fall through to the AssertionError below.
      break;
  }
  throw new AssertionError();
}
/**
 * Builds a single {@link NdkCxxPlatform} for the given target configuration: resolves
 * the toolchain binaries, sets up debug-path sanitization, default linker flags, and
 * the C++ runtime.
 *
 * @param flavor the flavor the platform is registered under (e.g. "android-arm").
 * @param platform the host build platform; must be one of {@code BUILD_PLATFORMS}.
 * @param strictToolchainPaths whether toolchain paths are resolved strictly.
 */
@VisibleForTesting
static NdkCxxPlatform build(
    CxxBuckConfig config,
    ProjectFilesystem filesystem,
    Flavor flavor,
    Platform platform,
    Path ndkRoot,
    NdkCxxPlatformTargetConfiguration targetConfiguration,
    CxxRuntime cxxRuntime,
    ExecutableFinder executableFinder,
    boolean strictToolchainPaths) {
  // Create a version string to use when generating rule keys via the NDK tools we'll generate
  // below. This will be used in lieu of hashing the contents of the tools, so that builds from
  // different host platforms (which produce identical output) will share the cache with one
  // another.
  NdkCxxPlatformCompiler.Type compilerType = targetConfiguration.getCompiler().getType();
  String version =
      Joiner.on('-').join(
          ImmutableList.of(
              readVersion(ndkRoot),
              targetConfiguration.getToolchain(),
              targetConfiguration.getTargetAppPlatform(),
              compilerType,
              targetConfiguration.getCompiler().getVersion(),
              targetConfiguration.getCompiler().getGccVersion(),
              cxxRuntime));
  Host host = Preconditions.checkNotNull(BUILD_PLATFORMS.get(platform));
  NdkCxxToolchainPaths toolchainPaths = new NdkCxxToolchainPaths(
      filesystem,
      ndkRoot,
      targetConfiguration,
      host.toString(),
      cxxRuntime,
      strictToolchainPaths);
  // Sanitized paths will have magic placeholders for parts of the paths that
  // are machine/host-specific. See comments on ANDROID_NDK_ROOT and
  // BUILD_HOST_SUBST above.
  NdkCxxToolchainPaths sanitizedPaths = toolchainPaths.getSanitizedPaths();
  // Build up the map of paths that must be sanitized.
  ImmutableBiMap.Builder<Path, Path> sanitizePathsBuilder = ImmutableBiMap.builder();
  sanitizePathsBuilder.put(toolchainPaths.getNdkToolRoot(), sanitizedPaths.getNdkToolRoot());
  if (compilerType != NdkCxxPlatformCompiler.Type.GCC) {
    // Clang still uses pieces of the GCC toolchain, so sanitize that root too.
    sanitizePathsBuilder.put(
        toolchainPaths.getNdkGccToolRoot(),
        sanitizedPaths.getNdkGccToolRoot());
  }
  sanitizePathsBuilder.put(ndkRoot, Paths.get(ANDROID_NDK_ROOT));
  CxxToolProvider.Type type =
      compilerType == NdkCxxPlatformCompiler.Type.CLANG ?
          CxxToolProvider.Type.CLANG :
          CxxToolProvider.Type.GCC;
  // C and C++ front-ends; the same tool backs both the compiler and preprocessor roles.
  ToolProvider ccTool =
      new ConstantToolProvider(
          getCTool(
              toolchainPaths,
              compilerType.getCc(),
              version,
              executableFinder));
  ToolProvider cxxTool =
      new ConstantToolProvider(
          getCTool(
              toolchainPaths,
              compilerType.getCxx(),
              version,
              executableFinder));
  CompilerProvider cc = new CompilerProvider(ccTool, type);
  PreprocessorProvider cpp = new PreprocessorProvider(ccTool, type);
  CompilerProvider cxx = new CompilerProvider(cxxTool, type);
  PreprocessorProvider cxxpp = new PreprocessorProvider(cxxTool, type);
  CxxPlatform.Builder cxxPlatformBuilder = CxxPlatform.builder();
  ImmutableBiMap<Path, Path> sanitizePaths = sanitizePathsBuilder.build();
  PrefixMapDebugPathSanitizer compilerDebugPathSanitizer = new PrefixMapDebugPathSanitizer(
      config.getDebugPathSanitizerLimit(),
      File.separatorChar,
      Paths.get("."),
      sanitizePaths,
      filesystem.getRootPath().toAbsolutePath(),
      type,
      filesystem);
  MungingDebugPathSanitizer assemblerDebugPathSanitizer = new MungingDebugPathSanitizer(
      config.getDebugPathSanitizerLimit(),
      File.separatorChar,
      Paths.get("."),
      sanitizePaths);
  cxxPlatformBuilder
      .setFlavor(flavor)
      .setAs(cc)
      .addAllAsflags(getAsflags(targetConfiguration, toolchainPaths))
      .setAspp(cpp)
      .setCc(cc)
      .addAllCflags(getCflagsInternal(targetConfiguration, toolchainPaths))
      .setCpp(cpp)
      .addAllCppflags(getCppflags(targetConfiguration, toolchainPaths))
      .setCxx(cxx)
      .addAllCxxflags(getCxxflagsInternal(targetConfiguration, toolchainPaths))
      .setCxxpp(cxxpp)
      .addAllCxxppflags(getCxxppflags(targetConfiguration, toolchainPaths))
      .setLd(
          new DefaultLinkerProvider(
              LinkerProvider.Type.GNU,
              new ConstantToolProvider(
                  getCcLinkTool(
                      targetConfiguration,
                      toolchainPaths,
                      compilerType.getCxx(),
                      version,
                      cxxRuntime,
                      executableFinder))))
      .addAllLdflags(
          targetConfiguration.getLinkerFlags(compilerType))
      // Default linker flags added by the NDK
      .addLdflags(
          // Add a deterministic build ID to Android builds.
          // We use it to find symbols from arbitrary binaries.
          "-Wl,--build-id",
          // Enforce the NX (no execute) security feature
          "-Wl,-z,noexecstack",
          // Strip unused code
          "-Wl,--gc-sections",
          // Refuse to produce dynamic objects with undefined symbols
          "-Wl,-z,defs",
          // Forbid dangerous copy "relocations"
          "-Wl,-z,nocopyreloc",
          // We always pass the runtime library on the command line, so setting this flag
          // means the resulting link will only use it if it was actually needed it.
          "-Wl,--as-needed")
      .setStrip(
          getGccTool(toolchainPaths, "strip", version, executableFinder))
      .setSymbolNameTool(
          new PosixNmSymbolNameTool(
              getGccTool(toolchainPaths, "nm", version, executableFinder)))
      .setAr(
          new GnuArchiver(
              getGccTool(toolchainPaths, "ar", version, executableFinder)))
      .setRanlib(
          getGccTool(toolchainPaths, "ranlib", version, executableFinder))
      // NDK builds are cross compiled, so the header is the same regardless of the host platform.
      .setCompilerDebugPathSanitizer(compilerDebugPathSanitizer)
      .setAssemblerDebugPathSanitizer(assemblerDebugPathSanitizer)
      .setSharedLibraryExtension("so")
      .setSharedLibraryVersionedExtensionFormat("so.%s")
      .setStaticLibraryExtension("a")
      .setObjectFileExtension("o")
      .setSharedLibraryInterfaceFactory(
          config.shouldUseSharedLibraryInterfaces() ?
              Optional.of(
                  ElfSharedLibraryInterfaceFactory.of(
                      new ConstantToolProvider(
                          getGccTool(toolchainPaths, "objcopy", version, executableFinder)))) :
              Optional.empty());
  // Add the NDK root path to the white-list so that headers from the NDK won't trigger the
  // verification warnings. Ideally, long-term, we'd model NDK libs/headers via automatically
  // generated nodes/descriptions so that they wouldn't need to special case it here.
  HeaderVerification headerVerification = config.getHeaderVerification();
  try {
    headerVerification = headerVerification.withPlatformWhitelist(
        ImmutableList.of(
            "^" + Pattern.quote(ndkRoot.toRealPath().toString() + File.separatorChar) + ".*"));
  } catch (IOException e) {
    // Best-effort: keep the unmodified whitelist if the NDK path cannot be resolved.
    LOG.warn(e, "NDK path could not be resolved: %s", ndkRoot);
  }
  cxxPlatformBuilder.setHeaderVerification(headerVerification);
  LOG.debug("NDK root: %s", ndkRoot.toString());
  LOG.debug("Headers verification platform whitelist: %s",
      headerVerification.getPlatformWhitelist());
  // The system runtime needs no explicit link flags; all others are linked explicitly.
  if (cxxRuntime != CxxRuntime.SYSTEM) {
    cxxPlatformBuilder.putRuntimeLdflags(
        Linker.LinkableDepType.SHARED, "-l" + cxxRuntime.getSharedName());
    cxxPlatformBuilder.putRuntimeLdflags(
        Linker.LinkableDepType.STATIC, "-l" + cxxRuntime.getStaticName());
  }
  CxxPlatform cxxPlatform = cxxPlatformBuilder.build();
  NdkCxxPlatform.Builder builder = NdkCxxPlatform.builder();
  builder
      .setCxxPlatform(cxxPlatform)
      .setCxxRuntime(cxxRuntime)
      .setObjdump(
          getGccTool(toolchainPaths, "objdump", version, executableFinder));
  if (cxxRuntime != CxxRuntime.SYSTEM) {
    builder.setCxxSharedRuntimePath(
        toolchainPaths.getCxxRuntimeLibsDirectory()
            .resolve(cxxRuntime.getSoname()));
  }
  return builder.build();
}
/**
 * Returns the version of the Android NDK located at {@code ndkRoot}, or throws if it
 * cannot be determined.
 *
 * @param ndkRoot the path where the Android NDK is located.
 * @return the version of the Android NDK located in {@code ndkRoot}.
 */
private static String readVersion(Path ndkRoot) {
  // Optional.get() throws NoSuchElementException when no version can be found.
  return DefaultAndroidDirectoryResolver.findNdkVersionFromDirectory(ndkRoot).get();
}
private static Path getToolPath(
NdkCxxToolchainPaths toolchainPaths,
String tool,
ExecutableFinder executableFinder) {
Path expected = toolchainPaths.getToolPath(tool);
Optional<Path> path =
executableFinder.getOptionalExecutable(expected, ImmutableMap.of());
Preconditions.checkState(path.isPresent(), expected.toString());
return path.get();
}
private static Path getGccToolPath(
NdkCxxToolchainPaths toolchainPaths,
String tool,
ExecutableFinder executableFinder) {
Path expected = toolchainPaths.getGccToolchainBinPath().resolve(tool);
Optional<Path> path =
executableFinder.getOptionalExecutable(expected, ImmutableMap.of());
Preconditions.checkState(path.isPresent(), expected.toString());
return path.get();
}
private static Tool getGccTool(
NdkCxxToolchainPaths toolchainPaths,
String tool,
String version,
ExecutableFinder executableFinder) {
return VersionedTool.of(
getGccToolPath(toolchainPaths, tool, executableFinder),
tool,
version);
}
private static Tool getCTool(
NdkCxxToolchainPaths toolchainPaths,
String tool,
String version,
ExecutableFinder executableFinder) {
return VersionedTool.of(
getToolPath(toolchainPaths, tool, executableFinder),
tool,
version);
}
  /**
   * "-isystem" flags pointing at the header directories of the C/C++ runtime
   * selected in {@code toolchainPaths}.
   */
  private static ImmutableList<String> getCxxRuntimeIncludeFlags(
      NdkCxxPlatformTargetConfiguration targetConfiguration,
      NdkCxxToolchainPaths toolchainPaths) {
    ImmutableList.Builder<String> flags = ImmutableList.builder();
    switch (toolchainPaths.getCxxRuntime()) {
      // gnustl ships generic headers plus per-ABI configuration headers.
      case GNUSTL:
        flags.add(
            "-isystem",
            toolchainPaths.getCxxRuntimeDirectory()
                .resolve("include")
                .toString());
        flags.add(
            "-isystem",
            toolchainPaths.getCxxRuntimeDirectory()
                .resolve("libs")
                .resolve(targetConfiguration.getTargetArchAbi().toString())
                .resolve("include")
                .toString());
        break;
      // libc++ needs its own headers, the libc++abi headers, and the NDK's
      // android support headers.
      case LIBCXX:
        flags.add(
            "-isystem",
            toolchainPaths.getCxxRuntimeDirectory()
                .resolve("libcxx")
                .resolve("include")
                .toString());
        flags.add(
            "-isystem",
            toolchainPaths.getCxxRuntimeDirectory()
                .getParent()
                .resolve("llvm-libc++abi")
                .resolve("libcxxabi")
                .resolve("include")
                .toString());
        flags.add(
            "-isystem",
            toolchainPaths.getNdkRoot()
                .resolve("sources")
                .resolve("android")
                .resolve("support")
                .resolve("include")
                .toString());
        break;
      // $CASES-OMITTED$
      // All other runtimes ship a single include directory.
      default:
        flags.add(
            "-isystem",
            toolchainPaths.getCxxRuntimeDirectory()
                .resolve("include")
                .toString());
    }
    return flags.build();
  }
  /**
   * Builds the {@link Linker} for this platform: the named driver plus the extra
   * flags (GCC toolchain location, sysroot, runtime library search path) needed
   * to link against the NDK.
   */
  private static Linker getCcLinkTool(
      NdkCxxPlatformTargetConfiguration targetConfiguration,
      NdkCxxToolchainPaths toolchainPaths,
      String tool,
      String version,
      CxxRuntime cxxRuntime,
      ExecutableFinder executableFinder) {
    ImmutableList.Builder<String> flags = ImmutableList.builder();
    // Clang still needs to find GCC tools.
    if (targetConfiguration.getCompiler().getType() == NdkCxxPlatformCompiler.Type.CLANG) {
      flags.add(
          "-gcc-toolchain",
          toolchainPaths.getNdkGccToolRoot().toString());
    }
    // Set the sysroot to the platform-specific path.
    flags.add("--sysroot=" + toolchainPaths.getSysroot());
    // TODO(#7264008): This was added for windows support but it's not clear why it's needed.
    if (targetConfiguration.getCompiler().getType() == NdkCxxPlatformCompiler.Type.GCC) {
      flags.add(
          "-B" + toolchainPaths.getLibexecGccToolPath(),
          "-B" + toolchainPaths.getLibPath());
    }
    // Add the path to the C/C++ runtime libraries, if necessary.
    if (cxxRuntime != CxxRuntime.SYSTEM) {
      flags.add(
          "-L" + toolchainPaths.getCxxRuntimeLibsDirectory().toString());
    }
    return new GnuLinker(
        VersionedTool.builder()
            .setPath(getToolPath(toolchainPaths, tool, executableFinder))
            .setName(tool)
            .setVersion(version)
            .setExtraArgs(flags.build())
            .build());
  }
  /**
   * Flags to be used when either preprocessing or compiling C or C++ sources.
   */
  private static ImmutableList<String> getCommonFlags(
      NdkCxxPlatformTargetConfiguration targetConfiguration,
      NdkCxxToolchainPaths toolchainPaths) {
    ImmutableList.Builder<String> flags = ImmutableList.builder();
    // Clang still needs to find the GCC tools.
    if (targetConfiguration.getCompiler().getType() == NdkCxxPlatformCompiler.Type.CLANG) {
      flags.add(
          "-gcc-toolchain",
          toolchainPaths.getNdkGccToolRoot().toString());
    }
    // TODO(#7264008): This was added for windows support but it's not clear why it's needed.
    if (targetConfiguration.getCompiler().getType() == NdkCxxPlatformCompiler.Type.GCC) {
      flags.add(
          "-B" + toolchainPaths.getLibexecGccToolPath(),
          "-B" + toolchainPaths.getToolchainBinPath());
    }
    // Enable default warnings and turn them into errors.
    flags.add(
        "-Wall",
        "-Werror");
    // NOTE: We pass all compiler flags to the preprocessor to make sure any necessary internal
    // macros get defined, and we also pass the include paths to the compiler, since we're not
    // sure whether we're doing combined preprocessing/compiling or not.
    if (targetConfiguration.getCompiler().getType() == NdkCxxPlatformCompiler.Type.CLANG) {
      flags.add("-Wno-unused-command-line-argument");
    }
    // NDK builds enable stack protector and debug symbols by default.
    flags.add(
        "-fstack-protector",
        "-g3");
    return flags.build();
  }
/**
* Flags to be used when either preprocessing or compiling C sources.
*/
private static ImmutableList<String> getCommonCFlags() {
return ImmutableList.of(
// Default to the newer C11 standard. This is *not* a default set in the NDK.
// Since this flag can be used multiple times, and because the compiler just uses
// whichever standard was specified last, cxx_library rules can override this from
// their BUCK-file definitions.
"-std=gnu11");
}
/**
* Flags to be used when either preprocessing or compiling C++ sources.
*/
private static ImmutableList<String> getCommonCxxFlags() {
return ImmutableList.of(
// Default to the newer C++11 standard. This is *not* a default set in the NDK.
// Since this flag can be used multiple times, and because the compiler just uses
// whichever standard was specified last, cxx_library rules can override this from
// their BUCK-file definitions.
"-std=gnu++11",
// By default, Android builds disable exceptions and runtime type identification.
"-fno-exceptions",
"-fno-rtti");
}
/**
* Flags to be used when preprocessing C or C++ sources.
*/
private static ImmutableList<String> getCommonPreprocessorFlags() {
return ImmutableList.of(
// Disable searching for headers provided by the system. This limits headers to just
// those provided by the NDK and any library dependencies.
"-nostdinc",
// Default macro definitions applied to all builds.
"-DNDEBUG",
"-DANDROID");
}
private static ImmutableList<String> getCommonIncludes(
NdkCxxToolchainPaths toolchainPaths) {
return ImmutableList.of(
"-isystem",
toolchainPaths.getNdkToolRoot()
.resolve("include")
.toString(),
"-isystem",
toolchainPaths.getLibPath()
.resolve("include")
.toString(),
"-isystem",
toolchainPaths.getSysroot()
.resolve("usr")
.resolve("include")
.toString(),
"-isystem",
toolchainPaths.getSysroot()
.resolve("usr")
.resolve("include")
.resolve("linux")
.toString());
}
private static ImmutableList<String> getAsflags(
NdkCxxPlatformTargetConfiguration targetConfiguration,
NdkCxxToolchainPaths toolchainPaths) {
return ImmutableList.<String>builder()
.addAll(getCommonFlags(targetConfiguration, toolchainPaths))
// Default assembler flags added by the NDK to enforce the NX (no execute) security feature.
.add("-Xassembler", "--noexecstack")
.addAll(targetConfiguration.getAssemblerFlags(targetConfiguration.getCompiler().getType()))
.build();
}
private static ImmutableList<String> getCppflags(
NdkCxxPlatformTargetConfiguration targetConfiguration,
NdkCxxToolchainPaths toolchainPaths) {
return ImmutableList.<String>builder()
.addAll(getCommonIncludes(toolchainPaths))
.addAll(getCommonPreprocessorFlags())
.addAll(getCommonFlags(targetConfiguration, toolchainPaths))
.addAll(getCommonCFlags())
.addAll(targetConfiguration.getCompilerFlags(targetConfiguration.getCompiler().getType()))
.build();
}
private static ImmutableList<String> getCxxppflags(
NdkCxxPlatformTargetConfiguration targetConfiguration,
NdkCxxToolchainPaths toolchainPaths) {
ImmutableList.Builder<String> flags = ImmutableList.builder();
flags.addAll(getCxxRuntimeIncludeFlags(targetConfiguration, toolchainPaths));
flags.addAll(getCommonIncludes(toolchainPaths));
flags.addAll(getCommonPreprocessorFlags());
flags.addAll(getCommonFlags(targetConfiguration, toolchainPaths));
flags.addAll(getCommonCxxFlags());
if (targetConfiguration.getCompiler().getType() == NdkCxxPlatformCompiler.Type.GCC) {
flags.add("-Wno-literal-suffix");
}
flags.addAll(targetConfiguration.getCompilerFlags(targetConfiguration.getCompiler().getType()));
return flags.build();
}
/**
* Flags used when compiling either C or C++ sources.
*/
private static ImmutableList<String> getCommonNdkCxxPlatformCompilerFlags() {
return ImmutableList.of(
// Default compiler flags provided by the NDK build makefiles.
"-ffunction-sections",
"-funwind-tables",
"-fomit-frame-pointer",
"-fno-strict-aliasing");
}
private static ImmutableList<String> getCflagsInternal(
NdkCxxPlatformTargetConfiguration targetConfiguration,
NdkCxxToolchainPaths toolchainPaths) {
return ImmutableList.<String>builder()
.addAll(
targetConfiguration.getCompilerFlags(targetConfiguration.getCompiler().getType()))
.addAll(getCommonCFlags())
.addAll(getCommonFlags(targetConfiguration, toolchainPaths))
.addAll(getCommonNdkCxxPlatformCompilerFlags())
.build();
}
private static ImmutableList<String> getCxxflagsInternal(
NdkCxxPlatformTargetConfiguration targetConfiguration,
NdkCxxToolchainPaths toolchainPaths) {
return ImmutableList.<String>builder()
.addAll(
targetConfiguration.getCompilerFlags(targetConfiguration.getCompiler().getType()))
.addAll(getCommonCxxFlags())
.addAll(getCommonFlags(targetConfiguration, toolchainPaths))
.addAll(getCommonNdkCxxPlatformCompilerFlags())
.build();
}
/**
* The CPU architectures to target.
*/
public enum TargetCpuType {
ARM,
ARMV7,
ARM64,
X86,
X86_64,
MIPS,
}
/**
* The build toolchain, named (including compiler version) after the target platform/arch.
*/
public enum Toolchain {
X86("x86"),
X86_64("x86_64"),
ARM_LINUX_ANDROIDEABI("arm-linux-androideabi"),
AARCH64_LINUX_ANDROID("aarch64-linux-android"),
;
private final String value;
Toolchain(String value) {
this.value = Preconditions.checkNotNull(value);
}
@Override
public String toString() {
return value;
}
}
/**
* Name of the target CPU architecture.
*/
public enum TargetArch {
X86("x86"),
X86_64("x86_64"),
ARM("arm"),
ARM64("arm64"),
;
private final String value;
TargetArch(String value) {
this.value = Preconditions.checkNotNull(value);
}
@Override
public String toString() {
return value;
}
}
/**
* Name of the target CPU + ABI.
*/
public enum TargetArchAbi {
X86("x86"),
X86_64("x86_64"),
ARMEABI("armeabi"),
ARMEABI_V7A("armeabi-v7a"),
ARM64_V8A("arm64-v8a"),
;
private final String value;
TargetArchAbi(String value) {
this.value = Preconditions.checkNotNull(value);
}
@Override
public String toString() {
return value;
}
}
/**
* The OS and Architecture that we're building on.
*/
public enum Host {
DARWIN_X86_64("darwin-x86_64"),
LINUX_X86_64("linux-x86_64"),
WINDOWS_X86_64("windows-x86_64"),
;
private final String value;
Host(String value) {
this.value = Preconditions.checkNotNull(value);
}
@Override
public String toString() {
return value;
}
}
/**
* The C/C++ runtime library to link against.
*/
public enum CxxRuntime {
SYSTEM("system", "system", "system"),
GABIXX("gabi++", "gabi++_shared", "gabi++_static"),
STLPORT("stlport", "stlport_shared", "stlport_static"),
GNUSTL("gnu-libstdc++", "gnustl_shared", "gnustl_static"),
LIBCXX("llvm-libc++", "c++_shared", "c++_static"),
;
private final String name;
private final String sharedName;
private final String staticName;
/**
* @param name the runtimes directory name in the NDK.
* @param sharedName the shared library name used for this runtime.
* @param staticName the the static library used for this runtime.
*/
CxxRuntime(String name, String sharedName, String staticName) {
this.name = name;
this.sharedName = sharedName;
this.staticName = staticName;
}
public String getName() {
return name;
}
public String getStaticName() {
return staticName;
}
public String getSharedName() {
return sharedName;
}
public String getSoname() {
return "lib" + sharedName + ".so";
}
}
/**
* The toolchains name for the platform being targeted.
*/
public enum ToolchainTarget {
I686_LINUX_ANDROID("i686-linux-android"),
X86_64_LINUX_ANDROID("x86_64-linux-android"),
ARM_LINUX_ANDROIDEABI("arm-linux-androideabi"),
AARCH64_LINUX_ANDROID("aarch64-linux-android"),
;
private final String value;
ToolchainTarget(String value) {
this.value = Preconditions.checkNotNull(value);
}
@Override
public String toString() {
return value;
}
}
  /**
   * Computes and caches filesystem locations inside an Android NDK install for a
   * given target configuration, host, and C/C++ runtime. Paths are described as
   * "/"-separated patterns whose {curly-brace} tokens are substituted from the
   * target configuration (see {@link #processPathPattern(Path, String)}).
   */
  static class NdkCxxToolchainPaths {
    private Path ndkRoot;
    private String ndkVersion;
    private NdkCxxPlatformTargetConfiguration targetConfiguration;
    private String hostName;
    private CxxRuntime cxxRuntime;
    // Memoizes processPathPattern results, keyed by "<root>/<pattern>".
    private Map<String, Path> cachedPaths;
    // When true, resolved paths are asserted to exist on disk.
    private boolean strict;
    private int ndkMajorVersion;
    private ProjectFilesystem filesystem;
    // Convenience constructor that reads the NDK version from ndkRoot.
    NdkCxxToolchainPaths(
        ProjectFilesystem filesystem,
        Path ndkRoot,
        NdkCxxPlatformTargetConfiguration targetConfiguration,
        String hostName,
        CxxRuntime cxxRuntime,
        boolean strict) {
      this(
          filesystem,
          ndkRoot,
          readVersion(ndkRoot),
          targetConfiguration,
          hostName,
          cxxRuntime,
          strict);
    }
    private NdkCxxToolchainPaths(
        ProjectFilesystem filesystem,
        Path ndkRoot,
        String ndkVersion,
        NdkCxxPlatformTargetConfiguration targetConfiguration,
        String hostName,
        CxxRuntime cxxRuntime,
        boolean strict) {
      this.filesystem = filesystem;
      this.cachedPaths = new HashMap<>();
      this.strict = strict;
      this.targetConfiguration = targetConfiguration;
      this.hostName = hostName;
      this.cxxRuntime = cxxRuntime;
      this.ndkRoot = ndkRoot;
      this.ndkVersion = ndkVersion;
      this.ndkMajorVersion = getNdkMajorVersion(ndkVersion);
      Assertions.assertCondition(
          ndkMajorVersion > 0,
          "Unknown ndk version: " + ndkVersion);
    }
    /**
     * Returns an equivalent instance whose NDK root and host name are replaced
     * by placeholder values (for sanitized/rule-key use); existence checks are
     * disabled since the placeholder paths are not real.
     */
    NdkCxxToolchainPaths getSanitizedPaths() {
      return new NdkCxxToolchainPaths(
          filesystem,
          Paths.get(ANDROID_NDK_ROOT),
          ndkVersion,
          targetConfiguration,
          BUILD_HOST_SUBST,
          cxxRuntime,
          false);
    }
    /**
     * Resolves a "/"-separated pattern against {@code root}, substituting the
     * {curly-brace} tokens from the target configuration. Results are memoized;
     * in strict mode the resolved path must exist on disk.
     */
    Path processPathPattern(Path root, String pattern) {
      String key = root.toString() + "/" + pattern;
      Path result = cachedPaths.get(key);
      if (result == null) {
        String[] segments = pattern.split("/");
        result = root;
        for (String s : segments) {
          // Only segments containing "{" need token substitution.
          if (s.contains("{")) {
            s = s.replace("{toolchain}", targetConfiguration.getToolchain().toString());
            s = s.replace(
                "{toolchain_target}", targetConfiguration.getToolchainTarget().toString());
            s = s.replace("{compiler_version}", targetConfiguration.getCompiler().getVersion());
            s = s.replace("{compiler_type}", targetConfiguration.getCompiler().getType().getName());
            s = s.replace(
                "{gcc_compiler_version}", targetConfiguration.getCompiler().getGccVersion());
            s = s.replace("{hostname}", hostName);
            s = s.replace("{target_platform}", targetConfiguration.getTargetAppPlatform());
            s = s.replace("{target_arch}", targetConfiguration.getTargetArch().toString());
            s = s.replace("{target_arch_abi}", targetConfiguration.getTargetArchAbi().toString());
          }
          result = result.resolve(s);
        }
        if (strict) {
          Assertions.assertCondition(
              result.toFile().exists(),
              result.toString() + " doesn't exist.");
        }
        cachedPaths.put(key, result);
      }
      return result;
    }
    private boolean isGcc() {
      return targetConfiguration.getCompiler().getType() == NdkCxxPlatformCompiler.Type.GCC;
    }
    // Resolves a pattern against the NDK root.
    Path processPathPattern(String s) {
      return processPathPattern(ndkRoot, s);
    }
    // The prebuilt toolchain directory; layout differs between GCC and
    // pre/post-NDK-11 clang installs.
    Path getNdkToolRoot() {
      if (isGcc()) {
        return processPathPattern("toolchains/{toolchain}-{compiler_version}/prebuilt/{hostname}");
      } else {
        if (ndkMajorVersion < 11) {
          return processPathPattern("toolchains/llvm-{compiler_version}/prebuilt/{hostname}");
        } else {
          return processPathPattern("toolchains/llvm/prebuilt/{hostname}");
        }
      }
    }
    /**
     * @return the path to use as the system root, targeted to the given target platform and
     *     architecture.
     */
    Path getSysroot() {
      return processPathPattern("platforms/{target_platform}/arch-{target_arch}");
    }
    // GCC's internal tool directory; NDK 12 renamed the version directory to "<version>.x".
    Path getLibexecGccToolPath() {
      Assertions.assertCondition(isGcc());
      if (ndkMajorVersion < 12) {
        return processPathPattern(
            getNdkToolRoot(), "libexec/gcc/{toolchain_target}/{compiler_version}");
      } else {
        return processPathPattern(
            getNdkToolRoot(), "libexec/gcc/{toolchain_target}/{compiler_version}.x");
      }
    }
    // Compiler support library directory; layout varies by compiler and NDK version.
    Path getLibPath() {
      String pattern;
      if (isGcc()) {
        if (ndkMajorVersion < 12) {
          pattern = "lib/{compiler_type}/{toolchain_target}/{compiler_version}";
        } else {
          pattern = "lib/{compiler_type}/{toolchain_target}/{compiler_version}.x";
        }
      } else {
        if (ndkMajorVersion < 11) {
          pattern = "lib/{compiler_type}/{compiler_version}";
        } else {
          pattern = "lib64/{compiler_type}/{compiler_version}";
        }
      }
      return processPathPattern(getNdkToolRoot(), pattern);
    }
    // The GCC toolchain root (needed even for clang builds).
    Path getNdkGccToolRoot() {
      return processPathPattern(
          "toolchains/{toolchain}-{gcc_compiler_version}/prebuilt/{hostname}");
    }
    Path getToolchainBinPath() {
      if (isGcc()) {
        return processPathPattern(getNdkToolRoot(), "{toolchain_target}/bin");
      } else {
        return processPathPattern(getNdkToolRoot(), "bin");
      }
    }
    private Path getGccToolchainBinPath() {
      return processPathPattern(getNdkGccToolRoot(), "{toolchain_target}/bin");
    }
    // The selected C++ runtime's source directory; gnustl is versioned by GCC version.
    private Path getCxxRuntimeDirectory() {
      if (cxxRuntime == CxxRuntime.GNUSTL) {
        return processPathPattern(
            "sources/cxx-stl/" + cxxRuntime.getName() + "/{gcc_compiler_version}");
      } else {
        return processPathPattern(
            "sources/cxx-stl/" + cxxRuntime.getName());
      }
    }
    private Path getCxxRuntimeLibsDirectory() {
      return processPathPattern(getCxxRuntimeDirectory(), "libs/{target_arch_abi}");
    }
    // GCC tools are prefixed with the toolchain target triple; clang's are not.
    Path getToolPath(String tool) {
      if (isGcc()) {
        return processPathPattern(getNdkToolRoot(), "bin/{toolchain_target}-" + tool);
      } else {
        return processPathPattern(getNdkToolRoot(), "bin/" + tool);
      }
    }
    public Path getNdkRoot() {
      return ndkRoot;
    }
    public CxxRuntime getCxxRuntime() {
      return cxxRuntime;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sanselan.common;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.RandomAccessFile;
import org.apache.sanselan.ImageReadException;
import org.apache.sanselan.ImageWriteException;
public class BinaryFileFunctions implements BinaryConstants
{
    // When true, the read/convert helpers print diagnostic output for each value.
    protected boolean debug = false;
    // Enables or disables diagnostic output.
    public final void setDebug(boolean b)
    {
        debug = b;
    }
    // Returns whether diagnostic output is enabled.
    public final boolean getDebug()
    {
        return debug;
    }
protected final void readRandomBytes(InputStream is)
throws ImageReadException, IOException
{
for (int counter = 0; counter < 100; counter++)
{
readByte("" + counter, is, "Random Data");
}
}
public final void debugNumber(String msg, int data)
{
debugNumber(msg, data, 1);
}
public final void debugNumber(String msg, int data, int bytes)
{
PrintWriter pw = new PrintWriter(System.out);
debugNumber(pw, msg,
data, bytes);
pw.flush();
}
public final void debugNumber(PrintWriter pw, String msg, int data)
{
debugNumber(pw, msg, data, 1);
}
    // Writes "msg: data (b0 [b0],b1 [b1],...) [0x..., binary]" to the writer,
    // decoding the low-order `bytes` bytes of data (least significant first).
    public final void debugNumber(PrintWriter pw, String msg, int data,
            int bytes)
    {
        pw.print(msg + ": " + data + " (");
        int byteData = data;
        for (int i = 0; i < bytes; i++)
        {
            if (i > 0)
                pw.print(",");
            // Print each byte as a character plus its unsigned numeric value.
            int singleByte = 0xff & byteData;
            pw.print((char) singleByte + " [" + singleByte + "]");
            byteData >>= 8;
        }
        pw.println(") [0x" + Integer.toHexString(data) + ", "
                + Integer.toBinaryString(data) + "]");
        pw.flush();
    }
public final boolean startsWith(byte haystack[], byte needle[])
{
if (needle == null)
return false;
if (haystack == null)
return false;
if (needle.length > haystack.length)
return false;
for (int i = 0; i < needle.length; i++)
{
if (needle[i] != haystack[i])
return false;
}
return true;
}
public final byte[] readBytes(InputStream is, int count)
throws ImageReadException, IOException
{
byte result[] = new byte[count];
for (int i = 0; i < count; i++)
{
int data = is.read();
result[i] = (byte) data;
}
return result;
}
public final void readAndVerifyBytes(InputStream is, byte expected[],
String exception) throws ImageReadException, IOException
{
for (int i = 0; i < expected.length; i++)
{
int data = is.read();
byte b = (byte) (0xff & data);
if (data < 0)
throw new ImageReadException("Unexpected EOF.");
if (b != expected[i])
{
// System.out.println("i" + ": " + i);
// this.debugByteArray("expected", expected);
// debugNumber("data[" + i + "]", b);
// debugNumber("expected[" + i + "]", expected[i]);
throw new ImageReadException(exception);
}
}
}
protected final void readAndVerifyBytes(String name, InputStream is,
byte expected[], String exception) throws ImageReadException,
IOException
{
byte bytes[] = readByteArray(name, expected.length, is, exception);
for (int i = 0; i < expected.length; i++)
{
if (bytes[i] != expected[i])
{
// System.out.println("i" + ": " + i);
// debugNumber("bytes[" + i + "]", bytes[i]);
// debugNumber("expected[" + i + "]", expected[i]);
throw new ImageReadException(exception);
}
}
}
public final void skipBytes(InputStream is, int length, String exception)
throws IOException
{
long total = 0;
while (length != total)
{
long skipped = is.skip(length - total);
if (skipped < 1)
throw new IOException(exception + " (" + skipped + ")");
total += skipped;
}
}
protected final void scanForByte(InputStream is, byte value)
throws IOException
{
int count = 0;
for (int i = 0; count < 3; i++)
// while(count<3)
{
int b = is.read();
if (b < 0)
return;
if ((0xff & b) == value)
{
System.out.println("\t" + i + ": match.");
count++;
}
}
}
    // Reads one byte, throwing IOException(exception) at end of stream; when
    // debug is enabled, also prints the value.
    public final byte readByte(String name, InputStream is, String exception)
            throws ImageReadException, IOException
    {
        int result = is.read();
        if ((result < 0))
        {
            // NOTE(review): prints to stdout before throwing; debug leftover?
            System.out.println(name + ": " + result);
            throw new IOException(exception);
        }
        if (debug)
            debugNumber(name, result);
        return (byte) (0xff & result);
    }
protected final RationalNumber[] convertByteArrayToRationalArray(
String name, byte bytes[], int start, int length, int byteOrder)
{
int expectedLength = start + length * 8;
if (bytes.length < expectedLength)
{
System.out.println(name + ": expected length: " + expectedLength
+ ", actual length: " + bytes.length);
return null;
}
RationalNumber result[] = new RationalNumber[length];
for (int i = 0; i < length; i++)
{
result[i] = convertByteArrayToRational(name, bytes, start + i * 8,
byteOrder);
}
return result;
}
protected final RationalNumber convertByteArrayToRational(String name,
byte bytes[], int byteOrder)
{
return convertByteArrayToRational(name, bytes, 0, byteOrder);
}
protected final RationalNumber convertByteArrayToRational(String name,
byte bytes[], int start, int byteOrder)
{
int numerator = convertByteArrayToInt(name, bytes, start + 0, byteOrder);
int divisor = convertByteArrayToInt(name, bytes, start + 4, byteOrder);
return new RationalNumber(numerator, divisor);
}
protected final int convertByteArrayToInt(String name, byte bytes[],
int byteOrder)
{
return convertByteArrayToInt(name, bytes, 0, byteOrder);
}
    // Decodes 4 bytes starting at `start` as a signed 32-bit int, honoring
    // the requested byte order; when debug is enabled, also prints the value.
    protected final int convertByteArrayToInt(String name, byte bytes[],
            int start, int byteOrder)
    {
        byte byte0 = bytes[start + 0];
        byte byte1 = bytes[start + 1];
        byte byte2 = bytes[start + 2];
        byte byte3 = bytes[start + 3];
        int result;
        if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
        {
            // First byte is the most significant.
            result = ((0xff & byte0) << 24) | ((0xff & byte1) << 16)
                    | ((0xff & byte2) << 8) | ((0xff & byte3) << 0);
        } else
        {
            // intel, little endian: first byte is the least significant.
            result = ((0xff & byte3) << 24) | ((0xff & byte2) << 16)
                    | ((0xff & byte1) << 8) | ((0xff & byte0) << 0);
        }
        if (debug)
            debugNumber(name, result, 4);
        return result;
    }
protected final int[] convertByteArrayToIntArray(String name, byte bytes[],
int start, int length, int byteOrder)
{
int expectedLength = start + length * 4;
if (bytes.length < expectedLength)
{
System.out.println(name + ": expected length: " + expectedLength
+ ", actual length: " + bytes.length);
return null;
}
int result[] = new int[length];
for (int i = 0; i < length; i++)
{
result[i] = convertByteArrayToInt(name, bytes, start + i * 4,
byteOrder);
}
return result;
}
protected final void writeIntInToByteArray(int value, byte bytes[],
int start, int byteOrder)
{
if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
{
bytes[start + 0] = (byte) (value >> 24);
bytes[start + 1] = (byte) (value >> 16);
bytes[start + 2] = (byte) (value >> 8);
bytes[start + 3] = (byte) (value >> 0);
} else
{
bytes[start + 3] = (byte) (value >> 24);
bytes[start + 2] = (byte) (value >> 16);
bytes[start + 1] = (byte) (value >> 8);
bytes[start + 0] = (byte) (value >> 0);
}
}
protected static final byte[] int2ToByteArray(int value, int byteOrder)
{
if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
return new byte[] { (byte) (value >> 8), (byte) (value >> 0), };
else
return new byte[] { (byte) (value >> 0), (byte) (value >> 8), };
}
protected final byte[] convertIntArrayToByteArray(int values[],
int byteOrder)
{
byte result[] = new byte[values.length * 4];
for (int i = 0; i < values.length; i++)
{
writeIntInToByteArray(values[i], result, i * 4, byteOrder);
}
return result;
}
protected final byte[] convertShortArrayToByteArray(int values[],
int byteOrder)
{
byte result[] = new byte[values.length * 2];
for (int i = 0; i < values.length; i++)
{
int value = values[i];
if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
{
result[i * 2 + 0] = (byte) (value >> 8);
result[i * 2 + 1] = (byte) (value >> 0);
} else
{
result[i * 2 + 1] = (byte) (value >> 8);
result[i * 2 + 0] = (byte) (value >> 0);
}
}
return result;
}
protected final byte[] convertShortToByteArray(int value, int byteOrder)
{
byte result[] = new byte[2];
if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
{
result[0] = (byte) (value >> 8);
result[1] = (byte) (value >> 0);
} else
{
result[1] = (byte) (value >> 8);
result[0] = (byte) (value >> 0);
}
return result;
}
protected final byte[] convertIntArrayToRationalArray(int numerators[],
int denominators[], int byteOrder) throws ImageWriteException
{
if (numerators.length != denominators.length)
throw new ImageWriteException("numerators.length ("
+ numerators.length + " != denominators.length ("
+ denominators.length + ")");
byte result[] = new byte[numerators.length * 8];
for (int i = 0; i < numerators.length; i++)
{
writeIntInToByteArray(numerators[i], result, i * 8, byteOrder);
writeIntInToByteArray(denominators[i], result, i * 8 + 4, byteOrder);
}
return result;
}
protected final byte[] convertRationalArrayToByteArray(
RationalNumber numbers[], int byteOrder) throws ImageWriteException
{
// Debug.debug("convertRationalArrayToByteArray 2");
byte result[] = new byte[numbers.length * 8];
for (int i = 0; i < numbers.length; i++)
{
writeIntInToByteArray(numbers[i].numerator, result, i * 8,
byteOrder);
writeIntInToByteArray(numbers[i].divisor, result, i * 8 + 4,
byteOrder);
}
return result;
}
protected final byte[] convertRationalToByteArray(RationalNumber number,
int byteOrder) throws ImageWriteException
{
byte result[] = new byte[8];
writeIntInToByteArray(number.numerator, result, 0, byteOrder);
writeIntInToByteArray(number.divisor, result, 4, byteOrder);
return result;
}
protected final int convertByteArrayToShort(String name, byte bytes[],
int byteOrder) throws ImageReadException
{
return convertByteArrayToShort(name, 0, bytes, byteOrder);
}
    // Decodes 2 bytes at `index` as an unsigned 16-bit value in the requested
    // byte order; throws ImageReadException if the index is out of bounds.
    protected final int convertByteArrayToShort(String name, int index,
            byte bytes[], int byteOrder) throws ImageReadException
    {
        if (index + 1 >= bytes.length)
            throw new ImageReadException("Index out of bounds. Array size: "
                    + bytes.length + ", index: " + index);
        int byte0 = 0xff & bytes[index + 0];
        int byte1 = 0xff & bytes[index + 1];
        int result;
        if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
            result = (byte0 << 8) | byte1;
        else
            // intel, little endian
            result = (byte1 << 8) | byte0;
        if (debug)
            debugNumber(name, result, 2);
        return result;
    }
protected final int[] convertByteArrayToShortArray(String name,
byte bytes[], int start, int length, int byteOrder)
throws ImageReadException
{
int expectedLength = start + length * 2;
if (bytes.length < expectedLength)
{
System.out.println(name + ": expected length: " + expectedLength
+ ", actual length: " + bytes.length);
return null;
}
int result[] = new int[length];
for (int i = 0; i < length; i++)
{
result[i] = convertByteArrayToShort(name, start + i * 2, bytes,
byteOrder);
}
return result;
}
public final byte[] readByteArray(String name, int length, InputStream is)
throws IOException
{
String exception = name + " could not be read.";
return readByteArray(name, length, is, exception);
}
    // Reads exactly `length` bytes, looping over partial reads; throws
    // IOException(exception) if the stream ends early. When debug is enabled,
    // prints up to the first 50 bytes read.
    public final byte[] readByteArray(String name, int length, InputStream is,
            String exception) throws IOException
    {
        byte result[] = new byte[length];
        int read = 0;
        while (read < length)
        {
            int count = is.read(result, read, length - read);
            // A count < 1 means EOF (or a zero-length read), which we treat as failure.
            if (count < 1)
                throw new IOException(exception);
            read += count;
        }
        if (debug)
        {
            for (int i = 0; ((i < length) && (i < 50)); i++)
            {
                debugNumber(name + " (" + i + ")", 0xff & result[i]);
            }
        }
        return result;
    }
public final void debugByteArray(String name, byte bytes[])
{
System.out.println(name + ": " + bytes.length);
for (int i = 0; ((i < bytes.length) && (i < 50)); i++)
{
debugNumber("\t" + " (" + i + ")", 0xff & bytes[i]);
}
}
protected final void debugNumberArray(String name, int numbers[], int length)
{
System.out.println(name + ": " + numbers.length);
for (int i = 0; ((i < numbers.length) && (i < 50)); i++)
{
debugNumber(name + " (" + i + ")", numbers[i], length);
}
}
public final byte[] readBytearray(String name, byte bytes[], int start,
int count) throws ImageReadException
{
if (bytes.length < (start + count))
{
throw new ImageReadException("Invalid read. bytes.length: " + bytes.length+ ", start: " + start + ", count: " + count);
// return null;
}
byte result[] = new byte[count];
System.arraycopy(bytes, start, result, 0, count);
if (debug)
debugByteArray(name, result);
return result;
}
    // Returns a copy of bytes with the FIRST count bytes removed (everything
    // after the leading count bytes) — not the last count bytes, unlike the
    // static tail().
    protected final byte[] getByteArrayTail(String name, byte bytes[], int count) throws ImageReadException
    {
        return readBytearray(name, bytes, count, bytes.length - count);
    }
    // Returns a copy of bytes with the LAST count bytes removed. NOTE(review):
    // despite the name, this is "all but the trailing count bytes", unlike the
    // static head(), which returns the first count bytes.
    protected final byte[] getBytearrayHead(String name, byte bytes[], int count) throws ImageReadException
    {
        return readBytearray(name, bytes, 0, bytes.length - count);
    }
public static final byte[] slice(byte bytes[], int start, int count)
{
if (bytes.length < (start + count))
return null;
byte result[] = new byte[count];
System.arraycopy(bytes, start, result, 0, count);
return result;
}
public static final byte[] tail(byte bytes[], int count)
{
if (count > bytes.length)
count = bytes.length;
return slice(bytes, bytes.length - count, count);
}
public static final byte[] head(byte bytes[], int count)
{
if (count > bytes.length)
count = bytes.length;
return slice(bytes, 0, count);
}
public final boolean compareByteArrays(byte a[], byte b[])
{
if (a.length != b.length)
return false;
return compareByteArrays(a, 0, b, 0, a.length);
}
public final boolean compareByteArrays(byte a[], int aStart, byte b[],
int bStart, int length)
{
if (a.length < (aStart + length))
{
return false;
}
if (b.length < (bStart + length))
return false;
for (int i = 0; i < length; i++)
{
if (a[aStart + i] != b[bStart + i])
{
// debugNumber("\t" + "a[" + (aStart + i) + "]", a[aStart + i]);
// debugNumber("\t" + "b[" + (bStart + i) + "]", b[bStart + i]);
return false;
}
}
return true;
}
// Static twin of compareByteArrays: whole-array equality check.
public static final boolean compareBytes(byte a[], byte b[])
{
    return (a.length == b.length)
            && compareBytes(a, 0, b, 0, a.length);
}
// Compares "length" bytes of a (from aStart) against b (from bStart).
// Ranges that run off the end of either array compare unequal.
public static final boolean compareBytes(byte a[], int aStart, byte b[],
        int bStart, int length)
{
    if ((a.length < aStart + length) || (b.length < bStart + length))
        return false;

    int i = 0;
    while (i < length)
    {
        if (a[aStart + i] != b[bStart + i])
            return false;
        i++;
    }
    return true;
}
// Reads exactly four bytes from the stream and converts them to an int in
// the given byte order. Loops because InputStream.read may return fewer
// bytes than requested; EOF before four bytes raises IOException with the
// caller-supplied message.
protected final int read4Bytes(String name, InputStream is,
        String exception, int byteOrder) throws ImageReadException,
        IOException
{
    final byte buffer[] = new byte[4];
    int total = 0;
    while (total < buffer.length)
    {
        final int n = is.read(buffer, total, buffer.length - total);
        if (n < 1)
            throw new IOException(exception);
        total += n;
    }
    return convertByteArrayToInt(name, buffer, byteOrder);
}
/**
 * Reads exactly three bytes from the stream and packs them into an int in
 * the given byte order.
 *
 * Fixed: the original never checked for end-of-stream, so is.read()
 * returning -1 was silently cast to the byte value 0xff. The sibling
 * read2Bytes/read4Bytes helpers throw IOException on a short read; this
 * now does the same.
 *
 * @throws IOException with the caller-supplied message if the stream ends
 *         before three bytes are available.
 */
protected final int read3Bytes(String name, InputStream is,
        String exception, int byteOrder) throws ImageReadException,
        IOException
{
    final int b0 = is.read();
    final int b1 = is.read();
    final int b2 = is.read();
    if (b0 < 0 || b1 < 0 || b2 < 0)
        throw new IOException(exception);

    int result;
    if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
        result = ((0xff & b0) << 16) | ((0xff & b1) << 8) | (0xff & b2);
    else
        // intel, little endian
        result = ((0xff & b2) << 16) | ((0xff & b1) << 8) | (0xff & b0);

    if (debug)
        debugNumber(name, result, 3);

    return result;
}
// Reads exactly two bytes from the stream and converts them to a short
// value in the given byte order. A short read / EOF raises IOException
// with the caller-supplied message.
protected final int read2Bytes(String name, InputStream is,
        String exception, int byteOrder) throws ImageReadException,
        IOException
{
    final byte buffer[] = new byte[2];
    int total = 0;
    while (total < buffer.length)
    {
        final int n = is.read(buffer, total, buffer.length - total);
        if (n < 1)
            throw new IOException(exception);
        total += n;
    }
    return convertByteArrayToShort(name, buffer, byteOrder);
}
// Prints the four bytes of the int (most significant first) as characters,
// e.g. a FOURCC-style tag, quoted after the supplied message.
protected final void printCharQuad(String msg, int i)
{
    final char c1 = (char) (0xff & (i >> 24));
    final char c2 = (char) (0xff & (i >> 16));
    final char c3 = (char) (0xff & (i >> 8));
    final char c4 = (char) (0xff & i);
    System.out.println(msg + ": '" + c1 + c2 + c3 + c4 + "'");
}
// Same as printCharQuad(String, int) but writes to the given PrintWriter
// instead of System.out.
protected final void printCharQuad(PrintWriter pw, String msg, int i)
{
    final char c1 = (char) (0xff & (i >> 24));
    final char c2 = (char) (0xff & (i >> 16));
    final char c3 = (char) (0xff & (i >> 8));
    final char c4 = (char) (0xff & i);
    pw.println(msg + ": '" + c1 + c2 + c3 + c4 + "'");
}
/**
 * Prints the byte as an unsigned binary string, quoted after the message.
 *
 * Fixed: the original opened a quote but never closed it, producing
 * unbalanced output like {@code name: '1010}.
 */
protected final void printByteBits(String msg, byte i)
{
    System.out.println(msg + ": '" + Integer.toBinaryString(0xff & i) + "'");
}
// Packs the low byte of each of the four characters into one int,
// c1 in the most significant byte (big-endian / FOURCC order).
public final static int CharsToQuad(char c1, char c2, char c3, char c4)
{
    int quad = 0xff & c1;
    quad = (quad << 8) | (0xff & c2);
    quad = (quad << 8) | (0xff & c3);
    quad = (quad << 8) | (0xff & c4);
    return quad;
}
// Convenience overload: find the first zero byte, searching from index 0.
public final int findNull(byte src[])
{
return findNull(src, 0);
}
// Scans forward from "start" for the first zero byte; returns its index,
// or -1 when no zero byte is found.
public final int findNull(byte src[], int start)
{
    int pos = start;
    while (pos < src.length)
    {
        if (src[pos] == 0)
            return pos;
        pos++;
    }
    return -1;
}
// Seeks to "pos" and reads exactly "length" bytes from the file. Loops
// because RandomAccessFile.read may deliver fewer bytes than requested;
// EOF before the buffer is full raises IOException with the supplied
// message.
protected final byte[] getRAFBytes(RandomAccessFile raf, long pos,
        int length, String exception) throws IOException
{
    if (debug)
    {
        System.out.println("getRAFBytes pos" + ": " + pos);
        System.out.println("getRAFBytes length" + ": " + length);
    }

    final byte buffer[] = new byte[length];
    raf.seek(pos);

    int filled = 0;
    while (filled < length)
    {
        final int n = raf.read(buffer, filled, length - filled);
        if (n < 1)
            throw new IOException(exception);
        filled += n;
    }
    return buffer;
}
// Convenience overload: decode a float from the start of the array.
protected final float convertByteArrayToFloat(String name, byte bytes[],
int byteOrder)
{
return convertByteArrayToFloat(name, bytes, 0, byteOrder);
}
// Decodes four bytes starting at "start" as an IEEE-754 float. Motorola
// (big-endian) order reads the most significant byte first; intel
// (little-endian) reads it last. "name" is unused, kept for signature
// parity with the other convert helpers.
// NOTE(review): inherited "TODO: not tested" from the original — byte
// order handling has not been verified against real files.
protected final float convertByteArrayToFloat(String name, byte bytes[],
        int start, int byteOrder)
{
    int bits = 0;
    if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
    {
        for (int i = 0; i < 4; i++)
            bits = (bits << 8) | (0xff & bytes[start + i]);
    } else
    {
        // intel, little endian: the last byte is most significant
        for (int i = 3; i >= 0; i--)
            bits = (bits << 8) | (0xff & bytes[start + i]);
    }
    return Float.intBitsToFloat(bits);
}
// Decodes "length" consecutive floats (four bytes each) beginning at
// "start". Preserves the historical contract of printing a diagnostic and
// returning null when the buffer is too short.
protected final float[] convertByteArrayToFloatArray(String name,
        byte bytes[], int start, int length, int byteOrder)
{
    final int needed = start + 4 * length;
    if (bytes.length < needed)
    {
        System.out.println(name + ": expected length: " + needed
                + ", actual length: " + bytes.length);
        return null;
    }

    final float values[] = new float[length];
    for (int i = 0; i < length; i++)
        values[i] = convertByteArrayToFloat(name, bytes, start + 4 * i,
                byteOrder);
    return values;
}
/**
 * Serializes the float's IEEE-754 bits into four bytes in the given byte
 * order.
 *
 * Fixed: the branches were inverted relative to convertByteArrayToFloat —
 * the "motorola" branch wrote the least significant byte first (i.e.
 * little-endian) — so writing a value and reading it back with the same
 * byteOrder changed it. Motorola/big-endian now emits the most
 * significant byte first, matching the reader.
 */
protected final byte[] convertFloatToByteArray(float value, int byteOrder)
{
    final int bits = Float.floatToRawIntBits(value);
    final byte result[] = new byte[4];
    for (int i = 0; i < 4; i++)
    {
        final int shift = 8 * i;
        if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
            result[3 - i] = (byte) (0xff & (bits >> shift));
        else
            // intel, little endian: least significant byte first
            result[i] = (byte) (0xff & (bits >> shift));
    }
    return result;
}
/**
 * Serializes each float as four bytes in the given byte order.
 *
 * Fixed: as with convertFloatToByteArray, the per-element byte order was
 * inverted relative to convertByteArrayToFloatArray (the "motorola"
 * branch wrote little-endian), breaking write/read round-trips.
 * Motorola/big-endian now emits the most significant byte first.
 */
protected final byte[] convertFloatArrayToByteArray(float values[],
        int byteOrder)
{
    final byte result[] = new byte[values.length * 4];
    for (int i = 0; i < values.length; i++)
    {
        final int bits = Float.floatToRawIntBits(values[i]);
        final int start = 4 * i;
        for (int b = 0; b < 4; b++)
        {
            final int shift = 8 * b;
            if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
                result[start + 3 - b] = (byte) (0xff & (bits >> shift));
            else
                // intel, little endian
                result[start + b] = (byte) (0xff & (bits >> shift));
        }
    }
    return result;
}
/**
 * Serializes the double's IEEE-754 bits into eight bytes in the given byte
 * order.
 *
 * Fixed: the branches were inverted relative to convertByteArrayToDouble —
 * the "motorola" branch wrote the least significant byte first — so
 * write/read round-trips with the same byteOrder produced a different
 * value. Motorola/big-endian now emits the most significant byte first,
 * matching the reader.
 */
protected final byte[] convertDoubleToByteArray(double value, int byteOrder)
{
    final long bits = Double.doubleToRawLongBits(value);
    final byte result[] = new byte[8];
    for (int i = 0; i < 8; i++)
    {
        final int shift = 8 * i;
        if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
            result[7 - i] = (byte) (0xff & (bits >> shift));
        else
            // intel, little endian: least significant byte first
            result[i] = (byte) (0xff & (bits >> shift));
    }
    return result;
}
/**
 * Serializes each double as eight bytes in the given byte order.
 *
 * Fixed: as with convertDoubleToByteArray, the per-element byte order was
 * inverted relative to convertByteArrayToDouble(Array) (the "motorola"
 * branch wrote little-endian), breaking write/read round-trips.
 * Motorola/big-endian now emits the most significant byte first.
 */
protected final byte[] convertDoubleArrayToByteArray(double values[],
        int byteOrder)
{
    final byte result[] = new byte[values.length * 8];
    for (int i = 0; i < values.length; i++)
    {
        final long bits = Double.doubleToRawLongBits(values[i]);
        final int start = 8 * i;
        for (int b = 0; b < 8; b++)
        {
            final int shift = 8 * b;
            if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
                result[start + 7 - b] = (byte) (0xff & (bits >> shift));
            else
                // intel, little endian
                result[start + b] = (byte) (0xff & (bits >> shift));
        }
    }
    return result;
}
// Convenience overload: decode a double from the start of the array.
protected final double convertByteArrayToDouble(String name, byte bytes[],
int byteOrder)
{
return convertByteArrayToDouble(name, bytes, 0, byteOrder);
}
/**
 * Decodes eight bytes starting at "start" as an IEEE-754 double in the
 * given byte order. "name" is unused, kept for signature parity with the
 * other convert helpers.
 *
 * Fixed: the original assembled the bit pattern with int arithmetic, e.g.
 * {@code (0xff & byte0) << 56}. In Java an int shift distance is masked
 * mod 32 (JLS 15.19), so shifts of 32..56 silently became 0..24 and the
 * result was garbage for any value with significant high bytes. The masks
 * and shifts are now done in long arithmetic (0xffL).
 */
protected final double convertByteArrayToDouble(String name, byte bytes[],
        int start, int byteOrder)
{
    long bits = 0;
    if (byteOrder == BYTE_ORDER_MOTOROLA) // motorola, big endian
    {
        // Most significant byte first.
        for (int i = 0; i < 8; i++)
            bits = (bits << 8) | (0xffL & bytes[start + i]);
    } else
    {
        // intel, little endian: last byte is most significant.
        for (int i = 7; i >= 0; i--)
            bits = (bits << 8) | (0xffL & bytes[start + i]);
    }
    return Double.longBitsToDouble(bits);
}
// Decodes "length" consecutive doubles (eight bytes each) beginning at
// "start". Preserves the historical contract of printing a diagnostic and
// returning null when the buffer is too short.
protected final double[] convertByteArrayToDoubleArray(String name,
        byte bytes[], int start, int length, int byteOrder)
{
    final int needed = start + 8 * length;
    if (bytes.length < needed)
    {
        System.out.println(name + ": expected length: " + needed
                + ", actual length: " + bytes.length);
        return null;
    }

    final double values[] = new double[length];
    for (int i = 0; i < length; i++)
        values[i] = convertByteArrayToDouble(name, bytes, start + 8 * i,
                byteOrder);
    return values;
}
// Convenience overload: skip "length" bytes using a generic failure
// message; delegates to the three-argument skipBytes defined elsewhere.
protected void skipBytes(InputStream is, int length) throws IOException
{
this.skipBytes(is, length, "Couldn't skip bytes");
}
// Pumps the input stream to the output stream in 1 KiB chunks until EOF.
// Neither stream is closed here; that remains the caller's job.
public final void copyStreamToStream(InputStream is, OutputStream os)
        throws IOException
{
    final byte chunk[] = new byte[1024];
    for (;;)
    {
        final int n = is.read(chunk);
        if (n <= 0)
            break;
        os.write(chunk, 0, n);
    }
}
// Drains the entire stream into memory and returns the accumulated bytes.
public final byte[] getStreamBytes(InputStream is) throws IOException
{
    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    copyStreamToStream(is, buffer);
    return buffer.toByteArray();
}
}
| |
/* JAT: Java Astrodynamics Toolkit
*
* Copyright (c) 2002 National Aeronautics and Space Administration and the Center for Space Research (CSR),
* The University of Texas at Austin. All rights reserved.
*
* This file is part of JAT. JAT is free software; you can
* redistribute it and/or modify it under the terms of the
* NASA Open Source Agreement
*
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* NASA Open Source Agreement for more details.
*
* You should have received a copy of the NASA Open Source Agreement
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
*/
package jat.jat3D;
import jat.core.util.EasyReader;
import javax.media.j3d.GeometryArray;
import javax.media.j3d.LineStripArray;
import javax.media.j3d.Shape3D;
/**
* Create Java3D trajectory from COPERNICUS data file
*
* @author Tobias Berthold
*/
/**
 * Java3D Shape3D that renders a spacecraft trajectory read from a
 * COPERNICUS output file as a single connected line strip.
 */
public class Copernicus_Trajectory extends Shape3D
{
int num = 0; // number of records in file
// Flattened x,y,z triples handed to the LineStripArray.
public double[] coords;
public double[] t_read, x_read, y_read, z_read; // values from file
public double[] ux_read, uy_read, uz_read; // values from file
public double[] t, x, y, z; // original or interpolated values
public double[] ux, uy, uz; // original or interpolated values
// Scratch variable used to consume file columns that are not kept.
double tmp;
// Wrap an existing flattened coordinate array; no geometry is created.
public Copernicus_Trajectory(double[] coords)
{
this.coords = coords;
}
/**
 * Create interpolated Java3D trajectory from COPERNICUS data file
 *
 * @param filename
 * @param steps
 */
public Copernicus_Trajectory(String filename, int steps)
{
int i;
get_data_from_file(filename); // read data into arrays t_read, etc.
//int steps=10;
// Uniform time step spanning the file's full time range.
double delta_time = (t_read[num - 1] - t_read[0]) / steps;
jat.coreNOSA.math.Interpolator in_x = new jat.coreNOSA.math.Interpolator(t_read, x_read);
jat.coreNOSA.math.Interpolator in_y = new jat.coreNOSA.math.Interpolator(t_read, y_read);
jat.coreNOSA.math.Interpolator in_z = new jat.coreNOSA.math.Interpolator(t_read, z_read);
jat.coreNOSA.math.Interpolator in_ux = new jat.coreNOSA.math.Interpolator(t_read, ux_read);
jat.coreNOSA.math.Interpolator in_uy = new jat.coreNOSA.math.Interpolator(t_read, uy_read);
jat.coreNOSA.math.Interpolator in_uz = new jat.coreNOSA.math.Interpolator(t_read, uz_read);
// Create new arrays for interpolated values
t = new double[steps + 1];
x = new double[steps + 1];
y = new double[steps + 1];
z = new double[steps + 1];
ux = new double[steps + 1];
uy = new double[steps + 1];
uz = new double[steps + 1];
for (i = 0; i < steps + 1; i++)
{
t[i] = t_read[0] + i * delta_time;
x[i] = in_x.get_value(t[i]);
y[i] = in_y.get_value(t[i]);
z[i] = in_z.get_value(t[i]);
ux[i] = in_ux.get_value(t[i]);
uy[i] = in_uy.get_value(t[i]);
uz[i] = in_uz.get_value(t[i]);
//System.out.println(t[i]+" "+x[i]);
}
// Copy data into coords array
// NOTE(review): steps+1 points are interpolated above but only the first
// "steps" of them are copied, so the final point is dropped from the
// rendered line — confirm whether that is intentional.
coords = new double[steps * 3];
for (i = 0; i < steps; i++)
{
coords[i * 3 + 0] = x[i];
coords[i * 3 + 1] = y[i];
coords[i * 3 + 2] = z[i];
}
create_trajectory_lines();
}
/**
 * Create Java3D trajectory from COPERNICUS data file
 *
 * @param filename
 */
public Copernicus_Trajectory(String filename)
{
get_data_from_file(filename); // read data into arrays t_read, etc.
// Copy data into coords array
//coords=new double[num*3];
//draw_trajectory();
}
public Copernicus_Trajectory()
{
//draw_trajectory();
}
// Parse a COPERNICUS text file in two passes: the first counts lines, the
// second reads each record's columns. Time, position, and thrust
// direction are kept; every other column is consumed into "tmp" and
// discarded. Exits the VM when the file cannot be opened.
public void get_data_from_file(String filename)
{
EasyReader inFile = new EasyReader(filename);
if (inFile.bad())
{
System.err.println("Can't open " + filename);
System.exit(1);
}
// find out how many numbers in file
while (!inFile.eof())
{
inFile.readLine();
//System.out.println(inFile.readLine());
num++;
}
inFile.close();
// NOTE(review): presumably compensates for header/trailer lines in the
// COPERNICUS format — verify against a sample file.
num -= 3;
System.out.println("lines " + num);
// read the data from file
inFile = new EasyReader(filename);
t_read = new double[num];
x_read = new double[num];
y_read = new double[num];
z_read = new double[num];
ux_read = new double[num];
uy_read = new double[num];
uz_read = new double[num];
int i, line = 0;
inFile.readLine(); // read over first line
for (i = 0; i < num; i++)
{
t_read[i] = inFile.readFORTRANDouble(); // Julian date
tmp = inFile.readFORTRANDouble(); // sim_time
tmp = inFile.readFORTRANDouble(); // seg_time
x_read[i] = inFile.readFORTRANDouble(); // x(km)
y_read[i] = inFile.readFORTRANDouble(); // y(km)
z_read[i] = inFile.readFORTRANDouble(); // z(km)
tmp = inFile.readFORTRANDouble(); // xd(km/s)
tmp = inFile.readFORTRANDouble(); // yd(km/s)
tmp = inFile.readFORTRANDouble(); // zd(km/s)
tmp = inFile.readFORTRANDouble(); // mass(kg)
ux_read[i] = inFile.readFORTRANDouble(); // t/m ux
uy_read[i] = inFile.readFORTRANDouble(); // t/m uy
uz_read[i] = inFile.readFORTRANDouble(); // t/m uz
tmp = inFile.readFORTRANDouble(); // lx
tmp = inFile.readFORTRANDouble(); // ly
tmp = inFile.readFORTRANDouble(); // lz
tmp = inFile.readFORTRANDouble(); // lxd
tmp = inFile.readFORTRANDouble(); // lyd
tmp = inFile.readFORTRANDouble(); // lzd
tmp = inFile.readFORTRANDouble(); // lm
tmp = inFile.readFORTRANDouble(); // lvmag
tmp = inFile.readFORTRANDouble(); // lvdmag
tmp = inFile.readFORTRANDouble(); // lrvlvd
tmp = inFile.readFORTRANDouble(); // alpha(deg)
tmp = inFile.readFORTRANDouble(); // beta(deg)
tmp = inFile.readFORTRANDouble(); // u_dot_mag(deg/day)
tmp = inFile.readFORTRANDouble(); // Hamiltonian
tmp = inFile.readFORTRANDouble(); // switch_func
tmp = inFile.readFORTRANDouble(); // thrust(N)
tmp = inFile.readFORTRANDouble(); // Isp(s)
tmp = inFile.readFORTRANDouble(); // c_exhaust(km/s)
tmp = inFile.readFORTRANDouble(); // power(watts)
//System.out.println(""+coords[line+2]);
inFile.readLine();
// NOTE(review): "line" is incremented but never read — looks vestigial.
line += 3;
}
inFile.close();
}
// Wrap this.coords in a single LineStripArray and install it as this
// shape's geometry.
private void create_trajectory_lines()
{
int num_vert = coords.length / 3;
//int[] stripLengths = { 200};
int[] stripLengths = { num_vert };
LineStripArray myLines = new LineStripArray(num_vert, GeometryArray.COORDINATES, stripLengths);
myLines.setCoordinates(0, coords);
this.setGeometry(myLines);
}
// Small hard-coded polyline for debugging; not called from this class.
private void create_test_lines()
{
double[] coords = { 0., 0., 0., 100000., 0., 0., 1000., 1000., 0., 1000., 1000., 1000. };
int[] stripLengths = { 4 };
LineStripArray myLines = new LineStripArray(4, GeometryArray.COORDINATES, stripLengths);
myLines.setCoordinates(0, coords);
this.setGeometry(myLines);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.security.authorization;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.smile.SmileFactory;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.apache.druid.metadata.MetadataStorageTablesConfig;
import org.apache.druid.metadata.TestDerbyConnector;
import org.apache.druid.security.basic.BasicAuthCommonCacheConfig;
import org.apache.druid.security.basic.BasicAuthUtils;
import org.apache.druid.security.basic.BasicSecurityDBResourceException;
import org.apache.druid.security.basic.authorization.BasicRoleBasedAuthorizer;
import org.apache.druid.security.basic.authorization.db.updater.CoordinatorBasicAuthorizerMetadataStorageUpdater;
import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerGroupMapping;
import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerPermission;
import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerRole;
import org.apache.druid.security.basic.authorization.entity.BasicAuthorizerUser;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class CoordinatorBasicAuthorizerMetadataStorageUpdaterTest
{
// Name of the single authorizer configured for these tests.
private static final String AUTHORIZER_NAME = "test";
// Users that the storage updater creates automatically on start():
// the admin and the internal system user, each assigned a same-named role.
private static final Map<String, BasicAuthorizerUser> BASE_USER_MAP = ImmutableMap.of(
BasicAuthUtils.ADMIN_NAME,
new BasicAuthorizerUser(BasicAuthUtils.ADMIN_NAME, ImmutableSet.of(BasicAuthUtils.ADMIN_NAME)),
BasicAuthUtils.INTERNAL_USER_NAME,
new BasicAuthorizerUser(BasicAuthUtils.INTERNAL_USER_NAME, ImmutableSet.of(
BasicAuthUtils.INTERNAL_USER_NAME))
);
// Matching built-in roles, both carrying the superuser permission set.
private static final Map<String, BasicAuthorizerRole> BASE_ROLE_MAP = ImmutableMap.of(
BasicAuthUtils.ADMIN_NAME,
new BasicAuthorizerRole(
BasicAuthUtils.ADMIN_NAME,
BasicAuthorizerPermission.makePermissionList(CoordinatorBasicAuthorizerMetadataStorageUpdater.SUPERUSER_PERMISSIONS)
),
BasicAuthUtils.INTERNAL_USER_NAME,
new BasicAuthorizerRole(
BasicAuthUtils.INTERNAL_USER_NAME,
BasicAuthorizerPermission.makePermissionList(CoordinatorBasicAuthorizerMetadataStorageUpdater.SUPERUSER_PERMISSIONS)
)
);
@Rule
public ExpectedException expectedException = ExpectedException.none();
// Fresh in-memory Derby instance per test method.
@Rule
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();
// System under test, rebuilt in setUp().
private CoordinatorBasicAuthorizerMetadataStorageUpdater updater;
// Smile-format mapper used to deserialize the updater's cached snapshots.
private ObjectMapper objectMapper;
@Before
public void setUp()
{
// The updater serializes its maps with Smile, so the test mapper must too.
objectMapper = new ObjectMapper(new SmileFactory());
TestDerbyConnector connector = derbyConnectorRule.getConnector();
MetadataStorageTablesConfig tablesConfig = derbyConnectorRule.metadataTablesConfigSupplier().get();
connector.createConfigTable();
// Wire the updater against the fresh Derby config table with a single
// authorizer and a no-op cache notifier.
updater = new CoordinatorBasicAuthorizerMetadataStorageUpdater(
new AuthorizerMapper(
ImmutableMap.of(
AUTHORIZER_NAME,
new BasicRoleBasedAuthorizer(
null,
AUTHORIZER_NAME,
null,
null,
null,
null,
null,
null
)
)
),
connector,
tablesConfig,
new BasicAuthCommonCacheConfig(null, null, null, null),
objectMapper,
new NoopBasicAuthorizerCacheNotifier(),
null
);
// start() seeds the default admin/internal users and roles (BASE_*_MAP).
updater.start();
}
// user tests
@Test
public void testCreateDeleteUser()
{
    // Creating a user should add it, with no roles, alongside the defaults...
    updater.createUser(AUTHORIZER_NAME, "druid");

    Map<String, BasicAuthorizerUser> expectedUsers = new HashMap<>(BASE_USER_MAP);
    expectedUsers.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of()));
    Map<String, BasicAuthorizerUser> observedUsers = BasicAuthUtils.deserializeAuthorizerUserMap(
        objectMapper,
        updater.getCurrentUserMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedUsers, observedUsers);

    // ...and deleting it should restore the default user map.
    updater.deleteUser(AUTHORIZER_NAME, "druid");
    expectedUsers.remove("druid");
    observedUsers = BasicAuthUtils.deserializeAuthorizerUserMap(
        objectMapper,
        updater.getCurrentUserMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedUsers, observedUsers);
}
@Test
public void testCreateDeleteGroupMapping()
{
    // A freshly created group mapping should appear in the serialized map...
    updater.createGroupMapping(AUTHORIZER_NAME, new BasicAuthorizerGroupMapping("druid", "CN=test", null));

    Map<String, BasicAuthorizerGroupMapping> expectedMappings = new HashMap<>();
    expectedMappings.put("druid", new BasicAuthorizerGroupMapping("druid", "CN=test", null));
    Map<String, BasicAuthorizerGroupMapping> observedMappings = BasicAuthUtils.deserializeAuthorizerGroupMappingMap(
        objectMapper,
        updater.getCurrentGroupMappingMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedMappings, observedMappings);

    // ...and deleting it should leave the map empty again.
    updater.deleteGroupMapping(AUTHORIZER_NAME, "druid");
    expectedMappings.remove("druid");
    observedMappings = BasicAuthUtils.deserializeAuthorizerGroupMappingMap(
        objectMapper,
        updater.getCurrentGroupMappingMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedMappings, observedMappings);
}
@Test
public void testDeleteNonExistentUser()
{
// Deleting an unknown user must fail with a descriptive DB-resource error.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("User [druid] does not exist.");
updater.deleteUser(AUTHORIZER_NAME, "druid");
}
@Test
public void testDeleteNonExistentGroupMapping()
{
// Deleting an unknown group mapping must fail with a descriptive error.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Group mapping [druid] does not exist.");
updater.deleteGroupMapping(AUTHORIZER_NAME, "druid");
}
@Test
public void testCreateDuplicateUser()
{
// The second create of the same user name must be rejected.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("User [druid] already exists.");
updater.createUser(AUTHORIZER_NAME, "druid");
updater.createUser(AUTHORIZER_NAME, "druid");
}
@Test
public void testCreateDuplicateGroupMapping()
{
// The second create of the same group mapping name must be rejected.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Group mapping [druid] already exists.");
updater.createGroupMapping(AUTHORIZER_NAME, new BasicAuthorizerGroupMapping("druid", "CN=test", null));
updater.createGroupMapping(AUTHORIZER_NAME, new BasicAuthorizerGroupMapping("druid", "CN=test", null));
}
// role tests
@Test
public void testCreateDeleteRole()
{
    // Creating a role should add it, with no permissions, to the defaults...
    updater.createRole(AUTHORIZER_NAME, "druid");

    Map<String, BasicAuthorizerRole> expectedRoles = new HashMap<>(BASE_ROLE_MAP);
    expectedRoles.put("druid", new BasicAuthorizerRole("druid", ImmutableList.of()));
    Map<String, BasicAuthorizerRole> observedRoles = BasicAuthUtils.deserializeAuthorizerRoleMap(
        objectMapper,
        updater.getCurrentRoleMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedRoles, observedRoles);

    // ...and deleting it should restore the default role map.
    updater.deleteRole(AUTHORIZER_NAME, "druid");
    expectedRoles.remove("druid");
    observedRoles = BasicAuthUtils.deserializeAuthorizerRoleMap(
        objectMapper,
        updater.getCurrentRoleMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedRoles, observedRoles);
}
@Test
public void testDeleteNonExistentRole()
{
// Deleting an unknown role must fail with a descriptive error.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Role [druid] does not exist.");
updater.deleteRole(AUTHORIZER_NAME, "druid");
}
@Test
public void testCreateDuplicateRole()
{
// The second create of the same role name must be rejected.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Role [druid] already exists.");
updater.createRole(AUTHORIZER_NAME, "druid");
updater.createRole(AUTHORIZER_NAME, "druid");
}
// role, user, and group mapping tests
@Test
public void testAddAndRemoveRoleToUser()
{
    // Assigning a role links it to the user while the role itself stays empty...
    updater.createUser(AUTHORIZER_NAME, "druid");
    updater.createRole(AUTHORIZER_NAME, "druidRole");
    updater.assignUserRole(AUTHORIZER_NAME, "druid", "druidRole");

    Map<String, BasicAuthorizerUser> expectedUsers = new HashMap<>(BASE_USER_MAP);
    expectedUsers.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of("druidRole")));
    Map<String, BasicAuthorizerRole> expectedRoles = new HashMap<>(BASE_ROLE_MAP);
    expectedRoles.put("druidRole", new BasicAuthorizerRole("druidRole", ImmutableList.of()));

    Map<String, BasicAuthorizerUser> observedUsers = BasicAuthUtils.deserializeAuthorizerUserMap(
        objectMapper,
        updater.getCurrentUserMapBytes(AUTHORIZER_NAME)
    );
    Map<String, BasicAuthorizerRole> observedRoles = BasicAuthUtils.deserializeAuthorizerRoleMap(
        objectMapper,
        updater.getCurrentRoleMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedUsers, observedUsers);
    Assert.assertEquals(expectedRoles, observedRoles);

    // ...and unassigning leaves the user with no roles but keeps the role.
    updater.unassignUserRole(AUTHORIZER_NAME, "druid", "druidRole");
    expectedUsers.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of()));
    observedUsers = BasicAuthUtils.deserializeAuthorizerUserMap(
        objectMapper,
        updater.getCurrentUserMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedUsers, observedUsers);
    Assert.assertEquals(expectedRoles, observedRoles);
}
// role, user, and group mapping tests
@Test
public void testAddAndRemoveRoleToGroupMapping()
{
    // Assigning a role links it to the group mapping, leaving the role empty...
    updater.createGroupMapping(AUTHORIZER_NAME, new BasicAuthorizerGroupMapping("druid", "CN=test", null));
    updater.createRole(AUTHORIZER_NAME, "druidRole");
    updater.assignGroupMappingRole(AUTHORIZER_NAME, "druid", "druidRole");

    Map<String, BasicAuthorizerGroupMapping> expectedMappings = new HashMap<>();
    expectedMappings.put("druid", new BasicAuthorizerGroupMapping("druid", "CN=test", ImmutableSet.of("druidRole")));
    Map<String, BasicAuthorizerRole> expectedRoles = new HashMap<>(BASE_ROLE_MAP);
    expectedRoles.put("druidRole", new BasicAuthorizerRole("druidRole", ImmutableList.of()));

    Map<String, BasicAuthorizerGroupMapping> observedMappings = BasicAuthUtils.deserializeAuthorizerGroupMappingMap(
        objectMapper,
        updater.getCurrentGroupMappingMapBytes(AUTHORIZER_NAME)
    );
    Map<String, BasicAuthorizerRole> observedRoles = BasicAuthUtils.deserializeAuthorizerRoleMap(
        objectMapper,
        updater.getCurrentRoleMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedMappings, observedMappings);
    Assert.assertEquals(expectedRoles, observedRoles);

    // ...and unassigning empties the mapping's role set but keeps the role.
    updater.unassignGroupMappingRole(AUTHORIZER_NAME, "druid", "druidRole");
    expectedMappings.put("druid", new BasicAuthorizerGroupMapping("druid", "CN=test", ImmutableSet.of()));
    observedMappings = BasicAuthUtils.deserializeAuthorizerGroupMappingMap(
        objectMapper,
        updater.getCurrentGroupMappingMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedMappings, observedMappings);
    Assert.assertEquals(expectedRoles, observedRoles);
}
@Test
public void testAddRoleToNonExistentUser()
{
// Assigning a role to an unknown user must fail.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("User [nonUser] does not exist.");
updater.createRole(AUTHORIZER_NAME, "druid");
updater.assignUserRole(AUTHORIZER_NAME, "nonUser", "druid");
}
@Test
public void testAddRoleToNonExistentGroupMapping()
{
// Assigning a role to an unknown group mapping must fail.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Group mapping [nonUser] does not exist.");
updater.createRole(AUTHORIZER_NAME, "druid");
updater.assignGroupMappingRole(AUTHORIZER_NAME, "nonUser", "druid");
}
@Test
public void testAddNonexistentRoleToUser()
{
// Assigning an unknown role to an existing user must fail.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Role [nonRole] does not exist.");
updater.createUser(AUTHORIZER_NAME, "druid");
updater.assignUserRole(AUTHORIZER_NAME, "druid", "nonRole");
}
@Test
public void testAddNonexistentRoleToGroupMapping()
{
// Assigning an unknown role to an existing group mapping must fail.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Role [nonRole] does not exist.");
updater.createGroupMapping(AUTHORIZER_NAME, new BasicAuthorizerGroupMapping("druid", "CN=test", null));
updater.assignGroupMappingRole(AUTHORIZER_NAME, "druid", "nonRole");
}
@Test
public void testAddExistingRoleToUserFails()
{
// Assigning a role the user already holds must fail on the second attempt.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("User [druid] already has role [druidRole].");
updater.createUser(AUTHORIZER_NAME, "druid");
updater.createRole(AUTHORIZER_NAME, "druidRole");
updater.assignUserRole(AUTHORIZER_NAME, "druid", "druidRole");
updater.assignUserRole(AUTHORIZER_NAME, "druid", "druidRole");
}
@Test
public void testAddExistingRoleToGroupMappingFails()
{
// Assigning a role the mapping already holds must fail on the second attempt.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Group mapping [druid] already has role [druidRole].");
updater.createGroupMapping(AUTHORIZER_NAME, new BasicAuthorizerGroupMapping("druid", "CN=test", null));
updater.createRole(AUTHORIZER_NAME, "druidRole");
updater.assignGroupMappingRole(AUTHORIZER_NAME, "druid", "druidRole");
updater.assignGroupMappingRole(AUTHORIZER_NAME, "druid", "druidRole");
}
@Test
public void testAddExistingRoleToGroupMappingWithRoleFails()
{
// A mapping created with the role already attached must reject a re-assign.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Group mapping [druid] already has role [druidRole].");
updater.createGroupMapping(AUTHORIZER_NAME, new BasicAuthorizerGroupMapping("druid", "CN=test", ImmutableSet.of("druidRole")));
updater.createRole(AUTHORIZER_NAME, "druidRole");
updater.assignGroupMappingRole(AUTHORIZER_NAME, "druid", "druidRole");
}
@Test
public void testUnassignInvalidRoleAssignmentToUserFails()
{
    // Unassigning a role the user never had must raise, with the user and
    // role maps still in their freshly-created state beforehand.
    expectedException.expect(BasicSecurityDBResourceException.class);
    expectedException.expectMessage("User [druid] does not have role [druidRole].");

    updater.createUser(AUTHORIZER_NAME, "druid");
    updater.createRole(AUTHORIZER_NAME, "druidRole");

    Map<String, BasicAuthorizerUser> expectedUsers = new HashMap<>(BASE_USER_MAP);
    expectedUsers.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of()));
    Map<String, BasicAuthorizerRole> expectedRoles = new HashMap<>(BASE_ROLE_MAP);
    expectedRoles.put("druidRole", new BasicAuthorizerRole("druidRole", ImmutableList.of()));

    Map<String, BasicAuthorizerUser> observedUsers = BasicAuthUtils.deserializeAuthorizerUserMap(
        objectMapper,
        updater.getCurrentUserMapBytes(AUTHORIZER_NAME)
    );
    Map<String, BasicAuthorizerRole> observedRoles = BasicAuthUtils.deserializeAuthorizerRoleMap(
        objectMapper,
        updater.getCurrentRoleMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedUsers, observedUsers);
    Assert.assertEquals(expectedRoles, observedRoles);

    updater.unassignUserRole(AUTHORIZER_NAME, "druid", "druidRole");
}
@Test
public void testUnassignInvalidRoleAssignmentToGroupMappingFails()
{
    // The trailing unassign must fail: the role exists but was never granted to the mapping.
    expectedException.expect(BasicSecurityDBResourceException.class);
    expectedException.expectMessage("Group mapping [druid] does not have role [druidRole].");
    updater.createGroupMapping(AUTHORIZER_NAME, new BasicAuthorizerGroupMapping("druid", "CN=test", null));
    updater.createRole(AUTHORIZER_NAME, "druidRole");

    // Sanity-check the stored state first: mapping and role both exist, no assignment.
    Map<String, BasicAuthorizerGroupMapping> wantMappings = new HashMap<>();
    wantMappings.put("druid", new BasicAuthorizerGroupMapping("druid", "CN=test", null));
    Map<String, BasicAuthorizerRole> wantRoles = new HashMap<>(BASE_ROLE_MAP);
    wantRoles.put("druidRole", new BasicAuthorizerRole("druidRole", ImmutableList.of()));

    Map<String, BasicAuthorizerGroupMapping> gotMappings = BasicAuthUtils.deserializeAuthorizerGroupMappingMap(
        objectMapper,
        updater.getCurrentGroupMappingMapBytes(AUTHORIZER_NAME)
    );
    Map<String, BasicAuthorizerRole> gotRoles = BasicAuthUtils.deserializeAuthorizerRoleMap(
        objectMapper,
        updater.getCurrentRoleMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(wantMappings, gotMappings);
    Assert.assertEquals(wantRoles, gotRoles);

    // Throws: no such assignment to remove.
    updater.unassignGroupMappingRole(AUTHORIZER_NAME, "druid", "druidRole");
}
// role and permission tests
@Test
public void testSetRolePermissions()
{
    // Grant a role one WRITE permission, verify both maps, then clear the permissions
    // (null) and verify the role keeps its assignment but loses the permission list.
    updater.createUser(AUTHORIZER_NAME, "druid");
    updater.createRole(AUTHORIZER_NAME, "druidRole");
    updater.assignUserRole(AUTHORIZER_NAME, "druid", "druidRole");

    List<ResourceAction> permsToAdd = ImmutableList.of(
        new ResourceAction(
            new Resource("testResource", ResourceType.DATASOURCE),
            Action.WRITE
        )
    );
    updater.setPermissions(AUTHORIZER_NAME, "druidRole", permsToAdd);

    Map<String, BasicAuthorizerUser> expectedUserMap = new HashMap<>(BASE_USER_MAP);
    expectedUserMap.put("druid", new BasicAuthorizerUser("druid", ImmutableSet.of("druidRole")));
    Map<String, BasicAuthorizerRole> expectedRoleMap = new HashMap<>(BASE_ROLE_MAP);
    expectedRoleMap.put(
        "druidRole",
        new BasicAuthorizerRole("druidRole", BasicAuthorizerPermission.makePermissionList(permsToAdd))
    );
    Map<String, BasicAuthorizerUser> actualUserMap = BasicAuthUtils.deserializeAuthorizerUserMap(
        objectMapper,
        updater.getCurrentUserMapBytes(AUTHORIZER_NAME)
    );
    Map<String, BasicAuthorizerRole> actualRoleMap = BasicAuthUtils.deserializeAuthorizerRoleMap(
        objectMapper,
        updater.getCurrentRoleMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedUserMap, actualUserMap);
    Assert.assertEquals(expectedRoleMap, actualRoleMap);

    // Clearing permissions with null should leave assignments intact.
    updater.setPermissions(AUTHORIZER_NAME, "druidRole", null);
    expectedRoleMap.put("druidRole", new BasicAuthorizerRole("druidRole", null));
    actualRoleMap = BasicAuthUtils.deserializeAuthorizerRoleMap(
        objectMapper,
        updater.getCurrentRoleMapBytes(AUTHORIZER_NAME)
    );
    // BUG FIX: re-read the user map before re-asserting. The original re-asserted the
    // stale pre-clear snapshot of actualUserMap, so the final user-map check verified
    // nothing about the state after setPermissions(..., null).
    actualUserMap = BasicAuthUtils.deserializeAuthorizerUserMap(
        objectMapper,
        updater.getCurrentUserMapBytes(AUTHORIZER_NAME)
    );
    Assert.assertEquals(expectedUserMap, actualUserMap);
    Assert.assertEquals(expectedRoleMap, actualRoleMap);
}
@Test
public void testAddPermissionToNonExistentRole()
{
// Setting permissions on a role that was never created must be rejected.
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Role [druidRole] does not exist.");
List<ResourceAction> permsToAdd = ImmutableList.of(
new ResourceAction(
new Resource("testResource", ResourceType.DATASOURCE),
Action.WRITE
)
);
// Throws: "druidRole" does not exist.
updater.setPermissions(AUTHORIZER_NAME, "druidRole", permsToAdd);
}
@Test
public void testAddBadPermission()
{
// A permission whose resource-name pattern is not a valid regex must be rejected
// ("??????????" does not compile as a java.util.regex pattern).
expectedException.expect(BasicSecurityDBResourceException.class);
expectedException.expectMessage("Invalid permission, resource name regex[??????????] does not compile.");
updater.createRole(AUTHORIZER_NAME, "druidRole");
List<ResourceAction> permsToAdd = ImmutableList.of(
new ResourceAction(
new Resource("??????????", ResourceType.DATASOURCE),
Action.WRITE
)
);
// Throws during regex validation of the resource name.
updater.setPermissions(AUTHORIZER_NAME, "druidRole", permsToAdd);
}
}
| |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.rest.dmn.service.api;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.message.BasicHeader;
import org.eclipse.jetty.server.Server;
import org.flowable.dmn.api.DmnHistoryService;
import org.flowable.dmn.api.DmnRepositoryService;
import org.flowable.dmn.api.DmnRuleService;
import org.flowable.dmn.engine.DmnEngine;
import org.flowable.dmn.engine.DmnEngineConfiguration;
import org.flowable.dmn.engine.DmnEngines;
import org.flowable.dmn.engine.impl.test.AbstractDmnTestCase;
import org.flowable.dmn.engine.test.DmnTestHelper;
import org.flowable.rest.dmn.conf.ApplicationConfiguration;
import org.flowable.rest.dmn.util.TestServerUtil;
import org.flowable.rest.dmn.util.TestServerUtil.TestServer;
import org.junit.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.ApplicationContext;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import junit.framework.AssertionFailedError;
/**
 * Base class for Flowable DMN REST tests.
 *
 * <p>On first class load it boots an embedded Jetty test server (once per JVM), looks up the
 * DMN engine services from the Spring context, and creates a shared, pre-authenticated
 * {@link CloseableHttpClient}. Subclass tests use {@link #executeRequest} /
 * {@link #executeBinaryRequest} to call the REST API and the assert helpers to check paged
 * JSON responses. All responses handed out are tracked and closed after each test.
 */
public abstract class BaseSpringDmnRestTestCase extends AbstractDmnTestCase {

    private static final Logger LOGGER = LoggerFactory.getLogger(BaseSpringDmnRestTestCase.class);

    protected static String SERVER_URL_PREFIX;
    protected static DmnRestUrlBuilder URL_BUILDER;

    protected static Server server;
    protected static ApplicationContext appContext;
    protected ObjectMapper objectMapper = new ObjectMapper();

    protected static DmnEngine dmnEngine;
    // Deployment created from @DmnDeployment annotations for the currently running test.
    protected String deploymentId;
    protected Throwable exception;

    protected static DmnEngineConfiguration dmnEngineConfiguration;
    protected static DmnRepositoryService dmnRepositoryService;
    protected static DmnRuleService dmnRuleService;
    protected static DmnHistoryService dmnHistoryService;

    protected static CloseableHttpClient client;
    // Every response returned by internalExecuteRequest; drained in closeHttpConnections().
    protected static LinkedList<CloseableHttpResponse> httpResponses = new LinkedList<>();

    static {
        TestServer testServer = TestServerUtil.createAndStartServer(ApplicationConfiguration.class);
        server = testServer.getServer();
        appContext = testServer.getApplicationContext();
        SERVER_URL_PREFIX = testServer.getServerUrlPrefix();
        URL_BUILDER = DmnRestUrlBuilder.usingBaseUrl(SERVER_URL_PREFIX);

        // Lookup services
        dmnEngine = DmnEngines.getDefaultDmnEngine();
        dmnEngineConfiguration = appContext.getBean(DmnEngineConfiguration.class);
        dmnRepositoryService = dmnEngineConfiguration.getDmnRepositoryService();
        dmnRuleService = dmnEngineConfiguration.getDmnRuleService();
        dmnHistoryService = dmnEngineConfiguration.getDmnHistoryService();

        // Create http client for all tests, pre-authenticated as kermit/kermit.
        CredentialsProvider provider = new BasicCredentialsProvider();
        UsernamePasswordCredentials credentials = new UsernamePasswordCredentials("kermit", "kermit");
        provider.setCredentials(AuthScope.ANY, credentials);
        client = HttpClientBuilder.create().setDefaultCredentialsProvider(provider).build();

        // Clean shutdown of the shared client and server when the JVM exits.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                if (client != null) {
                    try {
                        client.close();
                    } catch (IOException e) {
                        LOGGER.error("Could not close http client", e);
                    }
                }
                if (server != null && server.isRunning()) {
                    try {
                        server.stop();
                    } catch (Exception e) {
                        LOGGER.error("Error stopping server", e);
                    }
                }
            }
        });
    }

    /**
     * JUnit 3-style wrapper: deploys annotated DMN resources before the test, logs and
     * records any failure, and always tears down the deployment, verifies a clean DB,
     * resets the clock and closes tracked HTTP responses afterwards.
     */
    @Override
    public void runBare() throws Throwable {
        try {
            deploymentId = DmnTestHelper.annotationDeploymentSetUp(dmnEngine, getClass(), getName());
            super.runBare();
        } catch (AssertionFailedError e) {
            LOGGER.error(EMPTY_LINE);
            LOGGER.error("ASSERTION FAILED: {}", e, e);
            exception = e;
            throw e;
        } catch (Throwable e) {
            LOGGER.error(EMPTY_LINE);
            LOGGER.error("EXCEPTION: {}", e, e);
            exception = e;
            throw e;
        } finally {
            DmnTestHelper.annotationDeploymentTearDown(dmnEngine, deploymentId, getClass(), getName());
            DmnTestHelper.assertAndEnsureCleanDb(dmnEngine);
            dmnEngineConfiguration.getClock().reset();
            closeHttpConnections();
        }
    }

    /**
     * IMPORTANT: calling method is responsible for calling close() on returned {@link HttpResponse} to free the connection.
     */
    public CloseableHttpResponse executeRequest(HttpUriRequest request, int expectedStatusCode) {
        return internalExecuteRequest(request, expectedStatusCode, true);
    }

    /**
     * IMPORTANT: calling method is responsible for calling close() on returned {@link HttpResponse} to free the connection.
     */
    public CloseableHttpResponse executeBinaryRequest(HttpUriRequest request, int expectedStatusCode) {
        return internalExecuteRequest(request, expectedStatusCode, false);
    }

    /**
     * Executes the request, asserts the expected status code, and tracks the response for
     * cleanup. Returns null only if execution failed (after failing the test).
     */
    protected CloseableHttpResponse internalExecuteRequest(HttpUriRequest request, int expectedStatusCode, boolean addJsonContentType) {
        CloseableHttpResponse response = null;
        try {
            if (addJsonContentType && request.getFirstHeader(HttpHeaders.CONTENT_TYPE) == null) {
                // Revert to default content-type
                request.addHeader(new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"));
            }
            response = client.execute(request);
            Assert.assertNotNull(response.getStatusLine());

            int responseStatusCode = response.getStatusLine().getStatusCode();
            if (expectedStatusCode != responseStatusCode) {
                LOGGER.info("Wrong status code : {}, but should be {}", responseStatusCode, expectedStatusCode);
                LOGGER.info("Response body: {}", IOUtils.toString(response.getEntity().getContent()));
            }

            Assert.assertEquals(expectedStatusCode, responseStatusCode);
            httpResponses.add(response);
            return response;

        } catch (IOException e) {
            // FIX: ClientProtocolException is a subclass of IOException, so the original's
            // two identical catch blocks collapse into one. Log the full exception before
            // failing, since Assert.fail() only preserves the message, not the stack trace.
            LOGGER.error("Error executing request", e);
            Assert.fail(e.getMessage());
        }
        return null;
    }

    /** Closes a single response, failing the test if the connection cannot be released. */
    public void closeResponse(CloseableHttpResponse response) {
        if (response != null) {
            try {
                response.close();
            } catch (IOException e) {
                fail("Could not close http connection");
            }
        }
    }

    /** Best-effort close of every tracked response; called from runBare()'s finally. */
    protected void closeHttpConnections() {
        for (CloseableHttpResponse response : httpResponses) {
            if (response != null) {
                try {
                    response.close();
                } catch (IOException e) {
                    LOGGER.error("Could not close http connection", e);
                }
            }
        }
        httpResponses.clear();
    }

    /** URL-encodes a query/path component as UTF-8; returns null for null input. */
    protected String encode(String string) {
        if (string != null) {
            try {
                return URLEncoder.encode(string, "UTF-8");
            } catch (UnsupportedEncodingException uee) {
                throw new IllegalStateException("JVM does not support UTF-8 encoding.", uee);
            }
        }
        return null;
    }

    /**
     * Checks if the returned "data" array (child-node of root-json node returned by invoking a GET on the given url) contains entries with the given ID's.
     */
    protected void assertResultsPresentInDataResponse(String url, String... expectedResourceIds) throws JsonProcessingException, IOException {
        int numberOfResultsExpected = expectedResourceIds.length;

        // Do the actual call
        CloseableHttpResponse response = executeRequest(new HttpGet(SERVER_URL_PREFIX + url), HttpStatus.SC_OK);

        // Check status and size
        JsonNode dataNode = objectMapper.readTree(response.getEntity().getContent()).get("data");
        closeResponse(response);
        assertEquals(numberOfResultsExpected, dataNode.size());

        // Check presence of ID's
        List<String> toBeFound = new ArrayList<>(Arrays.asList(expectedResourceIds));
        Iterator<JsonNode> it = dataNode.iterator();
        while (it.hasNext()) {
            String id = it.next().get("id").textValue();
            toBeFound.remove(id);
        }
        assertTrue("Not all expected ids have been found in result, missing: " + StringUtils.join(toBeFound, ", "), toBeFound.isEmpty());
    }

    /**
     * POSTs the given body to the url, asserts the status code, and — for 200 OK — verifies
     * the "data" array contains exactly the expected resource ids.
     */
    protected void assertResultsPresentInPostDataResponseWithStatusCheck(String url, ObjectNode body, int expectedStatusCode, String... expectedResourceIds) throws JsonProcessingException, IOException {
        int numberOfResultsExpected = 0;
        if (expectedResourceIds != null) {
            numberOfResultsExpected = expectedResourceIds.length;
        }

        // Do the actual call
        HttpPost post = new HttpPost(SERVER_URL_PREFIX + url);
        post.setEntity(new StringEntity(body.toString()));
        CloseableHttpResponse response = executeRequest(post, expectedStatusCode);

        if (expectedStatusCode == HttpStatus.SC_OK) {
            // Check status and size
            JsonNode rootNode = objectMapper.readTree(response.getEntity().getContent());
            JsonNode dataNode = rootNode.get("data");
            assertEquals(numberOfResultsExpected, dataNode.size());

            // Check presence of ID's
            if (expectedResourceIds != null) {
                List<String> toBeFound = new ArrayList<>(Arrays.asList(expectedResourceIds));
                Iterator<JsonNode> it = dataNode.iterator();
                while (it.hasNext()) {
                    String id = it.next().get("id").textValue();
                    toBeFound.remove(id);
                }
                assertTrue("Not all entries have been found in result, missing: " + StringUtils.join(toBeFound, ", "), toBeFound.isEmpty());
            }
        }

        closeResponse(response);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.qpid.jms.bench;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import javax.jms.DeliveryMode;
import javax.jms.Destination;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageListener;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TextMessage;
import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.broker.jmx.QueueViewMBean;
import org.apache.activemq.broker.region.policy.PolicyEntry;
import org.apache.activemq.broker.region.policy.PolicyMap;
import org.apache.activemq.broker.region.policy.VMPendingQueueMessageStoragePolicy;
import org.apache.qpid.jms.support.AmqpTestSupport;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Consumption-rate micro-benchmarks over AMQP against an embedded ActiveMQ broker.
 * Marked {@code @Ignore}: these are manual profiling aids, not functional tests.
 */
@Ignore
public class ConsumeFromAMQPTest extends AmqpTestSupport {

    private final int MSG_COUNT = 50 * 1000;
    private final int NUM_RUNS = 10;

    @Override
    protected boolean isForceAsyncSends() {
        return true;
    }

    @Override
    protected boolean isForceSyncSends() {
        return false;
    }

    @Override
    protected String getAmqpTransformer() {
        return "raw";
    }

    @Override
    protected boolean isForceAsyncAcks() {
        return true;
    }

    @Override
    public String getAmqpConnectionURIOptions() {
        // Presettle all links so acknowledgements don't skew the measured rates.
        return "jms.presettlePolicy.presettleAll=true";
    }

    /** Single send/receive round-trip; handy as a profiler attach point. */
    @Test
    public void oneConsumedForProfile() throws Exception {
        connection = createAmqpConnection();
        connection.start();

        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        Queue queue = session.createQueue(getDestinationName());
        MessageProducer producer = session.createProducer(queue);
        producer.setDeliveryMode(DeliveryMode.NON_PERSISTENT);

        TextMessage message = session.createTextMessage();
        message.setText("hello");
        producer.send(message);
        producer.close();

        QueueViewMBean queueView = getProxyToQueue(getDestinationName());
        assertEquals("Queue should have a message", 1, queueView.getQueueSize());

        MessageConsumer consumer = session.createConsumer(queue);
        Message received = consumer.receive(7000);
        assertNotNull(received);
        consumer.close();

        // FIX: assertion message previously read "ano messages".
        assertEquals("Queue should have no messages", 0, queueView.getQueueSize());
    }

    /** Measures synchronous receive() throughput over NUM_RUNS batches of MSG_COUNT. */
    @Test
    public void testConsumeRateFromQueue() throws Exception {
        connection = createAmqpConnection();
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        Queue queue = session.createQueue(getDestinationName());

        // Warm Up the broker.
        produceMessages(queue, MSG_COUNT);
        consumerMessages(queue, MSG_COUNT);

        QueueViewMBean queueView = getProxyToQueue(getDestinationName());
        queueView.purge();

        List<Long> sendTimes = new ArrayList<Long>();
        long cumulative = 0;

        for (int i = 0; i < NUM_RUNS; ++i) {
            produceMessages(queue, MSG_COUNT);
            long result = consumerMessages(queue, MSG_COUNT);
            sendTimes.add(result);
            cumulative += result;
            LOG.info("Time to send {} topic messages: {} ms", MSG_COUNT, result);
            queueView.purge();
        }

        long smoothed = cumulative / NUM_RUNS;
        LOG.info("Smoothed send time for {} messages: {}", MSG_COUNT, smoothed);
    }

    /** Measures async MessageListener throughput over NUM_RUNS batches of MSG_COUNT. */
    @Test
    public void testConsumeRateFromQueueAsync() throws Exception {
        connection = createAmqpConnection();
        connection.start();
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        Queue queue = session.createQueue(getDestinationName());

        // Warm Up the broker.
        produceMessages(queue, MSG_COUNT);
        consumerMessagesAsync(queue, MSG_COUNT);

        QueueViewMBean queueView = getProxyToQueue(getDestinationName());

        List<Long> sendTimes = new ArrayList<Long>();
        long cumulative = 0;

        for (int i = 0; i < NUM_RUNS; ++i) {
            produceMessages(queue, MSG_COUNT);
            long result = consumerMessagesAsync(queue, MSG_COUNT);
            sendTimes.add(result);
            cumulative += result;
            LOG.info("Time to send {} topic messages: {} ms", MSG_COUNT, result);
            queueView.purge();
        }

        long smoothed = cumulative / NUM_RUNS;
        LOG.info("Smoothed send time for {} messages: {}", MSG_COUNT, smoothed);
    }

    /** Synchronously receives msgCount messages; returns elapsed milliseconds. */
    protected long consumerMessages(Destination destination, int msgCount) throws Exception {
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer = session.createConsumer(destination);

        long startTime = System.currentTimeMillis();
        for (int i = 0; i < msgCount; ++i) {
            Message message = consumer.receive(7000);
            assertNotNull("Failed to receive message " + i, message);
        }
        long result = (System.currentTimeMillis() - startTime);
        consumer.close();
        return result;
    }

    /** Receives msgCount messages via a listener; returns elapsed milliseconds. */
    protected long consumerMessagesAsync(Destination destination, int msgCount) throws Exception {
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer = session.createConsumer(destination);

        // BUG FIX: the latch was previously sized with the MSG_COUNT field, silently
        // ignoring the msgCount parameter. All current callers pass MSG_COUNT, so
        // behavior is unchanged for them, but the method now honors its contract.
        final CountDownLatch doneLatch = new CountDownLatch(msgCount);

        long startTime = System.currentTimeMillis();
        consumer.setMessageListener(new MessageListener() {

            @Override
            public void onMessage(Message message) {
                doneLatch.countDown();
            }
        });

        assertTrue(doneLatch.await(60, TimeUnit.SECONDS));
        long result = (System.currentTimeMillis() - startTime);
        consumer.close();
        return result;
    }

    /** Sends msgCount copies of a small NON_PERSISTENT text message. */
    protected void produceMessages(Destination destination, int msgCount) throws Exception {
        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageProducer producer = session.createProducer(destination);
        producer.setDeliveryMode(DeliveryMode.NON_PERSISTENT);

        TextMessage message = session.createTextMessage();
        message.setText("hello");

        for (int i = 0; i < msgCount; ++i) {
            producer.send(message);
        }
        producer.close();
    }

    @Override
    protected void configureBrokerPolicies(BrokerService broker) {
        // In-memory pending store, no audit/expiry overhead: keep the broker out of the
        // way so the client-side consume path dominates the measurement.
        PolicyEntry policyEntry = new PolicyEntry();
        policyEntry.setPendingQueuePolicy(new VMPendingQueueMessageStoragePolicy());
        policyEntry.setPrioritizedMessages(false);
        policyEntry.setExpireMessagesPeriod(0);
        policyEntry.setEnableAudit(false);
        policyEntry.setOptimizedDispatch(false);
        policyEntry.setQueuePrefetch(1000);

        PolicyMap policyMap = new PolicyMap();
        policyMap.setDefaultEntry(policyEntry);

        broker.setDestinationPolicy(policyMap);
    }
}
| |
/*
* This work is licensed under the Creative Commons Attribution 3.0 Unported
* License. To view a copy of this license, visit
* http://creativecommons.org/licenses/by/3.0/ or send a letter to Creative
* Commons, 171 Second Street, Suite 300, San Francisco, California, 94105, USA.
*/
package org.tros.utils;
import java.io.IOException;
import java.util.HashMap;
import java.util.Properties;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.tros.torgo.TorgoToolkit;
import org.tros.utils.logging.Logging;
/**
* This is a utility class for accessing random members of a collection and also
* generating random numbers/values.
*
* In this class, there are PUID values which is for pseudo-unique-ID values.
* These values are unique within a given simulation, but can be duplicated for
* repeatability
*
* @author matta
*/
public final class Random {
/**
 * Three-valued ("3 state") result type, produced by {@code nextTriState()} with
 * roughly equal probability for each constant.
 */
public enum TriState {
TRUE,
FALSE,
MAYBE
}
/**
 * Scope at which PUID counters are keyed: one shared counter ({@code useInstance},
 * keyed by the "puid" default), one per class name ({@code useClass}), or one per
 * package name ({@code usePackage}).
 */
public enum UuidIncrementType {
useInstance,
useClass,
usePackage
}
// NOTE(review): EPSILON appears unused in the visible code — confirm before removing.
private static final double EPSILON = 1E-14;
// Per-thread Random instances (see getInstance()).
// NOTE(review): plain HashMaps accessed from getInstance(), which is not synchronized —
// confirm intended thread-safety.
private static final HashMap<Thread, java.util.Random> RANDOMS;
// Randoms shared across threads under a caller-supplied key (see getInstance(Object)).
private static final HashMap<Object, java.util.Random> SPECIFIC_RANDOMS;
// Default PUID keying scope; may be overridden by the properties file in the static block.
private static UuidIncrementType incrementType = UuidIncrementType.useClass;
// When true, every new Random is seeded with seedValue for repeatable runs.
private static boolean doSeed;
private static int seedValue;
// PUID counters, keyed per incrementType scope; each starts at 1.
private static final HashMap<String, AtomicLong> COUNTERS;
// Counter key used when no class/package scope applies (useInstance).
private static final String DEFAULT_KEY = "puid";
/**
 * Hidden constructor: utility class with only static members, never instantiated.
 */
private Random() {
}
/**
 * Static constructor: loads seeding and PUID-increment configuration from the
 * class's .properties resource; on any load/parse problem the fatal log fires and
 * the field defaults remain in effect.
 */
static {
    COUNTERS = new HashMap<>();
    Properties prop = new Properties();
    String propFile = Random.class.getCanonicalName().replace('.', '/') + ".properties";
    try {
        prop.load(TorgoToolkit.getDefaultResourceAccessor().open(propFile));
        incrementType = UuidIncrementType.valueOf(prop.getProperty("uuidIncrementType"));
        doSeed = Boolean.parseBoolean(prop.getProperty("doSeed"));
        seedValue = Integer.parseInt(prop.getProperty("seedValue"));
    } catch (NullPointerException | IllegalArgumentException | IOException ex) {
        // BUG FIX: a missing "seedValue" property makes Integer.parseInt(null) throw
        // NumberFormatException, and an unknown "uuidIncrementType" value makes valueOf
        // throw IllegalArgumentException — neither was caught before, which would abort
        // class initialization with ExceptionInInitializerError. NumberFormatException
        // is a subclass of IllegalArgumentException, so this catch covers both, and
        // NullPointerException is kept for valueOf(null).
        Logging.getLogFactory().getLogger(Random.class).fatal(null, ex);
    }
    RANDOMS = new HashMap<>();
    SPECIFIC_RANDOMS = new HashMap<>();
}
/**
 * Per-thread accessor: each thread lazily receives its own {@code java.util.Random},
 * seeded with {@code seedValue} when seeding is enabled so that runs are repeatable.
 *
 * @return the calling thread's Random object.
 */
private static java.util.Random getInstance() {
    return RANDOMS.computeIfAbsent(
            Thread.currentThread(),
            t -> doSeed ? new java.util.Random(seedValue) : new java.util.Random());
}
/**
 * Keyed accessor for when one specific Random must be shared across threads:
 * the same key always yields the same lazily created instance, seeded with
 * {@code seedValue} when seeding is enabled.
 *
 * @param key key identifying the shared random object.
 * @return the Random object registered under that key.
 */
public static java.util.Random getInstance(final Object key) {
    return SPECIFIC_RANDOMS.computeIfAbsent(
            key,
            k -> doSeed ? new java.util.Random(seedValue) : new java.util.Random());
}
/**
 * Reset the random objects to initial state (only useful if they are seeded) AND
 * clear all PUID counters. Equivalent to {@code reset(true)}.
 */
public static synchronized void reset() {
reset(true);
}
/**
 * Reset the PUID counter for the given class's scope. The counter stores the NEXT
 * suffix to hand out, so after this call the next PUID generated for this scope
 * carries suffix {@code value + 1}.
 *
 * @param c class whose counter (per the configured increment scope) is reset.
 * @param value the last-issued value the counter should be rewound to.
 */
public static synchronized void reset(Class<?> c, long value) {
    String key;
    switch (incrementType) {
        case useClass:
            key = c.getName();
            break;
        case usePackage:
            key = c.getPackage().getName();
            break;
        default:
            // useInstance: all classes share the single default counter.
            key = DEFAULT_KEY;
            break;
    }
    COUNTERS.computeIfAbsent(key, k -> new AtomicLong(1)).set(value + 1);
}
/**
 * Reset all random objects to their initial state (only useful when seeded),
 * optionally clearing the PUID counters as well.
 *
 * @param clearCount true to also wipe the PUID counters.
 */
public static synchronized void reset(final boolean clearCount) {
    if (clearCount) {
        COUNTERS.clear();
    }
    // Dropping the cached instances forces re-creation (and re-seeding) on next use.
    RANDOMS.clear();
    SPECIFIC_RANDOMS.clear();
}
/**
 * Get a new PUID value for the specified class type. Alias of {@link #getPUID(Class)}.
 *
 * @param c the class type
 * @return a new PUID value
 */
public static String getRandomName(final Class<?> c) {
return getPUID(c);
}
/**
 * Generate a new pseudo-unique ID ("key-N"), where the key is derived from the
 * class or package name per the given scope and N is a per-key counter that
 * starts at 1.
 *
 * @param c class to derive the counter key from.
 * @param type scoping rule for the counter key.
 * @return a new PUID value.
 */
public static synchronized String getPUID(final Class<?> c, UuidIncrementType type) {
    String key;
    switch (type) {
        case useClass:
            key = c.getName();
            break;
        case usePackage:
            key = c.getPackage().getName();
            break;
        default:
            // useInstance: everything shares the single default counter.
            key = DEFAULT_KEY;
            break;
    }
    long next = COUNTERS.computeIfAbsent(key, k -> new AtomicLong(1)).getAndIncrement();
    return key + "-" + next;
}
/**
 * Get a new PUID value for a specified class type, using the globally configured
 * increment scope.
 *
 * Pseudo UID.
 *
 * @param c the class type
 * @return a new PUID value
 */
public static synchronized String getPUID(final Class<?> c) {
return getPUID(c, incrementType);
}
/**
 * Get a new PUID value for a specified class type.
 *
 * @param c the class type
 * @param strength the strength of the PUID — currently ignored; this overload
 * behaves exactly like {@link #getPUID(Class)}.
 * @return a new PUID value
 */
public static synchronized String getPUID(final Class<?> c, final int strength) {
return getPUID(c);
}
/**
 * Return a random boolean value drawn from the supplied generator.
 *
 * @param random Random to use.
 * @return a random true/false value
 */
public static synchronized boolean nextBoolean(java.util.Random random) {
return random.nextBoolean();
}
/**
 * Return a random boolean value drawn from the calling thread's generator.
 *
 * @return a random true/false value.
 */
public static synchronized boolean nextBoolean() {
    final java.util.Random rng = getInstance();
    return nextBoolean(rng);
}
/**
 * Return a random tri-state value: the unit interval is split into thirds, with
 * FALSE on the bottom third, MAYBE in the middle, and TRUE on the top.
 *
 * @param random Random to use.
 * @return a random tri-state value TRUE/FALSE/MAYBE.
 */
public static synchronized TriState nextTriState(java.util.Random random) {
    final double roll = random.nextDouble();
    if (roll > (2.0 / 3.0)) {
        return TriState.TRUE;
    }
    if (roll < (1.0 / 3.0)) {
        return TriState.FALSE;
    }
    return TriState.MAYBE;
}
/**
 * Return a random tri-state value from the calling thread's generator.
 *
 * @return a random tri-state value TRUE/FALSE/MAYBE.
 */
public static synchronized TriState nextTriState() {
    final java.util.Random rng = getInstance();
    return nextTriState(rng);
}
/**
 * Returns a new double value from 0.0 inclusive to 1.0 exclusive, drawn from the
 * supplied generator.
 *
 * @param random Random to use.
 * @return a new double value from 0.0 inclusive to 1.0 exclusive.
 */
public static synchronized double nextDouble(java.util.Random random) {
return random.nextDouble();
}
/**
 * Returns a random real number uniformly in [a, b).
 *
 * @param a the left endpoint
 * @param b the right endpoint
 * @return a random real number uniformly in [a, b)
 * @throws IllegalArgumentException unless {@code a < b}
 */
public static double nextDouble(double a, double b) {
    if (!(a < b)) {
        // FIX: include the offending endpoints in the message (the original said only
        // "Invalid range"). The negated form also rejects NaN endpoints, since any
        // comparison with NaN is false.
        throw new IllegalArgumentException("Invalid range: [" + a + ", " + b + ")");
    }
    return a + nextDouble() * (b - a);
}
/**
 * Returns a new double value from 0.0 inclusive to 1.0 exclusive, drawn from the
 * calling thread's generator.
 *
 * @return a new double value from 0.0 inclusive to 1.0 exclusive.
 */
public static synchronized double nextDouble() {
    final java.util.Random rng = getInstance();
    return nextDouble(rng);
}
/**
 * Returns a new float value from 0.0 inclusive to 1.0 exclusive, drawn from the
 * supplied generator.
 *
 * @param random Random to use.
 * @return a new float value from 0.0 inclusive to 1.0 exclusive.
 */
public static synchronized float nextFloat(java.util.Random random) {
return random.nextFloat();
}
/**
 * Returns a new float value from 0.0 inclusive to 1.0 exclusive, drawn from the
 * calling thread's generator.
 *
 * @return a new float value from 0.0 inclusive to 1.0 exclusive.
 */
public static synchronized float nextFloat() {
    final java.util.Random rng = getInstance();
    return nextFloat(rng);
}
/**
 * Returns a random integer (full int range) drawn from the supplied generator.
 *
 * @param random Random to use.
 * @return a random integer
 */
public static synchronized int nextInt(java.util.Random random) {
return random.nextInt();
}
/**
 * Returns a random integer (full int range) drawn from the calling thread's generator.
 *
 * @return a random integer.
 */
public static synchronized int nextInt() {
    final java.util.Random rng = getInstance();
    return nextInt(rng);
}
/**
 * Returns a random integer less than the specified value.
 *
 * Note: the bound is clamped to at least 1, so a non-positive {@code n} silently
 * yields 0 instead of the IllegalArgumentException java.util.Random would throw.
 *
 * @param random Random to use.
 * @param n the specified value
 * @return a random integer >= 0 and < n
 */
public static synchronized int nextInt(java.util.Random random, final int n) {
return random.nextInt(Math.max(1, n));
}
/**
 * Returns a random integer less than the specified value, drawn from the calling
 * thread's generator. A non-positive bound is clamped to 1 by the delegate, so
 * the result is 0 in that case.
 *
 * @param n the exclusive upper bound.
 * @return a random integer >= 0 and < n.
 */
public static synchronized int nextInt(final int n) {
    // The delegate clamps the bound itself, so the original's extra Math.max here
    // was redundant and is omitted.
    return nextInt(getInstance(), n);
}
/**
 * Returns a random integer within the specified range: min plus a uniform offset
 * in [0, max - min).
 *
 * Note: the span is clamped to at least 1, so max <= min silently yields min.
 *
 * @param random Random to use.
 * @param min the min value (inclusive)
 * @param max the max value (exclusive)
 * @return a random integer within the specified range.
 */
public static synchronized int nextInt(java.util.Random random, final int min, final int max) {
return (random.nextInt(Math.max(1, max - min)) + min);
}
/**
 * Returns a random integer within [min, max), drawn from the calling thread's
 * generator. If max <= min, the delegate clamps the span and min is returned.
 *
 * @param min the min value (inclusive).
 * @param max the max value (exclusive).
 * @return a random integer within the specified range.
 */
public static synchronized int nextInt(final int min, final int max) {
    final java.util.Random rng = getInstance();
    return nextInt(rng, min, max);
}
/**
 * Returns a random long value (full long range) drawn from the supplied generator.
 *
 * @param random Random to use.
 * @return a random long value.
 */
public static synchronized long nextLong(java.util.Random random) {
return random.nextLong();
}
/**
 * Returns a random long value (full long range) drawn from the calling thread's
 * generator.
 *
 * @return a random long value.
 */
public static synchronized long nextLong() {
    final java.util.Random rng = getInstance();
    return nextLong(rng);
}
/**
 * Returns a random long value in [0, max). The bound is clamped to at least 1,
 * so a non-positive max silently yields 0.
 *
 * @param random Random to use.
 * @param max max value for the random.
 * @return a random long value.
 */
public static synchronized long nextLong(java.util.Random random, final long max) {
// error checking and 2^x checking removed for simplicity.
return nextLong(random, 0, Math.max(max, 1));
}
/**
 * Returns a random long value in {@code [0, max)} using the shared
 * Random instance.
 *
 * @param max max value for the random (exclusive)
 * @return a random long value
 */
public static synchronized long nextLong(final long max) {
    // error checking and 2^x checking removed for simplicity.
    final java.util.Random rng = getInstance();
    return nextLong(rng, max);
}
/**
 * Returns a uniformly distributed random long within
 * {@code [min, min + |max - min|)}.
 *
 * @param random Random to use.
 * @param min the min value for the random (inclusive).
 * @param max the max value for the random (exclusive).
 * @return a random long value within the specified range.
 */
public static synchronized long nextLong(java.util.Random random, final long min, final long max) {
    // error checking and 2^x checking removed for simplicity.
    // Guard against a zero-width range (min == max), which previously caused
    // a division-by-zero ArithmeticException in the modulo below; mirrors the
    // Math.max(1, n) clamping used by the int overloads.
    final long span = Math.max(1L, Math.abs(max - min));
    long bits;
    long candidate;
    do {
        // Clear the sign bit so the modulo operates on a non-negative value.
        bits = (random.nextLong() << 1) >>> 1;
        candidate = bits % span;
        // Reject draws from the incomplete top interval to avoid modulo bias.
    } while (bits - candidate + (span - 1) < 0L);
    return candidate + min;
}
/**
 * Returns a uniformly distributed random long within
 * {@code [min, min + |max - min|)} using the shared Random instance.
 *
 * @param min min value for the random (inclusive).
 * @param max max value for the random (exclusive).
 * @return a random long value.
 */
public static synchronized long nextLong(final long min, final long max) {
    // error checking and 2^x checking removed for simplicity.
    // Math.max guards min == max, which previously divided by zero in the
    // modulo below (ArithmeticException).
    final long span = Math.max(1L, Math.abs(max - min));
    final java.util.Random rng = getInstance();
    long bits;
    long val;
    do {
        bits = (rng.nextLong() << 1) >>> 1; // drop the sign bit
        val = bits % span;
    } while (bits - val + (span - 1) < 0L); // reject biased tail draws
    return val + min;
}
/**
 * Returns the next value in the Gaussian series associated with the key.
 *
 * @param key the key for the Gaussian series.
 * @return a random value with a Gaussian distribution.
 */
public static synchronized double nextGaussian(final Object key) {
    final java.util.Random series = getInstance(key);
    return series.nextGaussian();
}
/**
 * Returns a new collection containing {@code count} randomly chosen items
 * from the given collection, preserving the source's iteration order.
 *
 * @param <T> the type
 * @param list the collection to choose random objects from
 * @param count the number of items to select
 * @return a new collection
 */
public static <T> Collection<T> getRandom(final Collection<T> list, final int count) {
    // Delegate to the Random-aware overload instead of duplicating the
    // stride-sampling logic (the previous inline copy had drifted, e.g. the
    // misspelled "masStride" local).
    return getRandom(getInstance(), list, count);
}
/**
 * Returns a new collection that has the specified number of items randomly
 * chosen from the list, preserving the source's iteration order. If the list
 * has {@code count} or fewer items, a copy of the whole list is returned.
 *
 * @param <T> the type
 * @param random Random to use.
 * @param list the collection to choose random objects from
 * @param count the number of items to select
 * @return a new collection
 */
public static <T> Collection<T> getRandom(java.util.Random random, final Collection<T> list, final int count) {
    if (count <= 0) {
        return new ArrayList<>();
    }
    if (list.size() <= count) {
        return new ArrayList<>(list);
    }
    final int listCount = list.size();
    final int maxStride = listCount / count;
    // Random offset into the leftover space so selections are not anchored at 0.
    final int start = Random.nextInt(random, listCount % count);
    final List<Integer> ints = new ArrayList<>();
    for (int ii = start; ints.size() < count; ii += maxStride) {
        if (count - 1 == ints.size()) {
            // Last pick: widen the window to the end of the list.
            // Fixed: this previously compared against retVal.size(), which was
            // always 0 while indices were being generated, so the widened
            // window was never used (except when count == 1).
            ints.add(Random.nextInt(random, ii, listCount));
        } else {
            ints.add(Random.nextInt(random, ii, ii + maxStride));
        }
    }
    // Collect the items at the chosen positions in iteration order.
    final List<T> retVal = new ArrayList<>();
    int c = 0;
    for (T item : list) {
        if (ints.contains(c)) {
            retVal.add(item);
        }
        c++;
    }
    return retVal;
}
/**
 * Gets a random item from a collection that is NOT equal to the specified
 * object. Falls back to a filtered copy when the first draw matches.
 *
 * @param <T> the type
 * @param list the collection to select from
 * @param not the object which we do not want a duplicate selection of
 * @return a new randomly selected object which is not equal to the
 * specified value
 */
public static <T> T getRandom(final Collection<T> list, final T not) {
    if (list.isEmpty()) {
        return null;
    }
    final T pick = getRandom(list);
    if (!pick.equals(not)) {
        return pick;
    }
    // Unlucky draw: retry against a copy with the excluded element removed.
    Logging.getLogFactory().getLogger(Random.class).debug("Creating array copy for finding random item.");
    final List<T> remaining = new ArrayList<>(list);
    remaining.remove(not);
    return getRandom(remaining);
}
/**
 * Gets a random item from a collection that is NOT equal to the specified
 * object, using the supplied generator.
 *
 * @param <T> the type
 * @param random Random to use.
 * @param list the collection to select from
 * @param not the object which we do not want a duplicate selection of
 * @return a new randomly selected object which is not equal to the
 * specified value
 */
public static <T> T getRandom(java.util.Random random, final Collection<T> list, final T not) {
    if (list.isEmpty()) {
        return null;
    }
    final T pick = getRandom(random, list);
    if (!pick.equals(not)) {
        return pick;
    }
    // Unlucky draw: retry against a copy with the excluded element removed.
    Logging.getLogFactory().getLogger(Random.class).debug("Creating array copy for finding random item.");
    final List<T> remaining = new ArrayList<>(list);
    remaining.remove(not);
    return getRandom(random, remaining);
}
/**
 * Gets a random item from the specified collection, or {@code null} when it
 * is empty.
 *
 * @param <T> the type
 * @param list the specified collection
 * @return a randomly selected object from the collection
 */
public static <T> T getRandom(final Collection<T> list) {
    if (list.isEmpty()) {
        return null;
    }
    final int index = Random.nextInt(getInstance(), list.size());
    return org.apache.commons.collections4.IterableUtils.get(list, index);
}
/**
 * Gets a random item from this collection using the supplied generator, or
 * {@code null} when the collection is empty.
 *
 * @param <T> the type.
 * @param random Random to use.
 * @param list the list to choose from.
 * @return a random item from the list.
 */
public static <T> T getRandom(java.util.Random random, final Collection<T> list) {
    if (list.isEmpty()) {
        return null;
    }
    final int size = list.size();
    final int index = Random.nextInt(random, size);
    return org.apache.commons.collections4.IterableUtils.get(list, index);
}
/**
 * Gets a random item from a collection, excluding every member of the second
 * collection.
 *
 * @param <T> the type
 * @param list the collection to select from
 * @param not the collection from which we do not want a duplicate selection
 * of
 * @return a new randomly selected object which is not equal to the
 * specified value
 */
public static <T> T getRandomNotInList(final Collection<T> list, final Collection<T> not) {
    final Collection<T> candidates = org.apache.commons.collections4.CollectionUtils.subtract(list, not);
    return getRandom(candidates);
}
/**
 * Gets a random item from a collection using the supplied generator,
 * excluding every member of the second collection.
 *
 * @param <T> the type
 * @param random Random to use.
 * @param list the collection to select from
 * @param not the collection from which we do not want a duplicate selection
 * of
 * @return a new randomly selected object which is not equal to the
 * specified value
 */
public static <T> T getRandomNotInList(java.util.Random random, final Collection<T> list, final Collection<T> not) {
    final Collection<T> candidates = org.apache.commons.collections4.CollectionUtils.subtract(list, not);
    return getRandom(random, candidates);
}
/**
 * Is the random object specified to be seeded for repeatability.
 *
 * @return Is the random object specified to be seeded for repeatability.
 */
public static boolean isSeeded() {
// Simple accessor for the class-level seeding flag.
return doSeed;
}
/**
 * Set if the random system is to be seeded.
 *
 * @param value If the Random object specified to be seeded.
 */
public static void setSeeded(boolean value) {
// Affects subsequently created Random instances, not existing ones.
doSeed = value;
}
/**
 * Get the seed.
 *
 * @return the current seed value for Random objects.
 */
public static int getSeed() {
// Simple accessor for the class-level seed value.
return seedValue;
}
/**
 * Set the seed.
 *
 * @param value the seed value for future Random objects.
 */
public static void setSeed(int value) {
// Only future Random objects pick this up; existing instances are unchanged.
seedValue = value;
}
/**
 * Returns a random boolean from a Bernoulli distribution with success
 * probability <em>p</em>.
 *
 * @param p the probability of returning <tt>true</tt>
 * @return <tt>true</tt> with probability <tt>p</tt> and
 * <tt>false</tt> with probability <tt>1 - p</tt>
 * @throws IllegalArgumentException unless <tt>0.0 <= p <= 1.0</tt>
 */
public static boolean bernoulli(double p) {
    // Negated-comparison form is deliberate: it also rejects NaN.
    if (!(p >= 0.0) || !(p <= 1.0)) {
        throw new IllegalArgumentException("Probability must be between 0.0 and 1.0");
    }
    return nextDouble() < p;
}
/**
 * Returns a random boolean from a Bernoulli distribution with success
 * probability 1/2.
 *
 * @return <tt>true</tt> with probability 1/2 and
 * <tt>false</tt> with probability 1/2
 */
public static boolean bernoulli() {
// Fair coin flip: delegates with p = 0.5.
return bernoulli(0.5);
}
/**
 * Returns a random real number from a standard Gaussian distribution (mean
 * 0 and standard deviation 1).
 *
 * @return a random real number from a standard Gaussian distribution (mean
 * 0 and standard deviation 1).
 */
public static synchronized double gaussian() {
// use the polar form of the Box-Muller transform
double r, x, y;
do {
// Sample a point uniformly in the square [-1, 1) x [-1, 1) ...
x = nextDouble(-1.0, 1.0);
y = nextDouble(-1.0, 1.0);
r = x * x + y * y;
// ... and reject it unless it lies strictly inside the unit circle
// (r == 0 is also rejected to avoid log(0) below).
} while (r >= 1 || r == 0);
return x * Math.sqrt(-2 * Math.log(r) / r);
// Remark: y * Math.sqrt(-2 * Math.log(r) / r)
// is an independent random gaussian
}
/**
 * Returns a random real number from a Gaussian distribution with mean μ
 * and standard deviation σ.
 *
 * @param mu the mean
 * @param sigma the standard deviation
 * @return a real number distributed according to the Gaussian distribution
 * with mean <tt>mu</tt> and standard deviation <tt>sigma</tt>
 */
public static double gaussian(double mu, double sigma) {
    // Scale and shift a standard normal sample.
    final double standard = gaussian();
    return mu + sigma * standard;
}
/**
 * Returns a random integer from a geometric distribution with success
 * probability <em>p</em>.
 *
 * @param p the parameter of the geometric distribution
 * @return a random integer from a geometric distribution with success
 * probability <tt>p</tt>
 * @throws IllegalArgumentException unless <tt>p >= 0.0</tt> and <tt>p
 * <= 1.0</tt>
 */
public static int geometric(double p) {
// Negated comparison intentionally rejects NaN as well.
if (!(p >= 0.0 && p <= 1.0)) {
throw new IllegalArgumentException("Probability must be between 0.0 and 1.0");
}
// using algorithm given by Knuth
return (int) Math.ceil(Math.log(nextDouble()) / Math.log(1.0 - p));
}
/**
 * Returns a random integer from a Poisson distribution with mean λ.
 *
 * @param lambda the mean of the Poisson distribution
 * @return a random integer from a Poisson distribution with mean
 * <tt>lambda</tt>
 * @throws IllegalArgumentException unless <tt>lambda > 0.0</tt> and not
 * infinite
 */
public static int poisson(double lambda) {
    if (!(lambda > 0.0)) {
        throw new IllegalArgumentException("Parameter lambda must be positive");
    }
    if (Double.isInfinite(lambda)) {
        throw new IllegalArgumentException("Parameter lambda must not be infinite");
    }
    // Knuth's algorithm: multiply uniform draws until the running product
    // falls below e^-lambda; the number of draws (minus one) is the sample.
    // see http://en.wikipedia.org/wiki/Poisson_distribution
    final double threshold = Math.exp(-lambda);
    int trials = 0;
    double product = 1.0;
    do {
        trials++;
        product *= nextDouble();
    } while (product >= threshold);
    return trials - 1;
}
/**
 * Returns a random real number from the standard Pareto distribution.
 *
 * @return a random real number from the standard Pareto distribution
 */
public static double pareto() {
// Standard Pareto = shape parameter alpha of 1.0.
return pareto(1.0);
}
/**
 * Returns a random real number from a Pareto distribution with shape
 * parameter α.
 *
 * @param alpha shape parameter
 * @return a random real number from a Pareto distribution with shape
 * parameter <tt>alpha</tt>
 * @throws IllegalArgumentException unless <tt>alpha > 0.0</tt>
 */
public static double pareto(double alpha) {
    // Negated comparison intentionally rejects NaN as well.
    if (!(alpha > 0.0)) {
        throw new IllegalArgumentException("Shape parameter alpha must be positive");
    }
    // Inverse-transform sampling from a uniform draw.
    final double u = 1 - nextDouble();
    return Math.pow(u, -1.0 / alpha) - 1.0;
}
/**
 * Returns a random real number from the Cauchy distribution.
 *
 * @return a random real number from the Cauchy distribution.
 */
public static double cauchy() {
// Inverse-transform sampling: tan of a uniform angle in (-pi/2, pi/2).
return Math.tan(Math.PI * (nextDouble() - 0.5));
}
/**
 * Returns a random integer from the specified discrete distribution.
 *
 * @param probabilities the probability of occurrence of each integer
 * @return a random integer from a discrete distribution:
 * <tt>i</tt> with probability <tt>probabilities[i]</tt>
 * @throws NullPointerException if <tt>probabilities</tt> is <tt>null</tt>
 * @throws IllegalArgumentException if sum of array entries is not (very
 * nearly) equal to <tt>1.0</tt>
 * @throws IllegalArgumentException unless <tt>probabilities[i] >=
 * 0.0</tt>
 * for each index <tt>i</tt>
 */
public static synchronized int discrete(double[] probabilities) {
    if (probabilities == null) {
        throw new NullPointerException("argument array is null");
    }
    // Validate entries and total in a single pass.
    double total = 0.0;
    for (int i = 0; i < probabilities.length; i++) {
        if (!(probabilities[i] >= 0.0)) {
            throw new IllegalArgumentException("array entry " + i + " must be nonnegative: " + probabilities[i]);
        }
        total += probabilities[i];
    }
    if (total > 1.0 + EPSILON || total < 1.0 - EPSILON) {
        throw new IllegalArgumentException("sum of array entries does not approximately equal 1.0: " + total);
    }
    // The inner loop may not select an index when r is (nearly) 1.0 and the
    // cumulative sum stays below 1.0 due to floating-point roundoff, so we
    // redraw until it does.
    while (true) {
        final double r = nextDouble();
        double cumulative = 0.0;
        for (int i = 0; i < probabilities.length; i++) {
            cumulative += probabilities[i];
            if (cumulative > r) {
                return i;
            }
        }
    }
}
/**
 * Returns a random integer from the specified discrete distribution.
 *
 * @param frequencies the frequency of occurrence of each integer
 * @return a random integer from a discrete distribution:
 * <tt>i</tt> with probability proportional to <tt>frequencies[i]</tt>
 * @throws NullPointerException if <tt>frequencies</tt> is <tt>null</tt>
 * @throws IllegalArgumentException if all array entries are <tt>0</tt>
 * @throws IllegalArgumentException if <tt>frequencies[i]</tt> is negative
 * for any index <tt>i</tt>
 * @throws IllegalArgumentException if sum of frequencies exceeds
 * <tt>Integer.MAX_VALUE</tt> (2<sup>31</sup> - 1)
 */
public static int discrete(int[] frequencies) {
    if (frequencies == null) {
        throw new NullPointerException("argument array is null");
    }
    // Validate entries and accumulate the total in a long to detect overflow.
    long total = 0;
    for (int i = 0; i < frequencies.length; i++) {
        if (frequencies[i] < 0) {
            throw new IllegalArgumentException("array entry " + i + " must be nonnegative: " + frequencies[i]);
        }
        total += frequencies[i];
    }
    if (total == 0) {
        throw new IllegalArgumentException("at least one array entry must be positive");
    }
    if (total >= Integer.MAX_VALUE) {
        throw new IllegalArgumentException("sum of frequencies overflows an int");
    }
    // pick index i with probability proportional to frequency
    final int r = nextInt((int) total);
    long running = 0;
    int ret = -1;
    for (int i = 0; i < frequencies.length; i++) {
        running += frequencies[i];
        if (running > r) {
            ret = i;
            break;
        }
    }
    return ret;
}
/**
 * Returns a random real number from an exponential distribution with rate
 * λ.
 *
 * @param lambda the rate of the exponential distribution
 * @return a random real number from an exponential distribution with rate
 * <tt>lambda</tt>
 * @throws IllegalArgumentException unless <tt>lambda > 0.0</tt>
 */
public static double exp(double lambda) {
    // Negated comparison intentionally rejects NaN as well.
    if (!(lambda > 0.0)) {
        throw new IllegalArgumentException("Rate lambda must be positive");
    }
    // Inverse-transform sampling from a uniform draw.
    final double u = nextDouble();
    return -Math.log(1 - u) / lambda;
}
/**
 * Rearranges the elements of the specified array in uniformly random order
 * (Fisher-Yates shuffle).
 *
 * @param a the array to shuffle
 * @throws NullPointerException if <tt>a</tt> is <tt>null</tt>
 */
public static synchronized void shuffle(Object[] a) {
    if (a == null) {
        throw new NullPointerException("argument array is null");
    }
    final int n = a.length;
    for (int i = 0; i < n; i++) {
        final int swapWith = i + nextInt(n - i); // uniform over [i, n)
        final Object held = a[swapWith];
        a[swapWith] = a[i];
        a[i] = held;
    }
}
/**
 * Rearranges the elements of the specified array in uniformly random order
 * (Fisher-Yates shuffle).
 *
 * @param a the array to shuffle
 * @throws NullPointerException if <tt>a</tt> is <tt>null</tt>
 */
public static synchronized void shuffle(double[] a) {
    if (a == null) {
        throw new NullPointerException("argument array is null");
    }
    final int n = a.length;
    for (int i = 0; i < n; i++) {
        final int swapWith = i + nextInt(n - i); // uniform over [i, n)
        final double held = a[swapWith];
        a[swapWith] = a[i];
        a[i] = held;
    }
}
/**
 * Rearranges the elements of the specified array in uniformly random order
 * (Fisher-Yates shuffle).
 *
 * @param a the array to shuffle
 * @throws NullPointerException if <tt>a</tt> is <tt>null</tt>
 */
public static synchronized void shuffle(int[] a) {
    if (a == null) {
        throw new NullPointerException("argument array is null");
    }
    final int n = a.length;
    for (int i = 0; i < n; i++) {
        final int swapWith = i + nextInt(n - i); // uniform over [i, n)
        final int held = a[swapWith];
        a[swapWith] = a[i];
        a[i] = held;
    }
}
/**
 * Rearranges the elements of the specified subarray in uniformly random
 * order (Fisher-Yates shuffle over {@code [lo, hi]}).
 *
 * @param a the array to shuffle
 * @param lo the left endpoint (inclusive)
 * @param hi the right endpoint (inclusive)
 * @throws NullPointerException if <tt>a</tt> is <tt>null</tt>
 * @throws IndexOutOfBoundsException unless <tt>(0 <= lo) && (lo
 * <= hi) && (hi < a.length)</tt>
 */
public static synchronized void shuffle(Object[] a, int lo, int hi) {
    if (a == null) {
        throw new NullPointerException("argument array is null");
    }
    if (lo < 0 || lo > hi || hi >= a.length) {
        throw new IndexOutOfBoundsException("Illegal subarray range");
    }
    for (int i = lo; i <= hi; i++) {
        final int swapWith = i + nextInt(hi - i + 1); // uniform over [i, hi]
        final Object held = a[swapWith];
        a[swapWith] = a[i];
        a[i] = held;
    }
}
/**
 * Rearranges the elements of the specified subarray in uniformly random
 * order (Fisher-Yates shuffle over {@code [lo, hi]}).
 *
 * @param a the array to shuffle
 * @param lo the left endpoint (inclusive)
 * @param hi the right endpoint (inclusive)
 * @throws NullPointerException if <tt>a</tt> is <tt>null</tt>
 * @throws IndexOutOfBoundsException unless <tt>(0 <= lo) && (lo
 * <= hi) && (hi < a.length)</tt>
 */
public static synchronized void shuffle(double[] a, int lo, int hi) {
    if (a == null) {
        throw new NullPointerException("argument array is null");
    }
    if (lo < 0 || lo > hi || hi >= a.length) {
        throw new IndexOutOfBoundsException("Illegal subarray range");
    }
    for (int i = lo; i <= hi; i++) {
        final int swapWith = i + nextInt(hi - i + 1); // uniform over [i, hi]
        final double held = a[swapWith];
        a[swapWith] = a[i];
        a[i] = held;
    }
}
/**
 * Rearranges the elements of the specified subarray in uniformly random
 * order (Fisher-Yates shuffle over {@code [lo, hi]}).
 *
 * @param a the array to shuffle
 * @param lo the left endpoint (inclusive)
 * @param hi the right endpoint (inclusive)
 * @throws NullPointerException if <tt>a</tt> is <tt>null</tt>
 * @throws IndexOutOfBoundsException unless <tt>(0 <= lo) && (lo
 * <= hi) && (hi < a.length)</tt>
 */
public static synchronized void shuffle(int[] a, int lo, int hi) {
    if (a == null) {
        throw new NullPointerException("argument array is null");
    }
    if (lo < 0 || lo > hi || hi >= a.length) {
        throw new IndexOutOfBoundsException("Illegal subarray range");
    }
    for (int i = lo; i <= hi; i++) {
        final int swapWith = i + nextInt(hi - i + 1); // uniform over [i, hi]
        final int held = a[swapWith];
        a[swapWith] = a[i];
        a[i] = held;
    }
}
}
| |
package com.benromberg.cordonbleu.data.dao;
import com.benromberg.cordonbleu.data.model.CodeRepositoryMetadata;
import com.benromberg.cordonbleu.data.model.Comment;
import com.benromberg.cordonbleu.data.model.CommentFixture;
import com.benromberg.cordonbleu.data.model.Commit;
import com.benromberg.cordonbleu.data.model.CommitApproval;
import com.benromberg.cordonbleu.data.model.CommitAuthor;
import com.benromberg.cordonbleu.data.model.CommitFixture;
import com.benromberg.cordonbleu.data.model.CommitId;
import com.benromberg.cordonbleu.data.model.CommitRepository;
import com.benromberg.cordonbleu.data.model.Team;
import com.benromberg.cordonbleu.data.model.User;
import com.benromberg.cordonbleu.util.SystemTimeRule;
import com.mongodb.DBObject;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import java.time.LocalDateTime;
import java.time.temporal.ChronoUnit;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Optional;
import static java.util.Arrays.asList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.tuple;
public class CommitDaoTest implements CommitFixture, CommentFixture {
private static final Team OTHER_TEAM = new TeamBuilder().name("other-team").build();
private static final String OTHER_REPOSITORY_NAME = "other-repository";
private static final String COMMIT_APPROVER_NAME = "approver";
private static final String COMMIT_APPROVER_EMAIL = "approver@email.com";
private static final User COMMIT_USER = new User(COMMIT_AUTHOR_EMAIL, COMMIT_AUTHOR_NAME, "author password");
private static final String FIRST_AUTHOR_NAME = "aaaauthor";
private static final String UPPERCASE_AUTHOR = "Uppercase Author";
private static final String OTHER_TEXT = "other text";
private static final LocalDateTime COMMIT_APPROVAL_TIME = LocalDateTime.now().truncatedTo(ChronoUnit.MILLIS);
private static final LocalDateTime COMMIT_CREATION_TIME = LocalDateTime.now().truncatedTo(ChronoUnit.MILLIS);
private static final String OTHER_COMMIT_HASH = "other commit hash";
private static final List<String> OTHER_COMMIT_BRANCHES = asList("other commit branch");
private static final CodeRepositoryMetadata OTHER_REPOSITORY = new RepositoryBuilder().name(OTHER_REPOSITORY_NAME)
.build();
private static final User COMMIT_APPROVER = new User(COMMIT_APPROVER_EMAIL, COMMIT_APPROVER_NAME,
"approver password");
private static final User COMMIT_ASSIGNEE = new User("assignee@email.com", "JackAssignee",
"assignee password");
@Rule
public SystemTimeRule systemTimeRule = new SystemTimeRule();
@Rule
public DaoRule databaseRule = new DaoRule().withRepository().withCommentUser();
@Rule
public ExpectedException expectedException = ExpectedException.none();
private final UserDao userDao = databaseRule.createUserDao();
private final TeamDao teamDao = databaseRule.createTeamDao();
private final CommitDao dao = databaseRule.createCommitDao();
private final CodeRepositoryMetadataDao repositoryDao = databaseRule.createRepositoryDao();
@Before
public void setUp() {
// Register the approver and assignee users that individual tests reference;
// DaoRule already provides the repository and the comment user.
userDao.insert(COMMIT_APPROVER);
userDao.insert(COMMIT_ASSIGNEE);
}
@Test
public void insertedElement_CanBeFoundById() throws Exception {
// Round-trip: an inserted commit is retrievable by its id.
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
Commit foundElement = dao.findById(dummyElement.getId()).get();
assertCommit(foundElement);
}
@Test
public void insertedElement_HasCreatedAsIsoDate() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
// Inspect the raw Mongo document to verify the persisted type of "created".
DBObject foundElement = DaoRule.getDB().getCollection(CommitDao.COLLECTION_NAME).findOne();
assertThat(foundElement.get("created")).isInstanceOf(Date.class);
}
@Test
public void insertedElement_HasRepositoryAsReference() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
// Inspect the raw document: the repository must be stored as a String id
// reference, not as an embedded object.
DBObject foundElement = DaoRule.getDB().getCollection(CommitDao.COLLECTION_NAME).findOne();
@SuppressWarnings("unchecked")
List<Object> repositories = (List<Object>) foundElement.get("repositories");
assertThat(((DBObject) repositories.get(0)).get("repository")).isInstanceOf(String.class);
}
@Test
public void insertedElement_CanBeFoundByRepository() throws Exception {
// An inserted commit is returned when querying by its repository.
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByRepositories(asList(REPOSITORY));
assertThat(foundCommits).extracting(Commit::getId).containsExactly(COMMIT_ID);
}
@Test
public void insertedElement_CanBeFoundByRepository_OrderedByTimeDesc() throws Exception {
// Insert the older commit first to prove ordering comes from the creation
// time, not from insertion order.
Commit firstElement = COMMIT;
Commit secondElement = commit().id(OTHER_COMMIT_HASH).created(COMMIT_CREATED.minusMinutes(1)).build();
dao.insert(secondElement);
dao.insert(firstElement);
List<Commit> foundCommits = dao.findByRepositories(asList(REPOSITORY));
assertThat(foundCommits).containsExactly(firstElement, secondElement);
}
@Test
public void insertedElement_CanNotBeFoundByOtherRepository() throws Exception {
// Querying with a repository the commit does not belong to yields nothing.
dao.insert(COMMIT);
List<Commit> foundCommits = dao.findByRepositories(asList(OTHER_REPOSITORY));
assertThat(foundCommits).isEmpty();
}
@Test
public void insertOrUpdateRepository_OnInsert_CanBeFoundById() throws Exception {
// Upsert path: a previously unknown commit is inserted with its repository.
Commit dummyElement = COMMIT;
Commit commit = dao.insertOrUpdateRepository(dummyElement, COMMIT_REPOSITORY);
assertCommit(commit);
}
@Test
public void insertOrUpdateRepository_OnExistingCommit_UpdatesRepository() throws Exception {
// Upsert path: a second repository is appended to an existing commit,
// keeping the original repository entry intact.
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
repositoryDao.insert(OTHER_REPOSITORY);
CommitRepository commitRepository = new CommitRepository(OTHER_REPOSITORY, OTHER_COMMIT_BRANCHES);
Commit commit = dao.insertOrUpdateRepository(commit().repositories(commitRepository).build(), commitRepository);
assertThat(commit.getRepositories()).extracting(repository -> repository.getRepository().getName(),
CommitRepository::getBranches).containsExactly(tuple(REPOSITORY_NAME, COMMIT_BRANCHES),
tuple(OTHER_REPOSITORY_NAME, OTHER_COMMIT_BRANCHES));
}
@Test
@Ignore("would only fail with real mongo")
public void insertOrUpdateRepository_WithManyBranches_DoesntExceedIndexLimit() throws Exception {
// 1024 branch names stress the document/index size; ignored because the
// in-memory database used here does not enforce Mongo's index limits.
Commit dummyElement = COMMIT;
List<String> manyBranches = Collections.nCopies(1024, "branch");
Commit commit = dao.insertOrUpdateRepository(dummyElement, new CommitRepository(REPOSITORY, manyBranches));
assertThat(commit.getRepositories()).extracting(entry -> entry.getRepository().getName(),
CommitRepository::getBranches).containsExactly(tuple(REPOSITORY_NAME, manyBranches));
}
@Test
public void findByFilter_CanBeFoundByRepositoryAndAuthor() throws Exception {
// Filter by repository plus the commit's own author matches the commit.
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(REPOSITORY, dummyElement.getAuthor(), true));
assertThat(foundCommits).extracting(Commit::getId).containsExactly(COMMIT_ID);
}
@Test
public void findByFilter_ReturnsCommitsAssignedToAssignee() throws Exception {
// Filtering by assignee returns a commit assigned to that user.
Commit dummyElement = new CommitBuilder().assignee(COMMIT_ASSIGNEE).build();
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(REPOSITORY, dummyElement.getAssignee()));
assertThat(foundCommits).extracting(Commit::getId).containsExactly(COMMIT_ID);
}
@Test
public void findByFilter_WithAssigneeSet_DoesNotReturnCommitsWithNoAssignee() throws Exception {
// COMMIT has no assignee, so an assignee filter must exclude it.
dao.insert(COMMIT);
assertThat(dao.findByFilter(createFilter(REPOSITORY, Optional.of(COMMIT_ASSIGNEE)))).isEmpty();
}
@Test
public void findByFilter_WithAssigneeSet_DoesNotReturnCommitsAssignedToOthers() throws Exception {
// A commit assigned to a different user must not match the assignee filter.
Commit commitAssignedToAnother = new CommitBuilder().assignee(COMMIT_USER).build();
dao.insert(commitAssignedToAnother);
assertThat(dao.findByFilter(createFilter(REPOSITORY, Optional.of(COMMIT_ASSIGNEE)))).isEmpty();
}
@Test
public void findByFilter_CanBeFoundByRepositoryAndAuthorEmail_CaseInsensitive() throws Exception {
// The author email is matched case-insensitively (name differs on purpose,
// showing the email alone identifies the author).
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(REPOSITORY, new CommitAuthor(FIRST_AUTHOR_NAME,
COMMIT_AUTHOR_EMAIL.toUpperCase()), true));
assertThat(foundCommits).extracting(Commit::getId).containsExactly(COMMIT_ID);
}
@Test
public void findByFilter_CanBeFoundByRepositoryAndUser() throws Exception {
// Filtering by a registered user whose email matches the commit author.
userDao.insert(COMMIT_USER);
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(REPOSITORY, COMMIT_USER, true));
assertThat(foundCommits).extracting(Commit::getId).containsExactly(COMMIT_ID);
}
@Test
public void findByFilter_CanBeFoundByRepositoryAndUser_WithEmailHavingDifferentCase() throws Exception {
// The user's email matches the commit author's email despite case differences.
User uppercaseEmailUser = new User(COMMIT_AUTHOR_EMAIL.toUpperCase(), COMMIT_AUTHOR_NAME, "author password");
userDao.insert(uppercaseEmailUser);
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(REPOSITORY, uppercaseEmailUser, true));
assertThat(foundCommits).extracting(Commit::getId).containsExactly(COMMIT_ID);
}
@Test
public void findByFilter_CanBeFoundByRepositoryAndUsersEmailAlias() throws Exception {
// A user whose alias list contains the commit author's email also matches.
User aliasUser = userDao.update(COMMIT_APPROVER, COMMIT_APPROVER_NAME, COMMIT_APPROVER_EMAIL,
asList(COMMIT_AUTHOR_EMAIL)).get();
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(REPOSITORY, aliasUser, true));
assertThat(foundCommits).extracting(Commit::getId).containsExactly(COMMIT_ID);
}
@Test
public void findByFilter_CanNotBeFoundByOtherRepositoryAndSameAuthor() throws Exception {
// Same author, wrong repository: no match.
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(OTHER_REPOSITORY, dummyElement.getAuthor(), true));
assertThat(foundCommits).isEmpty();
}
@Test
public void findByFilter_CanNotBeFoundBySameRepositoryAndOtherAuthorEmail() throws Exception {
// Same repository, same author name, different email: no match.
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(REPOSITORY, new CommitAuthor(COMMIT_AUTHOR_NAME,
"other@email.com"), true));
assertThat(foundCommits).isEmpty();
}
@Test
public void findByFilter_CanNotBeFoundByUnapproved() throws Exception {
// With approved=false in the filter, an approved commit is excluded.
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
dao.updateApproval(dummyElement.getId(), Optional.of(new CommitApproval(COMMIT_APPROVER, COMMIT_APPROVAL_TIME)));
List<Commit> foundCommits = dao.findByFilter(createFilter(REPOSITORY, dummyElement.getAuthor(), false));
assertThat(foundCommits).isEmpty();
}
@Test
public void findByFilter_StartsAfterLastCommitHash() throws Exception {
// Pagination: passing the newest commit's hash returns only older commits.
Commit firstElement = COMMIT;
Commit secondElement = commit().id(OTHER_COMMIT_HASH).created(COMMIT_CREATED.minusMinutes(1)).build();
dao.insert(secondElement);
dao.insert(firstElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(Optional.of(firstElement.getId().getHash()), 100));
assertThat(foundCommits).containsExactly(secondElement);
}
@Test
public void findByFilter_StartsAfterLastCommitHash_WithCommitAtSameTime() throws Exception {
// Pagination tie-break: two commits share the same creation time, so the
// hash must be used to decide which commits come after the cursor.
Commit firstElement = COMMIT;
Commit secondElement = commit().id(OTHER_COMMIT_HASH).build();
dao.insert(secondElement);
dao.insert(firstElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(Optional.of(firstElement.getId().getHash()), 100));
assertThat(foundCommits).containsExactly(secondElement);
}
@Test
public void findByFilter_WithUnknownLastCommitHash_ThrowsNoSuchElementException() throws Exception {
// A pagination cursor pointing at a non-existent commit is an error.
expectedException.expect(NoSuchElementException.class);
dao.findByFilter(createFilter(Optional.of("unknown-last-commit-hash"), 100));
}
@Test
public void findByFilter_ObeysLimit() throws Exception {
// With limit 1, only the newest commit is returned.
Commit firstElement = COMMIT;
Commit secondElement = commit().id(OTHER_COMMIT_HASH).created(COMMIT_CREATED.minusMinutes(1)).build();
dao.insert(secondElement);
dao.insert(firstElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(Optional.empty(), 1));
assertThat(foundCommits).containsExactly(firstElement);
}
@Test
public void findByFilter_WithMultipleCommitsHavingSameTimestamp_DoesntReturnSmallerCommitHash() throws Exception {
// Tie-break by hash: with equal timestamps, commits whose hash sorts before
// the cursor hash must not be returned again.
Commit firstElement = COMMIT;
Commit secondElement = commit().id(OTHER_COMMIT_HASH).build();
dao.insert(firstElement);
dao.insert(secondElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(Optional.of(OTHER_COMMIT_HASH), 1));
assertThat(foundCommits).isEmpty();
}
@Test
public void findByFilter_WithCommitHavingSmallerTimestampAndHash_ReturnsCommit() throws Exception {
// A commit strictly older than the cursor commit is returned regardless of
// how its hash compares.
Commit firstElement = COMMIT;
Commit secondElement = commit().id(OTHER_COMMIT_HASH).created(COMMIT_CREATED.plusMinutes(1)).build();
dao.insert(firstElement);
dao.insert(secondElement);
List<Commit> foundCommits = dao.findByFilter(createFilter(Optional.of(OTHER_COMMIT_HASH), 1));
assertThat(foundCommits).containsExactly(firstElement);
}
@Test
public void countByFilter_AppliesFilter() throws Exception {
// Three commits fetched an hour apart; counting "fetched after X" must
// shrink as the reference commit moves forward in time.
LocalDateTime sampleTime = LocalDateTime.now();
Commit firstFetched = new CommitBuilder().fetchedAt(sampleTime).build();
Commit secondFetched = new CommitBuilder().id("second").fetchedAt(sampleTime.plusHours(1)).build();
Commit thirdFetched = new CommitBuilder().id("third").fetchedAt(sampleTime.plusHours(2)).build();
dao.insert(firstFetched);
dao.insert(secondFetched);
dao.insert(thirdFetched);
assertThat(dao.countByFilter(createFilterFetchedAfter(firstFetched.getId().getHash()))).isEqualTo(2);
assertThat(dao.countByFilter(createFilterFetchedAfter(secondFetched.getId().getHash()))).isEqualTo(1);
assertThat(dao.countByFilter(createFilterFetchedAfter(thirdFetched.getId().getHash()))).isEqualTo(0);
}
@Test
public void updateAsRemoved_WithoutCommit_ReturnsEmpty() throws Exception {
Optional<Commit> commit = dao.updateAsRemoved(new CommitId("non-existing-hash", TEAM));
assertThat(commit).isEmpty();
}
@Test
public void updateAsRemoved_WithCommit_ReturnsCommitAsRemoved() throws Exception {
dao.insert(COMMIT);
Commit commit = dao.updateAsRemoved(COMMIT_ID).get();
assertThat(commit.isRemoved()).isTrue();
}
@Test
public void updatedApproval_CanBeFoundInCommit() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
dao.updateApproval(dummyElement.getId(), Optional.of(new CommitApproval(COMMIT_APPROVER, COMMIT_APPROVAL_TIME)));
Commit commit = dao.findById(dummyElement.getId()).get();
CommitApproval approval = commit.getApproval().get();
assertThat(approval.getApprover()).isEqualTo(COMMIT_APPROVER);
assertThat(approval.getTime()).isEqualTo(COMMIT_APPROVAL_TIME);
}
@Test
public void updateApproval_DoesNotDestroyCommit() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
dao.updateApproval(dummyElement.getId(), Optional.of(new CommitApproval(COMMIT_APPROVER, COMMIT_APPROVAL_TIME)));
Commit commit = dao.findById(dummyElement.getId()).get();
assertThat(commit.getAuthor().getName()).isEqualTo(COMMIT_AUTHOR_NAME);
}
@Test
public void revertedApproval_CanBeFoundInCommit() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
dao.updateApproval(dummyElement.getId(), Optional.of(new CommitApproval(COMMIT_APPROVER, COMMIT_APPROVAL_TIME)));
dao.updateApproval(dummyElement.getId(), Optional.empty());
Optional<CommitApproval> approval = dao.findById(dummyElement.getId()).get().getApproval();
assertThat(approval).isEqualTo(Optional.empty());
}
// --- assignee handling ---------------------------------------------------

// An assignee set via updateAssignee is readable again through findById.
@Test
public void setAssigneeAndFindCommit_AssigneeIsSet() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
dao.updateAssignee(dummyElement.getId(), Optional.of(COMMIT_ASSIGNEE));
assertThat(dao.findById(dummyElement.getId()).get().getAssignee().get()).isEqualToComparingFieldByField(COMMIT_ASSIGNEE);
}
// Passing Optional.empty() clears a previously set assignee.
@Test
public void removeAssigneeAndFindCommit_AssigneeIsNotSet() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
dao.updateAssignee(dummyElement.getId(), Optional.of(COMMIT_ASSIGNEE));
dao.updateAssignee(dummyElement.getId(), Optional.empty());
assertThat(dao.findById(dummyElement.getId()).get().getAssignee()).isEmpty();
}
// The commit returned by updateApproval already carries the new approval.
@Test
public void updateApproval_ReturnsUpdatedCommit() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
Commit returnedCommit = dao.updateApproval(dummyElement.getId(),
Optional.of(new CommitApproval(COMMIT_APPROVER, COMMIT_APPROVAL_TIME))).get();
assertThat(returnedCommit.getApproval().get().getApprover()).isEqualTo(COMMIT_APPROVER);
assertThat(returnedCommit.getApproval().get().getTime()).isEqualTo(COMMIT_APPROVAL_TIME);
}
// --- comment CRUD --------------------------------------------------------
@Test
public void addedComment_CanBeFoundInCommit() throws Exception {
insertWithComment(COMMIT);
List<Comment> foundComments = dao.findById(COMMIT.getId()).get().getComments();
assertThat(foundComments).extracting(Comment::getText).containsExactly(COMMENT_TEXT);
}
@Test
public void addComment_ReturnsUpdatedCommit() throws Exception {
Commit dummyElement = COMMIT;
Commit returnedCommit = insertWithComment(dummyElement);
assertThat(returnedCommit.getComments()).extracting(Comment::getText).containsExactly(COMMENT_TEXT);
}
@Test
public void updatedComment_CanBeFoundInCommit() throws Exception {
Commit commit = insertWithComment(COMMIT);
dao.updateComment(commit.getId(), commit.getComments().get(0).getId(), OTHER_TEXT);
List<Comment> foundComments = dao.findById(COMMIT.getId()).get().getComments();
assertThat(foundComments).extracting(Comment::getText).containsExactly(OTHER_TEXT);
}
@Test
public void updateComment_ReturnsUpdatedCommit() throws Exception {
Commit commit = insertWithComment(COMMIT);
Commit returnedCommit = dao.updateComment(commit.getId(), commit.getComments().get(0).getId(), OTHER_TEXT)
.get();
assertThat(returnedCommit.getComments()).extracting(Comment::getText).containsExactly(OTHER_TEXT);
}
@Test
public void removedComment_CanNotBeFoundInCommit() throws Exception {
Commit commit = insertWithComment(COMMIT);
dao.removeComment(commit.getId(), commit.getComments().get(0).getId());
List<Comment> foundComments = dao.findById(COMMIT.getId()).get().getComments();
assertThat(foundComments).isEmpty();
}
@Test
public void removeComment_ReturnsUpdatedCommit() throws Exception {
Commit commit = insertWithComment(COMMIT);
Commit returnedCommit = dao.removeComment(commit.getId(), commit.getComments().get(0).getId()).get();
assertThat(returnedCommit.getComments()).isEmpty();
}
// Comments come back sorted ascending by creation time: the second comment
// is created one second in the past and must therefore come first.
@Test
public void addedComments_AreOrderedByCreated() throws Exception {
Commit commit = insertWithComment(COMMIT);
systemTimeRule.advanceBySeconds(-1);
dao.addComment(commit.getId(), comment().text(OTHER_TEXT).build());
List<Comment> foundComments = dao.findById(commit.getId()).get().getComments();
assertThat(foundComments).extracting(Comment::getText).containsExactly(OTHER_TEXT, COMMENT_TEXT);
}
// --- findAuthors (all @Ignore'd: Fongo lacks aggregation support) --------
@Test
@Ignore("Aggregation queries not fully supported by Fongo: https://github.com/fakemongo/fongo/issues/8")
public void findAuthors_IncludesAddedCommitAuthorEmail() throws Exception {
dao.insert(COMMIT);
List<CommitAuthor> authors = dao.findAuthors(TEAM);
assertThat(authors).extracting(CommitAuthor::getName, CommitAuthor::getEmail).containsExactly(
tuple(COMMIT_AUTHOR_NAME, COMMIT_AUTHOR_EMAIL));
}
@Test
@Ignore("Aggregation queries not fully supported by Fongo: https://github.com/fakemongo/fongo/issues/8")
public void findAuthors_SkipsAuthorsOutsideTheTeam() throws Exception {
dao.insert(COMMIT);
teamDao.insert(OTHER_TEAM);
List<CommitAuthor> authors = dao.findAuthors(OTHER_TEAM);
assertThat(authors).isEmpty();
}
@Test
@Ignore("Aggregation queries not fully supported by Fongo: https://github.com/fakemongo/fongo/issues/8")
public void foundAuthors_AreSortedByName() throws Exception {
Commit commit = COMMIT;
dao.insert(commit);
dao.insert(commit().id(OTHER_COMMIT_HASH).author(new CommitAuthor(FIRST_AUTHOR_NAME, COMMIT_AUTHOR_EMAIL))
.build());
List<CommitAuthor> authors = dao.findAuthors(TEAM);
assertThat(authors).extracting(CommitAuthor::getName).containsExactly(FIRST_AUTHOR_NAME, COMMIT_AUTHOR_NAME);
}
@Test
@Ignore("Aggregation queries not fully supported by Fongo: https://github.com/fakemongo/fongo/issues/8")
public void foundAuthors_AreSortedByName_IgnoreCase() throws Exception {
Commit commit = COMMIT;
dao.insert(commit);
dao.insert(commit().id(OTHER_COMMIT_HASH).author(new CommitAuthor(UPPERCASE_AUTHOR, COMMIT_AUTHOR_EMAIL))
.build());
List<CommitAuthor> authors = dao.findAuthors(TEAM);
assertThat(authors).extracting(CommitAuthor::getName).containsExactly(COMMIT_AUTHOR_NAME, UPPERCASE_AUTHOR);
}
// --- removeOrphaned ------------------------------------------------------
// A commit whose repository is no longer configured is deleted entirely.
@Test
public void removeOrphaned_RemovesCommitNoLongerAttachedToGivenRepositories() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
dao.removeOrphaned(asList());
assertThat(dao.findById(dummyElement.getId())).isEmpty();
}
@Test
public void removeOrphaned_KeepsCommitWithExistingRepository() throws Exception {
Commit dummyElement = COMMIT;
dao.insert(dummyElement);
dao.removeOrphaned(asList(REPOSITORY));
assertThat(dao.findById(dummyElement.getId())).isPresent();
}
// Only the vanished repository reference is pruned; the commit survives.
@Test
public void removeOrphaned_FromCommitWithTwoRepositories_KeepsOnlyOneExistingRepository() throws Exception {
CommitRepository noLongerExistingRepository = new CommitRepository(repository().name(OTHER_REPOSITORY_NAME)
.build(), COMMIT_BRANCHES);
Commit dummyElement = commit().repositories(COMMIT_REPOSITORY, noLongerExistingRepository).build();
dao.insert(dummyElement);
dao.removeOrphaned(asList(REPOSITORY));
assertThat(dao.findById(dummyElement.getId()).get().getRepositories()).hasSize(1);
}
// --- findNotifications ---------------------------------------------------

@Test
public void findNotifications_WithoutCommits_ReturnsEmptyList() throws Exception {
List<Commit> notifications = insertUserWithTeamAndFindNotifications(COMMIT_APPROVER, 100);
assertThat(notifications).isEmpty();
}
// Notifications are driven by comments; a bare commit produces none.
@Test
public void findNotifications_WithUncommentedCommit_ReturnsEmptyList() throws Exception {
dao.insert(COMMIT);
List<Commit> notifications = insertUserWithTeamAndFindNotifications(COMMIT_USER, 100);
assertThat(notifications).isEmpty();
}
// Notifications are scoped to the user's team membership.
@Test
public void findNotifications_WithOwnCommitCommented_ButInAnotherTeam_ReturnsEmptyList() throws Exception {
insertWithComment(COMMIT);
userDao.insert(COMMIT_USER);
teamDao.insert(OTHER_TEAM);
userDao.addTeam(COMMIT_USER.getId(), OTHER_TEAM);
List<Commit> notifications = dao.findNotifications(COMMIT_USER, 100);
assertThat(notifications).isEmpty();
}
@Test
public void findNotifications_WithOwnCommitCommented_ReturnsCommit() throws Exception {
Commit commit = insertWithComment(COMMIT);
List<Commit> notifications = insertUserWithTeamAndFindNotifications(COMMIT_USER, 100);
assertThat(notifications).extracting(Commit::getId).containsExactly(commit.getId());
}
@Test
public void findNotifications_WithCommitCommentedByUser_ReturnsCommit() throws Exception {
Commit commit = insertWithComment(COMMIT);
List<Commit> notifications = insertUserWithTeamAndFindNotifications(COMMENT_USER, 100);
assertThat(notifications).extracting(Commit::getId).containsExactly(commit.getId());
}
// Newest comment first: otherCommit is commented one second later and must
// lead the result list.
@Test
public void findNotifications_SortedByLastCommentDateDesc() throws Exception {
Commit commit = insertWithComment(COMMIT);
systemTimeRule.advanceBySeconds(1);
Commit otherCommit = insertWithComment(commit().id(OTHER_COMMIT_HASH).build());
List<Commit> notifications = insertUserWithTeamAndFindNotifications(COMMENT_USER, 100);
assertThat(notifications).extracting(Commit::getId).containsExactly(otherCommit.getId(), commit.getId());
}
@Test
public void findNotifications_ObeysLimit() throws Exception {
insertWithComment(COMMIT);
systemTimeRule.advanceBySeconds(1);
Commit otherCommit = insertWithComment(commit().id(OTHER_COMMIT_HASH).build());
List<Commit> notifications = insertUserWithTeamAndFindNotifications(COMMENT_USER, 1);
assertThat(notifications).extracting(Commit::getId).containsExactly(otherCommit.getId());
}
// --- findNonAssignedNonApproved ------------------------------------------
@Test
public void findNonAssignedNonApproved_CommitFromAnotherTeamIsNotReturned() throws Exception {
dao.insert(new CommitBuilder().created(COMMIT_CREATION_TIME).build());
assertThat(dao.findNonAssignedNonApproved(OTHER_TEAM, COMMIT_CREATION_TIME.minusSeconds(10), 10)).isEmpty();
}
@Test
public void findNonAssignedNonApproved_CommitWithAssigneeIsNotReturned() throws Exception {
dao.insert(new CommitBuilder().created(COMMIT_CREATION_TIME).assignee(COMMIT_ASSIGNEE).build());
assertThat(dao.findNonAssignedNonApproved(TEAM, COMMIT_CREATION_TIME.minusSeconds(10), 10)).isEmpty();
}
@Test
public void findNonAssignedNonApproved_CommitWithApprovalIsNotReturned() throws Exception {
Commit dummyElement = new CommitBuilder().created(COMMIT_CREATION_TIME).build();
dao.insert(dummyElement);
dao.updateApproval(dummyElement.getId(), Optional.of(new CommitApproval(COMMIT_APPROVER, COMMIT_APPROVAL_TIME)));
assertThat(dao.findNonAssignedNonApproved(TEAM, COMMIT_CREATION_TIME.minusSeconds(10), 10)).isEmpty();
}
// The date bound is exclusive: commits created at or before it are skipped.
@Test
public void findNonAssignedNonApproved_CommitEarlierThanSpecifiedDateIsNotReturned() throws Exception {
Commit dummyElement = new CommitBuilder().created(COMMIT_CREATION_TIME).build();
dao.insert(dummyElement);
assertThat(dao.findNonAssignedNonApproved(TEAM, dummyElement.getCreated(), 10)).isEmpty();
assertThat(dao.findNonAssignedNonApproved(TEAM, dummyElement.getCreated().plusSeconds(10), 10)).isEmpty();
}
@Test
public void findNonAssignedNonApproved_CommitNonAssignedNonApprovedIsReturned() throws Exception {
Commit dummyElement = new CommitBuilder().created(COMMIT_CREATION_TIME).build();
dao.insert(dummyElement);
List<Commit> findResult = dao.findNonAssignedNonApproved(TEAM, COMMIT_CREATION_TIME.minusSeconds(10), 10);
assertThat(findResult).extracting(Commit::getId).containsExactly(dummyElement.getId());
}
// NOTE(review): the last assertion shows that limit 0 returns ALL commits —
// presumably MongoDB's "limit(0) = no limit" semantics; confirm intentional.
@Test
public void findNonAssignedNonApproved_ReturnsUpToLimitAmount() throws Exception {
LocalDateTime creationDate = LocalDateTime.now();
dao.insert(new CommitBuilder().id("First").created(creationDate).build());
dao.insert(new CommitBuilder().id("Second").created(creationDate).build());
dao.insert(new CommitBuilder().id("Third").created(creationDate).build());
assertThat(dao.findNonAssignedNonApproved(TEAM, creationDate.minusSeconds(10), 10)).hasSize(3);
assertThat(dao.findNonAssignedNonApproved(TEAM, creationDate.minusSeconds(10), 2)).hasSize(2);
assertThat(dao.findNonAssignedNonApproved(TEAM, creationDate.minusSeconds(10), 1)).hasSize(1);
assertThat(dao.findNonAssignedNonApproved(TEAM, creationDate.minusSeconds(10), 0)).hasSize(3);
}
// Inserts the user (if not yet present), adds them to the default TEAM and
// fetches their notifications with the given limit.
private List<Commit> insertUserWithTeamAndFindNotifications(User user, int limit) {
userDao.insertIfNotExists(user);
User userWithTeam = userDao.addTeam(user.getId(), TEAM).get();
return dao.findNotifications(userWithTeam, limit);
}
// Asserts that the given commit carries every fixture value of COMMIT.
private void assertCommit(Commit foundElement) {
assertThat(foundElement.getAuthor().getName()).isEqualTo(COMMIT_AUTHOR_NAME);
assertThat(foundElement.getAuthor().getEmail()).isEqualTo(COMMIT_AUTHOR_EMAIL);
assertThat(foundElement.getCreated()).isEqualTo(COMMIT_CREATED);
assertThat(foundElement.getMessage()).isEqualTo(COMMIT_MESSAGE);
assertThat(foundElement.getRepositories()).extracting(entry -> entry.getRepository().getName(),
CommitRepository::getBranches).containsExactly(tuple(REPOSITORY_NAME, COMMIT_BRANCHES));
assertThat(foundElement.getApproval()).isEqualTo(Optional.empty());
}
// Filter over a single repository/author pair with the given approval state.
private CommitFilter createFilter(CodeRepositoryMetadata repository, CommitAuthor author, boolean approved) {
return new CommitFilter(TEAM, asList(repository), asList(author), asList(), approved, Optional.empty(), Optional.empty(), 100, Optional.empty());
}
// Filter for approved commits of COMMIT_AUTHOR with an optional assignee.
private CommitFilter createFilter(CodeRepositoryMetadata repository, Optional<User> assignee) {
return new CommitFilter(TEAM, asList(repository), asList(COMMIT_AUTHOR), asList(), true, Optional.empty(), Optional.empty(),100, assignee);
}
// Filter matching commits of the given user (instead of a raw author).
private CommitFilter createFilter(CodeRepositoryMetadata repository, User user, boolean approved) {
return new CommitFilter(TEAM, asList(repository), asList(), asList(user), approved, Optional.empty(), Optional.empty(), 100, Optional.empty());
}
// Paging filter: start after lastCommitId (if present) and obey the limit.
private CommitFilter createFilter(Optional<String> lastCommitId, int limit) {
return new CommitFilter(TEAM, asList(REPOSITORY), asList(COMMIT_AUTHOR), asList(), true, lastCommitId, Optional.empty(), limit, Optional.empty());
}
// Filter counting commits fetched after the commit with the given hash.
private CommitFilter createFilterFetchedAfter(String fetchedAfterHash) {
return new CommitFilter(TEAM, asList(REPOSITORY), asList(COMMIT_AUTHOR), asList(), true, Optional.empty(), Optional.of(fetchedAfterHash), 100, Optional.empty());
}
// Inserts the commit and attaches the default fixture comment to it.
private Commit insertWithComment(Commit dummyElement) {
dao.insert(dummyElement);
return dao.addComment(dummyElement.getId(), comment().build()).get();
}
}
| |
package com.jeff.footballmanager.param;
import java.io.Serializable;
/**
 * Serializable request-parameter bean describing a football player.
 *
 * <p>{@link #toString()} renders the bean as an HTTP query-string fragment
 * ({@code "&key=value&key=value..."}); field order is fixed and values are
 * appended as-is (a {@code null} field is rendered literally as "null").</p>
 */
public class PlayerParam implements Serializable {

    private static final long serialVersionUID = 1L;

    private int id;
    private String playerNo;
    private String name;
    private String age;
    private String mobile;
    private String status;
    private String height;
    private String weight;
    private String country;
    private String role;
    private String playPosition;
    private String introduction;
    private String createtime;
    private String updatetime;
    private String teamName;
    private String userNo;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getPlayerNo() {
        return playerNo;
    }

    public void setPlayerNo(String playerNo) {
        this.playerNo = playerNo;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getAge() {
        return age;
    }

    public void setAge(String age) {
        this.age = age;
    }

    public String getMobile() {
        return mobile;
    }

    public void setMobile(String mobile) {
        this.mobile = mobile;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getHeight() {
        return height;
    }

    public void setHeight(String height) {
        this.height = height;
    }

    public String getWeight() {
        return weight;
    }

    public void setWeight(String weight) {
        this.weight = weight;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    public String getRole() {
        return role;
    }

    public void setRole(String role) {
        this.role = role;
    }

    public String getPlayPosition() {
        return playPosition;
    }

    public void setPlayPosition(String playPosition) {
        this.playPosition = playPosition;
    }

    public String getIntroduction() {
        return introduction;
    }

    public void setIntroduction(String introduction) {
        this.introduction = introduction;
    }

    public String getCreatetime() {
        return createtime;
    }

    public void setCreatetime(String createtime) {
        this.createtime = createtime;
    }

    public String getUpdatetime() {
        return updatetime;
    }

    public void setUpdatetime(String updatetime) {
        this.updatetime = updatetime;
    }

    public String getTeamName() {
        return teamName;
    }

    public void setTeamName(String teamName) {
        this.teamName = teamName;
    }

    public String getUserNo() {
        return userNo;
    }

    public void setUserNo(String userNo) {
        this.userNo = userNo;
    }

    /**
     * Serializes the bean as a query-string fragment. Only the fields that
     * are sent to the server are included; {@code id}, {@code status},
     * {@code introduction} and the timestamps are intentionally omitted.
     */
    @Override
    public String toString() {
        StringBuilder query = new StringBuilder();
        appendParam(query, "playerNo", playerNo);
        appendParam(query, "name", name);
        appendParam(query, "userNo", userNo);
        appendParam(query, "height", height);
        appendParam(query, "weight", weight);
        appendParam(query, "country", country);
        appendParam(query, "role", role);
        appendParam(query, "age", age);
        appendParam(query, "mobile", mobile);
        appendParam(query, "teamName", teamName);
        appendParam(query, "playPosition", playPosition);
        return query.toString();
    }

    /** Appends one {@code &key=value} pair; a null value is appended as "null". */
    private static void appendParam(StringBuilder sb, String key, String value) {
        sb.append('&').append(key).append('=').append(value);
    }
}
| |
/*
* The MIT License
*
* Copyright (c) 2017 CloudBees, Inc.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package jenkins.slaves;
import hudson.Extension;
import hudson.Util;
import hudson.model.Describable;
import hudson.model.Descriptor;
import hudson.model.Slave;
import hudson.slaves.SlaveComputer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import jenkins.model.Jenkins;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.DataBoundConstructor;
/**
* Defines settings of the Remoting work directory.
*
* This class contains Remoting Work Directory settings, which can be used when starting Jenkins agents.
* See <a href="https://github.com/jenkinsci/remoting/blob/master/docs/workDir.md">Remoting Work Dir Documentation</a>.
*
* @author Oleg Nenashev
* @since 2.72
*/
public class RemotingWorkDirSettings implements Describable<RemotingWorkDirSettings> {

    private static final String DEFAULT_INTERNAL_DIR = "remoting";
    private static final RemotingWorkDirSettings LEGACY_DEFAULT = new RemotingWorkDirSettings(true, null, DEFAULT_INTERNAL_DIR, false);
    private static final RemotingWorkDirSettings ENABLED_DEFAULT = new RemotingWorkDirSettings(false, null, DEFAULT_INTERNAL_DIR, false);

    /** If {@code true}, Remoting runs in the legacy mode without a work directory. */
    private final boolean disabled;
    /** Custom work directory path; {@code null} means "use the agent root directory". */
    @CheckForNull
    private final String workDirPath;
    /** Name of Remoting's internal data directory inside the work directory. */
    @Nonnull
    private final String internalDir;
    /** If {@code true}, agent startup fails when the work directory is missing. */
    private final boolean failIfWorkDirIsMissing;

    @DataBoundConstructor
    public RemotingWorkDirSettings(boolean disabled,
            @CheckForNull String workDirPath, @CheckForNull String internalDir,
            boolean failIfWorkDirIsMissing) {
        this.disabled = disabled;
        this.workDirPath = Util.fixEmptyAndTrim(workDirPath);
        this.failIfWorkDirIsMissing = failIfWorkDirIsMissing;
        // Fall back to the default name when a blank value is submitted from the UI.
        String internalDirName = Util.fixEmptyAndTrim(internalDir);
        this.internalDir = internalDirName != null ? internalDirName : DEFAULT_INTERNAL_DIR;
    }

    public RemotingWorkDirSettings() {
        // Enabled by default
        this(false, null, DEFAULT_INTERNAL_DIR, false);
    }

    /**
     * Check if workdir is disabled.
     *
     * @return {@code true} if the property is disabled.
     *         In such case Remoting will use the legacy mode.
     */
    public boolean isDisabled() {
        return disabled;
    }

    /**
     * Indicates that agent root directory should be used as work directory.
     *
     * @return {@code true} if the agent root should be a work directory.
     */
    public boolean isUseAgentRootDir() {
        return workDirPath == null;
    }

    /**
     * Check if startup should fail if the workdir is missing.
     *
     * @return {@code true} if Remoting should fail if the work directory is missing instead of creating it
     */
    public boolean isFailIfWorkDirIsMissing() {
        return failIfWorkDirIsMissing;
    }

    /**
     * Gets path to the custom workdir path.
     *
     * @return Custom workdir path.
     *         If {@code null}, an agent root directory path should be used instead.
     */
    @CheckForNull
    public String getWorkDirPath() {
        return workDirPath;
    }

    @Nonnull
    public String getInternalDir() {
        return internalDir;
    }

    @Override
    public Descriptor<RemotingWorkDirSettings> getDescriptor() {
        return Jenkins.get().getDescriptor(RemotingWorkDirSettings.class);
    }

    /**
     * Gets list of command-line arguments for the work directory.
     * @param computer Computer, for which the arguments are being created
     * @return Non-modifiable list of command-line arguments
     */
    public List<String> toCommandLineArgs(@Nonnull SlaveComputer computer) {
        if (disabled) {
            return Collections.emptyList();
        }
        ArrayList<String> args = new ArrayList<>();
        args.add("-workDir");
        if (workDirPath == null) {
            Slave node = computer.getNode();
            if (node == null) {
                // It is not possible to launch this node anyway.
                return Collections.emptyList();
            }
            args.add(node.getRemoteFS());
        } else {
            args.add(workDirPath);
        }
        if (!DEFAULT_INTERNAL_DIR.equals(internalDir)) {
            args.add("-internalDir");
            args.add(internalDir);
        }
        if (failIfWorkDirIsMissing) {
            // BUGFIX: this flag used to be added as " -failIfWorkDirIsMissing"
            // (with a leading space). Each list entry is a single argv element,
            // so the space became part of the option name and Remoting would not
            // recognize it. Spaces are only needed in toCommandLineString().
            args.add("-failIfWorkDirIsMissing");
        }
        return Collections.unmodifiableList(args);
    }

    /**
     * Gets a command line string, which can be passed to agent start command.
     *
     * @param computer Computer, for which the arguments need to be constructed.
     * @return Command line arguments.
     *         It may be empty if the working directory is disabled or
     *         if the Computer type is not {@link SlaveComputer}.
     */
    @Nonnull
    @Restricted(NoExternalUse.class)
    public String toCommandLineString(@Nonnull SlaveComputer computer) {
        if (disabled) {
            return "";
        }
        StringBuilder bldr = new StringBuilder();
        bldr.append("-workDir \"");
        if (workDirPath == null) {
            Slave node = computer.getNode();
            if (node == null) {
                // It is not possible to launch this node anyway.
                return "";
            }
            bldr.append(node.getRemoteFS());
        } else {
            bldr.append(workDirPath);
        }
        bldr.append("\"");
        if (!DEFAULT_INTERNAL_DIR.equals(internalDir)) {
            bldr.append(" -internalDir \"");
            bldr.append(internalDir);
            bldr.append("\"");
        }
        if (failIfWorkDirIsMissing) {
            bldr.append(" -failIfWorkDirIsMissing");
        }
        return bldr.toString();
    }

    @Extension
    public static class DescriptorImpl extends Descriptor<RemotingWorkDirSettings> {
    }

    /**
     * Gets default settings for the disabled work directory.
     *
     * @return Legacy value: disabled work directory.
     */
    @Nonnull
    public static RemotingWorkDirSettings getDisabledDefaults() {
        return LEGACY_DEFAULT;
    }

    /**
     * Gets default settings of the enabled work directory.
     */
    @Nonnull
    public static RemotingWorkDirSettings getEnabledDefaults() {
        return ENABLED_DEFAULT;
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.cassandra.cql;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Set;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.Schema;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.thrift.ConsistencyLevel;
/**
* Encapsulates a completely parsed SELECT query, including the target
* column family, expression, result count, and ordering clause.
*
*/
public class SelectStatement
{
    private final SelectExpression expression;
    private final boolean isCountOper;
    private final String columnFamily;
    private final String keyspace;
    private final ConsistencyLevel cLevel;
    private final WhereClause clause;
    private final int numRecords;

    /**
     * @param expression parsed SELECT expression (columns / column ranges)
     * @param isCountOper whether this is a {@code SELECT COUNT(...)} query
     * @param keyspace keyspace name, or {@code null} if not set by the query
     * @param columnFamily target column family
     * @param cLevel consistency level for the read
     * @param clause WHERE clause; {@code null} is normalized to an empty clause
     * @param numRecords maximum number of records to return
     */
    public SelectStatement(SelectExpression expression, boolean isCountOper, String keyspace, String columnFamily,
                           ConsistencyLevel cLevel, WhereClause clause, int numRecords)
    {
        this.expression = expression;
        this.isCountOper = isCountOper;
        this.keyspace = keyspace;
        this.columnFamily = columnFamily;
        this.cLevel = cLevel;
        // Normalize so accessors never need to null-check the clause.
        this.clause = (clause != null) ? clause : new WhereClause();
        this.numRecords = numRecords;
    }

    // --- WHERE-clause delegation ---

    public boolean isKeyRange()
    {
        return clause.isKeyRange();
    }

    public Set<Term> getKeys()
    {
        return clause.getKeys();
    }

    public Term getKeyStart()
    {
        return clause.getStartKey();
    }

    public Term getKeyFinish()
    {
        return clause.getFinishKey();
    }

    public List<Relation> getColumnRelations()
    {
        return clause.getColumnRelations();
    }

    // --- SELECT-expression delegation ---

    public boolean isColumnRange()
    {
        return expression.isColumnRange();
    }

    public boolean isWildcard()
    {
        return expression.isWildcard();
    }

    /** A wildcard without a FIRST limit, i.e. {@code SELECT * ...}. */
    public boolean isFullWildcard()
    {
        return expression.isWildcard() && !expression.hasFirstSet();
    }

    public List<Term> getColumnNames()
    {
        return expression.getColumns();
    }

    public Term getColumnStart()
    {
        return expression.getStart();
    }

    public Term getColumnFinish()
    {
        return expression.getFinish();
    }

    // --- plain accessors ---

    public boolean isSetKeyspace()
    {
        return keyspace != null;
    }

    public String getKeyspace()
    {
        return keyspace;
    }

    public String getColumnFamily()
    {
        return columnFamily;
    }

    public boolean isColumnsReversed()
    {
        return expression.isColumnsReversed();
    }

    public ConsistencyLevel getConsistencyLevel()
    {
        return cLevel;
    }

    public int getNumRecords()
    {
        return numRecords;
    }

    public int getColumnsLimit()
    {
        return expression.getColumnsLimit();
    }

    public boolean isCountOperation()
    {
        return isCountOper;
    }

    public boolean includeStartKey()
    {
        return clause.includeStartKey();
    }

    public boolean includeFinishKey()
    {
        return clause.includeFinishKey();
    }

    public String getKeyAlias()
    {
        return clause.getKeyAlias();
    }

    public boolean isMultiKey()
    {
        return clause.isMultiKey();
    }

    public void extractKeyAliasFromColumns(CFMetaData cfm)
    {
        clause.extractKeysFromColumns(cfm);
    }

    /** Comparator (column-name type) of the target column family. */
    public AbstractType<?> getComparator(String keyspace)
    {
        return Schema.instance.getComparator(keyspace, columnFamily);
    }

    /** Value validator (value type) for the given column of the target column family. */
    public AbstractType<?> getValueValidator(String keyspace, ByteBuffer column)
    {
        return Schema.instance.getValueValidator(keyspace, columnFamily, column);
    }

    @Override
    public String toString()
    {
        return String.format("SelectStatement [expression=%s, isCountOper=%s, columnFamily=%s, keyspace=%s, cLevel=%s, clause=%s, numRecords=%s]",
                             expression,
                             isCountOper,
                             columnFamily,
                             keyspace,
                             cLevel,
                             clause,
                             numRecords);
    }
}
| |
package it.prisma.businesslayer.bizlib.mail;
import it.prisma.businesslayer.config.EnvironmentConfig;
import it.prisma.businesslayer.utils.LinkHelper;
import it.prisma.businesslayer.utils.mailer.MailServiceBean;
import it.prisma.dal.dao.accounting.UserAccountDAO;
import it.prisma.dal.entities.accounting.UserAccount;
import it.prisma.dal.entities.paas.services.AbstractPaaSService;
import it.prisma.dal.entities.paas.services.PaaSService;
import it.prisma.utils.mailer.MailerImpl;
import it.prisma.utils.mailer.exceptions.EmailSyntaxException;
import javax.inject.Inject;
import javax.mail.MessagingException;
public class MailBean {
@Inject
protected MailServiceBean mailService;
@Inject
protected UserAccountDAO userDAO;
@Inject
protected EnvironmentConfig envConfig;
@Inject
protected LinkHelper linkHelper;
// Domain part shared by all outgoing addresses, taken from the environment config.
protected String getBaseEmailAddress() {
return envConfig.getMailProperty(EnvironmentConfig.MAIL_BASE_ADDRESS);
}
// Logical sender mailboxes; currently only the generic "info" address exists.
enum MailAddressType {
INFO
}
/**
 * Builds the full sender address ("local-part@base-domain") for the given
 * logical mailbox type.
 *
 * @throws IllegalArgumentException if the type is not recognized
 */
protected String getEmailAddress(MailAddressType type) {
    if (type == MailAddressType.INFO) {
        String localPart = envConfig.getMailProperty(EnvironmentConfig.MAIL_INFO_ADDRESS);
        return localPart + "@" + getBaseEmailAddress();
    }
    throw new IllegalArgumentException("Invalid Email address type " + type);
}
/**
 * Notifies the service owner that the PaaS service was deployed successfully.
 * The mail body links to the service and reports its domain name.
 */
public void sendPaaSDeploySuccesfullEmail(AbstractPaaSService paasService)
        throws EmailSyntaxException, MessagingException {
    MailerImpl mailSender = mailService.getMailSender();
    PaaSService paasSvc = paasService.getPaaSService();
    // Re-read the user through the DAO (transaction workaround).
    UserAccount user = userDAO.findById(paasSvc.getUserAccount().getId());
    // Skip the middle name when absent so "null" never shows up in the greeting.
    String middleName = user.getUserProfile().getMiddleName();
    String mailUsername = "Dear " + user.getUserProfile().getFirstName()
            + (middleName == null ? "" : " " + middleName)
            + " " + user.getUserProfile().getLastName();
    String sender = getEmailAddress(MailAddressType.INFO);
    String recipient = paasSvc.getNotificationEmail();
    String paasServiceType = paasSvc.getType();
    String subject = "Prisma: " + paasSvc.getName() + " ("
            + paasServiceType + " service) is ready to use !";
    String deployLink = linkHelper.getPaaSServiceLink(paasService);
    String body = mailUsername + "<br/><br/> Your " + paasServiceType
            + " '<a href=\"" + deployLink + "\">" + paasSvc.getName()
            + "</a>' is ready to use.<br/>Service domain name: "
            + paasSvc.getDomainName();
    mailSender.sendEmail(recipient, sender, subject, body);
}
public void sendPaaSDeployErrorEmail(AbstractPaaSService paasService,
String errorDetails) throws EmailSyntaxException,
MessagingException {
String mailUsername;
MailerImpl mailSender = mailService.getMailSender();
PaaSService paasSvc = paasService.getPaaSService();
// Fix for transaction Workaround
UserAccount user = userDAO.findById(paasSvc.getUserAccount().getId());
//Check if middleName is null and this code avoids to print null in email
if (user.getUserProfile().getMiddleName()==null){
mailUsername = "Dear " + user.getUserProfile().getFirstName()
+ " " + user.getUserProfile().getLastName();
}else {
mailUsername = "Dear " + user.getUserProfile().getFirstName()
+ " " + user.getUserProfile().getMiddleName() + " "
+ user.getUserProfile().getLastName();
}
String sender = getEmailAddress(MailAddressType.INFO);
String recipient = paasSvc.getNotificationEmail();
String paasServiceType = paasSvc.getType();
String subject = "Prisma: " + paasSvc.getName() + " ("
+ paasServiceType + " service) deploy failed !";
String deployLink = linkHelper.getPaaSServiceLink(paasService);
String body = mailUsername
+ "<br/><br/>Unfortunately, your "
+ paasServiceType
+ " '<a href=\""
+ deployLink
+ "\">"
+ paasSvc.getName()
+ "</a>' deploy has failed.<br/>Error details: "
+ errorDetails
+ "<br/><br/>Please, contact the <a href=\"mailto:support@bari.ponsmartcities-prisma.eu\">support team</a> for further instructions.";
mailSender.sendEmail(recipient, sender, subject, body);
}
public void sendPaaSUndeploySuccesfullEmail(AbstractPaaSService paasService)
throws EmailSyntaxException, MessagingException {
String mailUsername;
MailerImpl mailSender = mailService.getMailSender();
PaaSService paasSvc = paasService.getPaaSService();
// Fix for transaction Workaround
UserAccount user = userDAO.findById(paasSvc.getUserAccount().getId());
//Check if middleName is null and this code avoids to print null in email
if (user.getUserProfile().getMiddleName()==null){
mailUsername = "Dear " + user.getUserProfile().getFirstName()
+ " " + user.getUserProfile().getLastName();
}else {
mailUsername = "Dear " + user.getUserProfile().getFirstName()
+ " " + user.getUserProfile().getMiddleName() + " "
+ user.getUserProfile().getLastName();
}
String sender = getEmailAddress(MailAddressType.INFO);
String recipient = paasSvc.getNotificationEmail();
String paasServiceType = paasSvc.getType();
String subject = "Prisma: " + paasSvc.getName() + " ("
+ paasServiceType + " service) has been undeployed !";
String body = mailUsername + "<br/><br/> Your " + paasServiceType
+ " '" + paasSvc.getName()
+ "' has been successfully deleted from the platform.";
mailSender.sendEmail(recipient, sender, subject, body);
}
public void sendPaaSUndeployErrorEmail(AbstractPaaSService paasService,
String errorDetails) throws EmailSyntaxException,
MessagingException {
String mailUsername;
MailerImpl mailSender = mailService.getMailSender();
PaaSService paasSvc = paasService.getPaaSService();
// Fix for transaction Workaround
UserAccount user = userDAO.findById(paasSvc.getUserAccount().getId());
//Check if middleName is null and this code avoids to print null in email
if (user.getUserProfile().getMiddleName()==null){
mailUsername = "Dear " + user.getUserProfile().getFirstName()
+ " " + user.getUserProfile().getLastName();
}else {
mailUsername = "Dear " + user.getUserProfile().getFirstName()
+ " " + user.getUserProfile().getMiddleName() + " "
+ user.getUserProfile().getLastName();
}
String sender = getEmailAddress(MailAddressType.INFO);
String recipient = paasSvc.getNotificationEmail();
String paasServiceType = paasSvc.getType();
String subject = "Prisma: " + paasSvc.getName() + " ("
+ paasServiceType + " service) undeploy failed !";
String body = mailUsername
+ "<br/><br/>Unfortunately, your "
+ paasServiceType
+ " '"
+ paasSvc.getName()
+ "' undeployment failed.<br/>Error details: "
+ errorDetails
+ "<br/><br/>Please, contact the <a href=\"mailto:support@bari.ponsmartcities-prisma.eu\">support team</a> for further instructions.";
mailSender.sendEmail(recipient, sender, subject, body);
}
public void sendTestEmail(String email) throws EmailSyntaxException,
MessagingException {
MailerImpl mailSender = mailService.getMailSender();
String sender = getEmailAddress(MailAddressType.INFO);
String recipient = (email != null ? email : sender);
String subject = "Prisma BizLayer Mail Test";
String body = "Prisma automatic mail test.";
mailSender.sendEmail(recipient, sender, subject, body);
}
}
| |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.flowable;
import static org.junit.Assert.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import org.junit.*;
import org.reactivestreams.*;
import io.reactivex.*;
import io.reactivex.disposables.Disposable;
import io.reactivex.exceptions.*;
import io.reactivex.flowables.ConnectableFlowable;
import io.reactivex.functions.*;
import io.reactivex.internal.functions.Functions;
import io.reactivex.internal.fuseable.HasUpstreamPublisher;
import io.reactivex.internal.operators.flowable.FlowablePublish.*;
import io.reactivex.internal.schedulers.ImmediateThinScheduler;
import io.reactivex.internal.subscriptions.BooleanSubscription;
import io.reactivex.plugins.RxJavaPlugins;
import io.reactivex.processors.PublishProcessor;
import io.reactivex.schedulers.*;
import io.reactivex.subscribers.TestSubscriber;
public class FlowablePublishAltTest {
    @Test
    public void testPublish() throws InterruptedException {
        // Counts how many times the upstream is actually subscribed.
        final AtomicInteger counter = new AtomicInteger();
        ConnectableFlowable<String> f = Flowable.unsafeCreate(new Publisher<String>() {
            @Override
            public void subscribe(final Subscriber<? super String> subscriber) {
                subscriber.onSubscribe(new BooleanSubscription());
                // Emit asynchronously so both consumers register before the value.
                new Thread(new Runnable() {
                    @Override
                    public void run() {
                        counter.incrementAndGet();
                        subscriber.onNext("one");
                        subscriber.onComplete();
                    }
                }).start();
            }
        }).publish();
        final CountDownLatch latch = new CountDownLatch(2);
        // subscribe once
        f.subscribe(new Consumer<String>() {
            @Override
            public void accept(String v) {
                assertEquals("one", v);
                latch.countDown();
            }
        });
        // subscribe again
        f.subscribe(new Consumer<String>() {
            @Override
            public void accept(String v) {
                assertEquals("one", v);
                latch.countDown();
            }
        });
        Disposable connection = f.connect();
        try {
            if (!latch.await(1000, TimeUnit.MILLISECONDS)) {
                fail("subscriptions did not receive values");
            }
            // Both consumers must have shared a single upstream subscription.
            assertEquals(1, counter.get());
        } finally {
            connection.dispose();
        }
    }
    @Test
    public void testBackpressureFastSlow() {
        // Two consumers of one published range: one drains freely, the other
        // stalls 500ms on its first item; both must still see every value.
        ConnectableFlowable<Integer> is = Flowable.range(1, Flowable.bufferSize() * 2).publish();
        Flowable<Integer> fast = is.observeOn(Schedulers.computation())
        .doOnComplete(new Action() {
            @Override
            public void run() {
                System.out.println("^^^^^^^^^^^^^ completed FAST");
            }
        });
        Flowable<Integer> slow = is.observeOn(Schedulers.computation()).map(new Function<Integer, Integer>() {
            int c;
            @Override
            public Integer apply(Integer i) {
                // Delay only the very first element to force backpressure.
                if (c == 0) {
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException e) {
                    }
                }
                c++;
                return i;
            }
        }).doOnComplete(new Action() {
            @Override
            public void run() {
                System.out.println("^^^^^^^^^^^^^ completed SLOW");
            }
        });
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
        Flowable.merge(fast, slow).subscribe(ts);
        is.connect();
        ts.awaitTerminalEvent();
        ts.assertNoErrors();
        // Each consumer received bufferSize()*2 values, merged together.
        assertEquals(Flowable.bufferSize() * 4, ts.valueCount());
    }
    // use case from https://github.com/ReactiveX/RxJava/issues/1732
    @Test
    public void testTakeUntilWithPublishedStreamUsingSelector() {
        // The selector shares one subscription between takeUntil and its
        // skipWhile trigger, so the source is consumed only once.
        final AtomicInteger emitted = new AtomicInteger();
        Flowable<Integer> xs = Flowable.range(0, Flowable.bufferSize() * 2).doOnNext(new Consumer<Integer>() {
            @Override
            public void accept(Integer t1) {
                emitted.incrementAndGet();
            }
        });
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
        xs.publish(new Function<Flowable<Integer>, Flowable<Integer>>() {
            @Override
            public Flowable<Integer> apply(Flowable<Integer> xs) {
                return xs.takeUntil(xs.skipWhile(new Predicate<Integer>() {
                    @Override
                    public boolean test(Integer i) {
                        return i <= 3;
                    }
                }));
            }
        }).subscribe(ts);
        ts.awaitTerminalEvent();
        ts.assertNoErrors();
        ts.assertValues(0, 1, 2, 3);
        // Value 4 trips the trigger, so exactly 5 items left the source.
        assertEquals(5, emitted.get());
        System.out.println(ts.values());
    }
    // use case from https://github.com/ReactiveX/RxJava/issues/1732
    @Test
    public void testTakeUntilWithPublishedStream() {
        // Same takeUntil/skipWhile sharing as above, but via an explicit
        // ConnectableFlowable instead of the publish(selector) overload.
        Flowable<Integer> xs = Flowable.range(0, Flowable.bufferSize() * 2);
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
        ConnectableFlowable<Integer> xsp = xs.publish();
        xsp.takeUntil(xsp.skipWhile(new Predicate<Integer>() {
            @Override
            public boolean test(Integer i) {
                return i <= 3;
            }
        })).subscribe(ts);
        xsp.connect();
        System.out.println(ts.values());
    }
@Test(timeout = 10000)
public void testBackpressureTwoConsumers() {
final AtomicInteger sourceEmission = new AtomicInteger();
final AtomicBoolean sourceUnsubscribed = new AtomicBoolean();
final Flowable<Integer> source = Flowable.range(1, 100)
.doOnNext(new Consumer<Integer>() {
@Override
public void accept(Integer t1) {
sourceEmission.incrementAndGet();
}
})
.doOnCancel(new Action() {
@Override
public void run() {
sourceUnsubscribed.set(true);
}
}).share();
;
final AtomicBoolean child1Unsubscribed = new AtomicBoolean();
final AtomicBoolean child2Unsubscribed = new AtomicBoolean();
final TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>();
final TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>() {
@Override
public void onNext(Integer t) {
if (valueCount() == 2) {
source.doOnCancel(new Action() {
@Override
public void run() {
child2Unsubscribed.set(true);
}
}).take(5).subscribe(ts2);
}
super.onNext(t);
}
};
source.doOnCancel(new Action() {
@Override
public void run() {
child1Unsubscribed.set(true);
}
}).take(5)
.subscribe(ts1);
ts1.awaitTerminalEvent();
ts2.awaitTerminalEvent();
ts1.assertNoErrors();
ts2.assertNoErrors();
assertTrue(sourceUnsubscribed.get());
assertTrue(child1Unsubscribed.get());
assertTrue(child2Unsubscribed.get());
ts1.assertValues(1, 2, 3, 4, 5);
ts2.assertValues(4, 5, 6, 7, 8);
assertEquals(8, sourceEmission.get());
}
    @Test
    public void testConnectWithNoSubscriber() {
        // Values emitted while nobody is subscribed are simply dropped;
        // a late subscriber only sees what is emitted after it joins.
        TestScheduler scheduler = new TestScheduler();
        ConnectableFlowable<Long> cf = Flowable.interval(10, 10, TimeUnit.MILLISECONDS, scheduler).take(3).publish();
        cf.connect();
        // Emit 0
        scheduler.advanceTimeBy(15, TimeUnit.MILLISECONDS);
        TestSubscriber<Long> subscriber = new TestSubscriber<Long>();
        cf.subscribe(subscriber);
        // Emit 1 and 2
        scheduler.advanceTimeBy(50, TimeUnit.MILLISECONDS);
        subscriber.assertValues(1L, 2L);
        subscriber.assertNoErrors();
        subscriber.assertTerminated();
    }
    @Test
    public void testSubscribeAfterDisconnectThenConnect() {
        // After the first run terminates, a fresh subscriber plus a fresh
        // connect() must replay the source from scratch.
        ConnectableFlowable<Integer> source = Flowable.just(1).publish();
        TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>();
        source.subscribe(ts1);
        Disposable connection = source.connect();
        ts1.assertValue(1);
        ts1.assertNoErrors();
        ts1.assertTerminated();
        TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>();
        source.subscribe(ts2);
        Disposable connection2 = source.connect();
        ts2.assertValue(1);
        ts2.assertNoErrors();
        ts2.assertTerminated();
        System.out.println(connection);
        System.out.println(connection2);
    }
    @Test
    public void testNoSubscriberRetentionOnCompleted() {
        // After termination the operator must drop its internal state so
        // subscribers are not retained (current.get() becomes null).
        FlowablePublish<Integer> source = (FlowablePublish<Integer>)Flowable.just(1).publish();
        TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>();
        source.subscribe(ts1);
        ts1.assertNoValues();
        ts1.assertNoErrors();
        ts1.assertNotComplete();
        source.connect();
        ts1.assertValue(1);
        ts1.assertNoErrors();
        ts1.assertTerminated();
        assertNull(source.current.get());
    }
@Test
public void testNonNullConnection() {
ConnectableFlowable<Object> source = Flowable.never().publish();
assertNotNull(source.connect());
assertNotNull(source.connect());
}
@Test
public void testNoDisconnectSomeoneElse() {
ConnectableFlowable<Object> source = Flowable.never().publish();
Disposable connection1 = source.connect();
Disposable connection2 = source.connect();
connection1.dispose();
Disposable connection3 = source.connect();
connection2.dispose();
assertTrue(checkPublishDisposed(connection1));
assertTrue(checkPublishDisposed(connection2));
assertFalse(checkPublishDisposed(connection3));
}
    // The Disposable handed out by connect() is the operator's internal
    // PublishSubscriber; cast to it to query its disposed state.
    @SuppressWarnings("unchecked")
    static boolean checkPublishDisposed(Disposable d) {
        return ((FlowablePublish.PublishSubscriber<Object>)d).isDisposed();
    }
@Test
public void testZeroRequested() {
ConnectableFlowable<Integer> source = Flowable.just(1).publish();
TestSubscriber<Integer> ts = new TestSubscriber<Integer>(0L);
source.subscribe(ts);
ts.assertNoValues();
ts.assertNoErrors();
ts.assertNotComplete();
source.connect();
ts.assertNoValues();
ts.assertNoErrors();
ts.assertNotComplete();
ts.request(5);
ts.assertValue(1);
ts.assertNoErrors();
ts.assertTerminated();
}
    @Test
    public void testConnectIsIdempotent() {
        // Repeated connect() calls reuse the live connection; a new upstream
        // subscription happens only after the previous one is disposed.
        final AtomicInteger calls = new AtomicInteger();
        Flowable<Integer> source = Flowable.unsafeCreate(new Publisher<Integer>() {
            @Override
            public void subscribe(Subscriber<? super Integer> t) {
                t.onSubscribe(new BooleanSubscription());
                calls.getAndIncrement();
            }
        });
        ConnectableFlowable<Integer> conn = source.publish();
        assertEquals(0, calls.get());
        conn.connect();
        conn.connect();
        assertEquals(1, calls.get());
        conn.connect().dispose();
        conn.connect();
        conn.connect();
        assertEquals(2, calls.get());
    }
@Test
public void syncFusedObserveOn() {
ConnectableFlowable<Integer> cf = Flowable.range(0, 1000).publish();
Flowable<Integer> obs = cf.observeOn(Schedulers.computation());
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestSubscriber<Integer>> tss = new ArrayList<TestSubscriber<Integer>>();
for (int k = 1; k < j; k++) {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
tss.add(ts);
obs.subscribe(ts);
}
Disposable connection = cf.connect();
for (TestSubscriber<Integer> ts : tss) {
ts.awaitDone(5, TimeUnit.SECONDS)
.assertSubscribed()
.assertValueCount(1000)
.assertNoErrors()
.assertComplete();
}
connection.dispose();
}
}
}
@Test
public void syncFusedObserveOn2() {
ConnectableFlowable<Integer> cf = Flowable.range(0, 1000).publish();
Flowable<Integer> obs = cf.observeOn(ImmediateThinScheduler.INSTANCE);
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestSubscriber<Integer>> tss = new ArrayList<TestSubscriber<Integer>>();
for (int k = 1; k < j; k++) {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
tss.add(ts);
obs.subscribe(ts);
}
Disposable connection = cf.connect();
for (TestSubscriber<Integer> ts : tss) {
ts.awaitDone(5, TimeUnit.SECONDS)
.assertSubscribed()
.assertValueCount(1000)
.assertNoErrors()
.assertComplete();
}
connection.dispose();
}
}
}
@Test
public void asyncFusedObserveOn() {
ConnectableFlowable<Integer> cf = Flowable.range(0, 1000).observeOn(ImmediateThinScheduler.INSTANCE).publish();
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestSubscriber<Integer>> tss = new ArrayList<TestSubscriber<Integer>>();
for (int k = 1; k < j; k++) {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
tss.add(ts);
cf.subscribe(ts);
}
Disposable connection = cf.connect();
for (TestSubscriber<Integer> ts : tss) {
ts.awaitDone(5, TimeUnit.SECONDS)
.assertSubscribed()
.assertValueCount(1000)
.assertNoErrors()
.assertComplete();
}
connection.dispose();
}
}
}
@Test
public void testObserveOn() {
ConnectableFlowable<Integer> cf = Flowable.range(0, 1000).hide().publish();
Flowable<Integer> obs = cf.observeOn(Schedulers.computation());
for (int i = 0; i < 1000; i++) {
for (int j = 1; j < 6; j++) {
List<TestSubscriber<Integer>> tss = new ArrayList<TestSubscriber<Integer>>();
for (int k = 1; k < j; k++) {
TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
tss.add(ts);
obs.subscribe(ts);
}
Disposable connection = cf.connect();
for (TestSubscriber<Integer> ts : tss) {
ts.awaitDone(5, TimeUnit.SECONDS)
.assertSubscribed()
.assertValueCount(1000)
.assertNoErrors()
.assertComplete();
}
connection.dispose();
}
}
}
@Test
public void source() {
Flowable<Integer> f = Flowable.never();
assertSame(f, (((HasUpstreamPublisher<?>)f.publish()).source()));
}
    @Test
    public void connectThrows() {
        // An exception thrown by the connection callback must propagate
        // to the connect(Consumer) caller.
        ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
        try {
            cf.connect(new Consumer<Disposable>() {
                @Override
                public void accept(Disposable d) throws Exception {
                    throw new TestException();
                }
            });
        } catch (TestException ex) {
            // expected
        }
    }
    @Test
    public void addRemoveRace() {
        // Races a new subscription against an existing subscriber's cancel
        // to exercise the subscriber-array add/remove CAS loop.
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
            final TestSubscriber<Integer> ts = cf.test();
            final TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    cf.subscribe(ts2);
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    ts.cancel();
                }
            };
            TestHelper.race(r1, r2);
        }
    }
    @Test
    public void disposeOnArrival() {
        // A consumer already cancelled at subscription time gets no events.
        ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
        cf.test(Long.MAX_VALUE, true).assertEmpty();
    }
    @Test
    public void disposeOnArrival2() {
        // Same as above but through publish().autoConnect().
        Flowable<Integer> co = Flowable.<Integer>never().publish().autoConnect();
        co.test(Long.MAX_VALUE, true).assertEmpty();
    }
    @Test
    public void dispose() {
        // Standard dispose-propagation checks for both publish overloads.
        TestHelper.checkDisposed(Flowable.never().publish());
        TestHelper.checkDisposed(Flowable.never().publish(Functions.<Flowable<Object>>identity()));
    }
    @Test
    public void empty() {
        // Connecting an empty published source must complete without error.
        ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
        cf.connect();
    }
@Test
public void take() {
ConnectableFlowable<Integer> cf = Flowable.range(1, 2).publish();
TestSubscriber<Integer> ts = cf.take(1).test();
cf.connect();
ts.assertResult(1);
}
    @Test
    public void just() {
        // Completing the source from within onNext must still deliver a
        // proper terminal event to the subscriber.
        final PublishProcessor<Integer> pp = PublishProcessor.create();
        ConnectableFlowable<Integer> cf = pp.publish();
        TestSubscriber<Integer> ts = new TestSubscriber<Integer>() {
            @Override
            public void onNext(Integer t) {
                super.onNext(t);
                pp.onComplete();
            }
        };
        cf.subscribe(ts);
        cf.connect();
        pp.onNext(1);
        ts.assertResult(1);
    }
    @Test
    public void nextCancelRace() {
        // Races an emission against the sole subscriber's cancellation.
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final PublishProcessor<Integer> pp = PublishProcessor.create();
            final ConnectableFlowable<Integer> cf = pp.publish();
            final TestSubscriber<Integer> ts = cf.test();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    pp.onNext(1);
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    ts.cancel();
                }
            };
            TestHelper.race(r1, r2);
        }
    }
    @Test
    public void badSource() {
        // A protocol-violating source (events after onComplete) must only
        // deliver the pre-terminal value; the late error is routed to the
        // undeliverable-error plugin hook.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new Flowable<Integer>() {
                @Override
                protected void subscribeActual(Subscriber<? super Integer> subscriber) {
                    subscriber.onSubscribe(new BooleanSubscription());
                    subscriber.onNext(1);
                    subscriber.onComplete();
                    subscriber.onNext(2);
                    subscriber.onError(new TestException());
                    subscriber.onComplete();
                }
            }
            .publish()
            .autoConnect()
            .test()
            .assertResult(1);
            TestHelper.assertUndeliverable(errors, 0, TestException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }
    @Test
    public void noErrorLoss() {
        // An error emitted with no subscribers attached must not vanish;
        // it goes to the undeliverable-error plugin hook.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            ConnectableFlowable<Object> cf = Flowable.error(new TestException()).publish();
            cf.connect();
            TestHelper.assertUndeliverable(errors, 0, TestException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }
    @Test
    public void subscribeDisconnectRace() {
        // Races disposing the connection against a new subscription.
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final PublishProcessor<Integer> pp = PublishProcessor.create();
            final ConnectableFlowable<Integer> cf = pp.publish();
            final Disposable d = cf.connect();
            final TestSubscriber<Integer> ts = new TestSubscriber<Integer>();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    d.dispose();
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    cf.subscribe(ts);
                }
            };
            TestHelper.race(r1, r2);
        }
    }
    @Test
    public void selectorDisconnectsIndependentSource() {
        // When the selector result terminates, the shared upstream must be
        // disconnected even though the selector never used it.
        PublishProcessor<Integer> pp = PublishProcessor.create();
        pp.publish(new Function<Flowable<Integer>, Flowable<Integer>>() {
            @Override
            public Flowable<Integer> apply(Flowable<Integer> v) throws Exception {
                return Flowable.range(1, 2);
            }
        })
        .test()
        .assertResult(1, 2);
        assertFalse(pp.hasSubscribers());
    }
    @Test(timeout = 5000)
    public void selectorLatecommer() {
        // concatWith(v) re-subscribes to the shared flowable after it has
        // completed; the late consumer must still terminate (hence timeout).
        Flowable.range(1, 5)
        .publish(new Function<Flowable<Integer>, Flowable<Integer>>() {
            @Override
            public Flowable<Integer> apply(Flowable<Integer> v) throws Exception {
                return v.concatWith(v);
            }
        })
        .test()
        .assertResult(1, 2, 3, 4, 5);
    }
    @Test
    public void mainError() {
        // An upstream error passes through the identity selector unchanged.
        Flowable.error(new TestException())
        .publish(Functions.<Flowable<Object>>identity())
        .test()
        .assertFailure(TestException.class);
    }
    @Test
    public void selectorInnerError() {
        // An error from the selector's result must fail the consumer and
        // release the shared upstream.
        PublishProcessor<Integer> pp = PublishProcessor.create();
        pp.publish(new Function<Flowable<Integer>, Flowable<Integer>>() {
            @Override
            public Flowable<Integer> apply(Flowable<Integer> v) throws Exception {
                return Flowable.error(new TestException());
            }
        })
        .test()
        .assertFailure(TestException.class);
        assertFalse(pp.hasSubscribers());
    }
    @Test
    public void preNextConnect() {
        // Races two simultaneous subscriptions on an already-connected,
        // already-terminated source.
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
            cf.connect();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    cf.test();
                }
            };
            TestHelper.race(r1, r1);
        }
    }
    @Test
    public void connectRace() {
        // Races two concurrent connect() calls; only one may subscribe upstream.
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final ConnectableFlowable<Integer> cf = Flowable.<Integer>empty().publish();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    cf.connect();
                }
            };
            TestHelper.race(r1, r1);
        }
    }
    @Test
    public void selectorCrash() {
        // A selector that throws synchronously must surface as onError.
        Flowable.just(1).publish(new Function<Flowable<Integer>, Flowable<Object>>() {
            @Override
            public Flowable<Object> apply(Flowable<Integer> v) throws Exception {
                throw new TestException();
            }
        })
        .test()
        .assertFailure(TestException.class);
    }
    @Test
    public void pollThrows() {
        // flowableStripBoundary keeps the fused chain so the map's exception
        // is thrown from the queue's poll(); it must become onError.
        Flowable.just(1)
        .map(new Function<Integer, Object>() {
            @Override
            public Object apply(Integer v) throws Exception {
                throw new TestException();
            }
        })
        .compose(TestHelper.flowableStripBoundary())
        .publish()
        .autoConnect()
        .test()
        .assertFailure(TestException.class);
    }
    @Test
    public void pollThrowsNoSubscribers() {
        // The poll() crash on value 2 happens after the sole take(1)
        // subscriber has already left; the consumer still gets its result.
        ConnectableFlowable<Integer> cf = Flowable.just(1, 2)
        .map(new Function<Integer, Integer>() {
            @Override
            public Integer apply(Integer v) throws Exception {
                if (v == 2) {
                    throw new TestException();
                }
                return v;
            }
        })
        .compose(TestHelper.<Integer>flowableStripBoundary())
        .publish();
        TestSubscriber<Integer> ts = cf.take(1)
        .test();
        cf.connect();
        ts.assertResult(1);
    }
    @Test
    public void dryRunCrash() {
        // The subscriber completes and cancels itself after the first value;
        // the map crash on value 2 then has no consumer and must be routed
        // to the undeliverable-error plugin hook.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            final TestSubscriber<Object> ts = new TestSubscriber<Object>(1L) {
                @Override
                public void onNext(Object t) {
                    super.onNext(t);
                    onComplete();
                    cancel();
                }
            };
            Flowable.range(1, 10)
            .map(new Function<Integer, Object>() {
                @Override
                public Object apply(Integer v) throws Exception {
                    if (v == 2) {
                        throw new TestException();
                    }
                    return v;
                }
            })
            .publish()
            .autoConnect()
            .subscribe(ts);
            ts
            .assertResult(1);
            TestHelper.assertUndeliverable(errors, 0, TestException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }
    @Test
    public void overflowQueue() {
        // Ten unrequested emissions into a publish(8) buffer must overflow
        // and fail with MissingBackpressureException.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            Flowable.create(new FlowableOnSubscribe<Object>() {
                @Override
                public void subscribe(FlowableEmitter<Object> s) throws Exception {
                    for (int i = 0; i < 10; i++) {
                        s.onNext(i);
                    }
                }
            }, BackpressureStrategy.MISSING)
            .publish(8)
            .autoConnect()
            .test(0L)
            .assertFailure(MissingBackpressureException.class);
            TestHelper.assertError(errors, 0, MissingBackpressureException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }
    @Test
    public void delayedUpstreamOnSubscribe() {
        // If the connection was already disposed when the upstream finally
        // calls onSubscribe, the subscription must be cancelled immediately.
        final Subscriber<?>[] sub = { null };
        new Flowable<Integer>() {
            @Override
            protected void subscribeActual(Subscriber<? super Integer> s) {
                sub[0] = s;
            }
        }
        .publish()
        .connect()
        .dispose();
        BooleanSubscription bs = new BooleanSubscription();
        sub[0].onSubscribe(bs);
        assertTrue(bs.isCancelled());
    }
    @Test
    public void disposeRace() {
        // Races two dispose() calls on the internal connection subscriber.
        for (int i = 0; i < TestHelper.RACE_DEFAULT_LOOPS; i++) {
            final AtomicReference<Disposable> ref = new AtomicReference<Disposable>();
            final ConnectableFlowable<Integer> cf = new Flowable<Integer>() {
                @Override
                protected void subscribeActual(Subscriber<? super Integer> s) {
                    s.onSubscribe(new BooleanSubscription());
                    // The subscriber handed to the upstream is the connection Disposable.
                    ref.set((Disposable)s);
                }
            }.publish();
            cf.connect();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    ref.get().dispose();
                }
            };
            TestHelper.race(r1, r1);
        }
    }
    @Test
    public void removeNotPresent() {
        // remove(null) on the internal subscriber array must be a no-op
        // rather than throwing.
        final AtomicReference<PublishSubscriber<Integer>> ref = new AtomicReference<PublishSubscriber<Integer>>();
        final ConnectableFlowable<Integer> cf = new Flowable<Integer>() {
            @Override
            @SuppressWarnings("unchecked")
            protected void subscribeActual(Subscriber<? super Integer> s) {
                s.onSubscribe(new BooleanSubscription());
                ref.set((PublishSubscriber<Integer>)s);
            }
        }.publish();
        cf.connect();
        ref.get().add(new InnerSubscriber<Integer>(new TestSubscriber<Integer>()));
        ref.get().remove(null);
    }
    @Test
    @Ignore("publish() keeps consuming the upstream if there are no subscribers, 3.x should change this")
    public void subscriberSwap() {
        // A second subscriber joining after the first cancels should (in the
        // desired 3.x behavior) resume from where the first left off.
        final ConnectableFlowable<Integer> cf = Flowable.range(1, 5).publish();
        cf.connect();
        TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>() {
            @Override
            public void onNext(Integer t) {
                super.onNext(t);
                cancel();
                onComplete();
            }
        };
        cf.subscribe(ts1);
        ts1.assertResult(1);
        TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>(0);
        cf.subscribe(ts2);
        ts2
        .assertEmpty()
        .requestMore(4)
        .assertResult(2, 3, 4, 5);
    }
    @Test
    public void subscriberLiveSwap() {
        // ts1 swaps in ts2 from within its own onNext; ts2 must continue
        // with the remaining values under backpressure.
        final ConnectableFlowable<Integer> cf = new FlowablePublishAlt<Integer>(Flowable.range(1, 5), 128);
        final TestSubscriber<Integer> ts2 = new TestSubscriber<Integer>(0);
        TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>() {
            @Override
            public void onNext(Integer t) {
                super.onNext(t);
                cancel();
                onComplete();
                cf.subscribe(ts2);
            }
        };
        cf.subscribe(ts1);
        cf.connect();
        ts1.assertResult(1);
        ts2
        .assertEmpty()
        .requestMore(4)
        .assertResult(2, 3, 4, 5);
    }
    @Test
    public void selectorSubscriberSwap() {
        // Captures the shared flowable from the selector, then verifies a
        // second consumer picks up after the first (take(2)) leaves.
        final AtomicReference<Flowable<Integer>> ref = new AtomicReference<Flowable<Integer>>();
        Flowable.range(1, 5).publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
            @Override
            public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
                ref.set(f);
                return Flowable.never();
            }
        }).test();
        ref.get().take(2).test().assertResult(1, 2);
        ref.get()
        .test(0)
        .assertEmpty()
        .requestMore(2)
        .assertValuesOnly(3, 4)
        .requestMore(1)
        .assertResult(3, 4, 5);
    }
    @Test
    public void leavingSubscriberOverrequests() {
        // A departing take(2) subscriber's unused requests must not let the
        // next subscriber receive items it never asked for.
        final AtomicReference<Flowable<Integer>> ref = new AtomicReference<Flowable<Integer>>();
        PublishProcessor<Integer> pp = PublishProcessor.create();
        pp.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
            @Override
            public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
                ref.set(f);
                return Flowable.never();
            }
        }).test();
        TestSubscriber<Integer> ts1 = ref.get().take(2).test();
        pp.onNext(1);
        pp.onNext(2);
        ts1.assertResult(1, 2);
        pp.onNext(3);
        pp.onNext(4);
        TestSubscriber<Integer> ts2 = ref.get().test(0L);
        ts2.assertEmpty();
        ts2.requestMore(2);
        ts2.assertValuesOnly(3, 4);
    }
// call a transformer only if the input is non-empty
@Test
public void composeIfNotEmpty() {
final FlowableTransformer<Integer, Integer> transformer = new FlowableTransformer<Integer, Integer>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> g) {
return g.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
return v + 1;
}
});
}
};
final AtomicInteger calls = new AtomicInteger();
Flowable.range(1, 5)
.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(final Flowable<Integer> shared)
throws Exception {
return shared.take(1).concatMap(new Function<Integer, Publisher<? extends Integer>>() {
@Override
public Publisher<? extends Integer> apply(Integer first)
throws Exception {
calls.incrementAndGet();
return transformer.apply(Flowable.just(first).concatWith(shared));
}
});
}
})
.test()
.assertResult(2, 3, 4, 5, 6);
assertEquals(1, calls.get());
}
// call a transformer only if the input is non-empty
@Test
public void composeIfNotEmptyNotFused() {
final FlowableTransformer<Integer, Integer> transformer = new FlowableTransformer<Integer, Integer>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> g) {
return g.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
return v + 1;
}
});
}
};
final AtomicInteger calls = new AtomicInteger();
Flowable.range(1, 5).hide()
.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(final Flowable<Integer> shared)
throws Exception {
return shared.take(1).concatMap(new Function<Integer, Publisher<? extends Integer>>() {
@Override
public Publisher<? extends Integer> apply(Integer first)
throws Exception {
calls.incrementAndGet();
return transformer.apply(Flowable.just(first).concatWith(shared));
}
});
}
})
.test()
.assertResult(2, 3, 4, 5, 6);
assertEquals(1, calls.get());
}
// call a transformer only if the input is non-empty
@Test
public void composeIfNotEmptyIsEmpty() {
final FlowableTransformer<Integer, Integer> transformer = new FlowableTransformer<Integer, Integer>() {
@Override
public Publisher<Integer> apply(Flowable<Integer> g) {
return g.map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer v) throws Exception {
return v + 1;
}
});
}
};
final AtomicInteger calls = new AtomicInteger();
Flowable.<Integer>empty().hide()
.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
@Override
public Publisher<Integer> apply(final Flowable<Integer> shared)
throws Exception {
return shared.take(1).concatMap(new Function<Integer, Publisher<? extends Integer>>() {
@Override
public Publisher<? extends Integer> apply(Integer first)
throws Exception {
calls.incrementAndGet();
return transformer.apply(Flowable.just(first).concatWith(shared));
}
});
}
})
.test()
.assertResult();
assertEquals(0, calls.get());
}
    @Test
    public void publishFunctionCancelOuterAfterOneInner() {
        // Cancelling the outer publish(selector) chain from an inner
        // consumer's onNext must not break event delivery.
        final AtomicReference<Flowable<Integer>> ref = new AtomicReference<Flowable<Integer>>();
        PublishProcessor<Integer> pp = PublishProcessor.create();
        final TestSubscriber<Integer> ts = pp.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
            @Override
            public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
                ref.set(f);
                return Flowable.never();
            }
        }).test();
        ref.get().subscribe(new TestSubscriber<Integer>() {
            @Override
            public void onNext(Integer t) {
                super.onNext(t);
                onComplete();
                ts.cancel();
            }
        });
        pp.onNext(1);
    }
    @Test
    public void publishFunctionCancelOuterAfterOneInnerBackpressured() {
        // Same as above, but the inner consumer has only requested one item.
        final AtomicReference<Flowable<Integer>> ref = new AtomicReference<Flowable<Integer>>();
        PublishProcessor<Integer> pp = PublishProcessor.create();
        final TestSubscriber<Integer> ts = pp.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
            @Override
            public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
                ref.set(f);
                return Flowable.never();
            }
        }).test();
        ref.get().subscribe(new TestSubscriber<Integer>(1L) {
            @Override
            public void onNext(Integer t) {
                super.onNext(t);
                onComplete();
                ts.cancel();
            }
        });
        pp.onNext(1);
    }
    @Test
    public void publishCancelOneAsync() {
        // Races an emission against the cancellation of one of two subscribers
        // of the published (shared) Flowable. Whatever the interleaving, the
        // remaining subscriber must still receive the item.
        for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
            final PublishProcessor<Integer> pp = PublishProcessor.create();
            final AtomicReference<Flowable<Integer>> ref = new AtomicReference<Flowable<Integer>>();
            pp.publish(new Function<Flowable<Integer>, Publisher<Integer>>() {
                @Override
                public Publisher<Integer> apply(Flowable<Integer> f) throws Exception {
                    ref.set(f);
                    return Flowable.never();
                }
            }).test();
            final TestSubscriber<Integer> ts1 = ref.get().test();
            TestSubscriber<Integer> ts2 = ref.get().test();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    pp.onNext(1);
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    ts1.cancel();
                }
            };
            TestHelper.race(r1, r2);
            // ts2 never cancelled, so it must observe the value regardless of the race outcome.
            ts2.assertValuesOnly(1);
        }
    }
    @Test
    public void publishCancelOneAsync2() {
        // Verifies that a subscriber whose requested amount is forced to the
        // "cancelled" marker (Long.MIN_VALUE) receives nothing, while a second,
        // healthy subscriber of the same connection still gets the item.
        final PublishProcessor<Integer> pp = PublishProcessor.create();
        ConnectableFlowable<Integer> cf = pp.publish();
        final TestSubscriber<Integer> ts1 = new TestSubscriber<Integer>();
        final AtomicReference<InnerSubscriber<Integer>> ref = new AtomicReference<InnerSubscriber<Integer>>();
        cf.subscribe(new FlowableSubscriber<Integer>() {
            @SuppressWarnings("unchecked")
            @Override
            public void onSubscribe(Subscription s) {
                ts1.onSubscribe(new BooleanSubscription());
                // pretend to be cancelled without removing it from the subscriber list
                ref.set((InnerSubscriber<Integer>)s);
            }
            @Override
            public void onNext(Integer t) {
                ts1.onNext(t);
            }
            @Override
            public void onError(Throwable t) {
                ts1.onError(t);
            }
            @Override
            public void onComplete() {
                ts1.onComplete();
            }
        });
        TestSubscriber<Integer> ts2 = cf.test();
        cf.connect();
        // Long.MIN_VALUE is the operator's internal sentinel for a cancelled request amount.
        ref.get().set(Long.MIN_VALUE);
        pp.onNext(1);
        ts1.assertEmpty();
        ts2.assertValuesOnly(1);
    }
@Test
public void boundaryFusion() {
Flowable.range(1, 10000)
.observeOn(Schedulers.single())
.map(new Function<Integer, String>() {
@Override
public String apply(Integer t) throws Exception {
String name = Thread.currentThread().getName();
if (name.contains("RxSingleScheduler")) {
return "RxSingleScheduler";
}
return name;
}
})
.share()
.observeOn(Schedulers.computation())
.distinct()
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertResult("RxSingleScheduler");
}
@Test
public void badRequest() {
TestHelper.assertBadRequestReported(Flowable.range(1, 5).publish());
}
    @Test
    @SuppressWarnings("unchecked")
    public void splitCombineSubscriberChangeAfterOnNext() {
        // Splits one shared (publish(10).refCount()) source into even and odd
        // streams, then re-pairs them: for each even number, the next odd number
        // is fetched via a fresh first() subscription. Subscribers therefore
        // come and go after items have already been delivered; the shared
        // connection must keep serving the changing subscriber set.
        Flowable<Integer> source = Flowable.range(0, 20)
        .doOnSubscribe(new Consumer<Subscription>() {
            @Override
            public void accept(Subscription v) throws Exception {
                System.out.println("Subscribed");
            }
        })
        .publish(10)
        .refCount()
        ;
        Flowable<Integer> evenNumbers = source.filter(new Predicate<Integer>() {
            @Override
            public boolean test(Integer v) throws Exception {
                return v % 2 == 0;
            }
        });
        Flowable<Integer> oddNumbers = source.filter(new Predicate<Integer>() {
            @Override
            public boolean test(Integer v) throws Exception {
                return v % 2 != 0;
            }
        });
        // Each subscription to this Single takes the first currently-available odd number.
        final Single<Integer> getNextOdd = oddNumbers.first(0);
        TestSubscriber<List<Integer>> ts = evenNumbers.concatMap(new Function<Integer, Publisher<List<Integer>>>() {
            @Override
            public Publisher<List<Integer>> apply(Integer v) throws Exception {
                return Single.zip(
                        Single.just(v), getNextOdd,
                        new BiFunction<Integer, Integer, List<Integer>>() {
                            @Override
                            public List<Integer> apply(Integer a, Integer b) throws Exception {
                                return Arrays.asList( a, b );
                            }
                        }
                )
                .toFlowable();
            }
        })
        .takeWhile(new Predicate<List<Integer>>() {
            @Override
            public boolean test(List<Integer> v) throws Exception {
                return v.get(0) < 20;
            }
        })
        .test();
        // Expect consecutive (even, odd) pairs covering 0..19.
        ts
        .assertResult(
                Arrays.asList(0, 1),
                Arrays.asList(2, 3),
                Arrays.asList(4, 5),
                Arrays.asList(6, 7),
                Arrays.asList(8, 9),
                Arrays.asList(10, 11),
                Arrays.asList(12, 13),
                Arrays.asList(14, 15),
                Arrays.asList(16, 17),
                Arrays.asList(18, 19)
        );
    }
    @Test
    @SuppressWarnings("unchecked")
    public void splitCombineSubscriberChangeAfterOnNextFused() {
        // Fused variant of splitCombineSubscriberChangeAfterOnNext: no
        // doOnSubscribe/hide in the chain, so the range source can be fused
        // into publish(10) while subscribers change after onNext.
        Flowable<Integer> source = Flowable.range(0, 20)
        .publish(10)
        .refCount()
        ;
        Flowable<Integer> evenNumbers = source.filter(new Predicate<Integer>() {
            @Override
            public boolean test(Integer v) throws Exception {
                return v % 2 == 0;
            }
        });
        Flowable<Integer> oddNumbers = source.filter(new Predicate<Integer>() {
            @Override
            public boolean test(Integer v) throws Exception {
                return v % 2 != 0;
            }
        });
        // Each subscription takes the first odd number available at that moment.
        final Single<Integer> getNextOdd = oddNumbers.first(0);
        TestSubscriber<List<Integer>> ts = evenNumbers.concatMap(new Function<Integer, Publisher<List<Integer>>>() {
            @Override
            public Publisher<List<Integer>> apply(Integer v) throws Exception {
                return Single.zip(
                        Single.just(v), getNextOdd,
                        new BiFunction<Integer, Integer, List<Integer>>() {
                            @Override
                            public List<Integer> apply(Integer a, Integer b) throws Exception {
                                return Arrays.asList( a, b );
                            }
                        }
                )
                .toFlowable();
            }
        })
        .takeWhile(new Predicate<List<Integer>>() {
            @Override
            public boolean test(List<Integer> v) throws Exception {
                return v.get(0) < 20;
            }
        })
        .test();
        // Expect consecutive (even, odd) pairs covering 0..19.
        ts
        .assertResult(
                Arrays.asList(0, 1),
                Arrays.asList(2, 3),
                Arrays.asList(4, 5),
                Arrays.asList(6, 7),
                Arrays.asList(8, 9),
                Arrays.asList(10, 11),
                Arrays.asList(12, 13),
                Arrays.asList(14, 15),
                Arrays.asList(16, 17),
                Arrays.asList(18, 19)
        );
    }
@Test
public void altConnectCrash() {
try {
new FlowablePublishAlt<Integer>(Flowable.<Integer>empty(), 128)
.connect(new Consumer<Disposable>() {
@Override
public void accept(Disposable t) throws Exception {
throw new TestException();
}
});
fail("Should have thrown");
} catch (TestException expected) {
// expected
}
}
@Test
public void altConnectRace() {
for (int i = 0; i < TestHelper.RACE_LONG_LOOPS; i++) {
final ConnectableFlowable<Integer> cf =
new FlowablePublishAlt<Integer>(Flowable.<Integer>never(), 128);
Runnable r = new Runnable() {
@Override
public void run() {
cf.connect();
}
};
TestHelper.race(r, r);
}
}
    @Test
    public void fusedPollCrash() {
        // The mapper throws on every item, so each fused poll() of the
        // upstream crashes; publish().refCount() must relay this as onError.
        Flowable.range(1, 5)
        .map(new Function<Integer, Object>() {
            @Override
            public Object apply(Integer v) throws Exception {
                throw new TestException();
            }
        })
        // strips the fusion boundary marker so publish() attempts fusion across it
        .compose(TestHelper.flowableStripBoundary())
        .publish()
        .refCount()
        .test()
        .assertFailure(TestException.class);
    }
@Test
public void syncFusedNoRequest() {
Flowable.range(1, 5)
.publish(1)
.refCount()
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void normalBackpressuredPolls() {
Flowable.range(1, 5)
.hide()
.publish(1)
.refCount()
.test()
.assertResult(1, 2, 3, 4, 5);
}
@Test
public void emptyHidden() {
Flowable.empty()
.hide()
.publish(1)
.refCount()
.test()
.assertResult();
}
@Test
public void emptyFused() {
Flowable.empty()
.publish(1)
.refCount()
.test()
.assertResult();
}
    @Test
    public void overflowQueueRefCount() {
        // Deliberately protocol-violating source: it emits two items without
        // waiting for any request. With a prefetch of 1, the second onNext
        // overflows the internal queue; after the single requested item is
        // delivered, the overflow must surface as MissingBackpressureException.
        new Flowable<Integer>() {
            @Override
            protected void subscribeActual(Subscriber<? super Integer> s) {
                s.onSubscribe(new BooleanSubscription());
                s.onNext(1);
                s.onNext(2);
            }
        }
        .publish(1)
        .refCount()
        .test(0)
        .requestMore(1)
        .assertFailure(MissingBackpressureException.class, 1);
    }
    @Test
    public void doubleErrorRefCount() {
        // A second onError after the terminal one violates the Reactive Streams
        // protocol: it must not reach the subscriber but be routed to the
        // global RxJavaPlugins error handler instead.
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            new Flowable<Integer>() {
                @Override
                protected void subscribeActual(Subscriber<? super Integer> s) {
                    s.onSubscribe(new BooleanSubscription());
                    s.onError(new TestException("one"));
                    s.onError(new TestException("two"));
                }
            }
            .publish(1)
            .refCount()
            .test(0)
            .assertFailureAndMessage(TestException.class, "one");
            // Only the undeliverable second error goes to the plugin handler.
            TestHelper.assertUndeliverable(errors, 0, TestException.class, "two");
            assertEquals(1, errors.size());
        } finally {
            // Restore the default error handler for subsequent tests.
            RxJavaPlugins.reset();
        }
    }
}
| |
package org.apache.solr.handler.dataimport;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.io.FileUtils;
import org.apache.http.client.HttpClient;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.JettySolrRunner;
import org.apache.solr.client.solrj.impl.HttpClientUtil;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.common.SolrInputDocument;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* End-to-end test of SolrEntityProcessor. "Real" test using embedded Solr
*/
/**
 * End-to-end test of SolrEntityProcessor. "Real" test using embedded Solr:
 * a source Solr instance is run in Jetty and a destination core imports
 * documents from it through the DataImportHandler.
 */
public class TestSolrEntityProcessorEndToEnd extends AbstractDataImportHandlerTestCase {
  private static final Logger LOG = LoggerFactory.getLogger(TestSolrEntityProcessorEndToEnd.class);
  private static final String SOLR_CONFIG = "dataimport-solrconfig.xml";
  private static final String SOLR_SCHEMA = "dataimport-schema.xml";
  private static final String SOLR_HOME = "dih/solr";
  private static final String CONF_DIR = "dih" + File.separator + "solr" + File.separator + "collection1" + File.separator + "conf" + File.separator;
  // Fixture rows: one "database" row served by MockDataSource and one
  // document indexed into the source Solr instance.
  private static final List<Map<String,Object>> DB_DOCS = new ArrayList<Map<String,Object>>();
  private static final List<Map<String,Object>> SOLR_DOCS = new ArrayList<Map<String,Object>>();
  static {
    // dynamic fields in the destination schema
    Map<String,Object> dbDoc = new HashMap<String,Object>();
    dbDoc.put("dbid_s", "1");
    dbDoc.put("dbdesc_s", "DbDescription");
    DB_DOCS.add(dbDoc);
    Map<String,Object> solrDoc = new HashMap<String,Object>();
    solrDoc.put("id", "1");
    solrDoc.put("desc", "SolrDescription");
    SOLR_DOCS.add(solrDoc);
  }
  private SolrInstance instance = null;
  private JettySolrRunner jetty;
  /**
   * DIH config with a SolrEntityProcessor entity nested inside a mock SQL
   * entity: for each DB row, the matching document is fetched from the
   * source Solr instance listening on {@code port}.
   */
  private static String getDihConfigTagsInnerEntity(int port) {
    return "<dataConfig>\r\n"
        + " <dataSource type='MockDataSource' />\r\n"
        + " <document>\r\n"
        + " <entity name='db' query='select * from x'>\r\n"
        + " <field column='dbid_s' />\r\n"
        + " <field column='dbdesc_s' />\r\n"
        + " <entity name='se' processor='SolrEntityProcessor' query='id:${db.dbid_s}'\n"
        + " url='" + getSourceUrl(port) + "' fields='id,desc'>\r\n"
        + " <field column='id' />\r\n"
        + " <field column='desc' />\r\n" + " </entity>\r\n"
        + " </entity>\r\n" + " </document>\r\n" + "</dataConfig>\r\n";
  }
  /** DIH config with a single SolrEntityProcessor entity, configured via {@code options}. */
  private static String generateDIHConfig(String options, int port) {
    return "<dataConfig>\r\n" + " <document>\r\n"
        + " <entity name='se' processor='SolrEntityProcessor'" + " url='"
        + getSourceUrl(port) + "' " + options + " />\r\n" + " </document>\r\n"
        + "</dataConfig>\r\n";
  }
  /** Base URL of the source Solr instance on the given Jetty port. */
  private static String getSourceUrl(int port) {
    return "http://127.0.0.1:" + port + "/solr";
  }
  //TODO: fix this test to close its directories
  static String savedFactory;
  @BeforeClass
  public static void beforeClass() {
    // Save the previous value under the SAME key that is set below and
    // restored in afterClass(). The original code read "solr.DirectoryFactory"
    // (wrong case), so savedFactory was always null and any pre-existing
    // "solr.directoryFactory" setting was cleared instead of restored.
    savedFactory = System.getProperty("solr.directoryFactory");
    System.setProperty("solr.directoryFactory", "solr.StandardDirectoryFactory");
  }
  @AfterClass
  public static void afterClass() {
    if (savedFactory == null) {
      System.clearProperty("solr.directoryFactory");
    } else {
      System.setProperty("solr.directoryFactory", savedFactory);
    }
  }
  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
    // destination solr core
    initCore(SOLR_CONFIG, SOLR_SCHEMA, SOLR_HOME);
    // data source solr instance
    instance = new SolrInstance();
    instance.setUp();
    jetty = createJetty(instance);
  }
  @Override
  @After
  public void tearDown() throws Exception {
    // Best-effort teardown: a failure deleting the core must not prevent
    // Jetty and the source instance from being shut down.
    try {
      deleteCore();
    } catch (Exception e) {
      LOG.error("Error deleting core", e);
    }
    jetty.stop();
    instance.tearDown();
    super.tearDown();
  }
  /** Imports the single source document and verifies both stored fields arrive. */
  public void testFullImport() {
    assertQ(req("*:*"), "//result[@numFound='0']");
    try {
      addDocumentsToSolr(SOLR_DOCS);
      runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", jetty.getLocalPort()));
    } catch (Exception e) {
      LOG.error(e.getMessage(), e);
      fail(e.getMessage());
    }
    assertQ(req("*:*"), "//result[@numFound='1']");
    assertQ(req("id:1"), "//result/doc/str[@name='id'][.='1']",
        "//result/doc/arr[@name='desc'][.='SolrDescription']");
  }
  /** Verifies the fq parameter narrows the 30 source documents down to one. */
  public void testFullImportFqParam() {
    assertQ(req("*:*"), "//result[@numFound='0']");
    try {
      addDocumentsToSolr(generateSolrDocuments(30));
      Map<String,String> map = new HashMap<String,String>();
      map.put("rows", "50");
      runFullImport(generateDIHConfig("query='*:*' fq='desc:Description1*,desc:Description*2' rows='2'", jetty.getLocalPort()), map);
    } catch (Exception e) {
      LOG.error(e.getMessage(), e);
      fail(e.getMessage());
    }
    assertQ(req("*:*"), "//result[@numFound='1']");
    assertQ(req("id:12"), "//result[@numFound='1']", "//result/doc/arr[@name='desc'][.='Description12']");
  }
  /** Verifies that fl='id' excludes the desc field from the imported documents. */
  public void testFullImportFieldsParam() {
    assertQ(req("*:*"), "//result[@numFound='0']");
    try {
      addDocumentsToSolr(generateSolrDocuments(7));
      runFullImport(generateDIHConfig("query='*:*' fl='id' rows='2'", jetty.getLocalPort()));
    } catch (Exception e) {
      LOG.error(e.getMessage(), e);
      fail(e.getMessage());
    }
    assertQ(req("*:*"), "//result[@numFound='7']");
    assertQ(req("id:1"), "//result[@numFound='1']");
    try {
      assertQ(req("id:1"), "//result/doc/arr[@name='desc']");
      fail("The document has a field with name desc");
    } catch(Exception e) {
      // expected: the desc field must be absent because fl only requested id
    }
  }
  /**
   * Receive a row from SQL (Mock) and fetch a row from Solr
   */
  public void testFullImportInnerEntity() {
    assertQ(req("*:*"), "//result[@numFound='0']");
    try {
      MockDataSource.setIterator("select * from x", DB_DOCS.iterator());
      addDocumentsToSolr(SOLR_DOCS);
      runFullImport(getDihConfigTagsInnerEntity(jetty.getLocalPort()));
    } catch (Exception e) {
      LOG.error(e.getMessage(), e);
      fail(e.getMessage());
    } finally {
      MockDataSource.clearCache();
    }
    // The merged document carries both the DB columns and the Solr field.
    assertQ(req("*:*"), "//result[@numFound='1']");
    assertQ(req("id:1"), "//result/doc/str[@name='id'][.='1']",
        "//result/doc/str[@name='dbdesc_s'][.='DbDescription']",
        "//result/doc/str[@name='dbid_s'][.='1']",
        "//result/doc/arr[@name='desc'][.='SolrDescription']");
  }
  /** With onError='skip', an unreachable source URL must not abort the import. */
  public void testFullImportWrongSolrUrl() {
    try {
      jetty.stop();
    } catch (Exception e) {
      LOG.error("Error stopping jetty", e);
      fail(e.getMessage());
    }
    assertQ(req("*:*"), "//result[@numFound='0']");
    try {
      // NOTE(review): getLocalPort() is queried after jetty.stop(); presumably
      // the runner retains the last bound port — confirm against JettySolrRunner.
      runFullImport(generateDIHConfig("query='*:*' rows='2' fl='id,desc' onError='skip'", jetty.getLocalPort()));
    } catch (Exception e) {
      LOG.error(e.getMessage(), e);
      fail(e.getMessage());
    }
    assertQ(req("*:*"), "//result[@numFound='0']");
  }
  /** A query against a nonexistent field with onError='abort' must import nothing. */
  public void testFullImportBadConfig() {
    assertQ(req("*:*"), "//result[@numFound='0']");
    try {
      runFullImport(generateDIHConfig("query='bogus:3' rows='2' fl='id,desc' onError='abort'", jetty.getLocalPort()));
    } catch (Exception e) {
      LOG.error(e.getMessage(), e);
      fail(e.getMessage());
    }
    assertQ(req("*:*"), "//result[@numFound='0']");
  }
  /** Builds {@code num} documents with ids 1..num and desc "Description&lt;i&gt;". */
  private static List<Map<String,Object>> generateSolrDocuments(int num) {
    List<Map<String,Object>> docList = new ArrayList<Map<String,Object>>();
    for (int i = 1; i <= num; i++) {
      Map<String,Object> map = new HashMap<String,Object>();
      map.put("id", i);
      map.put("desc", "Description" + i);
      docList.add(map);
    }
    return docList;
  }
  /** Indexes the given documents into the source Solr instance and commits. */
  private void addDocumentsToSolr(List<Map<String,Object>> docs) throws SolrServerException, IOException {
    List<SolrInputDocument> sidl = new ArrayList<SolrInputDocument>();
    for (Map<String,Object> doc : docs) {
      SolrInputDocument sd = new SolrInputDocument();
      for (Entry<String,Object> entry : doc.entrySet()) {
        sd.addField(entry.getKey(), entry.getValue());
      }
      sidl.add(sd);
    }
    HttpClient client = HttpClientUtil.createClient(null);
    URL url = new URL(getSourceUrl(jetty.getLocalPort()));
    HttpSolrServer solrServer = new HttpSolrServer(url.toExternalForm(), client);
    solrServer.add(sidl);
    solrServer.commit(true, true);
  }
  /** Throwaway on-disk layout (home/data/conf) for the source Solr instance. */
  private static class SolrInstance {
    File homeDir;
    // Declared here so the static nested class is self-contained; setUp()
    // assigns it and getDataDir() reads it.
    File dataDir;
    File confDir;
    public String getHomeDir() {
      return homeDir.toString();
    }
    public String getSchemaFile() {
      return CONF_DIR + "dataimport-schema.xml";
    }
    public String getDataDir() {
      return dataDir.toString();
    }
    public String getSolrConfigFile() {
      return CONF_DIR + "dataimport-solrconfig.xml";
    }
    public void setUp() throws Exception {
      // Unique directory per run to avoid collisions between test executions.
      File home = new File(TEMP_DIR, getClass().getName() + "-"
          + System.currentTimeMillis());
      homeDir = new File(home + "inst");
      dataDir = new File(homeDir + "/collection1", "data");
      confDir = new File(homeDir + "/collection1", "conf");
      homeDir.mkdirs();
      dataDir.mkdirs();
      confDir.mkdirs();
      File f = new File(confDir, "solrconfig.xml");
      FileUtils.copyFile(getFile(getSolrConfigFile()), f);
      f = new File(confDir, "schema.xml");
      FileUtils.copyFile(getFile(getSchemaFile()), f);
      f = new File(confDir, "data-config.xml");
      FileUtils.copyFile(getFile(CONF_DIR + "dataconfig-contentstream.xml"), f);
    }
    public void tearDown() throws Exception {
      recurseDelete(homeDir);
    }
  }
  /** Starts a Jetty-hosted Solr on an ephemeral port over the given instance dirs. */
  private JettySolrRunner createJetty(SolrInstance instance) throws Exception {
    System.setProperty("solr.data.dir", instance.getDataDir());
    JettySolrRunner jetty = new JettySolrRunner(instance.getHomeDir(), "/solr", 0);
    jetty.start();
    return jetty;
  }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.securityhub.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
* <p>
* Contains information about a version 2 API in Amazon API Gateway.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/securityhub-2018-10-26/AwsApiGatewayV2ApiDetails"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AwsApiGatewayV2ApiDetails implements Serializable, Cloneable, StructuredPojo {
    // Member fields; each is exposed below through a setter, a getter and a fluent "with" accessor.
    /**
     * <p>
     * The URI of the API.
     * </p>
     * <p>
     * Uses the format <code> <i><api-id></i>.execute-api.<i><region></i>.amazonaws.com</code>
     * </p>
     * <p>
     * The stage name is typically appended to the URI to form a complete path to a deployed API stage.
     * </p>
     */
    private String apiEndpoint;
    /**
     * <p>
     * The identifier of the API.
     * </p>
     */
    private String apiId;
    /**
     * <p>
     * An API key selection expression. Supported only for WebSocket APIs.
     * </p>
     */
    private String apiKeySelectionExpression;
    /**
     * <p>
     * Indicates when the API was created.
     * </p>
     * <p>
     * Uses the <code>date-time</code> format specified in <a href="https://tools.ietf.org/html/rfc3339#section-5.6">RFC
     * 3339 section 5.6, Internet Date/Time Format</a>. The value cannot contain spaces. For example,
     * <code>2020-03-22T13:22:13.933Z</code>.
     * </p>
     */
    private String createdDate;
    /**
     * <p>
     * A description of the API.
     * </p>
     */
    private String description;
    /**
     * <p>
     * The version identifier for the API.
     * </p>
     */
    private String version;
    /**
     * <p>
     * The name of the API.
     * </p>
     */
    private String name;
    /**
     * <p>
     * The API protocol for the API.
     * </p>
     * <p>
     * Valid values: <code>WEBSOCKET</code> | <code>HTTP</code>
     * </p>
     */
    private String protocolType;
    /**
     * <p>
     * The route selection expression for the API.
     * </p>
     * <p>
     * For HTTP APIs, must be <code>${request.method} ${request.path}</code>. This is the default value for HTTP APIs.
     * </p>
     * <p>
     * For WebSocket APIs, there is no default value.
     * </p>
     */
    private String routeSelectionExpression;
    /**
     * <p>
     * A cross-origin resource sharing (CORS) configuration. Supported only for HTTP APIs.
     * </p>
     */
    private AwsCorsConfiguration corsConfiguration;
    // Generated accessors for apiEndpoint, apiId and apiKeySelectionExpression;
    // each "with" variant delegates to its setter and returns this for chaining.
    /**
     * <p>
     * The URI of the API.
     * </p>
     * <p>
     * Uses the format <code> <i><api-id></i>.execute-api.<i><region></i>.amazonaws.com</code>
     * </p>
     * <p>
     * The stage name is typically appended to the URI to form a complete path to a deployed API stage.
     * </p>
     * 
     * @param apiEndpoint
     *        The URI of the API. </p>
     *        <p>
     *        Uses the format <code> <i><api-id></i>.execute-api.<i><region></i>.amazonaws.com</code>
     *        </p>
     *        <p>
     *        The stage name is typically appended to the URI to form a complete path to a deployed API stage.
     */
    public void setApiEndpoint(String apiEndpoint) {
        this.apiEndpoint = apiEndpoint;
    }
    /**
     * <p>
     * The URI of the API.
     * </p>
     * <p>
     * Uses the format <code> <i><api-id></i>.execute-api.<i><region></i>.amazonaws.com</code>
     * </p>
     * <p>
     * The stage name is typically appended to the URI to form a complete path to a deployed API stage.
     * </p>
     * 
     * @return The URI of the API. </p>
     *         <p>
     *         Uses the format <code> <i><api-id></i>.execute-api.<i><region></i>.amazonaws.com</code>
     *         </p>
     *         <p>
     *         The stage name is typically appended to the URI to form a complete path to a deployed API stage.
     */
    public String getApiEndpoint() {
        return this.apiEndpoint;
    }
    /**
     * <p>
     * The URI of the API.
     * </p>
     * <p>
     * Uses the format <code> <i><api-id></i>.execute-api.<i><region></i>.amazonaws.com</code>
     * </p>
     * <p>
     * The stage name is typically appended to the URI to form a complete path to a deployed API stage.
     * </p>
     * 
     * @param apiEndpoint
     *        The URI of the API. </p>
     *        <p>
     *        Uses the format <code> <i><api-id></i>.execute-api.<i><region></i>.amazonaws.com</code>
     *        </p>
     *        <p>
     *        The stage name is typically appended to the URI to form a complete path to a deployed API stage.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AwsApiGatewayV2ApiDetails withApiEndpoint(String apiEndpoint) {
        setApiEndpoint(apiEndpoint);
        return this;
    }
    /**
     * <p>
     * The identifier of the API.
     * </p>
     * 
     * @param apiId
     *        The identifier of the API.
     */
    public void setApiId(String apiId) {
        this.apiId = apiId;
    }
    /**
     * <p>
     * The identifier of the API.
     * </p>
     * 
     * @return The identifier of the API.
     */
    public String getApiId() {
        return this.apiId;
    }
    /**
     * <p>
     * The identifier of the API.
     * </p>
     * 
     * @param apiId
     *        The identifier of the API.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AwsApiGatewayV2ApiDetails withApiId(String apiId) {
        setApiId(apiId);
        return this;
    }
    /**
     * <p>
     * An API key selection expression. Supported only for WebSocket APIs.
     * </p>
     * 
     * @param apiKeySelectionExpression
     *        An API key selection expression. Supported only for WebSocket APIs.
     */
    public void setApiKeySelectionExpression(String apiKeySelectionExpression) {
        this.apiKeySelectionExpression = apiKeySelectionExpression;
    }
    /**
     * <p>
     * An API key selection expression. Supported only for WebSocket APIs.
     * </p>
     * 
     * @return An API key selection expression. Supported only for WebSocket APIs.
     */
    public String getApiKeySelectionExpression() {
        return this.apiKeySelectionExpression;
    }
    /**
     * <p>
     * An API key selection expression. Supported only for WebSocket APIs.
     * </p>
     * 
     * @param apiKeySelectionExpression
     *        An API key selection expression. Supported only for WebSocket APIs.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AwsApiGatewayV2ApiDetails withApiKeySelectionExpression(String apiKeySelectionExpression) {
        setApiKeySelectionExpression(apiKeySelectionExpression);
        return this;
    }
    // Generated accessors for createdDate, description, version and name;
    // each "with" variant delegates to its setter and returns this for chaining.
    /**
     * <p>
     * Indicates when the API was created.
     * </p>
     * <p>
     * Uses the <code>date-time</code> format specified in <a href="https://tools.ietf.org/html/rfc3339#section-5.6">RFC
     * 3339 section 5.6, Internet Date/Time Format</a>. The value cannot contain spaces. For example,
     * <code>2020-03-22T13:22:13.933Z</code>.
     * </p>
     * 
     * @param createdDate
     *        Indicates when the API was created.</p>
     *        <p>
     *        Uses the <code>date-time</code> format specified in <a
     *        href="https://tools.ietf.org/html/rfc3339#section-5.6">RFC 3339 section 5.6, Internet Date/Time
     *        Format</a>. The value cannot contain spaces. For example, <code>2020-03-22T13:22:13.933Z</code>.
     */
    public void setCreatedDate(String createdDate) {
        this.createdDate = createdDate;
    }
    /**
     * <p>
     * Indicates when the API was created.
     * </p>
     * <p>
     * Uses the <code>date-time</code> format specified in <a href="https://tools.ietf.org/html/rfc3339#section-5.6">RFC
     * 3339 section 5.6, Internet Date/Time Format</a>. The value cannot contain spaces. For example,
     * <code>2020-03-22T13:22:13.933Z</code>.
     * </p>
     * 
     * @return Indicates when the API was created.</p>
     *         <p>
     *         Uses the <code>date-time</code> format specified in <a
     *         href="https://tools.ietf.org/html/rfc3339#section-5.6">RFC 3339 section 5.6, Internet Date/Time
     *         Format</a>. The value cannot contain spaces. For example, <code>2020-03-22T13:22:13.933Z</code>.
     */
    public String getCreatedDate() {
        return this.createdDate;
    }
    /**
     * <p>
     * Indicates when the API was created.
     * </p>
     * <p>
     * Uses the <code>date-time</code> format specified in <a href="https://tools.ietf.org/html/rfc3339#section-5.6">RFC
     * 3339 section 5.6, Internet Date/Time Format</a>. The value cannot contain spaces. For example,
     * <code>2020-03-22T13:22:13.933Z</code>.
     * </p>
     * 
     * @param createdDate
     *        Indicates when the API was created.</p>
     *        <p>
     *        Uses the <code>date-time</code> format specified in <a
     *        href="https://tools.ietf.org/html/rfc3339#section-5.6">RFC 3339 section 5.6, Internet Date/Time
     *        Format</a>. The value cannot contain spaces. For example, <code>2020-03-22T13:22:13.933Z</code>.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AwsApiGatewayV2ApiDetails withCreatedDate(String createdDate) {
        setCreatedDate(createdDate);
        return this;
    }
    /**
     * <p>
     * A description of the API.
     * </p>
     * 
     * @param description
     *        A description of the API.
     */
    public void setDescription(String description) {
        this.description = description;
    }
    /**
     * <p>
     * A description of the API.
     * </p>
     * 
     * @return A description of the API.
     */
    public String getDescription() {
        return this.description;
    }
    /**
     * <p>
     * A description of the API.
     * </p>
     * 
     * @param description
     *        A description of the API.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AwsApiGatewayV2ApiDetails withDescription(String description) {
        setDescription(description);
        return this;
    }
    /**
     * <p>
     * The version identifier for the API.
     * </p>
     * 
     * @param version
     *        The version identifier for the API.
     */
    public void setVersion(String version) {
        this.version = version;
    }
    /**
     * <p>
     * The version identifier for the API.
     * </p>
     * 
     * @return The version identifier for the API.
     */
    public String getVersion() {
        return this.version;
    }
    /**
     * <p>
     * The version identifier for the API.
     * </p>
     * 
     * @param version
     *        The version identifier for the API.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AwsApiGatewayV2ApiDetails withVersion(String version) {
        setVersion(version);
        return this;
    }
    /**
     * <p>
     * The name of the API.
     * </p>
     * 
     * @param name
     *        The name of the API.
     */
    public void setName(String name) {
        this.name = name;
    }
    /**
     * <p>
     * The name of the API.
     * </p>
     * 
     * @return The name of the API.
     */
    public String getName() {
        return this.name;
    }
    /**
     * <p>
     * The name of the API.
     * </p>
     * 
     * @param name
     *        The name of the API.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AwsApiGatewayV2ApiDetails withName(String name) {
        setName(name);
        return this;
    }
/**
* <p>
* The API protocol for the API.
* </p>
* <p>
* Valid values: <code>WEBSOCKET</code> | <code>HTTP</code>
* </p>
*
* @param protocolType
* The API protocol for the API.</p>
* <p>
* Valid values: <code>WEBSOCKET</code> | <code>HTTP</code>
*/
public void setProtocolType(String protocolType) {
this.protocolType = protocolType;
}
/**
* <p>
* The API protocol for the API.
* </p>
* <p>
* Valid values: <code>WEBSOCKET</code> | <code>HTTP</code>
* </p>
*
* @return The API protocol for the API.</p>
* <p>
* Valid values: <code>WEBSOCKET</code> | <code>HTTP</code>
*/
public String getProtocolType() {
return this.protocolType;
}
/**
* <p>
* The API protocol for the API.
* </p>
* <p>
* Valid values: <code>WEBSOCKET</code> | <code>HTTP</code>
* </p>
*
* @param protocolType
* The API protocol for the API.</p>
* <p>
* Valid values: <code>WEBSOCKET</code> | <code>HTTP</code>
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AwsApiGatewayV2ApiDetails withProtocolType(String protocolType) {
setProtocolType(protocolType);
return this;
}
/**
* <p>
* The route selection expression for the API.
* </p>
* <p>
* For HTTP APIs, must be <code>${request.method} ${request.path}</code>. This is the default value for HTTP APIs.
* </p>
* <p>
* For WebSocket APIs, there is no default value.
* </p>
*
* @param routeSelectionExpression
* The route selection expression for the API.</p>
* <p>
* For HTTP APIs, must be <code>${request.method} ${request.path}</code>. This is the default value for HTTP
* APIs.
* </p>
* <p>
* For WebSocket APIs, there is no default value.
*/
public void setRouteSelectionExpression(String routeSelectionExpression) {
this.routeSelectionExpression = routeSelectionExpression;
}
/**
* <p>
* The route selection expression for the API.
* </p>
* <p>
* For HTTP APIs, must be <code>${request.method} ${request.path}</code>. This is the default value for HTTP APIs.
* </p>
* <p>
* For WebSocket APIs, there is no default value.
* </p>
*
* @return The route selection expression for the API.</p>
* <p>
* For HTTP APIs, must be <code>${request.method} ${request.path}</code>. This is the default value for HTTP
* APIs.
* </p>
* <p>
* For WebSocket APIs, there is no default value.
*/
public String getRouteSelectionExpression() {
return this.routeSelectionExpression;
}
/**
* <p>
* The route selection expression for the API.
* </p>
* <p>
* For HTTP APIs, must be <code>${request.method} ${request.path}</code>. This is the default value for HTTP APIs.
* </p>
* <p>
* For WebSocket APIs, there is no default value.
* </p>
*
* @param routeSelectionExpression
* The route selection expression for the API.</p>
* <p>
* For HTTP APIs, must be <code>${request.method} ${request.path}</code>. This is the default value for HTTP
* APIs.
* </p>
* <p>
* For WebSocket APIs, there is no default value.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AwsApiGatewayV2ApiDetails withRouteSelectionExpression(String routeSelectionExpression) {
setRouteSelectionExpression(routeSelectionExpression);
return this;
}
/**
* <p>
* A cross-origin resource sharing (CORS) configuration. Supported only for HTTP APIs.
* </p>
*
* @param corsConfiguration
* A cross-origin resource sharing (CORS) configuration. Supported only for HTTP APIs.
*/
public void setCorsConfiguration(AwsCorsConfiguration corsConfiguration) {
this.corsConfiguration = corsConfiguration;
}
/**
* <p>
* A cross-origin resource sharing (CORS) configuration. Supported only for HTTP APIs.
* </p>
*
* @return A cross-origin resource sharing (CORS) configuration. Supported only for HTTP APIs.
*/
public AwsCorsConfiguration getCorsConfiguration() {
return this.corsConfiguration;
}
/**
* <p>
* A cross-origin resource sharing (CORS) configuration. Supported only for HTTP APIs.
* </p>
*
* @param corsConfiguration
* A cross-origin resource sharing (CORS) configuration. Supported only for HTTP APIs.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AwsApiGatewayV2ApiDetails withCorsConfiguration(AwsCorsConfiguration corsConfiguration) {
    // Fluent variant: delegates to the setter and returns this for chaining.
    setCorsConfiguration(corsConfiguration);
    return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    // Generated style: each non-null property is rendered as "Name: value,".
    // Note the comma is appended unconditionally for every property except
    // the last declared one (CorsConfiguration), so a trailing comma can
    // appear when later properties are null. Preserved as-is for
    // compatibility with other generated model classes.
    if (getApiEndpoint() != null)
        sb.append("ApiEndpoint: ").append(getApiEndpoint()).append(",");
    if (getApiId() != null)
        sb.append("ApiId: ").append(getApiId()).append(",");
    if (getApiKeySelectionExpression() != null)
        sb.append("ApiKeySelectionExpression: ").append(getApiKeySelectionExpression()).append(",");
    if (getCreatedDate() != null)
        sb.append("CreatedDate: ").append(getCreatedDate()).append(",");
    if (getDescription() != null)
        sb.append("Description: ").append(getDescription()).append(",");
    if (getVersion() != null)
        sb.append("Version: ").append(getVersion()).append(",");
    if (getName() != null)
        sb.append("Name: ").append(getName()).append(",");
    if (getProtocolType() != null)
        sb.append("ProtocolType: ").append(getProtocolType()).append(",");
    if (getRouteSelectionExpression() != null)
        sb.append("RouteSelectionExpression: ").append(getRouteSelectionExpression()).append(",");
    if (getCorsConfiguration() != null)
        sb.append("CorsConfiguration: ").append(getCorsConfiguration());
    sb.append("}");
    return sb.toString();
}
@Override
public boolean equals(Object obj) {
    // Reflexive fast path.
    if (this == obj)
        return true;
    // instanceof is false for null, so this also rejects null arguments.
    if (!(obj instanceof AwsApiGatewayV2ApiDetails))
        return false;
    AwsApiGatewayV2ApiDetails that = (AwsApiGatewayV2ApiDetails) obj;
    // Two instances are equal when every property matches null-safely.
    return matches(that.getApiEndpoint(), this.getApiEndpoint())
            && matches(that.getApiId(), this.getApiId())
            && matches(that.getApiKeySelectionExpression(), this.getApiKeySelectionExpression())
            && matches(that.getCreatedDate(), this.getCreatedDate())
            && matches(that.getDescription(), this.getDescription())
            && matches(that.getVersion(), this.getVersion())
            && matches(that.getName(), this.getName())
            && matches(that.getProtocolType(), this.getProtocolType())
            && matches(that.getRouteSelectionExpression(), this.getRouteSelectionExpression())
            && matches(that.getCorsConfiguration(), this.getCorsConfiguration());
}

/**
 * Null-safe equality: true when both references are null, or both are
 * non-null and equal. Equivalent to the original per-field
 * {@code null ^ null} / {@code equals} check pairs.
 */
private static boolean matches(Object a, Object b) {
    return (a == null) ? (b == null) : a.equals(b);
}
@Override
public int hashCode() {
    final int prime = 31;
    int hashCode = 1;
    // Standard 31-multiplier accumulation over the same properties that
    // equals() compares; a null property contributes 0, keeping hashCode
    // consistent with equals.
    hashCode = prime * hashCode + ((getApiEndpoint() == null) ? 0 : getApiEndpoint().hashCode());
    hashCode = prime * hashCode + ((getApiId() == null) ? 0 : getApiId().hashCode());
    hashCode = prime * hashCode + ((getApiKeySelectionExpression() == null) ? 0 : getApiKeySelectionExpression().hashCode());
    hashCode = prime * hashCode + ((getCreatedDate() == null) ? 0 : getCreatedDate().hashCode());
    hashCode = prime * hashCode + ((getDescription() == null) ? 0 : getDescription().hashCode());
    hashCode = prime * hashCode + ((getVersion() == null) ? 0 : getVersion().hashCode());
    hashCode = prime * hashCode + ((getName() == null) ? 0 : getName().hashCode());
    hashCode = prime * hashCode + ((getProtocolType() == null) ? 0 : getProtocolType().hashCode());
    hashCode = prime * hashCode + ((getRouteSelectionExpression() == null) ? 0 : getRouteSelectionExpression().hashCode());
    hashCode = prime * hashCode + ((getCorsConfiguration() == null) ? 0 : getCorsConfiguration().hashCode());
    return hashCode;
}
@Override
public AwsApiGatewayV2ApiDetails clone() {
    try {
        // Shallow copy is sufficient for this immutable-by-convention model.
        return (AwsApiGatewayV2ApiDetails) super.clone();
    } catch (CloneNotSupportedException e) {
        // Unreachable in practice: the class hierarchy implements Cloneable.
        throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
    }
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
    // Delegates wire-format serialization to the generated marshaller
    // singleton for this model type. SDK-internal; not for client use.
    com.amazonaws.services.securityhub.model.transform.AwsApiGatewayV2ApiDetailsMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
/*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.dataflow.shell.command.common;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import javax.naming.OperationNotSupportedException;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.dataflow.rest.client.TaskOperations;
import org.springframework.cloud.dataflow.rest.resource.CurrentTaskExecutionsResource;
import org.springframework.cloud.dataflow.rest.resource.TaskAppStatusResource;
import org.springframework.cloud.dataflow.rest.resource.TaskDefinitionResource;
import org.springframework.cloud.dataflow.rest.resource.TaskExecutionResource;
import org.springframework.cloud.dataflow.rest.util.DeploymentPropertiesUtils;
import org.springframework.cloud.dataflow.shell.command.support.OpsType;
import org.springframework.cloud.dataflow.shell.command.support.RoleType;
import org.springframework.cloud.dataflow.shell.config.DataFlowShell;
import org.springframework.hateoas.PagedResources;
import org.springframework.shell.core.CommandMarker;
import org.springframework.shell.core.annotation.CliAvailabilityIndicator;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.springframework.shell.table.BeanListTableModel;
import org.springframework.shell.table.Table;
import org.springframework.shell.table.TableBuilder;
import org.springframework.shell.table.TableModelBuilder;
import org.springframework.stereotype.Component;
import org.springframework.util.StringUtils;
/**
* Task commands.
*
* @author Glenn Renfro
* @author Michael Minella
* @author Gunnar Hillert
* @author Ilayaperumal Gopinathan
* @author Janne Valkealahti
* @author David Turanski
*/
@Component
public class TaskCommands implements CommandMarker {

    // Shell command names.
    private static final String LIST = "task list";

    private static final String CREATE = "task create";

    private static final String LAUNCH = "task launch";

    private static final String DESTROY = "task destroy";

    private static final String VALIDATE = "task validate";

    private static final String TASK_EXECUTION_STATUS = "task execution status";

    private static final String TASK_EXECUTION_CURRENT = "task execution current";

    private static final String TASK_EXECUTION_CLEANUP = "task execution cleanup";

    // Names of the options accepted by the launch command.
    private static final String PROPERTIES_OPTION = "properties";

    private static final String PROPERTIES_FILE_OPTION = "propertiesFile";

    private static final String ARGUMENTS_OPTION = "arguments";

    private static final String EXECUTION_LIST = "task execution list";

    @Autowired
    private DataFlowShell dataFlowShell;

    /**
     * Read-only task commands are available to users holding the VIEW role.
     */
    @CliAvailabilityIndicator({ LIST, TASK_EXECUTION_STATUS, EXECUTION_LIST })
    public boolean availableWithViewRole() {
        return dataFlowShell.hasAccess(RoleType.VIEW, OpsType.TASK);
    }

    /**
     * Mutating task commands are available to users holding the CREATE role.
     */
    @CliAvailabilityIndicator({ CREATE, LAUNCH, TASK_EXECUTION_CLEANUP, DESTROY, VALIDATE })
    public boolean availableWithCreateRole() {
        return dataFlowShell.hasAccess(RoleType.CREATE, OpsType.TASK);
    }

    /**
     * Lists all task definitions as a table of name, DSL text and status.
     */
    @CliCommand(value = LIST, help = "List created tasks")
    public Table list() {
        final PagedResources<TaskDefinitionResource> tasks = taskOperations().list();
        LinkedHashMap<String, Object> headers = new LinkedHashMap<>();
        headers.put("name", "Task Name");
        headers.put("dslText", "Task Definition");
        headers.put("status", "Task Status");
        final TableBuilder builder = new TableBuilder(new BeanListTableModel<>(tasks, headers));
        return DataFlowTables.applyStyle(builder).build();
    }

    /**
     * Validates the apps contained in a task definition. Returns a list of
     * renderable items: a table with the definition, a summary line stating
     * whether the task is valid, and a per-app validation status table.
     *
     * @param name the task definition name to validate
     */
    @CliCommand(value = VALIDATE, help = "Validate apps contained in task definitions")
    public List<Object> validate(@CliOption(key = { "", "name" }, help = "the task definition name", mandatory = true) String name) throws OperationNotSupportedException {
        final TaskAppStatusResource task = taskOperations().validateTaskDefinition(name);
        List<Object> result = new ArrayList<>();
        TableModelBuilder<Object> modelBuilder = new TableModelBuilder<>();
        modelBuilder.addRow().addValue("Task Name").addValue("Task Definition");
        modelBuilder.addRow().addValue(task.getAppName())
                .addValue(task.getDsl());
        TableBuilder builder = DataFlowTables.applyStyle(new TableBuilder(modelBuilder.build()));
        result.add(builder.build());

        modelBuilder = new TableModelBuilder<>();
        modelBuilder.addRow().addValue("App Name").addValue("Validation Status");
        // The task is valid only when no app reports the status "invalid".
        boolean isValidTask = true;
        for (Map.Entry<String, String> entry : task.getAppStatuses().entrySet()) {
            modelBuilder.addRow().addValue(entry.getKey())
                    .addValue(entry.getValue());
            if (entry.getValue().equals("invalid")) {
                isValidTask = false;
            }
        }
        builder = DataFlowTables.applyStyle(new TableBuilder(modelBuilder.build()));
        if (isValidTask) {
            result.add(String.format("\n%s is a valid task.", task.getAppName()));
        }
        else {
            result.add(String.format("\n%s is an invalid task.", task.getAppName()));
        }
        result.add(builder.build());
        return result;
    }

    /**
     * Creates a new task definition from a DSL expression.
     *
     * @param name the definition name
     * @param dsl the task DSL text
     */
    @CliCommand(value = CREATE, help = "Create a new task definition")
    public String create(
            @CliOption(mandatory = true, key = { "", "name" }, help = "the name to give to the task") String name,
            @CliOption(mandatory = true, key = { "definition" }, help = "a task definition, using the DSL (e.g. "
                    + "\"timestamp --format=YYYY\")", optionContext = "disable-string-converter completion-task") String dsl) {
        this.taskOperations().create(name, dsl);
        return String.format("Created new task '%s'", name);
    }

    /**
     * Launches a previously created task. At most one of --properties and
     * --propertiesFile may be supplied; the arguments string, when present,
     * is forwarded as a single command-line argument.
     */
    @CliCommand(value = LAUNCH, help = "Launch a previously created task")
    public String launch(
            @CliOption(key = { "", "name" }, help = "the name of the task to launch", mandatory = true,
                    optionContext = "existing-task disable-string-converter") String name,
            @CliOption(key = {
                    PROPERTIES_OPTION }, help = "the properties for this launch", mandatory = false) String properties,
            @CliOption(key = {
                    PROPERTIES_FILE_OPTION }, help = "the properties for this launch (as a File)", mandatory = false) File propertiesFile,
            @CliOption(key = {
                    ARGUMENTS_OPTION }, help = "the commandline arguments for this launch", mandatory = false) String arguments)
            throws IOException {
        // Rejects the case where both inline properties and a properties file
        // are given; 'which' selects the variant that was supplied.
        int which = Assertions.atMostOneOf(PROPERTIES_OPTION, properties, PROPERTIES_FILE_OPTION, propertiesFile);
        Map<String, String> propertiesToUse = DeploymentPropertiesUtils.parseDeploymentProperties(properties,
                propertiesFile, which);
        List<String> argumentsToUse = new ArrayList<>();
        if (StringUtils.hasText(arguments)) {
            argumentsToUse.add(arguments);
        }
        DeploymentPropertiesUtils.validateDeploymentProperties(propertiesToUse);
        taskOperations().launch(name, propertiesToUse, argumentsToUse);
        return String.format("Launched task '%s'", name);
    }

    /**
     * Destroys an existing task definition.
     */
    @CliCommand(value = DESTROY, help = "Destroy an existing task")
    public String destroy(
            @CliOption(key = { "", "name" }, help = "the name of the task to destroy", mandatory = true,
                    optionContext = "existing-task disable-string-converter") String name) {
        taskOperations().destroy(name);
        return String.format("Destroyed task '%s'", name);
    }

    /**
     * Lists task executions, optionally filtered by task name. When no name
     * is given, all executions are listed.
     */
    @CliCommand(value = EXECUTION_LIST, help = "List created task executions filtered by taskName")
    public Table executionListByName(@CliOption(key = "name", help = "the task name to be used as a filter",
            optionContext = "existing-task disable-string-converter") String name) {
        final PagedResources<TaskExecutionResource> tasks;
        if (name == null) {
            tasks = taskOperations().executionList();
        }
        else {
            tasks = taskOperations().executionListByTaskName(name);
        }
        LinkedHashMap<String, Object> headers = new LinkedHashMap<>();
        headers.put("taskName", "Task Name");
        headers.put("executionId", "ID");
        headers.put("startTime", "Start Time");
        headers.put("endTime", "End Time");
        headers.put("exitCode", "Exit Code");
        final TableBuilder builder = new TableBuilder(new BeanListTableModel<>(tasks, headers));
        return DataFlowTables.applyStyle(builder).build();
    }

    /**
     * Displays the details of a single task execution as a key/value table.
     *
     * @param id the task execution id
     */
    @CliCommand(value = TASK_EXECUTION_STATUS, help = "Display the details of a specific task execution")
    public Table display(@CliOption(key = { "", "id" }, help = "the task execution id", mandatory = true) long id) {
        TaskExecutionResource taskExecutionResource = taskOperations().taskExecutionStatus(id);
        TableModelBuilder<Object> modelBuilder = new TableModelBuilder<>();
        modelBuilder.addRow().addValue("Key ").addValue("Value ");
        modelBuilder.addRow().addValue("Id ").addValue(taskExecutionResource.getExecutionId());
        modelBuilder.addRow().addValue("Name ").addValue(taskExecutionResource.getTaskName());
        modelBuilder.addRow().addValue("Arguments ").addValue(taskExecutionResource.getArguments());
        modelBuilder.addRow().addValue("Job Execution Ids ").addValue(taskExecutionResource.getJobExecutionIds());
        modelBuilder.addRow().addValue("Start Time ").addValue(taskExecutionResource.getStartTime());
        modelBuilder.addRow().addValue("End Time ").addValue(taskExecutionResource.getEndTime());
        modelBuilder.addRow().addValue("Exit Code ").addValue(taskExecutionResource.getExitCode());
        modelBuilder.addRow().addValue("Exit Message ").addValue(taskExecutionResource.getExitMessage());
        modelBuilder.addRow().addValue("Error Message ").addValue(taskExecutionResource.getErrorMessage());
        modelBuilder.addRow().addValue("External Execution Id ")
                .addValue(taskExecutionResource.getExternalExecutionId());
        TableBuilder builder = new TableBuilder(modelBuilder.build());
        DataFlowTables.applyStyle(builder);
        return builder.build();
    }

    /**
     * Displays the number of currently running task executions alongside the
     * configured maximum of concurrent executions.
     */
    // Fixed typo in help text: "executin" -> "executing".
    @CliCommand(value = TASK_EXECUTION_CURRENT,
            help = "Display count of currently executing tasks and related information")
    public Table currentExecutions() {
        CurrentTaskExecutionsResource taskExecutionsResource = taskOperations().currentTaskExecutions();
        TableModelBuilder<Object> modelBuilder = new TableModelBuilder<>();
        modelBuilder.addRow().addValue("Current Running Tasks").addValue(taskExecutionsResource.getRunningExecutionCount());
        modelBuilder.addRow().addValue("Maximum Concurrent Executions").addValue(taskExecutionsResource
                .getMaximumTaskExecutions());
        TableBuilder builder = new TableBuilder(modelBuilder.build());
        DataFlowTables.applyStyle(builder);
        return builder.build();
    }

    /**
     * Requests cleanup of platform-specific resources tied to one execution.
     *
     * @param id the task execution id
     */
    @CliCommand(value = TASK_EXECUTION_CLEANUP, help = "Clean up any platform specific resources linked to a task "
            + "execution")
    public String cleanup(@CliOption(key = { "", "id" }, help = "the task execution id", mandatory = true) long id) {
        taskOperations().cleanup(id);
        return String.format("Request to clean up resources for task execution %s has been submitted", id);
    }

    /** Convenience accessor for the task operations of the active server. */
    private TaskOperations taskOperations() {
        return dataFlowShell.getDataFlowOperations().taskOperations();
    }
}
| |
/*
* Copyright 2004 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Preconditions;
import com.google.debugging.sourcemap.FilePosition;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.nio.charset.Charset;
import com.granule.java6emul.ArrayDeque;
import java.util.ArrayList;
import com.granule.java6emul.Deque;
import java.util.List;
/**
* CodePrinter prints out js code in either pretty format or compact format.
*
* @see CodeGenerator
*/
class CodePrinter {
// The number of characters after which we insert a line break in the code
static final int DEFAULT_LINE_LENGTH_THRESHOLD = 500;
// There are two separate CodeConsumers, one for pretty-printing and
// another for compact printing.
// There are two implementations because the CompactCodePrinter
// potentially has a very different implementation to the pretty
// version.
private abstract static class MappedCodePrinter extends CodeConsumer {
  // LIFO stack of mappings whose end position has not yet been recorded;
  // null when source-map generation is disabled.
  final private Deque<Mapping> mappings;
  // Every mapping ever opened, in creation order; null when disabled.
  final private List<Mapping> allMappings;
  final private boolean createSrcMap;
  final private SourceMap.DetailLevel sourceMapDetailLevel;
  // Accumulated generated source text.
  protected final StringBuilder code = new StringBuilder(1024);
  // Line length after which subclasses force a break; Integer.MAX_VALUE
  // means "never force".
  protected final int lineLengthThreshold;
  // Length of the line currently being emitted.
  protected int lineLength = 0;
  // Zero-based index of the line currently being emitted.
  protected int lineIndex = 0;

  /**
   * @param lineLengthThreshold length after which a break is forced;
   *     values <= 0 disable forced breaking entirely.
   * @param createSrcMap whether to record source-map position data.
   * @param sourceMapDetailLevel filter controlling which nodes get mapped.
   */
  MappedCodePrinter(
      int lineLengthThreshold,
      boolean createSrcMap,
      SourceMap.DetailLevel sourceMapDetailLevel) {
    Preconditions.checkState(sourceMapDetailLevel != null);
    this.lineLengthThreshold = lineLengthThreshold <= 0 ? Integer.MAX_VALUE :
      lineLengthThreshold;
    this.createSrcMap = createSrcMap;
    this.sourceMapDetailLevel = sourceMapDetailLevel;
    this.mappings = createSrcMap ? new ArrayDeque<Mapping>() : null;
    this.allMappings = createSrcMap ? new ArrayList<Mapping>() : null;
  }

  /**
   * Maintains a mapping from a given node to the position
   * in the source code at which its generated form was
   * placed. This position is relative only to the current
   * run of the CodeConsumer and will be normalized
   * later on by the SourceMap.
   *
   * @see SourceMap
   */
  private static class Mapping {
    Node node;
    FilePosition start;
    // Filled in by endSourceMapping(); may still be null if the node's
    // mapping was never closed.
    FilePosition end;
  }

  /**
   * Starts the source mapping for the given
   * node at the current position.
   */
  @Override
  void startSourceMapping(Node node) {
    Preconditions.checkState(sourceMapDetailLevel != null);
    Preconditions.checkState(node != null);
    // Only nodes that carry a source name and a valid line number, and that
    // pass the detail-level filter, are recorded.
    if (createSrcMap
        && node.getProp(Node.SOURCENAME_PROP) != null
        && node.getLineno() > 0
        && sourceMapDetailLevel.apply(node)) {
      int line = getCurrentLineIndex();
      int index = getCurrentCharIndex();
      Preconditions.checkState(line >= 0);
      Mapping mapping = new Mapping();
      mapping.node = node;
      mapping.start = new FilePosition(line, index);
      // Pushed so that the matching endSourceMapping() can close it;
      // also remembered in allMappings for generateSourceMap().
      mappings.push(mapping);
      allMappings.add(mapping);
    }
  }

  /**
   * Finishes the source mapping for the given
   * node at the current position.
   */
  @Override
  void endSourceMapping(Node node) {
    // Only close the mapping if this node is the one on top of the stack;
    // nodes filtered out by startSourceMapping() were never pushed.
    if (createSrcMap && !mappings.isEmpty() && mappings.peek().node == node) {
      Mapping mapping = mappings.pop();
      int line = getCurrentLineIndex();
      int index = getCurrentCharIndex();
      Preconditions.checkState(line >= 0);
      mapping.end = new FilePosition(line, index);
    }
  }

  /**
   * Generates the source map from the given code consumer,
   * appending the information it saved to the SourceMap
   * object given.
   */
  void generateSourceMap(SourceMap map){
    if (createSrcMap) {
      for (Mapping mapping : allMappings) {
        map.addMapping(mapping.node, mapping.start, mapping.end);
      }
    }
  }

  /**
   * Reports to the code consumer that the given line has been cut at the
   * given position (i.e. a \n has been inserted there). All mappings in
   * the source maps after that position will be renormalized as needed.
   */
  void reportLineCut(int lineIndex, int charIndex) {
    if (createSrcMap) {
      for (Mapping mapping : allMappings) {
        mapping.start = convertPosition(mapping.start, lineIndex, charIndex);

        if (mapping.end != null) {
          mapping.end = convertPosition(mapping.end, lineIndex, charIndex);
        }
      }
    }
  }

  /**
   * Converts the given position by normalizing it against the insertion
   * of a newline at the given line and character position.
   *
   * @param position The existing position before the newline was inserted.
   * @param lineIndex The index of the line at which the newline was inserted.
   * @param characterPosition The position on the line at which the newline
   *     was inserted.
   *
   * @return The normalized position.
   */
  private FilePosition convertPosition(FilePosition position, int lineIndex,
      int characterPosition) {
    int originalLine = position.getLine();
    int originalChar = position.getColumn();
    if (originalLine == lineIndex && originalChar >= characterPosition) {
      // If the position falls on the line itself, then normalize it
      // if it falls at or after the place the newline was inserted.
      return new FilePosition(
          originalLine + 1, originalChar - characterPosition);
    } else {
      return position;
    }
  }

  // Returns the full generated source accumulated so far.
  public String getCode() {
    return code.toString();
  }

  @Override
  char getLastChar() {
    // '\0' acts as the sentinel for "nothing emitted yet".
    return (code.length() > 0) ? code.charAt(code.length() - 1) : '\0';
  }

  protected final int getCurrentCharIndex() {
    return lineLength;
  }

  protected final int getCurrentLineIndex() {
    return lineIndex;
  }
}
static class PrettyCodePrinter
    extends MappedCodePrinter {
  // String emitted once per indentation level at the start of each line.
  // (Original comment incorrectly described the line-break threshold.)
  static final String INDENT = "  ";

  // Current block nesting depth; append() emits 'indent' copies of INDENT
  // at the beginning of every new line.
  private int indent = 0;

  /**
   * @param lineLengthThreshold The length of a line after which we force
   * a newline when possible.
   * @param createSourceMap Whether to generate source map data.
   * @param sourceMapDetailLevel A filter to control which nodes get mapped
   * into the source map.
   */
  private PrettyCodePrinter(
      int lineLengthThreshold,
      boolean createSourceMap,
      SourceMap.DetailLevel sourceMapDetailLevel) {
    super(lineLengthThreshold, createSourceMap, sourceMapDetailLevel);
  }

  /**
   * Appends a string to the code, keeping track of the current line length.
   */
  @Override
  void append(String str) {
    // For pretty printing: indent at the beginning of the line
    if (lineLength == 0) {
      for (int i = 0; i < indent; i++) {
        code.append(INDENT);
        lineLength += INDENT.length();
      }
    }
    code.append(str);
    lineLength += str.length();
  }

  /**
   * Adds a newline to the code, resetting the line length and handling
   * indenting for pretty printing.
   */
  @Override
  void startNewLine() {
    // No-op on an empty line, so repeated calls never emit blank lines.
    if (lineLength > 0) {
      code.append('\n');
      lineIndex++;
      lineLength = 0;
    }
  }

  @Override
  void maybeLineBreak() {
    maybeCutLine();
  }

  /**
   * This may start a new line if the current line is longer than the line
   * length threshold.
   */
  @Override
  void maybeCutLine() {
    if (lineLength > lineLengthThreshold) {
      startNewLine();
    }
  }

  @Override
  void endLine() {
    startNewLine();
  }

  @Override
  void appendBlockStart() {
    append(" {");
    indent++;
  }

  @Override
  void appendBlockEnd() {
    // Order matters: finish the current line, dedent, then emit the brace
    // so that "}" lines up with the block's opening statement.
    endLine();
    indent--;
    append("}");
  }

  @Override
  void listSeparator() {
    add(", ");
    maybeLineBreak();
  }

  @Override
  void endFunction(boolean statementContext) {
    super.endFunction(statementContext);
    // A function statement (as opposed to an expression) ends its line.
    if (statementContext) {
      startNewLine();
    }
  }

  @Override
  void beginCaseBody() {
    super.beginCaseBody();
    indent++;
    endLine();
  }

  @Override
  void endCaseBody() {
    super.endCaseBody();
    indent--;
    endStatement();
  }

  @Override
  void appendOp(String op, boolean binOp) {
    // Binary operators are surrounded by single spaces; the leading space
    // is skipped when the previous character already is one.
    if (binOp) {
      if (getLastChar() != ' ') {
        append(" ");
      }
      append(op);
      append(" ");
    } else {
      append(op);
    }
  }

  /**
   * If the body of a for loop or the then clause of an if statement has
   * a single statement, should it be wrapped in a block?
   * {@inheritDoc}
   */
  @Override
  boolean shouldPreserveExtraBlocks() {
    // When pretty-printing, always place the statement in its own block
    // so it is printed on a separate line. This allows breakpoints to be
    // placed on the statement.
    return true;
  }

  /**
   * @return The TRY node for the specified CATCH node.
   */
  private Node getTryForCatch(Node n) {
    // AST shape: TRY -> BLOCK(catch container) -> CATCH, so two hops up.
    return n.getParent().getParent();
  }

  /**
   * @return Whether the a line break should be added after the specified
   * BLOCK.
   */
  @Override
  boolean breakAfterBlockFor(Node n, boolean isStatementContext) {
    Preconditions.checkState(n.getType() == Token.BLOCK);
    Node parent = n.getParent();
    if (parent != null) {
      int type = parent.getType();
      switch (type) {
        case Token.DO:
          // Don't break before 'while' in DO-WHILE statements.
          return false;
        case Token.FUNCTION:
          // FUNCTIONs are handled separately, don't break here.
          return false;
        case Token.TRY:
          // Don't break before catch
          return n != parent.getFirstChild();
        case Token.CATCH:
          // Don't break before finally
          return !NodeUtil.hasFinally(getTryForCatch(parent));
        case Token.IF:
          // Don't break before else
          return n == parent.getLastChild();
      }
    }
    return true;
  }

  @Override
  void endFile() {
    maybeEndStatement();
  }
}
static class CompactCodePrinter
    extends MappedCodePrinter {
  // The CompactCodePrinter tries to emit just enough newlines to stop there
  // being lines longer than the threshold.  Since the output is going to be
  // gzipped, it makes sense to try to make the newlines appear in similar
  // contexts so that GZIP can encode them for 'free'.
  //
  // This version tries to break the lines at 'preferred' places, which are
  // between the top-level forms.  This works because top level forms tend to
  // be more uniform than arbitrary legal contexts.  Better compression would
  // probably require explicit modelling of the gzip algorithm.

  private final boolean lineBreak;
  // Offset into 'code' at which the current line starts.
  private int lineStartPosition = 0;
  // Offset into 'code' of the best place to cut the current line.
  private int preferredBreakPosition = 0;

  /**
   * @param lineBreak break the lines a bit more aggressively
   * @param lineLengthThreshold The length of a line after which we force
   * a newline when possible.
   * @param createSrcMap Whether to gather source position
   *     mapping information when printing.
   * @param sourceMapDetailLevel A filter to control which nodes get mapped into
   *     the source map.
   */
  private CompactCodePrinter(boolean lineBreak, int lineLengthThreshold,
      boolean createSrcMap, SourceMap.DetailLevel sourceMapDetailLevel) {
    super(lineLengthThreshold, createSrcMap, sourceMapDetailLevel);
    this.lineBreak = lineBreak;
  }

  /**
   * Appends a string to the code, keeping track of the current line length.
   */
  @Override
  void append(String str) {
    code.append(str);
    lineLength += str.length();
  }

  /**
   * Adds a newline to the code, resetting the line length.
   */
  @Override
  void startNewLine() {
    if (lineLength > 0) {
      code.append('\n');
      lineLength = 0;
      lineIndex++;
      lineStartPosition = code.length();
    }
  }

  @Override
  void maybeLineBreak() {
    if (lineBreak) {
      // NOTE(review): sawFunction is maintained by the CodeConsumer base
      // class — presumably set after a function was emitted; confirm there.
      if (sawFunction) {
        startNewLine();
        sawFunction = false;
      }
    }

    // Since we are at a legal line break, can we upgrade the
    // preferred break position?  We prefer to break after a
    // semicolon rather than before it.
    int len = code.length();
    if (preferredBreakPosition == len - 1) {
      char ch = code.charAt(len - 1);
      if (ch == ';') {
        preferredBreakPosition = len;
      }
    }
    maybeCutLine();
  }

  /**
   * This may start a new line if the current line is longer than the line
   * length threshold.
   */
  @Override
  void maybeCutLine() {
    if (lineLength > lineLengthThreshold) {
      // Use the preferred position provided it will break the line.
      if (preferredBreakPosition > lineStartPosition &&
          preferredBreakPosition < lineStartPosition + lineLength) {
        int position = preferredBreakPosition;
        code.insert(position, '\n');
        // Let the source map renormalize positions past the inserted '\n',
        // then account for the split in the local line bookkeeping.
        reportLineCut(lineIndex, position - lineStartPosition);
        lineIndex++;
        lineLength -= (position - lineStartPosition);
        lineStartPosition = position + 1;
      } else {
        startNewLine();
      }
    }
  }

  @Override
  void notePreferredLineBreak() {
    preferredBreakPosition = code.length();
  }
}
static class Builder {
  private final Node root;
  private boolean prettyPrint = false;
  private boolean lineBreak = false;
  private boolean outputTypes = false;
  private int lineLengthThreshold = DEFAULT_LINE_LENGTH_THRESHOLD;
  private SourceMap sourceMap = null;
  private SourceMap.DetailLevel sourceMapDetailLevel =
      SourceMap.DetailLevel.ALL;
  // Charset used when escaping characters in the output; null means
  // plain ASCII.
  private Charset outputCharset = null;
  private boolean tagAsStrict;

  /**
   * Creates a builder that will print the tree rooted at the given node.
   *
   * @param node The root node.
   */
  Builder(Node node) {
    root = node;
  }

  /**
   * Enables or disables pretty printing of the generated code.
   *
   * @param prettyPrint true to pretty-print, false for compact output.
   */
  Builder setPrettyPrint(boolean prettyPrint) {
    this.prettyPrint = prettyPrint;
    return this;
  }

  /**
   * Enables or disables automatic line breaking.
   *
   * @param lineBreak true to break lines automatically.
   */
  Builder setLineBreak(boolean lineBreak) {
    this.lineBreak = lineBreak;
    return this;
  }

  /**
   * Enables or disables closure-style type annotations in the output.
   *
   * @param outputTypes true to emit closure-style type annotations.
   */
  Builder setOutputTypes(boolean outputTypes) {
    this.outputTypes = outputTypes;
    return this;
  }

  /**
   * Sets the line length after which lines are broken, when line
   * breaking is enabled.
   *
   * @param threshold The line length threshold.
   */
  Builder setLineLengthThreshold(int threshold) {
    this.lineLengthThreshold = threshold;
    return this;
  }

  /**
   * Sets the source map that will receive position metadata about the
   * generated code.
   *
   * @param sourceMap The source map.
   */
  Builder setSourceMap(SourceMap sourceMap) {
    this.sourceMap = sourceMap;
    return this;
  }

  /**
   * Sets the filter deciding which nodes are recorded in the source map.
   *
   * @param level The detail level to use.
   */
  Builder setSourceMapDetailLevel(SourceMap.DetailLevel level) {
    Preconditions.checkState(level != null);
    this.sourceMapDetailLevel = level;
    return this;
  }

  /**
   * Sets the charset used to decide which characters must be escaped in
   * the output.
   */
  Builder setOutputCharset(Charset outCharset) {
    this.outputCharset = outCharset;
    return this;
  }

  /**
   * Sets whether the output is tagged as ECMASCRIPT 5 Strict.
   */
  Builder setTagAsStrict(boolean tagAsStrict) {
    this.tagAsStrict = tagAsStrict;
    return this;
  }

  /**
   * Generates the source code and returns it.
   */
  String build() {
    if (root == null) {
      throw new IllegalStateException(
          "Cannot build without root node being specified");
    }

    // Typed output takes precedence over pretty printing; compact is
    // the default when neither is requested.
    Format outputFormat;
    if (outputTypes) {
      outputFormat = Format.TYPED;
    } else if (prettyPrint) {
      outputFormat = Format.PRETTY;
    } else {
      outputFormat = Format.COMPACT;
    }

    return toSource(root, outputFormat, lineBreak, lineLengthThreshold,
        sourceMap, sourceMapDetailLevel, outputCharset, tagAsStrict);
  }
}
// Output formats supported by CodePrinter.
enum Format {
  COMPACT,   // minimal whitespace (default)
  PRETTY,    // indented, human-readable output
  TYPED      // pretty output with closure-style type annotations
}
/**
 * Converts a tree to js code, optionally recording source-map data.
 *
 * @param root the root of the AST to print
 * @param outputFormat compact, pretty, or typed output
 * @param lineBreak whether to break lines more aggressively (compact only)
 * @param lineLengthThreshold line length after which a break is forced
 * @param sourceMap receives position metadata; null disables mapping
 * @param sourceMapDetailLevel filter for which nodes get mapped
 * @param outputCharset charset used for escaping; null means ASCII
 * @param tagAsStrict whether to tag the output as ECMASCRIPT 5 Strict
 * @return the generated source text
 */
private static String toSource(Node root, Format outputFormat,
    boolean lineBreak, int lineLengthThreshold,
    SourceMap sourceMap,
    SourceMap.DetailLevel sourceMapDetailLevel,
    Charset outputCharset,
    boolean tagAsStrict) {
  Preconditions.checkState(sourceMapDetailLevel != null);

  boolean createSourceMap = (sourceMap != null);

  // Pick the consumer matching the requested format; TYPED and PRETTY
  // both use the pretty printer.
  MappedCodePrinter printer;
  if (outputFormat == Format.COMPACT) {
    printer = new CompactCodePrinter(
        lineBreak, lineLengthThreshold, createSourceMap,
        sourceMapDetailLevel);
  } else {
    printer = new PrettyCodePrinter(
        lineLengthThreshold, createSourceMap, sourceMapDetailLevel);
  }

  // TYPED output additionally emits closure-style type annotations.
  CodeGenerator generator;
  if (outputFormat == Format.TYPED) {
    generator = new TypedCodeGenerator(printer, outputCharset);
  } else {
    generator = new CodeGenerator(printer, outputCharset);
  }

  if (tagAsStrict) {
    generator.tagAsStrict();
  }

  generator.add(root);
  printer.endFile();

  // Snapshot the code before flushing mappings into the source map.
  String code = printer.getCode();
  if (createSourceMap) {
    printer.generateSourceMap(sourceMap);
  }
  return code;
}
}
| |
/*
* Copyright 2014 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dashbuilder.dataset;
import org.dashbuilder.DataSetCore;
import org.dashbuilder.dataset.filter.FilterFactory;
import org.dashbuilder.dataset.group.DateIntervalType;
import org.junit.Before;
import org.junit.Test;
import static org.dashbuilder.dataset.ExpenseReportsData.*;
import static org.dashbuilder.dataset.Assertions.*;
import static org.dashbuilder.dataset.group.DateIntervalType.QUARTER;
import static org.fest.assertions.api.Assertions.*;
import static org.dashbuilder.dataset.group.AggregateFunctionType.*;
/**
 * Tests for data set lookups that combine grouping, interval selection and
 * nested (chained) groups over the expense reports sample data set.
 */
public class DataSetNestedGroupTest {

    // UUID under which the sample data set is registered for lookups.
    public static final String EXPENSE_REPORTS = "expense_reports";

    DataSetManager dataSetManager = DataSetCore.get().getDataSetManager();
    DataSetFormatter dataSetFormatter = new DataSetFormatter();

    @Before
    public void setUp() throws Exception {
        // Register the shared expense reports data set before every test.
        DataSet dataSet = ExpenseReportsData.INSTANCE.toDataSet();
        dataSet.setUUID(EXPENSE_REPORTS);
        dataSetManager.registerDataSet(dataSet);
    }

    /*
    @Test
    public void testMultipleYearSplit() throws Exception {
        DataSet result = dataSetManager.lookupDataSet(
            DataSetFactory.newDataSetLookupBuilder()
            .dataset(EXPENSE_REPORTS)
            .group(COLUMN_DATE).fixed(DateIntervalType.MONTH)
            .column(COLUMN_AMOUNT, SUM, "total")
            .column(COLUMN_DATE, COLUMN_AMOUNT, SUM, "total in {date}", DateIntervalType.YEAR)
            .buildLookup());
        printDataSet(result);
    }
    */

    // A filter plus two chained group/select steps must narrow the result to
    // a single aggregated row.
    @Test
    public void testGroupSelectionFilter() throws Exception {
        DataSet result = dataSetManager.lookupDataSet(
            DataSetFactory.newDataSetLookupBuilder()
            .dataset(EXPENSE_REPORTS)
            .filter(COLUMN_AMOUNT, FilterFactory.greaterThan(500))
            .group(COLUMN_DEPARTMENT).select("Engineering")
            .group(COLUMN_CITY).select("Westford")
            .buildLookup());
        //printDataSet(result);
        assertThat(result.getRowCount()).isEqualTo(1);
        assertDataSetValue(result, 0, 0, "26.00");
    }

    // Selecting multiple intervals in the first group and then grouping by a
    // second column must aggregate over the union of the selected intervals.
    @Test
    public void testNestedGroupFromMultipleSelection() throws Exception {
        DataSet result = dataSetManager.lookupDataSet(
            DataSetFactory.newDataSetLookupBuilder()
            .dataset(EXPENSE_REPORTS)
            .group(COLUMN_DEPARTMENT, "Department").select("Services", "Engineering")
            .group(COLUMN_CITY, "City")
            .column(COLUMN_CITY)
            .column(COUNT, "Occurrences")
            .column(COLUMN_AMOUNT, MIN, "min")
            .column(COLUMN_AMOUNT, MAX, "max")
            .column(COLUMN_AMOUNT, AVERAGE, "average")
            .column(COLUMN_AMOUNT, SUM, "total")
            .sort(COLUMN_CITY, "asc")
            .buildLookup());
        //printDataSet(result);
        assertDataSetValues(result, dataSetFormatter, new String[][] {
            {"Barcelona", "6.00", "120.35", "1,100.10", "485.52", "2,913.14"},
            {"Brno", "4.00", "159.01", "800.24", "364.86", "1,459.45"},
            {"London", "3.00", "333.45", "868.45", "535.40", "1,606.20"},
            {"Madrid", "2.00", "800.80", "911.11", "855.96", "1,711.91"},
            {"Raleigh", "4.00", "209.55", "401.40", "284.38", "1,137.53"},
            {"Westford", "5.00", "1.10", "600.34", "265.29", "1,326.43"}
        }, 0);
    }

    // A nested group without a preceding interval selection is expected to be
    // ignored: only the first-level group shows up in the result.
    @Test
    public void testNestedGroupRequiresSelection() throws Exception {
        DataSet result = dataSetManager.lookupDataSet(
            DataSetFactory.newDataSetLookupBuilder()
            .dataset(EXPENSE_REPORTS)
            .group(COLUMN_DEPARTMENT, "Department")
            .column(COLUMN_DEPARTMENT)
            .group(COLUMN_CITY, COLUMN_CITY)
            .sort(COLUMN_DEPARTMENT, "asc")
            .buildLookup());
        //printDataSet(result);
        assertDataSetValues(result, dataSetFormatter, new String[][] {
            {"Engineering"},
            {"Management"},
            {"Sales"},
            {"Services"},
            {"Support"}
        }, 0);
    }

    // Selections that match no rows combined with a fixed MONTH interval must
    // still produce all 12 month rows, each with a zero total.
    @Test
    public void testNoResultsSelection() throws Exception {
        DataSet result = dataSetManager.lookupDataSet(
            DataSetFactory.newDataSetLookupBuilder()
            .dataset(EXPENSE_REPORTS)
            .group(COLUMN_EMPLOYEE).select("Jerri Preble")
            .group(COLUMN_DEPARTMENT).select("Engineering")
            .group(COLUMN_CITY).select("Westford")
            .group(COLUMN_DATE).fixed(DateIntervalType.MONTH, true)
            .column(COLUMN_DATE)
            .column(COLUMN_AMOUNT, SUM, "total")
            .buildLookup());
        String intervalType = result.getColumnByIndex(0).getIntervalType();
        assertThat(intervalType).isNotEmpty();
        assertThat(DateIntervalType.getByName(intervalType)).isEqualTo(DateIntervalType.MONTH);
        //printDataSet(result);
        assertDataSetValues(result, dataSetFormatter, new String[][]{
            {"1", "0.00"},
            {"2", "0.00"},
            {"3", "0.00"},
            {"4", "0.00"},
            {"5", "0.00"},
            {"6", "0.00"},
            {"7", "0.00"},
            {"8", "0.00"},
            {"9", "0.00"},
            {"10", "0.00"},
            {"11", "0.00"},
            {"12", "0.00"}
        }, 0);
    }

    // Three chained group/select levels ending in a fixed MONTH interval.
    @Test
    public void testThreeNestedLevels() throws Exception {
        DataSet result = dataSetManager.lookupDataSet(
            DataSetFactory.newDataSetLookupBuilder()
            .dataset(EXPENSE_REPORTS)
            .group(COLUMN_DEPARTMENT).select("Services", "Engineering")
            .group(COLUMN_CITY).select("Madrid", "Barcelona")
            .group(COLUMN_DATE).fixed(DateIntervalType.MONTH, true)
            .column(COLUMN_DATE)
            .column(COLUMN_AMOUNT, SUM, "total")
            .buildLookup());
        //printDataSet(result);
        assertDataSetValues(result, dataSetFormatter, new String[][] {
            {"1", "0.00"},
            {"2", "0.00"},
            {"3", "0.00"},
            {"4", "0.00"},
            {"5", "0.00"},
            {"6", "911.11"},
            {"7", "800.80"},
            {"8", "152.25"},
            {"9", "300.00"},
            {"10", "340.34"},
            {"11", "900.10"},
            {"12", "1,220.45"}
        }, 0);
    }

    // Selecting the first quarter of a fixed QUARTER grouping returns the raw
    // rows belonging to that quarter (14 in the sample data).
    @Test
    public void testGroupByQuarter() throws Exception {
        DataSet result = dataSetManager.lookupDataSet(
            DataSetFactory.newDataSetLookupBuilder()
            .dataset(EXPENSE_REPORTS)
            .group(COLUMN_DATE).fixed(QUARTER, true).select("1")
            .buildLookup());
        //printDataSet(result);
        assertThat(result.getRowCount()).isEqualTo(14);
    }

    /* @Test
    public void testGroupJoin() throws Exception {
        DataSet result = dataSetManager.lookupDataSet(
            DataSetFactory.newDataSetLookupBuilder()
            .dataset(EXPENSE_REPORTS)
            .group(COLUMN_DEPARTMENT)
            .group(COLUMN_CITY).select("Barcelona", "Brno").join()
            .group(COLUMN_DATE, "month").fixed(DateIntervalType.MONTH, true).join()
            .column(COLUMN_DEPARTMENT)
            .column(COLUMN_CITY)
            .column("month")
            .column(COLUMN_AMOUNT, SUM, "total")
            .buildLookup());
        //printDataSet(result);
    }
    */

    // Debug helper: dumps a data set to stdout in a JSON-ish layout.
    private void printDataSet(DataSet dataSet) {
        System.out.print(dataSetFormatter.formatDataSet(dataSet, "{", "}", ",\n", "\"", "\"", ", ") + "\n\n");
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.demo.jaxrs.client;
import java.util.List;
import java.util.Map;
import javax.ws.rs.core.Response.Status;
import org.apache.servicecomb.common.rest.codec.RestObjectMapperFactory;
import org.apache.servicecomb.demo.CategorizedTestCase;
import org.apache.servicecomb.demo.TestMgr;
import org.apache.servicecomb.demo.multiErrorCode.MultiRequest;
import org.apache.servicecomb.demo.multiErrorCode.MultiResponse200;
import org.apache.servicecomb.demo.multiErrorCode.MultiResponse400;
import org.apache.servicecomb.demo.multiErrorCode.MultiResponse500;
import org.apache.servicecomb.foundation.common.net.URIEndpointObject;
import org.apache.servicecomb.provider.springmvc.reference.RestTemplateBuilder;
import org.apache.servicecomb.registry.DiscoveryManager;
import org.apache.servicecomb.registry.RegistrationManager;
import org.apache.servicecomb.registry.api.registry.Microservice;
import org.apache.servicecomb.registry.api.registry.MicroserviceInstance;
import org.apache.servicecomb.registry.definition.DefinitionConst;
import org.apache.servicecomb.swagger.invocation.exception.InvocationException;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Component;
import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.HttpServerErrorException;
import org.springframework.web.client.RestTemplate;
import io.vertx.core.json.Json;
import io.vertx.core.json.JsonObject;
@Component
public class MultiErrorCodeServiceClient implements CategorizedTestCase {
    // Logical service address resolved through the ServiceComb registry.
    private static final String SERVER = "cse://jaxrs";

    // Direct http://host:port/ URL of one provider instance; filled lazily by
    // prepareServerDirectURL() for tests that must bypass the registry.
    private static String serverDirectURL;

    private static RestTemplate template = RestTemplateBuilder.create();

    @Override
    public void testAllTransport() throws Exception {
        testErrorCode();
        testErrorCodeWithHeader();
        testErrorCodeWithHeaderJAXRS();
        testErrorCodeWithHeaderJAXRSUsingRowType();
        testNoClientErrorCode();
    }

    @Override
    public void testRestTransport() throws Exception {
        // Wrong-type payloads must hit the endpoint directly (raw REST),
        // skipping client-side serialization.
        prepareServerDirectURL();
        testErrorCodeWrongType();
    }

    @Override
    public void testHighwayTransport() throws Exception {
    }

    // Looks up the first discovered instance of the "jaxrs" service and
    // derives a direct URL from its first endpoint.
    private static void prepareServerDirectURL() {
        Microservice microservice = RegistrationManager.INSTANCE.getMicroservice();
        MicroserviceInstance microserviceInstance = (MicroserviceInstance) DiscoveryManager.INSTANCE
            .getAppManager()
            .getOrCreateMicroserviceVersionRule(microservice.getAppId(), "jaxrs", DefinitionConst.VERSION_RULE_ALL)
            .getVersionedCache()
            .mapData()
            .values()
            .stream()
            .findFirst()
            .get();
        URIEndpointObject edgeAddress = new URIEndpointObject(microserviceInstance.getEndpoints().get(0));
        serverDirectURL = String.format("http://%s:%d/", edgeAddress.getHostOrIp(), edgeAddress.getPort());
    }

    // Verifies server-side handling of malformed/mistyped request bodies sent
    // over the direct URL.
    private static void testErrorCodeWrongType() {
        HttpHeaders headers = new HttpHeaders();
        headers.setContentType(MediaType.APPLICATION_JSON);
        // Intentionally malformed JSON (missing closing brace) with a
        // non-numeric code: server must answer 400.
        String body = "{\"message\":\"hello\",\"code\":\"wrongType\"";
        HttpEntity<String> entity = new HttpEntity<>(body, headers);
        ResponseEntity<MultiResponse200> result;
        try {
            template
                .postForEntity(serverDirectURL + "/MultiErrorCodeService/errorCode", entity, MultiResponse200.class);
        } catch (HttpClientErrorException e) {
            TestMgr.check(e.getRawStatusCode(), 400);
            TestMgr.check(e.getMessage(),
                "400 Bad Request: [{\"message\":\"Parameter is not valid for operation "
                    + "[jaxrs.MultiErrorCodeService.errorCode]. Parameter is [request]. "
                    + "Processor is [body].\"}]");
        }

        // A null body must produce a server-side 500; check(590, 200) is a
        // deliberate failure marker for the "no exception thrown" path.
        entity = new HttpEntity<>(null, headers);
        try {
            result = template
                .postForEntity(serverDirectURL + "/MultiErrorCodeService/errorCode", entity, MultiResponse200.class);
            TestMgr.check(590, 200);
        } catch (HttpServerErrorException e) {
            TestMgr.check(e.getRawStatusCode(), 500);
        }

        // not recommend
        body = "{\"message\":\"hello\",\"code\":\"200\"}";
        entity = new HttpEntity<>(body, headers);
        result = template
            .postForEntity(serverDirectURL + "/MultiErrorCodeService/errorCode", entity, MultiResponse200.class);
        TestMgr.check(result.getStatusCodeValue(), 200);
        TestMgr.check(result.getBody().getMessage(), "success result");
    }

    // Exercises the errorCode operation for the 200/400/500 paths; 4xx/5xx
    // responses surface as InvocationException carrying the typed error data.
    private static void testErrorCode() {
        MultiRequest request = new MultiRequest();
        request.setCode(200);
        ResponseEntity<MultiResponse200> result = template
            .postForEntity(SERVER + "/MultiErrorCodeService/errorCode", request, MultiResponse200.class);
        TestMgr.check(result.getStatusCodeValue(), 200);
        TestMgr.check(result.getBody().getMessage(), "success result");

        request.setCode(400);
        MultiResponse400 t400 = null;
        try {
            template.postForEntity(SERVER + "/MultiErrorCodeService/errorCode", request, MultiResponse400.class);
        } catch (InvocationException e) {
            t400 = (MultiResponse400) e.getErrorData();
        }
        TestMgr.check(t400.getCode(), 400);
        TestMgr.check(t400.getMessage(), "bad request");

        request.setCode(500);
        MultiResponse500 t500 = null;
        try {
            // NOTE(review): passes MultiResponse400.class although this is the
            // 500 path; the error data is taken from the InvocationException,
            // so the declared response type appears unused here — confirm
            // intentional.
            template.postForEntity(SERVER + "/MultiErrorCodeService/errorCode", request, MultiResponse400.class);
        } catch (InvocationException e) {
            t500 = (MultiResponse500) e.getErrorData();
        }
        TestMgr.check(t500.getCode(), 500);
        TestMgr.check(t500.getMessage(), "internal error");
    }

    // Same as testErrorCode, additionally checking the x-code response header
    // and the HTTP status carried by the InvocationException.
    private static void testErrorCodeWithHeader() {
        MultiRequest request = new MultiRequest();
        request.setCode(200);
        ResponseEntity<MultiResponse200> result = template
            .postForEntity(SERVER + "/MultiErrorCodeService/errorCodeWithHeader", request, MultiResponse200.class);
        TestMgr.check(result.getStatusCodeValue(), 200);
        TestMgr.check(result.getBody().getMessage(), "success result");
        TestMgr.check(result.getBody().getCode(), 200);
        TestMgr.check(result.getHeaders().getFirst("x-code"), 200);

        request.setCode(400);
        MultiResponse400 t400 = null;
        try {
            template.postForEntity(SERVER + "/MultiErrorCodeService/errorCodeWithHeader", request, MultiResponse400.class);
        } catch (InvocationException e) {
            t400 = (MultiResponse400) e.getErrorData();
            TestMgr.check(e.getStatus().getStatusCode(), Status.BAD_REQUEST.getStatusCode());
        }
        TestMgr.check(t400.getCode(), 400);
        TestMgr.check(t400.getMessage(), "bad request");

        request.setCode(500);
        MultiResponse500 t500 = null;
        try {
            template.postForEntity(SERVER + "/MultiErrorCodeService/errorCodeWithHeader", request, MultiResponse400.class);
        } catch (InvocationException e) {
            t500 = (MultiResponse500) e.getErrorData();
            TestMgr.check(e.getStatus().getStatusCode(), Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
        TestMgr.check(t500.getCode(), 500);
        TestMgr.check(t500.getMessage(), "internal error");
    }

    // JAX-RS variant of the header test; the request carries the message the
    // server is expected to echo back.
    private static void testErrorCodeWithHeaderJAXRS() {
        MultiRequest request = new MultiRequest();
        request.setCode(200);
        request.setMessage("success result");
        ResponseEntity<MultiResponse200> result = template
            .postForEntity(SERVER + "/MultiErrorCodeService/errorCodeWithHeaderJAXRS", request, MultiResponse200.class);
        TestMgr.check(result.getStatusCodeValue(), 200);
        TestMgr.check(result.getBody().getMessage(), "success result");
        TestMgr.check(result.getBody().getCode(), 200);
        TestMgr.check(result.getHeaders().getFirst("x-code"), 200);

        request.setCode(400);
        request.setMessage("bad request");
        MultiResponse400 t400 = null;
        try {
            template
                .postForEntity(SERVER + "/MultiErrorCodeService/errorCodeWithHeaderJAXRS", request, MultiResponse400.class);
        } catch (InvocationException e) {
            t400 = (MultiResponse400) e.getErrorData();
            TestMgr.check(e.getStatus().getStatusCode(), Status.BAD_REQUEST.getStatusCode());
        }
        TestMgr.check(t400.getCode(), 400);
        TestMgr.check(t400.getMessage(), "bad request");

        request.setCode(500);
        request.setMessage("internal error");
        MultiResponse500 t500 = null;
        try {
            template
                .postForEntity(SERVER + "/MultiErrorCodeService/errorCodeWithHeaderJAXRS", request, MultiResponse400.class);
        } catch (InvocationException e) {
            t500 = (MultiResponse500) e.getErrorData();
            TestMgr.check(e.getStatus().getStatusCode(), Status.INTERNAL_SERVER_ERROR.getStatusCode());
        }
        TestMgr.check(t500.getCode(), 500);
        TestMgr.check(t500.getMessage(), "internal error");
    }

    // Sends the request as a raw JsonObject instead of the typed model to
    // verify row-type (untyped) payload handling.
    private static void testErrorCodeWithHeaderJAXRSUsingRowType() {
        JsonObject requestJson = new JsonObject();
        requestJson.put("code", 200);
        requestJson.put("message", "test message");
        ResponseEntity<MultiResponse200> result = template
            .postForEntity(SERVER + "/MultiErrorCodeService/errorCodeWithHeaderJAXRS", requestJson, MultiResponse200.class);
        TestMgr.check(result.getStatusCodeValue(), 200);
        TestMgr.check(result.getBody().getMessage(), "test message");
        TestMgr.check(result.getBody().getCode(), 200);
        TestMgr.check(result.getHeaders().getFirst("x-code"), 200);

        MultiRequest request = new MultiRequest();
        request.setCode(200);
        request.setMessage("test message");
        String stringRequest = Json.encode(request);
        // wrap request to JsonObject
        result = template
            .postForEntity(SERVER + "/MultiErrorCodeService/errorCodeWithHeaderJAXRS", new JsonObject(stringRequest),
                MultiResponse200.class);
        TestMgr.check(result.getStatusCodeValue(), 200);
        TestMgr.check(result.getBody().getMessage(), "test message");
        TestMgr.check(result.getBody().getCode(), 200);
        TestMgr.check(result.getHeaders().getFirst("x-code"), 200);
    }

    // Operation with no client-side error model: responses are decoded as
    // generic List/Map structures on both the 200 and the 400 paths.
    private static void testNoClientErrorCode() {
        JsonObject requestJson = new JsonObject();
        requestJson.put("code", 200);
        requestJson.put("message", "test message");

        @SuppressWarnings("rawtypes")
        ResponseEntity<List> listResult = template
            .postForEntity(SERVER + "/MultiErrorCodeService/noClientErrorCode", requestJson, List.class);
        TestMgr.check(listResult.getStatusCodeValue(), 200);
        Map<?, ?> mapResult =
            RestObjectMapperFactory.getRestObjectMapper().convertValue(listResult.getBody().get(0), Map.class);
        TestMgr.check(mapResult.get("message"), "test message");
        TestMgr.check(mapResult.get("code"), 200);
        TestMgr.check(mapResult.get("t200"), 200);

        try {
            requestJson.put("code", 400);
            template
                .postForEntity(SERVER + "/MultiErrorCodeService/noClientErrorCode", requestJson, Object.class);
        } catch (InvocationException e) {
            TestMgr.check(e.getStatusCode(), 400);
            mapResult = RestObjectMapperFactory.getRestObjectMapper().convertValue(e.getErrorData(), Map.class);
            TestMgr.check(mapResult.get("message"), "test message");
            TestMgr.check(mapResult.get("code"), 400);
            TestMgr.check(mapResult.get("t400"), 400);
        }
    }
}
| |
package org.mapdb.store.li;
import org.jetbrains.annotations.NotNull;
import org.mapdb.DBException;
import org.mapdb.io.DataInput2;
import org.mapdb.io.DataInput2ByteBuffer;
import org.mapdb.io.DataOutput2;
import org.mapdb.io.DataOutput2ByteArray;
import org.mapdb.ser.Serializer;
import org.mapdb.store.Store;
import java.nio.ByteBuffer;
import java.util.LinkedList;
import java.util.Queue;
import static org.mapdb.store.li.LiUtil.*;
/**
 * A simple in-memory, single-threaded record store. Records live in fixed
 * 1&nbsp;KiB pages inside one heap ByteBuffer; an index array maps recid to a
 * packed value (type/size/page, encoded via LiUtil) describing the record.
 *
 * <p>Not thread-safe (see {@link #isThreadSafe()}). Capacity is fixed at
 * construction: 100,000 recids and 64&nbsp;MiB of page data, with no bounds
 * checks on either — overflow surfaces as ArrayIndexOutOfBounds/BufferOverflow.
 */
public class LiStore implements Store {

    // Fixed size of every page; also the maximum serialized record size.
    private final static int PAGE_SIZE = 1024;

    // recid -> packed index value (R_VOID means unused). Slot 0 is never used.
    private final long[] index = new long[100_000];

    // Recids released by delete(), recycled before extending recidTail.
    private final Queue<Integer> freeRecids = new LinkedList<>();

    // Next never-used recid; recids start at 1.
    private int recidTail = 1;

    // Page offsets released by delete(), recycled before extending pageTail.
    private final Queue<Long> freePages = new LinkedList<>();

    // Next never-used page offset; page 0 is skipped.
    private long pageTail = PAGE_SIZE;

    // Backing storage for all record pages.
    private final ByteBuffer data = ByteBuffer.allocate(64*1024*1024);

    @Override
    public long preallocate() {
        // Reserve a recid with a placeholder index entry; the record content
        // is supplied later via preallocatePut().
        int recid = allocRecid();
        index[recid] = composeRecordType(R_PREALLOC);
        return recid;
    }

    @Override
    public <R> void preallocatePut(long recid, @NotNull Serializer<R> serializer, @NotNull R record) {
        long indexVal = index[(int) recid];
        if(indexVal == R_VOID)
            throw new DBException.RecordNotPreallocated();
        int recType = decompIndexValType(indexVal);
        if(recType != R_PREALLOC)
            throw new DBException.RecordNotPreallocated();
        long page = allocPage();
        int size = serializeToPage(record, serializer, (int) page);
        index[(int) recid] = composeIndexValSmall(size, page);
    }

    @Override
    public <R> @NotNull long put(@NotNull R record, @NotNull Serializer<R> serializer) {
        long page = allocPage();
        int size = serializeToPage(record, serializer, (int) page);
        int recid = allocRecid();
        index[recid] = composeIndexValSmall(size, page);
        return recid;
    }

    /**
     * Serializes a record into the page at the given offset.
     *
     * @return the serialized size in bytes
     * @throws RuntimeException if the serialized form exceeds PAGE_SIZE
     */
    protected <R> int serializeToPage(@NotNull R record, @NotNull Serializer<R> serializer, long page) {
        DataOutput2 out = new DataOutput2ByteArray();
        serializer.serialize(out, record);
        byte[] b = out.copyBytes();
        if(b.length>PAGE_SIZE)
            throw new RuntimeException();
        // Duplicate so position/limit changes never affect the shared buffer.
        ByteBuffer bb = data.duplicate();
        bb.position((int) page);
        bb.put(b);
        return b.length;
    }

    // Returns a recycled recid if available, otherwise extends the tail.
    private int allocRecid() {
        Integer recid = freeRecids.poll();
        if(recid == null)
            return recidTail++;
        return recid;
    }

    // Returns a recycled page offset if available, otherwise extends the tail.
    private long allocPage() {
        Long ret = freePages.poll();
        if(ret==null) {
            ret = pageTail;
            pageTail+=PAGE_SIZE;
        }
        return ret;
    }

    @Override
    public <R> void update(long recid, @NotNull Serializer<R> serializer, @NotNull R updatedRecord) {
        long indexVal = index[(int) recid];
        if(indexVal== R_VOID)
            throw new DBException.RecordNotFound();
        int recType = decompIndexValType(indexVal);
        if(recType == R_PREALLOC)
            throw new DBException.PreallocRecordAccess();
        int size = decompIndexValSize(indexVal); // NOTE(review): unused local
        long page = decompIndexValPage(indexVal);
        // Overwrites the record in-place on its existing page.
        int newSize = serializeToPage(updatedRecord, serializer, page);
        index[(int) recid] = composeIndexValSmall(newSize, page);
    }

    @Override
    public void verify() {
    }

    @Override
    public void commit() {
        // No-op: purely in-memory store, nothing to persist.
    }

    @Override
    public void compact() {
    }

    @Override
    public boolean isThreadSafe() {
        return false;
    }

    @Override
    public <R> void updateAtomic(long recid, @NotNull Serializer<R> serializer, @NotNull Transform<R> r) {
        // Get-transform-update; "atomic" only in the single-threaded sense,
        // since this store performs no locking.
        R rec = get(recid, serializer);
        rec = r.transform(rec);
        update(recid, serializer, rec);
    }

    @Override
    public <R> boolean compareAndUpdate(long recid, @NotNull Serializer<R> serializer, @NotNull R expectedOldRecord, @NotNull R updatedRecord) {
        // Equality is delegated to the serializer, not Object.equals.
        R r = get(recid, serializer);
        if(!serializer.equals(r,expectedOldRecord))
            return false;
        update(recid, serializer, updatedRecord);
        return true;
    }

    @Override
    public <R> boolean compareAndDelete(long recid, @NotNull Serializer<R> serializer, @NotNull R expectedOldRecord) {
        R r = get(recid, serializer);
        if(!serializer.equals(r,expectedOldRecord))
            return false;
        delete(recid, serializer);
        return true;
    }

    @Override
    public <R> void delete(long recid, @NotNull Serializer<R> serializer) {
        long indexVal = index[(int) recid];
        if(indexVal==R_VOID)
            throw new DBException.RecordNotFound();
        int recType = decompIndexValType(indexVal);
        if(recType == R_PREALLOC)
            throw new DBException.PreallocRecordAccess();
        int size = decompIndexValSize(indexVal); // NOTE(review): unused local
        long page = decompIndexValPage(indexVal);
        index[(int) recid] = R_VOID;
        freeRecids.add((int) recid);
        // Zero the whole page (not just `size` bytes) before recycling it.
        zeroOut(data, page, PAGE_SIZE);
        freePages.add(page);
    }

    @Override
    public <R> @NotNull R getAndDelete(long recid, @NotNull Serializer<R> serializer) {
        R r = get(recid, serializer);
        delete(recid, serializer);
        return r;
    }

    @Override
    public <K> @NotNull K get(long recid, @NotNull Serializer<K> ser) {
        long indexVal = index[(int) recid];
        if(indexVal==R_VOID)
            throw new DBException.RecordNotFound();
        int recType = decompIndexValType(indexVal);
        if(recType == R_PREALLOC)
            throw new DBException.PreallocRecordAccess();
        int size = decompIndexValSize(indexVal);
        long page = decompIndexValPage(indexVal);
        // View limited to exactly the record's bytes within its page.
        ByteBuffer bb = data.duplicate();
        bb.position((int) page);
        bb.limit((int) (page+size));
        DataInput2 input = new DataInput2ByteBuffer(bb);
        return ser.deserialize(input);
    }

    @Override
    public void close() {
    }

    @Override
    public void getAll(@NotNull GetAllCallback callback) {
        // Iterate every allocated recid, skipping empty and preallocated
        // slots, and hand the raw serialized bytes to the callback.
        ByteBuffer bb = data.duplicate();
        for(int recid = 1; recid<recidTail; recid++){
            long indexVal = index[recid];
            if(indexVal==R_VOID)
                continue;
            int recType = decompIndexValType(indexVal);
            if(recType == R_PREALLOC)
                continue;
            int size = decompIndexValSize(indexVal);
            long page = decompIndexValPage(indexVal);
            byte[] b = new byte[size];
            bb.position((int) page);
            bb.get(b);
            callback.takeOne(recid, b);
        }
    }

    @Override
    public boolean isEmpty() {
        // Empty when every allocated recid has been returned to the free list.
        return freeRecids.size() == recidTail-1;
    }

    @Override
    public int maxRecordSize() {
        return PAGE_SIZE;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.plan;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.convert.ConverterRule;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Util;
import org.apache.calcite.util.graph.DefaultDirectedGraph;
import org.apache.calcite.util.graph.DefaultEdge;
import org.apache.calcite.util.graph.DirectedGraph;
import org.apache.calcite.util.graph.Graphs;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.List;
import java.util.WeakHashMap;
/**
 * Definition of the convention trait.
* A new set of conversion information is created for
* each planner that registers at least one {@link ConverterRule} instance.
*
* <p>Conversion data is held in a {@link WeakHashMap} so that the JVM's garbage
* collector may reclaim the conversion data after the planner itself has been
* garbage collected. The conversion information consists of a graph of
* conversions (from one calling convention to another) and a map of graph arcs
* to {@link ConverterRule}s.
*/
public class ConventionTraitDef extends RelTraitDef<Convention> {
  //~ Static fields/initializers ---------------------------------------------

  public static final ConventionTraitDef INSTANCE =
      new ConventionTraitDef();

  //~ Instance fields --------------------------------------------------------

  /**
   * Weak-key map of RelOptPlanner to ConversionData. The idea is that when
   * the planner goes away, so does the map entry.
   */
  private final WeakHashMap<RelOptPlanner, ConversionData>
      plannerConversionMap =
      new WeakHashMap<RelOptPlanner, ConversionData>();

  //~ Constructors -----------------------------------------------------------

  // Singleton: use INSTANCE.
  private ConventionTraitDef() {
    super();
  }

  //~ Methods ----------------------------------------------------------------

  // implement RelTraitDef
  public Class<Convention> getTraitClass() {
    return Convention.class;
  }

  public String getSimpleName() {
    return "convention";
  }

  public Convention getDefault() {
    return Convention.NONE;
  }

  /**
   * Records a guaranteed converter rule as an edge (in-convention ->
   * out-convention) in the planner's conversion graph. Non-guaranteed rules
   * are ignored.
   */
  public void registerConverterRule(
      RelOptPlanner planner,
      ConverterRule converterRule) {
    if (converterRule.isGuaranteed()) {
      ConversionData conversionData = getConversionData(planner);

      final Convention inConvention =
          (Convention) converterRule.getInTrait();
      final Convention outConvention =
          (Convention) converterRule.getOutTrait();
      conversionData.conversionGraph.addVertex(inConvention);
      conversionData.conversionGraph.addVertex(outConvention);
      conversionData.conversionGraph.addEdge(inConvention, outConvention);

      conversionData.mapArcToConverterRule.put(
          Pair.of(inConvention, outConvention), converterRule);
    }
  }

  /** Removes a previously registered guaranteed converter rule's edge. */
  public void deregisterConverterRule(
      RelOptPlanner planner,
      ConverterRule converterRule) {
    if (converterRule.isGuaranteed()) {
      ConversionData conversionData = getConversionData(planner);

      final Convention inConvention =
          (Convention) converterRule.getInTrait();
      final Convention outConvention =
          (Convention) converterRule.getOutTrait();
      final boolean removed =
          conversionData.conversionGraph.removeEdge(
              inConvention, outConvention);
      assert removed;
      conversionData.mapArcToConverterRule.remove(
          Pair.of(inConvention, outConvention), converterRule);
    }
  }

  // implement RelTraitDef
  /**
   * Converts a relational expression to the target convention by walking
   * each candidate conversion path and applying a converter rule per arc.
   * Returns null if no path yields a conversion.
   */
  public RelNode convert(
      RelOptPlanner planner,
      RelNode rel,
      Convention toConvention,
      boolean allowInfiniteCostConverters) {
    final ConversionData conversionData = getConversionData(planner);

    final Convention fromConvention = rel.getConvention();

    List<List<Convention>> conversionPaths =
        conversionData.getPaths(fromConvention, toConvention);

  loop:
    for (List<Convention> conversionPath : conversionPaths) {
      assert conversionPath.get(0) == fromConvention;
      assert conversionPath.get(conversionPath.size() - 1)
          == toConvention;
      RelNode converted = rel;
      Convention previous = null;
      for (Convention arc : conversionPath) {
        // Abandon this path if cost has become infinite and infinite-cost
        // converters are not allowed; try the next path.
        if (planner.getCost(converted).isInfinite()
            && !allowInfiniteCostConverters) {
          continue loop;
        }
        if (previous != null) {
          converted =
              changeConvention(
                  converted, previous, arc,
                  conversionData.mapArcToConverterRule);
          if (converted == null) {
            // A guaranteed rule produced no conversion: internal error.
            throw Util.newInternal("Converter from " + previous
                + " to " + arc
                + " guaranteed that it could convert any relexp");
          }
        }
        previous = arc;
      }
      return converted;
    }

    return null;
  }

  /**
   * Tries to convert a relational expression to the target convention of an
   * arc.
   */
  private RelNode changeConvention(
      RelNode rel,
      Convention source,
      Convention target,
      final Multimap<Pair<Convention, Convention>, ConverterRule>
          mapArcToConverterRule) {
    assert source == rel.getConvention();

    // Try to apply each converter rule for this arc's source/target calling
    // conventions.
    final Pair<Convention, Convention> key = Pair.of(source, target);
    for (ConverterRule rule : mapArcToConverterRule.get(key)) {
      assert rule.getInTrait() == source;
      assert rule.getOutTrait() == target;
      RelNode converted = rule.convert(rel);
      if (converted != null) {
        return converted;
      }
    }
    return null;
  }

  /** Returns whether any conversion path exists between two conventions. */
  public boolean canConvert(
      RelOptPlanner planner,
      Convention fromConvention,
      Convention toConvention) {
    ConversionData conversionData = getConversionData(planner);
    return conversionData.getShortestPath(fromConvention, toConvention)
        != null;
  }

  // Looks up (or lazily creates) this planner's conversion workspace.
  // NOTE(review): not synchronized — assumes single-threaded planner setup.
  private ConversionData getConversionData(RelOptPlanner planner) {
    ConversionData conversionData = plannerConversionMap.get(planner);
    if (conversionData == null) {
      // Create new, empty ConversionData
      conversionData = new ConversionData();
      plannerConversionMap.put(planner, conversionData);
    }
    return conversionData;
  }

  //~ Inner Classes ----------------------------------------------------------

  /** Workspace for converting from one convention to another. */
  private static final class ConversionData {
    final DirectedGraph<Convention, DefaultEdge> conversionGraph =
        DefaultDirectedGraph.create();

    /**
     * For a given source/target convention, there may be several possible
     * conversion rules. Maps a (source, target) convention {@link Pair} to a
     * collection of {@link ConverterRule} objects.
     */
    final Multimap<Pair<Convention, Convention>, ConverterRule>
        mapArcToConverterRule =
        HashMultimap.create();

    // Frozen view of conversionGraph, built lazily by getPathMap().
    // NOTE(review): never invalidated — converter rules registered after the
    // first path lookup are not reflected here; confirm registration always
    // precedes planning.
    private Graphs.FrozenGraph<Convention, DefaultEdge> pathMap;

    public List<List<Convention>> getPaths(
        Convention fromConvention,
        Convention toConvention) {
      return getPathMap().getPaths(fromConvention, toConvention);
    }

    private Graphs.FrozenGraph<Convention, DefaultEdge> getPathMap() {
      if (pathMap == null) {
        pathMap = Graphs.makeImmutable(conversionGraph);
      }
      return pathMap;
    }

    public List<Convention> getShortestPath(
        Convention fromConvention,
        Convention toConvention) {
      return getPathMap().getShortestPath(fromConvention, toConvention);
    }
  }
}
// End ConventionTraitDef.java
| |
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.eventgrid.v2020_01_01_preview.implementation;
import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.CloudException;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.Path;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;
/**
 * An instance of this class provides access to all the operations defined
 * in TopicTypes.
 */
public class TopicTypesInner {
    /** The Retrofit service to perform REST calls. */
    private TopicTypesService service;
    /** The service client containing this operation class. */
    private EventGridManagementClientImpl client;
    /**
     * Initializes an instance of TopicTypesInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public TopicTypesInner(Retrofit retrofit, EventGridManagementClientImpl client) {
        this.service = retrofit.create(TopicTypesService.class);
        this.client = client;
    }
    /**
     * The interface defining all the services for TopicTypes to be
     * used by Retrofit to perform the actual REST calls.
     */
    interface TopicTypesService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.eventgrid.v2020_01_01_preview.TopicTypes list" })
        @GET("providers/Microsoft.EventGrid/topicTypes")
        Observable<Response<ResponseBody>> list(@Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.eventgrid.v2020_01_01_preview.TopicTypes get" })
        @GET("providers/Microsoft.EventGrid/topicTypes/{topicTypeName}")
        Observable<Response<ResponseBody>> get(@Path("topicTypeName") String topicTypeName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.eventgrid.v2020_01_01_preview.TopicTypes listEventTypes" })
        @GET("providers/Microsoft.EventGrid/topicTypes/{topicTypeName}/eventTypes")
        Observable<Response<ResponseBody>> listEventTypes(@Path("topicTypeName") String topicTypeName, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);
    }
    /**
     * List topic types.
     * List all registered topic types.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the {@code List<TopicTypeInfoInner>} object if successful.
     */
    public List<TopicTypeInfoInner> list() {
        // Blocking convenience wrapper around the async call.
        return listWithServiceResponseAsync().toBlocking().single().body();
    }
    /**
     * List topic types.
     * List all registered topic types.
     *
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<TopicTypeInfoInner>> listAsync(final ServiceCallback<List<TopicTypeInfoInner>> serviceCallback) {
        return ServiceFuture.fromResponse(listWithServiceResponseAsync(), serviceCallback);
    }
    /**
     * List topic types.
     * List all registered topic types.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the {@code List<TopicTypeInfoInner>} object
     */
    public Observable<List<TopicTypeInfoInner>> listAsync() {
        return listWithServiceResponseAsync().map(new Func1<ServiceResponse<List<TopicTypeInfoInner>>, List<TopicTypeInfoInner>>() {
            @Override
            public List<TopicTypeInfoInner> call(ServiceResponse<List<TopicTypeInfoInner>> response) {
                return response.body();
            }
        });
    }
    /**
     * List topic types.
     * List all registered topic types.
     *
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the {@code List<TopicTypeInfoInner>} object
     */
    public Observable<ServiceResponse<List<TopicTypeInfoInner>>> listWithServiceResponseAsync() {
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.list(this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<TopicTypeInfoInner>>>>() {
                @Override
                public Observable<ServiceResponse<List<TopicTypeInfoInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Unwrap the paged wire format (PageImpl1) into a plain list for callers.
                        ServiceResponse<PageImpl1<TopicTypeInfoInner>> result = listDelegate(response);
                        List<TopicTypeInfoInner> items = null;
                        if (result.body() != null) {
                            items = result.body().items();
                        }
                        ServiceResponse<List<TopicTypeInfoInner>> clientResponse = new ServiceResponse<List<TopicTypeInfoInner>>(items, result.response());
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization/validation failures surface as an error on the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes the raw HTTP response: 200 -> paged list body; anything else -> CloudException.
    private ServiceResponse<PageImpl1<TopicTypeInfoInner>> listDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl1<TopicTypeInfoInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl1<TopicTypeInfoInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
    /**
     * Get a topic type.
     * Get information about a topic type.
     *
     * @param topicTypeName Name of the topic type
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the TopicTypeInfoInner object if successful.
     */
    public TopicTypeInfoInner get(String topicTypeName) {
        // Blocking convenience wrapper around the async call.
        return getWithServiceResponseAsync(topicTypeName).toBlocking().single().body();
    }
    /**
     * Get a topic type.
     * Get information about a topic type.
     *
     * @param topicTypeName Name of the topic type
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<TopicTypeInfoInner> getAsync(String topicTypeName, final ServiceCallback<TopicTypeInfoInner> serviceCallback) {
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(topicTypeName), serviceCallback);
    }
    /**
     * Get a topic type.
     * Get information about a topic type.
     *
     * @param topicTypeName Name of the topic type
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the TopicTypeInfoInner object
     */
    public Observable<TopicTypeInfoInner> getAsync(String topicTypeName) {
        return getWithServiceResponseAsync(topicTypeName).map(new Func1<ServiceResponse<TopicTypeInfoInner>, TopicTypeInfoInner>() {
            @Override
            public TopicTypeInfoInner call(ServiceResponse<TopicTypeInfoInner> response) {
                return response.body();
            }
        });
    }
    /**
     * Get a topic type.
     * Get information about a topic type.
     *
     * @param topicTypeName Name of the topic type
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the TopicTypeInfoInner object
     */
    public Observable<ServiceResponse<TopicTypeInfoInner>> getWithServiceResponseAsync(String topicTypeName) {
        if (topicTypeName == null) {
            throw new IllegalArgumentException("Parameter topicTypeName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.get(topicTypeName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<TopicTypeInfoInner>>>() {
                @Override
                public Observable<ServiceResponse<TopicTypeInfoInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<TopicTypeInfoInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization/validation failures surface as an error on the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes the raw HTTP response: 200 -> TopicTypeInfoInner body; anything else -> CloudException.
    private ServiceResponse<TopicTypeInfoInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<TopicTypeInfoInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<TopicTypeInfoInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
    /**
     * List event types.
     * List event types for a topic type.
     *
     * @param topicTypeName Name of the topic type
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the {@code List<EventTypeInner>} object if successful.
     */
    public List<EventTypeInner> listEventTypes(String topicTypeName) {
        // Blocking convenience wrapper around the async call.
        return listEventTypesWithServiceResponseAsync(topicTypeName).toBlocking().single().body();
    }
    /**
     * List event types.
     * List event types for a topic type.
     *
     * @param topicTypeName Name of the topic type
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<EventTypeInner>> listEventTypesAsync(String topicTypeName, final ServiceCallback<List<EventTypeInner>> serviceCallback) {
        return ServiceFuture.fromResponse(listEventTypesWithServiceResponseAsync(topicTypeName), serviceCallback);
    }
    /**
     * List event types.
     * List event types for a topic type.
     *
     * @param topicTypeName Name of the topic type
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the {@code List<EventTypeInner>} object
     */
    public Observable<List<EventTypeInner>> listEventTypesAsync(String topicTypeName) {
        return listEventTypesWithServiceResponseAsync(topicTypeName).map(new Func1<ServiceResponse<List<EventTypeInner>>, List<EventTypeInner>>() {
            @Override
            public List<EventTypeInner> call(ServiceResponse<List<EventTypeInner>> response) {
                return response.body();
            }
        });
    }
    /**
     * List event types.
     * List event types for a topic type.
     *
     * @param topicTypeName Name of the topic type
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the {@code List<EventTypeInner>} object
     */
    public Observable<ServiceResponse<List<EventTypeInner>>> listEventTypesWithServiceResponseAsync(String topicTypeName) {
        if (topicTypeName == null) {
            throw new IllegalArgumentException("Parameter topicTypeName is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.listEventTypes(topicTypeName, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<EventTypeInner>>>>() {
                @Override
                public Observable<ServiceResponse<List<EventTypeInner>>> call(Response<ResponseBody> response) {
                    try {
                        // Unwrap the paged wire format (PageImpl1) into a plain list for callers.
                        ServiceResponse<PageImpl1<EventTypeInner>> result = listEventTypesDelegate(response);
                        List<EventTypeInner> items = null;
                        if (result.body() != null) {
                            items = result.body().items();
                        }
                        ServiceResponse<List<EventTypeInner>> clientResponse = new ServiceResponse<List<EventTypeInner>>(items, result.response());
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        // Deserialization/validation failures surface as an error on the stream.
                        return Observable.error(t);
                    }
                }
            });
    }
    // Deserializes the raw HTTP response: 200 -> paged list body; anything else -> CloudException.
    private ServiceResponse<PageImpl1<EventTypeInner>> listEventTypesDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl1<EventTypeInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl1<EventTypeInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.