| repo_name (string, 5–108 chars) | path (string, 6–333 chars) | size (string, 1–6 chars) | content (string, 4–977k chars) | license (15 classes) |
|---|---|---|---|---|
danielyc/test-1.9.4 | build/tmp/recompileMc/sources/net/minecraftforge/fml/relauncher/FMLInjectionData.java | 1592 | /*
* Forge Mod Loader
* Copyright (c) 2012-2013 cpw.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser Public License v2.1
* which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
*
* Contributors:
* cpw - implementation
*/
package net.minecraftforge.fml.relauncher;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import net.minecraft.launchwrapper.LaunchClassLoader;
import net.minecraftforge.common.ForgeVersion;
/**
 * Static holder for version and environment data injected into FML during relaunch.
 * Fields are populated once by {@link #build(File, LaunchClassLoader)} and read
 * by the rest of the loader via {@link #data()}.
 */
public class FMLInjectionData
{
    static File minecraftHome;   // game/server root directory supplied at relaunch
    static String major;         // Forge major version (string form of ForgeVersion.majorVersion)
    static String minor;         // Forge minor version
    static String rev;           // Forge revision version
    static String build;         // Forge build number
    static String mccversion;    // Minecraft version string (ForgeVersion.mcVersion)
    static String mcpversion;    // MCP mapping version string (ForgeVersion.mcpVersion)
    // Shared, mutable list of container identifiers; populated elsewhere in the
    // loader (contents not visible in this file).
    public static final List<String> containers = new ArrayList<String>();

    /**
     * Copies version constants from {@link ForgeVersion} and records the game
     * directory. The classLoader parameter is accepted but unused here.
     */
    static void build(File mcHome, LaunchClassLoader classLoader)
    {
        minecraftHome = mcHome;
        major = String.valueOf(ForgeVersion.majorVersion);
        minor = String.valueOf(ForgeVersion.minorVersion);
        rev = String.valueOf(ForgeVersion.revisionVersion);
        build = String.valueOf(ForgeVersion.buildVersion);
        mccversion = ForgeVersion.mcVersion;
        mcpversion = ForgeVersion.mcpVersion;
    }

    // NOTE(review): "debfuscation" is a typo inherited from upstream Forge; the
    // name is part of the API surface, so it must not be corrected here.
    /** @return classpath resource name of the deobfuscation data for the current MC version. */
    static String debfuscationDataName()
    {
        return "/deobfuscation_data-"+mccversion+".lzma";
    }

    /** @return the injection data tuple: major, minor, rev, build, mccversion, mcpversion, home dir, containers list. */
    public static Object[] data()
    {
        return new Object[] { major, minor, rev, build, mccversion, mcpversion, minecraftHome, containers };
    }
} | gpl-3.0 |
Saigut/yin | src/main/java/org/yinwang/yin/value/VoidValue.java | 133 | package org.yinwang.yin.value;
public class VoidValue extends Value {
public String toString() {
return "void";
}
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/coe/vo/AccommodationHousingVo.java | 11066 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.coe.vo;
/**
* Linked to coe.assessment.Accommodation Housing business object (ID: 1012100002).
*/
/**
 * Value object linked to the coe.assessment.AccommodationHousing business object
 * (ID: 1012100002). Carries six assessment fields (three lookup values, each with
 * a free-text notes companion) plus the id/version inherited from the Ref VO.
 *
 * Generated by the IMS Development Environment; comments below were added for
 * review only — regenerating the file will discard them.
 */
public class AccommodationHousingVo extends ims.coe.assessment.vo.AccommodationHousingRefVo implements ims.vo.ImsCloneable, Comparable
{
    private static final long serialVersionUID = 1L;

    public AccommodationHousingVo()
    {
    }
    public AccommodationHousingVo(Integer id, int version)
    {
        super(id, version);
    }
    /** Builds the VO from its serializable bean form; null lookup beans map to null lookups. */
    public AccommodationHousingVo(ims.coe.vo.beans.AccommodationHousingVoBean bean)
    {
        this.id = bean.getId();
        this.version = bean.getVersion();
        this.currentlivingarrangements = bean.getCurrentLivingArrangements() == null ? null : ims.coe.vo.lookups.AccommodationLivingArrangements.buildLookup(bean.getCurrentLivingArrangements());
        this.currentlivingarrangementsnotes = bean.getCurrentLivingArrangementsNotes();
        this.accommodation = bean.getAccommodation() == null ? null : ims.coe.vo.lookups.AccommodationHousing.buildLookup(bean.getAccommodation());
        this.accommodationnotes = bean.getAccommodationNotes();
        this.assisstancetoothers = bean.getAssisstanceToOthers() == null ? null : ims.core.vo.lookups.YesNoUnknown.buildLookup(bean.getAssisstanceToOthers());
        this.assistancetoothersnotes = bean.getAssistanceToOthersNotes();
    }
    /** Re-populates this VO from a bean (same mapping as the bean constructor); map is unused here. */
    public void populate(ims.vo.ValueObjectBeanMap map, ims.coe.vo.beans.AccommodationHousingVoBean bean)
    {
        this.id = bean.getId();
        this.version = bean.getVersion();
        this.currentlivingarrangements = bean.getCurrentLivingArrangements() == null ? null : ims.coe.vo.lookups.AccommodationLivingArrangements.buildLookup(bean.getCurrentLivingArrangements());
        this.currentlivingarrangementsnotes = bean.getCurrentLivingArrangementsNotes();
        this.accommodation = bean.getAccommodation() == null ? null : ims.coe.vo.lookups.AccommodationHousing.buildLookup(bean.getAccommodation());
        this.accommodationnotes = bean.getAccommodationNotes();
        this.assisstancetoothers = bean.getAssisstanceToOthers() == null ? null : ims.core.vo.lookups.YesNoUnknown.buildLookup(bean.getAssisstanceToOthers());
        this.assistancetoothersnotes = bean.getAssistanceToOthersNotes();
    }
    public ims.vo.ValueObjectBean getBean()
    {
        return this.getBean(new ims.vo.ValueObjectBeanMap());
    }
    /**
     * Returns the bean form of this VO, using the map as an identity cache so
     * that VO graphs with shared references serialize to a single bean each.
     */
    public ims.vo.ValueObjectBean getBean(ims.vo.ValueObjectBeanMap map)
    {
        ims.coe.vo.beans.AccommodationHousingVoBean bean = null;
        if(map != null)
            bean = (ims.coe.vo.beans.AccommodationHousingVoBean)map.getValueObjectBean(this);
        if (bean == null)
        {
            bean = new ims.coe.vo.beans.AccommodationHousingVoBean();
            map.addValueObjectBean(this, bean);
            bean.populate(map, this);
        }
        return bean;
    }
    /** Reflective field access by (case-insensitive) field name; falls through to the superclass. */
    public Object getFieldValueByFieldName(String fieldName)
    {
        if(fieldName == null)
            throw new ims.framework.exceptions.CodingRuntimeException("Invalid field name");
        fieldName = fieldName.toUpperCase();
        if(fieldName.equals("CURRENTLIVINGARRANGEMENTS"))
            return getCurrentLivingArrangements();
        if(fieldName.equals("CURRENTLIVINGARRANGEMENTSNOTES"))
            return getCurrentLivingArrangementsNotes();
        if(fieldName.equals("ACCOMMODATION"))
            return getAccommodation();
        if(fieldName.equals("ACCOMMODATIONNOTES"))
            return getAccommodationNotes();
        if(fieldName.equals("ASSISSTANCETOOTHERS"))
            return getAssisstanceToOthers();
        if(fieldName.equals("ASSISTANCETOOTHERSNOTES"))
            return getAssistanceToOthersNotes();
        return super.getFieldValueByFieldName(fieldName);
    }

    // --- CurrentLivingArrangements (lookup) + notes. Setters mark the VO as not validated. ---
    public boolean getCurrentLivingArrangementsIsNotNull()
    {
        return this.currentlivingarrangements != null;
    }
    public ims.coe.vo.lookups.AccommodationLivingArrangements getCurrentLivingArrangements()
    {
        return this.currentlivingarrangements;
    }
    public void setCurrentLivingArrangements(ims.coe.vo.lookups.AccommodationLivingArrangements value)
    {
        this.isValidated = false;
        this.currentlivingarrangements = value;
    }
    public boolean getCurrentLivingArrangementsNotesIsNotNull()
    {
        return this.currentlivingarrangementsnotes != null;
    }
    public String getCurrentLivingArrangementsNotes()
    {
        return this.currentlivingarrangementsnotes;
    }
    /** Maximum persisted length of the notes field (not enforced by the setter). */
    public static int getCurrentLivingArrangementsNotesMaxLength()
    {
        return 255;
    }
    public void setCurrentLivingArrangementsNotes(String value)
    {
        this.isValidated = false;
        this.currentlivingarrangementsnotes = value;
    }

    // --- Accommodation (lookup) + notes ---
    public boolean getAccommodationIsNotNull()
    {
        return this.accommodation != null;
    }
    public ims.coe.vo.lookups.AccommodationHousing getAccommodation()
    {
        return this.accommodation;
    }
    public void setAccommodation(ims.coe.vo.lookups.AccommodationHousing value)
    {
        this.isValidated = false;
        this.accommodation = value;
    }
    public boolean getAccommodationNotesIsNotNull()
    {
        return this.accommodationnotes != null;
    }
    public String getAccommodationNotes()
    {
        return this.accommodationnotes;
    }
    public static int getAccommodationNotesMaxLength()
    {
        return 255;
    }
    public void setAccommodationNotes(String value)
    {
        this.isValidated = false;
        this.accommodationnotes = value;
    }

    // --- AssisstanceToOthers (Yes/No/Unknown lookup) + notes.
    //     "Assisstance" spelling is generated from the business-object model and must stay. ---
    public boolean getAssisstanceToOthersIsNotNull()
    {
        return this.assisstancetoothers != null;
    }
    public ims.core.vo.lookups.YesNoUnknown getAssisstanceToOthers()
    {
        return this.assisstancetoothers;
    }
    public void setAssisstanceToOthers(ims.core.vo.lookups.YesNoUnknown value)
    {
        this.isValidated = false;
        this.assisstancetoothers = value;
    }
    public boolean getAssistanceToOthersNotesIsNotNull()
    {
        return this.assistancetoothersnotes != null;
    }
    public String getAssistanceToOthersNotes()
    {
        return this.assistancetoothersnotes;
    }
    public static int getAssistanceToOthersNotesMaxLength()
    {
        return 255;
    }
    public void setAssistanceToOthersNotes(String value)
    {
        this.isValidated = false;
        this.assistancetoothersnotes = value;
    }

    /** True if already validated; the isBusy flag guards against re-entrant VO graphs. */
    public boolean isValidated()
    {
        if(this.isBusy)
            return true;
        this.isBusy = true;
        if(!this.isValidated)
        {
            this.isBusy = false;
            return false;
        }
        this.isBusy = false;
        return true;
    }
    public String[] validate()
    {
        return validate(null);
    }
    /**
     * Validates the VO, appending to any existing errors. This VO has no field
     * rules of its own, so the result is null (valid) unless errors were passed in.
     */
    public String[] validate(String[] existingErrors)
    {
        if(this.isBusy)
            return null;
        this.isBusy = true;
        java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
        if(existingErrors != null)
        {
            for(int x = 0; x < existingErrors.length; x++)
            {
                listOfErrors.add(existingErrors[x]);
            }
        }
        int errorCount = listOfErrors.size();
        if(errorCount == 0)
        {
            this.isBusy = false;
            this.isValidated = true;
            return null;
        }
        String[] result = new String[errorCount];
        for(int x = 0; x < errorCount; x++)
            result[x] = (String)listOfErrors.get(x);
        this.isBusy = false;
        this.isValidated = false;
        return result;
    }
    /** Clears identity so the VO saves as a new record. */
    public void clearIDAndVersion()
    {
        this.id = null;
        this.version = 0;
    }
    /** Deep-copies lookups, shallow-copies strings; isBusy guards cyclic graphs. */
    public Object clone()
    {
        if(this.isBusy)
            return this;
        this.isBusy = true;
        AccommodationHousingVo clone = new AccommodationHousingVo(this.id, this.version);
        if(this.currentlivingarrangements == null)
            clone.currentlivingarrangements = null;
        else
            clone.currentlivingarrangements = (ims.coe.vo.lookups.AccommodationLivingArrangements)this.currentlivingarrangements.clone();
        clone.currentlivingarrangementsnotes = this.currentlivingarrangementsnotes;
        if(this.accommodation == null)
            clone.accommodation = null;
        else
            clone.accommodation = (ims.coe.vo.lookups.AccommodationHousing)this.accommodation.clone();
        clone.accommodationnotes = this.accommodationnotes;
        if(this.assisstancetoothers == null)
            clone.assisstancetoothers = null;
        else
            clone.assisstancetoothers = (ims.core.vo.lookups.YesNoUnknown)this.assisstancetoothers.clone();
        clone.assistancetoothersnotes = this.assistancetoothersnotes;
        clone.isValidated = this.isValidated;
        this.isBusy = false;
        return clone;
    }
    public int compareTo(Object obj)
    {
        return compareTo(obj, true);
    }
    /** Orders by business-object id; nulls sort after non-nulls. caseInsensitive is unused. */
    public int compareTo(Object obj, boolean caseInsensitive)
    {
        if (obj == null)
        {
            return -1;
        }
        if(caseInsensitive); // this is to avoid eclipse warning only.
        if (!(AccommodationHousingVo.class.isAssignableFrom(obj.getClass())))
        {
            throw new ClassCastException("A AccommodationHousingVo object cannot be compared an Object of type " + obj.getClass().getName());
        }
        if (this.id == null)
            return 1;
        if (((AccommodationHousingVo)obj).getBoId() == null)
            return -1;
        return this.id.compareTo(((AccommodationHousingVo)obj).getBoId());
    }
    public synchronized static int generateValueObjectUniqueID()
    {
        return ims.vo.ValueObject.generateUniqueID();
    }
    /** @return how many of the six data fields are non-null. */
    public int countFieldsWithValue()
    {
        int count = 0;
        if(this.currentlivingarrangements != null)
            count++;
        if(this.currentlivingarrangementsnotes != null)
            count++;
        if(this.accommodation != null)
            count++;
        if(this.accommodationnotes != null)
            count++;
        if(this.assisstancetoothers != null)
            count++;
        if(this.assistancetoothersnotes != null)
            count++;
        return count;
    }
    /** @return total number of data fields on this VO (always 6). */
    public int countValueObjectFields()
    {
        return 6;
    }

    // Backing fields; protected so subclasses/generated code can reach them directly.
    protected ims.coe.vo.lookups.AccommodationLivingArrangements currentlivingarrangements;
    protected String currentlivingarrangementsnotes;
    protected ims.coe.vo.lookups.AccommodationHousing accommodation;
    protected String accommodationnotes;
    protected ims.core.vo.lookups.YesNoUnknown assisstancetoothers;
    protected String assistancetoothersnotes;
    private boolean isValidated = false;  // cleared by every setter
    private boolean isBusy = false;       // re-entrancy guard for clone()/validate()/isValidated()
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ClinicalAdmin/src/ims/clinicaladmin/forms/dementiaamtsfieldandhelpconfig/Handlers.java | 7755 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.clinicaladmin.forms.dementiaamtsfieldandhelpconfig;
import ims.framework.delegates.*;
/**
 * Generated UI-logic base class for the "Dementia AMTS field and help config" form.
 * setContext() wires every form/grid/button/context-menu event to an abstract
 * on*() callback, which the concrete Logic subclass implements.
 *
 * Generated by the IMS Development Environment; comments added for review only.
 */
abstract public class Handlers implements ims.framework.UILogic, IFormUILogicCode, ims.framework.interfaces.IClearInfo
{
    // Callbacks implemented by the concrete form logic.
    abstract protected void onFormModeChanged();
    abstract protected void onFormOpen(Object[] args) throws ims.framework.exceptions.PresentationLogicException;
    abstract protected void onGrdDetailsSelectionChanged() throws ims.framework.exceptions.PresentationLogicException;
    abstract protected void onBtnEditClick() throws ims.framework.exceptions.PresentationLogicException;
    abstract protected void onBtnSaveClick() throws ims.framework.exceptions.PresentationLogicException;
    abstract protected void onBtnCancelClick() throws ims.framework.exceptions.PresentationLogicException;
    abstract protected void onContextMenuItemClick(int menuItemID, ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException;

    /**
     * Stores the engine/form references and registers anonymous delegate
     * adapters that forward each UI event to the matching abstract method.
     */
    public final void setContext(ims.framework.UIEngine engine, GenForm form)
    {
        this.engine = engine;
        this.form = form;
        this.form.setFormModeChangedEvent(new FormModeChanged()
        {
            private static final long serialVersionUID = 1L;
            public void handle()
            {
                onFormModeChanged();
            }
        });
        this.form.setFormOpenEvent(new FormOpen()
        {
            private static final long serialVersionUID = 1L;
            public void handle(Object[] args) throws ims.framework.exceptions.PresentationLogicException
            {
                onFormOpen(args);
            }
        });
        this.form.grdDetails().setSelectionChangedEvent(new GridSelectionChanged()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.enumerations.MouseButton mouseButton) throws ims.framework.exceptions.PresentationLogicException
            {
                onGrdDetailsSelectionChanged();
            }
        });
        this.form.btnEdit().setClickEvent(new Click()
        {
            private static final long serialVersionUID = 1L;
            public void handle() throws ims.framework.exceptions.PresentationLogicException
            {
                onBtnEditClick();
            }
        });
        this.form.btnSave().setClickEvent(new Click()
        {
            private static final long serialVersionUID = 1L;
            public void handle() throws ims.framework.exceptions.PresentationLogicException
            {
                onBtnSaveClick();
            }
        });
        this.form.btnCancel().setClickEvent(new Click()
        {
            private static final long serialVersionUID = 1L;
            public void handle() throws ims.framework.exceptions.PresentationLogicException
            {
                onBtnCancelClick();
            }
        });
        // Generic-grid context menu items all funnel into onContextMenuItemClick,
        // distinguished by the GenForm.ContextMenus.GenericGrid.* constant.
        this.form.getContextMenus().getGenericGridAddItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.Add, sender);
            }
        });
        this.form.getContextMenus().getGenericGridUpdateItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.Update, sender);
            }
        });
        this.form.getContextMenus().getGenericGridRemoveItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.Remove, sender);
            }
        });
        this.form.getContextMenus().getGenericGridViewItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.View, sender);
            }
        });
        this.form.getContextMenus().getGenericGridMoveUpItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.MoveUp, sender);
            }
        });
        this.form.getContextMenus().getGenericGridMoveDownItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.MoveDown, sender);
            }
        });
        this.form.getContextMenus().getGenericGridReplaceItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.Replace, sender);
            }
        });
        this.form.getContextMenus().getGenericGridConfirmItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.Confirm, sender);
            }
        });
        this.form.getContextMenus().getGenericGridEDIT_VIEW_ICP_ACTIONSItem().setClickEvent(new ims.framework.delegates.MenuItemClick()
        {
            private static final long serialVersionUID = 1L;
            public void handle(ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
            {
                onContextMenuItemClick(GenForm.ContextMenus.GenericGrid.EDIT_VIEW_ICP_ACTIONS, sender);
            }
        });
    }
    /** Releases the engine/form references so the form can be garbage-collected. */
    public void free()
    {
        this.engine = null;
        this.form = null;
    }
    public abstract void clearContextInformation();
    protected ims.framework.UIEngine engine;
    protected GenForm form;
}
| agpl-3.0 |
isokissa3/mcMMO | src/main/java/com/gmail/nossr50/util/blockmeta/HashChunkletManager.java | 13109 | package com.gmail.nossr50.util.blockmeta;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.StreamCorruptedException;
import java.io.UTFDataFormatException;
import java.util.HashMap;
import org.bukkit.World;
import org.bukkit.block.Block;
import com.gmail.nossr50.mcMMO;
/**
 * ChunkletManager that keeps 16x64x16 block-flag "chunklets" in an in-memory
 * HashMap keyed by "worldName,cx,cz,cy" and persists each chunklet as a Java
 * serialized {@link ChunkletStore} at
 * {@code <worldFolder>/mcmmo_data/<cx>/<cz>/<cy>}.
 *
 * Not thread-safe; expected to be driven from the server main thread.
 */
public class HashChunkletManager implements ChunkletManager {
    // In-memory cache of loaded chunklets, keyed "world,cx,cz,cy".
    public HashMap<String, ChunkletStore> store = new HashMap<String, ChunkletStore>();

    /** Loads a single chunklet from disk into the cache, if its file exists. */
    @Override
    public void loadChunklet(int cx, int cy, int cz, World world) {
        File dataDir = new File(world.getWorldFolder(), "mcmmo_data");
        File cxDir = new File(dataDir, "" + cx);
        if (!cxDir.exists()) {
            return;
        }
        File czDir = new File(cxDir, "" + cz);
        if (!czDir.exists()) {
            return;
        }
        File yFile = new File(czDir, "" + cy);
        if (!yFile.exists()) {
            return;
        }
        ChunkletStore in = deserializeChunkletStore(yFile);
        if (in != null) {
            store.put(world.getName() + "," + cx + "," + cz + "," + cy, in);
        }
    }

    /** Writes a cached chunklet to disk (creating directories as needed) and evicts it. */
    @Override
    public void unloadChunklet(int cx, int cy, int cz, World world) {
        File dataDir = new File(world.getWorldFolder(), "mcmmo_data");
        if (store.containsKey(world.getName() + "," + cx + "," + cz + "," + cy)) {
            File cxDir = new File(dataDir, "" + cx);
            if (!cxDir.exists()) {
                cxDir.mkdir();
            }
            File czDir = new File(cxDir, "" + cz);
            if (!czDir.exists()) {
                czDir.mkdir();
            }
            File yFile = new File(czDir, "" + cy);
            ChunkletStore out = store.get(world.getName() + "," + cx + "," + cz + "," + cy);
            serializeChunkletStore(out, yFile);
            store.remove(world.getName() + "," + cx + "," + cz + "," + cy);
        }
    }

    /** Loads all four vertical chunklets (y sections 0-3) of a chunk column. */
    @Override
    public void loadChunk(int cx, int cz, World world) {
        File dataDir = new File(world.getWorldFolder(), "mcmmo_data");
        File cxDir = new File(dataDir, "" + cx);
        if (!cxDir.exists()) {
            return;
        }
        File czDir = new File(cxDir, "" + cz);
        if (!czDir.exists()) {
            return;
        }
        for (int y = 0; y < 4; y++) {
            File yFile = new File(czDir, "" + y);
            if (!yFile.exists()) {
                continue;
            }
            ChunkletStore in = deserializeChunkletStore(yFile);
            if (in != null) {
                store.put(world.getName() + "," + cx + "," + cz + "," + y, in);
            }
        }
    }

    /** Persists and evicts all four vertical chunklets of a chunk column. */
    @Override
    public void unloadChunk(int cx, int cz, World world) {
        File dataDir = new File(world.getWorldFolder(), "mcmmo_data");
        for (int y = 0; y < 4; y++) {
            if (store.containsKey(world.getName() + "," + cx + "," + cz + "," + y)) {
                File cxDir = new File(dataDir, "" + cx);
                if (!cxDir.exists()) {
                    cxDir.mkdir();
                }
                File czDir = new File(cxDir, "" + cz);
                if (!czDir.exists()) {
                    czDir.mkdir();
                }
                File yFile = new File(czDir, "" + y);
                ChunkletStore out = store.get(world.getName() + "," + cx + "," + cz + "," + y);
                serializeChunkletStore(out, yFile);
                store.remove(world.getName() + "," + cx + "," + cz + "," + y);
            }
        }
    }

    /** Chunk-load hook; eager loading is intentionally disabled (chunklets load lazily on access). */
    @Override
    public void chunkLoaded(int cx, int cz, World world) {
        //loadChunk(cx, cz, world);
    }

    @Override
    public void chunkUnloaded(int cx, int cz, World world) {
        // BUGFIX: previously called unloadChunk(cx, cx, world), passing the x
        // coordinate twice and persisting/evicting the wrong chunk column.
        unloadChunk(cx, cz, world);
    }

    /** Serializes every cached chunklet belonging to the given world (cache is left intact). */
    @Override
    public void saveWorld(World world) {
        String worldName = world.getName();
        File dataDir = new File(world.getWorldFolder(), "mcmmo_data");
        if (!dataDir.exists()) {
            dataDir.mkdirs();
        }
        for (String key : store.keySet()) {
            String[] info = key.split(",");
            if (worldName.equals(info[0])) {
                File cxDir = new File(dataDir, "" + info[1]);
                if (!cxDir.exists()) {
                    cxDir.mkdir();
                }
                File czDir = new File(cxDir, "" + info[2]);
                if (!czDir.exists()) {
                    czDir.mkdir();
                }
                File yFile = new File(czDir, "" + info[3]);
                serializeChunkletStore(store.get(key), yFile);
            }
        }
    }

    /** Saves the world, then evicts ALL of its chunklets from the cache. */
    @Override
    public void unloadWorld(World world) {
        saveWorld(world);
        String worldName = world.getName();
        // BUGFIX: the previous implementation returned after removing a single
        // entry, leaving every other chunklet of the world cached forever.
        // Iterate a snapshot of the keys so removal cannot throw
        // ConcurrentModificationException.
        for (String key : new java.util.ArrayList<String>(store.keySet())) {
            if (key.split(",")[0].equals(worldName)) {
                store.remove(key);
            }
        }
    }

    /** World-load hook; eager loading is intentionally disabled. */
    @Override
    public void loadWorld(World world) {
        //for (Chunk chunk : world.getLoadedChunks()) {
        //    this.chunkLoaded(chunk.getX(), chunk.getZ(), world);
        //}
    }

    @Override
    public void saveAll() {
        for (World world : mcMMO.p.getServer().getWorlds()) {
            saveWorld(world);
        }
    }

    @Override
    public void unloadAll() {
        saveAll();
        for (World world : mcMMO.p.getServer().getWorlds()) {
            unloadWorld(world);
        }
    }

    /**
     * @return true if the flag at block (x, y, z) is set. Loads the owning
     * chunklet lazily; absent chunklets read as false.
     * NOTE(review): cx = x/16 truncates toward zero while the intra-chunklet
     * index uses Math.abs(x) % 16, so negative coordinates may map
     * inconsistently — preserved as-is to match existing on-disk data.
     */
    @Override
    public boolean isTrue(int x, int y, int z, World world) {
        int cx = x / 16;
        int cz = z / 16;
        int cy = y / 64;
        String key = world.getName() + "," + cx + "," + cz + "," + cy;
        if (!store.containsKey(key)) {
            loadChunklet(cx, cy, cz, world);
        }
        if (!store.containsKey(key)) {
            return false;
        }
        ChunkletStore check = store.get(world.getName() + "," + cx + "," + cz + "," + cy);
        int ix = Math.abs(x) % 16;
        int iz = Math.abs(z) % 16;
        int iy = Math.abs(y) % 64;
        return check.isTrue(ix, iy, iz);
    }

    @Override
    public boolean isTrue(Block block) {
        return isTrue(block.getX(), block.getY(), block.getZ(), block.getWorld());
    }

    /** Sets the flag at (x, y, z), creating the owning chunklet on demand. */
    @Override
    public void setTrue(int x, int y, int z, World world) {
        int cx = x / 16;
        int cz = z / 16;
        int cy = y / 64;
        int ix = Math.abs(x) % 16;
        int iz = Math.abs(z) % 16;
        int iy = Math.abs(y) % 64;
        String key = world.getName() + "," + cx + "," + cz + "," + cy;
        if (!store.containsKey(key)) {
            loadChunklet(cx, cy, cz, world);
        }
        ChunkletStore cStore = store.get(key);
        if (cStore == null) {
            cStore = ChunkletStoreFactory.getChunkletStore();
            store.put(world.getName() + "," + cx + "," + cz + "," + cy, cStore);
        }
        cStore.setTrue(ix, iy, iz);
    }

    @Override
    public void setTrue(Block block) {
        setTrue(block.getX(), block.getY(), block.getZ(), block.getWorld());
    }

    /** Clears the flag at (x, y, z); a missing chunklet is already all-false, so nothing is created. */
    @Override
    public void setFalse(int x, int y, int z, World world) {
        int cx = x / 16;
        int cz = z / 16;
        int cy = y / 64;
        int ix = Math.abs(x) % 16;
        int iz = Math.abs(z) % 16;
        int iy = Math.abs(y) % 64;
        String key = world.getName() + "," + cx + "," + cz + "," + cy;
        if (!store.containsKey(key)) {
            loadChunklet(cx, cy, cz, world);
        }
        ChunkletStore cStore = store.get(key);
        if (cStore == null) {
            return; // No need to make a store for something we will be setting to false
        }
        cStore.setFalse(ix, iy, iz);
    }

    @Override
    public void setFalse(Block block) {
        setFalse(block.getX(), block.getY(), block.getZ(), block.getWorld());
    }

    /** Deletes on-disk files for cached chunklets that became empty, pruning empty directories. */
    @Override
    public void cleanUp() {
        for (String key : store.keySet()) {
            if (store.get(key).isEmpty()) {
                String[] info = key.split(",");
                File dataDir = new File(mcMMO.p.getServer().getWorld(info[0]).getWorldFolder(), "mcmmo_data");
                File cxDir = new File(dataDir, "" + info[1]);
                if (!cxDir.exists()) {
                    continue;
                }
                File czDir = new File(cxDir, "" + info[2]);
                if (!czDir.exists()) {
                    continue;
                }
                File yFile = new File(czDir, "" + info[3]);
                yFile.delete();
                // Delete empty directories
                if (czDir.list().length == 0) {
                    czDir.delete();
                }
                if (cxDir.list().length == 0) {
                    cxDir.delete();
                }
            }
        }
    }

    /**
     * Serializes a chunklet to disk. I/O failures are logged to stderr and
     * otherwise swallowed (best-effort persistence).
     *
     * @param cStore ChunkletStore to save
     * @param location Where on the disk to put it
     */
    private void serializeChunkletStore(ChunkletStore cStore, File location) {
        FileOutputStream fileOut = null;
        ObjectOutputStream objOut = null;
        try {
            if (!location.exists()) {
                location.createNewFile();
            }
            fileOut = new FileOutputStream(location);
            objOut = new ObjectOutputStream(fileOut);
            objOut.writeObject(cStore);
        }
        catch (IOException ex) {
            ex.printStackTrace();
        }
        finally {
            if (objOut != null) {
                try {
                    objOut.flush();
                    objOut.close();
                }
                catch (IOException ex) {
                    ex.printStackTrace();
                }
            }
            if (fileOut != null) {
                try {
                    fileOut.close();
                }
                catch (IOException ex) {
                    ex.printStackTrace();
                }
            }
        }
    }

    /**
     * Deserializes a chunklet from disk. Corrupted or truncated files
     * (EOF/StreamCorrupted/UTFDataFormat) yield a fresh empty store so that
     * localized corruption only loses that chunklet's data.
     *
     * @param location Where on the disk to read from
     * @return ChunkletStore from the specified location, or null on other failures
     */
    private ChunkletStore deserializeChunkletStore(File location) {
        ChunkletStore storeIn = null;
        FileInputStream fileIn = null;
        ObjectInputStream objIn = null;
        try {
            fileIn = new FileInputStream(location);
            objIn = new ObjectInputStream(fileIn);
            storeIn = (ChunkletStore) objIn.readObject();
        }
        catch (IOException ex) {
            if (ex instanceof EOFException) {
                // EOF should only happen on Chunklets that somehow have been corrupted.
                return ChunkletStoreFactory.getChunkletStore();
            }
            else if (ex instanceof StreamCorruptedException) {
                // StreamCorrupted happens when the Chunklet is no good.
                return ChunkletStoreFactory.getChunkletStore();
            }
            else if (ex instanceof UTFDataFormatException) {
                // UTF happens when the Chunklet cannot be read or is corrupted
                return ChunkletStoreFactory.getChunkletStore();
            }
            ex.printStackTrace();
        }
        catch (ClassNotFoundException ex) {
            ex.printStackTrace();
        }
        finally {
            if (objIn != null) {
                try {
                    objIn.close();
                }
                catch (IOException ex) {
                    ex.printStackTrace();
                }
            }
            if (fileIn != null) {
                try {
                    fileIn.close();
                }
                catch (IOException ex) {
                    ex.printStackTrace();
                }
            }
        }
        // TODO: Make this less messy, as it is, it's kinda... depressing to do it like this.
        // Might also make a mess when we move to stacks, but at that point I think I will write a new Manager...
        // IMPORTANT! If ChunkletStoreFactory is going to be returning something other than PrimitiveEx we need to remove this, as it will be breaking time for old maps
        /*
        if (!(storeIn instanceof PrimitiveExChunkletStore)) {
            ChunkletStore tempStore = ChunkletStoreFactory.getChunkletStore();
            if (storeIn != null) {
                tempStore.copyFrom(storeIn);
            }
            storeIn = tempStore;
        }
        */
        return storeIn;
    }
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/emergency/vo/lookups/ModeOfArrival.java | 5321 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.emergency.vo.lookups;
import ims.framework.cn.data.TreeNode;
import java.util.ArrayList;
import ims.framework.utils.Image;
import ims.framework.utils.Color;
/**
 * Lookup instance for "Mode of Arrival" (type ID 1291009), usable as a tree node
 * in lookup hierarchies. Declares one built-in "negative" (system-defined,
 * negative-ID) instance: AMBULANCE.
 *
 * Generated by the IMS Development Environment; comments added for review only.
 */
public class ModeOfArrival extends ims.vo.LookupInstVo implements TreeNode
{
    private static final long serialVersionUID = 1L;

    public ModeOfArrival()
    {
        super();
    }
    public ModeOfArrival(int id)
    {
        super(id, "", true);
    }
    public ModeOfArrival(int id, String text, boolean active)
    {
        super(id, text, active, null, null, null);
    }
    public ModeOfArrival(int id, String text, boolean active, ModeOfArrival parent, Image image)
    {
        super(id, text, active, parent, image);
    }
    public ModeOfArrival(int id, String text, boolean active, ModeOfArrival parent, Image image, Color color)
    {
        super(id, text, active, parent, image, color);
    }
    public ModeOfArrival(int id, String text, boolean active, ModeOfArrival parent, Image image, Color color, int order)
    {
        super(id, text, active, parent, image, color, order);
    }
    /** Builds an instance from a lookup bean (parent/image/color are not carried over). */
    public static ModeOfArrival buildLookup(ims.vo.LookupInstanceBean bean)
    {
        return new ModeOfArrival(bean.getId(), bean.getText(), bean.isActive());
    }
    public String toString()
    {
        if(getText() != null)
            return getText();
        return "";
    }

    // --- TreeNode implementation: parent/children delegate to LookupInstVo storage. ---
    public TreeNode getParentNode()
    {
        return (ModeOfArrival)super.getParentInstance();
    }
    public ModeOfArrival getParent()
    {
        return (ModeOfArrival)super.getParentInstance();
    }
    public void setParent(ModeOfArrival parent)
    {
        super.setParentInstance(parent);
    }
    public TreeNode[] getChildren()
    {
        ArrayList children = super.getChildInstances();
        ModeOfArrival[] typedChildren = new ModeOfArrival[children.size()];
        for (int i = 0; i < children.size(); i++)
        {
            typedChildren[i] = (ModeOfArrival)children.get(i);
        }
        return typedChildren;
    }
    /** Adds a child if it is a ModeOfArrival (others ignored); @return new child count. */
    public int addChild(TreeNode child)
    {
        if (child instanceof ModeOfArrival)
        {
            super.addChild((ModeOfArrival)child);
        }
        return super.getChildInstances().size();
    }
    /** Removes a child if it is a ModeOfArrival (others ignored); @return new child count. */
    public int removeChild(TreeNode child)
    {
        if (child instanceof ModeOfArrival)
        {
            super.removeChild((ModeOfArrival)child);
        }
        return super.getChildInstances().size();
    }
    // Expanded and collapsed states share the single lookup image.
    public Image getExpandedImage()
    {
        return super.getImage();
    }
    public Image getCollapsedImage()
    {
        return super.getImage();
    }

    // --- Negative (system-defined) instances; this lookup has exactly one. ---
    public static ims.framework.IItemCollection getNegativeInstancesAsIItemCollection()
    {
        ModeOfArrivalCollection result = new ModeOfArrivalCollection();
        result.add(AMBULANCE);
        return result;
    }
    public static ModeOfArrival[] getNegativeInstances()
    {
        ModeOfArrival[] instances = new ModeOfArrival[1];
        instances[0] = AMBULANCE;
        return instances;
    }
    public static String[] getNegativeInstanceNames()
    {
        String[] negativeInstances = new String[1];
        negativeInstances[0] = "AMBULANCE";
        return negativeInstances;
    }
    /** Resolves a negative instance by its symbolic name; null if not found or name is null. */
    public static ModeOfArrival getNegativeInstance(String name)
    {
        if(name == null)
            return null;
        String[] negativeInstances = getNegativeInstanceNames();
        for (int i = 0; i < negativeInstances.length; i++)
        {
            if(negativeInstances[i].equals(name))
                return getNegativeInstances()[i];
        }
        return null;
    }
    /** Resolves a negative instance by its (negative) ID; null if not found or id is null. */
    public static ModeOfArrival getNegativeInstance(Integer id)
    {
        if(id == null)
            return null;
        ModeOfArrival[] negativeInstances = getNegativeInstances();
        for (int i = 0; i < negativeInstances.length; i++)
        {
            if(negativeInstances[i].getID() == id)
                return negativeInstances[i];
        }
        return null;
    }
    public int getTypeId()
    {
        return TYPE_ID;
    }
    public static final int TYPE_ID = 1291009;
    public static final ModeOfArrival AMBULANCE = new ModeOfArrival(-2348, "Ambulance", true, null, null, Color.Default);
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/Nursing/src/ims/nursing/forms/enterevaluationdateforcareplandialog/Logic.java | 3711 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Florin Blindu using IMS Development Environment (version 1.80 build 4471.18200)
// Copyright (C) 1995-2012 IMS MAXIMS. All rights reserved.
package ims.nursing.forms.enterevaluationdateforcareplandialog;
import ims.framework.enumerations.DialogResult;
import ims.framework.exceptions.PresentationLogicException;
import ims.framework.utils.Date;
import ims.nursing.vo.NextEvaluationDateTimeVo;
import java.util.ArrayList;
public class Logic extends BaseLogic
{
	private static final long serialVersionUID = 1L;

	/**
	 * OK handler: when the entered date passes validation, publishes it to the
	 * Nursing global context as the new next-evaluation date and closes the
	 * dialog with an OK result; otherwise displays the validation errors.
	 */
	@Override
	protected void onBtnOkClick() throws ims.framework.exceptions.PresentationLogicException
	{
		String[] problems = getUIErrors();
		if (problems != null)
		{
			engine.showErrors(problems);
			return;
		}

		NextEvaluationDateTimeVo nextEvaluation = new NextEvaluationDateTimeVo();
		nextEvaluation.setNextEvaluationDate(form.dteNextEvaluationDate().getValue());
		form.getGlobalContext().Nursing.setNewNextEvaluationDateEntered(nextEvaluation);

		engine.close(DialogResult.OK);
	}

	/**
	 * Validates the next-evaluation date: mandatory, not in the past and, when
	 * the CAREPLAN_NEXT_EVALUTION_DATE_VALIDATION config flag is non-zero, no
	 * more than that many days in the future.
	 * @return the error messages, or null when the input is valid
	 */
	private String[] getUIErrors()
	{
		ArrayList<String> problems = new ArrayList<String>();
		Date nextEvaluationDate = form.dteNextEvaluationDate().getValue();

		if (nextEvaluationDate == null)
			problems.add("Next Evaluation Date is mandatory!");
		else if (nextEvaluationDate.isLessThan(new Date()))
			problems.add("The Next Evaluation Date cannot be in the past!");

		int policyDays = ims.configuration.gen.ConfigFlag.DOM.CAREPLAN_NEXT_EVALUTION_DATE_VALIDATION.getValue();
		if (policyDays != 0)
		{
			Date latestAllowed = new Date();
			latestAllowed.addDay(policyDays);
			if (nextEvaluationDate != null && nextEvaluationDate.isGreaterThan(latestAllowed))
				problems.add("Next Evaluation Date exceeds " + policyDays + " day policy!"); //WDEV-15275
		}

		if (problems.isEmpty())
			return null;
		return problems.toArray(new String[problems.size()]);
	}

	/** Cancel handler: closes the dialog without saving anything. */
	@Override
	protected void onBtnCancelClick() throws PresentationLogicException
	{
		engine.close(DialogResult.CANCEL);
	}
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/RefMan/src/ims/RefMan/forms/booktheatre/Logic.java | 59401 | // This code was generated by Daniel Laffan using IMS Development Environment (version 1.70 build 3397.19184)
// Copyright (C) 1995-2009 IMS MAXIMS plc. All rights reserved.
package ims.RefMan.forms.booktheatre;
import ims.RefMan.forms.booktheatre.GenForm.grdSelectedRow;
import ims.RefMan.forms.booktheatre.GenForm.grdTheatreSessionRow;
import ims.RefMan.vo.FitForSurgeryAssessmentLiteVo;
import ims.RefMan.vo.ReferralBookingVo;
import ims.RefMan.vo.SuitableForSurgeryAssessmentMinVo;
import ims.configuration.AppRight;
import ims.configuration.gen.ConfigFlag;
import ims.core.vo.LocationLiteVo;
import ims.core.vo.LocationLiteVoCollection;
import ims.core.vo.ProcedureLiteVo;
import ims.core.vo.ServiceLiteVo;
import ims.core.vo.ServiceLiteVoCollection;
import ims.core.vo.enums.MosType;
import ims.domain.exceptions.DomainInterfaceException;
import ims.domain.exceptions.StaleObjectException;
import ims.framework.FormName;
import ims.framework.MessageButtons;
import ims.framework.controls.TreeNode;
import ims.framework.enumerations.DialogResult;
import ims.framework.enumerations.FormMode;
import ims.framework.exceptions.CodingRuntimeException;
import ims.framework.exceptions.PresentationLogicException;
import ims.framework.utils.Color;
import ims.framework.utils.Date;
import ims.framework.utils.Time;
import ims.icp.vo.PatientICPFullVo;
import ims.ocrr.vo.OrderInvestigationBookingVo;
import ims.scheduling.vo.Appointment_StatusVo;
import ims.scheduling.vo.Appointment_StatusVoCollection;
import ims.scheduling.vo.BookingAppointmentLiteVo;
import ims.scheduling.vo.BookingAppointmentTheatreVo;
import ims.scheduling.vo.BookingAppointmentTheatreVoCollection;
import ims.scheduling.vo.Booking_AppointmentRefVo;
import ims.scheduling.vo.Booking_AppointmentRefVoCollection;
import ims.scheduling.vo.Sch_BookingTheatreVo;
import ims.scheduling.vo.Sch_SessionRefVo;
import ims.scheduling.vo.SessionTheatreTCISlotLiteVo;
import ims.scheduling.vo.SessionTheatreVo;
import ims.scheduling.vo.SessionTheatreVoCollection;
import ims.scheduling.vo.Session_ListOwnerVo;
import ims.scheduling.vo.TheatreBookingLiteVo;
import ims.scheduling.vo.TheatreProcedureLiteVo;
import ims.scheduling.vo.lookups.LookupHelper;
import ims.scheduling.vo.lookups.Status_Reason;
import ims.scheduling.vo.lookups.TCITime;
import ims.scheduling.vo.lookups.TCITimeCollection;
import ims.scheduling.vo.lookups.TheatreType;
import ims.vo.interfaces.IGenericItem;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class Logic extends BaseLogic
{
private static final long serialVersionUID = 1L;
private static final int CURRENT_APPT_NODE = 1;
private static final int REBOOK_APPT_NODE = 2;
private static final int INV_NODE = 3;
private static final int OTHER_NODE = 4;
//WDEV-11761 private static final int[][] colors = new int[][]{{0,153,0},{0,192,0},{0,255,0},{157,255,60},{128,255,128},{174,255,174},
//WDEV-11761 {205,255,155},{255,255,220},{255,255,183},{255,255,128},{255,255,0},
//WDEV-11761 {209,209,0},{255,128,0},{243,87,20},{255,128,128},{255,89,89},{230,48,48},{255,0,0}};//WDEV-11713
	/**
	 * Form entry point: sets up the static controls, then loads the current
	 * referral's details ready for theatre booking.
	 */
	@Override
	protected void onFormOpen(Object[] args) throws ims.framework.exceptions.PresentationLogicException
	{
		initialise();
		open();
	}
	/**
	 * One-off control setup: dialog mode, consultant search control, calendar
	 * state, combo data and the latest fit-for-surgery anaesthetic type.
	 */
	private void initialise()
	{
		//WDEV-9645
		// When opened as a dialog, start read-only.
		if(engine.isDialog())
		{
			form.setMode(FormMode.VIEW);
		}
		//WDEV-10556
		// Consultant picker searches HCPs only.
		form.ccConsultant().initialize(MosType.HCP);
		clearBookingCalendar();
		form.getContextMenus().RefMan.getBookAppointmentREBOOK_APPTItem().setVisible(false);
		form.bookingCalendar().setEnabled(false);
		form.bookingCalendar().setChecksReadOnly(true);
		form.bookingCalendar().setCurrentMonth(new Date());
		loadServices();
		loadLocations();
		//----wdev-9744--------------
		// Reset the cached appointment-status history used by updateHistoryStatus().
		form.getLocalContext().setApptStatus(null);
		//---------------------------
		// WDEV-12080 - Get latest FitForSurgeryAssessment
		FitForSurgeryAssessmentLiteVo fitForSurgery = domain.getLatestFitForSurgeryAssessment(form.getGlobalContext().RefMan.getCatsReferral());
		form.getLocalContext().setAnaestheticType(fitForSurgery != null ? fitForSurgery.getAnaestheticType() : null);
	}
	/**
	 * Resets the booking calendar: removes all checks, percentages, colours and
	 * no-session/no-slot markers, then marks every day of the currently shown
	 * month as having no sessions (effectively disabling it until a search runs).
	 */
	private void clearBookingCalendar()
	{
		form.bookingCalendar().clearBookedChecks();
		form.bookingCalendar().clearChecks();
		form.bookingCalendar().clearPercentages();
		form.bookingCalendar().clearNoSessionDates();
		form.bookingCalendar().clearNoSlotDates();
		form.bookingCalendar().clearBackColors();
		Date dateStart = new Date(form.bookingCalendar().getCurrentMonth().getYear(), form.bookingCalendar().getCurrentMonth().getMonth(), 1);
		Date dateEnd = new Date(form.bookingCalendar().getCurrentMonth().getYear(), form.bookingCalendar().getCurrentMonth().getMonth(), form.bookingCalendar().getCurrentMonth().getNumberOfDaysInTheMonth());
		// Walk every day of the month; new Date(dateStart) snapshots the mutated loop date.
		while (dateStart.isLessOrEqualThan(dateEnd))
		{
			form.bookingCalendar().addNoSessionDates(new Date(dateStart));
			dateStart.addDay(1);
		}
	}
private void loadServices()
{
form.cmbService().clear();
ServiceLiteVoCollection voCollServices = domain.listActiveCanBeScheduledService();
if (voCollServices != null)
{
for (int i = 0; i < voCollServices.size(); i++)
form.cmbService().newRow(voCollServices.get(i), voCollServices.get(i).getServiceName());
}
}
private void loadLocations()
{
form.cmbLocation().clear();
LocationLiteVoCollection voCollLoc = domain.listLocationLite();
if (voCollLoc != null)
{
for (LocationLiteVo item : voCollLoc)
form.cmbLocation().newRow(item, item.getName());
}
}
	/**
	 * (Re)loads the screen for the current referral: clears the search filters,
	 * shows the referral details, resets the calendar and booking context, and
	 * re-enables the search controls.
	 */
	private void open()
	{
		clearSearchCriteria();
		loadReferralDetails();
		form.bookingCalendar().setEnabled(false);
		form.bookingCalendar().setCurrentMonth(new Date());
		// Fresh, empty booking VO for this session of the screen.
		form.getLocalContext().setSch_Booking(new Sch_BookingTheatreVo());
		clearBookingCalendar();
		form.btnBook().setEnabled(false);
		form.btnCancel().setEnabled(false);
		form.cmbService().setEnabled(true);
		form.qmbProcedure().setEnabled(true);
		form.cmbTheatreType().setEnabled(true);
		form.ccConsultant().setEnabled(true);
		form.imbClear().setEnabled(true);
		// Duration is only editable via the "Change Time Required" checkbox.
		form.intRequiredProcTime().setEnabled(false);
		repopulateScreen();
	}
	/** Blanks every search filter control (procedure, service, consultant, location, type, duration). */
	private void clearSearchCriteria()
	{
		form.qmbProcedure().setValue(null);
		form.cmbService().setValue(null);
		form.ccConsultant().setValue(null);
		form.cmbLocation().setValue(null);
		form.cmbTheatreType().setValue(null);
		form.intRequiredProcTime().setValue(null);
		form.chkChangeTimeRequired().setValue(false);
	}
	/** Fetches the booking view of the current CATS referral and binds it to the controls. */
	private void loadReferralDetails()
	{
		ReferralBookingVo voReferral = domain.getReferralDetail(form.getGlobalContext().RefMan.getCatsReferral());
		populateReferralDetailsControlsFromData(voReferral);
	}
	/**
	 * Binds the referral details to the screen: referral letter dates/service,
	 * KPI labels and dates, the investigations/appointments tree and — when the
	 * patient is fit for surgery — defaults the search filters (procedure,
	 * duration, surgeon, theatre type) from the suitable-for-surgery assessment.
	 * Safe to call with null (just clears the referral panel).
	 */
	private void populateReferralDetailsControlsFromData(ReferralBookingVo voReferral)
	{
		clearReferralDetails();
		if(voReferral == null)
			return;
		//WDEV-8144
		// Remembered for saveICP() later on.
		form.getLocalContext().setReferralCareContext(voReferral.getCareContext());
		if(voReferral.getReferralLetterDetailIsNotNull())
		{
			form.dteReferral().setValue(voReferral.getReferralLetterDetail().getDateOfReferral());
			form.dteReferralLetter().setValue(voReferral.getReferralLetterDetail().getDateReferralReceived());
			if(voReferral.getReferralLetterDetail().getServiceIsNotNull())
			{
				form.cmbReferralService().newRow(voReferral.getReferralLetterDetail().getService(), voReferral.getReferralLetterDetail().getService().getServiceName());
				form.cmbReferralService().setValue(voReferral.getReferralLetterDetail().getService());
			}
			// KPI label shows the RTT breach horizon in weeks when known.
			form.lblEWKPI().setValue(voReferral.getReferralLetterDetail().getDaysToRTTBreachDateIsNotNull()?"End "+voReferral.getReferralLetterDetail().getDaysToRTTBreachDate()/7+" week KPI:":"End week KPI:");//WDEV-11713
			form.dteEndDayKPI().setValue(voReferral.getReferralLetterDetail().getEndXdayKPI());//WDEV-11713
			// Per-day KPI dates drive the calendar colour gradient (addColoursForKPIDate).
			form.getLocalContext().setKPIDays(voReferral.getReferralLetterDetail().getKPIDays());
		}
		form.getLocalContext().setTriageDateTime(voReferral.getTriageDateTime());
		populateInvApptTree(voReferral);
		//WDEV-8144
		if(voReferral.getIsFitForSurgeryIsNotNull() && voReferral.getIsFitForSurgery())
		{
			if(voReferral.getCareContextIsNotNull())
			{
				SuitableForSurgeryAssessmentMinVo voSfSA = domain.getSuitableForSurgeryAssessmentByCareContext(voReferral.getCareContext());
				if(voSfSA != null)
				{
					if(voSfSA.getProcedureIsNotNull() && voSfSA.getProcedure().getProcedureIsNotNull())
					{
						form.qmbProcedure().newRow(voSfSA.getProcedure().getProcedure(), voSfSA.getProcedure().getProcedure().getProcedureName());
						form.qmbProcedure().setValue(voSfSA.getProcedure().getProcedure());
						qmbProcedureValueChanged(); //WDEV-8144
						//WDEV-11777 time required from suitable for surgery needs to be displayed
						if(voSfSA.getRequiredDurationInMinsIsNotNull())
						{
							form.intRequiredProcTime().setValue(voSfSA.getRequiredDurationInMins());
							form.getLocalContext().setSuitableForSurgeryDuration(voSfSA.getRequiredDurationInMins());
						}
					}
					// WDEV-11981 - default here with the value - ignore authoring HCP
					form.ccConsultant().setValue(voSfSA.getNamedSurgeon());
					if(voSfSA.getTheatreTypeIsNotNull())
						form.cmbTheatreType().setValue(voSfSA.getTheatreType());
				}
			}
		}
		//WDEV-8295
		if(voReferral.getReferralLetterDetailIsNotNull())
			form.cmbService().setValue(voReferral.getReferralLetterDetail().getService());
	}
private void populateInvApptTree(ReferralBookingVo voReferral)
{
if(voReferral.getCurrentAppointmentsIsNotNull() && voReferral.getCurrentAppointments().size() > 0)
{
TreeNode apptNode = form.treInvAppts().getNodes().add(CURRENT_APPT_NODE, "Current Appointments");
apptNode.setCollapsedImage(form.getImages().Core.ClosedFolder);
apptNode.setCollapsedImage(form.getImages().Core.OpenFolder);
for(BookingAppointmentLiteVo voAppt : voReferral.getCurrentAppointments())
{
TreeNode node = apptNode.getNodes().add(voAppt, voAppt.toString());
node.setCollapsedImage(form.getImages().Core.Date);
node.setExpandedImage(form.getImages().Core.Date);
}
apptNode.setExpanded(true);
}
if(voReferral.getAppointmentsRequiringRebookIsNotNull() && voReferral.getAppointmentsRequiringRebook().size() > 0)
{
TreeNode rebookNode = form.treInvAppts().getNodes().add(REBOOK_APPT_NODE, "Appointments Requiring a Rebook");
rebookNode.setCollapsedImage(form.getImages().Core.ClosedFolder);
rebookNode.setCollapsedImage(form.getImages().Core.OpenFolder);
for(BookingAppointmentLiteVo voAppt : voReferral.getAppointmentsRequiringRebook())
{
TreeNode node = rebookNode.getNodes().add(voAppt, voAppt.toString());
node.setCollapsedImage(form.getImages().Core.Date);
node.setExpandedImage(form.getImages().Core.Date);
}
rebookNode.setExpanded(true);
}
if(voReferral.getOrdersRequiringApptIsNotNull() && voReferral.getOrdersRequiringAppt().size() > 0)
{
TreeNode invNode = form.treInvAppts().getNodes().add(INV_NODE, "Investigations requiring an Appointment");
invNode.setCollapsedImage(form.getImages().Core.ClosedFolder);
invNode.setCollapsedImage(form.getImages().Core.OpenFolder);
for(OrderInvestigationBookingVo voOrderInv : voReferral.getOrdersRequiringAppt())
{
TreeNode node = invNode.getNodes().add(voOrderInv, voOrderInv.toString());
node.setCollapsedImage(form.getImages().OCRR.Investigation);
node.setExpandedImage(form.getImages().OCRR.Investigation);
}
invNode.setExpanded(true);
}
if(voReferral.getOtherAppointmentsIsNotNull() && voReferral.getOtherAppointments().size() > 0)
{
TreeNode clinNode = form.treInvAppts().getNodes().add(OTHER_NODE, "Clinical Appointments");
clinNode.setCollapsedImage(form.getImages().Core.ClosedFolder);
clinNode.setCollapsedImage(form.getImages().Core.OpenFolder);
for(BookingAppointmentLiteVo voAppt : voReferral.getOtherAppointments())
{
TreeNode node = clinNode.getNodes().add(voAppt, voAppt.toString());
node.setCollapsedImage(form.getImages().Core.Date);
node.setExpandedImage(form.getImages().Core.Date);
node.setEnabled(false);
}
clinNode.setExpanded(true);
}
}
	/** Clears the referral details panel and the cached referral care context. */
	private void clearReferralDetails()
	{
		form.dteReferral().setValue(null);
		form.dteReferralLetter().setValue(null);
		form.dteEndDayKPI().setValue(null);
		form.cmbReferralService().clear();
		form.treInvAppts().clear();
		form.getLocalContext().setReferralCareContext(null);
	}
	/** Clear button: resets the search filters, the grids and the calendar back to today. */
	@Override
	protected void onImbClearClick() throws ims.framework.exceptions.PresentationLogicException
	{
		clearSearchCriteria();
		clearScreen(true);
		form.bookingCalendar().setCurrentMonth(new Date());
		form.getLocalContext().setCurrentDate(null);
		clearBookingCalendar();
	}
@Override
protected void onImbSearchClick() throws ims.framework.exceptions.PresentationLogicException
{
String[] errors = validateSearchCriteria();
if(errors == null)
loadSessions();
else
engine.showErrors(errors);
}
private String[] validateSearchCriteria()
{
List<String> errors = new ArrayList<String>();
if(form.qmbProcedure().getValue() == null)
errors.add("Procedure is mandatory");
if(form.cmbTheatreType().getValue() == null)
errors.add("Type is mandatory");
if(form.chkChangeTimeRequired().getValue())
{
if(form.intRequiredProcTime().getValue() == null)
errors.add("(Change Time Required to:) mins is mandatory");
}
return errors.size() > 0 ? errors.toArray(new String[0]) : null;
}
	/** Runs a fresh session search and re-renders the calendar for the results. */
	private void loadSessions()
	{
		clearBookingCalendar();
		form.bookingCalendar().clearNoSessionDates();
		listSessions();
		clearScreen(true);
		initialiseCalendar();
		form.bookingCalendar().setEnabled(true);
	}
	/**
	 * Queries the domain for theatre sessions matching the current filters for
	 * the displayed month and caches the result in the local context. On the
	 * first search it also jumps the calendar to the month of the first match.
	 */
	private void listSessions()
	{
		Date currentDate = domain.getCurrentMonth(form.cmbService().getValue(), form.cmbLocation().getValue(), form.ccConsultant().getValue(), form.qmbProcedure().getValue(), form.cmbTheatreType().getValue());
		if (currentDate != null)
		{
			// Only reposition the calendar the first time (no current date cached yet).
			if (!form.getLocalContext().getCurrentDateIsNotNull())
			{
				form.bookingCalendar().setCurrentMonth(currentDate);
				form.getLocalContext().setCurrentDate(currentDate);
			}
		}
		Date dateStart = new Date(form.bookingCalendar().getCurrentMonth().getYear(), form.bookingCalendar().getCurrentMonth().getMonth(), 1);
		Date dateEnd = new Date(form.bookingCalendar().getCurrentMonth().getYear(), form.bookingCalendar().getCurrentMonth().getMonth(), form.bookingCalendar().getCurrentMonth().getNumberOfDaysInTheMonth());
		SessionTheatreVoCollection voCollSessionShort = domain.listSession(dateStart, dateEnd, form.cmbService().getValue(), form.cmbLocation().getValue(), form.ccConsultant().getValue(), form.qmbProcedure().getValue(), form.cmbTheatreType().getValue(), form.intRequiredProcTime().getValue(), engine.hasRight(AppRight.CAN_OVERBOOK_THEATRE_APPTS));
		form.getLocalContext().setSessions(voCollSessionShort);
	}
private void initialiseCalendar()
{
SessionTheatreVoCollection voCollSessionShort = form.getLocalContext().getSessions();
if (voCollSessionShort == null)
return;
// go through the sessions for this month and where there are no
// sessions for a given calendar date
// call addNoSessionDates which effectively disableds that date on the
// calendar
Date dateStart = new Date(form.bookingCalendar().getCurrentMonth().getYear(), form.bookingCalendar().getCurrentMonth().getMonth(), 1);
Date dateEnd = new Date(form.bookingCalendar().getCurrentMonth().getYear(), form.bookingCalendar().getCurrentMonth().getMonth(), form.bookingCalendar().getCurrentMonth().getNumberOfDaysInTheMonth());
ArrayList<SessionTheatreVo> sessionList = new ArrayList<SessionTheatreVo>();
for (int i = 0; i < voCollSessionShort.size(); i++)
{
sessionList.add(i, voCollSessionShort.get(i));
}
int count = 0;
Date dateSession = dateStart;
while (dateSession.isLessOrEqualThan(dateEnd))
{
SessionTheatreVo voSessionShort = null;
boolean bFound = false;
for (int i = 0; i < sessionList.size(); i++)
{
voSessionShort = (SessionTheatreVo) sessionList.get(i);
if (voSessionShort.getSessionDate().equals(dateSession))
{
bFound = true;
form.bookingCalendar().addPercentage(dateSession, calculatePercentage(dateSession));
}
}
if (!bFound)
form.bookingCalendar().addNoSessionDates(new Date(dateSession));
else
{
count++;
}
dateSession.addDay(1);
}
addColoursForKPIDate();
}
private int calculatePercentage(Date dateSession)
{
SessionTheatreVoCollection voCollSessionShort = form.getLocalContext().getSessions();
SessionTheatreVoCollection voCollSelectedSessions = new SessionTheatreVoCollection();
// go through Sessions Collection get matching ones for this day
for (int i = 0; i < voCollSessionShort.size(); i++)
{
if (voCollSessionShort.get(i).getSessionDate().equals(dateSession))
voCollSelectedSessions.add(voCollSessionShort.get(i));
}
int totalMins = 0, remainingMins = 0;
for (int i = 0; i < voCollSelectedSessions.size(); i++)
{
totalMins += calculateDuration( voCollSelectedSessions.get(i).getStartTm(), voCollSelectedSessions.get(i).getEndTm());
if(voCollSelectedSessions.get(i).getTheatreProceduresRemainingIsNotNull())
remainingMins += voCollSelectedSessions.get(i).getTheatreProceduresRemaining().getRemainingTimeInMins();
}
int bookedmins = totalMins - remainingMins;
return (int) (bookedmins * 100.0f / totalMins);
}
private int calculateDuration(Time startTm, Time endTm)
{
if(startTm == null || endTm == null)
return 0;
int fromValue = (startTm.getHour() * 60) + startTm.getMinute();
int toValue = (endTm.getHour() * 60) + endTm.getMinute();
return toValue - fromValue;
}
	/**
	 * Colours the per-day KPI dates on the calendar with a green→yellow→red
	 * gradient: one colour per block of 5 dates (a working week), starting pure
	 * green, ending pure red, with pure yellow exactly at the middle block when
	 * the number of blocks is odd. Intermediate blocks ramp red up to 255 first,
	 * then ramp green down, stepping by {@code colorGap} per block (510 colour
	 * units spread over the intermediate blocks).
	 * No-op when there are no KPI dates cached. (Large blocks of superseded,
	 * commented-out implementations for WDEV-11713/WDEV-11761 removed.)
	 */
	private void addColoursForKPIDate()
	{
		//WDEV-8017 - 65 days
		Date[] arrDates = form.getLocalContext().getKPIDays();
		if(arrDates == null || arrDates.length == 0)//WDEV-11713
			return;
		//WDEV-11761 - Start
		// One colour per 5-date block; 510 = full green->yellow->red ramp (255 up + 255 down).
		int noOfColors = (int) Math.ceil((float)arrDates.length/5);
		float colorGap = 510;
		float realValue = 0;
		if (noOfColors > 2)
			colorGap = colorGap/(noOfColors-1);
		if (colorGap<1)
			colorGap=1;
		int[] startColor = new int[]{0,255,0};
		int[] middleColor = new int[]{255,255,0};
		int[] endColor = new int[]{255,0,0};
		int[] currColor = startColor;
		int weekForColor = 0;
		for (int i = 0 ; i < arrDates.length ; i++)
		{
			int colorNo = (int) Math.floor(i/5);
			if (colorNo == 0)
			{
				//First color
				form.bookingCalendar().setBackColor(arrDates[i], Color.fromRGB(startColor[0],startColor[1],startColor[2]));
				continue;
			}
			if (colorNo+1 >= noOfColors)
			{
				//Red - Last color
				form.bookingCalendar().setBackColor(arrDates[i], Color.fromRGB(endColor[0],endColor[1],endColor[2]));
				continue;
			}
			//should show yellow color? (only when an exact middle block exists)
			if ((noOfColors%2==1) && (colorNo == noOfColors/2))
			{
				form.bookingCalendar().setBackColor(arrDates[i], Color.fromRGB(middleColor[0],middleColor[1],middleColor[2]));
				currColor = middleColor;
				realValue = 255;
				continue;
			}
			//Calculate color
			if (weekForColor == colorNo)
			{
				// Same block as the previous date: reuse the colour already computed.
				form.bookingCalendar().setBackColor(arrDates[i], Color.fromRGB(currColor[0],currColor[1],currColor[2]));
			}
			else
			{
				//Calculate next color
				// First ramp the red channel up to 255; any overshoot ("rest")
				// is then spent ramping the green channel down.
				float rest = colorGap;
				if (currColor[0] < 255)
				{
					rest = realValue + colorGap - 255;
					realValue +=colorGap;
					currColor[0] = (int) realValue;
					if (rest <0)
						rest = 0;
					if (currColor[0] > 255)
					{
						realValue = 255;
						currColor[0] = 255;
					}
				}
				if (currColor[0] == 255)
				{
					realValue = realValue - rest;
					currColor[1] = (int)realValue;
					if (currColor[1] < 0)
						currColor[1]=0;
				}
				weekForColor = colorNo;
				form.bookingCalendar().setBackColor(arrDates[i], Color.fromRGB(currColor[0],currColor[1],currColor[2]));
			}
		}
		//WDEV-11761 - End
	}
	/**
	 * Clears the session grid (and, unless {@code leaveInstantiatedData} is
	 * true, the selected-appointments grid and booking comments) and hides the
	 * add-slot context menu item.
	 */
	private void clearScreen(boolean leaveInstantiatedData)
	{
		form.grdTheatreSession().getRows().clear();
		if(!leaveInstantiatedData)
		{
			form.grdSelected().getRows().clear();
			form.txtBookingComments().setValue(null);
		}
		form.getContextMenus().RefMan.getBookAppointmentAddSlotADD_SLOTItem().setVisible(false);
	}
	/**
	 * Book button: runs the KPI-breach and past-date confirmations first; each
	 * returns true when it has shown a yes/no dialog (booking then continues
	 * from the dialog callback instead), otherwise books immediately.
	 */
	@Override
	protected void onBtnBookClick() throws ims.framework.exceptions.PresentationLogicException
	{
		if(!kpiValidateNeeded())
			if(!apptInThePastValidateNeeded())
				doBooking();
	}
private boolean apptInThePastValidateNeeded()
{
boolean isValidationNeeded = false;
if(ConfigFlag.UI.DISPLAY_WARNING_MESSAGE_IF_BOOKING_IN_PAST.getValue())
{
if(isApptInthePast(form.getLocalContext().getSelectedAppointments()))
{
form.getLocalContext().setApptInPastMessageBoxId(engine.showMessage("The selected date of the appointment is in the past. Do you wish to continue to book the appointment", "Booking Historical Appointment?", MessageButtons.YESNO));
isValidationNeeded = true;
}
}
return isValidationNeeded;
}
private boolean isApptInthePast(BookingAppointmentTheatreVoCollection selectedAppointments)
{
if(selectedAppointments != null)
{
for(BookingAppointmentTheatreVo voAppt : selectedAppointments)
{
if(voAppt.getID_Booking_Appointment() == null || voAppt.getRequiresRebookIsNotNull())
if(voAppt.getAppointmentDateIsNotNull() && voAppt.getAppointmentDate().isLessThan(new Date()))
return true;
}
}
return false;
}
/**
*
*/
private boolean kpiValidateNeeded()
{
boolean isValidationNeeded = false;
//WDEV-9539
if(form.dteEndDayKPI().getValue() != null)
{
if(form.getLocalContext().getSelectedAppointmentsIsNotNull())
{
if(form.getLocalContext().getSelectedAppointments().size() == 1)
{
BookingAppointmentTheatreVo voAppt = form.getLocalContext().getSelectedAppointments().get(0);
if(voAppt.getAppointmentDateIsNotNull() && voAppt.getAppointmentDate().isGreaterThan(form.dteEndDayKPI().getValue()))
{
form.getLocalContext().setKPIMessageBoxId(engine.showMessage("Appointment Date is outside the End 13 week KPI date,\nDo you want to continue?", "Warning", MessageButtons.YESNO));
isValidationNeeded = true;
}
}
}
}
return isValidationNeeded;
}
	/**
	 * Performs the booking: finalises the booking VO from the screen (patient,
	 * comments, selected appointments), marks each appointment BOOKED, applies
	 * the custom duration and clears rebook flags where applicable, wires the
	 * TCI slot/time and EROD date per selected row, then saves via the domain.
	 * On success the screen is reset and the Sort Theatre Appointments form is
	 * opened for the booked session. Stale-object and domain errors are shown
	 * to the user and abort the save.
	 */
	private void doBooking()
	{
		Sch_BookingTheatreVo voBooking = form.getLocalContext().getSch_Booking();
		if(voBooking == null)
			engine.showMessage("Nothing to Book");
		// NOTE(review): execution continues after the message above, so a null
		// voBooking would NPE on the next line — presumably never null in practice.
		voBooking.setPatient(form.getLocalContext().getPatient());
		voBooking.setBookingComments(form.txtBookingComments().getValue());
		voBooking.setAppointments(getSelectedValuesFromSelectedGrid(form.getLocalContext().getSelectedAppointments()));
		//WDEV-10182
		// NOTE(review): isRebook is computed but never used below.
		Boolean isRebook = form.getLocalContext().getIsRebookApptSelectedIsNotNull() && form.getLocalContext().getIsRebookApptSelected();
		if (voBooking.getAppointmentsIsNotNull())
		{
			for (int i = 0; i < voBooking.getAppointments().size(); i++)
			{
				voBooking.getAppointments().get(i).setApptStatus(Status_Reason.BOOKED);
				voBooking.getAppointments().get(i).setPatient(form.getGlobalContext().Core.getPatientShort());
				if((form.chkChangeTimeRequired().getValue() || form.getLocalContext().getSuitableForSurgeryDurationIsNotNull()) && form.intRequiredProcTime().getValue() != null) //WDEV-11928
					voBooking.getAppointments().get(i).setCustomProcedureDuration(form.intRequiredProcTime().getValue());
				//if we are rebooking now set the requires rebooking to false for the appt
				if(form.getLocalContext().getIsRebookApptSelectedIsNotNull() && form.getLocalContext().getIsRebookApptSelected())
				{
					if(form.getGlobalContext().Scheduling.getTheatreAppointmentIsNotNull())
					{
						if(voBooking.getAppointments().get(i).equals(form.getGlobalContext().Scheduling.getTheatreAppointment()))
							voBooking.getAppointments().get(i).setRequiresRebook(false);
					}
				}
			}
		}
		else
			return;
		//------------------
		// Per selected grid row: attach the appointment to its TCI slot/time and
		// validate/apply the earliest-offered (EROD) date.
		String[] arrErodErrors = null;
		for(int i=0;i<form.grdSelected().getRows().size();i++)
		{
			grdSelectedRow row = form.grdSelected().getRows().get(i);
			BookingAppointmentTheatreVo appt = row.getValue();
			//WDEV-11777 - setting the appointment for the slot
			if(row.getColComeInTime().getValue() != null)
			{
				if(row.getColComeInTime().getValue() instanceof SessionTheatreTCISlotLiteVo)
					((SessionTheatreTCISlotLiteVo)row.getColComeInTime().getValue()).setAppointment(appt);
				else if(row.getColComeInTime().getValue() instanceof TCITime)
					appt.getTheatreBooking().setTCITime((TCITime) row.getColComeInTime().getValue());
			}
			if(appt != null )
			{
				arrErodErrors = updateAppointmentsWithErodDate(voBooking.getAppointments(), row.getcolErodDate(),row.getcolErod());
				if(arrErodErrors != null)
					break;
			}
		}
		if (arrErodErrors != null)
		{
			engine.showErrors(arrErodErrors) ;
			return;
		}
		//------------------
		//wdev-13765
		// Resolve the theatre type of the session being booked into (if any).
		TheatreType theatreTreatm = null;
		if(form.getGlobalContext().Scheduling.getTheatreSessionIsNotNull())
		{
			SessionTheatreVo tempVO = domain.getSessionTheatre(form.getGlobalContext().Scheduling.getTheatreSession());
			if(tempVO != null)
			{
				theatreTreatm = tempVO.getTheatreType();
			}
		}
		//---------
		try
		{
			// The first selected row's TCI slot (when one was chosen) is passed to the save.
			SessionTheatreTCISlotLiteVo slot = null;
			if(form.grdSelected().getRows().get(0).getColComeInTime().getValue() instanceof SessionTheatreTCISlotLiteVo)
				slot = (SessionTheatreTCISlotLiteVo)form.grdSelected().getRows().get(0).getColComeInTime().getValue();
			voBooking = domain.saveTheatreBooking(voBooking, form.getGlobalContext().RefMan.getCatsReferral(), slot,theatreTreatm); //WDEV-12918 - rebook param not used anyway //wdev-13765
		}
		catch (StaleObjectException e)
		{
			// Record changed elsewhere: warn and reload the screen from scratch.
			engine.showMessage(ConfigFlag.UI.STALE_OBJECT_MESSAGE.getValue());
			clearScreen(false);
			open();
			form.setMode(FormMode.VIEW);
			return;
		}
		catch (DomainInterfaceException e)
		{
			engine.showMessage(e.getMessage());
			return;
		}
		if(!saveICP(voBooking))
			return;
		clearScreen(false);
		clearContexts();
		open();
		form.setMode(FormMode.VIEW);
		//WDEV-8295
		form.getGlobalContext().Scheduling.setAppointmentIds(getApptIds(voBooking));
		//WDEV-7528
		form.getGlobalContext().Scheduling.setTheatreSession(voBooking.getAppointments().get(0).getSession());
		engine.open(form.getForms().Scheduling.SortTheatreAppointments);
	}
//----------------------------wdev-9744
private String[] updateAppointmentsWithErodDate(BookingAppointmentTheatreVoCollection appointments, Date erodDate,Boolean bEROD)
{
ArrayList<String> errors = new ArrayList<String>();
if(appointments == null )
return null;
if(erodDate == null && bEROD == true)
errors.add("'Offered Date' is mandatory when EROD is checked");
else
{
for(BookingAppointmentTheatreVo voAppt : appointments)
{
//if(voAppt.getSessionSlotIsNotNull() && voAppt.getSessionSlot().equals(sessionSlot))
if(erodDate == null || erodDate.isGreaterOrEqualThan(new Date()))
{
voAppt.setEarliestOfferedDate(erodDate);
updateHistoryStatus(voAppt);
}
else
{
errors.add("'Offered Date' must be greater than or equal to Today");
break;
}
}
}
return (String[]) (errors.size() > 0 ? errors.toArray(new String[0]) : null);
}
private void updateHistoryStatus(BookingAppointmentTheatreVo appoitment)
{
if(appoitment == null)
return;
Appointment_StatusVoCollection tempCol = form.getLocalContext().getApptStatus();
for(Appointment_StatusVo statusVo : appoitment.getApptStatusHistory())
{
for(Appointment_StatusVo statusVo1: tempCol)
{
if(statusVo.equals(statusVo1))
statusVo.setEarliestOfferedDate(appoitment.getEarliestOfferedDate());
}
}
}
//---------------------
public java.util.ArrayList getApptIds(Sch_BookingTheatreVo voBooking)
{
if(voBooking.getAppointments() == null || voBooking.getAppointments().size() == 0)
return null;
java.util.ArrayList ids = new java.util.ArrayList();
for(int i=0;i<voBooking.getAppointments().size();i++)
ids.add(voBooking.getAppointments().get(i).getID_Booking_Appointment());
return ids;
}
	/**
	 * Appends the newly booked appointments to the patient's Integrated Care
	 * Pathway (ICP) record for the referral's care context and saves it.
	 *
	 * @param voBooking the saved theatre booking whose appointments are linked
	 * @return true on success (or when the patient has no ICP); false when there
	 *         is no care context, validation fails or the save fails
	 */
	private boolean saveICP(Sch_BookingTheatreVo voBooking)
	{
		//if no CareContext we cant saveICP
		if(form.getLocalContext().getReferralCareContext() == null)
			return false;
		form.getLocalContext().setSelectedAppointments(voBooking.getAppointments());
		PatientICPFullVo voICP = domain.getPatientICP(form.getLocalContext().getReferralCareContext());
		if (voICP != null)
		{
			// Merge the new appointments into the ICP's existing appointment list
			Booking_AppointmentRefVoCollection voApptsColl = voICP.getAppointments();
			if (voApptsColl == null)
				voApptsColl = new Booking_AppointmentRefVoCollection();
			BookingAppointmentTheatreVoCollection voCollBookAppointments = form.getLocalContext().getSelectedAppointments();
			for (int i = 0; i < voCollBookAppointments.size(); i++)
				voApptsColl.add(voCollBookAppointments.get(i));
			voICP.setAppointments(voApptsColl);
			String[] strErr = voICP.validate();
			if (strErr != null)
			{
				engine.showErrors(strErr);
				return false;
			}
			try
			{
				voICP = domain.savePatientICP(voICP);
			}
			catch (DomainInterfaceException e)
			{
				engine.showMessage(e.getMessage());
				return false;
			}
			catch (StaleObjectException e)
			{
				// Record changed elsewhere - warn the user, reset and reload the screen
				engine.showMessage(ConfigFlag.UI.STALE_OBJECT_MESSAGE.getValue());
				clearScreen(false);
				open();
				form.setMode(FormMode.VIEW);
				return false;
			}
		}
		return true;
	}
	/**
	 * Transfers the user's edits in the 'selected appointments' grid (come-in
	 * time, consultant) onto the corresponding appointment VOs, validating that
	 * the mandatory fields have been supplied.
	 *
	 * @param selectedAppointments the appointments chosen for booking (may be null)
	 * @return the updated collection, or null when validation errors were shown
	 */
	private BookingAppointmentTheatreVoCollection getSelectedValuesFromSelectedGrid(BookingAppointmentTheatreVoCollection selectedAppointments)
	{
		List<String> errors = new ArrayList<String>();
		if(selectedAppointments != null)
		{
			for(BookingAppointmentTheatreVo voTheatreAppt : selectedAppointments)
			{
				for(int i=0;i<form.grdSelected().getRows().size();i++)
				{
					grdSelectedRow row = form.grdSelected().getRows().get(i);
					if(row.getValue().equals(voTheatreAppt))
					{
						if(row.getColComeInTime().getValue() == null)
							errors.add("'Come in Time' is mandatory");
						else
						{
							// A theatre TCI slot carries the appointment's actual start time
							if(row.getColComeInTime().getValue() instanceof SessionTheatreTCISlotLiteVo)
							{
								voTheatreAppt.setApptStartTime(((SessionTheatreTCISlotLiteVo)row.getColComeInTime().getValue()).getToComeInTime());
								//WDEV-11887
								if(voTheatreAppt.getCurrentStatusRecordIsNotNull())
									voTheatreAppt.getCurrentStatusRecord().setApptTime(voTheatreAppt.getApptStartTime());
							}
						}
						if(row.getColConsultant().getValue() != null)
							voTheatreAppt.getTheatreBooking().setConsultant(((Session_ListOwnerVo) row.getColConsultant().getValue()).getHcp());
					}
					// EROD ticked without an offered date is a validation failure
					if(row.getcolErod() && row.getcolErodDate() == null)
						errors.add("'Offered Date' is mandatory");
				}
			}
		}
		if(errors.size() > 0)
		{
			engine.showErrors("Validation Errors", errors.toArray(new String[0]));
			return null;
		}
		return selectedAppointments;
	}
	/**
	 * Abandons the current booking: clears all booking context and screen state,
	 * reloads the screen and returns the form to view mode.
	 */
	@Override
	protected void onBtnCancelClick() throws ims.framework.exceptions.PresentationLogicException
	{
		clearContexts();
		clearScreen(false);
		open();
		form.setMode(FormMode.VIEW);
	}
	/**
	 * Resets all local and global context state used by the booking workflow so a
	 * subsequent booking/rebooking starts from a clean slate.
	 */
	private void clearContexts()
	{
		form.getLocalContext().setIsRebookApptSelected(null); //aciubotaru - WDEV-7860
		form.getLocalContext().setSch_Booking(null);
		form.getLocalContext().setSelectedAppointments(null);
		form.getLocalContext().setSelectedSessions(null);
		form.getLocalContext().setSuitableForSurgeryDuration(null); //WDEV-11928
		form.getGlobalContext().Scheduling.setTheatreAppointment(null);
	}
	/**
	 * Handles context menu actions on the appointments tree. Only the 'Rebook
	 * Appointment' item is handled: it loads the full appointment record, seeds
	 * the search criteria contexts from it and re-initialises the screen in
	 * rebook mode.
	 */
	@Override
	protected void onContextMenuItemClick(int menuItemID, ims.framework.Control sender) throws ims.framework.exceptions.PresentationLogicException
	{
		switch(menuItemID)
		{
			case GenForm.ContextMenus.RefManNamespace.BookAppointment.REBOOK_APPT:
				if(form.treInvAppts().getValue() instanceof BookingAppointmentLiteVo)
				{
					//get the full appointment record to be rebooked and set the appropriate context to reinitialise the screen
					BookingAppointmentTheatreVo voAppt = domain.getBookingAppointment((Booking_AppointmentRefVo)form.treInvAppts().getValue());
					form.getGlobalContext().Scheduling.setTheatreAppointment(voAppt);
					if(voAppt != null)
					{
						// Seed search criteria (procedure, consultant, theatre type) from the old appointment
						if(voAppt.getTheatreBookingIsNotNull())
						{
							form.getGlobalContext().Scheduling.setTheatreProcedure(voAppt.getTheatreBooking().getProcedure());
							form.getGlobalContext().Scheduling.setTheatreConsultant(voAppt.getTheatreBooking().getConsultant());
						}
						if(voAppt.getSessionIsNotNull())
							form.getGlobalContext().Scheduling.setTheatreType(voAppt.getSession().getTheatreType());
						reInitialize();
					}
				}
			break;
			default:
		}
	}
	/**
	 * Prepares the screen for a rebook: hides the rebook menu item, locks the
	 * controls, resets the calendar to today and repopulates the search criteria
	 * from the appointment held in the global context.
	 */
	private void reInitialize()
	{
		form.getContextMenus().RefMan.getBookAppointmentREBOOK_APPTItem().setVisible(false);
		form.btnBook().setEnabled(false);
		form.btnCancel().setEnabled(true);
		form.bookingCalendar().setEnabled(false);
		form.bookingCalendar().setCurrentMonth(new Date());
		form.txtBookingComments().setValue(null);
		clearSearchCriteria();
		clearBookingCalendar();
		repopulateScreen();
	}
	/**
	 * Repopulates the search criteria and session list from the appointment being
	 * rebooked (held in the Scheduling global context), then locks the criteria
	 * controls so only a new slot can be chosen.
	 */
	private void repopulateScreen()
	{
		if(form.getGlobalContext().Scheduling.getTheatreAppointment() == null)
			return;
		//SITE-508
		form.getGlobalContext().Scheduling.setTheatreAppointment(domain.getBookingAppointment(form.getGlobalContext().Scheduling.getTheatreAppointment()));
		//store the same booking record
		Sch_BookingTheatreVo voBooking = domain.getSch_BookingByAppt(form.getGlobalContext().Scheduling.getTheatreAppointment());
		form.getLocalContext().setSch_Booking(voBooking);
		if(voBooking != null)
			form.txtBookingComments().setValue(voBooking.getBookingComments());
		form.cmbService().setValue(form.getGlobalContext().Scheduling.getTheatreService());
		//service - add a row on the fly when the context service isn't already in the combo
		if(form.cmbService().getValue() == null && form.getGlobalContext().Scheduling.getTheatreService() instanceof ServiceLiteVo)
		{
			form.cmbService().newRow(form.getGlobalContext().Scheduling.getTheatreService(), ((ServiceLiteVo)form.getGlobalContext().Scheduling.getTheatreService()).getServiceName());
			form.cmbService().setValue(form.getGlobalContext().Scheduling.getTheatreService());
		}
		//procedure
		if(form.getGlobalContext().Scheduling.getTheatreProcedureIsNotNull())
		{
			form.qmbProcedure().newRow(form.getGlobalContext().Scheduling.getTheatreProcedure(), form.getGlobalContext().Scheduling.getTheatreProcedure().getProcedureName());
			form.qmbProcedure().setValue(form.getGlobalContext().Scheduling.getTheatreProcedure());
			qmbProcedureValueChanged();
		}
		//WDEV-9643 custom procedure length
		if(form.getGlobalContext().Scheduling.getTheatreAppointmentIsNotNull())
		{
			if(form.getGlobalContext().Scheduling.getTheatreAppointment().getCustomProcedureDurationIsNotNull())
			{
				form.chkChangeTimeRequired().setValue(true);
				form.intRequiredProcTime().setEnabled(true);
				form.intRequiredProcTime().setValue(form.getGlobalContext().Scheduling.getTheatreAppointment().getCustomProcedureDuration());
			}
		}
		//consultant
		if(form.getGlobalContext().Scheduling.getTheatreConsultantIsNotNull())
			form.ccConsultant().setValue(form.getGlobalContext().Scheduling.getTheatreConsultant());
		//Theatre Type
		if(form.getGlobalContext().Scheduling.getTheatreTypeIsNotNull())
			form.cmbTheatreType().setValue(form.getGlobalContext().Scheduling.getTheatreType());
		if(validateSearchCriteria() == null)
		{
			loadSessions();
			//set a flag to say that the appt to be rebooked hasnt yet been selected
			form.getLocalContext().setIsRebookApptSelected(false);
			form.setMode(FormMode.EDIT);
			form.bookingCalendar().setSelectedDay(new Date());
			addTheatreSessions(new Date());
			// Lock the criteria controls - only the slot selection may change on a rebook
			form.btnCancel().setEnabled(true);
			form.cmbService().setEnabled(false);
			form.qmbProcedure().setEnabled(false);
			form.cmbTheatreType().setEnabled(false);
			form.ccConsultant().setEnabled(false);
			form.imbClear().setEnabled(false);
		}
	}
	/**
	 * Calendar day clicked - list the theatre sessions for that day.
	 */
	@Override
	protected void onBookingCalendarDateSelected(Date date) throws PresentationLogicException
	{
		addTheatreSessions(date);
	}
private void addTheatreSessions(Date selDate)
{
// go through sessions getting ones for the selected date
SessionTheatreVoCollection voCollSessionShort = form.getLocalContext().getSessions();
if (voCollSessionShort == null)
return;
SessionTheatreVoCollection voCollSelectedSessions = new SessionTheatreVoCollection();
for (int i = 0; i < voCollSessionShort.size(); i++)
{
if (voCollSessionShort.get(i).getSessionDate().equals(selDate))
voCollSelectedSessions.add(voCollSessionShort.get(i));
}
form.getLocalContext().setSelectedSessions(voCollSelectedSessions);
loadTheatreSlots();
}
	/**
	 * Builds one grid row per remaining (procedure, session) theatre slot for the
	 * sessions selected on the calendar day: shows date, consultants, booked
	 * count and remaining time, colours full sessions red and disables rows the
	 * logged-in role may not book into.
	 */
	private void loadTheatreSlots()
	{
		// WDEV-12080 - Get Anaesthetic type from latest FitForSurgery record
		SessionTheatreVoCollection voCollSelectedSessions = form.getLocalContext().getSelectedSessions();
		if (voCollSelectedSessions == null || voCollSelectedSessions.size() == 0)
			return;
		for(SessionTheatreVo voSession : voCollSelectedSessions)
		{
			boolean hasBookingRights = doesRoleHaveBookingRightsForSession(voSession);
			if(voSession.getTheatreProceduresRemainingIsNotNull())
			{
				// Optimization - DO NOT put a domain call inside a for when it's not needed
				String noBooked = domain.countNonCancelledAppointmentsForSession(voSession).toString();
				for(TheatreProcedureLiteVo voTheatreProcedure : voSession.getTheatreProceduresRemaining().getProcedureDetails())
				{
					// Skip rows already in the grid and procedures not matching the search
					if(isTheatreSlotAlreadyAdded(voTheatreProcedure.getProcedure(), voSession) || !isProcedureSelectedInSearch(voTheatreProcedure))
						continue;
					grdTheatreSessionRow row = form.grdTheatreSession().getRows().newRow();
					row.setColDate(voSession.getSessionDate());
					row.setColConsultant(getConsultantsListString(voSession));
					//WDEV-11777 add count of appts
					row.setColNoBooked(noBooked);
					row.setColTime(voSession.getTheatreProceduresRemainingIsNotNull() && voSession.getTheatreProceduresRemaining().getRemainingTimeInMinsIsNotNull() ? voSession.getTheatreProceduresRemaining().formatTimeRemaining() : "");
					// Highlight sessions with no remaining time in red
					if(row.getColTime().equals("00hrs 00mins "))
						row.setTextColor(Color.Red);
					row.setColLocation(voSession.getSchLocationIsNotNull() ? voSession.getSchLocation().getName() : "");
					row.setColSelectReadOnly(!hasBookingRights);
					if(!hasBookingRights)
						row.setTooltipForColSelect("Role does not have rights to book in this Session");
					TheatreBookingLiteVo voBooking = new TheatreBookingLiteVo();
					voBooking.setSession(voSession);
					voBooking.setProcedure(voTheatreProcedure.getProcedure());
					voBooking.setAnaestheticType(form.getLocalContext().getAnaestheticType()); // WDEV-12080 - Set anaesthetic type to Theatre Booking record
					row.setValue(voBooking);
				}
			}
		}
	}
private boolean isProcedureSelectedInSearch(TheatreProcedureLiteVo voTheatreProcedure)
{
if (voTheatreProcedure == null)
throw new CodingRuntimeException("voTheatreProcedure is null in method isProcedureSelectedInSearch");
if(form.qmbProcedure().getValue() != null)
{
if(form.qmbProcedure().getValue().getIGenericItemInfoID() != null && voTheatreProcedure.getProcedureIsNotNull() && voTheatreProcedure.getProcedure().getID_ProcedureIsNotNull())
if(form.qmbProcedure().getValue().getIGenericItemInfoID().equals(voTheatreProcedure.getProcedure().getID_Procedure()))
return true;
}
return false;
}
private boolean isTheatreSlotAlreadyAdded(ProcedureLiteVo voProcedure, SessionTheatreVo voSession)
{
for(int i=0;i<form.grdTheatreSession().getRows().size();i++)
{
grdTheatreSessionRow row = form.grdTheatreSession().getRows().get(i);
if(row.getValue() != null)
{
if(row.getValue().getProcedureIsNotNull() && row.getValue().getSessionIsNotNull())
{
if(voProcedure != null && voSession != null)
{
if(row.getValue().getProcedure().equals(voProcedure) && row.getValue().getSession().equals(voSession))
return true;
}
}
}
}
return false;
}
private String getConsultantsListString(SessionTheatreVo voSession)
{
if (voSession == null || voSession.getListOwners() == null)
return "";
String strConsultant = "";
boolean loopAgain = false;
for(int i=0;i<voSession.getListOwners().size();i++)
{
Session_ListOwnerVo voCons = voSession.getListOwners().get(i);
if(voCons.getHcpIsNotNull())
strConsultant += voCons.getHcp().toString();
loopAgain = voSession.getListOwners().size() - i > 1;
if(loopAgain)
strConsultant += ",";
}
return strConsultant;
}
private Boolean doesRoleHaveBookingRightsForSession(Sch_SessionRefVo voSession)
{
Map map = form.getLocalContext().getSessionBookingRights();
if(map == null)
map = new HashMap();
Object entry = map.get(voSession.getID_Sch_Session());
if(entry == null)
{
map.put(voSession.getID_Sch_Session(), domain.hasBookingRights(engine.getLoggedInRole(), voSession));
entry = map.get(voSession.getID_Sch_Session());
}
form.getLocalContext().setSessionBookingRights(map);
return (Boolean) entry;
}
	/**
	 * Calendar month changed - re-run the session search for the new month and
	 * re-mark which days have sessions, provided the search criteria are valid.
	 */
	@Override
	protected void onBookingCalendarMonthSelected(Date date) throws PresentationLogicException
	{
		if(validateSearchCriteria() == null)
		{
			clearBookingCalendar();
			listSessions();
			form.bookingCalendar().clearNoSessionDates();
			initialiseCalendar();
		}
	}
	/**
	 * Handles ticking/unticking a theatre slot row. Ticking validates the booking
	 * (overbooking rules), enforces single selection, and creates/replaces the
	 * appointment in the selected-appointments collection - with special handling
	 * when a rebook is in progress (only one appointment, cloned from the original).
	 * Unticking removes the matching appointment. Finally the selected grid is
	 * reloaded, preserving any EROD values already entered.
	 */
	@Override
	protected void onGrdTheatreSessionGridCheckBoxClicked(int column, grdTheatreSessionRow row, boolean isChecked) throws PresentationLogicException
	{
		//WDEV-9539
		boolean isErod = false;
		Date erodDate = null;
		if(form.grdSelected().getRows().size() > 0)
		{
			//hold onto this selection and add it into any new selection
			isErod = form.grdSelected().getRows().get(0).getcolErod();
			erodDate = form.grdSelected().getRows().get(0).getcolErodDate();
		}
		BookingAppointmentTheatreVoCollection voCollBookAppointments = form.getLocalContext().getSelectedAppointments();
		if (voCollBookAppointments == null)
			voCollBookAppointments = new BookingAppointmentTheatreVoCollection();
		form.getGlobalContext().Scheduling.setTheatreSession(null);
		if (row.getColSelect() == true)
		{
			form.getGlobalContext().Scheduling.setTheatreSession(row.getValue().getSession());
			//overbooking - reject the tick when limits or remaining time forbid it
			String message = allowBooking(row.getValue().getSession(), row.getValue());
			if(message != null)
			{
				engine.showErrors(new String[]{message});
				row.setColSelect(false);
				loadAppointmentsGrid(voCollBookAppointments, isErod, erodDate);
				return;
			}
			clearAllOtherSelections(row);
			///////////////////////////////////////////
			//rebook///////////////////////////////////
			if(form.getLocalContext().getIsRebookApptSelectedIsNotNull() && !form.getLocalContext().getIsRebookApptSelected().booleanValue())
			{
				//rebook appt already selected
				form.getLocalContext().setIsRebookApptSelected(true);
				if(!form.getGlobalContext().Scheduling.getTheatreAppointmentIsNotNull())
					throw new CodingRuntimeException("Rebook appointment not set in Global Context");
			}
			//end rebook/////////////////////////////////
			/////////////////////////////////////////////
			boolean bInListAlready = false;
			if (row.getValue() instanceof TheatreBookingLiteVo)
			{
				//when rebooking only one appt can be booked this replaces the one in the list with the one selected in the slots grid
				if(form.getLocalContext().getIsRebookApptSelectedIsNotNull() && form.getLocalContext().getIsRebookApptSelected().booleanValue())
				{
					voCollBookAppointments.clear();
					voCollBookAppointments.add(createAppointment((BookingAppointmentTheatreVo) form.getGlobalContext().Scheduling.getTheatreAppointment().clone(), row.getValue()));
					loadAppointmentsGrid(voCollBookAppointments,isErod,erodDate);
					return;
				}
				// Check whether this procedure/session pair is already in the selection
				for (int i = 0; i < voCollBookAppointments.size(); i++)
				{
					TheatreBookingLiteVo voTheatreBooking = voCollBookAppointments.get(i).getTheatreBooking();
					if(voTheatreBooking != null && voTheatreBooking.getProcedureIsNotNull() && voTheatreBooking.getSessionIsNotNull())
					{
						if (voTheatreBooking.getProcedure().equals(row.getValue().getProcedure()) && voTheatreBooking.getSession().equals(row.getValue().getSession()))
							bInListAlready = true;
					}
				}
				if (!bInListAlready && voCollBookAppointments.size() == 0)
					voCollBookAppointments.add(createAppointment(null, row.getValue()));
				else
				{
					// Replace the previous selection - only one appointment may be selected
					voCollBookAppointments.clear();
					voCollBookAppointments.add(createAppointment(form.getGlobalContext().Scheduling.getTheatreAppointment(), row.getValue()));
					loadAppointmentsGrid(voCollBookAppointments, isErod, erodDate);
				}
			}
		}
		else
		{
			if (row.getValue() instanceof TheatreBookingLiteVo)
			{
				//WDEV-10119,WDEV-10058,WDEV-9012
				if(form.getLocalContext().getIsRebookApptSelectedIsNotNull())
					form.getLocalContext().setIsRebookApptSelected(false);
				// Remove the matching appointment from the selection
				for (int i = 0; i < voCollBookAppointments.size(); i++)
				{
					TheatreBookingLiteVo voTheatreBooking = voCollBookAppointments.get(i).getTheatreBooking();
					if(voTheatreBooking != null && voTheatreBooking.getProcedureIsNotNull() && voTheatreBooking.getSessionIsNotNull())
					{
						if (voTheatreBooking.getProcedure().equals(row.getValue().getProcedure()) && voTheatreBooking.getSession().equals(row.getValue().getSession()))
							voCollBookAppointments.remove(voCollBookAppointments.get(i));
					}
				}
			}
		}
		loadAppointmentsGrid(voCollBookAppointments,isErod,erodDate);
		if (isChecked)
		{
			form.setMode(FormMode.EDIT);
			form.btnCancel().setEnabled(true);
		}
	}
	/**
	 * Checks whether the selected procedure may be booked into the session:
	 * enforces the per-procedure limit (if any) and the session's remaining time,
	 * unless the user holds the theatre overbook right.
	 *
	 * @return an error message when booking is not allowed, otherwise null
	 */
	private String allowBooking(SessionTheatreVo session, TheatreBookingLiteVo voBooking)
	{
		//if we have an overbooking right we disregard all the checks
		if(engine.hasRight(AppRight.CAN_OVERBOOK_THEATRE_APPTS))
			return null;
		if(session != null && voBooking != null)
		{
			if(session.getTheatreProceduresRemainingIsNotNull() && voBooking.getProcedureIsNotNull())
			{
				//if the procedure is limited and the number of procedures left is 0
				for(TheatreProcedureLiteVo voTheatreProc : session.getTheatreProceduresRemaining().getProcedureDetails())
				{
					if(voTheatreProc.getProcedure().equals(voBooking.getProcedure()))
					{
						if(voTheatreProc.getIsLimitedIsNotNull() && voTheatreProc.getIsLimited() && voTheatreProc.getNumberOfProceduresLeftIsNotNull() && voTheatreProc.getNumberOfProceduresLeft() == 0)
							return "Limit has been reached for this procedure.";
					}
				}
				// Pick the duration to validate against: a user supplied custom duration
				// wins, then the fit-for-surgery duration, then the procedure's default
				Integer customProcedureDuration = null;
				if(form.chkChangeTimeRequired().getValue())
					customProcedureDuration = form.intRequiredProcTime().getValue();
				Integer durationToUseForValidation = null;
				if(customProcedureDuration != null)
					durationToUseForValidation = customProcedureDuration;
				//WDEV-11977
				else if(form.getLocalContext().getSuitableForSurgeryDurationIsNotNull())
					durationToUseForValidation = form.getLocalContext().getSuitableForSurgeryDuration();
				else
					durationToUseForValidation = voBooking.getProcedure().getDurationInMins();
				//if the time required for the procedure is not available at the TheatreSlot Level
				if(session.getTheatreProceduresRemaining().getRemainingTimeInMinsIsNotNull() && durationToUseForValidation != null)
				{
					if(session.getTheatreProceduresRemaining().getRemainingTimeInMins() < durationToUseForValidation)
					{
						return "Not enough time remaining for this procedure.";
					}
				}
			}
		}
		return null;
	}
protected void onTreInvApptsTreeViewSelectionChanged(TreeNode node) throws PresentationLogicException
{
form.getContextMenus().RefMan.getBookAppointmentREBOOK_APPTItem().setVisible(false);
if(form.getMode().equals(FormMode.VIEW))
{
if(node != null)
{
if(node.getValue() instanceof BookingAppointmentLiteVo && node.getParent().equals(form.treInvAppts().getNodeByValue(REBOOK_APPT_NODE)))
form.getContextMenus().RefMan.getBookAppointmentREBOOK_APPTItem().setVisible(true);
}
}
}
private void clearAllOtherSelections(grdTheatreSessionRow row)
{
//clear the only other appt entry as only allow rebook one appt
for(int i=0;i<form.grdTheatreSession().getRows().size();i++)
{
grdTheatreSessionRow tRow = form.grdTheatreSession().getRows().get(i);
if(!tRow.isReadOnly() && !tRow.getValue().equals(row.getValue()) && tRow.getColSelect())
tRow.setColSelect(false);
}
}
	/**
	 * Rebuilds the 'selected appointments' grid from the given collection:
	 * populates location/session/date/procedure columns, the EROD checkbox and
	 * date, the come-in-time combo (open theatre TCI slots when the session has
	 * them, otherwise the legacy TCI lookup) and the consultant combo. Enables
	 * the Book button only when at least one row exists.
	 */
	private void loadAppointmentsGrid(BookingAppointmentTheatreVoCollection voCollBookAppointments, boolean isErod, Date erodDate)
	{
		form.getLocalContext().setSelectedAppointments(voCollBookAppointments);
		form.grdSelected().getRows().clear();
		grdSelectedRow aRow = null;
		BookingAppointmentTheatreVo voBookAppt = null;
		for (int i = 0; i < voCollBookAppointments.size(); i++)
		{
			aRow = form.grdSelected().getRows().newRow();
			voBookAppt = voCollBookAppointments.get(i);
			aRow.setColLocation(voBookAppt.getSessionIsNotNull() && voBookAppt.getSession().getSchLocationIsNotNull() ? voBookAppt.getSession().getSchLocation().getName() : null);
			aRow.setColSession(voBookAppt.getSession().getName());
			//WDEV-9539 - screen only ever allows one appt selection
			aRow.setcolErod(voBookAppt.getEarliestOfferedDate() != null/*isErod*/);
			aRow.setcolErodDate(voBookAppt.getEarliestOfferedDate()/*erodDate*/);
			aRow.setcolErodDateReadOnly(voBookAppt.getEarliestOfferedDate() == null/*true*//*!isErod*/);
			//for this session we need to get all the session theatre slots and display in the combo
			if(voBookAppt.getSessionIsNotNull() && voBookAppt.getSession().getTheatreSlotsIsNotNull() && voBookAppt.getSession().getTheatreSlots().size() > 0)
			{
				voBookAppt.getSession().getTheatreSlots().sort();
				for(SessionTheatreTCISlotLiteVo voSessTheatreSlot : voBookAppt.getSession().getTheatreSlots())
				{
					//only load slots that have no appointment already
					if(voSessTheatreSlot.getAppointment() == null && (voSessTheatreSlot.getStatusIsNotNull() && voSessTheatreSlot.getStatus().equals(Status_Reason.SLOTOPENED)))
						aRow.getColComeInTime().newRow(voSessTheatreSlot, voSessTheatreSlot.getToComeInTime().toString());
				}
			}
			else
			{
				//load tci lookup (superceded implementation)
				TCITimeCollection collTCI = LookupHelper.getTCITime(domain.getLookupService());
				for(int p=0;p<collTCI.size();p++)
				{
					aRow.getColComeInTime().newRow(collTCI.get(p), collTCI.get(p).getText());
				}
			}
			if(voBookAppt.getSession().getListOwnersIsNotNull())
			{
				// Offer every list owner; pre-select when there is exactly one
				for(Session_ListOwnerVo voListOwner : voBookAppt.getSession().getListOwners())
					aRow.getColConsultant().newRow(voListOwner, voListOwner.getHcp().toString());
				if(voBookAppt.getSession().getListOwners().size() == 1)
					aRow.getColConsultant().setValue(voBookAppt.getSession().getListOwners().get(0));
			}
			if(voBookAppt.getTheatreBookingIsNotNull())
				aRow.setColProcedure(voBookAppt.getTheatreBooking().getProcedure());
			if(voBookAppt.getAppointmentDateIsNotNull())
				aRow.setColDate(voBookAppt.getAppointmentDate().toString());
			aRow.setValue(voBookAppt);
		}
		form.grdSelected().setReadOnly(false);
		if (form.grdSelected().getRows().size() > 0)
			form.btnBook().setEnabled(true);
		else
			form.btnBook().setEnabled(false);
	}
private BookingAppointmentTheatreVo createAppointment(BookingAppointmentTheatreVo voAppt, TheatreBookingLiteVo value)
{
BookingAppointmentTheatreVo voBookAppt = voAppt;
if(voBookAppt == null)
voBookAppt = new BookingAppointmentTheatreVo();
voBookAppt.setSession(value.getSession());
voBookAppt.setAppointmentDate(value.getSession().getSessionDate());
voBookAppt.setApptStatus(Status_Reason.BOOKED);
voBookAppt.setTheatreBooking(value);
//WDEV-6049
Appointment_StatusVo voStatus = new Appointment_StatusVo();
voStatus.setApptDate(voBookAppt.getAppointmentDate());
voStatus.setApptTime(voBookAppt.getApptStartTime());
voStatus.setStatus(voBookAppt.getApptStatus());
//---wdev-9744
voStatus.setEarliestOfferedDate(voBookAppt.getEarliestOfferedDate());
//------
voBookAppt.setCurrentStatusRecord(voStatus);
if(voBookAppt.getApptStatusHistory() == null)
voBookAppt.setApptStatusHistory(new Appointment_StatusVoCollection());
voBookAppt.getApptStatusHistory().add(voStatus);
//---------------------------------------------wdev-9744
Appointment_StatusVoCollection appstatusCol;
if(form.getLocalContext().getApptStatus() == null)
{
form.getLocalContext().setApptStatus(new Appointment_StatusVoCollection());
appstatusCol = form.getLocalContext().getApptStatus();
}
else
appstatusCol = form.getLocalContext().getApptStatus();
appstatusCol.add(voStatus);
form.getLocalContext().setApptStatus(appstatusCol);
//-----------------------------------------------
return voBookAppt;
}
	/**
	 * Procedure search text submitted: looks up matching procedures, auto-selects
	 * a single match, opens the dropdown for multiple matches and reports when
	 * nothing matched.
	 */
	@Override
	protected void onQmbProcedureTextSubmited(String value) throws PresentationLogicException
	{
		form.qmbProcedure().clear();
		IGenericItem[] procedures = domain.listProcedures(value);
		if(procedures != null)
		{
			// Only items with a display name are offered in the combo
			for (int i = 0; i < procedures.length; i++)
			{
				if (procedures[i].getIGenericItemInfoName() != null)
					form.qmbProcedure().newRow(procedures[i], procedures[i].getIGenericItemInfoName());
			}
			if(procedures.length == 1)
			{
				form.qmbProcedure().setValue(procedures[0]);
				qmbProcedureValueChanged(); //WDEV-11777
			}
			else if(procedures.length > 1)
				form.qmbProcedure().showOpened();
			else
				engine.showErrors(new String[]{"No matching records found"});
		}
	}
	/**
	 * Closes the dialog, clearing booking context first so stale state does not
	 * leak into the next use of the screen (WDEV-9634).
	 */
	@Override
	protected void onBtnCloseClick() throws PresentationLogicException
	{
		//WDEV-9634
		clearContexts();
		engine.close(DialogResult.OK);
	}
	/**
	 * Shows the Close button only when running as a dialog and no booking is in
	 * progress (the Book button being enabled indicates a pending booking).
	 */
	@Override
	protected void onFormModeChanged()
	{
		form.btnClose().setVisible(engine.isDialog() && !form.btnBook().isEnabled());
	}
	/**
	 * Opens the theatre list ordering screen for the whole session, clearing any
	 * specific appointment id selection first.
	 */
	@Override
	protected void onBtnViewTheatreListOrderClick() throws PresentationLogicException
	{
		form.getGlobalContext().Scheduling.setAppointmentIds(null);
		engine.open(form.getForms().Scheduling.SortTheatreAppointments);
	}
@Override
protected void onChkChangeTimeRequiredValueChanged() throws PresentationLogicException
{
if(form.chkChangeTimeRequired().getValue())
form.intRequiredProcTime().setEnabled(true);
else
form.intRequiredProcTime().setEnabled(false);
}
	/**
	 * Procedure search selection changed - delegate to the shared handler.
	 */
	@Override
	protected void onQmbProcedureValueChanged() throws PresentationLogicException
	{
		qmbProcedureValueChanged();
	}
/**
*
*/
private void qmbProcedureValueChanged()
{
form.intRequiredProcTime().setValue(null);
if(form.qmbProcedure().getValue() == null)
return;
//WDEV-9539
if(form.qmbProcedure().getValue() instanceof ProcedureLiteVo)
form.intRequiredProcTime().setValue(((ProcedureLiteVo)form.qmbProcedure().getValue()).getDurationInMins());
}
@Override
protected void onMessageBoxClosed(int messageBoxId, DialogResult result) throws PresentationLogicException
{
if(form.getLocalContext().getKPIMessageBoxIdIsNotNull())
{
if(form.getLocalContext().getKPIMessageBoxId().equals(messageBoxId))
{
if(result.equals(DialogResult.YES))
{
if(!apptInThePastValidateNeeded())
doBooking();
}
}
}
if(form.getLocalContext().getApptInPastMessageBoxIdIsNotNull())
{
if(form.getLocalContext().getApptInPastMessageBoxId().equals(messageBoxId))
{
if(result.equals(DialogResult.YES))
{
doBooking();
}
}
}
}
	/**
	 * EROD checkbox toggled on a selected appointment row: the offered date field
	 * is only editable while the checkbox is ticked, and is cleared on untick.
	 */
	@Override
	protected void onGrdSelectedGridCheckBoxClicked(int column, grdSelectedRow row, boolean isChecked) throws PresentationLogicException
	{
		row.setcolErodDateReadOnly(!isChecked);
		if(!isChecked)
			row.setcolErodDate(null);
	}
	/**
	 * Refreshes the referral details when the sort-appointments dialog closes,
	 * regardless of its dialog result (WDEV-11890).
	 */
	@Override
	protected void onFormDialogClosed(FormName formName, DialogResult result) throws PresentationLogicException
	{
		//WDEV-11890
		if(formName.equals(form.getForms().Scheduling.SortTheatreAppointments))
		{
			//dont care about dialog result - refresh screen anyway
			loadReferralDetails();
		}
	}
}
| agpl-3.0 |
MatthiasMann/EnderIO | src/main/java/crazypants/enderio/conduit/render/DefaultConduitRenderer.java | 11668 | package crazypants.enderio.conduit.render;
import java.util.Collection;
import java.util.List;
import net.minecraft.client.renderer.RenderBlocks;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.util.IIcon;
import net.minecraftforge.common.util.ForgeDirection;
import com.enderio.core.client.render.BoundingBox;
import com.enderio.core.client.render.RenderUtil;
import com.enderio.core.common.vecmath.Vertex;
import crazypants.enderio.EnderIO;
import crazypants.enderio.conduit.ConnectionMode;
import crazypants.enderio.conduit.IConduit;
import crazypants.enderio.conduit.IConduitBundle;
import crazypants.enderio.conduit.geom.CollidableComponent;
import static com.enderio.core.client.render.CubeRenderer.*;
import static net.minecraftforge.common.util.ForgeDirection.*;
public class DefaultConduitRenderer implements ConduitRenderer {
  /** Scale factor for the transmission (inner core) geometry, refreshed per conduit in renderEntity. */
  protected float transmissionScaleFactor;
  /** Default renderer accepts every conduit; specialised renderers narrow this. */
  @Override
  public boolean isRendererForConduit(IConduit conduit) {
    return true;
  }
  /**
   * Renders the static geometry for a single conduit: for each collidable
   * component, first the inner transmission core (when the component points
   * along an axis and the conduit supplies a transmission texture), then the
   * outer shell. Skipped entirely while an override block texture is active
   * (e.g. the block-breaking animation).
   */
  @Override
  public void renderEntity(ConduitBundleRenderer conduitBundleRenderer, IConduitBundle te, IConduit conduit, double x, double y, double z, float partialTick,
      float worldLight, RenderBlocks rb) {
    Collection<CollidableComponent> components = conduit.getCollidableComponents();
    Tessellator tessellator = Tessellator.instance;
    transmissionScaleFactor = conduit.getTransmitionGeometryScale();
    IIcon tex;
    if(!rb.hasOverrideBlockTexture()) {
      for (CollidableComponent component : components) {
        if(renderComponent(component)) {
          // A component may glow brighter than the ambient world light
          float selfIllum = Math.max(worldLight, conduit.getSelfIlluminationForState(component));
          if(isNSEWUD(component.dir) &&
              conduit.getTransmitionTextureForState(component) != null) {
            // NOTE(review): brightness uses the truncated worldLight, not selfIllum;
            // selfIllum is only forwarded to the render methods - confirm intended
            tessellator.setBrightness((int) (worldLight));
            tex = conduit.getTransmitionTextureForState(component);
            renderTransmission(conduit, tex, component, selfIllum);
          }
          tex = conduit.getTextureForState(component);
          if(tex != null) {
            tessellator.setBrightness((int) (worldLight));
            renderConduit(tex, conduit, component, selfIllum);
          }
        }
      }
    }
  }
  /**
   * Hook for conduits that need per-frame dynamic rendering; the default
   * implementation intentionally renders nothing.
   */
  @Override
  public void renderDynamicEntity(ConduitBundleRenderer conduitBundleRenderer, IConduitBundle te, IConduit con, double x, double y, double z,
      float partialTick, float worldLight) {
  }
  /**
   * Renders the outer shell of one conduit component. Axis-aligned arms are
   * shrunk to 75% on the axes perpendicular to their direction; when the
   * connection on that side is DISABLED an extra connector face is tessellated
   * over the component's bound. Non-axis components (e.g. the core) are drawn
   * with rotated textures.
   */
  protected void renderConduit(IIcon tex, IConduit conduit, CollidableComponent component, float brightness) {
    if(isNSEWUD(component.dir)) {
      float scaleFactor = 0.75f;
      // Full length along the arm's axis, shrunk cross-section
      float xLen = Math.abs(component.dir.offsetX) == 1 ? 1 : scaleFactor;
      float yLen = Math.abs(component.dir.offsetY) == 1 ? 1 : scaleFactor;
      float zLen = Math.abs(component.dir.offsetZ) == 1 ? 1 : scaleFactor;
      BoundingBox cube = component.bound;
      BoundingBox bb = cube.scale(xLen, yLen, zLen);
      drawSection(bb, tex.getMinU(), tex.getMaxU(), tex.getMinV(), tex.getMaxV(), component.dir, false);
      if(conduit.getConnectionMode(component.dir) == ConnectionMode.DISABLED) {
        // Draw the connector plate over the disabled face
        tex = EnderIO.blockConduitBundle.getConnectorIcon(component.data);
        List<Vertex> corners = component.bound.getCornersWithUvForFace(component.dir, tex.getMinU(), tex.getMaxU(), tex.getMinV(), tex.getMaxV());
        Tessellator tessellator = Tessellator.instance;
        for (Vertex c : corners) {
          addVecWithUV(c.xyz, c.uv.x, c.uv.y);
        }
      }
    } else {
      drawSection(component.bound, tex.getMinU(), tex.getMaxU(), tex.getMinV(), tex.getMaxV(), component.dir, true);
    }
  }
protected void renderTransmission(IConduit conduit, IIcon tex, CollidableComponent component, float selfIllum) {
// RoundedSegmentRenderer.renderSegment(component.dir, component.bound, tex.getMinU(), tex.getMaxU(), tex.getMinV(), tex.getMaxV(),
// conduit.getConectionMode(component.dir) == ConnectionMode.DISABLED);
float scaleFactor = 0.6f;
float xLen = Math.abs(component.dir.offsetX) == 1 ? 1 : scaleFactor;
float yLen = Math.abs(component.dir.offsetY) == 1 ? 1 : scaleFactor;
float zLen = Math.abs(component.dir.offsetZ) == 1 ? 1 : scaleFactor;
BoundingBox cube = component.bound;
BoundingBox bb = cube.scale(xLen, yLen, zLen);
drawSection(bb, tex.getMinU(), tex.getMaxU(), tex.getMinV(), tex.getMaxV(), component.dir, false);
}
  /** Hook for subclasses to skip individual components; the default renders everything. */
  protected boolean renderComponent(CollidableComponent component) {
    return true;
  }
protected boolean isNSEWUD(ForgeDirection dir) {
return dir == NORTH || dir == SOUTH || dir == EAST || dir == WEST || dir == UP || dir == DOWN;
}
/**
 * Emits the faces of a cuboid section through the shared Tessellator.
 * <p>
 * The pair of faces whose normals lie on the axis of {@code dir} is skipped
 * (e.g. for a north/south section the two Z faces are not drawn). For
 * non-transmission geometry each face is tinted with the per-face color
 * multiplier; transmission geometry is drawn untinted. The various min/max
 * UV swaps keep the texture orientation consistent relative to the center
 * of the conduit. The vertex emission order below is deliberate and must
 * not be reordered.
 *
 * @param bound          box to draw (set into {@code verts} before emission)
 * @param minU           minimum U texture coordinate
 * @param maxU           maximum U texture coordinate
 * @param minV           minimum V texture coordinate
 * @param maxV           maximum V texture coordinate
 * @param dir            direction this section extends in
 * @param isTransmission true for the (scaled, untinted) transmission core
 */
protected void drawSection(BoundingBox bound, float minU, float maxU, float minV, float maxV, ForgeDirection dir, boolean isTransmission) {

  Tessellator tessellator = Tessellator.instance;

  if(isTransmission) {
    setVerticesForTransmission(bound, dir);
  } else {
    setupVertices(bound);
  }

  // Maintain consistent texture direction relative to the center of the conduit.
  if(dir == NORTH || dir == UP || dir == EAST) {
    float tmp = minU;
    minU = maxU;
    maxU = tmp;
  }

  boolean rotateSides = dir == UP || dir == DOWN;
  boolean rotateTopBottom = dir == NORTH || dir == SOUTH;

  float cm;
  // North/south face pair (Z normals); skipped for sections along that axis.
  if(dir != NORTH && dir != SOUTH) {
    tessellator.setNormal(0, 0, -1);
    if(!isTransmission) {
      cm = RenderUtil.getColorMultiplierForFace(ForgeDirection.NORTH);
      tessellator.setColorOpaque_F(cm, cm, cm);
    }
    if(rotateSides) {
      addVecWithUV(verts[1], maxU, maxV);
      addVecWithUV(verts[0], maxU, minV);
      addVecWithUV(verts[3], minU, minV);
      addVecWithUV(verts[2], minU, maxV);
    } else {
      addVecWithUV(verts[1], minU, minV);
      addVecWithUV(verts[0], maxU, minV);
      addVecWithUV(verts[3], maxU, maxV);
      addVecWithUV(verts[2], minU, maxV);
    }
    if(dir == WEST || dir == EAST) {
      float tmp = minU;
      minU = maxU;
      maxU = tmp;
    }

    tessellator.setNormal(0, 0, 1);
    if(!isTransmission) {
      cm = RenderUtil.getColorMultiplierForFace(ForgeDirection.SOUTH);
      tessellator.setColorOpaque_F(cm, cm, cm);
    }
    if(rotateSides) {
      addVecWithUV(verts[4], maxU, maxV);
      addVecWithUV(verts[5], maxU, minV);
      addVecWithUV(verts[6], minU, minV);
      addVecWithUV(verts[7], minU, maxV);
    } else {
      addVecWithUV(verts[4], minU, minV);
      addVecWithUV(verts[5], maxU, minV);
      addVecWithUV(verts[6], maxU, maxV);
      addVecWithUV(verts[7], minU, maxV);
    }
    if(dir == WEST || dir == EAST) {
      float tmp = minU;
      minU = maxU;
      maxU = tmp;
    }
  }

  // Top/bottom face pair (Y normals); skipped for vertical sections.
  if(dir != UP && dir != DOWN) {

    tessellator.setNormal(0, 1, 0);
    if(!isTransmission) {
      cm = RenderUtil.getColorMultiplierForFace(ForgeDirection.UP);
      tessellator.setColorOpaque_F(cm, cm, cm);
    }
    if(rotateTopBottom) {
      addVecWithUV(verts[6], maxU, maxV);
      addVecWithUV(verts[2], minU, maxV);
      addVecWithUV(verts[3], minU, minV);
      addVecWithUV(verts[7], maxU, minV);
    } else {
      addVecWithUV(verts[6], minU, minV);
      addVecWithUV(verts[2], minU, maxV);
      addVecWithUV(verts[3], maxU, maxV);
      addVecWithUV(verts[7], maxU, minV);
    }

    tessellator.setNormal(0, -1, 0);
    if(!isTransmission) {
      cm = RenderUtil.getColorMultiplierForFace(ForgeDirection.DOWN);
      tessellator.setColorOpaque_F(cm, cm, cm);
    }
    if(rotateTopBottom) {
      addVecWithUV(verts[0], minU, minV);
      addVecWithUV(verts[1], minU, maxV);
      addVecWithUV(verts[5], maxU, maxV);
      addVecWithUV(verts[4], maxU, minV);
    } else {
      addVecWithUV(verts[0], maxU, maxV);
      addVecWithUV(verts[1], minU, maxV);
      addVecWithUV(verts[5], minU, minV);
      addVecWithUV(verts[4], maxU, minV);
    }
  }

  // East/west face pair (X normals); skipped for sections along that axis.
  if(dir != EAST && dir != WEST) {

    tessellator.setNormal(1, 0, 0);
    if(!isTransmission) {
      cm = RenderUtil.getColorMultiplierForFace(ForgeDirection.EAST);
      tessellator.setColorOpaque_F(cm, cm, cm);
    }
    if(rotateSides) {
      addVecWithUV(verts[2], minU, maxV);
      addVecWithUV(verts[6], minU, minV);
      addVecWithUV(verts[5], maxU, minV);
      addVecWithUV(verts[1], maxU, maxV);
    } else {
      addVecWithUV(verts[2], minU, maxV);
      addVecWithUV(verts[6], maxU, maxV);
      addVecWithUV(verts[5], maxU, minV);
      addVecWithUV(verts[1], minU, minV);
    }

    tessellator.setNormal(-1, 0, 0);
    if(!isTransmission) {
      cm = RenderUtil.getColorMultiplierForFace(ForgeDirection.WEST);
      tessellator.setColorOpaque_F(cm, cm, cm);
    }
    if(rotateSides) {
      addVecWithUV(verts[0], maxU, maxV);
      addVecWithUV(verts[4], maxU, minV);
      addVecWithUV(verts[7], minU, minV);
      addVecWithUV(verts[3], minU, maxV);
    } else {
      addVecWithUV(verts[0], minU, minV);
      addVecWithUV(verts[4], maxU, minV);
      addVecWithUV(verts[7], maxU, maxV);
      addVecWithUV(verts[3], minU, maxV);
    }
  }
  // Reset the tint so later rendering is unaffected.
  tessellator.setColorOpaque_F(1, 1, 1);
}
/**
 * Loads {@code verts} with the corners of {@code bound}, shrunk by
 * {@code transmissionScaleFactor} on every axis perpendicular to the
 * transmission direction.
 *
 * @param bound box to derive the vertices from
 * @param dir   direction the transmission section extends in
 */
protected void setVerticesForTransmission(BoundingBox bound, ForgeDirection dir) {
  float scaleX = 1;
  float scaleY = 1;
  float scaleZ = 1;
  if (dir.offsetX == 0) {
    scaleX = transmissionScaleFactor;
  }
  if (dir.offsetY == 0) {
    scaleY = transmissionScaleFactor;
  }
  if (dir.offsetZ == 0) {
    scaleZ = transmissionScaleFactor;
  }
  setupVertices(bound.scale(scaleX, scaleY, scaleZ));
}
// TODO: This is a really hacky, imprecise and slow way to do this
/**
 * Splits an elongated bounding box into a row of roughly cubic boxes.
 * <p>
 * NB: this only handles the really simple conduit case. A horizontal box
 * whose width/depth ratio exceeds 1.5 is split along its longer horizontal
 * axis; otherwise a box whose height/width ratio exceeds 1.5 is split
 * vertically. Degenerate (zero-extent) boxes are returned unchanged.
 *
 * @param bb box to split
 * @return the slices, or a single-element array holding {@code bb} itself
 *         when no split applies
 */
public BoundingBox[] toCubes(BoundingBox bb) {
  float width = bb.maxX - bb.minX;
  float height = bb.maxY - bb.minY;
  float depth = bb.maxZ - bb.minZ;

  if(width > 0 && height > 0 && depth > 0) {
    if(width / depth > 1.5f || depth / width > 1.5f) {
      // Split horizontally along the longer of the two horizontal axes.
      if(width > depth) {
        return splitAlongX(bb, Math.round(width / depth));
      }
      return splitAlongZ(bb, Math.round(depth / width));
    } else if(height / width > 1.5) {
      return splitAlongY(bb, Math.round(height / width));
    }
  }
  return new BoundingBox[] { bb };
}

/** Splits {@code bb} into {@code numSplits} equal slices along the x axis. */
private BoundingBox[] splitAlongX(BoundingBox bb, int numSplits) {
  float sliceSize = (bb.maxX - bb.minX) / numSplits;
  BoundingBox[] result = new BoundingBox[numSplits];
  float lastMax = bb.minX;
  for (int i = 0; i < numSplits; i++) {
    float max = lastMax + sliceSize;
    result[i] = new BoundingBox(lastMax, bb.minY, bb.minZ, max, bb.maxY, bb.maxZ);
    lastMax = max;
  }
  return result;
}

/** Splits {@code bb} into {@code numSplits} equal slices along the y axis. */
private BoundingBox[] splitAlongY(BoundingBox bb, int numSplits) {
  float sliceSize = (bb.maxY - bb.minY) / numSplits;
  BoundingBox[] result = new BoundingBox[numSplits];
  float lastMax = bb.minY;
  for (int i = 0; i < numSplits; i++) {
    float max = lastMax + sliceSize;
    result[i] = new BoundingBox(bb.minX, lastMax, bb.minZ, bb.maxX, max, bb.maxZ);
    lastMax = max;
  }
  return result;
}

/** Splits {@code bb} into {@code numSplits} equal slices along the z axis. */
private BoundingBox[] splitAlongZ(BoundingBox bb, int numSplits) {
  float sliceSize = (bb.maxZ - bb.minZ) / numSplits;
  BoundingBox[] result = new BoundingBox[numSplits];
  float lastMax = bb.minZ;
  for (int i = 0; i < numSplits; i++) {
    float max = lastMax + sliceSize;
    result[i] = new BoundingBox(bb.minX, bb.minY, lastMax, bb.maxX, bb.maxY, max);
    lastMax = max;
  }
  return result;
}
/**
 * @return false — this renderer does not register as dynamic.
 *         NOTE(review): presumably this means it is only invoked for static
 *         chunk/world rendering; confirm against the interface contract.
 */
@Override
public boolean isDynamic() {
  return false;
}
}
| unlicense |
Coneboy-k/incubator-rocketmq | test/src/test/java/org/apache/rocketmq/test/client/producer/querymsg/QueryMsgByIdExceptionIT.java | 2702 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.test.client.producer.querymsg;
import org.apache.log4j.Logger;
import org.apache.rocketmq.common.message.MessageExt;
import org.apache.rocketmq.test.base.BaseConf;
import org.apache.rocketmq.test.client.rmq.RMQNormalProducer;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import static com.google.common.truth.Truth.assertThat;
/**
 * Integration tests verifying that {@code viewMessage(...)} copes with bad
 * message ids: querying by a malformed or null id must yield no message,
 * and must not disturb the messages already sent by the producer.
 */
public class QueryMsgByIdExceptionIT extends BaseConf {
    // Fixed copy-paste bug: the logger was created for QueryMsgByKeyIT, which
    // attributed this suite's log output to the wrong test class.
    private static Logger logger = Logger.getLogger(QueryMsgByIdExceptionIT.class);
    private static RMQNormalProducer producer = null;
    private static String topic = null;

    @BeforeClass
    public static void setUp() {
        topic = initTopic();
        logger.info(String.format("use topic: %s;", topic));
        producer = getProducer(nsAddr, topic);
    }

    @AfterClass
    public static void tearDown() {
        shutDown();
    }

    @Test
    public void testQueryMsgByErrorMsgId() {
        producer.clearMsg();
        int msgSize = 20;
        String errorMsgId = "errorMsgId";
        producer.send(msgSize);
        Assert.assertEquals("Not all are sent", msgSize, producer.getAllUndupMsgBody().size());

        MessageExt queryMsg = null;
        try {
            queryMsg = producer.getProducer().viewMessage(errorMsgId);
        } catch (Exception ignored) {
            // Expected: a malformed id is rejected, so queryMsg stays null.
        }

        assertThat(queryMsg).isNull();
    }

    @Test
    public void testQueryMsgByNullMsgId() {
        producer.clearMsg();
        int msgSize = 20;
        String errorMsgId = null;
        producer.send(msgSize);
        Assert.assertEquals("Not all are sent", msgSize, producer.getAllUndupMsgBody().size());

        MessageExt queryMsg = null;
        try {
            queryMsg = producer.getProducer().viewMessage(errorMsgId);
        } catch (Exception ignored) {
            // Expected: a null id is rejected, so queryMsg stays null.
        }

        assertThat(queryMsg).isNull();
    }
}
| apache-2.0 |
stoksey69/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201411/UserTeamAssociationServiceInterface.java | 3299 | /**
* UserTeamAssociationServiceInterface.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.dfp.axis.v201411;
public interface UserTeamAssociationServiceInterface extends java.rmi.Remote {

    /**
     * Creates new {@link UserTeamAssociation} objects.
     *
     * @param userTeamAssociations the user team associations to create
     * @return the created user team associations with their IDs filled in
     */
    public com.google.api.ads.dfp.axis.v201411.UserTeamAssociation[] createUserTeamAssociations(com.google.api.ads.dfp.axis.v201411.UserTeamAssociation[] userTeamAssociations) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201411.ApiException;

    /**
     * Gets a {@link UserTeamAssociationPage} of {@link UserTeamAssociation}
     * objects that satisfy the given {@link Statement#query}. The following
     * fields are supported for filtering:
     *
     * <table>
     * <tr>
     * <th scope="col">PQL Property</th> <th scope="col">Object Property</th>
     * </tr>
     * <tr>
     * <td>{@code userId}</td>
     * <td>{@link UserTeamAssociation#userId}</td>
     * </tr>
     * <tr>
     * <td>{@code teamId}</td>
     * <td>{@link UserTeamAssociation#teamId}</td>
     * </tr>
     * </table>
     *
     * @param filterStatement a Publisher Query Language statement used to
     *        filter a set of user team associations
     * @return the user team associations that match the given filter
     */
    public com.google.api.ads.dfp.axis.v201411.UserTeamAssociationPage getUserTeamAssociationsByStatement(com.google.api.ads.dfp.axis.v201411.Statement filterStatement) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201411.ApiException;

    /**
     * Performs actions on {@link UserTeamAssociation} objects that match the
     * given {@link Statement#query}.
     *
     * @param userTeamAssociationAction the action to perform
     * @param statement a Publisher Query Language statement used to filter
     *        a set of user team associations
     *        (javadoc fixed: the tag previously named a non-existent
     *        {@code filterStatement} parameter)
     * @return the result of the action performed
     */
    public com.google.api.ads.dfp.axis.v201411.UpdateResult performUserTeamAssociationAction(com.google.api.ads.dfp.axis.v201411.UserTeamAssociationAction userTeamAssociationAction, com.google.api.ads.dfp.axis.v201411.Statement statement) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201411.ApiException;

    /**
     * Updates the specified {@link UserTeamAssociation} objects.
     *
     * @param userTeamAssociations the user team associations to update
     * @return the updated user team associations
     */
    public com.google.api.ads.dfp.axis.v201411.UserTeamAssociation[] updateUserTeamAssociations(com.google.api.ads.dfp.axis.v201411.UserTeamAssociation[] userTeamAssociations) throws java.rmi.RemoteException, com.google.api.ads.dfp.axis.v201411.ApiException;
}
| apache-2.0 |
altsoft/PlatypusJS | platypus-js-grid/src/test/java/com/bearsoft/gui/grid/GridVisualTest.java | 209 | /*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.bearsoft.gui.grid;
/**
 * Placeholder for visual grid tests; currently inherits all of its test
 * behaviour from {@link GridTest} without adding any cases of its own.
 *
 * @author Gala
 */
public class GridVisualTest extends GridTest{
}
| apache-2.0 |
chetanmeh/jackrabbit-oak | oak-store-spi/src/main/java/org/apache/jackrabbit/oak/spi/commit/Observer.java | 3622 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.spi.commit;
import javax.annotation.Nonnull;
import org.apache.jackrabbit.oak.spi.state.NodeState;
/**
* Extension point for observing changes in an Oak repository. Observer
* implementations might use the observed content changes to update caches,
* trigger JCR-level observation events or otherwise process the changes.
* <p>
* An observer is informed about content changes by calling the
* {@link #contentChanged(NodeState, CommitInfo)} method. The frequency and
* granularity of these callbacks is not specified. However, each observer is
* always guaranteed to see a linear sequence of changes. In other words the
* method will not be called concurrently from multiple threads and successive
* calls represent a linear sequence of repository states, i.e. the root
* state passed to a call is guaranteed to represent a repository state
* that is not newer than the root state passed to the next call. The observer
* is expected to keep track of the previously observed state if it wants to
* use a content diff to determine what exactly changed between two states.
* <p>
* For local changes repository passes in a {@link CommitInfo} instance which
* was used as part of commit and make it available to observers along with the
* committed content changes. In such cases, i.e. when the commit info argument is
* non-{@code null}, the reported content change is guaranteed to contain
* <em>only</em> changes from that specific commit (and the applied commit
* hooks). Note that it is possible for a repository to report commit
* information for only some commits but not others.
* <p>
* For external changes repository would construct a {@link CommitInfo} instance
* which might include some metadata which can be used by observers. Such
* {@link CommitInfo} instances would <code>external</code> flag set to true
* <p>
* It should also be noted that two observers may not necessarily see the
* same sequence of content changes. It is also possible for an observer to
* be notified when no actual content changes have happened therefore passing
* the same root state to subsequent calls.
* <p>
* A specific implementation or deployment may offer more guarantees about
* when and how observers are notified of content changes. See the relevant
* documentation for more details about such cases.
*
* @since Oak 0.11
*/
public interface Observer {

    /**
     * Observes a content change. See the {@link Observer} class javadocs
     * and relevant repository and observer registration details for more
     * information on when and how this method gets called.
     *
     * @param root root state of the repository; never {@code null}
     * @param info commit information for this change; never {@code null}
     */
    void contentChanged(@Nonnull NodeState root, @Nonnull CommitInfo info);

}
| apache-2.0 |
qobel/esoguproject | spring-framework/spring-core/src/main/java/org/springframework/util/MethodInvoker.java | 10777 | /*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.util;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
/**
* Helper class that allows for specifying a method to invoke in a declarative
* fashion, be it static or non-static.
*
* <p>Usage: Specify "targetClass"/"targetMethod" or "targetObject"/"targetMethod",
* optionally specify arguments, prepare the invoker. Afterwards, you may
* invoke the method any number of times, obtaining the invocation result.
*
* @author Colin Sampaleanu
* @author Juergen Hoeller
* @since 19.02.2004
* @see #prepare
* @see #invoke
*/
public class MethodInvoker {

    private Class<?> targetClass;

    private Object targetObject;

    private String targetMethod;

    private String staticMethod;

    private Object[] arguments = new Object[0];

    /** The method we will call */
    private Method methodObject;


    /**
     * Set the target class on which to call the target method.
     * Only necessary when the target method is static; else,
     * a target object needs to be specified anyway.
     * @see #setTargetObject
     * @see #setTargetMethod
     */
    public void setTargetClass(Class<?> targetClass) {
        this.targetClass = targetClass;
    }

    /**
     * Return the target class on which to call the target method.
     */
    public Class<?> getTargetClass() {
        return this.targetClass;
    }

    /**
     * Set the target object on which to call the target method.
     * Only necessary when the target method is not static;
     * else, a target class is sufficient.
     * @see #setTargetClass
     * @see #setTargetMethod
     */
    public void setTargetObject(Object targetObject) {
        this.targetObject = targetObject;
        if (targetObject != null) {
            this.targetClass = targetObject.getClass();
        }
    }

    /**
     * Return the target object on which to call the target method.
     */
    public Object getTargetObject() {
        return this.targetObject;
    }

    /**
     * Set the name of the method to be invoked.
     * Refers to either a static method or a non-static method,
     * depending on a target object being set.
     * @see #setTargetClass
     * @see #setTargetObject
     */
    public void setTargetMethod(String targetMethod) {
        this.targetMethod = targetMethod;
    }

    /**
     * Return the name of the method to be invoked.
     */
    public String getTargetMethod() {
        return this.targetMethod;
    }

    /**
     * Set a fully qualified static method name to invoke,
     * e.g. "example.MyExampleClass.myExampleMethod".
     * Convenient alternative to specifying targetClass and targetMethod.
     * @see #setTargetClass
     * @see #setTargetMethod
     */
    public void setStaticMethod(String staticMethod) {
        this.staticMethod = staticMethod;
    }

    /**
     * Set arguments for the method invocation. If this property is not set,
     * or the Object array is of length 0, a method with no arguments is assumed.
     */
    public void setArguments(Object[] arguments) {
        this.arguments = (arguments != null ? arguments : new Object[0]);
    }

    /**
     * Return the arguments for the method invocation.
     */
    public Object[] getArguments() {
        return this.arguments;
    }


    /**
     * Prepare the specified method.
     * The method can be invoked any number of times afterwards.
     * @see #getPreparedMethod
     * @see #invoke
     */
    public void prepare() throws ClassNotFoundException, NoSuchMethodException {
        if (this.staticMethod != null) {
            int lastDotIndex = this.staticMethod.lastIndexOf('.');
            // Fix: lastIndexOf can never return length(), so the previous
            // "== length()" comparison was dead code and a name with a
            // trailing dot (e.g. "example.MyExampleClass.") slipped through
            // with an empty method name. Compare against length() - 1 so
            // such names fail fast with a helpful message.
            if (lastDotIndex == -1 || lastDotIndex == this.staticMethod.length() - 1) {
                throw new IllegalArgumentException(
                        "staticMethod must be a fully qualified class plus method name: " +
                        "e.g. 'example.MyExampleClass.myExampleMethod'");
            }
            String className = this.staticMethod.substring(0, lastDotIndex);
            String methodName = this.staticMethod.substring(lastDotIndex + 1);
            this.targetClass = resolveClassName(className);
            this.targetMethod = methodName;
        }

        Class<?> targetClass = getTargetClass();
        String targetMethod = getTargetMethod();
        if (targetClass == null) {
            throw new IllegalArgumentException("Either 'targetClass' or 'targetObject' is required");
        }
        if (targetMethod == null) {
            throw new IllegalArgumentException("Property 'targetMethod' is required");
        }

        Object[] arguments = getArguments();
        Class<?>[] argTypes = new Class<?>[arguments.length];
        for (int i = 0; i < arguments.length; ++i) {
            argTypes[i] = (arguments[i] != null ? arguments[i].getClass() : Object.class);
        }

        // Try to get the exact method first.
        try {
            this.methodObject = targetClass.getMethod(targetMethod, argTypes);
        }
        catch (NoSuchMethodException ex) {
            // Just rethrow exception if we can't get any match.
            this.methodObject = findMatchingMethod();
            if (this.methodObject == null) {
                throw ex;
            }
        }
    }

    /**
     * Resolve the given class name into a Class.
     * <p>The default implementations uses {@code ClassUtils.forName},
     * using the thread context class loader.
     * @param className the class name to resolve
     * @return the resolved Class
     * @throws ClassNotFoundException if the class name was invalid
     */
    protected Class<?> resolveClassName(String className) throws ClassNotFoundException {
        return ClassUtils.forName(className, ClassUtils.getDefaultClassLoader());
    }

    /**
     * Find a matching method with the specified name for the specified arguments.
     * @return a matching method, or {@code null} if none
     * @see #getTargetClass()
     * @see #getTargetMethod()
     * @see #getArguments()
     */
    protected Method findMatchingMethod() {
        String targetMethod = getTargetMethod();
        Object[] arguments = getArguments();
        int argCount = arguments.length;

        Method[] candidates = ReflectionUtils.getAllDeclaredMethods(getTargetClass());
        int minTypeDiffWeight = Integer.MAX_VALUE;
        Method matchingMethod = null;
        for (Method candidate : candidates) {
            if (candidate.getName().equals(targetMethod)) {
                Class<?>[] paramTypes = candidate.getParameterTypes();
                if (paramTypes.length == argCount) {
                    int typeDiffWeight = getTypeDifferenceWeight(paramTypes, arguments);
                    if (typeDiffWeight < minTypeDiffWeight) {
                        minTypeDiffWeight = typeDiffWeight;
                        matchingMethod = candidate;
                    }
                }
            }
        }
        return matchingMethod;
    }

    /**
     * Return the prepared Method object that will be invoked.
     * <p>Can for example be used to determine the return type.
     * @return the prepared Method object (never {@code null})
     * @throws IllegalStateException if the invoker hasn't been prepared yet
     * @see #prepare
     * @see #invoke
     */
    public Method getPreparedMethod() throws IllegalStateException {
        if (this.methodObject == null) {
            throw new IllegalStateException("prepare() must be called prior to invoke() on MethodInvoker");
        }
        return this.methodObject;
    }

    /**
     * Return whether this invoker has been prepared already,
     * i.e. whether it allows access to {@link #getPreparedMethod()} already.
     */
    public boolean isPrepared() {
        return (this.methodObject != null);
    }

    /**
     * Invoke the specified method.
     * <p>The invoker needs to have been prepared before.
     * @return the object (possibly null) returned by the method invocation,
     * or {@code null} if the method has a void return type
     * @throws InvocationTargetException if the target method threw an exception
     * @throws IllegalAccessException if the target method couldn't be accessed
     * @see #prepare
     */
    public Object invoke() throws InvocationTargetException, IllegalAccessException {
        // In the static case, target will simply be {@code null}.
        Object targetObject = getTargetObject();
        Method preparedMethod = getPreparedMethod();
        if (targetObject == null && !Modifier.isStatic(preparedMethod.getModifiers())) {
            throw new IllegalArgumentException("Target method must not be non-static without a target");
        }
        ReflectionUtils.makeAccessible(preparedMethod);
        return preparedMethod.invoke(targetObject, getArguments());
    }


    /**
     * Algorithm that judges the match between the declared parameter types of a candidate method
     * and a specific list of arguments that this method is supposed to be invoked with.
     * <p>Determines a weight that represents the class hierarchy difference between types and
     * arguments. A direct match, i.e. type Integer -> arg of class Integer, does not increase
     * the result - all direct matches means weight 0. A match between type Object and arg of
     * class Integer would increase the weight by 2, due to the superclass 2 steps up in the
     * hierarchy (i.e. Object) being the last one that still matches the required type Object.
     * Type Number and class Integer would increase the weight by 1 accordingly, due to the
     * superclass 1 step up the hierarchy (i.e. Number) still matching the required type Number.
     * Therefore, with an arg of type Integer, a constructor (Integer) would be preferred to a
     * constructor (Number) which would in turn be preferred to a constructor (Object).
     * All argument weights get accumulated.
     * <p>Note: This is the algorithm used by MethodInvoker itself and also the algorithm
     * used for constructor and factory method selection in Spring's bean container (in case
     * of lenient constructor resolution which is the default for regular bean definitions).
     * @param paramTypes the parameter types to match
     * @param args the arguments to match
     * @return the accumulated weight for all arguments
     */
    public static int getTypeDifferenceWeight(Class<?>[] paramTypes, Object[] args) {
        int result = 0;
        for (int i = 0; i < paramTypes.length; i++) {
            if (!ClassUtils.isAssignableValue(paramTypes[i], args[i])) {
                return Integer.MAX_VALUE;
            }
            if (args[i] != null) {
                Class<?> paramType = paramTypes[i];
                Class<?> superClass = args[i].getClass().getSuperclass();
                while (superClass != null) {
                    if (paramType.equals(superClass)) {
                        result = result + 2;
                        superClass = null;
                    }
                    else if (ClassUtils.isAssignable(paramType, superClass)) {
                        result = result + 2;
                        superClass = superClass.getSuperclass();
                    }
                    else {
                        superClass = null;
                    }
                }
                if (paramType.isInterface()) {
                    result = result + 1;
                }
            }
        }
        return result;
    }

}
| apache-2.0 |
tobiasge/cgeo | main/src/cgeo/geocaching/apps/cache/AbstractGeneralApp.java | 944 | package cgeo.geocaching.apps.cache;
import cgeo.geocaching.apps.AbstractApp;
import cgeo.geocaching.apps.navi.CacheNavigationApp;
import cgeo.geocaching.models.Geocache;
import android.content.Context;
import android.content.Intent;
import androidx.annotation.NonNull;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
/**
 * Base class for navigation entries that simply bring an already-installed
 * external application to the foreground. The cache itself is not passed to
 * the launched application.
 */
abstract class AbstractGeneralApp extends AbstractApp implements CacheNavigationApp {

    @SuppressFBWarnings("NP_METHOD_PARAMETER_TIGHTENS_ANNOTATION")
    protected AbstractGeneralApp(@NonNull final String name, @NonNull final String packageName) {
        super(name, null, packageName);
    }

    /**
     * Launches (or re-foregrounds) the external application if it is
     * installed; silently does nothing otherwise.
     */
    @Override
    public void navigate(@NonNull final Context context, @NonNull final Geocache cache) {
        final Intent launchIntent = getLaunchIntent();
        if (launchIntent == null) {
            return;
        }
        launchIntent.setFlags(Intent.FLAG_ACTIVITY_REORDER_TO_FRONT);
        context.startActivity(launchIntent);
    }
}
| apache-2.0 |
ajju4455/jsonschema2pojo | jsonschema2pojo-core/src/main/java/org/jsonschema2pojo/GenerationConfig.java | 10088 | /**
* Copyright © 2010-2014 Nokia
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jsonschema2pojo;
import java.io.File;
import java.io.FileFilter;
import java.net.URL;
import java.util.Iterator;
import org.jsonschema2pojo.rules.RuleFactory;
/**
* Defines the configuration options for Java type generation, including source
* and target paths/packages and all behavioural options (e.g should builders be
* generated, should primitives be used, etc).
* <p>
* Devs: add to this interface if you need to introduce a new config property.
*/
public interface GenerationConfig {

    /**
     * Gets the 'generateBuilders' configuration option.
     *
     * @return Whether to generate builder-style methods of the form
     *         <code>withXxx(value)</code> (that return <code>this</code>),
     *         alongside the standard, void-return setters.
     */
    boolean isGenerateBuilders();

    /**
     * Gets the 'usePrimitives' configuration option.
     *
     * @return whether to use primitives (<code>long</code>, <code>double</code>
     *         , <code>boolean</code>) instead of wrapper types where possible
     *         when generating bean properties (has the side-effect of making
     *         those properties non-null).
     */
    boolean isUsePrimitives();

    /**
     * Gets the 'source' configuration option.
     *
     * @return The source file(s) or directory(ies) from which JSON Schema will
     *         be read.
     */
    Iterator<URL> getSource();

    /**
     * Gets the 'targetDirectory' configuration option.
     *
     * @return The target directory into which generated types will be written
     *         (may or may not exist before types are written)
     */
    File getTargetDirectory();

    /**
     * Gets the 'targetPackage' configuration option.
     *
     * @return The java package used for generated types.
     */
    String getTargetPackage();

    /**
     * Gets the 'propertyWordDelimiters' configuration option.
     *
     * @return an array of characters that should act as word delimiters when
     *         choosing java bean property names.
     */
    char[] getPropertyWordDelimiters();

    /**
     * Gets the 'useLongIntegers' configuration option.
     *
     * @return Whether to use the java type <code>long</code> (or
     *         {@link java.lang.Long}) instead of <code>int</code> (or
     *         {@link java.lang.Integer}) when representing the JSON Schema type
     *         'integer'.
     */
    boolean isUseLongIntegers();

    /**
     * Gets the 'useDoubleNumbers' configuration option.
     *
     * @return Whether to use the java type <code>double</code> (or
     *         {@link java.lang.Double}) instead of <code>float</code> (or
     *         {@link java.lang.Float}) when representing the JSON Schema type
     *         'number'.
     */
    boolean isUseDoubleNumbers();

    /**
     * Gets the 'includeHashcodeAndEquals' configuration option.
     *
     * @return Whether to use include <code>hashCode</code> and
     *         <code>equals</code> methods in generated Java types.
     */
    boolean isIncludeHashcodeAndEquals();

    /**
     * Gets the 'includeToString' configuration option.
     *
     * @return Whether to use include a <code>toString</code> method in
     *         generated Java types.
     */
    boolean isIncludeToString();

    /**
     * Gets the 'annotationStyle' configuration option.
     *
     * @return The style of annotations to use in the generated Java types.
     *         <p>
     *         Supported values:
     *         <ul>
     *         <li><code>jackson1</code> (apply annotations from the
     *         <a href="http://jackson.codehaus.org/">Jackson 1.x</a> library)
     *         </li>
     *         <li><code>jackson2</code> (apply annotations from the
     *         <a href="https://github.com/FasterXML/jackson-annotations">
     *         Jackson 2.x</a> library)</li>
     *         <li><code>gson</code> (apply annotations from the
     *         <a href="https://code.google.com/p/google-gson/">gson</a>
     *         library)</li>
     *         <li><code>none</code> (apply no annotations at all)</li>
     *         </ul>
     * @see AnnotatorFactory
     */
    AnnotationStyle getAnnotationStyle();

    /**
     * Gets the 'customAnnotator' configuration option.
     *
     * @return An annotator that will be used in addition to the one chosen by
     *         {@link #getAnnotationStyle()}
     */
    Class<? extends Annotator> getCustomAnnotator();

    /**
     * Gets the 'customRuleFactory' configuration option.
     *
     * @return An Rule Factory that will be used for the creation of generation
     *         rules.
     */
    Class<? extends RuleFactory> getCustomRuleFactory();

    /**
     * Gets the 'includeJsr303Annotations' configuration option.
     *
     * @return Whether to include
     *         <a href="http://jcp.org/en/jsr/detail?id=303">JSR-303</a>
     *         annotations (for schema rules like minimum, maximum, etc) in
     *         generated Java types.
     */
    boolean isIncludeJsr303Annotations();

    /**
     * Gets the 'sourceType' configuration option.
     *
     * @return The type of input documents that will be read
     *         <p>
     *         Supported values:
     *         <ul>
     *         <li><code>jsonschema</code></li>
     *         <li><code>json</code></li>
     *         </ul>
     */
    SourceType getSourceType();

    /**
     * Gets the 'removeOldOutput' configuration option.
     *
     * @return Whether to empty the target directory before generation occurs,
     *         to clear out all source files that have been generated
     *         previously. <strong>Be warned</strong>, when activated this
     *         option will cause jsonschema2pojo to <strong>indiscriminately
     *         delete the entire contents of the target directory (all files and
     *         folders)</strong> before it begins generating sources.
     */
    boolean isRemoveOldOutput();

    /**
     * Gets the 'outputEncoding' configuration option.
     *
     * @return The character encoding that should be used when writing the
     *         generated Java source files.
     */
    String getOutputEncoding();

    /**
     * Gets the 'useJodaDates' configuration option.
     *
     * @return Whether to use {@link org.joda.time.DateTime} instead of
     *         {@link java.util.Date} when adding date type fields to generated
     *         Java types.
     */
    boolean isUseJodaDates();

    /**
     * Gets the 'useJodaLocalDates' configuration option.
     *
     * @return Whether to use {@link org.joda.time.LocalDate} instead of string
     *         when adding string type fields with a format of date (not
     *         date-time) to generated Java types.
     */
    boolean isUseJodaLocalDates();

    /**
     * Gets the 'useJodaLocalTimes' configuration option.
     *
     * @return Whether to use {@link org.joda.time.LocalTime} instead of string
     *         when adding string type fields with a format of time (not
     *         date-time) to generated Java types.
     */
    boolean isUseJodaLocalTimes();

    /**
     * Gets the 'useCommonsLang3' configuration option.
     *
     * @return Whether to use commons-lang 3.x imports instead of commons-lang
     *         2.x imports when adding equals, hashCode and toString methods.
     */
    boolean isUseCommonsLang3();

    /**
     * Gets the 'parcelable' configuration option.
     *
     * @return Whether to make the generated types 'parcelable' (for Android
     *         development)
     */
    boolean isParcelable();

    /**
     * Gets the file filter used to isolate the schema mapping files in the
     * source directories.
     *
     * @return the file filter use when scanning for schema files.
     */
    FileFilter getFileFilter();

    /**
     * Gets the 'initializeCollections' configuration option.
     *
     * @return Whether to initialize collections with empty instance or null.
     */
    boolean isInitializeCollections();

    /**
     * Gets the 'classNamePrefix' configuration option.
     *
     * @return The prefix that is prepended to the names of generated classes.
     *         (javadoc fixed: the previous text was copy-pasted from
     *         'initializeCollections')
     */
    String getClassNamePrefix();

    /**
     * Gets the 'classNameSuffix' configuration option.
     *
     * @return The suffix that is appended to the names of generated classes.
     *         (javadoc fixed: the previous text was copy-pasted from
     *         'initializeCollections')
     */
    String getClassNameSuffix();

    /**
     * Gets the 'includeConstructors' configuration option
     *
     * @return Whether to generate constructors or not.
     */
    boolean isIncludeConstructors();

    /**
     * Gets the 'constructorsRequiredPropertiesOnly' configuration option
     *
     * @return Whether generated constructors should have parameters for all
     *         properties, or only required ones.
     */
    boolean isConstructorsRequiredPropertiesOnly();

    /**
     * Gets the 'includeAdditionalProperties' configuration option
     *
     * @return Whether to allow 'additional properties' support in objects.
     *         Setting this to false will disable additional properties support,
     *         regardless of the input schema(s).
     */
    boolean isIncludeAdditionalProperties();

    /**
     * Gets the 'includeAccessors' configuration option
     *
     * @return Whether to include getters/setters or to omit these accessor
     *         methods and create public fields instead.
     */
    boolean isIncludeAccessors();
}
| apache-2.0 |
kalimatas/elasticsearch | server/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java | 30476 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.TextFieldMapper;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.PreBuiltAnalyzers;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import static java.util.Collections.unmodifiableMap;
/**
* An internal registry for tokenizer, token filter, char filter and analyzer.
* This class exists per node and allows to create per-index {@link IndexAnalyzers} via {@link #build(IndexSettings)}
*/
public final class AnalysisRegistry implements Closeable {
public static final String INDEX_ANALYSIS_CHAR_FILTER = "index.analysis.char_filter";
public static final String INDEX_ANALYSIS_FILTER = "index.analysis.filter";
public static final String INDEX_ANALYSIS_TOKENIZER = "index.analysis.tokenizer";
private final PrebuiltAnalysis prebuiltAnalysis;
private final Map<String, Analyzer> cachedAnalyzer = new ConcurrentHashMap<>();
private final Environment environment;
private final Map<String, AnalysisProvider<CharFilterFactory>> charFilters;
private final Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters;
private final Map<String, AnalysisProvider<TokenizerFactory>> tokenizers;
private final Map<String, AnalysisProvider<AnalyzerProvider<?>>> analyzers;
private final Map<String, AnalysisProvider<AnalyzerProvider<?>>> normalizers;
/**
 * Creates the node-level registry of analysis components.
 *
 * @param environment node environment handed to providers when they are instantiated
 * @param charFilters named char filter providers contributed by plugins/modules
 * @param tokenFilters named token filter providers
 * @param tokenizers named tokenizer providers
 * @param analyzers named analyzer providers
 * @param normalizers named normalizer providers
 * @param preConfiguredCharFilters pre-configured (settings-free) char filters
 * @param preConfiguredTokenFilters pre-configured token filters
 * @param preConfiguredTokenizers pre-configured tokenizers
 */
public AnalysisRegistry(Environment environment,
                        Map<String, AnalysisProvider<CharFilterFactory>> charFilters,
                        Map<String, AnalysisProvider<TokenFilterFactory>> tokenFilters,
                        Map<String, AnalysisProvider<TokenizerFactory>> tokenizers,
                        Map<String, AnalysisProvider<AnalyzerProvider<?>>> analyzers,
                        Map<String, AnalysisProvider<AnalyzerProvider<?>>> normalizers,
                        Map<String, PreConfiguredCharFilter> preConfiguredCharFilters,
                        Map<String, PreConfiguredTokenFilter> preConfiguredTokenFilters,
                        Map<String, PreConfiguredTokenizer> preConfiguredTokenizers) {
    this.environment = environment;
    // Wrap all provider maps so this registry can never be mutated after construction.
    this.charFilters = unmodifiableMap(charFilters);
    this.tokenFilters = unmodifiableMap(tokenFilters);
    this.tokenizers = unmodifiableMap(tokenizers);
    this.analyzers = unmodifiableMap(analyzers);
    this.normalizers = unmodifiableMap(normalizers);
    prebuiltAnalysis = new PrebuiltAnalysis(preConfiguredCharFilters, preConfiguredTokenFilters, preConfiguredTokenizers);
}
/**
 * Looks up the {@link Settings} group {@code groupName} on the given index settings.
 * When the group is absent, a minimal default carrying only the index-created
 * version is returned instead, so callers always receive a usable object.
 *
 * @param indexSettings an index settings
 * @param groupName tokenizer/token filter/char filter settings group name
 * @return the group {@link Settings}, never {@code null}
 */
public static Settings getSettingsFromIndexSettings(IndexSettings indexSettings, String groupName) {
    final Settings group = indexSettings.getSettings().getAsSettings(groupName);
    if (group.isEmpty() == false) {
        return group;
    }
    // Nothing configured for this group: synthesize settings holding just the index creation version.
    return Settings.builder()
            .put(IndexMetaData.SETTING_VERSION_CREATED, indexSettings.getIndexVersionCreated())
            .build();
}
/**
 * Returns a registered {@link TokenizerFactory} provider by name or <code>null</code> if the tokenizer was not registered
 */
public AnalysisModule.AnalysisProvider<TokenizerFactory> getTokenizerProvider(String tokenizer) {
    // Plugin-registered providers take precedence; otherwise fall back to the pre-built provider (may be null).
    return tokenizers.getOrDefault(tokenizer, this.prebuiltAnalysis.getTokenizerFactory(tokenizer));
}
/**
 * Returns a registered {@link TokenFilterFactory} provider by name or <code>null</code> if the token filter was not registered
 */
public AnalysisModule.AnalysisProvider<TokenFilterFactory> getTokenFilterProvider(String tokenFilter) {
    // Plugin-registered providers take precedence; otherwise fall back to the pre-built provider (may be null).
    return tokenFilters.getOrDefault(tokenFilter, this.prebuiltAnalysis.getTokenFilterFactory(tokenFilter));
}
/**
 * Returns a registered {@link CharFilterFactory} provider by name or <code>null</code> if the char filter was not registered
 */
public AnalysisModule.AnalysisProvider<CharFilterFactory> getCharFilterProvider(String charFilter) {
    // Plugin-registered providers take precedence; otherwise fall back to the pre-built provider (may be null).
    return charFilters.getOrDefault(charFilter, this.prebuiltAnalysis.getCharFilterFactory(charFilter));
}
/**
 * Returns a registered {@link Analyzer} provider by name or <code>null</code> if the analyzer was not registered
 */
public Analyzer getAnalyzer(String analyzer) throws IOException {
    // Pre-built analyzers are resolved fresh each call; node-level registered analyzers are
    // instantiated once and memoized in cachedAnalyzer (the cache is never evicted; entries
    // are closed when this registry is closed).
    AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> analyzerProvider = this.prebuiltAnalysis.getAnalyzerProvider(analyzer);
    if (analyzerProvider == null) {
        AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> provider = analyzers.get(analyzer);
        return provider == null ? null : cachedAnalyzer.computeIfAbsent(analyzer, (key) -> {
            try {
                return provider.get(environment, key).get();
            } catch (IOException ex) {
                // computeIfAbsent's mapping function cannot throw a checked exception, so wrap it.
                throw new ElasticsearchException("failed to load analyzer for name " + key, ex);
            }
        });
    }
    return analyzerProvider.get(environment, analyzer).get();
}
// Closes the pre-built analyzers first; the finally block guarantees that the lazily
// cached node-level analyzers are closed even if closing the pre-built ones throws.
@Override
public void close() throws IOException {
    try {
        prebuiltAnalysis.close();
    } finally {
        IOUtils.close(cachedAnalyzer.values());
    }
}
/**
 * Creates an index-level {@link IndexAnalyzers} from this registry using the given index settings.
 * All component factories (char filters, tokenizers, token filters, analyzers, normalizers)
 * are built from the index settings and then assembled into the final container.
 */
public IndexAnalyzers build(IndexSettings indexSettings) throws IOException {
    // Build each component category in the same order as before, then delegate to the assembling overload.
    final Map<String, CharFilterFactory> charFilterFactories = buildCharFilterFactories(indexSettings);
    final Map<String, TokenizerFactory> tokenizerFactories = buildTokenizerFactories(indexSettings);
    final Map<String, TokenFilterFactory> tokenFilterFactories = buildTokenFilterFactories(indexSettings);
    final Map<String, AnalyzerProvider<?>> analyzerFactories = buildAnalyzerFactories(indexSettings);
    final Map<String, AnalyzerProvider<?>> normalizerFactories = buildNormalizerFactories(indexSettings);
    return build(indexSettings, analyzerFactories, normalizerFactories, tokenizerFactories, charFilterFactories,
            tokenFilterFactories);
}
/**
 * Builds the index-level token filter factories from the {@code index.analysis.filter} settings,
 * registering the synonym filters (which need access to this registry) on top of the
 * plugin-provided filters.
 */
public Map<String, TokenFilterFactory> buildTokenFilterFactories(IndexSettings indexSettings) throws IOException {
    final Map<String, Settings> tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER);
    // Copy so the node-level map stays untouched while we add the synonym special cases.
    Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilters = new HashMap<>(this.tokenFilters);
    /*
     * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index.
     * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and
     * hide internal data-structures as much as possible.
     */
    tokenFilters.put("synonym", requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings)));
    tokenFilters.put("synonym_graph", requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings)));
    return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters);
}
/**
 * Builds the index-level tokenizer factories from the {@code index.analysis.tokenizer} settings.
 */
public Map<String, TokenizerFactory> buildTokenizerFactories(IndexSettings indexSettings) throws IOException {
    final Map<String, Settings> tokenizersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_TOKENIZER);
    return buildMapping(Component.TOKENIZER, indexSettings, tokenizersSettings, tokenizers, prebuiltAnalysis.preConfiguredTokenizers);
}
/**
 * Builds the index-level char filter factories from the {@code index.analysis.char_filter} settings.
 */
public Map<String, CharFilterFactory> buildCharFilterFactories(IndexSettings indexSettings) throws IOException {
    final Map<String, Settings> charFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_CHAR_FILTER);
    return buildMapping(Component.CHAR_FILTER, indexSettings, charFiltersSettings, charFilters, prebuiltAnalysis.preConfiguredCharFilterFactories);
}
/**
 * Builds the index-level analyzer providers from the {@code index.analysis.analyzer} settings.
 */
public Map<String, AnalyzerProvider<?>> buildAnalyzerFactories(IndexSettings indexSettings) throws IOException {
    final Map<String, Settings> analyzersSettings = indexSettings.getSettings().getGroups("index.analysis.analyzer");
    return buildMapping(Component.ANALYZER, indexSettings, analyzersSettings, analyzers, prebuiltAnalysis.analyzerProviderFactories);
}
/**
 * Builds the index-level normalizer providers from the {@code index.analysis.normalizer} settings.
 */
public Map<String, AnalyzerProvider<?>> buildNormalizerFactories(IndexSettings indexSettings) throws IOException {
    final Map<String, Settings> normalizersSettings = indexSettings.getSettings().getGroups("index.analysis.normalizer");
    // TODO: Have pre-built normalizers
    return buildMapping(Component.NORMALIZER, indexSettings, normalizersSettings, normalizers, Collections.emptyMap());
}
/**
 * Returns a registered {@link TokenizerFactory} provider by {@link IndexSettings}
 * or a registered {@link TokenizerFactory} provider by predefined name
 * or <code>null</code> if the tokenizer was not registered
 * @param tokenizer global or defined tokenizer name
 * @param indexSettings an index settings
 * @return {@link TokenizerFactory} provider or <code>null</code>
 */
public AnalysisProvider<TokenizerFactory> getTokenizerProvider(String tokenizer, IndexSettings indexSettings) {
    final Map<String, Settings> tokenizerSettings = indexSettings.getSettings().getGroups("index.analysis.tokenizer");
    if (tokenizerSettings.containsKey(tokenizer) == false) {
        // Not configured on this index: fall back to the global / pre-built lookup.
        return getTokenizerProvider(tokenizer);
    }
    final Settings currentSettings = tokenizerSettings.get(tokenizer);
    return getAnalysisProvider(Component.TOKENIZER, tokenizers, tokenizer, currentSettings.get("type"));
}
/**
 * Returns a registered {@link TokenFilterFactory} provider by {@link IndexSettings}
 * or a registered {@link TokenFilterFactory} provider by predefined name
 * or <code>null</code> if the tokenFilter was not registered
 * @param tokenFilter global or defined tokenFilter name
 * @param indexSettings an index settings
 * @return {@link TokenFilterFactory} provider or <code>null</code>
 */
public AnalysisProvider<TokenFilterFactory> getTokenFilterProvider(String tokenFilter, IndexSettings indexSettings) {
    final Map<String, Settings> tokenFilterSettings = indexSettings.getSettings().getGroups("index.analysis.filter");
    if (tokenFilterSettings.containsKey(tokenFilter)) {
        Settings currentSettings = tokenFilterSettings.get(tokenFilter);
        String typeName = currentSettings.get("type");
        /*
         * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index.
         * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and
         * hide internal data-structures as much as possible.
         */
        // Note: "literal".equals(typeName) is deliberately null-safe because typeName may be absent.
        if ("synonym".equals(typeName)) {
            return requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings));
        } else if ("synonym_graph".equals(typeName)) {
            return requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings));
        } else {
            return getAnalysisProvider(Component.FILTER, tokenFilters, tokenFilter, typeName);
        }
    } else {
        // Not configured on this index: fall back to the global / pre-built lookup.
        return getTokenFilterProvider(tokenFilter);
    }
}
/**
 * Returns a registered {@link CharFilterFactory} provider by {@link IndexSettings}
 * or a registered {@link CharFilterFactory} provider by predefined name
 * or <code>null</code> if the charFilter was not registered
 * @param charFilter global or defined charFilter name
 * @param indexSettings an index settings
 * @return {@link CharFilterFactory} provider or <code>null</code>
 */
public AnalysisProvider<CharFilterFactory> getCharFilterProvider(String charFilter, IndexSettings indexSettings) {
    final Map<String, Settings> charFilterSettings = indexSettings.getSettings().getGroups("index.analysis.char_filter");
    if (charFilterSettings.containsKey(charFilter) == false) {
        // Not configured on this index: fall back to the global / pre-built lookup.
        return getCharFilterProvider(charFilter);
    }
    final Settings currentSettings = charFilterSettings.get(charFilter);
    return getAnalysisProvider(Component.CHAR_FILTER, charFilters, charFilter, currentSettings.get("type"));
}
/**
 * Wraps a provider so that {@link AnalysisModule.AnalysisProvider#requiresAnalysisSettings()}
 * reports {@code true}; such providers are never instantiated with default (empty) settings.
 */
private static <T> AnalysisModule.AnalysisProvider<T> requiresAnalysisSettings(AnalysisModule.AnalysisProvider<T> provider) {
    // Anonymous class (not a lambda) because two methods are overridden.
    return new AnalysisModule.AnalysisProvider<T>() {
        @Override
        public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
            return provider.get(indexSettings, environment, name, settings);
        }
        @Override
        public boolean requiresAnalysisSettings() {
            return true;
        }
    };
}
/**
 * The categories of analysis components this registry builds. {@link #toString()}
 * returns the settings-key fragment used in error messages (e.g. "char_filter").
 */
enum Component {
    ANALYZER("analyzer"),
    NORMALIZER("normalizer"),
    CHAR_FILTER("char_filter"),
    TOKENIZER("tokenizer"),
    FILTER("filter");

    // Human-readable name; replaces the per-constant toString() overrides with one field.
    private final String displayName;

    Component(String displayName) {
        this.displayName = displayName;
    }

    @Override
    public String toString() {
        return displayName;
    }
}
/**
 * Builds the named factories of one {@link Component} for an index.
 * <p>
 * Resolution order: explicitly configured entries from {@code settingsMap} first
 * (with special handling for custom analyzers/normalizers), then every settings-free
 * provider from {@code providerMap}, and finally any pre-configured default from
 * {@code defaultInstance} that has not been registered yet.
 *
 * @param component the component category being built (used for error messages)
 * @param settings the index settings
 * @param settingsMap per-name configuration groups taken from the index settings
 * @param providerMap node-level registered providers
 * @param defaultInstance pre-configured defaults, keyed by name
 * @return a mutable map of name to instantiated factory
 * @throws IOException if a provider fails to instantiate its factory
 */
@SuppressWarnings("unchecked")
private <T> Map<String, T> buildMapping(Component component, IndexSettings settings, Map<String, Settings> settingsMap,
                                        Map<String, ? extends AnalysisModule.AnalysisProvider<T>> providerMap,
                                        Map<String, ? extends AnalysisModule.AnalysisProvider<T>> defaultInstance) throws IOException {
    Settings defaultSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, settings.getIndexVersionCreated()).build();
    Map<String, T> factories = new HashMap<>();
    for (Map.Entry<String, Settings> entry : settingsMap.entrySet()) {
        String name = entry.getKey();
        Settings currentSettings = entry.getValue();
        String typeName = currentSettings.get("type");
        if (component == Component.ANALYZER) {
            // An analyzer with no type but a tokenizer, or with type "custom", is a custom analyzer.
            T factory = null;
            if (typeName == null) {
                if (currentSettings.get("tokenizer") != null) {
                    factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings, environment);
                } else {
                    throw new IllegalArgumentException(component + " [" + name + "] must specify either an analyzer type, or a tokenizer");
                }
            } else if (typeName.equals("custom")) {
                factory = (T) new CustomAnalyzerProvider(settings, name, currentSettings, environment);
            }
            if (factory != null) {
                factories.put(name, factory);
                continue;
            }
        } else if (component == Component.NORMALIZER) {
            // Normalizers are always custom; "custom" is the only (optional) type allowed here.
            if (typeName == null || typeName.equals("custom")) {
                T factory = (T) new CustomNormalizerProvider(settings, name, currentSettings);
                factories.put(name, factory);
                continue;
            }
        }
        // getAnalysisProvider throws for a null type or an unknown type, so no null check is
        // needed here (the previous redundant "Unknown type" re-check was dead code).
        AnalysisProvider<T> type = getAnalysisProvider(component, providerMap, name, typeName);
        final T factory = type.get(settings, environment, name, currentSettings);
        factories.put(name, factory);
    }
    // go over the char filters in the bindings and register the ones that are not configured
    for (Map.Entry<String, ? extends AnalysisModule.AnalysisProvider<T>> entry : providerMap.entrySet()) {
        String name = entry.getKey();
        AnalysisModule.AnalysisProvider<T> provider = entry.getValue();
        // we don't want to re-register one that already exists
        if (settingsMap.containsKey(name)) {
            continue;
        }
        // check, if it requires settings, then don't register it, we know default has no settings...
        if (provider.requiresAnalysisSettings()) {
            continue;
        }
        // Prefer the pre-configured default over instantiating the provider with empty settings.
        AnalysisModule.AnalysisProvider<T> defaultProvider = defaultInstance.get(name);
        final T instance;
        if (defaultProvider == null) {
            instance = provider.get(settings, environment, name, defaultSettings);
        } else {
            instance = defaultProvider.get(settings, environment, name, defaultSettings);
        }
        factories.put(name, instance);
    }
    // Finally register any remaining pre-configured defaults that were not covered above.
    for (Map.Entry<String, ? extends AnalysisModule.AnalysisProvider<T>> entry : defaultInstance.entrySet()) {
        final String name = entry.getKey();
        final AnalysisModule.AnalysisProvider<T> provider = entry.getValue();
        // Single containsKey check; the original duplicated it immediately inside itself.
        if (factories.containsKey(name) == false) {
            final T instance = provider.get(settings, environment, name, defaultSettings);
            factories.put(name, instance);
        }
    }
    return factories;
}
/**
 * Resolves the provider for an explicitly configured component.
 *
 * @throws IllegalArgumentException if {@code typeName} is null (no type configured)
 *         or no provider is registered under that type
 */
private <T> AnalysisProvider<T> getAnalysisProvider(Component component, Map<String, ? extends AnalysisProvider<T>> providerMap,
                                                    String name, String typeName) {
    if (typeName == null) {
        throw new IllegalArgumentException(component + " [" + name + "] must specify either an analyzer type, or a tokenizer");
    }
    final AnalysisProvider<T> provider = providerMap.get(typeName);
    if (provider == null) {
        throw new IllegalArgumentException("Unknown " + component + " type [" + typeName + "] for [" + name + "]");
    }
    return provider;
}
/**
 * Holder for the pre-built / pre-configured analysis components shared across indices:
 * the built-in analyzers (one per {@link PreBuiltAnalyzers} constant) plus the
 * pre-configured char filters, token filters and tokenizers passed in at construction.
 */
private static class PrebuiltAnalysis implements Closeable {
    final Map<String, AnalysisModule.AnalysisProvider<AnalyzerProvider<?>>> analyzerProviderFactories;
    final Map<String, ? extends AnalysisProvider<TokenFilterFactory>> preConfiguredTokenFilters;
    final Map<String, ? extends AnalysisProvider<TokenizerFactory>> preConfiguredTokenizers;
    final Map<String, ? extends AnalysisProvider<CharFilterFactory>> preConfiguredCharFilterFactories;
    private PrebuiltAnalysis(
            Map<String, PreConfiguredCharFilter> preConfiguredCharFilters,
            Map<String, PreConfiguredTokenFilter> preConfiguredTokenFilters,
            Map<String, PreConfiguredTokenizer> preConfiguredTokenizers) {
        Map<String, PreBuiltAnalyzerProviderFactory> analyzerProviderFactories = new HashMap<>();
        // Analyzers: register every built-in analyzer under its lowercase enum name.
        for (PreBuiltAnalyzers preBuiltAnalyzerEnum : PreBuiltAnalyzers.values()) {
            String name = preBuiltAnalyzerEnum.name().toLowerCase(Locale.ROOT);
            analyzerProviderFactories.put(name, new PreBuiltAnalyzerProviderFactory(name, AnalyzerScope.INDICES, preBuiltAnalyzerEnum.getAnalyzer(Version.CURRENT)));
        }
        this.analyzerProviderFactories = Collections.unmodifiableMap(analyzerProviderFactories);
        this.preConfiguredCharFilterFactories = preConfiguredCharFilters;
        this.preConfiguredTokenFilters = preConfiguredTokenFilters;
        this.preConfiguredTokenizers = preConfiguredTokenizers;
    }
    // Lookups below return null when nothing is registered under the given name.
    public AnalysisModule.AnalysisProvider<CharFilterFactory> getCharFilterFactory(String name) {
        return preConfiguredCharFilterFactories.get(name);
    }
    public AnalysisModule.AnalysisProvider<TokenFilterFactory> getTokenFilterFactory(String name) {
        return preConfiguredTokenFilters.get(name);
    }
    public AnalysisModule.AnalysisProvider<TokenizerFactory> getTokenizerFactory(String name) {
        return preConfiguredTokenizers.get(name);
    }
    public AnalysisModule.AnalysisProvider<AnalyzerProvider<?>> getAnalyzerProvider(String name) {
        return analyzerProviderFactories.get(name);
    }
    // Returns the pre-built analyzer registered under name, or null if there is none.
    Analyzer analyzer(String name) {
        PreBuiltAnalyzerProviderFactory analyzerProviderFactory = (PreBuiltAnalyzerProviderFactory) analyzerProviderFactories.get(name);
        if (analyzerProviderFactory == null) {
            return null;
        }
        return analyzerProviderFactory.analyzer();
    }
    @Override
    public void close() throws IOException {
        // Close every pre-built analyzer this holder created.
        IOUtils.close(analyzerProviderFactories.values().stream().map((a) -> ((PreBuiltAnalyzerProviderFactory)a).analyzer()).collect(Collectors.toList()));
    }
}
/**
 * Assembles the final per-index {@link IndexAnalyzers} from already-built component factories.
 * Installs the fallback chain default → default_search → default_search_quote, rejects
 * the removed {@code default_index} analyzer and names starting with '_'.
 */
public IndexAnalyzers build(IndexSettings indexSettings,
                            Map<String, AnalyzerProvider<?>> analyzerProviders,
                            Map<String, AnalyzerProvider<?>> normalizerProviders,
                            Map<String, TokenizerFactory> tokenizerFactoryFactories,
                            Map<String, CharFilterFactory> charFilterFactoryFactories,
                            Map<String, TokenFilterFactory> tokenFilterFactoryFactories) {
    Index index = indexSettings.getIndex();
    analyzerProviders = new HashMap<>(analyzerProviders);
    Map<String, NamedAnalyzer> analyzers = new HashMap<>();
    Map<String, NamedAnalyzer> normalizers = new HashMap<>();
    for (Map.Entry<String, AnalyzerProvider<?>> entry : analyzerProviders.entrySet()) {
        processAnalyzerFactory(indexSettings, entry.getKey(), entry.getValue(), analyzers,
            tokenFilterFactoryFactories, charFilterFactoryFactories, tokenizerFactoryFactories);
    }
    for (Map.Entry<String, AnalyzerProvider<?>> entry : normalizerProviders.entrySet()) {
        processNormalizerFactory(entry.getKey(), entry.getValue(), normalizers,
            tokenizerFactoryFactories.get("keyword"), tokenFilterFactoryFactories, charFilterFactoryFactories);
    }
    if (!analyzers.containsKey("default")) {
        processAnalyzerFactory(indexSettings, "default", new StandardAnalyzerProvider(indexSettings, null, "default", Settings.Builder.EMPTY_SETTINGS),
            analyzers, tokenFilterFactoryFactories, charFilterFactoryFactories, tokenizerFactoryFactories);
    }
    if (!analyzers.containsKey("default_search")) {
        analyzers.put("default_search", analyzers.get("default"));
    }
    // BUGFIX: the fallback was previously registered under the misspelled key
    // "default_search_quoted", which is never read — the lookup below uses
    // "default_search_quote". Register it under the key that is actually consulted.
    if (!analyzers.containsKey("default_search_quote")) {
        analyzers.put("default_search_quote", analyzers.get("default_search"));
    }
    NamedAnalyzer defaultAnalyzer = analyzers.get("default");
    if (defaultAnalyzer == null) {
        throw new IllegalArgumentException("no default analyzer configured");
    }
    if (analyzers.containsKey("default_index")) {
        throw new IllegalArgumentException("setting [index.analysis.analyzer.default_index] is not supported anymore, use [index.analysis.analyzer.default] instead for index [" + index.getName() + "]");
    }
    NamedAnalyzer defaultSearchAnalyzer = analyzers.getOrDefault("default_search", defaultAnalyzer);
    NamedAnalyzer defaultSearchQuoteAnalyzer = analyzers.getOrDefault("default_search_quote", defaultSearchAnalyzer);
    for (Map.Entry<String, NamedAnalyzer> analyzer : analyzers.entrySet()) {
        if (analyzer.getKey().startsWith("_")) {
            throw new IllegalArgumentException("analyzer name must not start with '_'. got \"" + analyzer.getKey() + "\"");
        }
    }
    return new IndexAnalyzers(indexSettings, defaultAnalyzer, defaultSearchAnalyzer, defaultSearchQuoteAnalyzer,
        unmodifiableMap(analyzers), unmodifiableMap(normalizers));
}
/**
 * Instantiates one analyzer provider, wraps the result in a {@link NamedAnalyzer}
 * (overriding the position increment gap for non-custom analyzers) and registers it
 * under {@code name}.
 *
 * @throws IllegalArgumentException if the provider produces a null analyzer or the
 *         legacy alias setting is present
 * @throws IllegalStateException if an analyzer is already registered under {@code name}
 */
private void processAnalyzerFactory(IndexSettings indexSettings,
                                    String name,
                                    AnalyzerProvider<?> analyzerFactory,
                                    Map<String, NamedAnalyzer> analyzers, Map<String, TokenFilterFactory> tokenFilters,
                                    Map<String, CharFilterFactory> charFilters, Map<String, TokenizerFactory> tokenizers) {
    /*
     * Lucene defaults positionIncrementGap to 0 in all analyzers but
     * Elasticsearch defaults them to 0 only before version 2.0
     * and 100 afterwards so we override the positionIncrementGap if it
     * doesn't match here.
     */
    int overridePositionIncrementGap = TextFieldMapper.Defaults.POSITION_INCREMENT_GAP;
    if (analyzerFactory instanceof CustomAnalyzerProvider) {
        ((CustomAnalyzerProvider) analyzerFactory).build(tokenizers, charFilters, tokenFilters);
        /*
         * Custom analyzers already default to the correct, version
         * dependent positionIncrementGap and the user is be able to
         * configure the positionIncrementGap directly on the analyzer so
         * we disable overriding the positionIncrementGap to preserve the
         * user's setting.
         */
        // Integer.MIN_VALUE acts as a sentinel: the >= 0 check below then never triggers a rewrap.
        overridePositionIncrementGap = Integer.MIN_VALUE;
    }
    Analyzer analyzerF = analyzerFactory.get();
    if (analyzerF == null) {
        throw new IllegalArgumentException("analyzer [" + analyzerFactory.name() + "] created null analyzer");
    }
    NamedAnalyzer analyzer;
    if (analyzerF instanceof NamedAnalyzer) {
        // if we got a named analyzer back, use it...
        analyzer = (NamedAnalyzer) analyzerF;
        if (overridePositionIncrementGap >= 0 && analyzer.getPositionIncrementGap(analyzer.name()) != overridePositionIncrementGap) {
            // unless the positionIncrementGap needs to be overridden
            analyzer = new NamedAnalyzer(analyzer, overridePositionIncrementGap);
        }
    } else {
        analyzer = new NamedAnalyzer(name, analyzerFactory.scope(), analyzerF, overridePositionIncrementGap);
    }
    if (analyzers.containsKey(name)) {
        throw new IllegalStateException("already registered analyzer with name: " + name);
    }
    analyzers.put(name, analyzer);
    // TODO: remove alias support completely when we no longer support pre 5.0 indices
    final String analyzerAliasKey = "index.analysis.analyzer." + analyzerFactory.name() + ".alias";
    if (indexSettings.getSettings().get(analyzerAliasKey) != null) {
        throw new IllegalArgumentException("setting [" + analyzerAliasKey + "] is not supported");
    }
}
/**
 * Instantiates one normalizer provider, wraps the result in a {@link NamedAnalyzer}
 * and registers it under {@code name}.
 *
 * @throws IllegalArgumentException if the provider produces a null normalizer
 * @throws IllegalStateException if a normalizer is already registered under {@code name}
 */
private void processNormalizerFactory(
        String name,
        AnalyzerProvider<?> normalizerFactory,
        Map<String, NamedAnalyzer> normalizers,
        TokenizerFactory keywordTokenizerFactory,
        Map<String, TokenFilterFactory> tokenFilters,
        Map<String, CharFilterFactory> charFilters) {
    // Custom normalizers must first be wired to the keyword tokenizer and their char/token filters.
    if (normalizerFactory instanceof CustomNormalizerProvider) {
        ((CustomNormalizerProvider) normalizerFactory).build(keywordTokenizerFactory, charFilters, tokenFilters);
    }
    Analyzer normalizerF = normalizerFactory.get();
    if (normalizerF == null) {
        throw new IllegalArgumentException("normalizer [" + normalizerFactory.name() + "] created null normalizer");
    }
    NamedAnalyzer normalizer = new NamedAnalyzer(name, normalizerFactory.scope(), normalizerF);
    if (normalizers.containsKey(name)) {
        // Fixed copy-paste in the message: this map holds normalizers, not analyzers.
        throw new IllegalStateException("already registered normalizer with name: " + name);
    }
    normalizers.put(name, normalizer);
}
}
| apache-2.0 |
stoksey69/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201411/ContentBundleServiceInterfaceperformContentBundleActionResponse.java | 1634 |
package com.google.api.ads.dfp.jaxws.v201411;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for performContentBundleActionResponse element declaration.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <element name="performContentBundleActionResponse">
* <complexType>
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="rval" type="{https://www.google.com/apis/ads/publisher/v201411}UpdateResult" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </element>
* </pre>
*
*
*/
// JAXB wrapper for the <performContentBundleActionResponse> element (see the
// @XmlRootElement below); it carries a single optional <rval> child of type UpdateResult.
// NOTE(review): this looks like generated (wsimport-style) code — presumably it should
// not be edited by hand; confirm against the code generator before changing.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "rval"
})
@XmlRootElement(name = "performContentBundleActionResponse")
public class ContentBundleServiceInterfaceperformContentBundleActionResponse {

    // Populated by JAXB from the <rval> element; null when the element is absent.
    protected UpdateResult rval;

    /**
     * Gets the value of the rval property.
     *
     * @return
     *     possible object is
     *     {@link UpdateResult }
     *
     */
    public UpdateResult getRval() {
        return rval;
    }

    /**
     * Sets the value of the rval property.
     *
     * @param value
     *     allowed object is
     *     {@link UpdateResult }
     *
     */
    public void setRval(UpdateResult value) {
        this.rval = value;
    }

}
| apache-2.0 |
DevStreet/FinanceAnalytics | projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/volatilityswap/CarrLeeFXVolatilitySwapCalculator.java | 11258 | /**
* Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.volatilityswap;
import java.util.Arrays;
import com.google.common.primitives.Doubles;
import com.opengamma.analytics.financial.interestrate.InstrumentDerivativeVisitorAdapter;
import com.opengamma.analytics.financial.model.volatility.surface.SmileDeltaTermStructureParameters;
import com.opengamma.analytics.financial.provider.description.volatilityswap.CarrLeeFXData;
import com.opengamma.analytics.math.FunctionUtils;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.analytics.math.rootfinding.NewtonRaphsonSingleRootFinder;
import com.opengamma.analytics.math.statistics.distribution.NormalDistribution;
import com.opengamma.analytics.math.statistics.distribution.ProbabilityDistribution;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.tuple.Triple;
/**
*
*/
public class CarrLeeFXVolatilitySwapCalculator extends InstrumentDerivativeVisitorAdapter<CarrLeeFXData, VolatilitySwapCalculatorResult> {
private static final double DEFAULT_LOWEST_PUT_DELTA = -0.1;
private static final double DEFAULT_HIGHEST_CALL_DELTA = 0.1;
private static final int DEFAULT_NUM_POINTS = 50;
private static final ProbabilityDistribution<Double> NORMAL = new NormalDistribution(0, 1);
private static final CarrLeeNewlyIssuedSyntheticVolatilitySwapCalculator NEW_CALCULATOR = new CarrLeeNewlyIssuedSyntheticVolatilitySwapCalculator();
private static final CarrLeeSeasonedSyntheticVolatilitySwapCalculator SEASONED_CALCULATOR = new CarrLeeSeasonedSyntheticVolatilitySwapCalculator();
private final double _lowestPutDelta;
private final double _highestCallDelta;
private final int _numPoints;
private final double[] _strikeRange;
/**
 * Default constructor: delegates to the full constructor using the class defaults,
 * i.e. a lowest put delta of -0.1, a highest call delta of 0.1 and 50 strike points.
 */
public CarrLeeFXVolatilitySwapCalculator() {
    this(DEFAULT_LOWEST_PUT_DELTA, DEFAULT_HIGHEST_CALL_DELTA, DEFAULT_NUM_POINTS);
}
/**
 * Constructor specifying the strike range by option deltas and the number of strikes.
 * With this constructor the explicit strike range is left unset ({@code null}), so the
 * range is later derived from the deltas.
 *
 * @param lowestPutDelta The delta for the put with the lowest strike; must lie in (-1, 0)
 * @param highestCallDelta The delta for the call with the highest strike; must lie in (0, 1)
 * @param numPoints The number of strikes between the lowest strike and the highest strike is (numPoints + 1); must exceed 2
 */
public CarrLeeFXVolatilitySwapCalculator(final double lowestPutDelta, final double highestCallDelta, final int numPoints) {
    ArgumentChecker.isTrue(lowestPutDelta < 0. && lowestPutDelta > -1., "-1 < lowestPutDelta < 0 should be true");
    ArgumentChecker.isTrue(highestCallDelta > 0. && highestCallDelta < 1., "0 < highestCallDelta < 1 should be true");
    ArgumentChecker.isTrue(numPoints > 2, "numPoints should be greater than 2");
    _lowestPutDelta = lowestPutDelta;
    _highestCallDelta = highestCallDelta;
    _numPoints = numPoints;
    _strikeRange = null;
}
/**
 * Constructor specifying the number of strikes and an explicit strike range.
 * The delta fields are set to 0 as unused placeholders, and the range is
 * defensively copied so later mutation of the caller's array has no effect.
 *
 * @param numPoints The number of strikes between the lowest strike and the highest strike is (numPoints + 1); must exceed 2
 * @param strikeRange {minimum strike, maximum strike}; must have length 2 with strikeRange[0] < strikeRange[1]
 */
public CarrLeeFXVolatilitySwapCalculator(final int numPoints, final double[] strikeRange) {
    ArgumentChecker.isTrue(numPoints > 2, "numPoints should be greater than 2");
    ArgumentChecker.notNull(strikeRange, "strikeRange");
    ArgumentChecker.isTrue(strikeRange.length == 2, "length of strikeRange should be 2");
    ArgumentChecker.isTrue(strikeRange[0] < strikeRange[1], "upper bound should be greater than lower bound");
    // Deltas are not used when an explicit strike range is supplied.
    _lowestPutDelta = 0.0;
    _highestCallDelta = 0.0;
    _numPoints = numPoints;
    _strikeRange = Arrays.copyOf(strikeRange, 2);
}
/**
 * Prices an FX volatility swap by building a strike grid of out-of-the-money puts and calls
 * around the forward, looking up implied volatilities from the smile, and delegating to the
 * seasoned or new-swap Carr-Lee calculator depending on whether observation has started.
 *
 * @param swap the volatility swap, not null
 * @param data the Carr-Lee FX market data (spot, curves, smile, realized variance), not null
 * @return the pricing result, including the put/call strikes used
 */
@Override
public VolatilitySwapCalculatorResultWithStrikes visitFXVolatilitySwap(final FXVolatilitySwap swap, final CarrLeeFXData data) {
  ArgumentChecker.notNull(swap, "swap");
  ArgumentChecker.notNull(data, "data");
  // Spot and time to expiry must be finite and strictly positive.
  final double spot = data.getSpot();
  final double timeToExpiry = swap.getTimeToMaturity();
  ArgumentChecker.isTrue(Doubles.isFinite(timeToExpiry), "timeToExpiry should be finite");
  ArgumentChecker.isTrue(timeToExpiry > 0., "timeToExpiry should be positive");
  ArgumentChecker.isTrue(Doubles.isFinite(spot), "spot should be finite");
  ArgumentChecker.isTrue(spot > 0., "spot should be positive");
  // Continuously compounded rates implied by the two discount curves.
  final double domesticDF = data.getMulticurveProvider().getDiscountFactor(swap.getBaseCurrency(), timeToExpiry);
  final double foreignDF = data.getMulticurveProvider().getDiscountFactor(swap.getCounterCurrency(), timeToExpiry);
  final double domesticRate = -Math.log(domesticDF) / timeToExpiry;
  final double foreignRate = -Math.log(foreignDF) / timeToExpiry;
  ArgumentChecker.isTrue(Doubles.isFinite(domesticRate), "domestic rate should be finite");
  ArgumentChecker.isTrue(Doubles.isFinite(foreignRate), "foreign rate should be finite");
  // FX forward via covered interest parity.
  final double forward = spot * foreignDF / domesticDF;
  // A negative observation start means the swap is "seasoned": part of the observation
  // period has already elapsed.
  final double timeFromInception = swap.getTimeToObservationStart() < 0 ? Math.abs(swap.getTimeToObservationStart()) : 0;
  final double[] strikeRange;
  if (_strikeRange == null) {
    // Derive the strike range from the configured deltas.
    if (swap.getTimeToObservationStart() < 0) {
      if (data.getRealizedVariance() == null) {
        throw new IllegalStateException("Trying to price a seasoned swap but have null realized variance in the market data object");
      }
      // NOTE(review): widens the delta-implied range using realized variance accumulated so far;
      // the /100 scaling suggests the variance is quoted in percentage terms — confirm.
      final double reference = 3.0 * Math.sqrt(data.getRealizedVariance() * timeFromInception) / 100.;
      strikeRange = getStrikeRange(timeToExpiry, data.getVolatilityData(), forward, reference);
    } else {
      strikeRange = getStrikeRange(timeToExpiry, data.getVolatilityData(), forward, 0.);
    }
  } else {
    // Explicit range supplied at construction; it must bracket the forward.
    strikeRange = Arrays.copyOf(_strikeRange, 2);
    ArgumentChecker.isTrue((forward > strikeRange[0] && forward < strikeRange[1]), "forward is outside of strike range");
  }
  // Evenly spaced grid of (_numPoints + 1) strikes covering the range.
  final double deltaK = (strikeRange[1] - strikeRange[0]) / _numPoints;
  final double[] strikes = new double[_numPoints + 1];
  for (int i = 0; i < _numPoints; ++i) {
    strikes[i] = strikeRange[0] + deltaK * i;
  }
  strikes[_numPoints] = strikeRange[1];
  // Split the grid at the forward: strikes at or below it back puts, the rest back calls.
  final int index = FunctionUtils.getLowerBoundIndex(strikes, forward);
  final int nPuts = index + 1;
  final int nCalls = _numPoints - index;
  final double[] putStrikes = new double[nPuts];
  final double[] callStrikes = new double[nCalls];
  final double[] putVols = new double[nPuts];
  final double[] callVols = new double[nCalls];
  System.arraycopy(strikes, 0, putStrikes, 0, nPuts);
  System.arraycopy(strikes, index + 1, callStrikes, 0, nCalls);
  // Implied volatility at each strike, read from the delta/term-structure smile.
  for (int i = 0; i < nPuts; ++i) {
    putVols[i] = data.getVolatilityData().getVolatility(Triple.of(timeToExpiry, putStrikes[i], forward));
  }
  for (int i = 0; i < nCalls; ++i) {
    callVols[i] = data.getVolatilityData().getVolatility(Triple.of(timeToExpiry, callStrikes[i], forward));
  }
  if (swap.getTimeToObservationStart() < 0) {
    // Seasoned swap: price with the realized variance accumulated so far.
    return (SEASONED_CALCULATOR.evaluate(spot, putStrikes, callStrikes, timeToExpiry, timeFromInception, domesticRate,
        foreignRate, putVols, callVols, data.getRealizedVariance()).withStrikes(putStrikes, callStrikes));
  }
  // New swap: additionally needs the at-the-forward straddle volatility.
  final double strdVol = data.getVolatilityData().getVolatility(Triple.of(timeToExpiry, forward, forward));
  return (NEW_CALCULATOR.evaluate(spot, putStrikes, callStrikes, timeToExpiry, domesticRate, foreignRate, putVols, strdVol, callVols)).withStrikes(putStrikes, callStrikes);
}
/**
 * Computes the {low, high} strike range implied by the configured deltas, optionally
 * widened so it covers at least forward * exp(+/- reference).
 */
private double[] getStrikeRange(final double timeToExpiry, final SmileDeltaTermStructureParameters smile, final double forward, final double reference) {
  final double lower = findStrike(_lowestPutDelta, timeToExpiry, smile, forward, false);
  final double upper = findStrike(_highestCallDelta, timeToExpiry, smile, forward, true);
  if (reference == 0.) {
    return new double[] {lower, upper };
  }
  // Widen the delta-implied bounds by the reference move around the forward.
  return new double[] {Math.min(lower, forward * Math.exp(-reference)), Math.max(upper, forward * Math.exp(reference)) };
}
/**
 * Solves for the strike whose Black delta equals the requested delta, using
 * Newton-Raphson seeded at the forward.
 */
private double findStrike(final double delta, final double timeToExpiry, final SmileDeltaTermStructureParameters smile, final double forward, final boolean isCall) {
  final Function1D<Double, Double> objective = getDeltaDifference(delta, timeToExpiry, smile, forward, isCall);
  final Function1D<Double, Double> derivative = getDeltaDifferenceDiff(timeToExpiry, smile, forward);
  return new NewtonRaphsonSingleRootFinder(1.e-12).getRoot(objective, derivative, forward);
}
/**
 * Builds the objective function strike -> (Black delta at that strike) - delta,
 * whose root is the strike matching the requested delta.
 */
private Function1D<Double, Double> getDeltaDifference(final double delta, final double timeToExpiry, final SmileDeltaTermStructureParameters smile, final double forward,
    final boolean isCall) {
  final double rootT = Math.sqrt(timeToExpiry);
  // +1 for calls, -1 for puts; constant for the lifetime of the returned function.
  final double sign = isCall ? 1. : -1.;
  return new Function1D<Double, Double>() {
    @Override
    public Double evaluate(final Double strike) {
      final double impliedVol = smile.getVolatility(Triple.of(timeToExpiry, strike, forward));
      final double sigmaRootT = impliedVol * rootT;
      final double d1 = Math.log(forward / strike) / sigmaRootT + 0.5 * sigmaRootT;
      return sign * NORMAL.getCDF(sign * d1) - delta;
    }
  };
}
/**
 * Builds the derivative (with respect to strike) of the delta-difference objective,
 * used as the gradient for the Newton-Raphson solve in {@code findStrike}.
 */
private Function1D<Double, Double> getDeltaDifferenceDiff(final double timeToExpiry, final SmileDeltaTermStructureParameters smile, final double forward) {
  final double rootT = Math.sqrt(timeToExpiry);
  return new Function1D<Double, Double>() {
    @Override
    public Double evaluate(final Double strike) {
      final double impliedVol = smile.getVolatility(Triple.of(timeToExpiry, strike, forward));
      final double sigmaRootT = impliedVol * rootT;
      final double d1 = Math.log(forward / strike) / sigmaRootT + 0.5 * sigmaRootT;
      return -NORMAL.getPDF(d1) / strike / sigmaRootT;
    }
  };
}
/**
 * Hash code consistent with {@link #equals(Object)}: folds the two delta bounds
 * (via their raw long bits), the point count and the explicit strike range.
 */
@Override
public int hashCode() {
  final int prime = 31;
  long bits = Double.doubleToLongBits(_highestCallDelta);
  int result = prime + (int) (bits ^ (bits >>> 32));
  bits = Double.doubleToLongBits(_lowestPutDelta);
  result = prime * result + (int) (bits ^ (bits >>> 32));
  result = prime * result + _numPoints;
  return prime * result + Arrays.hashCode(_strikeRange);
}
/**
 * Two calculators are equal when their delta bounds (compared bitwise), point
 * counts and explicit strike ranges all match. A null or foreign-typed argument
 * is never equal (the instanceof test also rejects null).
 */
@Override
public boolean equals(Object obj) {
  if (this == obj) {
    return true;
  }
  if (!(obj instanceof CarrLeeFXVolatilitySwapCalculator)) {
    return false;
  }
  final CarrLeeFXVolatilitySwapCalculator other = (CarrLeeFXVolatilitySwapCalculator) obj;
  return Double.doubleToLongBits(_highestCallDelta) == Double.doubleToLongBits(other._highestCallDelta)
      && Double.doubleToLongBits(_lowestPutDelta) == Double.doubleToLongBits(other._lowestPutDelta)
      && _numPoints == other._numPoints
      && Arrays.equals(_strikeRange, other._strikeRange);
}
}
| apache-2.0 |
apache/tomcat-maven-plugin | common-tomcat-maven-plugin/src/test/java/org/apache/tomcat/maven/common/TomcatManagerTest.java | 9121 | package org.apache.tomcat.maven.common;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import junit.framework.TestCase;
import org.apache.catalina.Context;
import org.apache.catalina.startup.Tomcat;
import org.apache.commons.io.IOUtils;
import org.apache.tomcat.maven.common.deployer.TomcatManager;
import org.apache.tomcat.maven.common.deployer.TomcatManagerResponse;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
/**
 * Integration-style tests for {@link TomcatManager}: deploys a fixture file against an
 * embedded Tomcat, both directly and through a redirecting endpoint (absolute and
 * relative redirects).
 *
 * Fixes over the previous revision: {@code tearDown} now stops the redirect Tomcat as
 * well (it was previously leaked, keeping its connector threads and port alive), and the
 * servlets close their {@link FileOutputStream}s ({@code IOUtils.copy} does not close
 * streams, so the uploaded content was never guaranteed to be flushed to disk).
 *
 * @author Olivier Lamy
 */
public class TomcatManagerTest
    extends TestCase
{
    Tomcat tomcat;

    Tomcat redirectTomcat;

    UploadServlet uploadServlet;

    RedirectServlet redirectServlet;

    int port;

    int redirectPort;

    public static String getBasedir()
    {
        return System.getProperty( "basedir" );
    }

    @Override
    protected void setUp()
        throws Exception
    {
        super.setUp();
        // Target server: records every PUT it receives. Port 0 asks the OS for a free port.
        tomcat = new Tomcat();
        tomcat.setBaseDir( System.getProperty( "java.io.tmpdir" ) );
        tomcat.setPort( 0 );
        Context context = tomcat.addContext( "", System.getProperty( "java.io.tmpdir" ) );
        uploadServlet = new UploadServlet();
        tomcat.addServlet( context, "foo", uploadServlet );
        context.addServletMapping( "/*", "foo" );
        tomcat.start();
        port = tomcat.getConnector().getLocalPort();
        System.out.println( "Tomcat started on port:" + port );
        // Redirecting server: answers PUTs with a redirect, except for "redirectrelative"
        // paths which it serves itself.
        redirectTomcat = new Tomcat();
        redirectTomcat.setBaseDir( System.getProperty( "java.io.tmpdir" ) );
        redirectTomcat.setPort( 0 );
        context = redirectTomcat.addContext( "", System.getProperty( "java.io.tmpdir" ) );
        redirectServlet = new RedirectServlet();
        redirectTomcat.addServlet( context, "foo", redirectServlet );
        context.addServletMapping( "/*", "foo" );
        redirectTomcat.start();
        redirectPort = redirectTomcat.getConnector().getLocalPort();
        System.out.println( "redirect Tomcat started on port:" + redirectPort );
    }

    @Override
    protected void tearDown()
        throws Exception
    {
        super.tearDown();
        // Stop BOTH embedded instances. The redirect Tomcat was previously never stopped,
        // leaking its port and connector threads across tests.
        try
        {
            if ( tomcat != null )
            {
                tomcat.stop();
            }
        }
        finally
        {
            if ( redirectTomcat != null )
            {
                redirectTomcat.stop();
            }
        }
    }

    public void testDeployWar()
        throws Exception
    {
        uploadServlet.uploadedResources.clear();
        TomcatManager tomcatManager = new TomcatManager( new URL( "http://localhost:" + this.port + "/foo/bar" ) );
        TomcatManagerResponse response =
            tomcatManager.deploy( "foo", new File( getBasedir(), "src/test/resources/test.txt" ) );
        assertEquals( 200, response.getStatusCode() );
        assertEquals( 1, uploadServlet.uploadedResources.size() );
        assertEquals( "/foo/bar/deploy", uploadServlet.uploadedResources.get( 0 ).requestUri );
        assertUploadedContent( uploadServlet.uploadedResources.get( 0 ).uploadedFile );
    }

    public void testDeployWarWithRedirect()
        throws Exception
    {
        uploadServlet.uploadedResources.clear();
        TomcatManager tomcatManager =
            new TomcatManager( new URL( "http://localhost:" + this.redirectPort + "/foo/bar" ) );
        // Absolute redirect pointing at the upload server.
        redirectServlet.redirectPath = "http://localhost:" + this.port + "/foo/bar/redirected";
        TomcatManagerResponse response =
            tomcatManager.deploy( "foo", new File( getBasedir(), "src/test/resources/test.txt" ) );
        assertEquals( 200, response.getStatusCode() );
        assertEquals( "no request to redirect servlet", 1, redirectServlet.uploadedResources.size() );
        assertEquals( "/foo/bar/deploy", redirectServlet.uploadedResources.get( 0 ).requestUri );
        assertEquals( "no redirected request to upload servlet", 1, uploadServlet.uploadedResources.size() );
        assertEquals( "/foo/bar/deploy", redirectServlet.uploadedResources.get( 0 ).requestUri );
        assertUploadedContent( uploadServlet.uploadedResources.get( 0 ).uploadedFile );
    }

    public void testDeployWarWithRedirectRelative()
        throws Exception
    {
        uploadServlet.uploadedResources.clear();
        TomcatManager tomcatManager =
            new TomcatManager( new URL( "http://localhost:" + this.redirectPort + "/foo/bar" ) );
        // Relative redirect: the follow-up PUT lands on the redirect server itself.
        redirectServlet.redirectPath = "redirectrelative/foo";
        TomcatManagerResponse response =
            tomcatManager.deploy( "foo", new File( getBasedir(), "src/test/resources/test.txt" ) );
        assertEquals( 200, response.getStatusCode() );
        assertEquals( "no request to redirect servlet", 2, redirectServlet.uploadedResources.size() );
        assertEquals( "/foo/bar/deploy", redirectServlet.uploadedResources.get( 0 ).requestUri );
        assertEquals( "found redirected request to upload servlet", 0, uploadServlet.uploadedResources.size() );
        assertEquals( "/foo/bar/deploy", redirectServlet.uploadedResources.get( 0 ).requestUri );
        assertUploadedContent( redirectServlet.uploadedResources.get( 1 ).uploadedFile );
    }

    /**
     * Reads back an uploaded file and asserts it contains the fixture content.
     *
     * @param uploadedFile the temp file a servlet stored the PUT body into
     */
    private void assertUploadedContent( File uploadedFile )
        throws IOException
    {
        FileInputStream fileInputStream = new FileInputStream( uploadedFile );
        try
        {
            StringWriter sw = new StringWriter();
            IOUtils.copy( fileInputStream, sw );
            assertTrue( sw.toString().contains( "Apache Tomcat rocks!!" ) );
        }
        finally
        {
            fileInputStream.close();
        }
    }

    //-----------------------------
    // internal for tests
    //-----------------------------

    /**
     * Pairs the request URI of a PUT with the temp file its body was stored in
     * (null when the body was not stored).
     */
    public class UploadedResource
    {
        public String requestUri;

        public File uploadedFile;

        public UploadedResource( String requestUri, File uploadedFile )
        {
            this.requestUri = requestUri;
            this.uploadedFile = uploadedFile;
        }
    }

    /**
     * Records every PUT it receives and stores the body in a temp file.
     */
    public class UploadServlet
        extends HttpServlet
    {
        public List<UploadedResource> uploadedResources = new ArrayList<UploadedResource>();

        @Override
        protected void doPut( HttpServletRequest req, HttpServletResponse resp )
            throws ServletException, IOException
        {
            System.out.println( "put ok:" + req.getRequestURI() );
            File file = File.createTempFile( "tomcat-unit-test", "tmp" );
            uploadedResources.add( new UploadedResource( req.getRequestURI(), file ) );
            // IOUtils.copy does not close streams: close explicitly so the content is
            // flushed to disk before the test reads it back.
            FileOutputStream out = new FileOutputStream( file );
            try
            {
                IOUtils.copy( req.getInputStream(), out );
            }
            finally
            {
                out.close();
            }
        }
    }

    /**
     * Answers PUTs with a redirect, except for "redirectrelative" URIs which it
     * stores itself like {@link UploadServlet}.
     */
    public class RedirectServlet
        extends HttpServlet
    {
        int redirectPort = 0;

        String redirectPath;

        public List<UploadedResource> uploadedResources = new ArrayList<UploadedResource>();

        @Override
        protected void doPut( HttpServletRequest req, HttpServletResponse resp )
            throws ServletException, IOException
        {
            System.out.println( "RedirectServlet put ok:" + req.getRequestURI() );
            if ( req.getRequestURI().contains( "redirectrelative" ) )
            {
                // Final destination of a relative redirect: behave like the upload servlet.
                File file = File.createTempFile( "tomcat-unit-test", "tmp" );
                uploadedResources.add( new UploadedResource( req.getRequestURI(), file ) );
                FileOutputStream out = new FileOutputStream( file );
                try
                {
                    IOUtils.copy( req.getInputStream(), out );
                }
                finally
                {
                    out.close();
                }
                return;
            }
            uploadedResources.add( new UploadedResource( req.getRequestURI(), null ) );
            String redirectUri =
                redirectPort > 0 ? "http://localhost:" + redirectPort + "/" + redirectPath : redirectPath;
            resp.sendRedirect( redirectUri );
        }
    }
}
| apache-2.0 |
siosio/intellij-community | plugins/tasks/tasks-core/src/com/intellij/tasks/actions/TaskSearchSupport.java | 4390 | // Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.tasks.actions;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationDisplayType;
import com.intellij.notification.NotificationType;
import com.intellij.notification.Notifications;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.tasks.Task;
import com.intellij.tasks.TaskBundle;
import com.intellij.tasks.TaskManager;
import com.intellij.tasks.TaskRepository;
import com.intellij.tasks.config.TaskRepositoriesConfigurable;
import com.intellij.tasks.impl.RequestFailedException;
import com.intellij.tasks.impl.TaskManagerImpl;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static com.intellij.tasks.impl.TaskUtil.filterTasks;
/**
 * Static helpers used by task-related actions to collect tasks from the local store,
 * the issue cache, and the configured remote repositories.
 *
 * @author Dmitry Avdeev
 */
public final class TaskSearchSupport {
  static final String TASKS_NOTIFICATION_GROUP = "Task Group";

  private TaskSearchSupport() {
  }

  /**
   * Returns local tasks plus cached issues that have no local counterpart,
   * filtered by {@code pattern} and sorted by update time.
   */
  public static List<Task> getLocalAndCachedTasks(final TaskManager myManager, String pattern, final boolean withClosed) {
    final List<Task> candidates = new ArrayList<>(myManager.getLocalTasks(withClosed));
    for (final Task issue : myManager.getCachedIssues(withClosed)) {
      // Skip cached issues the user already tracks locally.
      if (myManager.findTask(issue.getId()) == null) {
        candidates.add(issue);
      }
    }
    final List<Task> matched = filterTasks(pattern, candidates);
    ContainerUtil.sort(matched, TaskManagerImpl.TASK_UPDATE_COMPARATOR);
    return matched;
  }

  /**
   * Queries the configured repositories for issues matching {@code pattern},
   * sorted by update time. Connection failures are reported via a balloon and
   * yield an empty list rather than an exception.
   */
  public static List<Task> getRepositoriesTasks(Project project,
                                                String pattern,
                                                int offset,
                                                int limit,
                                                boolean forceRequest,
                                                final boolean withClosed,
                                                @NotNull final ProgressIndicator cancelled) {
    try {
      final TaskManager taskManager = TaskManager.getManager(project);
      final List<Task> issues = taskManager.getIssues(pattern, offset, limit, withClosed, cancelled, forceRequest);
      ContainerUtil.sort(issues, TaskManagerImpl.TASK_UPDATE_COMPARATOR);
      return issues;
    }
    catch (RequestFailedException e) {
      notifyAboutConnectionFailure(e, project);
      return Collections.emptyList();
    }
  }

  /**
   * Returns tasks matching {@code pattern}, taken either from the cache or
   * fetched from the repositories (forcing the request unless auto-popup).
   */
  public static List<Task> getItems(final TaskManager myManager,
                                    String pattern,
                                    boolean cached,
                                    boolean autopopup) {
    final List<Task> source = cached ? myManager.getCachedIssues() : myManager.getIssues(pattern, !autopopup);
    return filterTasks(pattern, source);
  }

  /** Shows a warning balloon with a link that opens the task-repositories settings. */
  private static void notifyAboutConnectionFailure(RequestFailedException e, Project project) {
    final TaskRepository repository = e.getRepository();
    final String details = e.getMessage();
    Notifications.Bus.register(TASKS_NOTIFICATION_GROUP, NotificationDisplayType.BALLOON);
    String content = TaskBundle.message("notification.content.p.href.configure.server.p");
    if (!StringUtil.isEmpty(details)) {
      content = "<p>" + details + "</p>" + content; //NON-NLS
    }
    new Notification(TASKS_NOTIFICATION_GROUP, TaskBundle.message("notification.title.cannot.connect.to", repository.getUrl()), content, NotificationType.WARNING)
      .setListener((balloon, event) -> {
        // Open the repositories settings; expire the balloon if the offending
        // repository was removed there.
        TaskRepositoriesConfigurable configurable = new TaskRepositoriesConfigurable(project);
        ShowSettingsUtil.getInstance().editConfigurable(project, configurable);
        if (!ArrayUtil.contains(repository, TaskManager.getManager(project).getAllRepositories())) {
          balloon.expire();
        }
      })
      .notify(project);
  }
}
| apache-2.0 |
episerver/elasticsearch | core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java | 8597 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices;
import org.apache.lucene.store.LockObtainFailedException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.gateway.GatewayMetaState;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardPath;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.IndexSettingsModule;
import java.util.concurrent.TimeUnit;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
/**
 * Tests for {@link IndicesService}: deletability checks for index and shard contents,
 * safety of deleting an index store while in use, and processing of pending deletes.
 */
public class IndicesServiceTests extends ESSingleNodeTestCase {

    public IndicesService getIndicesService() {
        return getInstanceFromNode(IndicesService.class);
    }

    public NodeEnvironment getNodeEnvironment() {
        return getInstanceFromNode(NodeEnvironment.class);
    }

    @Override
    protected boolean resetNodeAfterTest() {
        // These tests create, close and delete indices, so each test needs a fresh node.
        return true;
    }

    public void testCanDeleteIndexContent() {
        IndicesService indicesService = getIndicesService();
        // Index configured on a shared filesystem: shadow replicas plus a custom data path.
        IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("test", Settings.builder()
            .put(IndexMetaData.SETTING_SHADOW_REPLICAS, true)
            .put(IndexMetaData.SETTING_DATA_PATH, "/foo/bar")
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 4))
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, randomIntBetween(0, 3))
            .build());
        // Shared-filesystem contents may only be deleted once the index is closed.
        assertFalse("shard on shared filesystem", indicesService.canDeleteIndexContents(idxSettings.getIndex(), idxSettings, false));
        assertTrue("shard on shared filesystem and closed", indicesService.canDeleteIndexContents(idxSettings.getIndex(), idxSettings, true));
    }

    public void testCanDeleteShardContent() {
        IndicesService indicesService = getIndicesService();
        IndexMetaData meta = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(1).numberOfReplicas(
            1).build();
        IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", meta.getSettings());
        // No on-disk location yet: nothing to delete.
        assertFalse("no shard location", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings));
        IndexService test = createIndex("test");
        assertTrue(test.hasShard(0));
        // An allocated (open) shard must not be deletable.
        assertFalse("shard is allocated", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings));
        test.removeShard(0, "boom");
        // Once the shard is removed its on-disk content becomes deletable.
        assertTrue("shard is removed", indicesService.canDeleteShardContent(new ShardId("test", "_na_", 0), indexSettings));
    }

    public void testDeleteIndexStore() throws Exception {
        IndicesService indicesService = getIndicesService();
        IndexService test = createIndex("test");
        ClusterService clusterService = getInstanceFromNode(ClusterService.class);
        IndexMetaData firstMetaData = clusterService.state().metaData().index("test");
        assertTrue(test.hasShard(0));
        // Deleting the store of an index that is still part of the cluster state must fail.
        try {
            indicesService.deleteIndexStore("boom", firstMetaData, clusterService.state(), false);
            fail();
        } catch (IllegalStateException ex) {
            // all good
        }
        GatewayMetaState gwMetaState = getInstanceFromNode(GatewayMetaState.class);
        MetaData meta = gwMetaState.loadMetaState();
        assertNotNull(meta);
        assertNotNull(meta.index("test"));
        assertAcked(client().admin().indices().prepareDelete("test"));
        // After an acknowledged delete, the index is gone from persisted metadata.
        meta = gwMetaState.loadMetaState();
        assertNotNull(meta);
        assertNull(meta.index("test"));
        // Recreate the index with a document so it has on-disk shard data.
        test = createIndex("test");
        client().prepareIndex("test", "type", "1").setSource("field", "value").setRefresh(true).get();
        client().admin().indices().prepareFlush("test").get();
        assertHitCount(client().prepareSearch("test").get(), 1);
        IndexMetaData secondMetaData = clusterService.state().metaData().index("test");
        assertAcked(client().admin().indices().prepareClose("test"));
        ShardPath path = ShardPath.loadShardPath(logger, getNodeEnvironment(), new ShardId(test.index(), 0), test.getIndexSettings());
        assertTrue(path.exists());
        // Even closed, the index is still in the cluster state, so deletion must fail
        // and leave the shard data untouched.
        try {
            indicesService.deleteIndexStore("boom", secondMetaData, clusterService.state(), false);
            fail();
        } catch (IllegalStateException ex) {
            // all good
        }
        assertTrue(path.exists());
        // now delete the old one and make sure we resolve against the name
        try {
            indicesService.deleteIndexStore("boom", firstMetaData, clusterService.state(), false);
            fail();
        } catch (IllegalStateException ex) {
            // all good
        }
        assertAcked(client().admin().indices().prepareOpen("test"));
        ensureGreen("test");
    }

    public void testPendingTasks() throws Exception {
        IndicesService indicesService = getIndicesService();
        IndexService test = createIndex("test");
        assertTrue(test.hasShard(0));
        ShardPath path = test.getShardOrNull(0).shardPath();
        assertTrue(test.getShardOrNull(0).routingEntry().started());
        ShardPath shardPath = ShardPath.loadShardPath(logger, getNodeEnvironment(), new ShardId(test.index(), 0), test.getIndexSettings());
        assertEquals(shardPath, path);
        // The open shard still holds its lock, so processing pending deletes must fail.
        try {
            indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
            fail("can't get lock");
        } catch (LockObtainFailedException ex) {
            // expected: the shard lock is still held by the open index
        }
        assertTrue(path.exists());
        // Register one or two pending deletes (shard-level and/or index-level).
        int numPending = 1;
        if (randomBoolean()) {
            indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
        } else {
            if (randomBoolean()) {
                numPending++;
                indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
            }
            indicesService.addPendingDelete(test.index(), test.getIndexSettings());
        }
        assertAcked(client().admin().indices().prepareClose("test"));
        assertTrue(path.exists());
        assertEquals(indicesService.numPendingDeletes(test.index()), numPending);
        // shard lock released... we can now delete
        indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
        assertEquals(indicesService.numPendingDeletes(test.index()), 0);
        assertFalse(path.exists());
        if (randomBoolean()) {
            // Pending deletes for unknown shards/indices are processed without error.
            indicesService.addPendingDelete(new ShardId(test.index(), 0), test.getIndexSettings());
            indicesService.addPendingDelete(new ShardId(test.index(), 1), test.getIndexSettings());
            indicesService.addPendingDelete(new ShardId("bogus", "_na_", 1), test.getIndexSettings());
            assertEquals(indicesService.numPendingDeletes(test.index()), 2);
            // shard lock released... we can now delete
            indicesService.processPendingDeletes(test.index(), test.getIndexSettings(), new TimeValue(0, TimeUnit.MILLISECONDS));
            assertEquals(indicesService.numPendingDeletes(test.index()), 0);
        }
        assertAcked(client().admin().indices().prepareOpen("test"));
    }
}
| apache-2.0 |
Soo000/SooChat | src/org/jivesoftware/smackx/workgroup/QueueUser.java | 2551 | /**
*
* Copyright 2003-2007 Jive Software.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.workgroup;
import java.util.Date;
/**
 * An immutable class which wraps up customer-in-queue data returned from the server; depending on
 * the type of information dispatched from the server, not all information will be available in
 * any given instance.
 * <p>
 * {@link Date} is mutable, so the join timestamp is defensively copied on the way in and on the
 * way out — previously a caller could mutate this "immutable" object through the shared Date.
 *
 * @author loki der quaeler
 */
public class QueueUser {

    private final String userID;
    private final int queuePosition;
    private final int estimatedTime;
    private final Date joinDate;

    /**
     * @param uid the user jid of the customer in the queue
     * @param position the position customer sits in the queue
     * @param time the estimate of how much longer the customer will be in the queue in seconds
     * @param joinedAt the timestamp of when the customer entered the queue (may be null)
     */
    public QueueUser (String uid, int position, int time, Date joinedAt) {
        this.userID = uid;
        this.queuePosition = position;
        this.estimatedTime = time;
        // Defensive copy: don't let the caller mutate our state through the shared Date.
        this.joinDate = (joinedAt != null) ? new Date(joinedAt.getTime()) : null;
    }

    /**
     * @return the user jid of the customer in the queue
     */
    public String getUserID () {
        return this.userID;
    }

    /**
     * @return the position in the queue at which the customer sits, or -1 if the update which
     * this instance embodies is only a time update instead
     */
    public int getQueuePosition () {
        return this.queuePosition;
    }

    /**
     * @return the estimated time remaining of the customer in the queue in seconds, or -1 if
     * if the update which this instance embodies is only a position update instead
     */
    public int getEstimatedRemainingTime () {
        return this.estimatedTime;
    }

    /**
     * @return a copy of the timestamp of when this customer entered the queue, or null if the
     * server did not provide this information
     */
    public Date getQueueJoinTimestamp () {
        // Return a copy so callers cannot mutate this instance's state.
        return (this.joinDate != null) ? new Date(this.joinDate.getTime()) : null;
    }
}
| apache-2.0 |
zhengbangpeng/product-mdm-zh | modules/mobile-agents/android/client/client/src/main/java/org/wso2/emm/agent/api/DeviceInfo.java | 4270 | /*
* Copyright (c) 2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.emm.agent.api;
import java.util.List;
import org.wso2.emm.agent.R;
import org.wso2.emm.agent.utils.Preference;
import android.content.Context;
import android.content.res.Resources;
import android.hardware.Sensor;
import android.hardware.SensorManager;
import android.net.wifi.WifiInfo;
import android.net.wifi.WifiManager;
import android.os.Build;
import android.provider.Settings.Secure;
import android.telephony.TelephonyManager;
/**
 * This class represents all the device information related APIs.
 * Thin wrappers around Android platform services (TelephonyManager, WifiManager,
 * SensorManager) plus agent preferences; constructed with an application Context.
 */
public class DeviceInfo {

	private Root rootChecker;
	private Context context;
	private Resources resources;
	private TelephonyManager telephonyManager;

	public DeviceInfo(Context context) {
		this.context = context;
		this.resources = context.getResources();
		this.telephonyManager = (TelephonyManager) context.
				getSystemService(Context.TELEPHONY_SERVICE);
	}

	/**
	 * Returns the network operator name.
	 * NOTE(review): despite the name, this returns TelephonyManager.getSimOperatorName()
	 * (the SIM/service-provider name), not getNetworkOperatorName() — confirm intentional.
	 * @return - Network operator name.
	 */
	public String getNetworkOperatorName() {
		return telephonyManager.getSimOperatorName();
	}

	/**
	 * Returns the device model.
	 * @return - Device model.
	 */
	public String getDeviceModel() {
		return android.os.Build.MODEL;
	}

	/**
	 * Returns the device manufacturer.
	 * @return - Device manufacturer.
	 */
	public String getDeviceManufacturer() {
		return Build.MANUFACTURER;
	}

	/**
	 * Returns the OS version.
	 * @return - Device OS version.
	 */
	public String getOsVersion() {
		return android.os.Build.VERSION.RELEASE;
	}

	/**
	 * Returns the SDK Version number.
	 * @return - Device android SDK version number.
	 */
	public int getSdkVersion() {
		return android.os.Build.VERSION.SDK_INT;
	}

	/**
	 * Returns the device name.
	 * @return - Device name.
	 */
	public String getDeviceName() {
		return android.os.Build.DEVICE;
	}

	/**
	 * Returns the IMEI Number.
	 * Falls back to the ANDROID_ID secure setting when the telephony device ID is
	 * unavailable (e.g. WiFi-only devices).
	 * @return - Device IMEI number, or ANDROID_ID as a fallback.
	 */
	public String getDeviceId() {
		String deviceId = telephonyManager.getDeviceId();
		if (deviceId == null || deviceId.isEmpty()) {
			deviceId = Secure.getString(context.getContentResolver(), Secure.ANDROID_ID);
		}
		return deviceId;
	}

	/**
	 * Returns the IMSI Number.
	 * @return - Device IMSI number (may be null without a SIM).
	 */
	public String getIMSINumber() {
		return telephonyManager.getSubscriberId();
	}

	/**
	 * Returns the device WiFi MAC.
	 * @return - Device WiFi MAC.
	 */
	public String getMACAddress() {
		WifiManager wifiManager = (WifiManager) context.getSystemService(Context.WIFI_SERVICE);
		WifiInfo wInfo = wifiManager.getConnectionInfo();
		return wInfo.getMacAddress();
	}

	/**
	 * Returns the Email address of the device owner.
	 * Reads the stored agent preference keyed by shared_pref_username — presumably the
	 * enrolled user's identifier; verify it actually holds an email address.
	 * @return - Device owner email address.
	 */
	public String getEmail() {
		return Preference.getString(context,
		                            resources.getString(R.string.shared_pref_username));
	}

	/**
	 * Returns true if the device is a Rooted device.
	 * A fresh Root checker is created (and stored in the field) on every call.
	 * @return - Device rooted status.
	 */
	public boolean isRooted() {
		rootChecker = new Root();
		return rootChecker.isDeviceRooted();
	}

	/**
	 * Returns the SIM serial number.
	 * @return - Device SIM serial number.
	 */
	public String getSimSerialNumber() {
		return telephonyManager.getSimSerialNumber();
	}

	/**
	 * Returns all the sensors available on the device as a List.
	 * @return - List of all the sensors available on the device.
	 */
	public List<Sensor> getAllSensors() {
		SensorManager sensorManager =
				(SensorManager) context.getSystemService(Context.SENSOR_SERVICE);
		return sensorManager.getSensorList(Sensor.TYPE_ALL);
	}
}
| apache-2.0 |
lburgazzoli/apache-activemq-artemis | tests/activemq5-unit-tests/src/test/java/org/apache/activemq/broker/policy/NoRetryDeadLetterTest.java | 1410 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.broker.policy;
import org.apache.activemq.ActiveMQConnectionFactory;
import org.apache.activemq.RedeliveryPolicy;
/**
 * Dead-letter test variant in which messages are never retried: the
 * connection factory is configured with a redelivery policy of zero
 * maximum redeliveries, so a failed message goes straight to the DLQ.
 */
public class NoRetryDeadLetterTest extends DeadLetterTest {

   @Override
   protected ActiveMQConnectionFactory createConnectionFactory() throws Exception {
      ActiveMQConnectionFactory factory = super.createConnectionFactory();
      RedeliveryPolicy noRetryPolicy = new RedeliveryPolicy();
      noRetryPolicy.setMaximumRedeliveries(0);
      factory.setRedeliveryPolicy(noRetryPolicy);
      return factory;
   }
}
| apache-2.0 |
shuodata/deeplearning4j | deeplearning4j-nn/src/main/java/org/deeplearning4j/optimize/api/TrainingListener.java | 2913 | package org.deeplearning4j.optimize.api;
import org.deeplearning4j.nn.api.Model;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;
import org.nd4j.linalg.dataset.api.iterator.MultiDataSetIterator;
import java.util.List;
import java.util.Map;
/**
* TrainingListener: an extension of {@link IterationListener} that adds onEpochStart, onEpochEnd, onForwardPass and
* onBackwardPass methods
*
* @author Alex Black
*/
/**
 * TrainingListener: an extension of {@link IterationListener} that adds hooks fired at epoch
 * boundaries ({@code onEpochStart}/{@code onEpochEnd}) and around each training iteration's
 * forward and backward passes. All of these callbacks are invoked at training time only.
 *
 * @author Alex Black
 */
public interface TrainingListener extends IterationListener {

    /**
     * Called once at the start of each epoch, when using methods such as
     * {@link org.deeplearning4j.nn.multilayer.MultiLayerNetwork#fit(DataSetIterator)},
     * {@link org.deeplearning4j.nn.graph.ComputationGraph#fit(DataSetIterator)} or
     * {@link org.deeplearning4j.nn.graph.ComputationGraph#fit(MultiDataSetIterator)}
     *
     * @param model Model being trained
     */
    void onEpochStart(Model model);

    /**
     * Called once at the end of each epoch, when using methods such as
     * {@link org.deeplearning4j.nn.multilayer.MultiLayerNetwork#fit(DataSetIterator)},
     * {@link org.deeplearning4j.nn.graph.ComputationGraph#fit(DataSetIterator)} or
     * {@link org.deeplearning4j.nn.graph.ComputationGraph#fit(MultiDataSetIterator)}
     *
     * @param model Model being trained
     */
    void onEpochEnd(Model model);

    /**
     * Called once per iteration (forward pass) with the layer activations as a list
     * (usually for a {@link org.deeplearning4j.nn.multilayer.MultiLayerNetwork}),
     * only at training time.
     *
     * @param model       Model
     * @param activations Layer activations (including input)
     */
    void onForwardPass(Model model, List<INDArray> activations);

    /**
     * Called once per iteration (forward pass) with the activations keyed by layer/vertex name
     * (usually for a {@link org.deeplearning4j.nn.graph.ComputationGraph}), only at training time.
     *
     * @param model       Model
     * @param activations Layer activations (including input)
     */
    void onForwardPass(Model model, Map<String, INDArray> activations);

    /**
     * Called once per iteration (backward pass) <b>before the gradients are updated</b>.
     * Gradients are available via {@link Model#gradient()}.
     * Note that gradients will likely be updated in-place - thus they should be copied or
     * processed synchronously in this method.
     * <p>
     * For updates (gradients post learning rate/momentum/rmsprop etc) see
     * {@link #onBackwardPass(Model)}
     *
     * @param model Model
     */
    void onGradientCalculation(Model model);

    /**
     * Called once per iteration (backward pass) after gradients have been calculated and updated.
     * Gradients are available via {@link Model#gradient()}.
     * <p>
     * Unlike {@link #onGradientCalculation(Model)} the gradients at this point will be post-update,
     * rather than raw (pre-update) gradients at that method call.
     *
     * @param model Model
     */
    void onBackwardPass(Model model);
}
| apache-2.0 |
JichengSong/hbase | src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestHLogSplit.java | 49796 | /**
* Copyright 2011 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver.wal;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.LargeTests;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.wal.HLog.Entry;
import org.apache.hadoop.hbase.regionserver.wal.HLog.Reader;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CancelableProgressable;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hdfs.server.namenode.LeaseExpiredException;
import org.apache.hadoop.ipc.RemoteException;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
/**
* Testing {@link HLog} splitting code.
*/
@Category(LargeTests.class)
public class TestHLogSplit {
private final static Log LOG = LogFactory.getLog(TestHLogSplit.class);
private Configuration conf;
private FileSystem fs;
protected final static HBaseTestingUtility
TEST_UTIL = new HBaseTestingUtility();
private static final Path hbaseDir = new Path("/hbase");
private static final Path hlogDir = new Path(hbaseDir, "hlog");
private static final Path oldLogDir = new Path(hbaseDir, "hlog.old");
private static final Path corruptDir = new Path(hbaseDir, ".corrupt");
private static final int NUM_WRITERS = 10;
private static final int ENTRIES = 10; // entries per writer per region
private HLog.Writer[] writer = new HLog.Writer[NUM_WRITERS];
private long seq = 0;
private static final byte[] TABLE_NAME = "t1".getBytes();
private static final byte[] FAMILY = "f1".getBytes();
private static final byte[] QUALIFIER = "q1".getBytes();
private static final byte[] VALUE = "v1".getBytes();
private static final String HLOG_FILE_PREFIX = "hlog.dat.";
private static List<String> regions;
private static final String HBASE_SKIP_ERRORS = "hbase.hlog.split.skip.errors";
private static final Path tabledir =
new Path(hbaseDir, Bytes.toString(TABLE_NAME));
static enum Corruptions {
INSERT_GARBAGE_ON_FIRST_LINE,
INSERT_GARBAGE_IN_THE_MIDDLE,
APPEND_GARBAGE,
TRUNCATE,
}
@BeforeClass
public static void setUpBeforeClass() throws Exception {
TEST_UTIL.getConfiguration().setStrings("hbase.rootdir", hbaseDir.toString());
TEST_UTIL.getConfiguration().setClass("hbase.regionserver.hlog.writer.impl",
InstrumentedSequenceFileLogWriter.class, HLog.Writer.class);
TEST_UTIL.getConfiguration().setBoolean("dfs.support.broken.append", true);
TEST_UTIL.getConfiguration().setBoolean("dfs.support.append", true);
TEST_UTIL.startMiniDFSCluster(2);
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
TEST_UTIL.shutdownMiniDFSCluster();
}
@Before
public void setUp() throws Exception {
flushToConsole("Cleaning up cluster for new test\n"
+ "--------------------------");
conf = TEST_UTIL.getConfiguration();
fs = TEST_UTIL.getDFSCluster().getFileSystem();
FileStatus[] entries = fs.listStatus(new Path("/"));
flushToConsole("Num entries in /:" + entries.length);
for (FileStatus dir : entries){
assertTrue("Deleting " + dir.getPath(),
fs.delete(dir.getPath(), true));
}
// create the HLog directory because recursive log creates are not allowed
fs.mkdirs(hlogDir);
seq = 0;
regions = new ArrayList<String>();
Collections.addAll(regions, "bbb", "ccc");
InstrumentedSequenceFileLogWriter.activateFailure = false;
}
@After
public void tearDown() throws Exception {
}
/**
* @throws IOException
* @see https://issues.apache.org/jira/browse/HBASE-3020
*/
@Test
public void testRecoveredEditsPathForMeta() throws IOException {
FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
byte [] encoded = HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
Path tdir = new Path(hbaseDir, Bytes.toString(HConstants.META_TABLE_NAME));
Path regiondir = new Path(tdir,
HRegionInfo.FIRST_META_REGIONINFO.getEncodedName());
fs.mkdirs(regiondir);
long now = System.currentTimeMillis();
HLog.Entry entry =
new HLog.Entry(new HLogKey(encoded,
HConstants.META_TABLE_NAME, 1, now, HConstants.DEFAULT_CLUSTER_ID),
new WALEdit());
Path p = HLogSplitter.getRegionSplitEditsPath(fs, entry, hbaseDir, true);
String parentOfParent = p.getParent().getParent().getName();
assertEquals(parentOfParent, HRegionInfo.FIRST_META_REGIONINFO.getEncodedName());
}
@Test(expected = OrphanHLogAfterSplitException.class)
public void testSplitFailsIfNewHLogGetsCreatedAfterSplitStarted()
throws IOException {
AtomicBoolean stop = new AtomicBoolean(false);
assertFalse("Previous test should clean up table dir",
fs.exists(new Path("/hbase/t1")));
generateHLogs(-1);
CountDownLatch latch = new CountDownLatch(1);
try {
(new ZombieNewLogWriterRegionServer(latch, stop)).start();
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf, hbaseDir, hlogDir, oldLogDir,
fs);
logSplitter.splitLog(latch);
} finally {
stop.set(true);
}
}
/**
* Test old recovered edits file doesn't break HLogSplitter.
* This is useful in upgrading old instances.
*/
@Test
public void testOldRecoveredEditsFileSidelined() throws IOException {
FileSystem fs = FileSystem.get(TEST_UTIL.getConfiguration());
byte [] encoded = HRegionInfo.FIRST_META_REGIONINFO.getEncodedNameAsBytes();
Path tdir = new Path(hbaseDir, Bytes.toString(HConstants.META_TABLE_NAME));
Path regiondir = new Path(tdir,
HRegionInfo.FIRST_META_REGIONINFO.getEncodedName());
fs.mkdirs(regiondir);
long now = System.currentTimeMillis();
HLog.Entry entry =
new HLog.Entry(new HLogKey(encoded,
HConstants.META_TABLE_NAME, 1, now, HConstants.DEFAULT_CLUSTER_ID),
new WALEdit());
Path parent = HLog.getRegionDirRecoveredEditsDir(regiondir);
assertEquals(parent.getName(), HLog.RECOVERED_EDITS_DIR);
fs.createNewFile(parent); // create a recovered.edits file
Path p = HLogSplitter.getRegionSplitEditsPath(fs, entry, hbaseDir, true);
String parentOfParent = p.getParent().getParent().getName();
assertEquals(parentOfParent, HRegionInfo.FIRST_META_REGIONINFO.getEncodedName());
HLog.createWriter(fs, p, conf).close();
}
@Test
public void testSplitPreservesEdits() throws IOException{
final String REGION = "region__1";
regions.removeAll(regions);
regions.add(REGION);
generateHLogs(1, 10, -1);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
Path originalLog = (fs.listStatus(oldLogDir))[0].getPath();
Path splitLog = getLogForRegion(hbaseDir, TABLE_NAME, REGION);
assertEquals("edits differ after split", true, logsAreEqual(originalLog, splitLog));
}
@Test
public void testEmptyLogFiles() throws IOException {
injectEmptyFile(".empty", true);
generateHLogs(Integer.MAX_VALUE);
injectEmptyFile("empty", true);
// make fs act as a different client now
// initialize will create a new DFSClient with a new client ID
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
for (String region : regions) {
Path logfile = getLogForRegion(hbaseDir, TABLE_NAME, region);
assertEquals(NUM_WRITERS * ENTRIES, countHLog(logfile, fs, conf));
}
}
@Test
public void testEmptyOpenLogFiles() throws IOException {
injectEmptyFile(".empty", false);
generateHLogs(Integer.MAX_VALUE);
injectEmptyFile("empty", false);
// make fs act as a different client now
// initialize will create a new DFSClient with a new client ID
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
for (String region : regions) {
Path logfile = getLogForRegion(hbaseDir, TABLE_NAME, region);
assertEquals(NUM_WRITERS * ENTRIES, countHLog(logfile, fs, conf));
}
}
@Test
public void testOpenZeroLengthReportedFileButWithDataGetsSplit() throws IOException {
// generate logs but leave hlog.dat.5 open.
generateHLogs(5);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
for (String region : regions) {
Path logfile = getLogForRegion(hbaseDir, TABLE_NAME, region);
assertEquals(NUM_WRITERS * ENTRIES, countHLog(logfile, fs, conf));
}
}
@Test
public void testTralingGarbageCorruptionFileSkipErrorsPasses() throws IOException {
conf.setBoolean(HBASE_SKIP_ERRORS, true);
generateHLogs(Integer.MAX_VALUE);
corruptHLog(new Path(hlogDir, HLOG_FILE_PREFIX + "5"),
Corruptions.APPEND_GARBAGE, true, fs);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
for (String region : regions) {
Path logfile = getLogForRegion(hbaseDir, TABLE_NAME, region);
assertEquals(NUM_WRITERS * ENTRIES, countHLog(logfile, fs, conf));
}
}
@Test
public void testFirstLineCorruptionLogFileSkipErrorsPasses() throws IOException {
conf.setBoolean(HBASE_SKIP_ERRORS, true);
generateHLogs(Integer.MAX_VALUE);
corruptHLog(new Path(hlogDir, HLOG_FILE_PREFIX + "5"),
Corruptions.INSERT_GARBAGE_ON_FIRST_LINE, true, fs);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
for (String region : regions) {
Path logfile = getLogForRegion(hbaseDir, TABLE_NAME, region);
assertEquals((NUM_WRITERS - 1) * ENTRIES, countHLog(logfile, fs, conf));
}
}
@Test
public void testMiddleGarbageCorruptionSkipErrorsReadsHalfOfFile() throws IOException {
conf.setBoolean(HBASE_SKIP_ERRORS, true);
generateHLogs(Integer.MAX_VALUE);
corruptHLog(new Path(hlogDir, HLOG_FILE_PREFIX + "5"),
Corruptions.INSERT_GARBAGE_IN_THE_MIDDLE, false, fs);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
for (String region : regions) {
Path logfile = getLogForRegion(hbaseDir, TABLE_NAME, region);
// the entries in the original logs are alternating regions
// considering the sequence file header, the middle corruption should
// affect at least half of the entries
int goodEntries = (NUM_WRITERS - 1) * ENTRIES;
int firstHalfEntries = (int) Math.ceil(ENTRIES / 2) - 1;
assertTrue("The file up to the corrupted area hasn't been parsed",
goodEntries + firstHalfEntries <= countHLog(logfile, fs, conf));
}
}
@Test
public void testCorruptedFileGetsArchivedIfSkipErrors() throws IOException {
conf.setBoolean(HBASE_SKIP_ERRORS, true);
Class<?> backupClass = conf.getClass("hbase.regionserver.hlog.reader.impl",
Reader.class);
InstrumentedSequenceFileLogWriter.activateFailure = false;
HLog.resetLogReaderClass();
try {
Path c1 = new Path(hlogDir, HLOG_FILE_PREFIX + "0");
conf.setClass("hbase.regionserver.hlog.reader.impl",
FaultySequenceFileLogReader.class, HLog.Reader.class);
for (FaultySequenceFileLogReader.FailureType failureType : FaultySequenceFileLogReader.FailureType.values()) {
conf.set("faultysequencefilelogreader.failuretype", failureType.name());
generateHLogs(1, ENTRIES, -1);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
FileStatus[] archivedLogs = fs.listStatus(corruptDir);
assertEquals("expected a different file", c1.getName(), archivedLogs[0]
.getPath().getName());
assertEquals(archivedLogs.length, 1);
fs.delete(new Path(oldLogDir, HLOG_FILE_PREFIX + "0"), false);
}
} finally {
conf.setClass("hbase.regionserver.hlog.reader.impl", backupClass,
Reader.class);
HLog.resetLogReaderClass();
}
}
@Test(expected = IOException.class)
public void testTrailingGarbageCorruptionLogFileSkipErrorsFalseThrows()
throws IOException {
conf.setBoolean(HBASE_SKIP_ERRORS, false);
Class<?> backupClass = conf.getClass("hbase.regionserver.hlog.reader.impl",
Reader.class);
InstrumentedSequenceFileLogWriter.activateFailure = false;
HLog.resetLogReaderClass();
try {
conf.setClass("hbase.regionserver.hlog.reader.impl",
FaultySequenceFileLogReader.class, HLog.Reader.class);
conf.set("faultysequencefilelogreader.failuretype", FaultySequenceFileLogReader.FailureType.BEGINNING.name());
generateHLogs(Integer.MAX_VALUE);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
} finally {
conf.setClass("hbase.regionserver.hlog.reader.impl", backupClass,
Reader.class);
HLog.resetLogReaderClass();
}
}
@Test
public void testCorruptedLogFilesSkipErrorsFalseDoesNotTouchLogs()
throws IOException {
conf.setBoolean(HBASE_SKIP_ERRORS, false);
Class<?> backupClass = conf.getClass("hbase.regionserver.hlog.reader.impl",
Reader.class);
InstrumentedSequenceFileLogWriter.activateFailure = false;
HLog.resetLogReaderClass();
try {
conf.setClass("hbase.regionserver.hlog.reader.impl",
FaultySequenceFileLogReader.class, HLog.Reader.class);
conf.set("faultysequencefilelogreader.failuretype", FaultySequenceFileLogReader.FailureType.BEGINNING.name());
generateHLogs(-1);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
try {
logSplitter.splitLog();
} catch (IOException e) {
assertEquals(
"if skip.errors is false all files should remain in place",
NUM_WRITERS, fs.listStatus(hlogDir).length);
}
} finally {
conf.setClass("hbase.regionserver.hlog.reader.impl", backupClass,
Reader.class);
HLog.resetLogReaderClass();
}
}
@Test
public void testEOFisIgnored() throws IOException {
conf.setBoolean(HBASE_SKIP_ERRORS, false);
final String REGION = "region__1";
regions.removeAll(regions);
regions.add(REGION);
int entryCount = 10;
Path c1 = new Path(hlogDir, HLOG_FILE_PREFIX + "0");
generateHLogs(1, entryCount, -1);
corruptHLog(c1, Corruptions.TRUNCATE, true, fs);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
Path originalLog = (fs.listStatus(oldLogDir))[0].getPath();
Path splitLog = getLogForRegion(hbaseDir, TABLE_NAME, REGION);
int actualCount = 0;
HLog.Reader in = HLog.getReader(fs, splitLog, conf);
HLog.Entry entry;
while ((entry = in.next()) != null) ++actualCount;
assertEquals(entryCount-1, actualCount);
// should not have stored the EOF files as corrupt
FileStatus[] archivedLogs = fs.listStatus(corruptDir);
assertEquals(archivedLogs.length, 0);
}
@Test
public void testLogsGetArchivedAfterSplit() throws IOException {
conf.setBoolean(HBASE_SKIP_ERRORS, false);
generateHLogs(-1);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
FileStatus[] archivedLogs = fs.listStatus(oldLogDir);
assertEquals("wrong number of files in the archive log", NUM_WRITERS, archivedLogs.length);
}
@Test
public void testSplit() throws IOException {
generateHLogs(-1);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
for (String region : regions) {
Path logfile = getLogForRegion(hbaseDir, TABLE_NAME, region);
assertEquals(NUM_WRITERS * ENTRIES, countHLog(logfile, fs, conf));
}
}
@Test
public void testLogDirectoryShouldBeDeletedAfterSuccessfulSplit()
throws IOException {
generateHLogs(-1);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
FileStatus [] statuses = null;
try {
statuses = fs.listStatus(hlogDir);
if (statuses != null) {
Assert.fail("Files left in log dir: " +
Joiner.on(",").join(FileUtil.stat2Paths(statuses)));
}
} catch (FileNotFoundException e) {
// hadoop 0.21 throws FNFE whereas hadoop 0.20 returns null
}
}
/* DISABLED for now. TODO: HBASE-2645
@Test
public void testLogCannotBeWrittenOnceParsed() throws IOException {
AtomicLong counter = new AtomicLong(0);
AtomicBoolean stop = new AtomicBoolean(false);
generateHLogs(9);
fs.initialize(fs.getUri(), conf);
Thread zombie = new ZombieLastLogWriterRegionServer(writer[9], counter, stop);
try {
zombie.start();
HLog.splitLog(hbaseDir, hlogDir, oldLogDir, fs, conf);
Path logfile = getLogForRegion(hbaseDir, TABLE_NAME, "juliet");
// It's possible that the writer got an error while appending and didn't count it
// however the entry will in fact be written to file and split with the rest
long numberOfEditsInRegion = countHLog(logfile, fs, conf);
assertTrue("The log file could have at most 1 extra log entry, but " +
"can't have less. Zombie could write "+counter.get() +" and logfile had only"+ numberOfEditsInRegion+" " + logfile, counter.get() == numberOfEditsInRegion ||
counter.get() + 1 == numberOfEditsInRegion);
} finally {
stop.set(true);
}
}
*/
@Test
public void testSplitWillNotTouchLogsIfNewHLogGetsCreatedAfterSplitStarted()
throws IOException {
AtomicBoolean stop = new AtomicBoolean(false);
generateHLogs(-1);
fs.initialize(fs.getUri(), conf);
CountDownLatch latch = new CountDownLatch(1);
Thread zombie = new ZombieNewLogWriterRegionServer(latch, stop);
List<Path> splits = null;
try {
zombie.start();
try {
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
splits = logSplitter.splitLog(latch);
} catch (IOException ex) {
/* expected */
LOG.warn("testSplitWillNotTouchLogsIfNewHLogGetsCreatedAfterSplitStarted", ex);
}
FileStatus[] files = fs.listStatus(hlogDir);
if (files == null) fail("no files in " + hlogDir + " with splits " + splits);
int logFilesNumber = files.length;
assertEquals("Log files should not be archived if there's an extra file after split",
NUM_WRITERS + 1, logFilesNumber);
} finally {
stop.set(true);
}
}
@Test(expected = IOException.class)
public void testSplitWillFailIfWritingToRegionFails() throws Exception {
//leave 5th log open so we could append the "trap"
generateHLogs(4);
fs.initialize(fs.getUri(), conf);
String region = "break";
Path regiondir = new Path(tabledir, region);
fs.mkdirs(regiondir);
InstrumentedSequenceFileLogWriter.activateFailure = false;
appendEntry(writer[4], TABLE_NAME, Bytes.toBytes(region),
("r" + 999).getBytes(), FAMILY, QUALIFIER, VALUE, 0);
writer[4].close();
try {
InstrumentedSequenceFileLogWriter.activateFailure = true;
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
} catch (IOException e) {
assertEquals("This exception is instrumented and should only be thrown for testing", e.getMessage());
throw e;
} finally {
InstrumentedSequenceFileLogWriter.activateFailure = false;
}
}
// @Test TODO this test has been disabled since it was created!
// It currently fails because the second split doesn't output anything
// -- because there are no region dirs after we move aside the first
// split result
public void testSplittingLargeNumberOfRegionsConsistency() throws IOException {
regions.removeAll(regions);
for (int i=0; i<100; i++) {
regions.add("region__"+i);
}
generateHLogs(1, 100, -1);
fs.initialize(fs.getUri(), conf);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
fs.rename(oldLogDir, hlogDir);
Path firstSplitPath = new Path(hbaseDir, Bytes.toString(TABLE_NAME) + ".first");
Path splitPath = new Path(hbaseDir, Bytes.toString(TABLE_NAME));
fs.rename(splitPath,
firstSplitPath);
fs.initialize(fs.getUri(), conf);
logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
assertEquals(0, compareHLogSplitDirs(firstSplitPath, splitPath));
}
@Test
public void testSplitDeletedRegion() throws IOException {
regions.removeAll(regions);
String region = "region_that_splits";
regions.add(region);
generateHLogs(1);
fs.initialize(fs.getUri(), conf);
Path regiondir = new Path(tabledir, region);
fs.delete(regiondir, true);
HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
hbaseDir, hlogDir, oldLogDir, fs);
logSplitter.splitLog();
assertFalse(fs.exists(regiondir));
}
@Test
public void testIOEOnOutputThread() throws Exception {
conf.setBoolean(HBASE_SKIP_ERRORS, false);
generateHLogs(-1);
fs.initialize(fs.getUri(), conf);
// Set up a splitter that will throw an IOE on the output side
HLogSplitter logSplitter = new HLogSplitter(
conf, hbaseDir, hlogDir, oldLogDir, fs) {
protected HLog.Writer createWriter(FileSystem fs, Path logfile, Configuration conf)
throws IOException {
HLog.Writer mockWriter = Mockito.mock(HLog.Writer.class);
Mockito.doThrow(new IOException("Injected")).when(mockWriter).append(Mockito.<HLog.Entry>any());
return mockWriter;
}
};
try {
logSplitter.splitLog();
fail("Didn't throw!");
} catch (IOException ioe) {
assertTrue(ioe.toString().contains("Injected"));
}
}
// Test for HBASE-3412
@Test
public void testMovedHLogDuringRecovery() throws Exception {
generateHLogs(-1);
fs.initialize(fs.getUri(), conf);
// This partial mock will throw LEE for every file simulating
// files that were moved
FileSystem spiedFs = Mockito.spy(fs);
// The "File does not exist" part is very important,
// that's how it comes out of HDFS
Mockito.doThrow(new LeaseExpiredException("Injected: File does not exist")).
when(spiedFs).append(Mockito.<Path>any());
HLogSplitter logSplitter = new HLogSplitter(
conf, hbaseDir, hlogDir, oldLogDir, spiedFs);
try {
logSplitter.splitLog();
assertEquals(NUM_WRITERS, fs.listStatus(oldLogDir).length);
assertFalse(fs.exists(hlogDir));
} catch (IOException e) {
fail("There shouldn't be any exception but: " + e.toString());
}
}
/**
* Test log split process with fake data and lots of edits to trigger threading
* issues.
*/
@Test
public void testThreading() throws Exception {
doTestThreading(20000, 128*1024*1024, 0);
}
/**
* Test blocking behavior of the log split process if writers are writing slower
* than the reader is reading.
*/
@Test
public void testThreadingSlowWriterSmallBuffer() throws Exception {
doTestThreading(200, 1024, 50);
}
/**
* Sets up a log splitter with a mock reader and writer. The mock reader generates
* a specified number of edits spread across 5 regions. The mock writer optionally
* sleeps for each edit it is fed.
* *
* After the split is complete, verifies that the statistics show the correct number
* of edits output into each region.
*
* @param numFakeEdits number of fake edits to push through pipeline
* @param bufferSize size of in-memory buffer
* @param writerSlowness writer threads will sleep this many ms per edit
*/
private void doTestThreading(final int numFakeEdits,
final int bufferSize,
final int writerSlowness) throws Exception {
Configuration localConf = new Configuration(conf);
localConf.setInt("hbase.regionserver.hlog.splitlog.buffersize", bufferSize);
// Create a fake log file (we'll override the reader to produce a stream of edits)
FSDataOutputStream out = fs.create(new Path(hlogDir, HLOG_FILE_PREFIX + ".fake"));
out.close();
// Make region dirs for our destination regions so the output doesn't get skipped
final List<String> regions = ImmutableList.of("r0", "r1", "r2", "r3", "r4");
makeRegionDirs(fs, regions);
// Create a splitter that reads and writes the data without touching disk
HLogSplitter logSplitter = new HLogSplitter(
localConf, hbaseDir, hlogDir, oldLogDir, fs) {
/* Produce a mock writer that doesn't write anywhere */
protected HLog.Writer createWriter(FileSystem fs, Path logfile, Configuration conf)
throws IOException {
HLog.Writer mockWriter = Mockito.mock(HLog.Writer.class);
Mockito.doAnswer(new Answer<Void>() {
int expectedIndex = 0;
@Override
public Void answer(InvocationOnMock invocation) {
if (writerSlowness > 0) {
try {
Thread.sleep(writerSlowness);
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
}
}
HLog.Entry entry = (Entry) invocation.getArguments()[0];
WALEdit edit = entry.getEdit();
List<KeyValue> keyValues = edit.getKeyValues();
assertEquals(1, keyValues.size());
KeyValue kv = keyValues.get(0);
// Check that the edits come in the right order.
assertEquals(expectedIndex, Bytes.toInt(kv.getRow()));
expectedIndex++;
return null;
}
}).when(mockWriter).append(Mockito.<HLog.Entry>any());
return mockWriter;
}
/* Produce a mock reader that generates fake entries */
protected Reader getReader(FileSystem fs, Path curLogFile, Configuration conf)
throws IOException {
Reader mockReader = Mockito.mock(Reader.class);
Mockito.doAnswer(new Answer<HLog.Entry>() {
int index = 0;
@Override
public HLog.Entry answer(InvocationOnMock invocation) throws Throwable {
if (index >= numFakeEdits) return null;
// Generate r0 through r4 in round robin fashion
int regionIdx = index % regions.size();
byte region[] = new byte[] {(byte)'r', (byte) (0x30 + regionIdx)};
HLog.Entry ret = createTestEntry(TABLE_NAME, region,
Bytes.toBytes((int)(index / regions.size())),
FAMILY, QUALIFIER, VALUE, index);
index++;
return ret;
}
}).when(mockReader).next();
return mockReader;
}
};
logSplitter.splitLog();
// Verify number of written edits per region
Map<byte[], Long> outputCounts = logSplitter.getOutputCounts();
for (Map.Entry<byte[], Long> entry : outputCounts.entrySet()) {
LOG.info("Got " + entry.getValue() + " output edits for region " +
Bytes.toString(entry.getKey()));
assertEquals((long)entry.getValue(), numFakeEdits / regions.size());
}
assertEquals(regions.size(), outputCounts.size());
}
  // HBASE-2312: tests the case where a RegionServer enters a GC pause,
  // comes back online after the master declared it dead and started to split.
  // Want log rolling after a master split to fail
  @Test
  @Ignore("Need HADOOP-6886, HADOOP-6840, & HDFS-617 for this. HDFS 0.20.205.1+ should have this")
  public void testLogRollAfterSplitStart() throws IOException {
    // set flush interval to a large number so it doesn't interrupt us
    final String F_INTERVAL = "hbase.regionserver.optionallogflushinterval";
    long oldFlushInterval = conf.getLong(F_INTERVAL, 1000);
    conf.setLong(F_INTERVAL, 1000*1000*100);
    HLog log = null;
    Path thisTestsDir = new Path(hbaseDir, "testLogRollAfterSplitStart");
    try {
      // put some entries in an HLog
      byte [] tableName = Bytes.toBytes(this.getClass().getName());
      HRegionInfo regioninfo = new HRegionInfo(tableName,
          HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
      log = new HLog(fs, thisTestsDir, oldLogDir, conf);
      final int total = 20;
      for (int i = 0; i < total; i++) {
        WALEdit kvs = new WALEdit();
        kvs.add(new KeyValue(Bytes.toBytes(i), tableName, tableName));
        HTableDescriptor htd = new HTableDescriptor(tableName);
        htd.addFamily(new HColumnDescriptor("column"));
        log.append(regioninfo, tableName, kvs, System.currentTimeMillis(), htd);
      }
      // Send the data to HDFS datanodes and close the HDFS writer
      log.sync();
      log.cleanupCurrentWriter(log.getFilenum());
      /* code taken from ProcessServerShutdown.process()
       * handles RS shutdowns (as observed by the Master)
       */
      // rename the directory so a rogue RS doesn't create more HLogs
      Path rsSplitDir = new Path(thisTestsDir.getParent(),
          thisTestsDir.getName() + "-splitting");
      fs.rename(thisTestsDir, rsSplitDir);
      LOG.debug("Renamed region directory: " + rsSplitDir);
      // Process the old log files as the master's split task would
      HLogSplitter splitter = HLogSplitter.createLogSplitter(conf,
          hbaseDir, rsSplitDir, oldLogDir, fs);
      splitter.splitLog();
      // Now, try to roll the HLog and verify failure: the log directory was
      // renamed away above, so the "zombie" RS must not be able to keep writing.
      try {
        log.rollWriter();
        Assert.fail("rollWriter() did not throw any exception.");
      } catch (IOException ioe) {
        // Expect a FileNotFound-rooted failure; anything else is a test failure.
        if (ioe.getCause().getMessage().contains("FileNotFound")) {
          LOG.info("Got the expected exception: ", ioe.getCause());
        } else {
          Assert.fail("Unexpected exception: " + ioe);
        }
      }
    } finally {
      // Restore the original flush interval and clean up test artifacts.
      conf.setLong(F_INTERVAL, oldFlushInterval);
      if (log != null) {
        log.close();
      }
      if (fs.exists(thisTestsDir)) {
        fs.delete(thisTestsDir, true);
      }
    }
  }
  /**
   * This thread will keep writing to the file after the split process has started.
   * It simulates a region server that was considered dead but woke up and wrote
   * some more to the last log entry.
   */
  class ZombieLastLogWriterRegionServer extends Thread {
    // Shared counter of successfully synced edits, read by the test thread.
    AtomicLong editsCount;
    // Cooperative shutdown flag set by the test thread.
    AtomicBoolean stop;
    Path log;
    // Writer for the last (still open) HLog file that the "zombie" keeps using.
    HLog.Writer lastLogWriter;
    public ZombieLastLogWriterRegionServer(HLog.Writer writer, AtomicLong counter, AtomicBoolean stop) {
      this.stop = stop;
      this.editsCount = counter;
      this.lastLogWriter = writer;
    }
    @Override
    public void run() {
      if (stop.get()){
        return;
      }
      flushToConsole("starting");
      // Append-and-sync loop: runs until the writer's file is yanked away by the
      // split (surfacing as a RemoteException), which ends the zombie.
      while (true) {
        try {
          String region = "juliet";
          fs.mkdirs(new Path(new Path(hbaseDir, region), region));
          appendEntry(lastLogWriter, TABLE_NAME, region.getBytes(),
              ("r" + editsCount).getBytes(), FAMILY, QUALIFIER, VALUE, 0);
          lastLogWriter.sync();
          editsCount.incrementAndGet();
          try {
            Thread.sleep(1);
          } catch (InterruptedException e) {
            // Ignored: the loop exits via RemoteException, not interruption.
          }
        } catch (IOException ex) {
          if (ex instanceof RemoteException) {
            // Expected once the split has taken over the file; stop writing.
            flushToConsole("Juliet: got RemoteException " +
                ex.getMessage() + " while writing " + (editsCount.get() + 1));
            break;
          } else {
            // Any other IO failure means the test setup is broken.
            // NOTE(review): this AssertionError only kills this thread; the main
            // test detects the problem via the edit count.
            assertTrue("Failed to write " + editsCount.get(), false);
          }
        }
      }
    }
  }
  /**
   * This thread will keep adding new log files.
   * It simulates a region server that was considered dead but woke up and wrote
   * some more to a new hlog.
   */
  class ZombieNewLogWriterRegionServer extends Thread {
    // Cooperative shutdown flag set by the test thread.
    AtomicBoolean stop;
    // Counted down once the extra ("juliet") log file has been created, so the
    // test thread can synchronize on it.
    CountDownLatch latch;
    public ZombieNewLogWriterRegionServer(CountDownLatch latch, AtomicBoolean stop) {
      super("ZombieNewLogWriterRegionServer");
      this.latch = latch;
      this.stop = stop;
    }
    @Override
    public void run() {
      if (stop.get()) {
        return;
      }
      Path tableDir = new Path(hbaseDir, new String(TABLE_NAME));
      Path regionDir = new Path(tableDir, regions.get(0));
      Path recoveredEdits = new Path(regionDir, HLogSplitter.RECOVERED_EDITS);
      String region = "juliet";
      Path julietLog = new Path(hlogDir, HLOG_FILE_PREFIX + ".juliet");
      try {
        // Busy-wait (10 ms steps) until the split has visibly started, i.e.
        // the recovered.edits dir exists, or until asked to stop.
        while (!fs.exists(recoveredEdits) && !stop.get()) {
          LOG.info("Juliet: split not started, sleeping a bit...");
          Threads.sleep(10);
        }
        // Now write one entry into a brand-new hlog, as a woken-up RS would.
        fs.mkdirs(new Path(tableDir, region));
        HLog.Writer writer = HLog.createWriter(fs,
            julietLog, conf);
        appendEntry(writer, "juliet".getBytes(), ("juliet").getBytes(),
            ("r").getBytes(), FAMILY, QUALIFIER, VALUE, 0);
        writer.close();
        LOG.info("Juliet file creator: created file " + julietLog);
        latch.countDown();
      } catch (IOException e1) {
        LOG.error("Failed to create file " + julietLog, e1);
        assertTrue("Failed to create file " + julietLog, false);
      }
    }
  }
  // Progress callback handed to splitLogFile(); always reports "keep going"
  // and logs how many times it was invoked.
  private CancelableProgressable reporter = new CancelableProgressable() {
    int count = 0;
    @Override
    public boolean progress() {
      count++;
      LOG.debug("progress = " + count);
      return true; // never cancel the split
    }
  };
@Test
public void testSplitLogFileWithOneRegion() throws IOException {
LOG.info("testSplitLogFileWithOneRegion");
final String REGION = "region__1";
regions.removeAll(regions);
regions.add(REGION);
generateHLogs(1, 10, -1);
FileStatus logfile = fs.listStatus(hlogDir)[0];
fs.initialize(fs.getUri(), conf);
HLogSplitter.splitLogFile(hbaseDir, logfile, fs, conf, reporter);
HLogSplitter.finishSplitLogFile(hbaseDir, oldLogDir, logfile.getPath()
.toString(), conf);
Path originalLog = (fs.listStatus(oldLogDir))[0].getPath();
Path splitLog = getLogForRegion(hbaseDir, TABLE_NAME, REGION);
assertEquals(true, logsAreEqual(originalLog, splitLog));
}
@Test
public void testSplitLogFileDeletedRegionDir()
throws IOException {
LOG.info("testSplitLogFileDeletedRegionDir");
final String REGION = "region__1";
regions.removeAll(regions);
regions.add(REGION);
generateHLogs(1, 10, -1);
FileStatus logfile = fs.listStatus(hlogDir)[0];
fs.initialize(fs.getUri(), conf);
Path regiondir = new Path(tabledir, REGION);
LOG.info("Region directory is" + regiondir);
fs.delete(regiondir, true);
HLogSplitter.splitLogFile(hbaseDir, logfile, fs, conf, reporter);
HLogSplitter.finishSplitLogFile(hbaseDir, oldLogDir, logfile.getPath()
.toString(), conf);
assertTrue(!fs.exists(regiondir));
assertTrue(true);
}
  /**
   * Splitting a zero-length log file must produce no table output and archive
   * an empty log.
   */
  @Test
  public void testSplitLogFileEmpty() throws IOException {
    LOG.info("testSplitLogFileEmpty");
    injectEmptyFile(".empty", true);
    FileStatus logfile = fs.listStatus(hlogDir)[0];
    fs.initialize(fs.getUri(), conf);
    HLogSplitter.splitLogFile(hbaseDir, logfile, fs, conf, reporter);
    HLogSplitter.finishSplitLogFile(hbaseDir, oldLogDir, logfile.getPath()
        .toString(), conf);
    // No edits means no table directory should have been created...
    Path tdir = HTableDescriptor.getTableDir(hbaseDir, TABLE_NAME);
    assertFalse(fs.exists(tdir));
    // ...and the archived log must contain zero entries.
    assertEquals(0, countHLog(fs.listStatus(oldLogDir)[0].getPath(), fs, conf));
  }
  /**
   * Splits one log file containing edits for several regions and checks each
   * region receives exactly its 10 edits.
   */
  @Test
  public void testSplitLogFileMultipleRegions() throws IOException {
    LOG.info("testSplitLogFileMultipleRegions");
    // 1 writer, 10 entries per region, no writer left open (-1).
    generateHLogs(1, 10, -1);
    FileStatus logfile = fs.listStatus(hlogDir)[0];
    fs.initialize(fs.getUri(), conf);
    HLogSplitter.splitLogFile(hbaseDir, logfile, fs, conf, reporter);
    HLogSplitter.finishSplitLogFile(hbaseDir, oldLogDir, logfile.getPath()
        .toString(), conf);
    for (String region : regions) {
      Path recovered = getLogForRegion(hbaseDir, TABLE_NAME, region);
      assertEquals(10, countHLog(recovered, fs, conf));
    }
  }
  /**
   * With skip-errors enabled, a log whose very first bytes are garbage must be
   * moved to the .corrupt directory rather than aborting the split.
   */
  @Test
  public void testSplitLogFileFirstLineCorruptionLog()
  throws IOException {
    conf.setBoolean(HBASE_SKIP_ERRORS, true);
    generateHLogs(1, 10, -1);
    FileStatus logfile = fs.listStatus(hlogDir)[0];
    // Corrupt the header so the reader fails immediately.
    corruptHLog(logfile.getPath(),
        Corruptions.INSERT_GARBAGE_ON_FIRST_LINE, true, fs);
    fs.initialize(fs.getUri(), conf);
    HLogSplitter.splitLogFile(hbaseDir, logfile, fs, conf, reporter);
    HLogSplitter.finishSplitLogFile(hbaseDir, oldLogDir, logfile.getPath()
        .toString(), conf);
    // The corrupt log must have been quarantined, not deleted.
    final Path corruptDir = new Path(conf.get(HConstants.HBASE_DIR), conf.get(
        "hbase.regionserver.hlog.splitlog.corrupt.dir", ".corrupt"));
    assertEquals(1, fs.listStatus(corruptDir).length);
  }
  /**
   * Regression test: log split must survive a concurrent
   * replayRecoveredEditsIfAny() deleting recovered-edits files out from under
   * it (simulated here inside createWriter()).
   *
   * @throws IOException
   * @see https://issues.apache.org/jira/browse/HBASE-4862
   */
  @Test
  public void testConcurrentSplitLogAndReplayRecoverEdit() throws IOException {
    LOG.info("testConcurrentSplitLogAndReplayRecoverEdit");
    // Generate hlogs for our destination region
    String regionName = "r0";
    final Path regiondir = new Path(tabledir, regionName);
    regions = new ArrayList<String>();
    regions.add(regionName);
    generateHLogs(-1);
    HLogSplitter logSplitter = new HLogSplitter(
        conf, hbaseDir, hlogDir, oldLogDir, fs) {
      protected HLog.Writer createWriter(FileSystem fs, Path logfile, Configuration conf)
          throws IOException {
        HLog.Writer writer = HLog.createWriter(fs, logfile, conf);
        // After creating writer, simulate region's
        // replayRecoveredEditsIfAny() which gets SplitEditFiles of this
        // region and delete them, excluding files with '.temp' suffix.
        NavigableSet<Path> files = HLog.getSplitEditFilesSorted(this.fs,
            regiondir);
        if (files != null && !files.isEmpty()) {
          for (Path file : files) {
            if (!this.fs.delete(file, false)) {
              LOG.error("Failed delete of " + file);
            } else {
              LOG.debug("Deleted recovered.edits file=" + file);
            }
          }
        }
        return writer;
      }
    };
    try{
      logSplitter.splitLog();
    } catch (IOException e) {
      LOG.info(e);
      Assert.fail("Throws IOException when spliting "
          + "log, it is most likely because writing file does not "
          + "exist which is caused by concurrent replayRecoveredEditsIfAny()");
    }
    // The concurrent deletion must not have caused any log to be classified
    // as corrupt either.
    if (fs.exists(corruptDir)) {
      if (fs.listStatus(corruptDir).length > 0) {
        Assert.fail("There are some corrupt logs, "
            + "it is most likely caused by concurrent replayRecoveredEditsIfAny()");
      }
    }
  }
private void flushToConsole(String s) {
System.out.println(s);
System.out.flush();
}
  /** Convenience overload: NUM_WRITERS writers, ENTRIES entries each; writer {@code leaveOpen} stays open. */
  private void generateHLogs(int leaveOpen) throws IOException {
    generateHLogs(NUM_WRITERS, ENTRIES, leaveOpen);
  }
private void makeRegionDirs(FileSystem fs, List<String> regions) throws IOException {
for (String region : regions) {
flushToConsole("Creating dir for region " + region);
fs.mkdirs(new Path(tabledir, region));
}
}
  /**
   * Fills {@code writers} hlog files with {@code entries} edits per region each,
   * storing the writers in the shared {@code writer[]} array.
   * Writer index {@code leaveOpen} (if in range) is intentionally left open to
   * simulate a log still being written; pass -1 to close them all.
   */
  private void generateHLogs(int writers, int entries, int leaveOpen) throws IOException {
    makeRegionDirs(fs, regions);
    fs.mkdirs(hlogDir);
    for (int i = 0; i < writers; i++) {
      writer[i] = HLog.createWriter(fs, new Path(hlogDir, HLOG_FILE_PREFIX + i), conf);
      for (int j = 0; j < entries; j++) {
        int prefix = 0;
        for (String region : regions) {
          // Row key encodes region position, writer index and entry index,
          // making every row unique across the generated logs.
          String row_key = region + prefix++ + i + j;
          appendEntry(writer[i], TABLE_NAME, region.getBytes(),
              row_key.getBytes(), FAMILY, QUALIFIER, VALUE, seq);
        }
      }
      if (i != leaveOpen) {
        writer[i].close();
        LOG.info("Closing writer " + i);
      }
    }
  }
private Path getLogForRegion(Path rootdir, byte[] table, String region)
throws IOException {
Path tdir = HTableDescriptor.getTableDir(rootdir, table);
Path editsdir = HLog.getRegionDirRecoveredEditsDir(HRegion.getRegionDir(tdir,
Bytes.toString(region.getBytes())));
FileStatus [] files = this.fs.listStatus(editsdir);
assertEquals(1, files.length);
return files[0].getPath();
}
  /**
   * Rewrites the file at {@code path} with a corrupted copy of its own bytes.
   * The whole file is read into memory, deleted, and re-created with garbage
   * appended, prepended, inserted mid-file, or with the tail truncated,
   * depending on {@code corruption}. If {@code close} is false the new file is
   * only flushed, simulating a log still being written.
   */
  private void corruptHLog(Path path, Corruptions corruption, boolean close,
      FileSystem fs) throws IOException {
    FSDataOutputStream out;
    int fileSize = (int) fs.listStatus(path)[0].getLen();
    // Snapshot the original bytes before deleting the file.
    FSDataInputStream in = fs.open(path);
    byte[] corrupted_bytes = new byte[fileSize];
    in.readFully(0, corrupted_bytes, 0, fileSize);
    in.close();
    switch (corruption) {
      case APPEND_GARBAGE:
        fs.delete(path, false);
        out = fs.create(path);
        out.write(corrupted_bytes);
        out.write("-----".getBytes());
        closeOrFlush(close, out);
        break;
      case INSERT_GARBAGE_ON_FIRST_LINE:
        fs.delete(path, false);
        out = fs.create(path);
        out.write(0); // single junk byte before the real header
        out.write(corrupted_bytes);
        closeOrFlush(close, out);
        break;
      case INSERT_GARBAGE_IN_THE_MIDDLE:
        fs.delete(path, false);
        out = fs.create(path);
        int middle = (int) Math.floor(corrupted_bytes.length / 2);
        out.write(corrupted_bytes, 0, middle);
        out.write(0); // junk byte splitting an entry in half
        out.write(corrupted_bytes, middle, corrupted_bytes.length - middle);
        closeOrFlush(close, out);
        break;
      case TRUNCATE:
        fs.delete(path, false);
        out = fs.create(path);
        // Drop the last 32 bytes, chopping off the tail of the final entry.
        out.write(corrupted_bytes, 0, fileSize-32);
        closeOrFlush(close, out);
        break;
    }
  }
  /**
   * Either closes {@code out} or merely syncs it to the datanodes, leaving the
   * stream open. Sync is done reflectively: newer Hadoop exposes
   * Syncable.hflush(), older releases only Syncable.sync(); whichever exists
   * is invoked.
   */
  private void closeOrFlush(boolean close, FSDataOutputStream out)
  throws IOException {
    if (close) {
      out.close();
    } else {
      Method syncMethod = null;
      try {
        syncMethod = out.getClass().getMethod("hflush", new Class<?> []{});
      } catch (NoSuchMethodException e) {
        // Fall back to the pre-0.21 name.
        try {
          syncMethod = out.getClass().getMethod("sync", new Class<?> []{});
        } catch (NoSuchMethodException ex) {
          throw new IOException("This version of Hadoop supports " +
              "neither Syncable.sync() nor Syncable.hflush().");
        }
      }
      try {
        syncMethod.invoke(out, new Object[]{});
      } catch (Exception e) {
        throw new IOException(e);
      }
      // Reflection stands in for a direct out.hflush() call, which would not
      // compile against Hadoop versions lacking that method.
    }
  }
@SuppressWarnings("unused")
private void dumpHLog(Path log, FileSystem fs, Configuration conf) throws IOException {
HLog.Entry entry;
HLog.Reader in = HLog.getReader(fs, log, conf);
while ((entry = in.next()) != null) {
System.out.println(entry);
}
}
private int countHLog(Path log, FileSystem fs, Configuration conf) throws IOException {
int count = 0;
HLog.Reader in = HLog.getReader(fs, log, conf);
while (in.next() != null) {
count++;
}
return count;
}
  /**
   * Appends one test entry to {@code writer} and syncs it.
   * Returns the caller-supplied {@code seq} unchanged; note createTestEntry()
   * internally stamps the entry with seq + 1 — presumably intentional for
   * these tests, but worth confirming if sequence numbers ever matter.
   */
  public long appendEntry(HLog.Writer writer, byte[] table, byte[] region,
      byte[] row, byte[] family, byte[] qualifier,
      byte[] value, long seq)
      throws IOException {
    LOG.info(Thread.currentThread().getName() + " append");
    writer.append(createTestEntry(table, region, row, family, qualifier, value, seq));
    LOG.info(Thread.currentThread().getName() + " sync");
    writer.sync();
    return seq;
  }
private HLog.Entry createTestEntry(
byte[] table, byte[] region,
byte[] row, byte[] family, byte[] qualifier,
byte[] value, long seq) {
long time = System.nanoTime();
WALEdit edit = new WALEdit();
seq++;
edit.add(new KeyValue(row, family, qualifier, time, KeyValue.Type.Put, value));
return new HLog.Entry(new HLogKey(region, table, seq, time,
HConstants.DEFAULT_CLUSTER_ID), edit);
}
  /**
   * Creates an empty hlog file named HLOG_FILE_PREFIX + suffix.
   * When {@code closeFile} is false the writer is deliberately left open
   * (and unreferenced) to simulate a log that is still being written.
   */
  private void injectEmptyFile(String suffix, boolean closeFile)
      throws IOException {
    HLog.Writer writer = HLog.createWriter(
        fs, new Path(hlogDir, HLOG_FILE_PREFIX + suffix), conf);
    if (closeFile) writer.close();
  }
@SuppressWarnings("unused")
private void listLogs(FileSystem fs, Path dir) throws IOException {
for (FileStatus file : fs.listStatus(dir)) {
System.out.println(file.getPath());
}
}
  /**
   * Compares two split-output directory trees region by region.
   * Each region dir is expected to hold exactly one recovered-edits file;
   * corresponding files must contain identical entries.
   * Returns 0 when equal, -1 on the first mismatching pair (C-style result
   * kept for existing callers).
   */
  private int compareHLogSplitDirs(Path p1, Path p2) throws IOException {
    FileStatus[] f1 = fs.listStatus(p1);
    FileStatus[] f2 = fs.listStatus(p2);
    assertNotNull("Path " + p1 + " doesn't exist", f1);
    assertNotNull("Path " + p2 + " doesn't exist", f2);
    System.out.println("Files in " + p1 + ": " +
        Joiner.on(",").join(FileUtil.stat2Paths(f1)));
    System.out.println("Files in " + p2 + ": " +
        Joiner.on(",").join(FileUtil.stat2Paths(f2)));
    assertEquals(f1.length, f2.length);
    for (int i = 0; i < f1.length; i++) {
      // Regions now have a directory named RECOVERED_EDITS_DIR and in here
      // are split edit files. In below presume only 1.
      Path rd1 = HLog.getRegionDirRecoveredEditsDir(f1[i].getPath());
      FileStatus[] rd1fs = fs.listStatus(rd1);
      assertEquals(1, rd1fs.length);
      Path rd2 = HLog.getRegionDirRecoveredEditsDir(f2[i].getPath());
      FileStatus[] rd2fs = fs.listStatus(rd2);
      assertEquals(1, rd2fs.length);
      // NOTE(review): assumes listStatus returns both dirs in the same order;
      // holds for HDFS's sorted listings — confirm for other FS impls.
      if (!logsAreEqual(rd1fs[0].getPath(), rd2fs[0].getPath())) {
        return -1;
      }
    }
    return 0;
  }
private boolean logsAreEqual(Path p1, Path p2) throws IOException {
HLog.Reader in1, in2;
in1 = HLog.getReader(fs, p1, conf);
in2 = HLog.getReader(fs, p2, conf);
HLog.Entry entry1;
HLog.Entry entry2;
while ((entry1 = in1.next()) != null) {
entry2 = in2.next();
if ((entry1.getKey().compareTo(entry2.getKey()) != 0) ||
(!entry1.getEdit().toString().equals(entry2.getEdit().toString()))) {
return false;
}
}
return true;
}
  // JUnit rule checking that each test releases its resources (threads, fds).
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
| apache-2.0 |
mahak/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/master/balancer/BalancerChore.java | 1872 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master.balancer;
import java.io.IOException;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hbase.master.HMaster;
/**
 * Chore that will call HMaster.balance{@link org.apache.hadoop.hbase.master.HMaster#balance()} when
 * needed.
 */
@InterfaceAudience.Private
public class BalancerChore extends ScheduledChore {
  private static final Logger LOG = LoggerFactory.getLogger(BalancerChore.class);
  // Master instance whose balancer this chore periodically triggers.
  private final HMaster master;

  /**
   * @param master the master to run the balancer on; the chore period comes
   *        from hbase.balancer.period (falls back to the default constant).
   */
  public BalancerChore(HMaster master) {
    super(master.getServerName() + "-BalancerChore", master, master.getConfiguration().getInt(
      HConstants.HBASE_BALANCER_PERIOD, HConstants.DEFAULT_HBASE_BALANCER_PERIOD));
    this.master = master;
  }

  @Override
  protected void chore() {
    try {
      master.balanceOrUpdateMetrics();
    } catch (IOException e) {
      // Log and swallow: one failed balance run must not kill the chore.
      LOG.error("Failed to balance.", e);
    }
  }
}
| apache-2.0 |
hello2009chen/spring-boot | spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/webservices/client/WebServiceTemplateAutoConfigurationTests.java | 5470 | /*
* Copyright 2012-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.autoconfigure.webservices.client;
import java.util.function.Consumer;
import org.junit.Test;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.context.assertj.AssertableApplicationContext;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.boot.test.context.runner.ContextConsumer;
import org.springframework.boot.webservices.client.WebServiceTemplateBuilder;
import org.springframework.boot.webservices.client.WebServiceTemplateCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.oxm.Marshaller;
import org.springframework.oxm.Unmarshaller;
import org.springframework.oxm.jaxb.Jaxb2Marshaller;
import org.springframework.ws.client.core.WebServiceTemplate;
import org.springframework.ws.transport.WebServiceMessageSender;
import org.springframework.ws.transport.http.ClientHttpRequestMessageSender;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for {@link WebServiceTemplateAutoConfiguration}.
 *
 * @author Stephane Nicoll
 * @author Dmytro Nosan
 */
public class WebServiceTemplateAutoConfigurationTests {

	// Context runner that applies only the auto-configuration under test.
	private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
			.withConfiguration(
					AutoConfigurations.of(WebServiceTemplateAutoConfiguration.class));

	// The auto-configured builder must not preconfigure (un)marshallers.
	@Test
	public void autoConfiguredBuilderShouldNotHaveMarshallerAndUnmarshaller() {
		this.contextRunner.run(assertWebServiceTemplateBuilder((builder) -> {
			WebServiceTemplate webServiceTemplate = builder.build();
			assertThat(webServiceTemplate.getUnmarshaller()).isNull();
			assertThat(webServiceTemplate.getMarshaller()).isNull();
		}));
	}

	// Default builder wires exactly one message sender, HTTP-based.
	@Test
	public void autoConfiguredBuilderShouldHaveHttpMessageSenderByDefault() {
		this.contextRunner.run(assertWebServiceTemplateBuilder((builder) -> {
			WebServiceTemplate webServiceTemplate = builder.build();
			assertThat(webServiceTemplate.getMessageSenders()).hasSize(1);
			WebServiceMessageSender messageSender = webServiceTemplate
					.getMessageSenders()[0];
			assertThat(messageSender).isInstanceOf(ClientHttpRequestMessageSender.class);
		}));
	}

	// A user-defined builder bean must back off the auto-configured one.
	@Test
	public void webServiceTemplateWhenHasCustomBuilderShouldUseCustomBuilder() {
		this.contextRunner
				.withUserConfiguration(CustomWebServiceTemplateBuilderConfig.class)
				.run(assertWebServiceTemplateBuilder((builder) -> {
					WebServiceTemplate webServiceTemplate = builder.build();
					assertThat(webServiceTemplate.getMarshaller())
							.isSameAs(CustomWebServiceTemplateBuilderConfig.marshaller);
				}));
	}

	// WebServiceTemplateCustomizer beans must be applied to built templates.
	@Test
	public void webServiceTemplateShouldApplyCustomizer() {
		this.contextRunner.withUserConfiguration(WebServiceTemplateCustomizerConfig.class)
				.run(assertWebServiceTemplateBuilder((builder) -> {
					WebServiceTemplate webServiceTemplate = builder.build();
					assertThat(webServiceTemplate.getUnmarshaller())
							.isSameAs(WebServiceTemplateCustomizerConfig.unmarshaller);
				}));
	}

	// Each injection point must get a fresh builder: sabotaging the builder
	// after the first build must not break the second bean's build.
	@Test
	public void builderShouldBeFreshForEachUse() {
		this.contextRunner.withUserConfiguration(DirtyWebServiceTemplateConfig.class)
				.run((context) -> assertThat(context).hasNotFailed());
	}

	// Helper: asserts a single builder bean exists, then hands it to the callback.
	private ContextConsumer<AssertableApplicationContext> assertWebServiceTemplateBuilder(
			Consumer<WebServiceTemplateBuilder> builder) {
		return (context) -> {
			assertThat(context).hasSingleBean(WebServiceTemplateBuilder.class);
			builder.accept(context.getBean(WebServiceTemplateBuilder.class));
		};
	}

	// Config that poisons the builder after each use; both beans can only be
	// created successfully if every injection point receives a fresh builder.
	@Configuration
	static class DirtyWebServiceTemplateConfig {

		@Bean
		public WebServiceTemplate webServiceTemplateOne(
				WebServiceTemplateBuilder builder) {
			try {
				return builder.build();
			}
			finally {
				breakBuilderOnNextCall(builder);
			}
		}

		@Bean
		public WebServiceTemplate webServiceTemplateTwo(
				WebServiceTemplateBuilder builder) {
			try {
				return builder.build();
			}
			finally {
				breakBuilderOnNextCall(builder);
			}
		}

		// Registers a customizer that throws, so any later build() with this
		// builder instance fails.
		private void breakBuilderOnNextCall(WebServiceTemplateBuilder builder) {
			builder.additionalCustomizers((webServiceTemplate) -> {
				throw new IllegalStateException();
			});
		}

	}

	// Supplies a custom builder bean pre-set with a known marshaller.
	@Configuration
	static class CustomWebServiceTemplateBuilderConfig {

		private static final Marshaller marshaller = new Jaxb2Marshaller();

		@Bean
		public WebServiceTemplateBuilder webServiceTemplateBuilder() {
			return new WebServiceTemplateBuilder().setMarshaller(marshaller);
		}

	}

	// Supplies a customizer bean that sets a known unmarshaller.
	@Configuration
	static class WebServiceTemplateCustomizerConfig {

		private static final Unmarshaller unmarshaller = new Jaxb2Marshaller();

		@Bean
		public WebServiceTemplateCustomizer webServiceTemplateCustomizer() {
			return (ws) -> ws.setUnmarshaller(unmarshaller);
		}

	}

}
| apache-2.0 |
sergeymazin/zeppelin | zeppelin-plugins/launcher/k8s-standard/src/main/java/org/apache/zeppelin/interpreter/launcher/Kubectl.java | 4907 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.interpreter.launcher;
import com.google.common.annotations.VisibleForTesting;
import com.google.gson.Gson;
import java.util.ArrayList;
import java.util.Arrays;
import org.apache.commons.exec.*;
import org.apache.commons.io.IOUtils;
import java.io.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Thin wrapper around the {@code kubectl} command-line binary.
 * Commands are run via Apache Commons Exec; stdin/stdout/stderr are piped
 * through in-memory streams and failures surface as {@link IOException}.
 */
public class Kubectl {
  // static final per SLF4J convention (was a non-static instance field).
  private static final Logger LOGGER = LoggerFactory.getLogger(Kubectl.class);
  /** Path or name of the kubectl binary to invoke. */
  private final String kubectlCmd;
  /** Optional namespace; when set, appended as --namespace=<ns> to every command. */
  private String namespace;
  // NOTE: an unused private Gson field was removed; nothing in this class used it.

  public Kubectl(String kubectlCmd) {
    this.kubectlCmd = kubectlCmd;
  }

  /**
   * Override namespace. Otherwise use namespace provided in schema
   * @param namespace
   */
  public void setNamespace(String namespace) {
    this.namespace = namespace;
  }

  public String getNamespace() {
    return namespace;
  }

  /** Runs {@code kubectl apply -f -}, feeding the spec on stdin; returns stdout. */
  public String apply(String spec) throws IOException {
    return execAndGet(new String[]{"apply", "-f", "-"}, spec);
  }

  /** Runs {@code kubectl delete -f -}, feeding the spec on stdin; returns stdout. */
  public String delete(String spec) throws IOException {
    return execAndGet(new String[]{"delete", "-f", "-"}, spec);
  }

  /**
   * Runs {@code kubectl wait <resource> --for=<waitFor> --timeout=<timeoutSec>s}.
   * A NotFound error while waiting for deletion is treated as success: the
   * resource is already gone.
   */
  public String wait(String resource, String waitFor, int timeoutSec) throws IOException {
    try {
      return execAndGet(new String[]{
          "wait",
          resource,
          String.format("--for=%s", waitFor),
          String.format("--timeout=%ds", timeoutSec)});
    } catch (IOException e) {
      if ("delete".equals(waitFor) && e.getMessage().contains("NotFound")) {
        LOGGER.info("{} Not found. Maybe already deleted.", resource);
        return "";
      } else {
        throw e;
      }
    }
  }

  /**
   * Starts {@code kubectl port-forward <resource> <ports...>} asynchronously.
   * @return the watchdog (no timeout), which the caller uses to stop forwarding
   */
  public ExecuteWatchdog portForward(String resource, String [] ports) throws IOException {
    DefaultExecutor executor = new DefaultExecutor();
    CommandLine cmd = new CommandLine(kubectlCmd);
    cmd.addArguments("port-forward");
    cmd.addArguments(resource);
    cmd.addArguments(ports);

    ExecuteWatchdog watchdog = new ExecuteWatchdog(-1);
    executor.setWatchdog(watchdog);
    executor.execute(cmd, new ExecuteResultHandler() {
      @Override
      public void onProcessComplete(int i) {
        LOGGER.info("Port-forward stopped");
      }

      @Override
      public void onProcessFailed(ExecuteException e) {
        // Expected when the watchdog destroys the process; log at debug only.
        LOGGER.debug("port-forward process exit", e);
      }
    });
    return watchdog;
  }

  /** Runs kubectl with the given args and empty stdin; returns stdout. */
  String execAndGet(String [] args) throws IOException {
    return execAndGet(args, "");
  }

  /**
   * Runs kubectl with the given args, feeding {@code stdin} to the process.
   * @return stdout on exit code 0
   * @throws IOException carrying stderr output on non-zero exit or exec failure
   */
  @VisibleForTesting
  String execAndGet(String [] args, String stdin) throws IOException {
    InputStream ins = IOUtils.toInputStream(stdin);
    ByteArrayOutputStream stdout = new ByteArrayOutputStream();
    ByteArrayOutputStream stderr = new ByteArrayOutputStream();
    ArrayList<String> argsToOverride = new ArrayList<>(Arrays.asList(args));

    // set namespace
    if (namespace != null) {
      argsToOverride.add("--namespace=" + namespace);
    }

    // Parameterized SLF4J call instead of string concatenation.
    LOGGER.info("kubectl {}", argsToOverride);
    LOGGER.debug(stdin);

    try {
      int exitCode = execute(
          argsToOverride.toArray(new String[0]),
          ins,
          stdout,
          stderr
      );
      if (exitCode == 0) {
        // NOTE(review): decodes with the platform default charset, as before —
        // confirm whether UTF-8 should be forced here.
        String output = new String(stdout.toByteArray());
        return output;
      } else {
        String output = new String(stderr.toByteArray());
        throw new IOException(String.format("non zero return code (%d). %s", exitCode, output));
      }
    } catch (Exception e) {
      String output = new String(stderr.toByteArray());
      throw new IOException(output, e);
    }
  }

  /**
   * Synchronously executes kubectl with a 60 s watchdog timeout, wiring the
   * provided streams to the process.
   * @return the process exit code
   */
  public int execute(String [] args, InputStream stdin, OutputStream stdout, OutputStream stderr) throws IOException {
    DefaultExecutor executor = new DefaultExecutor();
    CommandLine cmd = new CommandLine(kubectlCmd);
    cmd.addArguments(args);

    ExecuteWatchdog watchdog = new ExecuteWatchdog(60 * 1000);
    executor.setWatchdog(watchdog);

    PumpStreamHandler streamHandler = new PumpStreamHandler(stdout, stderr, stdin);
    executor.setStreamHandler(streamHandler);
    return executor.execute(cmd);
  }
}
| apache-2.0 |
pk1057/asterisk-java | src/main/java/org/asteriskjava/fastagi/command/DatabaseDelTreeCommand.java | 2886 | /*
* Copyright 2004-2006 Stefan Reuter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.asteriskjava.fastagi.command;
/**
 * Deletes a family or specific keytree within a family in the Asterisk
 * database.
 * <p>
 * Returns 1 if successful, 0 otherwise.
 *
 * @author srt
 * @version $Id$
 */
public class DatabaseDelTreeCommand extends AbstractAgiCommand
{
    /**
     * Serial version identifier.
     */
    private static final long serialVersionUID = 3256719598056387384L;

    /**
     * The family of the key to delete.
     */
    private String family;

    /**
     * The keytree to delete, or <code>null</code> to delete the whole family.
     */
    private String keyTree;

    /**
     * Creates a new DatabaseDelCommand to delete a whole family.
     *
     * @param family the family to delete.
     */
    public DatabaseDelTreeCommand(String family)
    {
        this(family, null);
    }

    /**
     * Creates a new DatabaseDelCommand to delete a keytree within a given
     * family.
     *
     * @param family the family of the keytree to delete.
     * @param keyTree the keytree to delete.
     */
    public DatabaseDelTreeCommand(String family, String keyTree)
    {
        super();
        this.family = family;
        this.keyTree = keyTree;
    }

    /**
     * Returns the family of the key to delete.
     *
     * @return the family of the key to delete.
     */
    public String getFamily()
    {
        return family;
    }

    /**
     * Sets the family of the key to delete.
     *
     * @param family the family of the key to delete.
     */
    public void setFamily(String family)
    {
        this.family = family;
    }

    /**
     * Returns the keytree to delete.
     *
     * @return the keytree to delete.
     */
    public String getKeyTree()
    {
        return keyTree;
    }

    /**
     * Sets the keytree to delete.
     *
     * @param keyTree the keytree to delete, <code>null</code> to delete the
     *            whole family.
     */
    public void setKeyTree(String keyTree)
    {
        this.keyTree = keyTree;
    }

    @Override
    public String buildCommand()
    {
        final StringBuilder command = new StringBuilder("DATABASE DELTREE ");
        command.append(escapeAndQuote(family));
        if (keyTree != null)
        {
            command.append(" ").append(escapeAndQuote(keyTree));
        }
        return command.toString();
    }
}
| apache-2.0 |
zqian/sakai | sitestats/sitestats-impl/src/java/org/sakaiproject/sitestats/impl/event/EntityBrokerEventRegistry.java | 8767 | /**
* $URL$
* $Id$
*
* Copyright (c) 2006-2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.sitestats.impl.event;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.entitybroker.entityprovider.EntityProviderManager;
import org.sakaiproject.entitybroker.entityprovider.capabilities.Statisticable;
import org.sakaiproject.entitybroker.entityprovider.extension.EntityProviderListener;
import org.sakaiproject.memory.api.Cache;
import org.sakaiproject.memory.api.MemoryService;
import org.sakaiproject.sitestats.api.event.EventInfo;
import org.sakaiproject.sitestats.api.event.EventRegistry;
import org.sakaiproject.sitestats.api.event.EventRegistryService;
import org.sakaiproject.sitestats.api.event.ToolInfo;
import org.sakaiproject.tool.api.SessionManager;
import org.sakaiproject.user.api.Preferences;
import org.sakaiproject.user.api.PreferencesService;
import org.sakaiproject.util.ResourceLoader;
import java.util.*;
public class EntityBrokerEventRegistry extends Observable implements EventRegistry, EntityProviderListener<Statisticable> {
    private static final Logger LOG = LoggerFactory.getLogger(EntityBrokerEventRegistry.class);
    /** Name under which the localized event-name cache is registered with the MemoryService. */
    private static final String CACHENAME = EntityBrokerEventRegistry.class.getName();
    /** Event Registry: one ToolInfo (with its events) per registered Statisticable provider. */
    private List<ToolInfo> eventRegistry = new ArrayList<ToolInfo>();
    /** Maps an event id to the EntityBroker prefix of the provider that declared it. */
    private Map<String, String> eventIdToEPPrefix = new HashMap<String, String>();
    /** Cache of localized event names, keyed by EventLocaleKey.toString(). */
    private Cache eventNamesCache = null;
    /** Sakai services (injected by Spring). */
    private SessionManager M_sm;
    private PreferencesService M_ps;
    private EntityProviderManager M_epm;
    private MemoryService M_ms;
    // ################################################################
    // Spring methods
    // ################################################################
    public void setSessionManager(SessionManager sessionManager) {
        this.M_sm = sessionManager;
    }
    public void setPreferencesService(PreferencesService preferencesService) {
        this.M_ps = preferencesService;
    }
    public void setEntityProviderManager(EntityProviderManager entityProviderManager) {
        this.M_epm = entityProviderManager;
    }
    public void setMemoryService(MemoryService memoryService) {
        this.M_ms = memoryService;
    }
    /**
     * Initializes this registry: creates the event-name cache and registers this
     * instance with the EntityProviderManager so it gets notified of every
     * entity provider exposing the {@link Statisticable} capability.
     */
    public void init() {
        LOG.info("init()");
        // configure cache
        eventNamesCache = M_ms.newCache(CACHENAME);
        // register EntityBrokerListener
        M_epm.registerListener(this, true);
    }
    // ################################################################
    // Event Registry methods
    // ################################################################
    /* (non-Javadoc)
     * @see org.sakaiproject.sitestats.api.event.EventRegistry#getEventRegistry()
     */
    public List<ToolInfo> getEventRegistry() {
        LOG.debug("getEventRegistry(): #tools implementing Statisticable = " + eventRegistry.size());
        return eventRegistry;
    }
    /* (non-Javadoc)
     * @see org.sakaiproject.sitestats.api.event.EventRegistry#isEventRegistryExpired()
     */
    public boolean isEventRegistryExpired() {
        // the Observable "changed" flag doubles as the expired marker: it is set
        // each time a new Statisticable provider is registered
        return hasChanged();
    }
    /* (non-Javadoc)
     * @see org.sakaiproject.sitestats.api.event.EventRegistry#getEventName(java.lang.String)
     */
    public String getEventName(String eventId) {
        Locale currentUserLocale = getCurrentUserLocale();
        EventLocaleKey key = new EventLocaleKey(eventId, currentUserLocale.toString());
        if (eventNamesCache.containsKey(key.toString())) {
            return (String) eventNamesCache.get(key.toString());
        } else {
            String eventName = null;
            try {
                // ask the provider that declared this event for the localized names of
                // ALL of its events, and cache every one of them while we are at it
                String prefix = eventIdToEPPrefix.get(eventId);
                Statisticable s = M_epm.getProviderByPrefixAndCapability(prefix, Statisticable.class);
                Map<String, String> eventIdNamesMap = s.getEventNames(currentUserLocale);
                if (eventIdNamesMap != null) {
                    for (String thisEventId : eventIdNamesMap.keySet()) {
                        EventLocaleKey thisCacheKey = new EventLocaleKey(thisEventId, currentUserLocale.toString());
                        String thisEventName = eventIdNamesMap.get(thisEventId);
                        eventNamesCache.put(thisCacheKey.toString(), thisEventName);
                        if (thisEventId.equals(eventId)) {
                            eventName = thisEventName;
                        }
                    }
                    LOG.debug("Cached event names for EB prefix '" + prefix + "', locale: " + currentUserLocale);
                }
            } catch (Exception e) {
                // unknown event id or misbehaving provider: report "no name available"
                eventName = null;
            }
            return eventName;
        }
    }
    // ################################################################
    // EntityProviderListener methods
    // ################################################################
    public Class<Statisticable> getCapabilityFilter() {
        return Statisticable.class;
    }
    public String getPrefixFilter() {
        // no prefix filtering: listen for providers of any prefix
        return null;
    }
    public void run(Statisticable provider) {
        LOG.info("Statisticable capability registered with prefix: " + provider.getEntityPrefix());
        processStatisticableProvider(provider);
    }
    /**
     * Adds the given provider's tool and events to the registry, records the
     * event-id to provider-prefix mapping, and flags the registry as expired so
     * that observers can pick up the change.
     */
    private void processStatisticableProvider(Statisticable provider) {
        String entityPrefix = provider.getEntityPrefix();
        String entityToolId = provider.getAssociatedToolId();
        String[] entityEventIds = provider.getEventKeys();
        // Build tool for Event Registry (List<ToolInfo>)
        ToolInfo tool = new ToolInfo(entityToolId);
        tool.setSelected(true);
        for (String eventId : entityEventIds) {
            EventInfo event = new EventInfo(eventId);
            event.setSelected(true);
            // Add to eventID -> entityProvider_prefix mapping
            eventIdToEPPrefix.put(eventId, entityPrefix);
            tool.addEvent(event);
        }
        eventRegistry.add(tool);
        // Set expired flag on EventRegistry to true
        setChanged();
        notifyObservers(EventRegistryService.NOTIF_EVENT_REGISTRY_EXPIRED);
    }
    // ################################################################
    // Utility Methods
    // ################################################################
    /**
     * Return current user locale.
     * @return user's Locale object
     */
    private Locale getCurrentUserLocale() {
        Locale loc = null;
        try {
            // check if locale is requested for specific user
            String userId = M_sm.getCurrentSessionUserId();
            if (userId != null) {
                Preferences prefs = M_ps.getPreferences(userId);
                ResourceProperties locProps = prefs.getProperties(ResourceLoader.APPLICATION_ID);
                String localeString = locProps.getProperty(ResourceLoader.LOCALE_KEY);
                // Parse user locale preference if set
                if (localeString != null) {
                    String[] locValues = localeString.split("_");
                    if (locValues.length > 1) {
                        // language, country
                        loc = new Locale(locValues[0], locValues[1]);
                    } else if (locValues.length == 1) {
                        // language
                        loc = new Locale(locValues[0]);
                    }
                }
                if (loc == null) {
                    loc = Locale.getDefault();
                }
            } else {
                // NOTE(review): on this branch userId is null, so the session
                // attribute key is suffixed with a null user id — confirm this is
                // the intended fallback lookup.
                loc = (Locale) M_sm.getCurrentSession().getAttribute(ResourceLoader.LOCALE_KEY + M_sm.getCurrentSessionUserId());
            }
        } catch (NullPointerException e) {
            loc = Locale.getDefault();
        }
        return loc;
    }
    // ################################################################
    // Utility Classes
    // ################################################################
    /** Cache key pairing an event id with a locale string. */
    public static class EventLocaleKey {
        String eventId = "";
        String locale = "";
        public EventLocaleKey(String eventId, String locale) {
            this.eventId = eventId;
            this.locale = locale;
        }
        public String getEventId() {
            return eventId;
        }
        public void setEventId(String eventId) {
            this.eventId = eventId;
        }
        public String getLocale() {
            return locale;
        }
        public void setLocale(String locale) {
            this.locale = locale;
        }
        @Override
        public String toString() {
            StringBuilder buff = new StringBuilder();
            buff.append("[");
            buff.append(getEventId());
            buff.append(", ");
            buff.append(getLocale());
            buff.append("]");
            return buff.toString();
        }
        @Override
        public int hashCode() {
            return getEventId().hashCode() + getLocale().hashCode();
        }
        @Override
        public boolean equals(Object obj) {
            if (obj == null || !(obj instanceof EventLocaleKey)) {
                return false;
            }
            EventLocaleKey o = (EventLocaleKey) obj;
            // BUG FIX: this previously compared o.getLocale() with itself, which is
            // always true, so keys for the same event id but different locales were
            // considered equal — allowing cross-locale collisions in equals()-based
            // lookups. Compare the other key's locale against OUR locale instead.
            return o.getEventId().equals(getEventId())
                    && o.getLocale().equals(getLocale());
        }
    }
}
| apache-2.0 |
dbmalkovsky/flowable-engine | modules/flowable-cmmn-converter/src/main/java/org/flowable/cmmn/converter/ManualActivationRuleXmlConverter.java | 1797 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.cmmn.converter;
import javax.xml.stream.XMLStreamReader;
import org.flowable.cmmn.model.CmmnElement;
import org.flowable.cmmn.model.ManualActivationRule;
import org.flowable.cmmn.model.PlanItemControl;
/**
* @author Joram Barrez
*/
public class ManualActivationRuleXmlConverter extends CaseElementXmlConverter {
@Override
public String getXMLElementName() {
return CmmnXmlConstants.ELEMENT_MANUAL_ACTIVATION_RULE;
}
@Override
public boolean hasChildElements() {
return true;
}
@Override
protected CmmnElement convert(XMLStreamReader xtr, ConversionHelper conversionHelper) {
if (conversionHelper.getCurrentCmmnElement() instanceof PlanItemControl) {
ManualActivationRule manualActivationRule = new ManualActivationRule();
manualActivationRule.setName(xtr.getAttributeValue(null, CmmnXmlConstants.ATTRIBUTE_NAME));
PlanItemControl planItemControl = (PlanItemControl) conversionHelper.getCurrentCmmnElement();
planItemControl.setManualActivationRule(manualActivationRule);
return manualActivationRule;
}
return null;
}
} | apache-2.0 |
stoksey69/googleads-java-lib | modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201408/AdUnitParent.java | 2599 |
package com.google.api.ads.dfp.jaxws.v201408;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
*
* The summary of a parent {@link AdUnit}.
*
*
* <p>Java class for AdUnitParent complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="AdUnitParent">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="id" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="name" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* <element name="adUnitCode" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "AdUnitParent", propOrder = {
    "id",
    "name",
    "adUnitCode"
})
public class AdUnitParent {

    protected String id;
    protected String name;
    protected String adUnitCode;

    /**
     * Gets the value of the id property.
     *
     * @return possible object is {@link String }
     */
    public String getId() {
        return this.id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value allowed object is {@link String }
     */
    public void setId(String value) {
        id = value;
    }

    /**
     * Gets the value of the name property.
     *
     * @return possible object is {@link String }
     */
    public String getName() {
        return this.name;
    }

    /**
     * Sets the value of the name property.
     *
     * @param value allowed object is {@link String }
     */
    public void setName(String value) {
        name = value;
    }

    /**
     * Gets the value of the adUnitCode property.
     *
     * @return possible object is {@link String }
     */
    public String getAdUnitCode() {
        return this.adUnitCode;
    }

    /**
     * Sets the value of the adUnitCode property.
     *
     * @param value allowed object is {@link String }
     */
    public void setAdUnitCode(String value) {
        adUnitCode = value;
    }

}
| apache-2.0 |
mefarazath/incubator-openaz | openaz-xacml-pdp/src/main/java/org/apache/openaz/xacml/pdp/std/functions/FunctionDefinitionLogical.java | 12962 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
/*
* AT&T - PROPRIETARY
* THIS FILE CONTAINS PROPRIETARY INFORMATION OF
* AT&T AND IS NOT TO BE DISCLOSED OR USED EXCEPT IN
* ACCORDANCE WITH APPLICABLE AGREEMENTS.
*
* Copyright (c) 2013 AT&T Knowledge Ventures
* Unpublished and Not for Publication
* All Rights Reserved
*/
package org.apache.openaz.xacml.pdp.std.functions;
import java.util.List;
import org.apache.openaz.xacml.api.AttributeValue;
import org.apache.openaz.xacml.api.Identifier;
import org.apache.openaz.xacml.pdp.eval.EvaluationContext;
import org.apache.openaz.xacml.pdp.policy.ExpressionResult;
import org.apache.openaz.xacml.pdp.policy.FunctionArgument;
import org.apache.openaz.xacml.std.StdStatus;
import org.apache.openaz.xacml.std.StdStatusCode;
import org.apache.openaz.xacml.std.datatypes.DataTypes;
/**
* FunctionDefinitionLogical extends
* {@link org.apache.openaz.xacml.pdp.std.functions.FunctionDefinitionHomogeneousSimple} to implement the
* XACML Logic predicates as functions taking zero, one, or multiple arguments of type <code>Boolean</code>
* and returning a <code>Boolean</code>. In the first implementation of XACML we had separate files for each
* XACML Function. This release combines multiple Functions in fewer files to minimize code duplication. This
* file supports the following XACML codes: or and n-of not
*/
public class FunctionDefinitionLogical extends FunctionDefinitionHomogeneousSimple<Boolean, Boolean> {
    /**
     * List of Logical Operations types
     */
    public enum OPERATION {
        OR,
        AND,
        N_OF,
        NOT
    }
    // the operation that this instance is being asked to do
    private final OPERATION operation;
    // Constructor: binds this boolean->boolean function definition to one logical
    // operation; idIn is the XACML function identifier it implements.
    public FunctionDefinitionLogical(Identifier idIn, OPERATION op) {
        super(idIn, DataTypes.DT_BOOLEAN, DataTypes.DT_BOOLEAN, null);
        operation = op;
    }
    /**
     * Evaluates the configured logical operation (OR, AND, N_OF or NOT) over the
     * given arguments.  Arguments are converted to Boolean one at a time so that
     * OR/AND/N_OF can short-circuit without converting the remaining arguments.
     * Conversion/validation failures are returned as error ExpressionResults
     * (indeterminate results), never thrown to the caller.
     */
    @Override
    public ExpressionResult evaluate(EvaluationContext evaluationContext, List<FunctionArgument> arguments) {
        switch (operation) {
        // OR: FALSE for an empty/null argument list; TRUE as soon as any one
        // argument evaluates to true (remaining arguments are not evaluated).
        case OR:
            if (arguments == null || arguments.size() == 0) {
                return ER_FALSE;
            }
            try {
                // evaluate the arguments one at a time and abort on the first true
                for (int i = 0; i < arguments.size(); i++) {
                    ConvertedArgument<Boolean> argument = new ConvertedArgument<Boolean>(
                        arguments.get(i),
                        this.getDataTypeArgs(),
                        false);
                    if (!argument.isOk()) {
                        // return a decorated message
                        return ExpressionResult.newError(getFunctionStatus(argument.getStatus()));
                    }
                    if (argument.getValue()) {
                        return ER_TRUE;
                    }
                }
            } catch (Exception e) {
                // prefer the root cause's message when one exists
                String message = e.getMessage();
                if (e.getCause() != null) {
                    message = e.getCause().getMessage();
                }
                return ExpressionResult.newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR,
                                                               this.getShortFunctionId() + " " + message));
            }
            return ER_FALSE;
        // AND: TRUE for an empty/null argument list; FALSE as soon as any one
        // argument evaluates to false (remaining arguments are not evaluated).
        case AND:
            if (arguments == null || arguments.size() == 0) {
                return ER_TRUE;
            }
            try {
                // evaluate the arguments one at a time and abort on the first false
                for (int i = 0; i < arguments.size(); i++) {
                    ConvertedArgument<Boolean> argument = new ConvertedArgument<Boolean>(
                        arguments.get(i),
                        this.getDataTypeArgs(),
                        false);
                    if (!argument.isOk()) {
                        return ExpressionResult.newError(getFunctionStatus(argument.getStatus()));
                    }
                    if (!argument.getValue()) {
                        return ER_FALSE;
                    }
                }
            } catch (Exception e) {
                // prefer the root cause's message when one exists
                String message = e.getMessage();
                if (e.getCause() != null) {
                    message = e.getCause().getMessage();
                }
                return ExpressionResult.newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR,
                                                               this.getShortFunctionId() + " " + message));
            }
            return ER_TRUE;
        // N_OF: the first argument is an Integer N; the result is TRUE when at
        // least N of the remaining boolean arguments evaluate to true.  The loop
        // short-circuits in both directions: returns TRUE once N trues are seen,
        // and FALSE once too few arguments remain to possibly reach N.
        case N_OF:
            Integer argumentCountNeeded;
            int trueArgumentsSeen = 0;
            if (arguments == null || arguments.size() == 0) {
                return ExpressionResult.newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR,
                                                               this.getShortFunctionId()
                                                                   + " Expected 1 argument, got 0"));
            }
            try {
                //
                // Special case:
                // The first argument in the list (an Integer) is not homogeneous with the rest of the
                // arguments (Booleans).
                // While this is technically not a FunctionDefinitionHomogeneousSimple type of object, we
                // derive from that class anyway
                // so that we can take advantage of the validateArgument() method in that class.
                // Unfortunately we cannot re-use that same code (because of generics - it gets messy) for the
                // Integer argument.
                // The following code essentially does the same job as validateArgument() on the first
                // argument in the list.
                //
                // first arg is the number of remaining arguments that must be TRUE
                if (arguments.get(0) == null) {
                    return ER_TRUE;
                }
                if (!arguments.get(0).getStatus().isOk()) {
                    return ExpressionResult.newError(getFunctionStatus(arguments.get(0).getStatus()));
                }
                if (arguments.get(0).isBag()) {
                    return ExpressionResult
                        .newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR, this
                            .getShortFunctionId() + " Expected a simple value, saw a bag"));
                }
                AttributeValue<?> attributeValue = arguments.get(0).getValue();
                if (attributeValue == null) {
                    // assume this is the same as "first argument is 0"
                    return ER_TRUE;
                }
                argumentCountNeeded = DataTypes.DT_INTEGER.convert(attributeValue.getValue()).intValue();
                if (argumentCountNeeded == 0) {
                    // zero trues needed: trivially TRUE
                    return ER_TRUE;
                }
                if (arguments.size() - 1 < argumentCountNeeded) {
                    // return a non-OK status to signal indeterminate
                    return ExpressionResult
                        .newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR,
                                                this.getShortFunctionId() + " Expected "
                                                    + argumentCountNeeded + " arguments but only "
                                                    + (arguments.size() - 1)
                                                    + " arguments in list after the count"));
                }
                for (int i = 1; i < arguments.size(); i++) {
                    ConvertedArgument<Boolean> argument = new ConvertedArgument<Boolean>(
                        arguments.get(i),
                        this.getDataTypeArgs(),
                        false);
                    if (!argument.isOk()) {
                        return ExpressionResult.newError(getFunctionStatus(argument.getStatus()));
                    }
                    if (argument.getValue()) {
                        trueArgumentsSeen++;
                        if (trueArgumentsSeen >= argumentCountNeeded) {
                            return ER_TRUE;
                        }
                    }
                    // if we cannot reach the goal, stop now.
                    // remaining entries to be looked at = list size - i - 1, which is the most additional
                    // TRUEs that we could get.
                    if ((arguments.size() - i - 1) + trueArgumentsSeen < argumentCountNeeded) {
                        // do not evaluate remaining entries
                        return ER_FALSE;
                    }
                }
                // did not reach our goal
                return ER_FALSE;
            } catch (Exception e) {
                // prefer the root cause's message when one exists
                String message = e.getMessage();
                if (e.getCause() != null) {
                    message = e.getCause().getMessage();
                }
                return ExpressionResult.newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR,
                                                               this.getShortFunctionId() + " " + message));
            }
        // NOT: logical negation of exactly one boolean argument.
        case NOT:
            if (arguments == null || arguments.size() != 1) {
                return ExpressionResult.newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR,
                                                               this.getShortFunctionId()
                                                                   + " Expected 1 argument, got "
                                                                   + ((arguments == null)
                                                                       ? "null" : arguments.size())));
            }
            try {
                ConvertedArgument<Boolean> argument = new ConvertedArgument<Boolean>(arguments.get(0),
                                                                                     this.getDataTypeArgs(),
                                                                                     false);
                if (!argument.isOk()) {
                    return ExpressionResult.newError(getFunctionStatus(argument.getStatus()));
                }
                if (argument.getValue()) {
                    return ER_FALSE;
                } else {
                    return ER_TRUE;
                }
            } catch (Exception e) {
                // prefer the root cause's message when one exists
                String message = e.getMessage();
                if (e.getCause() != null) {
                    message = e.getCause().getMessage();
                }
                return ExpressionResult.newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR,
                                                               this.getShortFunctionId() + " " + message));
            }
        }
        // all cases should have been covered by above - should never get here
        return ExpressionResult.newError(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR, this
            .getShortFunctionId() + " Could not evaluate Logical function " + operation));
    }
}
| apache-2.0 |
rocketballs/netty | codec-http/src/main/java/io/netty/handler/codec/http/multipart/AbstractDiskHttpData.java | 11629 | /*
* Copyright 2012 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.http.multipart;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http.HttpConstants;
import io.netty.util.internal.EmptyArrays;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.charset.Charset;
import static io.netty.buffer.Unpooled.*;
/**
* Abstract Disk HttpData implementation
*/
public abstract class AbstractDiskHttpData extends AbstractHttpData {
    // Backing file on disk; lazily created via tempFile() the first time content is written.
    protected File file;
    // When true, delete() leaves the file in place (set by setContent(File),
    // setContent(InputStream) and renameTo()).
    private boolean isRenamed;
    // Channel kept open across incremental addContent()/getChunk() calls; null otherwise.
    private FileChannel fileChannel;
    protected AbstractDiskHttpData(String name, Charset charset, long size) {
        super(name, charset, size);
    }
    /**
     *
     * @return the real DiskFilename (basename)
     */
    protected abstract String getDiskFilename();
    /**
     *
     * @return the default prefix
     */
    protected abstract String getPrefix();
    /**
     *
     * @return the default base Directory
     */
    protected abstract String getBaseDirectory();
    /**
     *
     * @return the default postfix
     */
    protected abstract String getPostfix();
    /**
     *
     * @return True if the file should be deleted on Exit by default
     */
    protected abstract boolean deleteOnExit();
    /**
     * @return a new Temp File from getDiskFilename(), default prefix, postfix and baseDirectory
     */
    private File tempFile() throws IOException {
        String newpostfix;
        String diskFilename = getDiskFilename();
        if (diskFilename != null) {
            // embed the real filename in the temp file name for easier debugging
            newpostfix = '_' + diskFilename;
        } else {
            newpostfix = getPostfix();
        }
        File tmpFile;
        if (getBaseDirectory() == null) {
            // create a temporary file
            tmpFile = File.createTempFile(getPrefix(), newpostfix);
        } else {
            tmpFile = File.createTempFile(getPrefix(), newpostfix, new File(
                    getBaseDirectory()));
        }
        if (deleteOnExit()) {
            tmpFile.deleteOnExit();
        }
        return tmpFile;
    }
    // Writes the whole buffer to (a possibly new) backing file in one shot and
    // marks this data as completed.  Always releases the given buffer.
    // NOTE(review): if the channel write throws, outputStream/localfileChannel are
    // not closed on that path — potential descriptor leak; confirm intended.
    @Override
    public void setContent(ByteBuf buffer) throws IOException {
        if (buffer == null) {
            throw new NullPointerException("buffer");
        }
        try {
            size = buffer.readableBytes();
            if (definedSize > 0 && definedSize < size) {
                throw new IOException("Out of size: " + size + " > " + definedSize);
            }
            if (file == null) {
                file = tempFile();
            }
            if (buffer.readableBytes() == 0) {
                // empty file
                file.createNewFile();
                return;
            }
            FileOutputStream outputStream = new FileOutputStream(file);
            FileChannel localfileChannel = outputStream.getChannel();
            ByteBuffer byteBuffer = buffer.nioBuffer();
            int written = 0;
            // loop: a single FileChannel.write() may write fewer bytes than requested
            while (written < size) {
                written += localfileChannel.write(byteBuffer);
            }
            buffer.readerIndex(buffer.readerIndex() + written);
            localfileChannel.force(false);
            localfileChannel.close();
            outputStream.close();
            completed = true;
        } finally {
            // Release the buffer as it was retained before and we not need a reference to it at all
            // See https://github.com/netty/netty/issues/1516
            buffer.release();
        }
    }
    // Appends the buffer to the backing file, keeping the fileChannel field open
    // across calls; when 'last' is true the channel is forced, closed and the
    // data marked completed.  Always releases a non-null buffer.
    @Override
    public void addContent(ByteBuf buffer, boolean last)
            throws IOException {
        if (buffer != null) {
            try {
                int localsize = buffer.readableBytes();
                if (definedSize > 0 && definedSize < size + localsize) {
                    throw new IOException("Out of size: " + (size + localsize) +
                            " > " + definedSize);
                }
                // composite buffers need a contiguous copy before nioBuffer()
                ByteBuffer byteBuffer = buffer.nioBufferCount() == 1 ? buffer.nioBuffer() : buffer.copy().nioBuffer();
                int written = 0;
                if (file == null) {
                    file = tempFile();
                }
                if (fileChannel == null) {
                    FileOutputStream outputStream = new FileOutputStream(file);
                    fileChannel = outputStream.getChannel();
                }
                while (written < localsize) {
                    written += fileChannel.write(byteBuffer);
                }
                size += localsize;
                buffer.readerIndex(buffer.readerIndex() + written);
            } finally {
                // Release the buffer as it was retained before and we not need a reference to it at all
                // See https://github.com/netty/netty/issues/1516
                buffer.release();
            }
        }
        if (last) {
            if (file == null) {
                file = tempFile();
            }
            if (fileChannel == null) {
                FileOutputStream outputStream = new FileOutputStream(file);
                fileChannel = outputStream.getChannel();
            }
            fileChannel.force(false);
            fileChannel.close();
            fileChannel = null;
            completed = true;
        } else {
            // last == false with a null buffer is a caller error
            if (buffer == null) {
                throw new NullPointerException("buffer");
            }
        }
    }
    // Adopts an existing caller-owned file as the content; delete() will then
    // leave it in place (isRenamed == true).
    @Override
    public void setContent(File file) throws IOException {
        if (this.file != null) {
            delete();
        }
        this.file = file;
        size = file.length();
        isRenamed = true;
        completed = true;
    }
    // Copies the entire stream into a fresh temp file.
    // NOTE(review): inputStream is never closed here — the caller retains
    // ownership; the output stream/channel are also not closed if a write throws.
    @Override
    public void setContent(InputStream inputStream) throws IOException {
        if (inputStream == null) {
            throw new NullPointerException("inputStream");
        }
        if (file != null) {
            delete();
        }
        file = tempFile();
        FileOutputStream outputStream = new FileOutputStream(file);
        FileChannel localfileChannel = outputStream.getChannel();
        byte[] bytes = new byte[4096 * 4];
        ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
        int read = inputStream.read(bytes);
        int written = 0;
        while (read > 0) {
            // limit the view to the bytes actually read, then flip for writing
            byteBuffer.position(read).flip();
            written += localfileChannel.write(byteBuffer);
            read = inputStream.read(bytes);
        }
        localfileChannel.force(false);
        localfileChannel.close();
        size = written;
        if (definedSize > 0 && definedSize < size) {
            // over the declared size: discard the temp file and fail
            file.delete();
            file = null;
            throw new IOException("Out of size: " + size + " > " + definedSize);
        }
        isRenamed = true;
        completed = true;
    }
    // Removes the backing file, unless it was adopted/renamed (caller-owned).
    @Override
    public void delete() {
        if (! isRenamed) {
            if (file != null) {
                file.delete();
            }
        }
    }
    // Loads the ENTIRE file into a byte array (bounded by Integer.MAX_VALUE).
    @Override
    public byte[] get() throws IOException {
        if (file == null) {
            return EmptyArrays.EMPTY_BYTES;
        }
        return readFrom(file);
    }
    // Loads the entire file and wraps it in an unpooled ByteBuf.
    @Override
    public ByteBuf getByteBuf() throws IOException {
        if (file == null) {
            return EMPTY_BUFFER;
        }
        byte[] array = readFrom(file);
        return wrappedBuffer(array);
    }
    // Reads up to 'length' bytes from the current file position; keeps the
    // channel open between calls and closes it only when EOF is reached.
    @Override
    public ByteBuf getChunk(int length) throws IOException {
        if (file == null || length == 0) {
            return EMPTY_BUFFER;
        }
        if (fileChannel == null) {
            FileInputStream inputStream = new FileInputStream(file);
            fileChannel = inputStream.getChannel();
        }
        int read = 0;
        ByteBuffer byteBuffer = ByteBuffer.allocate(length);
        while (read < length) {
            int readnow = fileChannel.read(byteBuffer);
            if (readnow == -1) {
                // EOF: release the channel so a later call starts from the beginning
                fileChannel.close();
                fileChannel = null;
                break;
            } else {
                read += readnow;
            }
        }
        if (read == 0) {
            return EMPTY_BUFFER;
        }
        byteBuffer.flip();
        ByteBuf buffer = wrappedBuffer(byteBuffer);
        buffer.readerIndex(0);
        buffer.writerIndex(read);
        return buffer;
    }
    // Decodes the whole file with the default HTTP charset.
    @Override
    public String getString() throws IOException {
        return getString(HttpConstants.DEFAULT_CHARSET);
    }
    // Decodes the whole file with the given charset (default charset if null).
    @Override
    public String getString(Charset encoding) throws IOException {
        if (file == null) {
            return "";
        }
        if (encoding == null) {
            byte[] array = readFrom(file);
            return new String(array, HttpConstants.DEFAULT_CHARSET.name());
        }
        byte[] array = readFrom(file);
        return new String(array, encoding.name());
    }
    // Disk-backed implementation: content never resides in memory.
    @Override
    public boolean isInMemory() {
        return false;
    }
    // Moves the backing file to 'dest'; falls back to a channel-to-channel copy
    // when File.renameTo() fails (e.g. across filesystems).
    // NOTE(review): on the copy path the streams/channels are not closed if an
    // exception occurs mid-copy — confirm intended.
    @Override
    public boolean renameTo(File dest) throws IOException {
        if (dest == null) {
            throw new NullPointerException("dest");
        }
        if (!file.renameTo(dest)) {
            // must copy
            FileInputStream inputStream = new FileInputStream(file);
            FileOutputStream outputStream = new FileOutputStream(dest);
            FileChannel in = inputStream.getChannel();
            FileChannel out = outputStream.getChannel();
            int chunkSize = 8196;
            long position = 0;
            while (position < size) {
                if (chunkSize < size - position) {
                    chunkSize = (int) (size - position);
                }
                position += in.transferTo(position, chunkSize , out);
            }
            in.close();
            out.close();
            if (position == size) {
                // full copy succeeded: drop the source and adopt the destination
                file.delete();
                file = dest;
                isRenamed = true;
                return true;
            } else {
                // partial copy: clean up the incomplete destination
                dest.delete();
                return false;
            }
        }
        file = dest;
        isRenamed = true;
        return true;
    }
    /**
     * Utility function
     * @return the array of bytes
     */
    private static byte[] readFrom(File src) throws IOException {
        long srcsize = src.length();
        if (srcsize > Integer.MAX_VALUE) {
            throw new IllegalArgumentException(
                    "File too big to be loaded in memory");
        }
        FileInputStream inputStream = new FileInputStream(src);
        FileChannel fileChannel = inputStream.getChannel();
        byte[] array = new byte[(int) srcsize];
        ByteBuffer byteBuffer = ByteBuffer.wrap(array);
        int read = 0;
        while (read < srcsize) {
            read += fileChannel.read(byteBuffer);
        }
        fileChannel.close();
        return array;
    }
    // Exposes the backing file (may be null if no content was set yet).
    @Override
    public File getFile() throws IOException {
        return file;
    }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-compute/v1/1.29.2/com/google/api/services/compute/model/PathRule.java | 7640 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.compute.model;
/**
* A path-matching rule for a URL. If matched, will use the specified BackendService to handle the
* traffic arriving at this URL.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class PathRule extends com.google.api.client.json.GenericJson {
/**
* The list of path patterns to match. Each must start with / and the only place a * is allowed is
* at the end following a /. The string fed to the path matcher does not include any text after
* the first ? or #, and those chars are not allowed here.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.util.List<java.lang.String> paths;
/**
* In response to a matching path, the load balancer performs advanced routing actions like URL
* rewrites, header transformations, etc. prior to forwarding the request to the selected backend.
* If routeAction specifies any weightedBackendServices, service must not be set. Conversely if
* service is set, routeAction cannot contain any weightedBackendServices. Only one of
* routeAction or urlRedirect must be set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private HttpRouteAction routeAction;
/**
* The full or partial URL of the backend service resource to which traffic is directed if this
* rule is matched. If routeAction is additionally specified, advanced routing actions like URL
* Rewrites, etc. take effect prior to sending the request to the backend. However, if service is
* specified, routeAction cannot contain any weightedBackendService s. Conversely, if routeAction
* specifies any weightedBackendServices, service must not be specified. Only one of urlRedirect,
* service or routeAction.weightedBackendService must be set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String service;
/**
* When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. If
* urlRedirect is specified, service or routeAction must not be set.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private HttpRedirectAction urlRedirect;
/**
* The list of path patterns to match. Each must start with / and the only place a * is allowed is
* at the end following a /. The string fed to the path matcher does not include any text after
* the first ? or #, and those chars are not allowed here.
* @return value or {@code null} for none
*/
public java.util.List<java.lang.String> getPaths() {
return paths;
}
/**
* The list of path patterns to match. Each must start with / and the only place a * is allowed is
* at the end following a /. The string fed to the path matcher does not include any text after
* the first ? or #, and those chars are not allowed here.
* @param paths paths or {@code null} for none
*/
public PathRule setPaths(java.util.List<java.lang.String> paths) {
this.paths = paths;
return this;
}
/**
* In response to a matching path, the load balancer performs advanced routing actions like URL
* rewrites, header transformations, etc. prior to forwarding the request to the selected backend.
* If routeAction specifies any weightedBackendServices, service must not be set. Conversely if
* service is set, routeAction cannot contain any weightedBackendServices. Only one of
* routeAction or urlRedirect must be set.
* @return value or {@code null} for none
*/
public HttpRouteAction getRouteAction() {
return routeAction;
}
/**
* In response to a matching path, the load balancer performs advanced routing actions like URL
* rewrites, header transformations, etc. prior to forwarding the request to the selected backend.
* If routeAction specifies any weightedBackendServices, service must not be set. Conversely if
* service is set, routeAction cannot contain any weightedBackendServices. Only one of
* routeAction or urlRedirect must be set.
* @param routeAction routeAction or {@code null} for none
*/
public PathRule setRouteAction(HttpRouteAction routeAction) {
this.routeAction = routeAction;
return this;
}
/**
* The full or partial URL of the backend service resource to which traffic is directed if this
* rule is matched. If routeAction is additionally specified, advanced routing actions like URL
* Rewrites, etc. take effect prior to sending the request to the backend. However, if service is
* specified, routeAction cannot contain any weightedBackendService s. Conversely, if routeAction
* specifies any weightedBackendServices, service must not be specified. Only one of urlRedirect,
* service or routeAction.weightedBackendService must be set.
* @return value or {@code null} for none
*/
public java.lang.String getService() {
return service;
}
/**
* The full or partial URL of the backend service resource to which traffic is directed if this
* rule is matched. If routeAction is additionally specified, advanced routing actions like URL
* Rewrites, etc. take effect prior to sending the request to the backend. However, if service is
* specified, routeAction cannot contain any weightedBackendService s. Conversely, if routeAction
* specifies any weightedBackendServices, service must not be specified. Only one of urlRedirect,
* service or routeAction.weightedBackendService must be set.
* @param service service or {@code null} for none
*/
public PathRule setService(java.lang.String service) {
this.service = service;
return this;
}
/**
* When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. If
* urlRedirect is specified, service or routeAction must not be set.
* @return value or {@code null} for none
*/
public HttpRedirectAction getUrlRedirect() {
return urlRedirect;
}
/**
* When a path pattern is matched, the request is redirected to a URL specified by urlRedirect. If
* urlRedirect is specified, service or routeAction must not be set.
* @param urlRedirect urlRedirect or {@code null} for none
*/
public PathRule setUrlRedirect(HttpRedirectAction urlRedirect) {
this.urlRedirect = urlRedirect;
return this;
}
@Override
public PathRule set(String fieldName, Object value) {
return (PathRule) super.set(fieldName, value);
}
@Override
public PathRule clone() {
return (PathRule) super.clone();
}
}
| apache-2.0 |
siosio/intellij-community | python/testSrc/com/jetbrains/python/PyQuickFixTestCase.java | 2854 | // Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python;
import com.intellij.codeInsight.intention.IntentionAction;
import com.jetbrains.python.fixtures.PyTestCase;
import com.jetbrains.python.psi.LanguageLevel;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
/**
 * Base class for Python quick-fix tests: configures a fixture file, verifies the
 * highlighting, applies the single intention matching a hint, and compares the
 * result against a "_after" file. Test data lives under
 * {@code quickFixes/<TestClassSimpleName>}.
 */
public abstract class PyQuickFixTestCase extends PyTestCase {

  @Override
  public void setUp() throws Exception {
    super.setUp();
    myFixture.setCaresAboutInjection(false);
  }

  @Override
  @NonNls
  protected String getTestDataPath() {
    return PythonTestUtil.getTestDataPath() + "/quickFixes/" + getClass().getSimpleName();
  }

  protected void doQuickFixTest(final Class inspectionClass, final String hint, LanguageLevel languageLevel) {
    // Same scenario as the two-argument overload, pinned to a language level.
    runWithLanguageLevel(languageLevel, () -> doQuickFixTest(inspectionClass, hint));
  }

  protected void doQuickFixTest(final Class inspectionClass, final String hint) {
    final String testName = getTestName(true);
    myFixture.enableInspections(inspectionClass);
    myFixture.configureByFile(testName + ".py");
    myFixture.checkHighlighting(true, false, false);
    applySingleIntention(hint);
    myFixture.checkResultByFile(testName + "_after.py", true);
  }

  protected void doQuickFixTest(final String hint) {
    final String testName = getTestName(true);
    myFixture.configureByFile(testName + ".py");
    myFixture.checkHighlighting(true, false, false);
    applySingleIntention(hint);
    myFixture.checkResultByFile(testName + "_after.py");
  }

  protected void doInspectionTest(final Class inspectionClass) {
    // Highlighting-only check: no intention is applied.
    final String testName = getTestName(true);
    myFixture.enableInspections(inspectionClass);
    myFixture.configureByFile(testName + ".py");
    myFixture.checkHighlighting(true, false, false);
  }

  protected void doMultifilesTest(@NotNull final Class inspectionClass, @NotNull final String hint, final String @NotNull [] files) {
    final String testName = getTestName(true);
    myFixture.enableInspections(inspectionClass);
    // The main test file is appended after the supplied companion files.
    final String[] allFiles = Arrays.copyOf(files, files.length + 1);
    allFiles[files.length] = testName + ".py";
    myFixture.configureByFiles(allFiles);
    applySingleIntention(hint);
    myFixture.checkResultByFile(testName + ".py", testName + "_after.py", true);
  }

  // Finds the single intention matching the hint, asserts it exists, and runs it.
  private void applySingleIntention(final String hint) {
    final IntentionAction intentionAction = myFixture.findSingleIntention(hint);
    assertNotNull(intentionAction);
    myFixture.launchAction(intentionAction);
  }
}
| apache-2.0 |
meetdestiny/geronimo-trader | applications/daytrader/wsappclient/src/test/org/apache/geronimo/samples/daytrader/NaughtyTest.java | 750 | package org.apache.geronimo.samples.daytrader;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
/**
 * Placeholder JUnit 3 test case — deliberately contains no assertions.
 *
 * @author <a href="mailto:jason@zenplex.com">Jason van Zyl</a>
 */
public class NaughtyTest extends AbstractTestCase {

    /**
     * Creates the test case.
     *
     * @param testName name of the test case
     */
    public NaughtyTest(String testName) {
        super(testName);
    }

    /**
     * @return the suite of tests being tested
     */
    public static Test suite() {
        return new TestSuite(NaughtyTest.class);
    }

    /**
     * Rigourous Test :-)
     */
    public void testApp() {
        // Crash and burn! (intentionally empty)
    }
}
| apache-2.0 |
ebyhr/presto | testing/trino-benchmark/src/main/java/io/trino/benchmark/SqlBetweenBenchmark.java | 1242 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.benchmark;
import io.trino.testing.LocalQueryRunner;
import static io.trino.benchmark.BenchmarkQueryRunner.createLocalQueryRunner;
public class SqlBetweenBenchmark
        extends AbstractSqlBenchmark
{
    // Benchmark "sql_between_long": 10 warm-up iterations, 30 measured iterations.
    private static final String QUERY =
            "SELECT COUNT(*) FROM orders WHERE custkey BETWEEN 10000 AND 20000 OR custkey BETWEEN 30000 AND 35000 OR custkey BETWEEN 50000 AND 51000";

    public SqlBetweenBenchmark(LocalQueryRunner localQueryRunner)
    {
        super(localQueryRunner, "sql_between_long", 10, 30, QUERY);
    }

    public static void main(String[] args)
    {
        SqlBetweenBenchmark benchmark = new SqlBetweenBenchmark(createLocalQueryRunner());
        benchmark.runBenchmark(new SimpleLineBenchmarkResultWriter(System.out));
    }
}
| apache-2.0 |
bocon13/buck | src/com/facebook/buck/js/ReactNativeFlavors.java | 1458 | /*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.js;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.Flavor;
import com.facebook.buck.model.ImmutableFlavor;
import com.google.common.collect.ImmutableSet;
/** Flavor helpers for React Native build targets. */
public class ReactNativeFlavors {

  // Utility class, do not instantiate.
  private ReactNativeFlavors() { }

  public static final Flavor UNBUNDLE = ImmutableFlavor.of("unbundle");

  public static final Flavor DEV = ImmutableFlavor.of("dev");

  /** Accepts only flavor sets drawn entirely from {DEV, UNBUNDLE}. */
  public static boolean validateFlavors(ImmutableSet<Flavor> flavors) {
    ImmutableSet<Flavor> recognized = ImmutableSet.of(DEV, UNBUNDLE);
    return recognized.containsAll(flavors);
  }

  /** True when the target carries the "unbundle" flavor. */
  public static boolean useUnbundling(BuildTarget buildTarget) {
    return hasFlavor(buildTarget, UNBUNDLE);
  }

  /** True when the target carries the "dev" flavor. */
  public static boolean isDevMode(BuildTarget buildTarget) {
    return hasFlavor(buildTarget, DEV);
  }

  private static boolean hasFlavor(BuildTarget target, Flavor flavor) {
    return target.getFlavors().contains(flavor);
  }
}
| apache-2.0 |
JayanthyChengan/dataverse | src/main/java/edu/harvard/iq/dataverse/api/BundleDownloadInstanceWriter.java | 8746 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package edu.harvard.iq.dataverse.api;
import java.lang.reflect.Type;
import java.lang.annotation.Annotation;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.MessageBodyWriter;
import javax.ws.rs.ext.Provider;
import edu.harvard.iq.dataverse.DataFile;
import edu.harvard.iq.dataverse.dataaccess.*;
import java.util.logging.Logger;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
/**
 * JAX-RS body writer that streams a {@link BundleDownloadInstance} to the client as a
 * single "bundle" ZIP archive for a tabular data file. The archive contains, in order:
 * the tab-delimited file itself (prefixed with its variable-name header line when one
 * is stored), the saved original-format file (best effort), an RData conversion
 * (unless the original already was RData), the DDI/XML variable metadata, and
 * EndNote / RIS / BibTeX citation entries -- each part only when available.
 *
 * @author Leonid Andreev
 */
@Provider
public class BundleDownloadInstanceWriter implements MessageBodyWriter<BundleDownloadInstance> {

    private static final Logger logger = Logger.getLogger(BundleDownloadInstanceWriter.class.getCanonicalName());

    /** This writer applies only to {@link BundleDownloadInstance} entities. */
    @Override
    public boolean isWriteable(Class<?> clazz, Type type, Annotation[] annotation, MediaType mediaType) {
        return clazz == BundleDownloadInstance.class;
    }

    /** -1 tells JAX-RS the content length is unknown up front (the ZIP is streamed). */
    @Override
    public long getSize(BundleDownloadInstance di, Class<?> clazz, Type type, Annotation[] annotation, MediaType mediaType) {
        return -1;
    }

    /**
     * Streams the bundle ZIP onto {@code outstream}.
     *
     * <p>Responds 404 (NOT_FOUND) when the instance carries no data file or no storage
     * access object is available, and 500 (INTERNAL_SERVER_ERROR) on I/O failure while
     * writing the archive.</p>
     */
    @Override
    public void writeTo(BundleDownloadInstance di, Class<?> clazz, Type type, Annotation[] annotation, MediaType mediaType, MultivaluedMap<String, Object> httpHeaders, OutputStream outstream) throws IOException, WebApplicationException {
        try {
            if (di.getDownloadInfo() != null && di.getDownloadInfo().getDataFile() != null) {
                DataAccessRequest daReq = new DataAccessRequest();
                DataFile sf = di.getDownloadInfo().getDataFile();
                StorageIO<DataFile> accessObject = DataAccess.getStorageIO(sf, daReq);

                if (accessObject != null) {
                    accessObject.open();

                    ZipOutputStream zout = new ZipOutputStream(outstream);

                    /* First, the tab file itself: */
                    String fileName = accessObject.getFileName();
                    // The bundle is named after the tab file: foo.tab -> foo-bundle.zip.
                    String zipFileName = fileName.replaceAll("\\.tab$", "-bundle.zip");

                    httpHeaders.add("Content-disposition", "attachment; filename=\"" + zipFileName + "\"");
                    httpHeaders.add("Content-Type", "application/zip; name=\"" + zipFileName + "\"");

                    InputStream instream = accessObject.getInputStream();

                    ZipEntry e = new ZipEntry(fileName);
                    zout.putNextEntry(e);

                    // The variable-name header line is stored separately from the
                    // tab-delimited data; re-attach it in front of the data when present.
                    String varHeaderLine = accessObject.getVarHeader();
                    if (varHeaderLine != null) {
                        zout.write(varHeaderLine.getBytes());
                    }

                    byte[] data = new byte[8192];

                    int i = 0;
                    while ((i = instream.read(data)) > 0) {
                        zout.write(data, 0, i);
                        zout.flush();
                    }
                    instream.close();
                    zout.closeEntry();

                    instream = null;

                    // Now, the original format:
                    String origFormat = null;
                    try {
                        // NOTE(review): "retreive" is the spelling of the API method itself.
                        StorageIO<DataFile> accessObjectOrig = StoredOriginalFile.retreive(accessObject); //.retrieve(sf, (FileAccessIO) accessObject);
                        if (accessObjectOrig != null) {
                            instream = accessObjectOrig.getInputStream();
                            if (instream != null) {
                                String origFileName = accessObjectOrig.getFileName();
                                // Remember the original MIME type so the RData step below
                                // can be skipped when the original already was RData.
                                origFormat = accessObject.getMimeType();
                                e = new ZipEntry(origFileName);

                                zout.putNextEntry(e);

                                i = 0;
                                while ((i = instream.read(data)) > 0) {
                                    zout.write(data, 0, i);
                                    zout.flush();
                                }
                            }
                        }
                    } catch (IOException ioex) {
                        // ignore; if for whatever reason the original is not
                        // available, we'll just skip it.
                        logger.warning("failed to retrieve saved original for " + fileName);
                    } finally {
                        // Best-effort cleanup; close failures must not abort the bundle.
                        if (instream != null) {
                            try {instream.close();} catch (IOException ioex) {}
                            try {zout.closeEntry();} catch (IOException ioex) {}
                        }
                    }

                    instream = null;

                    // And, if the original format was NOT RData,
                    // add an RData version:
                    if (!"application/x-rlang-transport".equals(origFormat)) {
                        try {
                            StorageIO<DataFile> accessObjectRdata = DataConverter.performFormatConversion(sf, accessObject,
                                    "RData", "application/x-rlang-transport");

                            if (accessObjectRdata != null) {
                                instream = accessObjectRdata.getInputStream();
                                if (instream != null) {
                                    String rdataFileName = accessObjectRdata.getFileName();
                                    e = new ZipEntry(rdataFileName);

                                    zout.putNextEntry(e);

                                    i = 0;
                                    while ((i = instream.read(data)) > 0) {
                                        zout.write(data, 0, i);
                                        zout.flush();
                                    }
                                }
                            }
                        } catch (IOException ioex) {
                            // ignore; if for whatever reason RData conversion is not
                            // available, we'll just skip it.
                            logger.warning("failed to convert tabular data file "+fileName+" to RData.");
                        } finally {
                            if (instream != null) {
                                try{instream.close();}catch(IOException ioex){}
                                try{zout.closeEntry();}catch(IOException ioex){}
                            }
                        }
                    }

                    // And the variable metadata (DDI/XML), if available:
                    // NOTE(review): the citation entry names below replace ".tab" with no
                    // "-" separator, producing e.g. "foocitation-endnote.xml" -- confirm
                    // this naming is intentional.
                    if (di.getFileDDIXML() != null) {
                        e = new ZipEntry(fileName.replaceAll("\\.tab$", "-ddi.xml"));
                        zout.putNextEntry(e);
                        zout.write(di.getFileDDIXML().getBytes());
                        zout.closeEntry();
                    }

                    // And now the citations:
                    if (di.getFileCitationEndNote() != null) {
                        e = new ZipEntry(fileName.replaceAll("\\.tab$","citation-endnote.xml"));
                        zout.putNextEntry(e);
                        zout.write(di.getFileCitationEndNote().getBytes());
                        zout.closeEntry();
                    }

                    if (di.getFileCitationRIS() != null) {
                        e = new ZipEntry(fileName.replaceAll("\\.tab$","citation-ris.ris"));
                        zout.putNextEntry(e);
                        zout.write(di.getFileCitationRIS().getBytes());
                        zout.closeEntry();
                    }

                    if (di.getFileCitationBibtex() != null) {
                        e = new ZipEntry(fileName.replaceAll("\\.tab$","citation-bib.bib"));
                        zout.putNextEntry(e);
                        zout.write(di.getFileCitationBibtex().getBytes());
                        zout.closeEntry();
                    }

                    zout.close();
                    return;
                }
            }
        } catch (IOException ioex) {
            throw new WebApplicationException(Response.Status.INTERNAL_SERVER_ERROR);
        }

        // No data file / no storage access object: nothing to serve.
        throw new WebApplicationException(Response.Status.NOT_FOUND);
    }
}
| apache-2.0 |
apache/olingo-odata2 | odata2-annotation-processor/annotation-processor-ref/src/main/java/org/apache/olingo/odata2/annotation/processor/ref/model/Team.java | 2757 | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
******************************************************************************/
package org.apache.olingo.odata2.annotation.processor.ref.model;
import java.util.ArrayList;
import java.util.List;
import org.apache.olingo.odata2.api.annotation.edm.EdmEntitySet;
import org.apache.olingo.odata2.api.annotation.edm.EdmEntityType;
import org.apache.olingo.odata2.api.annotation.edm.EdmNavigationProperty;
import org.apache.olingo.odata2.api.annotation.edm.EdmNavigationProperty.Multiplicity;
import org.apache.olingo.odata2.api.annotation.edm.EdmProperty;
import org.apache.olingo.odata2.api.annotation.edm.EdmType;
/**
 * "Team" entity of the annotation-processor reference model: exposed as OData
 * entity type "Team" in namespace {@code ModelSharedConstants.NAMESPACE_1},
 * reachable through the "Teams" entity set. Identity fields ({@code id},
 * {@code name}) are inherited from {@link RefBase}.
 */
@EdmEntityType(name = "Team", namespace = ModelSharedConstants.NAMESPACE_1)
@EdmEntitySet(name = "Teams")
public class Team extends RefBase {

  // Edm.Boolean property; stays null until explicitly set via setScrumTeam().
  @EdmProperty(type = EdmType.BOOLEAN)
  private Boolean isScrumTeam;

  // To-many navigation property "nt_Employees" over the "TeamEmployees" association.
  @EdmNavigationProperty(name = "nt_Employees", association = "TeamEmployees", toMultiplicity = Multiplicity.MANY)
  private List<Employee> employees = new ArrayList<Employee>();

  // Optional self-referencing navigation to a nested sub-team.
  @EdmNavigationProperty
  private Team subTeam;

  /** Returns the scrum flag; {@code null} when it was never set. */
  public Boolean isScrumTeam() {
    return isScrumTeam;
  }

  public void setScrumTeam(final Boolean isScrumTeam) {
    this.isScrumTeam = isScrumTeam;
  }

  public void addEmployee(final Employee e) {
    employees.add(e);
  }

  /**
   * Returns the live internal employee list (not a copy); mutations by the
   * caller are reflected in this entity.
   */
  public List<Employee> getEmployees() {
    return employees;
  }

  public void setSubTeam(Team subTeam) {
    this.subTeam = subTeam;
  }

  public Team getSubTeam() {
    return subTeam;
  }

  // Delegates to RefBase; assumed consistent with the id-based equals() below.
  // NOTE(review): confirm RefBase.hashCode() is derived from id.
  @Override
  public int hashCode() {
    return super.hashCode();
  }

  // Two Teams are equal iff they are the same runtime class and share the id
  // inherited from RefBase.
  @Override
  public boolean equals(final Object obj) {
    return this == obj
        || obj != null && getClass() == obj.getClass() && id == ((Team) obj).id;
  }

  // Hand-built JSON-style rendering of the entity's OData properties.
  @Override
  public String toString() {
    return "{\"Id\":\"" + id + "\",\"Name\":\"" + name + "\",\"IsScrumTeam\":" + isScrumTeam + "}";
  }
} | apache-2.0 |
jasonchaffee/apiman | gateway/engine/influxdb/src/main/java/io/apiman/gateway/engine/influxdb/InfluxDb09Driver.java | 7714 | /*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apiman.gateway.engine.influxdb;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import io.apiman.gateway.engine.async.AsyncResultImpl;
import io.apiman.gateway.engine.async.IAsyncHandler;
import io.apiman.gateway.engine.async.IAsyncResult;
import io.apiman.gateway.engine.async.IAsyncResultHandler;
import io.apiman.gateway.engine.components.IHttpClientComponent;
import io.apiman.gateway.engine.components.http.HttpMethod;
import io.apiman.gateway.engine.components.http.IHttpClientRequest;
import io.apiman.gateway.engine.components.http.IHttpClientResponse;
/**
 * A simple async HTTP impl of the influxdb driver. Contains only the subset of
 * functionality we need: writing line-protocol documents and listing databases.
 *
 * @author Marc Savy <msavy@redhat.com>
 */
public class InfluxDb09Driver {
    private final IHttpClientComponent httpClient;
    private final StringBuilder writeUrl;
    private final StringBuilder queryUrl;
    private final String username;
    private final String password;
    private final String database;
    private final String retentionPolicy;
    private final String timePrecision;
    private static final ObjectMapper objectMapper = new ObjectMapper();

    /**
     * @param httpClient async HTTP client component used for all requests
     * @param endpoint InfluxDB endpoint, host[:port] with an optional http/https scheme
     * @param username value of the {@code u} query parameter (may be null)
     * @param password value of the {@code p} query parameter (may be null)
     * @param database target database ({@code db} query parameter)
     * @param retentionPolicy retention policy ({@code rp} parameter, may be null)
     * @param timePrecision time precision ({@code precision} parameter, may be null)
     */
    @SuppressWarnings("nls")
    public InfluxDb09Driver(IHttpClientComponent httpClient, String endpoint, String username,
            String password, String database, String retentionPolicy, String timePrecision) {
        this.httpClient = httpClient;
        this.username = username;
        this.password = password;
        this.database = database;
        this.retentionPolicy = retentionPolicy;
        this.timePrecision = timePrecision;

        StringBuilder writeEndpoint = new StringBuilder();
        // Default to http:// only when no scheme is present. (This check used to
        // be "||", which is always true, so a scheme was prepended even to
        // endpoints already starting with http:// or https://.)
        if (!endpoint.startsWith("http://") && !endpoint.startsWith("https://")) {
            writeEndpoint.append("http://");
        }
        // domain + port
        writeEndpoint.append(endpoint);

        // Same basic structure, but with /query on end
        StringBuilder queryEndpoint = new StringBuilder().append(writeEndpoint).append("/query");
        this.queryUrl = buildParams(queryEndpoint, "SHOW DATABASES");

        // Add user-name, password, etc
        writeEndpoint.append("/write");
        this.writeUrl = buildParams(writeEndpoint, null);
    }

    /**
     * Simple write to "/write". Must be valid Influx line format.
     *
     * @param lineDocument document to write, as string
     * @param failureHandler handler in case of failure
     */
    public void write(String lineDocument,
            final IAsyncHandler<InfluxException> failureHandler) {
        // Make request to influx; anything outside 2xx (or a transport error)
        // is reported to the failure handler.
        IHttpClientRequest request = httpClient.request(writeUrl.toString(), HttpMethod.POST,
                new IAsyncResultHandler<IHttpClientResponse>() {
                    @Override
                    public void handle(IAsyncResult<IHttpClientResponse> result) {
                        // NOTE(review): when result.isError() is true, getResult() may be
                        // null; InfluxException is assumed to tolerate that -- confirm.
                        if (result.isError() || result.getResult().getResponseCode() < 200
                                || result.getResult().getResponseCode() > 299) {
                            failureHandler.handle(new InfluxException(result.getResult()));
                        }
                    }
                });
        // For some reason Java's URLEncoding doesn't seem to be parseable by influx,
        // so the body is sent as plain text rather than form-urlencoded.
        //request.addHeader("Content-Type", "application/x-www-form-urlencoded");
        request.addHeader("Content-Type", "text/plain"); //$NON-NLS-1$ //$NON-NLS-2$
        request.write(lineDocument, StandardCharsets.UTF_8.name());
        request.end();
    }

    /**
     * List all databases.
     *
     * @param handler the result handler; receives either the database names or
     *            the failure (transport error, non-200 status, or a JSON parse
     *            problem)
     */
    @SuppressWarnings("nls")
    public void listDatabases(final IAsyncResultHandler<List<String>> handler) {
        IHttpClientRequest request = httpClient.request(queryUrl.toString(), HttpMethod.GET,
                new IAsyncResultHandler<IHttpClientResponse>() {
                    @Override
                    public void handle(IAsyncResult<IHttpClientResponse> result) {
                        try {
                            if (result.isError() || result.getResult().getResponseCode() != 200) {
                                handleError(result, handler);
                                return;
                            }
                            List<String> results = new ArrayList<>();
                            // Response shape: {"results":[{"series":[{"values":[[db1],[db2],...]}]}]}
                            JsonNode arrNode = objectMapper.readTree(result.getResult().getBody())
                                    .path("results").getElements().next() // results: [ first-elem
                                    .path("series").getElements().next(); // series: [ first-elem
                            // values: [[db1], [db2], [...]] => db1, db2
                            flattenArrays(arrNode.get("values"), results);
                            // send results
                            handler.handle(AsyncResultImpl.<List<String>> create(results));
                        } catch (IOException e) {
                            // Previously this failure result was constructed but never
                            // handed to the handler, so parse errors were silently
                            // dropped and callers never received a response.
                            handler.handle(AsyncResultImpl.<List<String>> create(new RuntimeException(
                                    "Unable to parse Influx JSON response", e)));
                        }
                    }
                });
        request.end();
    }

    /**
     * Routes a failed HTTP result to the given handler: the underlying throwable
     * for transport errors, or an {@link InfluxException} for non-200 statuses.
     */
    protected <T> void handleError(IAsyncResult<IHttpClientResponse> result, IAsyncResultHandler<T> handler) {
        if (result.isError()) {
            handler.handle(AsyncResultImpl.<T> create(result.getError()));
        } else if (result.getResult().getResponseCode() != 200) {
            handler.handle(AsyncResultImpl.<T> create(new InfluxException("Influx: " //$NON-NLS-1$
                    + result.getResult().getResponseCode() + " " + result.getResult().getResponseMessage()))); //$NON-NLS-1$
        }
    }

    /** Recursively collects the text value of every leaf of a (nested) JSON array. */
    private void flattenArrays(JsonNode arrNode, List<String> results) {
        if (arrNode.isArray()) {
            for (JsonNode entry : arrNode) {
                flattenArrays(entry, results);
            }
        } else {
            results.add(arrNode.getTextValue());
        }
    }

    /** Appends the standard InfluxDB query parameters (db, u, p, rp, precision, q). */
    @SuppressWarnings("nls")
    private StringBuilder buildParams(StringBuilder url, String query) {
        addQueryParam(url, "db", database, "?");
        addQueryParam(url, "u", username, "&");
        addQueryParam(url, "p", password, "&");
        addQueryParam(url, "rp", retentionPolicy, "&");
        addQueryParam(url, "precision", timePrecision, "&");
        addQueryParam(url, "q", query, "&");
        return url;
    }

    /** URL-encodes and appends {@code key=value}; a null value appends nothing. */
    @SuppressWarnings("nls")
    private void addQueryParam(StringBuilder url, String key, String value, String connector) {
        if (value == null)
            return;
        try {
            url.append(connector + key + "=" + URLEncoder.encode(value, StandardCharsets.UTF_8.name()));
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
}
| apache-2.0 |
sirkkalap/DependencyCheck | dependency-check-utils/src/main/java/org/owasp/dependencycheck/org/apache/tools/ant/util/CollectionUtils.java | 7652 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.owasp.dependencycheck.org.apache.tools.ant.util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Dictionary;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Vector;
// CheckStyle:HideUtilityClassConstructorCheck OFF - bc
/**
 * A set of helper methods related to collection manipulation.
 *
 * <p>Most of the pre-generics helpers here are retained only for backwards
 * compatibility and are deprecated; new code should use the equivalent
 * {@link java.util.Collections} APIs directly. No private constructor is added,
 * also for backwards compatibility (see the CheckStyle suppression above).</p>
 *
 * @since Ant 1.5
 */
public class CollectionUtils {

    /**
     * Collections.emptyList() is Java5+.
     */
    @SuppressWarnings("rawtypes")
    @Deprecated
    public static final List EMPTY_LIST = Collections.EMPTY_LIST;

    /**
     * Please use Vector.equals() or List.equals().
     * @param v1 the first vector.
     * @param v2 the second vector.
     * @return true if the vectors are equal (both null counts as not equal
     *         unless they are the same reference).
     * @since Ant 1.5
     * @deprecated since 1.6.x.
     */
    public static boolean equals(Vector<?> v1, Vector<?> v2) {
        if (v1 == v2) {
            return true;
        }

        if (v1 == null || v2 == null) {
            return false;
        }

        return v1.equals(v2);
    }

    /**
     * Dictionary does not have an equals.
     * Please use Map.equals().
     *
     * <p>Follows the equals contract of Java 2's Map.</p>
     * @param d1 the first dictionary.
     * @param d2 the second dictionary.
     * @return true if the dictionaries are equal.
     * @since Ant 1.5
     * @deprecated since 1.6.x.
     */
    public static boolean equals(Dictionary<?, ?> d1, Dictionary<?, ?> d2) {
        if (d1 == d2) {
            return true;
        }

        if (d1 == null || d2 == null) {
            return false;
        }

        if (d1.size() != d2.size()) {
            return false;
        }

        Enumeration<?> e1 = d1.keys();
        while (e1.hasMoreElements()) {
            Object key = e1.nextElement();
            Object value1 = d1.get(key);
            Object value2 = d2.get(key);
            if (value2 == null || !value1.equals(value2)) {
                return false;
            }
        }

        // don't need the opposite check as the Dictionaries have the
        // same size, so we've also covered all keys of d2 already.
        return true;
    }

    /**
     * Creates a comma separated list of all values held in the given
     * collection.
     *
     * @param c the collection to flatten; its elements are rendered via
     *          {@code String.valueOf}.
     * @return the comma separated string, empty when the collection is empty.
     * @since Ant 1.8.0
     */
    public static String flattenToString(Collection<?> c) {
        final StringBuilder sb = new StringBuilder();
        for (Object o : c) {
            if (sb.length() != 0) {
                sb.append(",");
            }
            sb.append(o);
        }
        return sb.toString();
    }

    /**
     * Dictionary does not know the putAll method. Please use Map.putAll().
     * @param m1 the to dictionary (receives the entries).
     * @param m2 the from dictionary (is not modified).
     * @since Ant 1.6
     * @deprecated since 1.6.x.
     */
    public static <K, V> void putAll(Dictionary<? super K, ? super V> m1, Dictionary<? extends K, ? extends V> m2) {
        for (Enumeration<? extends K> it = m2.keys(); it.hasMoreElements();) {
            K key = it.nextElement();
            m1.put(key, m2.get(key));
        }
    }

    /**
     * An empty enumeration. (Equivalent to Collections.emptyEnumeration(),
     * kept for backwards compatibility.)
     * @since Ant 1.6
     */
    public static final class EmptyEnumeration<E> implements Enumeration<E> {
        /** Constructor for the EmptyEnumeration */
        public EmptyEnumeration() {
        }

        /**
         * @return false always.
         */
        public boolean hasMoreElements() {
            return false;
        }

        /**
         * @return nothing.
         * @throws NoSuchElementException always.
         */
        public E nextElement() throws NoSuchElementException {
            throw new NoSuchElementException();
        }
    }

    /**
     * Append one enumeration to another.
     * Elements are evaluated lazily.
     * @param e1 the first enumeration.
     * @param e2 the subsequent enumeration.
     * @return an enumeration representing e1 followed by e2.
     * @since Ant 1.6.3
     */
    public static <E> Enumeration<E> append(Enumeration<E> e1, Enumeration<E> e2) {
        return new CompoundEnumeration<E>(e1, e2);
    }

    /**
     * Adapt the specified Iterator to the Enumeration interface.
     * @param iter the Iterator to adapt.
     * @return an Enumeration backed by (and consuming) the iterator.
     */
    public static <E> Enumeration<E> asEnumeration(final Iterator<E> iter) {
        return new Enumeration<E>() {
            public boolean hasMoreElements() {
                return iter.hasNext();
            }

            public E nextElement() {
                return iter.next();
            }
        };
    }

    /**
     * Adapt the specified Enumeration to the Iterator interface.
     * @param e the Enumeration to adapt.
     * @return a read-only Iterator ({@code remove()} is unsupported).
     */
    public static <E> Iterator<E> asIterator(final Enumeration<E> e) {
        return new Iterator<E>() {
            public boolean hasNext() {
                return e.hasMoreElements();
            }

            public E next() {
                return e.nextElement();
            }

            public void remove() {
                throw new UnsupportedOperationException();
            }
        };
    }

    /**
     * Returns a collection containing all elements of the iterator,
     * in iteration order.
     *
     * @param iter the iterator to drain; fully consumed on return.
     * @return a new mutable collection with the iterator's elements.
     * @since Ant 1.8.0
     */
    public static <T> Collection<T> asCollection(final Iterator<? extends T> iter) {
        List<T> l = new ArrayList<T>();
        while (iter.hasNext()) {
            l.add(iter.next());
        }
        return l;
    }

    /** Lazy concatenation of two enumerations. */
    private static final class CompoundEnumeration<E> implements Enumeration<E> {

        private final Enumeration<E> e1, e2;

        public CompoundEnumeration(Enumeration<E> e1, Enumeration<E> e2) {
            this.e1 = e1;
            this.e2 = e2;
        }

        public boolean hasMoreElements() {
            return e1.hasMoreElements() || e2.hasMoreElements();
        }

        public E nextElement() throws NoSuchElementException {
            if (e1.hasMoreElements()) {
                return e1.nextElement();
            } else {
                return e2.nextElement();
            }
        }
    }

    /**
     * Counts how often the given Object occurs in the given
     * collection using equals() for comparison.
     *
     * @param c the collection to scan; null is treated as empty.
     * @param o the object to count; may be null (counts null elements).
     * @return the number of occurrences.
     * @since Ant 1.8.0
     */
    public static int frequency(Collection<?> c, Object o) {
        // same as Collections.frequency introduced with JDK 1.5, but
        // null-safe with respect to the collection argument.
        if (c == null) {
            return 0;
        }
        int freq = 0;
        for (Object test : c) {
            if (o == null ? test == null : o.equals(test)) {
                freq++;
            }
        }
        return freq;
    }
}
| apache-2.0 |
bogdandrutu/opencensus-java | contrib/spring_sleuth_v1x/src/test/java/io/opencensus/contrib/spring/sleuth/v1x/OpenCensusSleuthTracerTest.java | 5900 | /*
* Copyright 2018, OpenCensus Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.opencensus.contrib.spring.sleuth.v1x;
import static com.google.common.truth.Truth.assertThat;
import java.util.Random;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.springframework.cloud.sleuth.DefaultSpanNamer;
import org.springframework.cloud.sleuth.NoOpSpanReporter;
import org.springframework.cloud.sleuth.Span;
import org.springframework.cloud.sleuth.TraceKeys;
import org.springframework.cloud.sleuth.Tracer;
import org.springframework.cloud.sleuth.log.NoOpSpanLogger;
import org.springframework.cloud.sleuth.sampler.AlwaysSampler;
/** Unit tests for {@link OpenCensusSleuthTracer}. */
@RunWith(JUnit4.class)
public class OpenCensusSleuthTracerTest {
  // Tracer under test. It is static, so span state is shared across tests;
  // every test must fully unwind the spans it creates (enforced below).
  private static final Tracer tracer =
      new OpenCensusSleuthTracer(
          new AlwaysSampler(),
          new Random(),
          new DefaultSpanNamer(),
          new NoOpSpanLogger(),
          new NoOpSpanReporter(),
          new TraceKeys());
  // Runs both before and after each test: fails fast if this test (or a
  // previous one) leaked a current span.
  @After
  @Before
  public void verifyNotTracing() {
    assertThat(tracer.isTracing()).isFalse();
  }
  // Closing the root span leaves no current span and returns no parent.
  @Test
  public void testRootSpanAndClose() {
    Span root = tracer.createSpan("root");
    assertCurrentSpanIs(root);
    assertThat(root.getSavedSpan()).isNull();
    Span parent = tracer.close(root);
    assertThat(parent).isNull();
  }
  // Closing spans in LIFO order restores each span's saved (parent) span.
  @Test
  public void testSpanStackAndClose() {
    Span[] spans = createSpansAndAssertCurrent(3);
    // pop the stack
    for (int i = spans.length - 1; i >= 0; i--) {
      assertCurrentSpanIs(spans[i]);
      Span parent = tracer.close(spans[i]);
      assertThat(parent).isEqualTo(spans[i].getSavedSpan());
    }
  }
  // Closing a span that is not current must not disturb the current span.
  @Test
  public void testSpanStackAndCloseOutOfOrder() {
    Span[] spans = createSpansAndAssertCurrent(3);
    // try to close a non-current span
    tracer.close(spans[spans.length - 2]);
    assertCurrentSpanIs(spans[spans.length - 1]);
    // pop the stack
    for (int i = spans.length - 1; i >= 0; i--) {
      tracer.close(spans[i]);
    }
  }
  // detach(null) is tolerated and returns null.
  @Test
  public void testDetachNull() {
    Span parent = tracer.detach(null);
    assertThat(parent).isNull();
  }
  // Detaching the root span clears the current span and returns no parent.
  @Test
  public void testRootSpanAndDetach() {
    Span root = tracer.createSpan("root");
    assertCurrentSpanIs(root);
    assertThat(root.getSavedSpan()).isNull();
    Span parent = tracer.detach(root);
    assertThat(parent).isNull();
  }
  // Detaching the top of the stack returns the next span down.
  @Test
  public void testSpanStackAndDetach() {
    Span[] spans = createSpansAndAssertCurrent(3);
    Span parent = tracer.detach(spans[spans.length - 1]);
    assertThat(parent).isEqualTo(spans[spans.length - 2]);
  }
  // Detaching a span that is not current must not disturb the current span.
  @Test
  public void testSpanStackAndDetachOutOfOrder() {
    Span[] spans = createSpansAndAssertCurrent(3);
    // try to detach a non-current span
    tracer.detach(spans[spans.length - 2]);
    assertCurrentSpanIs(spans[spans.length - 1]);
    Span parent = tracer.detach(spans[spans.length - 1]);
    assertThat(parent).isEqualTo(spans[spans.length - 2]);
  }
  // continueSpan(null) is tolerated and returns null.
  @Test
  public void testContinueNull() {
    Span span = tracer.continueSpan(null);
    assertThat(span).isNull();
  }
  // A detached root span can be continued, becoming current again.
  @Test
  public void testRootSpanAndContinue() {
    Span root = tracer.createSpan("root");
    assertCurrentSpanIs(root);
    tracer.detach(root);
    Span span = tracer.continueSpan(root);
    assertThat(span).isEqualTo(root);
    tracer.detach(span);
  }
  // Continuing a detached span restores it as current with its original parent saved.
  @Test
  public void testSpanStackAndContinue() {
    Span[] spans = createSpansAndAssertCurrent(3);
    Span original = tracer.getCurrentSpan();
    assertThat(original).isEqualTo(spans[spans.length - 1]);
    Span parent = tracer.detach(original);
    assertThat(parent).isEqualTo(spans[spans.length - 2]);
    assertThat(tracer.getCurrentSpan()).isNull();
    Span continued = tracer.continueSpan(original);
    assertCurrentSpanIs(continued);
    assertThat(continued.getSavedSpan()).isEqualTo(parent);
    assertThat(continued).isEqualTo(original);
    tracer.detach(continued);
  }
  // A span continued on top of a newly created root saves the new root as its
  // parent, not the parent it had when it was detached.
  @Test
  public void testSpanStackAndCreateAndContinue() {
    createSpansAndAssertCurrent(3);
    Span original = tracer.getCurrentSpan();
    tracer.detach(original);
    Span root = tracer.createSpan("root");
    assertCurrentSpanIs(root);
    Span continued = tracer.continueSpan(original);
    assertCurrentSpanIs(continued);
    assertThat(continued.getSavedSpan()).isEqualTo(root);
    assertThat(continued).isEqualTo(original);
    assertThat(continued.getSavedSpan()).isNotEqualTo(original.getSavedSpan());
    tracer.detach(continued);
  }
  // Verifies span and associated saved span, both via the tracer API and
  // directly against OpenCensusSleuthSpanContextHolder.
  private static void assertCurrentSpanIs(Span span) {
    assertThat(tracer.getCurrentSpan()).isEqualTo(span);
    assertThat(tracer.getCurrentSpan().getSavedSpan()).isEqualTo(span.getSavedSpan());
    assertThat(OpenCensusSleuthSpanContextHolder.getCurrentSpan()).isEqualTo(span);
    assertThat(OpenCensusSleuthSpanContextHolder.getCurrentSpan().getSavedSpan())
        .isEqualTo(span.getSavedSpan());
  }
  // Creates a chain of len nested spans (each the parent of the next) and
  // checks each becomes current as it is created. Returns them oldest-first.
  private static Span[] createSpansAndAssertCurrent(int len) {
    Span[] spans = new Span[len];
    Span current = null;
    for (int i = 0; i < len; i++) {
      current = tracer.createSpan("span" + i, current);
      spans[i] = current;
      assertCurrentSpanIs(current);
    }
    return spans;
  }
}
| apache-2.0 |
dslomov/bazel | src/tools/android/java/com/google/devtools/build/android/junctions/WindowsJunctionCreator.java | 2958 | // Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android.junctions;
import com.google.common.base.Preconditions;
import com.google.devtools.build.lib.windows.jni.WindowsFileOperations;
import java.io.IOException;
import java.nio.file.Path;
import java.util.LinkedHashMap;
import java.util.Map;
import javax.annotation.Nullable;
/**
* Junction creator implementation for Windows.
*
* <p>Creates a junction (or uses a cached one) for a path. If the path is a directory, the junction
* points to it, and the returned path is the junction's path. If the path is a file, the junction
* points to its parent, and the returned path is the file's path through the junction.
*
* <p>The `close` method deletes all junctions that this object created, along with the `dir`
* directory where the junctions are created. The purpose of this is to avoid other methods (such as
* ScopedTemporaryDirectory.close) to traverse these junctions believing they are regular
* directories and deleting files in them that are actually outside of the directory tree.
*/
public final class WindowsJunctionCreator implements JunctionCreator {
  private final Path dir;
  /** Directory -> junction links created so far; allocated on first use, semantically final. */
  private Map<Path, Path> junctions;
  /** Monotonic counter used to name junction links inside {@link #dir}. */
  private int nextJunctionName = 0;
  public WindowsJunctionCreator(Path dir) {
    this.dir = Preconditions.checkNotNull(dir);
  }
  @Nullable
  public Path create(@Nullable Path path) throws IOException {
    if (path == null) {
      return null;
    }
    if (junctions == null) {
      junctions = new LinkedHashMap<>();
    }
    Path absolute = path.toAbsolutePath();
    if (!absolute.toFile().isDirectory()) {
      // For a file: route its path through a junction to its parent directory
      // (creating the junction via the recursive call if needed).
      Path parent = absolute.getParent();
      return (parent == null) ? absolute : create(parent).resolve(absolute.getFileName());
    }
    // For a directory: reuse a cached junction or create and remember a fresh one.
    Path junction = junctions.get(absolute);
    if (junction == null) {
      junction = dir.resolve(Integer.toString(nextJunctionName++));
      WindowsFileOperations.createJunction(junction.toString(), absolute.toString());
      junctions.put(absolute, junction);
    }
    return junction;
  }
  @Override
  public void close() throws IOException {
    // Delete the junction links themselves (not their targets) before removing
    // the directory, so directory-tree deleters cannot follow them into the
    // directories they point at.
    if (junctions != null) {
      for (Path junction : junctions.values()) {
        junction.toFile().delete();
      }
    }
    dir.toFile().delete();
  }
}
| apache-2.0 |
HonzaKral/elasticsearch | x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/DataFrameAnalyticsStateTests.java | 3711 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ml.dataframe;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class DataFrameAnalyticsStateTests extends ESTestCase {
    // Each lowercase wire name must parse to its corresponding enum constant.
    public void testFromString() {
        assertThat(DataFrameAnalyticsState.fromString("starting"), equalTo(DataFrameAnalyticsState.STARTING));
        assertThat(DataFrameAnalyticsState.fromString("started"), equalTo(DataFrameAnalyticsState.STARTED));
        assertThat(DataFrameAnalyticsState.fromString("reindexing"), equalTo(DataFrameAnalyticsState.REINDEXING));
        assertThat(DataFrameAnalyticsState.fromString("analyzing"), equalTo(DataFrameAnalyticsState.ANALYZING));
        assertThat(DataFrameAnalyticsState.fromString("stopping"), equalTo(DataFrameAnalyticsState.STOPPING));
        assertThat(DataFrameAnalyticsState.fromString("stopped"), equalTo(DataFrameAnalyticsState.STOPPED));
        assertThat(DataFrameAnalyticsState.fromString("failed"), equalTo(DataFrameAnalyticsState.FAILED));
    }
    // toString() must produce the lowercase wire name for every state.
    public void testToString() {
        assertThat(DataFrameAnalyticsState.STARTING.toString(), equalTo("starting"));
        assertThat(DataFrameAnalyticsState.STARTED.toString(), equalTo("started"));
        assertThat(DataFrameAnalyticsState.REINDEXING.toString(), equalTo("reindexing"));
        assertThat(DataFrameAnalyticsState.ANALYZING.toString(), equalTo("analyzing"));
        assertThat(DataFrameAnalyticsState.STOPPING.toString(), equalTo("stopping"));
        assertThat(DataFrameAnalyticsState.STOPPED.toString(), equalTo("stopped"));
        assertThat(DataFrameAnalyticsState.FAILED.toString(), equalTo("failed"));
    }
    // When writing to a node older than 7.5, STARTING is downgraded to STOPPED
    // (pre-7.5 nodes do not know the STARTING constant).
    public void testWriteStartingStateToPre75() throws IOException {
        StreamOutput streamOutput = mock(StreamOutput.class);
        when(streamOutput.getVersion()).thenReturn(Version.V_7_4_1);
        DataFrameAnalyticsState.STARTING.writeTo(streamOutput);
        verify(streamOutput, times(1)).writeEnum(DataFrameAnalyticsState.STOPPED);
    }
    // From 7.5 onwards, STARTING is serialized as itself.
    public void testWriteStartingStateToPost75() throws IOException {
        StreamOutput streamOutput = mock(StreamOutput.class);
        when(streamOutput.getVersion()).thenReturn(Version.V_7_5_0);
        DataFrameAnalyticsState.STARTING.writeTo(streamOutput);
        verify(streamOutput, times(1)).writeEnum(DataFrameAnalyticsState.STARTING);
    }
    // isAnyOf is true only when the receiver equals one of the supplied states;
    // an empty argument list yields false.
    public void testIsAnyOf() {
        assertThat(DataFrameAnalyticsState.STARTED.isAnyOf(), is(false));
        assertThat(DataFrameAnalyticsState.STARTED.isAnyOf(DataFrameAnalyticsState.STARTED), is(true));
        assertThat(DataFrameAnalyticsState.STARTED.isAnyOf(DataFrameAnalyticsState.ANALYZING, DataFrameAnalyticsState.STOPPED), is(false));
        assertThat(DataFrameAnalyticsState.STARTED.isAnyOf(DataFrameAnalyticsState.STARTED, DataFrameAnalyticsState.STOPPED), is(true));
        assertThat(DataFrameAnalyticsState.ANALYZING.isAnyOf(DataFrameAnalyticsState.STARTED, DataFrameAnalyticsState.STOPPED), is(false));
        assertThat(DataFrameAnalyticsState.ANALYZING.isAnyOf(DataFrameAnalyticsState.ANALYZING, DataFrameAnalyticsState.FAILED), is(true));
    }
}
| apache-2.0 |
wangcy6/storm_app | frame/kafka-0.11.0/kafka-0.11.0.1-src/clients/src/main/java/org/apache/kafka/common/requests/JoinGroupResponse.java | 5984 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.requests;
import org.apache.kafka.common.protocol.ApiKeys;
import org.apache.kafka.common.protocol.Errors;
import org.apache.kafka.common.protocol.types.Struct;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class JoinGroupResponse extends AbstractResponse {
    // Schema field names used when (de)serializing this response.
    private static final String ERROR_CODE_KEY_NAME = "error_code";
    /**
     * Possible error codes:
     *
     * COORDINATOR_LOAD_IN_PROGRESS (14)
     * GROUP_COORDINATOR_NOT_AVAILABLE (15)
     * NOT_COORDINATOR (16)
     * INCONSISTENT_GROUP_PROTOCOL (23)
     * UNKNOWN_MEMBER_ID (25)
     * INVALID_SESSION_TIMEOUT (26)
     * GROUP_AUTHORIZATION_FAILED (30)
     */
    private static final String GENERATION_ID_KEY_NAME = "generation_id";
    private static final String GROUP_PROTOCOL_KEY_NAME = "group_protocol";
    private static final String LEADER_ID_KEY_NAME = "leader_id";
    private static final String MEMBER_ID_KEY_NAME = "member_id";
    private static final String MEMBERS_KEY_NAME = "members";
    private static final String MEMBER_METADATA_KEY_NAME = "member_metadata";
    // Sentinels used when the corresponding value is not (yet) known.
    public static final String UNKNOWN_PROTOCOL = "";
    public static final int UNKNOWN_GENERATION_ID = -1;
    public static final String UNKNOWN_MEMBER_ID = "";
    private final int throttleTimeMs;
    private final Errors error;
    private final int generationId;
    private final String groupProtocol;
    private final String memberId;
    private final String leaderId;
    // Member id -> raw member metadata bytes for each group member.
    private final Map<String, ByteBuffer> members;
    /** Creates a response with the default throttle time. */
    public JoinGroupResponse(Errors error,
                             int generationId,
                             String groupProtocol,
                             String memberId,
                             String leaderId,
                             Map<String, ByteBuffer> groupMembers) {
        this(DEFAULT_THROTTLE_TIME, error, generationId, groupProtocol, memberId, leaderId, groupMembers);
    }
    /**
     * Creates a response.
     *
     * @param throttleTimeMs broker-reported throttle time in milliseconds
     * @param error error returned for the join attempt (see class constants above)
     * @param generationId group generation assigned by the coordinator
     * @param groupProtocol group protocol selected by the coordinator
     * @param memberId member id assigned to this member
     * @param leaderId member id of the elected group leader
     * @param groupMembers member id -> member metadata for the group's members
     */
    public JoinGroupResponse(int throttleTimeMs,
                             Errors error,
                             int generationId,
                             String groupProtocol,
                             String memberId,
                             String leaderId,
                             Map<String, ByteBuffer> groupMembers) {
        this.throttleTimeMs = throttleTimeMs;
        this.error = error;
        this.generationId = generationId;
        this.groupProtocol = groupProtocol;
        this.memberId = memberId;
        this.leaderId = leaderId;
        this.members = groupMembers;
    }
    /** Deserializes a response from the given wire-format struct. */
    public JoinGroupResponse(Struct struct) {
        // Older versions of the schema have no throttle time field.
        this.throttleTimeMs = struct.hasField(THROTTLE_TIME_KEY_NAME) ? struct.getInt(THROTTLE_TIME_KEY_NAME) : DEFAULT_THROTTLE_TIME;
        members = new HashMap<>();
        for (Object memberDataObj : struct.getArray(MEMBERS_KEY_NAME)) {
            Struct memberData = (Struct) memberDataObj;
            String memberId = memberData.getString(MEMBER_ID_KEY_NAME);
            ByteBuffer memberMetadata = memberData.getBytes(MEMBER_METADATA_KEY_NAME);
            members.put(memberId, memberMetadata);
        }
        error = Errors.forCode(struct.getShort(ERROR_CODE_KEY_NAME));
        generationId = struct.getInt(GENERATION_ID_KEY_NAME);
        groupProtocol = struct.getString(GROUP_PROTOCOL_KEY_NAME);
        memberId = struct.getString(MEMBER_ID_KEY_NAME);
        leaderId = struct.getString(LEADER_ID_KEY_NAME);
    }
    /** Throttle time in ms, or DEFAULT_THROTTLE_TIME if the version lacks the field. */
    public int throttleTimeMs() {
        return throttleTimeMs;
    }
    public Errors error() {
        return error;
    }
    public int generationId() {
        return generationId;
    }
    public String groupProtocol() {
        return groupProtocol;
    }
    public String memberId() {
        return memberId;
    }
    public String leaderId() {
        return leaderId;
    }
    /** True if this member was elected leader (its member id equals the leader id). */
    public boolean isLeader() {
        return memberId.equals(leaderId);
    }
    /** Member id -> raw member metadata for each group member. */
    public Map<String, ByteBuffer> members() {
        return members;
    }
    /** Parses a response of the given schema version from the buffer. */
    public static JoinGroupResponse parse(ByteBuffer buffer, short version) {
        return new JoinGroupResponse(ApiKeys.JOIN_GROUP.parseResponse(version, buffer));
    }
    /** Serializes this response to a struct of the given schema version. */
    @Override
    protected Struct toStruct(short version) {
        Struct struct = new Struct(ApiKeys.JOIN_GROUP.responseSchema(version));
        if (struct.hasField(THROTTLE_TIME_KEY_NAME))
            struct.set(THROTTLE_TIME_KEY_NAME, throttleTimeMs);
        struct.set(ERROR_CODE_KEY_NAME, error.code());
        struct.set(GENERATION_ID_KEY_NAME, generationId);
        struct.set(GROUP_PROTOCOL_KEY_NAME, groupProtocol);
        struct.set(MEMBER_ID_KEY_NAME, memberId);
        struct.set(LEADER_ID_KEY_NAME, leaderId);
        List<Struct> memberArray = new ArrayList<>();
        for (Map.Entry<String, ByteBuffer> entries : members.entrySet()) {
            Struct memberData = struct.instance(MEMBERS_KEY_NAME);
            memberData.set(MEMBER_ID_KEY_NAME, entries.getKey());
            memberData.set(MEMBER_METADATA_KEY_NAME, entries.getValue());
            memberArray.add(memberData);
        }
        struct.set(MEMBERS_KEY_NAME, memberArray.toArray());
        return struct;
    }
}
| apache-2.0 |
facebook/buck | src/com/facebook/buck/jvm/java/abi/source/TreeBackedProcessingEnvironment.java | 2540 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.jvm.java.abi.source;
import com.sun.source.util.Trees;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import javax.annotation.processing.Filer;
import javax.annotation.processing.Messager;
import javax.annotation.processing.ProcessingEnvironment;
import javax.lang.model.SourceVersion;
import javax.lang.model.util.Elements;
import javax.lang.model.util.Types;
/**
 * A {@link ProcessingEnvironment} that wraps javac's environment, substituting tree-backed
 * elements, types, messager and filer from the given {@link FrontendOnlyJavacTask} while
 * delegating source version and locale to javac.
 */
class TreeBackedProcessingEnvironment implements ProcessingEnvironment {
  private final FrontendOnlyJavacTask task;
  // Underlying javac environment; delegated to for source version and locale.
  private final ProcessingEnvironment javacProcessingEnvironment;
  private final TreeBackedMessager messager;
  private final TreeBackedFiler filer;
  // Copy of javac's options plus an ABI-generation marker (see constructor).
  private final Map<String, String> options = new HashMap<>();
  public TreeBackedProcessingEnvironment(
      FrontendOnlyJavacTask task, ProcessingEnvironment javacProcessingEnvironment) {
    this.task = task;
    this.javacProcessingEnvironment = javacProcessingEnvironment;
    messager = new TreeBackedMessager(task, javacProcessingEnvironment.getMessager());
    filer = new TreeBackedFiler(task, javacProcessingEnvironment.getFiler());
    options.putAll(javacProcessingEnvironment.getOptions());
    // Marker option so annotation processors can detect that this run generates an ABI.
    options.put("com.facebook.buck.java.generating_abi", "true");
  }
  @Override
  public Map<String, String> getOptions() {
    return options;
  }
  @Override
  public Messager getMessager() {
    return messager;
  }
  @Override
  public Filer getFiler() {
    return filer;
  }
  @Override
  public Elements getElementUtils() {
    return task.getElements();
  }
  @Override
  public Types getTypeUtils() {
    return task.getTypes();
  }
  // Not part of ProcessingEnvironment; exposes the task's Trees utility.
  public Trees getTreeUtils() {
    return task.getTrees();
  }
  @Override
  public SourceVersion getSourceVersion() {
    return javacProcessingEnvironment.getSourceVersion();
  }
  @Override
  public Locale getLocale() {
    return javacProcessingEnvironment.getLocale();
  }
}
| apache-2.0 |
pkocandr/indy | addons/promote/ftests/src/main/java/org/commonjava/indy/promote/ftest/rule/NoPreExistingPaths_RuleTest.java | 4907 | /**
* Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.commonjava.indy.promote.ftest.rule;
import org.apache.commons.io.IOUtils;
import org.commonjava.indy.ftest.core.category.EventDependent;
import org.commonjava.indy.model.core.Group;
import org.commonjava.indy.model.core.HostedRepository;
import org.commonjava.indy.promote.model.GroupPromoteRequest;
import org.commonjava.indy.promote.model.GroupPromoteResult;
import org.commonjava.indy.promote.model.ValidationResult;
import org.commonjava.indy.promote.model.ValidationRuleSet;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.Map;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
public class NoPreExistingPaths_RuleTest
extends AbstractValidationRuleTest<Group>
{
private static final String RULE = "maven-no-pre-existing-paths.groovy";
private static final String PREFIX = "no-pre-existing-paths/";
private HostedRepository otherSource;
private Group other;
@Test
@Category( EventDependent.class )
public void run()
throws Exception
{
String invalid = "org/foo/invalid/1/invalid-1.pom";
String valid = "org/foo/valid/1.1/valid-1.1.pom";
String content = "this is some content";
deployResource( other.getKey(), invalid, PREFIX + "invalid.pom.xml");
try(InputStream stream = client.content().get( other.getKey(), invalid ))
{
String retrieved = IOUtils.toString( stream );
assertThat( invalid + " invalid from: " + other.getKey(), retrieved,
equalTo( resourceToString( PREFIX + "invalid.pom.xml" ) ) );
}
deployResource( invalid, PREFIX + "invalid.pom.xml");
deployResource( valid, PREFIX + "valid.pom.xml" );
waitForEventPropagation();
GroupPromoteRequest request = new GroupPromoteRequest( source.getKey(), target.getName() );
GroupPromoteResult result = module.promoteToGroup( request );
assertThat( result, notNullValue() );
ValidationResult validations = result.getValidations();
assertThat( validations, notNullValue() );
Map<String, String> validatorErrors = validations.getValidatorErrors();
assertThat( validatorErrors, notNullValue() );
System.out.println(validatorErrors);
String errors = validatorErrors.get( RULE );
assertThat( errors, notNullValue() );
System.out.println(validatorErrors);
assertThat( errors.contains( valid ), equalTo( false ) );
assertThat( errors.contains( invalid ), equalTo( true ) );
}
public NoPreExistingPaths_RuleTest()
{
super( Group.class );
}
@Override
protected String getRuleScriptFile()
{
return RULE;
}
@Override
protected String getRuleScriptContent()
throws IOException
{
String path = "promote/rules/" + RULE;
return readTestResource( path );
}
@Override
protected ValidationRuleSet getRuleSet()
{
ValidationRuleSet ruleSet = new ValidationRuleSet();
ruleSet.setName( "test" );
ruleSet.setStoreKeyPattern( "group:target" );
ruleSet.setRuleNames( Collections.singletonList( getRuleScriptFile() ) );
ruleSet.setValidationParameters( Collections.singletonMap( "availableInStores", "group:other" ) );
return ruleSet;
}
@Override
public void start()
throws Throwable
{
super.start();
otherSource = new HostedRepository( "otherSource" );
otherSource = client.stores().create( otherSource, "Creating secondary content source", HostedRepository.class );
other = new Group( "other", otherSource.getKey() );
other = client.stores().create( other, "Creating secondary content group", Group.class );
Logger logger = LoggerFactory.getLogger( getClass() );
logger.info( "{} contains members: {}", other, other.getConstituents() );
}
}
| apache-2.0 |
gstevey/gradle | subprojects/model-core/src/main/java/org/gradle/model/internal/inspect/RuleExtractorUtils.java | 2278 | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.model.internal.inspect;
import org.gradle.model.internal.core.ModelAction;
import org.gradle.model.internal.core.ModelActionRole;
import org.gradle.model.internal.core.ModelPath;
import org.gradle.model.internal.core.ModelSpec;
import org.gradle.model.internal.core.MutableModelNode;
import org.gradle.model.internal.registry.ModelReferenceNode;
import org.gradle.model.internal.registry.ModelRegistry;
import javax.annotation.Nullable;
/**
 * Static helpers for applying extracted model rule methods to a {@link ModelRegistry}.
 */
public class RuleExtractorUtils {
    private RuleExtractorUtils() {
        // Static utility class; not instantiable.
    }

    /**
     * Contextualizes the given rule action and registers it against either the scope element
     * itself or all of its non-reference descendants, depending on the application scope.
     */
    public static void configureRuleAction(MethodModelRuleApplicationContext context, RuleApplicationScope ruleApplicationScope, ModelActionRole role, MethodRuleAction ruleAction) {
        ModelAction action = context.contextualize(ruleAction);
        ModelRegistry registry = context.getRegistry();
        switch (ruleApplicationScope) {
            case SELF:
                registry.configure(role, action);
                break;
            case DESCENDANTS:
                registry.configureMatching(new NonReferenceDescendantsSpec(context.getScope()), role, action);
                break;
            default:
                throw new AssertionError("Unknown rule application scope: " + ruleApplicationScope);
        }
    }

    /**
     * Matches every descendant of the scope node that is not a reference node.
     */
    private static class NonReferenceDescendantsSpec extends ModelSpec {
        private final ModelPath scope;

        private NonReferenceDescendantsSpec(ModelPath scope) {
            this.scope = scope;
        }

        @Nullable
        @Override
        public ModelPath getAncestor() {
            return scope;
        }

        @Override
        public boolean matches(MutableModelNode node) {
            return !(node instanceof ModelReferenceNode);
        }
    }
}
| apache-2.0 |
joansmith/bnd | biz.aQute.bndlib/src/aQute/bnd/build/model/conversions/CollectionFormatter.java | 2716 | package aQute.bnd.build.model.conversions;
import java.util.Collection;
public class CollectionFormatter<T> implements Converter<String,Collection< ? extends T>> {
private final String separator;
private final Converter<String, ? super T> itemFormatter;
private final String emptyOutput;
private final boolean leadingSpace;
private final String initial;
private final String suffix;
public CollectionFormatter(String separator) {
this(separator, (String) null);
}
public CollectionFormatter(String separator, String emptyOutput) {
this(separator, new DefaultFormatter(), emptyOutput);
}
public CollectionFormatter(String separator, Converter<String, ? super T> itemFormatter) {
this(separator, itemFormatter, null);
}
public CollectionFormatter(String separator, Converter<String, ? super T> itemFormatter, String emptyOutput) {
this(separator, itemFormatter, emptyOutput, false, "\\\n\t", "");
}
public CollectionFormatter(String separator, Converter<String, ? super T> itemFormatter, String emptyOutput,
String prefix, String suffix) {
this(separator, itemFormatter, emptyOutput, false, prefix, suffix);
}
/**
* @param separator Separator between items
* @param itemFormatter Formatter for each item
* @param emptyOutput Output to produce for empty inputs
* @param leadingSpace Whether to lead with a space before the first item
* @param prefix Prefix for the first item in lists containing more than one
* items.
* @param suffix Suffix to add at the end of the list
*/
public CollectionFormatter(String separator, Converter<String, ? super T> itemFormatter, String emptyOutput,
boolean leadingSpace, String prefix, String suffix) {
this.separator = separator;
this.itemFormatter = itemFormatter;
this.emptyOutput = emptyOutput;
this.leadingSpace = leadingSpace;
this.initial = prefix;
this.suffix = suffix;
}
public String convert(Collection< ? extends T> input) throws IllegalArgumentException {
String result = null;
if (input != null) {
if (input.isEmpty()) {
result = emptyOutput;
} else {
StringBuilder buffer = new StringBuilder();
if (leadingSpace)
buffer.append(' ');
if (input.size() == 1) {
T item = input.iterator().next();
buffer.append(itemFormatter.convert(item));
} else {
String del = initial == null ? "" : initial;
for (T item : input) {
buffer.append(del);
buffer.append(itemFormatter.convert(item));
del = separator;
}
}
if (suffix != null)
buffer.append(suffix);
result = buffer.toString();
}
}
return result;
}
@Override
public String error(String msg) {
return msg;
}
} | apache-2.0 |
shyTNT/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201505/TargetingValue.java | 4372 | /**
* TargetingValue.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.dfp.axis.v201505;
/**
* Contains a {@link Targeting} value.
* <p>
* <b>This object is experimental!
* <code>TargetingValue</code> is an experimental, innovative,
* and rapidly
* changing new feature for DFP. Unfortunately, being on
* the bleeding edge means that we may make
* backwards-incompatible changes to
* <code>TargetingValue</code>. We will inform the community
* when this feature
* is no longer experimental.</b>
*/
// NOTE(review): auto-generated by Apache Axis 1.4 WSDL2Java; hand edits here are normally
// overwritten on regeneration.
public class TargetingValue extends com.google.api.ads.dfp.axis.v201505.ObjectValue implements java.io.Serializable {
    /* The {@code Targeting} value. */
    private com.google.api.ads.dfp.axis.v201505.Targeting value;
    public TargetingValue() {
    }
    public TargetingValue(
           com.google.api.ads.dfp.axis.v201505.Targeting value) {
           this.value = value;
    }
    /**
     * Gets the {@code Targeting} value.
     *
     * @return the wrapped {@code Targeting} value
     */
    public com.google.api.ads.dfp.axis.v201505.Targeting getValue() {
        return value;
    }
    /**
     * Sets the {@code Targeting} value.
     *
     * @param value the {@code Targeting} value to wrap
     */
    public void setValue(com.google.api.ads.dfp.axis.v201505.Targeting value) {
        this.value = value;
    }
    // Cycle guard used by the generated equals() to avoid infinite recursion on
    // cyclic object graphs.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof TargetingValue)) return false;
        TargetingValue other = (TargetingValue) obj;
        // (Redundant: instanceof above already rejects null.)
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) &&
            ((this.value==null && other.getValue()==null) ||
             (this.value!=null &&
              this.value.equals(other.getValue())));
        __equalsCalc = null;
        return _equals;
    }
    // Re-entrancy guard used by the generated hashCode() for cyclic object graphs.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getValue() != null) {
            _hashCode += getValue().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }
    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(TargetingValue.class, true);
    static {
        // Maps this bean and its "value" field to the corresponding XML schema elements.
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201505", "TargetingValue"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("value");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201505", "value"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201505", "Targeting"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }
    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }
    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-indexing/v3/1.31.0/com/google/api/services/indexing/v3/model/PublishUrlNotificationResponse.java | 2419 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.indexing.v3.model;
/**
* Output for PublishUrlNotification
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Indexing API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class PublishUrlNotificationResponse extends com.google.api.client.json.GenericJson {
  /**
   * Description of the notification events received for this URL.
   * The value may be {@code null}.
   */
  // @Key marks the field for inclusion in JSON (de)serialization by the
  // Google HTTP client; the JSON member name defaults to the field name.
  @com.google.api.client.util.Key
  private UrlNotificationMetadata urlNotificationMetadata;
  /**
   * Description of the notification events received for this URL.
   * @return value or {@code null} for none
   */
  public UrlNotificationMetadata getUrlNotificationMetadata() {
    return urlNotificationMetadata;
  }
  /**
   * Description of the notification events received for this URL.
   * @param urlNotificationMetadata urlNotificationMetadata or {@code null} for none
   */
  public PublishUrlNotificationResponse setUrlNotificationMetadata(UrlNotificationMetadata urlNotificationMetadata) {
    this.urlNotificationMetadata = urlNotificationMetadata;
    return this;
  }
  // Covariant override so chained generic-data puts keep this concrete type.
  @Override
  public PublishUrlNotificationResponse set(String fieldName, Object value) {
    return (PublishUrlNotificationResponse) super.set(fieldName, value);
  }
  // Covariant override of GenericJson.clone() returning the concrete type.
  @Override
  public PublishUrlNotificationResponse clone() {
    return (PublishUrlNotificationResponse) super.clone();
  }
}
| apache-2.0 |
jomarko/kie-wb-common | kie-wb-common-forms/kie-wb-common-forms-core/kie-wb-common-forms-adf/kie-wb-common-forms-adf-engine/kie-wb-common-forms-adf-engine-api/src/test/java/org/kie/workbench/common/forms/adf/engine/shared/formGeneration/processing/fields/fieldInitializers/slider/DoubleSliderFieldInitializerTest.java | 1364 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.forms.adf.engine.shared.formGeneration.processing.fields.fieldInitializers.slider;
import org.junit.runner.RunWith;
import org.kie.workbench.common.forms.fields.shared.fieldTypes.basic.slider.definition.DoubleSliderDefinition;
import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public class DoubleSliderFieldInitializerTest extends AbstractSliderFieldInitializerTest<DoubleSliderFieldInitializer, DoubleSliderDefinition, Double> {
    // Concrete test binding for the Double-valued slider: the actual test
    // scenarios live in AbstractSliderFieldInitializerTest; this subclass only
    // supplies the initializer and field-definition instances under test.
    @Override
    DoubleSliderFieldInitializer getInitializerInstance() {
        return new DoubleSliderFieldInitializer();
    }
    // Fresh field definition per invocation so tests cannot share state.
    @Override
    DoubleSliderDefinition getFieldInstance() {
        return new DoubleSliderDefinition();
    }
}
| apache-2.0 |
jeorme/OG-Platform | projects/OG-Component/src/main/java/com/opengamma/component/factory/loader/AbstractSecurityEnhancerComponentFactory.java | 10255 | /**
* Copyright (C) 2012 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.component.factory.loader;
import java.util.LinkedHashMap;
import java.util.Map;
import org.joda.beans.Bean;
import org.joda.beans.BeanBuilder;
import org.joda.beans.BeanDefinition;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import com.opengamma.component.ComponentInfo;
import com.opengamma.component.ComponentRepository;
import com.opengamma.component.factory.AbstractComponentFactory;
import com.opengamma.component.factory.ComponentInfoAttributes;
import com.opengamma.provider.security.SecurityEnhancer;
import com.opengamma.provider.security.impl.DataSecurityEnhancerResource;
import com.opengamma.provider.security.impl.RemoteSecurityEnhancer;
/**
* Component factory providing the {@code SecurityEnhancer}.
* <p>
* This abstract implementation reduces implementation effort for subclasses.
*/
@BeanDefinition
public abstract class AbstractSecurityEnhancerComponentFactory extends AbstractComponentFactory {
  /**
   * The classifier that the factory should publish under.
   */
  @PropertyDefinition(validate = "notNull")
  private String _classifier;
  /**
   * The flag determining whether the component should be published by REST (default false).
   */
  @PropertyDefinition
  private boolean _publishRest;
  //-------------------------------------------------------------------------
  /**
   * Initializes the security enhancer, setting up component information and REST.
   * Override using {@link #createSecurityEnhancer(ComponentRepository)}.
   *
   * @param repo the component repository, not null
   * @param configuration the remaining configuration, not null
   */
  @Override
  public void init(ComponentRepository repo, LinkedHashMap<String, String> configuration) throws Exception {
    final SecurityEnhancer loader = createSecurityEnhancer(repo);
    final ComponentInfo info = new ComponentInfo(SecurityEnhancer.class, getClassifier());
    info.addAttribute(ComponentInfoAttributes.LEVEL, 1);
    // Advertise the Java remote-client class so remote repositories know how to connect.
    info.addAttribute(ComponentInfoAttributes.REMOTE_CLIENT_JAVA, RemoteSecurityEnhancer.class);
    repo.registerComponent(info, loader);
    if (isPublishRest()) {
      // Expose the enhancer over REST only when explicitly enabled.
      repo.getRestComponents().publish(info, new DataSecurityEnhancerResource(loader));
    }
  }
  /**
   * Creates the security enhancer, without registering it.
   *
   * @param repo the component repository, only used to register secondary items like lifecycle, not null
   * @return the security enhancer, not null
   */
  protected abstract SecurityEnhancer createSecurityEnhancer(ComponentRepository repo);
  // NOTE(review): everything between the AUTOGENERATED markers is produced by the
  // Joda-Beans code generator; do not edit by hand - regenerate instead.
  //------------------------- AUTOGENERATED START -------------------------
  ///CLOVER:OFF
  /**
   * The meta-bean for {@code AbstractSecurityEnhancerComponentFactory}.
   * @return the meta-bean, not null
   */
  public static AbstractSecurityEnhancerComponentFactory.Meta meta() {
    return AbstractSecurityEnhancerComponentFactory.Meta.INSTANCE;
  }
  static {
    JodaBeanUtils.registerMetaBean(AbstractSecurityEnhancerComponentFactory.Meta.INSTANCE);
  }
  @Override
  public AbstractSecurityEnhancerComponentFactory.Meta metaBean() {
    return AbstractSecurityEnhancerComponentFactory.Meta.INSTANCE;
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the classifier that the factory should publish under.
   * @return the value of the property, not null
   */
  public String getClassifier() {
    return _classifier;
  }
  /**
   * Sets the classifier that the factory should publish under.
   * @param classifier the new value of the property, not null
   */
  public void setClassifier(String classifier) {
    JodaBeanUtils.notNull(classifier, "classifier");
    this._classifier = classifier;
  }
  /**
   * Gets the the {@code classifier} property.
   * @return the property, not null
   */
  public final Property<String> classifier() {
    return metaBean().classifier().createProperty(this);
  }
  //-----------------------------------------------------------------------
  /**
   * Gets the flag determining whether the component should be published by REST (default false).
   * @return the value of the property
   */
  public boolean isPublishRest() {
    return _publishRest;
  }
  /**
   * Sets the flag determining whether the component should be published by REST (default false).
   * @param publishRest the new value of the property
   */
  public void setPublishRest(boolean publishRest) {
    this._publishRest = publishRest;
  }
  /**
   * Gets the the {@code publishRest} property.
   * @return the property, not null
   */
  public final Property<Boolean> publishRest() {
    return metaBean().publishRest().createProperty(this);
  }
  //-----------------------------------------------------------------------
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      AbstractSecurityEnhancerComponentFactory other = (AbstractSecurityEnhancerComponentFactory) obj;
      return JodaBeanUtils.equal(getClassifier(), other.getClassifier()) &&
          (isPublishRest() == other.isPublishRest()) &&
          super.equals(obj);
    }
    return false;
  }
  @Override
  public int hashCode() {
    int hash = 7;
    hash = hash * 31 + JodaBeanUtils.hashCode(getClassifier());
    hash = hash * 31 + JodaBeanUtils.hashCode(isPublishRest());
    return hash ^ super.hashCode();
  }
  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(96);
    buf.append("AbstractSecurityEnhancerComponentFactory{");
    int len = buf.length();
    toString(buf);
    if (buf.length() > len) {
      buf.setLength(buf.length() - 2);
    }
    buf.append('}');
    return buf.toString();
  }
  @Override
  protected void toString(StringBuilder buf) {
    super.toString(buf);
    buf.append("classifier").append('=').append(JodaBeanUtils.toString(getClassifier())).append(',').append(' ');
    buf.append("publishRest").append('=').append(JodaBeanUtils.toString(isPublishRest())).append(',').append(' ');
  }
  //-----------------------------------------------------------------------
  /**
   * The meta-bean for {@code AbstractSecurityEnhancerComponentFactory}.
   */
  public static class Meta extends AbstractComponentFactory.Meta {
    /**
     * The singleton instance of the meta-bean.
     */
    static final Meta INSTANCE = new Meta();
    /**
     * The meta-property for the {@code classifier} property.
     */
    private final MetaProperty<String> _classifier = DirectMetaProperty.ofReadWrite(
        this, "classifier", AbstractSecurityEnhancerComponentFactory.class, String.class);
    /**
     * The meta-property for the {@code publishRest} property.
     */
    private final MetaProperty<Boolean> _publishRest = DirectMetaProperty.ofReadWrite(
        this, "publishRest", AbstractSecurityEnhancerComponentFactory.class, Boolean.TYPE);
    /**
     * The meta-properties.
     */
    private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
        this, (DirectMetaPropertyMap) super.metaPropertyMap(),
        "classifier",
        "publishRest");
    /**
     * Restricted constructor.
     */
    protected Meta() {
    }
    @Override
    protected MetaProperty<?> metaPropertyGet(String propertyName) {
      switch (propertyName.hashCode()) {
        case -281470431: // classifier
          return _classifier;
        case -614707837: // publishRest
          return _publishRest;
      }
      return super.metaPropertyGet(propertyName);
    }
    @Override
    public BeanBuilder<? extends AbstractSecurityEnhancerComponentFactory> builder() {
      throw new UnsupportedOperationException("AbstractSecurityEnhancerComponentFactory is an abstract class");
    }
    @Override
    public Class<? extends AbstractSecurityEnhancerComponentFactory> beanType() {
      return AbstractSecurityEnhancerComponentFactory.class;
    }
    @Override
    public Map<String, MetaProperty<?>> metaPropertyMap() {
      return _metaPropertyMap$;
    }
    //-----------------------------------------------------------------------
    /**
     * The meta-property for the {@code classifier} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<String> classifier() {
      return _classifier;
    }
    /**
     * The meta-property for the {@code publishRest} property.
     * @return the meta-property, not null
     */
    public final MetaProperty<Boolean> publishRest() {
      return _publishRest;
    }
    //-----------------------------------------------------------------------
    @Override
    protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
      switch (propertyName.hashCode()) {
        case -281470431: // classifier
          return ((AbstractSecurityEnhancerComponentFactory) bean).getClassifier();
        case -614707837: // publishRest
          return ((AbstractSecurityEnhancerComponentFactory) bean).isPublishRest();
      }
      return super.propertyGet(bean, propertyName, quiet);
    }
    @Override
    protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
      switch (propertyName.hashCode()) {
        case -281470431: // classifier
          ((AbstractSecurityEnhancerComponentFactory) bean).setClassifier((String) newValue);
          return;
        case -614707837: // publishRest
          ((AbstractSecurityEnhancerComponentFactory) bean).setPublishRest((Boolean) newValue);
          return;
      }
      super.propertySet(bean, propertyName, newValue, quiet);
    }
    @Override
    protected void validate(Bean bean) {
      JodaBeanUtils.notNull(((AbstractSecurityEnhancerComponentFactory) bean)._classifier, "classifier");
      super.validate(bean);
    }
  }
  ///CLOVER:ON
  //-------------------------- AUTOGENERATED END --------------------------
}
| apache-2.0 |
mirego/j2objc | jre_emul/android/platform/libcore/ojluni/src/main/java/java/lang/reflect/Modifier.java | 17647 | /*
* Copyright (C) 2014 The Android Open Source Project
* Copyright (c) 1996, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.lang.reflect;
/**
* The Modifier class provides {@code static} methods and
* constants to decode class and member access modifiers. The sets of
* modifiers are represented as integers with distinct bit positions
* representing different modifiers. The values for the constants
* representing the modifiers are taken from the tables in sections 4.1, 4.4, 4.5, and 4.7 of
* <cite>The Java™ Virtual Machine Specification</cite>.
*
* @see Class#getModifiers()
* @see Member#getModifiers()
*
* @author Nakul Saraiya
* @author Kenneth Russell
*/
public class Modifier {
    /**
     * Return {@code true} if the integer argument includes the
     * {@code public} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code public} modifier; {@code false} otherwise.
     */
    public static boolean isPublic(int mod) {
        return (mod & PUBLIC) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code private} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code private} modifier; {@code false} otherwise.
     */
    public static boolean isPrivate(int mod) {
        return (mod & PRIVATE) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code protected} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code protected} modifier; {@code false} otherwise.
     */
    public static boolean isProtected(int mod) {
        return (mod & PROTECTED) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code static} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code static} modifier; {@code false} otherwise.
     */
    public static boolean isStatic(int mod) {
        return (mod & STATIC) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code final} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code final} modifier; {@code false} otherwise.
     */
    public static boolean isFinal(int mod) {
        return (mod & FINAL) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code synchronized} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code synchronized} modifier; {@code false} otherwise.
     */
    public static boolean isSynchronized(int mod) {
        return (mod & SYNCHRONIZED) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code volatile} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code volatile} modifier; {@code false} otherwise.
     */
    public static boolean isVolatile(int mod) {
        return (mod & VOLATILE) != 0;
    }
    /**
     * Returns true if the given modifiers contain {@link Modifier#CONSTRUCTOR}.
     * @hide
     */
    public static boolean isConstructor(int modifiers) {
        return ((modifiers & Modifier.CONSTRUCTOR) != 0);
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code transient} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code transient} modifier; {@code false} otherwise.
     */
    public static boolean isTransient(int mod) {
        return (mod & TRANSIENT) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code native} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code native} modifier; {@code false} otherwise.
     */
    public static boolean isNative(int mod) {
        return (mod & NATIVE) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code interface} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code interface} modifier; {@code false} otherwise.
     */
    public static boolean isInterface(int mod) {
        return (mod & INTERFACE) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code abstract} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code abstract} modifier; {@code false} otherwise.
     */
    public static boolean isAbstract(int mod) {
        return (mod & ABSTRACT) != 0;
    }
    /**
     * Return {@code true} if the integer argument includes the
     * {@code strictfp} modifier, {@code false} otherwise.
     *
     * @param mod a set of modifiers
     * @return {@code true} if {@code mod} includes the
     * {@code strictfp} modifier; {@code false} otherwise.
     */
    public static boolean isStrict(int mod) {
        return (mod & STRICT) != 0;
    }
    /**
     * Return a string describing the access modifier flags in
     * the specified modifier. For example:
     * <blockquote><pre>
     *    public final synchronized strictfp
     * </pre></blockquote>
     * The modifier names are returned in an order consistent with the
     * suggested modifier orderings given in sections 8.1.1, 8.3.1, 8.4.3, 8.8.3, and 9.1.1 of
     * <cite>The Java&trade; Language Specification</cite>.
     * The full modifier ordering used by this method is:
     * <blockquote> {@code
     * public protected private abstract static final transient
     * volatile synchronized native strictfp
     * interface } </blockquote>
     * The {@code interface} modifier discussed in this class is
     * not a true modifier in the Java language and it appears after
     * all other modifiers listed by this method. This method may
     * return a string of modifiers that are not valid modifiers of a
     * Java entity; in other words, no checking is done on the
     * possible validity of the combination of modifiers represented
     * by the input.
     *
     * Note that to perform such checking for a known kind of entity,
     * such as a constructor or method, first AND the argument of
     * {@code toString} with the appropriate mask from a method like
     * {@link #constructorModifiers} or {@link #methodModifiers}.
     *
     * @param   mod a set of modifiers
     * @return  a string representation of the set of modifiers
     * represented by {@code mod}
     */
    public static String toString(int mod) {
        StringBuilder sb = new StringBuilder();
        int len;
        if ((mod & PUBLIC) != 0)        sb.append("public ");
        if ((mod & PROTECTED) != 0)     sb.append("protected ");
        if ((mod & PRIVATE) != 0)       sb.append("private ");
        /* Canonical order */
        if ((mod & ABSTRACT) != 0)      sb.append("abstract ");
        if ((mod & STATIC) != 0)        sb.append("static ");
        if ((mod & FINAL) != 0)         sb.append("final ");
        if ((mod & TRANSIENT) != 0)     sb.append("transient ");
        if ((mod & VOLATILE) != 0)      sb.append("volatile ");
        if ((mod & SYNCHRONIZED) != 0)  sb.append("synchronized ");
        if ((mod & NATIVE) != 0)        sb.append("native ");
        if ((mod & STRICT) != 0)        sb.append("strictfp ");
        if ((mod & INTERFACE) != 0)     sb.append("interface ");
        if ((len = sb.length()) > 0)    /* trim trailing space */
            return sb.toString().substring(0, len-1);
        return "";
    }
    /*
     * Access modifier flag constants from tables 4.1, 4.4, 4.5, and 4.7 of
     * <cite>The Java&trade; Virtual Machine Specification</cite>
     */
    /**
     * The {@code int} value representing the {@code public}
     * modifier.
     */
    public static final int PUBLIC           = 0x00000001;
    /**
     * The {@code int} value representing the {@code private}
     * modifier.
     */
    public static final int PRIVATE          = 0x00000002;
    /**
     * The {@code int} value representing the {@code protected}
     * modifier.
     */
    public static final int PROTECTED        = 0x00000004;
    /**
     * The {@code int} value representing the {@code static}
     * modifier.
     */
    public static final int STATIC           = 0x00000008;
    /**
     * The {@code int} value representing the {@code final}
     * modifier.
     */
    public static final int FINAL            = 0x00000010;
    /**
     * The {@code int} value representing the {@code synchronized}
     * modifier.
     */
    public static final int SYNCHRONIZED     = 0x00000020;
    /**
     * The {@code int} value representing the {@code volatile}
     * modifier.
     */
    public static final int VOLATILE         = 0x00000040;
    /**
     * The {@code int} value representing the {@code transient}
     * modifier.
     */
    public static final int TRANSIENT        = 0x00000080;
    /**
     * The {@code int} value representing the {@code native}
     * modifier.
     */
    public static final int NATIVE           = 0x00000100;
    /**
     * The {@code int} value representing the {@code interface}
     * modifier.
     */
    public static final int INTERFACE        = 0x00000200;
    /**
     * The {@code int} value representing the {@code abstract}
     * modifier.
     */
    public static final int ABSTRACT         = 0x00000400;
    /**
     * The {@code int} value representing the {@code strictfp}
     * modifier.
     */
    public static final int STRICT           = 0x00000800;
    // Bits not (yet) exposed in the public API either because they
    // have different meanings for fields and methods and there is no
    // way to distinguish between the two in this class, or because
    // they are not Java programming language keywords
    static final int BRIDGE    = 0x00000040;
    static final int VARARGS   = 0x00000080;
    /**
     * @hide
     */
    public static final int SYNTHETIC = 0x00001000;
    static final int ANNOTATION  = 0x00002000;
    static final int ENUM      = 0x00004000;
    static final int MANDATED  = 0x00008000;
    static boolean isSynthetic(int mod) {
      return (mod & SYNTHETIC) != 0;
    }
    static boolean isMandated(int mod) {
      return (mod & MANDATED) != 0;
    }
    // Note on the FOO_MODIFIERS fields and fooModifiers() methods:
    // the sets of modifiers are not guaranteed to be constants
    // across time and Java SE releases. Therefore, it would not be
    // appropriate to expose an external interface to this information
    // that would allow the values to be treated as Java-level
    // constants since the values could be constant folded and updates
    // to the sets of modifiers missed. Thus, the fooModifiers()
    // methods return an unchanging values for a given release, but a
    // value that can potentially change over time.
    /**
     * Dex addition to mark instance constructors and static class
     * initializer methods.
     * @hide
     */
    public static final int CONSTRUCTOR = 0x10000;
    /**
     * Default methods are marked with a synthetic access flag
     * to speed up class loading and invocation target lookup.
     * Implies INTERFACE, not-ABSTRACT, and not-STATIC.
     *
     * @hide
     */
    public static final int DEFAULT = 0x00400000;
    /**
     * The Java source modifiers that can be applied to a class.
     * @jls 8.1.1 Class Modifiers
     */
    private static final int CLASS_MODIFIERS =
        Modifier.PUBLIC         | Modifier.PROTECTED    | Modifier.PRIVATE |
        Modifier.ABSTRACT       | Modifier.STATIC       | Modifier.FINAL   |
        Modifier.STRICT;
    /**
     * The Java source modifiers that can be applied to an interface.
     * @jls 9.1.1 Interface Modifiers
     */
    private static final int INTERFACE_MODIFIERS =
        Modifier.PUBLIC         | Modifier.PROTECTED    | Modifier.PRIVATE |
        Modifier.ABSTRACT       | Modifier.STATIC       | Modifier.STRICT;
    /**
     * The Java source modifiers that can be applied to a constructor.
     * @jls 8.8.3 Constructor Modifiers
     */
    private static final int CONSTRUCTOR_MODIFIERS =
        Modifier.PUBLIC         | Modifier.PROTECTED    | Modifier.PRIVATE;
    /**
     * The Java source modifiers that can be applied to a method.
     * @jls 8.4.3 Method Modifiers
     */
    private static final int METHOD_MODIFIERS =
        Modifier.PUBLIC         | Modifier.PROTECTED    | Modifier.PRIVATE |
        Modifier.ABSTRACT       | Modifier.STATIC       | Modifier.FINAL   |
        Modifier.SYNCHRONIZED   | Modifier.NATIVE       | Modifier.STRICT;
    /**
     * The Java source modifiers that can be applied to a field.
     * @jls 8.3.1 Field Modifiers
     */
    private static final int FIELD_MODIFIERS =
        Modifier.PUBLIC         | Modifier.PROTECTED    | Modifier.PRIVATE |
        Modifier.STATIC         | Modifier.FINAL        | Modifier.TRANSIENT |
        Modifier.VOLATILE;
    /**
     * The Java source modifiers that can be applied to a method or constructor parameter.
     * @jls 8.4.1 Formal Parameters
     */
    private static final int PARAMETER_MODIFIERS =
        Modifier.FINAL;
    /**
     * The Java source modifiers that specify access: {@code public},
     * {@code protected}, and {@code private}.
     */
    static final int ACCESS_MODIFIERS =
        Modifier.PUBLIC | Modifier.PROTECTED | Modifier.PRIVATE;
    /**
     * Return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a class.
     * @return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a class.
     *
     * @jls 8.1.1 Class Modifiers
     * @since 1.7
     */
    public static int classModifiers() {
        return CLASS_MODIFIERS;
    }
    /**
     * Return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to an interface.
     * @return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to an interface.
     *
     * @jls 9.1.1 Interface Modifiers
     * @since 1.7
     */
    public static int interfaceModifiers() {
        return INTERFACE_MODIFIERS;
    }
    /**
     * Return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a constructor.
     * @return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a constructor.
     *
     * @jls 8.8.3 Constructor Modifiers
     * @since 1.7
     */
    public static int constructorModifiers() {
        return CONSTRUCTOR_MODIFIERS;
    }
    /**
     * Return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a method.
     * @return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a method.
     *
     * @jls 8.4.3 Method Modifiers
     * @since 1.7
     */
    public static int methodModifiers() {
        return METHOD_MODIFIERS;
    }
    /**
     * Return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a field.
     * @return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a field.
     *
     * @jls 8.3.1 Field Modifiers
     * @since 1.7
     */
    public static int fieldModifiers() {
        return FIELD_MODIFIERS;
    }
    /**
     * Return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a parameter.
     * @return an {@code int} value OR-ing together the source language
     * modifiers that can be applied to a parameter.
     *
     * @jls 8.4.1 Formal Parameters
     * @since 1.8
     */
    public static int parameterModifiers() {
        return PARAMETER_MODIFIERS;
    }
}
| apache-2.0 |
android-ia/platform_tools_idea | platform/core-impl/src/com/intellij/psi/text/BlockSupport.java | 3009 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.psi.text;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.components.ServiceManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.UserDataHolder;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.psi.PsiFile;
import com.intellij.psi.impl.source.text.DiffLog;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
// Project-level service that supports (incremental) reparsing of a range of a PSI file.
public abstract class BlockSupport {
  // Looks up the project's BlockSupport service instance.
  public static BlockSupport getInstance(Project project) {
    return ServiceManager.getService(project, BlockSupport.class);
  }
  // Reparses [startOffset, endOffset) of the file as if it contained newText.
  public abstract void reparseRange(PsiFile file, int startOffset, int endOffset, @NonNls CharSequence newText) throws IncorrectOperationException;
  // Computes the tree diff produced by reparsing changedPsiRange of the file
  // against newText; the indicator allows the (potentially long) reparse to be cancelled.
  @NotNull
  public abstract DiffLog reparseRange(@NotNull PsiFile file,
                                       TextRange changedPsiRange,
                                       @NotNull CharSequence newText,
                                       @NotNull ProgressIndicator progressIndicator) throws IncorrectOperationException;
  // User-data key: when set to TRUE on an element, incremental reparse is skipped
  // and a full reparse is forced.
  public static final Key<Boolean> DO_NOT_REPARSE_INCREMENTALLY = Key.create("DO_NOT_REPARSE_INCREMENTALLY");
  // User-data key holding a pre-built AST to use for the reparse.
  public static final Key<ASTNode> TREE_TO_BE_REPARSED = Key.create("TREE_TO_BE_REPARSED");
  // Thrown to signal that an incremental reparse already succeeded, carrying the
  // resulting diff; used as control flow rather than as an error.
  public static class ReparsedSuccessfullyException extends RuntimeException {
    private final DiffLog myDiffLog;
    public ReparsedSuccessfullyException(@NotNull DiffLog diffLog) {
      myDiffLog = diffLog;
    }
    @NotNull
    public DiffLog getDiffLog() {
      return myDiffLog;
    }
    // Control-flow exception: skip the (expensive) stack-trace capture entirely.
    @Override
    public synchronized Throwable fillInStackTrace() {
      return this;
    }
  }
  // maximal tree depth for which incremental reparse is allowed
  // if tree is deeper then it will be replaced completely - to avoid SOEs
  public static final int INCREMENTAL_REPARSE_DEPTH_LIMIT = Registry.intValue("psi.incremental.reparse.depth.limit", 1000);
  // User-data key: TRUE marks a tree that exceeded the depth limit above.
  public static final Key<Boolean> TREE_DEPTH_LIMIT_EXCEEDED = Key.create("TREE_IS_TOO_DEEP");
  // True when the element has been flagged as exceeding the reparse depth limit.
  public static boolean isTooDeep(final UserDataHolder element) {
    return element != null && Boolean.TRUE.equals(element.getUserData(TREE_DEPTH_LIMIT_EXCEEDED));
  }
}
| apache-2.0 |
szpak/mockito | src/main/java/org/mockito/internal/runners/util/package-info.java | 218 | /*
* Copyright (c) 2007 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
/**
* Internal utils for runner implementations.
*/
package org.mockito.internal.runners.util;
| mit |
mritun/protobuf-socket-rpc | java/src/test/java/com/googlecode/protobuf/socketrpc/SocketRpcConnectionFactoryTest.java | 4217 | // Copyright (c) 2010 Shardul Deo
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package com.googlecode.protobuf.socketrpc;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.UnknownHostException;
import com.googlecode.protobuf.socketrpc.RpcConnectionFactory.Connection;
import com.googlecode.protobuf.socketrpc.TestProtos.Request;
import com.googlecode.protobuf.socketrpc.TestProtos.Request.Builder;
import junit.framework.TestCase;
/**
* Tests for {@link SocketRpcConnectionFactory}.
*
* @author Shardul Deo
*/
public class SocketRpcConnectionFactoryTest extends TestCase {

  // Protobuf message used as the payload in all round-trip tests.
  private static final Request MESSAGE = Request.newBuilder()
      .setStrData("test data")
      .build();

  private FakeSocketFactory socketFactory;
  private RpcConnectionFactory connectionFactory;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    socketFactory = new FakeSocketFactory();
    connectionFactory = new SocketRpcConnectionFactory("host", 8080,
        socketFactory, true /* delimited */);
  }

  /** Socket creation failing with UnknownHostException must propagate it. */
  public void testCreateConnection_unknownHost() throws IOException {
    UnknownHostException uhe = new UnknownHostException();
    socketFactory.throwsException(uhe);
    try {
      connectionFactory.createConnection();
      // Bug fix: previously the test passed silently when no exception was thrown.
      fail("Expected UnknownHostException");
    } catch (UnknownHostException e) {
      assertSame(uhe, e);
    }
  }

  /** Socket creation failing with a generic IOException must propagate it. */
  public void testCreateConnection_ioError() {
    IOException ioe = new IOException();
    socketFactory.throwsException(ioe);
    try {
      connectionFactory.createConnection();
      // Bug fix: previously the test passed silently when no exception was thrown.
      fail("Expected IOException");
    } catch (IOException e) {
      assertSame(ioe, e);
    }
  }

  /** Makes the factory hand out the given fake socket and opens a connection. */
  private Connection connectionForSocket(FakeSocket socket) throws IOException {
    socketFactory.returnsSocket(socket);
    return connectionFactory.createConnection();
  }

  /** Send first, then receive, over the fake socket; both sides must round-trip. */
  public void testConnectionOutputInput() throws IOException {
    FakeSocket socket = new FakeSocket(true);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    MESSAGE.writeDelimitedTo(os);
    socket.withInputBytes(os.toByteArray());

    Connection connection = connectionForSocket(socket);
    connection.sendProtoMessage(MESSAGE);
    ByteArrayInputStream is = new ByteArrayInputStream(socket.getOutputBytes());
    assertEquals(MESSAGE, Request.parseDelimitedFrom(is));
    Builder builder = Request.newBuilder();
    connection.receiveProtoMessage(builder);
    assertEquals(MESSAGE, builder.build());
  }

  /** Receive first, then send, over the fake socket; both sides must round-trip. */
  public void testConnectionInputOutput() throws IOException {
    FakeSocket socket = new FakeSocket(true);
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    MESSAGE.writeDelimitedTo(os);
    socket.withInputBytes(os.toByteArray());

    Connection connection = connectionForSocket(socket);
    Builder builder = Request.newBuilder();
    connection.receiveProtoMessage(builder);
    assertEquals(MESSAGE, builder.build());
    connection.sendProtoMessage(MESSAGE);
    ByteArrayInputStream is = new ByteArrayInputStream(socket.getOutputBytes());
    assertEquals(MESSAGE, Request.parseDelimitedFrom(is));
  }
}
| mit |
siddhika1889/Pydev-Editor | src/org/python/pydev/ui/pythonpathconf/IInterpreterProvider.java | 2089 | /******************************************************************************
* Copyright (C) 2013 Jonah Graham
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Jonah Graham <jonah@kichwacoders.com> - initial API and implementation
******************************************************************************/
package org.python.pydev.ui.pythonpathconf;
/**
* A representation of an interpreter that exists, or can be installed, on the
* users machine. Created from calling all the registered
* {@link IInterpreterProviderFactory#getInterpreterProviders(org.python.pydev.ui.pythonpathconf.IInterpreterProviderFactory.InterpreterType)}
*/
public interface IInterpreterProvider {

    /**
     * Return the name of the Executable or Jar for this interpreter.
     *
     * @return the name of the Executable or Jar
     */
    public String getExecutableOrJar();

    /**
     * Return the user visible name. If the name is <code>null</code>, the
     * {@link #getExecutableOrJar()} will be used instead. If the name returned
     * is not unique within the workspace, it will be made unique.
     *
     * @return the name displayed in the UI. May be <code>null</code>
     */
    public String getName();

    /**
     * Return whether the interpreter methods are returning provisional (false)
     * or installed (true) information. If not complete, call
     * {@link #runInstall()} to install/etc this interpreter.
     *
     * @return true if the Provider is ready to go
     */
    public boolean isInstalled();

    /**
     * Install the interpreter. If {@link #isInstalled()} still returns false
     * after the install is run, then the user either cancelled, or an error
     * occurred that the user has already been informed about.
     *
     * It is up to the implementor to bundle installations up in a job to
     * maintain UI responsiveness.
     */
    public void runInstall();
}
| epl-1.0 |
mdaniel/svn-caucho-com-resin | modules/kernel/src/com/caucho/loader/DependencyCheckInterval.java | 1994 | /*
* Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
*
* This file is part of Resin(R) Open Source
*
* Each copy or derived work must preserve the copyright notice and this
* notice unmodified.
*
* Resin Open Source is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* Resin Open Source is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
* of NON-INFRINGEMENT. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License
* along with Resin Open Source; if not, write to the
* Free SoftwareFoundation, Inc.
* 59 Temple Place, Suite 330
* Boston, MA 02111-1307 USA
*
* @author Scott Ferguson
*/
package com.caucho.loader;
import com.caucho.config.types.Period;
import javax.annotation.PostConstruct;
/**
* Class for update interval checking.
*/
public class DependencyCheckInterval {
  // Default check interval in milliseconds.
  private long _interval = 2000L;

  /**
   * Sets the interval from a configured period.
   *
   * @param period the configured dependency-check period
   */
  public void setValue(Period period)
  {
    _interval = period.getPeriod();
  }

  /**
   * Applies the configured interval after configuration: walks the context
   * class loader chain and configures the first DynamicClassLoader found,
   * falling back to the global default when there is none.
   */
  @PostConstruct
  public void init()
  {
    ClassLoader loader = Thread.currentThread().getContextClassLoader();

    for (; loader != null; loader = loader.getParent()) {
      if (loader instanceof DynamicClassLoader) {
        ((DynamicClassLoader) loader).setDependencyCheckInterval(_interval);
        return;
      }
    }

    // No DynamicClassLoader on the chain (the loop above ran to completion,
    // so the redundant "loader == null" re-check was removed): use the
    // global setting instead.
    DynamicClassLoader.setGlobalDependencyCheckInterval(_interval);
  }

  @Override
  public String toString()
  {
    return "DependencyCheckInterval[]";
  }
}
| gpl-2.0 |
fnatter/freeplane-debian-dev | freeplane_plugin_script/src/main/java/groovy/runtime/metaclass/java/lang/StringMetaClass.java | 1241 | package groovy.runtime.metaclass.java.lang;
import groovy.lang.DelegatingMetaClass;
import groovy.lang.MetaClass;
import org.freeplane.plugin.script.proxy.Convertible;
/** Make Convertible known to class String and let String handle Convertibles as if they were Strings
* (via Convertible.getText(). */
public class StringMetaClass extends DelegatingMetaClass {
    public StringMetaClass(MetaClass delegate) {
        super(delegate);
    }

    /**
     * Intercepts instance method calls on String: any {@link Convertible}
     * argument is replaced by its text before delegating.
     */
    @Override
    public Object invokeMethod(Object object, String methodName, Object[] arguments) {
        replaceConvertibleByText(arguments);
        return super.invokeMethod(object, methodName, arguments);
    }

    /** Same Convertible-to-text substitution for static method calls. */
    @Override
    public Object invokeStaticMethod(Object object, String methodName, Object[] arguments) {
        replaceConvertibleByText(arguments);
        return super.invokeStaticMethod(object, methodName, arguments);
    }

    /** Same Convertible-to-text substitution for constructor calls. */
    @Override
    public Object invokeConstructor(Object[] arguments) {
        replaceConvertibleByText(arguments);
        return super.invokeConstructor(arguments);
    }

    // Replaces every Convertible argument in-place with its getText() value.
    private void replaceConvertibleByText(Object[] arguments) {
        for (int i = 0; i < arguments.length; i++) {
            if (arguments[i] instanceof Convertible)
                arguments[i] = ((Convertible) arguments[i]).getText();
        }
    }
}
| gpl-2.0 |
nuest/SOS | core/api/src/main/java/org/n52/sos/util/SimilarityComparator.java | 2737 | /**
* Copyright (C) 2012-2015 52°North Initiative for Geospatial Open Source
* Software GmbH
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*
* If the program is linked with libraries which are licensed under one of
* the following licenses, the combination of the program with the linked
* library is not considered a "derivative work" of the program:
*
* - Apache License, version 2.0
* - Apache Software License, version 1.0
* - GNU Lesser General Public License, version 3
* - Mozilla Public License, versions 1.0, 1.1 and 2.0
* - Common Development and Distribution License (CDDL), version 1.0
*
* Therefore the distribution of the program linked with libraries licensed
* under the aforementioned licenses, is permitted by the copyright holders
* if the distribution is compliant with both the GNU General Public
* License version 2 and the aforementioned licenses.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
* Public License for more details.
*/
package org.n52.sos.util;
import java.util.Comparator;
/**
* TODO JavaDoc
*
* @param <T>
* the type
*
* @author Christian Autermann <c.autermann@52north.org>
*
* @since 4.0.0
*/
public class SimilarityComparator<T extends Similar<T>> implements Comparator<T> {
    // Reference object that similarity is measured against.
    private final T ref;

    public SimilarityComparator(T ref) {
        this.ref = ref;
    }

    @Override
    public int compare(T o1, T o2) {
        if (o1 == o2) {
            return 0;
        }
        // FIXME this conflicts with the contract of compare:
        // compare(null, x) and compare(x, null) both return -1, breaking
        // antisymmetry. NOTE(review): confirm nulls never reach this
        // comparator before relying on total-order sorts.
        if (o1 == null || o2 == null) {
            return -1;
        }
        // check for equals after strict == and null checks
        if (o1.equals(o2)) {
            return 0;
        }
        int s1 = o1.getSimilarity(ref);
        int s2 = o2.getSimilarity(ref);
        //check for inheritance
        // On a similarity tie between related but different classes, the more
        // derived class is ordered first.
        if (s1 == s2 && !o1.getClass().equals(o2.getClass())) {
            if (o1.getClass().isAssignableFrom(o2.getClass())) {
                return 1;
            } else if (o2.getClass().isAssignableFrom(o1.getClass())) {
                return -1;
            }
        }
        // Ordering implied by the code below: similarity 0 sorts first,
        // smaller positive similarities sort before larger ones, and
        // negative similarities sort last.
        // NOTE(review): a tie (s1 == s2, same class, both positive) returns 1
        // from both compare(a,b) and compare(b,a), also breaking antisymmetry.
        if (s1 == 0) {
            return -1;
        }
        if (s2 == 0) {
            return 1;
        }
        if (s1 < 0) {
            return s2 < 0 ? 0 : 1;
        } else if (s2 < 0 || s1 < s2) {
            return -1;
        } else {
            return 1;
        }
    }
}
| gpl-2.0 |
kumarrus/voltdb | src/frontend/org/voltdb/client/ClientAffinityStats.java | 4349 | /* This file is part of VoltDB.
* Copyright (C) 2008-2015 VoltDB Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with VoltDB. If not, see <http://www.gnu.org/licenses/>.
*/
package org.voltdb.client;
/**
* Collect the client's opinion of the operation of client affinity. For the
* given partition ID, affinityWrites tracks write transactions that the client
* believes it knows the master for. AffinityReads tracks read transactions
* the the client believes it found a replica for. Round-robin stats reflect
* the client's lack of information when client affinity is on and indicate
* transactions that were routed using the default round-robin algorithm.
*/
public class ClientAffinityStats {

    // Partition this record describes.
    private int m_partitionId;
    // Transactions routed directly thanks to client affinity.
    private long m_affinityWrites;
    private long m_affinityReads;
    // Transactions that fell back to round-robin routing.
    private long m_rrWrites;
    private long m_rrReads;

    ClientAffinityStats(int partitionId, long affinityWrites, long rrWrites,
            long affinityReads, long rrReads)
    {
        m_partitionId = partitionId;
        m_affinityWrites = affinityWrites;
        m_rrWrites = rrWrites;
        m_affinityReads = affinityReads;
        m_rrReads = rrReads;
    }

    /**
     * Subtract one ClientAffinityStats instance from another to produce a third.
     *
     * @param newer More recent ClientAffinityStats instance.
     * @param older Less recent ClientAffinityStats instance.
     * @return New instance representing the difference.
     */
    public static ClientAffinityStats diff(ClientAffinityStats newer, ClientAffinityStats older) {
        if (newer.m_partitionId != older.m_partitionId) {
            throw new IllegalArgumentException("Can't diff these ClientAffinityStats instances.");
        }
        return new ClientAffinityStats(older.m_partitionId,
                newer.m_affinityWrites - older.m_affinityWrites,
                newer.m_rrWrites - older.m_rrWrites,
                newer.m_affinityReads - older.m_affinityReads,
                newer.m_rrReads - older.m_rrReads);
    }

    /** Field-for-field copy (intentionally does not call Object.clone()). */
    @Override
    protected Object clone() {
        return new ClientAffinityStats(m_partitionId, m_affinityWrites, m_rrWrites,
                m_affinityReads, m_rrReads);
    }

    /** Record one write routed via affinity. */
    void addAffinityWrite()
    {
        ++m_affinityWrites;
    }

    /**
     * Get the number of writes that used affinity for this time period.
     *
     * @return The count as a long.
     */
    public long getAffinityWrites()
    {
        return m_affinityWrites;
    }

    /** Record one write routed round-robin. */
    void addRrWrite()
    {
        ++m_rrWrites;
    }

    /**
     * Get the number of writes that used round-robin distribution
     * for this time period.
     *
     * @return The count as a long.
     */
    public long getRrWrites()
    {
        return m_rrWrites;
    }

    /** Record one read routed via affinity. */
    void addAffinityRead()
    {
        ++m_affinityReads;
    }

    /**
     * Get the number of reads that used affinity for this time period.
     *
     * @return The count as a long.
     */
    public long getAffinityReads()
    {
        return m_affinityReads;
    }

    /** Record one read routed round-robin. */
    void addRrRead()
    {
        ++m_rrReads;
    }

    /**
     * Get the number of reads that used round-robin distribution
     * for this time period.
     *
     * @return The count as a long.
     */
    public long getRrReads()
    {
        return m_rrReads;
    }

    @Override
    public String toString()
    {
        return String.format(
                "Partition ID %d: %d affinity writes, %d affinity reads, "
                        + "%d round-robin writes, %d round-robin reads",
                m_partitionId, m_affinityWrites, m_affinityReads, m_rrWrites, m_rrReads);
    }
}
| agpl-3.0 |
mgherghe/k3po | specification/turn/src/test/java/org/kaazing/specification/turn/AllocationsTcpIT.java | 3667 | /**
* Copyright 2007-2015, Kaazing Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaazing.specification.turn;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.junit.rules.RuleChain.outerRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.DisableOnDebug;
import org.junit.rules.TestRule;
import org.junit.rules.Timeout;
import org.kaazing.k3po.junit.annotation.Specification;
import org.kaazing.k3po.junit.rules.K3poRule;
/**
* Test to validate behavior as specified in
* <a href="https://tools.ietf.org/html/rfc6062">Traversal Using Relays around NAT (TURN) Extensions for TCP Allocations</a>.
*/
public class AllocationsTcpIT {

    // K3PO scripts live under this root; each @Specification names a pair below it.
    private final K3poRule k3po = new K3poRule().setScriptRoot("org/kaazing/specification/turn/tcp.allocations");

    // 5-second cap per test, disabled while debugging.
    private final TestRule timeout = new DisableOnDebug(new Timeout(5, SECONDS));

    @Rule
    public final TestRule chain = outerRule(k3po).around(timeout);

    /**
     * See <a href="https://tools.ietf.org/html/rfc6062#section-4.1">RFC 6062 Section 4.1. Creating an Allocation</a>.
     */
    @Test
    @Specification({
        "allocate/request",
        "allocate/response"}
    )
    public void shouldCreateAllocation() throws Exception {
        k3po.finish();
    }

    /**
     * See <a href="https://tools.ietf.org/html/rfc6062#section-4.3">RFC 6062 Section 4.3. Initiating a Connection</a>.
     */
    @Test
    @Specification({
        "connect/request",
        "connect/response"
    })
    public void shouldSendAndReceiveConnect() throws Exception {
        k3po.finish();
    }

    /**
     * See <a href="https://tools.ietf.org/html/rfc6062#section-4.4">RFC 6062 Section 4.4. Receiving a Connection</a>.
     */
    @Test
    @Specification({
        "connection_attempt/request",
        "connection_attempt/response"
    })
    public void shouldReceiveConnectionAttempt() throws Exception {
        k3po.finish();
    }

    /**
     * See <a href="https://tools.ietf.org/html/rfc6062#section-4.4">RFC 6062 Section 4.4. Receiving a Connection</a>.
     */
    @Test
    @Specification({
        "connection_bind/request",
        "connection_bind/response"
    })
    public void shouldSendAndReceiveConnectionBind() throws Exception {
        k3po.finish();
    }

    /**
     * See <a href="https://tools.ietf.org/html/rfc6062#section-5.2">RFC 6062 Section 5.2. Receiving a Connect Request</a>.
     */
    @Test
    @Specification({
        "connect.with.connection.already.exists.error/request",
        "connect.with.connection.already.exists.error/response"
    })
    public void shouldReceiveErorConnectionAlreadyExists() throws Exception {
        k3po.finish();
    }

    /**
     * See <a href="https://tools.ietf.org/html/rfc6062#section-5.2">RFC 6062 Section 5.2. Receiving a Connect Request</a>.
     */
    @Test
    @Specification({
        "connect.with.connection.timeout.error/request",
        "connect.with.connection.timeout.error/response"
    })
    public void shouldReceiveErrorConnectionTimeoutOrFailure() throws Exception {
        k3po.finish();
    }
}
| agpl-3.0 |
mru00/jade_agents | src/jade/tools/sniffer/PopupMessage.java | 2636 | /*****************************************************************
JADE - Java Agent DEvelopment Framework is a framework to develop multi-agent systems in compliance with the FIPA specifications.
Copyright (C) 2000 CSELT S.p.A.
GNU Lesser General Public License
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation,
version 2.1 of the License.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.
*****************************************************************/
package jade.tools.sniffer;
//#DOTNET_EXCLUDE_BEGIN
import javax.swing.JFrame;
import javax.swing.JPopupMenu;
import javax.swing.JMenuItem;
//#DOTNET_EXCLUDE_END
/*#DOTNET_INCLUDE_BEGIN
import System.Windows.Forms.*;
#DOTNET_INCLUDE_END*/
/**
Javadoc documentation for the file
@author Francisco Regi, Andrea Soracchi - Universita` di Parma
<Br>
<a href="mailto:a_soracchi@libero.it"> Andrea Soracchi(e-mail) </a>
@version $Date: 2005-04-15 17:45:02 +0200 (ven, 15 apr 2005) $ $Revision: 5669 $
*/
/**
* This is the PopupMenu that will appear if the user click
* on the canvas of messages.
* @see jade.tools.sniffer.PopupAgent
*/
public class PopupMessage
//#DOTNET_EXCLUDE_BEGIN
                          extends JPopupMenu
//#DOTNET_EXCLUDE_END
/*#DOTNET_INCLUDE_BEGIN
	extends ContextMenu
#DOTNET_INCLUDE_END*/
 {

//#DOTNET_EXCLUDE_BEGIN
  private JMenuItem tmp;
//#DOTNET_EXCLUDE_END
/*#DOTNET_INCLUDE_BEGIN
  private MenuItem tmp;
#DOTNET_INCLUDE_END*/
  // NOTE(review): this field is never assigned — the constructor parameter
  // shadows it. Presumably dead; confirm before removing.
  private MainWindow mWnd;
  private ViewMessage viewMessage;

  /** Builds the popup with a single "view message" action. */
  public PopupMessage(MainWindow mWnd) {
    super();
    viewMessage=new ViewMessage(mWnd);
//#DOTNET_EXCLUDE_BEGIN
    tmp=add(viewMessage);
    tmp.setIcon(null);
//#DOTNET_EXCLUDE_END
/*#DOTNET_INCLUDE_BEGIN
	get_MenuItems().Add(viewMessage);
#DOTNET_INCLUDE_END*/
  }

  /** Sets the message the "view message" action will display when triggered. */
  protected void setMessage(Message mess) {
    viewMessage.setMessage(mess);
  }

/*#DOTNET_INCLUDE_BEGIN
  protected void showMessage(Object o, System.EventArgs e)
  {
	viewMessage.ShowMe(o, e);
  }
#DOTNET_INCLUDE_END*/
}
| lgpl-2.1 |
Tybion/community-edition | projects/remote-api/source/generated/org/alfresco/rest/antlr/WhereClauseParser.java | 79325 | // $ANTLR 3.4 org/alfresco/rest/antlr/WhereClause.g 2013-05-24 09:01:14
package org.alfresco.rest.antlr;
import org.alfresco.rest.framework.resource.parameters.where.InvalidQueryException;
import org.antlr.runtime.*;
import java.util.Stack;
import java.util.List;
import java.util.ArrayList;
import org.antlr.runtime.tree.*;
@SuppressWarnings({"all", "warnings", "unchecked"})
public class WhereClauseParser extends Parser {
    // Symbolic names for every token type, indexed by token type number.
    public static final String[] tokenNames = new String[] {
        "<invalid>", "<EOR>", "<DOWN>", "<UP>", "AND", "BETWEEN", "COMMA", "EQUALS", "EXISTS", "GREATERTHAN", "GREATERTHANOREQUALS", "IDENTIFIER", "IDENTIFIERDIGIT", "IDENTIFIERLETTER", "IN", "LEFTPAREN", "LESSTHAN", "LESSTHANOREQUALS", "MATCHES", "NEGATION", "OR", "PROPERTYNAME", "PROPERTYVALUE", "RIGHTPAREN", "SINGLEQUOTE", "WS"
    };

    // Token type constants generated by ANTLR from WhereClause.g.
    public static final int EOF=-1;
    public static final int AND=4;
    public static final int BETWEEN=5;
    public static final int COMMA=6;
    public static final int EQUALS=7;
    public static final int EXISTS=8;
    public static final int GREATERTHAN=9;
    public static final int GREATERTHANOREQUALS=10;
    public static final int IDENTIFIER=11;
    public static final int IDENTIFIERDIGIT=12;
    public static final int IDENTIFIERLETTER=13;
    public static final int IN=14;
    public static final int LEFTPAREN=15;
    public static final int LESSTHAN=16;
    public static final int LESSTHANOREQUALS=17;
    public static final int MATCHES=18;
    public static final int NEGATION=19;
    public static final int OR=20;
    public static final int PROPERTYNAME=21;
    public static final int PROPERTYVALUE=22;
    public static final int RIGHTPAREN=23;
    public static final int SINGLEQUOTE=24;
    public static final int WS=25;
    // delegates
    /** No delegate grammars: this parser was generated standalone. */
    public Parser[] getDelegates() {
        return new Parser[] {};
    }

    // delegators

    public WhereClauseParser(TokenStream input) {
        this(input, new RecognizerSharedState());
    }
    public WhereClauseParser(TokenStream input, RecognizerSharedState state) {
        super(input, state);
    }

    // Adaptor used to build AST nodes; replaceable via setTreeAdaptor().
    protected TreeAdaptor adaptor = new CommonTreeAdaptor();

    public void setTreeAdaptor(TreeAdaptor adaptor) {
        this.adaptor = adaptor;
    }
    public TreeAdaptor getTreeAdaptor() {
        return adaptor;
    }
    public String[] getTokenNames() { return WhereClauseParser.tokenNames; }
    public String getGrammarFileName() { return "org/alfresco/rest/antlr/WhereClause.g"; }
// These methods are here to force the parser to error instead of suppressing problems.
// @Override
// public void reportError(RecognitionException e) {
// System.out.println("CUSTOM ERROR...\n" + e);
// throw new InvalidQueryException(e.getMessage());
// }
    /**
     * Overridden to fail fast: instead of ANTLR's default single-token
     * insertion/deletion recovery, a mismatched token aborts the parse.
     */
    @Override
    protected Object recoverFromMismatchedToken(IntStream input, int ttype, BitSet follow) throws RecognitionException
    {
        throw new MismatchedTokenException(ttype, input);
    }

    /** Overridden to fail fast: rethrows instead of attempting set recovery. */
    @Override
    public Object recoverFromMismatchedSet(IntStream input, RecognitionException e, BitSet follow) throws RecognitionException
    {
        throw e;
    }
//
// @Override
// public String getErrorMessage(RecognitionException e, String[] tokenNames)
// {
// System.out.println("THROW ME...\n" + e);
// throw new InvalidQueryException(e.getMessage());
// }
// End of methods here to force the parser to error instead of suppressing problems.
    /** Generated return scope for the whereclause rule; carries the built AST. */
    public static class whereclause_return extends ParserRuleReturnScope {
        Object tree;
        public Object getTree() { return tree; }
    };
    // $ANTLR start "whereclause"
    // org/alfresco/rest/antlr/WhereClause.g:125:1: whereclause : ( WS )? LEFTPAREN ! ( WS )? predicate RIGHTPAREN ! ( WS )? ;
    /**
     * Generated from the whereclause rule: optional whitespace around a
     * parenthesised predicate; the parentheses are excluded from the AST
     * (grammar '!' suffix). ANTLR-generated — regenerate from WhereClause.g
     * rather than editing by hand.
     */
    public final WhereClauseParser.whereclause_return whereclause() throws RecognitionException {
        WhereClauseParser.whereclause_return retval = new WhereClauseParser.whereclause_return();
        retval.start = input.LT(1);
        Object root_0 = null;
        Token WS1=null;
        Token LEFTPAREN2=null;
        Token WS3=null;
        Token RIGHTPAREN5=null;
        Token WS6=null;
        WhereClauseParser.predicate_return predicate4 =null;
        Object WS1_tree=null;
        Object LEFTPAREN2_tree=null;
        Object WS3_tree=null;
        Object RIGHTPAREN5_tree=null;
        Object WS6_tree=null;
        try {
            // org/alfresco/rest/antlr/WhereClause.g:125:13: ( ( WS )? LEFTPAREN ! ( WS )? predicate RIGHTPAREN ! ( WS )? )
            // org/alfresco/rest/antlr/WhereClause.g:125:15: ( WS )? LEFTPAREN ! ( WS )? predicate RIGHTPAREN ! ( WS )?
            {
            root_0 = (Object)adaptor.nil();
            // org/alfresco/rest/antlr/WhereClause.g:125:15: ( WS )?
            int alt1=2;
            switch ( input.LA(1) ) {
                case WS:
                    {
                    alt1=1;
                    }
                    break;
            }
            switch (alt1) {
                case 1 :
                    // org/alfresco/rest/antlr/WhereClause.g:125:15: WS
                    {
                    WS1=(Token)match(input,WS,FOLLOW_WS_in_whereclause750);
                    WS1_tree =
                    (Object)adaptor.create(WS1)
                    ;
                    adaptor.addChild(root_0, WS1_tree);
                    }
                    break;
            }
            LEFTPAREN2=(Token)match(input,LEFTPAREN,FOLLOW_LEFTPAREN_in_whereclause753);
            // org/alfresco/rest/antlr/WhereClause.g:125:30: ( WS )?
            int alt2=2;
            switch ( input.LA(1) ) {
                case WS:
                    {
                    alt2=1;
                    }
                    break;
            }
            switch (alt2) {
                case 1 :
                    // org/alfresco/rest/antlr/WhereClause.g:125:30: WS
                    {
                    WS3=(Token)match(input,WS,FOLLOW_WS_in_whereclause756);
                    WS3_tree =
                    (Object)adaptor.create(WS3)
                    ;
                    adaptor.addChild(root_0, WS3_tree);
                    }
                    break;
            }
            pushFollow(FOLLOW_predicate_in_whereclause759);
            predicate4=predicate();
            state._fsp--;
            adaptor.addChild(root_0, predicate4.getTree());
            RIGHTPAREN5=(Token)match(input,RIGHTPAREN,FOLLOW_RIGHTPAREN_in_whereclause761);
            // org/alfresco/rest/antlr/WhereClause.g:125:56: ( WS )?
            int alt3=2;
            switch ( input.LA(1) ) {
                case WS:
                    {
                    alt3=1;
                    }
                    break;
            }
            switch (alt3) {
                case 1 :
                    // org/alfresco/rest/antlr/WhereClause.g:125:56: WS
                    {
                    WS6=(Token)match(input,WS,FOLLOW_WS_in_whereclause764);
                    WS6_tree =
                    (Object)adaptor.create(WS6)
                    ;
                    adaptor.addChild(root_0, WS6_tree);
                    }
                    break;
            }
            }
            retval.stop = input.LT(-1);
            retval.tree = (Object)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        }
        catch(RecognitionException e)
        {
            // fail fast per the overridden recovery methods above
            throw e;
        }
        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "whereclause"
    /** Generated return scope for the predicate rule; carries the built AST. */
    public static class predicate_return extends ParserRuleReturnScope {
        Object tree;
        public Object getTree() { return tree; }
    };
    // $ANTLR start "predicate"
    // org/alfresco/rest/antlr/WhereClause.g:126:1: predicate : ( simplepredicate | simplepredicate ( AND simplepredicate )+ -> ^( AND ( simplepredicate )+ ) | simplepredicate ( OR simplepredicate )+ -> ^( OR ( simplepredicate )+ ) );
    /**
     * Generated from the predicate rule: a single simplepredicate, or a
     * homogeneous AND/OR chain rewritten to a tree rooted at the operator.
     * ANTLR-generated — regenerate from WhereClause.g rather than editing.
     */
    public final WhereClauseParser.predicate_return predicate() throws RecognitionException {
        WhereClauseParser.predicate_return retval = new WhereClauseParser.predicate_return();
        retval.start = input.LT(1);
        Object root_0 = null;
        Token AND9=null;
        Token OR12=null;
        WhereClauseParser.simplepredicate_return simplepredicate7 =null;
        WhereClauseParser.simplepredicate_return simplepredicate8 =null;
        WhereClauseParser.simplepredicate_return simplepredicate10 =null;
        WhereClauseParser.simplepredicate_return simplepredicate11 =null;
        WhereClauseParser.simplepredicate_return simplepredicate13 =null;
        Object AND9_tree=null;
        Object OR12_tree=null;
        RewriteRuleTokenStream stream_AND=new RewriteRuleTokenStream(adaptor,"token AND");
        RewriteRuleTokenStream stream_OR=new RewriteRuleTokenStream(adaptor,"token OR");
        RewriteRuleSubtreeStream stream_simplepredicate=new RewriteRuleSubtreeStream(adaptor,"rule simplepredicate");
        try {
            // org/alfresco/rest/antlr/WhereClause.g:126:11: ( simplepredicate | simplepredicate ( AND simplepredicate )+ -> ^( AND ( simplepredicate )+ ) | simplepredicate ( OR simplepredicate )+ -> ^( OR ( simplepredicate )+ ) )
            int alt6=3;
            // alternative selection requires the generated DFA (lookahead past the first simplepredicate)
            alt6 = dfa6.predict(input);
            switch (alt6) {
                case 1 :
                    // org/alfresco/rest/antlr/WhereClause.g:126:13: simplepredicate
                    {
                    root_0 = (Object)adaptor.nil();
                    pushFollow(FOLLOW_simplepredicate_in_predicate772);
                    simplepredicate7=simplepredicate();
                    state._fsp--;
                    adaptor.addChild(root_0, simplepredicate7.getTree());
                    }
                    break;
                case 2 :
                    // org/alfresco/rest/antlr/WhereClause.g:127:13: simplepredicate ( AND simplepredicate )+
                    {
                    pushFollow(FOLLOW_simplepredicate_in_predicate786);
                    simplepredicate8=simplepredicate();
                    state._fsp--;
                    stream_simplepredicate.add(simplepredicate8.getTree());
                    // org/alfresco/rest/antlr/WhereClause.g:127:29: ( AND simplepredicate )+
                    int cnt4=0;
                    loop4:
                    do {
                        int alt4=2;
                        switch ( input.LA(1) ) {
                            case AND:
                                {
                                alt4=1;
                                }
                                break;
                        }
                        switch (alt4) {
                            case 1 :
                                // org/alfresco/rest/antlr/WhereClause.g:127:30: AND simplepredicate
                                {
                                AND9=(Token)match(input,AND,FOLLOW_AND_in_predicate789);
                                stream_AND.add(AND9);
                                pushFollow(FOLLOW_simplepredicate_in_predicate791);
                                simplepredicate10=simplepredicate();
                                state._fsp--;
                                stream_simplepredicate.add(simplepredicate10.getTree());
                                }
                                break;
                            default :
                                if ( cnt4 >= 1 ) break loop4;
                                EarlyExitException eee =
                                    new EarlyExitException(4, input);
                                throw eee;
                        }
                        cnt4++;
                    } while (true);
                    // AST REWRITE
                    // elements: simplepredicate, AND
                    // token labels:
                    // rule labels: retval
                    // token list labels:
                    // rule list labels:
                    // wildcard labels:
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    root_0 = (Object)adaptor.nil();
                    // 127:52: -> ^( AND ( simplepredicate )+ )
                    {
                        // org/alfresco/rest/antlr/WhereClause.g:127:55: ^( AND ( simplepredicate )+ )
                        {
                        Object root_1 = (Object)adaptor.nil();
                        root_1 = (Object)adaptor.becomeRoot(
                        stream_AND.nextNode()
                        , root_1);
                        if ( !(stream_simplepredicate.hasNext()) ) {
                            throw new RewriteEarlyExitException();
                        }
                        while ( stream_simplepredicate.hasNext() ) {
                            adaptor.addChild(root_1, stream_simplepredicate.nextTree());
                        }
                        stream_simplepredicate.reset();
                        adaptor.addChild(root_0, root_1);
                        }
                    }
                    retval.tree = root_0;
                    }
                    break;
                case 3 :
                    // org/alfresco/rest/antlr/WhereClause.g:128:13: simplepredicate ( OR simplepredicate )+
                    {
                    pushFollow(FOLLOW_simplepredicate_in_predicate816);
                    simplepredicate11=simplepredicate();
                    state._fsp--;
                    stream_simplepredicate.add(simplepredicate11.getTree());
                    // org/alfresco/rest/antlr/WhereClause.g:128:29: ( OR simplepredicate )+
                    int cnt5=0;
                    loop5:
                    do {
                        int alt5=2;
                        switch ( input.LA(1) ) {
                            case OR:
                                {
                                alt5=1;
                                }
                                break;
                        }
                        switch (alt5) {
                            case 1 :
                                // org/alfresco/rest/antlr/WhereClause.g:128:30: OR simplepredicate
                                {
                                OR12=(Token)match(input,OR,FOLLOW_OR_in_predicate819);
                                stream_OR.add(OR12);
                                pushFollow(FOLLOW_simplepredicate_in_predicate821);
                                simplepredicate13=simplepredicate();
                                state._fsp--;
                                stream_simplepredicate.add(simplepredicate13.getTree());
                                }
                                break;
                            default :
                                if ( cnt5 >= 1 ) break loop5;
                                EarlyExitException eee =
                                    new EarlyExitException(5, input);
                                throw eee;
                        }
                        cnt5++;
                    } while (true);
                    // AST REWRITE
                    // elements: OR, simplepredicate
                    // token labels:
                    // rule labels: retval
                    // token list labels:
                    // rule list labels:
                    // wildcard labels:
                    retval.tree = root_0;
                    RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                    root_0 = (Object)adaptor.nil();
                    // 128:51: -> ^( OR ( simplepredicate )+ )
                    {
                        // org/alfresco/rest/antlr/WhereClause.g:128:54: ^( OR ( simplepredicate )+ )
                        {
                        Object root_1 = (Object)adaptor.nil();
                        root_1 = (Object)adaptor.becomeRoot(
                        stream_OR.nextNode()
                        , root_1);
                        if ( !(stream_simplepredicate.hasNext()) ) {
                            throw new RewriteEarlyExitException();
                        }
                        while ( stream_simplepredicate.hasNext() ) {
                            adaptor.addChild(root_1, stream_simplepredicate.nextTree());
                        }
                        stream_simplepredicate.reset();
                        adaptor.addChild(root_0, root_1);
                        }
                    }
                    retval.tree = root_0;
                    }
                    break;
            }
            retval.stop = input.LT(-1);
            retval.tree = (Object)adaptor.rulePostProcessing(root_0);
            adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
        }
        catch(RecognitionException e)
        {
            // fail fast per the overridden recovery methods above
            throw e;
        }
        finally {
            // do for sure before leaving
        }
        return retval;
    }
    // $ANTLR end "predicate"
/** Return scope for the {@code simplepredicate} rule; carries the rewritten AST node. */
public static class simplepredicate_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "simplepredicate"
// org/alfresco/rest/antlr/WhereClause.g:129:1: simplepredicate : ( allowedpredicates -> allowedpredicates | NEGATION allowedpredicates -> ^( NEGATION allowedpredicates ) );
/**
 * Parses the {@code simplepredicate} rule: an allowed predicate optionally
 * preceded by a {@code NEGATION} token. The negated form is rewritten into
 * the AST {@code ^(NEGATION allowedpredicates)}; the plain form passes the
 * sub-rule's tree through unchanged.
 *
 * @return the rule's return scope whose {@code tree} is the (possibly negated) predicate AST
 * @throws RecognitionException if the lookahead token starts neither alternative
 */
public final WhereClauseParser.simplepredicate_return simplepredicate() throws RecognitionException {
WhereClauseParser.simplepredicate_return retval = new WhereClauseParser.simplepredicate_return();
retval.start = input.LT(1);
Object root_0 = null;
Token NEGATION15=null;
WhereClauseParser.allowedpredicates_return allowedpredicates14 =null;
WhereClauseParser.allowedpredicates_return allowedpredicates16 =null;
Object NEGATION15_tree=null;
// Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite below.
RewriteRuleTokenStream stream_NEGATION=new RewriteRuleTokenStream(adaptor,"token NEGATION");
RewriteRuleSubtreeStream stream_allowedpredicates=new RewriteRuleSubtreeStream(adaptor,"rule allowedpredicates");
try {
// org/alfresco/rest/antlr/WhereClause.g:129:17: ( allowedpredicates -> allowedpredicates | NEGATION allowedpredicates -> ^( NEGATION allowedpredicates ) )
// Select the alternative from one token of lookahead.
int alt7=2;
switch ( input.LA(1) ) {
case EXISTS:
case PROPERTYNAME:
{
alt7=1;
}
break;
case NEGATION:
{
alt7=2;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 7, 0, input);
throw nvae;
}
switch (alt7) {
case 1 :
// org/alfresco/rest/antlr/WhereClause.g:129:19: allowedpredicates
{
pushFollow(FOLLOW_allowedpredicates_in_simplepredicate839);
allowedpredicates14=allowedpredicates();
state._fsp--;
stream_allowedpredicates.add(allowedpredicates14.getTree());
// AST REWRITE
// elements: allowedpredicates
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 129:37: -> allowedpredicates
{
adaptor.addChild(root_0, stream_allowedpredicates.nextTree());
}
retval.tree = root_0;
}
break;
case 2 :
// org/alfresco/rest/antlr/WhereClause.g:130:19: NEGATION allowedpredicates
{
NEGATION15=(Token)match(input,NEGATION,FOLLOW_NEGATION_in_simplepredicate863);
stream_NEGATION.add(NEGATION15);
pushFollow(FOLLOW_allowedpredicates_in_simplepredicate865);
allowedpredicates16=allowedpredicates();
state._fsp--;
stream_allowedpredicates.add(allowedpredicates16.getTree());
// AST REWRITE
// elements: NEGATION, allowedpredicates
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 130:46: -> ^( NEGATION allowedpredicates )
{
// org/alfresco/rest/antlr/WhereClause.g:130:49: ^( NEGATION allowedpredicates )
{
// NEGATION becomes the root; the predicate subtree becomes its child.
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot(
stream_NEGATION.nextNode()
, root_1);
adaptor.addChild(root_1, stream_allowedpredicates.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "simplepredicate"
/** Return scope for the {@code allowedpredicates} rule; carries the rewritten AST node. */
public static class allowedpredicates_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "allowedpredicates"
// org/alfresco/rest/antlr/WhereClause.g:131:1: allowedpredicates : ( comparisonpredicate | existspredicate | betweenpredicate | inpredicate | matchespredicate );
/**
 * Parses the {@code allowedpredicates} rule: dispatches to exactly one of the
 * five supported predicate forms (comparison, EXISTS, BETWEEN, IN, MATCHES).
 * No AST rewrite is applied here; the chosen sub-rule's tree is adopted as-is.
 *
 * @return the rule's return scope whose {@code tree} is the chosen predicate's AST
 * @throws RecognitionException if the lookahead does not start any alternative
 */
public final WhereClauseParser.allowedpredicates_return allowedpredicates() throws RecognitionException {
WhereClauseParser.allowedpredicates_return retval = new WhereClauseParser.allowedpredicates_return();
retval.start = input.LT(1);
Object root_0 = null;
WhereClauseParser.comparisonpredicate_return comparisonpredicate17 =null;
WhereClauseParser.existspredicate_return existspredicate18 =null;
WhereClauseParser.betweenpredicate_return betweenpredicate19 =null;
WhereClauseParser.inpredicate_return inpredicate20 =null;
WhereClauseParser.matchespredicate_return matchespredicate21 =null;
try {
// org/alfresco/rest/antlr/WhereClause.g:131:19: ( comparisonpredicate | existspredicate | betweenpredicate | inpredicate | matchespredicate )
// Alternatives starting with PROPERTYNAME need a second token of lookahead
// (the operator/keyword that follows) to disambiguate.
int alt8=5;
switch ( input.LA(1) ) {
case PROPERTYNAME:
{
switch ( input.LA(2) ) {
case BETWEEN:
{
alt8=3;
}
break;
case IN:
{
alt8=4;
}
break;
case MATCHES:
{
alt8=5;
}
break;
case EQUALS:
case GREATERTHAN:
case GREATERTHANOREQUALS:
case LESSTHAN:
case LESSTHANOREQUALS:
{
alt8=1;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 8, 1, input);
throw nvae;
}
}
break;
case EXISTS:
{
alt8=2;
}
break;
default:
NoViableAltException nvae =
new NoViableAltException("", 8, 0, input);
throw nvae;
}
switch (alt8) {
case 1 :
// org/alfresco/rest/antlr/WhereClause.g:131:21: comparisonpredicate
{
root_0 = (Object)adaptor.nil();
pushFollow(FOLLOW_comparisonpredicate_in_allowedpredicates880);
comparisonpredicate17=comparisonpredicate();
state._fsp--;
adaptor.addChild(root_0, comparisonpredicate17.getTree());
}
break;
case 2 :
// org/alfresco/rest/antlr/WhereClause.g:131:43: existspredicate
{
root_0 = (Object)adaptor.nil();
pushFollow(FOLLOW_existspredicate_in_allowedpredicates884);
existspredicate18=existspredicate();
state._fsp--;
adaptor.addChild(root_0, existspredicate18.getTree());
}
break;
case 3 :
// org/alfresco/rest/antlr/WhereClause.g:131:61: betweenpredicate
{
root_0 = (Object)adaptor.nil();
pushFollow(FOLLOW_betweenpredicate_in_allowedpredicates888);
betweenpredicate19=betweenpredicate();
state._fsp--;
adaptor.addChild(root_0, betweenpredicate19.getTree());
}
break;
case 4 :
// org/alfresco/rest/antlr/WhereClause.g:131:80: inpredicate
{
root_0 = (Object)adaptor.nil();
pushFollow(FOLLOW_inpredicate_in_allowedpredicates892);
inpredicate20=inpredicate();
state._fsp--;
adaptor.addChild(root_0, inpredicate20.getTree());
}
break;
case 5 :
// org/alfresco/rest/antlr/WhereClause.g:131:94: matchespredicate
{
root_0 = (Object)adaptor.nil();
pushFollow(FOLLOW_matchespredicate_in_allowedpredicates896);
matchespredicate21=matchespredicate();
state._fsp--;
adaptor.addChild(root_0, matchespredicate21.getTree());
}
break;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "allowedpredicates"
/** Return scope for the {@code comparisonpredicate} rule; carries the rewritten AST node. */
public static class comparisonpredicate_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "comparisonpredicate"
// org/alfresco/rest/antlr/WhereClause.g:132:1: comparisonpredicate : PROPERTYNAME comparisonoperator value -> ^( comparisonoperator PROPERTYNAME value ) ;
/**
 * Parses the {@code comparisonpredicate} rule:
 * {@code PROPERTYNAME comparisonoperator value}, rewritten so the operator
 * becomes the AST root with the property name and value as its children:
 * {@code ^(comparisonoperator PROPERTYNAME value)}.
 *
 * @return the rule's return scope whose {@code tree} is the operator-rooted comparison AST
 * @throws RecognitionException on a token/sub-rule mismatch
 */
public final WhereClauseParser.comparisonpredicate_return comparisonpredicate() throws RecognitionException {
WhereClauseParser.comparisonpredicate_return retval = new WhereClauseParser.comparisonpredicate_return();
retval.start = input.LT(1);
Object root_0 = null;
Token PROPERTYNAME22=null;
WhereClauseParser.comparisonoperator_return comparisonoperator23 =null;
WhereClauseParser.value_return value24 =null;
Object PROPERTYNAME22_tree=null;
// Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite below.
RewriteRuleTokenStream stream_PROPERTYNAME=new RewriteRuleTokenStream(adaptor,"token PROPERTYNAME");
RewriteRuleSubtreeStream stream_comparisonoperator=new RewriteRuleSubtreeStream(adaptor,"rule comparisonoperator");
RewriteRuleSubtreeStream stream_value=new RewriteRuleSubtreeStream(adaptor,"rule value");
try {
// org/alfresco/rest/antlr/WhereClause.g:132:20: ( PROPERTYNAME comparisonoperator value -> ^( comparisonoperator PROPERTYNAME value ) )
// org/alfresco/rest/antlr/WhereClause.g:132:22: PROPERTYNAME comparisonoperator value
{
PROPERTYNAME22=(Token)match(input,PROPERTYNAME,FOLLOW_PROPERTYNAME_in_comparisonpredicate902);
stream_PROPERTYNAME.add(PROPERTYNAME22);
pushFollow(FOLLOW_comparisonoperator_in_comparisonpredicate904);
comparisonoperator23=comparisonoperator();
state._fsp--;
stream_comparisonoperator.add(comparisonoperator23.getTree());
pushFollow(FOLLOW_value_in_comparisonpredicate906);
value24=value();
state._fsp--;
stream_value.add(value24.getTree());
// AST REWRITE
// elements: value, PROPERTYNAME, comparisonoperator
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 132:60: -> ^( comparisonoperator PROPERTYNAME value )
{
// org/alfresco/rest/antlr/WhereClause.g:132:63: ^( comparisonoperator PROPERTYNAME value )
{
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot(stream_comparisonoperator.nextNode(), root_1);
adaptor.addChild(root_1,
stream_PROPERTYNAME.nextNode()
);
adaptor.addChild(root_1, stream_value.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "comparisonpredicate"
/** Return scope for the {@code comparisonoperator} rule; carries the rewritten AST node. */
public static class comparisonoperator_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "comparisonoperator"
// org/alfresco/rest/antlr/WhereClause.g:133:1: comparisonoperator : ( EQUALS | LESSTHAN | GREATERTHAN | LESSTHANOREQUALS | GREATERTHANOREQUALS );
/**
 * Parses the {@code comparisonoperator} rule: matches exactly one of the five
 * relational operator tokens (=, &lt;, &gt;, &lt;=, &gt;=) via a set match
 * and adds it to the AST unchanged.
 *
 * @return the rule's return scope whose {@code tree} holds the matched operator token
 * @throws RecognitionException (as MismatchedSetException) if the lookahead is not in the operator set
 */
public final WhereClauseParser.comparisonoperator_return comparisonoperator() throws RecognitionException {
WhereClauseParser.comparisonoperator_return retval = new WhereClauseParser.comparisonoperator_return();
retval.start = input.LT(1);
Object root_0 = null;
Token set25=null;
Object set25_tree=null;
try {
// org/alfresco/rest/antlr/WhereClause.g:133:19: ( EQUALS | LESSTHAN | GREATERTHAN | LESSTHANOREQUALS | GREATERTHANOREQUALS )
// org/alfresco/rest/antlr/WhereClause.g:
{
root_0 = (Object)adaptor.nil();
set25=(Token)input.LT(1);
// Set match: the range tests rely on the generated token-type ordering
// (GREATERTHAN..GREATERTHANOREQUALS and LESSTHAN..LESSTHANOREQUALS are contiguous).
if ( input.LA(1)==EQUALS||(input.LA(1) >= GREATERTHAN && input.LA(1) <= GREATERTHANOREQUALS)||(input.LA(1) >= LESSTHAN && input.LA(1) <= LESSTHANOREQUALS) ) {
input.consume();
adaptor.addChild(root_0,
(Object)adaptor.create(set25)
);
state.errorRecovery=false;
}
else {
MismatchedSetException mse = new MismatchedSetException(null,input);
throw mse;
}
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "comparisonoperator"
/** Return scope for the {@code existspredicate} rule; carries the rewritten AST node. */
public static class existspredicate_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "existspredicate"
// org/alfresco/rest/antlr/WhereClause.g:134:1: existspredicate : EXISTS LEFTPAREN ( WS )? PROPERTYNAME RIGHTPAREN -> ^( EXISTS PROPERTYNAME ) ;
/**
 * Parses the {@code existspredicate} rule: {@code EXISTS ( PROPERTYNAME )}
 * with optional whitespace after the opening parenthesis. The parentheses and
 * whitespace are dropped by the rewrite, producing {@code ^(EXISTS PROPERTYNAME)}.
 *
 * @return the rule's return scope whose {@code tree} is the EXISTS-rooted AST
 * @throws RecognitionException on a token mismatch
 */
public final WhereClauseParser.existspredicate_return existspredicate() throws RecognitionException {
WhereClauseParser.existspredicate_return retval = new WhereClauseParser.existspredicate_return();
retval.start = input.LT(1);
Object root_0 = null;
Token EXISTS26=null;
Token LEFTPAREN27=null;
Token WS28=null;
Token PROPERTYNAME29=null;
Token RIGHTPAREN30=null;
Object EXISTS26_tree=null;
Object LEFTPAREN27_tree=null;
Object WS28_tree=null;
Object PROPERTYNAME29_tree=null;
Object RIGHTPAREN30_tree=null;
// Rewrite streams buffer matched tokens for the "->" AST rewrite below.
RewriteRuleTokenStream stream_PROPERTYNAME=new RewriteRuleTokenStream(adaptor,"token PROPERTYNAME");
RewriteRuleTokenStream stream_LEFTPAREN=new RewriteRuleTokenStream(adaptor,"token LEFTPAREN");
RewriteRuleTokenStream stream_WS=new RewriteRuleTokenStream(adaptor,"token WS");
RewriteRuleTokenStream stream_EXISTS=new RewriteRuleTokenStream(adaptor,"token EXISTS");
RewriteRuleTokenStream stream_RIGHTPAREN=new RewriteRuleTokenStream(adaptor,"token RIGHTPAREN");
try {
// org/alfresco/rest/antlr/WhereClause.g:134:16: ( EXISTS LEFTPAREN ( WS )? PROPERTYNAME RIGHTPAREN -> ^( EXISTS PROPERTYNAME ) )
// org/alfresco/rest/antlr/WhereClause.g:134:18: EXISTS LEFTPAREN ( WS )? PROPERTYNAME RIGHTPAREN
{
EXISTS26=(Token)match(input,EXISTS,FOLLOW_EXISTS_in_existspredicate936);
stream_EXISTS.add(EXISTS26);
LEFTPAREN27=(Token)match(input,LEFTPAREN,FOLLOW_LEFTPAREN_in_existspredicate938);
stream_LEFTPAREN.add(LEFTPAREN27);
// org/alfresco/rest/antlr/WhereClause.g:134:35: ( WS )?
// Optional whitespace token after the opening parenthesis.
int alt9=2;
switch ( input.LA(1) ) {
case WS:
{
alt9=1;
}
break;
}
switch (alt9) {
case 1 :
// org/alfresco/rest/antlr/WhereClause.g:134:35: WS
{
WS28=(Token)match(input,WS,FOLLOW_WS_in_existspredicate940);
stream_WS.add(WS28);
}
break;
}
PROPERTYNAME29=(Token)match(input,PROPERTYNAME,FOLLOW_PROPERTYNAME_in_existspredicate943);
stream_PROPERTYNAME.add(PROPERTYNAME29);
RIGHTPAREN30=(Token)match(input,RIGHTPAREN,FOLLOW_RIGHTPAREN_in_existspredicate945);
stream_RIGHTPAREN.add(RIGHTPAREN30);
// AST REWRITE
// elements: EXISTS, PROPERTYNAME
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 134:63: -> ^( EXISTS PROPERTYNAME )
{
// org/alfresco/rest/antlr/WhereClause.g:134:66: ^( EXISTS PROPERTYNAME )
{
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot(
stream_EXISTS.nextNode()
, root_1);
adaptor.addChild(root_1,
stream_PROPERTYNAME.nextNode()
);
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "existspredicate"
/** Return scope for the {@code betweenpredicate} rule; carries the rewritten AST node. */
public static class betweenpredicate_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "betweenpredicate"
// org/alfresco/rest/antlr/WhereClause.g:135:1: betweenpredicate : PROPERTYNAME BETWEEN LEFTPAREN ( WS )? propertyvaluepair RIGHTPAREN -> ^( BETWEEN PROPERTYNAME propertyvaluepair ) ;
/**
 * Parses the {@code betweenpredicate} rule:
 * {@code PROPERTYNAME BETWEEN ( value , value )} with optional whitespace after
 * the opening parenthesis. Rewritten to {@code ^(BETWEEN PROPERTYNAME propertyvaluepair)};
 * the parentheses and whitespace are dropped.
 *
 * @return the rule's return scope whose {@code tree} is the BETWEEN-rooted AST
 * @throws RecognitionException on a token/sub-rule mismatch
 */
public final WhereClauseParser.betweenpredicate_return betweenpredicate() throws RecognitionException {
WhereClauseParser.betweenpredicate_return retval = new WhereClauseParser.betweenpredicate_return();
retval.start = input.LT(1);
Object root_0 = null;
Token PROPERTYNAME31=null;
Token BETWEEN32=null;
Token LEFTPAREN33=null;
Token WS34=null;
Token RIGHTPAREN36=null;
WhereClauseParser.propertyvaluepair_return propertyvaluepair35 =null;
Object PROPERTYNAME31_tree=null;
Object BETWEEN32_tree=null;
Object LEFTPAREN33_tree=null;
Object WS34_tree=null;
Object RIGHTPAREN36_tree=null;
// Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite below.
RewriteRuleTokenStream stream_PROPERTYNAME=new RewriteRuleTokenStream(adaptor,"token PROPERTYNAME");
RewriteRuleTokenStream stream_LEFTPAREN=new RewriteRuleTokenStream(adaptor,"token LEFTPAREN");
RewriteRuleTokenStream stream_WS=new RewriteRuleTokenStream(adaptor,"token WS");
RewriteRuleTokenStream stream_RIGHTPAREN=new RewriteRuleTokenStream(adaptor,"token RIGHTPAREN");
RewriteRuleTokenStream stream_BETWEEN=new RewriteRuleTokenStream(adaptor,"token BETWEEN");
RewriteRuleSubtreeStream stream_propertyvaluepair=new RewriteRuleSubtreeStream(adaptor,"rule propertyvaluepair");
try {
// org/alfresco/rest/antlr/WhereClause.g:135:17: ( PROPERTYNAME BETWEEN LEFTPAREN ( WS )? propertyvaluepair RIGHTPAREN -> ^( BETWEEN PROPERTYNAME propertyvaluepair ) )
// org/alfresco/rest/antlr/WhereClause.g:135:19: PROPERTYNAME BETWEEN LEFTPAREN ( WS )? propertyvaluepair RIGHTPAREN
{
PROPERTYNAME31=(Token)match(input,PROPERTYNAME,FOLLOW_PROPERTYNAME_in_betweenpredicate959);
stream_PROPERTYNAME.add(PROPERTYNAME31);
BETWEEN32=(Token)match(input,BETWEEN,FOLLOW_BETWEEN_in_betweenpredicate961);
stream_BETWEEN.add(BETWEEN32);
LEFTPAREN33=(Token)match(input,LEFTPAREN,FOLLOW_LEFTPAREN_in_betweenpredicate963);
stream_LEFTPAREN.add(LEFTPAREN33);
// org/alfresco/rest/antlr/WhereClause.g:135:50: ( WS )?
// Optional whitespace token after the opening parenthesis.
int alt10=2;
switch ( input.LA(1) ) {
case WS:
{
alt10=1;
}
break;
}
switch (alt10) {
case 1 :
// org/alfresco/rest/antlr/WhereClause.g:135:50: WS
{
WS34=(Token)match(input,WS,FOLLOW_WS_in_betweenpredicate965);
stream_WS.add(WS34);
}
break;
}
pushFollow(FOLLOW_propertyvaluepair_in_betweenpredicate968);
propertyvaluepair35=propertyvaluepair();
state._fsp--;
stream_propertyvaluepair.add(propertyvaluepair35.getTree());
RIGHTPAREN36=(Token)match(input,RIGHTPAREN,FOLLOW_RIGHTPAREN_in_betweenpredicate970);
stream_RIGHTPAREN.add(RIGHTPAREN36);
// AST REWRITE
// elements: propertyvaluepair, PROPERTYNAME, BETWEEN
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 135:83: -> ^( BETWEEN PROPERTYNAME propertyvaluepair )
{
// org/alfresco/rest/antlr/WhereClause.g:135:86: ^( BETWEEN PROPERTYNAME propertyvaluepair )
{
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot(
stream_BETWEEN.nextNode()
, root_1);
adaptor.addChild(root_1,
stream_PROPERTYNAME.nextNode()
);
adaptor.addChild(root_1, stream_propertyvaluepair.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "betweenpredicate"
/** Return scope for the {@code inpredicate} rule; carries the rewritten AST node. */
public static class inpredicate_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "inpredicate"
// org/alfresco/rest/antlr/WhereClause.g:136:1: inpredicate : PROPERTYNAME IN LEFTPAREN ( WS )? propertyvaluelist RIGHTPAREN -> ^( IN PROPERTYNAME propertyvaluelist ) ;
/**
 * Parses the {@code inpredicate} rule:
 * {@code PROPERTYNAME IN ( value [, value]* )} with optional whitespace after
 * the opening parenthesis. Rewritten to {@code ^(IN PROPERTYNAME propertyvaluelist)};
 * the parentheses and whitespace are dropped.
 *
 * @return the rule's return scope whose {@code tree} is the IN-rooted AST
 * @throws RecognitionException on a token/sub-rule mismatch
 */
public final WhereClauseParser.inpredicate_return inpredicate() throws RecognitionException {
WhereClauseParser.inpredicate_return retval = new WhereClauseParser.inpredicate_return();
retval.start = input.LT(1);
Object root_0 = null;
Token PROPERTYNAME37=null;
Token IN38=null;
Token LEFTPAREN39=null;
Token WS40=null;
Token RIGHTPAREN42=null;
WhereClauseParser.propertyvaluelist_return propertyvaluelist41 =null;
Object PROPERTYNAME37_tree=null;
Object IN38_tree=null;
Object LEFTPAREN39_tree=null;
Object WS40_tree=null;
Object RIGHTPAREN42_tree=null;
// Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite below.
RewriteRuleTokenStream stream_PROPERTYNAME=new RewriteRuleTokenStream(adaptor,"token PROPERTYNAME");
RewriteRuleTokenStream stream_LEFTPAREN=new RewriteRuleTokenStream(adaptor,"token LEFTPAREN");
RewriteRuleTokenStream stream_WS=new RewriteRuleTokenStream(adaptor,"token WS");
RewriteRuleTokenStream stream_IN=new RewriteRuleTokenStream(adaptor,"token IN");
RewriteRuleTokenStream stream_RIGHTPAREN=new RewriteRuleTokenStream(adaptor,"token RIGHTPAREN");
RewriteRuleSubtreeStream stream_propertyvaluelist=new RewriteRuleSubtreeStream(adaptor,"rule propertyvaluelist");
try {
// org/alfresco/rest/antlr/WhereClause.g:136:12: ( PROPERTYNAME IN LEFTPAREN ( WS )? propertyvaluelist RIGHTPAREN -> ^( IN PROPERTYNAME propertyvaluelist ) )
// org/alfresco/rest/antlr/WhereClause.g:136:14: PROPERTYNAME IN LEFTPAREN ( WS )? propertyvaluelist RIGHTPAREN
{
PROPERTYNAME37=(Token)match(input,PROPERTYNAME,FOLLOW_PROPERTYNAME_in_inpredicate986);
stream_PROPERTYNAME.add(PROPERTYNAME37);
IN38=(Token)match(input,IN,FOLLOW_IN_in_inpredicate988);
stream_IN.add(IN38);
LEFTPAREN39=(Token)match(input,LEFTPAREN,FOLLOW_LEFTPAREN_in_inpredicate990);
stream_LEFTPAREN.add(LEFTPAREN39);
// org/alfresco/rest/antlr/WhereClause.g:136:40: ( WS )?
// Optional whitespace token after the opening parenthesis.
int alt11=2;
switch ( input.LA(1) ) {
case WS:
{
alt11=1;
}
break;
}
switch (alt11) {
case 1 :
// org/alfresco/rest/antlr/WhereClause.g:136:40: WS
{
WS40=(Token)match(input,WS,FOLLOW_WS_in_inpredicate992);
stream_WS.add(WS40);
}
break;
}
pushFollow(FOLLOW_propertyvaluelist_in_inpredicate995);
propertyvaluelist41=propertyvaluelist();
state._fsp--;
stream_propertyvaluelist.add(propertyvaluelist41.getTree());
RIGHTPAREN42=(Token)match(input,RIGHTPAREN,FOLLOW_RIGHTPAREN_in_inpredicate997);
stream_RIGHTPAREN.add(RIGHTPAREN42);
// AST REWRITE
// elements: propertyvaluelist, IN, PROPERTYNAME
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 136:73: -> ^( IN PROPERTYNAME propertyvaluelist )
{
// org/alfresco/rest/antlr/WhereClause.g:136:76: ^( IN PROPERTYNAME propertyvaluelist )
{
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot(
stream_IN.nextNode()
, root_1);
adaptor.addChild(root_1,
stream_PROPERTYNAME.nextNode()
);
adaptor.addChild(root_1, stream_propertyvaluelist.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "inpredicate"
/** Return scope for the {@code matchespredicate} rule; carries the rewritten AST node. */
public static class matchespredicate_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "matchespredicate"
// org/alfresco/rest/antlr/WhereClause.g:137:1: matchespredicate : PROPERTYNAME MATCHES LEFTPAREN ( WS )? value RIGHTPAREN -> ^( MATCHES PROPERTYNAME value ) ;
/**
 * Parses the {@code matchespredicate} rule:
 * {@code PROPERTYNAME MATCHES ( value )} with optional whitespace after the
 * opening parenthesis. Rewritten to {@code ^(MATCHES PROPERTYNAME value)};
 * the parentheses and whitespace are dropped.
 *
 * @return the rule's return scope whose {@code tree} is the MATCHES-rooted AST
 * @throws RecognitionException on a token/sub-rule mismatch
 */
public final WhereClauseParser.matchespredicate_return matchespredicate() throws RecognitionException {
WhereClauseParser.matchespredicate_return retval = new WhereClauseParser.matchespredicate_return();
retval.start = input.LT(1);
Object root_0 = null;
Token PROPERTYNAME43=null;
Token MATCHES44=null;
Token LEFTPAREN45=null;
Token WS46=null;
Token RIGHTPAREN48=null;
WhereClauseParser.value_return value47 =null;
Object PROPERTYNAME43_tree=null;
Object MATCHES44_tree=null;
Object LEFTPAREN45_tree=null;
Object WS46_tree=null;
Object RIGHTPAREN48_tree=null;
// Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite below.
RewriteRuleTokenStream stream_PROPERTYNAME=new RewriteRuleTokenStream(adaptor,"token PROPERTYNAME");
RewriteRuleTokenStream stream_LEFTPAREN=new RewriteRuleTokenStream(adaptor,"token LEFTPAREN");
RewriteRuleTokenStream stream_WS=new RewriteRuleTokenStream(adaptor,"token WS");
RewriteRuleTokenStream stream_MATCHES=new RewriteRuleTokenStream(adaptor,"token MATCHES");
RewriteRuleTokenStream stream_RIGHTPAREN=new RewriteRuleTokenStream(adaptor,"token RIGHTPAREN");
RewriteRuleSubtreeStream stream_value=new RewriteRuleSubtreeStream(adaptor,"rule value");
try {
// org/alfresco/rest/antlr/WhereClause.g:137:17: ( PROPERTYNAME MATCHES LEFTPAREN ( WS )? value RIGHTPAREN -> ^( MATCHES PROPERTYNAME value ) )
// org/alfresco/rest/antlr/WhereClause.g:137:19: PROPERTYNAME MATCHES LEFTPAREN ( WS )? value RIGHTPAREN
{
PROPERTYNAME43=(Token)match(input,PROPERTYNAME,FOLLOW_PROPERTYNAME_in_matchespredicate1013);
stream_PROPERTYNAME.add(PROPERTYNAME43);
MATCHES44=(Token)match(input,MATCHES,FOLLOW_MATCHES_in_matchespredicate1015);
stream_MATCHES.add(MATCHES44);
LEFTPAREN45=(Token)match(input,LEFTPAREN,FOLLOW_LEFTPAREN_in_matchespredicate1017);
stream_LEFTPAREN.add(LEFTPAREN45);
// org/alfresco/rest/antlr/WhereClause.g:137:50: ( WS )?
// Optional whitespace token after the opening parenthesis.
int alt12=2;
switch ( input.LA(1) ) {
case WS:
{
alt12=1;
}
break;
}
switch (alt12) {
case 1 :
// org/alfresco/rest/antlr/WhereClause.g:137:50: WS
{
WS46=(Token)match(input,WS,FOLLOW_WS_in_matchespredicate1019);
stream_WS.add(WS46);
}
break;
}
pushFollow(FOLLOW_value_in_matchespredicate1022);
value47=value();
state._fsp--;
stream_value.add(value47.getTree());
RIGHTPAREN48=(Token)match(input,RIGHTPAREN,FOLLOW_RIGHTPAREN_in_matchespredicate1024);
stream_RIGHTPAREN.add(RIGHTPAREN48);
// AST REWRITE
// elements: PROPERTYNAME, MATCHES, value
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 137:71: -> ^( MATCHES PROPERTYNAME value )
{
// org/alfresco/rest/antlr/WhereClause.g:137:74: ^( MATCHES PROPERTYNAME value )
{
Object root_1 = (Object)adaptor.nil();
root_1 = (Object)adaptor.becomeRoot(
stream_MATCHES.nextNode()
, root_1);
adaptor.addChild(root_1,
stream_PROPERTYNAME.nextNode()
);
adaptor.addChild(root_1, stream_value.nextTree());
adaptor.addChild(root_0, root_1);
}
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "matchespredicate"
/** Return scope for the {@code propertyvaluepair} rule; carries the rewritten AST node. */
public static class propertyvaluepair_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "propertyvaluepair"
// org/alfresco/rest/antlr/WhereClause.g:138:1: propertyvaluepair : value COMMA value -> ( value )+ ;
/**
 * Parses the {@code propertyvaluepair} rule: exactly two values separated by a
 * comma (as used by BETWEEN). The rewrite emits the two value trees as a flat
 * sibling list, dropping the comma.
 *
 * @return the rule's return scope whose {@code tree} holds the two value nodes
 * @throws RecognitionException on a token/sub-rule mismatch
 */
public final WhereClauseParser.propertyvaluepair_return propertyvaluepair() throws RecognitionException {
WhereClauseParser.propertyvaluepair_return retval = new WhereClauseParser.propertyvaluepair_return();
retval.start = input.LT(1);
Object root_0 = null;
Token COMMA50=null;
WhereClauseParser.value_return value49 =null;
WhereClauseParser.value_return value51 =null;
Object COMMA50_tree=null;
// Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite below.
RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
RewriteRuleSubtreeStream stream_value=new RewriteRuleSubtreeStream(adaptor,"rule value");
try {
// org/alfresco/rest/antlr/WhereClause.g:138:18: ( value COMMA value -> ( value )+ )
// org/alfresco/rest/antlr/WhereClause.g:138:20: value COMMA value
{
pushFollow(FOLLOW_value_in_propertyvaluepair1040);
value49=value();
state._fsp--;
stream_value.add(value49.getTree());
COMMA50=(Token)match(input,COMMA,FOLLOW_COMMA_in_propertyvaluepair1042);
stream_COMMA.add(COMMA50);
pushFollow(FOLLOW_value_in_propertyvaluepair1044);
value51=value();
state._fsp--;
stream_value.add(value51.getTree());
// AST REWRITE
// elements: value
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 138:38: -> ( value )+
{
if ( !(stream_value.hasNext()) ) {
throw new RewriteEarlyExitException();
}
while ( stream_value.hasNext() ) {
adaptor.addChild(root_0, stream_value.nextTree());
}
stream_value.reset();
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "propertyvaluepair"
/** Return scope for the {@code propertyvaluelist} rule; carries the rewritten AST node. */
public static class propertyvaluelist_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "propertyvaluelist"
// org/alfresco/rest/antlr/WhereClause.g:139:1: propertyvaluelist : value ( COMMA value )* -> ( value )+ ;
/**
 * Parses the {@code propertyvaluelist} rule: one or more comma-separated
 * values (as used by IN). The rewrite emits all value trees as a flat sibling
 * list, dropping the commas.
 *
 * @return the rule's return scope whose {@code tree} holds the value nodes
 * @throws RecognitionException on a token/sub-rule mismatch
 */
public final WhereClauseParser.propertyvaluelist_return propertyvaluelist() throws RecognitionException {
WhereClauseParser.propertyvaluelist_return retval = new WhereClauseParser.propertyvaluelist_return();
retval.start = input.LT(1);
Object root_0 = null;
Token COMMA53=null;
WhereClauseParser.value_return value52 =null;
WhereClauseParser.value_return value54 =null;
Object COMMA53_tree=null;
// Rewrite streams buffer matched tokens/subtrees for the "->" AST rewrite below.
RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
RewriteRuleSubtreeStream stream_value=new RewriteRuleSubtreeStream(adaptor,"rule value");
try {
// org/alfresco/rest/antlr/WhereClause.g:139:18: ( value ( COMMA value )* -> ( value )+ )
// org/alfresco/rest/antlr/WhereClause.g:139:20: value ( COMMA value )*
{
pushFollow(FOLLOW_value_in_propertyvaluelist1055);
value52=value();
state._fsp--;
stream_value.add(value52.getTree());
// org/alfresco/rest/antlr/WhereClause.g:139:26: ( COMMA value )*
// Zero-or-more loop: keep consuming ", value" while the lookahead is a comma.
loop13:
do {
int alt13=2;
switch ( input.LA(1) ) {
case COMMA:
{
alt13=1;
}
break;
}
switch (alt13) {
case 1 :
// org/alfresco/rest/antlr/WhereClause.g:139:27: COMMA value
{
COMMA53=(Token)match(input,COMMA,FOLLOW_COMMA_in_propertyvaluelist1058);
stream_COMMA.add(COMMA53);
pushFollow(FOLLOW_value_in_propertyvaluelist1060);
value54=value();
state._fsp--;
stream_value.add(value54.getTree());
}
break;
default :
break loop13;
}
} while (true);
// AST REWRITE
// elements: value
// token labels:
// rule labels: retval
// token list labels:
// rule list labels:
// wildcard labels:
retval.tree = root_0;
RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
root_0 = (Object)adaptor.nil();
// 139:41: -> ( value )+
{
if ( !(stream_value.hasNext()) ) {
throw new RewriteEarlyExitException();
}
while ( stream_value.hasNext() ) {
adaptor.addChild(root_0, stream_value.nextTree());
}
stream_value.reset();
}
retval.tree = root_0;
}
retval.stop = input.LT(-1);
retval.tree = (Object)adaptor.rulePostProcessing(root_0);
adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
}
catch(RecognitionException e)
{
throw e;
}
finally {
// do for sure before leaving
}
return retval;
}
// $ANTLR end "propertyvaluelist"
/** Return scope for the {@code value} rule; carries the rewritten AST node. */
public static class value_return extends ParserRuleReturnScope {
    Object tree;

    /** @return the AST built for this rule invocation */
    public Object getTree() {
        return tree;
    }
};
// $ANTLR start "value"
// org/alfresco/rest/antlr/WhereClause.g:140:1: value : (a= PROPERTYVALUE -> ^( PROPERTYVALUE[$a] ) |b= PROPERTYNAME -> ^( PROPERTYVALUE[$b] ) );
/**
 * Generated by ANTLR from rule {@code value} (WhereClause.g:140): matches a
 * single PROPERTYVALUE or PROPERTYNAME token and rewrites either one into a
 * one-node AST whose root is a PROPERTYVALUE token, so downstream tree walkers
 * see a uniform node type for literal values and bare property names alike.
 * Do not edit by hand; regenerate from the grammar instead.
 */
public final WhereClauseParser.value_return value() throws RecognitionException {
    WhereClauseParser.value_return retval = new WhereClauseParser.value_return();
    retval.start = input.LT(1);
    Object root_0 = null;
    Token a=null;
    Token b=null;
    Object a_tree=null;
    Object b_tree=null;
    // Streams buffering matched tokens for the AST rewrite below.
    RewriteRuleTokenStream stream_PROPERTYNAME=new RewriteRuleTokenStream(adaptor,"token PROPERTYNAME");
    RewriteRuleTokenStream stream_PROPERTYVALUE=new RewriteRuleTokenStream(adaptor,"token PROPERTYVALUE");
    try {
        // org/alfresco/rest/antlr/WhereClause.g:140:6: (a= PROPERTYVALUE -> ^( PROPERTYVALUE[$a] ) |b= PROPERTYNAME -> ^( PROPERTYVALUE[$b] ) )
        int alt14=2;
        // One token of lookahead is enough to choose the alternative.
        switch ( input.LA(1) ) {
        case PROPERTYVALUE:
            {
            alt14=1;
            }
            break;
        case PROPERTYNAME:
            {
            alt14=2;
            }
            break;
        default:
            NoViableAltException nvae =
                new NoViableAltException("", 14, 0, input);
            throw nvae;
        }
        switch (alt14) {
            case 1 :
                // org/alfresco/rest/antlr/WhereClause.g:140:8: a= PROPERTYVALUE
                {
                a=(Token)match(input,PROPERTYVALUE,FOLLOW_PROPERTYVALUE_in_value1075);
                stream_PROPERTYVALUE.add(a);
                // AST REWRITE
                // elements: PROPERTYVALUE
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                root_0 = (Object)adaptor.nil();
                // 140:24: -> ^( PROPERTYVALUE[$a] )
                {
                    // org/alfresco/rest/antlr/WhereClause.g:140:27: ^( PROPERTYVALUE[$a] )
                    {
                    Object root_1 = (Object)adaptor.nil();
                    root_1 = (Object)adaptor.becomeRoot(
                    (Object)adaptor.create(PROPERTYVALUE, a)
                    , root_1);
                    adaptor.addChild(root_0, root_1);
                    }
                }
                retval.tree = root_0;
                }
                break;
            case 2 :
                // org/alfresco/rest/antlr/WhereClause.g:141:9: b= PROPERTYNAME
                {
                b=(Token)match(input,PROPERTYNAME,FOLLOW_PROPERTYNAME_in_value1095);
                stream_PROPERTYNAME.add(b);
                // AST REWRITE
                // elements:
                // token labels:
                // rule labels: retval
                // token list labels:
                // rule list labels:
                // wildcard labels:
                retval.tree = root_0;
                RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
                root_0 = (Object)adaptor.nil();
                // 141:24: -> ^( PROPERTYVALUE[$b] )
                {
                    // org/alfresco/rest/antlr/WhereClause.g:141:27: ^( PROPERTYVALUE[$b] )
                    {
                    Object root_1 = (Object)adaptor.nil();
                    root_1 = (Object)adaptor.becomeRoot(
                    (Object)adaptor.create(PROPERTYVALUE, b)
                    , root_1);
                    adaptor.addChild(root_0, root_1);
                    }
                }
                retval.tree = root_0;
                }
                break;
        }
        retval.stop = input.LT(-1);
        retval.tree = (Object)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
    }
    catch(RecognitionException e)
    {
        throw e;
    }
    finally {
        // do for sure before leaving
    }
    return retval;
}
// $ANTLR end "value"
/**
 * Return scope for the {@code selectClause} rule: carries the flat sequence of
 * PROPERTYNAME AST nodes produced by the rewrite.  Generated by ANTLR; do not
 * edit by hand.
 */
public static class selectClause_return extends ParserRuleReturnScope {
    // Root of the AST fragment produced by the rewrite ("->") section.
    Object tree;
    public Object getTree() { return tree; }
};
// $ANTLR start "selectClause"
// org/alfresco/rest/antlr/WhereClause.g:142:1: selectClause : PROPERTYNAME ( COMMA PROPERTYNAME )* -> ( PROPERTYNAME )+ ;
/**
 * Generated by ANTLR from rule {@code selectClause} (WhereClause.g:142):
 * parses a comma-separated list of PROPERTYNAME tokens and rewrites it into a
 * flat sequence of PROPERTYNAME AST nodes (the COMMA separators are dropped
 * by the rewrite).  Do not edit by hand; regenerate from the grammar instead.
 */
public final WhereClauseParser.selectClause_return selectClause() throws RecognitionException {
    WhereClauseParser.selectClause_return retval = new WhereClauseParser.selectClause_return();
    retval.start = input.LT(1);
    Object root_0 = null;
    Token PROPERTYNAME55=null;
    Token COMMA56=null;
    Token PROPERTYNAME57=null;
    Object PROPERTYNAME55_tree=null;
    Object COMMA56_tree=null;
    Object PROPERTYNAME57_tree=null;
    // Streams buffering matched tokens for the AST rewrite below.
    RewriteRuleTokenStream stream_PROPERTYNAME=new RewriteRuleTokenStream(adaptor,"token PROPERTYNAME");
    RewriteRuleTokenStream stream_COMMA=new RewriteRuleTokenStream(adaptor,"token COMMA");
    try {
        // org/alfresco/rest/antlr/WhereClause.g:142:13: ( PROPERTYNAME ( COMMA PROPERTYNAME )* -> ( PROPERTYNAME )+ )
        // org/alfresco/rest/antlr/WhereClause.g:142:16: PROPERTYNAME ( COMMA PROPERTYNAME )*
        {
        PROPERTYNAME55=(Token)match(input,PROPERTYNAME,FOLLOW_PROPERTYNAME_in_selectClause1111);
        stream_PROPERTYNAME.add(PROPERTYNAME55);
        // org/alfresco/rest/antlr/WhereClause.g:142:29: ( COMMA PROPERTYNAME )*
        // Consume "(COMMA PROPERTYNAME)" pairs until the next token is not a COMMA.
        loop15:
        do {
            int alt15=2;
            switch ( input.LA(1) ) {
            case COMMA:
                {
                alt15=1;
                }
                break;
            }
            switch (alt15) {
            case 1 :
                // org/alfresco/rest/antlr/WhereClause.g:142:30: COMMA PROPERTYNAME
                {
                COMMA56=(Token)match(input,COMMA,FOLLOW_COMMA_in_selectClause1114);
                stream_COMMA.add(COMMA56);
                PROPERTYNAME57=(Token)match(input,PROPERTYNAME,FOLLOW_PROPERTYNAME_in_selectClause1116);
                stream_PROPERTYNAME.add(PROPERTYNAME57);
                }
                break;
            default :
                break loop15;
            }
        } while (true);
        // AST REWRITE
        // elements: PROPERTYNAME
        // token labels:
        // rule labels: retval
        // token list labels:
        // rule list labels:
        // wildcard labels:
        retval.tree = root_0;
        RewriteRuleSubtreeStream stream_retval=new RewriteRuleSubtreeStream(adaptor,"rule retval",retval!=null?retval.tree:null);
        root_0 = (Object)adaptor.nil();
        // 142:51: -> ( PROPERTYNAME )+
        {
            if ( !(stream_PROPERTYNAME.hasNext()) ) {
                throw new RewriteEarlyExitException();
            }
            while ( stream_PROPERTYNAME.hasNext() ) {
                adaptor.addChild(root_0,
                stream_PROPERTYNAME.nextNode()
                );
            }
            stream_PROPERTYNAME.reset();
        }
        retval.tree = root_0;
        }
        retval.stop = input.LT(-1);
        retval.tree = (Object)adaptor.rulePostProcessing(root_0);
        adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop);
    }
    catch(RecognitionException e)
    {
        throw e;
    }
    finally {
        // do for sure before leaving
    }
    return retval;
}
// $ANTLR end "selectClause"
// Delegated rules
protected DFA6 dfa6 = new DFA6(this);
static final String DFA6_eotS =
"\107\uffff";
static final String DFA6_eofS =
"\107\uffff";
static final String DFA6_minS =
"\1\10\1\5\1\17\1\10\3\17\2\25\1\5\1\17\3\25\2\4\1\25\1\27\3\17\3"+
"\25\2\6\1\25\2\6\1\25\2\27\3\uffff\1\4\3\25\2\4\1\25\1\27\2\25\2"+
"\4\1\25\2\6\1\25\2\6\1\25\2\27\1\4\2\27\2\6\2\25\3\4\2\27\2\6\1"+
"\4";
static final String DFA6_maxS =
"\1\25\1\22\1\17\1\25\3\17\1\26\1\31\1\22\1\17\3\31\2\27\1\25\1\27"+
"\3\17\1\26\1\31\1\26\2\6\1\26\2\27\1\26\2\27\3\uffff\1\27\3\31\2"+
"\27\1\25\1\27\2\26\2\27\1\26\2\6\1\26\2\27\1\26\7\27\2\26\10\27";
static final String DFA6_acceptS =
"\40\uffff\1\1\1\2\1\3\44\uffff";
static final String DFA6_specialS =
"\107\uffff}>";
static final String[] DFA6_transitionS = {
"\1\2\12\uffff\1\3\1\uffff\1\1",
"\1\4\1\uffff\1\7\1\uffff\2\7\3\uffff\1\5\1\uffff\2\7\1\6",
"\1\10",
"\1\12\14\uffff\1\11",
"\1\13",
"\1\14",
"\1\15",
"\1\17\1\16",
"\1\21\3\uffff\1\20",
"\1\22\1\uffff\1\25\1\uffff\2\25\3\uffff\1\23\1\uffff\2\25\1"+
"\24",
"\1\26",
"\1\31\1\30\2\uffff\1\27",
"\1\34\1\33\2\uffff\1\32",
"\1\37\1\36\2\uffff\1\35",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\21",
"\1\43",
"\1\44",
"\1\45",
"\1\46",
"\1\50\1\47",
"\1\52\3\uffff\1\51",
"\1\31\1\30",
"\1\53",
"\1\53",
"\1\34\1\33",
"\1\54\20\uffff\1\55",
"\1\54\20\uffff\1\55",
"\1\37\1\36",
"\1\56",
"\1\56",
"",
"",
"",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\61\1\60\2\uffff\1\57",
"\1\64\1\63\2\uffff\1\62",
"\1\67\1\66\2\uffff\1\65",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\52",
"\1\70",
"\1\72\1\71",
"\1\74\1\73",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\61\1\60",
"\1\75",
"\1\75",
"\1\64\1\63",
"\1\76\20\uffff\1\77",
"\1\76\20\uffff\1\77",
"\1\67\1\66",
"\1\100",
"\1\100",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\101",
"\1\101",
"\1\54\20\uffff\1\55",
"\1\54\20\uffff\1\55",
"\1\103\1\102",
"\1\105\1\104",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\41\17\uffff\1\42\2\uffff\1\40",
"\1\106",
"\1\106",
"\1\76\20\uffff\1\77",
"\1\76\20\uffff\1\77",
"\1\41\17\uffff\1\42\2\uffff\1\40"
};
static final short[] DFA6_eot = DFA.unpackEncodedString(DFA6_eotS);
static final short[] DFA6_eof = DFA.unpackEncodedString(DFA6_eofS);
static final char[] DFA6_min = DFA.unpackEncodedStringToUnsignedChars(DFA6_minS);
static final char[] DFA6_max = DFA.unpackEncodedStringToUnsignedChars(DFA6_maxS);
static final short[] DFA6_accept = DFA.unpackEncodedString(DFA6_acceptS);
static final short[] DFA6_special = DFA.unpackEncodedString(DFA6_specialS);
static final short[][] DFA6_transition;
// Unpack the run-length-encoded DFA6_transitionS strings (declared above)
// into the short[][] transition table consumed by DFA6; runs once at
// class-load time.
static {
    int numStates = DFA6_transitionS.length;
    DFA6_transition = new short[numStates][];
    for (int i=0; i<numStates; i++) {
        DFA6_transition[i] = DFA.unpackEncodedString(DFA6_transitionS[i]);
    }
}
/**
 * Lookahead DFA for decision 6 — the {@code predicate} rule, as shown by
 * {@link #getDescription()}: it distinguishes a lone simple predicate from
 * AND-joined and OR-joined predicate lists.  The constructor only wires the
 * pre-unpacked static tables into the ANTLR runtime {@code DFA} fields.
 */
class DFA6 extends DFA {
    public DFA6(BaseRecognizer recognizer) {
        this.recognizer = recognizer;
        this.decisionNumber = 6;
        this.eot = DFA6_eot;
        this.eof = DFA6_eof;
        this.min = DFA6_min;
        this.max = DFA6_max;
        this.accept = DFA6_accept;
        this.special = DFA6_special;
        this.transition = DFA6_transition;
    }
    public String getDescription() {
        return "126:1: predicate : ( simplepredicate | simplepredicate ( AND simplepredicate )+ -> ^( AND ( simplepredicate )+ ) | simplepredicate ( OR simplepredicate )+ -> ^( OR ( simplepredicate )+ ) );";
    }
}
public static final BitSet FOLLOW_WS_in_whereclause750 = new BitSet(new long[]{0x0000000000008000L});
public static final BitSet FOLLOW_LEFTPAREN_in_whereclause753 = new BitSet(new long[]{0x0000000002280100L});
public static final BitSet FOLLOW_WS_in_whereclause756 = new BitSet(new long[]{0x0000000000280100L});
public static final BitSet FOLLOW_predicate_in_whereclause759 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_RIGHTPAREN_in_whereclause761 = new BitSet(new long[]{0x0000000002000002L});
public static final BitSet FOLLOW_WS_in_whereclause764 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_simplepredicate_in_predicate772 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_simplepredicate_in_predicate786 = new BitSet(new long[]{0x0000000000000010L});
public static final BitSet FOLLOW_AND_in_predicate789 = new BitSet(new long[]{0x0000000000280100L});
public static final BitSet FOLLOW_simplepredicate_in_predicate791 = new BitSet(new long[]{0x0000000000000012L});
public static final BitSet FOLLOW_simplepredicate_in_predicate816 = new BitSet(new long[]{0x0000000000100000L});
public static final BitSet FOLLOW_OR_in_predicate819 = new BitSet(new long[]{0x0000000000280100L});
public static final BitSet FOLLOW_simplepredicate_in_predicate821 = new BitSet(new long[]{0x0000000000100002L});
public static final BitSet FOLLOW_allowedpredicates_in_simplepredicate839 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_NEGATION_in_simplepredicate863 = new BitSet(new long[]{0x0000000000200100L});
public static final BitSet FOLLOW_allowedpredicates_in_simplepredicate865 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_comparisonpredicate_in_allowedpredicates880 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_existspredicate_in_allowedpredicates884 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_betweenpredicate_in_allowedpredicates888 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_inpredicate_in_allowedpredicates892 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_matchespredicate_in_allowedpredicates896 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PROPERTYNAME_in_comparisonpredicate902 = new BitSet(new long[]{0x0000000000030680L});
public static final BitSet FOLLOW_comparisonoperator_in_comparisonpredicate904 = new BitSet(new long[]{0x0000000000600000L});
public static final BitSet FOLLOW_value_in_comparisonpredicate906 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_EXISTS_in_existspredicate936 = new BitSet(new long[]{0x0000000000008000L});
public static final BitSet FOLLOW_LEFTPAREN_in_existspredicate938 = new BitSet(new long[]{0x0000000002200000L});
public static final BitSet FOLLOW_WS_in_existspredicate940 = new BitSet(new long[]{0x0000000000200000L});
public static final BitSet FOLLOW_PROPERTYNAME_in_existspredicate943 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_RIGHTPAREN_in_existspredicate945 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PROPERTYNAME_in_betweenpredicate959 = new BitSet(new long[]{0x0000000000000020L});
public static final BitSet FOLLOW_BETWEEN_in_betweenpredicate961 = new BitSet(new long[]{0x0000000000008000L});
public static final BitSet FOLLOW_LEFTPAREN_in_betweenpredicate963 = new BitSet(new long[]{0x0000000002600000L});
public static final BitSet FOLLOW_WS_in_betweenpredicate965 = new BitSet(new long[]{0x0000000000600000L});
public static final BitSet FOLLOW_propertyvaluepair_in_betweenpredicate968 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_RIGHTPAREN_in_betweenpredicate970 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PROPERTYNAME_in_inpredicate986 = new BitSet(new long[]{0x0000000000004000L});
public static final BitSet FOLLOW_IN_in_inpredicate988 = new BitSet(new long[]{0x0000000000008000L});
public static final BitSet FOLLOW_LEFTPAREN_in_inpredicate990 = new BitSet(new long[]{0x0000000002600000L});
public static final BitSet FOLLOW_WS_in_inpredicate992 = new BitSet(new long[]{0x0000000000600000L});
public static final BitSet FOLLOW_propertyvaluelist_in_inpredicate995 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_RIGHTPAREN_in_inpredicate997 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PROPERTYNAME_in_matchespredicate1013 = new BitSet(new long[]{0x0000000000040000L});
public static final BitSet FOLLOW_MATCHES_in_matchespredicate1015 = new BitSet(new long[]{0x0000000000008000L});
public static final BitSet FOLLOW_LEFTPAREN_in_matchespredicate1017 = new BitSet(new long[]{0x0000000002600000L});
public static final BitSet FOLLOW_WS_in_matchespredicate1019 = new BitSet(new long[]{0x0000000000600000L});
public static final BitSet FOLLOW_value_in_matchespredicate1022 = new BitSet(new long[]{0x0000000000800000L});
public static final BitSet FOLLOW_RIGHTPAREN_in_matchespredicate1024 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_value_in_propertyvaluepair1040 = new BitSet(new long[]{0x0000000000000040L});
public static final BitSet FOLLOW_COMMA_in_propertyvaluepair1042 = new BitSet(new long[]{0x0000000000600000L});
public static final BitSet FOLLOW_value_in_propertyvaluepair1044 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_value_in_propertyvaluelist1055 = new BitSet(new long[]{0x0000000000000042L});
public static final BitSet FOLLOW_COMMA_in_propertyvaluelist1058 = new BitSet(new long[]{0x0000000000600000L});
public static final BitSet FOLLOW_value_in_propertyvaluelist1060 = new BitSet(new long[]{0x0000000000000042L});
public static final BitSet FOLLOW_PROPERTYVALUE_in_value1075 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PROPERTYNAME_in_value1095 = new BitSet(new long[]{0x0000000000000002L});
public static final BitSet FOLLOW_PROPERTYNAME_in_selectClause1111 = new BitSet(new long[]{0x0000000000000042L});
public static final BitSet FOLLOW_COMMA_in_selectClause1114 = new BitSet(new long[]{0x0000000000200000L});
public static final BitSet FOLLOW_PROPERTYNAME_in_selectClause1116 = new BitSet(new long[]{0x0000000000000042L});
} | lgpl-3.0 |
Tybion/community-edition | projects/repository/source/test-java/org/alfresco/util/test/testusers/TestUserComponent.java | 1416 | /*
* Copyright (C) 2005-2013 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.util.test.testusers;
import org.alfresco.service.cmr.repository.NodeRef;
/**
* This interface defines a software test component, which is responsible for the creation and deletion
* of Alfresco users - to be used when running integration tests.
*
* @author Neil Mc Erlean
* @since 4.2
*/
public interface TestUserComponent
{
    /**
     * Creates a test user with the specified username.
     *
     * @param userName the username of the test user to create
     * @return a {@link NodeRef} identifying the created user
     */
    NodeRef createTestUser(String userName);

    /**
     * Deletes the test user with the specified username.
     *
     * @param userName the username of the test user to delete
     */
    void deleteTestUser(String userName);
}
| lgpl-3.0 |
chanakaudaya/developer-studio | esb/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/DBLookupMediatorItemProvider.java | 5314 | /**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.gmf.esb.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.ecore.EStructuralFeature;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.eclipse.emf.edit.provider.ViewerNotification;
import org.wso2.developerstudio.eclipse.gmf.esb.DBLookupMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbPackage;
/**
* This is the item provider adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.DBLookupMediator} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class DBLookupMediatorItemProvider
    extends AbstractSqlExecutorMediatorItemProvider
    implements
        IEditingDomainItemProvider,
        IStructuredItemContentProvider,
        ITreeItemContentProvider,
        IItemLabelProvider,
        IItemPropertySource {
    /**
     * This constructs an instance from a factory and a notifier.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public DBLookupMediatorItemProvider(AdapterFactory adapterFactory) {
        super(adapterFactory);
    }

    /**
     * This returns the property descriptors for the adapted class.
     * <!-- begin-user-doc -->
     * Unlike the default generated pattern (which caches the list), this
     * implementation clears and rebuilds the descriptor list on every call,
     * then appends the description property after the inherited descriptors.
     * <!-- end-user-doc -->
     * @generated NOT
     */
    @Override
    public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
        if (itemPropertyDescriptors != null) {
            itemPropertyDescriptors.clear();
        }
        super.getPropertyDescriptors(object);
        addDescriptionPropertyDescriptor(object);
        return itemPropertyDescriptors;
    }

    /**
     * This specifies how to implement {@link #getChildren} and is used to deduce an appropriate feature for an
     * {@link org.eclipse.emf.edit.command.AddCommand}, {@link org.eclipse.emf.edit.command.RemoveCommand} or
     * {@link org.eclipse.emf.edit.command.MoveCommand} in {@link #createCommand}.
     * <!-- begin-user-doc -->
     * The input and output connectors of the DBLookup mediator are exposed as
     * child features; the list is built lazily and cached in childrenFeatures.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Collection<? extends EStructuralFeature> getChildrenFeatures(Object object) {
        if (childrenFeatures == null) {
            super.getChildrenFeatures(object);
            childrenFeatures.add(EsbPackage.Literals.DB_LOOKUP_MEDIATOR__INPUT_CONNECTOR);
            childrenFeatures.add(EsbPackage.Literals.DB_LOOKUP_MEDIATOR__OUTPUT_CONNECTOR);
        }
        return childrenFeatures;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected EStructuralFeature getChildFeature(Object object, Object child) {
        // Check the type of the specified child object and return the proper feature to use for
        // adding (see {@link AddCommand}) it as a child.
        return super.getChildFeature(object, child);
    }

    /**
     * This returns DBLookupMediator.gif.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public Object getImage(Object object) {
        return overlayImage(object, getResourceLocator().getImage("full/obj16/DBLookupMediator"));
    }

    /**
     * This returns the label text for the adapted class.
     * <!-- begin-user-doc -->
     * Uses the mediator's connection data-source name as the label suffix,
     * falling back to the bare type name when it is null or empty.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public String getText(Object object) {
        String label = ((DBLookupMediator)object).getConnectionDsName();
        return label == null || label.length() == 0 ?
            getString("_UI_DBLookupMediator_type") :
            getString("_UI_DBLookupMediator_type") + " " + label;
    }

    /**
     * This handles model notifications by calling {@link #updateChildren} to update any cached
     * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
     * <!-- begin-user-doc -->
     * Connector changes refresh the item's structure (children) but not its
     * label, hence the (true, false) flags on the ViewerNotification.
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    public void notifyChanged(Notification notification) {
        updateChildren(notification);
        switch (notification.getFeatureID(DBLookupMediator.class)) {
            case EsbPackage.DB_LOOKUP_MEDIATOR__INPUT_CONNECTOR:
            case EsbPackage.DB_LOOKUP_MEDIATOR__OUTPUT_CONNECTOR:
                fireNotifyChanged(new ViewerNotification(notification, notification.getNotifier(), true, false));
                return;
        }
        super.notifyChanged(notification);
    }

    /**
     * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
     * that can be created under this object.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    @Override
    protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
        super.collectNewChildDescriptors(newChildDescriptors, object);
        newChildDescriptors.add
            (createChildParameter
                (EsbPackage.Literals.DB_LOOKUP_MEDIATOR__INPUT_CONNECTOR,
                 EsbFactory.eINSTANCE.createDBLookupMediatorInputConnector()));
        newChildDescriptors.add
            (createChildParameter
                (EsbPackage.Literals.DB_LOOKUP_MEDIATOR__OUTPUT_CONNECTOR,
                 EsbFactory.eINSTANCE.createDBLookupMediatorOutputConnector()));
    }
}
| apache-2.0 |
WANdisco/amplab-hive | ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedColumnarSerDe.java | 10845 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.vector;
import java.nio.ByteBuffer;
import java.sql.Timestamp;
import java.util.List;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable;
import org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe;
import org.apache.hadoop.hive.serde2.io.DateWritable;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;
import org.apache.hadoop.hive.serde2.lazy.LazyDate;
import org.apache.hadoop.hive.serde2.lazy.LazyLong;
import org.apache.hadoop.hive.serde2.lazy.LazyTimestamp;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
/**
* VectorizedColumnarSerDe is used by Vectorized query execution engine
* for columnar based storage supported by RCFile.
*/
public class VectorizedColumnarSerDe extends ColumnarSerDe implements VectorizedSerde {

  public VectorizedColumnarSerDe() throws SerDeException {
  }

  // Reusable per-row output buffers, one slot per possible batch row, plus the
  // shared byte stream; reused across calls to avoid per-row allocation.
  private final BytesRefArrayWritable[] byteRefArray = new BytesRefArrayWritable[VectorizedRowBatch.DEFAULT_SIZE];
  private final ObjectWritable ow = new ObjectWritable();
  private final ByteStream.Output serializeVectorStream = new ByteStream.Output();

  /**
   * Serialize a vectorized row batch.
   *
   * @param vrg
   *          Vectorized row batch to serialize
   * @param objInspector
   *          The ObjectInspector for the row object; must be a struct inspector
   * @return The serialized Writable object (an ObjectWritable wrapping the
   *         reusable BytesRefArrayWritable[] row buffers)
   * @throws SerDeException
   * @see SerDe#serialize(Object, ObjectInspector)
   */
  @Override
  public Writable serializeVector(VectorizedRowBatch vrg, ObjectInspector objInspector)
      throws SerDeException {
    try {
      // Validate that the OI is of struct type
      if (objInspector.getCategory() != Category.STRUCT) {
        throw new UnsupportedOperationException(getClass().toString()
            + " can only serialize struct types, but we got: "
            + objInspector.getTypeName());
      }

      VectorizedRowBatch batch = (VectorizedRowBatch) vrg;
      StructObjectInspector soi = (StructObjectInspector) objInspector;
      List<? extends StructField> fields = soi.getAllStructFieldRefs();

      // Reset the shared byte buffer; "count" tracks the offset of the
      // previous column's end so each column's bytes can be referenced
      // without copying.
      serializeVectorStream.reset();
      int count = 0;
      int rowIndex = 0;
      for (int i = 0; i < batch.size; i++) {

        // If selectedInUse is true then we need to serialize only
        // the selected indexes
        if (batch.selectedInUse) {
          rowIndex = batch.selected[i];
        } else {
          rowIndex = i;
        }

        BytesRefArrayWritable byteRow = byteRefArray[i];
        int numCols = fields.size();

        if (byteRow == null) {
          byteRow = new BytesRefArrayWritable(numCols);
          byteRefArray[i] = byteRow;
        }

        byteRow.resetValid(numCols);

        for (int p = 0; p < batch.projectionSize; p++) {
          int k = batch.projectedColumns[p];
          ObjectInspector foi = fields.get(k).getFieldObjectInspector();
          ColumnVector currentColVector = batch.cols[k];

          switch (foi.getCategory()) {
          case PRIMITIVE: {
            PrimitiveObjectInspector poi = (PrimitiveObjectInspector) foi;
            if (!currentColVector.noNulls
                && (currentColVector.isRepeating || currentColVector.isNull[rowIndex])) {
              // The column is null hence write null value
              serializeVectorStream.write(new byte[0], 0, 0);
            } else {
              // If here then the vector value is not null.
              if (currentColVector.isRepeating) {
                // If the vector has repeating values then set rowindex to zero
                rowIndex = 0;
              }

              switch (poi.getPrimitiveCategory()) {
              case BOOLEAN: {
                LongColumnVector lcv = (LongColumnVector) batch.cols[k];
                // In vectorization true is stored as 1 and false as 0.
                // Bug fix: the previous version wrote trueBytes in both
                // branches, so every false value serialized as "true".
                if (lcv.vector[rowIndex] == 1) {
                  serializeVectorStream.write(LazyUtils.trueBytes, 0, LazyUtils.trueBytes.length);
                } else {
                  serializeVectorStream.write(LazyUtils.falseBytes, 0, LazyUtils.falseBytes.length);
                }
              }
                break;
              case BYTE:
              case SHORT:
              case INT:
              case LONG:
                LongColumnVector lcv = (LongColumnVector) batch.cols[k];
                LazyLong.writeUTF8(serializeVectorStream, lcv.vector[rowIndex]);
                break;
              case FLOAT:
              case DOUBLE:
                DoubleColumnVector dcv = (DoubleColumnVector) batch.cols[k];
                ByteBuffer b = Text.encode(String.valueOf(dcv.vector[rowIndex]));
                serializeVectorStream.write(b.array(), 0, b.limit());
                break;
              case BINARY: {
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[k];
                byte[] bytes = bcv.vector[rowIndex];
                serializeVectorStream.write(bytes, 0, bytes.length);
              }
                break;
              case STRING:
              case CHAR:
              case VARCHAR: {
                // Is it correct to escape CHAR and VARCHAR?
                BytesColumnVector bcv = (BytesColumnVector) batch.cols[k];
                LazyUtils.writeEscaped(serializeVectorStream, bcv.vector[rowIndex],
                    bcv.start[rowIndex],
                    bcv.length[rowIndex],
                    serdeParams.isEscaped(), serdeParams.getEscapeChar(), serdeParams
                        .getNeedsEscape());
              }
                break;
              case TIMESTAMP:
                LongColumnVector tcv = (LongColumnVector) batch.cols[k];
                long timeInNanoSec = tcv.vector[rowIndex];
                Timestamp t = new Timestamp(0);
                TimestampUtils.assignTimeInNanoSec(timeInNanoSec, t);
                TimestampWritable tw = new TimestampWritable();
                tw.set(t);
                LazyTimestamp.writeUTF8(serializeVectorStream, tw);
                break;
              case DATE:
                LongColumnVector dacv = (LongColumnVector) batch.cols[k];
                DateWritable daw = new DateWritable((int) dacv.vector[rowIndex]);
                LazyDate.writeUTF8(serializeVectorStream, daw);
                break;
              default:
                throw new UnsupportedOperationException(
                    "Vectorizaton is not supported for datatype:"
                        + poi.getPrimitiveCategory());
              }
            }
            break;
          }
          case LIST:
          case MAP:
          case STRUCT:
          case UNION:
            throw new UnsupportedOperationException("Vectorizaton is not supported for datatype:"
                + foi.getCategory());
          default:
            throw new SerDeException("Unknown ObjectInspector category!");
          }
          // Point this column's BytesRef at the slice of the shared stream
          // written since the previous column (zero-copy).
          byteRow.get(k).set(serializeVectorStream.getData(), count, serializeVectorStream
              .getLength() - count);
          count = serializeVectorStream.getLength();
        }
      }
      ow.set(byteRefArray);
    } catch (Exception e) {
      throw new SerDeException(e);
    }
    return ow;
  }

  @Override
  public SerDeStats getSerDeStats() {
    return null;
  }

  @Override
  public Class<? extends Writable> getSerializedClass() {
    return BytesRefArrayWritable.class;
  }

  @Override
  public Object deserialize(Writable blob) throws SerDeException {
    // Ideally this should throw UnsupportedOperationException as the serde is
    // vectorized serde. But since RC file reader does not support vectorized reading this
    // is left as it is. This function will be called from VectorizedRowBatchCtx::addRowToBatch
    // to deserialize the row one by one and populate the batch. Once RC file reader supports vectorized
    // reading this serde and be standalone serde with no dependency on ColumnarSerDe.
    return super.deserialize(blob);
  }

  @Override
  public ObjectInspector getObjectInspector() throws SerDeException {
    return cachedObjectInspector;
  }

  @Override
  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    // Row-at-a-time serialization is deliberately unsupported; use
    // serializeVector instead.
    throw new UnsupportedOperationException();
  }

  /**
   * Deserializes the rowBlob into Vectorized row batch
   * @param rowBlob
   *          rowBlob row batch to deserialize
   * @param rowsInBlob
   *          Total number of rows in rowBlob to deserialize
   * @param reuseBatch
   *          VectorizedRowBatch to which the rows should be serialized
   * @throws SerDeException
   */
  @Override
  public void deserializeVector(Object rowBlob, int rowsInBlob,
      VectorizedRowBatch reuseBatch) throws SerDeException {

    BytesRefArrayWritable[] refArray = (BytesRefArrayWritable[]) rowBlob;
    DataOutputBuffer buffer = new DataOutputBuffer();
    for (int i = 0; i < rowsInBlob; i++) {
      Object row = deserialize(refArray[i]);
      try {
        VectorizedBatchUtil.addRowToBatch(row,
            (StructObjectInspector) cachedObjectInspector, i,
            reuseBatch, buffer);
      } catch (HiveException e) {
        throw new SerDeException(e);
      }
    }
  }
}
| apache-2.0 |
robin13/elasticsearch | client/rest-high-level/src/test/java/org/elasticsearch/client/migration/DeprecationInfoResponseTests.java | 6544 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.client.migration;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.EqualsHashCodeTestUtils;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.client.migration.DeprecationInfoResponse.DeprecationIssue.Level.CRITICAL;
import static org.elasticsearch.client.migration.DeprecationInfoResponse.DeprecationIssue.Level.WARNING;
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
public class DeprecationInfoResponseTests extends ESTestCase {

    /**
     * Serializes a whole {@link DeprecationInfoResponse} into the JSON layout
     * that {@code DeprecationInfoResponse.fromXContent} understands.
     */
    private void toXContent(DeprecationInfoResponse response, XContentBuilder builder) throws IOException {
        builder.startObject();
        {
            builder.startArray("cluster_settings");
            for (DeprecationInfoResponse.DeprecationIssue clusterIssue : response.getClusterSettingsIssues()) {
                toXContent(clusterIssue, builder);
            }
            builder.endArray();

            builder.startArray("node_settings");
            for (DeprecationInfoResponse.DeprecationIssue nodeIssue : response.getNodeSettingsIssues()) {
                toXContent(nodeIssue, builder);
            }
            builder.endArray();

            builder.field("index_settings");
            builder.startObject();
            {
                for (Map.Entry<String, List<DeprecationInfoResponse.DeprecationIssue>> indexEntry :
                        response.getIndexSettingsIssues().entrySet()) {
                    builder.field(indexEntry.getKey());
                    builder.startArray();
                    for (DeprecationInfoResponse.DeprecationIssue indexIssue : indexEntry.getValue()) {
                        toXContent(indexIssue, builder);
                    }
                    builder.endArray();
                }
            }
            builder.endObject();

            builder.startArray("ml_settings");
            for (DeprecationInfoResponse.DeprecationIssue mlIssue : response.getMlSettingsIssues()) {
                toXContent(mlIssue, builder);
            }
            builder.endArray();
        }
        builder.endObject();
    }

    /** Serializes one issue object: level, message, url and (optionally) details. */
    private void toXContent(DeprecationInfoResponse.DeprecationIssue issue, XContentBuilder builder) throws IOException {
        builder.startObject()
            .field("level", issue.getLevel())
            .field("message", issue.getMessage())
            .field("url", issue.getUrl());
        if (issue.getDetails() != null) {
            builder.field("details", issue.getDetails());
        }
        builder.endObject();
    }

    /** Builds a random index-name to issue-list map with at least one entry. */
    private Map<String, List<DeprecationInfoResponse.DeprecationIssue>> createIndexSettingsIssues() {
        Map<String, List<DeprecationInfoResponse.DeprecationIssue>> issuesByIndex = new HashMap<>();
        for (int i = 0; i < randomIntBetween(1, 3); i++) {
            issuesByIndex.put(randomAlphaOfLengthBetween(1, 5), createRandomIssues(false));
        }
        return issuesByIndex;
    }

    /**
     * Builds a random list of issues. Per-index issue lists must never be
     * empty, so {@code canBeEmpty} controls the minimum size of the result.
     */
    private List<DeprecationInfoResponse.DeprecationIssue> createRandomIssues(boolean canBeEmpty) {
        List<DeprecationInfoResponse.DeprecationIssue> issues = new ArrayList<>();
        int minSize = canBeEmpty ? 0 : 1;
        for (int i = 0; i < randomIntBetween(minSize, 2); i++) {
            issues.add(new DeprecationInfoResponse.DeprecationIssue(randomFrom(WARNING, CRITICAL),
                randomAlphaOfLength(5),
                randomAlphaOfLength(5),
                randomBoolean() ? randomAlphaOfLength(5) : null));
        }
        return issues;
    }

    /** Creates a fully randomized response instance. */
    private DeprecationInfoResponse createInstance() {
        return new DeprecationInfoResponse(createRandomIssues(true), createRandomIssues(true), createIndexSettingsIssues(),
            createRandomIssues(true));
    }

    /** Produces a copy that is equal to, but shares no top-level collections with, the input. */
    private DeprecationInfoResponse copyInstance(DeprecationInfoResponse source) {
        return new DeprecationInfoResponse(new ArrayList<>(source.getClusterSettingsIssues()),
            new ArrayList<>(source.getNodeSettingsIssues()), new HashMap<>(source.getIndexSettingsIssues()),
            new ArrayList<>(source.getMlSettingsIssues()));
    }

    /** Returns a freshly randomized instance, which is (almost surely) unequal to the input. */
    private DeprecationInfoResponse mutateInstance(DeprecationInfoResponse source) {
        return createInstance();
    }

    public void testFromXContent() throws IOException {
        xContentTester(
            this::createParser,
            this::createInstance,
            this::toXContent,
            DeprecationInfoResponse::fromXContent)
            .supportsUnknownFields(false) // old school parsing
            .test();
    }

    /** Each constructor argument must be non-null and report a specific message. */
    public void testNullFailedIndices() {
        NullPointerException exception = expectThrows(NullPointerException.class,
            () -> new DeprecationInfoResponse(null, null, null, null));
        assertEquals("cluster settings issues cannot be null", exception.getMessage());

        exception = expectThrows(NullPointerException.class,
            () -> new DeprecationInfoResponse(Collections.emptyList(), null, null, null));
        assertEquals("node settings issues cannot be null", exception.getMessage());

        exception = expectThrows(NullPointerException.class,
            () -> new DeprecationInfoResponse(Collections.emptyList(), Collections.emptyList(), null, null));
        assertEquals("index settings issues cannot be null", exception.getMessage());

        exception = expectThrows(NullPointerException.class,
            () -> new DeprecationInfoResponse(Collections.emptyList(), Collections.emptyList(), Collections.emptyMap(), null));
        assertEquals("ml settings issues cannot be null", exception.getMessage());
    }

    public void testEqualsAndHashCode() {
        for (int round = 0; round < 100; ++round) {
            EqualsHashCodeTestUtils.checkEqualsAndHashCode(createInstance(), this::copyInstance, this::mutateInstance);
        }
    }
}
| apache-2.0 |
0359xiaodong/cw-omnibus | DeviceAdmin/PasswordEnforcer/src/com/commonsware/android/pwenforce/AdminReceiver.java | 2144 | /***
Copyright (c) 2013 CommonsWare, LLC
Licensed under the Apache License, Version 2.0 (the "License"); you may not
use this file except in compliance with the License. You may obtain a copy
of the License at http://www.apache.org/licenses/LICENSE-2.0. Unless required
by applicable law or agreed to in writing, software distributed under the
License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
OF ANY KIND, either express or implied. See the License for the specific
language governing permissions and limitations under the License.
From _The Busy Coder's Guide to Android Development_
http://commonsware.com/Android
*/
package com.commonsware.android.pwenforce;
import android.app.admin.DeviceAdminReceiver;
import android.app.admin.DevicePolicyManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.widget.Toast;
public class AdminReceiver extends DeviceAdminReceiver {
    /** Fetches the system device-policy service for the given context. */
    private DevicePolicyManager policyManager(Context ctxt) {
        return (DevicePolicyManager) ctxt.getSystemService(Context.DEVICE_POLICY_SERVICE);
    }

    /** Shows a long-duration toast for the given string resource. */
    private void showToast(Context ctxt, int msgId) {
        Toast.makeText(ctxt, msgId, Toast.LENGTH_LONG).show();
    }

    @Override
    public void onEnabled(Context ctxt, Intent intent) {
        // As soon as this admin is enabled, require alphanumeric passwords,
        // then immediately evaluate the current password against that policy.
        ComponentName cn = new ComponentName(ctxt, AdminReceiver.class);

        policyManager(ctxt).setPasswordQuality(cn,
                DevicePolicyManager.PASSWORD_QUALITY_ALPHANUMERIC);
        onPasswordChanged(ctxt, intent);
    }

    @Override
    public void onPasswordChanged(Context ctxt, Intent intent) {
        // Tell the user whether the active password meets the policy.
        boolean compliant = policyManager(ctxt).isActivePasswordSufficient();

        showToast(ctxt, compliant ? R.string.compliant : R.string.not_compliant);
    }

    @Override
    public void onPasswordFailed(Context ctxt, Intent intent) {
        showToast(ctxt, R.string.password_failed);
    }

    @Override
    public void onPasswordSucceeded(Context ctxt, Intent intent) {
        showToast(ctxt, R.string.password_success);
    }
}
| apache-2.0 |
mswiderski/drools | drools-persistence-jpa/src/main/java/org/drools/persistence/map/EnvironmentBuilder.java | 297 | package org.drools.persistence.map;
import org.drools.persistence.PersistenceContextManager;
import org.drools.persistence.TransactionManager;
/**
 * Supplies the pieces of the persistence environment used by the map-based
 * persistence layer: a {@link PersistenceContextManager} and a
 * {@link TransactionManager}.
 */
public interface EnvironmentBuilder {

    /** Returns the persistence-context manager backing this environment. */
    PersistenceContextManager getPersistenceContextManager();

    /** Returns the transaction manager backing this environment. */
    TransactionManager getTransactionManager();
} | apache-2.0 |
lukecwik/incubator-beam | sdks/java/core/src/main/java/org/apache/beam/sdk/coders/DequeCoder.java | 2670 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.coders;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.TypeParameter;
/**
* A {@link Coder} for {@link Deque}, using the format of {@link IterableLikeCoder}.
*
* @param <T> the type of the elements of the Deques being transcoded
*/
public class DequeCoder<T> extends IterableLikeCoder<T, Deque<T>> {

  /**
   * Returns a {@code DequeCoder} that uses {@code elemCoder} to encode and
   * decode the deque's elements.
   *
   * @param elemCoder coder for individual elements
   */
  public static <T> DequeCoder<T> of(Coder<T> elemCoder) {
    return new DequeCoder<>(elemCoder);
  }

  /////////////////////////////////////////////////////////////////////////////
  // Internal operations below here.

  /** Materializes decoded elements into an {@link ArrayDeque}, preserving order. */
  @Override
  protected Deque<T> decodeToIterable(List<T> decodedElements) {
    return new ArrayDeque<>(decodedElements);
  }

  protected DequeCoder(Coder<T> elemCoder) {
    super(elemCoder, "Deque");
  }

  /** Value equality mirrors encoding equality iff the element coder's does. */
  @Override
  public boolean consistentWithEquals() {
    return getElemCoder().consistentWithEquals();
  }

  @Override
  public Object structuralValue(Deque<T> values) {
    if (consistentWithEquals()) {
      return values;
    } else {
      // Map each element to its structural value so the result can stand in
      // for the deque in equality comparisons.
      final Deque<Object> ret = new ArrayDeque<>(values.size());
      for (T value : values) {
        ret.add(getElemCoder().structuralValue(value));
      }
      return ret;
    }
  }

  /**
   * Deque sizes are always known, so DequeCoder may be deterministic while the
   * general IterableLikeCoder is not.
   *
   * @throws NonDeterministicException if the element coder is not deterministic
   */
  @Override
  public void verifyDeterministic() throws NonDeterministicException {
    // Fixed typo in the user-facing message: "determistic" -> "deterministic".
    verifyDeterministic(
        this, "Coder for elements of DequeCoder must be deterministic", getElemCoder());
  }

  @Override
  public TypeDescriptor<Deque<T>> getEncodedTypeDescriptor() {
    return new TypeDescriptor<Deque<T>>(getClass()) {}.where(
        new TypeParameter<T>() {}, getElemCoder().getEncodedTypeDescriptor());
  }
}
| apache-2.0 |
Shashikanth-Huawei/bmp | core/net/src/main/java/org/onosproject/net/flowobjective/impl/FlowObjectiveManager.java | 20270 | /*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.flowobjective.impl;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.osgi.DefaultServiceDirectory;
import org.onlab.osgi.ServiceDirectory;
import org.onlab.util.ItemNotFoundException;
import org.onosproject.cluster.ClusterService;
import org.onosproject.mastership.MastershipEvent;
import org.onosproject.mastership.MastershipListener;
import org.onosproject.mastership.MastershipService;
import org.onosproject.net.DeviceId;
import org.onosproject.net.behaviour.NextGroup;
import org.onosproject.net.behaviour.Pipeliner;
import org.onosproject.net.behaviour.PipelinerContext;
import org.onosproject.net.device.DeviceEvent;
import org.onosproject.net.device.DeviceListener;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.driver.DefaultDriverProviderService;
import org.onosproject.net.driver.DriverHandler;
import org.onosproject.net.driver.DriverService;
import org.onosproject.net.flow.FlowRuleService;
import org.onosproject.net.flowobjective.FilteringObjective;
import org.onosproject.net.flowobjective.FlowObjectiveService;
import org.onosproject.net.flowobjective.FlowObjectiveStore;
import org.onosproject.net.flowobjective.FlowObjectiveStoreDelegate;
import org.onosproject.net.flowobjective.ForwardingObjective;
import org.onosproject.net.flowobjective.NextObjective;
import org.onosproject.net.flowobjective.Objective;
import org.onosproject.net.flowobjective.ObjectiveError;
import org.onosproject.net.flowobjective.ObjectiveEvent;
import org.onosproject.net.flowobjective.ObjectiveEvent.Type;
import org.onosproject.net.group.GroupService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.concurrent.Executors.newFixedThreadPool;
import static org.onlab.util.Tools.groupedThreads;
import static org.onosproject.security.AppGuard.checkPermission;
import static org.onosproject.security.AppPermission.Type.FLOWRULE_WRITE;
/**
* Provides implementation of the flow objective programming service.
*/
@Component(immediate = true)
@Service
public class FlowObjectiveManager implements FlowObjectiveService {

    /** Maximum number of attempts to locate a device pipeliner before failing. */
    public static final int INSTALL_RETRY_ATTEMPTS = 5;

    /** Pause between pipeliner lookup attempts. */
    public static final long INSTALL_RETRY_INTERVAL = 1000; // ms

    private final Logger log = LoggerFactory.getLogger(getClass());

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DriverService driverService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DeviceService deviceService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected MastershipService mastershipService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected ClusterService clusterService;

    // Note: The following dependencies are added on behalf of the pipeline
    // driver behaviours to assure these services are available for their
    // initialization.
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected FlowRuleService flowRuleService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected GroupService groupService;

    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected FlowObjectiveStore flowObjectiveStore;

    // Note: This must remain an optional dependency to allow re-install of default drivers.
    // Note: For now disabled until we can move to OPTIONAL_UNARY dependency
    // @Reference(cardinality = ReferenceCardinality.OPTIONAL_UNARY, policy = ReferencePolicy.DYNAMIC)
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected DefaultDriverProviderService defaultDriverService;

    private final FlowObjectiveStoreDelegate delegate = new InternalStoreDelegate();

    // Per-device driver handlers and initialized pipeline behaviours.
    private final Map<DeviceId, DriverHandler> driverHandlers = Maps.newConcurrentMap();
    private final Map<DeviceId, Pipeliner> pipeliners = Maps.newConcurrentMap();

    private final PipelinerContext context = new InnerPipelineContext();
    private final MastershipListener mastershipListener = new InnerMastershipListener();
    private final DeviceListener deviceListener = new InnerDeviceListener();

    protected ServiceDirectory serviceDirectory = new DefaultServiceDirectory();

    // Forwarding objectives waiting for their next objective to appear in the
    // store, keyed by next id. Mutations are synchronized on the map itself
    // (see queueObjective and InternalStoreDelegate.notify).
    private final Map<Integer, Set<PendingNext>> pendingForwards = Maps.newConcurrentMap();

    // local store to track which nextObjectives were sent to which device
    // for debugging purposes
    private Map<Integer, DeviceId> nextToDevice = Maps.newConcurrentMap();

    private ExecutorService executorService;

    @Activate
    protected void activate() {
        executorService = newFixedThreadPool(4, groupedThreads("onos/objective-installer", "%d", log));
        flowObjectiveStore.setDelegate(delegate);
        mastershipService.addListener(mastershipListener);
        deviceService.addListener(deviceListener);
        deviceService.getDevices().forEach(device -> setupPipelineHandler(device.id()));
        log.info("Started");
    }

    @Deactivate
    protected void deactivate() {
        flowObjectiveStore.unsetDelegate(delegate);
        mastershipService.removeListener(mastershipListener);
        deviceService.removeListener(deviceListener);
        executorService.shutdown();
        pipeliners.clear();
        driverHandlers.clear();
        nextToDevice.clear();
        log.info("Stopped");
    }

    /**
     * Task that passes the flow objective down to the driver. The task will
     * make a few attempts to find the appropriate driver, then eventually give
     * up and report an error if no suitable driver could be found.
     */
    private class ObjectiveInstaller implements Runnable {
        private final DeviceId deviceId;
        private final Objective objective;

        private final int numAttempts;

        public ObjectiveInstaller(DeviceId deviceId, Objective objective) {
            this(deviceId, objective, 1);
        }

        public ObjectiveInstaller(DeviceId deviceId, Objective objective, int attempts) {
            this.deviceId = checkNotNull(deviceId);
            this.objective = checkNotNull(objective);
            // 'attempts' is a primitive int and can never be null; the previous
            // checkNotNull call only autoboxed the value to no effect.
            this.numAttempts = attempts;
        }

        @Override
        public void run() {
            try {
                Pipeliner pipeliner = getDevicePipeliner(deviceId);

                if (pipeliner != null) {
                    if (objective instanceof NextObjective) {
                        pipeliner.next((NextObjective) objective);
                    } else if (objective instanceof ForwardingObjective) {
                        pipeliner.forward((ForwardingObjective) objective);
                    } else {
                        pipeliner.filter((FilteringObjective) objective);
                    }
                    // No pipeliner yet: back off and retry a bounded number of times.
                } else if (numAttempts < INSTALL_RETRY_ATTEMPTS) {
                    Thread.sleep(INSTALL_RETRY_INTERVAL);
                    executorService.execute(new ObjectiveInstaller(deviceId, objective, numAttempts + 1));
                } else {
                    // Otherwise we've tried a few times and failed, report an
                    // error back to the user.
                    objective.context().ifPresent(
                            c -> c.onError(objective, ObjectiveError.NOPIPELINER));
                }
            } catch (InterruptedException e) {
                // Restore the interrupt status instead of silently swallowing it
                // in the broad Exception handler below.
                log.warn("Interrupted while installing flow objective", e);
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                log.warn("Exception while installing flow objective", e);
            }
        }
    }

    @Override
    public void filter(DeviceId deviceId, FilteringObjective filteringObjective) {
        checkPermission(FLOWRULE_WRITE);
        executorService.execute(new ObjectiveInstaller(deviceId, filteringObjective));
    }

    @Override
    public void forward(DeviceId deviceId, ForwardingObjective forwardingObjective) {
        checkPermission(FLOWRULE_WRITE);

        // Defer installation when the referenced next objective is not yet present.
        if (queueObjective(deviceId, forwardingObjective)) {
            return;
        }
        executorService.execute(new ObjectiveInstaller(deviceId, forwardingObjective));
    }

    @Override
    public void next(DeviceId deviceId, NextObjective nextObjective) {
        checkPermission(FLOWRULE_WRITE);
        nextToDevice.put(nextObjective.id(), deviceId);
        executorService.execute(new ObjectiveInstaller(deviceId, nextObjective));
    }

    @Override
    public int allocateNextId() {
        checkPermission(FLOWRULE_WRITE);
        return flowObjectiveStore.allocateNextId();
    }

    @Override
    public void initPolicy(String policy) {}

    /**
     * Queues a forwarding objective whose next objective has not yet been
     * installed in the store.
     *
     * @return true if the objective was queued and should not be installed now
     */
    private boolean queueObjective(DeviceId deviceId, ForwardingObjective fwd) {
        if (fwd.nextId() == null ||
                flowObjectiveStore.getNextGroup(fwd.nextId()) != null) {
            // fast path
            return false;
        }
        boolean queued = false;
        synchronized (pendingForwards) {
            // double check the flow objective store, because this block could run
            // after a notification arrives
            if (flowObjectiveStore.getNextGroup(fwd.nextId()) == null) {
                pendingForwards.compute(fwd.nextId(), (id, pending) -> {
                    PendingNext next = new PendingNext(deviceId, fwd);
                    if (pending == null) {
                        return Sets.newHashSet(next);
                    } else {
                        pending.add(next);
                        return pending;
                    }
                });
                queued = true;
            }
        }
        if (queued) {
            log.debug("Queued forwarding objective {} for nextId {} meant for device {}",
                    fwd.id(), fwd.nextId(), deviceId);
        }
        return queued;
    }

    // Retrieves the device pipeline behaviour from the cache.
    private Pipeliner getDevicePipeliner(DeviceId deviceId) {
        return pipeliners.get(deviceId);
    }

    /**
     * Looks up (or creates and caches) the driver handler for the device and
     * (re)initializes its pipeline behaviour. No-op while default drivers are
     * not yet available.
     */
    private void setupPipelineHandler(DeviceId deviceId) {
        if (defaultDriverService == null) {
            // We're not ready to go to work yet.
            return;
        }

        // Attempt to lookup the handler in the cache
        DriverHandler handler = driverHandlers.get(deviceId);
        cTime = now();

        if (handler == null) {
            try {
                // Otherwise create it and if it has pipeline behaviour, cache it
                handler = driverService.createHandler(deviceId);
                dTime = now();
                if (!handler.driver().hasBehaviour(Pipeliner.class)) {
                    log.warn("Pipeline behaviour not supported for device {}",
                            deviceId);
                    return;
                }
            } catch (ItemNotFoundException e) {
                log.warn("No applicable driver for device {}", deviceId);
                return;
            }

            driverHandlers.put(deviceId, handler);
            eTime = now();
        }

        // Always (re)initialize the pipeline behaviour
        log.info("Driver {} bound to device {} ... initializing driver",
                handler.driver().name(), deviceId);
        hTime = now();
        Pipeliner pipeliner = handler.behaviour(Pipeliner.class);
        hbTime = now();
        pipeliner.init(deviceId, context);
        pipeliners.putIfAbsent(deviceId, pipeliner);
    }

    // Triggers driver setup when the local node becomes a device master.
    private class InnerMastershipListener implements MastershipListener {
        @Override
        public void event(MastershipEvent event) {
            switch (event.type()) {
                case MASTER_CHANGED:
                    log.debug("mastership changed on device {}", event.subject());
                    start = now();
                    if (deviceService.isAvailable(event.subject())) {
                        setupPipelineHandler(event.subject());
                    }
                    stopWatch();
                    break;
                case BACKUPS_CHANGED:
                    break;
                default:
                    break;
            }
        }
    }

    // Triggers driver setup when a device is (re)detected.
    private class InnerDeviceListener implements DeviceListener {
        @Override
        public void event(DeviceEvent event) {
            switch (event.type()) {
                case DEVICE_ADDED:
                case DEVICE_AVAILABILITY_CHANGED:
                    log.debug("Device either added or availability changed {}",
                            event.subject().id());
                    start = now();
                    if (deviceService.isAvailable(event.subject().id())) {
                        log.debug("Device is now available {}", event.subject().id());
                        setupPipelineHandler(event.subject().id());
                    }
                    stopWatch();
                    break;
                case DEVICE_UPDATED:
                    break;
                case DEVICE_REMOVED:
                    break;
                case DEVICE_SUSPENDED:
                    break;
                case PORT_ADDED:
                    break;
                case PORT_UPDATED:
                    break;
                case PORT_REMOVED:
                    break;
                default:
                    break;
            }
        }
    }

    // Temporary mechanism to monitor pipeliner setup time-cost; there are
    // intermittent time where this takes in excess of 2 seconds. Why?
    private long start = 0, totals = 0, count = 0;
    private long cTime, dTime, eTime, hTime, hbTime;
    private static final long LIMIT = 500;

    private long now() {
        return System.currentTimeMillis();
    }

    private void stopWatch() {
        long duration = System.currentTimeMillis() - start;
        totals += duration;
        count += 1;
        if (duration > LIMIT) {
            log.info("Pipeline setup took {} ms; avg {} ms; cTime={}, dTime={}, eTime={}, hTime={}, hbTime={}",
                    duration, totals / count, diff(cTime), diff(dTime), diff(eTime), diff(hTime), diff(hbTime));
        }
    }

    private long diff(long bTime) {
        long diff = bTime - start;
        return diff < 0 ? 0 : diff;
    }

    // Processing context for initializing pipeline driver behaviours.
    private class InnerPipelineContext implements PipelinerContext {
        @Override
        public ServiceDirectory directory() {
            return serviceDirectory;
        }

        @Override
        public FlowObjectiveStore store() {
            return flowObjectiveStore;
        }
    }

    private class InternalStoreDelegate implements FlowObjectiveStoreDelegate {
        @Override
        public void notify(ObjectiveEvent event) {
            if (event.type() == Type.ADD) {
                log.debug("Received notification of obj event {}", event);
                Set<PendingNext> pending;
                synchronized (pendingForwards) {
                    // needs to be synchronized for queueObjective lookup
                    pending = pendingForwards.remove(event.subject());
                }

                if (pending == null) {
                    log.debug("Nothing pending for this obj event {}", event);
                    return;
                }

                log.debug("Processing {} pending forwarding objectives for nextId {}",
                        pending.size(), event.subject());
                pending.forEach(p -> getDevicePipeliner(p.deviceId())
                        .forward(p.forwardingObjective()));
            }
        }
    }

    /**
     * Data class used to hold a pending forwarding objective that could not
     * be processed because the associated next object was not present.
     */
    private class PendingNext {
        private final DeviceId deviceId;
        private final ForwardingObjective fwd;

        public PendingNext(DeviceId deviceId, ForwardingObjective fwd) {
            this.deviceId = deviceId;
            this.fwd = fwd;
        }

        public DeviceId deviceId() {
            return deviceId;
        }

        public ForwardingObjective forwardingObjective() {
            return fwd;
        }

        @Override
        public int hashCode() {
            return Objects.hash(deviceId, fwd);
        }

        @Override
        public boolean equals(final Object obj) {
            if (this == obj) {
                return true;
            }
            if (!(obj instanceof PendingNext)) {
                return false;
            }
            final PendingNext other = (PendingNext) obj;
            // Simplified from the verbose if/return-true/return-false form.
            return this.deviceId.equals(other.deviceId)
                    && this.fwd.equals(other.fwd);
        }
    }

    @Override
    public List<String> getNextMappings() {
        List<String> mappings = new ArrayList<>();
        Map<Integer, NextGroup> allnexts = flowObjectiveStore.getAllGroups();
        // XXX if the NextGroup after de-serialization actually stored info of the deviceId
        // then info on any nextObj could be retrieved from one controller instance.
        // Right now the drivers on one instance can only fetch for next-ids that came
        // to them.
        // Also, we still need to send the right next-id to the right driver as potentially
        // there can be different drivers for different devices. But on that account,
        // no instance should be decoding for another instance's nextIds.
        for (Map.Entry<Integer, NextGroup> e : allnexts.entrySet()) {
            // get the device this next Objective was sent to
            DeviceId deviceId = nextToDevice.get(e.getKey());
            mappings.add("NextId " + e.getKey() + ": " +
                    ((deviceId != null) ? deviceId : "nextId not in this onos instance"));
            if (deviceId != null) {
                // this instance of the controller sent the nextObj to a driver
                Pipeliner pipeliner = getDevicePipeliner(deviceId);
                if (pipeliner == null) {
                    // Guard against a device that has since lost its pipeliner
                    // (e.g. removed or mastership moved); previously this was
                    // a latent NullPointerException.
                    continue;
                }
                List<String> nextMappings = pipeliner.getNextMappings(e.getValue());
                if (nextMappings != null) {
                    mappings.addAll(nextMappings);
                }
            }
        }
        return mappings;
    }

    @Override
    public List<String> getPendingNexts() {
        List<String> pendingNexts = new ArrayList<>();
        // Iterate over entries directly: the former keySet()+get() pattern could
        // observe a concurrently-removed key and NPE on the null value.
        for (Map.Entry<Integer, Set<PendingNext>> entry : pendingForwards.entrySet()) {
            // StringBuilder suffices for this single-threaded, local use.
            StringBuilder pend = new StringBuilder();
            pend.append("Next Id: ").append(Integer.toString(entry.getKey()))
                .append(" :: ");
            for (PendingNext pn : entry.getValue()) {
                pend.append(Integer.toString(pn.forwardingObjective().id()))
                    .append(" ");
            }
            pendingNexts.add(pend.toString());
        }
        return pendingNexts;
    }
}
| apache-2.0 |
samaitra/ignite | modules/core/src/main/java/org/apache/ignite/IgniteJdbcDriver.java | 27195 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.DriverPropertyInfo;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Properties;
import java.util.logging.Logger;
import org.apache.ignite.cache.affinity.AffinityKey;
import org.apache.ignite.internal.jdbc.JdbcConnection;
import org.apache.ignite.internal.jdbc.JdbcDriverPropertyInfo;
/**
* JDBC driver implementation for In-Memory Data Grid.
* <p>
* Driver allows to get distributed data from Ignite cache using standard
* SQL queries and standard JDBC API. It will automatically get only fields that
* you actually need from objects stored in cache.
* <h1 class="header">Limitations</h1>
* Data in Ignite cache is usually distributed across several nodes,
* so some queries may not work as expected since the query will be sent to each
* individual node and results will then be collected and returned as JDBC result set.
* Keep in mind following limitations (not applied if data is queried from one node only,
* or data is fully co-located or fully replicated on multiple nodes):
* <ul>
* <li>
* Joins will work correctly only if joined objects are stored in
* collocated mode. Refer to
* {@link AffinityKey}
* javadoc for more details.
* </li>
* <li>
* Note that if you are connected to local or replicated cache, all data will
* be queried only on one node, not depending on what caches participate in
* the query (some data from partitioned cache can be lost). And visa versa,
* if you are connected to partitioned cache, data from replicated caches
* will be duplicated.
* </li>
* </ul>
* <h1 class="header">SQL Notice</h1>
* Driver allows to query data from several caches. Cache that driver is connected to is
* treated as default schema in this case. Other caches can be referenced by their names.
* <p>
* Note that cache name is case sensitive and you have to always specify it in quotes.
* <h1 class="header">Dependencies</h1>
* JDBC driver is located in main Ignite JAR and depends on all libraries located in
* {@code IGNITE_HOME/libs} folder. So if you are using JDBC driver in any external tool,
* you have to add main Ignite JAR will all dependencies to its classpath.
* <h1 class="header">Configuration</h1>
*
* JDBC driver can return two different types of connection: Ignite Java client based connection and
* Ignite client node based connection. Java client best connection is deprecated and left only for
* compatibility with previous version, so you should always use Ignite client node based mode.
* It is also preferable because it has much better performance.
*
* The type of returned connection depends on provided JDBC connection URL.
*
* <h2 class="header">Configuration of Ignite client node based connection</h2>
*
* JDBC connection URL has the following pattern: {@code jdbc:ignite:cfg://[<params>@]<config_url>}.<br>
*
* {@code <config_url>} represents any valid URL which points to Ignite configuration file. It is required.<br>
*
* {@code <params>} are optional and have the following format: {@code param1=value1:param2=value2:...:paramN=valueN}.<br>
*
* The following parameters are supported:
* <ul>
* <li>{@code cache} - cache name. If it is not defined than default cache will be used.</li>
* <li>
* {@code nodeId} - ID of node where query will be executed.
* It can be useful for querying through local caches.
* If node with provided ID doesn't exist, exception is thrown.
* </li>
* <li>
* {@code local} - query will be executed only on local node. Use this parameter with {@code nodeId} parameter.
* Default value is {@code false}.
* </li>
* <li>
* {@code collocated} - flag that used for optimization purposes. Whenever Ignite executes
* a distributed query, it sends sub-queries to individual cluster members.
* If you know in advance that the elements of your query selection are collocated
* together on the same node, usually based on some <b>affinity-key</b>, Ignite
* can make significant performance and network optimizations.
* Default value is {@code false}.
* </li>
* <li>
* {@code distributedJoins} - enables support of distributed joins feature. This flag does not make sense in
* combination with {@code local} and/or {@code collocated} flags with {@code true} value or in case of querying
* of local cache. Default value is {@code false}.
* </li>
* <li>
* {@code enforceJoinOrder} - Sets flag to enforce join order of tables in the query. If set to {@code true}
* query optimizer will not reorder tables in join. By default is {@code false}.
* </li>
* <li>
* {@code lazy} - Sets flag to enable lazy query execution.
* By default Ignite attempts to fetch the whole query result set to memory and send it to the client.
* For small and medium result sets this provides optimal performance and minimize duration of internal
* database locks, thus increasing concurrency.
*
* <p> If result set is too big to fit in available memory this could lead to excessive GC pauses and even
* OutOfMemoryError. Use this flag as a hint for Ignite to fetch result set lazily, thus minimizing memory
* consumption at the cost of moderate performance hit.
*
* <p> Defaults to {@code false}, meaning that the whole result set is fetched to memory eagerly.
* </li>
* </ul>
*
* <h2 class="header">Configuration of Ignite Java client based connection</h2>
*
* All Ignite Java client configuration properties can be applied to JDBC connection of this type.
* <p>
* JDBC connection URL has the following pattern:
* {@code jdbc:ignite://<hostname>:<port>/<cache_name>?nodeId=<UUID>}<br>
* Note the following:
* <ul>
* <li>Hostname is required.</li>
* <li>If port is not defined, {@code 11211} is used (default for Ignite client).</li>
* <li>Leave {@code <cache_name>} empty if you are connecting to default cache.</li>
* <li>
* Provide {@code nodeId} parameter if you want to specify node where to execute
* your queries. Note that local and replicated caches will be queried locally on
* this node while partitioned cache is queried distributively. If node ID is not
* provided, random node is used. If node with provided ID doesn't exist,
* exception is thrown.
* </li>
* </ul>
* Other properties can be defined in {@link Properties} object passed to
* {@link DriverManager#getConnection(String, Properties)} method:
* <table class="doctable">
* <tr>
* <th>Name</th>
* <th>Description</th>
* <th>Default</th>
* <th>Optional</th>
* </tr>
* <tr>
* <td><b>ignite.client.protocol</b></td>
* <td>Communication protocol ({@code TCP} or {@code HTTP}).</td>
* <td>{@code TCP}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.connectTimeout</b></td>
* <td>Socket connection timeout.</td>
* <td>{@code 0} (infinite timeout)</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.tcp.noDelay</b></td>
* <td>Flag indicating whether TCP_NODELAY flag should be enabled for outgoing connections.</td>
* <td>{@code true}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.enabled</b></td>
* <td>Flag indicating that {@code SSL} is needed for connection.</td>
* <td>{@code false}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.protocol</b></td>
* <td>SSL protocol ({@code SSL} or {@code TLS}).</td>
* <td>{@code TLS}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.key.algorithm</b></td>
* <td>Key manager algorithm.</td>
* <td>{@code SunX509}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.keystore.location</b></td>
* <td>Key store to be used by client to connect with Ignite topology.</td>
* <td> </td>
* <td>No (if {@code SSL} is enabled)</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.keystore.password</b></td>
* <td>Key store password.</td>
* <td> </td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.keystore.type</b></td>
* <td>Key store type.</td>
* <td>{@code jks}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.truststore.location</b></td>
* <td>Trust store to be used by client to connect with Ignite topology.</td>
* <td> </td>
* <td>No (if {@code SSL} is enabled)</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.truststore.password</b></td>
* <td>Trust store password.</td>
* <td> </td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.ssl.truststore.type</b></td>
* <td>Trust store type.</td>
* <td>{@code jks}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.credentials</b></td>
* <td>Client credentials used in authentication process.</td>
* <td> </td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.cache.top</b></td>
* <td>
* Flag indicating that topology is cached internally. Cache will be refreshed in
* the background with interval defined by {@code ignite.client.topology.refresh}
* property (see below).
* </td>
* <td>{@code false}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.topology.refresh</b></td>
* <td>Topology cache refresh frequency (ms).</td>
* <td>{@code 2000}</td>
* <td>Yes</td>
* </tr>
* <tr>
* <td><b>ignite.client.idleTimeout</b></td>
* <td>Maximum amount of time that connection can be idle before it is closed (ms).</td>
* <td>{@code 30000}</td>
* <td>Yes</td>
* </tr>
* </table>
* <h1 class="header">Example</h1>
* <pre name="code" class="java">
* // Open JDBC connection.
* Connection conn = DriverManager.getConnection("jdbc:ignite:cfg//cache=persons@file:///etc/configs/ignite-jdbc.xml");
*
* // Query persons' names
* ResultSet rs = conn.createStatement().executeQuery("select name from Person");
*
* while (rs.next()) {
* String name = rs.getString(1);
*
* ...
* }
*
* // Query persons with specific age
* PreparedStatement stmt = conn.prepareStatement("select name, age from Person where age = ?");
*
* stmt.setInt(1, 30);
*
* ResultSet rs = stmt.executeQuery();
*
* while (rs.next()) {
* String name = rs.getString("name");
* int age = rs.getInt("age");
*
* ...
* }
* </pre>
*/
public class IgniteJdbcDriver implements Driver {
    /** Prefix for property names. */
    private static final String PROP_PREFIX = "ignite.jdbc.";

    /** Node ID parameter name. */
    private static final String PARAM_NODE_ID = "nodeId";

    /** Cache parameter name. */
    private static final String PARAM_CACHE = "cache";

    /** Local parameter name. */
    private static final String PARAM_LOCAL = "local";

    /** Collocated parameter name. */
    private static final String PARAM_COLLOCATED = "collocated";

    /** Distributed joins parameter name. */
    private static final String PARAM_DISTRIBUTED_JOINS = "distributedJoins";

    /** Transactions allowed parameter name. */
    private static final String PARAM_TX_ALLOWED = "transactionsAllowed";

    /** DML streaming parameter name. */
    private static final String PARAM_STREAMING = "streaming";

    /** DML streaming auto flush frequency. */
    private static final String PARAM_STREAMING_FLUSH_FREQ = "streamingFlushFrequency";

    /** DML streaming node buffer size. */
    private static final String PARAM_STREAMING_PER_NODE_BUF_SIZE = "streamingPerNodeBufferSize";

    /** DML streaming parallel operations per node. */
    private static final String PARAM_STREAMING_PER_NODE_PAR_OPS = "streamingPerNodeParallelOperations";

    /** Whether DML streaming will overwrite existing cache entries. */
    private static final String PARAM_STREAMING_ALLOW_OVERWRITE = "streamingAllowOverwrite";

    /** Allow queries with multiple statements. */
    private static final String PARAM_MULTIPLE_STMTS = "multipleStatementsAllowed";

    /** Skip reducer on update property name. */
    private static final String PARAM_SKIP_REDUCER_ON_UPDATE = "skipReducerOnUpdate";

    /** Parameter: enforce join order flag (SQL hint). */
    public static final String PARAM_ENFORCE_JOIN_ORDER = "enforceJoinOrder";

    /** Parameter: lazy query execution flag (SQL hint). */
    public static final String PARAM_LAZY = "lazy";

    /** Parameter: schema name. */
    public static final String PARAM_SCHEMA = "schema";

    /** Hostname property name. */
    public static final String PROP_HOST = PROP_PREFIX + "host";

    /** Port number property name. */
    public static final String PROP_PORT = PROP_PREFIX + "port";

    /** Cache name property name. */
    public static final String PROP_CACHE = PROP_PREFIX + PARAM_CACHE;

    /** Node ID property name. */
    public static final String PROP_NODE_ID = PROP_PREFIX + PARAM_NODE_ID;

    /** Local property name. */
    public static final String PROP_LOCAL = PROP_PREFIX + PARAM_LOCAL;

    /** Collocated property name. */
    public static final String PROP_COLLOCATED = PROP_PREFIX + PARAM_COLLOCATED;

    /** Distributed joins property name. */
    public static final String PROP_DISTRIBUTED_JOINS = PROP_PREFIX + PARAM_DISTRIBUTED_JOINS;

    /** Transactions allowed property name. */
    public static final String PROP_TX_ALLOWED = PROP_PREFIX + PARAM_TX_ALLOWED;

    /** DML streaming property name. */
    public static final String PROP_STREAMING = PROP_PREFIX + PARAM_STREAMING;

    /** DML stream auto flush frequency property name. */
    public static final String PROP_STREAMING_FLUSH_FREQ = PROP_PREFIX + PARAM_STREAMING_FLUSH_FREQ;

    /** DML stream node buffer size property name. */
    public static final String PROP_STREAMING_PER_NODE_BUF_SIZE = PROP_PREFIX + PARAM_STREAMING_PER_NODE_BUF_SIZE;

    /** DML stream parallel operations per node property name. */
    public static final String PROP_STREAMING_PER_NODE_PAR_OPS = PROP_PREFIX + PARAM_STREAMING_PER_NODE_PAR_OPS;

    /** Whether DML streaming will overwrite existing cache entries. */
    public static final String PROP_STREAMING_ALLOW_OVERWRITE = PROP_PREFIX + PARAM_STREAMING_ALLOW_OVERWRITE;

    /** Allow query with multiple statements. */
    public static final String PROP_MULTIPLE_STMTS = PROP_PREFIX + PARAM_MULTIPLE_STMTS;

    /** Skip reducer on update property name. */
    public static final String PROP_SKIP_REDUCER_ON_UPDATE = PROP_PREFIX + PARAM_SKIP_REDUCER_ON_UPDATE;

    /** Enforce join order property name. */
    public static final String PROP_ENFORCE_JOIN_ORDER = PROP_PREFIX + PARAM_ENFORCE_JOIN_ORDER;

    /** Lazy property name. */
    public static final String PROP_LAZY = PROP_PREFIX + PARAM_LAZY;

    /** Schema property name. */
    public static final String PROP_SCHEMA = PROP_PREFIX + PARAM_SCHEMA;

    /** Configuration URL property name. */
    public static final String PROP_CFG = PROP_PREFIX + "cfg";

    /** URL prefix (legacy Java-client based connection). */
    public static final String URL_PREFIX = "jdbc:ignite://";

    /** Config URL prefix (client-node based connection). */
    public static final String CFG_URL_PREFIX = "jdbc:ignite:cfg://";

    /** Default port. */
    public static final int DFLT_PORT = 11211;

    /** Major version. */
    private static final int MAJOR_VER = 1;

    /** Minor version. */
    private static final int MINOR_VER = 0;

    /** Logger. */
    private static final Logger LOG = Logger.getLogger(IgniteJdbcDriver.class.getName());

    /*
     * Static initializer: self-register with DriverManager, as required by JDBC.
     */
    static {
        try {
            DriverManager.registerDriver(new IgniteJdbcDriver());
        }
        catch (SQLException e) {
            throw new RuntimeException("Failed to register Ignite JDBC driver.", e);
        }
    }

    /** {@inheritDoc} */
    @Override public Connection connect(String url, Properties props) throws SQLException {
        if (!acceptsURL(url))
            return null;

        if (!parseUrl(url, props))
            throw new SQLException("URL is invalid: " + url);

        if (url.startsWith(URL_PREFIX)) {
            // Legacy Java-client based connection; PROP_CFG only applies to cfg:// URLs.
            if (props.getProperty(PROP_CFG) != null)
                LOG.warning(PROP_CFG + " property is not applicable for this URL.");

            return new JdbcConnection(url, props);
        }
        else
            return new org.apache.ignite.internal.jdbc2.JdbcConnection(url, props);
    }

    /** {@inheritDoc} */
    @Override public boolean acceptsURL(String url) throws SQLException {
        return url.startsWith(URL_PREFIX) || url.startsWith(CFG_URL_PREFIX);
    }

    /** {@inheritDoc} */
    @Override public DriverPropertyInfo[] getPropertyInfo(String url, Properties info) throws SQLException {
        if (!parseUrl(url, info))
            throw new SQLException("URL is invalid: " + url);

        // NOTE: Arrays.asList(...) returns a fixed-size list whose add()/addAll() throw
        // UnsupportedOperationException. Copy into a mutable ArrayList so the conditional
        // additions below work instead of failing at runtime.
        List<DriverPropertyInfo> props = new ArrayList<>(Arrays.<DriverPropertyInfo>asList(
            new JdbcDriverPropertyInfo("Hostname", info.getProperty(PROP_HOST), ""),
            new JdbcDriverPropertyInfo("Port number", info.getProperty(PROP_PORT), ""),
            new JdbcDriverPropertyInfo("Cache name", info.getProperty(PROP_CACHE), ""),
            new JdbcDriverPropertyInfo("Node ID", info.getProperty(PROP_NODE_ID), ""),
            new JdbcDriverPropertyInfo("Local", info.getProperty(PROP_LOCAL), ""),
            new JdbcDriverPropertyInfo("Collocated", info.getProperty(PROP_COLLOCATED), ""),
            new JdbcDriverPropertyInfo("Distributed Joins", info.getProperty(PROP_DISTRIBUTED_JOINS), ""),
            new JdbcDriverPropertyInfo("Enforce Join Order", info.getProperty(PROP_ENFORCE_JOIN_ORDER), ""),
            new JdbcDriverPropertyInfo("Lazy query execution", info.getProperty(PROP_LAZY), ""),
            new JdbcDriverPropertyInfo("Transactions Allowed", info.getProperty(PROP_TX_ALLOWED), ""),
            new JdbcDriverPropertyInfo("Queries with multiple statements allowed", info.getProperty(PROP_MULTIPLE_STMTS), ""),
            new JdbcDriverPropertyInfo("Skip reducer on update", info.getProperty(PROP_SKIP_REDUCER_ON_UPDATE), ""),
            new JdbcDriverPropertyInfo("Schema name", info.getProperty(PROP_SCHEMA), "")
        ));

        if (info.getProperty(PROP_CFG) != null)
            props.add(new JdbcDriverPropertyInfo("Configuration path", info.getProperty(PROP_CFG), ""));
        else
            props.addAll(Arrays.<DriverPropertyInfo>asList(
                new JdbcDriverPropertyInfo("ignite.client.protocol",
                    info.getProperty("ignite.client.protocol", "TCP"),
                    "Communication protocol (TCP or HTTP)."),
                new JdbcDriverPropertyInfo("ignite.client.connectTimeout",
                    info.getProperty("ignite.client.connectTimeout", "0"),
                    "Socket connection timeout."),
                new JdbcDriverPropertyInfo("ignite.client.tcp.noDelay",
                    info.getProperty("ignite.client.tcp.noDelay", "true"),
                    "Flag indicating whether TCP_NODELAY flag should be enabled for outgoing connections."),
                new JdbcDriverPropertyInfo("ignite.client.ssl.enabled",
                    info.getProperty("ignite.client.ssl.enabled", "false"),
                    "Flag indicating that SSL is needed for connection."),
                new JdbcDriverPropertyInfo("ignite.client.ssl.protocol",
                    info.getProperty("ignite.client.ssl.protocol", "TLS"),
                    "SSL protocol."),
                new JdbcDriverPropertyInfo("ignite.client.ssl.key.algorithm",
                    info.getProperty("ignite.client.ssl.key.algorithm", "SunX509"),
                    "Key manager algorithm."),
                new JdbcDriverPropertyInfo("ignite.client.ssl.keystore.location",
                    info.getProperty("ignite.client.ssl.keystore.location", ""),
                    "Key store to be used by client to connect with Ignite topology."),
                new JdbcDriverPropertyInfo("ignite.client.ssl.keystore.password",
                    info.getProperty("ignite.client.ssl.keystore.password", ""),
                    "Key store password."),
                new JdbcDriverPropertyInfo("ignite.client.ssl.keystore.type",
                    info.getProperty("ignite.client.ssl.keystore.type", "jks"),
                    "Key store type."),
                new JdbcDriverPropertyInfo("ignite.client.ssl.truststore.location",
                    info.getProperty("ignite.client.ssl.truststore.location", ""),
                    "Trust store to be used by client to connect with Ignite topology."),
                // Fixed: property name was mistakenly "ignite.client.ssl.keystore.password",
                // duplicating the key store entry above.
                new JdbcDriverPropertyInfo("ignite.client.ssl.truststore.password",
                    info.getProperty("ignite.client.ssl.truststore.password", ""),
                    "Trust store password."),
                new JdbcDriverPropertyInfo("ignite.client.ssl.truststore.type",
                    info.getProperty("ignite.client.ssl.truststore.type", "jks"),
                    "Trust store type."),
                new JdbcDriverPropertyInfo("ignite.client.credentials",
                    info.getProperty("ignite.client.credentials", ""),
                    "Client credentials used in authentication process."),
                new JdbcDriverPropertyInfo("ignite.client.cache.top",
                    info.getProperty("ignite.client.cache.top", "false"),
                    "Flag indicating that topology is cached internally. Cache will be refreshed in the " +
                        "background with interval defined by topologyRefreshFrequency property (see below)."),
                new JdbcDriverPropertyInfo("ignite.client.topology.refresh",
                    info.getProperty("ignite.client.topology.refresh", "2000"),
                    "Topology cache refresh frequency (ms)."),
                new JdbcDriverPropertyInfo("ignite.client.idleTimeout",
                    info.getProperty("ignite.client.idleTimeout", "30000"),
                    "Maximum amount of time that connection can be idle before it is closed (ms).")
            ));

        return props.toArray(new DriverPropertyInfo[0]);
    }

    /** {@inheritDoc} */
    @Override public int getMajorVersion() {
        return MAJOR_VER;
    }

    /** {@inheritDoc} */
    @Override public int getMinorVersion() {
        return MINOR_VER;
    }

    /** {@inheritDoc} */
    @Override public boolean jdbcCompliant() {
        return false;
    }

    /** {@inheritDoc} */
    @Override public Logger getParentLogger() throws SQLFeatureNotSupportedException {
        throw new SQLFeatureNotSupportedException("java.util.logging is not used.");
    }

    /**
     * Validates and parses connection URL, storing recognized settings into {@code props}.
     *
     * @param url URL.
     * @param props Properties to populate.
     * @return Whether URL is valid.
     */
    private boolean parseUrl(String url, Properties props) {
        if (url == null)
            return false;

        if (url.startsWith(URL_PREFIX) && url.length() > URL_PREFIX.length())
            return parseJdbcUrl(url, props);
        else if (url.startsWith(CFG_URL_PREFIX) && url.length() >= CFG_URL_PREFIX.length())
            return parseJdbcConfigUrl(url, props);

        return false;
    }

    /**
     * Parses a {@code jdbc:ignite:cfg://[params@]config_url} URL.
     *
     * @param url Url (with prefix already validated).
     * @param props Properties to populate.
     * @return Whether URL is valid.
     */
    private boolean parseJdbcConfigUrl(String url, Properties props) {
        url = url.substring(CFG_URL_PREFIX.length());

        String[] parts = url.split("@");

        if (parts.length > 2)
            return false;

        if (parts.length == 2) {
            // Optional "param1=value1:param2=value2" section before '@'.
            if (!parseParameters(parts[0], ":", props))
                return false;
        }

        props.setProperty(PROP_CFG, parts[parts.length - 1]);

        return true;
    }

    /**
     * Parses a {@code jdbc:ignite://host[:port][/cache][?params]} URL.
     *
     * @param url Url (with prefix already validated).
     * @param props Properties to populate.
     * @return Whether URL is valid.
     */
    private boolean parseJdbcUrl(String url, Properties props) {
        url = url.substring(URL_PREFIX.length());

        String[] parts = url.split("\\?");

        if (parts.length > 2)
            return false;

        if (parts.length == 2)
            if (!parseParameters(parts[1], "&", props))
                return false;

        parts = parts[0].split("/");

        assert parts.length > 0;

        if (parts.length > 2)
            return false;

        if (parts.length == 2 && !parts[1].isEmpty())
            props.setProperty(PROP_CACHE, parts[1]);

        url = parts[0];

        parts = url.split(":");

        assert parts.length > 0;

        if (parts.length > 2)
            return false;

        props.setProperty(PROP_HOST, parts[0]);

        try {
            props.setProperty(PROP_PORT, String.valueOf(parts.length == 2 ? Integer.parseInt(parts[1]) : DFLT_PORT));
        }
        catch (NumberFormatException ignored) {
            // Non-numeric port makes the whole URL invalid.
            return false;
        }

        return true;
    }

    /**
     * Validates and parses URL parameters of form {@code k1=v1<delim>k2=v2...}.
     *
     * @param val Parameters string.
     * @param delim Delimiter.
     * @param props Properties to populate (keys are stored with {@link #PROP_PREFIX}).
     * @return Whether URL parameters string is valid.
     */
    private boolean parseParameters(String val, String delim, Properties props) {
        String[] params = val.split(delim);

        for (String param : params) {
            String[] pair = param.split("=");

            if (pair.length != 2 || pair[0].isEmpty() || pair[1].isEmpty())
                return false;

            props.setProperty(PROP_PREFIX + pair[0], pair[1]);
        }

        return true;
    }
}
| apache-2.0 |
cschenyuan/hive-hack | service/src/java/org/apache/hive/service/cli/Type.java | 8080 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.service.cli;
import java.sql.DatabaseMetaData;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hive.service.cli.thrift.TTypeId;
/**
* Type.
*
*/
public enum Type {
  NULL_TYPE("VOID",
      java.sql.Types.NULL,
      TTypeId.NULL_TYPE),
  BOOLEAN_TYPE("BOOLEAN",
      java.sql.Types.BOOLEAN,
      TTypeId.BOOLEAN_TYPE),
  TINYINT_TYPE("TINYINT",
      java.sql.Types.TINYINT,
      TTypeId.TINYINT_TYPE),
  SMALLINT_TYPE("SMALLINT",
      java.sql.Types.SMALLINT,
      TTypeId.SMALLINT_TYPE),
  INT_TYPE("INT",
      java.sql.Types.INTEGER,
      TTypeId.INT_TYPE),
  BIGINT_TYPE("BIGINT",
      java.sql.Types.BIGINT,
      TTypeId.BIGINT_TYPE),
  FLOAT_TYPE("FLOAT",
      java.sql.Types.FLOAT,
      TTypeId.FLOAT_TYPE),
  DOUBLE_TYPE("DOUBLE",
      java.sql.Types.DOUBLE,
      TTypeId.DOUBLE_TYPE),
  STRING_TYPE("STRING",
      java.sql.Types.VARCHAR,
      TTypeId.STRING_TYPE),
  CHAR_TYPE("CHAR",
      java.sql.Types.CHAR,
      TTypeId.CHAR_TYPE,
      true, false, false),
  VARCHAR_TYPE("VARCHAR",
      java.sql.Types.VARCHAR,
      TTypeId.VARCHAR_TYPE,
      true, false, false),
  DATE_TYPE("DATE",
      java.sql.Types.DATE,
      TTypeId.DATE_TYPE),
  TIMESTAMP_TYPE("TIMESTAMP",
      java.sql.Types.TIMESTAMP,
      TTypeId.TIMESTAMP_TYPE),
  BINARY_TYPE("BINARY",
      java.sql.Types.BINARY,
      TTypeId.BINARY_TYPE),
  DECIMAL_TYPE("DECIMAL",
      java.sql.Types.DECIMAL,
      TTypeId.DECIMAL_TYPE,
      true, false, false),
  ARRAY_TYPE("ARRAY",
      java.sql.Types.ARRAY,
      TTypeId.ARRAY_TYPE,
      true, true),
  MAP_TYPE("MAP",
      java.sql.Types.JAVA_OBJECT,
      TTypeId.MAP_TYPE,
      true, true),
  STRUCT_TYPE("STRUCT",
      java.sql.Types.STRUCT,
      TTypeId.STRUCT_TYPE,
      true, false),
  UNION_TYPE("UNIONTYPE",
      java.sql.Types.OTHER,
      TTypeId.UNION_TYPE,
      true, false),
  USER_DEFINED_TYPE("USER_DEFINED",
      java.sql.Types.OTHER,
      TTypeId.USER_DEFINED_TYPE,
      true, false);

  /** Hive type name (upper-case). */
  private final String name;
  /** Corresponding Thrift type id. */
  private final TTypeId tType;
  /** Corresponding {@link java.sql.Types} constant. */
  private final int javaSQLType;
  /** Whether the type takes qualifiers, e.g. VARCHAR(n) or DECIMAL(p,s). */
  private final boolean isQualified;
  /** Whether the type is complex (non-primitive). */
  private final boolean isComplex;
  /** Whether the type is a collection (ARRAY/MAP). */
  private final boolean isCollection;

  Type(String name, int javaSQLType, TTypeId tType, boolean isQualified, boolean isComplex, boolean isCollection) {
    this.name = name;
    this.javaSQLType = javaSQLType;
    this.tType = tType;
    this.isQualified = isQualified;
    this.isComplex = isComplex;
    this.isCollection = isCollection;
  }

  Type(String name, int javaSQLType, TTypeId tType, boolean isComplex, boolean isCollection) {
    this(name, javaSQLType, tType, false, isComplex, isCollection);
  }

  Type(String name, int javaSqlType, TTypeId tType) {
    this(name, javaSqlType, tType, false, false, false);
  }

  public boolean isPrimitiveType() {
    return !isComplex;
  }

  public boolean isQualifiedType() {
    return isQualified;
  }

  public boolean isComplexType() {
    return isComplex;
  }

  public boolean isCollectionType() {
    return isCollection;
  }

  /**
   * Resolves the Type constant for a Thrift type id.
   *
   * @throws IllegalArgumentException if no constant maps to {@code tType}.
   */
  public static Type getType(TTypeId tType) {
    for (Type type : values()) {
      if (tType.equals(type.tType)) {
        return type;
      }
    }
    throw new IllegalArgumentException("Unrecognized Thrift TTypeId value: " + tType);
  }

  /**
   * Resolves the Type constant for a Hive type name. Qualified/complex types
   * match on prefix, e.g. "DECIMAL(10,2)" resolves to DECIMAL_TYPE.
   *
   * @throws IllegalArgumentException if {@code name} is null or unrecognized.
   */
  public static Type getType(String name) {
    if (name == null) {
      throw new IllegalArgumentException("Invalid type name: null");
    }
    for (Type type : values()) {
      if (name.equalsIgnoreCase(type.name)) {
        return type;
      } else if (type.isQualifiedType() || type.isComplexType()) {
        // Case-insensitive prefix match. regionMatches(true, ...) compares
        // char-by-char and so, unlike String.toUpperCase(), is not affected by
        // the default locale (e.g. the Turkish dotless-i problem).
        if (name.regionMatches(true, 0, type.name, 0, type.name.length())) {
          return type;
        }
      }
    }
    throw new IllegalArgumentException("Unrecognized type name: " + name);
  }

  /**
   * Radix for this type (typically either 2 or 10)
   * Null is returned for data types where this is not applicable.
   */
  public Integer getNumPrecRadix() {
    if (this.isNumericType()) {
      return 10;
    }
    return null;
  }

  /**
   * Maximum precision for numeric types.
   * Returns null for non-numeric types.
   * @return
   */
  public Integer getMaxPrecision() {
    switch (this) {
    case TINYINT_TYPE:
      return 3;
    case SMALLINT_TYPE:
      return 5;
    case INT_TYPE:
      return 10;
    case BIGINT_TYPE:
      return 19;
    case FLOAT_TYPE:
      return 7;
    case DOUBLE_TYPE:
      return 15;
    case DECIMAL_TYPE:
      return HiveDecimal.MAX_PRECISION;
    default:
      return null;
    }
  }

  public boolean isNumericType() {
    switch (this) {
    case TINYINT_TYPE:
    case SMALLINT_TYPE:
    case INT_TYPE:
    case BIGINT_TYPE:
    case FLOAT_TYPE:
    case DOUBLE_TYPE:
    case DECIMAL_TYPE:
      return true;
    default:
      return false;
    }
  }

  /**
   * Prefix used to quote a literal of this type (may be null)
   */
  public String getLiteralPrefix() {
    return null;
  }

  /**
   * Suffix used to quote a literal of this type (may be null)
   * @return
   */
  public String getLiteralSuffix() {
    return null;
  }

  /**
   * Can you use NULL for this type?
   * @return
   *   DatabaseMetaData.typeNoNulls - does not allow NULL values
   *   DatabaseMetaData.typeNullable - allows NULL values
   *   DatabaseMetaData.typeNullableUnknown - nullability unknown
   */
  public Short getNullable() {
    // All Hive types are nullable
    return DatabaseMetaData.typeNullable;
  }

  /**
   * Is the type case sensitive?
   * @return
   */
  public Boolean isCaseSensitive() {
    switch (this) {
    case STRING_TYPE:
      return true;
    default:
      return false;
    }
  }

  /**
   * Parameters used in creating the type (may be null)
   * @return
   */
  public String getCreateParams() {
    return null;
  }

  /**
   * Can you use WHERE based on this type?
   * @return
   *   DatabaseMetaData.typePredNone - No support
   *   DatabaseMetaData.typePredChar - Only support with WHERE .. LIKE
   *   DatabaseMetaData.typePredBasic - Supported except for WHERE .. LIKE
   *   DatabaseMetaData.typeSearchable - Supported for all WHERE ..
   */
  public Short getSearchable() {
    if (isPrimitiveType()) {
      return DatabaseMetaData.typeSearchable;
    }
    return DatabaseMetaData.typePredNone;
  }

  /**
   * Is this type unsigned?
   * @return
   */
  public Boolean isUnsignedAttribute() {
    if (isNumericType()) {
      return false;
    }
    return true;
  }

  /**
   * Can this type represent money?
   * @return
   */
  public Boolean isFixedPrecScale() {
    return false;
  }

  /**
   * Can this type be used for an auto-increment value?
   * @return
   */
  public Boolean isAutoIncrement() {
    return false;
  }

  /**
   * Localized version of type name (may be null).
   * @return
   */
  public String getLocalizedName() {
    return null;
  }

  /**
   * Minimum scale supported for this type
   * @return
   */
  public Short getMinimumScale() {
    return 0;
  }

  /**
   * Maximum scale supported for this type
   * @return
   */
  public Short getMaximumScale() {
    return 0;
  }

  public TTypeId toTType() {
    return tType;
  }

  public int toJavaSQLType() {
    return javaSQLType;
  }

  public String getName() {
    return name;
  }
}
| apache-2.0 |
yssharma/pig-on-drill | exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java | 9397 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.record.vector;
import static org.junit.Assert.assertEquals;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.exec.ExecTest;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.memory.TopLevelAllocator;
import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.vector.BitVector;
import org.apache.drill.exec.vector.NullableFloat4Vector;
import org.apache.drill.exec.vector.NullableUInt4Vector;
import org.apache.drill.exec.vector.NullableVarCharVector;
import org.apache.drill.exec.vector.UInt4Vector;
import org.junit.Test;
public class TestValueVector extends ExecTest {
TopLevelAllocator allocator = new TopLevelAllocator();
@Test
public void testFixedType() {
// Build a required uint field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
typeBuilder
.setMinorType(MinorType.UINT4)
.setMode(DataMode.REQUIRED)
.setWidth(4);
MaterializedField field = MaterializedField.create(SchemaPath.getSimplePath(""), typeBuilder.build());
// Create a new value vector for 1024 integers
UInt4Vector v = new UInt4Vector(field, allocator);
UInt4Vector.Mutator m = v.getMutator();
v.allocateNew(1024);
// Put and set a few values
m.setSafe(0, 100);
m.setSafe(1, 101);
m.setSafe(100, 102);
m.setSafe(1022, 103);
m.setSafe(1023, 104);
assertEquals(100, v.getAccessor().get(0));
assertEquals(101, v.getAccessor().get(1));
assertEquals(102, v.getAccessor().get(100));
assertEquals(103, v.getAccessor().get(1022));
assertEquals(104, v.getAccessor().get(1023));
}
@Test
public void testNullableVarLen2() {
// Build an optional varchar field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
SerializedField.Builder defBuilder = SerializedField.newBuilder();
typeBuilder
.setMinorType(MinorType.VARCHAR)
.setMode(DataMode.OPTIONAL)
.setWidth(2);
defBuilder
.setMajorType(typeBuilder.build());
MaterializedField field = MaterializedField.create(defBuilder.build());
// Create a new value vector for 1024 integers
NullableVarCharVector v = new NullableVarCharVector(field, allocator);
NullableVarCharVector.Mutator m = v.getMutator();
v.allocateNew(1024*10, 1024);
// Create and set 3 sample strings
String str1 = new String("AAAAA1");
String str2 = new String("BBBBBBBBB2");
String str3 = new String("CCCC3");
m.set(0, str1.getBytes(Charset.forName("UTF-8")));
m.set(1, str2.getBytes(Charset.forName("UTF-8")));
m.set(2, str3.getBytes(Charset.forName("UTF-8")));
// Check the sample strings
assertEquals(str1, new String(v.getAccessor().get(0), Charset.forName("UTF-8")));
assertEquals(str2, new String(v.getAccessor().get(1), Charset.forName("UTF-8")));
assertEquals(str3, new String(v.getAccessor().get(2), Charset.forName("UTF-8")));
// Ensure null value throws
boolean b = false;
try {
v.getAccessor().get(3);
} catch(AssertionError e) {
b = true;
}finally{
if(!b){
assert false;
}
}
}
@Test
public void testNullableFixedType() {
// Build an optional uint field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
SerializedField.Builder defBuilder = SerializedField.newBuilder();
typeBuilder
.setMinorType(MinorType.UINT4)
.setMode(DataMode.OPTIONAL)
.setWidth(4);
defBuilder
.setMajorType(typeBuilder.build());
MaterializedField field = MaterializedField.create(defBuilder.build());
// Create a new value vector for 1024 integers
NullableUInt4Vector v = new NullableUInt4Vector(field, allocator);
NullableUInt4Vector.Mutator m = v.getMutator();
v.allocateNew(1024);
// Put and set a few values
m.set(0, 100);
m.set(1, 101);
m.set(100, 102);
m.set(1022, 103);
m.set(1023, 104);
assertEquals(100, v.getAccessor().get(0));
assertEquals(101, v.getAccessor().get(1));
assertEquals(102, v.getAccessor().get(100));
assertEquals(103, v.getAccessor().get(1022));
assertEquals(104, v.getAccessor().get(1023));
// Ensure null values throw
{
boolean b = false;
try {
v.getAccessor().get(3);
} catch(AssertionError e) {
b = true;
}finally{
if(!b){
assert false;
}
}
}
v.allocateNew(2048);
{
boolean b = false;
try {
v.getAccessor().get(0);
} catch(AssertionError e) {
b = true;
}finally{
if(!b){
assert false;
}
}
}
m.set(0, 100);
m.set(1, 101);
m.set(100, 102);
m.set(1022, 103);
m.set(1023, 104);
assertEquals(100, v.getAccessor().get(0));
assertEquals(101, v.getAccessor().get(1));
assertEquals(102, v.getAccessor().get(100));
assertEquals(103, v.getAccessor().get(1022));
assertEquals(104, v.getAccessor().get(1023));
// Ensure null values throw
{
boolean b = false;
try {
v.getAccessor().get(3);
} catch(AssertionError e) {
b = true;
}finally{
if(!b){
assert false;
}
}
}
}
@Test
public void testNullableFloat() {
// Build an optional float field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
SerializedField.Builder defBuilder = SerializedField.newBuilder();
typeBuilder
.setMinorType(MinorType.FLOAT4)
.setMode(DataMode.OPTIONAL)
.setWidth(4);
defBuilder
.setMajorType(typeBuilder.build());
MaterializedField field = MaterializedField.create(defBuilder.build());
// Create a new value vector for 1024 integers
NullableFloat4Vector v = (NullableFloat4Vector) TypeHelper.getNewVector(field, allocator);
NullableFloat4Vector.Mutator m = v.getMutator();
v.allocateNew(1024);
// Put and set a few values
m.set(0, 100.1f);
m.set(1, 101.2f);
m.set(100, 102.3f);
m.set(1022, 103.4f);
m.set(1023, 104.5f);
assertEquals(100.1f, v.getAccessor().get(0), 0);
assertEquals(101.2f, v.getAccessor().get(1), 0);
assertEquals(102.3f, v.getAccessor().get(100), 0);
assertEquals(103.4f, v.getAccessor().get(1022), 0);
assertEquals(104.5f, v.getAccessor().get(1023), 0);
// Ensure null values throw
{
boolean b = false;
try {
v.getAccessor().get(3);
} catch(AssertionError e) {
b = true;
}finally{
if(!b){
assert false;
}
}
}
v.allocateNew(2048);
{
boolean b = false;
try {
v.getAccessor().get(0);
} catch(AssertionError e) {
b = true;
}finally{
if(!b){
assert false;
}
}
}
}
@Test
public void testBitVector() {
// Build a required boolean field definition
MajorType.Builder typeBuilder = MajorType.newBuilder();
SerializedField.Builder defBuilder = SerializedField.newBuilder();
typeBuilder
.setMinorType(MinorType.BIT)
.setMode(DataMode.REQUIRED)
.setWidth(4);
defBuilder
.setMajorType(typeBuilder.build());
MaterializedField field = MaterializedField.create(defBuilder.build());
// Create a new value vector for 1024 integers
BitVector v = new BitVector(field, allocator);
BitVector.Mutator m = v.getMutator();
v.allocateNew(1024);
// Put and set a few values
m.set(0, 1);
m.set(1, 0);
m.set(100, 0);
m.set(1022, 1);
assertEquals(1, v.getAccessor().get(0));
assertEquals(0, v.getAccessor().get(1));
assertEquals(0, v.getAccessor().get(100));
assertEquals(1, v.getAccessor().get(1022));
// test setting the same value twice
m.set(0, 1);
m.set(0, 1);
m.set(1, 0);
m.set(1, 0);
assertEquals(1, v.getAccessor().get(0));
assertEquals(0, v.getAccessor().get(1));
// test toggling the values
m.set(0, 0);
m.set(1, 1);
assertEquals(0, v.getAccessor().get(0));
assertEquals(1, v.getAccessor().get(1));
// Ensure unallocated space returns 0
assertEquals(0, v.getAccessor().get(3));
}
}
| apache-2.0 |
krosenvold/selenium-git-release-candidate | java/client/test/org/openqa/selenium/testing/JUnit4TestBase.java | 3674 | /*
Copyright 2012 Selenium committers
Copyright 2012 Software Freedom Conservancy
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.openqa.selenium.testing;
import java.util.logging.Logger;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.junit.runner.RunWith;
import org.openqa.selenium.Pages;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.environment.GlobalTestEnvironment;
import org.openqa.selenium.environment.InProcessTestEnvironment;
import org.openqa.selenium.environment.TestEnvironment;
import org.openqa.selenium.environment.webserver.AppServer;
import org.openqa.selenium.internal.WrapsDriver;
import org.openqa.selenium.testing.drivers.WebDriverBuilder;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.core.IsNot.not;
import static org.junit.Assert.assertThat;
@RunWith(SeleniumTestRunner.class)
public abstract class JUnit4TestBase implements WrapsDriver {
private static final Logger logger = Logger.getLogger(JUnit4TestBase.class.getName());
protected TestEnvironment environment;
protected AppServer appServer;
protected Pages pages;
private static ThreadLocal<WebDriver> storedDriver = new ThreadLocal<WebDriver>();
protected WebDriver driver;
@Before
public void prepareEnvironment() throws Exception {
environment = GlobalTestEnvironment.get(InProcessTestEnvironment.class);
appServer = environment.getAppServer();
pages = new Pages(appServer);
String hostName = environment.getAppServer().getHostName();
String alternateHostName = environment.getAppServer().getAlternateHostName();
assertThat(hostName, is(not(equalTo(alternateHostName))));
}
@Before
public void createDriver() throws Exception {
driver = actuallyCreateDriver();
}
@Rule
public TestRule traceMethodName = new TestWatcher() {
@Override
protected void starting(Description description) {
super.starting(description);
logger.info(">>> Starting " + description);
}
@Override
protected void finished(Description description) {
super.finished(description);
logger.info("<<< Finished " + description);
}
};
public WebDriver getWrappedDriver() {
return storedDriver.get();
}
public static WebDriver actuallyCreateDriver() {
WebDriver driver = storedDriver.get();
if (driver == null) {
driver = new WebDriverBuilder().get();
storedDriver.set(driver);
}
return storedDriver.get();
}
public static void removeDriver() {
if (Boolean.getBoolean("webdriver.singletestsuite.leaverunning")) {
return;
}
WebDriver current = storedDriver.get();
if (current == null) {
return;
}
try {
current.quit();
} catch (RuntimeException ignored) {
// fall through
}
storedDriver.remove();
}
protected boolean isIeDriverTimedOutException(IllegalStateException e) {
// The IE driver may throw a timed out exception
return e.getClass().getName().contains("TimedOutException");
}
} | apache-2.0 |
cgtz/ambry | ambry-utils/src/test/java/com/github/ambry/utils/KeyGenerator.java | 3733 | /**
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.utils;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.util.Random;
public class KeyGenerator {
private static ByteBuffer randomKey(Random r) {
byte[] bytes = new byte[48];
r.nextBytes(bytes);
return ByteBuffer.wrap(bytes);
}
static class RandomStringGenerator implements ResetableIterator<ByteBuffer> {
int i, n, seed;
Random random;
RandomStringGenerator(int seed, int n) {
i = 0;
this.seed = seed;
this.n = n;
reset();
}
public int size() {
return n;
}
public void reset() {
random = new Random(seed);
}
public boolean hasNext() {
return i < n;
}
public ByteBuffer next() {
i++;
return randomKey(random);
}
public void remove() {
throw new UnsupportedOperationException();
}
}
static class IntGenerator implements ResetableIterator<ByteBuffer> {
private int i, start, n;
IntGenerator(int n) {
this(0, n);
}
IntGenerator(int start, int n) {
this.start = start;
this.n = n;
reset();
}
public int size() {
return n - start;
}
public void reset() {
i = start;
}
public boolean hasNext() {
return i < n;
}
public ByteBuffer next() {
return ByteBuffer.wrap(Integer.toString(i++).getBytes());
}
public void remove() {
throw new UnsupportedOperationException();
}
}
static class WordGenerator implements ResetableIterator<ByteBuffer> {
static int WORDS;
static {
try {
BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream("/usr/share/dict/words")));
while (br.ready()) {
br.readLine();
WORDS++;
}
} catch (IOException e) {
WORDS = 0;
}
}
BufferedReader reader;
private int modulo;
private int skip;
byte[] next;
WordGenerator(int skip, int modulo) {
this.skip = skip;
this.modulo = modulo;
reset();
}
public int size() {
return (1 + WORDS - skip) / modulo;
}
public void reset() {
try {
reader = new BufferedReader(new InputStreamReader(new FileInputStream("/usr/share/dict/words")));
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
}
for (int i = 0; i < skip; i++) {
try {
reader.readLine();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
next();
}
public boolean hasNext() {
return next != null;
}
public ByteBuffer next() {
try {
byte[] s = next;
for (int i = 0; i < modulo; i++) {
String line = reader.readLine();
next = line == null ? null : line.getBytes();
}
return s == null ? null : ByteBuffer.wrap(s);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public void remove() {
throw new UnsupportedOperationException();
}
}
}
| apache-2.0 |
DariusX/camel | catalog/camel-route-parser/src/test/java/org/apache/camel/parser/java/MySimpleToDRoute.java | 1217 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.parser.java;
import org.apache.camel.ExchangePattern;
import org.apache.camel.builder.RouteBuilder;
/**
 * Test route mixing a dynamic {@code toD} endpoint, an exchange-pattern
 * qualified {@code to}, and a {@code to} whose URI comes from a local
 * variable.
 */
public class MySimpleToDRoute extends RouteBuilder {

    @Override
    public void configure() throws Exception {
        final String lastEndpoint = "log:c";

        from("direct:start")
            .toD("log:a", true)
            .to(ExchangePattern.InOnly, "log:b")
            .to(lastEndpoint);
    }
}
| apache-2.0 |
mtjandra/izpack | izpack-installer/src/main/java/com/izforge/izpack/installer/automation/PanelAutomationHelper.java | 2921 | /*
* IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Copyright 2003 Tino Schwarze
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.installer.automation;
import com.izforge.izpack.api.handler.AbstractUIHandler;
/**
* Abstract class implementing basic functions needed by all panel automation helpers.
*
* @author tisc
*/
/**
 * Console-based base implementation of {@link AbstractUIHandler} for panel
 * automation helpers. During automated (unattended) installation there is no
 * GUI, so notifications and warnings go to the standard streams and questions
 * are answered with safe defaults.
 *
 * @author tisc
 */
abstract public class PanelAutomationHelper implements AbstractUIHandler
{
    /**
     * Writes the notification to standard output.
     *
     * @param message the text to display
     */
    @Override
    public void emitNotification(String message)
    {
        System.out.println(message);
    }

    /**
     * Writes the warning to standard error.
     *
     * @param title   unused in console mode
     * @param message the warning text
     * @return always {@code true}: automated installations continue
     */
    @Override
    public boolean emitWarning(String title, String message)
    {
        System.err.println(String.format("[ WARNING: %s ]", message));
        return true;
    }

    /**
     * Writes the error to standard error.
     *
     * @param title   unused in console mode
     * @param message the error text
     */
    @Override
    public void emitError(String title, String message)
    {
        System.err.println(String.format("[ ERROR: %s ]", message));
    }

    /**
     * Reports the error exactly like {@link #emitError}; blocking the next
     * panel has no meaning without a UI.
     */
    @Override
    public void emitErrorAndBlockNext(String title, String message)
    {
        emitError(title, message);
    }

    /**
     * No UI is available to ask the user and no default was supplied.
     *
     * @return always {@link AbstractUIHandler#ANSWER_CANCEL}
     */
    @Override
    public int askQuestion(String title, String question, int choices)
    {
        return ANSWER_CANCEL;
    }

    /**
     * No UI is available to ask the user; fall back to the caller's default.
     *
     * @return {@code default_choice}
     */
    @Override
    public int askQuestion(String title, String question, int choices, int default_choice)
    {
        return default_choice;
    }

    /** Warning questions are handled exactly like ordinary questions. */
    @Override
    public int askWarningQuestion(String title, String question, int choices, int default_choice)
    {
        return askQuestion(title, question, choices, default_choice);
    }
}
| apache-2.0 |
fhg-fokus-nubomedia/ims-connector | src/main/java/gov/nist/javax/sip/header/ims/SecurityVerify.java | 1986 | /*
* Conditions Of Use
*
* This software was developed by employees of the National Institute of
* Standards and Technology (NIST), an agency of the Federal Government,
* and others.
* Pursuant to title 15 Untied States Code Section 105, works of NIST
* employees are not subject to copyright protection in the United States
* and are considered to be in the public domain. As a result, a formal
* license is not needed to use the software.
*
* This software is provided by NIST as a service and is expressly
* provided "AS IS." NIST MAKES NO WARRANTY OF ANY KIND, EXPRESS, IMPLIED
* OR STATUTORY, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTY OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, NON-INFRINGEMENT
* AND DATA ACCURACY. NIST does not warrant or make any representations
* regarding the use of the software or the results thereof, including but
* not limited to the correctness, accuracy, reliability or usefulness of
* the software.
*
* Permission to use this software is contingent upon your acceptance
* of the terms of this agreement
*
* .
*
*/
/************************************************************************************************
* PRODUCT OF PT INOVACAO - EST DEPARTMENT and Telecommunications Institute (Aveiro, Portugal) *
************************************************************************************************/
package gov.nist.javax.sip.header.ims;
import java.text.ParseException;
import javax.sip.header.ExtensionHeader;
/**
* Security-Verify header
* - sec-agree: RFC 3329 + 3GPP TS33.203 (Annex H).
*
* <p></p>
*
* @author Miguel Freitas (IT) PT-Inovacao
*/
/**
 * Security-Verify header (sec-agree: RFC 3329 + 3GPP TS 33.203, Annex H).
 * All parameter handling is inherited from {@link SecurityAgree}.
 */
public class SecurityVerify
    extends SecurityAgree
    implements SecurityVerifyHeader, ExtensionHeader
{
    // Explicit serial version id, resolving the previous "TODO serialVersionUID"
    // (SIP header objects in this stack are serializable).
    private static final long serialVersionUID = 1L;

    /** Creates an empty Security-Verify header. */
    public SecurityVerify()
    {
        super(SecurityVerifyHeader.NAME);
    }

    /**
     * Direct assignment of a raw header value is not supported.
     *
     * @param value the raw value string
     * @throws ParseException always
     */
    public void setValue(String value) throws ParseException
    {
        throw new ParseException(value, 0);
    }
}
| apache-2.0 |
xodus7/tensorflow | tensorflow/contrib/lite/java/src/test/java/org/tensorflow/lite/NativeInterpreterWrapperTest.java | 19077 | /* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package org.tensorflow.lite;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.fail;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for {@link org.tensorflow.lite.NativeInterpreterWrapper}. */
// TODO(b/71818425): Generates model files dynamically.
@RunWith(JUnit4.class)
public final class NativeInterpreterWrapperTest {
  // Paths to checked-in test models: one per supported tensor element type,
  // plus intentionally broken models used by the failure-path tests below.
  private static final String FLOAT_MODEL_PATH =
      "tensorflow/contrib/lite/java/src/testdata/add.bin";
  private static final String INT_MODEL_PATH =
      "tensorflow/contrib/lite/java/src/testdata/int32.bin";
  private static final String LONG_MODEL_PATH =
      "tensorflow/contrib/lite/java/src/testdata/int64.bin";
  private static final String BYTE_MODEL_PATH =
      "tensorflow/contrib/lite/java/src/testdata/uint8.bin";
  // NOTE(review): QUANTIZED_MODEL_PATH is not referenced by the tests visible
  // in this excerpt — presumably used further down the class; confirm.
  private static final String QUANTIZED_MODEL_PATH =
      "tensorflow/contrib/lite/java/src/testdata/quantized.bin";
  private static final String INVALID_MODEL_PATH =
      "tensorflow/contrib/lite/java/src/testdata/invalid_model.bin";
  private static final String MODEL_WITH_CUSTOM_OP_PATH =
      "tensorflow/contrib/lite/java/src/testdata/with_custom_op.lite";
  private static final String NONEXISTING_MODEL_PATH =
      "tensorflow/contrib/lite/java/src/testdata/nonexisting_model.bin";

  // A valid model must load and yield a usable wrapper.
  @Test
  public void testConstructor() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
    assertThat(wrapper).isNotNull();
    wrapper.close();
  }
  // A file that is not a valid flatbuffer must be rejected with a clear message.
  @Test
  public void testConstructorWithInvalidModel() {
    try {
      NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(INVALID_MODEL_PATH);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e).hasMessageThat().contains("The model is not a valid Flatbuffer file");
    }
  }

  // A missing file surfaces both the flatbuffer error and the open failure.
  @Test
  public void testConstructorWithNonexistingModel() {
    try {
      NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(NONEXISTING_MODEL_PATH);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e).hasMessageThat().contains("The model is not a valid Flatbuffer file");
      assertThat(e).hasMessageThat().contains("Could not open");
    }
  }

  // A model containing a custom op with no registered kernel must fail to load.
  @Test
  public void testConstructorWithUnresolableCustomOp() {
    try {
      NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(MODEL_WITH_CUSTOM_OP_PATH);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessageThat()
          .contains("Cannot create interpreter: Didn't find custom op for name 'Assign'");
    }
  }
  // Runs the float "add" model on a [2, 8, 8, 3] input and spot-checks one
  // output cell; the expected values are 3x the inputs.
  @Test
  public void testRunWithFloat() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
    float[] oneD = {1.23f, -6.54f, 7.81f};
    float[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
    float[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
    float[][][][] fourD = {threeD, threeD};
    Object[] inputs = {fourD};
    float[][][][] parsedOutputs = new float[2][8][8][3];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    wrapper.run(inputs, outputs);
    float[] outputOneD = parsedOutputs[0][0][0];
    float[] expected = {3.69f, -19.62f, 23.43f};
    assertThat(outputOneD).usingTolerance(0.1f).containsExactly(expected).inOrder();
    wrapper.close();
  }

  // Same model as above, but the output is received into a direct ByteBuffer
  // instead of a float array.
  @Test
  public void testRunWithBufferOutput() {
    try (NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH)) {
      float[] oneD = {1.23f, -6.54f, 7.81f};
      float[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
      float[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
      float[][][][] fourD = {threeD, threeD};
      Object[] inputs = {fourD};
      ByteBuffer parsedOutput =
          ByteBuffer.allocateDirect(2 * 8 * 8 * 3 * 4).order(ByteOrder.nativeOrder());
      Map<Integer, Object> outputs = new HashMap<>();
      outputs.put(0, parsedOutput);
      wrapper.run(inputs, outputs);
      // First three floats of the flattened output buffer.
      float[] outputOneD = {
        parsedOutput.getFloat(0), parsedOutput.getFloat(4), parsedOutput.getFloat(8)
      };
      float[] expected = {3.69f, -19.62f, 23.43f};
      assertThat(outputOneD).usingTolerance(0.1f).containsExactly(expected).inOrder();
    }
  }
  // Running the same wrapper twice with identically shaped inputs must work
  // and produce the same result both times.
  @Test
  public void testRunWithInputsOfSameDims() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
    float[] oneD = {1.23f, -6.54f, 7.81f};
    float[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
    float[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
    float[][][][] fourD = {threeD, threeD};
    Object[] inputs = {fourD};
    float[][][][] parsedOutputs = new float[2][8][8][3];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    wrapper.run(inputs, outputs);
    float[] outputOneD = parsedOutputs[0][0][0];
    float[] expected = {3.69f, -19.62f, 23.43f};
    assertThat(outputOneD).usingTolerance(0.1f).containsExactly(expected).inOrder();
    // Second run with a fresh output array.
    parsedOutputs = new float[2][8][8][3];
    outputs.put(0, parsedOutputs);
    wrapper.run(inputs, outputs);
    outputOneD = parsedOutputs[0][0][0];
    assertThat(outputOneD).usingTolerance(0.1f).containsExactly(expected).inOrder();
    wrapper.close();
  }

  // The int32 model maps a [2, 8, 8, 3] input to a [2, 4, 4, 12] output; the
  // expected cell repeats the input triple four times.
  @Test
  public void testRunWithInt() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(INT_MODEL_PATH);
    int[] oneD = {3, 7, -4};
    int[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
    int[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
    int[][][][] fourD = {threeD, threeD};
    Object[] inputs = {fourD};
    int[][][][] parsedOutputs = new int[2][4][4][12];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    wrapper.run(inputs, outputs);
    int[] outputOneD = parsedOutputs[0][0][0];
    int[] expected = {3, 7, -4, 3, 7, -4, 3, 7, -4, 3, 7, -4};
    assertThat(outputOneD).isEqualTo(expected);
    wrapper.close();
  }

  // Same shape transformation as testRunWithInt, but with int64 tensors.
  @Test
  public void testRunWithLong() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(LONG_MODEL_PATH);
    long[] oneD = {-892834092L, 923423L, 2123918239018L};
    long[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
    long[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
    long[][][][] fourD = {threeD, threeD};
    Object[] inputs = {fourD};
    long[][][][] parsedOutputs = new long[2][4][4][12];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    wrapper.run(inputs, outputs);
    long[] outputOneD = parsedOutputs[0][0][0];
    long[] expected = {-892834092L, 923423L, 2123918239018L, -892834092L, 923423L, 2123918239018L,
        -892834092L, 923423L, 2123918239018L, -892834092L, 923423L, 2123918239018L};
    assertThat(outputOneD).isEqualTo(expected);
    wrapper.close();
  }
  // Same shape transformation with uint8 tensors; the input must first be
  // resized to [2, 8, 8, 3].
  @Test
  public void testRunWithByte() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(BYTE_MODEL_PATH);
    byte[] oneD = {(byte) 0xe0, 0x4f, (byte) 0xd0};
    byte[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
    byte[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
    byte[][][][] fourD = {threeD, threeD};
    Object[] inputs = {fourD};
    int[] inputDims = {2, 8, 8, 3};
    wrapper.resizeInput(0, inputDims);
    byte[][][][] parsedOutputs = new byte[2][4][4][12];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    wrapper.run(inputs, outputs);
    byte[] outputOneD = parsedOutputs[0][0][0];
    byte[] expected = {(byte) 0xe0, 0x4f, (byte) 0xd0, (byte) 0xe0, 0x4f, (byte) 0xd0,
        (byte) 0xe0, 0x4f, (byte) 0xd0, (byte) 0xe0, 0x4f, (byte) 0xd0};
    assertThat(outputOneD).isEqualTo(expected);
    wrapper.close();
  }

  // Feeds the uint8 model from a direct ByteBuffer instead of a byte array.
  @Test
  public void testRunWithByteBufferHavingBytes() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(BYTE_MODEL_PATH);
    ByteBuffer bbuf = ByteBuffer.allocateDirect(2 * 8 * 8 * 3);
    bbuf.order(ByteOrder.nativeOrder());
    bbuf.rewind();
    // Fill every [i][j][k] cell with the same 3-byte triple.
    for (int i = 0; i < 2; ++i) {
      for (int j = 0; j < 8; ++j) {
        for (int k = 0; k < 8; ++k) {
          bbuf.put((byte) 0xe0);
          bbuf.put((byte) 0x4f);
          bbuf.put((byte) 0xd0);
        }
      }
    }
    bbuf.rewind();
    Object[] inputs = {bbuf};
    int[] inputDims = {2, 8, 8, 3};
    wrapper.resizeInput(0, inputDims);
    byte[][][][] parsedOutputs = new byte[2][4][4][12];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    wrapper.run(inputs, outputs);
    byte[] outputOneD = parsedOutputs[0][0][0];
    byte[] expected = {
      (byte) 0xe0, 0x4f, (byte) 0xd0, (byte) 0xe0, 0x4f, (byte) 0xd0,
      (byte) 0xe0, 0x4f, (byte) 0xd0, (byte) 0xe0, 0x4f, (byte) 0xd0
    };
    assertThat(outputOneD).isEqualTo(expected);
    wrapper.close();
  }
  // A ByteBuffer bigger than the model's input must be rejected until the
  // input is resized to match, after which the run succeeds.
  @Test
  public void testRunWithByteBufferHavingFloats() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
    ByteBuffer bbuf = ByteBuffer.allocateDirect(4 * 8 * 8 * 3 * 4);
    bbuf.order(ByteOrder.nativeOrder());
    bbuf.rewind();
    // Fill a [4, 8, 8, 3] float tensor with a repeating triple.
    for (int i = 0; i < 4; ++i) {
      for (int j = 0; j < 8; ++j) {
        for (int k = 0; k < 8; ++k) {
          bbuf.putFloat(1.23f);
          bbuf.putFloat(-6.54f);
          bbuf.putFloat(7.81f);
        }
      }
    }
    Object[] inputs = {bbuf};
    float[][][][] parsedOutputs = new float[4][8][8][3];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    try {
      wrapper.run(inputs, outputs);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessageThat()
          .contains(
              "Cannot convert between a TensorFlowLite buffer with 768 bytes and a "
                  + "ByteBuffer with 3072 bytes.");
    }
    int[] inputDims = {4, 8, 8, 3};
    wrapper.resizeInput(0, inputDims);
    wrapper.run(inputs, outputs);
    float[] outputOneD = parsedOutputs[0][0][0];
    float[] expected = {3.69f, -19.62f, 23.43f};
    assertThat(outputOneD).usingTolerance(0.1f).containsExactly(expected).inOrder();
    wrapper.close();
  }

  // A ByteBuffer whose size matches no valid resize must always be rejected.
  @Test
  public void testRunWithByteBufferHavingWrongSize() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(BYTE_MODEL_PATH);
    ByteBuffer bbuf = ByteBuffer.allocateDirect(2 * 7 * 8 * 3);
    bbuf.order(ByteOrder.nativeOrder());
    Object[] inputs = {bbuf};
    Map<Integer, Object> outputs = new HashMap<>();
    ByteBuffer parsedOutput = ByteBuffer.allocateDirect(2 * 7 * 8 * 3);
    outputs.put(0, parsedOutput);
    try {
      wrapper.run(inputs, outputs);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessageThat()
          .contains(
              "Cannot convert between a TensorFlowLite buffer with 192 bytes and a "
                  + "ByteBuffer with 336 bytes.");
    }
    wrapper.close();
  }
  // Feeding an int array to a float model must fail with a type message.
  @Test
  public void testRunWithWrongInputType() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
    int[] oneD = {4, 3, 9};
    int[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
    int[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
    int[][][][] fourD = {threeD, threeD};
    Object[] inputs = {fourD};
    int[][][][] parsedOutputs = new int[2][8][8][3];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    try {
      wrapper.run(inputs, outputs);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e)
          .hasMessageThat()
          .contains(
              "Cannot convert between a TensorFlowLite tensor with type FLOAT32 and a Java object "
                  + "of type [[[[I (which is compatible with the TensorFlowLite type INT32)");
    }
    wrapper.close();
  }

  // Using the wrapper after close() must fail rather than crash natively.
  @Test
  public void testRunAfterClose() {
    NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
    wrapper.close();
    float[] oneD = {1.23f, 6.54f, 7.81f};
    float[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
    float[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
    float[][][][] fourD = {threeD, threeD};
    Object[] inputs = {fourD};
    float[][][][] parsedOutputs = new float[2][8][8][3];
    Map<Integer, Object> outputs = new HashMap<>();
    outputs.put(0, parsedOutputs);
    try {
      wrapper.run(inputs, outputs);
      fail();
    } catch (IllegalArgumentException e) {
      assertThat(e).hasMessageThat().contains("Invalid handle to Interpreter.");
    }
  }
@Test
public void testRunWithEmptyInputs() {
NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
try {
Object[] inputs = {};
wrapper.run(inputs, null);
fail();
} catch (IllegalArgumentException e) {
assertThat(e).hasMessageThat().contains("Inputs should not be null or empty.");
}
wrapper.close();
}
@Test
public void testRunWithWrongInputSize() {
NativeInterpreterWrapper wrapper = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
float[] oneD = {1.23f, 6.54f, 7.81f};
float[][] twoD = {oneD, oneD, oneD, oneD, oneD, oneD, oneD, oneD};
float[][][] threeD = {twoD, twoD, twoD, twoD, twoD, twoD, twoD, twoD};
float[][][][] fourD = {threeD, threeD};
Object[] inputs = {fourD, fourD};
float[][][][] parsedOutputs = new float[2][8][8][3];
Map<Integer, Object> outputs = new HashMap<>();
outputs.put(0, parsedOutputs);
try {
wrapper.run(inputs, outputs);
fail();
} catch (IllegalArgumentException e) {
assertThat(e).hasMessageThat().contains("Invalid input Tensor index: 1");
}
wrapper.close();
}
@Test
public void testRunWithWrongInputNumOfDims() {
  NativeInterpreterWrapper interpreter = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
  // Deliberately 3-D input of shape [8, 7, 3]; the model expects a 4-D tensor.
  float[] row = {1.23f, 6.54f, 7.81f};
  float[][] plane = {row, row, row, row, row, row, row};
  float[][][] cube = {plane, plane, plane, plane, plane, plane, plane, plane};
  Map<Integer, Object> outputs = new HashMap<>();
  outputs.put(0, new float[2][8][8][3]);
  try {
    interpreter.run(new Object[] {cube}, outputs);
    fail();
  } catch (IllegalArgumentException e) {
    assertThat(e)
        .hasMessageThat()
        .contains(
            "Cannot copy between a TensorFlowLite tensor with shape [8, 7, 3] and a "
                + "Java object with shape [2, 8, 8, 3].");
  }
  interpreter.close();
}
@Test
public void testRunWithWrongInputDims() {
  NativeInterpreterWrapper interpreter = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
  // 4-D input with a wrong third dimension: [2, 8, 7, 3] instead of [2, 8, 8, 3].
  float[] row = {1.23f, 6.54f, 7.81f};
  float[][] plane = {row, row, row, row, row, row, row};
  float[][][] cube = {plane, plane, plane, plane, plane, plane, plane, plane};
  float[][][][] batch = {cube, cube};
  Map<Integer, Object> outputs = new HashMap<>();
  outputs.put(0, new float[2][8][8][3]);
  try {
    interpreter.run(new Object[] {batch}, outputs);
    fail();
  } catch (IllegalArgumentException e) {
    assertThat(e)
        .hasMessageThat()
        .contains(
            "Cannot copy between a TensorFlowLite tensor with shape [2, 8, 7, 3] and a "
                + "Java object with shape [2, 8, 8, 3].");
  }
  interpreter.close();
}
@Test
public void testGetInferenceLatency() {
  NativeInterpreterWrapper interpreter = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
  float[] row = {1.23f, 6.54f, 7.81f};
  float[][] plane = {row, row, row, row, row, row, row, row};
  float[][][] cube = {plane, plane, plane, plane, plane, plane, plane, plane};
  float[][][][] batch = {cube, cube};
  Map<Integer, Object> outputs = new HashMap<>();
  outputs.put(0, new float[2][8][8][3]);
  interpreter.run(new Object[] {batch}, outputs);
  // A successful inference must record a positive native-inference latency.
  assertThat(interpreter.getLastNativeInferenceDurationNanoseconds()).isGreaterThan(0L);
  interpreter.close();
}
@Test
public void testGetInferenceLatencyWithNewWrapper() {
  // Before any inference has run, no latency value should be reported.
  NativeInterpreterWrapper interpreter = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
  assertThat(interpreter.getLastNativeInferenceDurationNanoseconds()).isNull();
  interpreter.close();
}
@Test
public void testGetLatencyAfterFailedInference() {
  NativeInterpreterWrapper interpreter = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
  // Mis-shaped input ([2, 8, 7, 3]) so that the run below is guaranteed to fail.
  float[] row = {1.23f, 6.54f, 7.81f};
  float[][] plane = {row, row, row, row, row, row, row};
  float[][][] cube = {plane, plane, plane, plane, plane, plane, plane, plane};
  float[][][][] batch = {cube, cube};
  Map<Integer, Object> outputs = new HashMap<>();
  outputs.put(0, new float[2][8][8][3]);
  try {
    interpreter.run(new Object[] {batch}, outputs);
    fail();
  } catch (IllegalArgumentException e) {
    // Expected: shape mismatch rejected.
  }
  // A failed run must not leave a stale latency value behind.
  assertThat(interpreter.getLastNativeInferenceDurationNanoseconds()).isNull();
  interpreter.close();
}
@Test
public void testGetInputDims() {
  NativeInterpreterWrapper interpreter = new NativeInterpreterWrapper(FLOAT_MODEL_PATH);
  // The float model declares a single input tensor of shape [1, 8, 8, 3].
  assertThat(interpreter.getInputTensor(0).shape()).isEqualTo(new int[] {1, 8, 8, 3});
  interpreter.close();
}
@Test
public void testGetOutputQuantizationParams() {
  // A float model reports neutral quantization parameters (zero point 0, scale 0)...
  try (NativeInterpreterWrapper floatInterpreter = new NativeInterpreterWrapper(FLOAT_MODEL_PATH)) {
    assertThat(floatInterpreter.getOutputQuantizationZeroPoint(0)).isEqualTo(0);
    assertThat(floatInterpreter.getOutputQuantizationScale(0)).isWithin(1e-6f).of(0.0f);
  }
  // ...while a quantized model reports its actual zero point and scale.
  try (NativeInterpreterWrapper quantInterpreter = new NativeInterpreterWrapper(QUANTIZED_MODEL_PATH)) {
    assertThat(quantInterpreter.getOutputQuantizationZeroPoint(0)).isEqualTo(127);
    assertThat(quantInterpreter.getOutputQuantizationScale(0)).isWithin(1e-6f).of(0.25f);
  }
}
}
| apache-2.0 |
ropik/error-prone | core/src/test/resources/com/google/errorprone/bugpatterns/LongLiteralLowerCaseSuffixPositiveCase2.java | 1140 | /*
* Copyright 2012 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns;
/**
 * Positive cases for {@link LongLiteralLowerCaseSuffix}.
 */
public class LongLiteralLowerCaseSuffixPositiveCase2 {
  // NOTE: this file is Error Prone test *data*. The "// BUG: Diagnostic contains:"
  // directives and the literals they refer to are matched by the test harness and
  // must not be altered or separated from the statement that follows them.

  // This constant string includes non-ASCII characters to make sure that we're not confusing
  // bytes and chars:
  @SuppressWarnings("unused")
  private static final String TEST_STRING = "Îñţérñåţîöñåļîžåţîờñ";

  public void underscoredLowerCase() {
    // BUG: Diagnostic contains: value = 0_1__2L
    long value = 0_1__2l;
  }
}
| apache-2.0 |
Darsstar/framework | shared/src/main/java/com/vaadin/shared/ui/datefield/DateTimeResolution.java | 800 | /*
* Copyright 2000-2016 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.shared.ui.datefield;
/**
 * The time resolutions a DateTimeField can be configured with, declared from
 * the finest unit (second) to the coarsest (year).
 *
 * <p>The declaration order is significant: ordinals run from {@code SECOND}
 * (0) up to {@code YEAR} (5), so the sequence below must not be reordered.
 *
 * @author Vaadin Ltd.
 * @since 8.0
 */
public enum DateTimeResolution {
    /** Resolution of one second. */
    SECOND,
    /** Resolution of one minute. */
    MINUTE,
    /** Resolution of one hour. */
    HOUR,
    /** Resolution of one day. */
    DAY,
    /** Resolution of one month. */
    MONTH,
    /** Resolution of one year. */
    YEAR;
}
| apache-2.0 |
rmelick/Argus | ArgusWebServices/src/main/java/com/salesforce/dva/argus/ws/dto/MetricDto.java | 5907 | /*
* Copyright (c) 2016, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of Salesforce.com nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.salesforce.dva.argus.ws.dto;
import com.salesforce.dva.argus.entity.Metric;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.core.Response.Status;
/**
 * Data transfer object carrying a metric's identity (scope, metric name,
 * tags via the superclass) together with its display metadata and time
 * series data points.
 *
 * @author Bhinav Sura (bhinav.sura@salesforce.com)
 */
public class MetricDto extends TSDBEntityDto {

    //~ Instance fields ******************************************************************************************************************************

    private String namespace;
    private String displayName;
    private String units;
    private Map<Long, String> datapoints;

    //~ Methods **************************************************************************************************************************************

    /**
     * Converts a single metric entity into its DTO representation.
     *
     * @param metric The metric to convert.
     *
     * @return The corresponding DTO.
     *
     * @throws WebApplicationException If the supplied entity is null.
     */
    public static MetricDto transformToDto(Metric metric) {
        if (metric == null) {
            throw new WebApplicationException("Null entity object cannot be converted to Dto object.", Status.INTERNAL_SERVER_ERROR);
        }
        return createDtoObject(MetricDto.class, metric);
    }

    /**
     * Converts a list of metric entities into a list of DTOs, preserving order.
     *
     * @param metrics List of metric entities. Cannot be null.
     *
     * @return List of metric DTOs.
     *
     * @throws WebApplicationException If the supplied list is null.
     */
    public static List<MetricDto> transformToDto(List<Metric> metrics) {
        if (metrics == null) {
            throw new WebApplicationException("Null entity object cannot be converted to Dto object.", Status.INTERNAL_SERVER_ERROR);
        }
        List<MetricDto> dtos = new ArrayList<>(metrics.size());
        for (Metric metric : metrics) {
            dtos.add(transformToDto(metric));
        }
        return dtos;
    }

    //~ Methods **************************************************************************************************************************************

    @Override
    public Object createExample() {
        // Build a fully populated sample DTO for API documentation purposes.
        MetricDto example = new MetricDto();
        Map<Long, String> exampleDatapoints = new TreeMap<>();
        exampleDatapoints.put(System.currentTimeMillis(), "1.2");
        example.setDatapoints(exampleDatapoints);
        example.setDisplayName("A description of the metric");
        example.setMetric("metric");
        example.setScope("scope");
        Map<String, String> exampleTags = new HashMap<>();
        exampleTags.put("tagk", "tagv");
        example.setTags(exampleTags);
        example.setUnits("ms");
        return example;
    }

    /**
     * Returns the namespace.
     *
     * @return The namespace.
     */
    public String getNamespace() {
        return namespace;
    }

    /**
     * Sets the namespace.
     *
     * @param namespace The namespace.
     */
    public void setNamespace(String namespace) {
        this.namespace = namespace;
    }

    /**
     * Returns the human readable display name.
     *
     * @return The display name.
     */
    public String getDisplayName() {
        return displayName;
    }

    /**
     * Sets the human readable display name.
     *
     * @param displayName The display name.
     */
    public void setDisplayName(String displayName) {
        this.displayName = displayName;
    }

    /**
     * Returns the units of the metric values.
     *
     * @return The metric units.
     */
    public String getUnits() {
        return units;
    }

    /**
     * Sets the units of the metric values.
     *
     * @param units The metric units.
     */
    public void setUnits(String units) {
        this.units = units;
    }

    /**
     * Returns the time series data points, keyed by timestamp.
     *
     * @return The metric data points.
     */
    public Map<Long, String> getDatapoints() {
        return datapoints;
    }

    /**
     * Sets the time series data points, keyed by timestamp.
     *
     * @param datapoints The metric data points.
     */
    public void setDatapoints(Map<Long, String> datapoints) {
        this.datapoints = datapoints;
    }
}
/* Copyright (c) 2016, Salesforce.com, Inc. All rights reserved. */
| bsd-3-clause |
cooperpellaton/k-9 | tests-on-jvm/src/test/java/com/fsck/k9/K9.java | 83 | package com.fsck.k9;
// Minimal stand-in for the application's K9 class (appears to be a test-only
// stub, given its location under tests-on-jvm — TODO confirm). It exposes only
// the DEBUG flag that code under test reads; defaults to false so debug-only
// code paths stay disabled during JVM unit tests.
public class K9 {
    public static boolean DEBUG = false;
}
| bsd-3-clause |
open-keychain/spongycastle | prov/src/main/java/org/spongycastle/jce/spec/GOST3410PublicKeySpec.java | 1488 | package org.spongycastle.jce.spec;
import java.math.BigInteger;
import java.security.spec.KeySpec;
/**
* This class specifies a GOST3410-94 public key with its associated parameters.
*/
public class GOST3410PublicKeySpec
implements KeySpec
{
private BigInteger y;
private BigInteger p;
private BigInteger q;
private BigInteger a;
/**
* Creates a new GOST3410PublicKeySpec with the specified parameter values.
*
* @param y the public key.
* @param p the prime.
* @param q the sub-prime.
* @param a the base.
*/
public GOST3410PublicKeySpec(
BigInteger y,
BigInteger p,
BigInteger q,
BigInteger a)
{
this.y = y;
this.p = p;
this.q = q;
this.a = a;
}
/**
* Returns the public key <code>y</code>.
*
* @return the public key <code>y</code>.
*/
public BigInteger getY()
{
return this.y;
}
/**
* Returns the prime <code>p</code>.
*
* @return the prime <code>p</code>.
*/
public BigInteger getP()
{
return this.p;
}
/**
* Returns the sub-prime <code>q</code>.
*
* @return the sub-prime <code>q</code>.
*/
public BigInteger getQ()
{
return this.q;
}
/**
* Returns the base <code>g</code>.
*
* @return the base <code>g</code>.
*/
public BigInteger getA()
{
return this.a;
}
}
| mit |
Adaptivity/BetterStorage | src/main/java/net/mcft/copy/betterstorage/tile/ContainerMaterial.java | 3888 | package net.mcft.copy.betterstorage.tile;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import net.mcft.copy.betterstorage.misc.BetterStorageResource;
import net.mcft.copy.betterstorage.utils.StackUtils;
import net.minecraft.block.Block;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.ItemStack;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.oredict.ShapedOreRecipe;
/**
 * Registry of the materials a reinforced container (chest/locker) can be
 * made of.
 * <p>
 * Each ContainerMaterial registers itself in the static lookup tables from
 * its constructor, so the static-field declaration order below is also the
 * registration order returned by {@link #getMaterials()}. Do not reorder the
 * material constants casually.
 */
public class ContainerMaterial {
	/** NBT tag key under which a container stack stores its material name. */
	public static final String TAG_NAME = "Material";
	// Lookup tables: by symbolic name, by legacy numeric id (kept so stacks
	// saved before names were introduced can still be resolved), and the
	// full registration-ordered list.
	private static Map<String, ContainerMaterial> materialMap = new HashMap<String, ContainerMaterial>();
	private static Map<Integer, ContainerMaterial> materialMapOld = new HashMap<Integer, ContainerMaterial>();
	private static List<ContainerMaterial> materials = new ArrayList<ContainerMaterial>();
	// Vanilla materials
	// Legacy ids 0-3; id 4 is unused.
	public static ContainerMaterial iron = new ContainerMaterial(0, "iron", Items.iron_ingot, Blocks.iron_block);
	public static ContainerMaterial gold = new ContainerMaterial(1, "gold", Items.gold_ingot, Blocks.gold_block);
	public static ContainerMaterial diamond = new ContainerMaterial(2, "diamond", Items.diamond, Blocks.diamond_block);
	public static ContainerMaterial emerald = new ContainerMaterial(3, "emerald", Items.emerald, Blocks.emerald_block);
	// Mod materials
	// Ingot/block given as ore-dictionary names (presumably resolved when the
	// recipe is registered — TODO confirm against ShapedOreRecipe behavior).
	// Note: steel has no legacy numeric id, only a name.
	public static ContainerMaterial copper = new ContainerMaterial(5, "copper", "ingotCopper", "blockCopper");
	public static ContainerMaterial tin = new ContainerMaterial(6, "tin", "ingotTin", "blockTin");
	public static ContainerMaterial silver = new ContainerMaterial(7, "silver", "ingotSilver", "blockSilver");
	public static ContainerMaterial zinc = new ContainerMaterial(8, "zinc", "ingotZinc", "blockZinc");
	public static ContainerMaterial steel = new ContainerMaterial( "steel", "ingotSteel", "blockSteel");
	/** Returns all materials in registration order. */
	public static List<ContainerMaterial> getMaterials() { return materials; }
	/** Looks up a material by symbolic name, or null if unknown. */
	public static ContainerMaterial get(String name) { return materialMap.get(name); }
	/** Looks up a material by legacy numeric id, or null if unknown. */
	public static ContainerMaterial get(int id) { return materialMapOld.get(id); }
	/** Gets the material of the stack, either using the new method, the
	 * old ID lookup or if everything fails, it'll return the default. */
	public static ContainerMaterial getMaterial(ItemStack stack, ContainerMaterial _default) {
		String name = StackUtils.get(stack, (String)null, TAG_NAME);
		ContainerMaterial material = ((name != null) ? get(name) : get(stack.getItemDamage()));
		return ((material != null) ? material : _default);
	}
	/** Symbolic material name, also used in NBT and texture paths. */
	public final String name;
	// Crafting ingredients: either Item/Block instances (vanilla) or
	// ore-dictionary name strings (mod metals). Null for materials that
	// cannot be crafted.
	private final Object ingot;
	private final Object block;
	// Registers this material under its name and appends it to the list.
	private ContainerMaterial(String name, Object ingot, Object block) {
		this.name = name;
		this.ingot = ingot;
		this.block = block;
		materialMap.put(name, this);
		materials.add(this);
	}
	private ContainerMaterial(String name) { this(name, null, null); }
	// Variant that additionally registers a legacy numeric id.
	private ContainerMaterial(int id, String name, Object ingot, Object block) {
		this(name, ingot, block);
		materialMapOld.put(id, this);
	}
	/**
	 * Builds the 3x3 reinforced-container recipe for this material, or null
	 * if the material has no craftable ingot/block pair.
	 */
	public ShapedOreRecipe getReinforcedRecipe(Block middle, Block result) {
		if ((ingot == null) || (block == null)) return null;
		return new ShapedOreRecipe(setMaterial(new ItemStack(result)),
				"o#o",
				"#C#",
				"oOo", 'C', middle,
				       '#', "logWood",
				       'o', ingot,
				       'O', block);
	}
	/** Texture for the chest model of this material (large or small variant). */
	public ResourceLocation getChestResource(boolean large) {
		return new BetterStorageResource("textures/models/chest" + (large ? "_large/" : "/") + name + ".png");
	}
	/** Texture for the locker model of this material (large or small variant). */
	public ResourceLocation getLockerResource(boolean large) {
		return new BetterStorageResource("textures/models/locker" + (large ? "_large/" : "/") + name + ".png");
	}
	/** Writes this material's name into the stack's NBT and returns the stack. */
	public ItemStack setMaterial(ItemStack stack) {
		StackUtils.set(stack, name, TAG_NAME);
		return stack;
	}
}
| mit |
Snickermicker/smarthome | extensions/binding/org.eclipse.smarthome.binding.homematic/src/main/java/org/eclipse/smarthome/binding/homematic/internal/communicator/message/XmlRpcResponse.java | 6064 | /**
* Copyright (c) 2014,2019 Contributors to the Eclipse Foundation
*
* See the NOTICE file(s) distributed with this work for additional
* information regarding copyright ownership.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.eclipse.smarthome.binding.homematic.internal.communicator.message;
import java.io.IOException;
import java.io.InputStream;
import java.text.ParseException;
import java.util.ArrayList;
import java.util.Base64;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
 * Decodes a XML-RPC message from the Homematic server.
 *
 * @author Gerhard Riegler - Initial contribution
 */
public class XmlRpcResponse implements RpcResponse {
    private String methodName;
    private Object[] responseData;

    /**
     * Decodes a XML-RPC message from the given InputStream.
     *
     * @param is stream containing the XML-RPC document
     * @param encoding character encoding used to read the stream
     * @throws SAXException if the document is malformed or contains an unknown tag
     * @throws ParserConfigurationException if no SAX parser can be created
     * @throws IOException if reading from the stream fails
     */
    public XmlRpcResponse(InputStream is, String encoding)
            throws SAXException, ParserConfigurationException, IOException {
        SAXParserFactory factory = SAXParserFactory.newInstance();
        SAXParser saxParser = factory.newSAXParser();
        InputSource inputSource = new InputSource(is);
        inputSource.setEncoding(encoding);
        saxParser.parse(inputSource, new XmlRpcHandler());
    }

    @Override
    public Object[] getResponseData() {
        return responseData;
    }

    @Override
    public String getMethodName() {
        return methodName;
    }

    @Override
    public String toString() {
        return RpcUtils.dumpRpcMessage(methodName, responseData);
    }

    /**
     * SAX parser implementation to decode XML-RPC.
     *
     * <p>Containers (array/struct) are decoded using a stack of lists: a new list
     * is pushed when a container opens; when it closes, the collected elements are
     * folded into an Object[] (array) or a Map (struct, alternating name/value
     * entries) and appended to the parent list.
     *
     * @author Gerhard Riegler
     */
    private class XmlRpcHandler extends DefaultHandler {
        private List<Object> result = new ArrayList<>();
        private LinkedList<List<Object>> currentDataObject = new LinkedList<>();
        private StringBuilder tagValue;
        // True while directly inside a <value> element; reset as soon as any child
        // element starts, so only an untyped <value>text</value> is decoded as a
        // string in the "value" case below.
        private boolean isValueTag;

        @Override
        public void startDocument() throws SAXException {
            currentDataObject.addLast(new ArrayList<>());
        }

        @Override
        public void endDocument() throws SAXException {
            result.addAll(currentDataObject.removeLast());
            responseData = result.toArray();
        }

        @Override
        public void startElement(String uri, String localName, String qName, Attributes attributes)
                throws SAXException {
            String tag = qName.toLowerCase();
            if (tag.equals("array") || tag.equals("struct")) {
                currentDataObject.addLast(new ArrayList<>());
            }
            isValueTag = tag.equals("value");
            tagValue = new StringBuilder();
        }

        @Override
        public void endElement(String uri, String localName, String qName) throws SAXException {
            String currentTag = qName.toLowerCase();
            String currentValue = tagValue.toString();
            List<Object> data = currentDataObject.peekLast();
            switch (currentTag) {
                case "boolean":
                    data.add("1".equals(currentValue) ? Boolean.TRUE : Boolean.FALSE);
                    break;
                case "int":
                case "i4":
                    // valueOf instead of the deprecated boxing constructor.
                    data.add(Integer.valueOf(currentValue));
                    break;
                case "double":
                    data.add(Double.valueOf(currentValue));
                    break;
                case "string":
                case "name":
                    data.add(currentValue);
                    break;
                case "value":
                    // Only reached for untyped values (no child element seen).
                    if (isValueTag) {
                        data.add(currentValue);
                        isValueTag = false;
                    }
                    break;
                case "array":
                    List<Object> arrayData = currentDataObject.removeLast();
                    currentDataObject.peekLast().add(arrayData.toArray());
                    break;
                case "struct":
                    // Struct members were collected as alternating name/value pairs.
                    List<Object> mapData = currentDataObject.removeLast();
                    Map<Object, Object> resultMap = new HashMap<>();
                    for (int i = 0; i < mapData.size(); i += 2) {
                        resultMap.put(mapData.get(i), mapData.get(i + 1));
                    }
                    currentDataObject.peekLast().add(resultMap);
                    break;
                case "base64":
                    data.add(Base64.getDecoder().decode(currentValue));
                    break;
                case "datetime.iso8601":
                    try {
                        data.add(XmlRpcRequest.xmlRpcDateFormat.parse(currentValue));
                    } catch (ParseException ex) {
                        throw new SAXException(ex.getMessage(), ex);
                    }
                    break;
                case "methodname":
                    methodName = currentValue;
                    break;
                // Structural tags that carry no data of their own.
                case "params":
                case "param":
                case "methodcall":
                case "methodresponse":
                case "member":
                case "data":
                case "fault":
                    break;
                default:
                    throw new SAXException("Unknown XML-RPC tag: " + currentTag);
            }
        }

        @Override
        public void characters(char[] ch, int start, int length) throws SAXException {
            // Append the chunk directly; SAX may deliver text in multiple calls.
            tagValue.append(ch, start, length);
        }
    }
}
| epl-1.0 |
hosny1993/vogella | de.vogella.task.application/src/de/vogella/task/application/views/TaskOverview.java | 3030 | package de.vogella.task.application.views;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.TableViewerColumn;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.ui.part.ViewPart;
import de.vogella.task.application.views.editing.TaskEditingSupport;
import de.vogella.task.application.views.provider.TaskContentProvider;
import de.vogella.task.application.views.provider.TaskLabelProvider;
import de.vogella.task.application.views.sorter.TaskSorter;
import de.vogella.task.dao.MockDao;
/**
 * Workbench view showing all tasks from the mock DAO in a sortable table.
 * Registers its viewer as the site's selection provider so other parts can
 * react to task selection.
 */
public class TaskOverview extends ViewPart {
	/** View id, must match the id declared in plugin.xml. */
	public static final String ID = "de.vogella.task.application.views.TaskOverview";
	private TableViewer viewer;
	private TaskSorter taskSorter;
	@Override
	public void createPartControl(Composite parent) {
		createViewer(parent);
		// Publish the table's selection to the workbench selection service.
		getSite().setSelectionProvider(viewer);
	}
	// Builds the table viewer: columns first, then providers, input and sorter.
	private void createViewer(Composite parent) {
		viewer = new TableViewer(parent, SWT.FULL_SELECTION | SWT.MULTI
				| SWT.H_SCROLL | SWT.V_SCROLL);
		createColumns(viewer);
		cellEditors(viewer);
		viewer.setContentProvider(new TaskContentProvider());
		viewer.setLabelProvider(new TaskLabelProvider());
		viewer.setInput(MockDao.INSTANCE.getTasks());
		taskSorter = new TaskSorter();
		viewer.setSorter(taskSorter);
	}
	// Intentionally empty: per-column editing is wired via setEditingSupport
	// in createColumns instead.
	private void cellEditors(TableViewer viewer2) {
	}
	// This will create the columns for the table
	private void createColumns(final TableViewer viewer) {
		String[] titles = { "", "Due Date", "Summary", "Priority", "Status" };
		int[] bounds = { 60, 140, 200, 100, 100 };
		for (int i = 0; i < titles.length; i++) {
			final int index = i;
			TableViewerColumn column = new TableViewerColumn(viewer, SWT.NONE);
			final TableColumn tableColumn = column.getColumn();
			tableColumn.setText(titles[i]);
			tableColumn.setWidth(bounds[i]);
			tableColumn.setResizable(true);
			tableColumn.setMoveable(true);
			// Setting the right sorter
			tableColumn.addSelectionListener(new SelectionAdapter() {
				@Override
				public void widgetSelected(SelectionEvent e) {
					taskSorter.setColumn(index);
					// Clicking the already-sorted column toggles the direction;
					// clicking a different column starts with descending order.
					int dir = viewer.getTable().getSortDirection();
					if (viewer.getTable().getSortColumn() == tableColumn) {
						dir = dir == SWT.UP ? SWT.DOWN : SWT.UP;
					} else {
						dir = SWT.DOWN;
					}
					viewer.getTable().setSortDirection(dir);
					viewer.getTable().setSortColumn(tableColumn);
					viewer.refresh();
				}
			});
			column.setEditingSupport(new TaskEditingSupport(i, viewer));
		}
		Table table = viewer.getTable();
		table.setHeaderVisible(true);
		table.setLinesVisible(true);
	}
	@Override
	public void setFocus() {
		viewer.getControl().setFocus();
	}
	// Reloads the tasks from the DAO into the table.
	public void refresh() {
		viewer.setInput(MockDao.INSTANCE.getTasks());
	}
	public Viewer getViewer() {
		return viewer;
	}
} | epl-1.0 |
biddyweb/checker-framework | checker/tests/i18n-formatter/Syntax.java | 3282 | import java.text.MessageFormat;
import java.util.Date;
/**
 * Checker Framework test data for the I18n Format String Checker's syntax
 * diagnostics. Each method exercises one class of MessageFormat pattern
 * errors. The "//:: error:" and "//:: warning:" lines are harness directives
 * that must immediately precede the statement they apply to, so no comments
 * may be inserted between a directive and its statement.
 */
public class Syntax {
    // Test 2.1.1: Missing '}' at end of message format (Unmatched braces in the
    // pattern)
    public static void unmatchedBraces() {
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, number", new Date(12));
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0}{", 1);
        // good
        //:: warning: (i18nformat.excess.arguments)
        MessageFormat.format("'{0{}", 1);
        //:: warning: (i18nformat.excess.arguments)
        MessageFormat.format("'{0{}'", 1);
    }
    // Test 2.1.2.1: The argument number needs to be an integer
    public static void integerRequired() {
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{{0}}", 1);
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0.2}", 1);
        // good
        //:: warning: (i18nformat.excess.arguments)
        MessageFormat.format("'{{0}}'", 1);
    }
    // Test 2.1.2.2: The argument number can't be negative
    public static void nonNegativeRequired() {
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{-1, number}", 1);
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{-123}", 1);
        // good
        MessageFormat.format("{0}", 1);
    }
    // Test 2.1.3: Format Style required for choice format
    public static void formatStyleRequired() {
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, choice}", 1);
        // good
        MessageFormat.format("{0, choice, 0#zero}", 1);
    }
    // Test 2.1.4: Wrong format Style
    public static void wrongFormatStyle() {
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, time, number}", 1);
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, number, y.m.d}", 1);
        // good
        MessageFormat.format("{0, time, short}", 1);
        MessageFormat.format("{0, number, currency}", 1);
    }
    // Test 2.1.5: Unknown format type
    public static void unknownFormatType() {
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, general}", 1);
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, fool}", 1);
        // good
        MessageFormat.format("{0}", 1);
        MessageFormat.format("{0, time}", 1);
        MessageFormat.format("{0, date}", 1);
        MessageFormat.format("{0, number}", 1);
        MessageFormat.format("{0, daTe}", 1);
        MessageFormat.format("{0, NUMBER}", 1);
    }
    // Test 2.1.6: Invalid Subformat Pattern
    public static void invalidSubformatPattern() {
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, number, #.#.#}", 1);
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, date, y.m.d.x}", new Date());
        //:: error: (i18nformat.string.invalid)
        MessageFormat.format("{0, choice, 0##zero}", 0);
        // good
        MessageFormat.format("{0, number, #.#}", 1);
        MessageFormat.format("{0, date, y.m.d}", new Date());
        MessageFormat.format("{0, choice, 0>zero}", 0);
    }
}
| gpl-2.0 |
md-5/jdk10 | test/hotspot/jtreg/vmTestbase/nsk/jdi/BScenarios/hotswap/tc07x001/TestDescription.java | 3818 | /*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
*
* @summary converted from VM Testbase nsk/jdi/BScenarios/hotswap/tc07x001.
* VM Testbase keywords: [quick, jpda, jdi, redefine]
* VM Testbase readme:
* DESCRIPTION:
* This test is from the group of so-called Borland's scenarios and
* implements the following test case:
* Suite 3 - Hot Swap
* Test case: TC7
* Description: After point of execution, different method - stepping
* Steps: 1.Set breakpoint at line 24 (call from a()
* to b())
* 2.Debug Main
* 3.Insert as first line in b():
* System.err.println("foo");
* 4.Smart Swap
* 5.F7 to step into
* X. Steps into b()
* 6.F7 to step into
* X. Prints "foo"
* 7.F7 to step into
* X. Steps on to line that prints "1"
 * The description was drawn up according to the steps under JBuilder.
 * Of course, the test has its own line numbers and method/class names and
 * works as follows:
* When the test is starting debugee, debugger sets breakpoint at
* the 38th line (method method_A).
* After the breakpoint is reached, debugger redefines debugee inserting
* first line into method_B, creates StepRequest and resumes debugee.
* When the location of the current StepEvent is in method_C, created
* StepRequest is disabled.
* The test checks up location of every step event and that new code
* becomes actual.
* COMMENTS:
* Test was fixed according to test bug:
* 4778296 TEST_BUG: debuggee VM intemittently hangs after resuming
* - handling VMStartEvent was removed from the debugger part of the test
* - quit on VMDeathEvent was added to the event handling loop
* Test updated to wait for debugee VM exit:
* - standard method Debugee.endDebugee() is used instead of final Debugee.resume()
*
* @library /vmTestbase
* /test/lib
* @run driver jdk.test.lib.FileInstaller . .
* @build nsk.jdi.BScenarios.hotswap.tc07x001
* nsk.jdi.BScenarios.hotswap.tc07x001a
*
* @comment compile newclassXX to bin/newclassXX
* with full debug info
* @run driver nsk.share.ExtraClassesBuilder
* -g:lines,source,vars
* newclass
*
* @run main/othervm PropertyResolvingWrapper
* nsk.jdi.BScenarios.hotswap.tc07x001
* ./bin
* -verbose
* -arch=${os.family}-${os.simpleArch}
* -waittime=5
* -debugee.vmkind=java
* -transport.address=dynamic
* "-debugee.vmkeys=${test.vm.opts} ${test.java.opts}"
*/
| gpl-2.0 |
mykmelez/pluotsorbet | java/cldc1.1.1/javax/microedition/io/PortRangeNormalizer.java | 2584 | /*
* Copyright 1990-2007 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License version
* 2 only, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License version 2 for more details (a copy is
* included at /legal/license.txt).
*
* You should have received a copy of the GNU General Public License
* version 2 along with this work; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
* Clara, CA 95054 or visit www.sun.com if you need additional
* information or have any questions.
*/
package javax.microedition.io;
/**
 * Implementors of this interface encapsulate scheme-specific normalization of
 * port range specification for an URI.
 * <p>
 * Both methods return <code>null</code> when the scheme defines no
 * normalization for the given input, letting the caller fall back to generic
 * handling.
 */
interface PortRangeNormalizer {
    /**
     * Given the host and the original port range specification string from
     * an URI, returns the port range and the string representing port range
     * normalized as defined in RFC 3986 and the defining specification for the
     * scheme.
     * <p>
     * If <code>host</code>, <code>portspec</code> or <code>portRange</code> is
     * <code>null</code>, the behavior is undefined. If <code>portRange</code>
     * is not an array of two elements, the behavior is undefined.
     *
     * @param host the host specification from an URI
     * @param portspec the port range specification from an URI
     * @param portRange array of two elements to store the port range
     *          (out parameter: filled in by the implementation)
     *
     * @throws IllegalArgumentException if <code>portspec</code> is malformed.
     *
     * @return the normalized port range specification string or
     * <code>null</code> if no scheme-specific normalization is applicable
     */
    String normalize(String host, String portspec, int[] portRange);

    /**
     * Given the port range parsed from an URI, returns a string representation
     * of the port range normalized for this protocol.
     *
     * @param portRange array of length two specifying port range
     *
     * @return the normalized port range specification string or
     * <code>null</code> if no scheme-specific normalization is applicable
     */
    String normalize(int[] portRange);
}
| gpl-2.0 |
JoeHsiao/bioformats | components/forks/poi/src/loci/poi/hssf/record/VerticalPageBreakRecord.java | 2559 | /*
* #%L
* Fork of Apache Jakarta POI.
* %%
* Copyright (C) 2008 - 2015 Open Microscopy Environment:
* - Board of Regents of the University of Wisconsin-Madison
* - Glencoe Software, Inc.
* - University of Dundee
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
package loci.poi.hssf.record;
/**
 * VerticalPageBreak record that stores page breaks at columns.
 * <p>
 * This class is just used so that SID compares work properly in the
 * RecordFactory; all real behavior lives in the {@link PageBreakRecord}
 * superclass.
 * @see PageBreakRecord
 * @author Danny Mui (dmui at apache dot org)
 */
public class VerticalPageBreakRecord extends PageBreakRecord {
	/** The record id (sid) identifying vertical page breaks. */
	public static final short sid = PageBreakRecord.VERTICAL_SID;
	/**
	 * Creates an empty vertical page-break record.
	 */
	public VerticalPageBreakRecord() {
		super();
	}
	/**
	 * Creates an empty record with the given sid.
	 * @param sid the record id to use
	 */
	public VerticalPageBreakRecord(short sid) {
		super(sid);
	}
	/**
	 * Reads a vertical page-break record from the stream.
	 * @param in the RecordInputstream to read the record from
	 */
	public VerticalPageBreakRecord(RecordInputStream in) {
		super(in);
	}
	/* (non-Javadoc)
	 * @see loci.poi.hssf.record.Record#getSid()
	 */
	public short getSid() {
		return sid;
	}
}
| gpl-2.0 |
brendandahl/j2me.js | java/jsr-179/com/sun/j2me/location/LocationProviderImpl.java | 19416 | /*
*
*
* Copyright 1990-2009 Sun Microsystems, Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License version
* 2 only, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License version 2 for more details (a copy is
* included at /legal/license.txt).
*
* You should have received a copy of the GNU General Public License
* version 2 along with this work; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA
*
* Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
* Clara, CA 95054 or visit www.sun.com if you need additional
* information or have any questions.
*/
package com.sun.j2me.location;
import com.sun.j2me.log.Logging;
import com.sun.j2me.security.LocationPermission;
import java.util.Vector;
import javax.microedition.location.Criteria;
import javax.microedition.location.Location;
import javax.microedition.location.LocationException;
import javax.microedition.location.LocationListener;
import javax.microedition.location.LocationProvider;
import com.sun.j2me.main.Configuration;
/**
 * This class is an implementation of the <code>LocationProvider</code> class
 * defined by the JSR-179 specification. It layers listener management,
 * blocking location queries and provider selection on top of the abstract
 * platform-specific primitives ({@link #updateLocation}, {@link #getState}
 * etc.) supplied by subclasses such as <code>PlatformLocationProvider</code>.
 */
public abstract class LocationProviderImpl extends LocationProvider {
    /** Listener currently registered via setLocationListener(), or null. */
    protected LocationListener locationListener;
    /** Criteria describing the capabilities of this provider instance. */
    protected Criteria criteria = new Criteria();
    /** Background thread delivering periodic location updates; null when idle. */
    private LocationThread locationThread = null;
    /** Background thread delivering provider-state changes; null when idle. */
    private StateThread stateThread = null;
    /** Number of getLocationImpl() calls currently in progress. */
    private int locationQueries = 0;
    /** Set while reset() is waiting for in-flight queries to abort. */
    private boolean resetRequested = false;
    /** Separator between provider names in the platform provider list. */
    private static final String SEPARATOR = ",";
    /**
     * Tells whether this provider satisfies the given criteria.
     *
     * @param criteria the criteria requested by the application
     * @return true if this provider's capabilities meet the request
     */
    public boolean matchesCriteria(Criteria criteria) {
        return compareCriterias(criteria, this.criteria);
    }
    /**
     * Checks whether criteria <code>c2</code> (a provider's capabilities)
     * satisfies criteria <code>c1</code> (an application's request).
     * Boolean requirements must be met exactly; numeric requirements in
     * <code>c1</code> may be relaxed with <code>Criteria.NO_REQUIREMENT</code>,
     * otherwise the provider's value must not exceed the requested one.
     *
     * @param c1 the requested criteria
     * @param c2 the provider's criteria
     * @return true if c2 satisfies c1
     */
    static boolean compareCriterias(Criteria c1, Criteria c2) {
        if (!c1.isAllowedToCost() && c2.isAllowedToCost()) {
            return false;
        }
        if (c1.isSpeedAndCourseRequired() && !c2.isSpeedAndCourseRequired()) {
            return false;
        }
        if (c1.isAltitudeRequired() && !c2.isAltitudeRequired()) {
            return false;
        }
        if (c1.isAddressInfoRequired() && !c2.isAddressInfoRequired()) {
            return false;
        }
        if (c1.getHorizontalAccuracy() != Criteria.NO_REQUIREMENT &&
            c1.getHorizontalAccuracy() < c2.getHorizontalAccuracy()) {
            return false;
        }
        if (c1.getVerticalAccuracy() != Criteria.NO_REQUIREMENT &&
            c1.getVerticalAccuracy() < c2.getVerticalAccuracy()) {
            return false;
        }
        if (c1.getPreferredResponseTime() != Criteria.NO_REQUIREMENT &&
            c1.getPreferredResponseTime() < c2.getPreferredResponseTime()) {
            return false;
        }
        if (c1.getPreferredPowerConsumption() != Criteria.NO_REQUIREMENT
            && c1.getPreferredPowerConsumption() <
            c2.getPreferredPowerConsumption()) {
            return false;
        }
        return true;
    }
    /**
     * Picks from <code>v</code> the first provider whose cost allowance and
     * preferred power consumption exactly match the requested criteria
     * <code>c</code>, or null if none matches.
     *
     * @param c the requested criteria
     * @param v vector of LocationProviderImpl candidates
     * @return the first exactly-matching provider, or null
     */
    static LocationProviderImpl getBestProvider(Criteria c, Vector v) {
        for (int i=0; i<v.size(); i++) {
            LocationProviderImpl p = (LocationProviderImpl)v.elementAt(i);
            Criteria cr = p.criteria;
            if ((cr.isAllowedToCost() == c.isAllowedToCost()) &&
                (cr.getPreferredPowerConsumption() == c.getPreferredPowerConsumption()) ) {
                return p;
            }
        }
        // NOTE(review): this second loop has no effect (its body only
        // declares locals) — it looks like an unfinished fallback match;
        // consider removing it or completing the intended relaxed matching.
        for (int i=0; i<v.size(); i++) {
            LocationProviderImpl p = (LocationProviderImpl)v.elementAt(i);
            Criteria cr = p.criteria;
        }
        return null;
    }
    /** @return default interval between listener updates, presumably in seconds — TODO confirm against subclass */
    public abstract int getDefaultInterval();
    /** @return default maximum tolerated age of a cached location */
    public abstract int getDefaultMaxAge();
    /** @return default timeout, in seconds, used by getLocation(-1) */
    public abstract int getDefaultTimeout();
    /** @return time, in seconds, one location acquisition attempt may take */
    public abstract int getResponseTime();
    /** @return polling interval, in seconds, for provider state checks */
    public abstract int getStateInterval();
    /** @return the most recently acquired location, or null if none cached */
    abstract LocationImpl getLastLocation();
    /**
     * Returns the last known location obtained by any provider on the
     * platform, as required by LocationProvider.getLastKnownLocation().
     *
     * @return the last known location, possibly null
     */
    public static synchronized Location getLastKnownLocation() {
        return PlatformLocationProvider.getLastKnownLocation();
    }
    /**
     * Aborts all in-flight getLocation() calls. Sets the resetRequested
     * flag and busy-waits (100 ms steps, bounded by the
     * com.sun.j2me.location.ResetTimeout property, in seconds) until all
     * outstanding queries have noticed the flag and unwound.
     */
    public void reset() {
        if (locationQueries > 0) {
            resetRequested = true;
            int attemptCount = Integer
                .parseInt(Configuration
                          .getProperty("com.sun.j2me.location.ResetTimeout"))
                * 10;
            while (locationQueries > 0 && attemptCount-- > 0) {
                try {
                    Thread.sleep(100);
                } catch (InterruptedException e) {
                    // nothing to do
                }
            }
            resetRequested = false;
        }
    }
    /**
     * Retrieves a location, blocking up to <code>timeout</code> seconds.
     * Checks the LOCATION permission first.
     *
     * @param timeout timeout in seconds; -1 selects the default timeout
     * @return the acquired location
     * @throws IllegalArgumentException if timeout is 0 or below -1
     * @throws LocationException if the location cannot be acquired in time
     * @throws InterruptedException if reset() aborts the query
     */
    public Location getLocation(int timeout)
        throws LocationException, InterruptedException {
        Util.checkForPermission(LocationPermission.LOCATION, false);
        if (timeout == 0 || timeout < -1) {
            throw new IllegalArgumentException("Illegal timeout value");
        }
        LocationImpl location = getLocationImpl(timeout);
        return location;
    }
    /**
     * Core blocking acquisition loop shared by getLocation() and the
     * periodic LocationThread. While the deadline has not passed it polls
     * updateLocation() whenever the provider is AVAILABLE, sleeping either
     * a state-poll interval or a response-time slice between attempts, and
     * bails out promptly (in 100 ms steps) when reset() is requested.
     * locationQueries is maintained in a finally block so reset() can see
     * when all queries have unwound.
     *
     * @param timeout timeout in seconds; -1 selects getDefaultTimeout()
     * @return a freshly acquired location
     * @throws LocationException if the provider is out of service,
     *         temporarily unavailable, or no fix could be obtained in time
     * @throws InterruptedException if the query was aborted by reset()
     */
    protected LocationImpl getLocationImpl(int timeout)
        throws LocationException, InterruptedException {
        long startTime;
        long endTime;
        LocationImpl newLocation = null;
        if (getState() == OUT_OF_SERVICE) {
            throw new LocationException("Provider is out of service");
        }
        try {
            if (timeout == -1) {
                timeout = getDefaultTimeout();
            }
            startTime = System.currentTimeMillis();
            endTime = startTime + (long)timeout * 1000;
            locationQueries++;
            while (!resetRequested && System.currentTimeMillis() < endTime) {
                if (getState() == AVAILABLE) {
                    newLocation = updateLocation(endTime -
                                                 System.currentTimeMillis());
                    if (resetRequested) {
                        break;
                    }
                    if (newLocation != null) {
                        return newLocation;
                    }
                } else {
                    Thread.sleep((long)getStateInterval() * 1000);
                }
                // Wait one response-time slice (or what remains of the
                // deadline, whichever is shorter) before the next attempt.
                long delay = Math.min((long)getResponseTime() * 1000,
                                      endTime - System.currentTimeMillis());
                if (delay <= 0) {
                    break;
                }
                // Sleep in 100 ms steps so a reset() is noticed quickly.
                while (!resetRequested && delay > 0) {
                    Thread.sleep(100);
                    delay -= 100;
                }
            }
            if (!resetRequested) {
                if (getState() == TEMPORARILY_UNAVAILABLE) {
                    throw new LocationException("Provider is temporarily unavailable");
                }
                // try one last time
                newLocation = updateLocation((long)getResponseTime() * 1000);
                if (!resetRequested) {
                    if (newLocation != null) {
                        return newLocation;
                    }
                    throw new LocationException("Could not acquire location");
                }
            }
        } finally {
            locationQueries--;
        }
        throw new InterruptedException("Location query was interrupted");
    }
    /**
     * Platform hook: performs one location acquisition attempt.
     *
     * @param timeout time budget in milliseconds for this attempt
     * @return the new location, or null if none could be obtained
     * @throws LocationException on platform failure
     */
    protected abstract LocationImpl updateLocation(long timeout)
        throws LocationException;
    /**
     * Platform hook: sets the periodic update interval; 0 disables
     * periodic updates (used when the listener is removed).
     *
     * @param interval interval in seconds, or 0 to disable
     */
    protected abstract void setUpdateInterval(int interval);
    /**
     * Factory backing LocationProvider.getInstance(). First asks the
     * platform for a best provider directly; if direct creation is
     * unsupported, parses the comma-separated platform provider list,
     * instantiates every provider matching the criteria and returns an
     * AVAILABLE one in preference to a TEMPORARILY_UNAVAILABLE one.
     *
     * @param criteria requested criteria; null means default criteria
     * @return a matching provider, or null if none matches
     * @throws LocationException if all providers are out of service
     */
    public static LocationProviderImpl getInstanceImpl(Criteria criteria)
        throws LocationException {
        Vector vectProviders = new Vector();
        LocationProviderImpl found = null;
        // NOTE(review): instance is created only for its constructor's
        // side effect (presumably registering the event listener) — verify.
        new LocationEventListener();
        if (criteria == null) {
            criteria = new Criteria();
        }
        String listProviders = PlatformLocationProvider.
            getListOfLocationProviders();
        if(listProviders == null || ((listProviders = listProviders.trim()).equals(""))) {
            throw new LocationException("All providers are out of service");
        }
        String providerName = null;
        try {
            providerName = PlatformLocationProvider.getBestProviderByCriteria(criteria);
            if (providerName != null) {
                try {
                    return new PlatformLocationProvider(providerName);
                } catch (IllegalAccessException ex) {
                    throw new LocationException("can not create Location Provider " + providerName);
                }
            }
            return null;
        } catch (IllegalAccessException ex) {
            /* Direct creation from criteria is Unsupported */
            /* try to create in Java */
        }
        /* parsing the list of providers */
        while (listProviders.length() > 0) {
            int posSpace = listProviders.indexOf(SEPARATOR);
            String newProviderName;
            if (posSpace == -1) { // last provider name
                newProviderName = listProviders;
                listProviders = "";
            } else { // not last name
                newProviderName = listProviders.substring(0, posSpace);
                listProviders = listProviders.substring(posSpace + 1);
            }
            try {
                Criteria cr = PlatformLocationProvider.getProviderInfo(newProviderName);
                if (compareCriterias(criteria, cr)) {
                    LocationProviderImpl providerInstance = new
                        PlatformLocationProvider(newProviderName);
                    vectProviders.addElement(providerInstance);
                }
            } catch (IllegalAccessException e) {
                if (Logging.TRACE_ENABLED) {
                    Logging.trace(e, "Illegal access to provider");
                }
            }
        }
        // loop over all providers and set the ones that match the criteria
        // in their proper state, to give the one available preference over
        // the unavailable one
        LocationProviderImpl provider;
        while (vectProviders.size()>0 &&
            (provider = getBestProvider(criteria, vectProviders)) != null) {
            int state = provider.getState();
            if (state == AVAILABLE) {
                return provider;
            }
            if (state == TEMPORARILY_UNAVAILABLE && found == null) {
                found = provider;
            }
            vectProviders.removeElement(provider);
        }
        if (found != null) {
            return found;
        }
        return null;
    }
    /**
     * Returns the currently registered location listener.
     *
     * @return the listener, or null if none is registered
     */
    public LocationListener getLocationListener() {
        return locationListener;
    }
    /**
     * Registers (or, with a null listener, removes) the location listener.
     * Stops any running update/state threads, validates the timing
     * parameters, substitutes provider defaults for -1 values, then starts
     * a LocationThread (if interval &gt; 0) and a StateThread.
     *
     * @param listener the listener to register, or null to remove
     * @param interval update interval in seconds; -1 selects defaults
     * @param timeout per-update timeout in seconds, or -1 for default
     * @param maxAge maximum tolerated location age, or -1 for default
     * @throws IllegalArgumentException if the timing parameters are
     *         inconsistent (only checked when listener is non-null)
     * @throws SecurityException if the LOCATION permission is denied
     */
    public void setLocationListener(LocationListener listener,
                                    int interval, int timeout, int maxAge)
        throws IllegalArgumentException, SecurityException {
        if (listener != null)
            Util.checkForPermission(LocationPermission.LOCATION, true);
        if (interval < -1 ||
            (interval != -1 && (timeout > interval || maxAge > interval ||
                                timeout < 1 && timeout != -1 ||
                                maxAge < 1 && maxAge != -1))) {
            if (listener != null) {
                throw new IllegalArgumentException("Timeout value is invalid");
            }
        }
        // stop the current locationThread and stateThread
        if (locationThread != null) {
            locationThread.terminate();
            try { // wait for thread to die
                locationThread.join();
            } catch (InterruptedException e) { // do nothing
                if (Logging.TRACE_ENABLED) {
                    Logging.trace(e, "Wrong thread exception.");
                }
            }
            locationThread = null;
        }
        if (stateThread != null) {
            stateThread.terminate();
            try { // wait for thread to die
                stateThread.join();
            } catch (InterruptedException e) { // do nothing
                if (Logging.TRACE_ENABLED) {
                    Logging.trace(e, "Wrong thread exception.");
                }
            }
            stateThread = null;
        }
        if (listener == null) {
            locationListener = null;
            setUpdateInterval(0);
            return;
        }
        if (interval == -1) {
            interval = getDefaultInterval();
            maxAge = getDefaultMaxAge();
            timeout = getDefaultInterval()/2;
        }
        if (maxAge == -1) {
            maxAge = getDefaultMaxAge();
        }
        if (timeout == -1) {
            timeout = getDefaultInterval()/2;
        }
        this.locationListener = listener;
        // Start the location thread when interval > 0
        if (interval > 0) {
            setUpdateInterval(interval);
            locationThread = new LocationThread(this, listener, interval,
                                                timeout, maxAge);
            locationThread.start();
        }
        // Start the state update thread
        stateThread = new StateThread(this, listener);
        stateThread.start();
    }
}
/**
 * Class LocationThread provides periodic location updates through the
 * registered location listener. Every <code>interval</code> seconds it
 * reuses the provider's cached location if fresh enough, otherwise
 * acquires a new one, then invokes
 * {@link LocationListener#locationUpdated}.
 */
class LocationThread extends Thread {
    /** Location provider listener is registered to. */
    private LocationProviderImpl provider;
    /** Current location listener. */
    private LocationListener listener;
    /** Current interval for location sampling, in seconds. */
    private int interval;
    /** Current timeout for sampling, in seconds. */
    private int timeout;
    /** Current limit for old samples. */
    private int maxAge;
    /** Flag indicating if the thread should terminate. */
    private boolean terminated = false;
    /**
     * Creates the update thread (does not start it).
     *
     * @param provider provider to poll for locations
     * @param listener listener to notify
     * @param interval update interval in seconds
     * @param timeout acquisition timeout in seconds
     * @param maxAge maximum tolerated age of a cached location
     */
    LocationThread(LocationProviderImpl provider, LocationListener listener,
                   int interval, int timeout, int maxAge) {
        this.provider = provider;
        this.listener = listener;
        this.interval = interval;
        this.timeout = timeout;
        this.maxAge = maxAge;
    }
    /**
     * Terminates the thread by setting the flag and waking it from wait().
     */
    void terminate() {
        terminated = true;
        synchronized (this) {
            notify();
        }
    }
    /**
     * Main loop: each cycle checks whether the cached location is still
     * within maxAge, acquires a fresh one if not (delivering an invalid
     * Location on failure), waits until the scheduled delivery time, and
     * notifies the listener. Interrupted acquisitions (provider reset) do
     * not terminate the loop.
     */
    public void run() {
        int responseTime = Math.min(provider.getResponseTime(), interval);
        long lastUpdate = System.currentTimeMillis() - (long)interval * 1000;
        try {
            while (!terminated) {
                Location location = provider.getLastLocation();
                // NOTE(review): the left side of this comparison is in
                // milliseconds while maxAge is passed in seconds elsewhere
                // in this file — confirm the intended unit of maxAge.
                if (location == null || System.currentTimeMillis() +
                    (long)responseTime * 1000 -
                    location.getTimestamp() > maxAge) {
                    // need to update location
                    try {
                        location =
                            provider.getLocationImpl(responseTime + timeout);
                    } catch (LocationException e) {
                        // couldn't get location, send the invalid one
                        location = new LocationImpl(null, 0, 0, 0,
                                                    null, false);
                    } catch (InterruptedException e) {
                        // reset() was called on the provider
                        // should the thread terminate? most probably not
                    }
                }
                long delay = lastUpdate + (long)interval * 1000 -
                    System.currentTimeMillis();
                if (delay > 0) {
                    synchronized (this) {
                        wait(delay); // wait for the right timing
                    }
                }
                if (terminated) { // thread was stopped
                    break;
                }
                // send the new location to location listener
                lastUpdate = System.currentTimeMillis();
                listener.locationUpdated(provider, location);
                delay = (long)(interval - responseTime) * 1000;
                if (delay > 0) {
                    synchronized (this) {
                        wait(delay);
                    }
                }
            }
        } catch (InterruptedException e) {
            if (Logging.TRACE_ENABLED) {
                Logging.trace(e, "Wrong thread exception.");
            }
        }
    }
}
/**
 * Class StateThread checks the current provider state every
 * <code>interval</code> seconds and sends the state update
 * to the listener when the state has changed.
 */
class StateThread extends Thread {
    /** Current location provider. */
    private LocationProviderImpl provider;
    /** Current location listener. */
    private LocationListener listener;
    /** Flag indicating if the thread should terminate. */
    private boolean terminated = false;
    /**
     * Creates the state-polling thread (does not start it).
     *
     * @param provider provider whose state is polled
     * @param listener listener to notify of state changes
     */
    StateThread(LocationProviderImpl provider, LocationListener listener) {
        this.provider = provider;
        this.listener = listener;
    }
    /**
     * Terminates the thread by setting the flag and waking it from wait().
     */
    void terminate() {
        terminated = true;
        synchronized (this) {
            notify();
        }
    }
    /**
     * Main loop: sleeps for the provider's state interval, re-reads the
     * state, and calls providerStateChanged() only when the state differs
     * from the previously observed one.
     */
    public void run() {
        // get the current provider state
        int interval = provider.getStateInterval() * 1000;
        int state = provider.getState();
        try {
            while (!terminated) {
                synchronized (this) {
                    // wait before querying the current state
                    wait(interval);
                }
                if (terminated) { // thread was stopped
                    break;
                }
                // check the new provider state
                int newState = provider.getState();
                if (newState != state) { // state was changed
                    state = newState;
                    // send the state update
                    listener.providerStateChanged(provider, state);
                }
            }
        } catch (InterruptedException e) {
            if (Logging.TRACE_ENABLED) {
                Logging.trace(e, "Wrong thread exception.");
            }
        }
    }
}
| gpl-2.0 |
5AMW3155/shattered-pixel-dungeon | src/com/shatteredpixel/shatteredpixeldungeon/items/weapon/enchantments/Instability.java | 1539 | /*
* Pixel Dungeon
* Copyright (C) 2012-2015 Oleg Dolya
*
* Shattered Pixel Dungeon
* Copyright (C) 2014-2015 Evan Debenham
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>
*/
package com.shatteredpixel.shatteredpixeldungeon.items.weapon.enchantments;
import com.shatteredpixel.shatteredpixeldungeon.actors.Char;
import com.shatteredpixel.shatteredpixeldungeon.items.weapon.Weapon;
import com.shatteredpixel.shatteredpixeldungeon.items.weapon.Weapon.Enchantment;
import com.shatteredpixel.shatteredpixeldungeon.items.weapon.missiles.Boomerang;
/**
 * Weapon enchantment with no fixed effect of its own: every single hit is
 * handed to a freshly rolled random enchantment, so the weapon behaves
 * differently from strike to strike.
 */
public class Instability extends Weapon.Enchantment {

	// Display-name prefix placed before the weapon's own name.
	private static final String NAME_PREFIX = "Unstable ";

	@Override
	public boolean proc( Weapon weapon, Char attacker, Char defender, int damage ) {
		// Roll a random enchantment and let it resolve this hit.
		Weapon.Enchantment rolled = random();
		return rolled.proc( weapon, attacker, defender, damage );
	}

	@Override
	public String name( String weaponName) {
		// Equivalent to String.format( "Unstable %s", weaponName ).
		return NAME_PREFIX + weaponName;
	}
}
| gpl-3.0 |