answer
stringlengths
17
10.2M
package io.github.crisstanza.tictactoe; public final class TicTacToeGame { public static final int STATUS_NOT_STARTED = -1; public static final int STATUS_RUNNING = 0; public static final int STATUS_GAME_OVER = 1; private String board; private int status; private String turn; public String getBoard() { return board; } public void setBoard(String board) { this.board = board; } public int getStatus() { return status; } public void setStatus(int status) { this.status = status; } public String getTurn() { return turn; } public void setTurn(String turn) { this.turn = turn; } }
package juja.progress.domain.service; import com.google.gdata.data.spreadsheet.CellEntry; import com.google.gdata.data.spreadsheet.ListEntry; import com.google.inject.Guice; import com.google.inject.Injector; import com.google.inject.Key; import com.google.inject.name.Names; import java.util.Set; import jdk.nashorn.internal.ir.annotations.Ignore; import juja.google.spreadsheet.api.Cell; import juja.google.spreadsheet.api.SpreadSheetReader; import juja.google.spreadsheet.api.gdata.GdataCell; import juja.mockdi.MockSlackModule; import juja.progress.di.GSRApplicationModule; import juja.progress.di.PropertiesModule; import juja.progress.domain.model.User; import org.hamcrest.MatcherAssert; import org.hamcrest.collection.IsCollectionWithSize; import org.hamcrest.core.Is; import org.hamcrest.core.IsCollectionContaining; import org.hamcrest.core.IsNot; import org.junit.After; import org.junit.BeforeClass; import org.junit.Test; public class JujacoreProgressServiceIntegrationTest { private static Injector injector; @BeforeClass public static void createInjector() { JujacoreProgressServiceIntegrationTest.injector = Guice.createInjector( new PropertiesModule(), new GSRApplicationModule(), new MockSlackModule() ); } @Test public void fetchCodesFromRealSpreadsheet() throws Exception { final ProgressService service = JujacoreProgressServiceIntegrationTest .injector.getInstance(ProgressService.class); final Set<String> codes = service.codes(); MatcherAssert.assertThat( codes, IsCollectionWithSize.hasSize(251) ); MatcherAssert.assertThat(codes, IsNot.not( IsCollectionContaining.hasItem("")) ); } @Test public void markProgressForUser() throws Exception { final ProgressService service = JujacoreProgressServiceIntegrationTest .injector.getInstance(ProgressService.class); final SpreadSheetReader spreadsheet = JujacoreProgressServiceIntegrationTest.injector.getInstance( Key.get(SpreadSheetReader.class, Names.named("progress")) ); Cell cell = new GdataCell( spreadsheet, "viktorkuchyn", 
"log-код", "+lms" ); cell.update(""); service.markProgressDone( User.create().withSlackNick("viktorkuchyn").build(), "+lms" ); cell = new GdataCell(spreadsheet, "viktorkuchyn", "log-код", "+lms"); MatcherAssert.assertThat(cell.value(), Is.is("DONE")); } @Ignore @Test public void createsNewColumnForUserId() throws Exception { final SpreadSheetReader spreadsheet = JujacoreProgressServiceIntegrationTest.injector.getInstance( Key.get(SpreadSheetReader.class, Names.named("progress")) ); final String title = "TEST_HEADER"; final ListEntry header = spreadsheet.findRowByColumnValue(title, ""); header.update(); final CellEntry cell = spreadsheet.createNewHeader(title); MatcherAssert.assertThat(cell.getCell().getInputValue(), Is.is(title)); } }
package org.diirt.datasource.formula;

import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Pattern;

/**
 * A set of functions that can be used in the formulas.
 * <p>
 * Objects of this class can be registered in the {@link FormulaRegistry} and
 * the functions will be available in the formula language.
 *
 * @author carcassi
 */
public abstract class FormulaFunctionSet {
    // Compiled once and shared; made final so the shared validation pattern
    // cannot be reassigned (it was previously a mutable static).
    static final Pattern namePattern = Pattern.compile("[a-zA-Z_]\\w*");

    private final String name;
    private final String description;
    // Defensive, unmodifiable snapshot of the functions in this set.
    private final Collection<FormulaFunction> formulaFunctions;

    /**
     * Creates a new set of functions to be registered in the formula language.
     *
     * @param functionSetDescription the description of the function set
     */
    public FormulaFunctionSet(FormulaFunctionSetDescription functionSetDescription) {
        this.name = functionSetDescription.name;
        this.description = functionSetDescription.description;
        this.formulaFunctions = Collections.unmodifiableSet(new HashSet<>(functionSetDescription.formulaFunctions));
    }

    /**
     * Returns the name of the function set.
     *
     * @return the function set name
     */
    public final String getName() {
        return name;
    }

    /**
     * Returns the description of the function set.
     *
     * @return the function set description
     */
    public final String getDescription() {
        return description;
    }

    /**
     * The names of all functions in this set.
     *
     * @return the function names
     */
    public final Collection<String> getFunctionNames() {
        Set<String> names = new HashSet<>();
        for (FormulaFunction formulaFunction : formulaFunctions) {
            names.add(formulaFunction.getName());
        }
        return names;
    }

    /**
     * Returns all the functions in the set with the given name.
     *
     * @param name the name of the function
     * @return the matched functions; never null
     */
    public final Collection<FormulaFunction> findFunctions(String name) {
        // A null name matches nothing; return an empty (immutable) collection.
        if (name == null) {
            return Collections.emptyList();
        }
        Set<FormulaFunction> formulas = new HashSet<>();
        for (FormulaFunction formulaFunction : formulaFunctions) {
            if (name.equals(formulaFunction.getName())) {
                formulas.add(formulaFunction);
            }
        }
        return formulas;
    }

    /**
     * Returns all functions in the set.
     *
     * @return the functions in the set
     */
    public final Collection<FormulaFunction> getFunctions() {
        return formulaFunctions;
    }
}
package com.br.uepb.hl7;

import java.io.IOException;
import java.util.concurrent.ExecutorService;

import com.br.uepb.model.MedicaoOximetroDomain;
import com.br.uepb.model.PacienteDomain;

import ca.uhn.hl7v2.DefaultHapiContext;
import ca.uhn.hl7v2.HL7Exception;
import ca.uhn.hl7v2.HapiContext;
import ca.uhn.hl7v2.app.Connection;
import ca.uhn.hl7v2.app.ConnectionHub;
import ca.uhn.hl7v2.app.HL7Service;
import ca.uhn.hl7v2.app.ServerConfiguration;
import ca.uhn.hl7v2.conf.check.Validator;
import ca.uhn.hl7v2.conf.store.CodeStoreRegistry;
import ca.uhn.hl7v2.conf.store.ProfileStore;
import ca.uhn.hl7v2.llp.LowerLayerProtocol;
import ca.uhn.hl7v2.model.Message;
import ca.uhn.hl7v2.model.v22.datatype.PN;
import ca.uhn.hl7v2.model.v22.message.ADT_A01;
import ca.uhn.hl7v2.model.v22.message.ORU_R01;
import ca.uhn.hl7v2.model.v22.segment.MSH;
import ca.uhn.hl7v2.model.v22.segment.NK1;
import ca.uhn.hl7v2.model.v22.segment.OBR;
import ca.uhn.hl7v2.model.v22.segment.OBX;
import ca.uhn.hl7v2.model.v22.segment.PID;
import ca.uhn.hl7v2.model.v23.message.MDM_T02;
import ca.uhn.hl7v2.parser.EncodingNotSupportedException;
import ca.uhn.hl7v2.parser.GenericParser;
import ca.uhn.hl7v2.parser.ModelClassFactory;
import ca.uhn.hl7v2.parser.Parser;
import ca.uhn.hl7v2.parser.ParserConfiguration;
import ca.uhn.hl7v2.parser.PipeParser;
import ca.uhn.hl7v2.parser.XMLParser;
import ca.uhn.hl7v2.util.SocketFactory;
import ca.uhn.hl7v2.validation.ValidationContext;
import ca.uhn.hl7v2.validation.ValidationExceptionHandlerFactory;
import ca.uhn.hl7v2.validation.builder.ValidationRuleBuilder;

/**
 * Builds HL7 v2 messages from domain objects using the HAPI library
 * ("transformation of the String into HL7").
 * <p>
 * NOTE(review): class name is not UpperCamelCase and many imports above are
 * unused, but both are kept unchanged here to preserve the public interface.
 */
public class tranformacaoDaStringParaHL7 {

	// Parsed ADT message holder; never assigned in the visible code — TODO confirm intended use.
	private static ADT_A01 adtMsg;

	// Sample HL7 v2.2 ADT^A01 message (pipe-delimited, \r segment separators); unused by the methods below.
	private String msg = "MSH|^~\\&|HIS|RIH|EKG|EKG|199904140038||ADT^A01||P|2.2\r"
			+ "PID|0001|00009874|00001122|A00977|SMITH^JOHN^M|MOM|19581119|F|NOTREAL^LINDA^M|C|564 SPRING ST^^NEEDHAM^MA^02494^US|0002|(818)565-1551|(425)828-3344|E|S|C|0000444444|252-00-4414||||SA|||SA||||NONE|V1|0001|I|D.ER^50A^M110^01|ER|P00055|11B^M011^02|070615^BATMAN^GEORGE^L|555888^NOTREAL^BOB^K^DR^MD|777889^NOTREAL^SAM^T^DR^MD^PHD|ER|D.WT^1A^M010^01|||ER|AMB|02|070615^NOTREAL^BILL^L|ER|000001916994|D||||||||||||||||GDD|WA|NORM|02|O|02|E.IN^02D^M090^01|E.IN^01D^M080^01|199904072124|199904101200|199904101200||||5555112333|||666097^NOTREAL^MANNY^P\r"
			+ "NK1|0222555|NOTREAL^JAMES^R|FA|STREET^OTHER STREET^CITY^ST^55566|(222)111-3333|(888)999-0000|||||||ORGANIZATION\r"
			+ "PV1|0001|I|D.ER^1F^M950^01|ER|P000998|11B^M011^02|070615^BATMAN^GEORGE^L|555888^OKNEL^BOB^K^DR^MD|777889^NOTREAL^SAM^T^DR^MD^PHD|ER|D.WT^1A^M010^01|||ER|AMB|02|070615^VOICE^BILL^L|ER|000001916994|D||||||||||||||||GDD|WA|NORM|02|O|02|E.IN^02D^M090^01|E.IN^01D^M080^01|199904072124|199904101200|||||5555112333|||666097^DNOTREAL^MANNY^P\r"
			+ "PV2|||0112^TESTING|55555^PATIENT IS NORMAL|NONE|||19990225|19990226|1|1|TESTING|555888^NOTREAL^BOB^K^DR^MD||||||||||PROD^003^099|02|ER||NONE|19990225|19990223|19990316|NONE\r"
			+ "AL1||SEV|001^POLLEN\r"
			+ "GT1||0222PL|NOTREAL^BOB^B||STREET^OTHER STREET^CITY^ST^77787|(444)999-3333|(222)777-5555||||MO|111-33-5555||||NOTREAL GILL N|STREET^OTHER STREET^CITY^ST^99999|(111)222-3333\r"
			+ "IN1||022254P|4558PD|BLUE CROSS|STREET^OTHER STREET^CITY^ST^00990||(333)333-6666||221K|LENIX|||19980515|19990515|||PATIENT01 TEST D||||||||||||||||||02LL|022LP554";

	// Sample HL7 v2.5 ORU^R01 vital-signs message; also unused by the methods below.
	private String msg2 = "MSH|^~\\&|VSM001|MIRTH_CONNECT|HIS001|MIRTH_CONNECT|20100511220525||ORU ^R01|MSG0000001|P|2.5|||NE|NE|CO|8859/1|ES-CO"
			+ "PID||6537077|6537077^^^^CC||ANDRES FELIPE^FERNANDEZ CORTES||19860705|M"
			//+ "OBR|1||VS12340000|28562-7^Vital Signs^LN"
			+ "OBX|1|NM|271649006^Systolic blood pressure^SNOMED-CT||132|mm[Hg]|90-120|H|||F|||20100511220525"
			+ "OBX|2|NM|271650006^Diastolic blood pressure^SNOMED-CT||86|mm[Hg]|60-80|H|||F|||20100511220525"
			+ "OBX|3|NM|6797001^Mean blood pressure^SNOMED-CT||94|mm[Hg]|92-96|N|||F|||20100511220525"
			+ "OBX|4|NM|386725007^Body temperature^SNOMED-CT||37|C|37|N|||F|||20100511220525"
			+ "OBX|5|NM|78564009^Pulse rate^SNOMED-CT||80|bpm|60-100|N|||F|||20100511220525"
			+ "OBX|6|NM|431314004^SpO2^SNOMED-CT||90|%|94-100|L|||F|||20100511220525";

	public tranformacaoDaStringParaHL7() {
	}

	/**
	 * Starts an MDM^T02 message for an oximeter measurement.
	 * <p>
	 * NOTE(review): the built message is discarded and the literal "ads" is
	 * returned; the {@code medicaoOximetroDomain} parameter is never read —
	 * this looks like work-in-progress code. TODO confirm intent.
	 *
	 * @param medicaoOximetroDomain the oximeter measurement (currently unused)
	 * @return the constant string "ads"
	 * @throws HL7Exception if HAPI fails to initialize the message
	 * @throws IOException  propagated from {@code initQuickstart}
	 */
	public String Oximetro(MedicaoOximetroDomain medicaoOximetroDomain) throws HL7Exception, IOException {
		MDM_T02 mdm = new MDM_T02();
		mdm.initQuickstart("MDM", "T02", "M");
		ca.uhn.hl7v2.model.v23.segment.MSH msh = mdm.getMSH();
		ca.uhn.hl7v2.model.v23.segment.OBX obx = mdm.getOBX();
		obx.getObservationIdentifier().getIdentifier().setValue("1");
		String msg = "ads";
		return msg;
	}

	/**
	 * Builds and pipe-encodes an ADT^A01 message from a patient domain object:
	 * name and sex go into PID; city and home phone into NK1.
	 *
	 * @param pacienteDomain patient data to encode
	 * @return the ER7 (pipe-delimited) encoding of the message
	 * @throws HL7Exception if message construction or encoding fails
	 * @throws IOException  propagated from {@code initQuickstart}
	 */
	@SuppressWarnings("resource")
	public String criarMessage(PacienteDomain pacienteDomain) throws HL7Exception, IOException {
		ADT_A01 adt = new ADT_A01();
		adt.initQuickstart("ADT", "A01", "P");
		MSH msh = adt.getMSH();
		msh.getSendingApplication().setValue("Teste");
		msh.getSequenceNumber().setValue("123");
		// Converted but unused — see commented-out PID-10 line below.
		String idPessoa = String.valueOf(pacienteDomain.getId());
		PID pid = adt.getPID();
		pid.getPatientName().getGivenName().setValue(pacienteDomain.getNome());
		//pid.getPid10_Race().setValue(idPessoa);
		pid.getSex().setValue(pacienteDomain.getSexo());
		NK1 nk1 = adt.getNK1();
		nk1.getAddress().getCity().setValue(pacienteDomain.getCidade());
		nk1.getBusinessPhoneNumber().setValue(pacienteDomain.getTelefoneCasa());
		// NOTE(review): the context is never closed (@SuppressWarnings("resource") above);
		// DefaultHapiContext holds resources — consider try-with-resources. TODO confirm.
		HapiContext context = new DefaultHapiContext();
		Parser parser = context.getPipeParser();
		String mensagem = parser.encode(adt);
		return mensagem;
	}

	/*
	 * public PacienteDomain informacoesDoPaciente() { PacienteDomain paciente =
	 * new PacienteDomain(); return paciente; }
	 */

	/**
	 * Manual smoke test: builds a sample patient and prints the encoded ADT^A01.
	 */
	public static void main(String[] args) throws HL7Exception, IOException {
		tranformacaoDaStringParaHL7 tranformacaoDaStringParaHL7 = new tranformacaoDaStringParaHL7();
		//MSH msh = adtMsg.getMSH();
		// Retrieve some data from the MSH segment
		//String msgType = msh.getMessageType().getMessageType().getValue();
		//String msgTrigger = msh.getMessageType().getTriggerEvent().getValue();
		// Print "ADT A01"
		//System.out.println(msgType + " " + msgTrigger);
		//PN patientName = adtMsg.getPID().getPatientName();
		//AD endereco = adtMsg.getNK1().getAddress();
		//TN telefone = adtMsg.getNK1().getBusinessPhoneNumber();
		PacienteDomain domain = new PacienteDomain();
		domain.setId(1);
		domain.setNome("Bruno Teles Janaina Einstein");
		domain.setCidade("Campina Grande");
		domain.setSexo("Masculino");
		domain.setTelefoneCasa("(83) 99999-9999");
		String msg6 = tranformacaoDaStringParaHL7.criarMessage(domain);
		System.out.println();
		System.out.println(msg6);
	}
}
package net.runelite.client.plugins.cluescrolls.clues;

import com.google.common.collect.ImmutableMap;
import java.awt.Color;
import java.awt.Graphics2D;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.Getter;
import lombok.NonNull;
import net.runelite.api.Varbits;
import net.runelite.api.annotations.Varbit;
import net.runelite.api.coords.LocalPoint;
import net.runelite.api.coords.WorldPoint;
import net.runelite.client.plugins.cluescrolls.ClueScrollPlugin;
import static net.runelite.client.plugins.cluescrolls.clues.Enemy.ANCIENT_WIZARDS;
import static net.runelite.client.plugins.cluescrolls.clues.Enemy.ARMADYLEAN_GUARD;
import static net.runelite.client.plugins.cluescrolls.clues.Enemy.ARMADYLEAN_OR_BANDOSIAN_GUARD;
import static net.runelite.client.plugins.cluescrolls.clues.Enemy.BANDOSIAN_GUARD;
import static net.runelite.client.plugins.cluescrolls.clues.Enemy.BRASSICAN_MAGE;
import static net.runelite.client.plugins.cluescrolls.clues.Enemy.SARADOMIN_WIZARD;
import static net.runelite.client.plugins.cluescrolls.clues.Enemy.ZAMORAK_WIZARD;
import net.runelite.client.ui.overlay.OverlayUtil;
import net.runelite.client.ui.overlay.components.LineComponent;
import net.runelite.client.ui.overlay.components.PanelComponent;
import net.runelite.client.ui.overlay.components.TitleComponent;

/**
 * A coordinate clue scroll: the dig location is identified by a
 * {@link WorldPoint}, and {@link #CLUES} maps each known point to
 * human-readable directions plus any enemy / light-source requirement.
 */
@Getter
public class CoordinateClue extends ClueScroll implements TextClueScroll, LocationClueScroll
{
	/**
	 * Static metadata for one coordinate clue: directions text, whether a
	 * light source is needed (and which fire-pit varbit can waive it), and
	 * the enemy that may ambush the digger.
	 */
	@Getter
	private static class CoordinateClueInfo
	{
		private final String directions;
		private final boolean lightRequired;
		@Getter(onMethod_ = {@Varbit})
		private final int lightSourceVarbitId;
		private final Enemy enemy;

		// No enemy, no light requirement.
		private CoordinateClueInfo(@NonNull String directions)
		{
			this(directions, null);
		}

		// Enemy but no light requirement; -1 marks "no fire-pit varbit".
		private CoordinateClueInfo(@NonNull String directions, Enemy enemy)
		{
			this.directions = directions;
			this.enemy = enemy;
			this.lightRequired = false;
			this.lightSourceVarbitId = -1;
		}

		private CoordinateClueInfo(@Nonnull String directions, Enemy enemy, boolean lightRequired, @Varbit int lightSourceVarbitId)
		{
			this.directions = directions;
			this.enemy = enemy;
			this.lightRequired = lightRequired;
			this.lightSourceVarbitId = lightSourceVarbitId;
		}
	}

	// Lookup table of every known coordinate clue, grouped by clue tier.
	private static final ImmutableMap<WorldPoint, CoordinateClueInfo> CLUES = new ImmutableMap.Builder<WorldPoint, CoordinateClueInfo>()
		// Medium
		.put(new WorldPoint(2479, 3158, 0), new CoordinateClueInfo("South of fruit tree patch, west of Tree Gnome Village."))
		.put(new WorldPoint(2887, 3154, 0), new CoordinateClueInfo("West of Banana plantation on Karamja."))
		.put(new WorldPoint(2743, 3151, 0), new CoordinateClueInfo("Entrance of Brimhaven dungeon."))
		.put(new WorldPoint(3184, 3150, 0), new CoordinateClueInfo("South of Lumbridge Swamp."))
		.put(new WorldPoint(3217, 3177, 0), new CoordinateClueInfo("East of Lumbridge Swamp."))
		.put(new WorldPoint(3007, 3144, 0), new CoordinateClueInfo("Near the entrance to the Asgarnian Ice Dungeon, south of Port Sarim (AIQ)."))
		.put(new WorldPoint(2896, 3119, 0), new CoordinateClueInfo("Near Karambwan fishing spot (DKP)."))
		.put(new WorldPoint(2697, 3207, 0), new CoordinateClueInfo("Centre of Moss Giant Island, west of Brimhaven."))
		.put(new WorldPoint(2679, 3110, 0), new CoordinateClueInfo("North of Hazelmere's house (CLS)."))
		.put(new WorldPoint(3510, 3074, 0), new CoordinateClueInfo("East of Uzer (DLQ)."))
		.put(new WorldPoint(3160, 3251, 0), new CoordinateClueInfo("West of trapdoor leading to H.A.M Hideout."))
		.put(new WorldPoint(2643, 3252, 0), new CoordinateClueInfo("South of Ardougne Zoo, North of Tower of Life (DJP)."))
		.put(new WorldPoint(2322, 3061, 0), new CoordinateClueInfo("South-west of Castle wars (BKP)."))
		.put(new WorldPoint(2875, 3046, 0), new CoordinateClueInfo("North of nature altar, north of Shilo Village (CKR)."))
		.put(new WorldPoint(2849, 3033, 0), new CoordinateClueInfo("West of nature altar, north of Shilo Village (CKR)."))
		.put(new WorldPoint(2848, 3296, 0), new CoordinateClueInfo("North of Crandor island."))
		.put(new WorldPoint(2583, 2990, 0), new CoordinateClueInfo("Feldip Hills, south-east of Gu'Thanoth (AKS)."))
		.put(new WorldPoint(3179, 3344, 0), new CoordinateClueInfo("In the cow pen north of the Lumbridge windmill."))
		.put(new WorldPoint(2383, 3370, 0), new CoordinateClueInfo("West of the outpost"))
		.put(new WorldPoint(3312, 3375, 0), new CoordinateClueInfo("North-west of Exam Centre, on the hill."))
		.put(new WorldPoint(3121, 3384, 0), new CoordinateClueInfo("North-east of Draynor Manor, near River Lum."))
		.put(new WorldPoint(3430, 3388, 0), new CoordinateClueInfo("West of Mort Myre Swamp (BKR)."))
		.put(new WorldPoint(2920, 3403, 0), new CoordinateClueInfo("South-east of Taverley, near Lady of the Lake."))
		.put(new WorldPoint(2594, 2899, 0), new CoordinateClueInfo("South-east of Feldip Hills, by the crimson swifts (AKS)."))
		.put(new WorldPoint(2387, 3435, 0), new CoordinateClueInfo("West of Tree Gnome Stronghold, near the pen containing terrorbirds."))
		.put(new WorldPoint(2512, 3467, 0), new CoordinateClueInfo("Baxtorian Falls (Bring rope)."))
		.put(new WorldPoint(2381, 3468, 0), new CoordinateClueInfo("West of Tree Gnome Stronghold, north of the pen with terrorbirds."))
		.put(new WorldPoint(3005, 3475, 0), new CoordinateClueInfo("Ice Mountain, west of Edgeville Monastery."))
		.put(new WorldPoint(2585, 3505, 0), new CoordinateClueInfo("By the shore line north of the Coal Trucks."))
		.put(new WorldPoint(3443, 3515, 0), new CoordinateClueInfo("South of Slayer Tower (CKS)."))
		.put(new WorldPoint(2416, 3516, 0), new CoordinateClueInfo("Tree Gnome Stronghold, west of Grand Tree, near swamp."))
		.put(new WorldPoint(3429, 3523, 0), new CoordinateClueInfo("South of Slayer Tower (CKS)."))
		.put(new WorldPoint(2363, 3531, 0), new CoordinateClueInfo("North-east of Eagles' Peak (AKQ)."))
		.put(new WorldPoint(2919, 3535, 0), new CoordinateClueInfo("East of Burthorpe pub."))
		.put(new WorldPoint(3548, 3560, 0), new CoordinateClueInfo("Inside Fenkenstrain's Castle."))
		.put(new WorldPoint(1476, 3566, 0), new CoordinateClueInfo("Graveyard of Heroes in west Shayzien."))
		.put(new WorldPoint(2735, 3638, 0), new CoordinateClueInfo("East of Rellekka, north-west of Golden Apple Tree (AJR)."))
		.put(new WorldPoint(2681, 3653, 0), new CoordinateClueInfo("Rellekka, in the garden of the south-east house."))
		.put(new WorldPoint(2537, 3881, 0), new CoordinateClueInfo("Miscellania (CIP)."))
		.put(new WorldPoint(2828, 3234, 0), new CoordinateClueInfo("Southern coast of Crandor."))
		.put(new WorldPoint(1247, 3726, 0), new CoordinateClueInfo("Just inside the Farming Guild"))
		.put(new WorldPoint(3770, 3898, 0), new CoordinateClueInfo("On the small island north-east of Fossil Island's mushroom forest."))
		// Hard
		.put(new WorldPoint(2209, 3161, 0), new CoordinateClueInfo("North-east of Tyras Camp (BJS if 76 Agility).", SARADOMIN_WIZARD))
		.put(new WorldPoint(2181, 3206, 0), new CoordinateClueInfo("South of Iorwerth Camp.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3081, 3209, 0), new CoordinateClueInfo("Small Island (CLP).", SARADOMIN_WIZARD))
		.put(new WorldPoint(3399, 3246, 0), new CoordinateClueInfo("Behind the Duel Arena."))
		.put(new WorldPoint(2699, 3251, 0), new CoordinateClueInfo("Little island (AIR).", SARADOMIN_WIZARD))
		.put(new WorldPoint(3546, 3251, 0), new CoordinateClueInfo("North-east of Burgh de Rott.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3544, 3256, 0), new CoordinateClueInfo("North-east of Burgh de Rott.", SARADOMIN_WIZARD))
		.put(new WorldPoint(2841, 3267, 0), new CoordinateClueInfo("Crandor island.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3168, 3041, 0), new CoordinateClueInfo("Bedabin Camp.", SARADOMIN_WIZARD))
		.put(new WorldPoint(2542, 3031, 0), new CoordinateClueInfo("Gu'Tanoth, may require 20gp.", SARADOMIN_WIZARD))
		.put(new WorldPoint(2581, 3030, 0), new CoordinateClueInfo("Gu'Tanoth island, enter cave north-west of Feldip Hills (AKS).", SARADOMIN_WIZARD))
		.put(new WorldPoint(2961, 3024, 0), new CoordinateClueInfo("Ship yard (DKP).", SARADOMIN_WIZARD))
		.put(new WorldPoint(2339, 3311, 0), new CoordinateClueInfo("East of Prifddinas on Arandar mountain pass.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3440, 3341, 0), new CoordinateClueInfo("Nature Spirit's grotto (BIP).", SARADOMIN_WIZARD))
		.put(new WorldPoint(2763, 2974, 0), new CoordinateClueInfo("Cairn Isle, west of Shilo Village (CKR).", SARADOMIN_WIZARD))
		.put(new WorldPoint(3138, 2969, 0), new CoordinateClueInfo("West of Bandit Camp in Kharidian Desert.", SARADOMIN_WIZARD))
		.put(new WorldPoint(2924, 2963, 0), new CoordinateClueInfo("On the southern part of eastern Karamja.", SARADOMIN_WIZARD))
		.put(new WorldPoint(2838, 2914, 0), new CoordinateClueInfo("Kharazi Jungle, near water pool (CKR).", SARADOMIN_WIZARD))
		.put(new WorldPoint(3441, 3419, 0), new CoordinateClueInfo("Mort Myre Swamp (BKR).", SARADOMIN_WIZARD))
		.put(new WorldPoint(2950, 2902, 0), new CoordinateClueInfo("South-east of Kharazi Jungle.", SARADOMIN_WIZARD))
		.put(new WorldPoint(2775, 2891, 0), new CoordinateClueInfo("South-west of Kharazi Jungle.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3113, 3602, 0), new CoordinateClueInfo("Wilderness. South-west of Ferox Enclave (level 11).", ZAMORAK_WIZARD))
		.put(new WorldPoint(2892, 3675, 0), new CoordinateClueInfo("On the summit of Trollheim.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3168, 3677, 0), new CoordinateClueInfo("Wilderness. Graveyard of Shadows.", ZAMORAK_WIZARD))
		.put(new WorldPoint(2853, 3690, 0), new CoordinateClueInfo("Entrance to the troll Stronghold.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3305, 3692, 0), new CoordinateClueInfo("Wilderness. West of eastern green dragon.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3055, 3696, 0), new CoordinateClueInfo("Wilderness. Bandit Camp.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3302, 3696, 0), new CoordinateClueInfo("Wilderness. West of eastern green dragon.", ZAMORAK_WIZARD))
		.put(new WorldPoint(1479, 3699, 0), new CoordinateClueInfo("Lizardman Canyon (DJR).", SARADOMIN_WIZARD))
		.put(new WorldPoint(2712, 3732, 0), new CoordinateClueInfo("North-east of Rellekka (DKS).", SARADOMIN_WIZARD))
		.put(new WorldPoint(2970, 3749, 0), new CoordinateClueInfo("Wilderness. Forgotten Cemetery.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3094, 3764, 0), new CoordinateClueInfo("Wilderness. Mining site north of Bandit Camp.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3311, 3769, 0), new CoordinateClueInfo("Wilderness. North of Venenatis.", ZAMORAK_WIZARD))
		.put(new WorldPoint(1460, 3782, 0), new CoordinateClueInfo("Lovakengj, near burning man.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3244, 3792, 0), new CoordinateClueInfo("Wilderness. South-east of Lava Dragon Isle by some Chaos Dwarves.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3140, 3804, 0), new CoordinateClueInfo("Wilderness. North of Ruins.", ZAMORAK_WIZARD))
		.put(new WorldPoint(2946, 3819, 0), new CoordinateClueInfo("Wilderness. Chaos Temple (level 38).", ZAMORAK_WIZARD))
		.put(new WorldPoint(3771, 3825, 0), new CoordinateClueInfo("Fossil Island. East of Museum Camp.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3013, 3846, 0), new CoordinateClueInfo("Wilderness. West of Lava Maze, before KBD's lair.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3058, 3884, 0), new CoordinateClueInfo("Wilderness. Near runite ore north of Lava Maze.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3290, 3889, 0), new CoordinateClueInfo("Wilderness. Demonic Ruins.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3770, 3897, 0), new CoordinateClueInfo("Small Island north of Fossil Island.", SARADOMIN_WIZARD))
		.put(new WorldPoint(2505, 3899, 0), new CoordinateClueInfo("Small Island north-west of Miscellania (AJS).", SARADOMIN_WIZARD))
		.put(new WorldPoint(3285, 3942, 0), new CoordinateClueInfo("Wilderness. Rogues' Castle.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3159, 3959, 0), new CoordinateClueInfo("Wilderness. North of Deserted Keep, west of Resource Area.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3039, 3960, 0), new CoordinateClueInfo("Wilderness. Pirates' Hideout.", ZAMORAK_WIZARD))
		.put(new WorldPoint(2987, 3963, 0), new CoordinateClueInfo("Wilderness. West of Wilderness Agility Course.", ZAMORAK_WIZARD))
		.put(new WorldPoint(3189, 3963, 0), new CoordinateClueInfo("Wilderness. North of Resource Area, near magic axe hut.", ZAMORAK_WIZARD))
		.put(new WorldPoint(2341, 3697, 0), new CoordinateClueInfo("North-east of the Piscatoris Fishing Colony bank.", SARADOMIN_WIZARD))
		.put(new WorldPoint(3143, 3774, 0), new CoordinateClueInfo("In level 32 Wilderness, by the black chinchompa hunting area.", ZAMORAK_WIZARD))
		.put(new WorldPoint(2992, 3941, 0), new CoordinateClueInfo("Wilderness Agility Course, past the log balance.", ZAMORAK_WIZARD))
		.put(new WorldPoint(1410, 3611, 0), new CoordinateClueInfo("Lake Molch dock west of Shayzien Encampment.", SARADOMIN_WIZARD))
		.put(new WorldPoint(1409, 3483, 0), new CoordinateClueInfo("South of Shayziens' Wall.", SARADOMIN_WIZARD))
		// Elite
		.put(new WorldPoint(2357, 3151, 0), new CoordinateClueInfo("Lletya.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3587, 3180, 0), new CoordinateClueInfo("Meiyerditch.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2820, 3078, 0), new CoordinateClueInfo("Tai Bwo Wannai. Hardwood Grove. 100 Trading sticks or elite Karamja diary completion is needed to enter.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3811, 3060, 0), new CoordinateClueInfo("Small island north-east of Mos Le'Harmless.", ARMADYLEAN_OR_BANDOSIAN_GUARD, true, Varbits.FIRE_PIT_MOS_LE_HARMLESS))
		.put(new WorldPoint(2180, 3282, 0), new CoordinateClueInfo("North of Iorwerth Camp.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2870, 2997, 0), new CoordinateClueInfo("North-east corner in Shilo Village.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3302, 2988, 0), new CoordinateClueInfo("On top of a cliff to the west of Pollnivneach.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2511, 2980, 0), new CoordinateClueInfo("Just south of Gu'Tanoth, west of gnome glider.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2732, 3372, 0), new CoordinateClueInfo("Legends' Guild.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3573, 3425, 0), new CoordinateClueInfo("North of Dessous's tomb from Desert Treasure.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3828, 2848, 0), new CoordinateClueInfo("East of Harmony Island.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3225, 2838, 0), new CoordinateClueInfo("South of Desert Treasure pyramid.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(1773, 3510, 0), new CoordinateClueInfo("Ruins north of the Hosidius mine.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3822, 3562, 0), new CoordinateClueInfo("North-east of Dragontooth Island. Bring a Ghostspeak Amulet and 25 Ecto-tokens to reach the island.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3603, 3564, 0), new CoordinateClueInfo("North of the wrecked ship, outside of Port Phasmatys.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2936, 2721, 0), new CoordinateClueInfo("Eastern shore of Crash Island.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2697, 2705, 0), new CoordinateClueInfo("South-west of Ape Atoll.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2778, 3678, 0), new CoordinateClueInfo("Mountain Camp.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2827, 3740, 0), new CoordinateClueInfo("West of the entrance to the Ice Path, where the Troll child resides.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2359, 3799, 0), new CoordinateClueInfo("Neitiznot.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2194, 3807, 0), new CoordinateClueInfo("Pirates' Cove.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2700, 3808, 0), new CoordinateClueInfo("Northwestern part of the Trollweiss and Rellekka Hunter area (DKS).", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3215, 3835, 0), new CoordinateClueInfo("Wilderness. Lava Dragon Isle.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3369, 3894, 0), new CoordinateClueInfo("Wilderness. Fountain of Rune.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2065, 3923, 0), new CoordinateClueInfo("Outside the western wall on Lunar Isle.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3188, 3933, 0), new CoordinateClueInfo("Wilderness. Resource Area.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2997, 3953, 0), new CoordinateClueInfo("Wilderness. Inside Agility Training Area.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3380, 3963, 0), new CoordinateClueInfo("Wilderness. North of Volcano.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3051, 3736, 0), new CoordinateClueInfo("East of the Wilderness Obelisk in 28 Wilderness.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2316, 3814, 0), new CoordinateClueInfo("West of Neitiznot, near the bridge.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2872, 3937, 0), new CoordinateClueInfo("Weiss.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2484, 4016, 0), new CoordinateClueInfo("Northeast corner of the Island of Stone.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2222, 3331, 0), new CoordinateClueInfo("Prifddinas, west of the Tower of Voices", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3560, 3987, 0), new CoordinateClueInfo("Lithkren. Digsite pendant teleport if unlocked, otherwise take rowboat from west of Mushroom Meadow Mushtree.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(2318, 2954, 0), new CoordinateClueInfo("North-east corner of the Isle of Souls.", BANDOSIAN_GUARD))
		.put(new WorldPoint(2094, 2889, 0), new CoordinateClueInfo("West side of the Isle of Souls.", ARMADYLEAN_GUARD))
		.put(new WorldPoint(1451, 3509, 0), new CoordinateClueInfo("Ruins of Morra.", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		.put(new WorldPoint(3318, 2706, 0), new CoordinateClueInfo("Necropolis mine", ARMADYLEAN_OR_BANDOSIAN_GUARD))
		// Master
		.put(new WorldPoint(2178, 3209, 0), new CoordinateClueInfo("South of Iorwerth Camp.", BRASSICAN_MAGE))
		.put(new WorldPoint(2155, 3100, 0), new CoordinateClueInfo("South of Port Tyras (BJS if 76 Agility).", BRASSICAN_MAGE))
		.put(new WorldPoint(2217, 3092, 0), new CoordinateClueInfo("Poison Waste island (DLR).", BRASSICAN_MAGE))
		.put(new WorldPoint(3830, 3060, 0), new CoordinateClueInfo("Small island located north-east of Mos Le'Harmless.", BRASSICAN_MAGE, true, Varbits.FIRE_PIT_MOS_LE_HARMLESS))
		.put(new WorldPoint(2834, 3271, 0), new CoordinateClueInfo("Crandor island.", BRASSICAN_MAGE))
		.put(new WorldPoint(2732, 3284, 0), new CoordinateClueInfo("Witchaven.", BRASSICAN_MAGE))
		.put(new WorldPoint(3622, 3320, 0), new CoordinateClueInfo("Meiyerditch. Outside mine.", BRASSICAN_MAGE))
		.put(new WorldPoint(2303, 3328, 0), new CoordinateClueInfo("East of Prifddinas.", BRASSICAN_MAGE))
		.put(new WorldPoint(3570, 3405, 0), new CoordinateClueInfo("North of Dessous's tomb from Desert Treasure.", BRASSICAN_MAGE))
		.put(new WorldPoint(2840, 3423, 0), new CoordinateClueInfo("Water Obelisk Island.", BRASSICAN_MAGE))
		.put(new WorldPoint(3604, 3564, 0), new CoordinateClueInfo("North of the wrecked ship, outside of Port Phasmatys (ALQ).", BRASSICAN_MAGE))
		.put(new WorldPoint(3085, 3569, 0), new CoordinateClueInfo("Wilderness. Obelisk of Air.", BRASSICAN_MAGE))
		.put(new WorldPoint(2934, 2727, 0), new CoordinateClueInfo("Eastern shore of Crash Island.", BRASSICAN_MAGE))
		.put(new WorldPoint(1451, 3695, 0), new CoordinateClueInfo("West side of Lizardman Canyon with Lizardman shaman.", ANCIENT_WIZARDS))
		.put(new WorldPoint(2538, 3739, 0), new CoordinateClueInfo("Waterbirth Island. Bring a pet rock and rune thrownaxe.", BRASSICAN_MAGE))
		.put(new WorldPoint(1248, 3751, 0), new CoordinateClueInfo("In the north wing of the Farming Guild.", BRASSICAN_MAGE))
		.put(new WorldPoint(1698, 3792, 0), new CoordinateClueInfo("Arceuus church.", ANCIENT_WIZARDS))
		.put(new WorldPoint(2951, 3820, 0), new CoordinateClueInfo("Wilderness. Chaos Temple (level 38).", ANCIENT_WIZARDS))
		.put(new WorldPoint(2202, 3825, 0), new CoordinateClueInfo("Pirates' Cove, between Lunar Isle and Rellekka.", ANCIENT_WIZARDS))
		.put(new WorldPoint(1761, 3853, 0), new CoordinateClueInfo("Arceuus essence mine (CIS).", BRASSICAN_MAGE))
		.put(new WorldPoint(2090, 3863, 0), new CoordinateClueInfo("South of Lunar Isle, west of Astral altar.", ANCIENT_WIZARDS))
		.put(new WorldPoint(1442, 3878, 0), new CoordinateClueInfo("Northern area of the Lovakengj Sulphur Mine. Facemask or Slayer Helmet recommended.", BRASSICAN_MAGE))
		.put(new WorldPoint(3380, 3929, 0), new CoordinateClueInfo("Wilderness. Near Volcano.", ANCIENT_WIZARDS))
		.put(new WorldPoint(3188, 3939, 0), new CoordinateClueInfo("Wilderness. Resource Area.", BRASSICAN_MAGE))
		.put(new WorldPoint(3304, 3941, 0), new CoordinateClueInfo("Wilderness. East of Rogues' Castle.", ANCIENT_WIZARDS))
		.put(new WorldPoint(2994, 3961, 0), new CoordinateClueInfo("Wilderness. Inside Agility Training Area.", BRASSICAN_MAGE))
		.put(new WorldPoint(1769, 3418, 0), new CoordinateClueInfo("Crabclaw Isle", ANCIENT_WIZARDS))
		.build();

	// The raw clue scroll text.
	private final String text;
	// The canonical dig location.
	private final WorldPoint location;

	/**
	 * For regions which are mirrored, the location of the clue in the mirrored region.
	 */
	@Nullable
	private final WorldPoint mirrorLocation;

	/**
	 * Builds a coordinate clue; if the location is in {@link #CLUES}, copies
	 * the enemy and light-source requirements onto this clue. Every
	 * coordinate clue requires a spade.
	 */
	public CoordinateClue(String text, WorldPoint location, WorldPoint mirrorLocation)
	{
		this.text = text;
		this.location = location;
		this.mirrorLocation = mirrorLocation;
		final CoordinateClueInfo clueInfo = CLUES.get(location);
		if (clueInfo != null)
		{
			setFirePitVarbitId(clueInfo.getLightSourceVarbitId());
			setRequiresLight(clueInfo.lightRequired);
			setEnemy(clueInfo.getEnemy());
		}
		setRequiresSpade(true);
	}

	// Returns the dig location, plus the mirrored-region copy when one exists.
	@Override
	public WorldPoint[] getLocations()
	{
		if (mirrorLocation != null)
		{
			return new WorldPoint[]{location, mirrorLocation};
		}
		else
		{
			return new WorldPoint[]{location};
		}
	}

	// Renders the side-panel hint: title, known directions (if any), and the map tip.
	@Override
	public void makeOverlayHint(PanelComponent panelComponent, ClueScrollPlugin plugin)
	{
		panelComponent.getChildren().add(TitleComponent.builder().text("Coordinate Clue").build());
		final CoordinateClueInfo solution = CLUES.get(location);
		if (solution != null)
		{
			panelComponent.getChildren().add(LineComponent.builder()
				.left(solution.getDirections())
				.build());
			panelComponent.getChildren().add(LineComponent.builder().build());
		}
		panelComponent.getChildren().add(LineComponent.builder()
			.left("Click the clue scroll on your world map to see dig location.")
			.build());
	}

	// Highlights each dig tile that is in the currently loaded scene.
	@Override
	public void makeWorldOverlayHint(Graphics2D graphics, ClueScrollPlugin plugin)
	{
		for (WorldPoint worldPoint : getLocations())
		{
			LocalPoint localLocation = LocalPoint.fromWorld(plugin.getClient(), worldPoint);
			if (localLocation != null)
			{
				OverlayUtil.renderTileOverlay(plugin.getClient(), graphics, localLocation, plugin.getSpadeImage(), Color.ORANGE);
			}
		}
	}
}
package com.jetbrains.python.inspections;

import com.intellij.codeInspection.LocalInspectionToolSession;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.PsiReference;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.actions.AddCallSuperQuickFix;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import static com.jetbrains.python.PyNames.*;

/**
 * User: catherine
 *
 * Inspection to warn if call to super constructor in class is missed.
 *
 * For a class with explicit base classes (other than plain {@code object}) whose
 * ancestors define {@code __init__}, this inspection flags an {@code __init__}
 * that never invokes a super-class constructor, offering
 * {@link AddCallSuperQuickFix} when there is a single base or the class is
 * new-style.
 */
public class PyMissingConstructorInspection extends PyInspection {
  @Nls
  @NotNull
  @Override
  public String getDisplayName() {
    return PyBundle.message("INSP.NAME.missing.super.constructor");
  }

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder,
                                        boolean isOnTheFly,
                                        @NotNull LocalInspectionToolSession session) {
    return new Visitor(holder, session);
  }

  private static class Visitor extends PyInspectionVisitor {
    public Visitor(@Nullable ProblemsHolder holder, @NotNull LocalInspectionToolSession session) {
      super(holder, session);
    }

    @Override
    public void visitPyClass(final PyClass node) {
      PsiElement[] superClasses = node.getSuperClassExpressions();
      // Nothing to check for classes with no bases, or whose only base is `object`.
      if (superClasses.length == 0 || (superClasses.length == 1 && OBJECT.equals(superClasses[0].getText())))
        return;
      // No ancestor declares __init__, so there is no constructor to call.
      if (!superHasConstructor(node)) return;
      PyFunction initMethod = node.findMethodByName(INIT, false);
      if (initMethod != null) {
        // Exception classes and classes that already call a super constructor are fine.
        if (isExceptionClass(node, myTypeEvalContext) || hasConstructorCall(node, initMethod)) {
          return;
        }
        // Offer the quick fix only when the target base is unambiguous
        // (single base class) or the class is new-style.
        if (superClasses.length == 1 || node.isNewStyleClass())
          registerProblem(initMethod.getNameIdentifier(), "Call to constructor of super class is missed",
                          new AddCallSuperQuickFix(node.getSuperClasses()[0], superClasses[0].getText()));
        else
          registerProblem(initMethod.getNameIdentifier(), "Call to constructor of super class is missed");
      }
    }

    /**
     * Returns true if any ancestor class (other than `object` or the fake
     * old-style base, and excluding the class itself by name) defines __init__.
     */
    private static boolean superHasConstructor(@NotNull PyClass cls) {
      for (PyClass c : cls.iterateAncestorClasses()) {
        final String name = c.getName();
        final String className = cls.getName();
        if (!OBJECT.equals(name) && !FAKE_OLD_BASE.equals(name) && className != null && !className.equals(name) &&
            c.findMethodByName(INIT, false) != null) {
          return true;
        }
      }
      return false;
    }

    /**
     * Returns true if the class, or any of its ancestors, is an exception class
     * according to {@link PyBroadExceptionInspection#equalsException}.
     */
    private boolean isExceptionClass(@NotNull PyClass cls, @NotNull TypeEvalContext context) {
      if (PyBroadExceptionInspection.equalsException(cls, context)) {
        return true;
      }
      for (PyClass baseClass : cls.iterateAncestorClasses()) {
        if (PyBroadExceptionInspection.equalsException(baseClass, context)) {
          return true;
        }
      }
      return false;
    }

    /**
     * Walks the statement list of the given __init__ and reports whether it
     * contains a recognized super-constructor call.
     */
    private static boolean hasConstructorCall(PyClass node, PyFunction initMethod) {
      PyStatementList statementList = initMethod.getStatementList();
      CallVisitor visitor = new CallVisitor(node);
      if (statementList != null) {
        statementList.accept(visitor);
        return visitor.myHasConstructorCall;
      }
      return false;
    }

    /**
     * Recursive visitor that flips {@link #myHasConstructorCall} when it sees a
     * call recognized as a super-class constructor invocation for {@link #myClass}.
     */
    private static class CallVisitor extends PyRecursiveElementVisitor {
      private boolean myHasConstructorCall = false;
      private PyClass myClass;

      CallVisitor(PyClass node) {
        myClass = node;
      }

      @Override
      public void visitPyCallExpression(PyCallExpression node) {
        if (isConstructorCall(node, myClass))
          myHasConstructorCall = true;
      }

      /**
       * Recognizes two shapes of super-constructor call:
       *   super(Base, self).__init__(...) / super().__init__(...) — via the
       *   SUPER-named inner callee; and Base.__init__(self, ...) — via
       *   {@link #isSuperClassCall}.
       */
      private static boolean isConstructorCall(PyCallExpression expression, PyClass cl) {
        PyExpression callee = expression.getCallee();
        if (callee instanceof PyQualifiedExpression) {
          PyExpression qualifier = ((PyQualifiedExpression)callee).getQualifier();
          if (qualifier != null) {
            String tmp = "";
            if (qualifier instanceof PyCallExpression) {
              PyExpression innerCallee = ((PyCallExpression)qualifier).getCallee();
              if (innerCallee != null) {
                tmp = innerCallee.getName();
              }
              if (SUPER.equals(tmp) && (INIT.equals(callee.getName()))) {
                PyExpression[] args = ((PyCallExpression)qualifier).getArguments();
                if (args.length > 0) {
                  // super(X, ...) — X must be the class itself, `self.__class__`,
                  // or one of the ancestors.
                  String firstArg = args[0].getText();
                  if (firstArg.equals(cl.getName()) || firstArg.equals(CANONICAL_SELF+"."+ CLASS))
                    return true;
                  for (PyClass s : cl.iterateAncestorClasses()) {
                    if (firstArg.equals(s.getName()))
                      return true;
                  }
                }
                else
                  // Argument-less super() counts as a constructor call.
                  return true;
              }
            }
            if (INIT.equals(callee.getName())) {
              return isSuperClassCall(cl, qualifier);
            }
          }
        }
        return false;
      }

      /**
       * Resolves the qualifier of a `X.__init__(...)` call and reports whether
       * it resolves to one of the ancestors of the class under inspection.
       */
      private static boolean isSuperClassCall(PyClass cl, PyExpression qualifier) {
        PsiElement callingClass = null;
        if (qualifier instanceof PyCallExpression) {
          PyExpression innerCallee = ((PyCallExpression)qualifier).getCallee();
          if (innerCallee != null) {
            PsiReference ref = innerCallee.getReference();
            if (ref != null)
              callingClass = ref.resolve();
          }
        }
        else {
          PsiReference ref = qualifier.getReference();
          if (ref != null)
            callingClass = ref.resolve();
        }
        for (PyClass s : cl.iterateAncestorClasses()) {
          if (s.equals(callingClass)) {
            return true;
          }
        }
        return false;
      }
    }
  }
}
package net.finmath.time.daycount;

import java.time.LocalDate;

/**
 * Contract for day count conventions.
 *
 * An implementation supplies two related quantities for a date interval:
 * the day count via {@link #getDaycount(LocalDate, LocalDate)} and the
 * corresponding day count fraction (year fraction) via
 * {@link #getDaycountFraction(LocalDate, LocalDate)}.
 *
 * @author Christian Fries
 * @version 1.0
 */
public interface DayCountConvention {

	/**
	 * Returns the number of days between startDate and endDate counted
	 * according to this day count convention.
	 *
	 * @param startDate The start date given as a {@link java.time.LocalDate}.
	 * @param endDate The end date given as a {@link java.time.LocalDate}.
	 * @return The number of days within the given period.
	 */
	double getDaycount(LocalDate startDate, LocalDate endDate);

	/**
	 * Returns the day count fraction (year fraction) for the period from
	 * startDate to endDate according to this day count convention.
	 *
	 * @param startDate The start date given as a {@link java.time.LocalDate}.
	 * @param endDate The end date given as a {@link java.time.LocalDate}.
	 * @return The daycount year fraction corresponding to the given period.
	 */
	double getDaycountFraction(LocalDate startDate, LocalDate endDate);
}
package com.jetbrains.python.inspections;

import com.intellij.codeInsight.CodeInsightUtilBase;
import com.intellij.codeInsight.controlflow.ControlFlow;
import com.intellij.codeInsight.controlflow.ControlFlowUtil;
import com.intellij.codeInsight.controlflow.Instruction;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.codeInspection.ProblemsHolder;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.ResolveResult;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Function;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.actions.AddFieldQuickFix;
import com.jetbrains.python.codeInsight.controlflow.ReadWriteInstruction;
import com.jetbrains.python.codeInsight.controlflow.ScopeOwner;
import com.jetbrains.python.codeInsight.dataflow.scope.Scope;
import com.jetbrains.python.console.PydevConsoleRunner;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyAugAssignmentStatementNavigator;
import com.jetbrains.python.psi.impl.PyForStatementNavigator;
import com.jetbrains.python.psi.impl.PyImportStatementNavigator;
import com.jetbrains.python.psi.search.PyOverridingMethodsSearch;
import com.jetbrains.python.psi.search.PySuperMethodsSearch;
import org.jetbrains.annotations.NotNull;

import java.util.HashSet;
import java.util.Set;

/**
 * Visitor that detects unused local variables, parameters and local functions
 * by walking the control flow of each function/lambda scope: a first pass
 * collects write accesses as candidate unused elements, a second pass walks
 * read accesses backwards and removes every element whose write is reachable.
 * Collected problems are reported via {@link #registerProblems()}.
 *
 * @author oleg
 */
class PyUnusedLocalInspectionVisitor extends PyInspectionVisitor {
  private final boolean myIgnoreTupleUnpacking;
  private final HashSet<PsiElement> myUnusedElements;
  private final HashSet<PsiElement> myUsedElements;

  public PyUnusedLocalInspectionVisitor(final ProblemsHolder holder, boolean ignoreTupleUnpacking) {
    super(holder);
    myIgnoreTupleUnpacking = ignoreTupleUnpacking;
    myUnusedElements = new HashSet<PsiElement>();
    myUsedElements = new HashSet<PsiElement>();
  }

  @Override
  public void visitPyFunction(final PyFunction node) {
    processScope(node, node);
  }

  @Override
  public void visitPyLambdaExpression(final PyLambdaExpression node) {
    processScope(PsiTreeUtil.getParentOfType(node, ScopeOwner.class), node);
  }

  /** Marker exception used to abort scope analysis when locals() is called. */
  static class DontPerformException extends RuntimeException {}

  /**
   * Analyzes one scope: records unreferenced write accesses in
   * {@code myUnusedElements} and moves reachable ones to {@code myUsedElements}.
   *
   * @param owner the scope owner whose control flow is analyzed; may be null
   *              when a lambda has no enclosing scope owner
   * @param node  the element limiting which instruction elements are considered
   */
  private void processScope(final ScopeOwner owner, final PyElement node) {
    // FIX: visitPyLambdaExpression passes PsiTreeUtil.getParentOfType(...), which
    // may return null; the original dereferenced it unconditionally (NPE).
    if (owner == null) {
      return;
    }
    if (owner.getContainingFile() instanceof PyExpressionCodeFragment || PydevConsoleRunner.isInPydevConsole(owner)){
      return;
    }
    // A scope calling locals() may use any local dynamically — skip it entirely.
    if (callsLocals(owner)) return;
    // If method overrides others or is overridden, do not mark parameters as unused if they are
    final Scope scope = owner.getScope();
    final ControlFlow flow = owner.getControlFlow();
    final Instruction[] instructions = flow.getInstructions();

    // Iteration over write accesses: every write becomes an unused candidate.
    for (int i = 0; i < instructions.length; i++) {
      final Instruction instruction = instructions[i];
      final PsiElement element = instruction.getElement();
      if (element instanceof PyFunction && owner instanceof PyFunction){
        if (!myUsedElements.contains(element)){
          myUnusedElements.add(element);
        }
      }
      else if (instruction instanceof ReadWriteInstruction) {
        final String name = ((ReadWriteInstruction)instruction).getName();
        // Ignore empty, wildcards or global names
        if (name == null || "_".equals(name) || scope.isGlobal(name)) {
          continue;
        }
        // Ignore elements out of scope
        if (element == null || !PsiTreeUtil.isAncestor(node, element, false)){
          continue;
        }
        // Ignore arguments of import statement
        if (PyImportStatementNavigator.getImportStatementByElement(element) != null) {
          continue;
        }
        // Qualified names (obj.attr) are attribute writes, not locals.
        if (element instanceof PyQualifiedExpression && ((PyQualifiedExpression)element).getQualifier() != null) {
          continue;
        }
        final ReadWriteInstruction.ACCESS access = ((ReadWriteInstruction)instruction).getAccess();
        // WriteAccess
        if (access.isWriteAccess()) {
          if (!myUsedElements.contains(element)){
            myUnusedElements.add(element);
          }
        }
      }
    }

    // Iteration over read accesses: walk backwards from each read and mark the
    // writes that feed it as used.
    for (int i = 0; i < instructions.length; i++) {
      final Instruction instruction = instructions[i];
      if (instruction instanceof ReadWriteInstruction) {
        final String name = ((ReadWriteInstruction)instruction).getName();
        if (name == null) {
          continue;
        }
        final PsiElement element = instruction.getElement();
        // Ignore elements out of scope
        if (element == null || !PsiTreeUtil.isAncestor(node, element, false)){
          continue;
        }
        final ReadWriteInstruction.ACCESS access = ((ReadWriteInstruction)instruction).getAccess();
        // Read or self assign access
        if (access.isReadAccess()) {
          int number = i;
          if (access == ReadWriteInstruction.ACCESS.READWRITE) {
            // For augmented assignment (x += ...) start from the statement itself.
            final PyAugAssignmentStatement augAssignmentStatement = PyAugAssignmentStatementNavigator.getStatementByTarget(element);
            number = ControlFlowUtil.findInstructionNumberByElement(instructions, augAssignmentStatement);
          }
          // Check out of scope resolve elements, processes nested scopes
          if (element instanceof PyReferenceExpression){
            for (ResolveResult result : ((PyReferenceExpression)element).getReference().multiResolve(false)) {
              final PsiElement resolveElement = result.getElement();
              // in case when we resolve out of the scope we still can have imported statements with write accesses inside
              if (resolveElement != null && !PsiTreeUtil.isAncestor(owner, resolveElement, false)){
                myUsedElements.add(element);
                myUsedElements.add(resolveElement);
                myUnusedElements.remove(element);
                myUnusedElements.remove(resolveElement);
              }
            }
          }
          ControlFlowUtil.iteratePrev(number, instructions, new Function<Instruction, ControlFlowUtil.Operation>() {
            public ControlFlowUtil.Operation fun(final Instruction inst) {
              final PsiElement element = inst.getElement();
              // Mark function as used
              if (element instanceof PyFunction){
                if (name.equals(((PyFunction)element).getName())){
                  myUsedElements.add(element);
                  myUnusedElements.remove(element);
                  return ControlFlowUtil.Operation.CONTINUE;
                }
              }
              // Mark write access as used
              else if (inst instanceof ReadWriteInstruction) {
                final ReadWriteInstruction rwInstruction = (ReadWriteInstruction)inst;
                if (!name.equals(rwInstruction.getName()) || !rwInstruction.getAccess().isWriteAccess()) {
                  return ControlFlowUtil.Operation.NEXT;
                }
                // Ignore elements out of scope
                if (element == null || !PsiTreeUtil.isAncestor(node, element, false)) {
                  return ControlFlowUtil.Operation.CONTINUE;
                }
                myUsedElements.add(element);
                myUnusedElements.remove(element);
                // In case when assignment is inside try part we should move further
                if (PsiTreeUtil.getParentOfType(element, PyTryPart.class) != null) {
                  return ControlFlowUtil.Operation.NEXT;
                }
                return ControlFlowUtil.Operation.CONTINUE;
              }
              return ControlFlowUtil.Operation.NEXT;
            }
          });
        }
      }
    }
  }

  /**
   * Returns true if the scope contains a direct call to locals() (nested
   * functions are not descended into).
   */
  private static boolean callsLocals(final ScopeOwner owner) {
    try {
      owner.acceptChildren(new PyRecursiveElementVisitor(){
        @Override
        public void visitPyCallExpression(final PyCallExpression node) {
          // FIX: getCallee() can return null (see the null checks on callee
          // elsewhere in this codebase); the original would NPE here.
          final PyExpression callee = node.getCallee();
          if (callee != null && "locals".equals(callee.getText())){
            throw new DontPerformException();
          }
        }

        @Override
        public void visitPyFunction(final PyFunction node) {
          // stop here
        }
      });
    }
    catch (DontPerformException e) {
      return true;
    }
    return false;
  }

  /**
   * Reports a warning (and quick fixes where applicable) for every element
   * still marked unused, honoring {@link UnusedLocalFilter} extensions.
   */
  void registerProblems() {
    final UnusedLocalFilter[] filters = Extensions.getExtensions(UnusedLocalFilter.EP_NAME);
    // Register problems
    Set<PyFunction> functionsWithInheritors = new HashSet<PyFunction>();
    for (PsiElement element : myUnusedElements) {
      boolean ignoreUnused = false;
      for (UnusedLocalFilter filter : filters) {
        if (filter.ignoreUnused(element)) {
          ignoreUnused = true;
        }
      }
      if (ignoreUnused) continue;
      // Local function
      if (element instanceof PyFunction){
        final PsiElement nameIdentifier = ((PyFunction)element).getNameIdentifier();
        registerWarning(nameIdentifier == null ? element : nameIdentifier,
                        PyBundle.message("INSP.unused.locals.local.function.isnot.used", ((PyFunction)element).getName()));
      }
      // Local variable or parameter
      else {
        String name = element.getText();
        if (element instanceof PyNamedParameter || element.getParent() instanceof PyNamedParameter) {
          PyNamedParameter namedParameter = element instanceof PyNamedParameter
                                            ? (PyNamedParameter)element
                                            : (PyNamedParameter)element.getParent();
          name = namedParameter.getName();
          // In case when function is declared inside the class first parameter is either self or class and shouldn't be processed
          if (((PyParameterList)namedParameter.getParent()).getParameters()[0] == namedParameter &&
              PsiTreeUtil.getParentOfType(element, PyClass.class) != null){
            continue;
          }
          boolean isInitMethod = false;
          PyClass containingClass = null;
          PyParameterList paramList = PsiTreeUtil.getParentOfType(element, PyParameterList.class);
          if (paramList != null && paramList.getParent() instanceof PyFunction) {
            PyFunction func = (PyFunction)paramList.getParent();
            containingClass = func.getContainingClass();
            if (PyNames.INIT.equals(func.getName()) && containingClass != null) {
              isInitMethod = true;
            }
            else if (ignoreUnusedParameters(func, functionsWithInheritors)) {
              continue;
            }
          }
          // In __init__ offer to turn the unused parameter into a field.
          LocalQuickFix[] fixes = isInitMethod
                                  ? new LocalQuickFix[]{ new AddFieldQuickFix(name, containingClass, name) }
                                  : LocalQuickFix.EMPTY_ARRAY;
          registerWarning(element, PyBundle.message("INSP.unused.locals.parameter.isnot.used", name), fixes);
        }
        else {
          if (myIgnoreTupleUnpacking && isTupleUnpacking(element)) {
            continue;
          }
          // For unused `for` loop targets offer to replace with a wildcard.
          if (PyForStatementNavigator.getPyForStatementByIterable(element) != null) {
            registerProblem(element, PyBundle.message("INSP.unused.locals.local.variable.isnot.used", name),
                            ProblemHighlightType.LIKE_UNUSED_SYMBOL, null, new ReplaceWithWildCard());
          }
          else {
            registerWarning(element, PyBundle.message("INSP.unused.locals.local.variable.isnot.used", name));
          }
        }
      }
    }
  }

  /**
   * Returns true for methods that override a super method or are themselves
   * overridden — their unused parameters must be kept; caches positives.
   */
  private static boolean ignoreUnusedParameters(PyFunction func, Set<PyFunction> functionsWithInheritors) {
    if (functionsWithInheritors.contains(func)) {
      return true;
    }
    if (PySuperMethodsSearch.search(func).findFirst() != null ||
        PyOverridingMethodsSearch.search(func, true).findFirst() != null) {
      functionsWithInheritors.add(func);
      return true;
    }
    return false;
  }

  /**
   * Returns true when the element is a tuple-unpacking target whose sibling
   * items are not all unused (partially used tuples are ignored).
   */
  private boolean isTupleUnpacking(PsiElement element) {
    if (!(element instanceof PyTargetExpression)) {
      return false;
    }
    if (element.getParent() instanceof PyTupleExpression) {
      // if all the items of the tuple are unused, we still highlight all of them; if some are unused, we ignore
      PyTupleExpression tuple = (PyTupleExpression)element.getParent();
      for (PyExpression expression : tuple.getElements()) {
        if (!myUnusedElements.contains(expression)) {
          return true;
        }
      }
    }
    return false;
  }

  /** Registers an unused-symbol style problem with the supplied quick fixes. */
  private void registerWarning(@NotNull final PsiElement element, final String msg, LocalQuickFix... quickfixes) {
    registerProblem(element, msg, ProblemHighlightType.LIKE_UNUSED_SYMBOL, null, quickfixes);
  }

  /** Quick fix replacing an unused `for` loop target with the wildcard `_`. */
  private static class ReplaceWithWildCard implements LocalQuickFix {
    @NotNull
    public String getName() {
      return PyBundle.message("INSP.unused.locals.replace.with.wildcard");
    }

    public void applyFix(@NotNull final Project project, @NotNull final ProblemDescriptor descriptor) {
      if (!CodeInsightUtilBase.preparePsiElementForWrite(descriptor.getPsiElement())) {
        return;
      }
      replace(descriptor.getPsiElement());
    }

    private void replace(final PsiElement psiElement) {
      // Generate a dummy `for _ in ...` statement and graft its target over
      // the unused element, inside a write command.
      final PyFile pyFile = (PyFile)PyElementGenerator.getInstance(psiElement.getProject())
        .createDummyFile(LanguageLevel.getDefault(), "for _ in tuples:\n pass");
      final PyExpression target = ((PyForStatement)pyFile.getStatements().get(0)).getForPart().getTarget();
      CommandProcessor.getInstance().executeCommand(psiElement.getProject(), new Runnable() {
        public void run() {
          ApplicationManager.getApplication().runWriteAction(new Runnable() {
            public void run() {
              psiElement.replace(target);
            }
          });
        }
      }, getName(), null);
    }

    @NotNull
    public String getFamilyName() {
      return getName();
    }
  }
}
package no.ntnu.okse.protocol.amqp;

import no.ntnu.okse.core.messaging.Message;
import no.ntnu.okse.core.subscription.SubscriptionService;
import no.ntnu.okse.protocol.AbstractProtocolServer;
import org.apache.log4j.Logger;
import org.apache.qpid.proton.engine.Collector;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Pipe;

/**
 * Lazily-initialized singleton protocol server for AMQP. The first call to
 * {@link #getInstance()} constructs the instance on port 61050; {@link #boot()}
 * starts a dedicated thread that runs the Proton {@link Driver} event loop.
 */
public class AMQProtocolServer extends AbstractProtocolServer {

    private static Logger log;
    private static Thread _serverThread;
    private static boolean _invoked;

    private static final String configurationFile = "";

    private static AMQProtocolServer _singleton;
    private static String hostname = "0.0.0.0";

    private Driver driver;
    private AMQPServer server;

    /** Instances are only created through {@link #getInstance()}. */
    private AMQProtocolServer(Integer port) {
        this.init(port);
    }

    /**
     * Returns the singleton, creating it on first call with the default port.
     */
    public static AMQProtocolServer getInstance() {
        if (!_invoked) {
            _singleton = new AMQProtocolServer(61050);
        }
        return _singleton;
    }

    @Override
    protected void init(Integer port) {
        log = Logger.getLogger(AMQProtocolServer.class.getName());
        protocolServerType = "AMQP";
        _invoked = true;
        this.port = port;
    }

    /**
     * Starts the server thread unless it is already running.
     */
    @Override
    public void boot() {
        if (_running) {
            return;
        }
        _running = true;
        _serverThread = new Thread(this::run);
        _serverThread.setName("AMQProtocolServer");
        _serverThread.start();
        log.info("AMQProtocolServer booted successfully");
    }

    /**
     * Wires up the subscription handler, AMQP server and Proton driver, then
     * blocks in the driver's event loop.
     */
    @Override
    public void run() {
        try {
            Collector eventCollector = Collector.Factory.create();
            SubscriptionHandler subscriptionHandler = new SubscriptionHandler();
            SubscriptionService.getInstance().addSubscriptionChangeListener(subscriptionHandler);
            server = new AMQPServer(subscriptionHandler, false);
            driver = new Driver(eventCollector, new Handshaker(), new FlowController(1024),
                    subscriptionHandler, server);
            driver.listen(hostname, port);
            driver.run();
        } catch (IOException e) {
            totalErrors++;
            log.error("I/O exception during accept(): " + e.getMessage());
        }
    }

    @Override
    public void stopServer() {
        _running = false;
        //TODO: implement driver.stop()
    }

    @Override
    public String getProtocolServerType() {
        return protocolServerType;
    }

    /**
     * Queues the message for delivery unless it originated from this protocol.
     */
    @Override
    public void sendMessage(Message message) {
        if (!message.getOriginProtocol().equals(protocolServerType)) {
            server.addMessageToQueue(message);
        }
    }

    public void incrementTotalMessagesSent() {
        totalMessagesSent++;
    }

    public void incrementTotalMessagesRecieved() {
        totalMessagesRecieved++;
    }

    public void incrementTotalRequests() {
        totalRequests++;
    }

    public void incrementTotalBadRequest() {
        totalBadRequests++;
    }

    public void incrementTotalErrors() {
        totalErrors++;
    }

    public Driver getDriver() {
        return driver;
    }
}
package com.camunda.fox.cycle.test;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;

import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response.Status;

import junit.framework.Assert;

import org.apache.commons.io.FileUtils;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import com.camunda.fox.cycle.connector.ConnectorLoginMode;
import com.camunda.fox.cycle.connector.ConnectorNode;
import com.camunda.fox.cycle.connector.ConnectorNodeType;
import com.camunda.fox.cycle.connector.vfs.VfsConnector;
import com.camunda.fox.cycle.entity.ConnectorConfiguration;
import com.camunda.fox.cycle.util.IoUtil;
import com.camunda.fox.cycle.web.dto.BpmnDiagramDTO;
import com.camunda.fox.cycle.web.dto.ConnectorConfigurationDTO;
import com.camunda.fox.cycle.web.dto.ConnectorNodeDTO;
import com.camunda.fox.cycle.web.dto.RoundtripDTO;
import com.camunda.fox.cycle.web.dto.SynchronizationResultDTO;
import com.camunda.fox.cycle.web.dto.SynchronizationResultDTO.SynchronizationStatus;
import com.camunda.fox.cycle.web.dto.UserDTO;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.json.JSONConfiguration;
import com.sun.jersey.client.apache4.ApacheHttpClient4;
import com.sun.jersey.client.apache4.config.DefaultApacheHttpClient4Config;

/**
 * Integration test for cycle roundtrip synchronization against a deployed
 * cycle instance. Setup waits for the deployment to answer, creates an initial
 * user, a VFS connector and a roundtrip; the tests synchronize LEFT_TO_RIGHT
 * and RIGHT_TO_LEFT; cleanup deletes everything it created.
 */
public class TestCycleRoundtrip {

  private static final File VFS_DIRECTORY = new File("target/vfs-repository");
  private static final String TMP_DIR_NAME = "cycle-integration-test";

  private static final String LHS_PROCESS_DIAGRAM = "/com/camunda/fox/cycle/roundtrip/repository/test-lhs.bpmn";
  private static final String RHS_PROCESS_DIAGRAM = "/com/camunda/fox/cycle/roundtrip/repository/test-rhs.bpmn";

  private static final String USER_ID = "1";

  private static final String HOST_NAME = "localhost";
  // NOTE(review): port is hard-coded; other containers (glassfish/jboss/was)
  // use different ports — see version control history for the profile switch.
  private static String httpPort = "8080";
  private static final String CYCLE_BASE_PATH = "http://" + HOST_NAME + ":" + httpPort + "/cycle/";

  private static ApacheHttpClient4 client;
  private static VfsConnector vfsConnector;
  private static RoundtripDTO roundtripDTO;
  private static DefaultHttpClient defaultHttpClient;
  private static Long vfsConnectorId;

  /**
   * Waits up to ~60s for the cycle deployment to respond, then creates the
   * initial user, the VFS connector and the roundtrip under test.
   */
  @BeforeClass
  public static void testCycleDeployment() throws Exception {
    ClientConfig clientConfig = new DefaultApacheHttpClient4Config();
    clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, Boolean.TRUE);
    client = ApacheHttpClient4.create(clientConfig);
    defaultHttpClient = (DefaultHttpClient) client.getClientHandler().getHttpClient();

    boolean success = false;
    // Poll the base URL until it answers with 200 OK (max 31 tries, 2s apart).
    for (int i = 0; i <= 30; i++) {
      try {
        WebResource webResource = client.resource(CYCLE_BASE_PATH);
        ClientResponse clientResponse = webResource.get(ClientResponse.class);
        int status = clientResponse.getStatus();
        clientResponse.close();
        if (status == Status.OK.getStatusCode()) {
          success = true;
          break;
        }
      } catch (Exception e) {
        // deployment not reachable yet — keep polling
      }
      Thread.sleep(2000);
    }

    if (success) {
      createInitialUserAndLogin();
      createVfsConnector();
      createRoundtripWithDetails();
    } else {
      Assert.fail("Cycle is not available! Check cycle deployment.");
    }
  }

  @Test
  public void testLeftToRightSynchronisation() throws Exception {
    WebResource webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/roundtrip/" + roundtripDTO.getId() + "/sync?syncMode=LEFT_TO_RIGHT");
    ClientResponse clientResponse = webResource.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).post(ClientResponse.class);

    SynchronizationStatus synchronizationStatus = clientResponse.getEntity(SynchronizationResultDTO.class).getStatus();
    Assert.assertEquals(SynchronizationStatus.SYNC_SUCCESS, synchronizationStatus);

    int status = clientResponse.getStatus();
    clientResponse.close();
    Assert.assertEquals(Status.OK.getStatusCode(), status);
  }

  @Test
  public void testRightToLeftSynchronisation() throws Exception {
    WebResource webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/roundtrip/" + roundtripDTO.getId() + "/sync?syncMode=RIGHT_TO_LEFT");
    ClientResponse clientResponse = webResource.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).post(ClientResponse.class);

    SynchronizationStatus synchronizationStatus = clientResponse.getEntity(SynchronizationResultDTO.class).getStatus();
    Assert.assertEquals(SynchronizationStatus.SYNC_SUCCESS, synchronizationStatus);

    int status = clientResponse.getStatus();
    clientResponse.close();
    Assert.assertEquals(Status.OK.getStatusCode(), status);
  }

  /**
   * Removes everything the setup created: roundtrip, connector, user and the
   * local VFS directory.
   */
  @AfterClass
  public static void cleanUp() throws Exception {
    // login with created user
    executeCycleLogin();
    deleteRoundtrip();
    deleteConnector();
    deleteUser();
    cleanVfsTargetDirectory(VFS_DIRECTORY);
    defaultHttpClient.getConnectionManager().shutdown();
  }

  /** Performs a form-based container login and returns the HTTP status code. */
  private static int executeCycleLogin() throws Exception {
    HttpPost httpPost = new HttpPost(CYCLE_BASE_PATH + "j_security_check");
    List<NameValuePair> parameterList = new ArrayList<NameValuePair>();
    parameterList.add(new BasicNameValuePair("j_username", "test"));
    parameterList.add(new BasicNameValuePair("j_password", "test"));
    httpPost.setEntity(new UrlEncodedFormEntity(parameterList, "UTF-8"));

    HttpResponse httpResponse = defaultHttpClient.execute(httpPost);
    int status = httpResponse.getStatusLine().getStatusCode();
    httpResponse.getEntity().getContent().close();
    return status;
  }

  /** Creates the first-time-setup admin user, then logs in with it. */
  private static void createInitialUserAndLogin() throws Exception {
    // create initial user
    WebResource webResource = client.resource(CYCLE_BASE_PATH + "app/first-time-setup");
    UserDTO userDTO = new UserDTO();
    userDTO.setName("test");
    userDTO.setPassword("test");
    userDTO.setEmail("test@camunda.com");
    userDTO.setAdmin(true);
    ClientResponse clientResponse = webResource.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).post(ClientResponse.class, userDTO);
    int status = clientResponse.getStatus();
    clientResponse.close();
    Assert.assertEquals(Status.OK.getStatusCode(), status);

    // login with created user; the container answers with a 302 redirect
    status = executeCycleLogin();
    Assert.assertEquals(302, status);
  }

  /** Registers a file-system connector pointing at {@link #VFS_DIRECTORY}. */
  private static void createVfsConnector() {
    WebResource webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/connector/configuration");
    ConnectorConfiguration connectorConfiguration = new ConnectorConfiguration();
    connectorConfiguration.setConnectorName("FileSystemConnector");
    connectorConfiguration.setName("FileSystemConnector");
    connectorConfiguration.setLoginMode(ConnectorLoginMode.LOGIN_NOT_REQUIRED);
    connectorConfiguration.getProperties().put(VfsConnector.BASE_PATH_KEY, VFS_DIRECTORY.getAbsolutePath());
    connectorConfiguration.setConnectorClass(VfsConnector.class.getName());

    ClientResponse clientResponse = webResource.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).post(ClientResponse.class, ConnectorConfigurationDTO.wrap(connectorConfiguration));
    ConnectorConfigurationDTO entity = clientResponse.getEntity(ConnectorConfigurationDTO.class);
    vfsConnectorId = entity.getConnectorId();
    Assert.assertNotNull(vfsConnectorId);
    int status = clientResponse.getStatus();
    clientResponse.close();
    Assert.assertEquals(Status.OK.getStatusCode(), status);

    // init local instance of VfsConnector
    // (required for creating connector nodes in VFS_DIRECTORY)
    vfsConnector = new VfsConnector();
    vfsConnector.setConfiguration(connectorConfiguration);
    vfsConnector.init();
  }

  /** Creates the roundtrip and fills in its LHS and RHS diagram details. */
  private static void createRoundtripWithDetails() throws Exception {
    WebResource webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/roundtrip");

    // create roundtrip
    roundtripDTO = new RoundtripDTO();
    roundtripDTO.setName("test");
    ClientResponse clientResponse = webResource.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).post(ClientResponse.class, roundtripDTO);
    roundtripDTO = clientResponse.getEntity(RoundtripDTO.class);
    Assert.assertNotNull(roundtripDTO.getId());
    int status = clientResponse.getStatus();
    clientResponse.close();
    Assert.assertEquals(Status.OK.getStatusCode(), status);

    // update roundtrip details LHS
    webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/roundtrip/" + roundtripDTO.getId() + "/details");
    BpmnDiagramDTO leftHandSide = new BpmnDiagramDTO();
    leftHandSide.setModeler("lhs-modeler");
    ConnectorNodeDTO lhsConnectorNodeParentFolder = createConnectorNodeParentFolder();
    leftHandSide.setConnectorNode(lhsConnectorNodeParentFolder);
    createConnectorNode(lhsConnectorNodeParentFolder, LHS_PROCESS_DIAGRAM);

    // update roundtrip details RHS
    BpmnDiagramDTO rightHandSide = new BpmnDiagramDTO();
    rightHandSide.setModeler("rhs-modeler");
    ConnectorNodeDTO rhsConnectorNodeParentFolder = createConnectorNodeParentFolder();
    rightHandSide.setConnectorNode(rhsConnectorNodeParentFolder);
    createConnectorNode(rhsConnectorNodeParentFolder, RHS_PROCESS_DIAGRAM);

    roundtripDTO.setLeftHandSide(leftHandSide);
    roundtripDTO.setRightHandSide(rightHandSide);
    clientResponse = webResource.accept(MediaType.APPLICATION_JSON).type(MediaType.APPLICATION_JSON).post(ClientResponse.class, roundtripDTO);
    status = clientResponse.getStatus();
    clientResponse.close();
    Assert.assertEquals(Status.OK.getStatusCode(), status);
  }

  /** Creates the shared parent folder node below the connector root. */
  private static ConnectorNodeDTO createConnectorNodeParentFolder() {
    ConnectorNode connectorParentNode = vfsConnector.createNode(vfsConnector.getRoot().getId(), TMP_DIR_NAME, ConnectorNodeType.FOLDER);
    connectorParentNode.setConnectorId(vfsConnectorId);
    ConnectorNodeDTO connectorParentNodeDTO = new ConnectorNodeDTO(connectorParentNode);
    return connectorParentNodeDTO;
  }

  /** Creates a BPMN file node under the given folder from a classpath resource. */
  private static void createConnectorNode(ConnectorNodeDTO connectorNodeParentFolder, String processDiagramPath) throws Exception {
    InputStream modelInputStream = IoUtil.readFileAsInputStream(processDiagramPath);
    String label = processDiagramPath.substring(processDiagramPath.lastIndexOf("/") + 1, processDiagramPath.length());
    ConnectorNode connectorNode = vfsConnector.createNode(connectorNodeParentFolder.getId(), label, ConnectorNodeType.BPMN_FILE);
    connectorNode.setConnectorId(vfsConnectorId);
    vfsConnector.updateContent(connectorNode, modelInputStream);
  }

  /** Deletes and recreates the local VFS repository directory. */
  private static void cleanVfsTargetDirectory(File directory) throws IOException {
    if (directory.exists()) {
      if (directory.isDirectory()) {
        FileUtils.deleteDirectory(directory);
      } else {
        throw new IllegalArgumentException("Not a directory: " + directory);
      }
    }
    if (!directory.mkdirs()) {
      throw new IllegalArgumentException("Could not clean: " + directory);
    }
  }

  /** Deletes every user returned by the user resource. */
  private static void deleteUser() throws Exception {
    WebResource webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/user");
    ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    List<Map> users = response.getEntity(List.class);
    response.close();
    for (Map userDTO : users) {
      // FIX: the original concatenated ".../user" + id without a separator,
      // producing ".../user<id>" so no user was ever deleted. Use ".../user/<id>"
      // as all other resource paths in this class do.
      webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/user/" + userDTO.get("id"));
      ClientResponse clientResponse = webResource.delete(ClientResponse.class);
      clientResponse.close();
    }
    // FIX: removed the stray extra delete() after the loop — it re-deleted the
    // last user's resource (or hit the collection URL when no users existed).
  }

  /** Deletes every roundtrip returned by the roundtrip resource. */
  private static void deleteRoundtrip() {
    WebResource webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/roundtrip");
    ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    List<Map> roundtrips = response.getEntity(List.class);
    response.close();
    for (Map roundtripDTO : roundtrips) {
      webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/roundtrip/" + roundtripDTO.get("id"));
      ClientResponse clientResponse = webResource.delete(ClientResponse.class);
      clientResponse.close();
    }
  }

  /** Deletes every connector configuration and disposes the local connector. */
  private static void deleteConnector() {
    WebResource webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/connector/configuration");
    ClientResponse response = webResource.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    List<Map> entity = response.getEntity(List.class);
    response.close();
    for (Map<String, Object> connectorConfigurationDTO : entity) {
      // FIX: the original concatenated ".../configuration" + id without a
      // separator; use ".../configuration/<id>" like the other resource paths.
      webResource = client.resource(CYCLE_BASE_PATH + "app/secured/resource/connector/configuration/" + connectorConfigurationDTO.get("connectorId"));
      ClientResponse clientResponse = webResource.delete(ClientResponse.class);
      clientResponse.close();
    }
    if (vfsConnector != null) {
      vfsConnector.dispose();
    }
  }
}
// This file is part of Serleena.
// Nicola Mometto, Filippo Sestini, Tobia Tesan, Sebastiano Valle.
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// all copies or substantial portions of the Software.
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
package com.kyloth.serleena.persistence.sqlite;

import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.media.Image;

import com.kyloth.serleena.common.CheckpointReachedTelemetryEvent;
import com.kyloth.serleena.common.EmergencyContact;
import com.kyloth.serleena.common.GeoPoint;
import com.kyloth.serleena.common.HeartRateTelemetryEvent;
import com.kyloth.serleena.common.IQuadrant;
import com.kyloth.serleena.common.LocationTelemetryEvent;
import com.kyloth.serleena.common.TelemetryEvent;
import com.kyloth.serleena.common.UserPoint;
import com.kyloth.serleena.persistence.IExperienceStorage;
import com.kyloth.serleena.persistence.ITelemetryStorage;
import com.kyloth.serleena.persistence.ITrackStorage;
import com.kyloth.serleena.persistence.IWeatherStorage;
import com.kyloth.serleena.persistence.WeatherForecastEnum;

import java.io.File;
import java.text.DateFormat;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;

/**
 * SQLite-backed implementation of the Serleena persistence data source.
 *
 * Reads and writes Experiences, Tracks, Telemetries, user points, weather
 * forecasts, raster quadrants and emergency contacts from the application's
 * SQLite database, wrapping rows in the corresponding DAO objects.
 */
public class SerleenaSQLiteDataSource implements ISerleenaSQLiteDataSource {

    SerleenaDatabase dbHelper;
    Context context;

    /**
     * Creates a data source backed by the given database helper.
     *
     * @param context  Android context, used to resolve raster image files.
     * @param dbHelper Helper providing readable/writable database handles.
     */
    public SerleenaSQLiteDataSource(Context context, SerleenaDatabase dbHelper) {
        this.dbHelper = dbHelper;
        this.context = context;
    }

    /**
     * Implementation of ISerleenaSQLiteDataSource.getTracks().
     *
     * Queries the database for the IDs of all Tracks associated with the
     * given Experience and wraps each ID in a SQLiteDAOTrack.
     *
     * @param experience Experience whose Tracks are requested.
     * @return Enumerable collection of Tracks.
     */
    @Override
    public Iterable<SQLiteDAOTrack> getTracks(SQLiteDAOExperience experience) {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        String where = "track_experience = " + experience.id();
        Cursor result = db.query(SerleenaDatabase.TABLE_TRACKS,
                new String[] { "track_id" }, where, null, null, null, null);
        ArrayList<SQLiteDAOTrack> list = new ArrayList<SQLiteDAOTrack>();
        int columnIndex = result.getColumnIndexOrThrow("track_id");
        while (result.moveToNext()) {
            int trackId = result.getInt(columnIndex);
            list.add(new SQLiteDAOTrack(trackId, this));
        }
        result.close();
        return list;
    }

    /**
     * Implementation of ISerleenaSQLiteDataSource.getTelemetries().
     *
     * Queries the database for the IDs of all Telemetries associated with the
     * given Track, loads their events and wraps them in SQLiteDAOTelemetry
     * objects.
     *
     * @param track Track whose Telemetries are requested.
     * @return Enumerable collection of Telemetries.
     */
    @Override
    public Iterable<SQLiteDAOTelemetry> getTelemetries(SQLiteDAOTrack track) {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        String where = "telem_track = " + track.id();
        Cursor result = db.query(SerleenaDatabase.TABLE_TELEMETRIES,
                new String[] { "telem_id" }, where, null, null, null, null);
        ArrayList<SQLiteDAOTelemetry> list = new ArrayList<SQLiteDAOTelemetry>();
        int columnIndex = result.getColumnIndexOrThrow("telem_id");
        while (result.moveToNext()) {
            int telemId = result.getInt(columnIndex);
            Iterable<TelemetryEvent> events = getTelemetryEvents(telemId);
            list.add(new SQLiteDAOTelemetry(telemId, events));
        }
        result.close();
        return list;
    }

    /**
     * Implementation of ISerleenaSQLiteDataSource.addUserPoint().
     *
     * @param experience Experience the user point is added to.
     * @param point      User point to add.
     */
    @Override
    public void addUserPoint(SQLiteDAOExperience experience, UserPoint point) {
        ContentValues values = new ContentValues();
        SQLiteDatabase db = dbHelper.getWritableDatabase();
        // NOTE(review): latitude is stored in "userpoint_x" and longitude in
        // "userpoint_y" — confirm this x/y <-> lat/lon mapping matches the schema.
        values.put("userpoint_x", point.latitude());
        values.put("userpoint_y", point.longitude());
        values.put("userpoint_experience", experience.id());
        db.insert(SerleenaDatabase.TABLE_USER_POINTS, null, values);
    }

    /**
     * Implementation of ISerleenaSQLiteDataSource.createTelemetry().
     *
     * Inserts a new Telemetry row for the given Track, then inserts one event
     * row per telemetry event: location events go to the location-events
     * table, heart-rate and checkpoint events share the heart/checkpoint
     * table and are distinguished by the "eventhc_type" column.
     *
     * @param events Telemetry events the Telemetry is built from.
     * @param track  Track the Telemetry is associated with.
     */
    @Override
    public void createTelemetry(Iterable<TelemetryEvent> events, SQLiteDAOTrack track) {
        SQLiteDatabase db = dbHelper.getWritableDatabase();
        SimpleDateFormat format = SerleenaDatabase.DATE_FORMAT;

        ContentValues values = new ContentValues();
        values.put("telem_track", track.id());
        long newId = db.insert(SerleenaDatabase.TABLE_TELEMETRIES, null, values);

        for (TelemetryEvent event : events) {
            values = new ContentValues();
            if (event instanceof LocationTelemetryEvent) {
                LocationTelemetryEvent eventl = (LocationTelemetryEvent) event;
                values.put("eventl_timestamp", format.format(eventl.timestamp()));
                values.put("eventl_latitude", eventl.location().latitude());
                values.put("eventl_longitude", eventl.location().longitude());
                values.put("eventl_telem", newId);
                db.insert(SerleenaDatabase.TABLE_TELEM_EVENTS_LOCATION, null, values);
            } else if (event instanceof HeartRateTelemetryEvent) {
                HeartRateTelemetryEvent eventh = (HeartRateTelemetryEvent) event;
                values.put("eventhc_timestamp", format.format(eventh.timestamp()));
                values.put("eventhc_value", eventh.heartRate());
                values.put("eventhc_type", SerleenaDatabase.EVENT_TYPE_HEARTRATE);
                values.put("eventhc_telem", newId);
                db.insert(SerleenaDatabase.TABLE_TELEM_EVENTS_HEART_CHECKP, null, values);
            } else if (event instanceof CheckpointReachedTelemetryEvent) {
                CheckpointReachedTelemetryEvent eventc = (CheckpointReachedTelemetryEvent) event;
                values.put("eventhc_timestamp", format.format(eventc.timestamp()));
                values.put("eventhc_value", eventc.checkpointNumber());
                values.put("eventhc_type", SerleenaDatabase.EVENT_TYPE_CHECKPOINT);
                values.put("eventhc_telem", newId);
                db.insert(SerleenaDatabase.TABLE_TELEM_EVENTS_HEART_CHECKP, null, values);
            }
        }
    }

    /**
     * Implementation of IPersistenceDataSource.getExperiences().
     *
     * Returns an enumeration of all Experiences in the database, behind the
     * IExperienceStorage interface.
     *
     * @return Enumerable collection of Experiences.
     * @see com.kyloth.serleena.persistence.IPersistenceDataSource
     */
    @Override
    public Iterable<IExperienceStorage> getExperiences() {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        Cursor result = db.query(SerleenaDatabase.TABLE_EXPERIENCES,
                new String[] { "experience_id" }, null, null, null, null, null);
        int idIndex = result.getColumnIndex("experience_id");
        ArrayList<IExperienceStorage> list = new ArrayList<IExperienceStorage>();
        while (result.moveToNext()) {
            int id = result.getInt(idIndex);
            list.add(new SQLiteDAOExperience(id, this));
        }
        result.close();
        return list;
    }

    /**
     * Implementation of IPersistenceDataStorage.getWeatherInfo().
     *
     * Splits the given day into morning [06:00, 14:00), afternoon
     * [14:00, 21:00) and night [21:00, 06:00 next day) and looks up one
     * forecast per interval.
     *
     * @param location Geographical position the forecast is requested for.
     * @param date     Day the forecast is requested for.
     * @return Weather forecast, or null if the date is null or any of the
     *         three intervals has no forecast stored.
     */
    @Override
    public IWeatherStorage getWeatherInfo(GeoPoint location, Date date) {
        if (date == null)
            return null;

        // FIX: Date.getDay() returns the day of the WEEK; the deprecated
        // Date(year, month, date, ...) constructor expects the day of the
        // month, i.e. getDate().
        Date morningTime = new Date(date.getYear(), date.getMonth(), date.getDate(), 6, 0);
        Date afternoonTime = new Date(date.getYear(), date.getMonth(), date.getDate(), 14, 0);
        Date nightTime = new Date(date.getYear(), date.getMonth(), date.getDate(), 21, 0);
        // FIX: the night interval must end at 06:00 of the FOLLOWING day;
        // using the same day's morning produced an inverted (end < start)
        // interval.
        Date nextMorningTime = new Date(morningTime.getTime() + 24L * 60 * 60 * 1000);

        // FIX: Math.round(long) promotes the argument to float, which cannot
        // represent Unix timestamps exactly (24-bit mantissa) and skewed the
        // boundaries by up to ~2 minutes. Plain integer division is exact.
        int morningStart = (int) (morningTime.getTime() / 1000);
        int morningEnd = (int) (afternoonTime.getTime() / 1000);
        int afternoonStart = morningEnd;
        int afternoonEnd = (int) (nightTime.getTime() / 1000);
        int nightStart = afternoonEnd;
        int nightEnd = (int) (nextMorningTime.getTime() / 1000);

        SimpleWeather morning = getForecast(location, morningStart, morningEnd);
        SimpleWeather afternoon = getForecast(location, afternoonStart, afternoonEnd);
        SimpleWeather night = getForecast(location, nightStart, nightEnd);

        if (morning != null && afternoon != null && night != null) {
            return new SQLiteDAOWeather(morning.forecast(), afternoon.forecast(),
                    night.forecast(), morning.temperature(),
                    afternoon.temperature(), night.temperature(), date);
        }
        return null;
    }

    /**
     * Returns the quadrant whose bounds contain the given geographical
     * position.
     *
     * The +90/+180 offsets in the WHERE clause shift latitudes and longitudes
     * into non-negative ranges before comparing.
     *
     * @param location Position that must fall within the quadrant's bounds.
     * @return IQuadrant object; its raster and corner points are null when no
     *         matching row (or raster file) exists.
     */
    @Override
    public IQuadrant getQuadrant(GeoPoint location) {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        String where = "(raster_ne_corner_latitude + 90) >= " + (location.latitude() + 90) + " AND " +
                "(raster_ne_corner_longitude + 180) >= " + (location.longitude() + 180) + " AND " +
                "(raster_sw_corner_latitude + 90) <= " + (location.latitude() + 90) + " AND " +
                "(raster_sw_corner_longitude + 180) <= " + (location.longitude() + 180);
        // NOTE(review): the WHERE clause uses "raster_*" corner columns while
        // the projection uses "rect_*" names — verify both sets exist in the
        // TABLE_RASTER_MAPS schema.
        Cursor result = db.query(SerleenaDatabase.TABLE_RASTER_MAPS,
                new String[] { "raster_path", "rect_ne_corner_latitude",
                        "rect_ne_corner_longitude", "rect_sw_corner_latitude",
                        "rect_sw_corner_longitude" },
                where, null, null, null, null);
        int pathIndex = result.getColumnIndexOrThrow("raster_path");
        int ne_lat_index = result.getColumnIndexOrThrow("rect_ne_corner_latitude");
        int ne_lon_index = result.getColumnIndexOrThrow("rect_ne_corner_longitude");
        int sw_lat_index = result.getColumnIndexOrThrow("rect_sw_corner_latitude");
        int sw_lon_index = result.getColumnIndexOrThrow("rect_sw_corner_longitude");

        Bitmap bmp = null;
        GeoPoint p1 = null;
        GeoPoint p2 = null;
        // FIX: the cursor must be positioned on a row before any getString()/
        // getDouble() call. The old guard (pathIndex > -1) was always true —
        // getColumnIndexOrThrow() never returns a negative index — and the
        // cursor was never moved, so reads happened before the first row.
        if (result.moveToFirst()) {
            String fileName = result.getString(pathIndex);
            File file = new File(context.getFilesDir(), fileName);
            if (file.exists())
                bmp = BitmapFactory.decodeFile(file.getAbsolutePath());
            p1 = new GeoPoint(result.getDouble(ne_lat_index), result.getDouble(ne_lon_index));
            p2 = new GeoPoint(result.getDouble(sw_lat_index), result.getDouble(sw_lon_index));
        }

        final Bitmap finalBmp = bmp;
        final GeoPoint finalP1 = p1;
        final GeoPoint finalP2 = p2;
        result.close();
        return new IQuadrant() {
            @Override
            public Bitmap getRaster() {
                return finalBmp;
            }
            @Override
            public GeoPoint getFirstPoint() {
                return finalP1;
            }
            @Override
            public GeoPoint getSecondPoint() {
                return finalP2;
            }
        };
    }

    /**
     * Returns the emergency contacts whose coverage rectangle contains the
     * given geographical position.
     *
     * @param location Position that must fall within a contact's bounds.
     * @return Enumerable collection of emergency contacts (possibly empty).
     */
    @Override
    public Iterable<EmergencyContact> getContacts(GeoPoint location) {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        ArrayList<EmergencyContact> list = new ArrayList<EmergencyContact>();

        String where = "(contact_ne_corner_latitude + 90) >= " + (location.latitude() + 90) + " AND " +
                "(contact_ne_corner_longitude + 180) >= " + (location.longitude() + 180) + " AND " +
                "(contact_sw_corner_latitude + 90) <= " + (location.latitude() + 90) + " AND " +
                "(contact_sw_corner_longitude + 180) <= " + (location.longitude() + 180);
        Cursor result = db.query(SerleenaDatabase.TABLE_CONTACTS,
                new String[] { "contact_name", "contact_value" },
                where, null, null, null, null);
        int nameIndex = result.getColumnIndexOrThrow("contact_name");
        int valueIndex = result.getColumnIndexOrThrow("contact_value");
        while (result.moveToNext()) {
            String name = result.getString(nameIndex);
            String value = result.getString(valueIndex);
            list.add(new EmergencyContact(name, value));
        }
        result.close();
        return list;
    }

    /**
     * Returns the telemetry events associated with the Telemetry with the
     * given ID, as stored in the SQLite database.
     *
     * Heart-rate and checkpoint events are read from the shared
     * heart/checkpoint table; location events from the location table.
     *
     * @param id ID of the Telemetry whose events are requested.
     * @return Enumerable collection of telemetry events.
     */
    private Iterable<TelemetryEvent> getTelemetryEvents(int id) {
        SimpleDateFormat parser = SerleenaDatabase.DATE_FORMAT;
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        ArrayList<TelemetryEvent> list = new ArrayList<TelemetryEvent>();

        String where = "eventhc_telem = " + id;
        Cursor result = db.query(SerleenaDatabase.TABLE_TELEM_EVENTS_HEART_CHECKP,
                new String[] { "eventhc_timestamp", "eventhc_value", "eventhc_type" },
                where, null, null, null, null);
        int timestampIndex = result.getColumnIndexOrThrow("eventhc_timestamp");
        int valueIndex = result.getColumnIndexOrThrow("eventhc_value");
        int typeIndex = result.getColumnIndexOrThrow("eventhc_type");
        while (result.moveToNext()) {
            Date d = parser.parse(result.getString(timestampIndex), new ParsePosition(0));
            int value = Integer.parseInt(result.getString(valueIndex));
            String type = result.getString(typeIndex);
            TelemetryEvent event = null;
            switch (type) {
                case SerleenaDatabase.EVENT_TYPE_CHECKPOINT:
                    event = new CheckpointReachedTelemetryEvent(d, value);
                    break;
                case SerleenaDatabase.EVENT_TYPE_HEARTRATE:
                    event = new HeartRateTelemetryEvent(d, value);
                    break;
                default:
                    break;
            }
            // FIX: skip rows with an unknown event type instead of adding a
            // null element to the list.
            if (event != null)
                list.add(event);
        }
        // FIX: close this cursor before reusing the variable; the original
        // code reassigned it without closing, leaking the cursor.
        result.close();

        where = "eventl_telem = " + id;
        // FIX: column name typo "eventhl_longitude" -> "eventl_longitude";
        // createTelemetry() writes "eventl_longitude" and the lookup below
        // reads "eventl_longitude", so the projection missed the column.
        result = db.query(SerleenaDatabase.TABLE_TELEM_EVENTS_LOCATION,
                new String[] { "eventl_timestamp", "eventl_latitude", "eventl_longitude" },
                where, null, null, null, null);
        timestampIndex = result.getColumnIndexOrThrow("eventl_timestamp");
        int latitudeIndex = result.getColumnIndexOrThrow("eventl_latitude");
        int longitudeIndex = result.getColumnIndexOrThrow("eventl_longitude");
        while (result.moveToNext()) {
            Date d = parser.parse(result.getString(timestampIndex), new ParsePosition(0));
            double latitude = result.getDouble(latitudeIndex);
            double longitude = result.getDouble(longitudeIndex);
            GeoPoint location = new GeoPoint(latitude, longitude);
            list.add(new LocationTelemetryEvent(d, location));
        }
        result.close();
        return list;
    }

    /**
     * Returns the forecast — weather condition and temperature — for the
     * given geographical position and time interval.
     *
     * @param location  Geographical position the forecast is requested for.
     * @param startTime Start of the interval, in UNIX time (seconds).
     * @param endTime   End of the interval, in UNIX time (seconds).
     * @return Forecast for the interval, or null if none is stored.
     */
    private SimpleWeather getForecast(GeoPoint location, int startTime, int endTime) {
        SQLiteDatabase db = dbHelper.getReadableDatabase();
        // FIX: the original clause read "weather_end >= = " + startTime,
        // which is invalid SQL.
        String where = "weather_end >= " + startTime + " AND " +
                "weather_start <= " + endTime + " AND " +
                "(weather_ne_corner_latitude + 90) >= " + (location.latitude() + 90) + " AND " +
                "(weather_ne_corner_longitude + 180) >= " + (location.longitude() + 180) + " AND " +
                "(weather_sw_corner_latitude + 90) <= " + (location.latitude() + 90) + " AND " +
                "(weather_sw_corner_longitude + 180) <= " + (location.longitude() + 180);
        // FIX: projection typo "wheather_temperature" -> "weather_temperature",
        // matching the column index looked up below (which returned -1 before).
        Cursor result = db.query(SerleenaDatabase.TABLE_WEATHER_FORECASTS,
                new String[] { "weather_condition", "weather_temperature" },
                where, null, null, null, null);
        int conditionIndex = result.getColumnIndex("weather_condition");
        int temperatureIndex = result.getColumnIndex("weather_temperature");

        SimpleWeather weather = null;
        if (result.moveToNext()) {
            WeatherForecastEnum forecast =
                    WeatherForecastEnum.valueOf(result.getString(conditionIndex));
            int temperature = result.getInt(temperatureIndex);
            weather = new SimpleWeather(forecast, temperature);
        }
        // FIX: the original never closed this cursor.
        result.close();
        return weather;
    }

    /**
     * Represents a weather forecast at a point in time, comprising the
     * predicted condition and temperature.
     */
    private class SimpleWeather {
        private WeatherForecastEnum forecast;
        private int temperature;

        public SimpleWeather(WeatherForecastEnum forecast, int temperature) {
            this.forecast = forecast;
            this.temperature = temperature;
        }

        public WeatherForecastEnum forecast() {
            return forecast;
        }

        public int temperature() {
            return temperature;
        }
    }
}
package org.aksw.mlbenchmark.systemrunner;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Set;

import org.aksw.mlbenchmark.BenchmarkLog;
import org.aksw.mlbenchmark.ConfigLoader;
import org.aksw.mlbenchmark.ConfigLoaderException;
import org.aksw.mlbenchmark.Constants;
import org.aksw.mlbenchmark.LearningSystemInfo;
import org.aksw.mlbenchmark.MeasureMethod;
import org.aksw.mlbenchmark.Scenario;
import org.aksw.mlbenchmark.examples.ExamplesSplit;
import org.aksw.mlbenchmark.process.ProcessRunner;
import org.aksw.mlbenchmark.resultloader.ResultLoaderBase;
import org.aksw.mlbenchmark.util.FileFinder;
import org.aksw.mlbenchmark.validation.measures.MeasureMethodTwoValued;
import org.apache.commons.configuration2.BaseConfiguration;
import org.apache.commons.configuration2.CombinedConfiguration;
import org.apache.commons.configuration2.Configuration;
import org.apache.commons.configuration2.HierarchicalConfiguration;
import org.apache.commons.configuration2.ex.ConversionException;
import org.apache.commons.configuration2.tree.ImmutableNode;
import org.apache.commons.configuration2.tree.MergeCombiner;
import org.apache.commons.exec.ExecuteException;

/**
 * Actions shared between several types of steps (Cross Validation etc.). Each
 * step abstracts the execution of
 * - the training and validation run
 * - for one single learning system instance
 * - on one learning problem
 */
public abstract class CommonStep {
    // Immutable collaborators injected via the constructor.
    protected final LearningSystemInfo lsi;
    protected final Scenario scenario;
    protected final Configuration runtimeConfig;
    protected final FileFinder fileFinder;
    protected final ExamplesSplit examples;
    protected final BenchmarkLog log;
    // Mutable outcome of the most recent train()/validate() invocation.
    protected Constants.State state;
    // Captured stdout of the training run; set by train().
    protected File trainingResultFile;

    /**
     * The portion of time spent for initializing, loading data, etc. A system
     * setup portion of 0.14 for example means that 14% of the overall runtime
     * is spent for initialization and data loading and 86% is spent for actual
     * learning. This estimation helps to ensure that learning systems like the
     * DL-Learner really finish within the given execution time. For these
     * systems *their* execution time starts *after* the initialization and
     * data loading. With an overall execution time of 60 seconds and a system
     * setup portion of 0.14 the execution time set for such learning systems
     * would be 60*0.86 = 51.60 seconds. So hopefully the learning system will
     * finish within the overall 60 seconds and does not have to be killed.
     */
    protected final double systemSetupPortion = 0.14;

    /**
     * Creates a step for one learning-system instance on one learning problem.
     *
     * @param scenario      learning task + learning problem to solve
     * @param lsi           learning system instance to run
     * @param examples      positive/negative training and validation examples
     * @param runtimeConfig benchmark runtime configuration
     * @param fileFinder    resolves working directories and example/result files
     * @param log           benchmark log sink
     */
    public CommonStep(Scenario scenario, LearningSystemInfo lsi, ExamplesSplit examples,
            Configuration runtimeConfig, FileFinder fileFinder, BenchmarkLog log) {
        this.scenario = scenario;
        this.lsi = lsi;
        this.runtimeConfig = runtimeConfig;
        this.fileFinder = fileFinder;
        this.examples = examples;
        this.log = log;
    }

    /**
     * Prepares and runs the training phase. All it has before invoking this is
     * a directory to work on, the positive and negative training and
     * validation examples and the knowledge about what scenario to solve with
     * what learning system.
     * Thus,
     *
     * 1) the training examples have to be written to file
     * 2) a configuration file for the learning system has to be prepared
     * 3) the learning system has to be invoked
     * 4) the results have to be collected
     * @return A config holding the training results, i.e. the learned classifiers
     */
    public Configuration train() {
        File trainDir = fileFinder.getTrainingDir();
        trainDir.mkdirs();
        File posFile = fileFinder.getPositiveTrainingExamplesFile();
        File negFile = fileFinder.getNegativeTrainingExamplesFile();
        Set<String> trainingPos = getPositiveTrainingExamples();
        Set<String> trainingNeg = getNegativeTrainingExamples();
        // Step 1: write the example sets in the learning system's language.
        AbstractSystemRunner.writeExamplesFiles(lsi.getLanguage(), posFile.getAbsolutePath(),
                trainingPos, negFile.getAbsolutePath(), trainingNeg);
        // Step 2: assemble and persist the system-specific training config.
        File configFile = new File(trainDir, "config." + lsi.getConfigFormat());
        Configuration cc = collectTrainingConfig(runtimeConfig);
        AbstractSystemRunner.writeConfig(configFile.getAbsolutePath(), cc);
        saveLearningSystemConfig(configFile.getAbsolutePath());
        List<String> args = new LinkedList<>();
        args.add(configFile.getAbsolutePath());
        long maxExecutionTime = runtimeConfig.getLong(Constants.MAX_EXECUTION_TIME_KEY);
        final long now = System.nanoTime();
        this.trainingResultFile = new File(trainDir, "train.out");
        // Step 3: invoke the learning system's "./run" executable.
        state = simpleProcessRunner("./run", args, cc, maxExecutionTime,
                trainingResultFile.getAbsolutePath(), "learning system");
        long duration = System.nanoTime() - now;
        // Step 4: collect duration, raw output and the final state.
        Configuration result = new BaseConfiguration();
        result.setProperty(getResultKey() + "." + "duration",
                duration / 1000000000); // nanoseconds -> seconds
        ResultLoaderBase resultLoader = new ResultLoaderBase();
        try {
            resultLoader.loadResults(trainingResultFile);
            result.setProperty(getResultKey() + "." + "trainingRaw", resultLoader.getResults());
        } catch (IOException e) {
            CrossValidationRunner.logger.warn("learning system " + lsi.asString()
                    + " result cannot be read: " + e.getMessage());
            state = Constants.State.ERROR;
        }
        result.setProperty(getResultKey() + "." + "trainingResult",
                state.toString().toLowerCase());
        return result;
    }

    /**
     * Prepares and runs the validation phase. All it has before invoking this
     * method is a working directory which already holds the training results,
     * the positive and negative training and validation examples and the
     * knowledge about what scenario to solve with what learning system.
     * Thus,
     *
     * 1) the validation examples have to be written to file
     * 2) a configuration file guiding the validation process has to be written
     * 3) the validation executable has to be invoked
     * 4) the validation results have to be gathered
     * @return A config holding the validation outcome and computed measures
     */
    public Configuration validate() {
        File validateDir = fileFinder.getValidationDir();
        validateDir.mkdirs();
        File posFilename = fileFinder.getPositiveValidationExamplesFile();
        File negFilename = fileFinder.getNegativeValidationExamplesFile();
        Set<String> testingPos = getPositiveValidationExamples();
        Set<String> testingNeg = getNegativeValidationExamples();
        // Step 1: write the validation example sets.
        AbstractSystemRunner.writeExamplesFiles(lsi.getLanguage(), posFilename.getAbsolutePath(),
                testingPos, negFilename.getAbsolutePath(), testingNeg);
        // Step 2: assemble and persist the validation config.
        File configFile = new File(validateDir, "config." + lsi.getConfigFormat());
        Configuration cc = collectValidationConfig(runtimeConfig);
        AbstractSystemRunner.writeConfig(configFile.getAbsolutePath(), cc);
        List<String> args = new LinkedList<>();
        args.add(configFile.getAbsolutePath());
        File outputFile = new File(validateDir, "validateResult.prop");
        // Step 3: invoke "./validate"; maxExecutionTime 0 means no timeout here.
        state = simpleProcessRunner("./validate", args, cc, 0,
                outputFile.getAbsolutePath(), "validation system");
        String resultKey = getResultKey();
        HierarchicalConfiguration<ImmutableNode> rawValRes = null;
        if (state.equals(Constants.State.OK)) {
            try {
                rawValRes = ConfigLoader.load(outputFile.getAbsolutePath());
            } catch (ConfigLoaderException e) {
                CrossValidationRunner.logger.warn("could not load validation result: "
                        + e.getMessage());
                state = Constants.State.FAILURE;
            }
        }
        Configuration results = new BaseConfiguration();
        results.setProperty(resultKey + "." + "validationResult",
                state.toString().toLowerCase());
        // Early exit: if the run or the result load failed, persist what we have.
        if (!state.equals(Constants.State.OK)) {
            saveResultSet(results);
            return results;
        }
        // Copy the raw key/value pairs from the validation output verbatim.
        if (rawValRes != null) {
            Iterator<String> keys = rawValRes.getKeys();
            while (keys.hasNext()) {
                String key = keys.next();
                results.setProperty(resultKey + "." + "ValidationRaw" + "." + key,
                        rawValRes.getProperty(key));
            }
        }
        // Step 4: derive the configured measures from the confusion matrix.
        List<String> measures = runtimeConfig.getList(String.class,
                Constants.MEASURES_KEY, Arrays.asList("pred_acc"));
        try {
            int tp = rawValRes.getInt("tp");
            int fn = rawValRes.getInt("fn");
            int fp = rawValRes.getInt("fp");
            int tn = rawValRes.getInt("tn");
            for (String m : measures) {
                MeasureMethodTwoValued method = MeasureMethod.create(m);
                double measure = method.getMeasure(tp, fn, fp, tn);
                results.setProperty(resultKey + "." + "measure" + "." + m, measure);
            }
        } catch (ConversionException | NoSuchElementException e) {
            // tp/fn/fp/tn missing or non-numeric in the validation output.
            CrossValidationRunner.logger.warn("invalid validation results: "
                    + e.getMessage());
            state = Constants.State.ERROR;
            results.setProperty(resultKey + "." + "validationResult",
                    state.toString().toLowerCase());
        }
        saveResultSet(results);
        return results;
    }

    /** Returns the state of the most recent train()/validate() run. */
    public Constants.State getState() {
        return state;
    }

    /** True iff the most recent train()/validate() run ended in state OK. */
    public boolean isStateOk() {
        return Constants.State.OK.equals(state);
    }

    // Hooks implemented by concrete step types (e.g. cross-validation folds).
    protected abstract Set<String> getPositiveTrainingExamples();
    protected abstract Set<String> getNegativeTrainingExamples();
    protected abstract Set<String> getPositiveValidationExamples();
    protected abstract Set<String> getNegativeValidationExamples();
    protected abstract void saveLearningSystemConfig(String configFilePath);
    protected abstract String getResultKey();
    protected abstract void saveResultSet(Configuration result);

    /**
     * Collects all the settings needed to run a learning system. These
     * settings comprise
     *
     * - file names like the working directory, positive/negative examples
     *   files, the output file
     * - the learning task and learning problem to run
     * - learning system specific configuration
     *
     * @return The learning system-specific training configuration
     */
    protected Configuration collectTrainingConfig(Configuration runtimeConfig) {
        CombinedConfiguration trainConfig = new CombinedConfiguration();
        trainConfig.setNodeCombiner(new MergeCombiner());
        // file names
        trainConfig.setProperty(
                Constants.WORKDIR_KEY,
                fileFinder.getTrainingDir().getAbsolutePath());
        trainConfig.setProperty(
                Constants.POS_EXAMPLE_FILE_KEY,
                fileFinder.getPositiveTrainingExamplesFile().getAbsolutePath());
        trainConfig.setProperty(
                Constants.NEG_EXAMPLE_FILE_KEY,
                fileFinder.getNegativeTrainingExamplesFile().getAbsolutePath());
        trainConfig.setProperty(
                Constants.OUTPUT_FILE_KEY,
                fileFinder.getTrainingResultOutputFile().getAbsolutePath());
        // general settings
        trainConfig.addProperty(Constants.LEARNING_TASK_KEY, scenario.getTask());
        trainConfig.addProperty(Constants.LEARNING_PROBLEM_KEY, scenario.getProblem());
        trainConfig.addProperty(Constants.STEP_KEY, Constants.STEP_TRAIN);
        // learning system settings
        // settings declared for all learning system instances of type lsi.getLearningSystem
        Iterator<String> keysIt = runtimeConfig.getKeys(
                Constants.LEARNING_SYSTEMS_KEY + "." + lsi.getLearningSystem());
        extractLearningSystemSettings(keysIt, runtimeConfig, trainConfig);
        if (lsi.getIdentifier() != null) {
            // settings declared for a specific instance with identifier lsi.getIdentifier()
            // (instance settings are merged over the per-system defaults above)
            keysIt = runtimeConfig.getKeys(
                    Constants.LEARNING_SYSTEMS_KEY + "." + lsi.asString());
            extractLearningSystemSettings(keysIt, runtimeConfig, trainConfig);
        }
        return trainConfig;
    }

    /**
     * Copies learning-system-specific settings from the runtime config into
     * the target config, stripping the "learningsystems.&lt;system&gt;." key
     * prefix and re-rooting them under the LS-specific settings key.
     */
    private void extractLearningSystemSettings(Iterator<String> keysIt,
            Configuration runtimeConfig, Configuration targetConfig) {
        String wholeKey, key;
        Object[] val;
        while (keysIt.hasNext()) {
            wholeKey = keysIt.next();
            // strip off the "learningsystems."
            key = wholeKey.split("\\.", 2)[1];
            /* strip off the actual learning system part of the key, i.e.
             * "dllearner-1.algorithm.type" --> "algorithm.type" */
            key = key.split("\\.", 2)[1];
            val = (Object[]) runtimeConfig.getArray(Object.class, wholeKey);
            targetConfig.addProperty(Constants.LS_SPECIFIC_SETTINGS_KEY + "." + key, val);
        }
    }

    /**
     * Collects all the settings to run the validation of a learning system's
     * training output. These settings comprise
     *
     * - file names like the working directory, positive/negative examples
     *   files, input file, output file
     * - the learning task and learning problem to validate the training
     *   results for
     *
     * @return The validation configuration
     */
    protected Configuration collectValidationConfig(Configuration runtimeConfig) {
        Configuration validationConfig = new BaseConfiguration();
        // file names
        validationConfig.addProperty(
                Constants.WORKDIR_KEY,
                fileFinder.getValidationDir().getAbsolutePath());
        validationConfig.addProperty(
                Constants.POS_EXAMPLE_FILE_KEY,
                fileFinder.getPositiveValidationExamplesFile().getAbsolutePath());
        validationConfig.addProperty(
                Constants.NEG_EXAMPLE_FILE_KEY,
                fileFinder.getNegativeValidationExamplesFile().getAbsolutePath());
        validationConfig.addProperty(
                Constants.INPUT_FILE_KEY,
                fileFinder.getTrainingResultOutputFile().getAbsolutePath());
        validationConfig.addProperty(
                Constants.OUTPUT_FILE_KEY,
                fileFinder.getValidationResultOutputFile().getAbsolutePath());
        // general settings
        validationConfig.addProperty(Constants.LEARNING_TASK_KEY, scenario.getTask());
        validationConfig.addProperty(Constants.LEARNING_PROBLEM_KEY, scenario.getProblem());
        validationConfig.addProperty(Constants.STEP_KEY, Constants.STEP_VALIDATE);
        return validationConfig;
    }

    /**
     * Runs {@code command} in the learning system's directory and maps the
     * outcome to a state: OK on clean exit, TIMEOUT/FAILURE on execution
     * errors, ERROR on I/O problems, FAILURE when {@code expectedOutput} is
     * missing despite a clean exit.
     *
     * @param command          executable to run, relative to lsi.getDir()
     * @param args             command-line arguments
     * @param cc               configuration passed to the process runner
     * @param maxExecutionTime timeout in seconds (0 = none, per validate())
     * @param expectedOutput   path that must exist afterwards, or null to skip
     * @param info             human-readable label used in log messages
     * @return resulting state of the run
     */
    protected Constants.State simpleProcessRunner(String command, List<String> args,
            Configuration cc, long maxExecutionTime, String expectedOutput, String info) {
        Constants.State state = Constants.State.RUNNING;
        try {
            // Constructing the ProcessRunner starts (and waits for) the process.
            ProcessRunner processRunner = new ProcessRunner(
                    lsi.getDir(), command, args, cc, maxExecutionTime);
            state = Constants.State.OK;
        } catch (ExecuteException e) {
            // NOTE(review): 143 presumably means the process was SIGTERMed
            // (128 + 15) by the timeout watchdog — confirm against ProcessRunner.
            if (e.getExitValue() == 143) {
                CrossValidationRunner.logger.warn(info + " " + lsi.toString()
                        + " was canceled due to timeout");
                state = Constants.State.TIMEOUT;
            } else {
                CrossValidationRunner.logger.warn(info + " " + lsi.toString()
                        + " did not finish cleanly: " + e.getMessage());
                state = Constants.State.FAILURE;
            }
        } catch (IOException e) {
            CrossValidationRunner.logger.warn(info + " " + lsi.toString()
                    + " could not execute: " + e.getMessage() + "[" + e.getClass() + "]");
            state = Constants.State.ERROR;
        }
        if (expectedOutput != null) {
            File file = new File(expectedOutput);
            // A clean exit without the expected output file still counts as failure.
            if (state.equals(Constants.State.OK) && !file.isFile()) {
                CrossValidationRunner.logger.warn(info + " " + lsi.toString()
                        + " did not produce an output");
                state = Constants.State.FAILURE;
            }
        }
        return state;
    }
}
package no.deichman.services.search;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import no.deichman.services.entity.EntityService;
import no.deichman.services.entity.EntityType;
import no.deichman.services.uridefaults.XURI;
import org.apache.commons.io.IOUtils;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.NameValuePair;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.InputStreamEntity;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.Property;
import org.apache.jena.rdf.model.ResIterator;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.rdf.model.SimpleSelector;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.vocabulary.RDF;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.ws.rs.ServerErrorException;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;

import static com.google.common.collect.ImmutableMap.of;
import static com.google.common.collect.Lists.newArrayList;
import static java.lang.String.format;
import static java.net.HttpURLConnection.HTTP_INTERNAL_ERROR;
import static java.net.HttpURLConnection.HTTP_OK;
import static java.net.URLEncoder.encode;
import static java.util.Arrays.stream;
import static java.util.stream.Collectors.toList;
import static javax.ws.rs.core.Response.Status.INTERNAL_SERVER_ERROR;
import static no.deichman.services.uridefaults.BaseURI.ontology;
import static org.apache.http.impl.client.HttpClients.createDefault;
import static org.apache.jena.rdf.model.ResourceFactory.createProperty;

/**
 * Responsibility: perform indexing and searching.
 *
 * Talks to an Elasticsearch instance over HTTP (index "search", one mapping
 * type per entity type). RDF models are fetched through {@link EntityService}
 * and converted to index documents by per-type {@link ModelToIndexMapper}s.
 */
public class SearchServiceImpl implements SearchService {
    public static final Property AGENT = createProperty(ontology("agent"));
    private static final Logger LOG = LoggerFactory.getLogger(SearchServiceImpl.class);
    private static final String UTF_8 = "UTF-8";
    // used as a shift bound in createSortedListQuery -- see NOTE there
    public static final int SIXTY_ONE = 61;
    private final EntityService entityService;
    private final String elasticSearchBaseUrl;
    // one mapper per indexed entity type
    private ModelToIndexMapper workModelToIndexMapper = new ModelToIndexMapper("work");
    private ModelToIndexMapper eventModelToIndexMapper = new ModelToIndexMapper("event");
    private ModelToIndexMapper serialModelToIndexMapper = new ModelToIndexMapper("serial");
    private ModelToIndexMapper personModelToIndexMapper = new ModelToIndexMapper("person");
    private ModelToIndexMapper corporationModelToIndexMapper = new ModelToIndexMapper("corporation");
    private ModelToIndexMapper publicationModelToIndexMapper = new ModelToIndexMapper("publication");
    public static final Gson GSON = new GsonBuilder().setPrettyPrinting().create();

    public SearchServiceImpl(String elasticSearchBaseUrl, EntityService entityService) {
        this.elasticSearchBaseUrl = elasticSearchBaseUrl;
        this.entityService = entityService;
        // fail fast at construction time if the base URL is not a valid URI
        getIndexUriBuilder();
    }

    /**
     * Indexes the resource identified by xuri, including related resources
     * (e.g. a work's creators and publications).
     */
    @Override
    public final void index(XURI xuri) throws Exception {
        switch (xuri.getTypeAsEntityType()) {
            case WORK:
                doIndexWork(xuri, false, false);
                break;
            case PERSON:
            case CORPORATION:
                doIndexWorkCreator(xuri, false);
                break;
            case PUBLICATION:
                doIndexPublication(xuri);
                break;
            case EVENT:
                doIndexEvent(xuri);
                break;
            case SERIAL:
                doIndexSerial(xuri);
                break;
            default:
                doIndex(xuri);
        }
    }

    /**
     * Indexes only the resource itself, skipping re-indexing of related
     * works/creators (the boolean flags mark them as already indexed).
     */
    public final void indexOnly(XURI xuri) throws Exception {
        switch (xuri.getTypeAsEntityType()) {
            case WORK:
                doIndexWork(xuri, true, true);
                break;
            case PERSON:
            case CORPORATION:
                doIndexWorkCreator(xuri, true);
                break;
            case PUBLICATION:
                doIndexPublication(xuri);
                break;
            case EVENT:
                doIndexEvent(xuri);
                break;
            case SERIAL:
                doIndexSerial(xuri);
                break;
            default:
                doIndex(xuri);
        }
    }

    // index an event together with its linked resources
    private void doIndexEvent(XURI xuri) {
        Model eventModelWithLinkedResources = entityService.retrieveEventWithLinkedResources(xuri);
        indexDocument(xuri, eventModelToIndexMapper.createIndexDocument(eventModelWithLinkedResources, xuri));
    }

    // index a serial together with its linked resources
    private void doIndexSerial(XURI xuri) {
        Model serialModelWithLinkedResources = entityService.retrieveSerialWithLinkedResources(xuri);
        indexDocument(xuri, serialModelToIndexMapper.createIndexDocument(serialModelWithLinkedResources, xuri));
    }

    @Override
    public final Response searchPersonWithJson(String json) {
        return searchWithJson(json, getPersonSearchUriBuilder());
    }

    @Override
    public final Response searchWorkWithJson(String json, MultivaluedMap<String, String> queryParams) {
        return searchWithJson(json, getWorkSearchUriBuilder(queryParams));
    }

    @Override
    public final Response searchPublicationWithJson(String json) {
        return searchWithJson(json, getPublicationSearchUriBuilder());
    }

    @Override
    public final Response searchInstrument(String query) {
        return doSearch(query, getInstrumentSearchUriBuilder());
    }

    @Override
    public final Response searchCompositionType(String query) {
        return doSearch(query, getCompositionTypeSearchUriBuilder());
    }

    @Override
    public final Response searchEvent(String query) {
        return doSearch(query, getEventSearchUriBuilder());
    }

    /**
     * Drops the "search" index if it exists, recreates it from
     * /search_index.json and re-puts all type mappings. Any non-200 from
     * Elasticsearch is surfaced as a 500.
     */
    @Override
    public final Response clearIndex() {
        try (CloseableHttpClient httpclient = createDefault()) {
            URI uri = getIndexUriBuilder().setPath("/search").build();

            // delete the existing index only if it is actually there
            try (CloseableHttpResponse getExistingIndex = httpclient.execute(new HttpGet(uri))) {
                if (getExistingIndex.getStatusLine().getStatusCode() == HTTP_OK) {
                    try (CloseableHttpResponse delete = httpclient.execute(new HttpDelete(uri))) {
                        int statusCode = delete.getStatusLine().getStatusCode();
                        LOG.info("Delete index request returned status " + statusCode);
                        if (statusCode != HTTP_OK) {
                            throw new ServerErrorException("Failed to delete elasticsearch index", HTTP_INTERNAL_ERROR);
                        }
                    }
                }
            }
            HttpPut createIndexRequest = new HttpPut(uri);
            createIndexRequest.setEntity(new InputStreamEntity(getClass().getResourceAsStream("/search_index.json"), ContentType.APPLICATION_JSON));
            try (CloseableHttpResponse create = httpclient.execute(createIndexRequest)) {
                int statusCode = create.getStatusLine().getStatusCode();
                LOG.info("Create index request returned status " + statusCode);
                if (statusCode != HTTP_OK) {
                    throw new ServerErrorException("Failed to create elasticsearch index", HTTP_INTERNAL_ERROR);
                }
            }
            // one mapping file per indexed type, loaded from the classpath
            putIndexMapping(httpclient, "work");
            putIndexMapping(httpclient, "person");
            putIndexMapping(httpclient, "serial");
            putIndexMapping(httpclient, "corporation");
            putIndexMapping(httpclient, "place");
            putIndexMapping(httpclient, "subject");
            putIndexMapping(httpclient, "genre");
            putIndexMapping(httpclient, "publication");
            putIndexMapping(httpclient, "instrument");
            putIndexMapping(httpclient, "compositionType");
            putIndexMapping(httpclient, "event");
            putIndexMapping(httpclient, "workSeries");

            return Response.status(Response.Status.OK).build();
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR);
        }
    }

    // PUTs the mapping for one type from the classpath resource /<type>_mapping.json
    private void putIndexMapping(CloseableHttpClient httpclient, String type) throws URISyntaxException, IOException {
        URI workIndexUri = getIndexUriBuilder().setPath("/search/_mapping/" + type).build();
        HttpPut putWorkMappingRequest = new HttpPut(workIndexUri);
        putWorkMappingRequest.setEntity(new InputStreamEntity(getClass().getResourceAsStream("/" + type + "_mapping.json"), ContentType.APPLICATION_JSON));
        try (CloseableHttpResponse create = httpclient.execute(putWorkMappingRequest)) {
            int statusCode = create.getStatusLine().getStatusCode();
            LOG.info("Create mapping request for " + type + " returned status " + statusCode);
            if (statusCode != HTTP_OK) {
                throw new ServerErrorException("Failed to create elasticsearch mapping for " + type, HTTP_INTERNAL_ERROR);
            }
        }
    }

    // POSTs a raw JSON query body to the given search endpoint
    private Response searchWithJson(String body, URIBuilder searchUriBuilder) {
        try {
            HttpPost httpPost = new HttpPost(searchUriBuilder.build());
            httpPost.setEntity(new StringEntity(body, StandardCharsets.UTF_8));
            httpPost.setHeader("Content-type", "application/json");
            return executeHttpRequest(httpPost);
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR);
        }
    }

    // executes a request and copies the Elasticsearch body and headers into a JAX-RS Response
    private Response executeHttpRequest(HttpRequestBase httpRequestBase) throws IOException {
        try (CloseableHttpClient httpclient = createDefault();
             CloseableHttpResponse response = httpclient.execute(httpRequestBase)) {
            HttpEntity responseEntity = response.getEntity();
            String jsonContent = IOUtils.toString(responseEntity.getContent());
            Response.ResponseBuilder responseBuilder = Response.ok(jsonContent);
            Header[] headers = response.getAllHeaders();
            for (Header header : headers) {
                responseBuilder = responseBuilder.header(header.getName(), header.getValue());
            }
            return responseBuilder.build();
        } catch (Exception e) {
            // NOTE(review): this catch-and-rethrow is a no-op and could be removed
            throw e;
        }
    }

    @Override
    public final Response searchWork(String query) {
        return doSearch(query, getWorkSearchUriBuilder(null));
    }

    @Override
    public final Response searchPerson(String query) {
        return doSearch(query, getPersonSearchUriBuilder());
    }

    @Override
    public final Response searchPlace(String query) {
        return doSearch(query, getPlaceUriBuilder());
    }

    @Override
    public final Response searchCorporation(String query) {
        return doSearch(query, getCorporationSearchUriBuilder());
    }

    @Override
    public final Response searchSerial(String query) {
        return doSearch(query, getSerialSearchUriBuilder());
    }

    @Override
    public final Response searchSubject(String query) {
        return doSearch(query, getSubjectSearchUriBuilder());
    }

    @Override
    public final Response searchGenre(String query) {
        return doSearch(query, getGenreSearchUriBuilder());
    }

    @Override
    public final Response searchPublication(String query) {
        return doSearch(query, getPublicationSearchUriBuilder());
    }

    /**
     * Removes the document for xuri from the index. The document id is the
     * URL-encoded resource URI.
     */
    @Override
    public final void delete(XURI xuri) {
        try (CloseableHttpClient httpclient = createDefault()) {
            HttpDelete httpDelete = new HttpDelete(getIndexUriBuilder()
                    .setPath(format("/search/%s/%s", xuri.getType(), encode(xuri.getUri(), UTF_8)))
                    .build());
            try (CloseableHttpResponse putResponse = httpclient.execute(httpDelete)) {
                LOG.debug(putResponse.getStatusLine().toString());
            }
        } catch (Exception e) {
            LOG.error(format("Failed to delete %s in elasticsearch", xuri.getUri()), e);
            throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR);
        }
    }

    /**
     * Returns up to minSize documents of the given type sorted around the
     * given prefix. Types with a pre-built name index use the neighbourhood
     * query; all others fall back to a prefix-boosting query.
     */
    @Override
    public final Response sortedList(String type, String prefix, int minSize, String field) {
        EntityType entityType = EntityType.get(type);
        URIBuilder searchUriBuilder = getIndexUriBuilder().setPath("/search/" + type + "/_search").setParameter("size", Integer.toString(minSize));
        switch (entityType) {
            case PERSON:
            case CORPORATION:
            case PLACE:
            case SUBJECT:
            case EVENT:
            case WORK_SERIES:
            case SERIAL:
            case GENRE:
            case MUSICAL_INSTRUMENT:
            case MUSICAL_COMPOSITION_TYPE:
                return searchWithJson(createPreIndexedSearchQuery(prefix, minSize, entityType, field), searchUriBuilder);
            default:
                return searchWithJson(createSortedListQuery(prefix, minSize, field), searchUriBuilder);
        }
    }

    // builds a bool/should query where longer prefix matches get higher constant-score boosts
    private String createSortedListQuery(String prefix, int minSize, String field) {
        String sortedListQuery;
        List<Map> should = new ArrayList<>();
        for (int i = 0; i < prefix.length(); i++) {
            should.add(
                    of("constant_score",
                            // NOTE(review): Math.max(..., SIXTY_ONE) forces the shift amount
                            // to be at least 61; int shifts are taken mod 32, so the boost is
                            // effectively constant for all i. Math.min may have been intended
                            // -- verify against the original implementation.
                            of("boost", 2 << Math.max(prefix.length() - i, SIXTY_ONE),
                                    "query", of("match_phrase_prefix",
                                            of(field, prefix.substring(0, prefix.length() - i))))));
        }
        sortedListQuery = GSON.toJson(of(
                "size", minSize,
                "query", of(
                        "bool", of(
                                "should", should)
                )
        ));
        return sortedListQuery;
    }

    // builds a query from the cached name index: prefix match + best-match ids + neighbourhood ids
    private String createPreIndexedSearchQuery(String prefix, int minSize, EntityType entityType, String field) {
        Collection<NameEntry> nameEntries = entityService.neighbourhoodOfName(entityType, prefix, minSize);
        List<Map> should = new ArrayList<>();
        should.add(of("match_phrase_prefix", of(field, prefix)));
        should.addAll(nameEntries
                .stream()
                .filter(NameEntry::isBestMatch)
                .map(e -> of(
                        "ids", of("values", newArrayList(urlEncode(e.getUri())))))
                .collect(toList()));
        should.add(of(
                "ids", of("values", nameEntries
                        .stream()
                        .map(NameEntry::getUri)
                        .map(SearchServiceImpl::urlEncode)
                        .collect(toList())
                )
        ));
        return GSON.toJson(
                of(
                        "size", minSize,
                        "query", of(
                                "bool", of("should", should)
                        )
                )
        );
    }

    // minimal percent-encoding matching how document ids are stored (':' and '/' only)
    private static String urlEncode(String uri) {
        return uri.replace(":", "%3A").replace("/", "%2F");
    }

    /** Finds works whose nested "subjects" field references the given subject URI. */
    @Override
    public final Response searchWorkWhereUriIsSubject(String subjectUri, int maxSize) {
        String body = GSON.toJson(of(
                "size", maxSize,
                "query", of(
                        "nested", of(
                                "path", "subjects",
                                "query", of("term", of(
                                        "subjects.uri", subjectUri)
                                )
                        )
                )
        ));
        return searchWithJson(body, getIndexUriBuilder().setPath("/search/work/_search"));
    }

    @Override
    public final Response searchWorkSeries(String query) {
        return doSearch(query, getWorkSeriesSearchUriBuilder());
    }

    private URIBuilder getWorkSeriesSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/workSeries/_search");
    }

    /**
     * Indexes a publication. If the publication belongs to a work, the
     * document is built from the whole work model (with linked resources)
     * so work-level fields are included.
     */
    private void doIndexPublication(XURI pubUri) throws Exception {
        Model pubModel = entityService.retrieveById(pubUri);
        Property publicationOfProperty = ResourceFactory.createProperty(ontology("publicationOf"));
        if (pubModel.getProperty(null, publicationOfProperty) != null) {
            String workUri = pubModel.getProperty(ResourceFactory.createResource(pubUri.toString()), publicationOfProperty).getObject().toString();
            XURI workXURI = new XURI(workUri);
            pubModel = entityService.retrieveWorkWithLinkedResources(workXURI);
        }
        indexDocument(pubUri, publicationModelToIndexMapper.createIndexDocument(pubModel, pubUri));
    }

    /**
     * Indexes a work and, unless already done, its creators and its
     * publications (all derived from the same linked-resources model).
     */
    private void doIndexWork(XURI xuri, boolean indexedPerson, boolean indexedPublication) throws Exception {
        Model workModelWithLinkedResources = entityService.retrieveWorkWithLinkedResources(xuri);
        indexDocument(xuri, workModelToIndexMapper.createIndexDocument(workModelWithLinkedResources, xuri));

        if (!indexedPerson) {
            // re-index every agent (creator) referenced by the work
            for (Statement stmt : workModelWithLinkedResources.listStatements().toList()) {
                if (stmt.getPredicate().equals(AGENT)) {
                    XURI creatorXuri = new XURI(stmt.getObject().asNode().getURI());
                    doIndexWorkCreatorOnly(creatorXuri);
                }
            }
        }
        if (indexedPublication) {
            return;
        }
        // Index all publications belonging to work
        // TODO instead of iterating over all subjects, find only subjects of triples with publicationOf as predicate
        ResIterator subjectIterator = workModelWithLinkedResources.listSubjects();
        while (subjectIterator.hasNext()) {
            Resource subj = subjectIterator.next();
            if (subj.isAnon()) {
                continue;
            }
            if (subj.toString().contains("publication")) {
                XURI pubUri = new XURI(subj.toString());
                indexDocument(pubUri, publicationModelToIndexMapper.createIndexDocument(workModelWithLinkedResources, pubUri));
            }
        }
    }

    /**
     * Indexes a work creator (person or corporation) and, unless already
     * done, each of the creator's works.
     */
    private void doIndexWorkCreator(XURI creatorUri, boolean indexedWork) throws Exception {
        Model works = entityService.retrieveWorksByCreator(creatorUri);
        if (!indexedWork) {
            ResIterator subjectIterator = works.listSubjects();
            while (subjectIterator.hasNext()) {
                Resource subj = subjectIterator.next();
                // NOTE(review): the following line is corrupted in this copy of the file
                // (unterminated character literal after indexOf and a lost condition/brace);
                // it cannot compile as-is and must be restored from version control.
                // It is reproduced byte-identically below.
                if (subj.isAnon() || subj.toString().indexOf(' continue; }
                XURI workUri = new XURI(subj.toString());
                if (!workUri.getUri().equals(creatorUri.getUri())) {
                    doIndexWorkOnly(workUri);
                }
            }
        }
        switch (creatorUri.getTypeAsEntityType()) {
            case PERSON:
                indexDocument(creatorUri, personModelToIndexMapper
                        .createIndexDocument(entityService.retrievePersonWithLinkedResources(creatorUri).add(works), creatorUri));
                cacheNameIndex(creatorUri, works);
                break;
            case CORPORATION:
                indexDocument(creatorUri, corporationModelToIndexMapper
                        .createIndexDocument(entityService.retrieveCorporationWithLinkedResources(creatorUri).add(works), creatorUri));
                cacheNameIndex(creatorUri, works);
                break;
            default:
                throw new RuntimeException(format(
                        "Tried to index work creator of type %1$s. Should be %2$s or %3$s",
                        creatorUri.getTypeAsEntityType(), EntityType.PERSON, EntityType.CORPORATION
                ));
        }
    }

    // generic path for types with no dedicated doIndex* method
    private void doIndex(XURI xuri) throws Exception {
        Model indexModel = entityService.retrieveById(xuri);
        indexDocument(xuri, new ModelToIndexMapper(xuri.getTypeAsEntityType().getPath()).createIndexDocument(indexModel, xuri));
        cacheNameIndex(xuri, indexModel);
    }

    // stores name/prefLabel/work literals in the entity service's name index cache
    private void cacheNameIndex(XURI xuri, Model indexModel) {
        statementsInModelAbout(xuri, indexModel, ontology("name"), ontology("prefLabel"), ontology("work"))
                .forEachRemaining(statement -> {
                    entityService.addIndexedName(
                            xuri.getTypeAsEntityType(),
                            statement.getObject().asLiteral().toString(),
                            statement.getSubject().getURI());
                });
    }

    // selects statements with one of the given predicates whose subject has xuri's rdf:type
    private StmtIterator statementsInModelAbout(final XURI xuri, final Model indexModel, final String... predicates) {
        return indexModel.listStatements(new SimpleSelector() {
            @Override
            public boolean test(Statement s) {
                return (stream(predicates).anyMatch(p -> s.getPredicate().equals(ResourceFactory.createResource(p)))
                        && indexModel.contains(s.getSubject(), RDF.type,
                        ResourceFactory.createResource(ontology(xuri.getTypeAsEntityType().getRdfType()))));
            }
        });
    }

    private void doIndexWorkOnly(XURI xuri) throws Exception {
        doIndexWork(xuri, true, false);
    }

    /** PUTs one JSON document into the index under its URL-encoded URI. */
    private void indexDocument(XURI xuri, String document) {
        try (CloseableHttpClient httpclient = createDefault()) {
            HttpPut httpPut = new HttpPut(getIndexUriBuilder()
                    .setPath(format("/search/%s/%s", xuri.getType(), encode(xuri.getUri(), UTF_8))) // TODO drop urlencoded ID, and define _id in mapping from field uri
                    .build());
            httpPut.setEntity(new StringEntity(document, Charset.forName(UTF_8)));
            httpPut.setHeader(HttpHeaders.CONTENT_TYPE, ContentType.APPLICATION_JSON.withCharset(UTF_8).toString());
            try (CloseableHttpResponse putResponse = httpclient.execute(httpPut)) {
                LOG.debug(putResponse.getStatusLine().toString());
            }
        } catch (Exception e) {
            LOG.error(format("Failed to index %s in elasticsearch", xuri.getUri()), e);
            throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR);
        }
    }

    // simple query-string search, capped at 100 hits
    private Response doSearch(String query, URIBuilder searchUriBuilder) {
        try {
            HttpGet httpGet = new HttpGet(searchUriBuilder
                    .setParameter("q", query)
                    .setParameter("size", "100")
                    .build());
            return executeHttpRequest(httpGet);
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new ServerErrorException(e.getMessage(), INTERNAL_SERVER_ERROR);
        }
    }

    private void doIndexWorkCreatorOnly(XURI xuri) throws Exception {
        doIndexWorkCreator(xuri, true);
    }

    private URIBuilder getIndexUriBuilder() {
        try {
            return new URIBuilder(this.elasticSearchBaseUrl);
        } catch (URISyntaxException e) {
            LOG.error("Failed to create uri builder for elasticsearch");
            throw new RuntimeException(e);
        }
    }

    // forwards arbitrary caller query params (e.g. paging) to Elasticsearch
    private URIBuilder getWorkSearchUriBuilder(MultivaluedMap<String, String> queryParams) {
        URIBuilder uriBuilder = getIndexUriBuilder().setPath("/search/work/_search");
        if (queryParams != null && !queryParams.isEmpty()) {
            List<NameValuePair> nvpList = new ArrayList<>(queryParams.size());
            queryParams.forEach((key, values) -> {
                values.forEach(value -> {
                    nvpList.add(new BasicNameValuePair(key, value));
                });
            });
            uriBuilder.setParameters(nvpList);
        }
        return uriBuilder;
    }

    private URIBuilder getPersonSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/person/_search");
    }

    public final URIBuilder getPlaceUriBuilder() {
        return getIndexUriBuilder().setPath("/search/place/_search");
    }

    public final URIBuilder getCorporationSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/corporation/_search");
    }

    public final URIBuilder getSerialSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/serial/_search");
    }

    public final URIBuilder getSubjectSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/subject/_search");
    }

    public final URIBuilder getGenreSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/genre/_search");
    }

    public final URIBuilder getPublicationSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/publication/_search");
    }

    public final URIBuilder getInstrumentSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/instrument/_search");
    }

    public final URIBuilder getCompositionTypeSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/compositionType/_search");
    }

    private URIBuilder getEventSearchUriBuilder() {
        return getIndexUriBuilder().setPath("/search/event/_search");
    }
}
package org.jboss.as.arquillian.service;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

import org.jboss.as.server.deployment.DeploymentPhaseContext;
import org.jboss.as.server.deployment.DeploymentUnit;
import org.jboss.as.server.deployment.DeploymentUnitProcessingException;
import org.jboss.as.server.deployment.DeploymentUnitProcessor;
import org.jboss.as.server.deployment.annotation.AnnotationIndexUtils;
import org.jboss.as.server.deployment.module.ResourceRoot;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.junit.runner.RunWith;

/**
 * Uses the annotation index to check whether there is a class annotated with @RunWith.
 * In which case an {@link ArquillianConfig} object that names the test class is attached to the context.
 *
 * @author <a href="kabir.khan@jboss.com">Kabir Khan</a>
 * @author Thomas.Diesler@jboss.com
 */
public class ArquillianRunWithAnnotationProcessor implements DeploymentUnitProcessor {

    /**
     * Scans the deployment's annotation indexes for @RunWith-annotated classes.
     * If any are found, attaches an {@link ArquillianConfig} listing their
     * class names to the deployment unit; otherwise does nothing.
     *
     * @param phaseContext the deployment phase context being processed
     * @throws DeploymentUnitProcessingException declared by the SPI; not thrown here directly
     */
    @Override
    public void deploy(DeploymentPhaseContext phaseContext) throws DeploymentUnitProcessingException {
        final Map<ResourceRoot, Index> indexes = AnnotationIndexUtils.getAnnotationIndexes(phaseContext.getDeploymentUnit());
        final List<AnnotationInstance> instances = new ArrayList<AnnotationInstance>();
        final DotName runWithName = DotName.createSimple(RunWith.class.getName());

        // collect @RunWith occurrences across all resource roots of the deployment
        for (Index index : indexes.values()) {
            final List<AnnotationInstance> annotations = index.getAnnotations(runWithName);
            if (annotations != null) {
                instances.addAll(annotations);
            }
        }

        // Skip if there are no @RunWith annotations
        if (instances.isEmpty()) {
            return;
        }

        final DeploymentUnit deploymentUnitContext = phaseContext.getDeploymentUnit();
        ArquillianConfig arqConfig = new ArquillianConfig(deploymentUnitContext);
        deploymentUnitContext.putAttachment(ArquillianConfig.KEY, arqConfig);

        // only class-level @RunWith targets name test classes; ignore any others
        for (AnnotationInstance instance : instances) {
            final AnnotationTarget target = instance.target();
            if (target instanceof ClassInfo) {
                final ClassInfo classInfo = (ClassInfo) target;
                final String testClassName = classInfo.name().toString();
                arqConfig.addTestClass(testClassName);
            }
        }
    }

    /**
     * Removes the {@link ArquillianConfig} attachment added by {@link #deploy}.
     *
     * @param context the deployment unit being undeployed
     */
    @Override
    public void undeploy(final DeploymentUnit context) {
        context.removeAttachment(ArquillianConfig.KEY);
    }
}
package org.dynmap.bukkitbridge;

import java.util.logging.Logger;

import org.bukkit.Location;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.player.AsyncPlayerChatEvent;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import org.bukkit.plugin.Plugin;
import org.bukkit.plugin.java.JavaPlugin;
import org.dynmap.DynmapAPI;
import org.dynmap.DynmapCommonAPI;
import org.dynmap.DynmapCommonAPIListener;
import org.dynmap.bukkitbridge.permissions.BukkitPermissions;
import org.dynmap.bukkitbridge.permissions.GroupManagerPermissions;
import org.dynmap.bukkitbridge.permissions.PEXPermissions;
import org.dynmap.markers.MarkerAPI;
import org.dynmap.permissions.PermissionsHandler;

/**
 * Bukkit plugin that bridges the Bukkit-facing {@link DynmapAPI} to the
 * platform-neutral {@link DynmapCommonAPI}. Every API method delegates to
 * {@code commonapi} when it is available and returns a neutral default
 * (null/false/0/"?") when Dynmap is not (yet) enabled.
 */
public class DynmapCBBridgePlugin extends JavaPlugin implements DynmapAPI {
    public static Logger log;
    // set by OurAPIListener when Dynmap comes up; null while Dynmap is unavailable
    private DynmapCommonAPI commonapi;
    // local chat-forwarding toggle exposed via setDisableChatToWebProcessing
    private boolean disableChatHandling = false;
    // previous value of Dynmap's own chat-processing flag, restored on disable
    private boolean wasDisabled = false;

    // Tracks Dynmap availability and wires up a permissions backend when it appears.
    private class OurAPIListener extends DynmapCommonAPIListener {
        @Override
        public void apiEnabled(DynmapCommonAPI api) {
            commonapi = api;
            // try permission providers in priority order: PEX, GroupManager, plain Bukkit
            PermissionsHandler ph = PEXPermissions.create();
            if (ph == null) ph = GroupManagerPermissions.create();
            if (ph == null) ph = BukkitPermissions.create();
            PermissionsHandler.setHandler(ph);
            // this bridge takes over chat-to-web forwarding; remember the prior setting
            wasDisabled = commonapi.setDisableChatToWebProcessing(true);
        }
        @Override
        public void apiDisabled(DynmapCommonAPI api) {
            commonapi = null;
        }
    }
    private OurAPIListener apilisten = new OurAPIListener();

    @Override
    public void onLoad() {
        log = this.getLogger();
    }

    @Override
    public void onDisable() {
        // restore Dynmap's chat processing if we were the ones who disabled it
        if ((commonapi != null) && (!wasDisabled)) {
            commonapi.setDisableChatToWebProcessing(false);
        }
    }

    @Override
    public void onEnable() {
        log.info("Dynmap CraftBukkit-to_Forge Bridge, version " + this.getDescription().getVersion());
        DynmapCommonAPIListener.register(apilisten);
        // forward chat/join/quit events to the web UI
        this.getServer().getPluginManager().registerEvents(new Listener() {
            @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
            public void onPlayerChat(AsyncPlayerChatEvent evt) {
                final Player p = evt.getPlayer();
                final String msg = evt.getMessage();
                // chat event is async -- hop back onto the server thread before calling the API
                getServer().getScheduler().scheduleSyncDelayedTask(DynmapCBBridgePlugin.this, new Runnable() {
                    public void run() {
                        if ((commonapi != null) && (!disableChatHandling)) {
                            commonapi.postPlayerMessageToWeb(p.getName(), p.getDisplayName(), msg);
                        }
                    }
                });
            }
            @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
            public void onPlayerJoin(PlayerJoinEvent evt) {
                final Player p = evt.getPlayer();
                // Give other handlers a chance to prep player (nicknames and such from Essentials)
                getServer().getScheduler().scheduleSyncDelayedTask(DynmapCBBridgePlugin.this, new Runnable() {
                    @Override
                    public void run() {
                        if ((commonapi != null) && (!disableChatHandling)) {
                            commonapi.postPlayerJoinQuitToWeb(p.getName(), p.getDisplayName(), true);
                        }
                    }
                }, 2);
            }
            @EventHandler(priority=EventPriority.MONITOR, ignoreCancelled=true)
            public void onPlayerQuit(PlayerQuitEvent evt) {
                Player p = evt.getPlayer();
                if ((commonapi != null) && (!disableChatHandling)) {
                    commonapi.postPlayerJoinQuitToWeb(p.getName(), p.getDisplayName(), false);
                }
            }
        }, this);
        log.info("DynmapCBBridge enabled");
    }

    // ---- Delegating DynmapAPI implementation; neutral defaults when Dynmap is absent ----

    @Override
    public MarkerAPI getMarkerAPI() {
        if (commonapi == null)
            return null;
        else
            return commonapi.getMarkerAPI();
    }

    @Override
    public boolean markerAPIInitialized() {
        if (commonapi == null)
            return false;
        else
            return commonapi.markerAPIInitialized();
    }

    @Override
    public boolean sendBroadcastToWeb(String sender, String msg) {
        if (commonapi == null)
            return false;
        else
            return commonapi.sendBroadcastToWeb(sender, msg);
    }

    @Override
    public int triggerRenderOfVolume(String wid, int minx, int miny, int minz, int maxx, int maxy, int maxz) {
        if (commonapi == null)
            return 0;
        else
            return commonapi.triggerRenderOfVolume(wid, minx, miny, minz, maxx, maxy, maxz);
    }

    @Override
    public int triggerRenderOfBlock(String wid, int x, int y, int z) {
        if (commonapi == null)
            return 0;
        else
            return commonapi.triggerRenderOfBlock(wid, x, y, z);
    }

    @Override
    public void setPauseFullRadiusRenders(boolean dopause) {
        if (commonapi == null)
            return;
        else
            commonapi.setPauseFullRadiusRenders(dopause);
    }

    @Override
    public boolean getPauseFullRadiusRenders() {
        if (commonapi == null)
            return false;
        else
            return commonapi.getPauseFullRadiusRenders();
    }

    @Override
    public void setPauseUpdateRenders(boolean dopause) {
        if (commonapi == null)
            return;
        else
            commonapi.setPauseUpdateRenders(dopause);
    }

    @Override
    public boolean getPauseUpdateRenders() {
        if (commonapi == null)
            return false;
        else
            return commonapi.getPauseUpdateRenders();
    }

    @Override
    public void setPlayerVisiblity(String player, boolean is_visible) {
        if (commonapi == null)
            return;
        else
            commonapi.setPlayerVisiblity(player, is_visible);
    }

    @Override
    public boolean getPlayerVisbility(String player) {
        if (commonapi == null)
            return false;
        else
            return commonapi.getPlayerVisbility(player);
    }

    @Override
    public void assertPlayerInvisibility(String player, boolean is_invisible, String plugin_id) {
        if (commonapi == null)
            return;
        else
            commonapi.assertPlayerInvisibility(player, is_invisible, plugin_id);
    }

    @Override
    public void assertPlayerVisibility(String player, boolean is_visible, String plugin_id) {
        if (commonapi == null)
            return;
        else
            commonapi.assertPlayerVisibility(player, is_visible, plugin_id);
    }

    @Override
    public void postPlayerMessageToWeb(String playerid, String playerdisplay, String message) {
        if (commonapi == null)
            return;
        else
            commonapi.postPlayerMessageToWeb(playerid, playerdisplay, message);
    }

    @Override
    public void postPlayerJoinQuitToWeb(String playerid, String playerdisplay, boolean isjoin) {
        if (commonapi == null)
            return;
        else
            commonapi.postPlayerJoinQuitToWeb(playerid, playerdisplay, isjoin);
    }

    @Override
    public String getDynmapCoreVersion() {
        if (commonapi == null)
            return "?";
        else
            return commonapi.getDynmapCoreVersion();
    }

    // Note: toggles only the bridge-local flag; Dynmap's own flag is managed in apiEnabled/onDisable.
    @Override
    public boolean setDisableChatToWebProcessing(boolean disable) {
        boolean prev = disableChatHandling;
        disableChatHandling = disable;
        return prev;
    }

    @Override
    public boolean testIfPlayerVisibleToPlayer(String player, String player_to_see) {
        if (commonapi == null)
            return false;
        else
            return commonapi.testIfPlayerVisibleToPlayer(player, player_to_see);
    }

    @Override
    public boolean testIfPlayerInfoProtected() {
        if (commonapi == null)
            return false;
        else
            return commonapi.testIfPlayerInfoProtected();
    }

    // Location-based overload: normalizes the two corners to a min/max box first.
    @Override
    public int triggerRenderOfVolume(Location l0, Location l1) {
        if(commonapi == null) return 0;
        int x0 = l0.getBlockX(), y0 = l0.getBlockY(), z0 = l0.getBlockZ();
        int x1 = l1.getBlockX(), y1 = l1.getBlockY(), z1 = l1.getBlockZ();

        return commonapi.triggerRenderOfVolume(l0.getWorld().getName(),
                Math.min(x0, x1), Math.min(y0, y1), Math.min(z0, z1),
                Math.max(x0, x1), Math.max(y0, y1), Math.max(z0, z1));
    }

    @Override
    public void setPlayerVisiblity(Player player, boolean is_visible) {
        if(commonapi == null) return;
        commonapi.setPlayerVisiblity(player.getName(), is_visible);
    }

    @Override
    public boolean getPlayerVisbility(Player player) {
        if(commonapi == null) return false;
        return commonapi.getPlayerVisbility(player.getName());
    }

    @Override
    public void postPlayerMessageToWeb(Player player, String message) {
        if(commonapi == null) return;
        commonapi.postPlayerMessageToWeb(player.getName(), player.getDisplayName(), message);
    }

    @Override
    public void postPlayerJoinQuitToWeb(Player player, boolean isjoin) {
        if(commonapi == null) return;
        commonapi.postPlayerJoinQuitToWeb(player.getName(), player.getDisplayName(), isjoin);
    }

    @Override
    public String getDynmapVersion() {
        if(commonapi == null) return "?";
        // NOTE(review): returns the core version, not a separate plugin version --
        // presumably intentional since the bridge only knows the common API; confirm.
        return commonapi.getDynmapCoreVersion();
    }

    @Override
    public void assertPlayerInvisibility(Player player, boolean is_invisible, Plugin plugin) {
        if(commonapi == null) return;
        commonapi.assertPlayerInvisibility(player.getName(), is_invisible, plugin.getDescription().getName());
    }

    @Override
    public void assertPlayerVisibility(Player player, boolean is_visible, Plugin plugin) {
        if(commonapi == null) return;
        commonapi.assertPlayerVisibility(player.getName(), is_visible, plugin.getDescription().getName());
    }
}
package com.intellij.refactoring.rename;

import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.refactoring.HelpID;
import com.intellij.refactoring.JavaRefactoringSettings;
import com.intellij.refactoring.listeners.RefactoringElementListener;
import com.intellij.usageView.UsageInfo;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;

import java.util.Collection;
import java.util.Map;

/**
 * Rename processor for Java packages: renames the package element itself,
 * schedules renames of the directories backing it, and re-resolves the
 * package under its new qualified name once the rename completes.
 *
 * @author yole
 */
public class RenamePsiPackageProcessor extends RenamePsiElementProcessor {
  // Fix: the logger was a per-instance field; loggers are stateless and conventionally static final.
  private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.rename.RenamePsiPackageProcessor");

  public boolean canProcessElement(final PsiElement element) {
    return element instanceof PsiPackage;
  }

  public void renameElement(final PsiElement element,
                            final String newName,
                            final UsageInfo[] usages,
                            final RefactoringElementListener listener) throws IncorrectOperationException {
    final PsiPackage psiPackage = (PsiPackage)element;
    // Notify the package of its new qualified name before the generic rename rewrites usages.
    psiPackage.handleQualifiedNameChange(RenameUtil.getQualifiedNameAfterRename(psiPackage.getQualifiedName(), newName));
    RenameUtil.doRenameGenericNamedElement(element, newName, usages, listener);
  }

  public String getQualifiedNameAfterRename(final PsiElement element, final String newName, final boolean nonJava) {
    return getPackageQualifiedNameAfterRename((PsiPackage)element, newName, nonJava);
  }

  /**
   * Computes the qualified name the package will have after the rename.
   * Non-Java occurrences need the fully qualified form (old prefix + new short name);
   * Java occurrences only need the new short name.
   */
  public static String getPackageQualifiedNameAfterRename(final PsiPackage element, final String newName, final boolean nonJava) {
    if (nonJava) {
      String qName = element.getQualifiedName();
      int index = qName.lastIndexOf('.');
      return index < 0 ? newName : qName.substring(0, index + 1) + newName;
    }
    else {
      return newName;
    }
  }

  @Override
  public void findExistingNameConflicts(PsiElement element, String newName, Collection<String> conflicts) {
    final PsiPackage aPackage = (PsiPackage)element;
    final Project project = element.getProject();
    final String qualifiedNameAfterRename = getPackageQualifiedNameAfterRename(aPackage, newName, true);
    // A class with the same fully qualified name would clash with the renamed package.
    if (JavaPsiFacade.getInstance(project).findClass(qualifiedNameAfterRename, GlobalSearchScope.allScope(project)) != null) {
      // Fix: message previously read "already exist" (grammar).
      conflicts.add("Class with qualified name '" + qualifiedNameAfterRename + "' already exists");
    }
  }

  public void prepareRenaming(final PsiElement element, final String newName, final Map<PsiElement, String> allRenames) {
    preparePackageRenaming((PsiPackage)element, newName, allRenames);
  }

  /**
   * Schedules renames of all directories backing the package.
   * Source-root directories are skipped: renaming a source root would change the
   * content layout, not the package name.
   */
  public static void preparePackageRenaming(PsiPackage psiPackage, final String newName, Map<PsiElement, String> allRenames) {
    final PsiDirectory[] directories = psiPackage.getDirectories();
    for (PsiDirectory directory : directories) {
      if (!JavaDirectoryService.getInstance().isSourceRoot(directory)) {
        allRenames.put(directory, newName);
      }
    }
  }

  /**
   * Returns a callback that, after the rename, re-resolves the package under its
   * new qualified name and notifies the listener with the fresh element.
   */
  @Nullable
  public Runnable getPostRenameCallback(final PsiElement element, final String newName, final RefactoringElementListener listener) {
    final Project project = element.getProject();
    final PsiPackage psiPackage = (PsiPackage)element;
    final String newQualifiedName = RenameUtil.getQualifiedNameAfterRename(psiPackage.getQualifiedName(), newName);
    return new Runnable() {
      public void run() {
        final PsiPackage aPackage = JavaPsiFacade.getInstance(project).findPackage(newQualifiedName);
        if (aPackage == null) {
          LOG.error("Package cannot be found: " + newQualifiedName + "; listener=" + listener);
          // Fix: previously fell through and notified the listener with a null element.
          return;
        }
        listener.elementRenamed(aPackage);
      }
    };
  }

  @Nullable
  @NonNls
  public String getHelpID(final PsiElement element) {
    return HelpID.RENAME_PACKAGE;
  }

  public boolean isToSearchInComments(final PsiElement psiElement) {
    return JavaRefactoringSettings.getInstance().RENAME_SEARCH_IN_COMMENTS_FOR_PACKAGE;
  }

  public void setToSearchInComments(final PsiElement element, final boolean enabled) {
    JavaRefactoringSettings.getInstance().RENAME_SEARCH_IN_COMMENTS_FOR_PACKAGE = enabled;
  }

  public boolean isToSearchForTextOccurrences(final PsiElement element) {
    return JavaRefactoringSettings.getInstance().RENAME_SEARCH_FOR_TEXT_FOR_PACKAGE;
  }

  public void setToSearchForTextOccurrences(final PsiElement element, final boolean enabled) {
    JavaRefactoringSettings.getInstance().RENAME_SEARCH_FOR_TEXT_FOR_PACKAGE = enabled;
  }
}
package org.javarosa.core.reference;

import java.util.Vector;

/**
 * <p>The reference manager is a singleton class which
 * is responsible for deriving reference URI's into
 * references at runtime.</p>
 *
 * <p>Raw reference factories
 * (which are capable of actually creating fully
 * qualified reference objects) are added with the
 * addFactory() method. The most common method
 * of doing so is to implement the PrefixedRootFactory
 * as either a full class, or an anonymous inner class,
 * providing the roots available in the current environment
 * and the code for constructing a reference from them.</p>
 *
 * <p>RootTranslators (which rely on other factories) are
 * used to describe that a particular reference style (generally
 * a high level reference like "jr://media/" or "jr://images/"
 * should be translated to another available reference in this
 * environment like "jr://file/". Root Translators do not
 * directly derive references, but rather translate them to what
 * the reference should look like in the current circumstances.</p>
 *
 * @author ctsims
 */
public class ReferenceManager {

    // One manager per thread; each thread lazily gets its own instance.
    private static ThreadLocal<ReferenceManager> instance = new ThreadLocal<ReferenceManager>() {
        @Override
        protected ReferenceManager initialValue() {
            return new ReferenceManager();
        }
    };

    // Long-lived root translators registered for this environment.
    private final Vector<RootTranslator> translators;
    // Raw factories capable of actually constructing references.
    private final Vector<ReferenceFactory> factories;
    // Translators that only live until clearSession() is called.
    private final Vector<RootTranslator> sessionTranslators;

    private ReferenceManager() {
        translators = new Vector<>();
        factories = new Vector<>();
        sessionTranslators = new Vector<>();
    }

    /**
     * @return Singleton accessor to the global
     * ReferenceManager.
     */
    public static ReferenceManager instance() {
        return instance.get();
    }

    /**
     * @return The raw reference factories registered via addReferenceFactory().
     * Fix: this previously sized and filled the returned array from the
     * translator list, so registered factories were never reported.
     */
    public ReferenceFactory[] getFactories() {
        ReferenceFactory[] roots = new ReferenceFactory[factories.size()];
        factories.copyInto(roots);
        return roots;
    }

    /**
     * Adds a new Translator to the current environment.
     */
    public void addRootTranslator(RootTranslator translator) {
        if (!translators.contains(translator)) {
            translators.addElement(translator);
        }
    }

    /**
     * Adds a factory for deriving reference URI's into references
     *
     * @param factory A raw ReferenceFactory capable of creating
     *                a reference.
     */
    public void addReferenceFactory(ReferenceFactory factory) {
        if (!factories.contains(factory)) {
            factories.addElement(factory);
        }
    }

    /**
     * @return true if the factory was registered and has been removed.
     */
    public boolean removeReferenceFactory(ReferenceFactory factory) {
        return factories.removeElement(factory);
    }

    /**
     * Derives a global reference from a URI in the current environment.
     *
     * @param uri The URI representing a global reference.
     * @return A reference which is identified by the provided URI.
     * @throws InvalidReferenceException If the current reference could
     *                                   not be derived by the current environment
     */
    public Reference DeriveReference(String uri) throws InvalidReferenceException {
        return DeriveReference(uri, (String)null);
    }

    /**
     * Derives a reference from a URI in the current environment.
     *
     * @param uri     The URI representing a reference.
     * @param context A reference which provides context for any
     *                relative reference accessors.
     * @return A reference which is identified by the provided URI.
     * @throws InvalidReferenceException If the current reference could
     *                                   not be derived by the current environment
     */
    public Reference DeriveReference(String uri, Reference context) throws InvalidReferenceException {
        return DeriveReference(uri, context.getURI());
    }

    /**
     * Derives a reference from a URI in the current environment.
     *
     * @param uri     The URI representing a reference.
     * @param context A reference URI which provides context for any
     *                relative reference accessors.
     * @return A reference which is identified by the provided URI.
     * @throws InvalidReferenceException If the current reference could
     *                                   not be derived by the current environment, or if the context URI
     *                                   is not valid in the current environment.
     */
    public Reference DeriveReference(String uri, String context) throws InvalidReferenceException {
        if (uri == null) {
            throw new InvalidReferenceException("Null references aren't valid", null);
        }

        //Relative URI's need to determine their context first.
        if (isRelative(uri)) {
            //Clean up the relative reference to lack any leading separators.
            if (uri.startsWith("./")) {
                uri = uri.substring(2);
            }

            if (context == null) {
                throw new RuntimeException("Attempted to retrieve local reference with no context");
            } else {
                // The context URI picks the root; the relative part is resolved against it.
                return derivingRoot(context).derive(uri, context);
            }
        } else {
            return derivingRoot(uri).derive(uri);
        }
    }

    /**
     * Adds a root translator that is maintained over the course of a session. It will be globally
     * available until the session is cleared using the "clearSession" method.
     *
     * @param translator A Root Translator that will be added to the current session
     */
    public void addSessionRootTranslator(RootTranslator translator) {
        sessionTranslators.addElement(translator);
    }

    /**
     * Wipes out all of the translators being maintained in the current session (IE: Any translators
     * added via "addSessionRootTranslator". Used to manage a temporary set of translations for a limited
     * amount of time.
     */
    public void clearSession() {
        sessionTranslators.removeAllElements();
    }

    /**
     * Finds the first registered root capable of deriving the given URI.
     * Lookup order: session translators, then global translators, then raw factories.
     */
    private ReferenceFactory derivingRoot(String uri) throws InvalidReferenceException {
        //First, try any/all roots which are put in the temporary session stack
        for (RootTranslator root : sessionTranslators) {
            if (root.derives(uri)) {
                return root;
            }
        }

        //Now, try any/all roots referenced at runtime.
        for (RootTranslator root : translators) {
            if (root.derives(uri)) {
                return root;
            }
        }

        //Now try all of the raw connectors available
        for (ReferenceFactory root : factories) {
            if (root.derives(uri)) {
                return root;
            }
        }

        throw new InvalidReferenceException(getPrettyPrintException(uri), uri);
    }

    /**
     * Builds a human-readable diagnostic for an underivable URI, listing the
     * roots that ARE available so a typo'd root is easy to spot.
     */
    private String getPrettyPrintException(String uri) {
        if ("".equals(uri)) {
            return "Attempt to derive a blank reference";
        }
        try {
            String uriRoot = uri;
            String jrRefMessagePortion = "reference type";
            // Fix: this condition contained a garbled string literal ("jr:) and did not compile.
            if (uri.contains("jr://")) {
                uriRoot = uri.substring("jr://".length());
                jrRefMessagePortion = "javarosa jr:// reference root";
            }
            // indexOf() returns -1 when there is no "://" separator, making endOfRoot equal
            // to "://".length() - 1; that sentinel triggers the fallback to the first '/'.
            int endOfRoot = uriRoot.indexOf("://") + "://".length();
            if (endOfRoot == "://".length() - 1) {
                endOfRoot = uriRoot.indexOf("/");
            }
            if (endOfRoot != -1) {
                uriRoot = uriRoot.substring(0, endOfRoot);
            }
            // StringBuilder instead of repeated String += in the loops below.
            StringBuilder message = new StringBuilder(
                    "The reference \"" + uri + "\" was invalid and couldn't be understood. The " +
                    jrRefMessagePortion + " \"" + uriRoot +
                    "\" is not available on this system and may have been mis-typed. Some available roots: ");
            for (RootTranslator root : sessionTranslators) {
                message.append("\n").append(root.prefix);
            }
            //Now, try any/all roots referenced at runtime.
            for (RootTranslator root : translators) {
                message.append("\n").append(root.prefix);
            }
            //Now try all of the raw connectors available
            for (ReferenceFactory root : factories) {
                try {
                    if (root instanceof PrefixedRootFactory) {
                        for (String rootName : ((PrefixedRootFactory)root).roots) {
                            message.append("\n").append(rootName);
                        }
                    } else {
                        message.append("\n").append(root.derive("").getURI());
                    }
                } catch (Exception ignored) {
                    // Best effort only: a factory that can't describe itself must not
                    // mask the original derivation failure being reported.
                }
            }
            return message.toString();
        } catch (Exception e) {
            // Fix: the original message contained a duplicated phrase
            // ("this doesn't mean that this doesn't mean").
            return "Couldn't process the reference " + uri + ". It may have been entered incorrectly. " +
                    "Note that this doesn't mean the file or location referenced couldn't be found; " +
                    "the reference itself was not understood.";
        }
    }

    /**
     * @return Whether the provided URI describes a relative reference.
     */
    public static boolean isRelative(String URI) {
        return URI.startsWith("./");
    }
}
package org.jboss.forge.plugin.idea.util;

import org.jboss.forge.addon.ui.command.CommandFactory;
import org.jboss.forge.addon.ui.command.UICommand;
import org.jboss.forge.addon.ui.context.UIContext;
import org.jboss.forge.addon.ui.metadata.UICategory;
import org.jboss.forge.addon.ui.metadata.UICommandMetadata;
import org.jboss.forge.addon.ui.util.Categories;
import org.jboss.forge.addon.ui.util.Commands;
import org.jboss.forge.furnace.util.Lists;
import org.jboss.forge.plugin.idea.service.ForgeService;

import java.util.*;

/**
 * Static helpers for listing, indexing, categorizing and sorting Forge UI commands.
 */
public class CommandUtil
{
    private static final String RECENT_COMMANDS = "Recent Commands";

    // Fix: utility class — prevent instantiation.
    private CommandUtil()
    {
    }

    /**
     * @return every command known to the Forge command factory.
     */
    public static List<UICommand> getAllCommands()
    {
        CommandFactory commandFactory = ForgeService.getInstance().getCommandFactory();
        return Lists.toList(commandFactory.getCommands());
    }

    /**
     * Builds a command -> metadata lookup, resolving each command's metadata once
     * so later sorting/labelling doesn't re-query it.
     */
    public static Map<UICommand, UICommandMetadata> indexMetadata(List<UICommand> commands, UIContext context)
    {
        Map<UICommand, UICommandMetadata> index = new HashMap<>();
        for (UICommand command : commands)
        {
            UICommandMetadata metadata = command.getMetadata(context);
            index.put(command, metadata);
        }
        return index;
    }

    /**
     * Builds the searchable text for each list element: for a command, its category
     * plus name; for a category heading (a String), the concatenated labels of all
     * commands in that category, so typing a command name also matches its heading.
     *
     * @throws IllegalArgumentException if an element is neither a UICommand nor a String.
     */
    public static Map<Object, String> indexFilterData(List<Object> elements,
                                                      Map<String, List<UICommand>> categories,
                                                      Map<UICommand, UICommandMetadata> metadataIndex)
    {
        Map<Object, String> result = new HashMap<>();
        for (Object object : elements)
        {
            if (object instanceof UICommand)
            {
                UICommand command = (UICommand) object;
                UICommandMetadata metadata = metadataIndex.get(command);
                result.put(object, categoryName(metadata.getCategory()) + " " + metadata.getName());
            }
            else if (object instanceof String)
            {
                StringBuilder categoryStringBuilder = new StringBuilder();
                for (UICommand command : categories.get(object))
                {
                    UICommandMetadata metadata = metadataIndex.get(command);
                    // Fix: append the pieces directly instead of concatenating a
                    // temporary String inside StringBuilder.append().
                    categoryStringBuilder.append(categoryName(metadata.getCategory()))
                             .append(' ')
                             .append(metadata.getName())
                             .append(' ');
                }
                result.put(object, categoryStringBuilder.toString());
            }
            else
            {
                throw new IllegalArgumentException("Unknown object type: " + object.getClass());
            }
        }
        return result;
    }

    /**
     * Returns a list of pairs: (category name, list of commands). Sorted by category name, also each command list
     * is sorted by command name. The "Recent Commands" category always sorts first.
     */
    public static List<Map.Entry<String, List<UICommand>>> sortCategories(Map<String, List<UICommand>> categories,
                                                                          final Map<UICommand, UICommandMetadata> index)
    {
        List<Map.Entry<String, List<UICommand>>> result = new ArrayList<>();

        // Sort each entry and add to the result
        for (Map.Entry<String, List<UICommand>> entry : categories.entrySet())
        {
            Collections.sort(entry.getValue(), new Comparator<UICommand>()
            {
                @Override
                public int compare(UICommand o1, UICommand o2)
                {
                    return index.get(o1).getName().compareTo(index.get(o2).getName());
                }
            });
            result.add(entry);
        }

        // Sort result; map keys are unique, so both names can never be RECENT_COMMANDS at once.
        Collections.sort(result, new Comparator<Map.Entry<String, List<UICommand>>>()
        {
            @Override
            public int compare(Map.Entry<String, List<UICommand>> o1, Map.Entry<String, List<UICommand>> o2)
            {
                String o1Name = o1.getKey();
                String o2Name = o2.getKey();
                if (o1Name.equals(RECENT_COMMANDS))
                {
                    return -1;
                }
                if (o2Name.equals(RECENT_COMMANDS))
                {
                    return 1;
                }
                return o1Name.compareTo(o2Name);
            }
        });

        return result;
    }

    /**
     * Groups commands by category name; recently used commands (when any) are
     * collected under the dedicated "Recent Commands" category.
     */
    public static Map<String, List<UICommand>> categorizeCommands(List<UICommand> commands,
                                                                  List<UICommand> recentCommands,
                                                                  Map<UICommand, UICommandMetadata> index)
    {
        Map<String, List<UICommand>> categories = new HashMap<>();
        for (UICommand command : commands)
        {
            UICommandMetadata metadata = index.get(command);
            String category = categoryName(metadata.getCategory());
            if (!categories.containsKey(category))
            {
                categories.put(category, new ArrayList<UICommand>());
            }
            categories.get(category).add(command);
        }
        if (!recentCommands.isEmpty())
        {
            categories.put(RECENT_COMMANDS, recentCommands);
        }
        return categories;
    }

    /**
     * Flattens sorted category entries into a single display list:
     * each category name followed by its commands.
     */
    public static List<Object> categoriesToList(List<Map.Entry<String, List<UICommand>>> categories)
    {
        List<Object> list = new ArrayList<>();
        for (Map.Entry<String, List<UICommand>> entry : categories)
        {
            list.add(entry.getKey());
            list.addAll(entry.getValue());
        }
        return list;
    }

    /**
     * Renders a category (and its sub-categories) as "Parent / Child / ...".
     * A null category falls back to the Forge default category.
     */
    private static String categoryName(UICategory category)
    {
        if (category == null)
        {
            category = Categories.createDefault();
        }
        StringBuilder name = new StringBuilder();
        name.append(category.getName().trim());
        category = category.getSubCategory();
        while (category != null)
        {
            name.append(" / ");
            name.append(category.getName().trim());
            category = category.getSubCategory();
        }
        return name.toString();
    }

    /**
     * @return all commands currently enabled in the given UI context.
     */
    public static List<UICommand> getEnabledCommands(UIContext uiContext)
    {
        return Lists.toList(Commands.getEnabledCommands(getAllCommands(), uiContext));
    }
}
package net.sandius.rembulan.compiler.gen;

import java.util.Objects;

/**
 * A lattice of static slot types used by the compiler: ANY is the top element,
 * with concrete Lua value types (nil, boolean, number, string, ...) below it,
 * and structural function types compared by argument/return signatures.
 */
public abstract class SlotType {

	private SlotType() {
		// not to be instantiated by the outside world
	}

	// TODO: number-as-string, string-as-number, true, false, actual constant values?

	// return true iff type(this) =< type(that)
	public abstract boolean isSubtypeOf(SlotType that);

	// return true iff type(this) >= type(that)
	public boolean isSupertypeOf(SlotType that) {
		return that.isSubtypeOf(this);
	}

	// return the most specific type that is more general than both this and that,
	// or null if such type does not exist
	public abstract SlotType join(SlotType that);

	// return the most general type that is more specific than both this and that,
	// or null if such type does not exist
	public abstract SlotType meet(SlotType that);

	@Deprecated
	public static String toString(SlotType type) {
		return type.toString();
	}

	public static final SlotType ANY = new AnyType();
	public static final SlotType NIL = new ConcreteType(ANY, "nil", "-");
	public static final SlotType BOOLEAN = new ConcreteType(ANY, "boolean", "B");
	public static final SlotType NUMBER = new ConcreteType(ANY, "number", "N");
	public static final SlotType NUMBER_INTEGER = new ConcreteType(NUMBER, "integer", "i");
	public static final SlotType NUMBER_FLOAT = new ConcreteType(NUMBER, "float", "f");
	public static final SlotType STRING = new ConcreteType(ANY, "string", "S");
	public static final FunctionType FUNCTION = new FunctionType(ArgTypes.empty().withVararg(), ArgTypes.empty().withVararg());
	public static final SlotType TABLE = new ConcreteType(ANY, "table", "T");
	public static final SlotType THREAD = new ConcreteType(ANY, "thread", "C");

	/**
	 * The top type: every type is a subtype of ANY.
	 */
	private static class AnyType extends SlotType {

		@Override
		public String toString() {
			return "A";
		}

		@Override
		public boolean isSubtypeOf(SlotType that) {
			Objects.requireNonNull(that);
			// Fix: ANY, being the top type, is a subtype only of ANY. The previous
			// implementation delegated to that.isSupertypeOf(this), which the base
			// class implements as this.isSubtypeOf(that) — an unbounded mutual
			// recursion (StackOverflowError) for any non-ANY argument.
			return that instanceof AnyType;
		}

		@Override
		public boolean isSupertypeOf(SlotType that) {
			Objects.requireNonNull(that);
			return true;
		}

		@Override
		public SlotType join(SlotType that) {
			return this;
		}

		@Override
		public SlotType meet(SlotType that) {
			return that;
		}

	}

	/**
	 * Base for all non-top types: each carries a supertype link that join/meet
	 * climb to find common ancestors.
	 */
	private static abstract class AbstractConcreteType extends SlotType {

		protected final SlotType supertype;

		protected AbstractConcreteType(SlotType supertype) {
			this.supertype = Objects.requireNonNull(supertype);
		}

		public SlotType supertype() {
			return supertype;
		}

		@Override
		public boolean isSubtypeOf(SlotType that) {
			// Subtyping follows the supertype chain up to ANY.
			return this.equals(that) || this.supertype().isSubtypeOf(that);
		}

		@Override
		public SlotType join(SlotType that) {
			Objects.requireNonNull(that);
			// Climb this type's ancestry until it subsumes that.
			if (that.isSubtypeOf(this)) return this;
			else return this.supertype().join(that);
		}

		@Override
		public SlotType meet(SlotType that) {
			Objects.requireNonNull(that);
			// In a tree-shaped hierarchy the meet exists only along a single chain.
			if (this.isSubtypeOf(that)) return this;
			else if (that.isSubtypeOf(this)) return that;
			else return null;
		}

	}

	/**
	 * A named leaf/branch type in the hierarchy (nil, boolean, number, ...),
	 * printed via its one-character short name.
	 */
	private static class ConcreteType extends AbstractConcreteType {

		private final String name;
		private final String shortName;

		private ConcreteType(SlotType supertype, String name, String shortName) {
			super(supertype);
			this.name = Objects.requireNonNull(name);
			this.shortName = Objects.requireNonNull(shortName);
		}

		@Override
		public String toString() {
			return shortName;
		}

	}

	/**
	 * A function type compared structurally by its argument and return type lists.
	 * Subtyping is contravariant in arguments and covariant in returns.
	 */
	public static class FunctionType extends AbstractConcreteType {

		protected final ArgTypes argTypes;
		protected final ArgTypes returnTypes;

		private FunctionType(ArgTypes arg, ArgTypes ret) {
			super(ANY);
			this.argTypes = Objects.requireNonNull(arg);
			this.returnTypes = Objects.requireNonNull(ret);
		}

		@Override
		public boolean equals(Object o) {
			if (this == o) return true;
			if (o == null || getClass() != o.getClass()) return false;
			FunctionType that = (FunctionType) o;
			return argTypes.equals(that.argTypes) && returnTypes.equals(that.returnTypes);
		}

		@Override
		public int hashCode() {
			int result = argTypes.hashCode();
			result = 31 * result + returnTypes.hashCode();
			return result;
		}

		public static FunctionType of(int numArgs, boolean vararg) {
			return new FunctionType(ArgTypes.init(numArgs, vararg), ArgTypes.empty().withVararg());
		}

		public static FunctionType of(ArgTypes arg, ArgTypes ret) {
			return new FunctionType(arg, ret);
		}

		@Override
		public String toString() {
			// The fully-vararg function prints as plain "F".
			if (argumentTypes().isVarargOnly() && returnTypes().isVarargOnly()) return "F";
			else return "F(" + argumentTypes().toString() + ";" + returnTypes().toString() + ")";
		}

		public String toExplicitString() {
			return "(" + argumentTypes().toString() + ") -> (" + returnTypes().toString() + ")";
		}

		public ArgTypes argumentTypes() {
			return argTypes;
		}

		public ArgTypes returnTypes() {
			return returnTypes;
		}

		@Override
		public boolean isSubtypeOf(SlotType that) {
			Objects.requireNonNull(that);
			if (this.equals(that)) {
				return true;
			}
			if (that instanceof FunctionType) {
				FunctionType ft = (FunctionType) that;
				// Contravariant in arguments, covariant in returns.
				return ft.argumentTypes().isSubsumedBy(this.argumentTypes())
						&& this.returnTypes().isSubsumedBy(ft.returnTypes());
			}
			else {
				return this.supertype().isSubtypeOf(that);
			}
		}

		@Override
		public SlotType join(SlotType that) {
			Objects.requireNonNull(that);
			if (this.isSubtypeOf(that)) {
				return that;
			}
			else if (that instanceof FunctionType) {
				FunctionType ft = (FunctionType) that;
				// Join of functions: meet the argument lists, join the return lists.
				ArgTypes arg = this.argumentTypes().meet(ft.argumentTypes());
				ArgTypes ret = this.returnTypes().join(ft.returnTypes());
				return arg != null && ret != null ? new FunctionType(arg, ret) : null;
			}
			else {
				return this.supertype().join(that);
			}
		}

		@Override
		public SlotType meet(SlotType that) {
			Objects.requireNonNull(that);
			if (this.isSubtypeOf(that)) {
				return this;
			}
			else if (that.isSubtypeOf(this)) {
				return that;
			}
			else if (that instanceof FunctionType) {
				FunctionType ft = (FunctionType) that;
				// Meet of functions: join the argument lists, meet the return lists.
				ArgTypes arg = this.argumentTypes().join(ft.argumentTypes());
				ArgTypes ret = this.returnTypes().meet(ft.returnTypes());
				return arg != null && ret != null ? new FunctionType(arg, ret) : null;
			}
			else {
				return null;
			}
		}

	}

}
package org.jmxtrans.embedded.output; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.StringWriter; import java.io.StringReader; import java.io.Reader; import java.io.Writer; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import java.io.File; import java.net.*; import java.nio.charset.Charset; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.io.BufferedReader; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.util.Collections; import java.util.Comparator; import com.fasterxml.jackson.core.Base64Variants; import com.fasterxml.jackson.core.JsonEncoding; import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.JsonToken; import com.fasterxml.jackson.databind.MappingJsonFactory; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectReader; import com.fasterxml.jackson.databind.node.ObjectNode; import com.fasterxml.jackson.core.JsonGenerationException; import com.fasterxml.jackson.databind.JsonMappingException; import org.jmxtrans.embedded.EmbeddedJmxTransException; import org.jmxtrans.embedded.QueryResult; import org.jmxtrans.embedded.util.io.IoUtils2; import org.jmxtrans.embedded.util.StringUtils2; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.lang.management.ManagementFactory; public class CopperEggWriter extends AbstractOutputWriter implements OutputWriter { public static final String METRIC_TYPE_GAUGE = "gauge"; public static final String METRIC_TYPE_COUNTER = "counter"; public static final String DEFAULT_COPPEREGG_API_URL = "https://api.copperegg.com/v2/revealmetrics"; public static final 
String SETTING_COPPEREGG_API_TIMEOUT_IN_MILLIS = "coppereggApiTimeoutInMillis"; public static final int DEFAULT_COPPEREGG_API_TIMEOUT_IN_MILLIS = 10000; public static final String SETTING_SOURCE = "source"; public static final String DEFAULT_SOURCE = "#hostname#"; private final static String DEFAULT_COPPEREGG_CONFIGURATION_PATH = "classpath:copperegg_config.json"; private final Logger logger = LoggerFactory.getLogger(getClass()); private final AtomicInteger exceptionCounter = new AtomicInteger(); private JsonFactory jsonFactory = new JsonFactory(); /** * CopperEgg API URL */ private URL url; private String url_str; private int coppereggApiTimeoutInMillis = DEFAULT_COPPEREGG_API_TIMEOUT_IN_MILLIS; private long myPID = 0; private String myhost; private String myPID_host; private String config_location; private static Map<String, String> dashMap = new HashMap<String, String>(); private static Map<String, String> metricgroupMap = new HashMap<String, String>(); private String jvm_metric_groupID = null; private String heap_metric_groupID = null; private String nonheap_metric_groupID = null; private String jmxtrans_metric_groupID = null; private String app_metric_groupID = null; private String tomcat_global_groupID = null; private String tomcat_servlet_groupID = null; private String tomcat_manager_groupID = null; private String tomcat_website_groupID = null; private String tomcat_db_groupID = null; /** * CopperEgg API authentication username */ private String user; /** * CopperEgg APIKEY */ private String token; private String basicAuthentication; /** * Optional proxy for the http API calls */ @Nullable private Proxy proxy; /** * CopperEgg measurement property 'source', */ @Nullable private String source; /** * Load settings<p/> */ @Override public void start() { config_location = DEFAULT_COPPEREGG_CONFIGURATION_PATH; String path = config_location.substring("classpath:".length()); long thisPID = getPID(); if( myPID == thisPID) { logger.info("Started from two threads with 
the same PID, {}",thisPID); return; } myPID = thisPID; try { String str = String.valueOf(myPID); url_str = getStringSetting(SETTING_URL, DEFAULT_COPPEREGG_API_URL); url = new URL(url_str); user = getStringSetting(SETTING_USERNAME); token = getStringSetting(SETTING_TOKEN); user = token; basicAuthentication = Base64Variants.getDefaultVariant().encode((user + ":" + "U").getBytes(Charset.forName("US-ASCII"))); if (getStringSetting(SETTING_PROXY_HOST, null) != null && !getStringSetting(SETTING_PROXY_HOST).isEmpty()) { proxy = new Proxy(Proxy.Type.HTTP, new InetSocketAddress(getStringSetting(SETTING_PROXY_HOST), getIntSetting(SETTING_PROXY_PORT))); } coppereggApiTimeoutInMillis = getIntSetting(SETTING_COPPEREGG_API_TIMEOUT_IN_MILLIS, DEFAULT_COPPEREGG_API_TIMEOUT_IN_MILLIS); source = getStringSetting(SETTING_SOURCE, DEFAULT_SOURCE); source = getStrategy().resolveExpression(source); myhost = source; myPID_host = myhost + '.' + str; try{ InputStream in = Thread.currentThread().getContextClassLoader().getResourceAsStream(path); if(in == null) { logger.warn("No file found for classpath:" + config_location); } else { read_config(in); } } catch (Exception e){ exceptionCounter.incrementAndGet(); logger.warn("Exception in start " + e); } ensure_metric_groups(); ensure_dashboards(); logger.info("Started CopperEggWriter Successfully on jvm '{}', connected to '{}', proxy {}", myPID_host, url, proxy); } catch (MalformedURLException e) { exceptionCounter.incrementAndGet(); throw new EmbeddedJmxTransException(e); } } /** * Export collected metrics to CopperEgg */ @Override public void write(Iterable<QueryResult> results) { List<QueryResult> jvm_counters = new ArrayList<QueryResult>(); List<QueryResult> nonheap_counters = new ArrayList<QueryResult>(); List<QueryResult> heap_counters = new ArrayList<QueryResult>(); List<QueryResult> jmxtrans_counters = new ArrayList<QueryResult>(); List<QueryResult> app_counters = new ArrayList<QueryResult>(); List<QueryResult> tomcat_global_counters = 
new ArrayList<QueryResult>(); List<QueryResult> tomcat_servlet_counters = new ArrayList<QueryResult>(); List<QueryResult> tomcat_manager_counters = new ArrayList<QueryResult>(); List<QueryResult> tomcat_website_counters = new ArrayList<QueryResult>(); List<QueryResult> tomcat_db_counters = new ArrayList<QueryResult>(); long epochInMillis = 0; String myname = null; Object myval = null; long thisPID = 0; String tmp = null; String pidHost = null; String delims = "[.]"; for (QueryResult result : results) { epochInMillis = result.getEpochInMillis(); myname = result.getName(); myval = result.getValue(); thisPID = getPID(); tmp = String.valueOf(thisPID); pidHost = source + "." + tmp; String[] parts = myname.split(delims); if( parts.length > 0 ) { String p1 = parts[0]; if( p1.equals("jmxtrans") ) { QueryResult new_result = new QueryResult(myname, pidHost, myval, epochInMillis); jmxtrans_counters.add(new_result); } else if( p1.equals("jvm") ) { if( parts[1].equals("memorypool") ){ if( ( (parts[2].equals("Perm_Gen")) || (parts[2].equals("Code_Cache")) ) && ( (parts[4].equals("committed")) || (parts[4].equals("used")) ) ) { myname = "jvmNonHeapMemoryUsage"; String fullID = pidHost + "." + parts[2] + "." + parts[4]; //Float newval = (Float) myval; //newval = newval/(1024.0f*1024.0f); QueryResult new_result = new QueryResult(myname, fullID, myval, epochInMillis); nonheap_counters.add(new_result); } else if( ( (parts[2].equals("Eden_Space")) || (parts[2].equals("Survivor_Space")) || (parts[2].equals("Tenured_Gen")) ) && ( (parts[4].equals("committed")) || (parts[4].equals("used")) ) ) { myname = "jvmHeapMemoryUsage"; String fullID = pidHost + "." + parts[2] + "." 
+ parts[4]; //Float newval = (Float) myval; //newval = newval/(1024.0f*1024.0f); QueryResult new_result = new QueryResult(myname, fullID, myval, epochInMillis); heap_counters.add(new_result); } } else if( !parts[1].equals("memory") ){ QueryResult new_result = new QueryResult(myname, pidHost, myval, epochInMillis); jvm_counters.add(new_result); } } else if( p1.equals("tomcat") ) { if( (parts[1].equals("thread-pool")) || (parts[1].equals("global-request-processor")) ) { String connector = parts[2]; myname = parts[0] + "." + parts[1] + "." + parts[3]; String fullID = pidHost + "." + connector; QueryResult new_result = new QueryResult(myname, fullID, myval, epochInMillis); tomcat_global_counters.add(new_result); } else if( parts[1].equals("manager") ) { String myhost = parts[2]; String mycontext = parts[3]; myname = parts[0] + "." + parts[1] + "." + parts[4]; String fullID = pidHost + "." + myhost + "." + mycontext; QueryResult new_result = new QueryResult(myname, fullID, myval, epochInMillis); tomcat_manager_counters.add(new_result); } else if( parts[1].equals("servlet") ) { String myWebmodule = parts[2]; String myServletname = parts[3]; myname = parts[0] + "." + parts[1] + "." + parts[4]; String fullID = pidHost + "." + myWebmodule + "." + myServletname; QueryResult new_result = new QueryResult(myname, fullID, myval, epochInMillis); tomcat_servlet_counters.add(new_result); } else if( parts[1].equals("data-source") ) { String myhost = parts[2]; String mycontext = parts[3]; String mydbname = parts[4]; myname = parts[0] + "." + parts[1] + "." + parts[5]; String fullID = pidHost + "." + myhost + "." + mycontext + "." 
+ mydbname; QueryResult new_result = new QueryResult(myname, fullID, myval, epochInMillis); tomcat_db_counters.add(new_result); } } else if( p1.equals("cocktail") ) { if( !(parts[1].equals("CreatedCocktailCount")) && !(parts[1].equals("UpdatedCocktailCount")) ) { QueryResult new_result = new QueryResult(myname, pidHost, myval, epochInMillis); app_counters.add(new_result); } } else if( p1.equals("sales") ) { QueryResult new_result = new QueryResult(myname, pidHost, myval, epochInMillis); app_counters.add(new_result); } } else { logger.warn("parts return NULL!!!"); } } if(jvm_counters.size() > 0) { sort_n_send(jvm_metric_groupID, jvm_counters); } if(heap_counters.size() > 0) { sort_n_send(heap_metric_groupID, heap_counters); } if(nonheap_counters.size() > 0) { sort_n_send(nonheap_metric_groupID, nonheap_counters); } if(jmxtrans_counters.size() > 0) { sort_n_send(jmxtrans_metric_groupID, jmxtrans_counters); } if(tomcat_global_counters.size() > 0) { sort_n_send(tomcat_global_groupID, tomcat_global_counters); } if(tomcat_servlet_counters.size() > 0) { sort_n_send(tomcat_servlet_groupID, tomcat_servlet_counters); } if(tomcat_manager_counters.size() > 0) { sort_n_send(tomcat_manager_groupID, tomcat_manager_counters); } if(tomcat_website_counters.size() > 0) { sort_n_send(tomcat_website_groupID, tomcat_website_counters); } if(tomcat_db_counters.size() > 0) { sort_n_send(tomcat_db_groupID, tomcat_db_counters); } if(app_counters.size() > 0) { Collections.sort(jvm_counters, new Comparator<QueryResult>() { public int compare(QueryResult o1, QueryResult o2) { //Sorts by 'epochInMillis' property return o1.getEpochInMillis()<o2.getEpochInMillis()?-1:o1.getEpochInMillis()>o2.getEpochInMillis()?1:0; } }); send_metrics(app_metric_groupID, app_counters); } } public void sort_n_send(String mg_name, List<QueryResult> mg_counters) { Collections.sort(mg_counters, new Comparator<QueryResult>() { public int compare(QueryResult o1, QueryResult o2) { //Sorts by 'epochInMillis' property 
Integer rslt = o1.getEpochInMillis()<o2.getEpochInMillis()?-1:o1.getEpochInMillis()>o2.getEpochInMillis()?1:0; if(rslt == 0){ rslt = (o1.getType()).compareTo(o2.getType()); } return rslt; //return o1.getEpochInMillis()<o2.getEpochInMillis()?-1:o1.getEpochInMillis()>o2.getEpochInMillis()?1:0; } }); send_metrics(mg_name, mg_counters); } public void send_metrics(String mg_name, List<QueryResult> counters) { long timeblock = counters.get(0).getEpoch(TimeUnit.SECONDS); String identifier = counters.get(0).getType(); int remaining = counters.size(); List<QueryResult> sorted_ctrs = new ArrayList<QueryResult>(); for (QueryResult counter : counters) { remaining = remaining - 1; if( (timeblock != (counter.getEpoch(TimeUnit.SECONDS))) || (!identifier.equals(counter.getType()) ) ) { one_set(mg_name, sorted_ctrs); timeblock = counter.getEpoch(TimeUnit.SECONDS); identifier = counter.getType(); sorted_ctrs.clear(); sorted_ctrs.add(counter); } else { sorted_ctrs.add(counter); } if( remaining == 0 ) { one_set(mg_name, sorted_ctrs); } } } public void one_set(String mg_name, List<QueryResult> counters) { HttpURLConnection urlCxn = null; URL newurl = null; try { newurl = new URL(url_str + "/samples/" + mg_name + ".json"); if (proxy == null) { urlCxn = (HttpURLConnection) newurl.openConnection(); } else { urlCxn = (HttpURLConnection) newurl.openConnection(proxy); } if (urlCxn != null) { urlCxn.setRequestMethod("POST"); urlCxn.setDoInput(true); urlCxn.setDoOutput(true); urlCxn.setReadTimeout(coppereggApiTimeoutInMillis); urlCxn.setRequestProperty("content-type", "application/json; charset=utf-8"); urlCxn.setRequestProperty("Authorization", "Basic " + basicAuthentication); } } catch (Exception e) { exceptionCounter.incrementAndGet(); logger.warn("Exception: one_set: failed to connect to CopperEgg Service '{}' with proxy {}", newurl, proxy, e); return; } if( urlCxn != null ) { try { cue_serialize(counters, urlCxn.getOutputStream()); int responseCode = urlCxn.getResponseCode(); if 
(responseCode != 200) { logger.warn("one_set: Failure {}: {} to send result to CopperEgg service {}", responseCode, urlCxn.getResponseMessage(), newurl); } try { InputStream in = urlCxn.getInputStream(); IoUtils2.copy(in, IoUtils2.nullOutputStream()); IoUtils2.closeQuietly(in); InputStream err = urlCxn.getErrorStream(); if (err != null) { IoUtils2.copy(err, IoUtils2.nullOutputStream()); IoUtils2.closeQuietly(err); } } catch (IOException e) { exceptionCounter.incrementAndGet(); logger.warn("Execption one_set: Write-Exception flushing http connection", e); } } catch (Exception e) { exceptionCounter.incrementAndGet(); logger.warn("Execption: one_set: Failure to send result to CopperEgg Service '{}' with proxy {}", newurl, proxy, e); } } } public void cue_serialize(@Nonnull Iterable<QueryResult> counters, @Nonnull OutputStream out) throws IOException { int first = 0; long time = 0; String myID = null; JsonGenerator g = jsonFactory.createGenerator(out, JsonEncoding.UTF8); for (QueryResult counter : counters) { if( 0 == first ) { time = counter.getEpoch(TimeUnit.SECONDS); myID = counter.getType(); first = 1; g.writeStartObject(); g.writeStringField("identifier", myID); g.writeNumberField("timestamp", time); g.writeObjectFieldStart("values"); } if (counter.getValue() instanceof Integer) { g.writeNumberField(counter.getName(), (Integer) counter.getValue()); } else if (counter.getValue() instanceof Long) { g.writeNumberField(counter.getName(), (Long) counter.getValue()); } else if (counter.getValue() instanceof Float) { g.writeNumberField(counter.getName(), (Float) counter.getValue()); } else if (counter.getValue() instanceof Double) { g.writeNumberField(counter.getName(), (Double) counter.getValue()); } } g.writeEndObject(); g.writeEndObject(); g.flush(); g.close(); } public void debug_cue_serialize(@Nonnull Iterable<QueryResult> counters, @Nonnull OutputStream out) throws IOException { int first = 0; long time = 0; String myID = null; JsonGenerator g = 
jsonFactory.createGenerator(out, JsonEncoding.UTF8); StringWriter strout = new StringWriter(); JsonFactory fac = new JsonFactory(); JsonGenerator gen = fac.createJsonGenerator(strout); for (QueryResult counter : counters) { if( 0 == first ) { time = counter.getEpoch(TimeUnit.SECONDS); myID = counter.getType(); first = 1; g.writeStartObject(); g.writeStringField("identifier", myID); g.writeNumberField("timestamp", time); g.writeObjectFieldStart("values"); gen.writeStartObject(); gen.writeStringField("identifier", myID); gen.writeNumberField("timestamp", time); gen.writeObjectFieldStart("values"); } if( (time != counter.getEpoch(TimeUnit.SECONDS)) || (!(myID.equals(counter.getType()))) ) { logger.warn("Messed-up serialize: "); logger.warn("time {} should be this time: {}; id {} should be this id: {}", counter.getEpoch(TimeUnit.SECONDS), time, counter.getType(), myID); } if (counter.getValue() instanceof Integer) { g.writeNumberField(counter.getName(), (Integer) counter.getValue()); gen.writeNumberField(counter.getName(), (Integer) counter.getValue()); } else if (counter.getValue() instanceof Long) { g.writeNumberField(counter.getName(), (Long) counter.getValue()); gen.writeNumberField(counter.getName(), (Long) counter.getValue()); } else if (counter.getValue() instanceof Float) { g.writeNumberField(counter.getName(), (Float) counter.getValue()); gen.writeNumberField(counter.getName(), (Long) counter.getValue()); } else if (counter.getValue() instanceof Double) { g.writeNumberField(counter.getName(), (Double) counter.getValue()); gen.writeNumberField(counter.getName(), (Double) counter.getValue()); } } g.writeEndObject(); g.writeEndObject(); g.flush(); g.close(); gen.writeEndObject(); gen.writeEndObject(); gen.flush(); gen.close(); logger.warn("Serialized output: " + strout.toString()); } private static long getPID() { String processName = java.lang.management.ManagementFactory.getRuntimeMXBean().getName(); return Long.parseLong(processName.split("@")[0]); } public 
int cue_getExceptionCounter() { return exceptionCounter.get(); } /** * If metric group doesn't exist, create it * If it does exist, update it. */ public void ensure_metric_groups() { HttpURLConnection urlConnection = null; OutputStreamWriter wr = null; URL myurl = null; try { myurl = new URL(url_str + "/metric_groups.json?show_hidden=1"); urlConnection = (HttpURLConnection) myurl.openConnection(); urlConnection.setRequestMethod("GET"); urlConnection.setDoInput(true); urlConnection.setDoOutput(true); urlConnection.setReadTimeout(coppereggApiTimeoutInMillis); urlConnection.setRequestProperty("content-type", "application/json; charset=utf-8"); urlConnection.setRequestProperty("Authorization", "Basic " + basicAuthentication); int responseCode = urlConnection.getResponseCode(); if (responseCode != 200) { logger.warn("Bad responsecode " + String.valueOf(responseCode)+ " from metric_groups Index: " + myurl); } } catch (Exception e) { exceptionCounter.incrementAndGet(); logger.warn("Failure to execute metric_groups index request "+ myurl + " "+ e); } finally { if (urlConnection != null) { try { InputStream in = urlConnection.getInputStream(); String theString = convertStreamToString(in); for (Map.Entry<String, String> entry : metricgroupMap.entrySet()) { String checkName = entry.getKey(); try { String Rslt = groupFind(checkName, theString, 0); if(Rslt != null){ // Update it Rslt = Send_Commmand("/metric_groups/" + Rslt + ".json?show_hidden=1", "PUT", entry.getValue(),0); } else { // create it Rslt = Send_Commmand("/metric_groups.json", "POST", entry.getValue(),0); } if(Rslt != null) { if (Rslt.toLowerCase().contains("tomcat")) { if (Rslt.toLowerCase().contains("global")) { tomcat_global_groupID = Rslt; } else if(Rslt.toLowerCase().contains("servlet")) { tomcat_servlet_groupID = Rslt; } else if(Rslt.toLowerCase().contains("manager")) { tomcat_manager_groupID = Rslt; } else if(Rslt.toLowerCase().contains("website")) { tomcat_website_groupID = Rslt; } else 
if(Rslt.toLowerCase().contains("db")) { tomcat_db_groupID = Rslt; } } else if (Rslt.toLowerCase().contains("nonheap")){ nonheap_metric_groupID = Rslt; } else if (Rslt.toLowerCase().contains("heap")){ heap_metric_groupID = Rslt; } else if (Rslt.toLowerCase().contains("jvm")){ jvm_metric_groupID = Rslt; } else if (Rslt.toLowerCase().contains("jmxtrans")){ jmxtrans_metric_groupID = Rslt; } else { app_metric_groupID = Rslt; } } } catch (Exception e) { exceptionCounter.incrementAndGet(); logger.warn("Exception in metric_group update or create: "+ myurl + " "+ e); } } } catch (IOException e) { exceptionCounter.incrementAndGet(); logger.warn("Exception flushing http connection"+ e); } } } } /** * If dashboard doesn't exist, create it * If it does exist, update it. */ private void ensure_dashboards() { HttpURLConnection urlConnection = null; OutputStreamWriter wr = null; URL myurl = null; try { myurl = new URL(url_str + "/dashboards.json"); urlConnection = (HttpURLConnection) myurl.openConnection(); urlConnection.setRequestMethod("GET"); urlConnection.setDoInput(true); urlConnection.setDoOutput(true); urlConnection.setReadTimeout(coppereggApiTimeoutInMillis); urlConnection.setRequestProperty("content-type", "application/json; charset=utf-8"); urlConnection.setRequestProperty("Authorization", "Basic " + basicAuthentication); int responseCode = urlConnection.getResponseCode(); if (responseCode != 200) { logger.warn("Bad responsecode " + String.valueOf(responseCode)+ " from Dahsboards Index: " + myurl); } } catch (Exception e) { exceptionCounter.incrementAndGet(); logger.warn("Exception on dashboards index request "+ myurl + " "+ e); } finally { if (urlConnection != null) { try { InputStream in = urlConnection.getInputStream(); String theString = convertStreamToString(in); for (Map.Entry<String, String> entry : dashMap.entrySet()) { String checkName = entry.getKey(); try { String Rslt = groupFind(checkName, theString, 1); if(Rslt != null){ // Update it Rslt = 
Send_Commmand("/dashboards/" + Rslt + ".json", "PUT", entry.getValue(),1); } else { // create it Rslt = Send_Commmand("/dashboards.json", "POST", entry.getValue(),1); } } catch (Exception e) { exceptionCounter.incrementAndGet(); logger.warn("Exception in dashboard update or create: "+ myurl + " "+ e); } } } catch (IOException e) { exceptionCounter.incrementAndGet(); logger.warn("Exception flushing http connection"+ e); } } } } private String jparse(String jsonstr, Integer ExpectInt) { ObjectMapper mapper = new ObjectMapper(); String Result = null; try { JsonNode root = mapper.readTree(jsonstr); if(ExpectInt != 0) { int myid = root.get("id").asInt(); Result = String.valueOf(myid); } else { Result = root.get("id").asText().toString(); } } catch (JsonGenerationException e) { exceptionCounter.incrementAndGet(); logger.warn("JsonGenerationException "+ e); } catch (JsonMappingException e) { exceptionCounter.incrementAndGet(); logger.warn("JsonMappingException "+ e); } catch (IOException e) { exceptionCounter.incrementAndGet(); logger.warn("IOException "+ e); } return(Result); } public String convertStreamToString(InputStream is) throws IOException { // To convert the InputStream to String we use the // Reader.read(char[] buffer) method. We iterate until the // Reader return -1 which means there's no more data to // read. We use the StringWriter class to produce the string. if (is != null) { Writer writer = new StringWriter(); char[] buffer = new char[1024]; try { Reader reader = new BufferedReader( new InputStreamReader(is, "UTF-8")); int n; while ((n = reader.read(buffer)) != -1) { writer.write(buffer, 0, n); } } finally { is.close(); } return writer.toString(); } else { return ""; } } /** * read_config() * The copperegg_config.json file contains a specification for the metric groups and dashboards to be created / or updated. 
* Mandatory */ public void read_config(InputStream in) throws Exception { JsonFactory f = new MappingJsonFactory(); JsonParser jp = f.createJsonParser(in); JsonToken current; current = jp.nextToken(); if (current != JsonToken.START_OBJECT) { logger.warn("read_config: Error: START_OBJECT not found : quiting."); return; } current = jp.nextToken(); String fieldName = jp.getCurrentName(); current = jp.nextToken(); if (fieldName.equals("config")) { if (current != JsonToken.START_OBJECT) { logger.warn("read_config: Error: START_OBJECT not found after config : quiting."); return; } current = jp.nextToken(); String fieldName2 = jp.getCurrentName(); if (fieldName2.equals("metric_groups")) { current = jp.nextToken(); if (current != JsonToken.START_ARRAY) { logger.warn("read_config: Error: START_ARRAY not found after metric_groups : quiting."); return; } current = jp.nextToken(); while (current != JsonToken.END_ARRAY) { if (current != JsonToken.START_OBJECT) { logger.warn("read_config: Error: START_OBJECT not found after metric_groups START_ARRAY : quiting."); return; } current = jp.nextToken(); JsonNode node1 = jp.readValueAsTree(); String node1string = write_tostring(node1); metricgroupMap.put(node1.get("name").asText(),node1string); current = jp.nextToken(); } current = jp.nextToken(); String fieldName3 = jp.getCurrentName(); if (fieldName3.equals("dashboards")) { current = jp.nextToken(); if (current != JsonToken.START_ARRAY) { logger.warn("read_config: Error: START_ARRAY not found after dashboards : quiting."); return; } current = jp.nextToken(); while (current != JsonToken.END_ARRAY) { if (current != JsonToken.START_OBJECT) { logger.warn("read_config: Error: START_OBJECT not found after dashboards START_ARRAY : quiting."); return; } current = jp.nextToken(); JsonNode node = jp.readValueAsTree(); String nodestring = write_tostring(node); dashMap.put(node.get("name").asText(),nodestring); current = jp.nextToken(); } if(jp.nextToken() != JsonToken.END_OBJECT) { 
logger.warn("read_config: Error: END_OBJECT expected, not found (1): quiting."); return; } if(jp.nextToken() != JsonToken.END_OBJECT) { logger.warn("read_config: Error: END_OBJECT expected, not found (2): quiting."); return; } } else { logger.warn("read_config: Error: Expected dashboards : quiting."); return; } } else { logger.warn("read_config: Error: Expected metric_groups : quiting."); return; } } } public String groupFind(String findName, String findIndex, Integer ExpectInt) throws Exception { JsonFactory f = new MappingJsonFactory(); JsonParser jp = f.createJsonParser(findIndex); int count = 0; int foundit = 0; String Result = null; JsonToken current = jp.nextToken(); if (current != JsonToken.START_ARRAY) { logger.warn("groupFind: Error: START_ARRAY expected, not found : quiting."); return(Result); } current = jp.nextToken(); while (current != JsonToken.END_ARRAY) { if (current != JsonToken.START_OBJECT) { logger.warn("groupFind: Error: START_OBJECT expected, not found : quiting."); return(Result); } current = jp.nextToken(); JsonNode node = jp.readValueAsTree(); String tmpStr = node.get("name").asText().toString(); if(findName.equals(node.get("name").asText().toString())) { if(ExpectInt != 0) { foundit = node.get("id").asInt(); Result = String.valueOf(foundit); } else { Result = node.get("id").asText().toString(); } break; } current = jp.nextToken(); count = count + 1; } return(Result); } public String write_tostring(JsonNode json){ ObjectMapper mapper = new ObjectMapper(); StringWriter out = new StringWriter(); try { JsonFactory fac = new JsonFactory(); JsonGenerator gen = fac.createJsonGenerator(out); // Now write: mapper.writeTree(gen, json); gen.flush(); gen.close(); return out.toString(); } catch(Exception e) { exceptionCounter.incrementAndGet(); logger.warn("Exception in write_tostring: " + e); } return(null); } public String Send_Commmand(String command, String msgtype, String payload, Integer ExpectInt){ HttpURLConnection urlConnection = null; URL 
myurl = null; OutputStreamWriter wr = null; int responseCode = 0; String id = null; int error = 0; try { myurl = new URL(url_str + command); urlConnection = (HttpURLConnection) myurl.openConnection(); urlConnection.setRequestMethod(msgtype); urlConnection.setDoInput(true); urlConnection.setDoOutput(true); urlConnection.setReadTimeout(coppereggApiTimeoutInMillis); urlConnection.addRequestProperty("User-Agent", "Mozilla/4.76"); urlConnection.setRequestProperty("content-type", "application/json; charset=utf-8"); urlConnection.setRequestProperty("Authorization", "Basic " + basicAuthentication); wr = new OutputStreamWriter(urlConnection.getOutputStream(),"UTF-8"); wr.write(payload); wr.flush(); responseCode = urlConnection.getResponseCode(); if (responseCode != 200) { logger.warn("Send Command: Response code " + responseCode + " url is " + myurl + " command " + msgtype); error = 1; } } catch (Exception e) { exceptionCounter.incrementAndGet(); logger.warn("Exception in Send Command: url is " + myurl + " command " + msgtype + "; " + e); error = 1; } finally { if (urlConnection != null) { try { if( error > 0 ) { InputStream err = urlConnection.getErrorStream(); String errString = convertStreamToString(err); logger.warn("Reported error : " + errString); IoUtils2.closeQuietly(err); } else { InputStream in = urlConnection.getInputStream(); String theString = convertStreamToString(in); id = jparse(theString, ExpectInt); IoUtils2.closeQuietly(in); } } catch (IOException e) { exceptionCounter.incrementAndGet(); logger.warn("Exception in Send Command : flushing http connection " + e); } } if(wr != null) { try { wr.close(); } catch (IOException e) { exceptionCounter.incrementAndGet(); logger.warn("Exception in Send Command: closing OutputWriter " + e); } } } return(id); } }
package org.kohsuke.github;

import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.kohsuke.github.Previews.*;

/**
 * Builder to configure the branch protection settings.
 * <p>
 * Accumulates the desired protection rules (required status checks, required
 * pull-request reviews, push restrictions, admin enforcement) and applies them
 * all at once with a single {@code PUT} when {@link #enable()} is called.
 * Aspects that were never configured remain {@code null} and are serialized as
 * JSON {@code null}, which tells GitHub to leave that aspect disabled.
 *
 * @see GHBranch#enableProtection()
 */
@SuppressFBWarnings(value = { "UWF_UNWRITTEN_PUBLIC_OR_PROTECTED_FIELD", "UWF_UNWRITTEN_FIELD", "NP_UNWRITTEN_FIELD",
        "URF_UNREAD_FIELD" }, justification = "JSON API")
public class GHBranchProtectionBuilder {
    private final GHBranch branch;

    // Lazily-created sections of the protection payload; see class Javadoc.
    private boolean enforceAdmins;
    private Map<String, Object> prReviews;
    private Restrictions restrictions;
    private StatusChecks statusChecks;

    GHBranchProtectionBuilder(GHBranch branch) {
        this.branch = branch;
    }

    /**
     * Adds status-check contexts that must pass before a branch can be merged.
     *
     * @param checks context names as reported by the CI systems
     * @return this builder
     */
    public GHBranchProtectionBuilder addRequiredChecks(Collection<String> checks) {
        getStatusChecks().contexts.addAll(checks);
        return this;
    }

    /**
     * Varargs convenience overload of {@link #addRequiredChecks(Collection)}.
     *
     * @param checks context names as reported by the CI systems
     * @return this builder
     */
    public GHBranchProtectionBuilder addRequiredChecks(String... checks) {
        // Delegate and propagate the returned builder instead of discarding it.
        return addRequiredChecks(Arrays.asList(checks));
    }

    /**
     * Dismisses approving reviews automatically when a new commit is pushed.
     *
     * @return this builder
     */
    public GHBranchProtectionBuilder dismissStaleReviews() {
        return dismissStaleReviews(true);
    }

    /**
     * Sets whether approving reviews are dismissed when a new commit is pushed.
     *
     * @param v {@code true} to dismiss stale reviews
     * @return this builder
     */
    public GHBranchProtectionBuilder dismissStaleReviews(boolean v) {
        getPrReviews().put("dismiss_stale_reviews", v);
        return this;
    }

    /**
     * Applies the accumulated settings to the branch.
     *
     * @return the resulting protection configuration, wrapped for this branch
     * @throws IOException if the GitHub API call fails
     */
    public GHBranchProtection enable() throws IOException {
        return requester().method("PUT")
                .withNullable("required_status_checks", statusChecks)
                .withNullable("required_pull_request_reviews", prReviews)
                .withNullable("restrictions", restrictions)
                .withNullable("enforce_admins", enforceAdmins)
                .to(branch.getProtectionUrl().toString(), GHBranchProtection.class)
                .wrap(branch);
    }

    /**
     * Enforces all configured restrictions for administrators as well.
     *
     * @return this builder
     */
    public GHBranchProtectionBuilder includeAdmins() {
        return includeAdmins(true);
    }

    /**
     * Sets whether the configured restrictions also apply to administrators.
     *
     * @param v {@code true} to enforce restrictions for admins
     * @return this builder
     */
    public GHBranchProtectionBuilder includeAdmins(boolean v) {
        enforceAdmins = v;
        return this;
    }

    /**
     * Sets the number of approving reviews required before merging.
     *
     * @param v required number of approvals
     * @return this builder
     */
    public GHBranchProtectionBuilder requiredReviewers(int v) {
        getPrReviews().put("required_approving_review_count", v);
        return this;
    }

    /**
     * Requires the branch to be up to date with the base before merging.
     *
     * @return this builder
     */
    public GHBranchProtectionBuilder requireBranchIsUpToDate() {
        return requireBranchIsUpToDate(true);
    }

    /**
     * Sets whether the branch must be up to date with the base before merging.
     *
     * @param v {@code true} to require strict status checks
     * @return this builder
     */
    public GHBranchProtectionBuilder requireBranchIsUpToDate(boolean v) {
        getStatusChecks().strict = v;
        return this;
    }

    /**
     * Requires an approving review from a designated code owner.
     *
     * @return this builder
     */
    public GHBranchProtectionBuilder requireCodeOwnReviews() {
        return requireCodeOwnReviews(true);
    }

    /**
     * Sets whether an approving code-owner review is required.
     *
     * @param v {@code true} to require code-owner reviews
     * @return this builder
     */
    public GHBranchProtectionBuilder requireCodeOwnReviews(boolean v) {
        getPrReviews().put("require_code_owner_reviews", v);
        return this;
    }

    /**
     * Enables the pull-request-review requirement with default settings.
     *
     * @return this builder
     */
    public GHBranchProtectionBuilder requireReviews() {
        // Merely materializing the map turns the review requirement on.
        getPrReviews();
        return this;
    }

    /**
     * Restricts who may dismiss pull-request reviews; use the
     * {@code teamReviewDismissals}/{@code userReviewDismissals} methods to
     * populate the allowed set.
     *
     * @return this builder
     */
    public GHBranchProtectionBuilder restrictReviewDismissals() {
        getPrReviews();
        if (!prReviews.containsKey("dismissal_restrictions")) {
            prReviews.put("dismissal_restrictions", new Restrictions());
        }
        return this;
    }

    /**
     * Restricts who may push to this branch; use the
     * {@code teamPushAccess}/{@code userPushAccess} methods to populate the
     * allowed set.
     *
     * @return this builder
     */
    public GHBranchProtectionBuilder restrictPushAccess() {
        getRestrictions();
        return this;
    }

    /**
     * Grants push access to the given teams.
     *
     * @param teams teams allowed to push
     * @return this builder
     */
    public GHBranchProtectionBuilder teamPushAccess(Collection<GHTeam> teams) {
        for (GHTeam team : teams) {
            teamPushAccess(team);
        }
        return this;
    }

    /**
     * Grants push access to the given teams.
     *
     * @param teams teams allowed to push
     * @return this builder
     */
    public GHBranchProtectionBuilder teamPushAccess(GHTeam... teams) {
        for (GHTeam team : teams) {
            getRestrictions().teams.add(team.getSlug());
        }
        return this;
    }

    /**
     * Allows the given teams to dismiss pull-request reviews.
     *
     * @param teams teams allowed to dismiss reviews
     * @return this builder
     */
    public GHBranchProtectionBuilder teamReviewDismissals(Collection<GHTeam> teams) {
        for (GHTeam team : teams) {
            teamReviewDismissals(team);
        }
        return this;
    }

    /**
     * Allows the given teams to dismiss pull-request reviews.
     *
     * @param teams teams allowed to dismiss reviews
     * @return this builder
     */
    public GHBranchProtectionBuilder teamReviewDismissals(GHTeam... teams) {
        for (GHTeam team : teams) {
            addReviewRestriction(team.getSlug(), true);
        }
        return this;
    }

    /**
     * Grants push access to the given users.
     *
     * @param users users allowed to push
     * @return this builder
     */
    public GHBranchProtectionBuilder userPushAccess(Collection<GHUser> users) {
        for (GHUser user : users) {
            userPushAccess(user);
        }
        return this;
    }

    /**
     * Grants push access to the given users.
     *
     * @param users users allowed to push
     * @return this builder
     */
    public GHBranchProtectionBuilder userPushAccess(GHUser... users) {
        for (GHUser user : users) {
            getRestrictions().users.add(user.getLogin());
        }
        return this;
    }

    /**
     * Allows the given users to dismiss pull-request reviews.
     *
     * @param users users allowed to dismiss reviews
     * @return this builder
     */
    public GHBranchProtectionBuilder userReviewDismissals(Collection<GHUser> users) {
        // Fixed misleading loop variable name ("team" for a GHUser).
        for (GHUser user : users) {
            userReviewDismissals(user);
        }
        return this;
    }

    /**
     * Allows the given users to dismiss pull-request reviews.
     *
     * @param users users allowed to dismiss reviews
     * @return this builder
     */
    public GHBranchProtectionBuilder userReviewDismissals(GHUser... users) {
        for (GHUser user : users) {
            addReviewRestriction(user.getLogin(), false);
        }
        return this;
    }

    /** Records a single team/user slug in the dismissal-restriction lists. */
    private void addReviewRestriction(String restriction, boolean isTeam) {
        restrictReviewDismissals();
        Restrictions restrictions = (Restrictions) prReviews.get("dismissal_restrictions");

        if (isTeam) {
            restrictions.teams.add(restriction);
        } else {
            restrictions.users.add(restriction);
        }
    }

    /** Lazily creates the pull-request-review settings map. */
    private Map<String, Object> getPrReviews() {
        if (prReviews == null) {
            prReviews = new HashMap<>();
        }
        return prReviews;
    }

    /** Lazily creates the push-restriction settings. */
    private Restrictions getRestrictions() {
        if (restrictions == null) {
            restrictions = new Restrictions();
        }
        return restrictions;
    }

    /** Lazily creates the required-status-check settings. */
    private StatusChecks getStatusChecks() {
        if (statusChecks == null) {
            statusChecks = new StatusChecks();
        }
        return statusChecks;
    }

    /** Builds a requester carrying the preview media type this API requires. */
    private Requester requester() {
        return new Requester(branch.getRoot()).withPreview(LUKE_CAGE);
    }

    /** JSON shape for GitHub's team/user restriction objects. */
    private static class Restrictions {
        private Set<String> teams = new HashSet<>();
        private Set<String> users = new HashSet<>();
    }

    /** JSON shape for GitHub's required-status-checks object. */
    private static class StatusChecks {
        final List<String> contexts = new ArrayList<>();
        boolean strict;
    }
}
package org.motechproject.ws.server;

import java.util.Date;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebService;
import org.motechproject.ws.ContactNumberType;
import org.motechproject.ws.Gender;
import org.motechproject.ws.LogType;

/**
 * The service endpoint interface for the major motech server web service
 * endpoint. The annotations allow for minimal configuration deployment of
 * JAX-WS endpoints and clients, with appropriate behavior and metadata.
 */
@WebService
public interface RegistrarService {

    /**
     * Registers a newly born child against an already-registered mother.
     * <p>
     * NOTE(review): parameter semantics below are inferred from names only —
     * confirm against the server-side implementation.
     *
     * @param chpsId        id of the reporting health worker — presumably a
     *                      CHPS (community health) id; TODO confirm
     * @param regDate       date the registration was recorded
     * @param motherRegNum  registration number of the mother
     * @param childRegNum   registration number assigned to the child
     * @param childDob      child's date of birth
     * @param childGender   child's gender
     * @param childFirstName child's first name
     * @param nhis          national health insurance number — assumed NHIS;
     *                      TODO confirm
     * @param nhisExpires   expiry date of the insurance coverage
     */
    @WebMethod
    public void registerChild(@WebParam(name = "chpsId") String chpsId,
            @WebParam(name = "regDate") Date regDate,
            @WebParam(name = "motherRegNum") String motherRegNum,
            @WebParam(name = "childRegNum") String childRegNum,
            @WebParam(name = "childDob") Date childDob,
            @WebParam(name = "childGender") Gender childGender,
            @WebParam(name = "childFirstName") String childFirstName,
            @WebParam(name = "nhis") String nhis,
            @WebParam(name = "nhisExpires") Date nhisExpires);

    /**
     * Updates contact and insurance details for an existing patient.
     *
     * @param chpsId             id of the reporting health worker
     * @param patientRegNum      registration number identifying the patient
     * @param primaryPhone       primary contact number
     * @param primaryPhoneType   kind of the primary number (e.g. personal/shared)
     * @param secondaryPhone     secondary contact number
     * @param secondaryPhoneType kind of the secondary number
     * @param nhis               insurance number
     * @param nhisExpires        expiry date of the insurance coverage
     */
    @WebMethod
    public void editPatient(
            @WebParam(name = "chpsId") String chpsId,
            @WebParam(name = "patientRegNum") String patientRegNum,
            @WebParam(name = "primaryPhone") String primaryPhone,
            @WebParam(name = "primaryPhoneType") ContactNumberType primaryPhoneType,
            @WebParam(name = "secondaryPhone") String secondaryPhone,
            @WebParam(name = "secondaryPhoneType") ContactNumberType secondaryPhoneType,
            @WebParam(name = "nhis") String nhis,
            @WebParam(name = "nhisExpires") Date nhisExpires);

    /**
     * Stops the pregnancy program for the given patient.
     *
     * @param chpsId        id of the reporting health worker
     * @param patientRegNum registration number identifying the patient
     */
    @WebMethod
    public void stopPregnancyProgram(@WebParam(name = "chpsId") String chpsId,
            @WebParam(name = "patientRegNum") String patientRegNum);

    /**
     * Records a general clinic visit.
     *
     * @param clinicId         id of the clinic where the visit took place
     * @param visitDate        date of the visit
     * @param patientSerial    clinic-local serial identifying the patient
     * @param patientGender    patient's gender
     * @param patientBirthDate patient's date of birth
     * @param patientDiagnosis diagnosis code — numeric coding scheme not
     *                         visible here; TODO confirm
     * @param patientReferral  whether the patient was referred elsewhere
     * @throws ValidationException if the submitted visit data is invalid
     */
    @WebMethod
    public void recordGeneralVisit(
            @WebParam(name = "clinicId") Integer clinicId,
            @WebParam(name = "visitDate") Date visitDate,
            @WebParam(name = "patientSerial") String patientSerial,
            @WebParam(name = "patientGender") Gender patientGender,
            @WebParam(name = "patientBirthDate") Date patientBirthDate,
            @WebParam(name = "patientDiagnosis") Integer patientDiagnosis,
            @WebParam(name = "patientReferral") Boolean patientReferral)
            throws ValidationException;

    /**
     * Writes a log entry on the server.
     *
     * @param type    severity/category of the entry
     * @param message text to record
     */
    @WebMethod
    public void log(@WebParam(name = "type") LogType type,
            @WebParam(name = "message") String message);

    /**
     * Reports the delivery outcome of a previously sent message.
     *
     * @param messageId id of the message whose status is being reported
     * @param success   whether delivery succeeded
     */
    @WebMethod
    public void setMessageStatus(
            @WebParam(name = "messageId") String messageId,
            @WebParam(name = "success") Boolean success);
}
// copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // persons to whom the Software is furnished to do so, subject to the // notice shall be included in all copies or substantial portions of the // Software. // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. package phasereditor.project.core; import static java.lang.System.currentTimeMillis; import static java.lang.System.out; import static phasereditor.ui.PhaserEditorUI.swtRun; import java.io.IOException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Collection; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.function.Consumer; import org.eclipse.core.resources.IContainer; import org.eclipse.core.resources.IFile; import org.eclipse.core.resources.IFolder; import org.eclipse.core.resources.IMarker; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.IResourceDelta; import org.eclipse.core.resources.IResourceVisitor; import org.eclipse.core.resources.IWorkspace; import org.eclipse.core.resources.IWorkspaceRoot; import org.eclipse.core.resources.IncrementalProjectBuilder; import org.eclipse.core.resources.ProjectScope; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IConfigurationElement; import org.eclipse.core.runtime.IExtensionPoint; import org.eclipse.core.runtime.IPath; import 
org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.ListenerList;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.eclipse.core.runtime.Platform;
import org.eclipse.core.runtime.QualifiedName;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.preference.IPreferenceStore;
import org.eclipse.swt.graphics.Point;
import org.eclipse.ui.IWorkbenchPage;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.browser.IWebBrowser;
import org.eclipse.ui.browser.IWorkbenchBrowserSupport;
import org.eclipse.ui.ide.IDE;
import org.eclipse.ui.preferences.ScopedPreferenceStore;
import org.eclipse.ui.progress.UIJob;
import org.eclipse.ui.statushandlers.StatusManager;
import phasereditor.inspect.core.IProjectTemplate;
import phasereditor.project.core.codegen.SourceLang;

/**
 * Core utilities for Phaser Editor projects: active-project tracking, project
 * layout discovery (WebContent/Design folders), build-participant loading,
 * per-project preferences, source-language settings and misc helpers.
 *
 * <p>
 * All members are static; this class is never instantiated.
 */
public class ProjectCore {

	protected static final String PLUGIN_ID = Activator.PLUGIN_ID;

	public static final String PHASER_PROJECT_NATURE = PLUGIN_ID + ".nature";
	public static final String GLOBAL_SCOPE_INITIALIZER_ID = PLUGIN_ID + ".globalScope";
	public static final String BROWSER_SCOPE_INITIALIZER_ID = PLUGIN_ID + ".browserscope";
	public static final String ECMA5_SCOPE_INITIALIZER_ID = PLUGIN_ID + ".ecma5scope";
	public static final String PHASER_BUILDER_ID = PLUGIN_ID + ".builder";
	public static final String PHASER_PROBLEM_MARKER_ID = PLUGIN_ID + ".problem";

	// Persistent-property key holding the project source language.
	private static final QualifiedName PROJECT_LANG = new QualifiedName("phasereditor.project.core", "lang");

	public static final String PREF_PROP_PROJECT_GAME_WIDTH = "phasereditor.project.ui.gameWidth";
	public static final String PREF_PROP_PROJECT_GAME_HEIGHT = "phasereditor.project.ui.gameHeight";
	public static final String PREF_PROP_PROJECT_WIZARD_LANGUAJE = "phasereditor.project.ui.projectWizardLang";

	private static IProject _activeProject;
	private static final ListenerList<Consumer<IProject>> _activeProjectListeners = new ListenerList<>();

	// Workspace-root property remembering the last active project name.
	private static final QualifiedName ACTIVE_PROJECT = new QualifiedName("phasereditor.project.core", "activeProject");

	// NOTE(review): the local name ("activeProject") is identical to
	// ACTIVE_PROJECT — almost certainly a copy/paste slip (one would expect
	// "openTime"). It is harmless at runtime because this property is stored on
	// individual projects while ACTIVE_PROJECT is stored on the workspace root,
	// and renaming the key now would discard already-persisted open times, so it
	// is intentionally left unchanged.
	private static final QualifiedName OPEN_TIME_PROJECT = new QualifiedName("phasereditor.project.core",
			"activeProject");

	/**
	 * The currently active project, restored lazily from the workspace-root
	 * persistent property on first access. May be <code>null</code>.
	 */
	public static IProject getActiveProject() {
		if (_activeProject == null) {
			restoreActiveProject();
		}
		return _activeProject;
	}

	/** Reads the persisted active-project name and resolves it, if it still exists. */
	private static void restoreActiveProject() {
		try {
			var root = ResourcesPlugin.getWorkspace().getRoot();
			var name = root.getPersistentProperty(ACTIVE_PROJECT);
			if (name != null) {
				var project = root.getProject(name);
				if (project.exists()) {
					_activeProject = project;
				}
			}
		} catch (CoreException e) {
			logError(e);
		}
	}

	/**
	 * Sets the active project, persists its name on the workspace root, stamps
	 * the project with the current time (used by
	 * {@link #getProjectOpenTimeComparator()}) and notifies listeners.
	 *
	 * @param activeProject
	 *            The new active project, may be <code>null</code>.
	 */
	public static void setActiveProject(IProject activeProject) {
		_activeProject = activeProject;
		try {
			var root = ResourcesPlugin.getWorkspace().getRoot();
			if (activeProject != null) {
				root.setPersistentProperty(ACTIVE_PROJECT, activeProject.getName());
				activeProject.setPersistentProperty(OPEN_TIME_PROJECT, Long.toString(currentTimeMillis()));
			}
		} catch (CoreException e) {
			logError(e);
		}
		for (var l : _activeProjectListeners) {
			l.accept(activeProject);
		}
	}

	/**
	 * Comparator ordering projects by open time, most recently opened first.
	 * Projects without a recorded open time sort last.
	 */
	public static Comparator<IProject> getProjectOpenTimeComparator() {
		return new Comparator<>() {

			@Override
			public int compare(IProject o1, IProject o2) {
				// FIX: the stored value is currentTimeMillis() as a string; it
				// must be compared numerically. The previous String comparison
				// was lexicographic (e.g. "9..." > "10...").
				return Long.compare(openTime(o2), openTime(o1));
			}

			private long openTime(IProject project) {
				try {
					var value = project.getPersistentProperty(OPEN_TIME_PROJECT);
					return value == null ? 0L : Long.parseLong(value);
				} catch (CoreException | NumberFormatException e) {
					logError(e);
					return 0L;
				}
			}
		};
	}

	public static void addActiveProjectListener(Consumer<IProject> listener) {
		_activeProjectListeners.add(listener);
	}

	public static void removeActiveProjectListener(Consumer<IProject> listener) {
		_activeProjectListeners.remove(listener);
	}

	/** The plugin-level preference store. */
	public static IPreferenceStore getPreferenceStore() {
		return Activator.getDefault().getPreferenceStore();
	}

	/**
	 * Loads the build participants contributed to the
	 * <code>phasereditor.project.core.buildParticipant</code> extension point,
	 * sorted by their numeric <code>order</code> attribute (ascending).
	 */
	public static List<IProjectBuildParticipant> getBuildParticipants() {
		List<IProjectBuildParticipant> list = new ArrayList<>();
		IExtensionPoint point = Platform.getExtensionRegistry()
				.getExtensionPoint("phasereditor.project.core.buildParticipant");
		Map<IProjectBuildParticipant, String> orderMap = new HashMap<>();
		for (IConfigurationElement element : point.getConfigurationElements()) {
			try {
				IProjectBuildParticipant participant = (IProjectBuildParticipant) element
						.createExecutableExtension("handler");
				list.add(participant);
				String order = element.getAttribute("order");
				orderMap.put(participant, order);
			} catch (Exception e) {
				ProjectCore.logError(e);
			}
		}
		list.sort((a, b) -> {
			try {
				// Unparsable/missing order attributes keep their relative order.
				return Double.compare(Double.parseDouble(orderMap.get(a)), Double.parseDouble(orderMap.get(b)));
			} catch (Exception e) {
				return 0;
			}
		});
		return list;
	}

	/**
	 * The "Design" folder of the project, or the project root when that folder
	 * does not exist.
	 */
	public static IPath getDesignPath(IProject project) {
		IPath path = project.getFullPath();
		IContainer folder = project.getFolder("Design");
		if (folder.exists()) {
			path = folder.getFullPath();
		}
		return path;
	}

	/**
	 * Returns the source folder of the specified project.
	 *
	 * <p>
	 * Prefers the "WebContent" folder; when it is missing, falls back to the
	 * parent of the first <code>index.html</code> found in the project, and
	 * finally to the project itself.
	 *
	 * @param project
	 *            the project which source path is needed
	 * @return IPath of the source folder
	 */
	public static IPath getWebContentPath(IProject project) {
		IContainer folder = project.getFolder("WebContent");
		if (!folder.exists()) {
			// default to project, but look for index.html
			IContainer[] result = { null };
			try {
				project.accept(new IResourceVisitor() {

					@Override
					public boolean visit(IResource resource) throws CoreException {
						if (result[0] != null) {
							// Already found; prune the rest of the tree.
							return false;
						}
						if (resource instanceof IFile && resource.getName().equals("index.html")) {
							result[0] = ((IFile) resource).getParent();
							return false;
						}
						return true;
					}
				});
				// FIX: the result slot was previously seeded with the project
				// itself, so the visitor returned false on the very first
				// resource and the index.html lookup never ran.
				folder = result[0] == null ? project : result[0];
			} catch (CoreException e) {
				throw new RuntimeException(e);
			}
		}
		return folder.getFullPath();
	}

	/**
	 * Get the source folder of given the Phaser project.
	 *
	 * @param project
	 *            The project.
	 * @return The source folder, or the project itself when no dedicated source
	 *         folder is found.
	 */
	public static IContainer getWebContentFolder(IProject project) {
		IPath path = getWebContentPath(project);
		if (path.equals(project.getFullPath())) {
			return project;
		}
		IWorkspaceRoot root = project.getWorkspace().getRoot();
		return root.getFolder(path);
	}

	/** The file's URL relative to its project's source folder. */
	public static String getAssetUrl(IFile file) {
		return getAssetUrl(file.getProject(), file.getFullPath());
	}

	/** The path's URL relative to the project's source folder, in portable form. */
	public static String getAssetUrl(IProject project, IPath assetFullPath) {
		IContainer assetsFolder = ProjectCore.getWebContentFolder(project);
		return assetFullPath.makeRelativeTo(assetsFolder.getFullPath()).toPortableString();
	}

	/**
	 * The "assets" folder inside the source folder, or the source folder itself
	 * when no such folder exists.
	 */
	public static IPath getAssetsPath(IProject project) {
		IPath webpath = getWebContentPath(project);
		IResource member = ResourcesPlugin.getWorkspace().getRoot().findMember(webpath.append("assets"));
		// FIX: findMember() returns null when the resource does not exist; the
		// previous code dereferenced it unconditionally. instanceof is
		// null-safe, so check it first.
		if (member instanceof IFolder && member.exists()) {
			return member.getFullPath();
		}
		return webpath;
	}

	public static boolean isPhaserProject(IProject project) {
		return PhaserProjectNature.hasNature(project);
	}

	/** All open-workspace projects carrying the Phaser nature. */
	public static List<IProject> getPhaserProjects() {
		List<IProject> list = new ArrayList<>();
		for (IProject project : ResourcesPlugin.getWorkspace().getRoot().getProjects()) {
			if (isPhaserProject(project)) {
				list.add(project);
			}
		}
		return list;
	}

	/**
	 * Configures a freshly created project as a Phaser project: creates the
	 * WebContent/Design folders, copies the template, records the language,
	 * adds the nature, triggers a clean build and switches to the matching
	 * perspective.
	 *
	 * @param project
	 *            The (already existing, empty) project.
	 * @param template
	 *            The template to copy into WebContent.
	 * @param paramValues
	 *            Template substitution parameters.
	 * @param lang
	 *            Source language to record on the project.
	 * @param monitor
	 *            Progress monitor; reports 5 units of work.
	 */
	public static void configureNewPhaserProject(IProject project, IProjectTemplate template,
			Map<String, String> paramValues, SourceLang lang, IProgressMonitor monitor) throws CoreException {
		ProjectCore.setActiveProject(project);

		var nullMonitor = new NullProgressMonitor();
		monitor.beginTask("Copying template content.", 5);

		// The main file can only be opened once the first build finished.
		PhaserProjectBuilder.setActionAfterFirstBuild(project, () -> openTemplateMainFileInEditor(project, template));

		IFolder webContentFolder = project.getFolder("WebContent");
		webContentFolder.create(true, true, nullMonitor);
		monitor.worked(1);

		IFolder folder = project.getFolder("Design");
		folder.create(true, true, nullMonitor);
		monitor.worked(1);

		template.copyInto(webContentFolder, paramValues, nullMonitor);
		monitor.worked(1);

		setProjectLanguage(project, lang);

		// Primes the per-project preference store cache; the returned size is
		// intentionally discarded.
		ProjectCore.getProjectSceneSize(project);

		PhaserProjectNature.addPhaserNature(project, nullMonitor);
		monitor.worked(1);

		project.build(IncrementalProjectBuilder.CLEAN_BUILD, nullMonitor);
		monitor.worked(1);

		{
			// Scene-based templates open in the visual-editor perspective,
			// everything else in the code perspective.
			var perspId = "phasereditor.ide.code";
			var file = template.getOpenFile(webContentFolder);
			if (file != null && file.getName().endsWith(".scene")) {
				perspId = "phasereditor.ide.ui.perspective";
			}
			var finalPerspId = perspId;
			swtRun(() -> {
				var workbench = PlatformUI.getWorkbench();
				var page = workbench.getActiveWorkbenchWindow().getActivePage();
				page.setPerspective(workbench.getPerspectiveRegistry().findPerspectiveWithId(finalPerspId));
			});
		}
	}

	/**
	 * Returns the project that contains the specified path
	 *
	 * @param path
	 *            the path which project is needed
	 * @return IProject object. If path is <code>null</code> the return value is
	 *         also <code>null</code>.
	 */
	public static IProject getProjectFromPath(IPath path) {
		IWorkspace workspace = ResourcesPlugin.getWorkspace();
		IProject project = null;
		if (path != null) {
			if (workspace.validatePath(path.toString(), IResource.PROJECT).isOK()) {
				project = workspace.getRoot().getProject(path.toString());
			} else {
				project = workspace.getRoot().getFile(path).getProject();
			}
		}
		return project;
	}

	/** Logs to stderr and forwards the error to the Eclipse status manager. */
	public static void logError(Exception e) {
		e.printStackTrace();
		StatusManager.getManager().handle(new Status(IStatus.ERROR, PLUGIN_ID, e.getMessage(), e));
	}

	/**
	 * Test if the file is part of the web content tree.
	 */
	public static boolean isWebContentFile(IFile file) {
		IContainer webContentFolder = getWebContentFolder(file.getProject());
		return webContentFolder.getFullPath().isPrefixOf(file.getFullPath());
	}

	/** Deletes markers of the given type on the resource, swallowing (logged) errors. */
	public static void deleteResourceMarkers(IResource resource, String type) {
		try {
			resource.deleteMarkers(type, true, IResource.DEPTH_INFINITE);
		} catch (CoreException e) {
			logError(e);
		}
	}

	/** Whether the file carries any Phaser problem marker. */
	public static boolean hasProblems(IFile file) {
		try {
			IMarker[] markers = file.findMarkers(PHASER_PROBLEM_MARKER_ID, true, IResource.DEPTH_INFINITE);
			return markers.length > 0;
		} catch (CoreException e) {
			logError(e);
		}
		return false;
	}

	/**
	 * Creates a problem marker on the resource from the given status. ERROR
	 * statuses map to error markers, everything else to warnings.
	 */
	public static IMarker createErrorMarker(String type, IStatus status, IResource resource) {
		try {
			int severity;
			switch (status.getSeverity()) {
			case IStatus.ERROR:
				severity = IMarker.SEVERITY_ERROR;
				break;
			default:
				severity = IMarker.SEVERITY_WARNING;
				break;
			}
			IMarker marker = resource.createMarker(type);
			marker.setAttribute(IMarker.SEVERITY, severity);
			marker.setAttribute(IMarker.MESSAGE, status.getMessage());
			marker.setAttribute(IMarker.LOCATION, resource.getProject().getName());
			return marker;
		} catch (CoreException e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * Opens the template's main file in an editor (on the UI thread, slightly
	 * delayed) and optionally offers to open the template's URL in an external
	 * browser.
	 */
	static void openTemplateMainFileInEditor(IProject project, IProjectTemplate template) {
		IFolder webContentFolder = project.getFolder("WebContent");
		IFile file = template.getOpenFile(webContentFolder);
		if (file != null) {
			out.println("Opening project main file: " + file);
			new UIJob(PlatformUI.getWorkbench().getDisplay(), "Opening project main file") {

				@Override
				public IStatus runInUIThread(IProgressMonitor monitor) {
					IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow();
					IWorkbenchPage page = window.getActivePage();
					try {
						IDE.openEditor(page, file);
					} catch (PartInitException e) {
						e.printStackTrace();
						throw new RuntimeException(e);
					}
					String url = template.getInfo().getUrl();
					if (url != null) {
						if (MessageDialog.openQuestion(window.getShell(), "Open URL",
								"Do you want to open the template url?")) {
							try {
								IWorkbenchBrowserSupport support = PlatformUI.getWorkbench().getBrowserSupport();
								IWebBrowser browser = support.getExternalBrowser();
								browser.openURL(new URL(url));
							} catch (Exception e) {
								e.printStackTrace();
								throw new RuntimeException(e);
							}
						}
					}
					return Status.OK_STATUS;
				}
			}.schedule(500);
		}
	}

	/** Whether the project has at least one Phaser problem marker at ERROR severity. */
	public static boolean hasErrors(IProject project) throws CoreException {
		int severity = project.findMaxProblemSeverity(ProjectCore.PHASER_PROBLEM_MARKER_ID, true,
				IResource.DEPTH_INFINITE);
		return severity == IMarker.SEVERITY_ERROR;
	}

	/**
	 * Let's say that a TypeScript project is that one with a
	 * <code>tsconfig.json</code> file in the WebContent folder.
	 *
	 * @param project
	 *            The project to test.
	 * @return If the project is a TypeScript one.
	 */
	public static boolean isTypeScriptProject(IProject project) {
		IContainer folder = getWebContentFolder(project);
		IFile tsconfig = folder.getFile(new Path("tsconfig.json"));
		return tsconfig.exists();
	}

	/** Persists the project's source language as a persistent property. */
	public static void setProjectLanguage(IProject project, SourceLang lang) {
		try {
			project.setPersistentProperty(PROJECT_LANG, lang.name());
		} catch (CoreException e) {
			throw new RuntimeException(e);
		}
	}

	/**
	 * The project's source language, defaulting to
	 * {@link SourceLang#JAVA_SCRIPT_6} when unset or unreadable.
	 */
	public static SourceLang getProjectLanguage(IProject project) {
		try {
			Map<QualifiedName, String> props = project.getPersistentProperties();
			String name = props.getOrDefault(PROJECT_LANG, SourceLang.JAVA_SCRIPT_6.name());
			return SourceLang.valueOf(name);
		} catch (Exception e) {
			logError(e);
			return SourceLang.JAVA_SCRIPT_6;
		}
	}

	/** Resolves the path to a resource handle and returns its project's language. */
	public static SourceLang getProjectLanguage(IPath path) {
		IResource res = ResourcesPlugin.getWorkspace().getRoot().getFolder(path);
		if (res == null) {
			res = ResourcesPlugin.getWorkspace().getRoot().getFile(path);
		}
		if (res == null) {
			return SourceLang.JAVA_SCRIPT_6;
		}
		IProject project = res.getProject();
		return getProjectLanguage(project);
	}

	/** Scene size for the project owning the given path. */
	public static Point getProjectSceneSize(IPath path) {
		var res = ResourcesPlugin.getWorkspace().getRoot().getFolder(path);
		IProject project = res == null ? null : res.getProject();
		return getProjectSceneSize(project);
	}

	// Cache of per-project preference stores. NOTE(review): entries are never
	// evicted when a project is closed/deleted; harmless leak, kept as-is.
	private static Map<IProject, IPreferenceStore> _projectPrefMap = new HashMap<>();

	/** Lazily creates (and caches) the project-scoped preference store. */
	public static IPreferenceStore getProjectPreferenceStore(IProject project) {
		if (_projectPrefMap.containsKey(project)) {
			return _projectPrefMap.get(project);
		}
		var store = new ScopedPreferenceStore(new ProjectScope(project), "phasereditor.project.core");
		_projectPrefMap.put(project, store);
		return store;
	}

	/**
	 * The game scene size: project-scoped preferences when set, otherwise the
	 * plugin-wide defaults.
	 *
	 * @param project
	 *            The project, may be <code>null</code> (plugin defaults only).
	 */
	public static Point getProjectSceneSize(IProject project) {
		var width = getPreferenceStore().getInt(PREF_PROP_PROJECT_GAME_WIDTH);
		var height = getPreferenceStore().getInt(PREF_PROP_PROJECT_GAME_HEIGHT);
		if (project != null) {
			var projectStore = getProjectPreferenceStore(project);
			// Width acts as the sentinel: if it is set, both values are taken
			// from the project store.
			if (projectStore.contains(PREF_PROP_PROJECT_GAME_WIDTH)) {
				width = projectStore.getInt(PREF_PROP_PROJECT_GAME_WIDTH);
				height = projectStore.getInt(PREF_PROP_PROJECT_GAME_HEIGHT);
			}
		}
		return new Point(width, height);
	}

	/** The language pre-selected in the new-project wizard. */
	public static SourceLang getDefaultProjectLanguage() {
		var str = getPreferenceStore().getString(PREF_PROP_PROJECT_WIZARD_LANGUAJE);
		return SourceLang.valueOf(str);
	}

	/** Stores the scene size in the project-scoped preferences. */
	public static void setProjectSceneSize(IProject project, int width, int height) {
		var store = getProjectPreferenceStore(project);
		store.putValue(PREF_PROP_PROJECT_GAME_WIDTH, Integer.toString(width));
		store.putValue(PREF_PROP_PROJECT_GAME_HEIGHT, Integer.toString(height));
	}

	/**
	 * Whether any of the given files is touched by the delta, either directly
	 * or as the source/target of a move. Null entries in the collection are
	 * skipped.
	 */
	public static boolean areFilesAffectedByDelta(IResourceDelta delta, Collection<IFile> files) {
		boolean[] touched = { false };
		for (IFile used : files) {
			if (used == null) {
				continue;
			}
			try {
				delta.accept(d -> {
					IResource resource = d.getResource();
					if (used.equals(resource)) {
						touched[0] = true;
						return false;
					}
					IPath movedTo = d.getMovedToPath();
					IPath movedFrom = d.getMovedFromPath();
					if (movedTo != null) {
						if (used.getFullPath().equals(movedTo)) {
							touched[0] = true;
							return false;
						}
					}
					if (movedFrom != null) {
						if (used.getFullPath().equals(movedFrom)) {
							touched[0] = true;
							return false;
						}
					}
					return true;
				});
			} catch (CoreException e) {
				e.printStackTrace();
			}
			if (touched[0]) {
				return true;
			}
		}
		return touched[0];
	}

	public enum OS {
		WINDOWS, LINUX, MAC
	}

	private static OS _os;
	private static java.nio.file.Path _userFolderPath;

	/** Detects (once) the host operating system from the os.name property. */
	public static OS getOS() {
		if (_os == null) {
			String osname = System.getProperty("os.name").toLowerCase();
			if (osname.contains("windows")) {
				_os = OS.WINDOWS;
			} else if (osname.contains("mac")) {
				_os = OS.MAC;
			} else {
				_os = OS.LINUX;
			}
		}
		return _os;
	}

	/**
	 * The per-user cache folder (created on demand):
	 * <code>~/Library/Caches/com.phasereditor2d</code> on macOS,
	 * <code>~/.phasereditor</code> elsewhere.
	 */
	public static java.nio.file.Path getUserCacheFolder() {
		if (_userFolderPath == null) {
			String home = System.getProperty("user.home");
			java.nio.file.Path homePath = Paths.get(home);
			java.nio.file.Path dir;
			if (getOS() == OS.MAC) {
				dir = homePath.resolve("Library/Caches/com.phasereditor2d");
			} else {
				dir = homePath.resolve(".phasereditor");
			}
			_userFolderPath = dir;
		}
		try {
			Files.createDirectories(_userFolderPath);
			return _userFolderPath;
		} catch (IOException e) {
			throw new RuntimeException(e);
		}
	}

	/** Installs the plugin-wide preference defaults. */
	public static void setDefaultPreferences() {
		var store = getPreferenceStore();
		store.setDefault(ProjectCore.PREF_PROP_PROJECT_GAME_WIDTH, 800);
		store.setDefault(ProjectCore.PREF_PROP_PROJECT_GAME_HEIGHT, 450);
		store.setDefault(ProjectCore.PREF_PROP_PROJECT_WIZARD_LANGUAJE, SourceLang.JAVA_SCRIPT_6.name());
	}
}
package com.splicemachine.derby.test.framework;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;

import com.splicemachine.derby.impl.db.AuthenticationConstants;
import org.apache.commons.dbutils.DbUtils;
import org.apache.log4j.Logger;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;

import com.splicemachine.constants.SpliceConstants;

/**
 * JUnit rule that drops and re-creates a database user before each test, so
 * tests always start with a user in a known state. User management is skipped
 * entirely when authentication is delegated to LDAP (users are external then).
 */
public class SpliceUserWatcher extends TestWatcher {
    private static final Logger LOG = Logger.getLogger(SpliceUserWatcher.class);

    public String userName;
    public String password;

    public SpliceUserWatcher(String userName, String password) {
        this.userName = userName;
        this.password = password;
    }

    @Override
    protected void starting(Description description) {
        // FIX: removed dead connection/statement/rs locals and the finally
        // block that only closed nulls — all JDBC work happens (and is closed)
        // inside dropAndCreateUser().
        try {
            dropAndCreateUser(userName, password);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        super.starting(description);
    }

    @Override
    protected void finished(Description description) {
        LOG.trace("Finished");
    }

    /**
     * Creates the user via the syscs_create_user system procedure. No-op under
     * LDAP authentication.
     */
    public void createUser(String userName, String password) {
        // FIX: equalsIgnoreCase with the literal first is null-safe; the old
        // authentication.toUpperCase() call NPE'd when the property was unset.
        if (!"LDAP".equalsIgnoreCase(AuthenticationConstants.authentication)) {
            Connection connection = null;
            PreparedStatement statement = null;
            try {
                connection = SpliceNetConnection.getConnection();
                statement = connection.prepareStatement("call syscs_util.syscs_create_user(?,?)");
                statement.setString(1, userName);
                statement.setString(2, password);
                statement.execute();
            } catch (Exception e) {
                LOG.error("error Creating " + e.getMessage());
                e.printStackTrace();
                throw new RuntimeException(e);
            } finally {
                DbUtils.closeQuietly(statement);
                DbUtils.commitAndCloseQuietly(connection);
            }
        }
    }

    /**
     * Drops the user via syscs_drop_user, but only if it exists in
     * sys.sysusers (dropping a missing user would fail). No-op under LDAP
     * authentication.
     */
    public void dropUser(String userName) {
        if (!"LDAP".equalsIgnoreCase(AuthenticationConstants.authentication)) {
            Connection connection = null;
            PreparedStatement lookup = null;
            PreparedStatement drop = null;
            ResultSet rs = null;
            try {
                connection = SpliceNetConnection.getConnection();
                // User names are stored upper-cased in the catalog.
                lookup = connection.prepareStatement("select username from sys.sysusers where username = ?");
                lookup.setString(1, userName.toUpperCase());
                rs = lookup.executeQuery();
                if (rs.next()) {
                    // FIX: use a second statement handle instead of
                    // reassigning the first one, which leaked it.
                    drop = connection.prepareStatement("call syscs_util.syscs_drop_user(?)");
                    drop.setString(1, userName);
                    drop.execute();
                }
            } catch (Exception e) {
                // FIX: was "error Creating" — copy/paste from createUser().
                LOG.error("error Dropping " + e.getMessage());
                e.printStackTrace();
                throw new RuntimeException(e);
            } finally {
                // FIX: the ResultSet and both statements are now closed.
                DbUtils.closeQuietly(rs);
                DbUtils.closeQuietly(lookup);
                DbUtils.closeQuietly(drop);
                DbUtils.commitAndCloseQuietly(connection);
            }
        }
    }

    /** Drops the user (if present) and creates it fresh. */
    public void dropAndCreateUser(String userName, String password) {
        dropUser(userName);
        createUser(userName, password);
    }
}
package com.github.vanroy.springdata.jest; import static com.github.vanroy.springdata.jest.MappingBuilder.buildMapping; import static org.apache.commons.lang.StringUtils.*; import static org.elasticsearch.index.VersionType.*; import static org.elasticsearch.index.query.QueryBuilders.moreLikeThisQuery; import static org.springframework.util.CollectionUtils.isEmpty; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.lang.reflect.Method; import java.util.*; import com.github.vanroy.springdata.jest.internal.ExtendedSearchResult; import com.github.vanroy.springdata.jest.internal.MultiDocumentResult; import com.github.vanroy.springdata.jest.mapper.*; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; import io.searchbox.action.Action; import io.searchbox.client.JestClient; import io.searchbox.client.JestResult; import io.searchbox.core.*; import io.searchbox.indices.CreateIndex; import io.searchbox.indices.DeleteIndex; import io.searchbox.indices.IndicesExists; import io.searchbox.indices.Refresh; import io.searchbox.indices.aliases.AddAliasMapping; import io.searchbox.indices.aliases.GetAliases; import io.searchbox.indices.aliases.ModifyAliases; import io.searchbox.indices.aliases.RemoveAliasMapping; import io.searchbox.indices.mapping.GetMapping; import io.searchbox.indices.mapping.PutMapping; import io.searchbox.indices.settings.GetSettings; import io.searchbox.indices.type.TypeExist; import io.searchbox.params.Parameters; import io.searchbox.params.SearchType; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.metadata.AliasMetaData; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import 
org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortOrder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.BeansException; import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContextAware; import org.springframework.core.io.ClassPathResource; import org.springframework.data.domain.*; import org.springframework.data.elasticsearch.ElasticsearchException; import org.springframework.data.elasticsearch.annotations.Document; import org.springframework.data.elasticsearch.annotations.Mapping; import org.springframework.data.elasticsearch.annotations.Setting; import org.springframework.data.elasticsearch.core.*; import org.springframework.data.elasticsearch.core.convert.ElasticsearchConverter; import org.springframework.data.elasticsearch.core.convert.MappingElasticsearchConverter; import com.github.vanroy.springdata.jest.internal.SearchScrollResult; import org.springframework.data.elasticsearch.core.mapping.ElasticsearchPersistentEntity; import org.springframework.data.elasticsearch.core.mapping.SimpleElasticsearchMappingContext; import org.springframework.data.elasticsearch.core.query.*; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.util.CloseableIterator; import org.springframework.util.Assert; /** * Jest implementation of ElasticsearchOperations. 
* * @author Julien Roy */ public class JestElasticsearchTemplate implements ElasticsearchOperations, ApplicationContextAware { private static final Logger logger = LoggerFactory.getLogger(JestElasticsearchTemplate.class); private final JestClient client; private final ElasticsearchConverter elasticsearchConverter; private final JestResultsMapper resultsMapper; public JestElasticsearchTemplate(JestClient client) { this(client, null, null); } public JestElasticsearchTemplate(JestClient client, JestResultsMapper resultMapper) { this(client, null, resultMapper); } public JestElasticsearchTemplate(JestClient client, ElasticsearchConverter elasticsearchConverter, JestResultsMapper resultsMapper) { this.client = client; this.elasticsearchConverter = (elasticsearchConverter == null) ? new MappingElasticsearchConverter(new SimpleElasticsearchMappingContext()) : elasticsearchConverter; this.resultsMapper = (resultsMapper == null) ? new DefaultJestResultsMapper(this.elasticsearchConverter.getMappingContext()) : resultsMapper; } public static String readFileFromClasspath(String url) { StringBuilder stringBuilder = new StringBuilder(); BufferedReader bufferedReader = null; try { ClassPathResource classPathResource = new ClassPathResource(url); InputStreamReader inputStreamReader = new InputStreamReader(classPathResource.getInputStream()); bufferedReader = new BufferedReader(inputStreamReader); String line; String lineSeparator = System.getProperty("line.separator"); while ((line = bufferedReader.readLine()) != null) { stringBuilder.append(line).append(lineSeparator); } } catch (Exception e) { logger.debug(String.format("Failed to load file from url: %s: %s", url, e.getMessage())); return null; } finally { if (bufferedReader != null) try { bufferedReader.close(); } catch (IOException e) { logger.debug(String.format("Unable to close buffered reader.. 
%s", e.getMessage())); } } return stringBuilder.toString(); } @Override public void setApplicationContext(ApplicationContext context) throws BeansException { if (elasticsearchConverter instanceof ApplicationContextAware) { ((ApplicationContextAware) elasticsearchConverter).setApplicationContext(context); } } @Override public ElasticsearchConverter getElasticsearchConverter() { return elasticsearchConverter; } @Override public Client getClient() { throw new UnsupportedOperationException(); } @Override public <T> boolean createIndex(Class<T> clazz) { return createIndexIfNotCreated(clazz); } @Override public boolean createIndex(String indexName) { JestResult result = execute(new CreateIndex.Builder(indexName).build()); return result.isSucceeded(); } @Override public boolean createIndex(String indexName, Object settings) { CreateIndex.Builder createIndexBuilder = new CreateIndex.Builder(indexName); if (settings instanceof String) { createIndexBuilder.settings(String.valueOf(settings)); } else if (settings instanceof Map) { createIndexBuilder.settings(settings); } else if (settings instanceof XContentBuilder) { createIndexBuilder.settings(settings); } return executeWithAcknowledge(createIndexBuilder.build()); } @Override public <T> boolean createIndex(Class<T> clazz, Object settings) { return createIndex(getPersistentEntityFor(clazz).getIndexName(), settings); } @Override public <T> boolean putMapping(Class<T> clazz) { if (clazz.isAnnotationPresent(Mapping.class)) { String mappingPath = clazz.getAnnotation(Mapping.class).mappingPath(); if (isNotBlank(mappingPath)) { String mappings = readFileFromClasspath(mappingPath); if (isNotBlank(mappings)) { return putMapping(clazz, mappings); } } else { logger.info("mappingPath in @Mapping has to be defined. 
Building mappings using @Field"); } } ElasticsearchPersistentEntity<T> persistentEntity = getPersistentEntityFor(clazz); String mapping; try { mapping = buildMapping(clazz, persistentEntity.getIndexType(), persistentEntity .getIdProperty().getFieldName(), persistentEntity.getParentType()).string(); } catch (Exception e) { throw new ElasticsearchException("Failed to build mapping for " + clazz.getSimpleName(), e); } return putMapping(clazz, mapping); } @Override public boolean putMapping(String indexName, String type, Object mapping) { Assert.notNull(indexName, "No index defined for putMapping()"); Assert.notNull(type, "No type defined for putMapping()"); try { Object source = null; if (mapping instanceof String) { source = String.valueOf(mapping); } else if (mapping instanceof Map) { XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); builder.map((Map)mapping); source = builder.string(); } else if (mapping instanceof XContentBuilder) { source = ((XContentBuilder) mapping).string(); } else if (mapping instanceof DocumentMapper) { source = ((DocumentMapper) mapping).mappingSource().toString(); } PutMapping.Builder requestBuilder = new PutMapping.Builder(indexName, type, source); return executeWithAcknowledge(requestBuilder.build()); } catch (Exception e) { throw new ElasticsearchException("Failed to build mapping for " + indexName + ":" + type, e); } } @Override public <T> boolean putMapping(Class<T> clazz, Object mapping) { return putMapping(getPersistentEntityFor(clazz).getIndexName(), getPersistentEntityFor(clazz).getIndexType(), mapping); } @Override public <T> Map getMapping(Class<T> clazz) { return getMapping(getPersistentEntityFor(clazz).getIndexName(), getPersistentEntityFor(clazz).getIndexType()); } @Override public Map getMapping(String indexName, String type) { Assert.notNull(indexName, "No index defined for putMapping()"); Assert.notNull(type, "No type defined for putMapping()"); Map mappings = null; try { GetMapping.Builder 
getMappingBuilder = new GetMapping.Builder(); getMappingBuilder.addIndex(indexName).addType(type); JestResult result = execute(getMappingBuilder.build()); if (!result.getJsonObject().has(indexName)) { logger.info("Index {} did not exist when retrieving mappings for type {}.", indexName, type); } else { JsonObject index = result.getJsonObject().get(indexName).getAsJsonObject(); if (index != null) { JsonObject mappingElem = index.get("mappings").getAsJsonObject(); if (!mappingElem.has(type)) { logger.info("Type {} did not exist in index {} when retrieving mappings.", type, indexName); } else { mappings = resultsMapper.getEntityMapper().mapToObject(mappingElem.get(type).toString(), Map.class); } } } } catch (Exception e) { throw new ElasticsearchException("Error while getting mapping for indexName : " + indexName + " type : " + type + " " + e.getMessage()); } return mappings; } @Override public Map getSetting(String indexName) { Assert.notNull(indexName, "No index defined for getSettings"); GetSettings.Builder getSettingsBuilder = new GetSettings.Builder(); getSettingsBuilder.addIndex(indexName); JestResult result = execute(getSettingsBuilder.build()); JsonObject entries = result.getJsonObject() .get(indexName).getAsJsonObject() .get("settings").getAsJsonObject() .get("index").getAsJsonObject(); HashMap<String, String> mappings = new HashMap<>(); flatMap("index", entries, mappings); return mappings; } private void flatMap(String prefix, JsonObject jsonObject, Map<String, String> mappings) { Set<Map.Entry<String, JsonElement>> entries = jsonObject.entrySet(); for (Map.Entry<String, JsonElement> entry : entries) { String key = entry.getKey(); JsonElement value = entry.getValue(); if(value.isJsonPrimitive()) { mappings.put(prefix + "." + key, value.getAsString()); } else if(value.isJsonObject()) { flatMap(prefix + "." 
+ key, value.getAsJsonObject(), mappings); } } } @Override public <T> Map getSetting(Class<T> clazz) { return getSetting(getPersistentEntityFor(clazz).getIndexName()); } @Override public <T> T queryForObject(GetQuery query, Class<T> clazz) { return queryForObject(query, clazz, resultsMapper); } @Override public <T> T queryForObject(GetQuery query, Class<T> clazz, GetResultMapper mapper) { throw new UnsupportedOperationException(); } public <T> T queryForObject(GetQuery query, Class<T> clazz, JestGetResultMapper mapper) { return queryForObject(null, query, clazz, mapper); } public <T> T queryForObject(String indexName, GetQuery query, Class<T> clazz) { return queryForObject(indexName, query, clazz, resultsMapper); } public <T> T queryForObject(String indexName, GetQuery query, Class<T> clazz, JestGetResultMapper mapper) { ElasticsearchPersistentEntity<T> persistentEntity = getPersistentEntityFor(clazz); String index = indexName == null ? persistentEntity.getIndexName() : indexName; Get.Builder build = new Get.Builder(index, query.getId()).type(persistentEntity.getIndexType()); DocumentResult result = execute(build.build()); return mapper.mapResult(result, clazz); } @Override public <T> T queryForObject(CriteriaQuery query, Class<T> clazz) { Page<T> page = queryForPage(query, clazz); Assert.isTrue(page.getTotalElements() < 2, "Expected 1 but found " + page.getTotalElements() + " results"); return page.getTotalElements() > 0 ? page.getContent().get(0) : null; } @Override public <T> T queryForObject(StringQuery query, Class<T> clazz) { Page<T> page = queryForPage(query, clazz); Assert.isTrue(page.getTotalElements() < 2, "Expected 1 but found " + page.getTotalElements() + " results"); return page.getTotalElements() > 0 ? 
page.getContent().get(0) : null; } @Override public <T> Page<T> queryForPage(SearchQuery query, Class<T> clazz) { return queryForPage(query, clazz, resultsMapper); } @Override public <T> Page<T> queryForPage(SearchQuery query, Class<T> clazz, SearchResultMapper mapper) { throw new UnsupportedOperationException(); } public <T> Page<T> queryForPage(SearchQuery query, Class<T> clazz, JestSearchResultMapper mapper) { SearchResult response = doSearch(prepareSearch(query, clazz), query); return mapper.mapResults(response, clazz, query.getPageable()); } @Override public <T> T query(SearchQuery query, ResultsExtractor<T> resultsExtractor) { throw new UnsupportedOperationException(); } public <T> T query(SearchQuery query, JestResultsExtractor<T> resultsExtractor) { SearchResult response = doSearch(prepareSearch(query), query); return resultsExtractor.extract(response); } @Override public <T> List<T> queryForList(CriteriaQuery query, Class<T> clazz) { return queryForPage(query, clazz).getContent(); } @Override public <T> List<T> queryForList(StringQuery query, Class<T> clazz) { return queryForPage(query, clazz).getContent(); } @Override public <T> List<T> queryForList(SearchQuery query, Class<T> clazz) { return queryForPage(query, clazz).getContent(); } @Override public <T> List<String> queryForIds(SearchQuery query) { SearchSourceBuilder search = prepareSearch(query).query(query.getQuery()).noFields(); if (query.getFilter() != null) { search.postFilter(query.getFilter()); } SearchResult result = executeSearch(query, search); return extractIds(result); } @Override public <T> Page<T> queryForPage(CriteriaQuery criteriaQuery, Class<T> clazz) { QueryBuilder elasticsearchQuery = new CriteriaQueryProcessor().createQueryFromCriteria(criteriaQuery.getCriteria()); QueryBuilder elasticsearchFilter = new CriteriaFilterProcessor().createFilterFromCriteria(criteriaQuery.getCriteria()); SearchSourceBuilder searchRequestBuilder = prepareSearch(criteriaQuery, clazz); if 
(elasticsearchQuery != null) { searchRequestBuilder.query(elasticsearchQuery); } else { searchRequestBuilder.query(QueryBuilders.matchAllQuery()); } if (criteriaQuery.getMinScore() > 0) { searchRequestBuilder.minScore(criteriaQuery.getMinScore()); } if (elasticsearchFilter != null) searchRequestBuilder.postFilter(elasticsearchFilter); SearchResult response = executeSearch(criteriaQuery, searchRequestBuilder); return resultsMapper.mapResults(response, clazz, criteriaQuery.getPageable()); } @Override public <T> Page<T> queryForPage(StringQuery query, Class<T> clazz) { return queryForPage(query, clazz, resultsMapper); } @Override public <T> Page<T> queryForPage(StringQuery query, Class<T> clazz, SearchResultMapper mapper) { throw new UnsupportedOperationException(); } public <T> Page<T> queryForPage(StringQuery query, Class<T> clazz, JestSearchResultMapper mapper) { SearchResult response = executeSearch(null, prepareSearch(query, clazz).query(query.getSource())); return mapper.mapResults(response, clazz, query.getPageable()); } @Override public <T> CloseableIterator<T> stream(CriteriaQuery query, Class<T> clazz) { final long scrollTimeInMillis = TimeValue.timeValueMinutes(1).millis(); setPersistentEntityIndexAndType(query, clazz); final String initScrollId = scan(query, scrollTimeInMillis, false); return doStream(initScrollId, scrollTimeInMillis, clazz, resultsMapper); } @Override public <T> CloseableIterator<T> stream(SearchQuery query, Class<T> clazz) { return stream(query, clazz, resultsMapper); } @Override public <T> CloseableIterator<T> stream(SearchQuery query, Class<T> clazz, SearchResultMapper mapper) { throw new UnsupportedOperationException(); } public <T> CloseableIterator<T> stream(SearchQuery query, Class<T> clazz, JestResultsMapper mapper) { final long scrollTimeInMillis = TimeValue.timeValueMinutes(1).millis(); setPersistentEntityIndexAndType(query, clazz); final String initScrollId = scan(query, scrollTimeInMillis, false); return doStream(initScrollId, 
scrollTimeInMillis, clazz, mapper); } private <T> CloseableIterator<T> doStream(final String initScrollId, final long scrollTime, final Class<T> clazz, final JestResultsMapper mapper) { return new CloseableIterator<T>() { /** As we couldn't retrieve single result with scroll, store current hits. */ private volatile Iterator<T> currentHits; /** The scroll id. */ private volatile String scrollId = initScrollId; /** If stream is finished (ie: cluster returns no results. */ private volatile boolean finished; @Override public void close() { try { // Clear scroll on cluster only in case of error (cause elasticsearch auto clear scroll when it's done) if (!finished && scrollId != null && currentHits != null && currentHits.hasNext()) { clearScroll(scrollId); } } finally { currentHits = null; scrollId = null; } } @Override public boolean hasNext() { // Test if stream is finished if (finished) { return false; } // Test if it remains hits if (currentHits == null || !currentHits.hasNext()) { // Do a new request Action searchScroll = new SearchScroll.Builder(scrollId, TimeValue.timeValueMillis(scrollTime).toString()).build(); SearchScrollResult response = new SearchScrollResult(execute(searchScroll)); // Save hits and scroll id currentHits = mapper.mapResults(response, clazz).iterator(); finished = !currentHits.hasNext(); scrollId = response.getScrollId(); } return currentHits.hasNext(); } @Override public T next() { if (hasNext()) { return currentHits.next(); } throw new NoSuchElementException(); } }; } @Override public <T> long count(CriteriaQuery criteriaQuery, Class<T> clazz) { QueryBuilder elasticsearchQuery = new CriteriaQueryProcessor().createQueryFromCriteria(criteriaQuery.getCriteria()); QueryBuilder elasticsearchFilter = new CriteriaFilterProcessor().createFilterFromCriteria(criteriaQuery.getCriteria()); if (elasticsearchFilter == null) { return doCount(prepareCount(criteriaQuery, clazz), elasticsearchQuery); } else { // filter could not be set into 
CountRequestBuilder, convert request into search request return doCount(prepareSearch(criteriaQuery, clazz), elasticsearchQuery, elasticsearchFilter); } } @Override public <T> long count(SearchQuery searchQuery, Class<T> clazz) { QueryBuilder elasticsearchQuery = searchQuery.getQuery(); QueryBuilder elasticsearchFilter = searchQuery.getFilter(); if (elasticsearchFilter == null) { return doCount(prepareCount(searchQuery, clazz), elasticsearchQuery); } else { // filter could not be set into CountRequestBuilder, convert request into search request return doCount(prepareSearch(searchQuery, clazz), elasticsearchQuery, elasticsearchFilter); } } @Override public <T> long count(CriteriaQuery query) { return count(query, null); } @Override public <T> long count(SearchQuery query) { return count(query, null); } private long doCount(Count.Builder countRequestBuilder, QueryBuilder elasticsearchQuery) { if (elasticsearchQuery != null) { countRequestBuilder.query(new SearchSourceBuilder().query(elasticsearchQuery).toString()); } CountResult result = execute(countRequestBuilder.build()); return result.getCount().longValue(); } private long doCount(SearchSourceBuilder searchRequestBuilder, QueryBuilder elasticsearchQuery, QueryBuilder elasticsearchFilter) { if (elasticsearchQuery != null) { searchRequestBuilder.query(elasticsearchQuery); } else { searchRequestBuilder.query(QueryBuilders.matchAllQuery()); } if (elasticsearchFilter != null) { searchRequestBuilder.postFilter(elasticsearchFilter); } CountResult result = execute(new Count.Builder().query(searchRequestBuilder.toString()).build()); return result.getCount().longValue(); } private <T> Count.Builder prepareCount(Query query, Class<T> clazz) { String indexName[] = !isEmpty(query.getIndices()) ? query.getIndices().toArray(new String[query.getIndices().size()]) : retrieveIndexNameFromPersistentEntity(clazz); String types[] = !isEmpty(query.getTypes()) ? 
query.getTypes().toArray(new String[query.getTypes().size()]) : retrieveTypeFromPersistentEntity(clazz); Assert.notNull(indexName, "No index defined for Query"); Count.Builder countRequestBuilder = new Count.Builder().addIndex(Arrays.asList(indexName)); if (types != null) { countRequestBuilder.addType(Arrays.asList(types)); } return countRequestBuilder; } @Override public <T> LinkedList<T> multiGet(SearchQuery searchQuery, Class<T> clazz, MultiGetResultMapper getResultMapper) { throw new UnsupportedOperationException(); } public <T> LinkedList<T> multiGet(SearchQuery searchQuery, Class<T> clazz, JestMultiGetResultMapper getResultMapper) { return getResultMapper.mapResults(getMultiResponse(searchQuery, clazz), clazz); } @Override public <T> LinkedList<T> multiGet(SearchQuery searchQuery, Class<T> clazz) { return resultsMapper.mapResults(getMultiResponse(searchQuery, clazz), clazz); } private <T> MultiDocumentResult getMultiResponse(Query searchQuery, Class<T> clazz) { String indexName = !isEmpty(searchQuery.getIndices()) ? searchQuery.getIndices().get(0) : getPersistentEntityFor(clazz).getIndexName(); String type = !isEmpty(searchQuery.getTypes()) ? searchQuery.getTypes().get(0) : getPersistentEntityFor(clazz).getIndexType(); Assert.notNull(indexName, "No index defined for Query"); Assert.notNull(type, "No type define for Query"); Assert.notEmpty(searchQuery.getIds(), "No Id define for Query"); MultiGet.Builder.ById builder = new MultiGet.Builder.ById(indexName, type).addId(searchQuery.getIds()); return new MultiDocumentResult(execute(builder.build())); } @Override public String index(IndexQuery query) { String documentId = execute(prepareIndex(query)).getId(); // We should call this because we are not going through a mapper. 
if (query.getObject() != null && isDocument(query.getObject().getClass())) { setPersistentEntityId(query.getObject(), documentId); } return documentId; } @Override public UpdateResponse update(UpdateQuery updateQuery) { DocumentResult result = execute(prepareUpdate(updateQuery)); return new UpdateResponse(result.getIndex(), result.getType(), result.getId(), result.getJsonObject().get("_version").getAsLong(), false); } @Override public void bulkIndex(List<IndexQuery> queries) { Bulk.Builder bulk = new Bulk.Builder(); for (IndexQuery query : queries) { bulk.addAction(prepareIndex(query)); } BulkResult bulkResult = new BulkResult(execute(bulk.build())); if (!bulkResult.isSucceeded()) { Map<String, String> failedDocuments = new HashMap<>(); for (BulkResult.BulkResultItem item : bulkResult.getFailedItems()) { failedDocuments.put(item.id, item.error); } throw new ElasticsearchException( "Bulk indexing has failures. Use ElasticsearchException.getFailedDocuments() for detailed messages [" + failedDocuments + "]", failedDocuments ); } } @Override public void bulkUpdate(List<UpdateQuery> queries) { Bulk.Builder bulk = new Bulk.Builder(); for (UpdateQuery query : queries) { bulk.addAction(prepareUpdate(query)); } BulkResult bulkResult = new BulkResult(execute(bulk.build())); if (!bulkResult.isSucceeded()) { Map<String, String> failedDocuments = new HashMap<>(); for (BulkResult.BulkResultItem item : bulkResult.getFailedItems()) { failedDocuments.put(item.id, item.error); } throw new ElasticsearchException( "Bulk indexing has failures. 
Use ElasticsearchException.getFailedDocuments() for detailed messages [" + failedDocuments + "]", failedDocuments ); } } @Override public String delete(String indexName, String type, String id) { return execute(new Delete.Builder(id).index(indexName).type(type).build()).getId(); } @Override public <T> void delete(CriteriaQuery criteriaQuery, Class<T> clazz) { QueryBuilder elasticsearchQuery = new CriteriaQueryProcessor().createQueryFromCriteria(criteriaQuery.getCriteria()); Assert.notNull(elasticsearchQuery, "Query can not be null."); DeleteQuery deleteQuery = new DeleteQuery(); deleteQuery.setQuery(elasticsearchQuery); delete(deleteQuery, clazz); } @Override public <T> String delete(Class<T> clazz, String id) { ElasticsearchPersistentEntity persistentEntity = getPersistentEntityFor(clazz); return delete(persistentEntity.getIndexName(), persistentEntity.getIndexType(), id); } @Override public <T> void delete(DeleteQuery deleteQuery, Class<T> clazz) { String indexName = isNotBlank(deleteQuery.getIndex()) ? deleteQuery.getIndex() : getPersistentEntityFor(clazz).getIndexName(); String typeName = isNotBlank(deleteQuery.getType()) ? deleteQuery.getType() : getPersistentEntityFor(clazz).getIndexType(); Integer pageSize = deleteQuery.getPageSize() != null ? deleteQuery.getPageSize() : 1000; Long scrollTimeInMillis = deleteQuery.getScrollTimeInMillis() != null ? 
deleteQuery.getScrollTimeInMillis() : 10000L; SearchQuery searchQuery = new NativeSearchQueryBuilder().withQuery(deleteQuery.getQuery()) .withIndices(indexName) .withTypes(typeName) .withPageable(new PageRequest(0, pageSize)) .build(); String scrollId = scan(searchQuery, scrollTimeInMillis, true); List<String> ids = new ArrayList<>(); boolean hasRecords = true; while (hasRecords) { Page<String> page = scroll(scrollId, scrollTimeInMillis, new JestScrollResultMapper() { @Override public <T> Page<T> mapResults(SearchScrollResult response, Class<T> clazz) { List<String> result = new ArrayList<>(); for (SearchScrollResult.Hit<JsonObject, Void> searchHit : response.getHits(JsonObject.class)) { result.add(searchHit.source.get(JestResult.ES_METADATA_ID).getAsString()); } if (result.size() > 0) { return new PageImpl<>((List<T>) result); } return null; } }); if (page != null && page.getContent().size() > 0) { ids.addAll(page.getContent()); } else { hasRecords = false; } } if(!ids.isEmpty()) { Bulk.Builder bulk = new Bulk.Builder(); for (String id : ids) { bulk.addAction(new Delete.Builder(id).index(indexName).type(typeName).build()); } execute(bulk.build()); } clearScroll(scrollId); } @Override public void delete(DeleteQuery deleteQuery) { Assert.notNull(deleteQuery.getIndex(), "No index defined for Query"); Assert.notNull(deleteQuery.getType(), "No type define for Query"); delete(deleteQuery, null); } @Override public <T> boolean deleteIndex(Class<T> clazz) { return deleteIndex(getPersistentEntityFor(clazz).getIndexName()); } @Override public boolean deleteIndex(String indexName) { Assert.notNull(indexName, "No index defined for delete operation"); if (indexExists(indexName)) { return executeWithAcknowledge(new DeleteIndex.Builder(indexName).build()); } return false; } @Override public <T> boolean indexExists(Class<T> clazz) { return indexExists(getPersistentEntityFor(clazz).getIndexName()); } @Override public boolean indexExists(String indexName) { return 
executeWithAcknowledge(new IndicesExists.Builder(indexName).build()); } @Override public boolean typeExists(String index, String type) { return executeWithAcknowledge(new TypeExist.Builder(index).addType(type).build()); } @Override public void refresh(String indexName) { execute(new Refresh.Builder().addIndex(indexName).refresh(true).build()); } @Override public <T> void refresh(Class<T> clazz) { ElasticsearchPersistentEntity persistentEntity = getPersistentEntityFor(clazz); execute(new Refresh.Builder().addIndex(persistentEntity.getIndexName()).refresh(true).build()); } @Override public String scan(CriteriaQuery criteriaQuery, long scrollTimeInMillis, boolean noFields) { Assert.notNull(criteriaQuery.getIndices(), "No index defined for Query"); Assert.notNull(criteriaQuery.getTypes(), "No type define for Query"); Assert.notNull(criteriaQuery.getPageable(), "Query.pageable is required for scan & scroll"); QueryBuilder elasticsearchQuery = new CriteriaQueryProcessor().createQueryFromCriteria(criteriaQuery.getCriteria()); QueryBuilder elasticsearchFilter = new CriteriaFilterProcessor().createFilterFromCriteria(criteriaQuery.getCriteria()); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(elasticsearchQuery != null ? elasticsearchQuery : QueryBuilders.matchAllQuery()); if (elasticsearchFilter != null) { searchSourceBuilder.postFilter(elasticsearchFilter); } if (!isEmpty(criteriaQuery.getFields())) { searchSourceBuilder.fields(criteriaQuery.getFields()); } if (noFields) { searchSourceBuilder.noFields(); } Search.Builder search = new Search.Builder(searchSourceBuilder.toString()). addType(criteriaQuery.getTypes()). addIndex(criteriaQuery.getIndices()). setSearchType(SearchType.SCAN). setParameter(Parameters.SIZE, criteriaQuery.getPageable().getPageSize()). 
setParameter(Parameters.SCROLL, scrollTimeInMillis + "ms"); return new ExtendedSearchResult(execute(search.build())).getScrollId(); } @Override public <T> String scan(CriteriaQuery query, long scrollTimeInMillis, boolean noFields, Class<T> clazz) { setPersistentEntityIndexAndType(query, clazz); return scan(query, scrollTimeInMillis, noFields); } @Override public String scan(SearchQuery searchQuery, long scrollTimeInMillis, boolean noFields) { Assert.notNull(searchQuery.getIndices(), "No index defined for Query"); Assert.notNull(searchQuery.getTypes(), "No type define for Query"); Assert.notNull(searchQuery.getPageable(), "Query.pageable is required for scan & scroll"); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().query(searchQuery.getQuery()); if (!isEmpty(searchQuery.getFields())) { searchSourceBuilder.fields(searchQuery.getFields()); } if (noFields) { searchSourceBuilder.noFields(); } if (searchQuery.getFilter() != null) { searchSourceBuilder.postFilter(searchQuery.getFilter()); } Search.Builder search = new Search.Builder(searchSourceBuilder.toString()). addType(searchQuery.getTypes()). addIndex(searchQuery.getIndices()). setSearchType(SearchType.SCAN). setParameter(Parameters.SIZE, searchQuery.getPageable().getPageSize()). 
setParameter(Parameters.SCROLL, scrollTimeInMillis + "ms"); return new ExtendedSearchResult(execute(search.build())).getScrollId(); } @Override public <T> String scan(SearchQuery query, long scrollTimeInMillis, boolean noFields, Class<T> clazz) { setPersistentEntityIndexAndType(query, clazz); return scan(query, scrollTimeInMillis, noFields); } @Override public <T> Page<T> scroll(String scrollId, long scrollTimeInMillis, Class<T> clazz) { SearchScroll scroll = new SearchScroll.Builder(scrollId, scrollTimeInMillis + "ms").build(); SearchScrollResult response = new SearchScrollResult(execute(scroll)); return resultsMapper.mapResults(response, clazz); } @Override public <T> Page<T> scroll(String scrollId, long scrollTimeInMillis, SearchResultMapper mapper) { throw new UnsupportedOperationException(); } @Override public <T> void clearScroll(String scrollId) { execute(new ClearScroll.Builder().addScrollId(scrollId).build()); } public <T> Page<T> scroll(String scrollId, long scrollTimeInMillis, JestScrollResultMapper mapper) { SearchScroll scroll = new SearchScroll.Builder(scrollId, scrollTimeInMillis + "ms").build(); SearchScrollResult response = new SearchScrollResult(execute(scroll)); return mapper.mapResults(response, null); } @Override public <T> Page<T> moreLikeThis(MoreLikeThisQuery query, Class<T> clazz) { ElasticsearchPersistentEntity persistentEntity = getPersistentEntityFor(clazz); String indexName = isNotBlank(query.getIndexName()) ? query.getIndexName() : persistentEntity.getIndexName(); String type = isNotBlank(query.getType()) ? 
query.getType() : persistentEntity.getIndexType(); Assert.notNull(indexName, "No 'indexName' defined for MoreLikeThisQuery"); Assert.notNull(type, "No 'type' defined for MoreLikeThisQuery"); Assert.notNull(query.getId(), "No document id defined for MoreLikeThisQuery"); MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = moreLikeThisQuery() .addLikeItem(new MoreLikeThisQueryBuilder.Item(indexName, type, query.getId())); if (query.getMinTermFreq() != null) { moreLikeThisQueryBuilder.minTermFreq(query.getMinTermFreq()); } if (query.getMaxQueryTerms() != null) { moreLikeThisQueryBuilder.maxQueryTerms(query.getMaxQueryTerms()); } if (!isEmpty(query.getStopWords())) { moreLikeThisQueryBuilder.stopWords(toArray(query.getStopWords())); } if (query.getMinDocFreq() != null) { moreLikeThisQueryBuilder.minDocFreq(query.getMinDocFreq()); } if (query.getMaxDocFreq() != null) { moreLikeThisQueryBuilder.maxDocFreq(query.getMaxDocFreq()); } if (query.getMinWordLen() != null) { moreLikeThisQueryBuilder.minWordLength(query.getMinWordLen()); } if (query.getMaxWordLen() != null) { moreLikeThisQueryBuilder.maxWordLength(query.getMaxWordLen()); } if (query.getBoostTerms() != null) { moreLikeThisQueryBuilder.boostTerms(query.getBoostTerms()); } return queryForPage(new NativeSearchQueryBuilder().withQuery(moreLikeThisQueryBuilder).build(), clazz); } @Override public Boolean addAlias(AliasQuery query) { Assert.notNull(query.getIndexName(), "No index defined for Alias"); Assert.notNull(query.getAliasName(), "No alias defined"); AddAliasMapping.Builder aliasAction = new AddAliasMapping.Builder(query.getIndexName(), query.getAliasName()); if (query.getFilterBuilder() != null) { //TODO(setFilter on alias) // aliasAction.setFilter(query.getFilterBuilder()); } else if (query.getFilter() != null) { aliasAction.setFilter(query.getFilter()); } else if (isNotBlank(query.getRouting())) { aliasAction.addRouting(query.getRouting()); } else if (isNotBlank(query.getSearchRouting())) { 
aliasAction.addSearchRouting(query.getSearchRouting()); } else if (isNotBlank(query.getIndexRouting())) { aliasAction.addIndexRouting(query.getIndexRouting()); } return executeWithAcknowledge(new ModifyAliases.Builder(aliasAction.build()).build()); } @Override public Boolean removeAlias(AliasQuery query) { Assert.notNull(query.getIndexName(), "No index defined for Alias"); Assert.notNull(query.getAliasName(), "No alias defined"); RemoveAliasMapping removeAlias = new RemoveAliasMapping.Builder(query.getIndexName(), query.getAliasName()).build(); return executeWithAcknowledge(new ModifyAliases.Builder(removeAlias).build()); } @Override public List<AliasMetaData> queryForAlias(String indexName) { GetAliases getAliases = new GetAliases.Builder().addIndex(indexName).build(); JestResult result = execute(getAliases); if (!result.isSucceeded()) { return Collections.emptyList(); } Set<Map.Entry<String, JsonElement>> entries = result.getJsonObject().getAsJsonObject(indexName).getAsJsonObject("aliases").entrySet(); List<AliasMetaData> aliases = new ArrayList<>(entries.size()); for (Map.Entry<String, JsonElement> entry : entries) { aliases.add(AliasMetaData.newAliasMetaDataBuilder(entry.getKey()).build()); } return aliases; } public ElasticsearchPersistentEntity getPersistentEntityFor(Class clazz) { Assert.isTrue(clazz.isAnnotationPresent(Document.class), "Unable to identify index name. " + clazz.getSimpleName() + " is not a Document. 
Make sure the document class is annotated with @Document(indexName=\"foo\")"); return elasticsearchConverter.getMappingContext().getPersistentEntity(clazz); } private <T extends JestResult> T execute(Action<T> action) { try { T result = client.execute(action); if (!result.isSucceeded()) { String errorMessage = String.format("Cannot execute jest action , response code : %s , error : %s , message : %s", result.getResponseCode(), result.getErrorMessage(), getMessage(result)); if(isSuccessfulResponse(result.getResponseCode())) { logger.debug(errorMessage); } else { logger.error(errorMessage); throw new ElasticsearchException(errorMessage); } } return result; } catch (IOException e) { throw new ElasticsearchException("failed to execute action", e); } } private boolean executeWithAcknowledge(Action<?> action) { return execute(action).isSucceeded(); } private <T> SearchSourceBuilder prepareSearch(Query query, Class<T> clazz) { setPersistentEntityIndexAndType(query, clazz); return prepareSearch(query); } private SearchSourceBuilder prepareSearch(Query query) { Assert.notNull(query.getIndices(), "No index defined for Query"); Assert.notNull(query.getTypes(), "No type defined for Query"); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); int startRecord = 0; if (query.getPageable() != null) { startRecord = query.getPageable().getPageNumber() * query.getPageable().getPageSize(); searchSourceBuilder.size(query.getPageable().getPageSize()); } searchSourceBuilder.from(startRecord); if (!query.getFields().isEmpty()) { searchSourceBuilder.fields(query.getFields()); } if (query.getSort() != null) { for (Sort.Order order : query.getSort()) { searchSourceBuilder.sort(order.getProperty(), order.getDirection() == Sort.Direction.DESC ? 
SortOrder.DESC : SortOrder.ASC); } } if (query.getMinScore() > 0) { searchSourceBuilder.minScore(query.getMinScore()); } return searchSourceBuilder; } private SearchResult doSearch(SearchSourceBuilder searchSourceBuilder, SearchQuery searchQuery) { if (searchQuery.getFilter() != null) { searchSourceBuilder.postFilter(searchQuery.getFilter()); } if (!isEmpty(searchQuery.getElasticsearchSorts())) { for (SortBuilder sort : searchQuery.getElasticsearchSorts()) { searchSourceBuilder.sort(sort); } } if (searchQuery.getHighlightFields() != null) { HighlightBuilder highlighter = searchSourceBuilder.highlighter(); for (HighlightBuilder.Field highlightField : searchQuery.getHighlightFields()) { highlighter.field(highlightField); } } if (!isEmpty(searchQuery.getAggregations())) { for (AbstractAggregationBuilder aggregationBuilder : searchQuery.getAggregations()) { searchSourceBuilder.aggregation(aggregationBuilder); } } if (!isEmpty(searchQuery.getIndicesBoost())) { for (IndexBoost indexBoost : searchQuery.getIndicesBoost()) { searchSourceBuilder.indexBoost(indexBoost.getIndexName(), indexBoost.getBoost()); } } if (!searchQuery.getScriptFields().isEmpty()) { searchSourceBuilder.field("_source"); for (ScriptField scriptedField : searchQuery.getScriptFields()) { searchSourceBuilder.scriptField(scriptedField.fieldName(), scriptedField.script()); } } return executeSearch(searchQuery, searchSourceBuilder.query(searchQuery.getQuery())); } private SearchResult executeSearch(Query query, SearchSourceBuilder request) { Search.Builder search = new Search.Builder(request.toString()); if (query != null) { search. addType(query.getTypes()). addIndex(query.getIndices()). setSearchType(SearchType.valueOf(query.getSearchType().name())); } return new ExtendedSearchResult(execute(search.build())); } private Index prepareIndex(IndexQuery query) { try { String indexName = isBlank(query.getIndexName()) ? 
retrieveIndexNameFromPersistentEntity(query.getObject() .getClass())[0] : query.getIndexName(); String type = isBlank(query.getType()) ? retrieveTypeFromPersistentEntity(query.getObject().getClass())[0] : query.getType(); Index.Builder indexBuilder; if (query.getObject() != null) { String entityId = null; if (isDocument(query.getObject().getClass())) { entityId = getPersistentEntityId(query.getObject()); } indexBuilder = new Index.Builder(resultsMapper.getEntityMapper().mapToString(query.getObject())); // If we have a query id and a document id, do not ask ES to generate one. if (query.getId() != null && entityId != null) { indexBuilder.index(indexName).type(type).id(query.getId()); } else { indexBuilder.index(indexName).type(type); } } else if (query.getSource() != null) { indexBuilder = new Index.Builder(query.getSource()).index(indexName).type(type).id(query.getId()); } else { throw new ElasticsearchException("object or source is null, failed to index the document [id: " + query.getId() + "]"); } if (query.getVersion() != null) { indexBuilder.setParameter(Parameters.VERSION, query.getVersion()); indexBuilder.setParameter(Parameters.VERSION_TYPE, EXTERNAL.name().toLowerCase()); } if (query.getParentId() != null) { indexBuilder.setParameter(Parameters.PARENT, query.getParentId()); } return indexBuilder.build(); } catch (IOException e) { throw new ElasticsearchException("failed to index the document [id: " + query.getId() + "]", e); } } private Update prepareUpdate(UpdateQuery query) { String indexName = isNotBlank(query.getIndexName()) ? query.getIndexName() : getPersistentEntityFor(query.getClazz()).getIndexName(); String type = isNotBlank(query.getType()) ? 
query.getType() : getPersistentEntityFor(query.getClazz()).getIndexType(); Assert.notNull(indexName, "No index defined for Query"); Assert.notNull(type, "No type define for Query"); Assert.notNull(query.getId(), "No Id define for Query"); Assert.notNull(query.getUpdateRequest(), "No IndexRequest define for Query"); Map<String, Object> payLoadMap = new HashMap<>(); if (query.getUpdateRequest().script() == null) { // doc if (query.DoUpsert()) { payLoadMap.put("doc_as_upsert", Boolean.TRUE); payLoadMap.put("doc", query.getUpdateRequest().doc().sourceAsMap()); } else { payLoadMap.put("doc", query.getUpdateRequest().doc().sourceAsMap()); } } else { // or script /* .setScript(query.getUpdateRequest().script(), query.getUpdateRequest().scriptType()) .setScriptParams(query.getUpdateRequest().scriptParams()) .setScriptLang(query.getUpdateRequest().scriptLang()); */ } try { String payload = resultsMapper.getEntityMapper().mapToString(payLoadMap); Update.Builder updateBuilder = new Update.Builder(payload).index(indexName).type(type).id(query.getId()); return updateBuilder.build(); } catch (IOException e) { throw new ElasticsearchException("failed to index the document [id: " + query.getId() + "]", e); } } private <T> Map getDefaultSettings(ElasticsearchPersistentEntity<T> persistentEntity) { if (persistentEntity.isUseServerConfiguration()) return new HashMap(); return new MapBuilder<String, String>().put("index.number_of_shards", String.valueOf(persistentEntity.getShards())) .put("index.number_of_replicas", String.valueOf(persistentEntity.getReplicas())) .put("index.refresh_interval", persistentEntity.getRefreshInterval()) .put("index.store.type", persistentEntity.getIndexStoreType()).map(); } private <T> boolean createIndexIfNotCreated(Class<T> clazz) { return indexExists(getPersistentEntityFor(clazz).getIndexName()) || createIndexWithSettings(clazz); } private <T> boolean createIndexWithSettings(Class<T> clazz) { if (clazz.isAnnotationPresent(Setting.class)) { String 
settingPath = clazz.getAnnotation(Setting.class).settingPath(); if (isNotBlank(settingPath)) { String settings = readFileFromClasspath(settingPath); if (isNotBlank(settings)) { return createIndex(getPersistentEntityFor(clazz).getIndexName(), settings); } } else { logger.info("settingPath in @Setting has to be defined. Using default instead."); } } return createIndex(getPersistentEntityFor(clazz).getIndexName(), getDefaultSettings(getPersistentEntityFor(clazz))); } private boolean isDocument(Class clazz) { return clazz.isAnnotationPresent(Document.class); } private String getPersistentEntityId(Object entity) { PersistentProperty idProperty = getPersistentEntityFor(entity.getClass()).getIdProperty(); if (idProperty != null) { Method getter = idProperty.getGetter(); if (getter != null) { try { Object id = getter.invoke(entity); if (id != null) { return String.valueOf(id); } } catch (Throwable t) { t.printStackTrace(); } } } return null; } private <T extends JestResult> String getMessage(T result) { if (result.getJsonObject() == null) { return null; } JsonPrimitive message = result.getJsonObject().getAsJsonPrimitive("message"); if (message == null) { return null; } return message.getAsString(); } private static String[] toArray(List<String> values) { String[] valuesAsArray = new String[values.size()]; return values.toArray(valuesAsArray); } private void setPersistentEntityId(Object entity, String id) { PersistentProperty idProperty = getPersistentEntityFor(entity.getClass()).getIdProperty(); // Only deal with String because ES generated Ids are strings ! 
if (idProperty != null && idProperty.getType().isAssignableFrom(String.class)) { Method setter = idProperty.getSetter(); if (setter != null) { try { setter.invoke(entity, id); } catch (Throwable t) { t.printStackTrace(); } } } } private void setPersistentEntityIndexAndType(Query query, Class clazz) { if (query.getIndices().isEmpty()) { query.addIndices(retrieveIndexNameFromPersistentEntity(clazz)); } if (query.getTypes().isEmpty()) { query.addTypes(retrieveTypeFromPersistentEntity(clazz)); } } private String[] retrieveIndexNameFromPersistentEntity(Class clazz) { if (clazz != null) { return new String[]{getPersistentEntityFor(clazz).getIndexName()}; } return null; } private String[] retrieveTypeFromPersistentEntity(Class clazz) { if (clazz != null) { return new String[]{getPersistentEntityFor(clazz).getIndexType()}; } return null; } private List<String> extractIds(SearchResult result) { List<String> ids = new ArrayList<>(); for (SearchResult.Hit<JsonObject, Void> hit : result.getHits(JsonObject.class)) { if (hit != null) { ids.add(hit.source.get(JestResult.ES_METADATA_ID).toString()); } } return ids; } private static boolean isSuccessfulResponse(int statusCode) { return statusCode < 300 || statusCode == 404; } }
package org.spongepowered.api.util.rotation;

import org.spongepowered.api.util.annotation.CatalogedBy;

/**
 * Represents an angle of rotation.
 *
 * <p>Known rotation values are cataloged in {@link Rotations} (see the
 * {@code @CatalogedBy} annotation), so implementations are expected to be
 * pre-defined constants rather than arbitrary user-created instances.
 */
@CatalogedBy(Rotations.class)
public interface Rotation {

    /**
     * Gets the angle of this rotation in degrees.
     *
     * @return The angle in degrees
     */
    //TODO we should have an Angle class in the future
    int getAngle();

    /**
     * Checks if this is a flowerpot.
     *
     * <p>NOTE(review): a "flowerpot" predicate looks unrelated to an angle of
     * rotation — possibly copied from another catalog type. Confirm the
     * intended contract with the implementations before relying on it.
     *
     * @return Whether this is a flowerpot
     */
    boolean isFlowerPot();
}
package pete.metrics.installability.util; import java.nio.file.Path; public class GroupReader { public static String readGroupFromPath(String pathName) { if (pathName.contains("active-bpel") || pathName.toLowerCase().contains("activebpel")) { return "active"; } else if (pathName.contains("bpelg") || pathName.contains("bpel-g")) { return "bpelg"; } else if (pathName.contains("ode") || pathName.contains("ODE")) { return "ode"; } else if (pathName.contains("openesb23") || pathName.toLowerCase().contains("openesb")) { return "openesb"; } else if (pathName.contains("orchestra")) { return "orchestra"; } else if (pathName.contains("petals")) { return "petals"; } else { return ""; } } public static String readGroupFromPath(Path path) { return readGroupFromPath(path.toString()); } }
package sc.iview.commands.edit; import graphics.scenery.Cylinder; import graphics.scenery.Icosphere; import graphics.scenery.Node; import graphics.scenery.Sphere; import org.joml.Matrix4f; import org.joml.Quaternionf; import org.joml.Vector3f; import org.scijava.command.Command; import org.scijava.command.CommandService; import org.scijava.plugin.Menu; import org.scijava.plugin.Parameter; import org.scijava.plugin.Plugin; import sc.iview.SciView; import sc.iview.node.Line3D; import java.util.HashMap; import static sc.iview.commands.MenuWeights.EDIT; import static sc.iview.commands.MenuWeights.EDIT_ADD_COMPASS; /** * Command to orientation compass (R,G,B cylinders oriented along X,Y,Z axes, respectively) to the scene * * @author Vladimir Ulman * */ @Plugin(type = Command.class, menuRoot = "SciView", menu = { @Menu(label = "Edit", weight = EDIT), @Menu(label = "Add Compass", weight = EDIT_ADD_COMPASS) }) public class AddOrientationCompass implements Command { @Parameter private SciView sciView; @Parameter private float axisLength = 10.0f; @Parameter private float AXESBARRADIUS = 1.0f; @Parameter private Vector3f xColor = new Vector3f(1f,0f,0f); @Parameter private Vector3f yColor = new Vector3f(0f,1f,0f); @Parameter private Vector3f zColor = new Vector3f(0f,0f,1f); private Node makeAxis( float axisLength, float angleX, float angleY, float angleZ, Vector3f color ) { Cylinder axisNode = new Cylinder(AXESBARRADIUS, axisLength,4); axisNode.setName("compass axis: X"); axisNode.setRotation( new Quaternionf().rotateXYZ( angleX, angleY, angleZ ) ); axisNode.getMaterial().getDiffuse().set(color); Icosphere axisCap = new Icosphere(AXESBARRADIUS, 2); axisCap.setPosition(new Vector3f(0, axisLength, 0)); axisCap.getMaterial().getDiffuse().set(color); axisNode.addChild(axisCap); return axisNode; } @Override public void run() { final Node root = new Node("Scene orientation compass"); //NB: RGB colors ~ XYZ axes //x axis: Node axisNode = makeAxis( axisLength, 
0,0,(float)(-0.5*Math.PI), xColor ); axisNode.setName("compass axis: X"); root.addChild( axisNode ); //y axis: axisNode = makeAxis( axisLength, 0,0, 0, yColor ); axisNode.setName("compass axis: Y"); root.addChild( axisNode ); //z axis: axisNode = makeAxis( axisLength, (float)(0.5*Math.PI),0,0, zColor ); axisNode.setName("compass axis: Z"); root.addChild( axisNode ); sciView.addNode( root ); sciView.getCamera().addChild(root); root.setPosition( new Vector3f(-58, 30, -90)); root.getUpdate().add(() -> { root.setWantsComposeModel(false); root.getModel().identity(); root.getModel().translate( root.getPosition() ); root.getModel().mul( new Quaternionf(sciView.getCamera().getRotation()).invert().conjugate().get(new Matrix4f()) ); root.setNeedsUpdate(false); root.setNeedsUpdateWorld(false); return null; }); } public static void main(String... args) throws Exception { SciView sv = SciView.create(); CommandService command = sv.getScijavaContext().getService(CommandService.class); HashMap<String, Object> argmap = new HashMap<>(); command.run(AddOrientationCompass.class, true, argmap); } }
package studentcapture.datalayer.database; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataAccessException; import org.springframework.dao.IncorrectResultSizeDataAccessException; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.stereotype.Repository; import java.sql.Timestamp; import java.text.SimpleDateFormat; import java.util.*; @Repository public class Submission { // This template should be used to send queries to the database @Autowired protected JdbcTemplate jdbcTemplate; /** * Add a new submission for an assignment * * @param assignmentID Unique identifier for the assignment we're submitting to * @param studentID Unique identifier for the student submitting * @return True if everything went well, otherwise false */ public boolean addSubmission(String assignmentID,String studentID) { String sql = "INSERT INTO Submission (assignmentId, studentId, SubmissionDate) VALUES (?,?,?)"; java.util.Date date = new java.util.Date(System.currentTimeMillis()); java.sql.Timestamp timestamp = new java.sql.Timestamp(date.getTime()); timestamp.setNanos(0); int rowsAffected = jdbcTemplate.update(sql,new Object[]{Integer.parseInt(assignmentID),Integer.parseInt(studentID),timestamp}); if(rowsAffected == 1){ return true; } return false; } /** * Add a grade for a submission * * @param assID Unique identifier for the assignment with the submission being graded * @param teacherID Unique identifier for the teacher grading * @param studentID Unique identifier for the student being graded * @param grade The grade of the submission * @return True if everything went well, otherwise false */ public boolean setGrade(String assID, String teacherID, String studentID, String grade) { String setGrade = "UPDATE Submission (Grade, TeacherID, Date) = (?, ?, ?) WHERE (AssignmentID = ?) 
AND (StudentID = ?)"; SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy HH:mm"); Date date = new Date(); int updatedRows = jdbcTemplate.update(setGrade, new Object[]{grade, teacherID, dateFormat.format(date), assID, studentID}); if (updatedRows == 1) return true; else return false; } /** * Remove a submission * * @param assID Unique identifier for the assignment with the submission being removed * @param studentID Unique identifier for the student whose submission is removed * @return True if everything went well, otherwise false */ private static final String removeSubmissionStatement = "DELETE FROM " + "Submission WHERE (AssignmentId=? AND StudentId=?)"; public boolean removeSubmission(String assID, String studentID) { boolean result; int assignmentId = Integer.parseInt(assID); int studentId = Integer.parseInt(studentID); try { int rowsAffected = jdbcTemplate.update(removeSubmissionStatement, new Object[] {assignmentId, studentId}); if(rowsAffected == 1) { result = true; } else { result = false; } }catch (IncorrectResultSizeDataAccessException e){ result = false; }catch (DataAccessException e1){ result = false; } return result; } /** * Changes the grade of a submission * * @param assID Unique identifier for the assignment with the submission being graded * @param teacherID Unique identifier of the teacher updating * @param studentID Unique identifier for the student * @param grade The new grade of the submission * @param date The date the grade was updated * @return True if everything went well, otherwise false */ public boolean updateGrade(String assID, String teacherID, String studentID, String grade, Date date) { return true; } /** * Get information about the grade of a submission * * @param assignmentID Unique identifier for the assignment submission grade bra * @param studentID Unique identifier for the student associated with the submission * @return A list containing the grade, date, and grader */ public Map<String, Object> getGrade(int 
studentID, int assignmentID) { String query = "SELECT grade, submissiondate as time, concat(firstname,' ', lastname) as teacher" + " FROM submission JOIN users ON (teacherid = userid) WHERE (studentid = ? AND assignmentid = ?)"; Map<String, Object> response; try { response = jdbcTemplate.queryForMap(query, new Object[] {studentID, assignmentID}); //return the time as string instead of timestamp response.put("time", response.get("time").toString()); } catch (IncorrectResultSizeDataAccessException e) { response = new HashMap<>(); //TODO create better error message response.put("error", e.getMessage()); } catch (DataAccessException e) { response = new HashMap<>(); //TODO create better error message response.put("error", e.getMessage()); } return response; } /** * Get all ungraded submissions for an assignment * * @param assID The assignment to get submissions for * @return A list of ungraded submissions for the assignment */ private final static String getAllUngradedStatement = "SELECT " + "sub.AssignmentId,sub.StudentId,stu.FirstName,stu.LastName," + "sub.SubmissionDate,sub.Grade,sub.TeacherId FROM " + "Submission AS sub LEFT JOIN Users AS stu ON " + "sub.studentId=stu.userId WHERE (AssignmentId=?) 
AND " + "(Grade IS NULL)"; public Optional<List<SubmissionWrapper>> getAllUngraded(String assId) { List<SubmissionWrapper> submissions = new ArrayList<>(); int assignmentId = Integer.parseInt(assId); try { List<Map<String, Object>> rows = jdbcTemplate.queryForList( getAllUngradedStatement, new Object[] {assignmentId}); for (Map<String, Object> row : rows) { SubmissionWrapper submission = new SubmissionWrapper(); submission.assignmentId = (int) row.get("AssignmentId"); submission.studentId = (int) row.get("StudentId"); //submission.teacherId = (int) row.get("TeacherId"); //submission.grade = (String) row.get("Grade"); submission.submissionDate = ((Timestamp) row.get("SubmissionDate")).toString(); try { String firstName = (String) row.get("FirstName"); String lastName = (String) row.get("LastName"); submission.studentName = firstName + " " + lastName; } catch (NullPointerException e) { submission.studentName = null; } submissions.add(submission); } } catch (IncorrectResultSizeDataAccessException e) { //TODO return Optional.empty(); } catch (DataAccessException e1) { //TODO return Optional.empty(); } return Optional.of(submissions); } /** * Get all submissions for an assignment * @param assID The assignment to get submissions for * @return A list of submissions for the assignment */ private final static String getAllSubmissionsStatement = "SELECT " + "sub.AssignmentId,sub.StudentId,stu.FirstName,stu.LastName," + "sub.SubmissionDate,sub.Grade,sub.TeacherId FROM " + "Submission AS sub LEFT JOIN Users AS stu ON " + "sub.studentId=stu.userId WHERE (AssignmentId=?)"; public Optional<List<SubmissionWrapper>> getAllSubmissions(String assId) { List<SubmissionWrapper> submissions = new ArrayList<>(); int assignmentId = Integer.parseInt(assId); try { List<Map<String, Object>> rows = jdbcTemplate.queryForList( getAllSubmissionsStatement, new Object[] {assignmentId}); for (Map<String, Object> row : rows) { SubmissionWrapper submission = new SubmissionWrapper(); 
submission.assignmentId = (int) row.get("AssignmentId"); submission.studentId = (int) row.get("StudentId"); try { submission.teacherId = (int) row.get("TeacherId"); } catch (NullPointerException e) { submission.teacherId = null; } try { submission.grade = (String) row.get("Grade"); } catch (NullPointerException e) { submission.grade = null; } submission.submissionDate = ((Timestamp) row.get("SubmissionDate")).toString(); try { String firstName = (String) row.get("FirstName"); String lastName = (String) row.get("LastName"); submission.studentName = firstName + " " + lastName; } catch (NullPointerException e) { submission.studentName = null; } submissions.add(submission); } } catch (IncorrectResultSizeDataAccessException e){ //TODO return Optional.empty(); } catch (DataAccessException e1){ //TODO return Optional.empty(); } return Optional.of(submissions); } /** * * Get all submissions for an assignment, including students that have not * yet made a submission. * * @param assID The assignment to get submissions for * @return A list of submissions for the assignment */ private final static String getAllSubmissionsWithStudentsStatement = "SELECT ass.AssignmentId,par.UserId AS StudentId,sub.SubmissionDate" + ",sub.Grade,sub.TeacherId FROM Assignment AS ass RIGHT JOIN " + "Participant AS par ON ass.CourseId=par.CourseId LEFT JOIN " + "Submission AS sub ON par.userId=sub.studentId WHERE " + "(par.function='Student') AND (ass.AssignmentId=?)"; public Optional<List<SubmissionWrapper>> getAllSubmissionsWithStudents (String assId) { List<SubmissionWrapper> submissions = new ArrayList<>(); int assignmentId = Integer.parseInt(assId); try { List<Map<String, Object>> rows = jdbcTemplate.queryForList( getAllSubmissionsWithStudentsStatement, new Object[] {assignmentId}); for (Map<String, Object> row : rows) { SubmissionWrapper submission = new SubmissionWrapper(); submission.assignmentId = (int) row.get("AssignmentId"); submission.studentId = (int) row.get("StudentId"); try { 
submission.teacherId = (int) row.get("TeacherId"); } catch (NullPointerException e) { submission.teacherId = null; } try { submission.grade = (String) row.get("Grade"); } catch (NullPointerException e) { submission.grade = null; } try { submission.submissionDate = ((Timestamp) row.get("SubmissionDate")).toString(); } catch (NullPointerException e) { submission.submissionDate = null; } submissions.add(submission); } } catch (IncorrectResultSizeDataAccessException e){ //TODO return Optional.empty(); } catch (DataAccessException e1){ //TODO return Optional.empty(); } return Optional.of(submissions); } public class SubmissionWrapper { public int assignmentId; public int studentId; public String studentName; public String submissionDate; public String grade; public Integer teacherId; } }
package weave.utils;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.rmi.RemoteException;
import java.security.InvalidParameterException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.Vector;

/**
 * SQLUtils
 *
 * @author Andy Dufilie
 * @author Andrew Wilkinson
 * @author Kyle Monico
 * @author Yen-Fu Luo
 * @author Philip Kovac
 * @author Patrick Stickney
 * @author John Fallon
 */
public class SQLUtils
{
	// Database product names as reported by DatabaseMetaData.getDatabaseProductName().
	public static String MYSQL = "MySQL";
	public static String SQLITE = "SQLite";
	public static String POSTGRESQL = "PostgreSQL";
	public static String SQLSERVER = "Microsoft SQL Server";
	public static String ORACLE = "Oracle";
	
	// NOTE(review): this is the name of SQLite's built-in catalog table, apparently
	// used as a default database name -- confirm against callers.
	public static String DEFAULT_SQLITE_DATABASE = "sqlite_master";
	
	/**
	 * @param dbms The name of a DBMS (MySQL, PostgreSQL, ...)
	 * @return A driver name that can be used in the getConnection() function.
*/ public static String getDriver(String dbms) throws RemoteException { if (dbms.equalsIgnoreCase(MYSQL)) return "com.mysql.jdbc.Driver"; if(dbms.equalsIgnoreCase(SQLITE)) return "org.sqlite.JDBC"; if (dbms.equalsIgnoreCase(POSTGRESQL)) return "org.postgis.DriverWrapper"; if (dbms.equalsIgnoreCase(SQLSERVER)) return "net.sourceforge.jtds.jdbc.Driver"; if (dbms.equalsIgnoreCase(ORACLE)) return "oracle.jdbc.OracleDriver"; throw new RemoteException("Unknown DBMS"); } public static String getDbmsFromConnection(Connection conn) { try { String dbms = conn.getMetaData().getDatabaseProductName(); for (String match : new String[]{ ORACLE, SQLSERVER, MYSQL, SQLITE, POSTGRESQL }) if (dbms.equalsIgnoreCase(match)) return match; return dbms; } catch (SQLException e) { return ""; } } public static String getDbmsFromConnectString(String connectString) throws RemoteException { if (connectString.startsWith("jdbc:jtds")) return SQLSERVER; if (connectString.startsWith("jdbc:oracle")) return ORACLE; if (connectString.startsWith("jdbc:mysql")) return MYSQL; if (connectString.startsWith("jdbc:sqlite")) return SQLITE; if (connectString.startsWith("jdbc:postgresql")) return POSTGRESQL; throw new RemoteException("Unknown DBMS"); } /** * @param dbms The name of a DBMS (MySQL, PostgreSQL, Microsoft SQL Server) * @param ip The IP address of the DBMS. * @param port The port the DBMS is on (optional, can be "" to use default). * @param database The name of a database to connect to (can be "" for MySQL) * @param user The username to use when connecting. * @param pass The password associated with the username. * @return A connect string that can be used in the getConnection() function. 
 */
public static String getConnectString(String dbms, String ip, String port, String database, String user, String pass)
{
	String host;
	if (port == null || port.length() == 0)
		host = ip; // default port for specific dbms will be used
	else
		host = ip + ":" + port;
	
	// pick the per-DBMS connect-string template
	String format = null;
	if (SQLSERVER.equalsIgnoreCase(dbms))
	{
		dbms = "sqlserver"; // this will be put in the format string
		format = "jdbc:jtds:%s://%s/;instance=%s;user=%s;password=%s";
	}
	else if (ORACLE.equalsIgnoreCase(dbms))
	{
		// NOTE(review): for Oracle the 'database' parameter is used as the instance/SID -- confirm
		format = "jdbc:%s:thin:%s/%s@%s:%s"; //"jdbc:oracle:thin:<user>/<password>@<host>:<port>:<instance>"
	}
	else if(SQLITE.equalsIgnoreCase(dbms))
	{
		format = "jdbc:%s:%s"; // "jdbc:sqlite:C:/path/to/file/DataBase.db"
	}
	else // MySQL or PostGreSQL
	{
		format = "jdbc:%s://%s/%s?user=%s&password=%s";
	}
	
	// MySQL connect string uses % as an escape character, so we must use URLEncoder.
	// PostgreSQL does not support % as an escape character, and does not work with the & character.
	if (dbms.equalsIgnoreCase(MYSQL))
	{
		try
		{
			String utf = "UTF-8";
			database = URLEncoder.encode(database, utf);
			user = URLEncoder.encode(user, utf);
			pass = URLEncoder.encode(pass, utf);
		}
		catch (UnsupportedEncodingException e)
		{
			// this should never happen
			throw new RuntimeException(e);
		}
	}
	
	// fill in the template; the argument order differs per DBMS
	String result = "";
	if (dbms.equalsIgnoreCase(ORACLE))
		result = String.format(format, dbms.toLowerCase(), user, pass, host, database);
	else if( dbms.equalsIgnoreCase(SQLITE))
		result = String.format(format, dbms.toLowerCase(), database);
	else
		result = String.format(format, dbms.toLowerCase(), host, database, user, pass);
	
	return result;
}

/**
 * This maps a driver name to a Driver instance.
 * The purpose of this map is to avoid instantiating extra Driver objects unnecessarily.
 */
private static DriverMap _driverMap = new DriverMap();

/**
 * HashMap of driver class name to registered Driver, which deregisters all
 * cached drivers when it is finalized.
 */
@SuppressWarnings("serial")
private static class DriverMap extends HashMap<String,Driver>
{
	// NOTE(review): finalize() is unreliable for cleanup and does not call
	// super.finalize() -- a JVM shutdown hook would be more dependable.
	protected void finalize()
	{
		for (Driver driver : _driverMap.values())
		{
			try
			{
				DriverManager.deregisterDriver(driver);
			}
			catch (SQLException e)
			{
				e.printStackTrace();
			}
		}
	}
}

/**
 * This maps a connection string to a Connection object. Used by getStaticReadOnlyConnection().
 */
private static Map<String, Connection> _staticReadOnlyConnections = new HashMap<String, Connection>();

/**
 * This function will test a connection by running a simple test query.
 * We cannot rely on Connection.isValid(timeout) because it does not work in some drivers.
 * Running a test query is a reliable way to find out if the connection is valid.
 * @param conn A SQL Connection.
 * @throws SQLException Thrown if the test query fails.
 */
public static void testConnection(Connection conn) throws SQLException
{
	Statement stmt = null;
	try
	{
		stmt = conn.createStatement();
		// Oracle requires a FROM clause, so query the dummy DUAL table there
		if (SQLUtils.isOracleServer(conn))
			stmt.execute("SELECT 0 FROM DUAL");
		else
			stmt.execute("SELECT 0");
	}
	catch (RuntimeException e) // This is important for catching unexpected errors.
	{
		/*
			Example unexpected error when the connection is invalid:
			
			java.lang.NullPointerException
			at com.mysql.jdbc.PreparedStatement.fillSendPacket(PreparedStatement.java:2484)
			at com.mysql.jdbc.PreparedStatement.fillSendPacket(PreparedStatement.java:2460)
			at com.mysql.jdbc.PreparedStatement.execute(PreparedStatement.java:1298)
			at weave.utils.SQLUtils.testConnection(SQLUtils.java:173)
			[...]
		*/
		throw new SQLException("Connection is invalid", e);
	}
	finally
	{
		cleanup(stmt);
	}
}

/**
 * This function tests if a given Connection is valid, and closes the connection if it is not.
 * @param conn A Connection object which may or may not be valid.
 * @return A value of true if the given Connection is still connected.
*/ public static boolean connectionIsValid(Connection conn) { if (conn == null) return false; try { testConnection(conn); return true; } catch (SQLException e) { SQLUtils.cleanup(conn); } return false; } /** * This function returns a read-only connection that can be reused. The connection should not be closed. * @param connectString The connect string used to create the Connection. * @return A static read-only Connection. */ public static Connection getStaticReadOnlyConnection(String connectString) throws RemoteException { synchronized (_staticReadOnlyConnections) { Connection conn = null; if (_staticReadOnlyConnections.containsKey(connectString)) { conn = _staticReadOnlyConnections.get(connectString); if (connectionIsValid(conn)) return conn; // if connection is not valid, remove this entry from the Map _staticReadOnlyConnections.remove(connectString); } // get a new connection, throwing an exception if this fails conn = getConnection(connectString); // try to set readOnly.. if this fails, continue anyway. try { conn.setReadOnly(true); } catch (SQLException e) { e.printStackTrace(); } // remember this static, read-only connection. if (conn != null) _staticReadOnlyConnections.put(connectString, conn); return conn; } } /** * @param connectString The connect string to use. 
 * @return A new SQL connection using the specified driver & connect string
 */
public static Connection getConnection(String connectString) throws RemoteException
{
	String dbms = getDbmsFromConnectString(connectString);
	String driver = getDriver(dbms);
	Connection conn = null;
	try
	{
		// only call newInstance once per driver
		if (!_driverMap.containsKey(driver))
			_driverMap.put(driver, (Driver)Class.forName(driver).newInstance());
		conn = DriverManager.getConnection(connectString);
	}
	catch (SQLException ex)
	{
		System.err.println(String.format("driver: %s\nconnectString: %s", driver, connectString));
		throw new RemoteException("Unable to connect to SQL database", ex);
	}
	catch (Exception ex)
	{
		throw new RemoteException("Failed to load driver: \"" + driver + "\"", ex);
	}
	return conn;
}

/**
 * @param colName the raw column name to sanitize
 * @param suffix appended before truncation so it always survives the shortening
 * @return colName with special characters replaced and truncated to 30 characters.
 */
public static String fixColumnName(String colName, String suffix)
{
	// make comparison operators readable before stripping special characters
	colName = colName
		.replace("<=", "LTE")
		.replace(">=", "GTE")
		.replace("<", "LT")
		.replace(">", "GT");
	StringBuilder sb = new StringBuilder();
	boolean space = false;
	for (int i = 0; i < colName.length(); i++)
	{
		char c = colName.charAt(i);
		if (Character.isJavaIdentifierPart(c))
		{
			// collapse each run of non-identifier characters into a single space
			if (space)
				sb.append(' ');
			sb.append(c);
			space = false;
		}
		else
		{
			space = true;
		}
	}
	// append suffix before truncating
	sb.append(suffix);
	colName = sb.toString();
	// if the length of the column name is longer than the 30-character limit in oracle (MySQL limit is 64 characters)
	int max = 30;
	// if name too long, remove spaces
	if (colName.length() > max)
		colName = colName.replace(" ", "");
	// if still too long, truncate
	if (colName.length() > max)
	{
		// keep the beginning and the end of the name, joined by "_"
		int halfLeft = max / 2;
		int halfRight = max / 2 - 1 + max % 2; // subtract 1 for the "_" unless max is odd
		colName = colName.substring(0, halfLeft) + "_" + colName.substring(colName.length() - halfRight);
	}
	return colName;
}

/**
 * @param dbms The name of a DBMS (MySQL, PostgreSQL, ...)
* @param symbol The symbol to quote. * @return The symbol surrounded in quotes, usable in queries for the specified DBMS. */ public static String quoteSymbol(String dbms, String symbol) throws IllegalArgumentException { //the quote symbol is required for names of variables that include spaces or special characters String openQuote, closeQuote; if (dbms.equalsIgnoreCase(MYSQL)) { openQuote = closeQuote = "`"; } else if (dbms.equalsIgnoreCase(POSTGRESQL) || dbms.equalsIgnoreCase(ORACLE) || dbms.equalsIgnoreCase(SQLITE)) { openQuote = closeQuote = "\""; } else if (dbms.equalsIgnoreCase(SQLSERVER)) { openQuote = "["; closeQuote = "]"; } else throw new IllegalArgumentException("Unsupported DBMS type: "+dbms); if (symbol.contains(openQuote) || symbol.contains(closeQuote)) throw new IllegalArgumentException(String.format("Unable to surround SQL symbol with quote marks (%s%s) because it already contains one: %s", openQuote, closeQuote, symbol)); return openQuote + symbol + closeQuote; } /** * @param conn An SQL connection. * @param symbol The symbol to quote. * @return The symbol surrounded in quotes, usable in queries for the specified connection. */ public static String quoteSymbol(Connection conn, String symbol) throws SQLException, IllegalArgumentException { String dbms = getDbmsFromConnection(conn); return quoteSymbol(dbms, symbol); } /** * @param dbms The name of a DBMS (MySQL, PostgreSQL, ...) * @param symbol The quoted symbol. * @return The symbol without its dbms-specific quotes. 
*/ public static String unquoteSymbol(String dbms, String symbol) { char openQuote, closeQuote; int length = symbol.length(); if (dbms.equalsIgnoreCase(MYSQL)) { openQuote = closeQuote = '`'; } else if (dbms.equalsIgnoreCase(POSTGRESQL) || dbms.equalsIgnoreCase(ORACLE) || dbms.equalsIgnoreCase(SQLITE)) { openQuote = closeQuote = '"'; } else if (dbms.equalsIgnoreCase(SQLSERVER)) { openQuote = '['; closeQuote = ']'; } else throw new IllegalArgumentException("Unsupported DBMS type: "+dbms); String result = symbol; if (length > 2 && symbol.charAt(0) == openQuote && symbol.charAt(length - 1) == closeQuote) result = symbol.substring(1, length - 1); if (result.indexOf(openQuote) >= 0 || result.indexOf(closeQuote) >= 0) throw new IllegalArgumentException("Cannot unquote symbol: "+symbol); return result; } /** * @param conn An SQL connection. * @param symbol The quoted symbol. * @return The symbol without its dbms-specific quotes. */ public static String unquoteSymbol(Connection conn, String symbol) throws SQLException, IllegalArgumentException { char quote = conn.getMetaData().getIdentifierQuoteString().charAt(0); int length = symbol.length(); String result = symbol; if (length > 2 && symbol.charAt(0) == quote && symbol.charAt(length - 1) == quote) result = symbol.substring(1, length - 1); if (result.indexOf(quote) >= 0) throw new IllegalArgumentException("Cannot unquote symbol: "+symbol); return symbol; } /** * This will build a case sensitive compare expression out of two sql query expressions. * @param conn An SQL Connection. * @param expr1 The first SQL expression to be used in the comparison. * @param expr2 The second SQL expression to be used in the comparison. * @return A SQL expression comparing the two expressions using case-sensitive string comparison. 
*/ public static String caseSensitiveCompare(Connection conn, String expr1, String expr2) throws SQLException { String operator; if (getDbmsFromConnection(conn).equals(MYSQL)) operator = "= BINARY"; else operator = "="; return String.format( "%s %s %s", stringCast(conn, expr1), operator, stringCast(conn, expr2) ); } /** * This will wrap a query expression in a string cast. * If the database is not supported by this function, the queryExpression will not be altered. * @param conn An SQL Connection. * @param queryExpression An expression to be used in a SQL Query. * @return The query expression wrapped in a string cast. */ public static String stringCast(Connection conn, String queryExpression) throws SQLException { String dbms = getDbmsFromConnection(conn); if (dbms.equals(MYSQL)) return String.format("cast(%s as char)", queryExpression); if (dbms.equals(POSTGRESQL) || dbms.equals(SQLITE)) return String.format("cast(%s as varchar)", queryExpression); // dbms type not supported by this function yet return queryExpression; } /** * This function returns the name of a binary data type that can be used in SQL queries. * @param dbms The name of a DBMS (MySQL, PostgreSQL, ...) * @return The name of the binary SQL type to use for the given DBMS. */ public static String binarySQLType(String dbms) { if (POSTGRESQL.equalsIgnoreCase(dbms)) return "bytea"; else if (SQLSERVER.equalsIgnoreCase(dbms)) return "image"; //if (dbms.equalsIgnoreCase(MYSQL)) return "BLOB"; } /** * Returns quoted schema & table to use in SQL queries for the given DBMS. * @param dbms The name of a DBMS (MySQL, PostgreSQL, ...) * @param schema The schema the table resides in. * @param table The table. * @return The schema & table name surrounded in quotes, usable in queries for the specified DBMS. 
*/ public static String quoteSchemaTable(String dbms, String schema, String table) { if (schema.length() == 0) return quoteSymbol(dbms, table); if (dbms.equalsIgnoreCase(ORACLE)) schema = schema.toUpperCase(); if(dbms.equalsIgnoreCase(SQLITE)) return quoteSymbol(dbms, table); return quoteSymbol(dbms, schema) + "." + quoteSymbol(dbms, table); } /** * Returns quoted schema & table to use in SQL queries for the given Connection. * @param conn An SQL connection. * @param schema The schema the table resides in. * @param table The table. * @return The schema & table name surrounded in quotes, usable in queries for the specified connection. */ public static String quoteSchemaTable(Connection conn, String schema, String table) throws SQLException { String dbms = getDbmsFromConnection(conn); return quoteSchemaTable(dbms, schema, table); } public static boolean sqlTypeIsNumeric(int sqlType) { switch (sqlType) { case Types.TINYINT: case Types.SMALLINT: case Types.BIGINT: case Types.DECIMAL: case Types.INTEGER: case Types.FLOAT: case Types.DOUBLE: case Types.REAL: case Types.NUMERIC: /* case Types.ROWID: // produces compiler error in some environments */ return true; default: return false; } } public static boolean sqlTypeIsGeometry(int sqlType) { // using 1111 as the literal value returned by postgis as a PGGeometry type. return sqlType == 1111; } /** * Example usage: * {@code * getResultFromQuery(conn, "SELECT a, b FROM mytable WHERE c = ? and d = ?", new Object[]&#124; "my-c-value", 0xDDDD &#125;, false) * } * @param connection An SQL Connection * @param query An SQL Query with '?' place holders for parameters * @param params Parameters for the SQL query for all '?' place holders, or null if there are no parameters. 
* @return A SQLResult object containing the result of the query
 * @throws SQLException
 */
public static <TYPE> SQLResult getResultFromQuery(Connection connection, String query, TYPE[] params, boolean convertToStrings) throws SQLException
{
	Statement stmt = null;
	ResultSet rs = null;
	SQLResult result = null;
	try
	{
		if (params == null || params.length == 0)
		{
			// no query parameters -- use a plain Statement
			stmt = connection.createStatement();
			rs = stmt.executeQuery(query);
		}
		else
		{
			stmt = prepareStatement(connection, query, params);
			rs = ((PreparedStatement)stmt).executeQuery();
		}
		// make a copy of the query result
		result = new SQLResult(rs, convertToStrings);
	}
	catch (SQLException e)
	{
		//e.printStackTrace();
		// attach the offending query text to the exception for easier debugging
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		// close everything in reverse order
		SQLUtils.cleanup(rs);
		SQLUtils.cleanup(stmt);
	}
	// return the copy of the query result
	return result;
}

/**
 * @param rs The ResultSet returned from a SQL query.
 * @param valueType The Class used for casting values in the ResultSet.
 * @return A list of field-value pairs containing the record data.
 * @throws SQLException If the query fails.
 */
@SuppressWarnings("unchecked")
public static <VALUE_TYPE> List<Map<String,VALUE_TYPE>> getRecordsFromResultSet(ResultSet rs, Class<VALUE_TYPE> valueType) throws SQLException
{
	// list the column names in the result
	String[] columnNames = getColumnNamesFromResultSet(rs);
	// create a Map from each row
	List<Map<String,VALUE_TYPE>> records = new Vector<Map<String,VALUE_TYPE>>();
	rs.setFetchSize(SQLResult.FETCH_SIZE);
	while (rs.next())
	{
		Map<String,VALUE_TYPE> record = new HashMap<String,VALUE_TYPE>(columnNames.length);
		for (int i = 0; i < columnNames.length; i++)
		{
			String columnName = columnNames[i];
			// when Strings are requested, let the driver do the conversion
			Object columnValue = (valueType == String.class) ? rs.getString(columnName) : rs.getObject(columnName);
			record.put(columnName, (VALUE_TYPE)columnValue);
		}
		records.add(record);
	}
	return records;
}

/** Returns the column names of the ResultSet, in column order. */
public static String[] getColumnNamesFromResultSet(ResultSet rs) throws SQLException
{
	String[] columnNames = new String[rs.getMetaData().getColumnCount()];
	for (int i = 0; i < columnNames.length; i++)
		columnNames[i] = rs.getMetaData().getColumnName(i + 1);
	return columnNames;
}

/**
 * @param conn An existing SQL Connection
 * @param selectColumns The list of column names, or null for all columns
 * @param fromSchema The schema containing the table to perform the SELECT statement on.
 * @param fromTable The table to perform the SELECT statement on.
 * @param where Used to construct the WHERE clause
 * @param orderBy The field to order by, or null for no specific order.
 * @param valueType Either String.class or Object.class to denote the VALUE_TYPE class.
 * @return The resulting rows returned by the query.
 * @throws SQLException If the query fails.
 */
public static <VALUE_TYPE> List<Map<String,VALUE_TYPE>> getRecordsFromQuery(
		Connection conn,
		List<String> selectColumns,
		String fromSchema,
		String fromTable,
		WhereClause<VALUE_TYPE> where,
		String orderBy,
		Class<VALUE_TYPE> valueType
	) throws SQLException
{
	PreparedStatement pstmt = null;
	ResultSet rs = null;
	String query = null;
	try
	{
		// create list of columns to use in SELECT statement
		String columnQuery = "";
		for (int i = 0; selectColumns != null && i < selectColumns.size(); i++)
		{
			if (i > 0)
				columnQuery += ",";
			columnQuery += quoteSymbol(conn, selectColumns.get(i));
		}
		if (columnQuery.length() == 0)
			columnQuery = "*"; // select all columns
		
		String orderByQuery = "";
		if (orderBy != null)
			orderByQuery = String.format("ORDER BY %s", quoteSymbol(conn, orderBy));
		
		// build complete query
		query = String.format(
				"SELECT %s FROM %s %s %s",
				columnQuery,
				quoteSchemaTable(conn, fromSchema, fromTable),
				where.clause,
				orderByQuery
			);
		pstmt = prepareStatement(conn, query, where.params);
		rs = pstmt.executeQuery();
		return getRecordsFromResultSet(rs, valueType);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		// close everything in reverse order
		cleanup(rs);
		cleanup(pstmt);
	}
}

/**
 * @param conn An existing SQL Connection
 * @param selectColumns The list of columns in the SELECT statement, or null for all columns.
 * @param fromSchema The schema containing the table to perform the SELECT statement on.
 * @param fromTable The table to perform the SELECT statement on.
 * @param whereParams A map of column names to String values used to construct a WHERE clause.
 * @param caseSensitiveFields The set of fields that must be compared case-sensitively in the WHERE clause.
 * @return The resulting rows returned by the query.
 * @throws SQLException If the query fails.
 */
public static <V> SQLResult getResultFromQuery(
		Connection conn,
		List<String> selectColumns,
		String fromSchema,
		String fromTable,
		Map<String,V> whereParams,
		Set<String> caseSensitiveFields
	) throws SQLException
{
	PreparedStatement pstmt = null;
	ResultSet rs = null;
	SQLResult result = null;
	String query = null;
	try
	{
		// create list of columns to use in SELECT statement
		String columnQuery = "";
		for (int i = 0; selectColumns != null && i < selectColumns.size(); i++)
		{
			if (i > 0)
				columnQuery += ",";
			columnQuery += quoteSymbol(conn, selectColumns.get(i));
		}
		if (columnQuery.length() == 0)
			columnQuery = "*"; // select all columns
		
		// build WHERE clause
		WhereClause<V> where = new WhereClauseBuilder<V>(false)
			.addGroupedConditions(whereParams, caseSensitiveFields, null)
			.build(conn);
		
		// build complete query
		query = String.format(
				"SELECT %s FROM %s %s",
				columnQuery,
				quoteSchemaTable(conn, fromSchema, fromTable),
				where.clause
			);
		pstmt = prepareStatement(conn, query, where.params);
		rs = pstmt.executeQuery();
		// make a copy of the query result
		result = new SQLResult(rs);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		// close everything in reverse order
		SQLUtils.cleanup(rs);
		SQLUtils.cleanup(pstmt);
	}
	// return the copy of the query result
	return result;
}

/**
 * Executes an update/DDL statement with no parameters.
 * @return the number of affected rows
 */
public static int executeUpdate(Connection connection, String query) throws SQLException
{
	Statement stmt = null;
	int result = 0;
	try
	{
		stmt = connection.createStatement();
		result = stmt.executeUpdate(query);
	}
	catch (SQLException e)
	{
		throw e;
	}
	finally
	{
		// close everything in reverse order
		SQLUtils.cleanup(stmt);
	}
	// return the copy of the query result
	return result;
}

/**
 * Executes a parameterized update statement.
 * @return the number of affected rows
 */
public static int executeUpdate(Connection conn, String query, Object[] params) throws SQLException
{
	PreparedStatement stmt = null;
	int result = 0;
	try
	{
		stmt = conn.prepareStatement(query);
		constrainQueryParams(conn, params);
		setPreparedStatementParams(stmt, params);
		result = stmt.executeUpdate();
	}
	catch (SQLException e)
	{
		throw e;
	}
	finally
	{
		// close everything in reverse order
		SQLUtils.cleanup(stmt);
	}
	// return the copy of the query result
	return result;
}

/**
 * Runs a query and returns the int value of the first column of the first row,
 * or defaultValue when the query produces no rows.
 */
public static int getSingleIntFromQuery(Connection conn, String query, int defaultValue) throws SQLException
{
	Statement stmt = null;
	try
	{
		stmt = conn.createStatement();
		return getSingleIntFromQuery(stmt, query, defaultValue);
	}
	finally
	{
		SQLUtils.cleanup(stmt);
	}
}

/** Variant operating on an existing Statement; see the Connection overload. */
public static int getSingleIntFromQuery(Statement stmt, String query, int defaultValue) throws SQLException
{
	ResultSet resultSet = null;
	try
	{
		resultSet = stmt.executeQuery(query);
		if (resultSet.next())
			return resultSet.getInt(1);
		return defaultValue;
	}
	finally
	{
		SQLUtils.cleanup(resultSet);
	}
}

/** Parameterized variant; see the Connection overload. */
public static int getSingleIntFromQuery(Connection conn, String query, Object[] params, int defaultValue) throws SQLException
{
	PreparedStatement pstmt = null;
	ResultSet resultSet = null;
	try
	{
		pstmt = conn.prepareStatement(query);
		setPreparedStatementParams(pstmt, params);
		resultSet = pstmt.executeQuery();
		if (resultSet.next())
			return resultSet.getInt(1);
		return defaultValue;
	}
	finally
	{
		SQLUtils.cleanup(resultSet);
		SQLUtils.cleanup(pstmt);
	}
}

/**
 * @param conn An existing SQL Connection
 * @return A List of schema names
 * @throws SQLException If the query fails.
 */
public static List<String> getSchemas(Connection conn) throws SQLException
{
	List<String> schemas = new Vector<String>();
	ResultSet rs = null;
	try
	{
		DatabaseMetaData md = conn.getMetaData();
		
		// MySQL "doesn't support schemas," so use catalogs.
		if (md.getDatabaseProductName().equalsIgnoreCase(MYSQL))
		{
			rs = md.getCatalogs();
			// use column index instead of name because sometimes the names are lower case, sometimes upper.
			while (rs.next())
				schemas.add(rs.getString(1)); // table_catalog
		}
		else if( md.getDatabaseProductName().equalsIgnoreCase(SQLITE))
		{
			// SQLite has no schemas; expose its built-in master table instead
			schemas.add("sqlite_master");
		}
		else
		{
			rs = md.getSchemas();
			// use column index instead of name because sometimes the names are lower case, sometimes upper.
			while (rs.next())
				schemas.add(rs.getString(1)); // table_schem
		}
		
		Collections.sort(schemas, String.CASE_INSENSITIVE_ORDER);
	}
	finally
	{
		SQLUtils.cleanup(rs);
	}
	return schemas;
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @return A List of table names in the given schema
 * @throws SQLException If the query fails.
 */
public static List<String> getTables(Connection conn, String schemaName) throws SQLException
{
	if (schemaName != null)
	{
		// Oracle reports unquoted identifiers in upper case
		if (SQLUtils.isOracleServer(conn))
			schemaName = schemaName.toUpperCase();
		if (schemaName.length() == 0)
			schemaName = null;
	}
	
	List<String> tables = new Vector<String>();
	ResultSet rs = null;
	try
	{
		DatabaseMetaData md = conn.getMetaData();
		String[] types = new String[]{"TABLE", "VIEW"};
		
		// MySQL uses "catalogs" instead of "schemas"
		if (md.getDatabaseProductName().equalsIgnoreCase(MYSQL))
			rs = md.getTables(schemaName, null, null, types);
		else
			rs = md.getTables(null, schemaName, null, types);
		//May need a case here for SQLITE using sqlite_master as a catalog name.
		
		// use column index instead of name because sometimes the names are lower case, sometimes upper.
		// column indices: 1=table_cat,2=table_schem,3=table_name,4=table_type,5=remarks
		while (rs.next())
			tables.add(rs.getString(3)); // table_name
		
		Collections.sort(tables, String.CASE_INSENSITIVE_ORDER);
	}
	finally
	{
		// close everything in reverse order
		cleanup(rs);
	}
	return tables;
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName A table name existing in the given schema
 * @return A List of column names in the given table
 * @throws SQLException If the query fails.
 */
public static List<String> getColumns(Connection conn, String schemaName, String tableName)
	throws SQLException
{
	List<String> columns = new Vector<String>();
	ResultSet rs = null;
	try
	{
		DatabaseMetaData md = conn.getMetaData();
		
		tableName = escapeSearchString(conn, tableName);
		
		// MySQL uses "catalogs" instead of "schemas"
		if (md.getDatabaseProductName().equalsIgnoreCase(MYSQL))
			rs = md.getColumns(schemaName, null, tableName, null);
		else if (isOracleServer(conn))
			// NOTE(review): unlike getTables(), this does not null-guard schemaName before
			// toUpperCase() -- confirm callers always pass a non-null schema on Oracle
			rs = md.getColumns(null, schemaName.toUpperCase(), tableName, null);
		else
			rs = md.getColumns(null, schemaName, tableName, null);
		//May need a case here for SQLITE using sqlite_master as a catalog name.
		
		// use column index instead of name because sometimes the names are lower case, sometimes upper.
		while (rs.next())
			columns.add(rs.getString(4)); // column_name
	}
	finally
	{
		// close everything in reverse order
		SQLUtils.cleanup(rs);
	}
	return columns;
}

// not implemented by SQLite JDBC driver
/*
 * 
 * Attaches a database to an SQLite instance.
 * @param conn A SQLite connection.
 * @param databaseName The name of the database.
 * @param filePath The path to the file where the database either exists or should be created.
 * @throws SQLException
 * /
public static void createSQLiteDatabase(Connection conn, String databaseName, String filePath) throws SQLException
{
	if (SQLUtils.schemaExists(conn, databaseName))
		return;
	
	String query = String.format("ATTACH DATABASE ? AS %s", quoteSymbol(conn, databaseName));
	PreparedStatement stmt = null;
	try
	{
		stmt = prepareStatement(conn, query, new String[]{ filePath });
		stmt.executeUpdate(query);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(stmt);
	}
}*/

/**
 * Creates a schema in a non-SQLite database. For SQLite, this does nothing.
 * @param conn An existing SQL Connection
 * @param schema The value to be used as the Schema name
 * @throws SQLException If the query fails.
 */
public static void createSchema(Connection conn, String schema) throws SQLException
{
	if (isSQLite(conn))
		return;
	if (SQLUtils.schemaExists(conn, schema))
		return;
	
	// NOTE(review): schema is not passed through quoteSymbol() here, unlike the other DDL
	// helpers in this class -- confirm callers only supply trusted, identifier-safe names
	String query = String.format("CREATE SCHEMA %s", schema);
	Statement stmt = null;
	try
	{
		stmt = conn.createStatement();
		stmt.executeUpdate(query);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(stmt);
	}
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName The value to be used as the table name
 * @param columnNames The values to be used as the column names
 * @param columnTypes The SQL types to use when creating the table
 * @param primaryKeyColumns The list of columns to be used for primary keys
 * @throws SQLException If the query fails.
 */
public static void createTable(
		Connection conn, String schemaName, String tableName,
		List<String> columnNames, List<String> columnTypes, List<String> primaryKeyColumns
	) throws SQLException
{
	if (columnNames.size() != columnTypes.size())
		throw new IllegalArgumentException(String.format("columnNames length (%s) does not match columnTypes length (%s)", columnNames.size(), columnTypes.size()));
	
	//if table exists return
	if( tableExists(conn, schemaName, tableName) )
		return;
	
	// build the comma-separated list of "name type" column definitions
	StringBuilder columnClause = new StringBuilder();
	for (int i = 0; i < columnNames.size(); i++)
	{
		if( i > 0 )
			columnClause.append(',');
		String type = columnTypes.get(i);
		columnClause.append(String.format("%s %s", quoteSymbol(conn, columnNames.get(i)), type));
	}
	
	if (primaryKeyColumns != null && primaryKeyColumns.size() > 0)
	{
		// truncate the constraint name to 30 characters (identifier length limit)
		String pkName = truncate(String.format("pk_%s", tableName), 30);
		
		String[] quotedKeyColumns = new String[primaryKeyColumns.size()];
		int i = 0;
		for (String keyCol : primaryKeyColumns)
			quotedKeyColumns[i++] = quoteSymbol(conn, keyCol);
		
		// append the primary key constraint to the column definitions
		columnClause.append(
			String.format(
				", CONSTRAINT %s PRIMARY KEY (%s)",
				quoteSymbol(conn, pkName),
				Strings.join(",", quotedKeyColumns)
			)
		);
	}
	
	String quotedSchemaTable = quoteSchemaTable(conn, schemaName, tableName);
	String query = String.format("CREATE TABLE %s (%s)", quotedSchemaTable, columnClause);
	Statement stmt = null;
	try
	{
		stmt = conn.createStatement();
		stmt.executeUpdate(query);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(stmt);
	}
}

/**
 * Adds a foreign key constraint to an existing table.
 * @param conn An existing SQL Connection
 * @param schemaName The schema containing both tables
 * @param tableName The table to alter
 * @param keyName The column in tableName that references targetTable
 * @param targetTable The referenced table (in the same schema)
 * @param targetKey The referenced column in targetTable
 * @throws SQLException If the query fails.
 */
public static void addForeignKey(
		Connection conn, String schemaName, String tableName,
		String keyName, String targetTable, String targetKey
	) throws SQLException
{
	// TODO: Check for cross-DB portability
	Statement stmt = null;
	String query = String.format("ALTER TABLE %s ADD FOREIGN KEY (%s) REFERENCES %s(%s)",
			quoteSchemaTable(conn, schemaName, tableName),
			quoteSymbol(conn, keyName),
			quoteSchemaTable(conn, schemaName, targetTable),
			quoteSymbol(conn, targetKey));
	try
	{
		stmt = conn.createStatement();
		stmt.executeUpdate(query);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(stmt);
	}
}

/**
 * Prepares a statement and binds the given parameters (List variant).
 * @throws SQLException If preparing the statement or binding a parameter fails.
 */
public static <TYPE> PreparedStatement prepareStatement(Connection conn, String query, List<TYPE> params) throws SQLException
{
	PreparedStatement cstmt = conn.prepareStatement(query);
	constrainQueryParams(conn, params);
	setPreparedStatementParams(cstmt, params);
	return cstmt;
}

/**
 * Prepares a statement and binds the given parameters (array variant).
 * @throws SQLException If preparing the statement or binding a parameter fails.
 */
public static <TYPE> PreparedStatement prepareStatement(Connection conn, String query, TYPE[] params) throws SQLException
{
	PreparedStatement cstmt = conn.prepareStatement(query);
	constrainQueryParams(conn, params);
	setPreparedStatementParams(cstmt, params);
	return cstmt;
}

// Adjusts params in place for the target DBMS (currently Oracle only). List variant.
protected static <T> void constrainQueryParams(Connection conn, List<T> params)
{
	if (isOracleServer(conn))
		for (int i = 0; i < params.size(); i++)
			params.set(i, constrainOracleQueryParam(params.get(i)));
}

// Adjusts params in place for the target DBMS (currently Oracle only). Array variant.
protected static <T> void constrainQueryParams(Connection conn, T[] params)
{
	if (isOracleServer(conn))
		for (int i = 0; i < params.length; i++)
			params[i] = constrainOracleQueryParam(params[i]);
}

@SuppressWarnings("unchecked")
protected static <T> T constrainOracleQueryParam(T param)
{
	// constrain oracle double values to float range
	if (param instanceof Double)
		param = (T)(Float)((Double) param).floatValue();
	return param;
}

// Binds parameters using 1-based JDBC indices (List variant).
public static <TYPE> void setPreparedStatementParams(PreparedStatement cstmt, List<TYPE> params) throws SQLException
{
	int i = 1;
	for (TYPE param : params)
		cstmt.setObject(i++, param);
}

// Binds parameters using 1-based JDBC indices (array variant).
public static <TYPE> void setPreparedStatementParams(PreparedStatement cstmt, TYPE[] params) throws SQLException
{
	int i = 1;
	for (TYPE param : params)
		cstmt.setObject(i++, param);
}

/**
 * Updates the rows matching whereParams with the values in dataUpdate.
 * NOTE(review): the fromSchema parameter is accepted but never used -- the UPDATE statement
 * references fromTable without schema qualification or quoting; confirm this is intentional.
 * @return The number of rows updated.
 * @throws SQLException If the query fails.
 */
public static int updateRows(Connection conn, String fromSchema, String fromTable, Map<String,Object> whereParams, Map<String,Object> dataUpdate, Set<String> caseSensitiveFields) throws SQLException
{
	PreparedStatement stmt = null;
	try
	{
		// build the update block
		String updateBlock;
		List<String> updateBlockList = new LinkedList<String>();
		List<Object> queryParams = new LinkedList<Object>();
		for (Entry<String,Object> data : dataUpdate.entrySet())
		{
			updateBlockList.add(String.format("%s=?", data.getKey()));
			queryParams.add(data.getValue());
		}
		updateBlock = Strings.join(",", updateBlockList);
		
		// build where clause
		WhereClause<Object> where = new WhereClauseBuilder<Object>(false)
			.addGroupedConditions(whereParams, caseSensitiveFields, null)
			.build(conn);
		queryParams.addAll(where.params);
		
		// build and execute query
		String query = String.format("UPDATE %s SET %s %s", fromTable, updateBlock, where.clause);
		stmt = prepareStatement(conn, query, queryParams);
		return stmt.executeUpdate();
	}
	finally
	{
		cleanup(stmt);
	}
}

/**
 * Modifies a query so it will only return a single row.
 * @param dbms The target DBMS.
 * @param query A SQL Query.
 * @return The modified query.
 */
private static String limitQueryToOneRow(String dbms, String query)
{
	if (dbms.equals(ORACLE))
		return String.format("SELECT * FROM (%s) WHERE ROWNUM <= 1", query);
	
	if (dbms.equals(MYSQL) || dbms.equals(POSTGRESQL) || dbms.equals(SQLITE))
		return query + " LIMIT 1";
	
	throw new InvalidParameterException("DBMS not supported: " + dbms);
}

/**
 * Builds a dialect-specific SQL expression that evaluates to MAX(idField)+1, with a minimum of 1
 * so an empty table yields id 1.
 */
private static String newIdClause(String dbms, String quotedIdField, String quotedTable)
{
	if (dbms.equals(SQLITE))
	{
		return String.format(
			"(SELECT CASE WHEN MAX(%s) IS NULL THEN 1 ELSE MAX(%s)+1 END FROM %s LIMIT 1)",
			quotedIdField, quotedIdField, quotedTable
		);
	}
	if (dbms.equals(SQLSERVER))
	{
		return String.format(
			"(SELECT TOP 1 CASE WHEN MAX(%s) IS NULL THEN 1 ELSE MAX(%s)+1 END FROM %s)",
			quotedIdField, quotedIdField, quotedTable
		);
	}
	if (dbms.equals(ORACLE))
	{
		String query = String.format("SELECT MAX(%s)+1 FROM %s", quotedIdField, quotedTable);
		query = limitQueryToOneRow(dbms, query);
		return String.format("GREATEST(1, (%s))", query);
	}
	// MySQL/PostgreSQL
	return String.format(
		"GREATEST(1, (SELECT MAX(%s)+1 FROM %s LIMIT 1))",
		quotedIdField, quotedTable
	);
}

/**
 * Generates a new id manually using MAX(idField)+1.
 * @param conn
 * @param schemaName
 * @param tableName
 * @param data Unquoted field names mapped to raw values.
 * @param idField
 * @return The ID of the new row.
 * @throws SQLException
 */
public static int insertRowReturnID(Connection conn, String schemaName, String tableName, Map<String,Object> data, String idField) throws SQLException
{
	String dbms = getDbmsFromConnection(conn);
	boolean isOracle = dbms.equals(ORACLE);
	boolean isSQLServer = dbms.equals(SQLSERVER);
	boolean isMySQL = dbms.equals(MYSQL);
	boolean isPostgreSQL = dbms.equals(POSTGRESQL);
	boolean isSQLite = dbms.equals(SQLITE);
	
	// these dialects compute the new id with a separate SELECT before the INSERT;
	// the others embed the id expression (or OUTPUT/RETURNING) in the INSERT itself
	boolean useTwoQueries = isMySQL || isOracle || isSQLite;
	
	String query = null;
	List<String> columns = new LinkedList<String>();
	LinkedList<Object> values = new LinkedList<Object>();
	for (Entry<String,Object> entry : data.entrySet())
	{
		columns.add(quoteSymbol(conn, entry.getKey()));
		values.add(entry.getValue());
	}
	String quotedIdField = quoteSymbol(conn, idField);
	String quotedTable = quoteSchemaTable(conn, schemaName, tableName);
	String fields_string = quotedIdField + "," + Strings.join(",", columns);
	String id_string;
	if (useTwoQueries)
		id_string = "?"; // we get the new id below and then give it as a param
	else
		id_string = newIdClause(dbms, quotedIdField, quotedTable);
	String values_string = id_string + "," + Strings.mult(",", "?", values.size());
	
	// build query
	query = String.format("INSERT INTO %s (%s)", quotedTable, fields_string);
	if (isSQLServer)
		query += String.format(" OUTPUT INSERTED.%s", quotedIdField);
	query += String.format(" VALUES (%s)", values_string);
	if (isPostgreSQL)
		query += String.format(" RETURNING %s", quotedIdField);
	
	try
	{
		int id;
		// serialize id generation and insert on this connection object so two threads
		// sharing the connection cannot both read the same MAX(id)
		synchronized (conn)
		{
			if (useTwoQueries)
			{
				String nextQuery = query;
				
				query = String.format(
						"SELECT %s FROM %s",
						newIdClause(dbms, quotedIdField, quotedTable),
						quotedTable
					);
				query = limitQueryToOneRow(dbms, query);
				id =
					getSingleIntFromQuery(conn, query, 1);
				values.addFirst(id);
				query = nextQuery;
				executeUpdate(conn, query, values.toArray());
			}
			else
			{
				// the INSERT itself returns the new id (SQL Server OUTPUT / PostgreSQL RETURNING above)
				id = getSingleIntFromQuery(conn, query, values.toArray(), -1);
			}
		}
		return id;
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
}

/**
 * This function checks if a connection is for a PostgreSQL server.
 * @param conn A SQL Connection.
 * @return A value of true if the Connection is for a PostgreSQL server.
 * @throws SQLException
 */
public static boolean isPostgreSQL(Connection conn)
{
	return getDbmsFromConnection(conn).equals(POSTGRESQL);
}

/**
 * This function checks if a connection is for a MySQL server.
 * @param conn A SQL Connection.
 * @return A value of true if the Connection is for a MySQL server.
 * @throws SQLException
 */
public static boolean isMySQL(Connection conn)
{
	return getDbmsFromConnection(conn).equals(MYSQL);
}

/**
 * This function checks if a connection is for an Oracle server.
 * @param conn A SQL Connection.
 * @return A value of true if the Connection is for an Oracle server.
 * @throws SQLException
 */
public static boolean isOracleServer(Connection conn)
{
	return getDbmsFromConnection(conn).equals(ORACLE);
}

/**
 * This function checks if a connection is for a Microsoft SQL Server.
 * @param conn A SQL Connection.
 * @return A value of true if the Connection is for a Microsoft SQL Server.
 */
public static boolean isSQLServer(Connection conn)
{
	return getDbmsFromConnection(conn).equals(SQLSERVER);
}

/**
 * This function checks if a connection is for a SQLite server.
 * @param conn A SQL Connection.
 * @return A value of true if the Connection is for a SQLite Server.
 */
public static boolean isSQLite(Connection conn)
{
	return getDbmsFromConnection(conn).equals(SQLITE);
}

// Truncates str to at most maxLength characters.
private static String truncate(String str, int maxLength)
{
	if (str.length() > maxLength)
		return str.substring(0, maxLength);
	return str;
}

// Generates a deterministic, length-bounded symbol name from a prefix and a hash of the given items.
private static String generateSymbolName(String prefix, Object ...items)
{
	int hash = Arrays.deepToString(items).hashCode();
	return String.format("%s_%s", prefix, hash);
}

// Generates and quotes a symbol name; on Oracle the symbol is schema-qualified.
private static String generateQuotedSymbolName(String prefix, Connection conn, String schema, String table, String ...columns) throws SQLException
{
	String indexName = generateSymbolName(prefix, schema, table, columns);
	if (isOracleServer(conn))
		return quoteSchemaTable(conn, schema, indexName);
	else
		return quoteSymbol(conn, indexName);
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName The name of an existing table
 * @param columnNames The names of the columns to use
 * @throws SQLException If the query fails.
 */
public static void createIndex(Connection conn, String schemaName, String tableName, String[] columnNames) throws SQLException
{
	createIndex(conn, schemaName, tableName, columnNames, null);
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName The name of an existing table
 * @param columnNames The names of the columns to use.
 * @param columnLengths The lengths to use as indices, may be null.
 * @throws SQLException If the query fails.
 */
public static void createIndex(Connection conn, String schemaName, String tableName, String[] columnNames, Integer[] columnLengths) throws SQLException
{
	boolean isMySQL = getDbmsFromConnection(conn).equals(MYSQL);
	String fields = "";
	for (int i = 0; i < columnNames.length; i++)
	{
		if (i > 0)
			fields += ", ";
		String symbol = quoteSymbol(conn, columnNames[i]);
		// MySQL prefix index: only the first columnLengths[i] characters are indexed
		if (isMySQL && columnLengths != null && columnLengths[i] > 0)
			fields += String.format("%s(%d)", symbol, columnLengths[i]);
		else
			fields += symbol;
	}
	String query = String.format(
			"CREATE INDEX %s ON %s (%s)",
			generateQuotedSymbolName("index", conn, schemaName, tableName, columnNames),
			SQLUtils.quoteSchemaTable(conn, schemaName, tableName),
			fields
		);
	Statement stmt = null;
	try
	{
		stmt = conn.createStatement();
		stmt.executeUpdate(query);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(stmt);
	}
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName The name of an existing table
 * @param columnName The name of the column to create
 * @param columnType An SQL type to use when creating the column
 * @throws SQLException If the query fails.
 */
public static void addColumn( Connection conn, String schemaName, String tableName, String columnName, String columnType)
	throws SQLException
{
	String format = "ALTER TABLE %s ADD %s %s";
	// Note: PostgreSQL does not accept parentheses around the new column definition.
	String query = String.format(format, quoteSchemaTable(conn, schemaName, tableName), quoteSymbol(conn, columnName), columnType);
	Statement stmt = null;
	try
	{
		stmt = conn.createStatement();
		stmt.executeUpdate(query);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(stmt);
	}
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName A table name existing in the given schema
 * @param columnArg The name of the column to grab
 * @return A List of string values from the column
 * @throws SQLException If the query fails.
 */
public static List<String> getColumn(Connection conn, String schemaName, String tableName, String columnArg)
	throws SQLException
{
	List<String> values = new Vector<String>(); //Return value
	
	Statement stmt = null;
	ResultSet rs = null;
	String query = "";
	try
	{
		query = String.format("SELECT %s FROM %s", quoteSymbol(conn, columnArg), quoteSchemaTable(conn, schemaName, tableName));
		stmt = conn.createStatement();
		rs = stmt.executeQuery(query);
		rs.setFetchSize(SQLResult.FETCH_SIZE);
		while (rs.next())
			values.add(rs.getString(1));
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(rs);
		SQLUtils.cleanup(stmt);
	}
	return values;
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName A table name existing in the given schema
 * @param columnArg The name of the integer column to grab
 * @return A List of integer values from the column
 * @throws SQLException If the query fails.
 */
public static List<Integer> getIntColumn(Connection conn, String schemaName, String tableName, String columnArg)
	throws SQLException
{
	List<Integer> values = new Vector<Integer>(); //Return value
	
	Statement stmt = null;
	ResultSet rs = null;
	String query = "";
	try
	{
		query = String.format("SELECT %s FROM %s", quoteSymbol(conn, columnArg), quoteSchemaTable(conn, schemaName, tableName));
		stmt = conn.createStatement();
		rs = stmt.executeQuery(query);
		rs.setFetchSize(SQLResult.FETCH_SIZE);
		while (rs.next())
			values.add(rs.getInt(1));
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(rs);
		SQLUtils.cleanup(stmt);
	}
	return values;
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName A table name existing in the given schema
 * @param record The record to be inserted into the table
 * @return The number of rows inserted.
 * @throws SQLException If the query fails.
 */
public static <V> int insertRow( Connection conn, String schemaName, String tableName, Map<String,V> record)
	throws SQLException
{
	// delegate to the batch version with a single-record list
	List<Map<String,V>> list = new Vector<Map<String,V>>(1);
	list.add(record);
	return insertRows(conn, schemaName, tableName, list);
}

/**
 * @param conn An existing SQL Connection
 * @param schemaName A schema name accessible through the given connection
 * @param tableName A table name existing in the given schema
 * @param records The records to be inserted into the table
 * @return The number of rows inserted.
 * @throws SQLException If the query fails.
 */
public static <V> int insertRows( Connection conn, String schemaName, String tableName, List<Map<String,V>> records)
	throws SQLException
{
	PreparedStatement pstmt = null;
	String query = "insertRows()";
	try
	{
		// get a list of all the field names in all the records
		Set<String> fieldSet = new HashSet<String>();
		for (Map<String,V> record : records)
			fieldSet.addAll(record.keySet());
		List<String> fieldNames = new Vector<String>(fieldSet);
		
		// stop if there aren't any records or field names
		if (records.size() == 0 || fieldNames.size() == 0)
			return 0;
		
		// get full list of ordered query params
		// (a record missing a field contributes null for that placeholder)
		Object[] queryParams = new Object[fieldNames.size() * records.size()];
		int i = 0;
		for (Map<String, V> record : records)
			for (String fieldName : fieldNames)
				queryParams[i++] = record.get(fieldName);
		
		// quote field names
		for (i = 0; i < fieldNames.size(); i++)
			fieldNames.set(i, quoteSymbol(conn, fieldNames.get(i)));
		
		String quotedSchemaTable = quoteSchemaTable(conn, schemaName, tableName);
		String fieldNamesStr = Strings.join(",", fieldNames);
		
		// construct query
		String recordClause = String.format("(%s)", Strings.mult(",", "?", fieldNames.size()));
		String valuesClause = Strings.mult(",", recordClause, records.size());
		query = String.format(
			"INSERT INTO %s (%s) VALUES %s",
			quotedSchemaTable,
			fieldNamesStr,
			valuesClause
		);
		
		// prepare call and set string parameters
		pstmt = prepareStatement(conn, query.toString(), queryParams);
		int result = pstmt.executeUpdate();
		return result;
	}
	catch (SQLException e)
	{
		// NOTE(review): dumps all record data to stderr on failure -- consider removing
		// if records may be large or contain sensitive values
		System.err.println(records);
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(pstmt);
	}
}

/**
 * @param conn An existing SQL Connection
 * @param schema The name of a schema to check for.
 * @return true if the schema exists
 * @throws SQLException If the getSchemas query fails.
*/ public static boolean schemaExists(Connection conn, String schema) throws SQLException { List<String> schemas = getSchemas(conn); for (String existingSchema : schemas) if (existingSchema.equalsIgnoreCase(schema)) return true; return false; } public static void dropTableIfExists(Connection conn, String schema, String table) throws SQLException { String dbms = getDbmsFromConnection(conn); String quotedTable = SQLUtils.quoteSchemaTable(conn, schema, table); String query = ""; if (dbms.equals(SQLSERVER)) { query = "IF OBJECT_ID('" + quotedTable + "','U') IS NOT NULL DROP TABLE " + quotedTable; } else if (dbms.equals(ORACLE)) { // do nothing if table doesn't exist if (!SQLUtils.tableExists(conn, schema, table)) return; query = "DROP TABLE " + quotedTable; } else { query = "DROP TABLE IF EXISTS " + quotedTable; } Statement stmt = conn.createStatement(); stmt.executeUpdate(query); stmt.close(); cleanup(stmt); } /** * This function will delete from a table the rows that have a specified set of column values. * @param conn An existing SQL Connection * @param schemaName A schema name accessible through the given connection * @param tableName A table name existing in the given schema * @param where The conditions to be used in the WHERE clause of the query * @return The number of rows that were deleted. * @throws SQLException If the query fails. */ public static <V> int deleteRows(Connection conn, String schemaName, String tableName, WhereClause<V> where) throws SQLException { // VERY IMPORTANT - do not delete if there are no records specified, because that would delete everything. 
if (Strings.isEmpty(where.clause)) return 0; PreparedStatement pstmt = null; String query = null; try { query = String.format("DELETE FROM %s %s", SQLUtils.quoteSchemaTable(conn, schemaName, tableName), where.clause); pstmt = prepareStatement(conn, query, where.params); return pstmt.executeUpdate(); } catch (SQLException e) { throw new SQLExceptionWithQuery(query, e); } finally { SQLUtils.cleanup(pstmt); } } /** * This will escape special characters in a SQL search string. * Not reliable on SQLite since there is no escape character. * @param conn A SQL Connection. * @param searchString A SQL search string containing special characters to be escaped. * @return The searchString with special characters escaped. * @throws SQLException */ private static String escapeSearchString(Connection conn, String searchString) throws SQLException { String escape = conn.getMetaData().getSearchStringEscape(); if (escape == null) { return searchString; } StringBuilder sb = new StringBuilder(); int n = searchString.length(); for (int i = 0; i < n; i++) { char c = searchString.charAt(i); if (c == '.' || c == '%' || c == '_' || c == '"' || c == '\'' || c == '`') sb.append(escape); sb.append(c); } return sb.toString(); } /** * @param conn An existing SQL Connection * @param schema The name of a schema to check in. * @param table The name of a table to check for. * @return true if the table exists in the specified schema. * @throws SQLException If either getSchemas() or getTables() fails. 
 */
public static boolean tableExists(Connection conn, String schema, String table)
	throws SQLException
{
	// case-insensitive comparison because DBMSs differ in how they report table names
	List<String> tables = getTables(conn, schema);
	for (String existingTable : tables)
		if (existingTable.equalsIgnoreCase(table))
			return true;
	return false;
}

// Best-effort close; errors are intentionally ignored so cleanup is safe to call in finally blocks.
public static void cleanup(ResultSet obj)
{
	if (obj != null)
		try { obj.close(); } catch (Exception e) { }
}

// Best-effort close; errors are intentionally ignored so cleanup is safe to call in finally blocks.
public static void cleanup(Statement obj)
{
	if (obj != null)
		try { obj.close(); } catch (Exception e) { }
}

// Best-effort close; errors are intentionally ignored so cleanup is safe to call in finally blocks.
public static void cleanup(Connection obj)
{
	if (obj != null)
		try { obj.close(); } catch (Exception e) { }
}

// Returns the dialect-specific type name for a variable-length string column of the given length.
public static String getVarcharTypeString(Connection conn, int length)
{
	if (isOracleServer(conn))
		return String.format("VARCHAR2(%s CHAR)", length);
	return String.format("VARCHAR(%s)", length);
}

// Returns the dialect-specific type name for a tiny integer column.
public static String getTinyIntTypeString(Connection conn) throws SQLException
{
	String dbms = getDbmsFromConnection(conn);
	if (dbms.equals(ORACLE))
		return "NUMBER(1,0)";
	if (dbms.equals(POSTGRESQL))
		return "SMALLINT";
	
	// mysql, sqlserver
	return "TINYINT";
}

// Returns the dialect-specific type name for a 32-bit integer column.
public static String getIntTypeString(Connection conn)
{
	if (isOracleServer(conn))
		return "NUMBER(10,0)";
	return "INT";
}

// Returns the dialect-specific type name for a double-precision floating point column.
public static String getDoubleTypeString(Connection conn)
{
	if (isSQLServer(conn))
		return "FLOAT"; // this is an 8 byte floating point type with 53 bits for the mantissa, the same as an 8 byte double.
	
	// but SQL Server's DOUBLE PRECISION type isn't standard
	return "DOUBLE PRECISION";
}

// Returns the dialect-specific type name for a 64-bit integer column.
public static String getBigIntTypeString(Connection conn)
{
	if (isOracleServer(conn))
		return "NUMBER(20,0)";
	return "BIGINT";
}

// Returns the dialect-specific type name for a date+time column.
public static String getDateTimeTypeString(Connection conn)
{
	if (isOracleServer(conn))
		return "DATE";
	return "DATETIME";
}

// Returns the token this DBMS's CSV bulk-load format expects for NULL values.
protected static String getCSVNullValue(Connection conn)
{
	try
	{
		String dbms = getDbmsFromConnection(conn);
		
		if (dbms.equals(MYSQL))
			return "\\N";
		else if (dbms.equals(POSTGRESQL) || dbms.equals(SQLSERVER) || dbms.equals(ORACLE))
			return ""; // empty string (no quotes)
		else
			throw new InvalidParameterException("Unsupported DBMS type: " + dbms);
	}
	catch (Exception e)
	{
		// this should never happen
		throw new RuntimeException(e);
	}
}

/**
 * Enables or disables IDENTITY_INSERT for a table.
 * This function should only be called on a SQL Server connection; it is a no-op otherwise.
 * @param conn An existing SQL Connection.
 * @param schema The schema containing the table.
 * @param table The table whose identity-insert setting should change.
 * @param on true to enable IDENTITY_INSERT, false to disable it.
 * @throws SQLException If the query fails.
 */
public static void setSQLServerIdentityInsert(Connection conn, String schema, String table, boolean on) throws SQLException
{
	if (!isSQLServer(conn))
		return;
	
	String quotedTable = SQLUtils.quoteSchemaTable(conn, schema, table);
	String query = String.format("SET IDENTITY_INSERT %s %s", quotedTable, on ? "ON" : "OFF");
	
	Statement stmt = null;
	try
	{
		stmt = conn.createStatement();
		stmt.execute(query);
	}
	catch (SQLException e)
	{
		throw new SQLExceptionWithQuery(query, e);
	}
	finally
	{
		SQLUtils.cleanup(stmt);
	}
}

// Pairs a WHERE clause's SQL text with the parameter values to bind for its '?' placeholders.
public static class WhereClause<V>
{
	// the SQL text of the WHERE clause (may be empty when there are no conditions)
	public String clause;
	// the values to bind, in placeholder order
	public List<V> params;
	
	/**
	 * An object with three modes: and, or, and cond.
	 * If one property is specified, the others must be null.
	 */
	public static class NestedColumnFilters
	{
		/**
		 * Nested filters to be grouped with AND logic.
		 */
		public NestedColumnFilters[] and;
		/**
		 * Nested filters to be grouped with OR logic.
		 */
		public NestedColumnFilters[] or;
		/**
		 * A condition for a particular field.
		 */
		public ColumnFilter cond;
		/**
		 * Makes sure the values in this object are specified correctly.
* @throws RemoteException If this object or any of its nested objects are missing required values. */ public void assertValid() throws RemoteException { if ((and==null?0:1) + (or==null?0:1) + (cond==null?0:1) != 1) error("Exactly one of the properties 'and', 'or', 'cond' must be set"); if (cond != null) cond.assertValid(); if (and != null) { if (and.length == 0) error("'and' must have at least one item"); for (NestedColumnFilters nested : and) if (nested != null) nested.assertValid(); else error("'and' must not contain null items"); } if (or != null) { if (or.length == 0) error("'or' must have at least one item"); for (NestedColumnFilters nested : or) if (nested != null) nested.assertValid(); else error("'or' must not contain null items"); } } private void error(String message) throws RemoteException { throw new RemoteException("NestedColumnFilters: " + message); } } public WhereClause(String whereClause, List<V> params) { this.clause = whereClause; this.params = params; } /** * A condition for filtering query results. */ public static class ColumnFilter { /** * The unquoted field name. */ public Object f; /** * Contains a list of String values ["a", "b", ...] * If <code>v</code> is set, <code>r</code> must be null. */ public Object[] v; /** * Contains a list of numeric ranges [[min,max], [min2,max2], ...] * If <code>r</code> is set, <code>v</code> must be null. */ public Object[][] r; /** * Makes sure the values in this object are specified correctly. * @throws RemoteException If this object or any of its nested objects are missing required values. */ public void assertValid() throws RemoteException { if (f == null) error("'f' cannot be null"); if ((v == null) == (r == null)) error("Either 'v' or 'r' must be set, but not both"); } private void error(String message) throws RemoteException { throw new RemoteException("ColumnFilter: " + message); } } /** * Builds a WhereClause from nested filtering logic. * @param conn * @param filters * @return The WhereClause. 
* @throws SQLException */ public static WhereClause<Object> fromFilters(Connection conn, NestedColumnFilters filters) throws SQLException { WhereClause<Object> where = new WhereClause<Object>("", new Vector<Object>()); StringBuilder sb = new StringBuilder(" WHERE "); if (filters != null) build(conn, sb, where.params, filters); if (!where.params.isEmpty()) where.clause = sb.toString(); return where; } private static void build(Connection conn, StringBuilder clause, List<Object> params, NestedColumnFilters filters) throws SQLException { clause.append("("); if (filters.cond != null) { String quotedField = quoteSymbol(conn, filters.cond.f.toString()); String stringCompare = null; Object[] values = filters.cond.v != null ? filters.cond.v : filters.cond.r; for (int i = 0; i < values.length; i++) { if (i > 0) clause.append(" OR "); if (values == filters.cond.v) // string value { if (stringCompare == null) { stringCompare = String.format("%s = ?", quotedField); //stringCompare = caseSensitiveCompare(conn, quotedField, "?"); } clause.append(stringCompare); params.add(values[i]); } else // numeric range { clause.append(String.format("(? <= %s AND %s <= ?)", quotedField, quotedField)); Object[] range = (Object[])values[i]; params.add(range[0]); params.add(range[1]); } } } else { NestedColumnFilters[] list = filters.and != null ? filters.and : filters.or; int i = 0; for (NestedColumnFilters item : list) { if (i > 0) clause.append(list == filters.and ? " AND " : " OR "); build(conn, clause, params, item); i++; } } clause.append(")"); } } /** * The escape character (backslash) used by convertWildcards() and getLikeEscapeClause() * @see #convertWildcards(String) * @see #getLikeEscapeClause(Connection) */ public static final char WILDCARD_ESCAPE = '\\'; /** * Converts a search string which uses basic '?' and '*' wildcards into an equivalent SQL search string. * @param searchString A search string which uses basic '?' 
and '*' wildcards * @return The equivalent SQL search string using a backslash (\) as an escape character. * @see #getLikeEscapeClause(Connection) */ public static String convertWildcards(String searchString) { // escape special characters (including the escape character first) for (char chr : new char[]{ WILDCARD_ESCAPE, '%', '_', '[' }) searchString = searchString.replace("" + chr, "" + WILDCARD_ESCAPE + chr); // replace our wildcards with SQL wildcards searchString = searchString.replace('?', '_').replace('*', '%'); return searchString; } /** * Returns an ESCAPE clause for use with a LIKE comparison. * @param conn The SQL Connection where the ESCAPE clause will be used. * @return The ESCAPE clause specifying a backslash (\) as the escape character. * @see #convertWildcards(String) */ public static String getLikeEscapeClause(Connection conn) { String dbms = getDbmsFromConnection(conn); if (dbms.equals(MYSQL) || dbms.equals(POSTGRESQL)) return " ESCAPE '\\\\' "; return " ESCAPE '\\' "; } /** * Specifies how two SQL terms should be compared */ public static enum CompareMode { NORMAL, CASE_SENSITIVE, WILDCARD } public static class WhereClauseBuilder<V> { private List<List<Condition>> _nestedConditions = new Vector<List<Condition>>(); private List<V> _params = new Vector<V>(); private boolean _conjunctive = false; /** * @param conjunctive Set to <code>true</code> for Conjunctive Normal Form: (a OR b) AND (x OR y). * Set to <code>false</code> for Disjunctive Normal Form: (a AND b) OR (x AND y). */ public WhereClauseBuilder(boolean conjunctive) { _conjunctive = conjunctive; } /** * Adds a set of grouped inner conditions. * Conjunctive Normal Form uses outer AND logic and will group these inner conditions with OR logic like (field1 = value1 OR field2 = value2). * Disjunctive Normal Form uses outer OR logic and will group these inner conditions with AND logic like (field1 = value1 AND field2 = value2). 
* @param fieldsAndValues Unquoted field names mapped to raw values * @param caseSensitiveFields A set of field names which should use case sensitive compare. * @param wildcardFields A set of field names which should use a "LIKE" SQL clause for wildcard search. * @see weave.utils.SQLUtils#convertWildcards(String) */ public WhereClauseBuilder<V> addGroupedConditions(Map<String,V> fieldsAndValues, Set<String> caseSensitiveFields, Set<String> wildcardFields) throws SQLException { Map<String, CompareMode> compareModes = new HashMap<String,CompareMode>(); if (caseSensitiveFields != null) for (String field : caseSensitiveFields) compareModes.put(field, CompareMode.CASE_SENSITIVE); if (wildcardFields != null) for (String field : wildcardFields) compareModes.put(field, CompareMode.WILDCARD); return addGroupedConditions(fieldsAndValues, compareModes); } /** * Adds a set of grouped inner conditions. * Conjunctive Normal Form uses outer AND logic and will group these inner conditions with OR logic like (field1 = value1 OR field2 = value2). * Disjunctive Normal Form uses outer OR logic and will group these inner conditions with AND logic like (field1 = value1 AND field2 = value2). 
* @param fieldsAndValues Unquoted field names mapped to raw values * @param compareModes Field names mapped to compare modes * @see weave.utils.SQLUtils#convertWildcards(String) */ public WhereClauseBuilder<V> addGroupedConditions(Map<String,V> fieldsAndValues, Map<String,CompareMode> compareModes) throws SQLException { if (fieldsAndValues.size() == 0) throw new InvalidParameterException("No values specified"); List<Condition> conditions = new Vector<Condition>(); for (Entry<String,V> entry : fieldsAndValues.entrySet()) { Condition cond = new Condition(); cond.field = entry.getKey(); cond.valueExpression = "?"; if (compareModes != null) cond.compareMode = compareModes.get(cond.field); conditions.add(cond); _params.add(entry.getValue()); } _nestedConditions.add(conditions); return this; } /** * Checks the number of groups which have been added via addGroupedConditions(). * @return The number of groups. */ public int countGroups() { return _nestedConditions.size(); } /** * Builds a WhereClause based on the conditions previously specified with addGroupedConditions(). * @param conn A SQL Connection for which the query will be formatted. * @return A WhereClause. * @throws SQLException */ public WhereClause<V> build(Connection conn) throws SQLException { String dnf = buildNormalForm(conn); String clause = ""; if (dnf.length() > 0) clause = String.format(" WHERE %s ", dnf); return new WhereClause<V>(clause, _params); } protected String buildNormalForm(Connection conn) throws SQLException { String outerJunction = _conjunctive ? " AND " : " OR "; String innerJunction = _conjunctive ? 
" OR " : " AND "; List<String> junctions = new LinkedList<String>(); for (List<Condition> conditions : _nestedConditions) { List<String> predicates = new LinkedList<String>(); for (Condition condition : conditions) predicates.add(condition.buildPredicate(conn)); junctions.add(String.format("(%s)", Strings.join(innerJunction, predicates))); } return Strings.join(outerJunction, junctions); } protected static class Condition { /** * Unquoted SQL field name */ public String field; /** * Fragment of a SQL query for a value (recommended to be "?" unless hard-coded and safe). */ public String valueExpression; /** * Specifies how the field and value should be compared */ public CompareMode compareMode = CompareMode.NORMAL; public Condition() { } /** * @param field Unquoted SQL field name * @param value Fragment of a SQL query for a value (recommended to be "?" unless hard-coded and safe). * @param compareMode Specifies how the field and value should be compared */ public Condition(String field, String value, CompareMode compareMode) { this.field = field; this.valueExpression = value; this.compareMode = compareMode; } public String buildPredicate(Connection conn) throws SQLException { // prevent null pointer error from switch if (compareMode == null) compareMode = CompareMode.NORMAL; String quotedField = quoteSymbol(conn, field); switch (compareMode) { case CASE_SENSITIVE: String compare = caseSensitiveCompare(conn, quotedField, valueExpression); return new StringBuilder().append('(').append(compare).append(')').toString(); case WILDCARD: return new StringBuilder().append('(') .append(quotedField).append(" LIKE ").append(valueExpression).append(getLikeEscapeClause(conn)) .append(')').toString(); default: return new StringBuilder().append('(').append(quotedField).append('=').append(valueExpression).append(')').toString(); } } } } }
package Integration;

import java.util.List;
import java.util.Map;

import com.google.gson.Gson;
import com.lob.api.ApiException;
import com.lob.api.Configuration;
import com.lob.api.client.PostcardsApi;
import com.lob.api.client.TemplatesApi;

import org.openapitools.client.model.AddressEditable;
import org.openapitools.client.model.MailType;
import org.openapitools.client.model.Postcard;
import org.openapitools.client.model.PostcardDeletion;
import org.openapitools.client.model.PostcardEditable;
import org.openapitools.client.model.PostcardList;
import org.openapitools.client.model.PostcardSize;
import org.openapitools.client.model.SortBy5;
import org.openapitools.client.model.Template;
import org.openapitools.client.model.TemplateWritable;

import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeGroups;
import org.testng.annotations.Test;

import Helper.TestFixtures;

/**
 * Integration tests for the Lob Postcards API: create/retrieve/cancel,
 * create-from-template, and list.
 * Requires valid integration credentials via {@link Configuration#getConfigForIntegration()}.
 */
public class PostcardsApiSpecTest {
    private PostcardsApi validApi = new PostcardsApi(Configuration.getConfigForIntegration());
    private TestFixtures testFixtures = new TestFixtures();
    // FIX: removed stray empty statement (double semicolon) after the initializer.
    private PostcardEditable dummyPostcard = new PostcardEditable();
    private List<AddressEditable> addressEditableList = testFixtures.get_ADDRESSES_EDITABLE();

    // values for postcardsList call; all left null so list() applies no filters
    private String nextUrl = "";
    private String previousUrl = "";
    private Integer limit = null;
    private String before = null;
    private String after = null;
    private List<String> include = null;
    private Map<String, String> dateCreated = null;
    private Map<String, String> metadata = null;
    private PostcardSize size = null;
    private Boolean scheduled = null;
    private Map<String, String> sendDate = null;
    private MailType mailType = null;
    private SortBy5 sortBy = null;

    /** Builds the shared postcard payload used by the create/retrieve/cancel test. */
    @BeforeClass
    public void before_class() {
        Gson gson = new Gson();
        // to/from are serialized inline-address JSON strings, per the Lob API contract
        dummyPostcard.setTo(gson.toJson(addressEditableList.get(2)));
        dummyPostcard.setFrom(gson.toJson(addressEditableList.get(1)));
        dummyPostcard.setFront(testFixtures.get_FILE_LOCATION_4X6());
        dummyPostcard.setBack(testFixtures.get_FILE_LOCATION_4X6());
    }

    /** Creates a postcard, retrieves it by id, then cancels it (cleanup included in the test). */
    @Test(
        enabled = true,
        groups = {"Integration", "Create", "Postcard", "Valid"}
    )
    public void postcardCreateRetrieveDeleteTest() throws ApiException {
        Postcard postcard = validApi.create(dummyPostcard, null);
        Assert.assertNotNull(postcard.getId());
        Assert.assertNotNull(postcard.getUrl());

        Postcard retrievedPostcard = validApi.get(postcard.getId());
        Assert.assertNotNull(retrievedPostcard);

        PostcardDeletion deletedPostcard = validApi.cancel(postcard.getId());
        Assert.assertTrue(deletedPostcard.getDeleted());
    }

    /** Creates a postcard whose front/back reference a freshly created template, then cleans up both. */
    @Test(
        enabled = true,
        groups = {"Integration", "Create", "Postcard", "Valid"}
    )
    public void postcardCreateTestWithTemplateId() throws ApiException {
        Gson gson = new Gson();

        TemplateWritable templateWrite = new TemplateWritable();
        templateWrite.setDescription("Newer Template");
        templateWrite.setHtml("<html>Updated HTML</html>");
        TemplatesApi templatesApi = new TemplatesApi(Configuration.getConfigForIntegration());
        Template createdTemplate = templatesApi.create(templateWrite);
        Assert.assertNotNull(createdTemplate.getId());

        PostcardEditable postcardWithTemplateIds = new PostcardEditable();
        postcardWithTemplateIds.setTo(gson.toJson(addressEditableList.get(2)));
        postcardWithTemplateIds.setFrom(gson.toJson(addressEditableList.get(1)));
        postcardWithTemplateIds.setFront(createdTemplate.getId());
        postcardWithTemplateIds.setBack(createdTemplate.getId());

        Postcard postcard = validApi.create(postcardWithTemplateIds, null);
        Assert.assertNotNull(postcard.getId());
        Assert.assertEquals(postcard.getFrontTemplateId(), createdTemplate.getId());
        Assert.assertEquals(postcard.getBackTemplateId(), createdTemplate.getId());
        Assert.assertNotNull(postcard.getUrl());

        // Clean up
        PostcardDeletion deletedPostcard = validApi.cancel(postcard.getId());
        Assert.assertTrue(deletedPostcard.getDeleted());
        templatesApi.delete(createdTemplate.getId());
    }

    /** Lists postcards with no filters and asserts at least one result comes back. */
    @Test(
        enabled = true,
        groups = {"Integration", "List", "Postcard", "Valid"}
    )
    public void postcardListTest() throws ApiException {
        PostcardList response = validApi.list(limit, before, after, include, dateCreated,
            metadata, size, scheduled, sendDate, mailType, sortBy);
        Assert.assertNotNull(response.getData());
        List<Postcard> postcardList = response.getData();
        Assert.assertTrue(postcardList.size() > 0);
    }
}
package org.openspaces.core.space;

import com.gigaspaces.internal.client.cache.localcache.LocalCacheContainer;
import com.gigaspaces.internal.client.cache.localview.LocalViewContainer;
import com.gigaspaces.internal.client.spaceproxy.ISpaceProxy;
import com.j_spaces.core.IJSpace;
import com.j_spaces.core.admin.IInternalRemoteJSpaceAdmin;
import com.j_spaces.core.admin.IRemoteJSpaceAdmin;
import com.j_spaces.core.client.SpaceURL;
import net.jini.core.lookup.ServiceID;
import org.openspaces.core.util.SpaceUtils;
import org.openspaces.pu.service.PlainServiceDetails;

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.rmi.RemoteException;

/**
 * A Space service defined within a processing unit.
 *
 * @author kimchy
 */
public class SpaceServiceDetails extends PlainServiceDetails {

    public static final String SERVICE_TYPE = "space";

    // Keys under which the space properties are stored in the inherited attributes map.
    // All getters below simply read these attributes back.
    public static final class Attributes {
        public static final String SERVICEID = "service-id";
        public static final String SPACENAME = "space-name";
        public static final String SPACECONTAINERNAME = "space-container-name";
        public static final String SPACETYPE = "space-type";
        public static final String CLUSTERED = "clustered";
        public static final String URL = "url";
        public static final String SPACE_URL = "spaceUrl";
        public static final String MIRROR = "mirror";
    }

    private IJSpace space;

    // Only populated for embedded spaces (see constructor); otherwise left null.
    private IJSpace directSpace;

    private IInternalRemoteJSpaceAdmin directSpaceAdmin;

    /** No-arg constructor required for externalization. */
    public SpaceServiceDetails() {
    }

    public SpaceServiceDetails(IJSpace space) {
        this(null, space);
    }

    /**
     * Inspects the given space proxy and records its identity, type (embedded /
     * local view / local cache / remote), cluster and mirror information as
     * service-details attributes.
     *
     * @param id    service id; when null an id of the form "&lt;subtype&gt;:&lt;space-name&gt;" is generated
     * @param space the space proxy to describe
     */
    public SpaceServiceDetails(String id, IJSpace space) {
        super(id, SERVICE_TYPE, null, null, null);
        this.space = space;
        getAttributes().put(Attributes.SERVICEID, new ServiceID(space.getReferentUuid().getMostSignificantBits(), space.getReferentUuid().getLeastSignificantBits()));
        SpaceURL spaceURL = space.getFinderURL();
        // default to embedded; refined by the instanceof checks below
        serviceSubType = "embedded";
        SpaceType spaceType = SpaceType.EMBEDDED;
        getAttributes().put(Attributes.MIRROR, false);
        if (space instanceof LocalViewContainer) {
            serviceSubType = "localview";
            spaceType = SpaceType.LOCAL_VIEW;
        } else if (space instanceof LocalCacheContainer) {
            serviceSubType = "localcache";
            spaceType = SpaceType.LOCAL_CACHE;
        } else if (SpaceUtils.isRemoteProtocol(space)) {
            serviceSubType = "remote";
            spaceType = SpaceType.REMOTE;
        } else { // embedded
            try {
                if (((IRemoteJSpaceAdmin) space.getAdmin()).getConfig().isMirrorServiceEnabled()) {
                    getAttributes().put(Attributes.MIRROR, true);
                }
            } catch (RemoteException e) {
                // could not reach the admin; assume no mirror
                getAttributes().put(Attributes.MIRROR, false);
            }
            try {
                directSpace = ((ISpaceProxy) space).getClusterMember();
                directSpaceAdmin = (IInternalRemoteJSpaceAdmin) directSpace.getAdmin();
            } catch (Exception e) {
                // no direct space??? - best effort; direct access stays unavailable
            }
        }
        getAttributes().put(Attributes.SPACETYPE, spaceType);
        getAttributes().put(Attributes.SPACENAME, spaceURL.getSpaceName());
        getAttributes().put(Attributes.SPACECONTAINERNAME, spaceURL.getContainerName());
        getAttributes().put(Attributes.CLUSTERED, ((ISpaceProxy) space).isClustered());
        description = spaceURL.getSpaceName();
        longDescription = spaceURL.getContainerName() + ":" + spaceURL.getSpaceName();
        getAttributes().put(Attributes.URL, space.getFinderURL().toString());
        getAttributes().put(Attributes.SPACE_URL, space.getFinderURL());
        if (id == null) {
            // generate a stable id from the subtype and space name
            this.id = serviceSubType + ":" + spaceURL.getSpaceName();
        }
    }

    public String getName() {
        return (String) getAttributes().get(Attributes.SPACENAME);
    }

    public String getContainerName() {
        return (String) getAttributes().get(Attributes.SPACECONTAINERNAME);
    }

    public ServiceID getServiceID() {
        return (ServiceID) getAttributes().get(Attributes.SERVICEID);
    }

    public SpaceType getSpaceType() {
        return (SpaceType) getAttributes().get(Attributes.SPACETYPE);
    }

    public boolean isMirror() {
        return (Boolean) getAttributes().get(Attributes.MIRROR);
    }

    public boolean isClustered() {
        return (Boolean) getAttributes().get(Attributes.CLUSTERED);
    }

    public String getUrl() {
        return (String) getAttributes().get(Attributes.URL);
    }

    public SpaceURL getSpaceUrl() {
        return (SpaceURL) getAttributes().get(Attributes.SPACE_URL);
    }

    // Serialization is fully handled by the superclass (all state lives in the
    // attributes map); the transient space/admin references are intentionally
    // not serialized.
    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        super.writeExternal(out);
    }

    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        super.readExternal(in);
    }
}
package com.tinkerpop.rexster.protocol;

import com.tinkerpop.pipes.util.iterators.SingleIterator;
import com.tinkerpop.rexster.Tokens;
import com.tinkerpop.rexster.protocol.message.ConsoleScriptResponseMessage;
import com.tinkerpop.rexster.protocol.message.ErrorResponseMessage;
import com.tinkerpop.rexster.protocol.message.RexProMessage;
import com.tinkerpop.rexster.protocol.message.ScriptRequestMessage;
import jline.ConsoleReader;
import jline.History;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * Interactive console client for a remote Rexster server, speaking RexPro.
 * Supports an interactive REPL loop as well as one-shot execution of a script file.
 */
public class RexsterConsole {

    private RemoteRexsterSession session = null;
    private String host;
    private String language;
    private String username = "";
    private String password = "";
    private int port;
    private int timeout;

    // binding names reported by the server after the last evaluation
    private List<String> currentBindings = new ArrayList<String>();

    private final PrintStream output = System.out;

    private static final String REXSTER_HISTORY = ".rexster_history";

    /**
     * Starts the console in interactive mode: opens a session and enters the REPL loop.
     */
    public RexsterConsole(String host, int port, String language, int timeout, String username, String password) throws Exception {

        this.output.println(" (l_(l");
        this.output.println("(_______( 0 0");
        this.output.println("( (-Y-) <woof>");
        // FIX: the following two lines were garbled in the original source
        // (an unterminated string literal swallowed the second println);
        // reconstructed minimally so the ASCII art prints its last two rows.
        this.output.println("l l");
        this.output.println("l l,, l l,,");

        this.host = host;
        this.port = port;
        this.language = language;
        this.timeout = timeout;
        this.username = username;
        this.password = password;

        this.output.println("opening session [" + this.host + ":" + this.port + "]");
        this.session = new RemoteRexsterSession(this.host, this.port, this.timeout, this.username, this.password);
        this.session.open();

        if (this.session.isOpen()) {
            this.output.println("?h for help");
            this.primaryLoop();
        } else {
            this.output.println("could not connect to the Rexster server");
        }
    }

    /**
     * Starts the console in one-shot mode: opens a session and executes the given script.
     */
    public RexsterConsole(String host, int port, String language, int timeout, String script, String username, String password) throws Exception {
        this.host = host;
        this.port = port;
        this.language = language;
        this.timeout = timeout;
        this.username = username;
        this.password = password;

        this.session = new RemoteRexsterSession(this.host, this.port, this.timeout, this.username, this.password);
        this.session.open();

        if (!this.session.isOpen()) {
            this.output.println("could not connect to the Rexster server");
        } else {
            this.executeScript(script, false);
        }
    }

    /**
     * The interactive read-eval-print loop.  Lines ending in " ." continue on
     * the next line; "?"-prefixed tokens are console commands, everything else
     * is sent to the server for evaluation.
     */
    public void primaryLoop() throws Exception {

        final ConsoleReader reader = new ConsoleReader();
        reader.setBellEnabled(false);
        reader.setUseHistory(true);

        try {
            History history = new History();
            history.setHistoryFile(new File(REXSTER_HISTORY));
            reader.setHistory(history);
        } catch (IOException e) {
            System.err.println("Could not find history file");
        }

        String line = "";
        this.output.println();

        while (line != null) {
            try {
                line = "";
                boolean submit = false;
                boolean newline = false;
                while (!submit) {
                    // a trailing " ." continues the statement on the next line
                    if (newline)
                        line = line + "\n" + reader.readLine(RexsterConsole.makeSpace(this.getPrompt().length()));
                    else
                        line = line + "\n" + reader.readLine(this.getPrompt());
                    if (line.endsWith(" .")) {
                        newline = true;
                        line = line.substring(0, line.length() - 2);
                    } else {
                        line = line.trim();
                        submit = true;
                    }
                }

                if (line.isEmpty())
                    continue;
                if (line.equals(Tokens.REXSTER_CONSOLE_QUIT)) {
                    this.output.print("closing session with Rexster [" + this.host + ":" + this.port + "]");
                    if (this.session != null) {
                        this.session.close();
                        this.session = null;
                    }
                    this.output.println("--> done");
                    return;
                } else if (line.equals(Tokens.REXSTER_CONSOLE_HELP)) {
                    this.printHelp();
                } else if (line.equals(Tokens.REXSTER_CONSOLE_BINDINGS)) {
                    this.printBindings();
                } else if (line.equals(Tokens.REXSTER_CONSOLE_RESET)) {
                    this.output.print("resetting session with Rexster [" + this.host + ":" + this.port + "]");
                    if (this.session != null) {
                        this.session.reset();
                    } else {
                        this.session = new RemoteRexsterSession(this.host, this.port, this.timeout, this.username, this.password);
                    }
                    this.output.println("--> done");
                } else if (line.startsWith(Tokens.REXSTER_CONSOLE_EXECUTE)) {
                    String fileToExecute = line.substring(Tokens.REXSTER_CONSOLE_EXECUTE.length()).trim();
                    if (fileToExecute == null || fileToExecute.isEmpty()) {
                        this.output.print("specify the file to execute");
                    } else {
                        try {
                            this.executeScript(readFile(fileToExecute));
                        } catch (IOException ioe) {
                            this.output.println("could not read the file specified");
                        }
                    }
                } else if (line.equals(Tokens.REXSTER_CONSOLE_LANGUAGES)) {
                    this.printAvailableLanguages();
                } else if (line.startsWith(Tokens.REXSTER_CONSOLE_LANGUAGE)) {
                    // "?<language-name>" switches the active script engine
                    String langToChangeTo = line.substring(1);
                    if (langToChangeTo == null || langToChangeTo.isEmpty()) {
                        this.output.println("specify a language on Rexster ?<language-name>");
                        this.printAvailableLanguages();
                    } else if (this.session.isAvailableLanguage(langToChangeTo)) {
                        this.language = langToChangeTo;
                    } else {
                        this.output.println("not a valid language on Rexster: [" + langToChangeTo + "].");
                        this.printAvailableLanguages();
                    }
                } else {
                    executeScript(line);
                }

            } catch (Exception e) {
                this.output.println("Evaluation error: " + e.getMessage());
            }
        }
    }

    private void executeScript(String line) {
        executeScript(line, true);
    }

    /**
     * Evaluates a script remotely and prints each element of the result.
     *
     * @param showPrefix when true each printed line is prefixed with "==&gt;" (REPL style)
     */
    private void executeScript(String line, boolean showPrefix) {
        ResultAndBindings result = eval(line, this.language, this.session);
        // FIX: eval() returns null when the session/server is unreachable; the
        // original dereferenced it unconditionally and threw an NPE.
        if (result == null) {
            return;
        }

        // normalize the result into an iterator so that single values,
        // iterables, maps and iterators are all printed uniformly
        Iterator itty;
        if (result.getResult() instanceof Iterator) {
            itty = (Iterator) result.getResult();
        } else if (result.getResult() instanceof Iterable) {
            itty = ((Iterable) result.getResult()).iterator();
        } else if (result.getResult() instanceof Map) {
            itty = ((Map) result.getResult()).entrySet().iterator();
        } else {
            itty = new SingleIterator<Object>(result.getResult());
        }

        while (itty.hasNext()) {
            if (showPrefix) {
                this.output.println("==>" + itty.next());
            } else {
                this.output.println(itty.next());
            }
        }

        this.currentBindings = result.getBindings();
    }

    private void printAvailableLanguages() {
        this.output.println("-= Available Languages =-");
        Iterator<String> languages = this.session.getAvailableLanguages();
        while (languages.hasNext()) {
            this.output.println("?" + languages.next());
        }
    }

    /** Prints the console command reference and the server-side rexster context API. */
    public void printHelp() {
        this.output.println("-= Console Specific =-");
        this.output.println("?<language-name>: jump to engine");
        this.output.println(Tokens.REXSTER_CONSOLE_LANGUAGES + ": list of available languages on Rexster");
        this.output.println(Tokens.REXSTER_CONSOLE_RESET + ": reset the rexster session");
        this.output.println(Tokens.REXSTER_CONSOLE_EXECUTE + " <file-name>: execute a script file");
        this.output.println(Tokens.REXSTER_CONSOLE_QUIT + ": quit");
        this.output.println(Tokens.REXSTER_CONSOLE_HELP + ": displays this message");
        this.output.println("");
        this.output.println("-= Rexster Context =-");
        this.output.println("rexster.getGraph(graphName) - gets a Graph instance");
        this.output.println(" :graphName - [String] - the name of a graph configured within Rexster");
        this.output.println("rexster.getGraphNames() - gets the set of graph names configured within Rexster");
        this.output.println("rexster.getVersion() - gets the version of Rexster server");
        this.output.println("");
    }

    /** Prints the binding names returned by the last evaluation. */
    public void printBindings() {
        for (String binding : this.currentBindings) {
            this.output.println("==>" + binding);
        }
    }

    public String getPrompt() {
        return "rexster[" + this.language + "]> ";
    }

    /** @return a string of {@code number} spaces (used to align continuation prompts). */
    public static String makeSpace(int number) {
        StringBuilder space = new StringBuilder();
        for (int i = 0; i < number; i++) {
            space.append(" ");
        }
        return space.toString();
    }

    /**
     * Sends the script to the server for evaluation and decodes the response.
     *
     * @return the decoded result and binding names, or null if the request failed
     */
    private static ResultAndBindings eval(String script, String scriptEngineName, RemoteRexsterSession session) {

        ResultAndBindings returnValue = null;

        try {
            session.open();

            // pass in some dummy rexster bindings...not really fully working quite right for scriptengine usage
            final RexProMessage scriptMessage = new ScriptRequestMessage(
                    session.getSessionKey(), scriptEngineName, new RexsterBindings(), script);

            final RexProMessage resultMessage = session.sendRequest(scriptMessage, 3, 500);

            ArrayList<String> lines = new ArrayList<String>();
            List<String> bindings = new ArrayList<String>();
            try {
                ConsoleScriptResponseMessage responseMessage = new ConsoleScriptResponseMessage(resultMessage);

                bindings = responseMessage.getBindings();

                ByteBuffer bb = ByteBuffer.wrap(responseMessage.getBody());

                // navigate to the start of the results...bindings are attached if there is no error present
                int lengthOfBindings = bb.getInt();
                bb.position(lengthOfBindings + 4);

                // multiple objects (those from an iterator) are returned as multiple lines each with their
                // own length counter.
                while (bb.hasRemaining()) {
                    int segmentLength = bb.getInt();
                    byte[] resultObjectBytes = new byte[segmentLength];
                    bb.get(resultObjectBytes);
                    lines.add(new String(resultObjectBytes));
                }
            } catch (IllegalArgumentException iae) {
                // the response could not be parsed as a console response; treat it as an error message
                ErrorResponseMessage errorMessage = new ErrorResponseMessage(resultMessage);
                lines.add(errorMessage.getErrorMessage());
            }

            // a single line is unwrapped; multiple lines are returned as an iterator
            Object result = lines.iterator();
            if (lines.size() == 1) {
                result = lines.get(0);
            }

            returnValue = new ResultAndBindings(result, bindings);

        } catch (Exception e) {
            System.out.println("The session with Rexster Server may have been lost. Please try again or refresh your session with ?r");
        }

        return returnValue;
    }

    /**
     * Reads the entire file into a string, one line per line separator.
     * FIX: the reader is now closed (the original leaked it).
     */
    private static String readFile(String file) throws IOException {
        BufferedReader reader = new BufferedReader(new FileReader(file));
        try {
            String line = null;
            StringBuilder stringBuilder = new StringBuilder();
            String ls = System.getProperty("line.separator");
            while ((line = reader.readLine()) != null) {
                stringBuilder.append(line);
                stringBuilder.append(ls);
            }
            return stringBuilder.toString();
        } finally {
            reader.close();
        }
    }

    /** Builds the commons-cli option definitions for the console. */
    @SuppressWarnings("static-access")
    private static Options getCliOptions() {
        Option help = new Option("h", "help", false, "print this message");

        Option hostName = OptionBuilder.withArgName("host-name")
                .hasArg()
                .withDescription("the rexster server to connect to")
                .withLongOpt("rexsterhost")
                .create("rh");

        Option port = OptionBuilder.withArgName("port")
                .hasArg()
                .withDescription("the port of the rexster server that is serving rexpro")
                .withLongOpt("rexsterport")
                .create("rp");

        Option language = OptionBuilder.withArgName("language")
                .hasArg()
                .withDescription("the script engine language to use by default")
                .withLongOpt("language")
                .create("l");

        Option timeout = OptionBuilder.withArgName("seconds")
                .hasArg()
                .withDescription("time allowed when waiting for results from server (default 100 seconds)")
                .withLongOpt("timeout")
                .create("t");

        Option scriptFile = OptionBuilder.withArgName("file")
                .hasArg()
                .withDescription("script to execute remotely")
                .withLongOpt("execute")
                .create("e");

        Option username = OptionBuilder.withArgName("username")
                .hasArg()
                .withDescription("username for authentication (if needed)")
                .withLongOpt("user")
                .create("u");

        Option password = OptionBuilder.withArgName("password")
                .hasArg()
                .withDescription("password for authentication (if needed)")
                .withLongOpt("pass")
                .create("p");

        Options options = new Options();
        options.addOption(help);
        options.addOption(hostName);
        options.addOption(port);
        options.addOption(language);
        options.addOption(timeout);
        options.addOption(scriptFile);
        options.addOption(username);
        options.addOption(password);

        return options;
    }

    /** Parses command-line arguments; prints usage and exits on --help. */
    private static CommandLine getCliInput(final String[] args) throws Exception {
        Options options = getCliOptions();
        CommandLineParser parser = new GnuParser();
        CommandLine line;

        try {
            line = parser.parse(options, args);
        } catch (ParseException exp) {
            throw new Exception("Parsing failed. Reason: " + exp.getMessage());
        }

        if (line.hasOption("help")) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("rexster console", options);
            System.exit(0);
        }

        return line;
    }

    public static void main(String[] args) throws Exception {

        CommandLine line = getCliInput(args);

        String host = "localhost";
        int port = 8184;
        String language = "groovy";
        int timeout = RexPro.DEFAULT_TIMEOUT_SECONDS;
        String username = "";
        String password = "";

        if (line.hasOption("rexsterhost")) {
            host = line.getOptionValue("rexsterhost");
        }

        if (line.hasOption("rexsterport")) {
            String portString = line.getOptionValue("rexsterport");
            try {
                port = Integer.parseInt(portString);
            } catch (NumberFormatException nfe) {
                System.out.println("the rexsterport parameter must be an integer value. Defaulting to: [" + port + "]");
            }
        }

        if (line.hasOption("language")) {
            language = line.getOptionValue("language");
        }

        if (line.hasOption("timeout")) {
            String timeoutString = line.getOptionValue("timeout");
            try {
                // FIX: the original assigned the parsed value to 'port',
                // silently clobbering the port and ignoring --timeout.
                timeout = Integer.parseInt(timeoutString);
            } catch (NumberFormatException nfe) {
                System.out.println("the timeout parameter must be an integer value. Defaulting to: " + timeout);
            }
        }

        if (line.hasOption("user")) {
            username = line.getOptionValue("user");
        }

        if (line.hasOption("pass")) {
            password = line.getOptionValue("pass");
        }

        String fileToExecute = null;
        if (line.hasOption("execute")) {
            fileToExecute = line.getOptionValue("execute");
            try {
                new RexsterConsole(host, port, language, timeout, readFile(fileToExecute), username, password);
            } catch (IOException ioe) {
                System.out.println("could not read the file specified");
            }
        } else {
            new RexsterConsole(host, port, language, timeout, username, password);
        }
    }
}
package net.sf.taverna.t2.workbench.run; import java.awt.BorderLayout; import java.awt.Color; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.io.ByteArrayInputStream; import java.io.IOException; import java.sql.Timestamp; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import javax.swing.DefaultListModel; import javax.swing.ImageIcon; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JList; import javax.swing.JOptionPane; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JSplitPane; import javax.swing.ListSelectionModel; import javax.swing.border.EmptyBorder; import javax.swing.border.LineBorder; import javax.swing.event.ListSelectionEvent; import javax.swing.event.ListSelectionListener; import net.sf.taverna.platform.spring.RavenAwareClassPathXmlApplicationContext; import net.sf.taverna.t2.facade.WorkflowInstanceFacade; import net.sf.taverna.t2.provenance.api.ProvenanceAccess; import net.sf.taverna.t2.provenance.lineageservice.utils.WorkflowInstance; import net.sf.taverna.t2.reference.ReferenceService; import net.sf.taverna.t2.reference.T2Reference; import net.sf.taverna.t2.reference.impl.WriteQueueAspect; import net.sf.taverna.t2.workbench.reference.config.DataManagementConfiguration; import net.sf.taverna.t2.workbench.ui.zaria.UIComponentSPI; import net.sf.taverna.t2.workflowmodel.Dataflow; import net.sf.taverna.t2.workflowmodel.EditException; import net.sf.taverna.t2.workflowmodel.serialization.DeserializationException; import net.sf.taverna.t2.workflowmodel.serialization.xml.XMLDeserializerRegistry; import org.apache.log4j.Logger; import org.jdom.Document; import org.jdom.Element; import org.jdom.JDOMException; import org.jdom.input.SAXBuilder; import 
org.springframework.beans.factory.NoSuchBeanDefinitionException; import org.springframework.context.ApplicationContext; public class DataflowRunsComponent extends JSplitPane implements UIComponentSPI { private static final long serialVersionUID = 1L; private static Logger logger = Logger .getLogger(DataflowRunsComponent.class); private static DataflowRunsComponent singletonInstance; private ReferenceService referenceService; private ReferenceService referenceServiceWithDatabase; // for previous runs, we always need the one using database private String referenceContext; private DefaultListModel runListModel; private JList runList; private JButton removeWorkflowRunsButton; private JSplitPane topPanel; // Queue for previous workflow runs to be deleted // The reason for not using LinkedBlockingQueue here is that we need to do a peek first and then remove // in the delete run thread, rather than taking the first element of the queue since // shutdown hook checks if the queue is empty and then pops up a dialog it is taking a while (and if we do // a remove immediately the hook would not detect that there is deletion going on if there was only one element // in the queue). Peek in LinkedBlockingQueue does not wait so we would have to sync anyway so there is not // advantage in using it over LinkedList. 
private static final LinkedList<DataflowRun> runsToBeDeletedQueue = new LinkedList<DataflowRun>(); private DataflowRunsComponent() { super(JSplitPane.VERTICAL_SPLIT); setDividerLocation(400); topPanel = new JSplitPane(JSplitPane.HORIZONTAL_SPLIT); topPanel.setDividerLocation(240); topPanel.setBorder(null); setTopComponent(topPanel); runListModel = new DefaultListModel(); runList = new JList(runListModel); runList.setBorder(new EmptyBorder(5, 5, 5, 5)); runList .setSelectionMode(ListSelectionModel.MULTIPLE_INTERVAL_SELECTION); // runList.setSelectedIndex(0); runList.addListSelectionListener(new ListSelectionListener() { public void valueChanged(ListSelectionEvent e) { if (!e.getValueIsAdjusting()) { Object selection = runList.getSelectedValue(); int location = getDividerLocation(); if (selection == null) { // there is no workflow items in // the list JPanel tempMonitorPanel = new JPanel(new BorderLayout()); tempMonitorPanel.setBorder(LineBorder .createGrayLineBorder()); tempMonitorPanel.setBackground(Color.WHITE); tempMonitorPanel.add(new JLabel( "No workflows runs available", JLabel.CENTER), BorderLayout.CENTER); topPanel.setBottomComponent(tempMonitorPanel); JPanel tempResultsPanel = new JPanel(new BorderLayout()); tempResultsPanel.setBackground(Color.WHITE); tempResultsPanel.add(new JLabel("Results"), BorderLayout.NORTH); tempResultsPanel.add(new JLabel("No results available", JLabel.CENTER), BorderLayout.CENTER); setBottomComponent(tempResultsPanel); removeWorkflowRunsButton.setEnabled(false); setDividerLocation(location); revalidate(); } else if (selection instanceof DataflowRun) { DataflowRun dataflowRun = (DataflowRun) selection; topPanel.setBottomComponent(dataflowRun .getMonitorViewComponent()); setBottomComponent(dataflowRun.getResultsComponent()); setDividerLocation(location); removeWorkflowRunsButton.setEnabled(true); revalidate(); } } } }); JPanel runListPanel = new JPanel(new BorderLayout()); runListPanel.setBorder(LineBorder.createGrayLineBorder()); 
JLabel worklflowRunsLabel = new JLabel("Workflow Runs"); worklflowRunsLabel.setBorder(new EmptyBorder(5, 5, 5, 5)); worklflowRunsLabel.setAlignmentX(JComponent.LEFT_ALIGNMENT); // button to remove previous workflow runs removeWorkflowRunsButton = new JButton("Remove"); removeWorkflowRunsButton.setAlignmentX(JComponent.RIGHT_ALIGNMENT); removeWorkflowRunsButton.setEnabled(false); removeWorkflowRunsButton.setToolTipText("Remove workflow run(s)"); removeWorkflowRunsButton.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { // Warn user that removing workflow run will // cause all provenance data for that run to be deleted int option = JOptionPane .showConfirmDialog( null, new JLabel("<html><body>Are you sure you want to delete the selected workflow run(s)?<br>" + "Deleting them will remove all provenance data related to the run(s).</body></html>"), "Confirm workflow run deletion", JOptionPane.OK_CANCEL_OPTION); if (option == JOptionPane.CANCEL_OPTION){ return; } int[] selected = runList.getSelectedIndices(); for (int i = selected.length - 1; i >= 0; i final DataflowRun dataflowRunToBeDeleted = (DataflowRun) runListModel .remove(selected[i]); dataflowRunToBeDeleted.getMonitorViewComponent().onDispose(); // Add this workflow run to the queue to be deleted synchronized (runsToBeDeletedQueue) { runsToBeDeletedQueue.add(dataflowRunToBeDeleted); } } if (selected.length >0){ synchronized (runsToBeDeletedQueue) { runsToBeDeletedQueue.notify(); } } // Set the first item as selected - if there is one if (runListModel.size() > 0) { runList.setSelectedIndex(0); } System.gc(); } }); JPanel runListTopPanel = new JPanel(); runListTopPanel.setLayout(new BorderLayout()); runListTopPanel.add(worklflowRunsLabel, BorderLayout.WEST); runListTopPanel.add(removeWorkflowRunsButton, BorderLayout.EAST); JPanel runListWithHintTopPanel = new JPanel(); runListWithHintTopPanel.setLayout(new BorderLayout()); runListWithHintTopPanel.add(runListTopPanel, 
BorderLayout.NORTH); JPanel hintsPanel = new JPanel(); hintsPanel.setLayout(new BorderLayout()); hintsPanel.add(new JLabel("Click on a run to see its results"), BorderLayout.NORTH); if (DataManagementConfiguration.getInstance().isProvenanceEnabled()) { hintsPanel.add(new JLabel("Click on a service in the diagram"), BorderLayout.CENTER); } else { hintsPanel.add(new JLabel("Enable provenance under preferences"), BorderLayout.CENTER); } hintsPanel.add(new JLabel("to see intermediate results"), BorderLayout.SOUTH); runListWithHintTopPanel.add(hintsPanel, BorderLayout.SOUTH); runListPanel.add(runListWithHintTopPanel, BorderLayout.NORTH); JScrollPane scrollPane = new JScrollPane(runList); scrollPane.setBorder(null); runListPanel.add(scrollPane, BorderLayout.CENTER); topPanel.setTopComponent(runListPanel); JPanel tempMonitorPanel = new JPanel(new BorderLayout()); tempMonitorPanel.setBorder(LineBorder.createGrayLineBorder()); tempMonitorPanel.setBackground(Color.WHITE); tempMonitorPanel.add(new JLabel("No workflows have been run yet", JLabel.CENTER), BorderLayout.CENTER); topPanel.setBottomComponent(tempMonitorPanel); JPanel tempResultsPanel = new JPanel(new BorderLayout()); tempResultsPanel.setBackground(Color.WHITE); tempResultsPanel.add(new JLabel("Results"), BorderLayout.NORTH); tempResultsPanel.add(new JLabel("No results yet", JLabel.CENTER), BorderLayout.CENTER); setBottomComponent(tempResultsPanel); // revalidate(); // setDividerLocation(.3); Thread thread = new Thread("Retrieve Previous Runs") { @Override public void run() { // force reference service to be constructed now rather than at first // workflow run getReferenceService(); getReferenceServiceWithDatabase(); // get the Reference Service with database for previous runs retrievePreviousRuns(); } }; thread.start(); // Start listening for requests for previous workflow runs to be deleted Thread deleteWorkflowRunsThread = new DeleteWorkflowRunsThread(); deleteWorkflowRunsThread.start(); } 
@SuppressWarnings("unchecked") public ArrayList<DataflowRun> getPreviousWFRuns(){ return (ArrayList<DataflowRun>) Collections.list(runListModel.elements()); } private void retrievePreviousRuns() { String connectorType = DataManagementConfiguration.getInstance() .getConnectorType(); ProvenanceAccess provenanceAccess = new ProvenanceAccess(connectorType); List<WorkflowInstance> allWorkflowIDs = provenanceAccess .getAllWorkflowIDs(); Collections.reverse(allWorkflowIDs); /*ArrayList<String> topLevelWorkflowRunIds = new ArrayList<String>(); for (WorkflowInstance workflowInstance : allWorkflowIDs){ // get only top level workflow runs, not the nested wf runs topLevelWorkflowRunIds.add(workflowInstance.getWorkflowIdentifier()); } removeDuplicate(topLevelWorkflowRunIds);*/ for (WorkflowInstance workflowInstance : allWorkflowIDs) { //if (topLevelWorkflowRunIds.contains(workflowInstance.getInstanceID())){ logger.info("retrieved previous run, workflow id: " + workflowInstance.getInstanceID() + " date: " + workflowInstance.getTimestamp()); Timestamp time = Timestamp.valueOf(workflowInstance.getTimestamp()); Date date = new Date(time.getTime()); try { SAXBuilder builder = new SAXBuilder(); Document document = builder.build(new ByteArrayInputStream(workflowInstance.getDataflowBlob())); Element rootElement = document.getRootElement(); Dataflow dataflow = XMLDeserializerRegistry.getInstance() .getDeserializer().deserializeDataflow(rootElement); DataflowRun runComponent = new DataflowRun(dataflow, date, workflowInstance.getInstanceID(), referenceServiceWithDatabase); runComponent.setDataSavedInDatabase(true); runComponent.setProvenanceEnabledForRun(true); runListModel.add(runListModel.getSize(), runComponent); } catch (JDOMException e) { logger.error("Problem with previous run: " + workflowInstance.getInstanceID() + " " + e); } catch (IOException e) { logger.error("Problem with previous run: " + workflowInstance.getInstanceID() + " " + e); } catch (DeserializationException e) { 
logger.error("Problem with previous run: " + workflowInstance.getInstanceID() + " " + e); } catch (EditException e) { logger.error("Problem with previous run: " + workflowInstance.getInstanceID() + " " + e); } } } private static void removeDuplicate(ArrayList arlList) { // List order not maintained HashSet h = new HashSet(arlList); arlList.clear(); arlList.addAll(h); } public static DataflowRunsComponent getInstance() { if (singletonInstance == null) { singletonInstance = new DataflowRunsComponent(); } return singletonInstance; } public long getRunListCount() { return runListModel.size(); } public ReferenceService getReferenceService() { String context = DataManagementConfiguration.getInstance() .getDatabaseContext(); if (!context.equals(referenceContext)) { referenceContext = context; ApplicationContext appContext = new RavenAwareClassPathXmlApplicationContext( context); referenceService = (ReferenceService) appContext .getBean("t2reference.service.referenceService"); try { WriteQueueAspect cache = (WriteQueueAspect) appContext .getBean("t2reference.cache.cacheAspect"); ReferenceServiceShutdownHook.setReferenceServiceCache(cache); } catch (NoSuchBeanDefinitionException e) { // ReferenceServiceShutdown.setReferenceServiceCache(null); } catch (ClassCastException e) { // ReferenceServiceShutdown.setReferenceServiceCache(null); } } return referenceService; } private ReferenceService getReferenceServiceWithDatabase() { // Force creation of a Ref. Service that uses database regardless of what current context is // This Ref. Service will be used for previous wf runs to get intermediate results even if // current Ref. Manager uses in-memory store. If current Ref. 
Manager if (referenceServiceWithDatabase == null){ String databasecontext = DataManagementConfiguration.HIBERNATE_CONTEXT; ApplicationContext appContext = new RavenAwareClassPathXmlApplicationContext( databasecontext); referenceServiceWithDatabase = (ReferenceService) appContext .getBean("t2reference.service.referenceService"); } return referenceServiceWithDatabase; } public void runDataflow(WorkflowInstanceFacade facade, Map<String, T2Reference> inputs) { DataflowRun runComponent = new DataflowRun(facade, inputs, new Date(), referenceService); runComponent.setProvenanceEnabledForRun(DataManagementConfiguration.getInstance().isProvenanceEnabled()); runComponent.setDataSavedInDatabase(DataManagementConfiguration .getInstance().getProperty( DataManagementConfiguration.IN_MEMORY) .equalsIgnoreCase("false")); runListModel.add(0, runComponent); runList.setSelectedIndex(0); runComponent.run(); } public ImageIcon getIcon() { // TODO Auto-generated method stub return null; } public void onDisplay() { // TODO Auto-generated method stub } public void onDispose() { // TODO Auto-generated method stub } public static LinkedList<DataflowRun> getRunsToBeDeletedQueue() { return runsToBeDeletedQueue; } /** * Thread that deletes provenance for previous workflow runs * placed in a special queue. 
*/ private class DeleteWorkflowRunsThread extends Thread { public DeleteWorkflowRunsThread() { super("Deleting old workflow runs"); setDaemon(true); } public void run() { try { DataflowRun runToDelete = null; while (true) { synchronized (runsToBeDeletedQueue) { // Wait until an element is placed in the queue while (runsToBeDeletedQueue.isEmpty()) { runsToBeDeletedQueue.wait(); } } // Retrieve the first element from the queue (but do not // remove it) runToDelete = runsToBeDeletedQueue.peek(); // Remove provenance data for the run (if any) and all // references held by the workflow run from the Reference Manager's store try { logger.info("Starting deletion of workflow run '" + runToDelete.toString() + "' (run id " + runToDelete.getRunId() + ")."); if (runToDelete.isProvenanceEnabledForRun()){ String connectorType = DataManagementConfiguration .getInstance().getConnectorType(); ProvenanceAccess provenanceAccess = new ProvenanceAccess( connectorType); // Remove the run from provenance database (if it is stored there at all) provenanceAccess.removeRun(runToDelete.getRunId()); } // Remove references from the Reference Manager's store (regardless if in-memory or database) runToDelete.getReferenceService().deleteReferencesForWorkflowRun(runToDelete.getRunId()); logger.info("Deletion of workflow run '" + runToDelete.toString() + "' (run id " + runToDelete.getRunId() + ") from provenance database and Reference Manager's store completed."); } catch (Exception ex) { logger.error("Failed to delete workflow run '" + runToDelete.toString() + "' (run id " + runToDelete.getRunId() + ") from provenance database and Reference Manager's store.", ex); } finally { synchronized (runsToBeDeletedQueue) { // Remove the run we have just deleted runsToBeDeletedQueue.removeFirst(); } } } } catch (InterruptedException ignored) { } } } }
package net.meisen.dissertation.model.handler; import java.io.ByteArrayInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Properties; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.locks.ReentrantReadWriteLock; import net.meisen.dissertation.config.xslt.DefaultValues; import net.meisen.dissertation.exceptions.TidaModelException; import net.meisen.dissertation.exceptions.TidaModelHandlerException; import net.meisen.dissertation.impl.persistence.ZipPersistor; import net.meisen.dissertation.model.data.TidaModel; import net.meisen.dissertation.model.persistence.ILocation; import net.meisen.dissertation.model.persistence.Identifier; import net.meisen.dissertation.model.persistence.MetaData; import net.meisen.general.genmisc.exceptions.ForwardedRuntimeException; import net.meisen.general.genmisc.exceptions.registry.IExceptionRegistry; import net.meisen.general.genmisc.resources.IByteBufferReader; import net.meisen.general.genmisc.resources.Xml; import net.meisen.general.genmisc.types.Files; import net.meisen.general.genmisc.types.Streams; import net.meisen.general.sbconfigurator.api.IConfiguration; import net.meisen.general.sbconfigurator.api.IModuleHolder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.BeanCreationException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; /** * A {@code TidaModelHandler} is an instance used to load {@code TidaModel} * instances. 
* * @author pmeisen * */ @SuppressWarnings({"ConstantConditions", "ResultOfMethodCallIgnored"}) public class TidaModelHandler { private final static String AUTOLOAD_FILENAME = "handler.data"; private final static String MODEL_FILENAME = "model.xml"; private final static Logger LOG = LoggerFactory .getLogger(TidaModelHandler.class); private String defaultLocation; private ReentrantReadWriteLock autoloadLock = new ReentrantReadWriteLock(); /** * Class to keep track of changes on an xml document. * * @author pmeisen * */ protected class ManipulatedXml { private byte[] manipulatedXml; private Map<String, String> oldValues = new HashMap<>(); /** * Adds the old value of a changed value. * * @param name * the name of the changed value * @param oldValue * the old value */ public void addValue(final String name, final String oldValue) { oldValues.put(name, oldValue); } /** * Gets the old value of the specified {@code name}. * * @param name * the name to get the old value for * * @return the old value or {@code null} if no old value was set or * known */ public String getOldValue(final String name) { return oldValues.get(name); } /** * Gets the manipulated xml. * * @return the manipulated xml */ public byte[] getXml() { return manipulatedXml; } /** * Sets the manipulated xml. * * @param manipulatedXml * the manipulated xml */ public void setManipulatedXml(final byte[] manipulatedXml) { this.manipulatedXml = manipulatedXml; } } /** * The {@code ExceptionRegistry} used to handle exceptions. */ @Autowired @Qualifier(DefaultValues.EXCEPTIONREGISTRY_ID) protected IExceptionRegistry exceptionRegistry; /** * The loaded configuration used to load the {@code TidaModel} instances * using {@link IConfiguration#loadDelayed(String, InputStream)}. 
*/ @Autowired() @Qualifier("coreConfiguration") protected IConfiguration configuration; private Map<String, IModuleHolder> moduleHolders = new ConcurrentHashMap<>(); private Map<String, byte[]> configurations = new ConcurrentHashMap<>(); /** * Initializes the handler. */ public void init() { this.autoloadModels(); } /** * The {@code TidaModel} instances held by {@code this}. * * @return a set of loaded {@code TidaModel} instances */ public Set<String> getTidaModels() { return Collections.unmodifiableSet(moduleHolders.keySet()); } /** * Gets all the identifiers loaded automatically. * * @return the identifiers loaded automatically */ public Set<String> getAutoloadedTidaModels() { autoloadLock.readLock().lock(); try { final Set<String> modelIds = _readAutoloads(); return modelIds; } finally { autoloadLock.readLock().unlock(); } } /** * Gets the models available by the handler. * * @return the available (i.e. also not loaded models) */ public Set<String> getAvailableTidaModels() { final Set<String> set = new HashSet<>(); final File defLoc = new File(getDefaultLocation()); final List<File> modelDirs = Files.getCurrentSubDirectories(defLoc); for (final File modelDir : modelDirs) { final File modelFile = new File(modelDir, MODEL_FILENAME); if (Files.checkFile(modelFile, false) != null) { set.add(modelDir.getName()); } } return set; } /** * Gets the {@code TidaModel} loaded by the {@code ModuleHolder} with the * specified id. If no {@code ModuleHolder} with the specified id is loaded, * {@code null} is returned. * * @param id * the if to moduleHolder to get the {@code TidaModel} for * * @return the {@code TidaModel} or {@code null} if the id is unknown */ public TidaModel getTidaModel(final String id) { final IModuleHolder holder = moduleHolders.get(id); if (holder == null) { return null; } else { return holder.getModule(DefaultValues.TIDAMODEL_ID); } } /** * Helper method to load the modules defined by the specified {@code is}. 
* The loaded {@code ModuleHolder} is kept internally within a {@code Map}. * * @param is * the {@code InputStream} to load the modules from * @param force * {@code true} if the model should be loaded even if this might * lead to an exception, because another or the same model with * the same identifier exists, otherwise {@code false} * * @return the loaded {@code ModuleHolder} * * @throws RuntimeException * if the specified resource is invalid or falsely defined */ protected IModuleHolder getModuleHolder(final InputStream is, final boolean force) { if (is == null) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1000); } // keep the complete configuration in memory for saving purposes final byte[] config; try { config = Streams.copyStreamToByteArray(is); } catch (final IOException e) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1002); return null; } // load the configuration final InputStream bais = new ByteArrayInputStream(config); IModuleHolder moduleHolder = null; try { moduleHolder = configuration.loadDelayed("tidaXsltModelLoader", bais); } catch (final BeanCreationException e) { final Throwable cause = e.getMostSpecificCause(); if (cause != null && cause instanceof ForwardedRuntimeException) { exceptionRegistry .throwRuntimeException((ForwardedRuntimeException) cause); } else { throw e; } } catch (final ForwardedRuntimeException e) { exceptionRegistry.throwRuntimeException(e); } // close the streams silently Streams.closeIO(bais); Streams.closeIO(is); final TidaModel model = moduleHolder .getModule(DefaultValues.TIDAMODEL_ID); final String id = model.getId(); if (moduleHolders.get(id) == null) { moduleHolders.put(id, moduleHolder); configurations.put(id, config); if (LOG.isInfoEnabled()) { LOG.info("Loaded ModuleHolder '" + id + "'."); } } else if (!force) { return moduleHolders.get(id); } else { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1001, id); } return moduleHolder; 
} /** * Unloads all the loaded {@code TidaModel} instances. */ public synchronized void unloadAll() { for (final IModuleHolder moduleHolder : moduleHolders.values()) { final TidaModel model = moduleHolder .getModule(DefaultValues.TIDAMODEL_ID); model.release(); moduleHolder.release(); } if (LOG.isInfoEnabled()) { LOG.info("Unloaded all ModuleHolder " + moduleHolders.keySet() + "."); } configurations.clear(); moduleHolders.clear(); } /** * Unload the specified {@code TidaModel} instance and all the related * stuff. * * @param id * the identifier of the {@code TidaModel} to be unloaded, the * identifier is specified when loading the {@code TidaModel} */ public synchronized void unload(final String id) { final IModuleHolder moduleHolder = moduleHolders.get(id); if (moduleHolder != null) { moduleHolders.remove(id); configurations.remove(id); moduleHolder.release(); // release the model final TidaModel model = moduleHolder.getModule(DefaultValues.TIDAMODEL_ID); model.release(); if (LOG.isInfoEnabled()) { LOG.info("Unloaded ModuleHolder '" + id + "'."); } } } /** * Loads a {@code TidaModel} from the specified {@code classPathResource}. * * @param file * the {@code File} to load the {@code TidaModel} from * * @return the loaded instance of the {@code TidaModel} */ public TidaModel loadViaXslt(final File file) { if (file == null) { return loadViaXslt((InputStream) null); } else { try { return loadViaXslt(new FileInputStream(file)); } catch (final FileNotFoundException e) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1003, e, file); return null; } } } /** * Loads a {@code TidaModel} from the specified {@code classPathResource}. 
* * @param classPathResource * the classpath location to load the {@code TidaModel} from * * @return the loaded instance of the {@code TidaModel} */ public TidaModel loadViaXslt(final String classPathResource) { if (classPathResource == null) { return loadViaXslt((InputStream) null); } else { return loadViaXslt(getClass() .getResourceAsStream(classPathResource)); } } /** * Loads a {@code TidaModel} from the specified {@code is}. * * @param is * the {@code InputStream} to load the {@code TidaModel} from * * @return the loaded instance of the {@code TidaModel} */ public synchronized TidaModel loadViaXslt(final InputStream is) { return loadViaXslt(is, true); } /** * Loads a {@code TidaModel} from the specified {@code is}. * * @param is * the {@code InputStream} to load the {@code TidaModel} from * @param force * {@code true} if the model should be loaded even if this might * lead to an exception, because another or the same model with * the same identifier exists, otherwise {@code false} * * @return the loaded instance of the {@code TidaModel} */ public synchronized TidaModel loadViaXslt(final InputStream is, final boolean force) { final TidaModel model = getModuleHolder(is, force).getModule( DefaultValues.TIDAMODEL_ID); // check if the model is already loaded if (!force && model.isInitialized()) { return model; } else if (LOG.isInfoEnabled()) { LOG.info("Loaded TidaModel '" + model.getId() + "' from ModuleHolder '" + model.getId() + "'."); } // initialize the model try { model.initialize(); // define the file to store the model at final String modelId = model.getId(); final File modelDir = getModelDir(model.getId()); final File modelFile = new File(modelDir, MODEL_FILENAME); if (modelFile.exists()) { if (modelFile.isDirectory()) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1006, modelFile); } else if (!modelFile.delete()) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1007, modelFile); } } else { 
modelDir.mkdirs(); } // copy the file to the specified location final byte[] config = configurations.get(modelId); try { Streams.copyStreamToFile(new ByteArrayInputStream(config), modelFile); } catch (final IOException e) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1008, e, modelFile); } } catch (final Throwable t) { if (model != null) { /* * if an exception is thrown we have to remove the model, at * least try it */ try { this.deleteModel(model.getId()); } catch (final RuntimeException ignore) { if (LOG.isWarnEnabled()) { LOG.warn("Deleting the failed model '" + model.getId() + "' failed.", ignore); } } } if (t instanceof RuntimeException) { throw (RuntimeException) t; } else { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1016, t, model.getId(), t.getLocalizedMessage()); } } return model; } /** * Loads a previously loaded model from the default location, i.e. the * location where all the models are stored. * * @param modelId * the identifier of the model to be loaded * * @return the loaded {@code TidaModel} */ public synchronized TidaModel loadFromDefaultLocation(final String modelId) { final File modelDir = new File(getDefaultLocation(), modelId); final File modelFile = new File(modelDir, MODEL_FILENAME); // check if the file does not exist or if it's a directory if (!modelFile.exists() || modelFile.isDirectory()) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1009, modelId, modelFile); } return loadViaXslt(modelFile); } /** * Loads a {@code TidaModel} using a persisted {@code location}. 
* * @param location * the {@code Location} to load the {@code TidaModel} from * * @return the loaded {@code TidaModel} * * @see ILocation */ public TidaModel load(final ILocation location) { final Identifier configId = new Identifier("config.xml"); final ZipPersistor persistor = new ZipPersistor(exceptionRegistry); // set the identifiers to be handled persistor.setIncludedIdentifier(configId); // just load the MetaData final MetaData config = new MetaData(configId); persistor.load(location, config); // manipulate the configuration so that it can be used for loading final ManipulatedXml xml = manipulateXmlForLoading(config.getStream(), "offlinemode", "auto"); // load the xml final ByteArrayInputStream configIs = new ByteArrayInputStream( xml.getXml()); final TidaModel model = loadViaXslt(configIs); if (!"auto".equalsIgnoreCase(xml.getOldValue("offlinemode"))) { if (LOG.isWarnEnabled()) { LOG.warn("The 'offlinemode' of the model was changed during the loading from the location '" + location + "' (old: '" + xml.getOldValue("offlinemode") + "', new: 'auto')"); } } // close the created and used stream Streams.closeIO(configIs); // set the identifiers to be handled persistor.clearAllIdentifiers(); persistor.addExcludedIdentifier(configId); // register the model and load again persistor.register(configId.getGroup().append("model"), model); persistor.load(location); return model; } /** * Saves the {@code TidaModel} specified by the {@code id} under the * specified {@code location}. 
* * @param id * the id of the {@code TidaModel} to be saved * @param location * the {@code Location} to store the data * * @see ILocation */ public void save(final String id, final ILocation location) { final Identifier configId = new Identifier("config.xml"); final ZipPersistor persistor = new ZipPersistor(exceptionRegistry); // get the configuration and the holder final IModuleHolder moduleHolder = moduleHolders.get(id); if (moduleHolder == null) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1004, id); } final byte[] config = configurations.get(id); // get the module from the holder final TidaModel model = moduleHolder.getModule(DefaultValues.TIDAMODEL_ID); persistor.register(configId.getGroup().append("model"), model); // write the file final InputStream is = new ByteArrayInputStream(config); // save the data with the additional MetaData persistor.save(location, new MetaData(configId, is)); } /** * Manipulates the specified {@code xml} to be used for loading. * * @param xml * the xml to be manipulated * @param key * the attribute to be manipulated * @param value * the new value of the attribute * * @return the manipulated xml */ protected ManipulatedXml manipulateXmlForLoading(final byte[] xml, final String key, final String value) { final Properties p = new Properties(); p.setProperty("offlinemode", "auto"); return manipulateXmlForLoading(xml, p); } /** * Manipulates the specified {@code xml} to be used for loading. * * @param xml * the xml to be manipulated * @param properties * the attributes to be manipulated * * @return the manipulated xml */ protected ManipulatedXml manipulateXmlForLoading(final byte[] xml, final Properties properties) { final InputStream bais = new ByteArrayInputStream(xml); final ManipulatedXml manipulatedXml = manipulateXmlForLoading(bais, properties); Streams.closeIO(bais); return manipulatedXml; } /** * Manipulates the specified {@code xml} to be used for loading. 
* * @param xml * the xml to be manipulated * @param key * the attribute to be manipulated * @param value * the new value of the attribute * * @return the manipulated xml */ protected ManipulatedXml manipulateXmlForLoading(final InputStream xml, final String key, final String value) { final Properties p = new Properties(); p.setProperty("offlinemode", "auto"); return manipulateXmlForLoading(xml, p); } /** * Manipulates the specified {@code xml} to be used for loading. * * @param xml * the xml to be manipulated * @param properties * the attributes to be manipulated * * @return the manipulated xml */ protected ManipulatedXml manipulateXmlForLoading(final InputStream xml, final Properties properties) { final Document doc = Xml.createDocument(xml, true); // check if the document could be read if (doc == null) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1005); } // prepare the result final ManipulatedXml manipulatedXml = new ManipulatedXml(); // get the root element and it's attributes final Node root = doc.getDocumentElement(); final NamedNodeMap nodeAttributes = root.getAttributes(); // manipulate the offlinemode attribute for (final Entry<Object, Object> e : properties.entrySet()) { final String key = (String) e.getKey(); Node attribute = nodeAttributes.getNamedItem(key); if (attribute == null) { attribute = doc.createAttribute(key); } else { manipulatedXml.addValue(key, attribute.getTextContent()); } attribute.setTextContent((String) e.getValue()); // make sure it is added nodeAttributes.setNamedItem(attribute); } manipulatedXml.setManipulatedXml(Xml.createByteArray(doc)); return manipulatedXml; } /** * Gets the default location under which the models are stored. * * @return the default location under which the models are stored */ public String getDefaultLocation() { return getDefaultLocation(false); } /** * Gets the default location under which the models are stored. If needed * (i.e. 
{@code createIfNotAvailable} is set to {@code true}) the location * is created. * * @param createIfNotAvailable * {@code true} if the location should be created if not * available, otherwise {@code false} * * @return the default location under which the models are stored */ public String getDefaultLocation(final boolean createIfNotAvailable) { if (createIfNotAvailable) { final File locFile = new File(defaultLocation); if (!locFile.exists() && !locFile.mkdirs()) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1015, defaultLocation); } } return defaultLocation; } /** * Specifies the default location under which the models are stored. * * @param defaultLocation * the default location under which the models are stored */ public void setDefaultLocation(final String defaultLocation) { this.defaultLocation = defaultLocation; } /** * Enables the automatically loading for the specified {@code modelId}. * * @param modelId * the modelId to be automatically loaded * @throws TidaModelHandlerException * if the {@code TidaModel} with the specified {@code modelId} * is not known, i.e. was never loaded before */ public void enableAutoload(final String modelId) throws TidaModelHandlerException { // check if the directory exists final File modelDir = getModelDir(modelId); if (!modelDir.exists()) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1011, modelId, modelDir); } // check if the file exists final File modelFile = new File(modelDir, MODEL_FILENAME); if (!modelFile.exists()) { exceptionRegistry.throwRuntimeException( TidaModelHandlerException.class, 1011, modelId, modelFile); } // the model is qualified to be autoloaded setAutoload(modelId, true); } /** * Disables the automatically loading for the specified {@code modelId}. 
 *
 * @param modelId
 *            the modelId for which the automatic loading is disabled
 */
public void disableAutoload(final String modelId) {
	setAutoload(modelId, false);
}

/**
 * Loads all the models defined for automatically loading.
 */
public void autoloadModels() {
	// read the set of auto-load ids under the read-lock only; the actual
	// loading happens outside the lock
	autoloadLock.readLock().lock();

	final Collection<String> modelIds;
	try {
		modelIds = _readAutoloads();
	} finally {
		autoloadLock.readLock().unlock();
	}

	if (LOG.isInfoEnabled()) {
		LOG.info("Autoloading the modules: " + modelIds);
	}

	// load each model
	for (final String modelId : modelIds) {
		this.loadFromDefaultLocation(modelId);
	}
}

/**
 * Sets the automatically loading, i.e. {@code true} to enable and
 * {@code false} to disable, for the specified {@code modelId}. The
 * complete auto-load file is rewritten under the write-lock.
 *
 * @param modelId
 *            the identifier of the model to be set
 * @param autoload
 *            {@code true} to enable auto-loading, otherwise {@code false}
 */
protected void setAutoload(final String modelId, final boolean autoload) {
	autoloadLock.writeLock().lock();

	try {
		final Set<String> modelIds = _readAutoloads();
		if (modelIds.contains(modelId) == autoload) {
			// autoload == true == contains => nothing to do
			// autoload == false == !contains => nothing to do
			return;
		}

		// modify the set according to the autoload argument
		if (autoload) {
			modelIds.add(modelId);
		} else {
			modelIds.remove(modelId);
		}

		// remove the file if it exists
		final File autoloadFile = getAutoloadFile();
		if (autoloadFile.exists() && !autoloadFile.delete()) {
			// NOTE: throwRuntimeException is expected to throw; the
			// explicit return guards the flow if it ever does not
			exceptionRegistry.throwRuntimeException(
					TidaModelHandlerException.class, 1012, autoloadFile);
			return;
		}

		// create the file and get access
		// NOTE(review): createNewFile()'s boolean result is ignored; a
		// failure surfaces via the FileOutputStream's IOException instead
		final FileOutputStream out;
		try {
			autoloadFile.createNewFile();
			out = new FileOutputStream(autoloadFile);
		} catch (final IOException e) {
			exceptionRegistry.throwRuntimeException(
					TidaModelHandlerException.class, 1013, e, autoloadFile);
			return;
		}

		// write everything to the file
		try {
			out.write(Streams.writeAllObjects(modelIds.toArray()));
		} catch (final IOException e) {
			exceptionRegistry.throwRuntimeException(
					TidaModelHandlerException.class, 1014, e, autoloadFile);
			return;
		} finally {
			Streams.closeIO(out);
		}
	} finally {
		autoloadLock.writeLock().unlock();
	}
}

/**
 * Reads the file of models to be automatically loaded and returns the
 * defined identifiers of the models to be automatically loaded. Entries
 * whose model directory no longer exists are silently skipped after being
 * reported through the registry.
 *
 * @return set of identifiers of {@code TidaModel} instances to be
 *         automatically loaded
 */
protected Set<String> _readAutoloads() {
	final Set<String> autoloads = new HashSet<>();

	final File autoloadFile = getAutoloadFile();
	if (!autoloadFile.exists()) {
		// do nothing, no auto-load file means no auto-loads
	} else if (autoloadFile.isDirectory()) {
		exceptionRegistry.throwRuntimeException(
				TidaModelHandlerException.class, 1010, autoloadFile);
	} else {
		final IByteBufferReader reader = Streams
				.createByteBufferReader(autoloadFile);

		try {
			// read all the objects
			while (reader.hasRemaining()) {
				final String modelId = (String) Streams
						.readNextObject(reader);
				final File modelDir = getModelDir(modelId);

				// validate the directory
				if (!modelDir.exists() || !modelDir.isDirectory()) {
					exceptionRegistry.throwRuntimeException(
							TidaModelHandlerException.class, 1011, modelId,
							modelDir);
				} else {
					autoloads.add(modelId);
				}
			}
		} finally {
			Streams.closeIO(reader);
		}
	}

	return autoloads;
}

/**
 * Gets the file containing all the models' identifier to be automatically
 * loaded.
 *
 * @return the file containing all the models' identifier to be
 *         automatically loaded
 *
 * @see #_readAutoloads()
 */
protected File getAutoloadFile() {
	return new File(getDefaultLocation(), AUTOLOAD_FILENAME);
}

/**
 * Gets the default directory used to store data about a specific
 * {@code TidaModel}.
 *
 * @param modelId
 *            the identifier of the model to get the directory for
 *
 * @return default directory used to store data for the {@code TidaModel}
 *         with the specified {@code modelId}
 */
protected File getModelDir(final String modelId) {
	return new File(getDefaultLocation(), modelId);
}

/**
 * Deletes the model with the specified id and removes it from the handler.
 *
 * @param modelId
 *            the model to be deleted
 */
public void deleteModel(final String modelId) {

	// we have to get the loaded model (if it exists) before unloading,
	// so it can still be released further down
	final TidaModel model = getTidaModel(modelId);

	disableAutoload(modelId);
	unload(modelId);

	// finally we have to delete the model's files; deleteOnExitDir
	// presumably defers removal of locked files until the JVM exits —
	// TODO confirm against the Files helper
	final File dir = getModelDir(modelId);
	if (dir.exists() && !Files.deleteOnExitDir(dir)) {
		if (LOG.isWarnEnabled()) {
			LOG.warn("Unable to remove the model's directory at '"
					+ Files.getCanonicalPath(dir) + "'.");
		}
	}

	// release the model's resources (if a loaded model existed)
	if (model != null) {
		try {
			model.release(true);
		} catch (final TidaModelException e) {
			/*
			 * TODO: the directory might be not deletable, because of MapDB
			 */
			if (LOG.isErrorEnabled()) {
				LOG.error("Could not clean-up correctly!", e);
			}
		}
	}
}
}
package com.smartdevicelink.api.screen; import android.support.annotation.NonNull; import android.util.Log; import com.smartdevicelink.api.BaseSubManager; import com.smartdevicelink.api.CompletionListener; import com.smartdevicelink.api.FileManager; import com.smartdevicelink.api.MultipleFileCompletionListener; import com.smartdevicelink.api.datatypes.SdlArtwork; import com.smartdevicelink.protocol.enums.FunctionID; import com.smartdevicelink.proxy.RPCNotification; import com.smartdevicelink.proxy.RPCResponse; import com.smartdevicelink.proxy.interfaces.ISdl; import com.smartdevicelink.proxy.interfaces.OnSystemCapabilityListener; import com.smartdevicelink.proxy.rpc.DisplayCapabilities; import com.smartdevicelink.proxy.rpc.OnButtonEvent; import com.smartdevicelink.proxy.rpc.OnButtonPress; import com.smartdevicelink.proxy.rpc.OnHMIStatus; import com.smartdevicelink.proxy.rpc.Show; import com.smartdevicelink.proxy.rpc.SoftButton; import com.smartdevicelink.proxy.rpc.SoftButtonCapabilities; import com.smartdevicelink.proxy.rpc.enums.ButtonName; import com.smartdevicelink.proxy.rpc.enums.HMILevel; import com.smartdevicelink.proxy.rpc.enums.Result; import com.smartdevicelink.proxy.rpc.enums.SoftButtonType; import com.smartdevicelink.proxy.rpc.enums.SystemCapabilityType; import com.smartdevicelink.proxy.rpc.listeners.OnRPCNotificationListener; import com.smartdevicelink.proxy.rpc.listeners.OnRPCResponseListener; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.CopyOnWriteArrayList; /** * <strong>SoftButtonManager</strong> <br> * SoftButtonManager gives the developer the ability to control how soft buttons are displayed on the head unit.<br> * Note: This class must be accessed through the SdlManager->ScreenManager. 
Do not instantiate it by itself.<br> */ class SoftButtonManager extends BaseSubManager { private static final String TAG = "SoftButtonManager"; private FileManager fileManager; private DisplayCapabilities displayCapabilities; private SoftButtonCapabilities softButtonCapabilities; private CopyOnWriteArrayList<SoftButtonObject> softButtonObjects; private HMILevel currentHMILevel; private Show inProgressShowRPC; private CompletionListener inProgressListener, queuedUpdateListener, cachedListener; private boolean hasQueuedUpdate, batchUpdates, waitingOnHMILevelUpdateToSetButtons; private final OnSystemCapabilityListener onSoftButtonCapabilitiesListener, onDisplayCapabilitiesListener; private final OnRPCNotificationListener onHMIStatusListener, onButtonPressListener, onButtonEventListener; private final SoftButtonObject.UpdateListener updateListener; /** * HAX: This is necessary due to a Ford Sync 3 bug that doesn't like Show requests without a main field being set (it will accept them, but with a GENERIC_ERROR, and 10-15 seconds late...) 
*/ private String currentMainField1; /** * Creates a new instance of the SoftButtonManager * @param internalInterface * @param fileManager */ SoftButtonManager(ISdl internalInterface, FileManager fileManager) { super(internalInterface); transitionToState(BaseSubManager.SETTING_UP); this.fileManager = fileManager; this.softButtonObjects = new CopyOnWriteArrayList<>(); this.currentHMILevel = HMILevel.HMI_NONE; // Assume NONE until we get something else this.waitingOnHMILevelUpdateToSetButtons = false; this.updateListener = new SoftButtonObject.UpdateListener() { @Override public void onUpdate() { update(null); } }; // Add OnSoftButtonCapabilitiesListener to keep softButtonCapabilities updated onSoftButtonCapabilitiesListener = new OnSystemCapabilityListener() { @Override public void onCapabilityRetrieved(Object capability) { List<SoftButtonCapabilities> softButtonCapabilitiesList = (List<SoftButtonCapabilities>) capability; if (softButtonCapabilitiesList != null && !softButtonCapabilitiesList.isEmpty()) { softButtonCapabilities = softButtonCapabilitiesList.get(0); } else { softButtonCapabilities = null; } } @Override public void onError(String info) { Log.w(TAG, "SoftButton Capability cannot be retrieved:"); softButtonCapabilities = null; } }; this.internalInterface.addOnSystemCapabilityListener(SystemCapabilityType.SOFTBUTTON, onSoftButtonCapabilitiesListener); // Add OnDisplayCapabilitiesListener to keep displayCapabilities updated onDisplayCapabilitiesListener = new OnSystemCapabilityListener() { @Override public void onCapabilityRetrieved(Object capability) { displayCapabilities = (DisplayCapabilities) capability; } @Override public void onError(String info) { Log.w(TAG, "Display Capability cannot be retrieved:"); displayCapabilities = null; } }; this.internalInterface.addOnSystemCapabilityListener(SystemCapabilityType.DISPLAY, onDisplayCapabilitiesListener); // Add OnHMIStatusListener to keep currentHMILevel updated this.onHMIStatusListener = new 
OnRPCNotificationListener() { @Override public void onNotified(RPCNotification notification) { OnHMIStatus onHMIStatus = (OnHMIStatus) notification; HMILevel oldHmiLevel = currentHMILevel; currentHMILevel = onHMIStatus.getHmiLevel(); // Auto-send an updated show if we were in NONE and now we are not if (oldHmiLevel == HMILevel.HMI_NONE && currentHMILevel != HMILevel.HMI_NONE) { if (waitingOnHMILevelUpdateToSetButtons) { setSoftButtonObjects(softButtonObjects); } else { update(cachedListener); } } } }; this.internalInterface.addOnRPCNotificationListener(FunctionID.ON_HMI_STATUS, onHMIStatusListener); // Add OnButtonPressListener to notify SoftButtonObjects when there is a button press this.onButtonPressListener = new OnRPCNotificationListener() { @Override public void onNotified(RPCNotification notification) { OnButtonPress onButtonPress = (OnButtonPress) notification; if (onButtonPress!= null && onButtonPress.getButtonName() == ButtonName.CUSTOM_BUTTON) { Integer buttonId = onButtonPress.getCustomButtonName(); if (getSoftButtonObjects() != null) { for (SoftButtonObject softButtonObject : getSoftButtonObjects()) { if (softButtonObject.getButtonId() == buttonId && softButtonObject.getOnEventListener() != null) { softButtonObject.getOnEventListener().onPress(getSoftButtonObjectById(buttonId), onButtonPress); break; } } } } } }; this.internalInterface.addOnRPCNotificationListener(FunctionID.ON_BUTTON_PRESS, onButtonPressListener); // Add OnButtonEventListener to notify SoftButtonObjects when there is a button event this.onButtonEventListener = new OnRPCNotificationListener() { @Override public void onNotified(RPCNotification notification) { OnButtonEvent onButtonEvent = (OnButtonEvent) notification; if (onButtonEvent!= null && onButtonEvent.getButtonName() == ButtonName.CUSTOM_BUTTON) { Integer buttonId = onButtonEvent.getCustomButtonID(); if (getSoftButtonObjects() != null) { for (SoftButtonObject softButtonObject : getSoftButtonObjects()) { if 
(softButtonObject.getButtonId() == buttonId && softButtonObject.getOnEventListener() != null) { softButtonObject.getOnEventListener().onEvent(getSoftButtonObjectById(buttonId), onButtonEvent); break; } } } } } }; this.internalInterface.addOnRPCNotificationListener(FunctionID.ON_BUTTON_EVENT, onButtonEventListener); // Transition to ready state transitionToState(READY); } /** * Get the SoftButtonObject that has the provided name * @param name a String value that represents the name * @return a SoftButtonObject */ protected SoftButtonObject getSoftButtonObjectByName(String name) { for (SoftButtonObject softButtonObject : softButtonObjects) { if (softButtonObject.getName().equals(name)) { return softButtonObject; } } return null; } /** * Get the SoftButtonObject that has the provided buttonId * @param buttonId a int value that represents the id of the button * @return a SoftButtonObject */ protected SoftButtonObject getSoftButtonObjectById(int buttonId) { for (SoftButtonObject softButtonObject : softButtonObjects) { if (softButtonObject.getButtonId() == buttonId) { return softButtonObject; } } return null; } /** * Get the soft button objects list * @return a List<SoftButtonObject> */ protected List<SoftButtonObject> getSoftButtonObjects() { return softButtonObjects; } /** * Set softButtonObjects list and upload the images to the head unit * @param list the list of the SoftButtonObject values that should be displayed on the head unit */ protected void setSoftButtonObjects(@NonNull List<SoftButtonObject> list) { // Convert the List to CopyOnWriteArrayList CopyOnWriteArrayList<SoftButtonObject> softButtonObjects; if(list instanceof CopyOnWriteArrayList){ softButtonObjects = (CopyOnWriteArrayList<SoftButtonObject>) list; }else{ softButtonObjects = new CopyOnWriteArrayList<>(list); } if (hasTwoSoftButtonObjectsOfSameName(softButtonObjects)) { this.softButtonObjects = new CopyOnWriteArrayList<>(); Log.e(TAG, "Attempted to set soft button objects, but two buttons had the 
same name"); return; } // Set ids and updateListeners for soft button objects for (int i = 0; i < softButtonObjects.size(); i++) { softButtonObjects.get(i).setButtonId(i * 100); softButtonObjects.get(i).setUpdateListener(updateListener); } this.softButtonObjects = softButtonObjects; if (currentHMILevel == null || currentHMILevel == HMILevel.HMI_NONE) { waitingOnHMILevelUpdateToSetButtons = true; return; } // End any in-progress update inProgressShowRPC = null; if (inProgressListener != null) { inProgressListener.onComplete(false); inProgressListener = null; } // End any queued update hasQueuedUpdate = false; if (queuedUpdateListener != null) { queuedUpdateListener.onComplete(false); queuedUpdateListener = null; } // Prepare soft button images to be uploaded to the head unit. // we will prepare a list for initial state images and another list for other state images // so we can upload the initial state images first, then the other states images. List<SdlArtwork> initialStatesToBeUploaded = new ArrayList<>(); List<SdlArtwork> otherStatesToBeUploaded = new ArrayList<>(); if (softButtonImagesSupported()) { for (SoftButtonObject softButtonObject : softButtonObjects) { SoftButtonState initialState = null; if (softButtonObject != null) { initialState = softButtonObject.getCurrentState(); } if (initialState != null && softButtonObject.getStates() != null) { for (SoftButtonState softButtonState : softButtonObject.getStates()) { if (softButtonState != null && softButtonState.getName() != null && softButtonState.getArtwork() != null && !fileManager.hasUploadedFile(softButtonState.getArtwork())) { if (softButtonState.getName().equals(initialState.getName())) { initialStatesToBeUploaded.add(softButtonObject.getCurrentState().getArtwork()); } else{ otherStatesToBeUploaded.add(softButtonState.getArtwork()); } } } } } } // Upload initial state images if (initialStatesToBeUploaded.size() > 0) { Log.v(TAG, "Uploading soft button initial state artworks"); 
fileManager.uploadArtworks(initialStatesToBeUploaded, new MultipleFileCompletionListener() { @Override public void onComplete(Map<String, String> errors) { if (errors != null && errors.size() > 0) { Log.e(TAG, "Error uploading soft button artworks"); } Log.d(TAG, "Soft button initial artworks uploaded"); update(cachedListener); } }); } // Upload other state images if (otherStatesToBeUploaded.size() > 0) { Log.v(TAG, "Uploading soft button other state artworks"); fileManager.uploadArtworks(otherStatesToBeUploaded, new MultipleFileCompletionListener() { @Override public void onComplete(Map<String, String> errors) { if (errors != null && errors.size() > 0) { Log.e(TAG, "Error uploading soft button artworks"); } Log.d(TAG, "Soft button other state artworks uploaded"); // In case our soft button states have changed in the meantime update(cachedListener); } }); } // This is necessary because there may be no images needed to be uploaded update(cachedListener); } /** * Update the SoftButtonManger by sending a new Show RPC to reflect the changes * @param listener a CompletionListener */ protected void update(CompletionListener listener) { cachedListener = listener; if (batchUpdates) { return; } // Don't send if we're in HMI NONE if (currentHMILevel == null || currentHMILevel == HMILevel.HMI_NONE) { return; } Log.v(TAG, "Updating soft buttons"); cachedListener = null; // Check if we have update already in progress if (inProgressShowRPC != null) { Log.d(TAG, "In progress update exists, queueing update"); // If we already have a pending update, we're going to tell the old listener that it was superseded by a new update and then return if (queuedUpdateListener != null) { Log.d(TAG, "Queued update already exists, superseding previous queued update"); queuedUpdateListener.onComplete(false); queuedUpdateListener = null; } // Note: the queued update will be started after the in-progress one finishes if (listener != null) { queuedUpdateListener = listener; } hasQueuedUpdate = true; 
return; } // Send Show RPC with soft buttons representing the current state for the soft button objects inProgressListener = listener; inProgressShowRPC = new Show(); inProgressShowRPC.setMainField1(getCurrentMainField1()); if (softButtonObjects == null) { Log.d(TAG, "Soft button objects are null, sending an empty array"); inProgressShowRPC.setSoftButtons(new ArrayList<SoftButton>()); } else if ((currentStateHasImages() && !allCurrentStateImagesAreUploaded()) || !softButtonImagesSupported()) { // The images don't yet exist on the head unit, or we cannot use images, send a text update if possible, otherwise, don't send anything yet List<SoftButton> textOnlySoftButtons = createTextSoftButtonsForCurrentState(); if (textOnlySoftButtons != null) { Log.d(TAG, "Soft button images unavailable, sending text buttons"); inProgressShowRPC.setSoftButtons(textOnlySoftButtons); } else { Log.d(TAG, "Soft button images unavailable, text buttons unavailable"); inProgressShowRPC = null; return; } } else { Log.d(TAG, "Sending soft buttons with images"); inProgressShowRPC.setSoftButtons(createSoftButtonsForCurrentState()); } inProgressShowRPC.setOnRPCResponseListener(new OnRPCResponseListener() { @Override public void onResponse(int correlationId, RPCResponse response) { Log.i(TAG, "Soft button update completed"); handleResponse(true); } @Override public void onError(int correlationId, Result resultCode, String info) { super.onError(correlationId, resultCode, info); Log.e(TAG, "Soft button update error"); handleResponse(false); } private void handleResponse(boolean success){ inProgressShowRPC = null; CompletionListener currentListener; if (inProgressListener != null) { currentListener = inProgressListener; inProgressListener = null; currentListener.onComplete(success); } if (hasQueuedUpdate) { Log.d(TAG, "Queued update exists, sending another update"); currentListener = queuedUpdateListener; queuedUpdateListener = null; hasQueuedUpdate = false; update(currentListener); } } }); 
internalInterface.sendRPCRequest(inProgressShowRPC); } private boolean softButtonImagesSupported(){ boolean graphicSupported = false; if (displayCapabilities == null || displayCapabilities.getGraphicSupported()){ graphicSupported = true; } boolean imageSupported = false; if (softButtonCapabilities == null || softButtonCapabilities.getImageSupported()){ imageSupported = true; } return graphicSupported && imageSupported; } /** * Check if two SoftButtonObject have the same name * @param softButtonObjects * @return a boolean value */ private boolean hasTwoSoftButtonObjectsOfSameName(List<SoftButtonObject> softButtonObjects) { for (int i = 0; i < softButtonObjects.size(); i++) { String buttonName = softButtonObjects.get(i).getName(); for (int j = (i + 1); j < softButtonObjects.size(); j++) { if (softButtonObjects.get(j).getName().equals(buttonName)) { return true; } } } return false; } /** * Get the TextField1 * @return currentMainField1 */ protected String getCurrentMainField1() { if (currentMainField1 == null){ return ""; } return currentMainField1; } /** * Set the TextField1 * @param currentMainField1 */ protected void setCurrentMainField1(String currentMainField1) { this.currentMainField1 = currentMainField1; } /** * Set the batchUpdates flag that represents whether the manager should wait until commit() is called to send the updated show RPC * @param batchUpdates */ protected void setBatchUpdates(boolean batchUpdates) { this.batchUpdates = batchUpdates; } /** * Clean up everything after the manager is no longer needed */ @Override public void dispose() { super.dispose(); transitionToState(SHUTDOWN); // Remove listeners internalInterface.removeOnRPCNotificationListener(FunctionID.ON_HMI_STATUS, onHMIStatusListener); internalInterface.removeOnRPCNotificationListener(FunctionID.ON_BUTTON_PRESS, onButtonPressListener); internalInterface.removeOnRPCNotificationListener(FunctionID.ON_BUTTON_EVENT, onButtonEventListener); 
internalInterface.removeOnSystemCapabilityListener(SystemCapabilityType.SOFTBUTTON, onSoftButtonCapabilitiesListener); internalInterface.removeOnSystemCapabilityListener(SystemCapabilityType.DISPLAY, onDisplayCapabilitiesListener); } /** * Check if the current state for any SoftButtonObject has images * @return a boolean value */ private boolean currentStateHasImages() { for (SoftButtonObject softButtonObject : this.softButtonObjects) { if (softButtonObject.getCurrentState() != null && softButtonObject.getCurrentState().getArtwork() != null) { return true; } } return false; } /** * Check if the current state for any SoftButtonObject has images that are not uploaded yet * @return a boolean value */ private boolean allCurrentStateImagesAreUploaded() { for (SoftButtonObject softButtonObject : softButtonObjects) { SoftButtonState currentState = softButtonObject.getCurrentState(); if (currentState != null && currentState.getArtwork() != null && !fileManager.hasUploadedFile(currentState.getArtwork())) { return false; } } return true; } /** * Returns text soft buttons representing the initial states of the button objects, or null if _any_ of the buttons' current states are image only buttons. 
* @return The text soft buttons */ private List<SoftButton> createTextSoftButtonsForCurrentState() { List<SoftButton> textButtons = new ArrayList<>(); for (SoftButtonObject softButtonObject : softButtonObjects) { SoftButton softButton = softButtonObject.getCurrentStateSoftButton(); if (softButton.getText() == null) { return null; } // We should create a new softButtonObject rather than modifying the original one SoftButton textOnlySoftButton = new SoftButton(SoftButtonType.SBT_TEXT, softButton.getSoftButtonID()); textOnlySoftButton.setText(softButton.getText()); textButtons.add(textOnlySoftButton); } return textButtons; } /** * Returns a list of the SoftButton for the SoftButtonObjects' current state * @return a List<SoftButton> */ protected List<SoftButton> createSoftButtonsForCurrentState() { List<SoftButton> softButtons = new ArrayList<>(); for (SoftButtonObject softButtonObject : softButtonObjects) { softButtons.add(softButtonObject.getCurrentStateSoftButton()); } return softButtons; } }
package org.pentaho.di.job.entries.ftpdelete; import static org.pentaho.di.job.entry.validator.AndValidator.putValidators; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileExistsValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notBlankValidator; import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notNullValidator; import java.io.File; import java.net.InetAddress; import java.util.List; import java.util.Vector; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.Iterator; import java.util.HashSet; import org.apache.log4j.Logger; import org.pentaho.di.cluster.SlaveServer; import org.pentaho.di.core.CheckResultInterface; import org.pentaho.di.core.Const; import org.pentaho.di.core.Result; import org.pentaho.di.core.RowMetaAndData; import org.pentaho.di.core.database.DatabaseMeta; import org.pentaho.di.core.exception.KettleDatabaseException; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleXMLException; import org.pentaho.di.core.logging.LogWriter; import org.pentaho.di.core.xml.XMLHandler; import org.pentaho.di.job.Job; import org.pentaho.di.job.JobEntryType; import org.pentaho.di.job.JobMeta; import org.pentaho.di.job.entries.ftpdelete.Messages; import org.pentaho.di.job.entry.JobEntryBase; import org.pentaho.di.job.entry.JobEntryInterface; import org.pentaho.di.repository.Repository; import org.pentaho.di.resource.ResourceEntry; import org.pentaho.di.resource.ResourceReference; import org.pentaho.di.resource.ResourceEntry.ResourceType; import org.w3c.dom.Node; import org.pentaho.di.job.entries.sftp.SFTPClient; import com.enterprisedt.net.ftp.FTPClient; import com.enterprisedt.net.ftp.FTPConnectMode; import com.enterprisedt.net.ftp.FTPException; import com.trilead.ssh2.Connection; import com.trilead.ssh2.HTTPProxyData; import 
com.trilead.ssh2.SFTPv3Client;
import com.trilead.ssh2.SFTPv3DirectoryEntry;

/**
 * Job entry that deletes files on a remote server, matching either a regular
 * expression (wildcard) or the file names passed in by the previous job entry
 * result rows. Three transport protocols are supported: plain FTP, SFTP and
 * SSH (SFTPv3 over a Trilead SSH-2 connection, optionally through an HTTP proxy
 * and/or authenticated with a public key).
 *
 * <p>Success is evaluated against a configurable condition: all files deleted
 * without error, at least N files deleted, or fewer than N errors.</p>
 *
 * @author Matt
 * @since 05-11-2003
 */
public class JobEntryFTPDelete extends JobEntryBase implements Cloneable, JobEntryInterface
{
    private static Logger log4j = Logger.getLogger(JobEntryFTPDelete.class);

    private String serverName;
    private String port;
    private String userName;
    private String password;
    private String ftpDirectory;
    private String wildcard;
    private int timeout;
    private boolean activeConnection;
    private boolean publicpublickey;
    private String keyFilename;
    private String keyFilePass;
    private boolean useproxy;
    private String proxyHost;
    private String proxyPort; /* string to allow variable substitution */
    private String proxyUsername;
    private String proxyPassword;
    private String protocol;

    // Success-condition codes stored in the job XML / repository.
    // Made static final: they are shared constants, not per-instance state.
    public static final String SUCCESS_IF_AT_LEAST_X_FILES_DOWNLOADED = "success_when_at_least";
    public static final String SUCCESS_IF_ERRORS_LESS                 = "success_if_errors_less";
    public static final String SUCCESS_IF_ALL_FILES_DOWNLOADED        = "success_is_all_files_downloaded";

    private String nr_limit_success;    // threshold for the success condition (variable substitution allowed)
    private String success_condition;   // one of the SUCCESS_IF_* codes above
    private boolean copyprevious;       // true: take file names from previous result rows instead of wildcard

    // Per-execution counters / state, reset at the start of execute().
    long NrErrors = 0;
    long NrfilesDeleted = 0;
    boolean successConditionBroken = false;
    String targetFilename = null;       // NOTE(review): never read in this class — kept for compatibility
    int limitFiles = 0;

    // One of these is non-null while connected, depending on the protocol.
    FTPClient ftpclient = null;
    SFTPClient sftpclient = null;
    SFTPv3Client sshclient = null;

    /**
     * Creates a job entry with defaults: FTP protocol, port 21, success when
     * all files are deleted, no public key authentication.
     *
     * @param n the name of the job entry
     */
    public JobEntryFTPDelete(String n)
    {
        super(n, "");
        copyprevious = false;
        protocol = "FTP";
        port = "21";
        nr_limit_success = "10";
        success_condition = SUCCESS_IF_ALL_FILES_DOWNLOADED;
        publicpublickey = false;
        keyFilename = null;
        keyFilePass = null;
        serverName = null;
        setID(-1L);
        setJobEntryType(JobEntryType.FTP_DELETE);
    }

    public JobEntryFTPDelete()
    {
        this("");
    }

    public JobEntryFTPDelete(JobEntryBase jeb)
    {
        super(jeb);
    }

    public Object clone()
    {
        JobEntryFTPDelete je = (JobEntryFTPDelete) super.clone();
        return je;
    }

    /**
     * Serializes this entry's settings to the job XML.
     *
     * @return the XML fragment describing this job entry
     */
    public String getXML()
    {
        StringBuffer retval = new StringBuffer(128);

        retval.append(super.getXML());
        retval.append("      ").append(XMLHandler.addTagValue("protocol", protocol));
        retval.append("      ").append(XMLHandler.addTagValue("servername", serverName));
        retval.append("      ").append(XMLHandler.addTagValue("port", port));
        retval.append("      ").append(XMLHandler.addTagValue("username", userName));
        retval.append("      ").append(XMLHandler.addTagValue("password", password));
        retval.append("      ").append(XMLHandler.addTagValue("ftpdirectory", ftpDirectory));
        retval.append("      ").append(XMLHandler.addTagValue("wildcard", wildcard));
        retval.append("      ").append(XMLHandler.addTagValue("timeout", timeout));
        retval.append("      ").append(XMLHandler.addTagValue("active", activeConnection));
        retval.append("      ").append(XMLHandler.addTagValue("useproxy", useproxy));
        retval.append("      ").append(XMLHandler.addTagValue("proxy_host", proxyHost)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("      ").append(XMLHandler.addTagValue("proxy_port", proxyPort)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("      ").append(XMLHandler.addTagValue("proxy_username", proxyUsername)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("      ").append(XMLHandler.addTagValue("proxy_password", proxyPassword)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append("      ").append(XMLHandler.addTagValue("publicpublickey", publicpublickey));
        retval.append("      ").append(XMLHandler.addTagValue("keyfilename", keyFilename));
        retval.append("      ").append(XMLHandler.addTagValue("keyfilepass", keyFilePass));
        retval.append("      ").append(XMLHandler.addTagValue("nr_limit_success", nr_limit_success));
        retval.append("      ").append(XMLHandler.addTagValue("success_condition", success_condition));
        retval.append("      ").append(XMLHandler.addTagValue("copyprevious", copyprevious));

        return retval.toString();
    }

    /**
     * Restores this entry's settings from the job XML.
     *
     * @throws KettleXMLException when the node cannot be parsed
     */
    public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException
    {
        try
        {
            super.loadXML(entrynode, databases, slaveServers);
            protocol = XMLHandler.getTagValue(entrynode, "protocol");
            port = XMLHandler.getTagValue(entrynode, "port"); //$NON-NLS-1$
            serverName = XMLHandler.getTagValue(entrynode, "servername");
            userName = XMLHandler.getTagValue(entrynode, "username");
            password = XMLHandler.getTagValue(entrynode, "password");
            ftpDirectory = XMLHandler.getTagValue(entrynode, "ftpdirectory");
            wildcard = XMLHandler.getTagValue(entrynode, "wildcard");
            timeout = Const.toInt(XMLHandler.getTagValue(entrynode, "timeout"), 10000); // default: 10s
            activeConnection = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "active"));
            useproxy = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "useproxy"));
            proxyHost = XMLHandler.getTagValue(entrynode, "proxy_host"); //$NON-NLS-1$
            proxyPort = XMLHandler.getTagValue(entrynode, "proxy_port"); //$NON-NLS-1$
            proxyUsername = XMLHandler.getTagValue(entrynode, "proxy_username"); //$NON-NLS-1$
            proxyPassword = XMLHandler.getTagValue(entrynode, "proxy_password"); //$NON-NLS-1$
            publicpublickey = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "publicpublickey"));
            keyFilename = XMLHandler.getTagValue(entrynode, "keyfilename");
            keyFilePass = XMLHandler.getTagValue(entrynode, "keyfilepass");
            nr_limit_success = XMLHandler.getTagValue(entrynode, "nr_limit_success");
            success_condition = XMLHandler.getTagValue(entrynode, "success_condition");
            copyprevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "copyprevious"));
        }
        catch(KettleXMLException xe)
        {
            throw new KettleXMLException("Unable to load job entry of type 'ftp' from XML node", xe);
        }
    }

    /**
     * Restores this entry's settings from the repository.
     *
     * @throws KettleException when the attributes cannot be read
     */
    public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
    {
        try
        {
            super.loadRep(rep, id_jobentry, databases, slaveServers);
            protocol = rep.getJobEntryAttributeString(id_jobentry, "protocol");
            port = rep.getJobEntryAttributeString(id_jobentry, "port");
            serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
            userName = rep.getJobEntryAttributeString(id_jobentry, "username");
            password = rep.getJobEntryAttributeString(id_jobentry, "password");
            ftpDirectory = rep.getJobEntryAttributeString(id_jobentry, "ftpdirectory");
            wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
            timeout = (int)rep.getJobEntryAttributeInteger(id_jobentry, "timeout");
            activeConnection = rep.getJobEntryAttributeBoolean(id_jobentry, "active");
            copyprevious = rep.getJobEntryAttributeBoolean(id_jobentry, "copyprevious");
            useproxy = rep.getJobEntryAttributeBoolean(id_jobentry, "useproxy");
            proxyHost = rep.getJobEntryAttributeString(id_jobentry, "proxy_host"); //$NON-NLS-1$
            proxyPort = rep.getJobEntryAttributeString(id_jobentry, "proxy_port"); //$NON-NLS-1$
            proxyUsername = rep.getJobEntryAttributeString(id_jobentry, "proxy_username"); //$NON-NLS-1$
            proxyPassword = rep.getJobEntryAttributeString(id_jobentry, "proxy_password"); //$NON-NLS-1$
            publicpublickey = rep.getJobEntryAttributeBoolean(id_jobentry, "publicpublickey");
            keyFilename = rep.getJobEntryAttributeString(id_jobentry, "keyfilename");
            keyFilePass = rep.getJobEntryAttributeString(id_jobentry, "keyfilepass");
            nr_limit_success = rep.getJobEntryAttributeString(id_jobentry, "nr_limit_success");
            success_condition = rep.getJobEntryAttributeString(id_jobentry, "success_condition");
        }
        catch(KettleException dbe)
        {
            throw new KettleException("Unable to load job entry of type 'ftp' from the repository for id_jobentry="+id_jobentry, dbe);
        }
    }

    /**
     * Saves this entry's settings to the repository.
     *
     * @throws KettleException when the attributes cannot be written
     */
    public void saveRep(Repository rep, long id_job) throws KettleException
    {
        try
        {
            super.saveRep(rep, id_job);
            rep.saveJobEntryAttribute(id_job, getID(), "protocol", protocol);
            rep.saveJobEntryAttribute(id_job, getID(), "port", port);
            rep.saveJobEntryAttribute(id_job, getID(), "servername", serverName);
            rep.saveJobEntryAttribute(id_job, getID(), "username", userName);
            rep.saveJobEntryAttribute(id_job, getID(), "password", password);
            rep.saveJobEntryAttribute(id_job, getID(), "ftpdirectory", ftpDirectory);
            rep.saveJobEntryAttribute(id_job, getID(), "wildcard", wildcard);
            rep.saveJobEntryAttribute(id_job, getID(), "timeout", timeout);
            rep.saveJobEntryAttribute(id_job, getID(), "active", activeConnection);
            rep.saveJobEntryAttribute(id_job, getID(), "copyprevious", copyprevious);
            rep.saveJobEntryAttribute(id_job, getID(), "useproxy", useproxy);
            rep.saveJobEntryAttribute(id_job, getID(), "publicpublickey", publicpublickey);
            rep.saveJobEntryAttribute(id_job, getID(), "keyfilename", keyFilename);
            rep.saveJobEntryAttribute(id_job, getID(), "keyfilepass", keyFilePass);
            rep.saveJobEntryAttribute(id_job, getID(), "proxy_host", proxyHost); //$NON-NLS-1$
            rep.saveJobEntryAttribute(id_job, getID(), "proxy_port", proxyPort); //$NON-NLS-1$
            rep.saveJobEntryAttribute(id_job, getID(), "proxy_username", proxyUsername); //$NON-NLS-1$
            rep.saveJobEntryAttribute(id_job, getID(), "proxy_password", proxyPassword); //$NON-NLS-1$
            rep.saveJobEntryAttribute(id_job, getID(), "nr_limit_success", nr_limit_success);
            rep.saveJobEntryAttribute(id_job, getID(), "success_condition", success_condition);
        }
        catch(KettleDatabaseException dbe)
        {
            throw new KettleException("Unable to save job entry of type 'ftp' to the repository for id_job="+id_job, dbe);
        }
    }

    /**
     * Evaluates the configured success condition against the current
     * error/deleted-file counters.
     *
     * @return true while the run still satisfies the success condition
     */
    private boolean getStatus()
    {
        boolean retval = false;

        if ((NrErrors == 0 && getSuccessCondition().equals(SUCCESS_IF_ALL_FILES_DOWNLOADED)) ||
            (NrfilesDeleted >= limitFiles && getSuccessCondition().equals(SUCCESS_IF_AT_LEAST_X_FILES_DOWNLOADED)) ||
            (NrErrors <= limitFiles && getSuccessCondition().equals(SUCCESS_IF_ERRORS_LESS)))
        {
            retval = true;
        }

        return retval;
    }

    public boolean isCopyPrevious()
    {
        return copyprevious;
    }

    public void setCopyPrevious(boolean copyprevious)
    {
        this.copyprevious = copyprevious;
    }

    /**
     * @param publickey The publicpublickey to set.
     */
    public void setUsePublicKey(boolean publickey)
    {
        this.publicpublickey = publickey;
    }

    /**
     * @return Returns the use public key.
     */
    public boolean isUsePublicKey()
    {
        return publicpublickey;
    }

    /**
     * @param keyfilename The key filename to set.
     */
    public void setKeyFilename(String keyfilename)
    {
        this.keyFilename = keyfilename;
    }

    /**
     * @return Returns the key filename.
     */
    public String getKeyFilename()
    {
        return keyFilename;
    }

    /**
     * @param keyFilePass The key file pass to set.
     */
    public void setKeyFilePass(String keyFilePass)
    {
        this.keyFilePass = keyFilePass;
    }

    /**
     * @return Returns the key file pass.
     */
    public String getKeyFilePass()
    {
        return keyFilePass;
    }

    public void setLimitSuccess(String nr_limit_successin)
    {
        this.nr_limit_success = nr_limit_successin;
    }

    public String getLimitSuccess()
    {
        return nr_limit_success;
    }

    public void setSuccessCondition(String success_condition)
    {
        this.success_condition = success_condition;
    }

    public String getSuccessCondition()
    {
        return success_condition;
    }

    /**
     * @return Returns the directory.
     */
    public String getFtpDirectory()
    {
        return ftpDirectory;
    }

    /**
     * @param directory The directory to set.
     */
    public void setFtpDirectory(String directory)
    {
        this.ftpDirectory = directory;
    }

    /**
     * @return Returns the password.
     */
    public String getPassword()
    {
        return password;
    }

    /**
     * @param password The password to set.
     */
    public void setPassword(String password)
    {
        this.password = password;
    }

    /**
     * @return Returns the serverName.
     */
    public String getServerName()
    {
        return serverName;
    }

    /**
     * @param serverName The serverName to set.
     */
    public void setServerName(String serverName)
    {
        this.serverName = serverName;
    }

    public void setProtocol(String protocol)
    {
        this.protocol = protocol;
    }

    public String getProtocol()
    {
        return protocol;
    }

    /**
     * @return Returns the userName.
     */
    public String getUserName()
    {
        return userName;
    }

    /**
     * @param userName The userName to set.
     */
    public void setUserName(String userName)
    {
        this.userName = userName;
    }

    /**
     * @return Returns the wildcard.
     */
    public String getWildcard()
    {
        return wildcard;
    }

    /**
     * @param wildcard The wildcard to set.
     */
    public void setWildcard(String wildcard)
    {
        this.wildcard = wildcard;
    }

    /**
     * @param timeout The timeout to set.
     */
    public void setTimeout(int timeout)
    {
        this.timeout = timeout;
    }

    /**
     * @return Returns the timeout.
     */
    public int getTimeout()
    {
        return timeout;
    }

    /**
     * @return Returns the hostname of the ftp-proxy.
     */
    public String getProxyHost()
    {
        return proxyHost;
    }

    /**
     * @param proxyHost The hostname of the proxy.
     */
    public void setProxyHost(String proxyHost)
    {
        this.proxyHost = proxyHost;
    }

    public boolean isUseProxy()
    {
        return useproxy;
    }

    public void setUseProxy(boolean useproxy)
    {
        this.useproxy = useproxy;
    }

    /**
     * @return Returns the password which is used to authenticate at the proxy.
     */
    public String getProxyPassword()
    {
        return proxyPassword;
    }

    /**
     * @param proxyPassword The password which is used to authenticate at the proxy.
     */
    public void setProxyPassword(String proxyPassword)
    {
        this.proxyPassword = proxyPassword;
    }

    /**
     * @return Returns the port of the ftp.
     */
    public String getPort()
    {
        return port;
    }

    /**
     * @param port The port of the ftp.
     */
    public void setPort(String port)
    {
        this.port = port;
    }

    /**
     * @return Returns the port of the ftp-proxy.
     */
    public String getProxyPort()
    {
        return proxyPort;
    }

    /**
     * @param proxyPort The port of the ftp-proxy.
     */
    public void setProxyPort(String proxyPort)
    {
        this.proxyPort = proxyPort;
    }

    /**
     * @return Returns the username which is used to authenticate at the proxy.
     */
    public String getProxyUsername()
    {
        return proxyUsername;
    }

    /**
     * @param proxyUsername The username which is used to authenticate at the proxy.
     */
    public void setProxyUsername(String proxyUsername)
    {
        this.proxyUsername = proxyUsername;
    }

    /**
     * Connects with the configured protocol, lists the remote directory,
     * deletes every file matching either the wildcard or the previous-result
     * file names, and evaluates the success condition.
     *
     * @return the result, with the number of deleted files and errors filled in
     */
    @SuppressWarnings("unchecked") // Needed for the Vector coming from sshclient.ls()
    public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
    {
        LogWriter log = LogWriter.getInstance();
        log4j.info(Messages.getString("JobEntryFTPDelete.Started", serverName)); //$NON-NLS-1$

        RowMetaAndData resultRow = null;
        Result result = previousResult;
        List<RowMetaAndData> rows = result.getRows();

        result.setResult(false);
        NrErrors = 0;
        NrfilesDeleted = 0;
        successConditionBroken = false;
        HashSet<String> list_previous_files = new HashSet<String>();

        // Resolve variables before starting the job.
        String realservername = environmentSubstitute(serverName);
        String realserverpassword = environmentSubstitute(password);
        String realFtpDirectory = environmentSubstitute(ftpDirectory);
        int realserverport = Const.toInt(environmentSubstitute(port), 0);
        String realUsername = environmentSubstitute(userName);
        String realPassword = environmentSubstitute(password);
        String realproxyhost = environmentSubstitute(proxyHost);
        String realproxyusername = environmentSubstitute(proxyUsername);
        String realproxypassword = environmentSubstitute(proxyPassword);
        int realproxyport = Const.toInt(environmentSubstitute(proxyPort), 0);
        String realkeyFilename = environmentSubstitute(keyFilename);
        String realkeyPass = environmentSubstitute(keyFilePass);

        if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.Start")); //$NON-NLS-1$

        if(copyprevious && rows.size()==0)
        {
            // Nothing to do: file names were expected from the previous entry but none arrived.
            if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.ArgsFromPreviousNothing"));
            result.setResult(true);
            return result;
        }

        try
        {
            // Get all the files in the current directory...
            String[] filelist = null;
            if(protocol.equals("FTP"))
            {
                // establish the connection
                FTPConnect(log, realservername, realUsername, realPassword, realserverport, realFtpDirectory,
                           realproxyhost, realproxyusername, realproxypassword, realproxyport, timeout);

                filelist = ftpclient.dir();

                // Some FTP servers return a message saying no files found as a string in the filenlist
                // e.g. Solaris 8
                // CHECK THIS !!!
                if (filelist.length == 1)
                {
                    String translatedWildcard = environmentSubstitute(wildcard);
                    if (!Const.isEmpty(translatedWildcard))
                    {
                        if (filelist[0].startsWith(translatedWildcard))
                        {
                            throw new FTPException(filelist[0]);
                        }
                    }
                }
            }
            else if(protocol.equals("SFTP"))
            {
                // establish the secure connection
                SFTPConnect(log, realservername, realUsername, realserverport, realPassword, realFtpDirectory);

                // Get all the files in the current directory...
                filelist = sftpclient.dir();
            }
            else if(protocol.equals("SSH"))
            {
                // establish the secure connection
                SSHConnect(log, realservername, realserverpassword, realserverport,
                           realUsername, realPassword,
                           realproxyhost, realproxyusername, realproxypassword, realproxyport,
                           realkeyFilename, realkeyPass);

                String sourceFolder = ".";
                if (realFtpDirectory != null)
                    sourceFolder = realFtpDirectory + "/";
                else
                    sourceFolder = "./";

                // NOTE: Source of the unchecked warning suppression for the declaration of this method.
                Vector<SFTPv3DirectoryEntry> vfilelist = sshclient.ls(sourceFolder);
                int i = 0;
                if(vfilelist != null)
                {
                    filelist = new String[vfilelist.size()];
                    Iterator<SFTPv3DirectoryEntry> iterator = vfilelist.iterator();
                    while (iterator.hasNext())
                    {
                        SFTPv3DirectoryEntry dirEntry = iterator.next();
                        if (dirEntry != null &&
                            !dirEntry.filename.equals(".") && !dirEntry.filename.equals("..") &&
                            !isDirectory(sshclient, sourceFolder + dirEntry.filename))
                        {
                            filelist[i++] = dirEntry.filename;
                        }
                    }
                    // Trim the trailing null slots left by skipped "."/".."/directory
                    // entries; they would otherwise NPE in the matching loop below.
                    String[] trimmed = new String[i];
                    System.arraycopy(filelist, 0, trimmed, 0, i);
                    filelist = trimmed;
                }
            }

            // Guard against a null listing BEFORE dereferencing it for logging.
            int found = filelist == null ? 0 : filelist.length;
            if(log.isDetailed()) log.logDetailed(toString(), "JobEntryFTPDelete.FoundNFiles", "" + found);

            if(found == 0)
            {
                result.setResult(true);
                return result;
            }

            Pattern pattern = null;
            if (copyprevious)
            {
                // Copy the input row to the (command line) arguments
                for (int iteration = 0; iteration < rows.size(); iteration++)
                {
                    resultRow = rows.get(iteration);

                    // Get file names
                    String file_previous = resultRow.getString(0, null);
                    if(!Const.isEmpty(file_previous))
                    {
                        list_previous_files.add(file_previous);
                    }
                }
            }
            else
            {
                if(!Const.isEmpty(wildcard))
                {
                    String realWildcard = environmentSubstitute(wildcard);
                    pattern = Pattern.compile(realWildcard);
                }
            }

            if(!getSuccessCondition().equals(SUCCESS_IF_ALL_FILES_DOWNLOADED))
                limitFiles = Const.toInt(environmentSubstitute(getLimitSuccess()), 10);

            // Get the files in the list...
            for (int i = 0; i < filelist.length && !parentJob.isStopped(); i++)
            {
                if(successConditionBroken)
                    throw new Exception(Messages.getString("JobEntryFTPDelete.SuccesConditionBroken"));

                boolean getIt = false;

                if(log.isDebug()) log.logDebug(toString(), Messages.getString("JobEntryFTPDelete.AnalysingFile", filelist[i]));

                try
                {
                    // First see if the file matches the regular expression!
                    // NOTE(review): with no wildcard and copyprevious=false, getIt stays
                    // false and nothing is deleted — confirm this is the intended contract.
                    if(copyprevious)
                    {
                        if(list_previous_files.contains(filelist[i])) getIt = true;
                    }
                    else
                    {
                        if (pattern != null)
                        {
                            Matcher matcher = pattern.matcher(filelist[i]);
                            getIt = matcher.matches();
                        }
                    }
                    if (getIt)
                    {
                        // Delete file
                        if(protocol.equals("FTP"))
                        {
                            ftpclient.delete(filelist[i]);
                        }
                        else if(protocol.equals("SFTP"))
                        {
                            sftpclient.delete(filelist[i]);
                        }
                        else if(protocol.equals("SSH"))
                        {
                            sshclient.rm(filelist[i]);
                        }
                        if(log.isDetailed()) log.logDetailed(toString(), "JobEntryFTPDelete.RemotfileDeleted", filelist[i]);
                        updateDeletedFiles();
                    }
                }
                catch (Exception e)
                {
                    // Update errors number
                    updateErrors();
                    log.logError(toString(), Messages.getString("JobFTP.UnexpectedError", e.getMessage()));

                    if(successConditionBroken)
                        throw new Exception(Messages.getString("JobEntryFTPDelete.SuccesConditionBroken"));
                }
            } // end for
        }
        catch(Exception e)
        {
            updateErrors();
            log.logError(toString(), Messages.getString("JobEntryFTPDelete.ErrorGetting", e.getMessage())); //$NON-NLS-1$
            log.logError(toString(), Const.getStackTracker(e));
        }
        finally
        {
            // Always release whichever client was opened, even on early return.
            if (ftpclient != null && ftpclient.connected())
            {
                try
                {
                    ftpclient.quit();
                    ftpclient = null;
                }
                catch(Exception e)
                {
                    log.logError(toString(), Messages.getString("JobEntryFTPDelete.ErrorQuitting", e.getMessage())); //$NON-NLS-1$
                }
            }
            if (sftpclient != null)
            {
                try
                {
                    sftpclient.disconnect();
                    sftpclient = null;
                }
                catch(Exception e)
                {
                    log.logError(toString(), Messages.getString("JobEntryFTPDelete.ErrorQuitting", e.getMessage())); //$NON-NLS-1$
                }
            }
            if (sshclient != null)
            {
                try
                {
                    sshclient.close();
                    sshclient = null;
                }
                catch(Exception e)
                {
                    log.logError(toString(), Messages.getString("JobEntryFTPDelete.ErrorQuitting", e.getMessage())); //$NON-NLS-1$
                }
            }
        }

        result.setResult(!successConditionBroken);
        result.setNrFilesRetrieved(NrfilesDeleted);
        result.setNrErrors(NrErrors);

        return result;
    }

    /**
     * Checks if file is a directory
     *
     * @param sftpClient the open SFTPv3 client
     * @param filename   the remote path to test
     * @return true, if filename is a directory; false when it is a plain file
     *         OR when stat() fails (errors are deliberately treated as "not a directory")
     */
    public boolean isDirectory(SFTPv3Client sftpClient, String filename)
    {
        try
        {
            return sftpClient.stat(filename).isDirectory();
        }
        catch(Exception ignored)
        {
            // Best-effort: an unreachable/unstat-able entry is treated as a file.
        }
        return false;
    }

    /**
     * Opens the SSH (SFTPv3) connection, optionally through an HTTP proxy,
     * authenticating with either a public key file or a password, and assigns
     * the resulting client to {@link #sshclient}.
     *
     * @throws Exception when the connection or the authentication fails
     */
    private void SSHConnect(LogWriter log, String realservername, String realserverpassword, int realserverport,
                            String realUsername, String realPassword,
                            String realproxyhost, String realproxyusername, String realproxypassword, int realproxyport,
                            String realkeyFilename, String realkeyPass) throws Exception
    {
        /* Create a connection instance */
        Connection conn = new Connection(realservername, realserverport);

        /* We want to connect through a HTTP proxy */
        if(useproxy)
        {
            conn.setProxyData(new HTTPProxyData(realproxyhost, realproxyport));

            /* Now connect */
            // if the proxy requires basic authentication:
            if(!Const.isEmpty(realproxyusername) || !Const.isEmpty(realproxypassword))
            {
                conn.setProxyData(new HTTPProxyData(realproxyhost, realproxyport, realproxyusername, realproxypassword));
            }
        }

        if(timeout > 0)
        {
            // Use timeout (field is in seconds, connect() expects milliseconds)
            conn.connect(null, 0, timeout * 1000);
        }
        else
        {
            // Cache Host Key
            conn.connect();
        }

        // Authenticate
        boolean isAuthenticated = false;
        if(publicpublickey)
        {
            // FIX: use the variable-substituted key file name, not the raw field.
            isAuthenticated = conn.authenticateWithPublicKey(realUsername, new File(realkeyFilename), realkeyPass);
        }
        else
        {
            isAuthenticated = conn.authenticateWithPassword(realUsername, realserverpassword);
        }

        if(!isAuthenticated) throw new Exception("Can not connect to " + realservername);

        sshclient = new SFTPv3Client(conn);
    }

    /**
     * Opens the SFTP connection, logs in and changes to the spool directory.
     * The client is assigned to {@link #sftpclient}.
     *
     * @throws Exception when the connection, login or chdir fails
     */
    private void SFTPConnect(LogWriter log, String realservername, String realusername, int realport,
                             String realpassword, String realFTPDirectory) throws Exception
    {
        // Create sftp client to host ...
        sftpclient = new SFTPClient(InetAddress.getByName(realservername), realport, realusername);
        //if(log.isDetailed()) log.logDetailed(toString(), "Opened SFTP connection to server ["+realServerName+"] on port ["+realServerPort+"] with username ["+realUsername+"]");

        // login to ftp host ...
        sftpclient.login(realpassword);

        // move to spool dir ...
        if (!Const.isEmpty(realFTPDirectory))
        {
            sftpclient.chdir(realFTPDirectory);
            if(log.isDetailed()) log.logDetailed(toString(), "Changed to directory ["+realFTPDirectory+"]");
        }
    }

    /**
     * Opens the plain-FTP connection (optionally through a proxy, using the
     * classic "user@host" proxy login convention), logs in and changes to the
     * working directory. The client is assigned to {@link #ftpclient}.
     *
     * @throws Exception when the connection, login or chdir fails
     */
    private void FTPConnect(LogWriter log, String realServername, String realusername, String realpassword,
                            int realport, String realFtpDirectory,
                            String realProxyhost, String realproxyusername, String realproxypassword, int realproxyport,
                            int realtimeout) throws Exception
    {
        // Create ftp client to host:port ...
        ftpclient = new FTPClient();
        // NOTE(review): this first resolution is redundant when a proxy is used
        // (the address is overwritten below), but it is kept so that an
        // unresolvable server name fails in the same way as before.
        ftpclient.setRemoteAddr(InetAddress.getByName(realServername));
        if(realport != 0) ftpclient.setRemotePort(realport);

        if (!Const.isEmpty(realProxyhost))
        {
            ftpclient.setRemoteAddr(InetAddress.getByName(realProxyhost));
            if ( log.isDetailed() )
                log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.OpenedProxyConnectionOn", realProxyhost));

            // FIXME: Proper default port for proxy
            if (realproxyport != 0)
            {
                ftpclient.setRemotePort(realproxyport);
            }
        }
        else
        {
            ftpclient.setRemoteAddr(InetAddress.getByName(realServername));

            if ( log.isDetailed() )
                log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.OpenedConnectionTo", realServername));
        }

        // set activeConnection connectmode ...
        if (activeConnection)
        {
            ftpclient.setConnectMode(FTPConnectMode.ACTIVE);
            if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.SetActive")); //$NON-NLS-1$
        }
        else
        {
            ftpclient.setConnectMode(FTPConnectMode.PASV);
            if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.SetPassive")); //$NON-NLS-1$
        }

        // Set the timeout
        ftpclient.setTimeout(realtimeout);
        if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.SetTimeout", String.valueOf(realtimeout))); //$NON-NLS-1$

        // login to ftp host ...
        ftpclient.connect();

        // When a proxy is used, user name and password carry the target host /
        // proxy credentials per the "user@host" proxy convention.
        String realUsername = realusername +
                              (!Const.isEmpty(realProxyhost) ? "@" + realServername : "") +
                              (!Const.isEmpty(realproxyusername) ? " " + realproxyusername : "");

        String realPassword = realpassword +
                              (!Const.isEmpty(realproxypassword) ? " " + realproxypassword : "");

        ftpclient.login(realUsername, realPassword);
        // Remove password from logging, you don't know where it ends up.
        if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.LoggedIn", realUsername)); //$NON-NLS-1$

        // move to spool dir ...
        if (!Const.isEmpty(realFtpDirectory))
        {
            ftpclient.chdir(realFtpDirectory);
            if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryFTPDelete.ChangedDir", realFtpDirectory)); //$NON-NLS-1$
        }
    }

    /** Bumps the error counter and checks whether the success condition just broke. */
    private void updateErrors()
    {
        NrErrors++;
        if(!getStatus())
        {
            // Success condition was broken
            successConditionBroken = true;
        }
    }

    /** Bumps the deleted-files counter. */
    private void updateDeletedFiles()
    {
        NrfilesDeleted++;
    }

    public boolean evaluates()
    {
        return true;
    }

    /**
     * @return the activeConnection
     */
    public boolean isActiveConnection()
    {
        return activeConnection;
    }

    /**
     * @param activeConnection the activeConnection to set
     */
    public void setActiveConnection(boolean activeConnection)
    {
        this.activeConnection = activeConnection;
    }

    public void check(List<CheckResultInterface> remarks, JobMeta jobMeta)
    {
        andValidator().validate(this, "serverName", remarks, putValidators(notBlankValidator())); //$NON-NLS-1$
        // NOTE(review): this class has no "targetDirectory" property — this check
        // looks copied from another job entry; verify against the validator framework.
        andValidator().validate(this, "targetDirectory", remarks, putValidators(notBlankValidator(), fileExistsValidator())); //$NON-NLS-1$
        andValidator().validate(this, "userName", remarks, putValidators(notBlankValidator())); //$NON-NLS-1$
        andValidator().validate(this, "password", remarks, putValidators(notNullValidator())); //$NON-NLS-1$
    }

    public List<ResourceReference> getResourceDependencies(JobMeta jobMeta)
    {
        List<ResourceReference> references = super.getResourceDependencies(jobMeta);
        if (!Const.isEmpty(serverName))
        {
            String realServername = jobMeta.environmentSubstitute(serverName);
            ResourceReference reference = new ResourceReference(this);
            reference.getEntries().add(new ResourceEntry(realServername, ResourceType.SERVER));
            references.add(reference);
        }
        return references;
    }
}
package sorting;

import java.util.Random;

import static sorting.BubbleSort.bubbleSort;
import static sorting.CountingSort.countingSort;

/**
 * Small driver that times the sorting implementations on a randomly
 * filled int array and reports the elapsed time.
 */
public class SortingApp {

    public static void main(String[] args) {
        // 10 mln max from IDE, 100 mln with hip java -Xmx2048m
        final int size = 10000000;
        final int[] data = getArrayWithRandomIntegers(size);

//        long bubbleTime = bubbleSort(data);
//        System.out.println("bubbleTime=" + bubbleTime);

        final long countingTime = countingSort(data);
        System.out.println("countingTime=" + countingTime);
    }

    /**
     * Builds an array of {@code size} pseudo-random ints, each drawn
     * uniformly from [0, size).
     */
    private static int[] getArrayWithRandomIntegers(int size) {
        final Random rng = new Random();
        final int[] result = new int[size];
        for (int i = 0; i < size; i++) {
            result[i] = rng.nextInt(size);
        }
        return result;
    }

    /**
     * Prints a warning (once) if {@code aux} is not in non-decreasing order;
     * silent when the array is sorted.
     */
    static void notifyIfNotSorted(int[] aux) {
        int i = 0;
        while (i < aux.length - 1) {
            if (aux[i] > aux[i + 1]) {
                System.out.println("Not sorted");
                return;
            }
            i++;
        }
    }
}
package org.pentaho.di.trans.steps.textfileinput; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.zip.GZIPInputStream; import java.util.zip.ZipInputStream; import org.apache.commons.vfs.FileObject; import org.pentaho.di.core.Const; import org.pentaho.di.core.ResultFile; import org.pentaho.di.core.exception.KettleException; import org.pentaho.di.core.exception.KettleFileException; import org.pentaho.di.core.fileinput.FileInputList; import org.pentaho.di.core.logging.LogWriter; import org.pentaho.di.core.playlist.FilePlayListAll; import org.pentaho.di.core.playlist.FilePlayListReplay; import org.pentaho.di.core.row.RowDataUtil; import org.pentaho.di.core.row.RowMeta; import org.pentaho.di.core.row.RowMetaInterface; import org.pentaho.di.core.row.ValueMetaInterface; import org.pentaho.di.core.vfs.KettleVFS; import org.pentaho.di.trans.Trans; import org.pentaho.di.trans.TransMeta; import org.pentaho.di.trans.step.BaseStep; import org.pentaho.di.trans.step.StepDataInterface; import org.pentaho.di.trans.step.StepInterface; import org.pentaho.di.trans.step.StepMeta; import org.pentaho.di.trans.step.StepMetaInterface; import org.pentaho.di.trans.step.errorhandling.AbstractFileErrorHandler; import org.pentaho.di.trans.step.errorhandling.CompositeFileErrorHandler; import org.pentaho.di.trans.step.errorhandling.FileErrorHandler; import org.pentaho.di.trans.step.errorhandling.FileErrorHandlerContentLineNumber; import org.pentaho.di.trans.step.errorhandling.FileErrorHandlerMissingFiles; /** * Read all sorts of text files, convert them to rows and writes these to one or * more output streams. 
* * @author Matt * @since 4-apr-2003 */ public class TextFileInput extends BaseStep implements StepInterface { private static final int BUFFER_SIZE_INPUT_STREAM = 500; private static LogWriter log = LogWriter.getInstance(); private TextFileInputMeta meta; private TextFileInputData data; private long lineNumberInFile; private TransMeta transmeta; public TextFileInput(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans) { super(stepMeta, stepDataInterface, copyNr, transMeta, trans); this.transmeta = transMeta; } public static final String getLine(LogWriter log, InputStreamReader reader, int formatNr, StringBuffer line) throws KettleFileException { int c = 0; line.setLength(0); try { switch(formatNr) { case TextFileInputMeta.FILE_FORMAT_DOS: { while (c >= 0) { c = reader.read(); if (c == '\r' || c == '\n' ) { c = reader.read(); // skip \n and \r if( c != '\r' && c != '\n' ) { // make sure its really a linefeed or cariage return // raise an error this is not a DOS file // so we have pulled a character from the next line throw new KettleFileException(Messages.getString("TextFileInput.Log.SingleLineFound")); } return line.toString(); } if (c >= 0) line.append((char) c); } } break; case TextFileInputMeta.FILE_FORMAT_UNIX: { while (c >= 0) { c = reader.read(); if (c == '\n' || c == '\r') { return line.toString(); } if (c >= 0) line.append((char) c); } } break; case TextFileInputMeta.FILE_FORMAT_MIXED: // in mixed mode we suppose the LF is the last char and CR is ignored // not for MAC OS 9 but works for Mac OS X. 
Mac OS 9 can use UNIX-Format { while (c >= 0) { c = reader.read(); if (c == '\n') { return line.toString(); } else if (c != '\r') { if (c >= 0) line.append((char) c); } } } break; } } catch(KettleFileException e) { throw e; } catch (Exception e) { if (line.length() == 0) { throw new KettleFileException(Messages.getString("TextFileInput.Log.Error.ExceptionReadingLine",e.toString()), e); } return line.toString(); } if (line.length() > 0) return line.toString(); return null; } public static final String[] guessStringsFromLine(String line, TextFileInputMeta inf) throws KettleException { List<String> strings = new ArrayList<String>(); int fieldnr; String pol; // piece of line try { if (line == null) return null; if (inf.getFileType().equalsIgnoreCase("CSV")) { // Split string in pieces, only for CSV! fieldnr = 0; int pos = 0; int length = line.length(); boolean dencl = false; int len_encl = (inf.getEnclosure() == null ? 0 : inf.getEnclosure().length()); int len_esc = (inf.getEscapeCharacter() == null ? 0 : inf.getEscapeCharacter().length()); while (pos < length) { int from = pos; int next; boolean encl_found; boolean contains_escaped_enclosures = false; boolean contains_escaped_separators = false; // Is the field beginning with an enclosure? // "aa;aa";123;"aaa-aaa";000;... if (len_encl > 0 && line.substring(from, from + len_encl).equalsIgnoreCase(inf.getEnclosure())) { if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.ConvertLineToRow",line.substring(from, from + len_encl))); encl_found = true; int p = from + len_encl; boolean is_enclosure = len_encl > 0 && p + len_encl < length && line.substring(p, p + len_encl).equalsIgnoreCase(inf.getEnclosure()); boolean is_escape = len_esc > 0 && p + len_esc < length && line.substring(p, p + len_esc).equalsIgnoreCase(inf.getEscapeCharacter()); boolean enclosure_after = false; // Is it really an enclosure? 
See if it's not repeated twice or escaped! if ((is_enclosure || is_escape) && p < length - 1) { String strnext = line.substring(p + len_encl, p + 2 * len_encl); if (strnext.equalsIgnoreCase(inf.getEnclosure())) { p++; enclosure_after = true; dencl = true; // Remember to replace them later on! if (is_escape) contains_escaped_enclosures = true; } } // Look for a closing enclosure! while ((!is_enclosure || enclosure_after) && p < line.length()) { p++; enclosure_after = false; is_enclosure = len_encl > 0 && p + len_encl < length && line.substring(p, p + len_encl).equals(inf.getEnclosure()); is_escape = len_esc > 0 && p + len_esc < length && line.substring(p, p + len_esc).equals(inf.getEscapeCharacter()); // Is it really an enclosure? See if it's not repeated twice or escaped! if ((is_enclosure || is_escape) && p < length - 1) { String strnext = line.substring(p + len_encl, p + 2 * len_encl); if (strnext.equals(inf.getEnclosure())) { p++; enclosure_after = true; dencl = true; // Remember to replace them later on! if (is_escape) contains_escaped_enclosures = true; // remember } } } if (p >= length) next = p; else next = p + len_encl; if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.EndOfEnclosure", ""+ p)); } else { encl_found = false; boolean found = false; int startpoint = from; int tries = 1; do { next = line.indexOf(inf.getSeparator(), startpoint); // See if this position is preceded by an escape character. if (len_esc > 0 && next - len_esc > 0) { String before = line.substring(next - len_esc, next); if (inf.getEscapeCharacter().equals(before)) { // take the next separator, this one is escaped... 
startpoint = next + 1; tries++; contains_escaped_separators = true; } else { found = true; } } else { found = true; } } while (!found && next >= 0); } if (next == -1) next = length; if (encl_found) { pol = line.substring(from + len_encl, next - len_encl); if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.EnclosureFieldFound", ""+ pol)); } else { pol = line.substring(from, next); if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.NormalFieldFound",""+ pol)); } if (dencl) { StringBuffer sbpol = new StringBuffer(pol); int idx = sbpol.indexOf(inf.getEnclosure() + inf.getEnclosure()); while (idx >= 0) { sbpol.delete(idx, idx + inf.getEnclosure().length()); idx = sbpol.indexOf(inf.getEnclosure() + inf.getEnclosure()); } pol = sbpol.toString(); } // replace the escaped enclosures with enclosures... if (contains_escaped_enclosures) { String replace = inf.getEscapeCharacter() + inf.getEnclosure(); String replaceWith = inf.getEnclosure(); pol = Const.replace(pol, replace, replaceWith); } //replace the escaped separators with separators... if (contains_escaped_separators) { String replace = inf.getEscapeCharacter() + inf.getSeparator(); String replaceWith = inf.getSeparator(); pol = Const.replace(pol, replace, replaceWith); } // Now add pol to the strings found! strings.add(pol); pos = next + inf.getSeparator().length(); fieldnr++; } if ( pos == length ) { if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.EndOfEmptyLineFound")); strings.add(""); fieldnr++; } } else { // Fixed file format: Simply get the strings at the required positions... 
for (int i = 0; i < inf.getInputFields().length; i++) { TextFileInputField field = inf.getInputFields()[i]; int length = line.length(); if (field.getPosition() + field.getLength() <= length) { strings.add( line.substring(field.getPosition(), field.getPosition() + field.getLength()) ); } else { if (field.getPosition() < length) { strings.add( line.substring(field.getPosition()) ); } else { strings.add( "" ); } } } } } catch (Exception e) { throw new KettleException(Messages.getString("TextFileInput.Log.Error.ErrorConvertingLine",e.toString()), e); } return strings.toArray(new String[strings.size()]); } public static final String[] convertLineToStrings(String line, InputFileMetaInterface inf) throws KettleException { String[] strings = new String[inf.getInputFields().length]; int fieldnr; String pol; // piece of line try { if (line == null) return null; if (inf.getFileType().equalsIgnoreCase("CSV")) { // Split string in pieces, only for CSV! fieldnr = 0; int pos = 0; int length = line.length(); boolean dencl = false; int len_encl = (inf.getEnclosure() == null ? 0 : inf.getEnclosure().length()); int len_esc = (inf.getEscapeCharacter() == null ? 0 : inf.getEscapeCharacter().length()); while (pos < length) { int from = pos; int next; boolean encl_found; boolean contains_escaped_enclosures = false; boolean contains_escaped_separators = false; // Is the field beginning with an enclosure? // "aa;aa";123;"aaa-aaa";000;... 
if (len_encl > 0 && line.substring(from, from + len_encl).equalsIgnoreCase(inf.getEnclosure())) { if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.Encloruse", line.substring(from, from + len_encl))); encl_found = true; int p = from + len_encl; boolean is_enclosure = len_encl > 0 && p + len_encl < length && line.substring(p, p + len_encl).equalsIgnoreCase(inf.getEnclosure()); boolean is_escape = len_esc > 0 && p + len_esc < length && line.substring(p, p + len_esc).equalsIgnoreCase(inf.getEscapeCharacter()); boolean enclosure_after = false; // Is it really an enclosure? See if it's not repeated twice or escaped! if ((is_enclosure || is_escape) && p < length - 1) { String strnext = line.substring(p + len_encl, p + 2 * len_encl); if (strnext.equalsIgnoreCase(inf.getEnclosure())) { p++; enclosure_after = true; dencl = true; // Remember to replace them later on! if (is_escape) contains_escaped_enclosures = true; } } // Look for a closing enclosure! while ((!is_enclosure || enclosure_after) && p < line.length()) { p++; enclosure_after = false; is_enclosure = len_encl > 0 && p + len_encl < length && line.substring(p, p + len_encl).equals(inf.getEnclosure()); is_escape = len_esc > 0 && p + len_esc < length && line.substring(p, p + len_esc).equals(inf.getEscapeCharacter()); // Is it really an enclosure? See if it's not repeated twice or escaped! if ((is_enclosure || is_escape) && p < length - 1) { String strnext = line.substring(p + len_encl, p + 2 * len_encl); if (strnext.equals(inf.getEnclosure())) { p++; enclosure_after = true; dencl = true; // Remember to replace them later on! 
if (is_escape) contains_escaped_enclosures = true; // remember } } } if (p >= length) next = p; else next = p + len_encl; if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.EndOfEnclosure",""+ p)); } else { encl_found = false; boolean found = false; int startpoint = from; int tries = 1; do { next = line.indexOf(inf.getSeparator(), startpoint); // See if this position is preceded by an escape character. if (len_esc > 0 && next - len_esc > 0) { String before = line.substring(next - len_esc, next); if (inf.getEscapeCharacter().equals(before)) { // take the next separator, this one is escaped... startpoint = next + 1; tries++; contains_escaped_separators = true; } else { found = true; } } else { found = true; } } while (!found && next >= 0); } if (next == -1) next = length; if (encl_found && ((from + len_encl) <= (next - len_encl))) { pol = line.substring(from + len_encl, next - len_encl); if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.EnclosureFieldFound",""+ pol )); } else { pol = line.substring(from, next); if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.NormalFieldFound",""+ pol)); } if (dencl && Const.isEmpty(inf.getEscapeCharacter())) { StringBuffer sbpol = new StringBuffer(pol); int idx = sbpol.indexOf(inf.getEnclosure() + inf.getEnclosure()); while (idx >= 0) { sbpol.delete(idx, idx + inf.getEnclosure().length()); idx = sbpol.indexOf(inf.getEnclosure() + inf.getEnclosure()); } pol = sbpol.toString(); } // replace the escaped enclosures with enclosures... if (contains_escaped_enclosures) { String replace = inf.getEscapeCharacter() + inf.getEnclosure(); String replaceWith = inf.getEnclosure(); pol = Const.replace(pol, replace, replaceWith); } //replace the escaped separators with separators... 
if (contains_escaped_separators) { String replace = inf.getEscapeCharacter() + inf.getSeparator(); String replaceWith = inf.getSeparator(); pol = Const.replace(pol, replace, replaceWith); } // Now add pol to the strings found! try { strings[fieldnr]=pol; } catch(ArrayIndexOutOfBoundsException e) { // In case we didn't allocate enough space. // This happens when you have less header values specified than there are actual values in the rows. // As this is "the exception" we catch and resize here. String[] newStrings = new String[strings.length]; for (int x=0;x<strings.length;x++) newStrings[x] = strings[x]; strings = newStrings; } pos = next + inf.getSeparator().length(); fieldnr++; } if ( pos == length) { if (log.isRowLevel()) log.logRowlevel(Messages.getString("TextFileInput.Log.ConvertLineToRowTitle"), Messages.getString("TextFileInput.Log.EndOfEmptyLineFound")); if (fieldnr<strings.length) strings[fieldnr]= Const.EMPTY_STRING; fieldnr++; } } else { // Fixed file format: Simply get the strings at the required positions... 
for (int i = 0; i < inf.getInputFields().length; i++) { TextFileInputField field = inf.getInputFields()[i]; int length = line.length(); if (field.getPosition() + field.getLength() <= length) { strings[i] = line.substring(field.getPosition(), field.getPosition() + field.getLength()); } else { if (field.getPosition() < length) { strings[i] = line.substring(field.getPosition()); } else { strings[i] = ""; } } } } } catch (Exception e) { throw new KettleException(Messages.getString("TextFileInput.Log.Error.ErrorConvertingLine",e.toString()), e); } return strings; } public static final Object[] convertLineToRow(TextFileLine textFileLine, InputFileMetaInterface info, RowMetaInterface outputRowMeta, RowMetaInterface convertRowMeta, String fname, long rowNr, FileErrorHandler errorHandler) throws KettleException { if (textFileLine == null || textFileLine.line == null /*|| textFileLine.line.length() == 0*/) return null; Object[] r = RowDataUtil.allocateRowData(outputRowMeta.size()); // over-allocate a bit in the row producing steps... int nrfields = info.getInputFields().length; int fieldnr; Long errorCount = null; if (info.isErrorIgnored() && info.getErrorCountField() != null && info.getErrorCountField().length() > 0) { errorCount = new Long(0L); } String errorFields = null; if (info.isErrorIgnored() && info.getErrorFieldsField() != null && info.getErrorFieldsField().length() > 0) { errorFields = ""; } String errorText = null; if (info.isErrorIgnored() && info.getErrorTextField() != null && info.getErrorTextField().length() > 0) { errorText = ""; } try { // System.out.println("Convertings line to string ["+line+"]"); String[] strings = convertLineToStrings(textFileLine.line, info); for (fieldnr = 0; fieldnr < nrfields; fieldnr++) { TextFileInputField f = info.getInputFields()[fieldnr]; ValueMetaInterface valueMeta = outputRowMeta.getValueMeta(fieldnr); ValueMetaInterface convertMeta = convertRowMeta.getValueMeta(fieldnr); Object value; String nullif = fieldnr < nrfields ? 
f.getNullString() : ""; String ifnull = fieldnr < nrfields ? f.getIfNullValue() : ""; int trim_type = fieldnr < nrfields ? f.getTrimType() : ValueMetaInterface.TRIM_TYPE_NONE; if (fieldnr < strings.length) { String pol = strings[fieldnr]; try { value = valueMeta.convertDataFromString(pol, convertMeta, nullif, ifnull, trim_type); } catch (Exception e) { // OK, give some feedback! String message = Messages.getString("TextFileInput.Log.CoundNotParseField",valueMeta.toStringMeta(),"" + pol,valueMeta.getConversionMask(),""+ rowNr ); if (info.isErrorIgnored()) { LogWriter.getInstance().logBasic(fname, Messages.getString("TextFileInput.Log.Warning")+": " + message+" : " + e.getMessage()); value = null; if (errorCount != null) { errorCount=new Long( errorCount.longValue()+1L ); } if (errorFields != null) { StringBuffer sb = new StringBuffer(errorFields); if (sb.length() > 0) sb.append("\t"); // TODO document this change sb.append(valueMeta.getName()); errorFields = sb.toString(); } if (errorText != null) { StringBuffer sb = new StringBuffer(errorText); if (sb.length() > 0) sb.append(Const.CR); sb.append(message); errorText=sb.toString(); } if (errorHandler != null) { errorHandler.handleLineError(textFileLine.lineNumber, AbstractFileErrorHandler.NO_PARTS); } if (info.isErrorLineSkipped()) r=null; // compensates for stmt: r.setIgnore(); } else { throw new KettleException(message, e); } } } else { // No data found: TRAILING NULLCOLS: add null value... value = null; } // Now add value to the row (if we're not skipping the row) if ( r != null ) { r[fieldnr] = value; } } // Support for trailing nullcols! // Should be OK at allocation time, but it doesn't hurt :-) if (fieldnr < nrfields) { for (int i = fieldnr; i < info.getInputFields().length; i++) { r[i] = null; } } // Add the error handling fields... 
int index = nrfields; if (errorCount != null) { r[index]=errorCount; index++; } if (errorFields != null) { r[index]=errorFields; index++; } if (errorText != null) { r[index]=errorText; index++; } // Possibly add a filename... if (info.includeFilename()) { r[index]=fname; index++; } // Possibly add a row number... if (info.includeRowNumber()) { r[index] = new Long(rowNr); index++; } } catch (Exception e) { throw new KettleException(Messages.getString("TextFileInput.Log.Error.ErrorConvertingLineText"), e); } return r; } public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException { Object[] r = null; boolean retval = true; boolean putrow = false; if (first) // we just got started { first = false; // Create the output row meta-data data.outputRowMeta = new RowMeta(); meta.getFields(data.outputRowMeta, getStepname(), null, null, this); // get the metadata populated. Simple and easy. // Create convert meta-data objects that will contain Date & Number formatters data.convertRowMeta = data.outputRowMeta.clone(); for (int i=0;i<data.convertRowMeta.size();i++) data.convertRowMeta.getValueMeta(i).setType(ValueMetaInterface.TYPE_STRING); if (meta.isAcceptingFilenames()) { // Read the files from the specified input stream... 
data.files.getFiles().clear(); int idx = -1; data.rowSet = findInputRowSet(meta.getAcceptingStepName()); Object[] fileRow = getRowFrom(data.rowSet); while (fileRow!=null) { if (idx<0) { idx = data.rowSet.getRowMeta().indexOfValue(meta.getAcceptingField()); if (idx<0) { logError(Messages.getString("TextFileInput.Log.Error.UnableToFindFilenameField", meta.getAcceptingField())); setErrors(1); stopAll(); return false; } } String fileValue = data.rowSet.getRowMeta().getString(fileRow, idx); try { FileObject fileObject = KettleVFS.getFileObject(fileValue); data.files.addFile(fileObject); } catch(IOException e) { logError(Messages.getString("TextFileInput.Log.Error.UnableToCreateFileObject", fileValue)); } // Grab another row fileRow = getRowFrom(data.rowSet); } if (data.files.nrOfFiles()==0) { if(log.isBasic()) logBasic(Messages.getString("TextFileInput.Log.Error.NoFilesSpecified")); setOutputDone(); return false; } } handleMissingFiles(); // Open the first file & read the required rows in the buffer, stop // if it fails... if (!openNextFile()) { closeLastFile(); setOutputDone(); return false; } // Count the number of repeat fields... for (int i = 0; i < meta.getInputFields().length; i++) { if (meta.getInputFields()[i].isRepeated()) data.nr_repeats++; } } else { if (!data.doneReading) { int repeats = 1; if (meta.isLineWrapped()) repeats = meta.getNrWraps() > 0 ? meta.getNrWraps() : repeats; // Read a number of lines... for (int i = 0; i < repeats && !data.doneReading; i++) { String line = getLine(log, data.isr, data.fileFormatType, data.lineStringBuffer); // Get one line of data; if (line != null) { // Filter row? 
boolean isFilterLastLine = false; boolean filterOK = checkFilterRow(line, isFilterLastLine); if (filterOK) { // logRowlevel("LINE READ: "+line); data.lineBuffer.add(new TextFileLine(line, lineNumberInFile, data.file)); } else { if (isFilterLastLine) { data.doneReading = true; } repeats++; // grab another line, this one got filtered } } else { data.doneReading = true; } } } } /* If the buffer is empty: open the next file. * (if nothing in there, open the next, etc.) */ while (data.lineBuffer.size() == 0) { if (!openNextFile()) // Open fails: done processing! { closeLastFile(); setOutputDone(); // signal end to receiver(s) return false; } } /* Take the first line available in the buffer & remove the line from the buffer */ TextFileLine textLine = (TextFileLine) data.lineBuffer.get(0); linesInput++; lineNumberInFile++; data.lineBuffer.remove(0); if (meta.isLayoutPaged()) { /* Different rules apply: on each page: a header a number of data lines a footer */ if (!data.doneWithHeader && data.pageLinesRead == 0) // We are reading header lines { if (log.isRowLevel()) logRowlevel("P-HEADER (" + data.headerLinesRead + ") : " + textLine.line); data.headerLinesRead++; if (data.headerLinesRead >= meta.getNrHeaderLines()) { data.doneWithHeader = true; } } else // data lines or footer on a page { if (data.pageLinesRead < meta.getNrLinesPerPage()) { // See if we are dealing with wrapped lines: if (meta.isLineWrapped()) { for (int i = 0; i < meta.getNrWraps(); i++) { String extra = ""; if (data.lineBuffer.size() > 0) { extra = ((TextFileLine) data.lineBuffer.get(0)).line; data.lineBuffer.remove(0); } textLine.line += extra; } } if (log.isRowLevel()) logRowlevel("P-DATA: " + textLine.line); // Read a normal line on a page of data. data.pageLinesRead++; data.lineInFile ++; long useNumber = meta.isRowNumberByFile() ? 
data.lineInFile : linesWritten + 1; r = convertLineToRow(textLine, meta, data.outputRowMeta, data.convertRowMeta, data.filename, useNumber, data.dataErrorLineHandler); if (r != null) putrow = true; // Possible fix for bug PDI-1121 - paged layout header and line count off by 1 // We need to reset these BEFORE the next header line is read, so that it // is treated as a header ... obviously, only if there is no footer, and we are // done reading data. if (!meta.hasFooter() && (data.pageLinesRead == meta.getNrLinesPerPage())) { /* OK, we are done reading the footer lines, start again on 'next page' with the header */ data.doneWithHeader = false; data.headerLinesRead = 0; data.pageLinesRead = 0; data.footerLinesRead = 0; if (log.isRowLevel()) logRowlevel("RESTART PAGE"); } } else // done reading the data lines, skip the footer lines { if (meta.hasFooter() && data.footerLinesRead < meta.getNrFooterLines()) { if (log.isRowLevel()) logRowlevel("P-FOOTER: " + textLine.line); data.footerLinesRead++; } if (!meta.hasFooter() || data.footerLinesRead >= meta.getNrFooterLines()) { /* OK, we are done reading the footer lines, start again on 'next page' with the header */ data.doneWithHeader = false; data.headerLinesRead = 0; data.pageLinesRead = 0; data.footerLinesRead = 0; if (log.isRowLevel()) logRowlevel("RESTART PAGE"); } } } } else // A normal data line, can also be a header or a footer line { if (!data.doneWithHeader) // We are reading header lines { data.headerLinesRead++; if (data.headerLinesRead >= meta.getNrHeaderLines()) { data.doneWithHeader = true; } } else { /* IF we are done reading and we have a footer AND the number of lines in the buffer is smaller then the number of footer lines THEN we can remove the remaining rows from the buffer: they are all footer rows. */ if (data.doneReading && meta.hasFooter() && data.lineBuffer.size() < meta.getNrFooterLines()) { data.lineBuffer.clear(); } else // Not yet a footer line: it's a normal data line. 
{ // See if we are dealing with wrapped lines: if (meta.isLineWrapped()) { for (int i = 0; i < meta.getNrWraps(); i++) { String extra = ""; if (data.lineBuffer.size() > 0) { extra = ((TextFileLine) data.lineBuffer.get(0)).line; data.lineBuffer.remove(0); } textLine.line += extra; } } if (data.filePlayList.isProcessingNeeded(textLine.file, textLine.lineNumber, AbstractFileErrorHandler.NO_PARTS)) { data.lineInFile ++; long useNumber = meta.isRowNumberByFile() ? data.lineInFile : linesWritten + 1; r = convertLineToRow(textLine, meta, data.outputRowMeta, data.convertRowMeta, data.filename, useNumber, data.dataErrorLineHandler); if (r != null) { if (log.isRowLevel()) logRowlevel("Found data row: "+data.outputRowMeta.getString(r)); putrow = true; } } else putrow = false; } } } if (putrow && r != null) { // See if the previous values need to be repeated! if (data.nr_repeats > 0) { if (data.previous_row == null) // First invocation... { data.previous_row = data.outputRowMeta.cloneRow(r); } else { int repnr = 0; for (int i = 0; i < meta.getInputFields().length; i++) { if (meta.getInputFields()[i].isRepeated()) { if (r[i]==null) // if it is empty: take the previous value! { r[i] = data.previous_row[i]; } else // not empty: change the previous_row entry! { data.previous_row[i] = r[i]; } repnr++; } } } } if (log.isRowLevel()) logRowlevel("Putting row: " + data.outputRowMeta.getString(r)); putRow(data.outputRowMeta, r); if ( linesInput >= meta.getRowLimit() && meta.getRowLimit() >0 ) { closeLastFile(); setOutputDone(); // signal end to receiver(s) return false; } } if (checkFeedback(linesInput)) { if(log.isBasic()) logBasic("linenr " + linesInput); } return retval; } /** * Check if the line should be taken. 
* @param line * @param isFilterLastLine (dummy input param, only set when return value is false) * @return true when the line should be taken (when false, isFilterLastLine will be set) */ private boolean checkFilterRow(String line, boolean isFilterLastLine) { boolean filterOK=true; // check for noEmptyLines if (meta.noEmptyLines() && line.length() == 0) { filterOK=false; } else { // check the filters filterOK = data.filterProcessor.doFilters(line); if ( ! filterOK ) { if ( data.filterProcessor.isStopProcessing()) { data.doneReading = true; } } } return filterOK; } private void handleMissingFiles() throws KettleException { List<FileObject> nonExistantFiles = data.files.getNonExistantFiles(); if (nonExistantFiles.size() != 0) { String message = FileInputList.getRequiredFilesDescription(nonExistantFiles); if(log.isBasic()) log.logBasic("Required files", "WARNING: Missing " + message); if (meta.isErrorIgnored()) { for (FileObject fileObject : nonExistantFiles) { data.dataErrorLineHandler.handleNonExistantFile(fileObject); } } else { throw new KettleException("Following required files are missing: " + message); } } List<FileObject> nonAccessibleFiles = data.files.getNonAccessibleFiles(); if (nonAccessibleFiles.size() != 0) { String message = FileInputList.getRequiredFilesDescription(nonAccessibleFiles); if(log.isBasic()) log.logBasic("Required files", "WARNING: Not accessible " + message); if (meta.isErrorIgnored()) { for (FileObject fileObject : nonAccessibleFiles) { data.dataErrorLineHandler.handleNonAccessibleFile(fileObject); } } else { throw new KettleException("Following required files are not accessible: " + message); } } } private boolean closeLastFile() { try { // Close previous file! 
if (data.filename != null) { String sFileCompression = meta.getFileCompression(); if (sFileCompression != null && sFileCompression.equals("Zip")) { data.zi.closeEntry(); data.zi.close(); } else if (sFileCompression != null && sFileCompression.equals("GZip")) { data.gzi.close(); } data.fr.close(); data.isr.close(); data.filename = null; // send it down the next time. if ( data.file != null ) { data.file.close(); data.file = null; } } data.dataErrorLineHandler.close(); } catch (Exception e) { logError("Couldn't close file : " + data.filename + " --> " + e.toString()); stopAll(); setErrors(1); return false; } finally { // This is for bug #5797 : it tries to assure that the file handle // is actually freed/garbarge collected. System.gc(); } return !data.isLastFile; } private boolean openNextFile() { try { lineNumberInFile = 0; if (!closeLastFile()) return false; if (data.files.nrOfFiles() == 0) return false; // Is this the last file? data.isLastFile = (data.filenr == data.files.nrOfFiles() - 1); data.file = data.files.getFile(data.filenr); data.filename = KettleVFS.getFilename( data.file ); data.lineInFile = 0; // Add this files to the result of this transformation. 
if(meta.isAddResultFile()) { ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, data.file, getTransMeta().getName(), toString()); resultFile.setComment("File was read by an Text File input step"); addResultFile(resultFile); } if(log.isBasic()) logBasic("Opening file: " + data.filename); data.fr = KettleVFS.getInputStream(data.file); data.dataErrorLineHandler.handleFile(data.file); String sFileCompression = meta.getFileCompression(); if (sFileCompression != null && sFileCompression.equals("Zip")) { if(log.isBasic()) logBasic("This is a zipped file"); data.zi = new ZipInputStream(data.fr); data.zi.getNextEntry(); if (meta.getEncoding() != null && meta.getEncoding().length() > 0) { data.isr = new InputStreamReader(new BufferedInputStream(data.zi, BUFFER_SIZE_INPUT_STREAM), meta.getEncoding()); } else { data.isr = new InputStreamReader(new BufferedInputStream(data.zi, BUFFER_SIZE_INPUT_STREAM)); } } else if (sFileCompression != null && sFileCompression.equals("GZip")) { if(log.isBasic()) logBasic("This is a gzipped file"); data.gzi = new GZIPInputStream(data.fr); if (meta.getEncoding() != null && meta.getEncoding().length() > 0) { data.isr = new InputStreamReader(new BufferedInputStream(data.gzi, BUFFER_SIZE_INPUT_STREAM), meta.getEncoding()); } else { data.isr = new InputStreamReader(new BufferedInputStream(data.gzi, BUFFER_SIZE_INPUT_STREAM)); } } else { if (meta.getEncoding() != null && meta.getEncoding().length() > 0) { data.isr = new InputStreamReader(new BufferedInputStream(data.fr, BUFFER_SIZE_INPUT_STREAM), meta.getEncoding()); } else { data.isr = new InputStreamReader(new BufferedInputStream(data.fr, BUFFER_SIZE_INPUT_STREAM)); } } // Move file pointer ahead! data.filenr++; // Read the first lines... /* Keep track of the status of the file: are there any lines left to read? 
*/ data.doneReading = false; /* OK, read a number of lines in the buffer: The header rows The nr rows in the page : optional The footer rows */ int bufferSize = 1; bufferSize += meta.hasHeader() ? meta.getNrHeaderLines() : 0; bufferSize += meta.isLayoutPaged() ? meta.getNrLinesPerPage() : 0; bufferSize += meta.hasFooter() ? meta.getNrFooterLines() : 0; // See if we need to skip the document header lines... if (meta.isLayoutPaged()) { for (int i = 0; i < meta.getNrLinesDocHeader(); i++) { // Just skip these... getLine(log, data.isr, data.fileFormatType, data.lineStringBuffer); // header and footer: not wrapped lineNumberInFile++; } } String line; for (int i = 0; i < bufferSize && !data.doneReading; i++) { line = getLine(log, data.isr, data.fileFormatType, data.lineStringBuffer); if (line != null) { // when there is no header, check the filter for the first line if (!meta.hasHeader()) { // Filter row? boolean isFilterLastLine = false; boolean filterOK = checkFilterRow(line, isFilterLastLine); if (filterOK) { data.lineBuffer.add(new TextFileLine(line, lineNumberInFile, data.file)); // Store it in the // line buffer... } else { bufferSize++; // grab another line, this one got filtered } } else //there is a header, so don't checkFilterRow { if (!meta.noEmptyLines() || line.length() != 0) { data.lineBuffer.add(new TextFileLine(line, lineNumberInFile, data.file)); // Store it in the line buffer... } } } else { data.doneReading = true; } } // Reset counters etc. 
data.headerLinesRead = 0; data.footerLinesRead = 0; data.pageLinesRead = 0; // Set a flags data.doneWithHeader = !meta.hasHeader(); } catch (Exception e) { logError("Couldn't open file #" + data.filenr + " : " + data.filename + " --> " + e.toString()); stopAll(); setErrors(1); return false; } return true; } public boolean init(StepMetaInterface smi, StepDataInterface sdi) { meta = (TextFileInputMeta) smi; data = (TextFileInputData) sdi; if (super.init(smi, sdi)) { initErrorHandling(); initReplayFactory(); data.files = meta.getTextFileList(this); data.filterProcessor = new TextFileFilterProcessor(meta.getFilter()); // If there are missing files, fail if we don't ignore errors if ( (transmeta.getPreviousResult()==null || transmeta.getPreviousResult().getResultFiles()==null || transmeta.getPreviousResult().getResultFiles().size()==0) && data.files.nrOfMissingFiles() > 0 && !meta.isAcceptingFilenames() && !meta.isErrorIgnored() ) { logError(Messages.getString("TextFileInput.Log.Error.NoFilesSpecified")); return false; } String nr = getVariable(Const.INTERNAL_VARIABLE_SLAVE_SERVER_NUMBER); if (!Const.isEmpty(nr)) { // TODO: add metadata to configure this. if(log.isBasic()) logBasic("Running on slave server #"+nr+" : assuming that each slave reads a dedicated part of the same file(s)."); } // If no nullif field is supplied, take the default. // String null_value = nullif; // if (null_value == null) // // value=""; // String null_cmp = Const.rightPad(new StringBuffer(null_value), pol.length()); // calculate the file format type in advance so we can use a switch data.fileFormatType = meta.getFileFormatTypeNr(); // calculate the file type in advance CSV or Fixed? 
data.fileType = meta.getFileTypeNr(); return true; } return false; } private void initReplayFactory() { Date replayDate = getTrans().getReplayDate(); if (replayDate == null) data.filePlayList = FilePlayListAll.INSTANCE; else data.filePlayList = new FilePlayListReplay(replayDate, meta.getLineNumberFilesDestinationDirectory(), meta.getLineNumberFilesExtension(), meta.getErrorFilesDestinationDirectory(), meta.getErrorLineFilesExtension(), meta.getEncoding()); } private void initErrorHandling() { List<FileErrorHandler> dataErrorLineHandlers = new ArrayList<FileErrorHandler>(2); if (meta.getLineNumberFilesDestinationDirectory() != null) dataErrorLineHandlers.add(new FileErrorHandlerContentLineNumber(getTrans().getCurrentDate(), meta .getLineNumberFilesDestinationDirectory(), meta.getLineNumberFilesExtension(), meta.getEncoding(), this)); if (meta.getErrorFilesDestinationDirectory() != null) dataErrorLineHandlers.add(new FileErrorHandlerMissingFiles(getTrans().getCurrentDate(), meta.getErrorFilesDestinationDirectory(), meta .getErrorLineFilesExtension(), meta.getEncoding(), this)); data.dataErrorLineHandler = new CompositeFileErrorHandler(dataErrorLineHandlers); } public void dispose(StepMetaInterface smi, StepDataInterface sdi) { meta = (TextFileInputMeta) smi; data = (TextFileInputData) sdi; if(data.file!=null) { try{ data.file.close(); }catch (Exception e){} } super.dispose(smi, sdi); // This is for bug #5797 : it tries to assure that the file handle // is actually freed/garbarge collected. System.gc(); } // Run is were the action happens! public void run() { BaseStep.runStepThread(this, meta, data); } }
package org.jboss.forge.addon.shell.commands; import javax.inject.Inject; import org.jboss.forge.addon.ui.annotation.Command; import org.jboss.forge.addon.ui.annotation.handler.NonGUIEnabledHandler; import org.jboss.forge.addon.ui.output.UIOutput; import org.jboss.forge.furnace.Furnace; import org.jboss.forge.furnace.versions.Version; /** * * @author <a href="ggastald@redhat.com">George Gastaldi</a> */ public class AboutCommand { @Inject Furnace furnace; @Command(value = "version", help = "Displays the current Forge version.", enabledHandler = NonGUIEnabledHandler.class) public void showVersion(final UIOutput output) { Version version = furnace.getVersion(); output.out().println( "JBoss Forge, version [ " + version + " ] - JBoss, by Red Hat, Inc. [ http://jboss.org/forge ]"); } @Command(value = "about", help = "Display information about this forge.", enabledHandler = NonGUIEnabledHandler.class) public void showAbout(final UIOutput output) { output.out().println(); output.out().println(" _____ "); output.out().println(" | ___|__ _ __ __ _ ___ "); output.out().println(" | |_ / _ \\| `__/ _` |/ _ \\ \\\\"); output.out().println(" | _| (_) | | | (_| | __/ output.out().println(" |_| \\___/|_| \\__, |\\___| "); output.out().println(" |___/ "); output.out().println(""); showVersion(output); } }
package com.scg.net.server;

import java.io.EOFException;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.net.Socket;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.scg.domain.ClientAccount;
import com.scg.domain.Consultant;
import com.scg.domain.Invoice;
import com.scg.domain.TimeCard;
import com.scg.net.cmd.AddClientCommand;
import com.scg.net.cmd.AddConsultantCommand;
import com.scg.net.cmd.AddTimeCardCommand;
import com.scg.net.cmd.CreateInvoicesCommand;
import com.scg.net.cmd.DisconnectCommand;
import com.scg.net.cmd.ShutdownCommand;

/**
 * The command processor for the invoice server. Implements the receiver role in
 * the Command design pattern, provides the execute method for all of the
 * supported commands. Is provided with the client and consultant lists from the
 * Invoice server, maintains its own time card list.
 *
 * @author Russ Moul
 */
public final class CommandProcessor implements Runnable {
    /** The class' logger. */
    private static final Logger logger =
        LoggerFactory.getLogger(CommandProcessor.class);

    /** Character encoding used when writing invoice files. */
    private static final String ENCODING = "ISO-8859-1";

    /** The socket connection to the client this processor serves. */
    private final Socket clientSocket;

    /** The client list to be maintained by this CommandProcessor (shared with the server). */
    private final List<ClientAccount> clientList;

    /** The consultant list to be maintained by this CommandProcessor (shared with the server). */
    private final List<Consultant> consultantList;

    /** The time card list maintained by this CommandProcessor (private to this connection). */
    private final List<TimeCard> timeCardList = new ArrayList<>();

    /** The name of the directory to be used for files output by commands. */
    private String outputDirectoryName = ".";

    /** The server this command processor is spawned from. */
    private final InvoiceServer server;

    /** The name of the CommandProcessor, mostly used for logging. */
    private final String name;

    /**
     * Construct a CommandProcessor.
     *
     * @param connection - the Socket connecting the server to the client.
     * @param name - the name assigned to this CommandProcessor by the server; mostly for logging.
     * @param clientList - the ClientList to add Clients to.
     * @param consultantList - the ConsultantList to add Consultants to.
     * @param server - the server that created this command processor
     */
    public CommandProcessor(final Socket connection, String name,
                            final List<ClientAccount> clientList,
                            final List<Consultant> consultantList,
                            final InvoiceServer server) {
        this.clientSocket = connection;
        this.clientList = clientList;
        this.consultantList = consultantList;
        this.server = server;
        this.name = name;
    }

    /**
     * Set the output directory name.
     *
     * @param outPutDirectoryName the output directory name.
     */
    public void setOutPutDirectoryName(final String outPutDirectoryName) {
        this.outputDirectoryName = outPutDirectoryName;
    }

    /**
     * Execute an AddTimeCardCommand.
     *
     * @param command the command to execute.
     */
    public void execute(final AddTimeCardCommand command) {
        logger.info("Executing add time card command: {}", command);
        final TimeCard newTimeCard = command.getTarget();
        // Guard against duplicate submissions of the same card.
        synchronized (timeCardList) {
            if (!timeCardList.contains(newTimeCard)) {
                timeCardList.add(newTimeCard);
            }
        }
    }

    /**
     * Execute an AddClientCommand.
     *
     * @param command the command to execute.
     */
    public void execute(final AddClientCommand command) {
        logger.info("Executing add client command: {}", command);
        final ClientAccount newAccount = command.getTarget();
        // clientList is shared across all processors; synchronize on it.
        synchronized (clientList) {
            if (!clientList.contains(newAccount)) {
                clientList.add(newAccount);
            }
        }
    }

    /**
     * Execute an AddConsultantCommand.
     *
     * @param command the command to execute.
     */
    public void execute(final AddConsultantCommand command) {
        logger.info("Executing add consultant command: {}", command);
        final Consultant newConsultant = command.getTarget();
        // consultantList is shared across all processors; synchronize on it.
        synchronized (consultantList) {
            if (!consultantList.contains(newConsultant)) {
                consultantList.add(newConsultant);
            }
        }
    }

    /**
     * Execute a CreateInvoicesCommand: build one invoice per client for the
     * month carried by the command and write each non-empty invoice to a file
     * named "&lt;ClientName&gt;&lt;MonthYear&gt;Invoice.txt" in the output directory.
     *
     * @param command the command to execute.
     */
    public void execute(final CreateInvoicesCommand command) {
        logger.info("Executing invoice command: {}", command);
        Invoice invoice = null;
        LocalDate date = command.getTarget();
        final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("MMMMyyyy");
        final String monthString = formatter.format(date);
        synchronized (clientList) {
            for (final ClientAccount client : clientList) {
                invoice = new Invoice(client, date.getMonth(), date.getYear());
                for (final TimeCard currentTimeCard : timeCardList) {
                    invoice.extractLineItems(currentTimeCard);
                }
                // Only emit a file when the invoice actually has billable hours.
                if (invoice.getTotalHours() > 0) {
                    final File serverDir = new File(outputDirectoryName);
                    if (!serverDir.exists() && !serverDir.mkdirs()) {
                        logger.error("Unable to create directory, {}", serverDir.getAbsolutePath());
                        return;
                    }
                    final String outFileName = String.format("%s%sInvoice.txt",
                            client.getName().replaceAll(" ", ""), monthString);
                    final File outFile = new File(outputDirectoryName, outFileName);
                    try (PrintStream printOut =
                             new PrintStream(new FileOutputStream(outFile), true, ENCODING)) {
                        printOut.println(invoice.toReportString());
                    } catch (final FileNotFoundException e) {
                        logger.error("Can't open file " + outFileName, e);
                    } catch (final UnsupportedEncodingException e) {
                        logger.error("Can't write to file, bad encoding.", e);
                    }
                }
            }
        }
    }

    /**
     * Execute a DisconnectCommand: close this processor's client connection.
     *
     * @param command the input DisconnectCommand.
     */
    public void execute(final DisconnectCommand command) {
        logger.info("Executing disconnect command: {}", command);
        try {
            clientSocket.close();
        } catch (final IOException e) {
            logger.warn("Disconnect unable to close client connection.", e);
        }
    }

    /**
     * Execute a ShutdownCommand. Closes any current connections, stops
     * listening for connections and then terminates the server, without
     * calling System.exit.
     *
     * @param command the input ShutdownCommand.
     */
    public void execute(final ShutdownCommand command) {
        logger.info("Executing shutdown command: {}", command);
        try {
            clientSocket.close();
        } catch (final IOException e) {
            logger.warn("Shutdown unable to close client connection.", e);
        } finally {
            server.shutdown();
        }
    }

    /**
     * Run this CommandProcessor: reads serialized command objects from the
     * client connection and dispatches each to the matching execute overload,
     * until the connection is closed (by a DisconnectCommand, a
     * ShutdownCommand, or the client going away).
     *
     * FIX: the original body here was a ServerSocket accept loop copied from
     * InvoiceServer; it referenced fields ("port", "serverSocket") that this
     * class does not declare and could not compile. Replaced with the
     * command-reading loop this receiver class is documented to perform.
     */
    @Override
    public void run() {
        try (ObjectInputStream in = new ObjectInputStream(clientSocket.getInputStream())) {
            while (!clientSocket.isClosed()) {
                final Object obj = in.readObject();
                if (obj instanceof AddTimeCardCommand) {
                    execute((AddTimeCardCommand) obj);
                } else if (obj instanceof AddClientCommand) {
                    execute((AddClientCommand) obj);
                } else if (obj instanceof AddConsultantCommand) {
                    execute((AddConsultantCommand) obj);
                } else if (obj instanceof CreateInvoicesCommand) {
                    execute((CreateInvoicesCommand) obj);
                } else if (obj instanceof DisconnectCommand) {
                    execute((DisconnectCommand) obj);
                } else if (obj instanceof ShutdownCommand) {
                    execute((ShutdownCommand) obj);
                    break; // server is shutting down; stop reading
                } else {
                    logger.warn("{} received unrecognized object: {}", name,
                            (obj == null) ? "null" : obj.getClass().getName());
                }
            }
        } catch (final EOFException e) {
            // Normal end-of-stream: the client closed its side of the connection.
            logger.info("{}: client closed the connection.", name);
        } catch (final IOException | ClassNotFoundException e) {
            logger.error(name + ": error reading commands from connection.", e);
        } finally {
            if (!clientSocket.isClosed()) {
                try {
                    clientSocket.close();
                } catch (final IOException e) {
                    logger.warn(name + ": unable to close client connection.", e);
                }
            }
        }
    }
}
package org.slc.sli.disco.web.controllers;

import java.io.IOException;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.map.ObjectMapper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Scope;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.client.RestClientException;
import org.springframework.web.client.RestTemplate;

/**
 * Fetches realm information
 *
 * @author dkornishev
 *
 */
@Controller
@Scope("request")
@RequestMapping("/realms")
public class RealmsController {

    private static final Logger LOG = LoggerFactory.getLogger(RealmsController.class);

    private ObjectMapper mapper = new ObjectMapper();

    private RestTemplate rest = new RestTemplate();

    @Autowired
    @Value("${security.realms.list.url}")
    private String listUrl;

    @Autowired
    @Value("${security.realms.ssoInit.url}")
    private String ssoInitUrl;

    /**
     * Calls the API for the list of available realms and populates the model
     * with an id-to-state map for the realm-selection page.
     *
     * @param relayState optional SAML RelayState to carry through the flow
     * @param model spring injected model
     * @return name of the template to use
     * @throws IOException if the realm-list response cannot be parsed as JSON
     */
    @RequestMapping(value = "list.do", method = RequestMethod.GET)
    public String listRealms(@RequestParam(value = "RelayState", required = false) String relayState, Model model) throws IOException {
        final ResponseEntity<String> response = rest.getForEntity(this.listUrl, String.class);
        LOG.debug(response.getBody());

        // Each JSON element contributes one (id -> state) entry.
        final Map<String, String> realmStates = new HashMap<String, String>();
        final JsonNode tree = mapper.readTree(response.getBody());
        for (Iterator<JsonNode> elements = tree.getElements(); elements.hasNext();) {
            final JsonNode realm = elements.next();
            realmStates.put(realm.get("id").getTextValue(), realm.get("state").getTextValue());
        }

        model.addAttribute("dummy", new HashMap<String, String>());
        model.addAttribute("realms", realmStates);
        if (relayState == null) {
            model.addAttribute("relayState", "");
            model.addAttribute("errorMsg", "No relay state provided. User won't be redirected back to the application");
        } else {
            model.addAttribute("relayState", relayState);
        }
        return "realms";
    }

    /**
     * Redirects the user to the SSO init URL for the given realm id; on API
     * failure, falls back to re-rendering the realm list with an error message.
     *
     * @param realmId id of the realm
     * @param relayState optional SAML RelayState appended to the redirect
     * @param model spring injected model
     * @return directive to redirect to sso init page
     * @throws IOException propagated from the realm-list fallback
     */
    @RequestMapping(value = "sso.do", method = { RequestMethod.GET, RequestMethod.POST })
    public String ssoInit(@RequestParam(value = "realmId", required = false) String realmId,
            @RequestParam(value = "RelayState", required = false) String relayState, Model model) throws IOException {
        try {
            final ResponseEntity<String> redirect = rest.getForEntity(this.ssoInitUrl, String.class, realmId);
            return "redirect:" + redirect.getBody() + "&RelayState=" + relayState;
        } catch (RestClientException e) {
            LOG.error("Error Calling API", e);
            final String message;
            if (realmId == null) {
                message = "No realm selected. Please select your realm.";
            } else {
                message = "Error calling server";
            }
            model.addAttribute("errorMsg", message);
            return this.listRealms(relayState, model);
        }
    }

    public void setRest(RestTemplate rest) {
        this.rest = rest;
    }
}
package ie.ucd.clops.runtime.parser;

import ie.ucd.clops.logging.CLOLogger;
import ie.ucd.clops.runtime.automaton.Tokenizer.IllegalCharacterException;
import ie.ucd.clops.runtime.automaton.Tokenizer.UnknownOptionException;
import ie.ucd.clops.runtime.flyrules.FlyRuleStore;
import ie.ucd.clops.runtime.options.InvalidOptionPropertyValueException;
import ie.ucd.clops.runtime.options.OptionStore;

import java.util.logging.Level;

/**
 * This class is used as a base class for the automatically generated parser.
 * Subclasses supply the format string, option store and fly-rule store; the
 * base class wires them into a {@link GenericCLParser} run.
 */
public abstract class AbstractSpecificCLParser {

  public AbstractSpecificCLParser() throws InvalidOptionPropertyValueException {
  }

  /** @return the command-line format string describing valid option sequences. */
  public abstract String getFormatString();

  /** @return the store holding this parser's option definitions. */
  public abstract OptionStore getOptionStore();

  /** @return the store holding this parser's fly rules. */
  public abstract FlyRuleStore getFlyRuleStore();

  /**
   * Parse the given arguments with a freshly created {@link GenericCLParser}.
   *
   * @param args the command-line arguments to parse
   * @return true on a successful parse, false otherwise
   */
  public boolean parse(String[] args) {
    return parse(new GenericCLParser(), args);
  }

  /**
   * Parse the given arguments with the supplied parser, using this class's
   * format string, option store and fly-rule store.
   *
   * @param parser the generic parser to drive
   * @param args the command-line arguments to parse
   * @return true on a successful parse; false if parsing fails or the
   *         parsing automaton cannot be initialised
   */
  public boolean parse(GenericCLParser parser, String[] args) {
    try {
      return parser.parse(getFormatString(), getOptionStore(), getFlyRuleStore(), args);
    } catch (IllegalCharacterException e) {
      logAutomatonError(e);
      return false;
    } catch (UnknownOptionException e) {
      logAutomatonError(e);
      return false;
    }
  }

  /** Logs an automaton-initialisation failure at SEVERE level. */
  private static void logAutomatonError(Exception e) {
    CLOLogger.getLogger().log(Level.SEVERE, "Error initialising automaton. " + e);
  }
}
package edu.wustl.cab2b.client.ui.experiment; import java.awt.BorderLayout; import java.awt.Color; import java.awt.Component; import java.awt.Dimension; import java.rmi.RemoteException; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Vector; import javax.swing.BorderFactory; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.border.EmptyBorder; import javax.swing.event.TreeSelectionEvent; import javax.swing.event.TreeSelectionListener; import javax.swing.tree.DefaultMutableTreeNode; import org.jdesktop.swingx.JXPanel; import org.jdesktop.swingx.JXTree; import edu.common.dynamicextensions.domaininterface.AbstractAttributeInterface; import edu.common.dynamicextensions.domaininterface.EntityInterface; import edu.common.dynamicextensions.entitymanager.EntityRecord; import edu.common.dynamicextensions.entitymanager.EntityRecordInterface; import edu.common.dynamicextensions.entitymanager.EntityRecordResultInterface; import edu.wustl.cab2b.client.ui.RiverLayout; import edu.wustl.cab2b.client.ui.controls.Cab2bPanel; import edu.wustl.cab2b.client.ui.controls.StackedBox; import edu.wustl.cab2b.client.ui.mainframe.MainFrame; import edu.wustl.cab2b.client.ui.util.CommonUtils; import edu.wustl.cab2b.client.ui.util.CustomSwingWorker; import edu.wustl.cab2b.client.ui.viewresults.TreePanel; import edu.wustl.cab2b.common.datalist.DataListBusinessInterface; import edu.wustl.cab2b.common.datalist.DataListHome; import edu.wustl.cab2b.common.domain.Experiment; import edu.wustl.cab2b.common.ejb.EjbNamesConstants; import edu.wustl.cab2b.common.exception.CheckedException; import edu.wustl.cab2b.common.experiment.ExperimentBusinessInterface; import edu.wustl.common.tree.ExperimentTreeNode; import edu.wustl.common.util.logger.Logger; /* * Class used to display left hand side stack panel. 
 *
 */
public class ExperimentStackBox extends Cab2bPanel{

    // panel to display data-list (flat structure, see PPT slide 44)
    Cab2bPanel dataCategoryPanel = null;
    // panel to display filters on the selected data-category
    Cab2bPanel dataFilterPanel = null;
    // panel to display analysed data on the selected data-category
    Cab2bPanel analyseDataPanel = null;
    Cab2bPanel visualiseDataPanel = null;
    Cab2bPanel dataViewPanel = null;
    // stack box that hosts all of the panels above
    StackedBox stackedBox;
    // grid panel on the right-hand side that renders the selected category's records
    ExperimentDataCategoryGridPanel m_experimentDataCategoryGridPanel= null;
    // tree of the experiment's data categories
    JXTree datalistTree;
    JScrollPane treeViewScrollPane;
    ExperimentBusinessInterface m_experimentBusinessInterface;
    Experiment m_selectedExperiment = null;
    // table headers / rows computed on the worker thread, consumed on the EDT
    String columnName[] = null;
    Object recordObject[][] = null;

    /**
     * Builds the stack box for the given experiment without a grid panel.
     *
     * @param expBus remote business interface used to fetch data-list entities
     * @param selectedExperiment the experiment whose categories are shown
     */
    public ExperimentStackBox(ExperimentBusinessInterface expBus, Experiment selectedExperiment) {
        m_experimentBusinessInterface = expBus;
        m_selectedExperiment = selectedExperiment;
        initGUI();
    }

    /**
     * Builds the stack box and wires it to the grid panel that displays the
     * records of the category selected in the tree.
     *
     * @param expBus remote business interface used to fetch data-list entities
     * @param selectedExperiment the experiment whose categories are shown
     * @param experimentDataCategoryGridPanel target panel for record display
     */
    public ExperimentStackBox(ExperimentBusinessInterface expBus, Experiment selectedExperiment,
            ExperimentDataCategoryGridPanel experimentDataCategoryGridPanel) {
        m_experimentBusinessInterface = expBus;
        m_selectedExperiment = selectedExperiment;
        m_experimentDataCategoryGridPanel = experimentDataCategoryGridPanel;
        initGUI();
    }

    /**
     * Lays out the stacked boxes: the data-category tree, the filter panel and
     * the analyse panel. Fetches the experiment's entities remotely to build
     * the tree.
     */
    public void initGUI() {
        this.setLayout(new BorderLayout());
        stackedBox = new StackedBox();
        stackedBox.setTitleBackgroundColor(new Color(200, 200, 220));
        Set<EntityInterface> entitySet = null;
        try {
            entitySet = m_experimentBusinessInterface.getDataListEntityNames(m_selectedExperiment);
        } catch (RemoteException e) {
            // NOTE(review): a remote failure leaves entitySet null and the
            // iterator call below will NPE — confirm intended error handling.
            e.printStackTrace();
        }
        Iterator iter = entitySet.iterator();
        DefaultMutableTreeNode rootNode = new DefaultMutableTreeNode("Experiment Data Categories");
        DefaultMutableTreeNode node = null;
        // One child node per entity; the node's user object wraps the entity.
        while (iter.hasNext()) {
            EntityInterface entity = (EntityInterface) iter.next();
            ExperimentEntity expEnity = new ExperimentEntity();
            expEnity.setEntityInterface(entity);
            node = new DefaultMutableTreeNode(expEnity);
            //node.setUserObject(entity);
            rootNode.add(node);
        }
        // creating datalist tree
        datalistTree = new JXTree(rootNode);
        datalistTree.addTreeSelectionListener(new TreeSelectionListener() {
            public void valueChanged(TreeSelectionEvent e) {
                treeSelectionListenerAction();
            }
        });
        // Adding Select data category pane
        treeViewScrollPane = new JScrollPane(datalistTree);
        stackedBox.addBox("Select Data Category", treeViewScrollPane, "resources/images/mysearchqueries_icon.gif");
        // Adding Filter data category panel
        dataFilterPanel = new Cab2bPanel();
        dataFilterPanel.setPreferredSize(new Dimension(250, 150));
        dataFilterPanel.setOpaque(false);
        stackedBox.addBox("Filter Data ", dataFilterPanel, "resources/images/mysearchqueries_icon.gif");
        // Adding Analyse data panel
        analyseDataPanel = new Cab2bPanel();
        analyseDataPanel.setPreferredSize(new Dimension(250, 150));
        analyseDataPanel.setOpaque(false);
        stackedBox.addBox("Analyze Data ", analyseDataPanel, "resources/images/mysearchqueries_icon.gif");
        stackedBox.setPreferredSize(new Dimension(250, 500));
        stackedBox.setMinimumSize(new Dimension(250, 500));
        this.add(stackedBox);
    }

    /**
     * Reacts to a tree-selection change: on a background thread, fetches the
     * selected category's attribute headers and records via the data-list EJB,
     * then refreshes the grid panel on the EDT.
     */
    public void treeSelectionListenerAction() {
        CustomSwingWorker swingWorker = new CustomSwingWorker(datalistTree) {
            @Override
            protected void doNonUILogic() throws RuntimeException {
                Logger.out.info("Clicked on datalist");
                DefaultMutableTreeNode node = (DefaultMutableTreeNode) datalistTree.getLastSelectedPathComponent();
                if (node == null) return;
                // if root node is selected, clear all table content
                if (node.isRoot() == true) {
                    columnName = null;
                    recordObject = null;
                }
                Object nodeInfo = node.getUserObject();
                if (nodeInfo instanceof ExperimentEntity) {
                    EntityInterface entityNode = ((ExperimentEntity) nodeInfo).getEntityInterface();
                    entityNode.getAttributeCollection();
                    Logger.out.info("ID :: " + entityNode.getId());
                    // getting datalist entity interface
                    DataListBusinessInterface dataListBI = (DataListBusinessInterface) CommonUtils
                            .getBusinessInterface(EjbNamesConstants.DATALIST_BEAN, DataListHome.class);
                    try {
                        // getting list of attributes (table headers)
                        EntityRecordResultInterface recordResultInterface = dataListBI.getEntityRecord(entityNode.getId());
                        List<AbstractAttributeInterface> headerList = recordResultInterface.getEntityRecordMetadata().getAttributeList();
                        Iterator it = headerList.iterator();
                        columnName = new String[headerList.size()];
                        int i = 0;
                        while (it.hasNext()) {
                            AbstractAttributeInterface attribute = (AbstractAttributeInterface) it.next();
                            columnName[i++] = CommonUtils.getFormattedString(attribute.getName());
                            Logger.out.info("Table Header :" + attribute.getName());
                        }
                        // getting actual records into the 2-D table model array
                        List<EntityRecordInterface> recordList = recordResultInterface.getEntityRecordList();
                        it = recordList.iterator();
                        recordObject = new Object[recordList.size()][headerList.size()];
                        Logger.out.info("Record Size :: " + recordList.size());
                        i = 0;
                        while (it.hasNext()) {
                            EntityRecordInterface record = (EntityRecordInterface) it.next();
                            List recordValueList = record.getRecordValueList();
                            int j = 0;
                            Iterator iterList = recordValueList.iterator();
                            while (iterList.hasNext()) {
                                recordObject[i][j] = new Object();
                                recordObject[i][j] = iterList.next();
                                Logger.out.info("Data [" + i + "]" + "[" + j + "]" + recordObject[i][j]);
                                j++;
                            }
                            i++;
                        }
                    } catch (RemoteException e1) {
                        e1.printStackTrace();
                    }
                }
            }

            // Runs on the EDT after doNonUILogic; pushes the computed headers
            // and rows into the grid panel.
            protected void doUIUpdateLogic() throws RuntimeException {
                m_experimentDataCategoryGridPanel.refreshTable(columnName, recordObject);
            }
        };
        swingWorker.start();
    }
}
/** * @author cdr */ package com.intellij.openapi.editor.actions.moveUpDown; import com.intellij.lang.ASTNode; import com.intellij.lang.StdLanguages; import com.intellij.openapi.actionSystem.DataContext; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.actionSystem.EditorWriteActionHandler; import com.intellij.openapi.project.Project; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.psi.util.PsiUtil; class MoveStatementHandler extends EditorWriteActionHandler { private final boolean isDown; private final Mover[] myMovers; public MoveStatementHandler(boolean down) { isDown = down; // order is important myMovers = new Mover[]{new StatementMover(down), new DeclarationMover(down), new LineMover(down)}; } public void executeWriteAction(Editor editor, DataContext dataContext) { final Project project = editor.getProject(); final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(project); final Document document = editor.getDocument(); PsiFile file = getRoot(documentManager.getPsiFile(document), editor); final Mover mover = getSuitableMover(editor, file); mover.move(editor,file); } public boolean isEnabled(Editor editor, DataContext dataContext) { if (editor.isViewer() || editor.isOneLineMode()) return false; final Project project = editor.getProject(); final PsiDocumentManager documentManager = PsiDocumentManager.getInstance(project); final Document document = editor.getDocument(); documentManager.commitDocument(document); PsiFile psiFile = documentManager.getPsiFile(document); PsiFile file = getRoot(psiFile, editor); if (file == null) return false; final Mover mover = getSuitableMover(editor, file); if (mover == null || mover.toMove2 == null) return false; final int maxLine = editor.offsetToLogicalPosition(editor.getDocument().getTextLength()).line; final LineRange range = mover.toMove; if 
(range.startLine <= 1 && !isDown) return false; return range.endLine < maxLine || !isDown; } private static PsiFile getRoot(final PsiFile file, final Editor editor) { if (file == null) return null; int offset = editor.getCaretModel().getOffset(); if (offset == editor.getDocument().getTextLength()) offset if (offset<0) return null; PsiElement leafElement = file.findElementAt(offset); if (leafElement == null) return null; if (leafElement.getLanguage() == StdLanguages.ANT) { leafElement = file.getViewProvider().findElementAt(offset, StdLanguages.XML); if (leafElement == null) return null; } ASTNode node = leafElement.getNode(); if (node == null) return null; return (PsiFile)PsiUtil.getRoot(node).getPsi(); } private Mover getSuitableMover(final Editor editor, final PsiFile file) { for (final Mover mover : myMovers) { final boolean available = mover.checkAvailable(editor, file); if (available) return mover; } return null; } }
package com.intellij.refactoring.typeCook.deductive.resolver; import com.intellij.refactoring.typeCook.deductive.PsiTypeVariableFactory; import com.intellij.psi.PsiTypeVariable; import com.intellij.refactoring.typeCook.deductive.PsiExtendedTypeVisitor; import com.intellij.refactoring.typeCook.deductive.builder.Constraint; import com.intellij.refactoring.typeCook.deductive.builder.Subtype; import com.intellij.refactoring.typeCook.Util; import com.intellij.psi.Bottom; import com.intellij.psi.*; import com.intellij.psi.search.PsiSearchHelper; import com.intellij.psi.search.GlobalSearchScope; import com.intellij.psi.util.TypeConversionUtil; import com.intellij.psi.util.InheritanceUtil; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Pair; import com.intellij.openapi.project.Project; import com.intellij.util.IncorrectOperationException; import java.util.*; public class BindingFactory { private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.typeCook.deductive.resolver.BindingFactory"); private HashSet<PsiTypeVariable> myBoundVariables; private Project myProject; private PsiTypeVariableFactory myFactory; private PsiClass[] getGreatestLowerClasses(final PsiClass aClass, final PsiClass bClass) { if (InheritanceUtil.isInheritorOrSelf(aClass, bClass, true)) { return new PsiClass[]{aClass}; } if (InheritanceUtil.isInheritorOrSelf(bClass, aClass, true)) { return new PsiClass[]{bClass}; } final Set<PsiClass> descendants = new LinkedHashSet<PsiClass>(); new Object() { public void getGreatestLowerClasses(final PsiClass aClass, final PsiClass bClass, final Set<PsiClass> descendants) { if (aClass.isInheritor(bClass, true)) { descendants.add(aClass); } else { final PsiSearchHelper helper = aClass.getManager().getSearchHelper(); final PsiClass[] bSubs = helper.findInheritors(bClass, GlobalSearchScope.allScope(myProject), false); for (int i = 0; i < bSubs.length; i++) { getGreatestLowerClasses(bSubs[i], aClass, descendants); } } 
} }.getGreatestLowerClasses(aClass, bClass, descendants); return descendants.toArray(new PsiClass[descendants.size()]); } private class BindingImpl extends Binding { private HashMap<Integer, PsiType> myBindings; private boolean myCyclic; BindingImpl(final PsiTypeVariable var, final PsiType type) { myBindings = new HashMap<Integer, PsiType>(); myCyclic = type instanceof PsiTypeVariable; myBindings.put(new Integer(var.getIndex()), type); } BindingImpl(final int index, final PsiType type) { myBindings = new HashMap<Integer, PsiType>(); myCyclic = type instanceof PsiTypeVariable; myBindings.put(new Integer(index), type); if (type instanceof Bottom) { final HashSet<PsiTypeVariable> cluster = myFactory.getClusterOf(index); if (cluster != null) { for (final Iterator<PsiTypeVariable> v = cluster.iterator(); v.hasNext();) { myBindings.put(new Integer(v.next().getIndex()), type); } } } } BindingImpl() { myBindings = new HashMap<Integer, PsiType>(); myCyclic = false; } public PsiType apply(final PsiType type) { if (type instanceof PsiTypeVariable) { final PsiType t = myBindings.get(new Integer(((PsiTypeVariable)type).getIndex())); return t == null ? 
type : t; } else if (type instanceof PsiArrayType) { return apply(((PsiArrayType)type).getComponentType()).createArrayType(); } else if (type instanceof PsiClassType) { final PsiClassType.ClassResolveResult result = Util.resolveType(type); final PsiClass theClass = result.getElement(); final PsiSubstitutor aSubst = result.getSubstitutor(); PsiSubstitutor theSubst = PsiSubstitutor.EMPTY; if (theClass != null) { for (Iterator<PsiTypeParameter> p = aSubst.getSubstitutionMap().keySet().iterator(); p.hasNext();) { final PsiTypeParameter aParm = p.next(); final PsiType aType = aSubst.substitute(aParm); theSubst = theSubst.put(aParm, apply(aType)); } return theClass.getManager().getElementFactory().createType(theClass, theSubst); } else { return type; } } else if (type instanceof PsiWildcardType) { final PsiWildcardType wcType = (PsiWildcardType)type; final PsiType bound = wcType.getBound(); if (bound != null) { final PsiType abound = apply(bound); if (abound instanceof PsiWildcardType) { return null; } return wcType.isExtends() ? PsiWildcardType.createExtends(PsiManager.getInstance(myProject), abound) : PsiWildcardType.createSuper(PsiManager.getInstance(myProject), abound); } return type; } else { return type; } } public boolean equals(final Object o) { if (this == o) return true; if (!(o instanceof BindingImpl)) return false; final BindingImpl binding = (BindingImpl)o; if (!myBindings.equals(binding.myBindings)) { return false; } return true; } public Binding compose(final Binding b) { LOG.assertTrue(b instanceof BindingImpl); final BindingImpl b1 = this; final BindingImpl b2 = (BindingImpl)b; final BindingImpl b3 = new BindingImpl(); for (final Iterator<PsiTypeVariable> v = myBoundVariables.iterator(); v.hasNext();) { final Integer i = new Integer(v.next().getIndex()); final PsiType b1i = b1.myBindings.get(i); final PsiType b2i = b2.myBindings.get(i); final int flag = (b1i == null ? 0 : 1) + (b2i == null ? 
0 : 2); switch (flag) { case 0: break; case 1: /* b1(i)\b2(i) */ { final PsiType type = b2.apply(b1i); if (type == null) { return null; } b3.myBindings.put(i, type); b3.myCyclic = type instanceof PsiTypeVariable; } break; case 2: /* b2(i)\b1(i) */ { final PsiType type = b1.apply(b2i); if (type == null) { return null; } b3.myBindings.put(i, type); b3.myCyclic = type instanceof PsiTypeVariable; } break; case 3: /* b2(i) \cap b1(i) */ { final Binding common = rise(b1i, b2i, null); if (common == null) { return null; } final PsiType type = common.apply(b1i); if (type == null) { return null; } final PsiType type1 = type;//b2.apply(type); if (type1 == null) { return null; } b3.myBindings.put(i, type1); b3.myCyclic = type instanceof PsiTypeVariable; } } } return b3; } public String toString() { final StringBuffer buffer = new StringBuffer(); for (final Iterator<PsiTypeVariable> v = myBoundVariables.iterator(); v.hasNext();) { final Integer i = new Integer(v.next().getIndex()); final PsiType binding = myBindings.get(i); if (binding != null) { buffer.append("#" + i + " -> " + binding.getPresentableText() + "; "); } } return buffer.toString(); } private PsiType normalize(final PsiType t) { if (t == null || t instanceof PsiTypeVariable) { return Bottom.BOTTOM; } if (t instanceof PsiWildcardType) { return ((PsiWildcardType)t).getBound(); } return t; } public int compare(final Binding binding) { final BindingImpl b2 = (BindingImpl)binding; final BindingImpl b1 = this; int directoin = Binding.NONCOMPARABLE; boolean first = true; for (final Iterator<PsiTypeVariable> v = myBoundVariables.iterator(); v.hasNext();) { final Integer index = new Integer(v.next().getIndex()); final PsiType x = normalize(b1.myBindings.get(index)); final PsiType y = normalize(b2.myBindings.get(index)); final int comp = new Object() { int compare(final PsiType x, final PsiType y) { final int[] kinds = new Object() { private int classify(final PsiType x) { if (x == null) { return 0; } if (x instanceof 
PsiPrimitiveType) { return 1; } if (x instanceof PsiArrayType) { return 2; } if (x instanceof PsiClassType) { return 3; } return 4; // Bottom } int[] classify2(final PsiType x, final PsiType y) { return new int[]{classify(x), classify(y)}; } }.classify2(x, y); final int kindX = kinds[0]; final int kindY = kinds[1]; // Break your brain here... if (kindX + kindY == 0) { return Binding.SAME; } if (kindX * kindY == 0) { if (kindX == 0) { return Binding.WORSE; } return Binding.BETTER; } if (kindX * kindY == 1) { if (x.equals(y)) { return Binding.SAME; } return Binding.NONCOMPARABLE; } if (kindX != kindY) { if (kindX == 4) { return Binding.WORSE; } if (kindY == 4) { return Binding.BETTER; } if (kindX + kindY == 5) { try { final PsiElementFactory f = PsiManager.getInstance(myProject).getElementFactory(); final PsiType cloneable = f.createTypeFromText("java.lang.Cloneable", null); final PsiType object = f.createTypeFromText("java.lang.Object", null); final PsiType serializable = f.createTypeFromText("java.io.Serializable", null); PsiType type; int flag; if (kindX == 3) { type = x; flag = Binding.WORSE; } else { type = y; flag = Binding.BETTER; } if (type.equals(object) || type.equals(cloneable) || type.equals(serializable)) { return flag; } } catch (IncorrectOperationException e) { LOG.error(e); } } return Binding.NONCOMPARABLE; } if (kindX == 2) { return compare(((PsiArrayType)x).getComponentType(), ((PsiArrayType)y).getComponentType()); } if (x.equals(y)) { return Binding.SAME; } // End of breaking... 
final PsiClassType.ClassResolveResult resultX = Util.resolveType(x); final PsiClassType.ClassResolveResult resultY = Util.resolveType(y); final PsiClass xClass = resultX.getElement(); final PsiClass yClass = resultY.getElement(); final PsiSubstitutor xSubst = resultX.getSubstitutor(); final PsiSubstitutor ySubst = resultY.getSubstitutor(); if (xClass == null || yClass == null) { return Binding.NONCOMPARABLE; } if (xClass.equals(yClass)) { boolean first = true; int direction = Binding.SAME; for (Iterator<PsiTypeParameter> i = xSubst.getSubstitutionMap().keySet().iterator(); i.hasNext();) { final PsiTypeParameter p = i.next(); final PsiType xParm = xSubst.substitute(p); final PsiType yParm = ySubst.substitute(p); final int comp = compare(xParm, yParm); if (comp == Binding.NONCOMPARABLE) { return Binding.NONCOMPARABLE; } if (first) { first = false; direction = comp; } if (direction != comp) { return Binding.NONCOMPARABLE; } } return direction; } else { if (InheritanceUtil.isCorrectDescendant(xClass, yClass, true)) { return Binding.BETTER; } else if (InheritanceUtil.isCorrectDescendant(yClass, xClass, true)) { return Binding.WORSE; } return Binding.NONCOMPARABLE; } } }.compare(x, y); if (comp == Binding.NONCOMPARABLE) { return Binding.NONCOMPARABLE; } if (first) { first = false; directoin = comp; } if (directoin != SAME) { if (comp != Binding.SAME && directoin != comp) { return Binding.NONCOMPARABLE; } } else if (comp != SAME) { directoin = comp; } } return directoin; } public boolean nonEmpty() { return myBindings.size() > 0; } public boolean isCyclic() { return myCyclic; } public Binding reduceRecursive() { final BindingImpl binding = (BindingImpl)create(); for (final Iterator<PsiTypeVariable> v = myBoundVariables.iterator(); v.hasNext();) { final PsiTypeVariable var = v.next(); final Integer index = new Integer(var.getIndex()); final PsiType type = myBindings.get(index); if (type != null) { class Verifier extends PsiExtendedTypeVisitor { boolean myFlag = false; 
public Object visitTypeVariable(final PsiTypeVariable var) { if (var.getIndex() == index.intValue()) { myFlag = true; } return null; } } final Verifier verifier = new Verifier(); type.accept(verifier); if (verifier.myFlag) { myBindings.put(index, Bottom.BOTTOM); binding.myBindings.put(index, Bottom.BOTTOM); } else { binding.myBindings.put(index, type); } } else { binding.myBindings.put(index, type); } } for (final Iterator<PsiTypeVariable> v = myBoundVariables.iterator(); v.hasNext();) { final PsiTypeVariable var = v.next(); final Integer index = new Integer(var.getIndex()); final PsiType type = myBindings.get(index); if (type != null) { myBindings.put(index, binding.apply(type)); } } return this; } public boolean binds(final PsiTypeVariable var) { return myBindings.get(new Integer(var.getIndex())) != null; } public void merge(final Binding b, final boolean removeObject) { for (final Iterator<PsiTypeVariable> v = b.getBoundVariables().iterator(); v.hasNext();) { final PsiTypeVariable var = v.next(); final Integer index = new Integer(var.getIndex()); if (myBindings.get(index) != null) { LOG.error("Oops... 
Binding conflict..."); } else { final PsiType type = b.apply(var); final PsiClassType javaLangObject = PsiType.getJavaLangObject(PsiManager.getInstance(myProject), GlobalSearchScope.allScope(myProject)); if (removeObject && javaLangObject.equals(type)) { final HashSet<PsiTypeVariable> cluster = myFactory.getClusterOf(var.getIndex()); if (cluster != null) { for (final Iterator<PsiTypeVariable> w = cluster.iterator(); w.hasNext();) { final PsiTypeVariable war = w.next(); final PsiType wtype = b.apply(war); if (!javaLangObject.equals(wtype)) { myBindings.put(index, type); break; } } } } else { myBindings.put(index, type); } } } } public HashSet<PsiTypeVariable> getBoundVariables() { return myBoundVariables; } public int getWidth() { int w = 0; for (final Iterator<PsiType> t = myBindings.values().iterator(); t.hasNext();) { final PsiType type = substitute(t.next()); if (type != null) { w++; } } return w; } public boolean isValid() { for (final Iterator<PsiTypeVariable> v = myBoundVariables.iterator(); v.hasNext();) { final PsiTypeVariable var = v.next(); final PsiType type = substitute(var); if (!var.isValidInContext(type)) { return false; } } return true; } public void addTypeVariable(final PsiTypeVariable var) { myBoundVariables.add(var); } public PsiType substitute(final PsiType t) { if (t instanceof PsiWildcardType) { final PsiWildcardType wcType = (PsiWildcardType)t; final PsiType bound = wcType.getBound(); if (bound == null) { return t; } final PsiManager manager = PsiManager.getInstance(myProject); final PsiType subst = substitute(bound); if (subst == null) return null; return subst instanceof PsiWildcardType ? subst : wcType.isExtends() ? 
PsiWildcardType.createExtends(manager, subst) : PsiWildcardType.createSuper(manager, subst); } else if (t instanceof PsiTypeVariable) { final PsiType b = apply(t); if (b instanceof Bottom || b instanceof PsiTypeVariable) { return null; } return substitute(b); } else if (t instanceof Bottom) { return null; } else if (t instanceof PsiArrayType) { return substitute(((PsiArrayType)t).getComponentType()).createArrayType(); } else if (t instanceof PsiClassType) { final PsiClassType.ClassResolveResult result = ((PsiClassType)t).resolveGenerics(); final PsiClass aClass = result.getElement(); final PsiSubstitutor aSubst = result.getSubstitutor(); if (aClass != null) { PsiSubstitutor theSubst = PsiSubstitutor.EMPTY; for (final Iterator<PsiTypeParameter> p = aSubst.getSubstitutionMap().keySet().iterator(); p.hasNext();) { final PsiTypeParameter parm = p.next(); final PsiType type = aSubst.substitute(parm); theSubst = theSubst.put(parm, substitute(type)); } return aClass.getManager().getElementFactory().createType(aClass, theSubst); } } return t; } } interface Balancer { Binding varType(PsiTypeVariable x, PsiType y); Binding varVar(PsiTypeVariable x, PsiTypeVariable y); Binding typeVar(PsiType x, PsiTypeVariable y); } interface Unifier { Binding unify(PsiType x, PsiType y); } public Binding balance(final PsiType x, final PsiType y, final Balancer balancer, final HashSet<Constraint> constraints) { final int indicator = (x instanceof PsiTypeVariable ? 1 : 0) + (y instanceof PsiTypeVariable ? 2 : 0); switch (indicator) { case 0: if (x instanceof PsiWildcardType || y instanceof PsiWildcardType) { final PsiType xType = x instanceof PsiWildcardType ? ((PsiWildcardType)x).getBound() : x; final PsiType yType = y instanceof PsiWildcardType ? ((PsiWildcardType)y).getBound() : y; switch ((x instanceof PsiWildcardType ? 1 : 0) + (y instanceof PsiWildcardType ? 2 : 0)) { case 1: if (((PsiWildcardType)x).isExtends()) { /* ? extends T1, T2 */ return null; } else { /* ? 
super T1, T2 */ if (!xType.getCanonicalText().equals("java.lang.Object")) { return null; } return create(); } case 2: if (((PsiWildcardType)y).isExtends()) { /* T1, ? extends T2 */ if (yType instanceof PsiTypeVariable) { return create((PsiTypeVariable)yType, PsiWildcardType.createSuper(PsiManager.getInstance(myProject), xType)); } else { if (constraints != null) { constraints.add(new Subtype(xType, yType)); } return balance(xType, yType, balancer, constraints); } } else {/* T1, ? super T2 */ if (yType instanceof PsiTypeVariable) { return create(((PsiTypeVariable)yType), PsiWildcardType.createExtends(PsiManager.getInstance(myProject), xType)); } else { if (constraints != null) { constraints.add(new Subtype(yType, xType)); } return balance(xType, yType, balancer, constraints); } } case 3: switch ((((PsiWildcardType)x).isExtends() ? 0 : 1) + (((PsiWildcardType)y).isExtends() ? 0 : 2)) { case 0: /* ? super T1, ? super T2 */ if (constraints != null) { constraints.add(new Subtype(yType, xType)); } return balance(xType, yType, balancer, constraints); case 1: /* ? extends T1, ? super T2 */ if (constraints != null) { constraints.add(new Subtype(xType, yType)); } return balance(xType, yType, balancer, constraints); case 2: /* ? super T1, ? extends T2*/ return null; case 3: /* ? extends T1, ? extends T2*/ if (constraints != null) { constraints.add(new Subtype(xType, yType)); } return balance(xType, yType, balancer, constraints); } } return create(); } else if (x instanceof PsiArrayType || y instanceof PsiArrayType) { final PsiType xType = x instanceof PsiArrayType ? ((PsiArrayType)x).getComponentType() : x; final PsiType yType = y instanceof PsiArrayType ? 
((PsiArrayType)y).getComponentType() : y; return balance(xType, yType, balancer, constraints); } else if (x instanceof PsiClassType && y instanceof PsiClassType) { final PsiClassType.ClassResolveResult resultX = Util.resolveType(x); final PsiClassType.ClassResolveResult resultY = Util.resolveType(y); final PsiClass xClass = resultX.getElement(); final PsiClass yClass = resultY.getElement(); if (xClass != null && yClass != null) { final PsiSubstitutor ySubst = resultY.getSubstitutor(); PsiSubstitutor xSubst = TypeConversionUtil.getClassSubstitutor(yClass, xClass, resultX.getSubstitutor()); if (xSubst == null) return null; Binding b = create(); for (Iterator<PsiTypeParameter> p = xSubst.getSubstitutionMap().keySet().iterator(); p.hasNext();) { final PsiTypeParameter aParm = p.next(); final PsiType xType = xSubst.substitute(aParm); final PsiType yType = ySubst.substitute(aParm); final Binding b1 = unify(xType, yType, new Unifier() { public Binding unify(final PsiType x, final PsiType y) { return balance(x, y, balancer, constraints); } }); if (b1 == null) { return null; } b = b.compose(b1); } return b; } } else if (y instanceof Bottom) { return create(); } else { return null; } break; case 1: return balancer.varType((PsiTypeVariable)x, y); case 2: return balancer.typeVar(x, (PsiTypeVariable)y); case 3: return balancer.varVar((PsiTypeVariable)x, (PsiTypeVariable)y); } return null; } private Binding unify(final PsiType x, final PsiType y, final Unifier unifier) { final int indicator = (x instanceof PsiTypeVariable ? 1 : 0) + (y instanceof PsiTypeVariable ? 2 : 0); switch (indicator) { case 0: if (x instanceof PsiWildcardType || y instanceof PsiWildcardType) { return unifier.unify(x, y); } else if (x instanceof PsiArrayType || y instanceof PsiArrayType) { final PsiType xType = x instanceof PsiArrayType ? ((PsiArrayType)x).getComponentType() : x; final PsiType yType = y instanceof PsiArrayType ? 
((PsiArrayType)y).getComponentType() : y; return unify(xType, yType, unifier); } else if (x instanceof PsiClassType && y instanceof PsiClassType) { final PsiClassType.ClassResolveResult resultX = Util.resolveType(x); final PsiClassType.ClassResolveResult resultY = Util.resolveType(y); final PsiClass xClass = resultX.getElement(); final PsiClass yClass = resultY.getElement(); if (xClass != null && yClass != null) { final PsiSubstitutor ySubst = resultY.getSubstitutor(); final PsiSubstitutor xSubst = resultX.getSubstitutor(); if (!xClass.equals(yClass)) { return null; } Binding b = create(); for (Iterator<PsiTypeParameter> p = xSubst.getSubstitutionMap().keySet().iterator(); p.hasNext();) { final PsiTypeParameter aParm = p.next(); final PsiType xType = xSubst.substitute(aParm); final PsiType yType = ySubst.substitute(aParm); final Binding b1 = unify(xType, yType, unifier); if (b1 == null) { return null; } b = b.compose(b1); } return b; } } else if (y instanceof Bottom) { return create(); } else { return null; } default: return unifier.unify(x, y); } } public Binding riseWithWildcard(final PsiType x, final PsiType y, final HashSet<Constraint> constraints) { final Binding binding = balance(x, y, new Balancer() { public Binding varType(final PsiTypeVariable x, final PsiType y) { if (y instanceof Bottom) { return create(); } if (y instanceof PsiWildcardType) { return null; } final PsiTypeVariable var = myFactory.create(); final Binding binding = create(x, PsiWildcardType.createSuper(PsiManager.getInstance(myProject), var)); binding.addTypeVariable(var); constraints.add(new Subtype(var, y)); return binding; } public Binding varVar(final PsiTypeVariable x, final PsiTypeVariable y) { final int xi = x.getIndex(); final int yi = y.getIndex(); if (xi < yi) { return create(x, y); } else if (yi < xi) { return create(y, x); } else { return create(); } } public Binding typeVar(final PsiType x, final PsiTypeVariable y) { if (x == null) { return create(y, Bottom.BOTTOM); } if (x 
instanceof PsiWildcardType) { return null; } final PsiTypeVariable var = myFactory.create(); final Binding binding = create(y, PsiWildcardType.createExtends(PsiManager.getInstance(myProject), var)); binding.addTypeVariable(var); constraints.add(new Subtype(x, var)); return binding; } }, constraints); return binding != null ? binding.reduceRecursive() : null; } public Binding rise(final PsiType x, final PsiType y, final HashSet<Constraint> constraints) { final Binding binding = balance(x, y, new Balancer() { public Binding varType(final PsiTypeVariable x, final PsiType y) { if (y instanceof Bottom) { return create(); } return create(x, y); } public Binding varVar(final PsiTypeVariable x, final PsiTypeVariable y) { final int xi = x.getIndex(); final int yi = y.getIndex(); if (xi < yi) { return create(x, y); } else if (yi < xi) { return create(y, x); } else { return create(); } } public Binding typeVar(final PsiType x, final PsiTypeVariable y) { if (x == null) { return create(y, Bottom.BOTTOM); } return create(y, x); } }, constraints); return binding != null ? 
binding.reduceRecursive() : null; } public Binding sink(final PsiType x, final PsiType y, final HashSet<Constraint> constraints) { return balance(x, y, new Balancer() { public Binding varType(final PsiTypeVariable x, final PsiType y) { return create(x, y); } public Binding varVar(final PsiTypeVariable x, final PsiTypeVariable y) { return create(y, Bottom.BOTTOM); } public Binding typeVar(final PsiType x, final PsiTypeVariable y) { return create(y, Bottom.BOTTOM); } }, constraints); } public LinkedList<Pair<PsiType, Binding>> union(final PsiType x, final PsiType y) { final LinkedList<Pair<PsiType, Binding>> list = new LinkedList<Pair<PsiType, Binding>>(); new Object() { void union(final PsiType x, final PsiType y, final LinkedList<Pair<PsiType, Binding>> list) { if (x instanceof PsiArrayType && y instanceof PsiArrayType) { union(((PsiArrayType)x).getComponentType(), ((PsiArrayType)y).getComponentType(), list); } else if (x instanceof PsiClassType && y instanceof PsiClassType) { final PsiClassType.ClassResolveResult xResult = Util.resolveType(x); final PsiClassType.ClassResolveResult yResult = Util.resolveType(y); final PsiClass xClass = xResult.getElement(); final PsiClass yClass = yResult.getElement(); final PsiSubstitutor xSubst = xResult.getSubstitutor(); final PsiSubstitutor ySubst = yResult.getSubstitutor(); if (xClass == null || yClass == null) { return; } if (xClass.equals(yClass)) { final Binding risen = rise(x, y, null); if (risen == null) { return; } list.addFirst(new Pair<PsiType, Binding>(risen.apply(x), risen)); } else { final PsiClass[] descendants = getGreatestLowerClasses(xClass, yClass); for (int i = 0; i < descendants.length; i++) { final PsiClass descendant = descendants[i]; final PsiSubstitutor x2aSubst = TypeConversionUtil.getClassSubstitutor(xClass, descendant, xSubst); final PsiSubstitutor y2aSubst = TypeConversionUtil.getClassSubstitutor(yClass, descendant, ySubst); LOG.assertTrue(x2aSubst != null && y2aSubst != null); final PsiElementFactory 
factory = xClass.getManager().getElementFactory(); union(factory.createType(descendant, x2aSubst), factory.createType(descendant, y2aSubst), list); } } } } }.union(x, y, list); return list; } public LinkedList<Pair<PsiType, Binding>> intersect(final PsiType x, final PsiType y) { final LinkedList<Pair<PsiType, Binding>> list = new LinkedList<Pair<PsiType, Binding>>(); new Object() { void intersect(final PsiType x, final PsiType y, final LinkedList<Pair<PsiType, Binding>> list) { if (x instanceof PsiWildcardType || y instanceof PsiWildcardType) { final PsiType xType = x instanceof PsiWildcardType ? ((PsiWildcardType)x).getBound() : x; final PsiType yType = y instanceof PsiWildcardType ? ((PsiWildcardType)y).getBound() : y; intersect(xType, yType, list); } if (x instanceof PsiArrayType || y instanceof PsiArrayType) { if (x instanceof PsiClassType || y instanceof PsiClassType) { try { final PsiElementFactory f = PsiManager.getInstance(myProject).getElementFactory(); final PsiType keyType = x instanceof PsiClassType ? 
x : y; final PsiType object = f.createTypeFromText("java.lang.Object", null); final PsiType cloneable = f.createTypeFromText("java.lang.Cloneable", null); final PsiType serializable = f.createTypeFromText("java.io.Serializable", null); intersect(keyType, object, list); intersect(keyType, cloneable, list); intersect(keyType, serializable, list); } catch (IncorrectOperationException e) { LOG.error("Exception " + e); } } else if (x instanceof PsiArrayType && y instanceof PsiArrayType) { intersect(((PsiArrayType)x).getComponentType(), ((PsiArrayType)y).getComponentType(), list); } } else if (x instanceof PsiClassType && y instanceof PsiClassType) { final PsiClassType.ClassResolveResult xResult = Util.resolveType(x); final PsiClassType.ClassResolveResult yResult = Util.resolveType(y); final PsiClass xClass = xResult.getElement(); final PsiClass yClass = yResult.getElement(); final PsiSubstitutor xSubst = xResult.getSubstitutor(); final PsiSubstitutor ySubst = yResult.getSubstitutor(); if (xClass == null || yClass == null) { return; } if (xClass.equals(yClass)) { final Binding risen = rise(x, y, null); if (risen == null) { final PsiElementFactory factory = xClass.getManager().getElementFactory(); list.addFirst(new Pair<PsiType, Binding>(Util.banalize(factory.createType(xClass, factory.createRawSubstitutor(xClass))), create())); } else { list.addFirst(new Pair<PsiType, Binding>(risen.apply(x), risen)); } } else { final PsiClass[] ancestors = GenericsUtil.getLeastUpperClasses(xClass, yClass); for (int i = 0; i < ancestors.length; i++) { final PsiClass ancestor = ancestors[i]; if (ancestor.getQualifiedName().equals("java.lang.Object") && ancestors.length > 1) { continue; } final PsiSubstitutor x2aSubst = TypeConversionUtil.getSuperClassSubstitutor(ancestor, xClass, xSubst); final PsiSubstitutor y2aSubst = TypeConversionUtil.getSuperClassSubstitutor(ancestor, yClass, ySubst); final PsiElementFactory factory = xClass.getManager().getElementFactory(); 
intersect(factory.createType(ancestor, x2aSubst), factory.createType(ancestor, y2aSubst), list); } } } } }.intersect(x, y, list); return list; } public BindingFactory(final com.intellij.refactoring.typeCook.deductive.builder.System system) { myBoundVariables = system.getBoundVariables(); myProject = system.getProject(); myFactory = system.getVariableFactory(); } public Binding create(final PsiTypeVariable var, final PsiType type) { return new BindingImpl(var, type); } public Binding create() { return new BindingImpl(); } public HashSet<PsiTypeVariable> getBoundVariables() { return myBoundVariables; } }
package uk.ac.ebi.quickgo.graphics.service;

import uk.ac.ebi.quickgo.graphics.ontology.GraphImageResult;
import uk.ac.ebi.quickgo.graphics.ontology.RenderingGraphException;

import java.util.List;

/**
 * Defines how a graphical representation, in the form of a {@link GraphImageResult},
 * is produced for a collection of ontology term ids.
 *
 * Created 26/09/16
 * @author Edd
 */
public interface GraphImageService {

    /**
     * Produces the {@link GraphImageResult} that graphically represents the
     * supplied term ids, rendered within the requested scope.
     *
     * @param ids the term ids whose graphical representation is required
     * @param scope the scope in which the graph is being drawn. Currently this is either "ECO" or "GO".
     * @return the corresponding graphical image representing the {@code ids}
     * @throws RenderingGraphException if there was a problem rendering the graph
     */
    GraphImageResult createChart(List<String> ids, String scope);
}
package com.splicemachine.derby.utils; import com.splicemachine.EngineDriver; import com.splicemachine.db.iapi.error.PublicAPI; import com.splicemachine.db.iapi.error.StandardException; import com.splicemachine.db.iapi.reference.Property; import com.splicemachine.db.iapi.services.io.FormatableBitSet; import com.splicemachine.db.iapi.services.property.PropertyUtil; import com.splicemachine.db.iapi.services.uuid.UUIDFactory; import com.splicemachine.db.iapi.sql.Activation; import com.splicemachine.db.iapi.sql.ResultColumnDescriptor; import com.splicemachine.db.iapi.sql.conn.Authorizer; import com.splicemachine.db.iapi.sql.conn.LanguageConnectionContext; import com.splicemachine.db.iapi.sql.dictionary.*; import com.splicemachine.db.iapi.sql.execute.ExecRow; import com.splicemachine.db.iapi.stats.ColumnStatisticsImpl; import com.splicemachine.db.iapi.stats.FakeColumnStatisticsImpl; import com.splicemachine.db.iapi.stats.ItemStatistics; import com.splicemachine.db.iapi.store.access.TransactionController; import com.splicemachine.db.iapi.types.*; import com.splicemachine.db.impl.jdbc.EmbedConnection; import com.splicemachine.db.impl.jdbc.EmbedResultSet40; import com.splicemachine.db.impl.sql.GenericColumnDescriptor; import com.splicemachine.db.impl.sql.catalog.SYSCOLUMNSTATISTICSRowFactory; import com.splicemachine.db.impl.sql.catalog.SYSTABLESTATISTICSRowFactory; import com.splicemachine.db.impl.sql.execute.IteratorNoPutResultSet; import com.splicemachine.db.impl.sql.execute.ValueRow; import com.splicemachine.db.shared.common.reference.SQLState; import com.splicemachine.ddl.DDLMessage.DDLChange; import com.splicemachine.derby.ddl.DDLUtils; import com.splicemachine.derby.impl.store.access.SpliceTransactionManager; import com.splicemachine.derby.impl.store.access.base.SpliceConglomerate; import com.splicemachine.derby.stream.iapi.DataSetProcessor; import com.splicemachine.derby.stream.iapi.DistributedDataSetProcessor; import 
com.splicemachine.derby.stream.iapi.OperationContext; import com.splicemachine.derby.stream.iapi.ScanSetBuilder; import com.splicemachine.metrics.Metrics; import com.splicemachine.pipeline.ErrorState; import com.splicemachine.pipeline.Exceptions; import com.splicemachine.protobuf.ProtoUtil; import com.splicemachine.si.api.txn.TxnView; import com.splicemachine.si.impl.driver.SIDriver; import com.splicemachine.storage.DataScan; import com.splicemachine.utils.Pair; import com.splicemachine.utils.SpliceLogUtils; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import org.apache.log4j.Logger; import org.spark_project.guava.base.Function; import org.spark_project.guava.collect.FluentIterable; import org.spark_project.guava.collect.Lists; import javax.annotation.Nullable; import java.io.ByteArrayInputStream; import java.io.ObjectInputStream; import java.sql.*; import java.util.*; import java.util.concurrent.ExecutionException; import static com.splicemachine.derby.utils.EngineUtils.getSchemaDescriptor; import static com.splicemachine.derby.utils.EngineUtils.verifyTableExists; public class StatisticsAdmin extends BaseAdminProcedures { private static final Logger LOG = Logger.getLogger(StatisticsAdmin.class); public static final String TABLEID_FROM_SCHEMA = "select tableid from sysvw.systablesView t where t.schemaid = ?"; @SuppressWarnings("UnusedDeclaration") public static void DISABLE_COLUMN_STATISTICS(String schema, String table, String columnName) throws SQLException { schema = EngineUtils.validateSchema(schema); table = EngineUtils.validateTable(table); columnName = EngineUtils.validateColumnName(columnName); EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn(); try { TableDescriptor td = verifyTableExists(conn, schema, table); //verify that that column exists ColumnDescriptorList columnDescriptorList = td.getColumnDescriptorList(); for (ColumnDescriptor descriptor : columnDescriptorList) { if 
(descriptor.getColumnName().equalsIgnoreCase(columnName)) { //need to make sure it's not a pk or indexed column ensureNotKeyed(descriptor, td); descriptor.setCollectStatistics(false); LanguageConnectionContext languageConnection=conn.getLanguageConnection(); TransactionController transactionCompile=languageConnection.getTransactionCompile(); transactionCompile.elevate("dictionary"); languageConnection.getDataDictionary().setCollectStats(transactionCompile, td.getUUID(), columnName, false); return; } } throw ErrorState.LANG_COLUMN_NOT_FOUND_IN_TABLE.newException(columnName, schema + "." + table); } catch (StandardException e) { throw PublicAPI.wrapStandardException(e); } } public static void SET_STATS_EXTRAPOLATION_FOR_COLUMN(String schema, String table, String columnName, short useExtrapolation) throws SQLException { schema = EngineUtils.validateSchema(schema); table = EngineUtils.validateTable(table); columnName = EngineUtils.validateColumnName(columnName); EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn(); try { TableDescriptor td = verifyTableExists(conn, schema, table); //verify that that column exists ColumnDescriptorList columnDescriptorList = td.getColumnDescriptorList(); for (ColumnDescriptor descriptor : columnDescriptorList) { if (descriptor.getColumnName().equalsIgnoreCase(columnName)) { byte value = (byte)(useExtrapolation==0 ? 
0 : 1); // make sure the column type can support extrapolation if ((value == 1) && !ColumnDescriptor.allowsExtrapolation(descriptor.getType())) throw ErrorState.LANG_STATS_EXTRAPOLATION_NOT_SUPPORTED.newException(columnName, descriptor.getType()); descriptor.setUseExtrapolation(value); LanguageConnectionContext languageConnection=conn.getLanguageConnection(); TransactionController transactionCompile=languageConnection.getTransactionCompile(); transactionCompile.elevate("dictionary"); languageConnection.getDataDictionary().setUseExtrapolation(transactionCompile, td.getUUID(), columnName, value); return; } } throw ErrorState.LANG_COLUMN_NOT_FOUND_IN_TABLE.newException(columnName, schema + "." + table); } catch (StandardException e) { throw PublicAPI.wrapStandardException(e); } } @SuppressWarnings("UnusedDeclaration") public static void DISABLE_ALL_COLUMN_STATISTICS(String schema, String table) throws SQLException { schema = EngineUtils.validateSchema(schema); table = EngineUtils.validateTable(table); EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn(); try { TableDescriptor td = verifyTableExists(conn, schema, table); ColumnDescriptorList columnDescriptorList = td.getColumnDescriptorList(); //get the list of index columns whose stats are mandatory boolean[] indexColumns = new boolean[columnDescriptorList.size()]; IndexLister indexLister = td.getIndexLister(); if (indexLister != null) { IndexRowGenerator[] indexRowGenerators = indexLister.getIndexRowGenerators(); for (IndexRowGenerator irg : indexRowGenerators) { int[] keyColumns = irg.getIndexDescriptor().baseColumnPositions(); for (int keyColumn : keyColumns) { indexColumns[keyColumn - 1] = true; } } } // get the list of columns in PK whose stats are also mandatory ReferencedKeyConstraintDescriptor keyDescriptor = td.getPrimaryKey(); if (keyDescriptor != null) { int[] pkColumns = keyDescriptor.getReferencedColumns(); for (int keyColumn : pkColumns) { indexColumns[keyColumn - 1] = true; } } //go 
through all columns for (ColumnDescriptor descriptor : columnDescriptorList) { String columnName = descriptor.getColumnName(); //need to make sure it's not a pk or indexed column if (!indexColumns[descriptor.getPosition() - 1]) { descriptor.setCollectStatistics(false); LanguageConnectionContext languageConnection = conn.getLanguageConnection(); TransactionController transactionCompile = languageConnection.getTransactionCompile(); transactionCompile.elevate("dictionary"); languageConnection.getDataDictionary().setCollectStats(transactionCompile, td.getUUID(), columnName, false); } } } catch (StandardException e) { throw PublicAPI.wrapStandardException(e); } } @SuppressWarnings("UnusedDeclaration") public static void ENABLE_COLUMN_STATISTICS(String schema, String table, String columnName) throws SQLException { schema = EngineUtils.validateSchema(schema); table = EngineUtils.validateTable(table); columnName = EngineUtils.validateColumnName(columnName); EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn(); try { TableDescriptor td = verifyTableExists(conn, schema, table); //verify that that column exists ColumnDescriptorList columnDescriptorList = td.getColumnDescriptorList(); for (ColumnDescriptor descriptor : columnDescriptorList) { if (descriptor.getColumnName().equalsIgnoreCase(columnName)) { DataTypeDescriptor type = descriptor.getType(); if (!ColumnDescriptor.allowsStatistics(type)) throw ErrorState.LANG_COLUMN_STATISTICS_NOT_POSSIBLE.newException(columnName, type.getTypeName()); descriptor.setCollectStatistics(true); LanguageConnectionContext languageConnection=conn.getLanguageConnection(); TransactionController transactionCompile=languageConnection.getTransactionCompile(); transactionCompile.elevate("dictionary"); languageConnection.getDataDictionary().setCollectStats(transactionCompile, td.getUUID(), columnName, true); return; } } throw ErrorState.LANG_COLUMN_NOT_FOUND_IN_TABLE.newException(columnName, schema + "." 
+ table); } catch (StandardException e) { throw PublicAPI.wrapStandardException(e); } } @SuppressWarnings("UnusedDeclaration") public static void ENABLE_ALL_COLUMN_STATISTICS(String schema, String table) throws SQLException { schema = EngineUtils.validateSchema(schema); table = EngineUtils.validateTable(table); EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn(); try { TableDescriptor td = verifyTableExists(conn, schema, table); //verify that that column exists ColumnDescriptorList columnDescriptorList = td.getColumnDescriptorList(); for (ColumnDescriptor descriptor : columnDescriptorList) { String columnName = descriptor.getColumnName(); DataTypeDescriptor type = descriptor.getType(); if (!descriptor.collectStatistics() && ColumnDescriptor.allowsStatistics(type)) { descriptor.setCollectStatistics(true); LanguageConnectionContext languageConnection = conn.getLanguageConnection(); TransactionController transactionCompile = languageConnection.getTransactionCompile(); transactionCompile.elevate("dictionary"); languageConnection.getDataDictionary().setCollectStats(transactionCompile, td.getUUID(), columnName, true); } } } catch (StandardException e) { throw PublicAPI.wrapStandardException(e); } } private static final ResultColumnDescriptor[] COLLECTED_STATS_OUTPUT_COLUMNS = new GenericColumnDescriptor[]{ new GenericColumnDescriptor("schemaName", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR)), new GenericColumnDescriptor("tableName", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR)), new GenericColumnDescriptor("partition", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR)), new GenericColumnDescriptor("rowsCollected", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.BIGINT)), new GenericColumnDescriptor("partitionSize", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.BIGINT)), new GenericColumnDescriptor("partitionCount", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.BIGINT)), new 
GenericColumnDescriptor("statsType", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.INTEGER)), new GenericColumnDescriptor("sampleFraction", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.DOUBLE)) }; private static final ResultColumnDescriptor[] COLUMN_STATS_OUTPUT_COLUMNS = new GenericColumnDescriptor[]{ new GenericColumnDescriptor("schemaName", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR)), new GenericColumnDescriptor("tableName", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR)), new GenericColumnDescriptor("columnName", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR)), new GenericColumnDescriptor("partition", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.VARCHAR)), new GenericColumnDescriptor("nullCount", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.BIGINT)), new GenericColumnDescriptor("totalCount", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.BIGINT)), new GenericColumnDescriptor("cardinality", DataTypeDescriptor.getBuiltInDataTypeDescriptor(Types.BIGINT)) }; @SuppressWarnings("unused") public static void COLLECT_SCHEMA_STATISTICS(String schema, boolean staleOnly, ResultSet[] outputResults) throws SQLException { EmbedConnection conn = (EmbedConnection)getDefaultConn(); try { if (schema == null) throw ErrorState.TABLE_NAME_CANNOT_BE_NULL.newException(); //TODO -sf- change this to proper SCHEMA // error? schema = EngineUtils.validateSchema(schema); LanguageConnectionContext lcc = conn.getLanguageConnection(); DataDictionary dd = lcc.getDataDictionary(); dd.startWriting(lcc); /* Invalidate dependencies remotely. 
*/ TransactionController tc = lcc.getTransactionExecute(); SchemaDescriptor sd = getSchemaDescriptor(schema, lcc, dd); //get a list of all the TableDescriptors in the schema List<TableDescriptor> tds = getAllTableDescriptors(sd, conn); if (tds.isEmpty()) { // No point in continuing with empty TableDescriptor list, possible NPE return; } authorize(tds); TransactionController transactionExecute = lcc.getTransactionExecute(); transactionExecute.elevate("statistics"); dropTableStatistics(tds, dd, tc); ddlNotification(tc, tds); TxnView txn = ((SpliceTransactionManager) transactionExecute).getRawTransaction().getActiveStateTxn(); HashMap<Long, Pair<String, String>> display = new HashMap<>(); ArrayList<StatisticsOperation> statisticsOperations = new ArrayList<>(tds.size()); for (TableDescriptor td : tds) { display.put(td.getHeapConglomerateId(), Pair.newPair(schema, td.getName())); statisticsOperations.add(createCollectTableStatisticsOperation(td, false, 0, true, txn, conn)); } IteratorNoPutResultSet resultsToWrap = wrapResults(conn, displayTableStatistics(statisticsOperations,true, dd, transactionExecute, display), COLLECTED_STATS_OUTPUT_COLUMNS); outputResults[0] = new EmbedResultSet40(conn, resultsToWrap, false, null, true); } catch (StandardException se) { throw PublicAPI.wrapStandardException(se); } catch (ExecutionException e) { throw PublicAPI.wrapStandardException(Exceptions.parseException(e.getCause())); } } private static void authorize(List<TableDescriptor> tableDescriptorList) throws SQLException, StandardException { EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn(); LanguageConnectionContext lcc = conn.getLanguageConnection(); Authorizer authorizer=lcc.getAuthorizer(); Activation activation = lcc.getActivationCount()>0?lcc.getLastActivation():null; if(activation==null){ //TODO -sf- this can happen sometimes for some reason for(TableDescriptor td : tableDescriptorList){ authorizer.authorize(Authorizer.INSERT_PRIV); } return; } List 
requiredPermissionsList = activation.getPreparedStatement().getRequiredPermissionsList(); for (TableDescriptor tableDescriptor : tableDescriptorList) { StatementTablePermission key = null; try { key = new StatementTablePermission( tableDescriptor.getSchemaDescriptor().getUUID(), tableDescriptor.getUUID(), Authorizer.INSERT_PRIV); requiredPermissionsList.add(key); lcc.getAuthorizer().authorize(activation, 1); } catch (StandardException e) { if (e.getSqlState().compareTo(SQLState.AUTH_NO_TABLE_PERMISSION) == 0) { throw StandardException.newException( com.splicemachine.db.iapi.reference.SQLState.AUTH_NO_TABLE_PERMISSION_FOR_ANALYZE, lcc.getCurrentUserId(activation), "INSERT", tableDescriptor.getSchemaName(), tableDescriptor.getName()); } else throw e; } finally { if (key != null) { requiredPermissionsList.remove(key); } } } } @SuppressWarnings({"unchecked"}) public static void COLLECT_TABLE_STATISTICS(String schema, String table, boolean staleOnly, ResultSet[] outputResults) throws SQLException { doStatsCollectionForTables(schema, table, false, 0.0, staleOnly, true, outputResults); } public static void COLLECT_TABLE_SAMPLE_STATISTICS(String schema, String table, double sample, boolean staleOnly, ResultSet[] outputResults) throws SQLException { doStatsCollectionForTables(schema, table, true, sample, staleOnly, true, outputResults); } @SuppressWarnings({"unchecked"}) public static void COLLECT_NONMERGED_TABLE_STATISTICS(String schema, String table, boolean staleOnly, ResultSet[] outputResults) throws SQLException { doStatsCollectionForTables(schema, table, false, 0.0, staleOnly, false, outputResults); } public static void COLLECT_NONMERGED_TABLE_SAMPLE_STATISTICS(String schema, String table, double sample, boolean staleOnly, ResultSet[] outputResults) throws SQLException { doStatsCollectionForTables(schema, table, true, sample, staleOnly, false, outputResults); } public static void DROP_SCHEMA_STATISTICS(String schema) throws SQLException { EmbedConnection conn = 
(EmbedConnection) getDefaultConn(); try { if (schema == null) throw ErrorState.LANG_SCHEMA_DOES_NOT_EXIST.newException(); schema = schema.toUpperCase(); LanguageConnectionContext lcc = conn.getLanguageConnection(); DataDictionary dd = lcc.getDataDictionary(); SchemaDescriptor sd = getSchemaDescriptor(schema, lcc, dd); List<TableDescriptor> tds = getAllTableDescriptors(sd, conn); authorize(tds); TransactionController tc = conn.getLanguageConnection().getTransactionExecute(); tc.elevate("statistics"); dropTableStatistics(tds,dd,tc); ddlNotification(tc,tds); SpliceLogUtils.debug(LOG, "Done dropping statistics for schema %s.", schema); } catch (StandardException se) { throw PublicAPI.wrapStandardException(se); } finally { if (conn != null) conn.close(); } } public static void DROP_TABLE_STATISTICS(String schema, String table) throws SQLException { EmbedConnection conn = (EmbedConnection) getDefaultConn(); try { schema = EngineUtils.validateSchema(schema); table = EngineUtils.validateTable(table); TableDescriptor tableDesc = verifyTableExists(conn, schema, table); TransactionController tc = conn.getLanguageConnection().getTransactionExecute(); tc.elevate("statistics"); DataDictionary dd = conn.getLanguageConnection().getDataDictionary(); List<TableDescriptor> tds = Collections.singletonList(tableDesc); dropTableStatistics(tds,dd,tc); ddlNotification(tc,tds); SpliceLogUtils.debug(LOG, "Done dropping statistics for table %s.", table); } catch (StandardException se) { throw PublicAPI.wrapStandardException(se); } finally { if (conn != null) conn.close(); } } public static void FAKE_TABLE_STATISTICS(String schema, String table, long rowCount, int meanRowWidth, long numPartitions, ResultSet[] outputResults) throws SQLException { EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn(); try { schema = EngineUtils.validateSchema(schema); table = EngineUtils.validateTable(table); TableDescriptor tableDesc = verifyTableExists(conn, schema, table); if (rowCount < 0) 
throw ErrorState.LANG_INVALID_FAKE_STATS.newException("table", "row count cannot be a negative value");
            if (meanRowWidth <= 0)
                throw ErrorState.LANG_INVALID_FAKE_STATS.newException("table", "meanRowWidth has to be greater than 0");
            if (numPartitions <= 0)
                throw ErrorState.LANG_INVALID_FAKE_STATS.newException("table", "number of partitions has to be greater than 0");
            List<TableDescriptor> tds = Collections.singletonList(tableDesc);
            authorize(tds);
            DataDictionary dd = conn.getLanguageConnection().getDataDictionary();
            dd.startWriting(conn.getLanguageConnection());
            TransactionController tc = conn.getLanguageConnection().getTransactionExecute();
            tc.elevate("statistics");
            // Replace any real statistics with the fabricated row.
            dropTableStatistics(tds,dd,tc);
            ddlNotification(tc, tds);
            // compose the fake table stats row
            ExecRow statsRow;
            int statsType = SYSTABLESTATISTICSRowFactory.FAKE_MERGED_STATS;
            long conglomerateId = tableDesc.getHeapConglomerateId();
            // partitionSize is approximated as rowCount * meanRowWidth; sampleFraction = 0.
            statsRow = StatisticsAdmin.generateRowFromStats(conglomerateId, "-All-", rowCount, rowCount*meanRowWidth,
                    meanRowWidth, numPartitions, statsType, 0.0d);
            dd.addTableStatistics(statsRow, tc);
            ExecRow resultRow = generateOutputRow(schema, table, statsRow);
            IteratorNoPutResultSet resultsToWrap = wrapResults(
                    conn, Lists.newArrayList(resultRow), COLLECTED_STATS_OUTPUT_COLUMNS);
            outputResults[0] = new EmbedResultSet40(conn, resultsToWrap, false, null, true);
        } catch (StandardException se) {
            throw PublicAPI.wrapStandardException(se);
        }
    }

    /**
     * Installs fabricated column-level statistics for one column, derived from the existing
     * table-level row count. Requires table statistics to be present already.
     *
     * @param nullCountRatio fraction of rows that are null, in [0,1]
     * @param rpv            rows per value; 1 <= rpv <= (totalCount - nullCount)
     * @param outputResults  out-parameter receiving the single summary row
     */
    public static void FAKE_COLUMN_STATISTICS(String schema, String table, String column, double nullCountRatio,
                                              long rpv, ResultSet[] outputResults) throws SQLException {
        EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn();
        try {
            schema = EngineUtils.validateSchema(schema);
            table = EngineUtils.validateTable(table);
            column = EngineUtils.validateColumnName(column);
            TableDescriptor td = verifyTableExists(conn, schema, table);
            //verify that that column exists
            int columnId = -1;
            ColumnDescriptor columnDescriptor = null;
            ColumnDescriptorList columnDescriptorList = td.getColumnDescriptorList();
            for (ColumnDescriptor descriptor : columnDescriptorList) {
                if (descriptor.getColumnName().equalsIgnoreCase(column)) {
                    columnId = descriptor.getPosition();
                    columnDescriptor = descriptor;
                    break;
                }
            }
            if (columnId == -1)
                throw ErrorState.LANG_COLUMN_NOT_FOUND_IN_TABLE.newException(column, schema + "." + table);
            List<TableDescriptor> tds = Collections.singletonList(td);
            authorize(tds);
            DataDictionary dd = conn.getLanguageConnection().getDataDictionary();
            dd.startWriting(conn.getLanguageConnection());
            TransactionController tc = conn.getLanguageConnection().getTransactionExecute();
            tc.elevate("statistics");
            // get the row count from table stats
            long totalCount = getRowCountFromTableStats(td.getHeapConglomerateId(), dd, tc);
            if (totalCount < 0)
                throw ErrorState.LANG_INVALID_FAKE_STATS.newException("column", "table stats do not exist, please add table stats first");
            if (nullCountRatio < 0 || nullCountRatio > 1)
                throw ErrorState.LANG_INVALID_FAKE_STATS.newException("column", "null count ratio should be in the range of [0,1]");
            long nullCount = (long)(nullCountRatio * totalCount);
            if (rpv > totalCount - nullCount || rpv < 1)
                throw ErrorState.LANG_INVALID_FAKE_STATS.newException("column", "rows per value shouldn't be less than 1 or larger than the total number of not-null count : " + (totalCount - nullCount));
            // cardinality = distinct values = notNullCount / rowsPerValue
            long cardinality = (long)(((double)(totalCount- nullCount))/rpv);
            dropColumnStatistics(td.getHeapConglomerateId(), columnId, dd,tc);
            ddlNotification(tc, tds);
            // compose the fake column stats row
            long conglomerateId = td.getHeapConglomerateId();
            FakeColumnStatisticsImpl columnStatistics = new FakeColumnStatisticsImpl(columnDescriptor.getType().getNull(), nullCount, totalCount, cardinality);
            // compose the entry for a given column
            ExecRow statsRow = StatisticsAdmin.generateRowFromStats(conglomerateId, "-All-", columnId, columnStatistics);
            dd.addColumnStatistics(statsRow, tc);
            ExecRow resultRow =
generateOutputRowForColumnStats(schema, table, column, "-All-", nullCount, totalCount, cardinality);
            IteratorNoPutResultSet resultsToWrap = wrapResults(
                    conn, Lists.newArrayList(resultRow), COLUMN_STATS_OUTPUT_COLUMNS);
            outputResults[0] = new EmbedResultSet40(conn, resultsToWrap, false, null, true);
        } catch (StandardException se) {
            throw PublicAPI.wrapStandardException(se);
        }
    }

    /**
     * Publishes an ALTER_STATS DDL change for the given tables so that remote nodes
     * invalidate anything cached against their old statistics.
     */
    private static void ddlNotification(TransactionController tc,  List<TableDescriptor> tds) throws StandardException {
        DDLChange ddlChange = ProtoUtil.alterStats(((SpliceTransactionManager) tc).getActiveStateTxn().getTxnId(),tds);
        tc.prepareDataDictionaryChange(DDLUtils.notifyMetadataChange(ddlChange));
    }

    /*private helper methods*/

    /**
     * Shared implementation behind the COLLECT_*_STATISTICS procedures: validates input,
     * drops existing statistics, notifies peers, runs a collection operation and wraps the
     * resulting rows in a result set placed into {@code outputResults[0]}.
     *
     * @param useSample     whether to sample rather than scan the full table
     * @param samplePercent sample percentage in [0,100]; only checked when useSample is true
     * @param mergeStats    true to merge per-partition stats into one row per table
     */
    private static void doStatsCollectionForTables(String schema, String table, boolean useSample,
                                                   double samplePercent, boolean staleOnly, boolean mergeStats,
                                                   ResultSet[] outputResults) throws SQLException {
        EmbedConnection conn = (EmbedConnection) SpliceAdmin.getDefaultConn();
        try {
            schema = EngineUtils.validateSchema(schema);
            table = EngineUtils.validateTable(table);
            TableDescriptor tableDesc = verifyTableExists(conn, schema, table);
            List<TableDescriptor> tds = Collections.singletonList(tableDesc);
            authorize(tds);
            //check if sample fraction is in the valid range
            if (useSample) {
                if (samplePercent<0.0 || samplePercent>100.0)
                    throw ErrorState.LANG_INVALID_VALUE_RANGE.newException("samplePercent value " + samplePercent, "[0,100]");
            }
            DataDictionary dd = conn.getLanguageConnection().getDataDictionary();
            dd.startWriting(conn.getLanguageConnection());
            TransactionController tc = conn.getLanguageConnection().getTransactionExecute();
            dropTableStatistics(tds,dd,tc);
            ddlNotification(tc, tds);
            TxnView txn = ((SpliceTransactionManager) tc).getRawTransaction().getActiveStateTxn();
            HashMap<Long,Pair<String,String>> display = new HashMap<>();
            display.put(tableDesc.getHeapConglomerateId(),Pair.newPair(schema,table));
            IteratorNoPutResultSet resultsToWrap = wrapResults(
                    conn,
                    displayTableStatistics(Lists.newArrayList(
                            // samplePercent is a percentage; the operation expects a fraction.
                            createCollectTableStatisticsOperation(tableDesc, useSample, samplePercent/100, mergeStats, txn, conn)
                    ), mergeStats, dd, tc, display), COLLECTED_STATS_OUTPUT_COLUMNS);
            outputResults[0] = new EmbedResultSet40(conn, resultsToWrap, false, null, true);
        } catch (StandardException se) {
            throw PublicAPI.wrapStandardException(se);
        } catch (ExecutionException e) {
            throw PublicAPI.wrapStandardException(Exceptions.parseException(e.getCause()));
        }
    }

    /**
     * Builds the StatisticsOperation that scans one table's heap conglomerate and produces
     * statistics rows.
     * <p>
     * NOTE(review): sampling is forcibly disabled when the distributed processor is not SPARK
     * (no sample-stats support on the mem platform).
     */
    private static StatisticsOperation createCollectTableStatisticsOperation(TableDescriptor table,
                                                                             boolean useSample,
                                                                             double sampleFraction,
                                                                             boolean mergeStats,
                                                                             TxnView txn,
                                                                             EmbedConnection conn) throws StandardException, ExecutionException {
        long heapConglomerateId = table.getHeapConglomerateId();
        Activation activation = conn.getLanguageConnection().getLastActivation();
        DistributedDataSetProcessor dsp = EngineDriver.driver().processorFactory().distributedProcessor();
        ScanSetBuilder ssb = dsp.newScanSet(null,Long.toString(heapConglomerateId));
        ssb.tableVersion(table.getVersion());
        ScanSetBuilder scanSetBuilder = createTableScanner(ssb,conn,table,txn,mergeStats);
        String scope = getScopeName(table);
        // no sample stats support on mem platform
        if (dsp.getType() != DataSetProcessor.Type.SPARK) {
            useSample = false;
            sampleFraction = 0.0d;
        }
        List<ColumnDescriptor> colsToCollect = getCollectedColumns(conn, table);
        DataTypeDescriptor[] dtds = new DataTypeDescriptor[colsToCollect.size()];
        int index = 0;
        for (ColumnDescriptor descriptor : colsToCollect ) {
            dtds[index++] = descriptor.getType();
        }
        StatisticsOperation op = new StatisticsOperation(scanSetBuilder,useSample,sampleFraction,mergeStats,scope,activation,dtds);
        return op;
    }

    /** Display name used for the collect-stats job scope (shown in the job UI/logs). */
    private static final String getScopeName(TableDescriptor td) {
        return String.format(OperationContext.Scope.COLLECT_STATS.displayName(), td.getName());
    }

    /**
     * Creates a full-range scan (empty start/stop keys) over all row versions under the
     * given transaction.
     */
    private static DataScan createScan (TxnView txn) {
        DataScan scan=SIDriver.driver().getOperationFactory().newDataScan(txn);
scan.returnAllVersions(); //make sure that we read all versions of the data
        return scan.startKey(new byte[0]).stopKey(new byte[0]);
    }

    /**
     * Returns the format ids of the conglomerate backing {@code columnStatsConglomId}.
     */
    public static int[] getFormatIds(EmbedConnection conn, long columnStatsConglomId) throws StandardException{
        TransactionController transactionExecute = conn.getLanguageConnection().getTransactionExecute();
        SpliceConglomerate conglomerate = (SpliceConglomerate) ((SpliceTransactionManager) transactionExecute)
                .findConglomerate(columnStatsConglomId);
        return conglomerate.getFormat_ids();
    }

    /**
     * Configures the ScanSetBuilder that feeds the statistics operation: which columns are
     * decoded, how key columns map onto the output row, and the table's physical layout
     * (stored-as/location/compression for external tables).
     * <p>
     * NOTE(review): accessedColumns/rowDecodingMap are indexed by storage position while
     * allColumnLengths is indexed by logical position — these coincide only when storage and
     * logical positions agree; confirm for tables with dropped/altered columns.
     *
     * @param mergeStats when false, one split per region is requested so per-partition rows
     *                   survive (see oneSplitPerRegion(!mergeStats))
     */
    private static ScanSetBuilder createTableScanner(ScanSetBuilder builder,
                                                     EmbedConnection conn,
                                                     TableDescriptor table,
                                                     TxnView txn,
                                                     boolean mergeStats) throws StandardException{
        List<ColumnDescriptor> colsToCollect = getCollectedColumns(conn, table);
        ExecRow row = new ValueRow(colsToCollect.size());
        BitSet accessedColumns = new BitSet(table.getMaxStorageColumnID());
        int outputCol = 0;
        int[] columnPositionMap = new int[table.getNumberOfColumns()];
        Arrays.fill(columnPositionMap, -1);
        int[] allColumnLengths = new int[table.getMaxStorageColumnID()];
        for (ColumnDescriptor descriptor : colsToCollect) {
            accessedColumns.set(descriptor.getStoragePosition() - 1);
            row.setColumn(outputCol + 1, descriptor.getType().getNull());
            columnPositionMap[outputCol] = descriptor.getPosition();
            outputCol++;
            allColumnLengths[descriptor.getPosition() - 1] = descriptor.getType().getMaximumWidth();
        }
        // Build the decoding map: for each accessed (set) bit, record which output column the
        // decoded value lands in, plus its maximum width.
        int[] rowDecodingMap = new int[accessedColumns.length()];
        int[] fieldLengths = new int[accessedColumns.length()];
        Arrays.fill(rowDecodingMap, -1);
        outputCol = 0;
        for (int i = accessedColumns.nextSetBit(0); i >= 0; i = accessedColumns.nextSetBit(i + 1)) {
            rowDecodingMap[i] = outputCol;
            fieldLengths[outputCol] = allColumnLengths[i];
            outputCol++;
        }
        TransactionController transactionExecute = conn.getLanguageConnection().getTransactionExecute();
        SpliceConglomerate conglomerate = (SpliceConglomerate) ((SpliceTransactionManager) transactionExecute)
                .findConglomerate(table.getHeapConglomerateId());
        boolean[] keyColumnSortOrder = conglomerate.getAscDescInfo();
        int[] keyColumnEncodingOrder = conglomerate.getColumnOrdering();
        int[] formatIds = conglomerate.getFormat_ids();
        int[] keyColumnTypes = null;
        int[] keyDecodingMap = null;
        FormatableBitSet collectedKeyColumns = null;
        if (keyColumnEncodingOrder != null) {
            // Key columns are decoded from the row key, not the row body: move their mapping
            // from rowDecodingMap into keyDecodingMap.
            keyColumnTypes = new int[keyColumnEncodingOrder.length];
            keyDecodingMap = new int[keyColumnEncodingOrder.length];
            Arrays.fill(keyDecodingMap, -1);
            collectedKeyColumns = new FormatableBitSet(table.getNumberOfColumns());
            for (int i = 0; i < keyColumnEncodingOrder.length; i++) {
                int keyColumn = keyColumnEncodingOrder[i];
                keyColumnTypes[i] = formatIds[keyColumn];
                if (accessedColumns.get(keyColumn)) {
                    collectedKeyColumns.set(i);
                    keyDecodingMap[i] = rowDecodingMap[keyColumn];
                    rowDecodingMap[keyColumn] = -1;
                }
            }
        }
        DataScan scan = createScan(txn);
        return builder.transaction(txn)
                .metricFactory(Metrics.basicMetricFactory())
                .template(row)
                .scan(scan)
                .rowDecodingMap(rowDecodingMap)
                .keyColumnEncodingOrder(keyColumnEncodingOrder)
                .keyColumnSortOrder(keyColumnSortOrder)
                .keyColumnTypes(keyColumnTypes)
                .keyDecodingMap(keyDecodingMap)
                .baseTableConglomId(table.getHeapConglomerateId())
                .accessedKeyColumns(collectedKeyColumns)
                .tableVersion(table.getVersion())
                .fieldLengths(fieldLengths)
                .columnPositionMap(columnPositionMap)
                .oneSplitPerRegion(!mergeStats)
                .storedAs(table.getStoredAs())
                .location(table.getLocation())
                .compression(table.getCompression())
                .delimited(table.getDelimited())
                .lines(table.getLines())
                .escaped(table.getEscaped())
                .partitionByColumns(table.getPartitionBy())
                ;
    }

    /**
     * Wraps an iterable of rows in an opened IteratorNoPutResultSet bound to the connection's
     * last activation.
     */
    private static IteratorNoPutResultSet wrapResults(EmbedConnection conn, Iterable<ExecRow> rows,
                                                      ResultColumnDescriptor[] columnDescriptors) throws StandardException {
        Activation lastActivation = conn.getLanguageConnection().getLastActivation();
        IteratorNoPutResultSet resultsToWrap = new IteratorNoPutResultSet(rows, columnDescriptors,
lastActivation);
        resultsToWrap.openCore();
        return resultsToWrap;
    }

    /**
     * Builds an empty template row matching COLLECTED_STATS_OUTPUT_COLUMNS (all nulls).
     */
    private static ExecRow buildOutputTemplateRow() throws StandardException {
        ExecRow outputRow = new ValueRow(COLLECTED_STATS_OUTPUT_COLUMNS.length);
        DataValueDescriptor[] dvds = new DataValueDescriptor[COLLECTED_STATS_OUTPUT_COLUMNS.length];
        for (int i = 0; i < dvds.length; i++) {
            dvds[i] = COLLECTED_STATS_OUTPUT_COLUMNS[i].getType().getNull();
        }
        outputRow.setRowArray(dvds);
        return outputRow;
    }

    /**
     * Fetches the TableDescriptors of all statistics-eligible tables in a schema by querying
     * the dictionary (TABLEID_FROM_SCHEMA) and resolving each table id.
     * Views are excluded (they have no conglomerate descriptors).
     */
    public static List<TableDescriptor> getAllTableDescriptors(SchemaDescriptor sd, EmbedConnection conn) throws SQLException {
        try (PreparedStatement statement = conn.prepareStatement(TABLEID_FROM_SCHEMA)) {
            statement.setString(1, sd.getUUID().toString());
            try (ResultSet resultSet = statement.executeQuery()) {
                DataDictionary dd = conn.getLanguageConnection().getDataDictionary();
                UUIDFactory uuidFactory = dd.getUUIDFactory();
                List<TableDescriptor> tds = new LinkedList<>();
                while (resultSet.next()) {
                    com.splicemachine.db.catalog.UUID tableId = uuidFactory.recreateUUID(resultSet.getString(1));
                    TableDescriptor tableDescriptor = dd.getTableDescriptor(tableId);
                    /*
                     * We need to filter out views from the TableDescriptor list. Views
                     * are special cases where the number of conglomerate descriptors is 0. We
                     * don't collect statistics for those views
                     */
                    if (tableDescriptor != null && !tableDescriptor.getConglomerateDescriptorList().isEmpty()) {
                        tds.add(tableDescriptor);
                    }
                }
                return tds;
            }
        } catch (StandardException e) {
            throw PublicAPI.wrapStandardException(e);
        }
    }

    // Orders column descriptors by logical position. NOTE(review): subtraction is safe here
    // only because column positions are small positive ints; Integer.compare would be the
    // overflow-proof form.
    private static final Comparator<ColumnDescriptor> order = new Comparator<ColumnDescriptor>() {
        @Override
        public int compare(ColumnDescriptor o1, ColumnDescriptor o2) {
            return o1.getPosition() - o2.getPosition();
        }
    };

    /**
     * Determines which columns of {@code td} statistics should be collected for:
     * all stats-enabled columns (or only index columns when COLLECT_INDEX_STATS_ONLY is set),
     * plus every index key column and every primary-key column, sorted by position.
     */
    private static List<ColumnDescriptor> getCollectedColumns(EmbedConnection conn, TableDescriptor td) throws StandardException {
        ColumnDescriptorList columnDescriptorList = td.getColumnDescriptorList();
        List<ColumnDescriptor> toCollect = new ArrayList<>(columnDescriptorList.size());

        /* check the default collect stats behavior, whether to collect stats on all columns or just index columns */
        String collectStatsMode = PropertyUtil.getServiceProperty(conn.getLanguageConnection().getTransactionCompile(),
                Property.COLLECT_INDEX_STATS_ONLY);
        boolean collectIndexStatsOnly = Boolean.valueOf(collectStatsMode);

        boolean[] indexColumns = new boolean[columnDescriptorList.size()];
        IndexLister indexLister = td.getIndexLister();
        if (collectIndexStatsOnly) {
            // get all other index columns
            if (indexLister != null) {
                IndexRowGenerator[] indexRowGenerators = indexLister.getIndexRowGenerators();
                for (IndexRowGenerator irg : indexRowGenerators) {
                    int[] keyColumns = irg.getIndexDescriptor().baseColumnPositions();
                    for (int keyColumn : keyColumns) {
                        indexColumns[keyColumn - 1] = true;
                    }
                }
            }
        }

        /*
         * Get all the enabled statistics columns
         */
        for (ColumnDescriptor columnDescriptor : columnDescriptorList) {
            if (!collectIndexStatsOnly || indexColumns[columnDescriptor.getPosition()-1]) {
                if (columnDescriptor.collectStatistics())
                    toCollect.add(columnDescriptor);
            }
        }
        /*
         * Add in any disabled key columns.
         *
         * We want to collect for all key columns always, because they are very important when
         * comparing index columns.
By default, we turn them on when possible, but even if they are disabled * for some reason, we should still collect them. Of course, we should also not be able to disable * keyed columns, but that's a to-do for now. */ if (indexLister != null) { IndexRowGenerator[] distinctIndexRowGenerators = indexLister.getDistinctIndexRowGenerators(); for (IndexRowGenerator irg : distinctIndexRowGenerators) { int[] keyColumns = irg.getIndexDescriptor().baseColumnPositions(); for (int keyColumn : keyColumns) { for (ColumnDescriptor cd : columnDescriptorList) { if (cd.getPosition() == keyColumn) { if (!toCollect.contains(cd)) { toCollect.add(cd); } break; } } } } } // we should always include primary key if it exists ReferencedKeyConstraintDescriptor keyDescriptor = td.getPrimaryKey(); if (keyDescriptor != null) { int[] pkColumns = keyDescriptor.getReferencedColumns(); for (int keyColumn : pkColumns) { for (ColumnDescriptor cd : columnDescriptorList) { if (cd.getPosition() == keyColumn) { if (!toCollect.contains(cd)) { toCollect.add(cd); } break; } } } } Collections.sort(toCollect, order); //sort the columns into adjacent position order return toCollect; } private static void ensureNotKeyed(ColumnDescriptor descriptor, TableDescriptor td) throws StandardException { ConglomerateDescriptor heapConglom = td.getConglomerateDescriptor(td.getHeapConglomerateId()); IndexRowGenerator pkDescriptor = heapConglom.getIndexDescriptor(); if (pkDescriptor != null && pkDescriptor.getIndexDescriptor() != null) { for (int pkCol : pkDescriptor.baseColumnPositions()) { if (pkCol == descriptor.getPosition()) { throw ErrorState.LANG_DISABLE_STATS_FOR_KEYED_COLUMN.newException(descriptor.getColumnName()); } } } IndexLister indexLister = td.getIndexLister(); if (indexLister != null) { for (IndexRowGenerator irg : indexLister.getIndexRowGenerators()) { if (irg.getIndexDescriptor() == null) continue; for (int col : irg.baseColumnPositions()) { if (col == descriptor.getPosition()) throw 
ErrorState.LANG_DISABLE_STATS_FOR_KEYED_COLUMN.newException(descriptor.getColumnName());
                }
            }
        }
    }

    /**
     * Builds a SYSTABLESTATISTICS row with a fresh timestamp and staleness/in-progress
     * cleared.
     *
     * @param partitionId partition identifier, or "-All-" for merged statistics
     */
    public static ExecRow generateRowFromStats(long conglomId,
                                               String partitionId,
                                               long rowCount,
                                               long partitionSize,
                                               int meanRowWidth,
                                               long numberOfPartitions,
                                               int statsType,
                                               double sampleFraction) throws StandardException {
        ExecRow row = new ValueRow(SYSTABLESTATISTICSRowFactory.SYSTABLESTATISTICS_COLUMN_COUNT);
        row.setColumn(SYSTABLESTATISTICSRowFactory.CONGLOMID,new SQLLongint(conglomId));
        row.setColumn(SYSTABLESTATISTICSRowFactory.PARTITIONID,new SQLVarchar(partitionId));
        row.setColumn(SYSTABLESTATISTICSRowFactory.TIMESTAMP,new SQLTimestamp(new Timestamp(System.currentTimeMillis())));
        row.setColumn(SYSTABLESTATISTICSRowFactory.STALENESS,new SQLBoolean(false));
        row.setColumn(SYSTABLESTATISTICSRowFactory.INPROGRESS,new SQLBoolean(false));
        row.setColumn(SYSTABLESTATISTICSRowFactory.ROWCOUNT,new SQLLongint(rowCount));
        row.setColumn(SYSTABLESTATISTICSRowFactory.PARTITION_SIZE,new SQLLongint(partitionSize));
        row.setColumn(SYSTABLESTATISTICSRowFactory.MEANROWWIDTH,new SQLInteger(meanRowWidth));
        row.setColumn(SYSTABLESTATISTICSRowFactory.NUMBEROFPARTITIONS,new SQLLongint(numberOfPartitions));
        row.setColumn(SYSTABLESTATISTICSRowFactory.STATSTYPE,new SQLInteger(statsType));
        row.setColumn(SYSTABLESTATISTICSRowFactory.SAMPLEFRACTION, new SQLDouble(sampleFraction));
        return row;
    }

    /**
     * Builds a SYSTABLESTATISTICS row with explicit timestamp, staleness and in-progress
     * values (used when reproducing existing statistics rather than recording new ones).
     */
    public static ExecRow generateRowFromStats(long conglomId,
                                               String partitionId,
                                               long timestamp,
                                               boolean isStale,
                                               boolean inProgress,
                                               long rowCount,
                                               long partitionSize,
                                               int meanRowWidth,
                                               long numberOfPartitions,
                                               int statsType,
                                               double sampleFraction) throws StandardException {
        ExecRow row = new ValueRow(SYSTABLESTATISTICSRowFactory.SYSTABLESTATISTICS_COLUMN_COUNT);
        row.setColumn(SYSTABLESTATISTICSRowFactory.CONGLOMID,new SQLLongint(conglomId));
        row.setColumn(SYSTABLESTATISTICSRowFactory.PARTITIONID,new SQLVarchar(partitionId));
        row.setColumn(SYSTABLESTATISTICSRowFactory.TIMESTAMP,new SQLTimestamp(new Timestamp(timestamp)));
        row.setColumn(SYSTABLESTATISTICSRowFactory.STALENESS,new SQLBoolean(isStale));
        row.setColumn(SYSTABLESTATISTICSRowFactory.INPROGRESS,new SQLBoolean(inProgress));
        row.setColumn(SYSTABLESTATISTICSRowFactory.ROWCOUNT,new SQLLongint(rowCount));
        row.setColumn(SYSTABLESTATISTICSRowFactory.PARTITION_SIZE,new SQLLongint(partitionSize));
        row.setColumn(SYSTABLESTATISTICSRowFactory.MEANROWWIDTH,new SQLInteger(meanRowWidth));
        row.setColumn(SYSTABLESTATISTICSRowFactory.NUMBEROFPARTITIONS,new SQLLongint(numberOfPartitions));
        row.setColumn(SYSTABLESTATISTICSRowFactory.STATSTYPE,new SQLInteger(statsType));
        row.setColumn(SYSTABLESTATISTICSRowFactory.SAMPLEFRACTION, new SQLDouble(sampleFraction));
        return row;
    }

    /**
     * Builds a SYSCOLUMNSTATISTICS row holding serialized column statistics for one column
     * of one partition ("-All-" for merged).
     */
    public static ExecRow generateRowFromStats(long conglomId, String regionId, int columnId, ItemStatistics columnStatistics) throws StandardException {
        ExecRow row = new ValueRow(SYSCOLUMNSTATISTICSRowFactory.SYSCOLUMNSTATISTICS_COLUMN_COUNT);
        row.setColumn(SYSCOLUMNSTATISTICSRowFactory.CONGLOMID,new SQLLongint(conglomId));
        row.setColumn(SYSCOLUMNSTATISTICSRowFactory.PARTITIONID,new SQLVarchar(regionId));
        row.setColumn(SYSCOLUMNSTATISTICSRowFactory.COLUMNID,new SQLInteger(columnId));
        row.setColumn(SYSCOLUMNSTATISTICSRowFactory.DATA, new UserType(columnStatistics));
        return row;
    }

    /**
     * Formats one output row matching COLUMN_STATS_OUTPUT_COLUMNS.
     */
    public static ExecRow generateOutputRowForColumnStats(String schemaName,
                                                          String tableName,
                                                          String columnName,
                                                          String partitionName,
                                                          long nullCount,
                                                          long totalCount,
                                                          long cardinality) throws StandardException {
        ExecRow row = new ValueRow(7);
        row.setColumn(1,new SQLVarchar(schemaName));
        row.setColumn(2,new SQLVarchar(tableName));
        row.setColumn(3,new SQLVarchar(columnName));
        row.setColumn(4,new SQLVarchar(partitionName));
        row.setColumn(5,new SQLLongint(nullCount));
        row.setColumn(6,new SQLLongint(totalCount));
        row.setColumn(7,new SQLLongint(cardinality));
        return row;
    }

    /**
     * Formats one output row matching COLLECTED_STATS_OUTPUT_COLUMNS from a
     * SYSTABLESTATISTICS partition row.
     */
    public static ExecRow generateOutputRow(String schemaName, String tableName, ExecRow partitionRow) throws
StandardException {
        ExecRow row = new ValueRow(8);
        row.setColumn(1,new SQLVarchar(schemaName));
        row.setColumn(2,new SQLVarchar(tableName));
        row.setColumn(3,partitionRow.getColumn(SYSTABLESTATISTICSRowFactory.PARTITIONID));
        row.setColumn(4,partitionRow.getColumn(SYSTABLESTATISTICSRowFactory.ROWCOUNT));
        row.setColumn(5,partitionRow.getColumn(SYSTABLESTATISTICSRowFactory.PARTITION_SIZE));
        row.setColumn(6,partitionRow.getColumn(SYSTABLESTATISTICSRowFactory.NUMBEROFPARTITIONS));
        row.setColumn(7,partitionRow.getColumn(SYSTABLESTATISTICSRowFactory.STATSTYPE));
        row.setColumn(8,partitionRow.getColumn(SYSTABLESTATISTICSRowFactory.SAMPLEFRACTION));
        return row;
    }

    /**
     * Lazily drives the collection operations and converts their raw output into display rows,
     * persisting table and column statistics into the data dictionary as a side effect.
     * <p>
     * At most {@code maximumConcurrent} operations are open at once: the first window is opened
     * eagerly, and each time an operation is consumed the next one (i + maximumConcurrent) is
     * opened, keeping the window full.
     * <p>
     * In the merged branch, 2-column rows are column statistics (columnId + serialized
     * ColumnStatisticsImpl) and wider rows are per-partition table statistics which are
     * accumulated and written as a single merged "-All-" row.
     * <p>
     * NOTE(review): the anonymous iterators are raw {@code Iterator} and their {@code next()}
     * does not throw NoSuchElementException when exhausted — callers must respect hasNext().
     *
     * @param mergeStats true to merge per-partition statistics into one row per table
     * @return an Iterable of display rows (see generateOutputRow); raw type preserved from the
     *         original signature
     */
    public static Iterable displayTableStatistics(ArrayList<StatisticsOperation> collectOps,
                                                  boolean mergeStats,
                                                  final DataDictionary dataDictionary,
                                                  final TransactionController tc,
                                                  final HashMap<Long, Pair<String, String>> displayPair) throws StandardException {
        // Schedule the first <maximumConcurrent> jobs
        int maximumConcurrent = EngineDriver.driver().getConfiguration().getCollectSchemaStatisticsMaximumConcurrent();
        for (int i = 0; i < maximumConcurrent && i < collectOps.size(); ++i) {
            collectOps.get(i).openCore();
        }
        // Handle the next jobs as we go: (One job returns -> one job can start), ensuring <maximumConcurrent> jobs at all time
        Iterable<StatisticsOperation> movingExecutionWindow = () -> new Iterator<StatisticsOperation>() {
            int i = 0;

            @Override
            public boolean hasNext() {
                return i < collectOps.size();
            }

            @Override
            public StatisticsOperation next() {
                // Open the operation that enters the window as this one leaves it.
                if ((long)i + (long)maximumConcurrent < (long)collectOps.size()) {
                    try {
                        collectOps.get(i + maximumConcurrent).openCore();
                    } catch (StandardException e) {
                        throw new RuntimeException(e);
                    }
                }
                return collectOps.get(i++);
            }
        };
        if (mergeStats) {
            return FluentIterable.from(movingExecutionWindow).transformAndConcat(new Function<StatisticsOperation, Iterable<ExecRow>>() {
                @Nullable
                @Override
                public Iterable<ExecRow> apply(@Nullable StatisticsOperation input) {
                    try {
                        // We have to create a new savepoint because we already returned from the opening of the result set
                        // and derby released the prior savepoint for us. If we don't create one we'd end up inserting the
                        // rows with the user transaction, and that's problematic especially if we had to remove existing
                        // statistics, since those deletes would mask these new inserts.
                        tc.setSavePoint("statistics", null);
                        tc.elevate("statistics");
                        final Iterator iterator = new Iterator<ExecRow>() {
                            private ExecRow nextRow;
                            private boolean fetched = false;
                            // data structures to accumulate the partition stats
                            private long conglomId = 0;
                            private long rowCount = 0L;
                            private long totalSize = 0;
                            private int avgRowWidth = 0;
                            private long numberOfPartitions = 0;
                            private int statsType = SYSTABLESTATISTICSRowFactory.REGULAR_NONMERGED_STATS;
                            private double sampleFraction = 0.0d;

                            @Override
                            @SuppressFBWarnings(value = "REC_CATCH_EXCEPTION", justification = "SpotBugs is confused, we rethrow the exception")
                            public boolean hasNext() {
                                try {
                                    if (!fetched) {
                                        // Drain the operation: persist column-stats rows
                                        // immediately, fold table-stats rows into the
                                        // accumulators; next() emits the merged result.
                                        nextRow = input.getNextRowCore();
                                        while (nextRow != null) {
                                            fetched = true;
                                            if (nextRow.nColumns() == 2) {
                                                int columnId = nextRow.getColumn(1).getInt();
                                                ByteArrayInputStream bais = new ByteArrayInputStream(nextRow.getColumn(2).getBytes());
                                                ObjectInputStream ois = new ObjectInputStream(bais);
                                                // compose the entry for a given column
                                                ExecRow statsRow = StatisticsAdmin.generateRowFromStats(conglomId, "-All-", columnId, (ColumnStatisticsImpl) ois.readObject());
                                                dataDictionary.addColumnStatistics(statsRow, tc);
                                                bais.close();
                                            } else {
                                                // process tablestats row
                                                conglomId = nextRow.getColumn(SYSCOLUMNSTATISTICSRowFactory.CONGLOMID).getLong();
                                                long partitionRowCount = nextRow.getColumn(SYSTABLESTATISTICSRowFactory.ROWCOUNT).getLong();
                                                rowCount = partitionRowCount;
                                                totalSize = nextRow.getColumn(SYSTABLESTATISTICSRowFactory.PARTITION_SIZE).getLong();
                                                avgRowWidth = nextRow.getColumn(SYSTABLESTATISTICSRowFactory.MEANROWWIDTH).getInt();
                                                numberOfPartitions = nextRow.getColumn(SYSTABLESTATISTICSRowFactory.NUMBEROFPARTITIONS).getLong();
                                                statsType = nextRow.getColumn(SYSTABLESTATISTICSRowFactory.STATSTYPE).getInt();
                                                sampleFraction = nextRow.getColumn(SYSTABLESTATISTICSRowFactory.SAMPLEFRACTION).getDouble();
                                            }
                                            nextRow = input.getNextRowCore();
                                        }
                                    }
                                    // Nothing produced at all: release the savepoint opened in apply().
                                    if (!fetched)
                                        tc.releaseSavePoint("statistics", null);
                                    return fetched;
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                            }

                            @Override
                            public ExecRow next() {
                                try {
                                    fetched = false;
                                    // insert rows to dictionary tables, and return
                                    ExecRow statsRow;
                                    //change statsType to 2: merged full stats or 3: merged sample stats
                                    if (statsType == SYSTABLESTATISTICSRowFactory.REGULAR_NONMERGED_STATS)
                                        statsType = SYSTABLESTATISTICSRowFactory.REGULAR_MERGED_STATS;
                                    else if (statsType == SYSTABLESTATISTICSRowFactory.SAMPLE_NONMERGED_STATS)
                                        statsType = SYSTABLESTATISTICSRowFactory.SAMPLE_MERGED_STATS;
                                    statsRow = StatisticsAdmin.generateRowFromStats(conglomId, "-All-", rowCount, totalSize, avgRowWidth, numberOfPartitions, statsType, sampleFraction);
                                    dataDictionary.addTableStatistics(statsRow, tc);
                                    Pair<String, String> pair = displayPair.get(conglomId);
                                    return generateOutputRow(pair.getFirst(), pair.getSecond(), statsRow);
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                            }
                        };
                        return () -> iterator;
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            });
        } else {
            return FluentIterable.from(movingExecutionWindow).transformAndConcat(new Function<StatisticsOperation, Iterable<ExecRow>>() {
                @Nullable
                @Override
                public Iterable<ExecRow> apply(@Nullable StatisticsOperation input) {
                    try {
                        final Iterator iterator = new Iterator<ExecRow>() {
                            private ExecRow nextRow;
                            private boolean fetched = false;

                            @Override
                            public boolean hasNext() {
                                try {
                                    if (!fetched) {
                                        // Column-stats rows (full SYSCOLUMNSTATISTICS width)
                                        // are persisted as they stream by; the first row of a
                                        // different width is a table-stats row emitted by next().
                                        nextRow = input.getNextRowCore();
                                        while (nextRow != null && nextRow.nColumns() == SYSCOLUMNSTATISTICSRowFactory.SYSCOLUMNSTATISTICS_COLUMN_COUNT) {
                                            dataDictionary.addColumnStatistics(nextRow, tc);
                                            nextRow = input.getNextRowCore();
                                        }
                                        fetched = true;
                                    }
                                    return nextRow != null;
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                            }

                            @Override
                            public ExecRow next() {
                                try {
                                    fetched = false;
                                    dataDictionary.addTableStatistics(nextRow, tc);
                                    Pair<String,String> pair = displayPair.get(nextRow.getColumn(SYSTABLESTATISTICSRowFactory.CONGLOMID).getLong());
                                    return generateOutputRow(pair.getFirst(),pair.getSecond(),nextRow);
                                } catch (Exception e) {
                                    throw new RuntimeException(e);
                                }
                            }
                        };
                        return () -> iterator;
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                }
            });
        }
    }

    /**
     * Deletes stored partition statistics for every conglomerate (heap + indexes) of one table.
     */
    private static void dropTableStatistics(TableDescriptor td, DataDictionary dd, TransactionController tc) throws StandardException {
        for (ConglomerateDescriptor cd: td.getConglomerateDescriptorList()) {
            if (LOG.isDebugEnabled())
                SpliceLogUtils.debug(LOG,"Dropping conglomerate statistics [%d]",cd.getConglomerateNumber());
            dd.deletePartitionStatistics(cd.getConglomerateNumber(),tc);
        }
    }

    /**
     * Deletes stored statistics for every table in the list.
     */
    private static void dropTableStatistics(List<TableDescriptor> tds, DataDictionary dd, TransactionController tc) throws StandardException {
        for (TableDescriptor td: tds) {
            if (LOG.isDebugEnabled())
                SpliceLogUtils.debug(LOG,"Dropping Table statistics [%s]",td.getName());
            dropTableStatistics(td,dd,tc);
        }
    }

    /**
     * Sums the row counts of all partition statistics for a conglomerate, scaling up by the
     * sample fraction when the stored statistics are sample-based.
     *
     * @return the (possibly extrapolated) total row count, or -1 when no statistics exist
     */
    private static long getRowCountFromTableStats(long conglomerateId,
                                                  DataDictionary dd,
                                                  TransactionController tc) throws StandardException {
        long totalCount = 0;
        List<PartitionStatisticsDescriptor> partitionStatsDescriptors = dd.getPartitionStatistics(conglomerateId, tc);

        if (partitionStatsDescriptors.isEmpty())
            return -1;

        double sampleFraction = 0.0d;
        int statsType = partitionStatsDescriptors.get(0).getStatsType();
        boolean isSampleStats = statsType == SYSTABLESTATISTICSRowFactory.SAMPLE_NONMERGED_STATS || statsType == SYSTABLESTATISTICSRowFactory.SAMPLE_MERGED_STATS;
        if (isSampleStats)
            sampleFraction = partitionStatsDescriptors.get(0).getSampleFraction();

        for (PartitionStatisticsDescriptor item: partitionStatsDescriptors) {
            totalCount += item.getRowCount();
        }
        // Extrapolate the sampled count back to the full table size.
        if (isSampleStats)
            totalCount = (long)((double)totalCount/sampleFraction);

        return totalCount;
    }

    /**
     * Deletes stored statistics for a single column of a conglomerate.
     */
    private static void dropColumnStatistics(long conglomerateId, int columnId,
                                             DataDictionary dd,
                                             TransactionController tc) throws StandardException {
        dd.deleteColumnStatisticsByColumnId(conglomerateId, columnId, tc);
    }
}
package pitt.search.semanticvectors.infer; import java.util.ArrayList; import java.util.Enumeration; import java.util.Hashtable; import pitt.search.semanticvectors.FlagConfig; import pitt.search.semanticvectors.ObjectVector; import pitt.search.semanticvectors.VectorStoreRAM; import pitt.search.semanticvectors.hashing.Bobcat; import pitt.search.semanticvectors.vectors.ComplexVector; import pitt.search.semanticvectors.vectors.ComplexVector.Mode; import pitt.search.semanticvectors.vectors.Vector; import pitt.search.semanticvectors.vectors.VectorFactory; public class NumberRepresentation { ArrayList<ObjectVector> _numbers = new ArrayList<ObjectVector>(); Hashtable<String,VectorStoreRAM> _pregenerated = new Hashtable<String, VectorStoreRAM>(); FlagConfig flagConfig = null; int _iDimension = 1000; String startString, endString; java.util.Random random; Vector vL, vR; /** * @param args */ public static void main(String[] args) { FlagConfig flagConfig; try { flagConfig = FlagConfig.getFlagConfig(args); args = flagConfig.remainingArgs; } catch (IllegalArgumentException e) { System.err.println(e.getMessage()); throw e; } NumberRepresentation NR = new NumberRepresentation(flagConfig); VectorStoreRAM VSR = NR.getNumberVectors(1,6); Enumeration<ObjectVector> VEN = VSR.getAllVectors(); while (VEN.hasMoreElements()) System.out.println(VEN.nextElement().getObject()); } /** * @param iStart * @param iEnd */ public NumberRepresentation(FlagConfig flagConfig) { this.flagConfig = flagConfig; if (flagConfig.vectortype().equals("complex")) ComplexVector.setDominantMode(Mode.CARTESIAN); this._iDimension = flagConfig.dimension(); random = new java.util.Random(); // generate a vector for the lowest number and one for the highest and make sure they have no overlap startString = "*START*"; random.setSeed(Bobcat.asLong(startString)); vL = VectorFactory.generateRandomVector(flagConfig.vectortype(), _iDimension, flagConfig.seedlength(), random ); endString = "*END*"; 
random.setSeed(Bobcat.asLong(endString)); vR = VectorFactory.generateRandomVector(flagConfig.vectortype(), _iDimension, flagConfig.seedlength(), random ); while ( Math.abs(vL.measureOverlap( vR )) > 0.01d ) { System.out.println(vL.measureOverlap(vR)); endString += "*"; random.setSeed(Bobcat.asLong(endString)); vR = VectorFactory.generateRandomVector(flagConfig.vectortype(), _iDimension, flagConfig.seedlength(), random ); } //System.exit( 0 ); } public VectorStoreRAM getNumberVectors(int iStart, int iEnd) { if (_pregenerated.containsKey(iStart+":"+iEnd)) return _pregenerated.get(iStart+":"+iEnd); _numbers.clear(); int original_iEnd = iEnd; if ((iEnd-iStart) %2 !=0) iEnd++; for ( int i = iStart; i <= iEnd+1; ++i ) _numbers.add( null ); // add them to an arraylist ObjectVector ovL = new ObjectVector(Integer.toString(iStart), vL); ObjectVector ovR = new ObjectVector(Integer.toString(iEnd), vR); _numbers.set( iStart, ovL ); _numbers.set( iEnd, ovR ); // recursively fill the arraylist with number vectors generateNumbers( iStart, iEnd ); /** for ( int i = iStart; i <= iEnd; ++i ) System.out.println( String.format( "overlap % 3d to %3d: % 1.4f", iStart, i, vL.measureOverlap(_numbers.get( i ).getVector() ) ) ); System.out.println(); **/ VectorStoreRAM theVSR = new VectorStoreRAM(flagConfig); for (int q=iStart; q <= iEnd; q++) { theVSR.putVector((original_iEnd)+":"+q, _numbers.get(q).getVector()); } if (iEnd > original_iEnd) //even number of vectors theVSR.removeVector((original_iEnd)+":"+iEnd); _pregenerated.put(iStart+":"+iEnd, theVSR); return theVSR; } /** * insert new number at (iLeft + iRight) / 2 and continue recursively * * @param iLeft * @param iRight */ private void generateNumbers(int iLeft, int iRight) { if ( Math.abs( iLeft - iRight ) <= 1 ) return; Vector m = VectorFactory.createZeroVector(flagConfig.vectortype(), _iDimension); m.superpose(_numbers.get( iLeft ).getVector(), 1d, null); m.superpose(_numbers.get( iRight ).getVector(), 1d, null); m.normalize(); int 
iMiddle = ( iLeft + iRight ) / 2; _numbers.set( iMiddle, new ObjectVector(Integer.toString(iMiddle), m) ); generateNumbers( iLeft, iMiddle ); generateNumbers( iMiddle, iRight ); } }
package org.owasp.esapi;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Simple wrapper implementation of {@link SecurityConfiguration}.
 * This allows for easy subclassing and property fixups for unit tests:
 * every method forwards to the wrapped configuration, so subclasses only
 * override the properties they need to change.
 *
 * Note that some compilers have issues with Override
 * annotations on methods implementing an interface method.
 * Technically Override on such methods is a 1.6 feature, so
 * they are commented out here.
 */
public class SecurityConfigurationWrapper implements SecurityConfiguration {
    // The delegate that all calls are forwarded to.
    private SecurityConfiguration wrapped;

    /**
     * Constructor wrapping the given configuration.
     * @param wrapped The configuration to wrap.
     */
    public SecurityConfigurationWrapper(SecurityConfiguration wrapped) {
        this.wrapped = wrapped;
    }

    /**
     * Access the wrapped configuration.
     * @return The wrapped configuration.
     */
    public SecurityConfiguration getWrappedSecurityConfiguration() {
        return wrapped;
    }

    /** {@inheritDoc} */
    // @Override
    public String getApplicationName() { return wrapped.getApplicationName(); }

    /** {@inheritDoc} */
    // @Override
    public String getLogImplementation() { return wrapped.getLogImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getAuthenticationImplementation() { return wrapped.getAuthenticationImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getEncoderImplementation() { return wrapped.getEncoderImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getAccessControlImplementation() { return wrapped.getAccessControlImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getIntrusionDetectionImplementation() { return wrapped.getIntrusionDetectionImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getRandomizerImplementation() { return wrapped.getRandomizerImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getEncryptionImplementation() { return wrapped.getEncryptionImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getValidationImplementation() { return wrapped.getValidationImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public Pattern getValidationPattern( String typeName ) { return wrapped.getValidationPattern(typeName); }

    /** {@inheritDoc} */
    // @Override
    public String getExecutorImplementation() { return wrapped.getExecutorImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getHTTPUtilitiesImplementation() { return wrapped.getHTTPUtilitiesImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public byte[] getMasterKey() { return wrapped.getMasterKey(); }

    /** {@inheritDoc} */
    // @Override
    public File getUploadDirectory() { return wrapped.getUploadDirectory(); }

    /** {@inheritDoc} */
    // @Override
    public File getUploadTempDirectory() { return wrapped.getUploadTempDirectory(); }

    /** {@inheritDoc} */
    // @Override
    public int getEncryptionKeyLength() { return wrapped.getEncryptionKeyLength(); }

    /** {@inheritDoc} */
    // @Override
    public byte[] getMasterSalt() { return wrapped.getMasterSalt(); }

    /** {@inheritDoc} */
    // @Override
    public List getAllowedExecutables() { return wrapped.getAllowedExecutables(); }

    /** {@inheritDoc} */
    // @Override
    public List getAllowedFileExtensions() { return wrapped.getAllowedFileExtensions(); }

    /** {@inheritDoc} */
    // @Override
    public int getAllowedFileUploadSize() { return wrapped.getAllowedFileUploadSize(); }

    /** {@inheritDoc} */
    // @Override
    public String getPasswordParameterName() { return wrapped.getPasswordParameterName(); }

    /** {@inheritDoc} */
    // @Override
    public String getUsernameParameterName() { return wrapped.getUsernameParameterName(); }

    /** {@inheritDoc} */
    // @Override
    public String getEncryptionAlgorithm() { return wrapped.getEncryptionAlgorithm(); }

    /** {@inheritDoc} */
    // @Override
    public String getCipherTransformation() { return wrapped.getCipherTransformation(); }

    /** {@inheritDoc} */
    // @Override
    public String setCipherTransformation(String cipherXform) { return wrapped.setCipherTransformation(cipherXform); }

    /** {@inheritDoc} */
    // @Override
    public boolean useMACforCipherText() { return wrapped.useMACforCipherText(); }

    /** {@inheritDoc} */
    // @Override
    public boolean overwritePlainText() { return wrapped.overwritePlainText(); }

    /** {@inheritDoc} */
    // @Override
    public String getIVType() { return wrapped.getIVType(); }

    /** {@inheritDoc} */
    // @Override
    public String getFixedIV() { return wrapped.getFixedIV(); }

    /** {@inheritDoc} */
    // @Override
    public String getHashAlgorithm() { return wrapped.getHashAlgorithm(); }

    /** {@inheritDoc} */
    // @Override
    public int getHashIterations() { return wrapped.getHashIterations(); }

    /** {@inheritDoc} */
    // @Override
    public String getCharacterEncoding() { return wrapped.getCharacterEncoding(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getAllowMultipleEncoding() { return wrapped.getAllowMultipleEncoding(); }

    /** {@inheritDoc} */
    // @Override
    public List getDefaultCanonicalizationCodecs() { return wrapped.getDefaultCanonicalizationCodecs(); }

    /** {@inheritDoc} */
    // @Override
    public String getDigitalSignatureAlgorithm() { return wrapped.getDigitalSignatureAlgorithm(); }

    /** {@inheritDoc} */
    // @Override
    public int getDigitalSignatureKeyLength() { return wrapped.getDigitalSignatureKeyLength(); }

    /** {@inheritDoc} */
    // @Override
    public String getRandomAlgorithm() { return wrapped.getRandomAlgorithm(); }

    /** {@inheritDoc} */
    // @Override
    public int getAllowedLoginAttempts() { return wrapped.getAllowedLoginAttempts(); }

    /** {@inheritDoc} */
    // @Override
    public int getMaxOldPasswordHashes() { return wrapped.getMaxOldPasswordHashes(); }

    /** {@inheritDoc} */
    // @Override
    public Threshold getQuota(String eventName) { return wrapped.getQuota(eventName); }

    /** {@inheritDoc} */
    // @Override
    public File getResourceFile( String filename ) { return wrapped.getResourceFile(filename); }

    /** {@inheritDoc} */
    // @Override
    public boolean getForceHttpOnlySession() { return wrapped.getForceHttpOnlySession(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getForceSecureSession() { return wrapped.getForceSecureSession(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getForceHttpOnlyCookies() { return wrapped.getForceHttpOnlyCookies(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getForceSecureCookies() { return wrapped.getForceSecureCookies(); }

    /** {@inheritDoc} */
    // @Override
    public InputStream getResourceStream( String filename ) throws IOException { return wrapped.getResourceStream(filename); }

    /** {@inheritDoc} */
    // @Override
    public void setResourceDirectory(String dir) { wrapped.setResourceDirectory(dir); }

    /** {@inheritDoc} */
    // @Override
    public String getResponseContentType() { return wrapped.getResponseContentType(); }

    /** {@inheritDoc} */
    // @Override
    public long getRememberTokenDuration() { return wrapped.getRememberTokenDuration(); }

    /** {@inheritDoc} */
    // @Override
    public int getSessionIdleTimeoutLength() { return wrapped.getSessionIdleTimeoutLength(); }

    /** {@inheritDoc} */
    // @Override
    public int getSessionAbsoluteTimeoutLength() { return wrapped.getSessionAbsoluteTimeoutLength(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getLogEncodingRequired() { return wrapped.getLogEncodingRequired(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getLogApplicationName() { return wrapped.getLogApplicationName(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getLogServerIP() { return wrapped.getLogServerIP(); }

    /** {@inheritDoc} */
    // @Override
    public int getLogLevel() { return wrapped.getLogLevel(); }

    /** {@inheritDoc} */
    // @Override
    public String getLogFileName() { return wrapped.getLogFileName(); }

    /** {@inheritDoc} */
    // @Override
    public int getMaxLogFileSize() { return wrapped.getMaxLogFileSize(); }

    /** {@inheritDoc} */
    // @Override
    public File getWorkingDirectory() { return wrapped.getWorkingDirectory(); }

    /** {@inheritDoc} */
    // @Override
    public List<String> getAdditionalAllowedCipherModes() { return wrapped.getAdditionalAllowedCipherModes(); }

    /** {@inheritDoc} */
    // @Override
    public List<String> getCombinedCipherModes() { return wrapped.getCombinedCipherModes(); }

    /** {@inheritDoc} */
    public String getPreferredJCEProvider() { return wrapped.getPreferredJCEProvider(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getDisableIntrusionDetection() { return wrapped.getDisableIntrusionDetection(); }
}
package org.owasp.esapi;

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.List;
import java.util.regex.Pattern;

/**
 * Simple wrapper implementation of {@link SecurityConfiguration}.
 * This allows for easy subclassing and property fixups for unit tests:
 * every method forwards to the wrapped configuration, so subclasses only
 * override the properties they need to change.
 *
 * Note that some compilers have issues with Override
 * annotations on methods implementing an interface method.
 * Technically Override on such methods is a 1.6 feature, so
 * they are commented out here.
 */
public class SecurityConfigurationWrapper implements SecurityConfiguration {
    // The delegate that all calls are forwarded to.
    private SecurityConfiguration wrapped;

    /**
     * Constructor wrapping the given configuration.
     * @param wrapped The configuration to wrap.
     */
    public SecurityConfigurationWrapper(SecurityConfiguration wrapped) {
        this.wrapped = wrapped;
    }

    /**
     * Access the wrapped configuration.
     * @return The wrapped configuration.
     */
    public SecurityConfiguration getWrappedSecurityConfiguration() {
        return wrapped;
    }

    /** {@inheritDoc} */
    // @Override
    public String getApplicationName() { return wrapped.getApplicationName(); }

    /** {@inheritDoc} */
    // @Override
    public String getLogImplementation() { return wrapped.getLogImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getAuthenticationImplementation() { return wrapped.getAuthenticationImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getEncoderImplementation() { return wrapped.getEncoderImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getAccessControlImplementation() { return wrapped.getAccessControlImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getIntrusionDetectionImplementation() { return wrapped.getIntrusionDetectionImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getRandomizerImplementation() { return wrapped.getRandomizerImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getEncryptionImplementation() { return wrapped.getEncryptionImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getValidationImplementation() { return wrapped.getValidationImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public Pattern getValidationPattern( String typeName ) { return wrapped.getValidationPattern(typeName); }

    /** {@inheritDoc} */
    // @Override
    public String getExecutorImplementation() { return wrapped.getExecutorImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public String getHTTPUtilitiesImplementation() { return wrapped.getHTTPUtilitiesImplementation(); }

    /** {@inheritDoc} */
    // @Override
    public byte[] getMasterKey() { return wrapped.getMasterKey(); }

    /** {@inheritDoc} */
    // @Override
    public File getUploadDirectory() { return wrapped.getUploadDirectory(); }

    /** {@inheritDoc} */
    // @Override
    public File getUploadTempDirectory() { return wrapped.getUploadTempDirectory(); }

    /** {@inheritDoc} */
    // @Override
    public int getEncryptionKeyLength() { return wrapped.getEncryptionKeyLength(); }

    /** {@inheritDoc} */
    // @Override
    public byte[] getMasterSalt() { return wrapped.getMasterSalt(); }

    /** {@inheritDoc} */
    // @Override
    public List getAllowedExecutables() { return wrapped.getAllowedExecutables(); }

    /** {@inheritDoc} */
    // @Override
    public List getAllowedFileExtensions() { return wrapped.getAllowedFileExtensions(); }

    /** {@inheritDoc} */
    // @Override
    public int getAllowedFileUploadSize() { return wrapped.getAllowedFileUploadSize(); }

    /** {@inheritDoc} */
    // @Override
    public String getPasswordParameterName() { return wrapped.getPasswordParameterName(); }

    /** {@inheritDoc} */
    // @Override
    public String getUsernameParameterName() { return wrapped.getUsernameParameterName(); }

    /** {@inheritDoc} */
    // @Override
    public String getEncryptionAlgorithm() { return wrapped.getEncryptionAlgorithm(); }

    /** {@inheritDoc} */
    // @Override
    public String getCipherTransformation() { return wrapped.getCipherTransformation(); }

    /** {@inheritDoc} */
    // @Override
    public String setCipherTransformation(String cipherXform) { return wrapped.setCipherTransformation(cipherXform); }

    /** {@inheritDoc} */
    // @Override
    public boolean useMACforCipherText() { return wrapped.useMACforCipherText(); }

    /** {@inheritDoc} */
    // @Override
    public boolean overwritePlainText() { return wrapped.overwritePlainText(); }

    /** {@inheritDoc} */
    // @Override
    public String getIVType() { return wrapped.getIVType(); }

    /** {@inheritDoc} */
    // @Override
    public String getFixedIV() { return wrapped.getFixedIV(); }

    /** {@inheritDoc} */
    // @Override
    public String getHashAlgorithm() { return wrapped.getHashAlgorithm(); }

    /** {@inheritDoc} */
    // @Override
    public int getHashIterations() { return wrapped.getHashIterations(); }

    /** {@inheritDoc} */
    // @Override
    public String getCharacterEncoding() { return wrapped.getCharacterEncoding(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getAllowMultipleEncoding() { return wrapped.getAllowMultipleEncoding(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getAllowMixedEncoding() { return wrapped.getAllowMixedEncoding(); }

    /** {@inheritDoc} */
    // @Override
    public List getDefaultCanonicalizationCodecs() { return wrapped.getDefaultCanonicalizationCodecs(); }

    /** {@inheritDoc} */
    // @Override
    public String getDigitalSignatureAlgorithm() { return wrapped.getDigitalSignatureAlgorithm(); }

    /** {@inheritDoc} */
    // @Override
    public int getDigitalSignatureKeyLength() { return wrapped.getDigitalSignatureKeyLength(); }

    /** {@inheritDoc} */
    // @Override
    public String getRandomAlgorithm() { return wrapped.getRandomAlgorithm(); }

    /** {@inheritDoc} */
    // @Override
    public int getAllowedLoginAttempts() { return wrapped.getAllowedLoginAttempts(); }

    /** {@inheritDoc} */
    // @Override
    public int getMaxOldPasswordHashes() { return wrapped.getMaxOldPasswordHashes(); }

    /** {@inheritDoc} */
    // @Override
    public Threshold getQuota(String eventName) { return wrapped.getQuota(eventName); }

    /** {@inheritDoc} */
    // @Override
    public File getResourceFile( String filename ) { return wrapped.getResourceFile(filename); }

    /** {@inheritDoc} */
    // @Override
    public boolean getForceHttpOnlySession() { return wrapped.getForceHttpOnlySession(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getForceSecureSession() { return wrapped.getForceSecureSession(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getForceHttpOnlyCookies() { return wrapped.getForceHttpOnlyCookies(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getForceSecureCookies() { return wrapped.getForceSecureCookies(); }

    /** {@inheritDoc} */
    // @Override
    public int getMaxHttpHeaderSize() { return wrapped.getMaxHttpHeaderSize(); }

    /** {@inheritDoc} */
    // @Override
    public InputStream getResourceStream( String filename ) throws IOException { return wrapped.getResourceStream(filename); }

    /** {@inheritDoc} */
    // @Override
    public void setResourceDirectory(String dir) { wrapped.setResourceDirectory(dir); }

    /** {@inheritDoc} */
    // @Override
    public String getResponseContentType() { return wrapped.getResponseContentType(); }

    /** {@inheritDoc} */
    // @Override
    public String getHttpSessionIdName() { return wrapped.getHttpSessionIdName(); }

    /** {@inheritDoc} */
    // @Override
    public long getRememberTokenDuration() { return wrapped.getRememberTokenDuration(); }

    /** {@inheritDoc} */
    // @Override
    public int getSessionIdleTimeoutLength() { return wrapped.getSessionIdleTimeoutLength(); }

    /** {@inheritDoc} */
    // @Override
    public int getSessionAbsoluteTimeoutLength() { return wrapped.getSessionAbsoluteTimeoutLength(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getLogEncodingRequired() { return wrapped.getLogEncodingRequired(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getLogApplicationName() { return wrapped.getLogApplicationName(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getLogServerIP() { return wrapped.getLogServerIP(); }

    /** {@inheritDoc} */
    // @Override
    public int getLogLevel() { return wrapped.getLogLevel(); }

    /** {@inheritDoc} */
    // @Override
    public String getLogFileName() { return wrapped.getLogFileName(); }

    /** {@inheritDoc} */
    // @Override
    public int getMaxLogFileSize() { return wrapped.getMaxLogFileSize(); }

    /** {@inheritDoc} */
    // @Override
    public File getWorkingDirectory() { return wrapped.getWorkingDirectory(); }

    /** {@inheritDoc} */
    // @Override
    public List<String> getAdditionalAllowedCipherModes() { return wrapped.getAdditionalAllowedCipherModes(); }

    /** {@inheritDoc} */
    // @Override
    public List<String> getCombinedCipherModes() { return wrapped.getCombinedCipherModes(); }

    /** {@inheritDoc} */
    public String getPreferredJCEProvider() { return wrapped.getPreferredJCEProvider(); }

    /** {@inheritDoc} */
    // @Override
    public boolean getDisableIntrusionDetection() { return wrapped.getDisableIntrusionDetection(); }

    /** {@inheritDoc} */
    // @Override
    public String getKDFPseudoRandomFunction() { return wrapped.getKDFPseudoRandomFunction(); }

    /** {@inheritDoc} */
    public boolean getLenientDatesAccepted() { return wrapped.getLenientDatesAccepted(); }
}
package org.apache.commons.lang; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import junit.textui.TestRunner; public class IllegalClassExceptionTest extends TestCase { public static void main(String[] args) { TestRunner.run(suite()); } public static Test suite() { return new TestSuite(IllegalClassExceptionTest.class); } public IllegalClassExceptionTest(String testName) { super(testName); } // testConstructor_classArgs public void testConstructor_classArgs_allNullInput() { new IllegalClassException(null, null); } public void testConstructor_classArgs_nullExpected() { new IllegalClassException(null, String.class); } public void testConstructor_classArgs_nullActual() { new IllegalClassException(String.class, null); } // testConstructor_stringArg public void testConstructor_stringArg_nullInput() { new IllegalClassException(null); } // testGetMessage public void testGetMessage_classArgs_nullInput() { final Throwable t = new IllegalClassException(null, null); assertEquals("Expected: null, actual: null", t.getMessage()); } public void testGetMessage_classArgs_normalInput() { final Throwable t = new IllegalClassException(String.class, Integer.class); assertEquals( "Expected: java.lang.String, actual: java.lang.Integer", t.getMessage()); } public void testGetMessage_stringArg_nullInput() { final Throwable t = new IllegalClassException(null); assertEquals(null, t.getMessage()); } public void testGetMessage_stringArg_validInput() { final String message = "message"; final Throwable t = new IllegalClassException(message); assertEquals(message, t.getMessage()); } }
package de.uni_potsdam.hpi.bpt.bp2014.jeditor.plugins.pcm; import com.inubit.research.gui.Workbench; import com.inubit.research.gui.plugins.WorkbenchPlugin; import net.frapu.code.visualization.ProcessModel; import net.frapu.code.visualization.SwingUtils; import de.uni_potsdam.hpi.bpt.bp2014.jeditor.visualization.pcm.PCMScenario; import javax.swing.*; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; /** * This class is a workbench Plugin which allows us to create a variant of an existing model. * The model has to be a pcm scenario. * This plugin is not used any longer because it is easier to just save the model as a * new file/ model on the server. */ public class CreateVariantOfModel extends WorkbenchPlugin { private final Workbench wb; /** * Creates a new instance of the plugin. * @param workbench */ public CreateVariantOfModel(Workbench workbench) { super(workbench); this.wb = workbench; } /** * Creates and returns the menu item triggering this plugin. * @return The newly created menuItem. 
*/ @Override public Component getMenuEntry() { JMenuItem menuItem = new JMenuItem("Create Variant"); menuItem.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { System.err.println("Workbench" + wb); ProcessModel model = wb.getSelectedModel(); if (!(model instanceof PCMScenario)) { System.err.println("Model has to be a PCM Scenario"); return; } JDialog chooseProcess = new CopyProcessDialog((PCMScenario)model); chooseProcess.setSize(chooseProcess.getPreferredSize()); SwingUtils.center(chooseProcess); chooseProcess.setVisible(true); } }); return menuItem; } /** * ChooseMenu is a Dialog for Choosing a PCM Fragment to Copy */ private class CopyProcessDialog extends JDialog { private JComboBox<ProcessModel> chooseProcessBox; private PCMScenario model; public CopyProcessDialog(PCMScenario model) { super(); this.model = model; setTitle("Choose Task"); setLayout(new GridLayout(2, 2)); initializeComponents(); setPreferredSize(new Dimension(500, 100)); } /** * Initializes and adds all the necessary Components */ private void initializeComponents() { add(new JLabel("Choose Process:")); chooseProcessBox = chooseProcessBox(); add(chooseProcessBox); add(acceptButton()); add(cancelButton()); } /** * Creates a JComboBox containing with all PCM Fragments of the workspace which have Tasks as an Option * * @return the JComboBox */ private JComboBox<ProcessModel> chooseProcessBox() { JComboBox<ProcessModel> processComboBox = new JComboBox<ProcessModel>(); System.out.println("model: " + model); if (null == model.getModelList()) { return processComboBox; } for (ProcessModel m : model.getModelList()) { processComboBox.addItem(m); } return processComboBox; } /** * Creates a Button to accept the selection. * The Button will have the label "ok". * @return THe newly created Button. 
*/ private JButton acceptButton() { JButton accept = new JButton("Ok"); accept.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { ProcessModel newModel = ((ProcessModel)chooseProcessBox.getSelectedItem()).clone(); newModel.setProcessName(newModel.getProcessName() + "(Clone)"); workbench.processModelOpened(newModel); //workbench.openNewModel(((ProcessModel)chooseProcessBox.getSelectedItem()).clone()); setVisible(false); } }); return accept; } /** * Creates a Button to discard the selection. * The Button will have the create label "cancel". * @return The newly created Button. */ private JButton cancelButton() { JButton cancel = new JButton("Cancel"); cancel.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { setVisible(false); } }); return cancel; } } }
package us.kbase.common.performance.sortjson;

import java.awt.Color;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.net.URL;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Random;
import java.util.Set;

import org.jfree.chart.ChartFactory;
import org.jfree.chart.ChartUtilities;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.LegendItem;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.axis.ValueAxis;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.plot.XYPlot;
import org.jfree.chart.title.LegendTitle;
import org.jfree.data.xy.XYSeries;
import org.jfree.data.xy.XYSeriesCollection;

import us.kbase.common.performance.PerformanceMeasurement;
import us.kbase.common.service.Tuple11;
import us.kbase.common.service.Tuple9;
import us.kbase.common.utils.MD5;
import us.kbase.workspace.ListObjectsParams;
import us.kbase.workspace.ListWorkspaceInfoParams;
import us.kbase.workspace.ObjectData;
import us.kbase.workspace.ObjectIdentity;
import us.kbase.workspace.WorkspaceClient;

import com.fasterxml.jackson.databind.ObjectMapper;

/**
 * Benchmark driver that pulls objects from a KBase workspace and measures the
 * speed and memory usage of several JSON sorting implementations on them.
 * For each object it records sort speed, spawns a subprocess per sorter to
 * sample memory usage, and writes text reports plus a memory-usage chart.
 */
public class MeasureSortRunner {

    /** Directory where per-type result subdirectories are created. */
    final static Path OUTPUT_DIR = Paths.get(".");

    //set to 0 or less to use pre chosen test objects below
    final static int NUM_OBJECTS_TO_TEST = 0;

    //random tester won't use objects below this size
    final static int MIN_SIZE_B = 1000000;

    /** When true, verify all sorters produce the same MD5 as the Jackson sort. */
    final static boolean CHECK_SORT_CORRECTNESS = true;

    /** Fixed set of objects tested when NUM_OBJECTS_TO_TEST < 1. */
    final static List<ObjectIdentity> TEST_OBJECTS = new ArrayList<ObjectIdentity>();
    static {
        TEST_OBJECTS.add(new ObjectIdentity().withRef("637/35"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("637/308"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("970/1"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("970/2"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("970/3"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("1267/1"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("1267/2"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("1267/3"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("1267/4"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("1267/5"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("1267/6"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("1267/7"));
        TEST_OBJECTS.add(new ObjectIdentity().withRef("1267/8"));
//      TEST_OBJECTS.add(new ObjectIdentity().withRef("1200/MinimalMedia"));
    }

    final static String WORKSPACE_URL = "http://kbase.us/services/ws";

    /** Name of the reference sorter whose MD5 the others are compared against. */
    final static String JACKSON = "Jackson";

    /** Sorter names understood by the MeasureSortJsonMem subprocess. */
    final static List<String> SORTERS = new ArrayList<String>();
    static {
        SORTERS.add("Jackson");
        SORTERS.add("SortedJsonBytes");
        SORTERS.add("SortedJsonFile-bytes");
        SORTERS.add("SortedJsonFile-file");
    }

    final static List<String> JARS = new ArrayList<String>();
    static {
        JARS.add("../jars/lib/jars/jackson/jackson-annotations-2.2.3.jar");
        JARS.add("../jars/lib/jars/jackson/jackson-core-2.2.3.jar");
        JARS.add("../jars/lib/jars/jackson/jackson-databind-2.2.3.jar");
    }

    final static String CODE_ROOT = "src";

    /** Classpath (source root + jars) used for the compile and the subprocess. */
    final static String CLASSPATH;
    static {
        String classpath = CODE_ROOT;
        for (String j : JARS) {
            classpath += ":" + j;
        }
        CLASSPATH = classpath;
    }

    final static String MEAS_CLASS_FILE =
            "us.kbase.common.performance.sortjson.MeasureSortJsonMem";
    final static String MEAS_JAVA_FILE = CODE_ROOT + "/" + MEAS_CLASS_FILE.replace(".", "/");

    // Positions within the SIZE_CUTOFFS value lists.
    private static final int NUM_SORTS_POS = 0;
    private static final int INTERVAL_POS = 1;

    /**
     * Maps an object size cutoff (bytes) to {number of sorts, sample interval ms}.
     * Iterated in insertion order (largest first); the first cutoff smaller than
     * the object size wins, so bigger objects get fewer sorts at longer intervals.
     */
    private static final Map<Integer, List<Integer>> SIZE_CUTOFFS =
            new LinkedHashMap<Integer, List<Integer>>();
    static {
        SIZE_CUTOFFS.put(100000000, Arrays.asList(10, 5000));
        SIZE_CUTOFFS.put(20000000, Arrays.asList(20, 1000));
        SIZE_CUTOFFS.put(10000000, Arrays.asList(100, 500));
        SIZE_CUTOFFS.put(0, Arrays.asList(500, 100));
    }

    private static WorkspaceClient ws;

    public static void main(String[] args) throws Exception {
        System.setProperty("java.awt.headless", "true");
        System.out.println("Java version: " + System.getProperty("java.version"));
        System.out.println("Mem: total: " + Runtime.getRuntime().totalMemory() +
                " max: " + Runtime.getRuntime().maxMemory());
        System.out.println("Input args:");
        System.out.println(ManagementFactory.getRuntimeMXBean().getInputArguments());
        System.out.println("Garbage collectors:");
        for (GarbageCollectorMXBean g : ManagementFactory.getGarbageCollectorMXBeans()) {
            System.out.println(g.getName() + " - Valid: " + g.isValid());
            String[] m = g.getMemoryPoolNames();
            for (int i = 0; i < m.length; i++) {
                System.out.println("\t" + m[i]);
            }
        }
        compileMeasureSort();
        ws = new WorkspaceClient(new URL(WORKSPACE_URL));
        int numObjs = NUM_OBJECTS_TO_TEST;
        if (numObjs < 1) {
            // Fixed object list mode.
            int count = 1;
            for (ObjectIdentity oi : TEST_OBJECTS) {
                System.out.println(String.format("Testing object %s of %s",
                        count++, TEST_OBJECTS.size()));
                measureObjectMemAndSpeed(OUTPUT_DIR, oi);
            }
        } else {
            // Random sampling mode.
            Random r = new Random();
            Set<String> seenObjs = new HashSet<String>();
            for (int i = 0; i < numObjs; i++) {
                System.out.println(String.format("Testing object %s of %s",
                        i + 1, numObjs));
                ObjectIdentity oi = getRandomObject(seenObjs, r);
                measureObjectMemAndSpeed(OUTPUT_DIR, oi);
            }
        }
    }

    /**
     * Picks a random, not-yet-seen workspace object larger than MIN_SIZE_B.
     * Loops until a suitable object is found (may spin if few objects qualify).
     *
     * @param seenObjs set of "wsid_objid" keys already used; updated in place.
     * @param rand source of randomness.
     * @return the identity of the chosen object.
     */
    private static ObjectIdentity getRandomObject(Set<String> seenObjs, Random rand)
            throws Exception {
        ObjectIdentity good = null;
        while (good == null) {
            List<Tuple9<Long, String, String, String, Long, String, String, String,
                    Map<String, String>>> workspaces =
                    ws.listWorkspaceInfo(new ListWorkspaceInfoParams());
            int wsr = rand.nextInt(workspaces.size());
            List<Tuple11<Long, String, String, String, Long, String, Long, String, String, Long,
                    Map<String, String>>> objs = ws.listObjects(new ListObjectsParams()
                            .withIds(Arrays.asList(workspaces.get(wsr).getE1()))
                            .withShowHidden(1L));
            if (objs.size() == 0)
                continue;
            int objr = rand.nextInt(objs.size());
            long work = objs.get(objr).getE7();
            long objid = objs.get(objr).getE1();
            long size = objs.get(objr).getE10();
            String wsobj = work + "_" + objid;
            if (!seenObjs.contains(wsobj)) {
                seenObjs.add(wsobj);
                if (size > MIN_SIZE_B) {
                    good = new ObjectIdentity().withWsid(work).withObjid(objid);
                }
            }
        }
        return good;
    }

    /**
     * Downloads one object, optionally verifies sorter MD5 agreement, then
     * measures memory usage and sort speed, writing results under dir/&lt;type&gt;/.
     */
    private static void measureObjectMemAndSpeed(Path dir, ObjectIdentity oi)
            throws Exception {
        ObjectData data = ws.getObjects(Arrays.asList(oi)).get(0);
        Tuple11<Long, String, String, String, Long, String, Long, String, String, Long,
                Map<String, String>> info = data.getInfo();
        String ref = info.getE7() + "/" + info.getE1() + "/" + info.getE5();
        String name = info.getE8() + "/" + info.getE2() + "/" + info.getE5();
        String title = ref + " " + name + " " + info.getE3();
        long size = info.getE10();
        String mb = String.format("%,.2fMB", size / 1000000.0);
        String outputPrefix = mb + "_" + ref.replace("/", "_");
        Path d = dir.resolve(info.getE3()); // one subdirectory per type
        Files.createDirectories(d);
        Path input = d.resolve(ref.replace("/", "_") + ".object.txt");
        input.toFile().createNewFile();
        input.toFile().deleteOnExit();
        // Serialize the object to disk so subprocesses can read it; then drop
        // the in-memory copy to keep this JVM's footprint small.
        new ObjectMapper().writeValue(input.toFile(), data.getData().asInstance());
        data = null;
        System.out.println(String.format("Testing object %s, %sB, %s",
                ref, info.getE10(), new Date()));
        if (CHECK_SORT_CORRECTNESS) {
            boolean good = true;
            Map<String, MD5> md5s = MeasureSortJsonMem.getMD5s(input);
            MD5 jMD5 = md5s.get(JACKSON);
            List<String> output = new ArrayList<String>();
            for (Entry<String, MD5> m : md5s.entrySet()) {
                output.add(m.getKey() + " " + m.getValue().getMD5());
                if (!m.getValue().equals(jMD5)) {
                    good = false;
                }
            }
            final Path out;
            if (good) {
                out = d.resolve(outputPrefix + ".md5.good.txt");
            } else {
                System.out.println("Sort correctness failed for " + ref);
                out = d.resolve(outputPrefix + ".md5.bad.txt");
            }
            Files.write(out, output, Charset.forName("UTF-8"));
        }
        // Choose sort count / sampling interval based on object size.
        int numSorts = SIZE_CUTOFFS.get(0).get(NUM_SORTS_POS);
        int interval = SIZE_CUTOFFS.get(0).get(INTERVAL_POS);
        for (Entry<Integer, List<Integer>> sz : SIZE_CUTOFFS.entrySet()) {
            if (size > sz.getKey()) {
                numSorts = sz.getValue().get(NUM_SORTS_POS);
                interval = sz.getValue().get(INTERVAL_POS);
                break;
            }
        }
        System.out.println(String.format(
                "Recording memory usage. Sorts: %s, interval: %s, %s",
                numSorts, interval, new Date()));
        measureSorterMemUsage(numSorts, interval, input, title, d,
                outputPrefix + ".memresults");
        System.out.println("Recording sort speed. " + new Date());
        measureSorterSpeed(numSorts, input, title,
                d.resolve(outputPrefix + ".speed.txt"));
        Files.deleteIfExists(input);
        System.out.println();
    }

    /**
     * Runs each sorter numSorts times in-process and writes timing results.
     * The byte[] is released before the file-based run to limit peak memory.
     */
    private static void measureSorterSpeed(int numSorts, Path input, String title,
            Path output) throws Exception {
        byte[] b = Files.readAllBytes(input);
        PerformanceMeasurement js = MeasureSortJsonSpeed.measureJsonSort(b, numSorts);
        PerformanceMeasurement skjb = MeasureSortJsonSpeed.measureSKJBSort(b, numSorts);
        PerformanceMeasurement skjfb = MeasureSortJsonSpeed.measureSKJFSort(b, numSorts);
        b = null;
        PerformanceMeasurement skjff = MeasureSortJsonSpeed.measureSKJFSort(
                input.toFile(), numSorts);
        BufferedWriter bw = Files.newBufferedWriter(output, Charset.forName("UTF-8"));
        bw.write(title + String.format(" Size (MB): %,.2f",
                input.toFile().length() / 1000000.0) + "\n");
        MeasureSortJsonSpeed.renderResults(Arrays.asList(js, skjb, skjfb, skjff), bw);
        bw.close();
    }

    /**
     * Runs every sorter in a subprocess, collecting memory samples, and saves
     * both a chart and the raw data.
     */
    private static void measureSorterMemUsage(int numSorts, int interval, Path input,
            String title, Path dir, String outputPrefix)
            throws IOException, InterruptedException {
        Map<String, List<Double>> mems = new LinkedHashMap<String, List<Double>>();
        for (String sorter : SORTERS) {
            System.out.println("Running sorter: " + sorter + " " + new Date());
            mems.put(sorter, runMeasureSort(numSorts, interval, input, sorter));
        }
        String params = String.format(
                "Sorts: %s, Interval (ms): %s, size (MB): %,.2f",
                numSorts, interval, input.toFile().length() / 1000000.0);
        saveMemChart(dir.resolve(Paths.get(outputPrefix + ".png")), mems, title, params);
        saveMemData(dir.resolve(Paths.get(outputPrefix + ".txt")), mems, title, params);
    }

    /** Writes the raw memory samples, one sorter section at a time. */
    private static void saveMemData(Path file, Map<String, List<Double>> mems,
            String title, String params) throws IOException {
        BufferedWriter bw = Files.newBufferedWriter(file, Charset.forName("UTF-8"));
        bw.write(title + "\n");
        bw.write(params + "\n");
        for (String s : mems.keySet()) {
            bw.write(s + "\n");
            for (Double m : mems.get(s)) {
                bw.write(Double.toString(m) + "\n");
            }
            bw.write("\n");
        }
        bw.flush();
        bw.close();
    }

    /**
     * Renders the memory samples as an XY line chart and saves it as a PNG.
     * Fixed: the axis-label arguments were garbled into a single malformed
     * string literal (a compile error); they are now the separate x-axis and
     * y-axis labels createXYLineChart expects. Also fixed an off-by-one that
     * plotted the first sample at x=2 instead of x=1.
     */
    private static JFreeChart saveMemChart(Path f, Map<String, List<Double>> mems,
            String title, String params) throws IOException {
        XYSeriesCollection xyc = new XYSeriesCollection();
        for (String sorter : SORTERS) {
            XYSeries s = new XYSeries(sorter, false, false);
            double count = 1;
            for (Double mem : mems.get(sorter)) {
                s.add(count++, mem); // sample index starts at 1
            }
            xyc.addSeries(s);
        }
        final JFreeChart chart = ChartFactory.createXYLineChart(
                title + "\n" + params,
                "Measurement #",      // x axis label
                "Used Memory (MB)",   // y axis label
                xyc,
                PlotOrientation.VERTICAL,
                true,  // include legend
                false, // tooltips
                false  // urls
                );
        chart.setBackgroundPaint(Color.white);
        final LegendTitle legend = chart.getLegend();
        for (Object li : legend.getItemContainer().getBlocks()) {
            ((LegendItem) li).setShapeVisible(true);
        }
        final XYPlot plot = chart.getXYPlot();
        plot.setBackgroundPaint(Color.white);
        plot.setDomainGridlinePaint(Color.gray);
        plot.setRangeGridlinePaint(Color.gray);
        plot.getRenderer().setSeriesPaint(2, new Color(20, 184, 69));
        plot.getRenderer().setSeriesPaint(3, new Color(184, 173, 20));
        final ValueAxis domainAxis = plot.getDomainAxis();
        domainAxis.setStandardTickUnits(NumberAxis.createIntegerTickUnits());
        ChartUtilities.saveChartAsPNG(f.toFile(), chart, 700, 500);
        return chart;
    }

    /**
     * Spawns the MeasureSortJsonMem subprocess and returns its memory samples.
     * Fixed: the old code waited for the child to exit before reading stdout,
     * which can deadlock when the child fills the pipe buffer; stdout and
     * stderr are now fully drained before waitFor().
     */
    private static List<Double> runMeasureSort(int numSorts, int interval, Path file,
            String sorter) throws IOException, InterruptedException {
        Process p = new ProcessBuilder(new String[] {
                "java", "-cp", CLASSPATH, MEAS_CLASS_FILE,
                Integer.toString(numSorts), Integer.toString(interval),
                file.toString(), sorter}).start();
        // Drain stdout first - one memory sample (a double) per line.
        List<String> stdout = new ArrayList<String>();
        BufferedReader br = new BufferedReader(new InputStreamReader(p.getInputStream()));
        while (true) {
            String l = br.readLine();
            if (l == null)
                break;
            stdout.add(l);
        }
        br.close();
        // Then drain and echo stderr, if any.
        BufferedReader err = new BufferedReader(new InputStreamReader(p.getErrorStream()));
        String l = err.readLine();
        if (l != null) {
            System.out.println("Sort measurer STDERR:");
        }
        while (l != null) {
            System.out.println(l);
            l = err.readLine();
        }
        err.close();
        // Streams are exhausted, so this cannot block on a full pipe.
        p.waitFor();
        if (p.exitValue() != 0) {
            System.out.println("Run failed. Exit value " + p.exitValue());
            System.out.println("STDOUT:");
            for (String line : stdout) {
                System.out.println(line);
            }
            System.exit(1);
        }
        p.destroy();
        // Parse only after the exit check: a failed run's output need not be numeric.
        List<Double> mem = new ArrayList<Double>(stdout.size());
        for (String line : stdout) {
            mem.add(Double.parseDouble(line));
        }
        return mem;
    }

    /** Compiles the memory-measurement class so the subprocess can run it. */
    private static void compileMeasureSort() throws IOException, InterruptedException {
        Process p = new ProcessBuilder(new String[] {
                "javac", "-cp", CLASSPATH, MEAS_JAVA_FILE + ".java"}).start();
        finishProcess(p, "Compile failed"); // small output; deadlock not a concern here
        p.destroy();
    }

    /**
     * Waits for a process and, on non-zero exit, dumps its output and exits.
     * Only safe for processes with small output (the pipe buffer must not fill
     * before waitFor returns).
     */
    private static void finishProcess(Process p, String err)
            throws InterruptedException, IOException {
        p.waitFor();
        if (p.exitValue() != 0) {
            System.out.println(err + ". Exit value " + p.exitValue());
            System.out.println("STDOUT:");
            print(p.getInputStream());
            System.out.println("STDERR:");
            print(p.getErrorStream());
            System.exit(1);
        }
    }

    /** Copies an input stream to stdout, line by line. */
    private static void print(InputStream is) throws IOException {
        BufferedReader br = new BufferedReader(new InputStreamReader(is));
        while (true) {
            String l = br.readLine();
            if (l == null)
                break;
            System.out.println(l);
        }
        br.close();
    }
}
package com.breadwallet;

import android.annotation.TargetApi;
import android.app.Activity;
import android.app.Application;
import android.arch.lifecycle.ProcessLifecycleOwner;
import android.content.Context;
import android.content.IntentFilter;
import android.graphics.Point;
import android.hardware.fingerprint.FingerprintManager;
import android.net.ConnectivityManager;
import android.os.Build;
import android.util.Log;
import android.view.Display;
import android.view.WindowManager;

import com.breadwallet.presenter.activities.util.ApplicationLifecycleObserver;
import com.breadwallet.presenter.activities.util.BRActivity;
import com.breadwallet.tools.crypto.Base32;
import com.breadwallet.tools.crypto.CryptoHelper;
import com.breadwallet.tools.listeners.SyncReceiver;
import com.breadwallet.tools.manager.BRApiManager;
import com.breadwallet.tools.manager.BRReportsManager;
import com.breadwallet.tools.manager.BRSharedPrefs;
import com.breadwallet.tools.manager.InternetManager;
import com.breadwallet.tools.util.BRConstants;
import com.breadwallet.tools.util.Utils;
import com.crashlytics.android.Crashlytics;
import com.platform.APIClient;

import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import io.fabric.sdk.android.Fabric;

/**
 * Application singleton: loads the native library, configures Crashlytics and
 * API request headers, tracks display metrics, and detects when the app goes
 * to the background so listeners can react.
 */
public class BreadApp extends Application {
    private static final String TAG = BreadApp.class.getName();

    // Screen dimensions in pixels, captured once in onCreate().
    public static int DISPLAY_HEIGHT_PX;
    public static int DISPLAY_WIDTH_PX;
    private FingerprintManager mFingerprintManager;

    // host is the server(s) on which the API is hosted
    public static String HOST = "api.breadwallet.com";
    // Listeners notified when the app is detected to be backgrounded.
    private static List<OnAppBackgrounded> listeners;
    // Timer that polls activityCounter to detect backgrounding; see onStop().
    private static Timer isBackgroundChecker;
    // Incremented in setBreadContext(); <= 0 is treated as "in background".
    public static AtomicInteger activityCounter = new AtomicInteger();
    // Timestamp (ms) of the moment the app was detected as backgrounded.
    public static long backgroundedTime;
    private static Context mContext;
    private ApplicationLifecycleObserver mObserver;

    // NOTE(review): this is evaluated during class initialization, before
    // onCreate() sets mContext and before any activity registers itself, so
    // getBreadContext() presumably returns null here and PACKAGE_NAME is
    // always null - verify; if so, the installer-name log below always
    // prints "null".
    private static final String PACKAGE_NAME = BreadApp.getBreadContext() == null ? null : BreadApp.getBreadContext().getApplicationContext().getPackageName();

    static {
        try {
            System.loadLibrary(BRConstants.NATIVE_LIB_NAME);
        } catch (UnsatisfiedLinkError e) {
            e.printStackTrace();
            // NOTE(review): the literal below contains garbled escaping
            // ("...load.\\n\" + ") - looks like a copy/paste artifact; it is a
            // runtime string so it is left untouched here.
            Log.d(TAG, "Native code library failed to load.\\n\" + " + e);
            Log.d(TAG, "Installer Package Name -> " + (PACKAGE_NAME == null ? "null" : BreadApp.getBreadContext().getPackageManager().getInstallerPackageName(PACKAGE_NAME)));
        }
    }

    public static final boolean IS_ALPHA = false;

    // Headers attached to every API request; populated in onCreate().
    public static final Map<String, String> mHeaders = new HashMap<>();

    private static Activity currentActivity;

    @Override
    public void onCreate() {
        super.onCreate();
        // Debug builds talk to the staging backend.
        if (BuildConfig.DEBUG) {
            HOST = "stage2.breadwallet.com";
        }
        final Fabric fabric = new Fabric.Builder(this)
                .kits(new Crashlytics.Builder().disabled(BuildConfig.DEBUG).build())
                .debuggable(BuildConfig.DEBUG)// Enables Crashlytics debugger
                .build();
        Fabric.with(fabric);
//        StrictMode.setThreadPolicy(new StrictMode.ThreadPolicy.Builder()
//                .detectDiskReads()
//                .detectDiskWrites()
//                .detectNetwork()   // or .detectAll() for all detectable problems
//                .penaltyLog()
//                .build());
//        StrictMode.setVmPolicy(new StrictMode.VmPolicy.Builder()
//                .detectLeakedSqlLiteObjects()
//                .detectLeakedClosableObjects()
//                .penaltyLog()
//                .penaltyDeath()
//                .build());
        mContext = this;

        // Safety check: alpha flag must never ship in a release build.
        if (!Utils.isEmulatorOrDebug(this) && IS_ALPHA)
            throw new RuntimeException("can't be alpha for release");
        boolean isTestVersion = APIClient.getInstance(this).isStaging();
        boolean isTestNet = BuildConfig.BITCOIN_TESTNET;
        String lang = getCurrentLocale(this);

        mHeaders.put(BRApiManager.HEADER_IS_INTERNAL, IS_ALPHA ? "true" : "false");
        mHeaders.put(BRApiManager.HEADER_TESTFLIGHT, isTestVersion ? "true" : "false");
        mHeaders.put(BRApiManager.HEADER_TESTNET, isTestNet ? "true" : "false");
        mHeaders.put(BRApiManager.HEADER_ACCEPT_LANGUAGE, lang);

        // Capture the display size once for layout code elsewhere.
        WindowManager wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
        Display display = wm.getDefaultDisplay();
        Point size = new Point();
        display.getSize(size);
        DISPLAY_WIDTH_PX = size.x;
        DISPLAY_HEIGHT_PX = size.y;
        mFingerprintManager = (FingerprintManager) getSystemService(Context.FINGERPRINT_SERVICE);

        registerReceiver(InternetManager.getInstance(), new IntentFilter(ConnectivityManager.CONNECTIVITY_ACTION));

        mObserver = new ApplicationLifecycleObserver();
        ProcessLifecycleOwner.get().getLifecycle().addObserver(mObserver);
    }

    /**
     * Derives and persists a wallet reward id from the given address if one is
     * not already stored. (Method name "IfIf" is a historical typo; kept for
     * source compatibility with callers.)
     */
    public static void generateWalletIfIfNeeded(Context app, String address) {
        if (BRSharedPrefs.getWalletRewardId(app) == null) {
            String rewardId = generateWalletId(app, address);
            if (!Utils.isNullOrEmpty(rewardId)) {
                BRSharedPrefs.putWalletRewardId(app, rewardId);
            } else
                BRReportsManager.reportBug(new NullPointerException("rewardId is empty"));
        }
    }

    /**
     * Builds a human-readable wallet id: sha256 of the address (sans its first
     * two characters), first 10 bytes base32-encoded, lower-cased, grouped in
     * chunks of 4 separated by spaces. Returns null on failure.
     * NOTE(review): the result carries a trailing space (each chunk appends
     * " ") - presumably intentional for cross-platform id parity; confirm.
     */
    private static synchronized String generateWalletId(Context app, String address) {
        if (app == null) {
            Log.e(TAG, "generateWalletId: app is null");
            return null;
        }
        try {
            // Remove the first 2 characters
            String cleanAddress = address.substring(2, address.length());

            // Get the shortened address bytes
            byte[] addressBytes = cleanAddress.getBytes("UTF-8");

            // Run sha256 on the shortened address bytes
            byte[] sha256Address = CryptoHelper.sha256(addressBytes);
            if (Utils.isNullOrEmpty(sha256Address)) {
                BRReportsManager.reportBug(new IllegalAccessException("Failed to sha256"));
                return null;
            }

            // Get the first 10 bytes
            byte[] firstTenBytes = Arrays.copyOfRange(sha256Address, 0, 10);
            String base32String = new String(Base32.encode(firstTenBytes));
            base32String = base32String.toLowerCase();

            // Split into chunks of 4 characters, space-separated.
            StringBuilder builder = new StringBuilder();
            Matcher matcher = Pattern.compile(".{1,4}").matcher(base32String);
            while (matcher.find()) {
                String piece = base32String.substring(matcher.start(), matcher.end());
                builder.append(piece + " ");
            }
            return builder.toString();
        } catch (UnsupportedEncodingException e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Returns the device's current language code, API-level aware. */
    @TargetApi(Build.VERSION_CODES.N)
    public String getCurrentLocale(Context ctx) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
            return ctx.getResources().getConfiguration().getLocales().get(0).getLanguage();
        } else {
            //noinspection deprecation
            return ctx.getResources().getConfiguration().locale.getLanguage();
        }
    }

    public static Map<String, String> getBreadHeaders() {
        return mHeaders;
    }

    /**
     * Best available Context: the current activity, else the sync receiver's
     * context, else the application context (null before onCreate()).
     */
    public static Context getBreadContext() {
        Context app = currentActivity;
        if (app == null) app = SyncReceiver.app;
        if (app == null) app = mContext;
        return app;
    }

    /** Records the foreground activity and counts it for background detection. */
    public static void setBreadContext(Activity app) {
        BreadApp.activityCounter.incrementAndGet();
        currentActivity = app;
    }

    /** Notifies all registered listeners that the app was backgrounded. */
    public static synchronized void fireListeners() {
        if (listeners == null) return;
        List<OnAppBackgrounded> copy = new ArrayList<>(listeners);
        for (OnAppBackgrounded lis : copy) if (lis != null) lis.onBackgrounded();
    }

    public static void addOnBackgroundedListener(OnAppBackgrounded listener) {
        if (listeners == null) listeners = new ArrayList<>();
        if (listener != null && !listeners.contains(listener)) listeners.add(listener);
    }

    /** True when no activity is counted as foreground (or context is null). */
    public static boolean isAppInBackground(final Context context) {
        return context == null || activityCounter.get() <= 0;
    }

    //call onStop on every activity so
    // Restarts a 500ms polling timer; when the counter says we are in the
    // background, records the time, fires listeners and stops polling.
    public static void onStop(final BRActivity app) {

        if (isBackgroundChecker != null)
            isBackgroundChecker.cancel();
        isBackgroundChecker = new Timer();
        TimerTask backgroundCheck = new TimerTask() {
            @Override
            public void run() {
                if (isAppInBackground(app)) {
                    backgroundedTime = System.currentTimeMillis();
                    Log.e(TAG, "App went in background!");
                    // APP in background, do something
                    fireListeners();
                    isBackgroundChecker.cancel();
                }
                // APP in foreground, do something else
            }
        };

        isBackgroundChecker.schedule(backgroundCheck, 500, 500);
    }

    /** Callback invoked when the app transitions to the background. */
    public interface OnAppBackgrounded {
        void onBackgrounded();
    }
}
package org.helioviewer.jhv.renderable.components;

import java.awt.Component;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;

import org.helioviewer.base.Pair;
import org.helioviewer.base.math.GL3DMat4d;
import org.helioviewer.base.math.GL3DVec3d;
import org.helioviewer.jhv.camera.GL3DCamera;
import org.helioviewer.jhv.gui.ImageViewerGui;
import org.helioviewer.jhv.gui.filters.FiltersPanel;
import org.helioviewer.jhv.layers.Layers;
import org.helioviewer.jhv.opengl.GLImage;
import org.helioviewer.jhv.opengl.GLInfo;
import org.helioviewer.jhv.opengl.GLSLShader;
import org.helioviewer.jhv.renderable.gui.Renderable;
import org.helioviewer.jhv.renderable.gui.RenderableType;
import org.helioviewer.jhv.renderable.viewport.GL3DViewport;
import org.helioviewer.viewmodel.imagedata.ImageData;
import org.helioviewer.viewmodel.view.View;
import org.helioviewer.viewmodel.view.jp2view.JHVJP2View;

import com.jogamp.common.nio.Buffers;

import com.jogamp.opengl.GL2;

/**
 * Renderable that draws one image layer onto an icosphere (the solar disc)
 * plus a large background quad (the corona plane), using the GLSL shader
 * pipeline. Holds current/previous/base ImageData frames for difference modes.
 */
public class RenderableImageLayer implements Renderable {

    // Shared toggle: whether the off-disc corona quad is drawn at all.
    private static boolean showCorona = true;
    // Monotonically increasing id source for layers.
    private static int nextLayerId = 0;
    private final int layerId;

    public int getLayerId() {
        return layerId;
    }

    // GL buffer object handles and their element counts, filled in init().
    private int positionBufferID;
    private int indexBufferID;
    private int indexBufferSize;
    private int positionBufferSize;
    private final View layerView;
    private final RenderableType type;
    private boolean isVisible = true;

    private final GLImage glImage;

    public RenderableImageLayer(View view) {
        type = new RenderableImageType(view.getName());
        layerId = nextLayerId++;
        layerView = view;
        glImage = new GLImage(view.getDefaultLUT());
        ImageViewerGui.getRenderableContainer().addBeforeRenderable(this);

        // Default opacity decreases as more layers are stacked ...
        float opacity = (float) (1. / (1. + Layers.getNumLayers()));
        // ... except coronagraph layers (LASCO/COR), which stay fully opaque.
        if (layerView instanceof JHVJP2View) {
            JHVJP2View jp2v = (JHVJP2View) layerView;
            if (jp2v.getName().contains("LASCO") || jp2v.getName().contains("COR")) {
                opacity = 1.f;
            }
        }
        glImage.setOpacity(opacity);
    }

    @Override
    public void init(GL2 gl) {
        glImage.init(gl);

        // Upload icosphere + corona-quad geometry into VBOs once.
        Pair<FloatBuffer, IntBuffer> bufferPair = makeIcosphere(2);
        FloatBuffer positionBuffer = bufferPair.a;
        IntBuffer indexBuffer = bufferPair.b;

        positionBufferSize = positionBuffer.capacity();
        positionBufferID = generate(gl);

        gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, positionBufferID);
        gl.glBufferData(GL2.GL_ARRAY_BUFFER, positionBufferSize * Buffers.SIZEOF_FLOAT, positionBuffer, GL2.GL_STATIC_DRAW);
        gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, 0);

        indexBufferID = generate(gl);
        indexBufferSize = indexBuffer.capacity();

        gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, indexBufferID);
        gl.glBufferData(GL2.GL_ELEMENT_ARRAY_BUFFER, indexBuffer.capacity() * Buffers.SIZEOF_INT, indexBuffer, GL2.GL_STATIC_DRAW);
        gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, 0);
    }

    @Override
    public void render(GL2 gl, GL3DViewport vp) {
        _render(gl, vp, new double[] { 1., 1., 0., 1. });
    }

    @Override
    public void renderMiniview(GL2 gl, GL3DViewport vp) {
        _render(gl, vp, new double[] { 0., 0., 0., 0. });
    }

    /**
     * Core draw: binds the shader, applies filters, uploads camera matrices /
     * difference-mode rotation quaternions, then draws the corona quad (last 6
     * indices, isDisc=0) and the sphere (remaining indices, isDisc=1) with the
     * given depth ranges.
     * NOTE(review): only depthrange[0..2] are read; element [3] appears unused
     * - confirm the expected array layout with callers.
     */
    private void _render(GL2 gl, GL3DViewport vp, double[] depthrange) {
        if (!isVisible)
            return;

        GLSLShader.bind(gl);
        {
            gl.glEnable(GL2.GL_CULL_FACE);
            gl.glCullFace(GL2.GL_BACK);

            glImage.applyFilters(gl, imageData, prevImageData, baseImageData);
            GLSLShader.setViewport(GLInfo.pixelScale[0] * vp.getWidth(), GLInfo.pixelScale[1] * vp.getHeight(), vp.getOffsetX(), vp.getOffsetY());
            if (!RenderableImageLayer.showCorona) {
                // Cut the shader off at the solar radius: hides the corona.
                GLSLShader.setOuterCutOffRadius(1.);
            }
            GLSLShader.filter(gl);

            GL3DCamera camera = vp.getCamera();
            GL3DMat4d vpmi = camera.getOrthoMatrixInverse();
            vpmi.translate(new GL3DVec3d(-camera.getTranslation().x, -camera.getTranslation().y, 0.));
            GLSLShader.bindMatrix(gl, vpmi.getFloatArray());
            GLSLShader.bindCameraDifferenceRotationQuat(gl, camera.getCameraDifferenceRotationQuatd(imageData.getMetaData().getRotationObs()));
            // Difference modes additionally need the reference frame's rotation.
            if (glImage.getBaseDifferenceMode()) {
                GLSLShader.bindDiffCameraDifferenceRotationQuat(gl, camera.getCameraDifferenceRotationQuatd(baseImageData.getMetaData().getRotationObs()));
            } else if (glImage.getDifferenceMode()) {
                GLSLShader.bindDiffCameraDifferenceRotationQuat(gl, camera.getCameraDifferenceRotationQuatd(prevImageData.getMetaData().getRotationObs()));
            }
            enablePositionVBO(gl);
            enableIndexVBO(gl);
            {
                gl.glVertexPointer(3, GL2.GL_FLOAT, 3 * Buffers.SIZEOF_FLOAT, 0);

                // Corona quad: last 6 indices of the buffer, flagged isDisc=0.
                GLSLShader.bindIsDisc(gl, 0);
                gl.glDepthRange(depthrange[0], depthrange[1]);
                gl.glDrawElements(GL2.GL_TRIANGLES, 6, GL2.GL_UNSIGNED_INT, (indexBufferSize - 6) * Buffers.SIZEOF_INT);

                // Sphere: everything before those 6 indices, flagged isDisc=1.
                gl.glDepthRange(depthrange[1], depthrange[2]);
                GLSLShader.bindIsDisc(gl, 1);
                gl.glDrawElements(GL2.GL_TRIANGLES, indexBufferSize - 6, GL2.GL_UNSIGNED_INT, 0);
                gl.glDepthRange(0.f, 1.f);
            }
            disableIndexVBO(gl);
            disablePositionVBO(gl);

            gl.glColorMask(true, true, true, true);
            gl.glDisable(GL2.GL_CULL_FACE);
        }
        GLSLShader.unbind(gl);
    }

    /** Generates one GL buffer object and returns its handle. */
    private int generate(GL2 gl) {
        int[] tmpId = new int[1];
        gl.glGenBuffers(1, tmpId, 0);
        return tmpId[0];
    }

    private void enableIndexVBO(GL2 gl) {
        gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, indexBufferID);
    }

    private void disableIndexVBO(GL2 gl) {
        gl.glBindBuffer(GL2.GL_ELEMENT_ARRAY_BUFFER, 0);
    }

    private void enablePositionVBO(GL2 gl) {
        gl.glEnableClientState(GL2.GL_VERTEX_ARRAY);
        gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, positionBufferID);
    }

    private void disablePositionVBO(GL2 gl) {
        gl.glDisableClientState(GL2.GL_VERTEX_ARRAY);
        gl.glBindBuffer(GL2.GL_ARRAY_BUFFER, 0);
    }

    private void deletePositionVBO(GL2 gl) {
        gl.glDeleteBuffers(1, new int[] { positionBufferID }, 0);
    }

    private void deleteIndexVBO(GL2 gl) {
        gl.glDeleteBuffers(1, new int[] { indexBufferID }, 0);
    }

    @Override
    public void remove(GL2 gl) {
        Layers.removeLayer(layerView);
        imageData = prevImageData = baseImageData = null;
        dispose(gl);
    }

    /**
     * Builds a unit icosphere by subdividing an icosahedron `level` times,
     * then appends a large (80x80) quad used as the corona plane. Returns
     * {positions (xyz floats), triangle indices}; the quad occupies the last
     * 6 indices. Exact zeros are nudged to the next float to avoid degenerate
     * coordinates in the shader.
     */
    private static Pair<FloatBuffer, IntBuffer> makeIcosphere(int level) {
        // t is the reciprocal golden ratio, used in icosahedron construction.
        float t = (float) ((Math.sqrt(5) - 1) / 2);
        float[][] icosahedronVertexList = new float[][] { new float[] { -1, -t, 0 }, new float[] { 0, 1, t }, new float[] { 0, 1, -t }, new float[] { 1, t, 0 }, new float[] { 1, -t, 0 }, new float[] { 0, -1, -t }, new float[] { 0, -1, t }, new float[] { t, 0, 1 }, new float[] { -t, 0, 1 }, new float[] { t, 0, -1 }, new float[] { -t, 0, -1 }, new float[] { -1, t, 0 }, };
        // Normalize all vertices onto the unit sphere.
        for (float[] v : icosahedronVertexList) {
            float length = (float) Math.sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
            v[0] /= length;
            v[1] /= length;
            v[2] /= length;
        }
        int[][] icosahedronFaceList = new int[][] { { 3, 7, 1 }, { 4, 7, 3 }, { 6, 7, 4 }, { 8, 7, 6 }, { 7, 8, 1 }, { 9, 4, 3 }, { 2, 9, 3 }, { 2, 3, 1 }, { 11, 2, 1 }, { 10, 2, 11 }, { 10, 9, 2 }, { 9, 5, 4 }, { 6, 4, 5 }, { 0, 6, 5 }, { 0, 11, 8 }, { 11, 1, 8 }, { 10, 0, 5 }, { 10, 5, 9 }, { 0, 8, 6 }, { 0, 10, 11 }, };
        ArrayList<Float> vertices = new ArrayList<Float>();
        ArrayList<Integer> faceIndices = new ArrayList<Integer>();
        // NOTE(review): components are appended as (x, z, y), swapping the y/z
        // axes relative to the table above - presumably a deliberate
        // coordinate-system conversion; confirm before touching.
        for (float[] v : icosahedronVertexList) {
            vertices.add(v[0]);
            vertices.add(v[2]);
            vertices.add(v[1]);
        }
        for (int[] f : icosahedronFaceList) {
            subdivide(f[0], f[1], f[2], vertices, faceIndices, level);
        }
        // Append the corona quad (two triangles) in the z=0 plane.
        int beginPositionNumberCorona = vertices.size() / 3;
        float r = 40.f;
        vertices.add(-r);
        vertices.add(r);
        vertices.add(0f);
        vertices.add(r);
        vertices.add(r);
        vertices.add(0f);
        vertices.add(r);
        vertices.add(-r);
        vertices.add(0f);
        vertices.add(-r);
        vertices.add(-r);
        vertices.add(0f);
        faceIndices.add(beginPositionNumberCorona + 0);
        faceIndices.add(beginPositionNumberCorona + 2);
        faceIndices.add(beginPositionNumberCorona + 1);
        faceIndices.add(beginPositionNumberCorona + 2);
        faceIndices.add(beginPositionNumberCorona + 0);
        faceIndices.add(beginPositionNumberCorona + 3);
        FloatBuffer positionBuffer = FloatBuffer.allocate(vertices.size());
        for (Float vert : vertices) {
            // Nudge exact zeros off zero (avoids degenerate values downstream).
            if (vert == 0f)
                vert = Math.nextAfter(vert, vert + 1.0f);
            positionBuffer.put(vert);
        }
        positionBuffer.flip();
        IntBuffer indexBuffer = IntBuffer.allocate(faceIndices.size());
        for (int i : faceIndices) {
            indexBuffer.put(i);
        }
        indexBuffer.flip();

        return new Pair<FloatBuffer, IntBuffer>(positionBuffer, indexBuffer);
    }

    /**
     * Recursively subdivides the triangle (vx, vy, vz): computes the three
     * normalized edge midpoints, appends them as new vertices, and recurses on
     * the four child triangles until level reaches 0, at which point the
     * triangle's indices are emitted.
     */
    private static void subdivide(int vx, int vy, int vz, ArrayList<Float> vertexList, ArrayList<Integer> faceList, int level) {
        if (level != 0) {
            // Midpoint of edge vx-vy, projected onto the unit sphere.
            float x1 = (vertexList.get(3 * vx) + vertexList.get(3 * vy));
            float y1 = (vertexList.get(3 * vx + 1) + vertexList.get(3 * vy + 1));
            float z1 = (vertexList.get(3 * vx + 2) + vertexList.get(3 * vy + 2));
            float length = (float) Math.sqrt(x1 * x1 + y1 * y1 + z1 * z1);
            x1 /= length;
            y1 /= length;
            z1 /= length;
            int firstIndex = vertexList.size() / 3;
            vertexList.add(x1);
            vertexList.add(y1);
            vertexList.add(z1);

            // Midpoint of edge vz-vy.
            float x2 = (vertexList.get(3 * vz) + vertexList.get(3 * vy));
            float y2 = (vertexList.get(3 * vz + 1) + vertexList.get(3 * vy + 1));
            float z2 = (vertexList.get(3 * vz + 2) + vertexList.get(3 * vy + 2));
            length = (float) Math.sqrt(x2 * x2 + y2 * y2 + z2 * z2);
            x2 /= length;
            y2 /= length;
            z2 /= length;
            int secondIndex = vertexList.size() / 3;
            vertexList.add(x2);
            vertexList.add(y2);
            vertexList.add(z2);

            // Midpoint of edge vx-vz.
            float x3 = (vertexList.get(3 * vx) + vertexList.get(3 * vz));
            float y3 = (vertexList.get(3 * vx + 1) + vertexList.get(3 * vz + 1));
            float z3 = (vertexList.get(3 * vx + 2) + vertexList.get(3 * vz + 2));
            length = (float) Math.sqrt(x3 * x3 + y3 * y3 + z3 * z3);
            x3 /= length;
            y3 /= length;
            z3 /= length;
            int thirdIndex = vertexList.size() / 3;
            vertexList.add(x3);
            vertexList.add(y3);
            vertexList.add(z3);

            subdivide(vx, firstIndex, thirdIndex, vertexList, faceList, level - 1);
            subdivide(firstIndex, vy, secondIndex, vertexList, faceList, level - 1);
            subdivide(thirdIndex, secondIndex, vz, vertexList, faceList, level - 1);
            subdivide(firstIndex, secondIndex, thirdIndex, vertexList, faceList, level - 1);
        } else {
            faceList.add(vx);
            faceList.add(vy);
            faceList.add(vz);
        }
    }

    public static void toggleCorona() {
        showCorona = !showCorona;
    }

    @Override
    public RenderableType getType() {
        return type;
    }

    @Override
    public Component getOptionsPanel() {
        FiltersPanel fp = ImageViewerGui.getFiltersPanel();
        fp.setActiveImage(glImage);
        return fp;
    }

    @Override
    public boolean isVisible() {
        return isVisible;
    }

    @Override
    public void setVisible(boolean _isVisible) {
        isVisible = _isVisible;
    }

    @Override
    public String getName() {
        return layerView.getName();
    }

    @Override
    public String getTimeString() {
        return imageData.getMetaData().getDateObs().getCachedDate();
    }

    public View getMainLayerView() {
        return layerView;
    }

    @Override
    public boolean isDeletable() {
        return true;
    }

    @Override
    public boolean isActiveImageLayer() {
        return Layers.getActiveView() == layerView;
    }

    @Override
    public void dispose(GL2 gl) {
        disablePositionVBO(gl);
        disableIndexVBO(gl);
        deletePositionVBO(gl);
        deleteIndexVBO(gl);
        glImage.dispose(gl);
    }

    // Frame state for difference modes: current frame, the previous frame, and
    // the base frame (frame 0).
    private ImageData imageData;
    private ImageData prevImageData;
    private ImageData baseImageData;

    /**
     * Installs a new frame. Frame 0 becomes the base frame; prevImageData is
     * re-seeded when uninitialized or when the previous frame has fallen more
     * than 2 frames behind, otherwise it trails the current frame by one.
     */
    public void setImageData(ImageData newImageData) {
        int frame = newImageData.getFrameNumber();
        if (frame == 0) {
            baseImageData = newImageData;
        }
        if (imageData == null || (prevImageData != null && prevImageData.getFrameNumber() - frame > 2)) {
            prevImageData = newImageData;
        } else if (frame != imageData.getFrameNumber()) {
            prevImageData = imageData;
        }
        imageData = newImageData;
    }

    public ImageData getImageData() {
        return imageData;
    }
}
package org.helioviewer.jhv.viewmodel.view.jp2view.kakadu;

import java.io.File;
import java.io.IOException;
import java.net.URI;

import kdu_jni.Jp2_threadsafe_family_src;
import kdu_jni.Jpx_source;
import kdu_jni.KduException;
import kdu_jni.Kdu_cache;
import kdu_jni.Kdu_ilayer_ref;
import kdu_jni.Kdu_region_compositor;

/**
 * Bundles the Kakadu (JPEG2000) native objects needed to decode one JP2/JPX
 * source: the thread-safe family source, the JPX source opened on top of it,
 * and a region compositor created from that JPX source.
 */
public class KakaduEngine {

    // Handles to native Kakadu objects; created in the constructor,
    // torn down (partially -- see finalize) when this object is collected.
    private Jp2_threadsafe_family_src familySrc;
    private Jpx_source jpxSrc;
    private Kdu_region_compositor compositor;

    /**
     * Opens the JP2 source and builds the decoding pipeline.
     *
     * @param cache Kakadu cache to read from (remote/streamed case), or
     *              {@code null} to open {@code uri} as a local file
     * @param uri   location of the JP2 file; only used when {@code cache} is null
     * @throws KduException on any Kakadu native error
     * @throws IOException  if the local path cannot be canonicalized
     */
    public KakaduEngine(Kdu_cache cache, URI uri) throws KduException, IOException {
        familySrc = new Jp2_threadsafe_family_src();
        if (cache == null) { // local
            File file = new File(uri);
            familySrc.Open(file.getCanonicalPath(), true);
        } else {
            familySrc.Open(cache);
        }
        jpxSrc = new Jpx_source();
        jpxSrc.Open(familySrc, false);
        compositor = createCompositor(jpxSrc);
    }

    public Jp2_threadsafe_family_src getFamilySrc() {
        return familySrc;
    }

    public Jpx_source getJpxSource() {
        return jpxSrc;
    }

    public Kdu_region_compositor getCompositor() {
        return compositor;
    }

    // NOTE(review): finalize() is deprecated and gives no timing guarantee for
    // releasing the native compositor. The jpxSrc/familySrc teardown below is
    // intentionally commented out -- presumably it caused native-side problems;
    // confirm before re-enabling or migrating to java.lang.ref.Cleaner.
    @Override
    protected void finalize() throws Throwable {
        try {
            destroyCompositor(compositor);
            /*
            if (jpxSrc != null) {
                if (jpxSrc.Exists())
                    jpxSrc.Close();
                jpxSrc.Native_destroy();
            }
            if (familySrc != null) {
                if (familySrc.Exists())
                    familySrc.Close();
                familySrc.Native_destroy();
            }
            */
        } catch (KduException e) {
            e.printStackTrace();
        } finally {
            super.finalize();
        }
    }

    /**
     * Creates a compositor over the given JPX source, disabling automatic
     * surface initialization (the caller manages the buffers).
     */
    private static Kdu_region_compositor createCompositor(Jpx_source jpx) throws KduException {
        Kdu_region_compositor compositor = new Kdu_region_compositor();
        // System.out.println(">>>> compositor create " + compositor + " " + Thread.currentThread().getName());
        compositor.Create(jpx, KakaduConstants.CODESTREAM_CACHE_THRESHOLD);
        compositor.Set_surface_initialization_mode(false);
        return compositor;
    }

    /**
     * Halts any in-flight processing, detaches all ilayers and the thread
     * environment, then destroys the native compositor object.
     */
    private static void destroyCompositor(Kdu_region_compositor compositor) throws KduException {
        // System.out.println(">>>> compositor destroy " + compositor + " " + Thread.currentThread().getName());
        compositor.Halt_processing();
        compositor.Remove_ilayer(new Kdu_ilayer_ref(), true);
        compositor.Set_thread_env(null, null);
        compositor.Native_destroy();
    }
}
package org.commcare.android.view; import android.content.Context; import android.graphics.Bitmap; import android.graphics.drawable.BitmapDrawable; import android.util.Log; import android.view.Display; import android.view.Menu; import android.view.MenuItem; import android.view.WindowManager; import org.commcare.suite.model.graph.DisplayData; import org.javarosa.core.reference.InvalidReferenceException; import org.javarosa.core.reference.ReferenceManager; import org.javarosa.core.services.locale.Localizer; import org.odk.collect.android.utilities.FileUtils; import java.io.File; /** * Utilities for converting CommCare UI diplsay details into Android objects * * @author ctsims * */ public class ViewUtil { // This is silly and isn't really what we want here, but it's a start. // (We'd like to be able to add a displayunit to a menu in a super // easy/straightforward way. public static void addDisplayToMenu(Context context, Menu menu, int menuId, DisplayData display) { MenuItem item = menu.add(0, menuId, menuId, Localizer.clearArguments(display.getName()).trim()); if (display.getImageURI() != null) { Bitmap b = ViewUtil.inflateDisplayImage(context, display.getImageURI()); if (b != null) { item.setIcon(new BitmapDrawable(context.getResources(), b)); } } } //ctsims 5/23/2014 //NOTE: I pretty much extracted the below straight from the TextImageAudioView. It's //not great and doesn't scale resources well. Feel free to split back up. /** * Attempts to inflate an image from a <display> or other CommCare UI definition source. * * @param context * @param jrUri The image to inflate * @return A bitmap if one could be created. Null if there is an error or if the image is unavailable. */ public static Bitmap inflateDisplayImage(Context context, String jrUri) { //TODO: Cache? // Now set up the image view if (jrUri != null && !jrUri.equals("")) { try { //TODO: Fallback for non-local refs? Write to a file first or something... 
String imageFilename = ReferenceManager._().DeriveReference(jrUri).getLocalURI(); final File imageFile = new File(imageFilename); if (imageFile.exists()) { Bitmap b = null; try { Display display = ((WindowManager)context.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay(); int screenWidth = display.getWidth(); int screenHeight = display.getHeight(); b = FileUtils.getBitmapScaledToDisplay(imageFile, screenHeight, screenWidth); } catch (OutOfMemoryError e) { Log.w("ImageInflater", "File too large to function on local device"); } if (b != null) { return b; } } } catch (InvalidReferenceException e) { Log.e("ImageInflater", "image invalid reference exception for " + e.getReferenceString()); e.printStackTrace(); } } return null; } }
package com.github.jengelman.gradle.plugins.shadow.tasks;

import com.github.jengelman.gradle.plugins.shadow.ShadowStats;
import com.github.jengelman.gradle.plugins.shadow.internal.*;
import com.github.jengelman.gradle.plugins.shadow.relocation.Relocator;
import com.github.jengelman.gradle.plugins.shadow.relocation.SimpleRelocator;
import com.github.jengelman.gradle.plugins.shadow.transformers.AppendingTransformer;
import com.github.jengelman.gradle.plugins.shadow.transformers.GroovyExtensionModuleTransformer;
import com.github.jengelman.gradle.plugins.shadow.transformers.ServiceFileTransformer;
import com.github.jengelman.gradle.plugins.shadow.transformers.Transformer;
import org.gradle.api.Action;
import org.gradle.api.artifacts.Configuration;
import org.gradle.api.file.FileCollection;
import org.gradle.api.internal.DocumentationRegistry;
import org.gradle.api.internal.file.FileResolver;
import org.gradle.api.internal.file.copy.CopyAction;
import org.gradle.api.tasks.InputFiles;
import org.gradle.api.tasks.Internal;
import org.gradle.api.tasks.Optional;
import org.gradle.api.tasks.TaskAction;
import org.gradle.api.tasks.bundling.Jar;
import org.gradle.api.tasks.util.PatternSet;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;

/**
 * Gradle task that produces an "uber" (shadowed) JAR: it merges the project
 * classes with the resolved dependency JARs, applying the configured
 * {@link Transformer}s to resources and {@link Relocator}s to class packages.
 */
public class ShadowJar extends Jar implements ShadowSpec {

    private List<Transformer> transformers;
    private List<Relocator> relocators;
    private List<Configuration> configurations;
    private DependencyFilter dependencyFilter;

    // When true, classes unused by the project are stripped from the jar.
    private boolean minimizeJar;
    private DependencyFilter dependencyFilterForMinimize;

    private final ShadowStats shadowStats = new ShadowStats();
    private final GradleVersionUtil versionUtil;

    public ShadowJar() {
        super();
        versionUtil = new GradleVersionUtil(getProject().getGradle().getGradleVersion());
        dependencyFilter = new DefaultDependencyFilter(getProject());
        dependencyFilterForMinimize = new MinimizeDependencyFilter(getProject());
        setManifest(new DefaultInheritManifest(getServices().get(FileResolver.class)));
        transformers = new ArrayList<>();
        relocators = new ArrayList<>();
        configurations = new ArrayList<>();
    }

    /**
     * Enables minimization: only dependency classes actually referenced by the
     * project are kept in the shadowed jar.
     *
     * @return this
     */
    public ShadowJar minimize() {
        minimizeJar = true;
        return this;
    }

    /**
     * Enables minimization and configures which dependencies it applies to.
     *
     * @param c configuration of the minimize dependency filter; may be null
     * @return this
     */
    public ShadowJar minimize(Action<DependencyFilter> c) {
        minimize();
        if (c != null) {
            c.execute(dependencyFilterForMinimize);
        }
        return this;
    }

    @Override
    @Internal
    public ShadowStats getStats() {
        return shadowStats;
    }

    @Override
    public InheritManifest getManifest() {
        return (InheritManifest) super.getManifest();
    }

    @Override
    protected CopyAction createCopyAction() {
        DocumentationRegistry documentationRegistry = getServices().get(DocumentationRegistry.class);
        // The unused-class tracker is only needed when minimization is on.
        final UnusedTracker unusedTracker = minimizeJar
                ? UnusedTracker.forProject(getProject(), configurations, dependencyFilterForMinimize)
                : null;
        return new ShadowCopyAction(getArchivePath(), getInternalCompressor(), documentationRegistry,
                this.getMetadataCharset(), transformers, relocators, getRootPatternSet(), shadowStats,
                versionUtil, isPreserveFileTimestamps(), minimizeJar, unusedTracker);
    }

    @Internal
    protected ZipCompressor getInternalCompressor() {
        return versionUtil.getInternalCompressor(getEntryCompression(), this);
    }

    @TaskAction
    protected void copy() {
        // Include resolved dependency jars alongside the project output.
        from(getIncludedDependencies());
        super.copy();
        getLogger().info(shadowStats.toString());
    }

    @InputFiles
    public FileCollection getIncludedDependencies() {
        // Resolved lazily so the configurations can still change before execution.
        return getProject().files(new Callable<FileCollection>() {
            @Override
            public FileCollection call() throws Exception {
                return dependencyFilter.resolve(configurations);
            }
        });
    }

    /**
     * Utility method for assisting between changes in Gradle 1.12 and 2.x.
     *
     * @return this task's root pattern set
     */
    @Internal
    protected PatternSet getRootPatternSet() {
        return versionUtil.getRootPatternSet(getMainSpec());
    }

    /**
     * Configure inclusion/exclusion of module &amp; project dependencies into uber jar.
     *
     * @param c the configuration of the filter
     * @return this
     */
    public ShadowJar dependencies(Action<DependencyFilter> c) {
        if (c != null) {
            c.execute(dependencyFilter);
        }
        return this;
    }

    /**
     * Add a Transformer instance for modifying JAR resources and configure.
     *
     * @param clazz the transformer to add. Must have a no-arg constructor
     * @return this
     */
    public ShadowJar transform(Class<? extends Transformer> clazz) throws InstantiationException, IllegalAccessException {
        return transform(clazz, null);
    }

    /**
     * Add a Transformer instance for modifying JAR resources and configure.
     *
     * @param clazz the transformer class to add. Must have no-arg constructor
     * @param c     the configuration for the transformer
     * @return this
     */
    public <T extends Transformer> ShadowJar transform(Class<T> clazz, Action<T> c) throws InstantiationException, IllegalAccessException {
        T transformer = clazz.newInstance();
        if (c != null) {
            c.execute(transformer);
        }
        transformers.add(transformer);
        return this;
    }

    /**
     * Add a preconfigured transformer instance.
     *
     * @param transformer the transformer instance to add
     * @return this
     */
    public ShadowJar transform(Transformer transformer) {
        transformers.add(transformer);
        return this;
    }

    /**
     * Syntactic sugar for merging service files in JARs.
     *
     * @return this
     */
    public ShadowJar mergeServiceFiles() {
        try {
            transform(ServiceFileTransformer.class);
        } catch (InstantiationException | IllegalAccessException e) {
            // Fix: failures were previously swallowed silently, making a
            // broken transformer invisible. Surface them in the build log.
            getLogger().warn("Unable to instantiate ServiceFileTransformer", e);
        }
        return this;
    }

    /**
     * Syntactic sugar for merging service files in JARs.
     *
     * @param rootPath the resource directory containing the service files
     * @return this
     */
    public ShadowJar mergeServiceFiles(final String rootPath) {
        try {
            transform(ServiceFileTransformer.class, new Action<ServiceFileTransformer>() {
                @Override
                public void execute(ServiceFileTransformer serviceFileTransformer) {
                    serviceFileTransformer.setPath(rootPath);
                }
            });
        } catch (InstantiationException | IllegalAccessException e) {
            getLogger().warn("Unable to instantiate ServiceFileTransformer", e);
        }
        return this;
    }

    /**
     * Syntactic sugar for merging service files in JARs.
     *
     * @param configureClosure configuration applied to the transformer
     * @return this
     */
    public ShadowJar mergeServiceFiles(Action<ServiceFileTransformer> configureClosure) {
        try {
            transform(ServiceFileTransformer.class, configureClosure);
        } catch (InstantiationException | IllegalAccessException e) {
            getLogger().warn("Unable to instantiate ServiceFileTransformer", e);
        }
        return this;
    }

    /**
     * Syntactic sugar for merging Groovy extension module descriptor files in JARs
     *
     * @return this
     */
    public ShadowJar mergeGroovyExtensionModules() {
        try {
            transform(GroovyExtensionModuleTransformer.class);
        } catch (InstantiationException | IllegalAccessException e) {
            getLogger().warn("Unable to instantiate GroovyExtensionModuleTransformer", e);
        }
        return this;
    }

    /**
     * Syntax sugar for appending the contents of resources with the given path.
     *
     * @param resourcePath the resource path whose occurrences are concatenated
     * @return this
     */
    public ShadowJar append(final String resourcePath) {
        try {
            transform(AppendingTransformer.class, new Action<AppendingTransformer>() {
                @Override
                public void execute(AppendingTransformer transformer) {
                    transformer.setResource(resourcePath);
                }
            });
        } catch (InstantiationException | IllegalAccessException e) {
            getLogger().warn("Unable to instantiate AppendingTransformer", e);
        }
        return this;
    }

    /**
     * Add a class relocator that maps each class in the pattern to the provided destination.
     *
     * @param pattern     the source pattern to relocate
     * @param destination the destination package
     * @return this
     */
    public ShadowJar relocate(String pattern, String destination) {
        return relocate(pattern, destination, null);
    }

    /**
     * Add a class relocator that maps each class in the pattern to the provided destination.
     *
     * @param pattern     the source pattern to relocate
     * @param destination the destination package
     * @param configure   the configuration of the relocator
     * @return this
     */
    public ShadowJar relocate(String pattern, String destination, Action<SimpleRelocator> configure) {
        SimpleRelocator relocator = new SimpleRelocator(pattern, destination, new ArrayList<String>(), new ArrayList<String>());
        if (configure != null) {
            configure.execute(relocator);
        }
        relocators.add(relocator);
        return this;
    }

    /**
     * Add a relocator instance.
     *
     * @param relocator the relocator instance to add
     * @return this
     */
    public ShadowJar relocate(Relocator relocator) {
        relocators.add(relocator);
        return this;
    }

    /**
     * Add a relocator of the provided class.
     *
     * @param relocatorClass the relocator class to add. Must have a no-arg constructor.
     * @return this
     */
    public ShadowJar relocate(Class<? extends Relocator> relocatorClass) throws InstantiationException, IllegalAccessException {
        return relocate(relocatorClass, null);
    }

    /**
     * Add a relocator of the provided class and configure.
     *
     * @param relocatorClass the relocator class to add. Must have a no-arg constructor
     * @param configure      the configuration for the relocator
     * @return this
     */
    public <R extends Relocator> ShadowJar relocate(Class<R> relocatorClass, Action<R> configure) throws InstantiationException, IllegalAccessException {
        R relocator = relocatorClass.newInstance();
        if (configure != null) {
            configure.execute(relocator);
        }
        relocators.add(relocator);
        return this;
    }

    @Internal
    public List<Transformer> getTransformers() {
        return this.transformers;
    }

    public void setTransformers(List<Transformer> transformers) {
        this.transformers = transformers;
    }

    @Internal
    public List<Relocator> getRelocators() {
        return this.relocators;
    }

    public void setRelocators(List<Relocator> relocators) {
        this.relocators = relocators;
    }

    @InputFiles
    @Optional
    public List<Configuration> getConfigurations() {
        return this.configurations;
    }

    public void setConfigurations(List<Configuration> configurations) {
        this.configurations = configurations;
    }

    @Internal
    public DependencyFilter getDependencyFilter() {
        return this.dependencyFilter;
    }

    public void setDependencyFilter(DependencyFilter filter) {
        this.dependencyFilter = filter;
    }
}
package ca.corefacility.bioinformatics.irida.ria.web; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.stereotype.Controller; import org.springframework.ui.Model; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; /** * @author Josh Adam <josh.adam@phac-aspc.gc.ca> */ @Controller public class LoginController { private static final Logger logger = LoggerFactory.getLogger(LoginController.class); @RequestMapping(value = "/") public String showSplash() { logger.debug("Displaying splash page"); if (isAuthenticated()) { return "forward:/dashboard"; } else { return "splash"; } } @RequestMapping(value = "/login") public String showLogin(Model model, @RequestParam(value = "error", required = false, defaultValue = "false") Boolean hasError) { logger.debug("Displaying login page."); if (isAuthenticated()) { return "forward:/dashboard"; } else { model.addAttribute("error", hasError); return "login"; } } private boolean isAuthenticated() { return SecurityContextHolder.getContext().getAuthentication() instanceof UsernamePasswordAuthenticationToken; } }
package com.blackducksoftware.integration.jira.task;

import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;

import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;

import com.atlassian.core.util.ClassLoaderUtils;
import com.atlassian.jira.issue.issuetype.IssueType;
import com.atlassian.jira.project.Project;
import com.atlassian.jira.user.ApplicationUser;
import com.atlassian.jira.user.util.UserManager;
import com.atlassian.jira.workflow.AssignableWorkflowScheme;
import com.atlassian.jira.workflow.ConfigurableJiraWorkflow;
import com.atlassian.jira.workflow.JiraWorkflow;
import com.atlassian.jira.workflow.WorkflowManager;
import com.atlassian.jira.workflow.WorkflowSchemeManager;
import com.atlassian.jira.workflow.WorkflowUtil;
import com.blackducksoftware.integration.jira.common.HubJiraConstants;
import com.blackducksoftware.integration.jira.common.HubJiraLogger;
import com.opensymphony.workflow.loader.WorkflowDescriptor;

/**
 * Installs the Hub JIRA workflow (shipped as an XML resource) into a JIRA
 * instance and binds it to the Hub issue types of a project's workflow scheme.
 */
public class HubWorkflowSetup {

    private final HubJiraLogger logger = new HubJiraLogger(Logger.getLogger(this.getClass().getName()));

    private final JiraSettingsService settingService;
    private final WorkflowManager workflowManager;
    private final WorkflowSchemeManager workflowSchemeManager;
    private final UserManager jiraUserManager;
    // Username of the JIRA user that saved the Hub config; used as the
    // workflow's creator.
    private final String jiraUser;

    public HubWorkflowSetup(final JiraSettingsService settingService, final WorkflowManager workflowManager,
            final WorkflowSchemeManager workflowSchemeManager, final UserManager jiraUserManager,
            final String jiraUser) {
        this.settingService = settingService;
        this.workflowManager = workflowManager;
        this.workflowSchemeManager = workflowSchemeManager;
        this.jiraUserManager = jiraUserManager;
        this.jiraUser = jiraUser;
    }

    /**
     * Ensures the Hub workflow exists in JIRA, creating it from the bundled
     * XML resource if it is missing.
     *
     * @return the existing or newly created workflow, or null on any failure
     *         (the failure is logged and recorded in the settings service)
     */
    public JiraWorkflow addHubWorkflowToJira() {
        try {
            JiraWorkflow hubWorkflow = workflowManager.getWorkflow(HubJiraConstants.HUB_JIRA_WORKFLOW);
            if (hubWorkflow == null) {
                final ApplicationUser jiraAppUser = jiraUserManager.getUserByName(jiraUser);
                if (jiraAppUser == null) {
                    logger.error("Could not find the Jira User that saved the Hub Jira config.");
                    return null;
                }
                final InputStream inputStream = ClassLoaderUtils
                        .getResourceAsStream(HubJiraConstants.HUB_JIRA_WORKFLOW_RESOURCE, this.getClass());
                if (inputStream == null) {
                    logger.error("Could not find the Hub Jira workflow resource.");
                    settingService.addHubError("Could not find the Hub Jira workflow resource.", "addHubWorkflow");
                    return null;
                }
                // Fix: the resource stream was previously never closed.
                final String workflowXml;
                try {
                    workflowXml = IOUtils.toString(inputStream, StandardCharsets.UTF_8);
                } finally {
                    IOUtils.closeQuietly(inputStream);
                }
                final WorkflowDescriptor workflowDescriptor = WorkflowUtil.convertXMLtoWorkflowDescriptor(workflowXml);
                hubWorkflow = new ConfigurableJiraWorkflow(HubJiraConstants.HUB_JIRA_WORKFLOW, workflowDescriptor,
                        workflowManager);
                workflowManager.createWorkflow(jiraAppUser, hubWorkflow);
            }
            return hubWorkflow;
        } catch (final Exception e) {
            logger.error("Failed to add the Hub Jira workflow.", e);
            settingService.addHubError(e, "addHubWorkflow");
        }
        return null;
    }

    /**
     * Maps every given issue type of the project's workflow scheme to the Hub
     * workflow, updating the scheme only when at least one mapping changed.
     *
     * @param hubWorkflow the workflow to bind
     * @param project     the project whose scheme is modified
     * @param issueTypes  the (Hub) issue types to map to the workflow
     */
    public void addWorkflowToProjectsWorkflowScheme(final JiraWorkflow hubWorkflow, final Project project,
            final List<IssueType> issueTypes) {
        try {
            final AssignableWorkflowScheme projectWorkflowScheme = workflowSchemeManager.getWorkflowSchemeObj(project);
            final AssignableWorkflowScheme.Builder projectWorkflowSchemeBuilder = projectWorkflowScheme.builder();
            final Map<String, String> issueMappings = projectWorkflowScheme.getMappings();
            boolean needsToBeUpdated = false;
            // IMPORTANT we assume our custom issue types are already in this
            // Projects Workflow scheme
            for (final IssueType issueType : issueTypes) {
                final String workflowName = issueMappings.get(issueType.getId());
                // Fix: workflowName may be null (no explicit mapping); the old
                // workflowName.equals(...) call threw an NPE in that case.
                if (!hubWorkflow.getName().equals(workflowName)) {
                    projectWorkflowSchemeBuilder.setMapping(issueType.getId(), hubWorkflow.getName());
                    needsToBeUpdated = true;
                }
            }
            if (needsToBeUpdated) {
                // Fix: previously the unmodified projectWorkflowScheme was
                // passed here, silently discarding every mapping set on the
                // builder; the built scheme must be the one persisted.
                workflowSchemeManager.updateWorkflowScheme(projectWorkflowSchemeBuilder.build());
            }
        } catch (final Exception e) {
            logger.error("Failed to add the Hub Jira workflow to the Hub scheme.", e);
            settingService.addHubError(e, null, null, project.getName(), null,
                    "addWorkflowToProjectsWorkflowScheme");
        }
    }
}
package com.cjburkey.claimchunk.service.prereq.claim;

import com.cjburkey.claimchunk.Utils;
import org.bukkit.Chunk;
import org.jetbrains.annotations.NotNull;

import java.util.Optional;

/**
 * Claim prerequisite that rejects a claim when any chunk in the 3x3
 * neighborhood (including the target chunk itself) is already claimed by a
 * player other than the claimer.
 */
public class NearChunkPrereq implements IClaimPrereq {

    @Override
    public int getWeight() {
        return 300;
    }

    /**
     * @return true (passed) when no neighboring chunk is claimed by someone
     *         else; chunks owned by the claiming player never block the claim.
     */
    @Override
    public boolean getPassed(@NotNull PrereqClaimData data) {
        for (int x1 = -1; x1 < 2; x1++) {
            for (int z1 = -1; z1 < 2; z1++) {
                Chunk chunk = data.chunk.getWorld()
                        .getChunkAt(x1 + data.chunk.getX(), z1 + data.chunk.getZ());
                // The player's own chunks don't count as "too close".
                if (data.claimChunk.getChunkHandler().isOwner(chunk, data.player)) {
                    continue;
                }
                // Fix: exit as soon as a foreign claim is found (the old loop
                // kept iterating) and drop the leftover debug println plus the
                // redundant second getChunkAt() lookup.
                if (data.claimChunk.getChunkHandler().isClaimed(chunk)) {
                    return false;
                }
            }
        }
        return true;
    }

    @Override
    public Optional<String> getErrorMessage(@NotNull PrereqClaimData data) {
        return Optional.of("Too close to a chunk");
    }
}
package com.esri.samples.raster.raster_function;

import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;

import javafx.application.Application;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.layout.StackPane;
import javafx.stage.Stage;

import com.esri.arcgisruntime.layers.RasterLayer;
import com.esri.arcgisruntime.loadable.LoadStatus;
import com.esri.arcgisruntime.mapping.ArcGISMap;
import com.esri.arcgisruntime.mapping.Basemap;
import com.esri.arcgisruntime.mapping.view.MapView;
import com.esri.arcgisruntime.raster.ImageServiceRaster;
import com.esri.arcgisruntime.raster.Raster;
import com.esri.arcgisruntime.raster.RasterFunction;
import com.esri.arcgisruntime.raster.RasterFunctionArguments;

/**
 * JavaFX sample: loads an online image service raster, applies a raster
 * function defined in a local JSON file (a simplified hillshade), and shows
 * the resulting raster layer on a map, zooming to its extent once loaded.
 */
public class RasterFunctionSample extends Application {

  private MapView mapView;

  @Override
  public void start(Stage stage) throws Exception {

    try {
      // create stack pane and application scene
      StackPane stackPane = new StackPane();
      Scene scene = new Scene(stackPane);

      // set title, size, and add scene to stage
      stage.setTitle("Raster Function Sample");
      stage.setWidth(800);
      stage.setHeight(700);
      stage.setScene(scene);
      stage.show();

      // create a map with dark canvas vector basemap
      ArcGISMap map = new ArcGISMap(Basemap.createDarkGrayCanvasVector());

      // add the map to a map view
      mapView = new MapView();
      mapView.setMap(map);

      // create an image service raster from an online raster service
      ImageServiceRaster imageServiceRaster = new ImageServiceRaster("http://sampleserver6.arcgisonline" +
          ".com/arcgis/rest/services/NLCDLandCover2001/ImageServer");
      imageServiceRaster.loadAsync();
      // the raster function can only be applied once the service raster has loaded
      imageServiceRaster.addDoneLoadingListener(() -> {
        if (imageServiceRaster.getLoadStatus() == LoadStatus.LOADED) {
          // create raster function from local json file
          File jsonFile = new File("./samples-data/raster/hillshade_simplified.json");
          try (Scanner scanner = new Scanner(jsonFile)) {
            // read in the complete file as a string ("\\A" = start-of-input
            // delimiter, so next() returns the whole file in one token)
            String json = scanner.useDelimiter("\\A").next();
            RasterFunction rasterFunction = RasterFunction.fromJson(json);
            RasterFunctionArguments arguments = rasterFunction.getArguments();
            // apply the raster function to the service raster (the function's
            // first raster-name slot receives the input raster)
            arguments.setRaster(arguments.getRasterNames().get(0), imageServiceRaster);
            // create a new raster from the function definition
            Raster raster = new Raster(rasterFunction);
            // create raster layer and add to map as operational layer
            RasterLayer hillshadeLayer = new RasterLayer(raster);
            // add the hillshade raster layer to the map
            map.getOperationalLayers().add(hillshadeLayer);
            hillshadeLayer.addDoneLoadingListener(() -> {
              if (hillshadeLayer.getLoadStatus() == LoadStatus.LOADED) {
                // set viewpoint on the raster (150 px padding around extent)
                mapView.setViewpointGeometryAsync(hillshadeLayer.getFullExtent(), 150);
              } else {
                Alert alert = new Alert(Alert.AlertType.ERROR, "Failed to load the hillshade raster layer");
                alert.show();
              }
            });
          } catch (FileNotFoundException e) {
            Alert alert = new Alert(Alert.AlertType.ERROR, "Failed to locate raster function json");
            alert.show();
          }
        } else {
          Alert alert = new Alert(Alert.AlertType.ERROR, "Failed to load image service raster");
          alert.show();
        }
      });

      // add the map view to stack pane
      stackPane.getChildren().addAll(mapView);
    } catch (Exception e) {
      // on any error, display the stack trace.
      e.printStackTrace();
    }
  }

  /**
   * Stops and releases all resources used in application.
   */
  @Override
  public void stop() throws Exception {

    if (mapView != null) {
      mapView.dispose();
    }
  }

  /**
   * Opens and runs application.
   *
   * @param args arguments passed to this application
   */
  public static void main(String[] args) {

    Application.launch(args);
  }
}
package br.eti.rslemos.ad;

import java.util.Arrays;
import java.util.Iterator;

/**
 * One node of an AD ("Arvores Deitadas") corpus parse tree, parsed from the
 * corpus' current line. A line looks like {@code ===function:form(info)...}:
 * the leading run of '=' gives the depth, then an optional
 * {@code function:form} pair, then an optional parenthesized info chunk whose
 * concrete {@link Info} subtype is selected by the form.
 */
public abstract class Node {

	private final String function;
	private final String form;
	private final Info info;
	protected final int depth;

	// Lazy, single-pass iterator over the node's children; it consumes lines
	// from the shared corpus cursor as it advances.
	private Iterator<Node> children;

	Node(final ADCorpus corpus) {
		String line = corpus.line;

		// Depth = number of leading '=' characters. Note i overshoots by one
		// (charAt(i++) on the first non-'=' char), hence depth = i - 1.
		int i = 0;
		while(line.charAt(i++) == '=');
		depth = i-1;
		line = line.substring(depth);

		String[] parts;

		if (line.contains(":")) {
			// "function:form(info)" -- function is everything before the first ':'
			parts = line.split(":");
			assert parts[0].length() > 0;
			function = parts[0];
			line = line.substring((function + ":").length());

			// form ends at the first '(' or tab
			parts = line.split("[(\t]");
			assert parts[0].length() > 0;
			form = parts[0];
			line = line.substring(form.length());

			if (line.length() > 0 && line.charAt(0) == '(') {
				// The info chunk is the text between the parentheses; its
				// concrete type depends on the form keyword.
				String info_chunk = line.substring(1, line.indexOf(')'));
				if ("n".equals(form))
					info = new Info_n(info_chunk);
				else if("pron-indef".equals(form))
					info = new Info_pron_indef(info_chunk);
				else if("prp".equals(form))
					info = new Info_prp(info_chunk);
				else if("prop".equals(form))
					info = new Info_prop(info_chunk);
				else
					// unknown form with an info chunk -- unsupported line
					throw new RuntimeException();
			} else
				info = null;
		} else {
			// No ':' on the line: node carries no function/form/info.
			function = null;
			form = null;
			info = null;
		}

		children = new Iterator<Node>() {
			// Set to null once a line at the wrong depth is seen; from then on
			// the iterator is exhausted.
			private ADCorpus corpus0 = corpus;

			public boolean hasNext() {
				if (corpus0 == null)
					return false;

				// A child line starts with exactly one more '=' than this node.
				String prefix = buildDepthPrefix(depth + 1);

				if (corpus0.line.startsWith(prefix)) {
					return true;
				} else {
					corpus0 = null;
					return false;
				}
			}

			// NOTE(review): next() assumes hasNext() was called immediately
			// before (it dereferences corpus0 without a check and does not
			// throw NoSuchElementException) -- confirm all call sites follow
			// the hasNext()/next() discipline.
			public Node next() {
				Node subNode;
				// A tab (terminal payload) or the absence of ':' marks a
				// terminal node; otherwise it is a non-terminal.
				if (corpus0.line.contains("\t") || !corpus0.line.contains(":"))
					subNode = new TerminalNode(corpus0);
				else
					subNode = new NonTerminalNode(corpus0);
				return subNode;
			}

			// NOTE(review): silently ignoring remove() violates the Iterator
			// contract (should throw UnsupportedOperationException); kept as-is
			// to preserve behavior.
			public void remove() {
			}
		};
	}

	public String getFunction() {
		return function;
	}

	public String getForm() {
		return form;
	}

	public Info getInfo() {
		return info;
	}

	public int getDepth() {
		return depth;
	}

	/** Single-use iterator over this node's children (see field note). */
	public Iterator<Node> getChildren() {
		return children;
	}

	/** Builds the '='-run prefix that marks lines at the given depth. */
	private static String buildDepthPrefix(int length) {
		char[] prefixChars = new char[length];
		Arrays.fill(prefixChars, '=');
		return new String(prefixChars).intern();
	}
}
package com.github.davidmoten.rx.internal.operators;

import java.util.LinkedList;
import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

import com.github.davidmoten.util.Preconditions;

import rx.Observable;
import rx.Observable.OnSubscribe;
import rx.Producer;
import rx.Subscriber;
import rx.functions.Func1;
import rx.functions.Func2;
import rx.internal.operators.BackpressureUtils;
import rx.internal.util.unsafe.MpscLinkedQueue;
import rx.internal.util.unsafe.UnsafeAccess;

/**
 * RxJava operator that pairs items from two observables by key: each A is
 * matched with a B whose key ({@code bKey}) equals the A's key ({@code aKey}),
 * and every match is emitted as {@code combiner(a, b)}. Unmatched items are
 * buffered per key until their partner arrives. Backpressure-aware; requests
 * are made upstream in batches of {@code requestSize}.
 */
public final class OnSubscribeMatch<A, B, K, C> implements OnSubscribe<C> {

    private final Observable<A> a;
    private final Observable<B> b;
    private final Func1<? super A, ? extends K> aKey;
    private final Func1<? super B, ? extends K> bKey;
    private final Func2<? super A, ? super B, C> combiner;
    private final long requestSize;

    public OnSubscribeMatch(Observable<A> a, Observable<B> b, Func1<? super A, ? extends K> aKey,
            Func1<? super B, ? extends K> bKey, Func2<? super A, ? super B, C> combiner, long requestSize) {
        Preconditions.checkNotNull(a, "a should not be null");
        Preconditions.checkNotNull(b, "b should not be null");
        Preconditions.checkNotNull(aKey, "aKey cannot be null");
        Preconditions.checkNotNull(bKey, "bKey cannot be null");
        Preconditions.checkNotNull(combiner, "combiner cannot be null");
        Preconditions.checkArgument(requestSize >= 1, "requestSize must be >=1");
        this.a = a;
        this.b = b;
        this.aKey = aKey;
        this.bKey = bKey;
        this.combiner = combiner;
        this.requestSize = requestSize;
    }

    @Override
    public void call(Subscriber<? super C> child) {
        // The receiver holder breaks the construction cycle: the subscribers
        // need the producer, but the producer needs the subscribers.
        AtomicReference<Receiver> receiverHolder = new AtomicReference<Receiver>();
        MySubscriber<A, K> aSub = new MySubscriber<A, K>(Source.A, receiverHolder, requestSize);
        MySubscriber<B, K> bSub = new MySubscriber<B, K>(Source.B, receiverHolder, requestSize);
        child.add(aSub);
        child.add(bSub);
        MyProducer<A, B, K, C> producer = new MyProducer<A, B, K, C>(a, b, aKey, bKey, combiner, aSub, bSub, child,
                requestSize);
        receiverHolder.set(producer);
        child.setProducer(producer);
        a.unsafeSubscribe(aSub);
        b.unsafeSubscribe(bSub);
    }

    /**
     * Drives the matching. Extends AtomicLong to hold the outstanding request
     * count; implements Receiver so both upstream subscribers funnel their
     * events (items, completions, errors) into the single serialized queue.
     */
    @SuppressWarnings("serial")
    private static final class MyProducer<A, B, K, C> extends AtomicLong implements Producer, Receiver {

        // Serialized event queue fed by both sources; drained single-threaded.
        private final Queue<Object> queue;
        // Per-key buffers of items still awaiting a match from the other side.
        private final Map<K, Queue<A>> as = new ConcurrentHashMap<K, Queue<A>>();
        private final Map<K, Queue<B>> bs = new ConcurrentHashMap<K, Queue<B>>();
        private final Func1<? super A, ? extends K> aKey;
        private final Func1<? super B, ? extends K> bKey;
        private final Func2<? super A, ? super B, C> combiner;
        private final Subscriber<? super C> child;
        private final MySubscriber<A, K> aSub;
        private final MySubscriber<B, K> bSub;
        private final long requestSize;

        // mutable fields, guarded by `this` and `wip` value
        private final AtomicInteger wip = new AtomicInteger(0);
        // Latched true once downstream requests Long.MAX_VALUE (unbounded mode).
        private boolean requestAll = false;
        // Items consumed from each source since its last upstream request;
        // used to batch re-requests in checkToRequestMore().
        private int requestFromA = 0;
        private int requestFromB = 0;

        // completion state machine
        private int completed = COMPLETED_NONE;

        // completion states
        private static final int COMPLETED_NONE = 0;
        private static final int COMPLETED_A = 1;
        private static final int COMPLETED_B = 2;
        private static final int COMPLETED_BOTH = 3;

        MyProducer(Observable<A> a, Observable<B> b, Func1<? super A, ? extends K> aKey,
                Func1<? super B, ? extends K> bKey, Func2<? super A, ? super B, C> combiner, MySubscriber<A, K> aSub,
                MySubscriber<B, K> bSub, Subscriber<? super C> child, long requestSize) {
            this.aKey = aKey;
            this.bKey = bKey;
            this.combiner = combiner;
            this.child = child;
            this.aSub = aSub;
            this.bSub = bSub;
            this.requestSize = requestSize;
            // Prefer the faster unsafe MPSC queue when available.
            if (UnsafeAccess.isUnsafeAvailable()) {
                queue = new MpscLinkedQueue<Object>();
            } else {
                queue = new ConcurrentLinkedQueue<Object>();
            }
        }

        @Override
        public void request(long n) {
            if (BackpressureUtils.validate(n)) {
                BackpressureUtils.getAndAddRequest(this, n);
                drain();
            }
        }

        /**
         * Standard wip-counter drain loop: only one thread drains at a time;
         * concurrent callers just bump wip and leave.
         */
        void drain() {
            if (wip.getAndIncrement() == 0) {
                do {
                    long r;
                    if (requestAll) {
                        r = Long.MAX_VALUE;
                    } else {
                        r = get();
                        if (r == Long.MAX_VALUE) {
                            requestAll = true;
                        }
                    }
                    int emitted = 0;
                    // note: non-short-circuit '&' -- both operands are cheap
                    // and side-effect free, so behavior matches '&&'
                    while (r > emitted & !queue.isEmpty()) {
                        if (child.isUnsubscribed()) {
                            return;
                        }
                        // note will not return null
                        Object v = queue.poll();
                        if (v instanceof Item) {
                            Item item = (Item) v;
                            Emitted em = handleItem(item.value, item.source);
                            if (em == Emitted.FINISHED) {
                                return;
                            } else if (em == Emitted.ONE) {
                                emitted += 1;
                            }
                        } else if (v instanceof Source) {
                            //source completed
                            Status status = handleCompleted((Source) v);
                            if (status == Status.FINISHED) {
                                return;
                            }
                        } else {
                            // v must be an error
                            clear();
                            child.onError((Throwable) v);
                            return;
                        }
                        if (r == Long.MAX_VALUE) {
                            // unbounded: no need to track emissions
                            emitted = 0;
                        } else if (r == emitted) {
                            // request budget exhausted: settle the count and
                            // pick up any requests that arrived meanwhile
                            r = addAndGet(-emitted);
                            emitted = 0;
                        }
                    }
                    if (emitted > 0) {
                        // queue was exhausted but requests were not
                        addAndGet(-emitted);
                    }
                } while (wip.decrementAndGet() != 0);
            }
        }

        /**
         * Processes one upstream item: either emits a match against a buffered
         * item of the other source or buffers it by key. Also finishes the
         * stream when the other source has completed and no match can ever
         * arrive.
         *
         * @return ONE if a combined value was emitted, NONE if the item was
         *         buffered, FINISHED if the stream terminated
         */
        private Emitted handleItem(Object value, Source source) {
            Emitted result = Emitted.NONE;

            // logic duplication occurs below
            // would be nice to simplify without making code
            // unreadable. A bit of a toss-up.
            if (source == Source.A) {
                // look for match
                @SuppressWarnings("unchecked")
                A a = (A) value;
                K key;
                try {
                    key = aKey.call(a);
                } catch (Throwable e) {
                    clear();
                    child.onError(e);
                    return Emitted.FINISHED;
                }
                Queue<B> q = bs.get(key);
                if (q == null) {
                    // cache value
                    add(as, key, a);
                } else {
                    // emit match
                    B b = poll(bs, q, key);
                    C c;
                    try {
                        c = combiner.call(a, b);
                    } catch (Throwable e) {
                        clear();
                        child.onError(e);
                        return Emitted.FINISHED;
                    }
                    child.onNext(c);
                    result = Emitted.ONE;
                }
                // if the other source has completed and there
                // is nothing to match with then we should stop
                if (completed == COMPLETED_B && bs.isEmpty()) {
                    // can finish
                    clear();
                    child.onCompleted();
                    return Emitted.FINISHED;
                } else {
                    requestFromA += 1;
                }
            } else {
                // look for match
                @SuppressWarnings("unchecked")
                B b = (B) value;
                K key;
                try {
                    key = bKey.call(b);
                } catch (Throwable e) {
                    clear();
                    child.onError(e);
                    return Emitted.FINISHED;
                }
                Queue<A> q = as.get(key);
                if (q == null) {
                    // cache value
                    add(bs, key, b);
                } else {
                    // emit match
                    A a = poll(as, q, key);
                    C c;
                    try {
                        c = combiner.call(a, b);
                    } catch (Throwable e) {
                        clear();
                        child.onError(e);
                        return Emitted.FINISHED;
                    }
                    child.onNext(c);
                    result = Emitted.ONE;
                }
                // if the other source has completed and there
                // is nothing to match with then we should stop
                if (completed == COMPLETED_A && as.isEmpty()) {
                    // can finish
                    clear();
                    child.onCompleted();
                    return Emitted.FINISHED;
                } else {
                    requestFromB += 1;
                }
            }
            // requests are batched so that each source gets a turn
            checkToRequestMore();
            return result;
        }

        private enum Emitted {
            ONE, NONE, FINISHED;
        }

        /**
         * Records that a source completed; terminates downstream when both
         * sources are done or the completed source can no longer produce
         * matches for the other side's buffered items.
         */
        private Status handleCompleted(Source source) {
            completed(source);
            final boolean done;
            if (source == Source.A) {
                aSub.unsubscribe();
                done = (completed == COMPLETED_BOTH) || (completed == COMPLETED_A && as.isEmpty());
            } else {
                bSub.unsubscribe();
                done = (completed == COMPLETED_BOTH) || (completed == COMPLETED_B && bs.isEmpty());
            }
            if (done) {
                clear();
                child.onCompleted();
                return Status.FINISHED;
            } else {
                checkToRequestMore();
                return Status.KEEP_GOING;
            }
        }

        private enum Status {
            FINISHED, KEEP_GOING;
        }

        /**
         * Requests another batch upstream once a full batch has been consumed
         * from a source; when both sources are live, both batches must be
         * consumed before either is re-requested (keeps the sources in step).
         */
        private void checkToRequestMore() {
            if (requestFromA == requestSize && completed == COMPLETED_B) {
                requestFromA = 0;
                aSub.requestMore(requestSize);
            } else if (requestFromB == requestSize && completed == COMPLETED_A) {
                requestFromB = 0;
                bSub.requestMore(requestSize);
            } else if (requestFromA == requestSize && requestFromB == requestSize) {
                requestFromA = 0;
                requestFromB = 0;
                aSub.requestMore(requestSize);
                bSub.requestMore(requestSize);
            }
        }

        /** Advances the NONE -> A/B -> BOTH completion state machine. */
        private void completed(Source source) {
            if (source == Source.A) {
                if (completed == COMPLETED_NONE) {
                    completed = COMPLETED_A;
                } else if (completed == COMPLETED_B) {
                    completed = COMPLETED_BOTH;
                }
            } else {
                if (completed == COMPLETED_NONE) {
                    completed = COMPLETED_B;
                } else if (completed == COMPLETED_A) {
                    completed = COMPLETED_BOTH;
                }
            }
        }

        /** Releases buffered items and detaches from both upstreams. */
        private void clear() {
            as.clear();
            bs.clear();
            queue.clear();
            aSub.unsubscribe();
            bSub.unsubscribe();
        }

        // Appends value to the per-key queue, creating the queue on first use.
        private static <K, T> void add(Map<K, Queue<T>> map, K key, T value) {
            Queue<T> q = map.get(key);
            if (q == null) {
                q = new LinkedList<T>();
                map.put(key, q);
            }
            q.offer(value);
        }

        // Polls the per-key queue and removes the map entry once empty.
        private static <K, T> T poll(Map<K, Queue<T>> map, Queue<T> q, K key) {
            T t = q.poll();
            if (q.isEmpty()) {
                map.remove(key);
            }
            return t;
        }

        @Override
        public void offer(Object item) {
            queue.offer(item);
            drain();
        }
    }

    // Sink that the two upstream subscribers push their events into.
    interface Receiver {
        void offer(Object item);
    }

    /**
     * Subscriber for one source; forwards every event to the shared Receiver,
     * tagged with its Source so the producer can tell the two apart. An error
     * is offered as a bare Throwable and a completion as the bare Source enum.
     */
    private static class MySubscriber<T, K> extends Subscriber<T> {

        private final AtomicReference<Receiver> receiver;
        private final Source source;

        MySubscriber(Source source, AtomicReference<Receiver> receiver, long requestSize) {
            this.source = source;
            this.receiver = receiver;
            request(requestSize);
        }

        @Override
        public void onNext(T t) {
            // TODO can reduce allocations by emitting one source
            // without wrapping as an item. Would have to use NULL_SENTINEL
            // though because cannot rely on queue accepting nulls.
            receiver.get().offer(new Item(t, source));
        }

        @Override
        public void onCompleted() {
            receiver.get().offer(source);
        }

        @Override
        public void onError(Throwable e) {
            receiver.get().offer(e);
        }

        public void requestMore(long n) {
            request(n);
        }
    }

    // Upstream item tagged with the source it came from.
    static final class Item {
        final Object value;
        final Source source;

        Item(Object value, Source source) {
            this.value = value;
            this.source = source;
        }
    }

    enum Source {
        A, B;
    }
}
package com.techcavern.wavetact.ircCommands.netadmin; import com.techcavern.wavetact.annot.IRCCMD; import com.techcavern.wavetact.objects.IRCCommand; import com.techcavern.wavetact.utils.DatabaseUtils; import com.techcavern.wavetact.utils.ErrorUtils; import com.techcavern.wavetact.utils.GeneralUtils; import com.techcavern.wavetact.utils.IRCUtils; import org.jooq.Record; import org.pircbotx.Channel; import org.pircbotx.PircBotX; import org.pircbotx.User; import static com.techcavern.wavetactdb.Tables.NETWORKPROPERTY; @IRCCMD public class NetworkProperty extends IRCCommand { public NetworkProperty() { super(GeneralUtils.toArray("networkproperty netprop"), 20, "netprop (+)(-)[property] (value)", "creates, modifies or removes network properties", false); } @Override public void onCommand(User user, PircBotX network, String prefix, Channel channel, boolean isPrivate, int userPermLevel, String... args) throws Exception { String networkname = IRCUtils.getNetworkNameByNetwork(network); String property; boolean isModify = false; boolean isDelete = false; boolean viewonly = false; if (args.length < 2) { viewonly = true; } if (args[0].startsWith("-")) { property = args[0].replaceFirst("-", ""); isDelete = true; } else if (args[0].startsWith("+")) { property = args[0].replaceFirst("\\+", ""); isModify = true; } else { property = args[0]; } Record networkProperty = DatabaseUtils.getNetworkProperty(networkname, property); if (networkProperty != null && (isDelete || isModify)) { if (isDelete) { DatabaseUtils.removeNetworkProperty(networkname, property); IRCUtils.sendMessage(user, network, channel, "Property deleted", prefix); } else if (isModify) { if (viewonly) IRCUtils.sendMessage(user, network, channel, property + ": " + networkProperty.getValue(NETWORKPROPERTY.VALUE), prefix); else { networkProperty.setValue(NETWORKPROPERTY.VALUE, args[1]); DatabaseUtils.updateNetworkProperty(networkProperty); IRCUtils.sendMessage(user, network, channel, "Property modified", prefix); } } } 
else if (networkProperty == null && !isDelete && !isModify) { DatabaseUtils.addNetworkProperty(networkname, property, args[1]); IRCUtils.sendMessage(user, network, channel, "Property added", prefix); } else { ErrorUtils.sendError(user, "Unknown user or unknown property"); } } }
package com.xtremelabs.robolectric.shadows;

import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import com.xtremelabs.robolectric.internal.Implementation;
import com.xtremelabs.robolectric.internal.Implements;

import java.util.HashMap;
import java.util.Map;

/**
 * Shadow of {@code ConnectivityManager} that provides for the simulation of
 * the active connection status.
 */
@Implements(ConnectivityManager.class)
public class ShadowConnectivityManager {

    private final Map<Integer, NetworkInfo> infoByType = new HashMap<Integer, NetworkInfo>();
    private NetworkInfo activeNetwork;
    private boolean backgroundDataSetting;

    public ShadowConnectivityManager() {
        // Start out with a default active network so tests see a connected device.
        setActiveNetworkInfo(ShadowNetworkInfo.newInstance());
    }

    @Implementation
    public NetworkInfo getActiveNetworkInfo() {
        return activeNetwork;
    }

    @Implementation
    public NetworkInfo[] getAllNetworkInfo() {
        // Snapshot every registered network into a plain array.
        return infoByType.values().toArray(new NetworkInfo[0]);
    }

    @Implementation
    public NetworkInfo getNetworkInfo(int networkType) {
        return infoByType.get(networkType);
    }

    @Implementation
    public boolean getBackgroundDataSetting() {
        return backgroundDataSetting;
    }

    /** Registers (or replaces) the {@link NetworkInfo} for one network type. */
    public void setNetworkInfo(int networkType, NetworkInfo networkInfo) {
        infoByType.put(networkType, networkInfo);
    }

    public void setBackgroundDataSetting(boolean enabled) {
        backgroundDataSetting = enabled;
    }

    /**
     * Sets the active network. A non-null value is also registered under its
     * type; a null value clears every registered network.
     */
    public void setActiveNetworkInfo(NetworkInfo info) {
        activeNetwork = info;
        if (info == null) {
            infoByType.clear();
        } else {
            infoByType.put(info.getType(), info);
        }
    }
}
package com.youcruit.mailchimp.client.serializers;

import java.io.IOException;

import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;

/**
 * Gson adapter that serializes a {@code String[]} as a single slash-joined
 * JSON string. Deserialization is intentionally unsupported (write-only).
 */
public class ArrayStringAdapter extends TypeAdapter<String[]> {

    /**
     * Not supported: this adapter is write-only.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public String[] read(JsonReader in) throws IOException {
        // FIX: UnsupportedOperationException is the standard type for an
        // unimplemented operation; IllegalArgumentException wrongly implied
        // the caller passed a bad argument.
        throw new UnsupportedOperationException("Not implemented");
    }

    @Override
    public void write(JsonWriter out, String[] value) throws IOException {
        if (value != null) {
            out.value(listToString(value));
        } else {
            out.nullValue();
        }
    }

    /**
     * Joins the given values with {@code '/'}.
     * (Presumably '/' is the Mailchimp wire separator for multi-valued
     * fields — TODO confirm against the API client callers.)
     *
     * @param value values to join; must not be null
     * @return the slash-separated concatenation, empty for an empty array
     */
    public String listToString(String[] value) {
        StringBuilder sb = new StringBuilder();
        for (String s : value) {
            if (sb.length() > 0) {
                sb.append("/");
            }
            sb.append(s);
        }
        return sb.toString();
    }
}
package com.intellij.xml.util;

import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.xml.XmlFile;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;

import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.net.URL;
import java.net.MalformedURLException;

import org.apache.xerces.xni.parser.XMLInputSource;
import org.apache.xerces.xni.parser.XMLEntityResolver;
import org.apache.xerces.xni.XMLResourceIdentifier;
import org.apache.xerces.xni.XNIException;

/**
 * Xerces entity resolver that resolves system ids against the IDEA PSI/VFS
 * world instead of the file system, so validation can see in-project and
 * mapped resources. Anchored at the {@link XmlFile} being processed.
 */
public class XmlResourceResolver implements XMLEntityResolver {
  private static Logger LOG = Logger.getInstance("#com.intellij.xml.util.XmlResourceResolver");
  // The XML file all relative resolutions are anchored at.
  private XmlFile myFile;
  private Project myProject;

  public XmlResourceResolver(XmlFile _xmlFile, Project _project) {
    myFile = _xmlFile;
    myProject = _project;
  }

  /**
   * Resolves {@code systemId} (optionally relative to {@code baseSystemId})
   * to a PSI file, or null when nothing matches.
   * <p>
   * Strategy, in order: recursively resolve the base id to a PSI file
   * (trying a working-directory remap, a plain relative VFS lookup, then a
   * URL lookup), fall back to {@code myFile} as the base, then look the
   * system id up via {@link XmlUtil#findXmlFile}, finally retrying with the
   * system id appended to the base id's directory. Runs inside a read action
   * because PSI/VFS access requires it.
   */
  public PsiFile resolve(final String baseSystemId, final String systemId) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("enter: resolveEntity(baseSystemId='" + baseSystemId + "' systemId='" + systemId + "')");
    }
    if (systemId == null) return null;
    // Single-element array so the anonymous Runnable can write the result out.
    final PsiFile[] result = new PsiFile[] { null };
    final Runnable action = new Runnable() {
      public void run() {
        PsiFile baseFile = null;
        VirtualFile vFile = null;
        if (baseSystemId != null) {
          // First try to resolve the base id itself (recursive call with no base).
          baseFile = (XmlFile)resolve(null,baseSystemId);
          if (baseFile == null) {
            if (myFile != null) {
              // The parser may have absolutized the id against the process working
              // directory; map that prefix back to the directory of myFile.
              File workingFile = new File("");
              String workingDir = workingFile.getAbsoluteFile().getAbsolutePath().replace(File.separatorChar, '/');
              String id = StringUtil.replace(baseSystemId, workingDir, myFile.getVirtualFile().getParent().getPath());
              vFile = VfsUtil.findRelativeFile(id, null);
            }
            if (vFile == null) {
              // Fall back to the raw base id as a VFS-relative path ...
              vFile = VfsUtil.findRelativeFile(baseSystemId, null);
              if (vFile == null) {
                // ... and finally to interpreting it as a URL.
                try {
                  vFile = VfsUtil.findFileByURL(new URL(baseSystemId));
                } catch(MalformedURLException ex) {
                  // Not a URL either; leave vFile null and fall through to myFile.
                }
              }
            }
          }
          if (vFile != null) {
            baseFile = PsiManager.getInstance(myProject).findFile(vFile);
          }
        }
        if (baseFile == null) {
          // No usable base: anchor at the file being validated.
          baseFile = myFile;
        }
        PsiFile psiFile = XmlUtil.findXmlFile(baseFile, systemId);
        if (psiFile == null && baseSystemId!=null && baseFile!=null) {
          // Last resort: treat systemId as relative to the base id's directory.
          String fullUrl = baseSystemId.substring( 0, baseSystemId.lastIndexOf('/') + 1 ) + systemId;
          psiFile = XmlUtil.findXmlFile(baseFile,fullUrl);
        }
        if (LOG.isDebugEnabled()) {
          LOG.debug("resolveEntity: psiFile='" + (psiFile != null ? psiFile.getVirtualFile() : null) + "'");
        }
        result[0] = psiFile;
      }
    };
    ApplicationManager.getApplication().runReadAction(action);
    return result[0];
  }

  /**
   * Xerces callback: resolves the identifier's literal system id (or, failing
   * that, its namespace) to a PSI file and wraps the file's text as an
   * {@link XMLInputSource}. Returns null when the resource cannot be found,
   * letting Xerces fall back to its default resolution.
   */
  public XMLInputSource resolveEntity(XMLResourceIdentifier xmlResourceIdentifier) throws XNIException, IOException {
    String publicId;
    // Prefer the literal system id; the namespace URI is the fallback key.
    PsiFile psiFile = resolve(
      xmlResourceIdentifier.getBaseSystemId(),
      publicId = (xmlResourceIdentifier.getLiteralSystemId() != null ?
        xmlResourceIdentifier.getLiteralSystemId():
        xmlResourceIdentifier.getNamespace())
    );

    if (psiFile==null &&
        xmlResourceIdentifier.getLiteralSystemId()!=null &&
        xmlResourceIdentifier.getNamespace()!=null) {
      // Literal system id failed; retry with the namespace URI.
      psiFile = resolve(
        xmlResourceIdentifier.getBaseSystemId(),
        publicId = xmlResourceIdentifier.getNamespace()
      );
    }

    if (psiFile == null) return null;
    XMLInputSource source = new XMLInputSource(xmlResourceIdentifier);
    //VirtualFile virtualFile = psiFile.getVirtualFile();
    //final String url = VfsUtil.fixIDEAUrl(virtualFile.getUrl());
    //source.setBaseSystemId(url);
    //source.setSystemId(url);
    source.setPublicId(publicId);
    // Serve the (possibly in-memory, unsaved) PSI text rather than disk content.
    source.setCharacterStream(new StringReader(psiFile.getText()));
    return source;
  }
}
package cz.afrosoft.whattoeat.diet.generator.impl.none;

import org.apache.commons.lang3.Validate;
import org.springframework.stereotype.Component;

import java.time.LocalDate;
import java.util.LinkedList;
import java.util.List;

import cz.afrosoft.whattoeat.diet.generator.impl.BasicGeneratorParams;
import cz.afrosoft.whattoeat.diet.generator.model.Generator;
import cz.afrosoft.whattoeat.diet.generator.model.GeneratorGui;
import cz.afrosoft.whattoeat.diet.generator.model.GeneratorParameters;
import cz.afrosoft.whattoeat.diet.generator.model.GeneratorType;
import cz.afrosoft.whattoeat.diet.list.data.entity.DayDietEntity;

/**
 * Generator which produces one empty {@link DayDietEntity} per day of the
 * requested interval, without filling in any meals.
 *
 * @author tomas.rejent
 */
@Component
public class NoneGenerator implements Generator<BasicGeneratorParams> {

    @Override
    public GeneratorGui<BasicGeneratorParams> getGui() {
        return new NoneGeneratorGui();
    }

    /**
     * Creates an empty day diet for every day in the closed interval
     * [{@code parameters.getFrom()}, {@code parameters.getTo()}].
     *
     * @param parameters generator parameters, must not be null and must carry
     *                   non-null from/to dates
     * @return one empty day diet per day; an empty list when from is after to
     * @throws NullPointerException when parameters or its dates are null
     */
    @Override
    public List<DayDietEntity> generate(final GeneratorParameters parameters) {
        Validate.notNull(parameters);
        // FIX (robustness): fail fast with a clear message instead of an
        // anonymous NPE inside the loop condition below.
        Validate.notNull(parameters.getFrom(), "Start date of generated interval cannot be null.");
        Validate.notNull(parameters.getTo(), "End date of generated interval cannot be null.");

        List<DayDietEntity> dayDiets = new LinkedList<>();
        // Both endpoints inclusive: loop while day <= to.
        for (LocalDate day = parameters.getFrom(); !day.isAfter(parameters.getTo()); day = day.plusDays(1)) {
            DayDietEntity dayDiet = new DayDietEntity();
            dayDiet.setDay(day);
            dayDiets.add(dayDiet);
        }
        return dayDiets;
    }

    @Override
    public GeneratorType getType() {
        return GeneratorType.NONE;
    }
}
package com.jbooktrader.platform.model;

import com.jbooktrader.platform.report.*;
import com.jbooktrader.platform.trader.*;
import com.jbooktrader.platform.web.*;

import java.util.*;

/**
 * Acts as the dispatcher of the services.
 */
public class Dispatcher {
    public enum Mode {
        Trade("Trade", "Trading"),
        BackTest("BackTest", "Back Testing"),
        ForwardTest("ForwardTest", "Forward Testing"),
        Optimization("Optimize", "Optimizing"); // Note, the enum label here does follow grammar convention

        private final String name, presentParticiple;

        private Mode(String name, String presentParticiple) {
            this.name = name;
            this.presentParticiple = presentParticiple;
        }

        public String getName() {
            return name;
        }

        public String getPresentParticiple() {
            return presentParticiple;
        }
    }

    private static final List<ModelListener> listeners = new ArrayList<ModelListener>();
    private static Report eventReport;
    private static Trader trader;
    private static Mode mode;
    // Number of strategies currently running; guarded by class-level synchronization.
    private static int activeStrategies;

    public static void setReporter() throws JBookTraderException {
        eventReport = new Report("EventReport");
    }

    public static void addListener(ModelListener listener) {
        listeners.add(listener);
    }

    public static void removeListener(ModelListener listener) {
        listeners.remove(listener);
    }

    /**
     * Notifies every registered listener of a model event. Suppressed entirely
     * while optimizing, since the UI must not churn for thousands of runs.
     * A listener failure is reported but does not stop the remaining listeners.
     */
    public static void fireModelChanged(ModelListener.Event event, Object value) {
        if (mode != Mode.Optimization) {
            for (ModelListener listener : listeners) {
                try {
                    listener.modelChanged(event, value);
                } catch (Exception e) {
                    eventReport.report(e);
                }
            }
        }
    }

    synchronized public static Trader getTrader() {
        // Lazily created on first use.
        if (trader == null) {
            trader = new Trader();
        }
        return trader;
    }

    public static Report getReporter() {
        return eventReport;
    }

    public static Mode getMode() {
        return mode;
    }

    public static void exit() {
        if (trader != null) {
            trader.getAssistant().disconnect();
        }
        System.exit(0);
    }

    public static void setMode(Mode mode) throws JBookTraderException {
        Dispatcher.mode = mode;
        eventReport.report("Mode set to: " + mode);

        // Disable all reporting when JBT runs in optimization mode. The optimizer runs
        // thousands of strategies, and the amount of data to report would be enormous.
        if (mode == Mode.Optimization) {
            Report.disable();
        } else {
            Report.enable();
        }

        if (mode == Mode.Trade || mode == Mode.ForwardTest) {
            // FIX: use getTrader() so the trader is lazily created instead of
            // risking an NPE when setMode() is called before getTrader().
            getTrader().getAssistant().connect();
            MonitoringServer.start();
        } else {
            // Only disconnect an already-created trader; creating one here just
            // to disconnect it would be pointless.
            if (trader != null) {
                trader.getAssistant().disconnect();
            }
        }

        fireModelChanged(ModelListener.Event.ModeChanged, null);
    }

    public static synchronized void strategyStarted() {
        activeStrategies++;
        fireModelChanged(ModelListener.Event.StrategiesStart, null);
    }

    public static synchronized void strategyCompleted() {
        // FIX: restore the truncated decrement; without it activeStrategies
        // never reached zero and StrategiesEnd was never fired.
        activeStrategies--;
        if (activeStrategies == 0) {
            fireModelChanged(ModelListener.Event.StrategiesEnd, null);
        }
    }
}
package edu.umdearborn.astronomyapp.controller;

import static edu.umdearborn.astronomyapp.util.constants.UrlConstants.INSTRUCTOR_PATH;
import static edu.umdearborn.astronomyapp.util.constants.UrlConstants.REST_PATH_PREFIX;
import static edu.umdearborn.astronomyapp.util.constants.UrlConstants.STUDENT_PATH;
import static org.springframework.web.bind.annotation.RequestMethod.DELETE;
import static org.springframework.web.bind.annotation.RequestMethod.GET;
import static org.springframework.web.bind.annotation.RequestMethod.POST;

import java.security.Principal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

import javax.servlet.http.HttpSession;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.ResponseEntity;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.bind.annotation.SessionAttributes;

import edu.umdearborn.astronomyapp.entity.Answer;
import edu.umdearborn.astronomyapp.entity.CourseUser;
import edu.umdearborn.astronomyapp.entity.ModuleGroup;
import edu.umdearborn.astronomyapp.service.AclService;
import edu.umdearborn.astronomyapp.service.GroupService;
import edu.umdearborn.astronomyapp.util.HttpSessionUtil;
import edu.umdearborn.astronomyapp.util.json.JsonDecorator;

/**
 * REST endpoints for student module groups: group membership, member
 * check-in (session-scoped, keyed by group id), answer save/submit and group
 * finalization. Every student endpoint enforces course membership, the
 * STUDENT role, and (where relevant) group membership via {@link AclService}.
 */
@RestController
@RequestMapping(REST_PATH_PREFIX)
@SessionAttributes("courseUser")
public class ModuleGroupController {

  private static final Logger logger = LoggerFactory.getLogger(ModuleGroupController.class);

  private AclService acl;
  private GroupService groupService;

  public ModuleGroupController(AclService acl, GroupService groupService) {
    this.acl = acl;
    this.groupService = groupService;
  }

  /** Creates a new group for the calling student in the given module. */
  @RequestMapping(value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group",
      method = POST)
  public JsonDecorator<ModuleGroup> createGroup(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    ModuleGroup group = groupService.createGroup(courseUserId, moduleId);
    JsonDecorator<ModuleGroup> json = new JsonDecorator<>();
    json.setPayload(group);
    json.addProperty("isModuleEditable", groupService.hasLock(group.getId(),
        getCheckinSessionAttribute(session, group.getId(), courseUserId)));
    return json;
  }

  /** Removes a member from the group; returns the remaining roster. */
  @RequestMapping(
      value = STUDENT_PATH
          + "/course/{courseId}/module/{moduleId}/group/{groupId}/member/{removeUser}",
      method = DELETE)
  public List<CourseUser> removeUser(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      HttpSession session, @PathVariable("removeUser") String removedUser, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceInGroup(courseUserId, groupId);
    return groupService.removeFromGroup(groupId, removedUser);
  }

  /** Lists students in the module who are not yet in any group. */
  @RequestMapping(value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/free",
      method = GET)
  public List<CourseUser> getFreeAgents(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    // Never return null: substitute an empty roster.
    Optional<List<CourseUser>> optional =
        Optional.ofNullable(groupService.getFreeUsers(courseId, moduleId));
    return optional.orElse(new ArrayList<CourseUser>());
  }

  /** Returns the caller's group for this module (empty payload when none). */
  @RequestMapping(value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group",
      method = GET)
  public JsonDecorator<ModuleGroup> getGroup(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    Optional<ModuleGroup> optional =
        Optional.ofNullable(groupService.getGroup(courseUserId, moduleId));
    JsonDecorator<ModuleGroup> json = new JsonDecorator<>();
    optional.ifPresent(g -> {
      logger.debug("Returning group and members and if editable");
      json.setPayload(g);
      json.addProperty("members", groupService.getUsersInGroup(g.getId()));
      json.addProperty("isModuleEditable", groupService.hasLock(g.getId(),
          getCheckinSessionAttribute(session, g.getId(), courseUserId)));
    });
    return json;
  }

  /** Adds a member to an unlocked group; returns the updated roster. */
  @RequestMapping(
      value = STUDENT_PATH
          + "/course/{courseId}/module/{moduleId}/group/{groupId}/member/{addMember}",
      method = POST)
  public List<CourseUser> joinGroup(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      HttpSession session, @PathVariable("addMember") String addMember, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceInGroup(courseUserId, groupId);
    acl.enforceGroupLocked(groupId, false);
    return groupService.joinGroup(addMember, moduleId, groupId);
  }

  /** Returns the list of members checked in during this HTTP session. */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/checkin",
      method = GET)
  public JsonDecorator<List<String>> getCheckinStatus(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceInGroup(courseUserId, groupId);
    List<String> checkin = getCheckinSessionAttribute(session, groupId, courseUserId);
    JsonDecorator<List<String>> json = new JsonDecorator<>();
    json.setPayload(checkin);
    json.addProperty("isModuleEditable",
        groupService.hasLock(groupId, getCheckinSessionAttribute(session, groupId, courseUserId)));
    return json;
  }

  /**
   * Endpoint named "checkin-reset".
   * NOTE(review): despite the name, this method never clears the session
   * attribute — its body is identical to {@link #getCheckinStatus}. Behavior
   * preserved here; confirm whether a session.removeAttribute(groupId) was
   * intended.
   */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/checkin-reset",
      method = GET)
  public JsonDecorator<List<String>> resetCheckin(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceInGroup(courseUserId, groupId);
    List<String> checkin = getCheckinSessionAttribute(session, groupId, courseUserId);
    JsonDecorator<List<String>> json = new JsonDecorator<>();
    json.setPayload(checkin);
    json.addProperty("isModuleEditable",
        groupService.hasLock(groupId, getCheckinSessionAttribute(session, groupId, courseUserId)));
    return json;
  }

  /**
   * Checks another group member in using their credentials; the member id is
   * recorded in the session-scoped check-in list for this group.
   *
   * @throws AccessDeniedException when the credentials do not authenticate a
   *         group member
   */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/checkin",
      method = POST)
  public JsonDecorator<List<String>> checkin(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      @RequestBody Map<String, String> checkinUser, HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceInGroup(courseUserId, groupId);
    acl.enforceGroupLocked(groupId, true);
    List<String> checkin = getCheckinSessionAttribute(session, groupId, courseUserId);
    logger.debug("Current checkin status: {}", Arrays.toString(checkin.toArray()));
    Optional<CourseUser> optional = Optional
        .ofNullable(groupService.checkin(Optional.ofNullable(checkinUser.get("email")).orElse(""),
            Optional.ofNullable(checkinUser.get("password")).orElse(""), groupId));
    CourseUser user = optional.orElseThrow(
        () -> new AccessDeniedException("User: " + checkinUser.get("email") + " cannot checkin"));
    if (!checkin.contains(user.getId())) {
      checkin.add(user.getId());
      session.setAttribute(groupId, checkin);
      logger.debug("After checkin status: {}", Arrays.toString(checkin.toArray()));
    }
    JsonDecorator<List<String>> json = new JsonDecorator<>();
    boolean isEditable =
        groupService.hasLock(groupId, getCheckinSessionAttribute(session, groupId, courseUserId));
    json.setPayload(checkin);
    json.addProperty("isModuleEditable", isEditable);
    json.addProperty("hasLock", isEditable);
    return json;
  }

  /** Lists the members of the group. */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/members",
      method = GET)
  public List<CourseUser> getGroupRoster(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceInGroup(courseUserId, groupId);
    return groupService.getUsersInGroup(groupId);
  }

  /** Reports whether the caller's session holds the group edit lock. */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/canEdit",
      method = GET)
  public Map<String, Boolean> hasLock(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceInGroup(courseUserId, groupId);
    List<String> checkedIn = getCheckinSessionAttribute(session, groupId, courseUserId);
    Map<String, Boolean> map = new HashMap<>();
    boolean hasLock = groupService.hasLock(groupId, checkedIn);
    map.put("hasLock", hasLock);
    // FIX: "hasLock && true" was a no-op; both flags carry the same value.
    map.put("isModuleEditable", hasLock);
    return map;
  }

  /** Saves (without submitting) answers for the group. */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/answers/save",
      method = POST)
  public List<Answer> saveAnswers(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      HttpSession session, @RequestBody Map<String, String> answers, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceInGroup(courseUserId, groupId);
    acl.enforceGroupLocked(groupId, true);
    acl.enforceHasLock(groupId, getCheckinSessionAttribute(session, groupId, courseUserId));
    return groupService.saveAnswers(answers, groupId);
  }

  /**
   * Submits the group's saved answers.
   * NOTE(review): the request body is bound but never used — submission
   * operates on previously saved answers only; confirm the body can be
   * dropped from the API contract.
   */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/answers/submit",
      method = POST)
  public List<Answer> submitAnswers(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      @RequestBody Map<String, String> answers, HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceInGroup(courseUserId, groupId);
    acl.enforceGroupLocked(groupId, true);
    acl.enforceHasLock(groupId, getCheckinSessionAttribute(session, groupId, courseUserId));
    return groupService.submitAnswers(groupId);
  }

  /** Returns the group's answers keyed by question id. */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/answers",
      method = GET)
  public Map<String, Answer> getAnswers(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      @RequestParam(name = "showSaved", defaultValue = "true") boolean showSavedAnswers,
      Principal principal, HttpSession session) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceGroupLocked(groupId, true);
    acl.enforceInGroup(courseUserId, groupId);
    return Optional.ofNullable(groupService.getAnswers(groupId, showSavedAnswers))
        .orElse(new ArrayList<Answer>()).parallelStream()
        .collect(Collectors.toMap(a -> a.getQuestion().getId(), a -> a));
  }

  /**
   * Instructor view of a group's submitted answers keyed by question id.
   * NOTE(review): only course membership is enforced here (no role check) —
   * confirm INSTRUCTOR_PATH routing supplies the role restriction.
   */
  @RequestMapping(
      value = INSTRUCTOR_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/answers",
      method = GET)
  public Map<String, Answer> getSubmissions(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      Principal principal, HttpSession session) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    return Optional.ofNullable(groupService.getAnswers(groupId, false))
        .orElse(new ArrayList<Answer>()).parallelStream()
        .collect(Collectors.toMap(a -> a.getQuestion().getId(), a -> a));
  }

  /** Finalizes (locks) an unlocked group. */
  @RequestMapping(
      value = STUDENT_PATH + "/course/{courseId}/module/{moduleId}/group/{groupId}/finalize",
      method = POST)
  public ResponseEntity<Void> finalizeGroup(@PathVariable("courseId") String courseId,
      @PathVariable("moduleId") String moduleId, @PathVariable("groupId") String groupId,
      HttpSession session, Principal principal) {
    String courseUserId = HttpSessionUtil.getCourseUserId(session, courseId);
    acl.enforceInCourse(principal.getName(), courseId, courseUserId);
    acl.enforceIsCourseRole(principal.getName(), courseId,
        Arrays.asList(CourseUser.CourseRole.STUDENT));
    acl.enforceGroupInCourse(groupId, courseId);
    acl.enforceInGroup(courseUserId, groupId);
    acl.enforceGroupLocked(groupId, false);
    groupService.finalizeGroup(groupId);
    return ResponseEntity.ok().build();
  }

  /**
   * Returns the session-scoped check-in list for the group (attribute name is
   * the group id), creating it on first access and always ensuring the caller
   * is listed.
   */
  private List<String> getCheckinSessionAttribute(HttpSession session, String groupId,
      String courseUserId) {
    @SuppressWarnings("unchecked")
    List<String> checkin = HttpSessionUtil.getAttributeOrDefault(session, groupId, List.class,
        new ArrayList<String>());
    if (!checkin.contains(courseUserId)) {
      checkin.add(courseUserId);
      session.setAttribute(groupId, checkin);
    }
    return checkin;
  }
}
package io.github.teamdevintia.magicpotions.constants;

import io.github.teamdevintia.magicpotions.MagicPotions;
import io.github.teamdevintia.magicpotions.util.factory.RecipeFactory;
import org.bukkit.Material;
import org.bukkit.inventory.ShapedRecipe;

import java.util.Map;

/**
 * This constant stores all custom recipes
 *
 * @author Shad0wCore &amp; MiniDigger
 */
public class RecipeConstant extends Constant<ShapedRecipe> {

    public RecipeConstant(MagicPotions instance) {
        super(instance);
    }

    @Override
    public void initializeContent() {
        // The ritual lantern is both released and registered with Bukkit.
        this.getContentMap().put("recipe.ritualLantern",
                new RecipeFactory(instance.getItemConstant().get("item.ritualLantern"))
                        .shape("BBB", "BSB", "BBB")
                        .ingredient('B', Material.REDSTONE)
                        .ingredient('S', Material.STICK)
                        .releaseAndRegister());

        // Potions, only release, not register!
        // All potions share the same essence/bottle layout and differ only in
        // the key ingredient, so the common pattern is factored out below.
        addPotionRecipe("recipe.potion.fire", "item.firePotion", Material.BLAZE_POWDER);
        addPotionRecipe("recipe.potion.ice", "item.icePotion", Material.SNOW_BALL);
        addPotionRecipe("recipe.potion.timeFreeze", "item.timeFreezePotion", Material.ENDER_PEARL);
        addPotionRecipe("recipe.potion.impulse", "item.impulsePotion", Material.SULPHUR); // SULPHUR = gunpower, wtf bukkit
        addPotionRecipe("recipe.potion.winter", "item.winterPotion", Material.ICE);
        addPotionRecipe("recipe.potion.sun", "item.sunPotion", Material.FIREBALL);
        addPotionRecipe("recipe.potion.explosion", "item.explosionPotion", Material.MAGMA_CREAM);
    }

    /**
     * Builds and stores a potion recipe with the shared "ESE / SGS / ESE"
     * layout: E = essence, G = glass bottle, S = the potion-specific
     * ingredient. Potions are released only, never registered.
     *
     * @param recipeKey  key under which the recipe is stored
     * @param itemKey    item-constant key of the resulting potion item
     * @param ingredient the potion-specific 'S' ingredient
     */
    private void addPotionRecipe(String recipeKey, String itemKey, Material ingredient) {
        this.getContentMap().put(recipeKey,
                new RecipeFactory(instance.getItemConstant().get(itemKey))
                        .shape("ESE", "SGS", "ESE")
                        .ingredient('E', instance.getItemConstant().get("item.essence").getType())
                        .ingredient('S', ingredient)
                        .ingredient('G', Material.GLASS_BOTTLE)
                        .release());
    }

    // NOTE: the previous get(String) and getContentMap() overrides only
    // delegated to super and were removed; the inherited implementations are
    // used directly, so the public interface is unchanged.
}
package net.fortuna.ical4j.model.component; import java.io.IOException; import java.net.URISyntaxException; import java.text.ParseException; import java.util.Iterator; import org.apache.commons.lang.ObjectUtils; import org.apache.commons.lang.builder.HashCodeBuilder; import net.fortuna.ical4j.model.Component; import net.fortuna.ical4j.model.ComponentList; import net.fortuna.ical4j.model.Date; import net.fortuna.ical4j.model.DateList; import net.fortuna.ical4j.model.DateTime; import net.fortuna.ical4j.model.Dur; import net.fortuna.ical4j.model.Parameter; import net.fortuna.ical4j.model.Period; import net.fortuna.ical4j.model.PeriodList; import net.fortuna.ical4j.model.Property; import net.fortuna.ical4j.model.PropertyList; import net.fortuna.ical4j.model.ValidationException; import net.fortuna.ical4j.model.parameter.Value; import net.fortuna.ical4j.model.property.Clazz; import net.fortuna.ical4j.model.property.Created; import net.fortuna.ical4j.model.property.Description; import net.fortuna.ical4j.model.property.DtEnd; import net.fortuna.ical4j.model.property.DtStamp; import net.fortuna.ical4j.model.property.DtStart; import net.fortuna.ical4j.model.property.Duration; import net.fortuna.ical4j.model.property.ExDate; import net.fortuna.ical4j.model.property.ExRule; import net.fortuna.ical4j.model.property.Geo; import net.fortuna.ical4j.model.property.LastModified; import net.fortuna.ical4j.model.property.Location; import net.fortuna.ical4j.model.property.Organizer; import net.fortuna.ical4j.model.property.Priority; import net.fortuna.ical4j.model.property.RDate; import net.fortuna.ical4j.model.property.RRule; import net.fortuna.ical4j.model.property.RecurrenceId; import net.fortuna.ical4j.model.property.Sequence; import net.fortuna.ical4j.model.property.Status; import net.fortuna.ical4j.model.property.Summary; import net.fortuna.ical4j.model.property.Transp; import net.fortuna.ical4j.model.property.Uid; import net.fortuna.ical4j.model.property.Url; import 
net.fortuna.ical4j.util.CompatibilityHints; import net.fortuna.ical4j.util.Dates; import net.fortuna.ical4j.util.PropertyValidator; import net.fortuna.ical4j.util.Strings; /** * Defines an iCalendar VEVENT component. * * <pre> * 4.6.1 Event Component * * Component Name: &quot;VEVENT&quot; * * Purpose: Provide a grouping of component properties that describe an * event. * * Format Definition: A &quot;VEVENT&quot; calendar component is defined by the * following notation: * * eventc = &quot;BEGIN&quot; &quot;:&quot; &quot;VEVENT&quot; CRLF * eventprop *alarmc * &quot;END&quot; &quot;:&quot; &quot;VEVENT&quot; CRLF * * eventprop = *( * * ; the following are optional, * ; but MUST NOT occur more than once * * class / created / description / dtstart / geo / * last-mod / location / organizer / priority / * dtstamp / seq / status / summary / transp / * uid / url / recurid / * * ; either 'dtend' or 'duration' may appear in * ; a 'eventprop', but 'dtend' and 'duration' * ; MUST NOT occur in the same 'eventprop' * * dtend / duration / * * ; the following are optional, * ; and MAY occur more than once * * attach / attendee / categories / comment / * contact / exdate / exrule / rstatus / related / * resources / rdate / rrule / x-prop * * ) * </pre> * * Example 1 - Creating a new all-day event: * * <pre><code> * java.util.Calendar cal = java.util.Calendar.getInstance(); * cal.set(java.util.Calendar.MONTH, java.util.Calendar.DECEMBER); * cal.set(java.util.Calendar.DAY_OF_MONTH, 25); * * VEvent christmas = new VEvent(cal.getTime(), &quot;Christmas Day&quot;); * * // initialise as an all-day event.. * christmas.getProperties().getProperty(Property.DTSTART).getParameters().add( * Value.DATE); * * // add timezone information.. 
 * VTimeZone tz = VTimeZone.getDefault();
 * TzId tzParam = new TzId(tz.getProperties().getProperty(Property.TZID)
 *         .getValue());
 * christmas.getProperties().getProperty(Property.DTSTART).getParameters().add(
 *         tzParam);
 * </code></pre>
 *
 * Example 2 - Creating an event of one (1) hour duration:
 *
 * <pre><code>
 * java.util.Calendar cal = java.util.Calendar.getInstance();
 * // tomorrow..
 * cal.add(java.util.Calendar.DAY_OF_MONTH, 1);
 * cal.set(java.util.Calendar.HOUR_OF_DAY, 9);
 * cal.set(java.util.Calendar.MINUTE, 30);
 *
 * VEvent meeting = new VEvent(cal.getTime(), 1000 * 60 * 60, &quot;Progress Meeting&quot;);
 *
 * // add timezone information..
 * VTimeZone tz = VTimeZone.getDefault();
 * TzId tzParam = new TzId(tz.getProperties().getProperty(Property.TZID)
 *         .getValue());
 * meeting.getProperties().getProperty(Property.DTSTART).getParameters().add(
 *         tzParam);
 * </code></pre>
 *
 * Example 3 - Retrieve a list of periods representing a recurring event in a specified range:
 *
 * <pre><code>
 * Calendar weekday9AM = Calendar.getInstance();
 * weekday9AM.set(2005, Calendar.MARCH, 7, 9, 0, 0);
 * weekday9AM.set(Calendar.MILLISECOND, 0);
 *
 * Calendar weekday5PM = Calendar.getInstance();
 * weekday5PM.set(2005, Calendar.MARCH, 7, 17, 0, 0);
 * weekday5PM.set(Calendar.MILLISECOND, 0);
 *
 * // Do the recurrence until December 31st.
 * Calendar untilCal = Calendar.getInstance();
 * untilCal.set(2005, Calendar.DECEMBER, 31);
 * untilCal.set(Calendar.MILLISECOND, 0);
 *
 * // 9:00AM to 5:00PM Rule
 * Recur recur = new Recur(Recur.WEEKLY, untilCal.getTime());
 * recur.getDayList().add(WeekDay.MO);
 * recur.getDayList().add(WeekDay.TU);
 * recur.getDayList().add(WeekDay.WE);
 * recur.getDayList().add(WeekDay.TH);
 * recur.getDayList().add(WeekDay.FR);
 * recur.setInterval(3);
 * recur.setWeekStartDay(WeekDay.MO.getDay());
 * RRule rrule = new RRule(recur);
 *
 * Summary summary = new Summary(&quot;TEST EVENTS THAT HAPPEN 9-5 MON-FRI&quot;);
 *
 * weekdayNineToFiveEvents = new VEvent();
 * weekdayNineToFiveEvents.getProperties().add(rrule);
 * weekdayNineToFiveEvents.getProperties().add(summary);
 * weekdayNineToFiveEvents.getProperties().add(new DtStart(weekday9AM.getTime()));
 * weekdayNineToFiveEvents.getProperties().add(new DtEnd(weekday5PM.getTime()));
 *
 * // Test Start 04/01/2005, End One month later.
 * // Query Calendar Start and End Dates.
 * Calendar queryStartDate = Calendar.getInstance();
 * queryStartDate.set(2005, Calendar.APRIL, 1, 14, 47, 0);
 * queryStartDate.set(Calendar.MILLISECOND, 0);
 * Calendar queryEndDate = Calendar.getInstance();
 * queryEndDate.set(2005, Calendar.MAY, 1, 11, 15, 0);
 * queryEndDate.set(Calendar.MILLISECOND, 0);
 *
 * // This range is monday to friday every three weeks, starting from
 * // March 7th 2005, which means for our query dates we need
 * // April 18th through to the 22nd.
 * PeriodList periods = weekdayNineToFiveEvents.getPeriods(queryStartDate
 *         .getTime(), queryEndDate.getTime());
 * </code></pre>
 *
 * @author Ben Fortuna
 */
public class VEvent extends CalendarComponent {

    private static final long serialVersionUID = 2547948989200697335L;

    // VALARM sub-components attached to this event. Never null; validate()
    // rejects any non-VAlarm member.
    private ComponentList alarms;

    /**
     * Default constructor. Adds a DTSTAMP property, as required for newly
     * created events by RFC 2445.
     */
    public VEvent() {
        super(VEVENT);
        this.alarms = new ComponentList();
        getProperties().add(new DtStamp());
    }

    /**
     * Constructor.
     * @param properties a list of properties
     */
    public VEvent(final PropertyList properties) {
        super(VEVENT, properties);
        this.alarms = new ComponentList();
    }

    /**
     * Constructor.
     * @param properties a list of properties
     * @param alarms a list of alarms
     */
    public VEvent(final PropertyList properties, final ComponentList alarms) {
        super(VEVENT, properties);
        // NOTE(review): the supplied list is stored by reference, not copied —
        // callers share mutable state with this component.
        this.alarms = alarms;
    }

    /**
     * Constructs a new VEVENT instance starting at the specified time with the specified summary.
     * @param start the start date of the new event
     * @param summary the event summary
     */
    public VEvent(final Date start, final String summary) {
        this();
        getProperties().add(new DtStart(start));
        getProperties().add(new Summary(summary));
    }

    /**
     * Constructs a new VEVENT instance starting and ending at the specified times with the specified summary.
     * @param start the start date of the new event
     * @param end the end date of the new event
     * @param summary the event summary
     */
    public VEvent(final Date start, final Date end, final String summary) {
        this();
        getProperties().add(new DtStart(start));
        getProperties().add(new DtEnd(end));
        getProperties().add(new Summary(summary));
    }

    /**
     * Constructs a new VEVENT instance starting at the specified times, for the specified duration, with the specified
     * summary.
     * @param start the start date of the new event
     * @param duration the duration of the new event
     * @param summary the event summary
     */
    public VEvent(final Date start, final Dur duration, final String summary) {
        this();
        getProperties().add(new DtStart(start));
        getProperties().add(new Duration(duration));
        getProperties().add(new Summary(summary));
    }

    /**
     * Returns the list of alarms for this event.
     * @return a component list (live reference, not a copy)
     */
    public final ComponentList getAlarms() {
        return alarms;
    }

    /**
     * Serialises the component in iCalendar form: BEGIN/END lines wrapping
     * the property list followed by the alarm sub-components.
     * @see java.lang.Object#toString()
     */
    public final String toString() {
        StringBuffer b = new StringBuffer();
        b.append(BEGIN);
        b.append(':');
        b.append(getName());
        b.append(Strings.LINE_SEPARATOR);
        b.append(getProperties());
        b.append(getAlarms());
        b.append(END);
        b.append(':');
        b.append(getName());
        b.append(Strings.LINE_SEPARATOR);
        return b.toString();
    }

    /**
     * Validates this event against the RFC 2445 eventprop grammar: only
     * VALARMs as sub-components, UID/DTSTAMP mandatory (unless relaxed
     * validation is enabled), singleton properties at most once, a legal
     * STATUS value, DTEND/DURATION mutual exclusion, and matching VALUE
     * parameters on DTSTART/DTEND.
     * @see net.fortuna.ical4j.model.Component#validate(boolean)
     */
    public final void validate(final boolean recurse)
            throws ValidationException {
        // validate that getAlarms() only contains VAlarm components
        Iterator iterator = getAlarms().iterator();
        while (iterator.hasNext()) {
            Component component = (Component) iterator.next();
            if (!(component instanceof VAlarm)) {
                throw new ValidationException("Component ["
                        + component.getName() + "] may not occur in VEVENT");
            }
        }
        if (!CompatibilityHints
                .isHintEnabled(CompatibilityHints.KEY_RELAXED_VALIDATION)) {
            // From "4.8.4.7 Unique Identifier":
            // Conformance: The property MUST be specified in the "VEVENT", "VTODO",
            // "VJOURNAL" or "VFREEBUSY" calendar components.
            PropertyValidator.getInstance().assertOne(Property.UID,
                    getProperties());

            // From "4.8.7.2 Date/Time Stamp":
            // Conformance: This property MUST be included in the "VEVENT", "VTODO",
            // "VJOURNAL" or "VFREEBUSY" calendar components.
            PropertyValidator.getInstance().assertOne(Property.DTSTAMP,
                    getProperties());
        }

        /*
         * ; the following are optional, ; but MUST NOT occur more than once class / created / description / dtstart /
         * geo / last-mod / location / organizer / priority / dtstamp / seq / status / summary / transp / uid / url /
         * recurid /
         */
        PropertyValidator.getInstance().assertOneOrLess(Property.CLASS,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.CREATED,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.DESCRIPTION,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.DTSTART,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.GEO,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.LAST_MODIFIED,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.LOCATION,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.ORGANIZER,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.PRIORITY,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.DTSTAMP,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.SEQUENCE,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.STATUS,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.SUMMARY,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.TRANSP,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.UID,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.URL,
                getProperties());
        PropertyValidator.getInstance().assertOneOrLess(Property.RECURRENCE_ID,
                getProperties());

        // Only TENTATIVE/CONFIRMED/CANCELLED are valid VEVENT statuses.
        Status status = (Status) getProperty(Property.STATUS);
        if (status != null && !Status.VEVENT_TENTATIVE.equals(status)
                && !Status.VEVENT_CONFIRMED.equals(status)
                && !Status.VEVENT_CANCELLED.equals(status)) {
            throw new ValidationException("Status property ["
                    + status.toString() + "] is not applicable for VEVENT");
        }

        /*
         * ; either 'dtend' or 'duration' may appear in ; a 'eventprop', but 'dtend' and 'duration' ; MUST NOT occur in
         * the same 'eventprop' dtend / duration /
         */
        // If DTEND is present (assertNone throws), DURATION must be absent.
        try {
            PropertyValidator.getInstance().assertNone(Property.DTEND,
                    getProperties());
        }
        catch (ValidationException ve) {
            PropertyValidator.getInstance().assertNone(Property.DURATION,
                    getProperties());
        }

        if (getProperty(Property.DTEND) != null) {
            /*
             * The "VEVENT" is also the calendar component used to specify an anniversary or daily reminder within a
             * calendar. These events have a DATE value type for the "DTSTART" property instead of the default data type
             * of DATE-TIME. If such a "VEVENT" has a "DTEND" property, it MUST be specified as a DATE value also. The
             * anniversary type of "VEVENT" can span more than one date (i.e, "DTEND" property value is set to a
             * calendar date after the "DTSTART" property value).
             */
            DtStart start = (DtStart) getProperty(Property.DTSTART);
            DtEnd end = (DtEnd) getProperty(Property.DTEND);
            if (start != null) {
                Parameter startValue = start.getParameter(Parameter.VALUE);
                Parameter endValue = end.getParameter(Parameter.VALUE);

                if (startValue != null) {
                    if(startValue.equals(Value.DATE_TIME) && endValue==null) {
                        // DATE-TIME is the default so this is ok
                    }
                    else if (!startValue.equals(endValue)) {
                        throw new ValidationException("Property ["
                                + Property.DTEND + "] must have the same ["
                                + Parameter.VALUE + "] as [" + Property.DTSTART
                                + "]");
                    }
                }
                else if(endValue!=null) {
                    // if DTSTART's VALUE is null then DTEND's must be DATE-TIME
                    if(!endValue.equals(Value.DATE_TIME))
                        throw new ValidationException("Property ["
                                + Property.DTEND + "] must have the same ["
                                + Parameter.VALUE + "] as [" + Property.DTSTART
                                + "]");
                }
            }
        }

        /*
         * ; the following are optional, ; and MAY occur more than once attach / attendee / categories / comment /
         * contact / exdate / exrule / rstatus / related / resources / rdate / rrule / x-prop
         */

        if (recurse) {
            validateProperties();
        }
    }

    /**
     * Returns a normalised list of periods representing the consumed time for this event.
     * @param rangeStart the start of the range to check for consumed time
     * @param rangeEnd the end of the range to check for consumed time
     * @return a normalised list of periods representing consumed time for this event
     * @see VEvent#getConsumedTime(Date, Date, boolean)
     */
    public final PeriodList getConsumedTime(final Date rangeStart,
            final Date rangeEnd) {
        return getConsumedTime(rangeStart, rangeEnd, true);
    }

    /**
     * Returns a list of periods representing the consumed time for this event in the specified range. Note that the
     * returned list may contain a single period for non-recurring components or multiple periods for recurring
     * components. If no time is consumed by this event an empty list is returned.
     * @param rangeStart the start of the range to check for consumed time
     * @param rangeEnd the end of the range to check for consumed time
     * @param normalise indicate whether the returned list of periods should be normalised
     * @return a list of periods representing consumed time for this event
     */
    public final PeriodList getConsumedTime(final Date rangeStart,
            final Date rangeEnd, final boolean normalise) {
        PeriodList periods = new PeriodList();
        // if component is transparent return empty list..
        if (Transp.TRANSPARENT.equals(getProperty(Property.TRANSP))) {
            return periods;
        }
        DtStart start = (DtStart) getProperty(Property.DTSTART);
        DtEnd end = (DtEnd) getProperty(Property.DTEND);
        Duration duration = (Duration) getProperty(Property.DURATION);
        // if no start date or duration specified return empty list..
        if (start == null || (duration == null && end == null)) {
            return periods;
        }
        // if an explicit event duration is not specified, derive a value for recurring
        // periods from the end date..
        Dur rDuration;
        if (duration == null) {
            rDuration = new Dur(start.getDate(), end.getDate());
        }
        else {
            rDuration = duration.getDuration();
        }
        // adjust range start back by duration to allow for recurrences that
        // start before the range but finish inside..
        // FIXME: See bug #1325558..
        Date adjustedRangeStart = new DateTime(rangeStart);
        adjustedRangeStart.setTime(rDuration.negate().getTime(rangeStart)
                .getTime());

        // recurrence dates..
        PropertyList rDates = getProperties(Property.RDATE);
        for (Iterator i = rDates.iterator(); i.hasNext();) {
            RDate rdate = (RDate) i.next();
            // only period-based rdates are applicable..
            // FIXME: ^^^ not true - date-time/date also applicable..
            if (Value.PERIOD.equals(rdate.getParameter(Parameter.VALUE))) {
                for (Iterator j = rdate.getPeriods().iterator(); j.hasNext();) {
                    Period period = (Period) j.next();
                    // keep only periods that overlap the requested range
                    if (period.getStart().before(rangeEnd)
                            && period.getEnd().after(rangeStart)) {
                        periods.add(period);
                    }
                }
            }
        }

        // recurrence rules.. each RRULE occurrence consumes rDuration of time
        PropertyList rRules = getProperties(Property.RRULE);
        for (Iterator i = rRules.iterator(); i.hasNext();) {
            RRule rrule = (RRule) i.next();
            DateList startDates = rrule.getRecur().getDates(start.getDate(),
                    adjustedRangeStart, rangeEnd,
                    (Value) start.getParameter(Parameter.VALUE));
            // DateList startDates = rrule.getRecur().getDates(start.getDate(), rangeStart, rangeEnd, (Value)
            // start.getParameters().getParameter(Parameter.VALUE));
            for (int j = 0; j < startDates.size(); j++) {
                Date startDate = (Date) startDates.get(j);
                periods.add(new Period(new DateTime(startDate), rDuration));
            }
        }

        // add first instance if included in range..
        if (start.getDate().before(rangeEnd)) {
            if (end != null && end.getDate().after(rangeStart)) {
                periods.add(new Period(new DateTime(start.getDate()),
                        new DateTime(end.getDate())));
            }
            else if (duration != null) {
                Period period = new Period(new DateTime(start.getDate()),
                        duration.getDuration());
                if (period.getEnd().after(rangeStart)) {
                    periods.add(period);
                }
            }
        }

        // exception dates..
        PropertyList exDates = getProperties(Property.EXDATE);
        for (Iterator i = exDates.iterator(); i.hasNext();) {
            ExDate exDate = (ExDate) i.next();
            for (Iterator j = periods.iterator(); j.hasNext();) {
                Period period = (Period) j.next();
                // for DATE-TIME instances check for DATE-based exclusions also..
                if (exDate.getDates().contains(period.getStart())
                        || exDate.getDates().contains(
                                new Date(period.getStart()))) {
                    j.remove();
                }
            }
        }

        // exception rules..
        // FIXME: exception rules should be consistent with exception dates (i.e. not use periods?)..
        PropertyList exRules = getProperties(Property.EXRULE);
        PeriodList exPeriods = new PeriodList();
        for (Iterator i = exRules.iterator(); i.hasNext();) {
            ExRule exrule = (ExRule) i.next();
            // DateList startDates = exrule.getRecur().getDates(start.getDate(), adjustedRangeStart, rangeEnd, (Value)
            // start.getParameters().getParameter(Parameter.VALUE));
            DateList startDates = exrule.getRecur().getDates(start.getDate(),
                    rangeStart, rangeEnd,
                    (Value) start.getParameter(Parameter.VALUE));
            for (Iterator j = startDates.iterator(); j.hasNext();) {
                Date startDate = (Date) j.next();
                exPeriods.add(new Period(new DateTime(startDate), rDuration));
            }
        }

        // apply exceptions..
        if (!exPeriods.isEmpty()) {
            periods = periods.subtract(exPeriods);
        }

        // if periods already specified through recurrence, return..
        // ..also normalise before returning.
        if (!periods.isEmpty() && normalise) {
            return periods.normalise();
        }
        return periods;
    }

    /**
     * @return the optional access classification property for an event
     */
    public final Clazz getClassification() {
        return (Clazz) getProperty(Property.CLASS);
    }

    /**
     * @return the optional creation-time property for an event
     */
    public final Created getCreated() {
        return (Created) getProperty(Property.CREATED);
    }

    /**
     * @return the optional description property for an event
     */
    public final Description getDescription() {
        return (Description) getProperty(Property.DESCRIPTION);
    }

    /**
     * Convenience method to pull the DTSTART out of the property list.
     * @return The DtStart object representation of the start Date
     */
    public final DtStart getStartDate() {
        return (DtStart) getProperty(Property.DTSTART);
    }

    /**
     * @return the optional geographic position property for an event
     */
    public final Geo getGeographicPos() {
        return (Geo) getProperty(Property.GEO);
    }

    /**
     * @return the optional last-modified property for an event
     */
    public final LastModified getLastModified() {
        return (LastModified) getProperty(Property.LAST_MODIFIED);
    }

    /**
     * @return the optional location property for an event
     */
    public final Location getLocation() {
        return (Location) getProperty(Property.LOCATION);
    }

    /**
     * @return the optional organizer property for an event
     */
    public final Organizer getOrganizer() {
        return (Organizer) getProperty(Property.ORGANIZER);
    }

    /**
     * @return the optional priority property for an event
     */
    public final Priority getPriority() {
        return (Priority) getProperty(Property.PRIORITY);
    }

    /**
     * @return the optional date-stamp property
     */
    public final DtStamp getDateStamp() {
        return (DtStamp) getProperty(Property.DTSTAMP);
    }

    /**
     * @return the optional sequence number property for an event
     */
    public final Sequence getSequence() {
        return (Sequence) getProperty(Property.SEQUENCE);
    }

    /**
     * @return the optional status property for an event
     */
    public final Status getStatus() {
        return (Status) getProperty(Property.STATUS);
    }

    /**
     * @return the optional summary property for an event
     */
    public final Summary getSummary() {
        return (Summary) getProperty(Property.SUMMARY);
    }

    /**
     * @return the optional time transparency property for an event
     */
    public final Transp getTransparency() {
        return (Transp) getProperty(Property.TRANSP);
    }

    /**
     * @return the optional URL property for an event
     */
    public final Url getUrl() {
        return (Url) getProperty(Property.URL);
    }

    /**
     * @return the optional recurrence identifier property for an event
     */
    public final RecurrenceId getRecurrenceId() {
        return (RecurrenceId) getProperty(Property.RECURRENCE_ID);
    }

    /**
     * Returns the end date of this event. Where an end date is not available it will be derived from the event
     * duration.
     * @return a DtEnd instance, or null if one cannot be derived
     */
    public final DtEnd getEndDate() {
        return getEndDate(true);
    }

    /**
     * Convenience method to pull the DTEND out of the property list. If DTEND was not specified, use the DTSTART +
     * DURATION to calculate it.
     * @param deriveFromDuration specifies whether to derive an end date from the event duration where an end date is
     * not found
     * @return The end for this VEVENT.
     */
    public final DtEnd getEndDate(final boolean deriveFromDuration) {
        DtEnd dtEnd = (DtEnd) getProperty(Property.DTEND);
        // No DTEND? No problem, we'll use the DURATION.
        if (dtEnd == null && deriveFromDuration && getDuration() != null) {
            DtStart dtStart = getStartDate();
            Duration vEventDuration = getDuration();
            // derived end keeps the same VALUE type (DATE vs DATE-TIME) as the start
            dtEnd = new DtEnd(Dates.getInstance(vEventDuration.getDuration()
                    .getTime(dtStart.getDate()), (Value) dtStart
                    .getParameter(Parameter.VALUE)));
            if (dtStart.isUtc()) {
                dtEnd.setUtc(true);
            }
        }
        return dtEnd;
    }

    /**
     * @return the optional Duration property
     */
    public final Duration getDuration() {
        return (Duration) getProperty(Property.DURATION);
    }

    /**
     * Returns the UID property of this component if available.
     * @return a Uid instance, or null if no UID property exists
     */
    public final Uid getUid() {
        return (Uid) getProperty(Property.UID);
    }

    /*
     * (non-Javadoc)
     * @see net.fortuna.ical4j.model.Component#equals(java.lang.Object)
     */
    public boolean equals(Object arg0) {
        // in addition to the superclass comparison, VEvents must also have
        // equal alarm lists
        if (arg0 instanceof VEvent) {
            return super.equals(arg0)
                    && ObjectUtils.equals(alarms, ((VEvent) arg0).getAlarms());
        }
        return super.equals(arg0);
    }

    /*
     * (non-Javadoc)
     * @see net.fortuna.ical4j.model.Component#hashCode()
     */
    public int hashCode() {
        // consistent with equals(): name, properties and alarms all contribute
        return new HashCodeBuilder().append(getName()).append(getProperties())
                .append(getAlarms()).toHashCode();
    }

    /**
     * Overrides default copy method to add support for copying alarm sub-components.
     * @see net.fortuna.ical4j.model.Component#copy()
     */
    public Component copy() throws ParseException, IOException,
            URISyntaxException {
        VEvent copy = (VEvent) super.copy();
        copy.alarms = new ComponentList(alarms);
        return copy;
    }
}
package me.deftware.client.framework.command.commands; import com.mojang.brigadier.arguments.StringArgumentType; import com.mojang.brigadier.builder.LiteralArgumentBuilder; import com.mojang.brigadier.builder.RequiredArgumentBuilder; import me.deftware.client.framework.command.CommandBuilder; import me.deftware.client.framework.command.CommandResult; import me.deftware.client.framework.command.EMCModCommand; import me.deftware.client.framework.main.Bootstrap; import me.deftware.client.framework.maps.SettingsMap; import me.deftware.client.framework.wrappers.IChat; @SuppressWarnings("ALL") public class CommandTrigger extends EMCModCommand { @Override public CommandBuilder getCommandBuilder() { return new CommandBuilder().set((LiteralArgumentBuilder) LiteralArgumentBuilder.literal("trigger") .then( LiteralArgumentBuilder.literal("set") .then( RequiredArgumentBuilder.argument("prefix", StringArgumentType.string()) .executes(c -> { CommandResult r = new CommandResult(c); if (r.getString("prefix").trim().isEmpty()) { IChat.sendClientMessage("Please enter a valid trigger prefix"); return 1; } SettingsMap.update(SettingsMap.MapKeys.EMC_SETTINGS, "COMMAND_TRIGGER", r.getString("prefix")); Bootstrap.EMCSettings.saveString("commandtrigger", r.getString("prefix")); IChat.sendClientMessage("Set command trigger to \"" + r.getString("prefix") + "\""); return 1; }) ) ) .then( LiteralArgumentBuilder.literal("restore") .executes(c -> { SettingsMap.update(SettingsMap.MapKeys.EMC_SETTINGS, "COMMAND_TRIGGER", "."); Bootstrap.EMCSettings.saveString("commandtrigger", "."); IChat.sendClientMessage("Command trigger has been reset to \".\" (single dot)"); return 1; }) ) ); } }
package net.fortuna.ical4j.model.component; import java.util.Iterator; import net.fortuna.ical4j.model.Component; import net.fortuna.ical4j.model.ComponentList; import net.fortuna.ical4j.model.Date; import net.fortuna.ical4j.model.DateList; import net.fortuna.ical4j.model.DateTime; import net.fortuna.ical4j.model.Dur; import net.fortuna.ical4j.model.Parameter; import net.fortuna.ical4j.model.Period; import net.fortuna.ical4j.model.PeriodList; import net.fortuna.ical4j.model.Property; import net.fortuna.ical4j.model.PropertyList; import net.fortuna.ical4j.model.ValidationException; import net.fortuna.ical4j.model.parameter.Value; import net.fortuna.ical4j.model.property.DtEnd; import net.fortuna.ical4j.model.property.DtStamp; import net.fortuna.ical4j.model.property.DtStart; import net.fortuna.ical4j.model.property.Duration; import net.fortuna.ical4j.model.property.ExDate; import net.fortuna.ical4j.model.property.ExRule; import net.fortuna.ical4j.model.property.RDate; import net.fortuna.ical4j.model.property.RRule; import net.fortuna.ical4j.model.property.Status; import net.fortuna.ical4j.model.property.Summary; import net.fortuna.ical4j.model.property.Transp; import net.fortuna.ical4j.util.Dates; import net.fortuna.ical4j.util.PropertyValidator; /** * Defines an iCalendar VEVENT component. * * <pre> * 4.6.1 Event Component * * Component Name: "VEVENT" * * Purpose: Provide a grouping of component properties that describe an * event. 
* * Format Definition: A "VEVENT" calendar component is defined by the * following notation: * * eventc = "BEGIN" ":" "VEVENT" CRLF * eventprop *alarmc * "END" ":" "VEVENT" CRLF * * eventprop = *( * * ; the following are optional, * ; but MUST NOT occur more than once * * class / created / description / dtstart / geo / * last-mod / location / organizer / priority / * dtstamp / seq / status / summary / transp / * uid / url / recurid / * * ; either 'dtend' or 'duration' may appear in * ; a 'eventprop', but 'dtend' and 'duration' * ; MUST NOT occur in the same 'eventprop' * * dtend / duration / * * ; the following are optional, * ; and MAY occur more than once * * attach / attendee / categories / comment / * contact / exdate / exrule / rstatus / related / * resources / rdate / rrule / x-prop * * ) * </pre> * * Example 1 - Creating a new all-day event: * * <pre><code> * java.util.Calendar cal = java.util.Calendar.getInstance(); * cal.set(java.util.Calendar.MONTH, java.util.Calendar.DECEMBER); * cal.set(java.util.Calendar.DAY_OF_MONTH, 25); * * VEvent christmas = new VEvent(cal.getTime(), "Christmas Day"); * * // initialise as an all-day event.. * christmas.getProperties().getProperty(Property.DTSTART).getParameters().add(Value.DATE); * * // add timezone information.. * VTimeZone tz = VTimeZone.getDefault(); * TzId tzParam = new TzId(tz.getProperties().getProperty(Property.TZID).getValue()); * christmas.getProperties().getProperty(Property.DTSTART).getParameters().add(tzParam); * </code></pre> * * Example 2 - Creating an event of one (1) hour duration: * * <pre><code> * java.util.Calendar cal = java.util.Calendar.getInstance(); * // tomorrow.. * cal.add(java.util.Calendar.DAY_OF_MONTH, 1); * cal.set(java.util.Calendar.HOUR_OF_DAY, 9); * cal.set(java.util.Calendar.MINUTE, 30); * * VEvent meeting = new VEvent(cal.getTime(), 1000 * 60 * 60, "Progress Meeting"); * * // add timezone information.. 
* VTimeZone tz = VTimeZone.getDefault(); * TzId tzParam = new TzId(tz.getProperties().getProperty(Property.TZID).getValue()); * meeting.getProperties().getProperty(Property.DTSTART).getParameters().add(tzParam); * </code></pre> * * Example 3 - Retrieve a list of periods representing a recurring event in a * specified range: * * <pre><code> * Calendar weekday9AM = Calendar.getInstance(); * weekday9AM.set(2005, Calendar.MARCH, 7, 9, 0, 0); * weekday9AM.set(Calendar.MILLISECOND, 0); * * Calendar weekday5PM = Calendar.getInstance(); * weekday5PM.set(2005, Calendar.MARCH, 7, 17, 0, 0); * weekday5PM.set(Calendar.MILLISECOND, 0); * * // Do the recurrence until December 31st. * Calendar untilCal = Calendar.getInstance(); * untilCal.set(2005, Calendar.DECEMBER, 31); * untilCal.set(Calendar.MILLISECOND, 0); * * // 9:00AM to 5:00PM Rule * Recur recur = new Recur(Recur.WEEKLY, untilCal.getTime()); * recur.getDayList().add(WeekDay.MO); * recur.getDayList().add(WeekDay.TU); * recur.getDayList().add(WeekDay.WE); * recur.getDayList().add(WeekDay.TH); * recur.getDayList().add(WeekDay.FR); * recur.setInterval(3); * recur.setWeekStartDay(WeekDay.MO.getDay()); * RRule rrule = new RRule(recur); * * Summary summary = new Summary("TEST EVENTS THAT HAPPEN 9-5 MON-FRI"); * * weekdayNineToFiveEvents = new VEvent(); * weekdayNineToFiveEvents.getProperties().add(rrule); * weekdayNineToFiveEvents.getProperties().add(summary); * weekdayNineToFiveEvents.getProperties().add( * new DtStart(weekday9AM.getTime())); * weekdayNineToFiveEvents.getProperties().add( * new DtEnd(weekday5PM.getTime())); * * // Test Start 04/01/2005, End One month later. * // Query Calendar Start and End Dates. 
* Calendar queryStartDate = Calendar.getInstance(); * queryStartDate.set(2005, Calendar.APRIL, 1, 14, 47, 0); * queryStartDate.set(Calendar.MILLISECOND, 0); * Calendar queryEndDate = Calendar.getInstance(); * queryEndDate.set(2005, Calendar.MAY, 1, 11, 15, 0); * queryEndDate.set(Calendar.MILLISECOND, 0); * * // This range is monday to friday every three weeks, starting from * // March 7th 2005, which means for our query dates we need * // April 18th through to the 22nd. * PeriodList periods = * weekdayNineToFiveEvents.getPeriods(queryStartDate.getTime(), * queryEndDate.getTime()); * </code></pre> * * @author Ben Fortuna */ public class VEvent extends Component { private static final long serialVersionUID = 2547948989200697335L; private ComponentList alarms; /** * Default constructor. */ public VEvent() { super(VEVENT); this.alarms = new ComponentList(); } /** * Constructor. * * @param properties * a list of properties */ public VEvent(final PropertyList properties) { super(VEVENT, properties); this.alarms = new ComponentList(); } /** * Constructor. * * @param properties * a list of properties * @param alarms * a list of alarms */ public VEvent(final PropertyList properties, final ComponentList alarms) { super(VEVENT, properties); this.alarms = alarms; } /** * Constructs a new VEVENT instance starting at the specified * time with the specified summary. * @param start the start date of the new event * @param summary the event summary */ public VEvent(final Date start, final String summary) { this(); getProperties().add(new DtStamp(new DateTime())); getProperties().add(new DtStart(start)); getProperties().add(new Summary(summary)); } /** * Constructs a new VEVENT instance starting and ending at the specified * times with the specified summary. 
* @param start the start date of the new event * @param end the end date of the new event * @param summary the event summary */ public VEvent(final Date start, final Date end, final String summary) { this(); getProperties().add(new DtStamp(new DateTime())); getProperties().add(new DtStart(start)); getProperties().add(new DtEnd(end)); getProperties().add(new Summary(summary)); } /** * Constructs a new VEVENT instance starting at the specified * times, for the specified duration, with the specified summary. * @param start the start date of the new event * @param duration the duration of the new event * @param summary the event summary */ public VEvent(final Date start, final Dur duration, final String summary) { this(); getProperties().add(new DtStamp(new DateTime())); getProperties().add(new DtStart(start)); getProperties().add(new Duration(duration)); getProperties().add(new Summary(summary)); } /** * Returns the list of alarms for this event. * @return a component list */ public final ComponentList getAlarms() { return alarms; } /** * @see java.lang.Object#toString() */ public final String toString() { return BEGIN + ":" + getName() + "\r\n" + getProperties() + getAlarms() + END + ":" + getName() + "\r\n"; } /** * @see net.fortuna.ical4j.model.Component#validate(boolean) */ public final void validate(final boolean recurse) throws ValidationException { // validate that getAlarms() only contains VAlarm components Iterator iterator = getAlarms().iterator(); while (iterator.hasNext()) { Component component = (Component) iterator.next(); if (!(component instanceof VAlarm)) { throw new ValidationException( "Component [" + component.getName() + "] may not occur in VEVENT"); } } /* * ; the following are optional, ; but MUST NOT occur more than once * * class / created / description / dtstart / geo / last-mod / location / * organizer / priority / dtstamp / seq / status / summary / transp / * uid / url / recurid / */ 
PropertyValidator.getInstance().assertOneOrLess(Property.CLASS, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.CREATED, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.DESCRIPTION, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.DTSTART, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.GEO, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.LAST_MODIFIED, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.LOCATION, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.ORGANIZER, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.PRIORITY, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.DTSTAMP, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.SEQUENCE, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.STATUS, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.SUMMARY, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.TRANSP, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.UID, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.URL, getProperties()); PropertyValidator.getInstance().assertOneOrLess(Property.RECURRENCE_ID, getProperties()); Status status = (Status) getProperties().getProperty(Property.STATUS); if (status != null && !Status.VEVENT_TENTATIVE.equals(status) && !Status.VEVENT_CONFIRMED.equals(status) && !Status.VEVENT_CANCELLED.equals(status)) { throw new ValidationException( "Status property [" + status.toString() + "] is not applicable for VEVENT"); } /* * ; either 'dtend' or 'duration' may appear in ; a 'eventprop', but * 'dtend' and 'duration' ; MUST NOT occur in the same 'eventprop' * * dtend / duration / */ if (getProperties().getProperty(Property.DTEND) != null) { if 
(getProperties().getProperty(Property.DURATION) != null) { throw new ValidationException( "Properties [" + Property.DTEND + "," + Property.DURATION + "] may not occur in the same VEVENT"); } /* * The "VEVENT" is also the calendar component used to specify an * anniversary or daily reminder within a calendar. These events have a * DATE value type for the "DTSTART" property instead of the default * data type of DATE-TIME. If such a "VEVENT" has a "DTEND" property, it * MUST be specified as a DATE value also. The anniversary type of * "VEVENT" can span more than one date (i.e, "DTEND" property value is * set to a calendar date after the "DTSTART" property value). */ DtStart start = (DtStart) getProperties().getProperty(Property.DTSTART); DtEnd end = (DtEnd) getProperties().getProperty(Property.DTEND); if (start != null) { Parameter value = start.getParameters().getParameter(Parameter.VALUE); if (value != null && !value.equals(end.getParameters().getParameter(Parameter.VALUE))) { throw new ValidationException("Property [" + Property.DTEND + "] must have the same [" + Parameter.VALUE + "] as [" + Property.DTSTART + "]"); } } } /* * ; the following are optional, ; and MAY occur more than once * * attach / attendee / categories / comment / contact / exdate / exrule / * rstatus / related / resources / rdate / rrule / x-prop */ if (recurse) { validateProperties(); } } /** * Returns a list of periods representing the consumed time for this event * in the specified range. Note that the returned list may contain a single * period for non-recurring components or multiple periods for recurring * components. If no time is consumed by this event an empty list is returned. 
* @param rangeStart the start of the range to check for consumed time * @param rangeEnd the end of the range to check for consumed time * @return a list of periods representing consumed time for this event */ public final PeriodList getConsumedTime(final Date rangeStart, final Date rangeEnd) { PeriodList periods = new PeriodList(); // if component is transparent return empty list.. if (Transp.TRANSPARENT.equals(getProperties().getProperty(Property.TRANSP))) { return periods; } DtStart start = (DtStart) getProperties().getProperty(Property.DTSTART); DtEnd end = (DtEnd) getProperties().getProperty(Property.DTEND); Duration duration = (Duration) getProperties().getProperty(Property.DURATION); // if no start date specified return empty list.. if (start == null) { return periods; } // if an explicit event duration is not specified, derive a value for recurring // periods from the end date.. Dur rDuration; if (duration == null) { rDuration = new Dur(start.getDate(), end.getDate()); } else { rDuration = duration.getDuration(); } // adjust range start back by duration to allow for recurrences that // start before the range but finish inside.. // FIXME: See bug #1325558.. // Date adjustedRangeStart = new DateTime(rangeStart); // adjustedRangeStart.setTime(rDuration.negate().getTime(rangeStart).getTime()); // if start/end specified as anniversary-type (i.e. uses DATE values // rather than DATE-TIME), return empty list.. if (Value.DATE.equals(start.getParameters().getParameter(Parameter.VALUE))) { return periods; } // recurrence dates.. PropertyList rDates = getProperties().getProperties(Property.RDATE); for (Iterator i = rDates.iterator(); i.hasNext();) { RDate rdate = (RDate) i.next(); // only period-based rdates are applicable.. // FIXME: ^^^ not true - date-time/date also applicable.. 
if (Value.PERIOD.equals(rdate.getParameters().getParameter(Parameter.VALUE))) { for (Iterator j = rdate.getPeriods().iterator(); j.hasNext();) { Period period = (Period) j.next(); if (period.getStart().before(rangeEnd) && period.getEnd().after(rangeStart)) { periods.add(period); } } } } // recurrence rules.. PropertyList rRules = getProperties().getProperties(Property.RRULE); for (Iterator i = rRules.iterator(); i.hasNext();) { RRule rrule = (RRule) i.next(); // DateList startDates = rrule.getRecur().getDates(start.getDate(), adjustedRangeStart, rangeEnd, (Value) start.getParameters().getParameter(Parameter.VALUE)); DateList startDates = rrule.getRecur().getDates(start.getDate(), rangeStart, rangeEnd, (Value) start.getParameters().getParameter(Parameter.VALUE)); for (int j = 0; j < startDates.size(); j++) { Date startDate = (Date) startDates.get(j); periods.add(new Period(new DateTime(startDate), rDuration)); } } // exception dates.. PropertyList exDates = getProperties().getProperties(Property.EXDATE); for (Iterator i = exDates.iterator(); i.hasNext();) { ExDate exDate = (ExDate) i.next(); for (Iterator j = periods.iterator(); j.hasNext();) { Period period = (Period) j.next(); // for DATE-TIME instances check for DATE-based exclusions also.. if (exDate.getDates().contains(period.getStart()) || exDate.getDates().contains(new Date(period.getStart()))) { periods.remove(period); } } } // exception rules.. // FIXME: exception rules should be consistent with exception dates (i.e. not use periods?).. 
PropertyList exRules = getProperties().getProperties(Property.EXRULE); PeriodList exPeriods = new PeriodList(); for (Iterator i = exRules.iterator(); i.hasNext();) { ExRule exrule = (ExRule) i.next(); // DateList startDates = exrule.getRecur().getDates(start.getDate(), adjustedRangeStart, rangeEnd, (Value) start.getParameters().getParameter(Parameter.VALUE)); DateList startDates = exrule.getRecur().getDates(start.getDate(), rangeStart, rangeEnd, (Value) start.getParameters().getParameter(Parameter.VALUE)); for (Iterator j = startDates.iterator(); j.hasNext();) { Date startDate = (Date) j.next(); exPeriods.add(new Period(new DateTime(startDate), rDuration)); } } // apply exceptions.. if (!exPeriods.isEmpty()) { periods = periods.subtract(exPeriods); } // if periods already specified through recurrence, return.. // ..also normalise before returning. if (!periods.isEmpty()) { return periods.normalise(); } // add first instance if included in range.. if (start.getDate().before(rangeEnd)) { if (end != null && end.getDate().after(rangeStart)) { periods.add(new Period(new DateTime(start.getDate()), new DateTime(end.getDate()))); } else if (duration != null) { Period period = new Period(new DateTime(start.getDate()), duration.getDuration()); if (period.getEnd().after(rangeStart)) { periods.add(period); } } } return periods; } /** * Convenience method to pull the DTSTART out of the property list. * * @return * The DtStart object representation of the start Date */ public final DtStart getStartDate() { return (DtStart) getProperties().getProperty(Property.DTSTART); } /** * Convenience method to pull the DTEND out of the property list. If * DTEND was not specified, use the DTSTART + DURATION to calculate it. * * @return * The end for this VEVENT. */ public final DtEnd getEndDate() { DtEnd dtEnd = (DtEnd) getProperties().getProperty(Property.DTEND); // No DTEND? No problem, we'll use the DURATION. 
if (dtEnd == null) { DtStart dtStart = getStartDate(); Duration vEventDuration = (Duration) getProperties().getProperty(Property.DURATION); dtEnd = new DtEnd(Dates.getInstance(vEventDuration.getDuration().getTime(dtStart.getDate()), (Value) dtStart.getParameters().getParameter(Parameter.VALUE))); } return dtEnd; } }
package org.cyclops.integrateddynamics.recipe; import net.minecraft.inventory.InventoryCrafting; import net.minecraft.item.ItemStack; import net.minecraft.item.crafting.IRecipe; import net.minecraft.world.World; import org.cyclops.cyclopscore.helper.MinecraftHelpers; import org.cyclops.integrateddynamics.IntegratedDynamics; import org.cyclops.integrateddynamics.api.item.IVariableFacade; import org.cyclops.integrateddynamics.api.item.IVariableFacadeHandlerRegistry; import org.cyclops.integrateddynamics.item.ItemVariable; /** * Crafting recipe to copy variable data. * @author rubensworks */ public class ItemVariableCopyRecipe implements IRecipe { @Override public boolean matches(InventoryCrafting inv, World worldIn) { return getCraftingResult(inv) != null; } @Override public ItemStack getCraftingResult(InventoryCrafting inv) { ItemStack withData = null; ItemStack withoutData = null; IVariableFacade facade; int count = 0; for(int j = 0; j < inv.getSizeInventory(); j++) { ItemStack element = inv.getStackInSlot(j); if(element != null && element.getItem() instanceof ItemVariable) { count++; facade = ItemVariable.getInstance().getVariableFacade(element); if(!facade.isValid() && withoutData == null && element.stackSize == 1) { withoutData = element; } if(facade.isValid() && withData == null && element.stackSize == 1) { withData = element.copy(); } } } if(count == 2 && withoutData != null && withData != null) { return withData; } return null; } @Override public int getRecipeSize() { return 2; } @Override public ItemStack getRecipeOutput() { return new ItemStack(ItemVariable.getInstance(), 1); } @Override public ItemStack[] getRemainingItems(InventoryCrafting inv) { ItemStack[] ret = new ItemStack[inv.getSizeInventory()]; for(int j = 0; j < inv.getSizeInventory(); j++) { ItemStack element = inv.getStackInSlot(j); if(element != null && element.getItem() instanceof ItemVariable) { IVariableFacade facade = ItemVariable.getInstance().getVariableFacade(element); 
if(facade.isValid()) { // Create a copy with a new id. ret[j] = IntegratedDynamics._instance.getRegistryManager() .getRegistry(IVariableFacadeHandlerRegistry.class).copy(!MinecraftHelpers.isClientSide(), element); } } } return ret; } }
package org.esupportail.smsuapiadmin.web.controllers; import java.io.IOException; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import org.springframework.beans.factory.annotation.Autowired; import javax.servlet.http.HttpServletRequest; import org.codehaus.jackson.map.ObjectMapper; import org.esupportail.smsuapiadmin.business.UserManager; import org.esupportail.smsuapiadmin.dto.beans.UIUser; @Path("/login") public class LoginController { @Autowired private UserManager userManager; @GET public Response get(@Context HttpServletRequest request) throws IOException { UIUser user = userManager.getUserByLogin(request.getRemoteUser()); String jsUser = new ObjectMapper().writeValueAsString(user); String callback = request.getParameter("callback"); String type = callback == null ? "text/html" : "application/x-javascript"; String js = callback == null ? "Login success, please wait...\n<script>\n (window.opener.postMessage ? window.opener : window.opener.document).postMessage('loggedUser=' + JSON.stringify(" + jsUser + "), '*');\n</script>" : callback + "(" + jsUser + ")"; return Response.status(Response.Status.OK).type(type).entity(js).build(); } }
package org.jenkinsci.plugins.pretestcommit;

import hudson.AbortException;
import hudson.EnvVars;
import hudson.Extension;
import hudson.Launcher;
import hudson.model.BuildListener;
import hudson.model.Environment;
import hudson.model.AbstractBuild;
import hudson.model.AbstractProject;
import hudson.model.Descriptor;
import hudson.model.Cause.LegacyCodeCause;
import hudson.model.Computer;
import hudson.model.*;
import hudson.plugins.mercurial.HgExe;
import hudson.plugins.mercurial.MercurialInstallation;
import hudson.plugins.mercurial.MercurialSCM;
import hudson.plugins.mercurial.*;
import hudson.tasks.BuildWrapper;
import hudson.tasks.BuildTrigger;
import hudson.tasks.BuildStep;
import hudson.util.ArgumentListBuilder;
import hudson.FilePath;

import java.io.IOException;
import java.io.PrintStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;

import org.kohsuke.stapler.DataBoundConstructor;

/**
 * Build wrapper that, before the build proper runs, pulls and updates the
 * Mercurial branch named by the {@code branch} build variable into the
 * workspace.
 */
public class PretestCommitPreCheckout extends BuildWrapper {

    @DataBoundConstructor
    public PretestCommitPreCheckout() {
    }

    /**
     * Finds the hg executable on the system. This method is taken from
     * MercurialSCM where it is private.
     *
     * @param scm the Mercurial SCM configured on the job
     * @param node the node the command will run on
     * @param listener for resolving the installation's node-specific home
     * @param allowDebug whether {@code --debug} may be appended
     * @return an argument list whose first element is the hg executable
     * @throws IOException if the installation cannot be resolved
     * @throws InterruptedException if resolution is interrupted
     */
    ArgumentListBuilder findHgExe(MercurialSCM scm, Node node,
            TaskListener listener, boolean allowDebug)
            throws IOException, InterruptedException {
        for (MercurialInstallation inst
                : MercurialInstallation.allInstallations()) {
            if (inst.getName().equals(scm.getInstallation())) {
                // XXX what about forEnvironment?
                String home = inst.getExecutable().replace("INSTALLATION",
                        inst.forNode(node, listener).getHome());
                ArgumentListBuilder b = new ArgumentListBuilder(home);
                if (allowDebug && inst.getDebug()) {
                    b.add("--debug");
                }
                return b;
            }
        }
        // No named installation matched: fall back to the globally
        // configured hg executable.
        return new ArgumentListBuilder(scm.getDescriptor().getHgExe());
    }

    /**
     * Pulls and updates the branch given by the {@code branch} build variable
     * into the workspace, writing a marker file along the way.
     *
     * @param build the in-progress build
     * @param launcher used to run the hg command on the build node
     * @param listener build console output
     * @return noop Environment class
     * @throws AbortException if the branch variable is missing or the hg
     *             command fails
     */
    @Override
    public Environment setUp(AbstractBuild build, Launcher launcher,
            BuildListener listener) throws IOException, InterruptedException {
        listener.getLogger().println("Setup!!!");
        listener.getLogger().println("Workspace is here: "
                + build.getWorkspace());
        FilePath fp = build.getWorkspace().child("pretest_stuff_was_here");
        listener.getLogger().println("Writing file here: " + fp);
        OutputStream os = fp.write();
        // close the marker stream on every exit path (the original leaked it
        // when the hg command failed)
        try {
            //Setup variables to find our executable
            AbstractProject<?, ?> project = build.getProject();
            //We need to check this cast..
            MercurialSCM scm = (MercurialSCM) project.getScm();
            Node node = Computer.currentComputer().getNode();
            EnvVars env = build.getEnvironment(listener);

            //Why not do it like the mercurial plugin? ;)
            ArgumentListBuilder cmd = findHgExe(scm, node, listener, false);
            //This is also a possibility
            //new HgExe(scm,launcher,build.getBuiltOn(),listener,env);

            // fail fast with a clear message instead of an NPE when the job
            // was started without the expected build variable
            Object branch = build.getBuildVariables().get("branch");
            if (branch == null) {
                listener.error("Build variable 'branch' is not set");
                throw new AbortException("Build variable 'branch' is not set");
            }
            String source = branch.toString();

            //Use args.add(String a) to add an argument/flag just as you would
            //an ordinary flag e.g. to do "hg pull" you'd use args.add("pull")
            cmd.add("pull");
            cmd.add("--update");
            cmd.add(source);

            //Finally use hg.run(args).join() to run the command on the system
            int cloneExitCode;
            try {
                //cloneExitCode = hg.run(args).join();\
                cloneExitCode = launcher.launch().cmds(cmd)
                        .pwd(build.getWorkspace()).join();
            } catch (IOException e) {
                String message = e.getMessage();
                if (message != null
                        && message.startsWith("Cannot run program")
                        && message.endsWith("No such file or directory")) {
                    listener.error("Failed to clone " + source
                            + " because hg could not be found;"
                            + " check that you've properly configured your"
                            + " Mercurial installation");
                } else {
                    e.printStackTrace(
                            listener.error("Failed to clone repository"));
                }
                throw new AbortException("Failed to clone repository");
            }
            if (cloneExitCode != 0) {
                listener.error("Failed to clone repository");
                throw new AbortException("Failed to clone repository");
            }
            //Use build.getBuildVariables().get("foo") to get a build variable
            //configured in the jenkins job
            os.write(0);
        } finally {
            os.close();
        }
        return new NoopEnv();
    }

    /**
     * Pre-checkout hook; currently only logs that it ran.
     *
     * @param build the in-progress build
     * @param launcher unused
     * @param listener build console output
     */
    @Override
    public void preCheckout(AbstractBuild build, Launcher launcher,
            BuildListener listener) throws IOException, InterruptedException {
        //PrintStream log = listener.getLogger();
        listener.getLogger().println("Pre-checkout!!!");
    }

    /** Registers this wrapper and names it in the job configuration UI. */
    @Extension
    public static final class DescriptorImpl extends Descriptor<BuildWrapper> {
        public String getDisplayName() {
            return "Run pretest-commit stuff before SCM runs";
        }
    }

    /** Environment that contributes nothing to the build. */
    class NoopEnv extends Environment {
    }
}
package org.openmrs; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.Vector; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hibernate.search.annotations.ContainedIn; import org.hibernate.search.annotations.DocumentId; import org.hibernate.search.annotations.Field; import org.hibernate.search.annotations.FullTextFilterDef; import org.hibernate.search.annotations.FullTextFilterDefs; import org.hibernate.search.annotations.IndexedEmbedded; import org.openmrs.annotation.AllowDirectAccess; import org.openmrs.api.APIException; import org.openmrs.api.ConceptNameType; import org.openmrs.api.ConceptService; import org.openmrs.api.context.Context; import org.openmrs.api.db.hibernate.search.TermsFilterFactory; import org.openmrs.util.LocaleUtility; import org.openmrs.util.OpenmrsUtil; import org.simpleframework.xml.Attribute; import org.simpleframework.xml.Element; import org.simpleframework.xml.ElementList; import org.simpleframework.xml.Root; import org.springframework.util.ObjectUtils; /** * A Concept object can represent either a question or an answer to a data point. That data point is * usually an {@link Obs}. <br> * <br> * A Concept can have multiple names and multiple descriptions within one locale and across multiple * locales.<br> * <br> * To save a Concept to the database, first build up the Concept object in java, then pass that * object to the {@link ConceptService}.<br> * <br> * To get a Concept that is stored in the database, call a method in the {@link ConceptService} to * fetch an object. 
To get child objects off of that Concept, further calls to the * {@link ConceptService} or the database are not needed. e.g. To get the list of answers that are * stored to a concept, get the concept, then call {@link Concept#getAnswers()} * * @see ConceptName * @see ConceptDescription * @see ConceptAnswer * @see ConceptSet * @see ConceptMap * @see ConceptService */ @Root @FullTextFilterDefs( { @FullTextFilterDef(name = "termsFilterFactory", impl = TermsFilterFactory.class) }) public class Concept extends BaseOpenmrsObject implements Auditable, Retireable, java.io.Serializable, Attributable<Concept> { public static final long serialVersionUID = 57332L; private static final Log log = LogFactory.getLog(Concept.class); // Fields @DocumentId private Integer conceptId; @Field private Boolean retired = false; private User retiredBy; private Date dateRetired; private String retireReason; @IndexedEmbedded(includeEmbeddedObjectId = true) private ConceptDatatype datatype; @IndexedEmbedded(includeEmbeddedObjectId = true) private ConceptClass conceptClass; private Boolean set = false; private String version; private User creator; private Date dateCreated; private User changedBy; private Date dateChanged; @AllowDirectAccess @ContainedIn private Collection<ConceptName> names; @AllowDirectAccess private Collection<ConceptAnswer> answers; private Collection<ConceptSet> conceptSets; private Collection<ConceptDescription> descriptions; @IndexedEmbedded(includeEmbeddedObjectId = true) private Collection<ConceptMap> conceptMappings; /** * A cache of locales to names which have compatible locales. Built on-the-fly by * getCompatibleNames(). 
*/ private Map<Locale, List<ConceptName>> compatibleCache; /** default constructor */ public Concept() { names = new HashSet<ConceptName>(); answers = new HashSet<ConceptAnswer>(); conceptSets = new TreeSet<ConceptSet>(); descriptions = new HashSet<ConceptDescription>(); conceptMappings = new HashSet<ConceptMap>(); } /** * Convenience constructor with conceptid to save to {@link #setConceptId(Integer)}. This * effectively creates a concept stub that can be used to make other calls. Because the * {@link #equals(Object)} and {@link #hashCode()} methods rely on conceptId, this allows a stub * to masquerade as a full concept as long as other objects like {@link #getAnswers()} and * {@link #getNames()} are not needed/called. * * @param conceptId the concept id to set */ public Concept(Integer conceptId) { this(); this.conceptId = conceptId; } /** * @return Returns all answers (including retired answers). * @should return retired and non-retired answers * @should not return null if answers is null or empty */ @ElementList public Collection<ConceptAnswer> getAnswers() { if (answers == null) { answers = new HashSet<ConceptAnswer>(); } return answers; } /** * If <code>includeRetired</code> is true, then the returned object is the actual stored list of * {@link ConceptAnswer}s * * @param includeRetired true/false whether to also include the retired answers * @return Returns the answers for this Concept * @should return the same as getAnswers() if includeRetired is true * @should not return retired answers if includeRetired is false */ public Collection<ConceptAnswer> getAnswers(boolean includeRetired) { if (!includeRetired) { Collection<ConceptAnswer> newAnswers = new HashSet<ConceptAnswer>(); if (answers != null) { for (ConceptAnswer ca : answers) { if (!ca.getAnswerConcept().isRetired()) { newAnswers.add(ca); } } } return newAnswers; } else { return getAnswers(); } } /** * Set this Concept as having the given <code>answers</code>; This method assumes that the * sort_weight 
has already been set. * * @param answers The answers to set. */ @ElementList public void setAnswers(Collection<ConceptAnswer> answers) { this.answers = answers; } /** * Add the given ConceptAnswer to the list of answers for this Concept * * @param conceptAnswer * @should add the ConceptAnswer to Concept * @should not fail if answers list is null * @should not fail if answers contains ConceptAnswer already * @should set the sort weight to the max plus one if not provided */ public void addAnswer(ConceptAnswer conceptAnswer) { if (conceptAnswer != null) { if (!getAnswers().contains(conceptAnswer)) { conceptAnswer.setConcept(this); getAnswers().add(conceptAnswer); } if ((conceptAnswer.getSortWeight() == null) || (conceptAnswer.getSortWeight() <= 0)) { //find largest sort weight ConceptAnswer a = Collections.max(answers); Double sortWeight = (a == null) ? 1d : ((a.getSortWeight() == null) ? 1d : a.getSortWeight() + 1d);//a.sortWeight can be NULL conceptAnswer.setSortWeight(sortWeight); } } } /** * Remove the given answer from the list of answers for this Concept * * @param conceptAnswer answer to remove * @return true if the entity was removed, false otherwise * @should not fail if answers is empty * @should not fail if given answer does not exist in list */ public boolean removeAnswer(ConceptAnswer conceptAnswer) { return getAnswers().remove(conceptAnswer); } /** * @return Returns the changedBy. */ @Element(required = false) public User getChangedBy() { return changedBy; } /** * @param changedBy The changedBy to set. */ @Element(required = false) public void setChangedBy(User changedBy) { this.changedBy = changedBy; } /** * @return Returns the conceptClass. */ @Element public ConceptClass getConceptClass() { return conceptClass; } /** * @param conceptClass The conceptClass to set. 
*/ @Element public void setConceptClass(ConceptClass conceptClass) { this.conceptClass = conceptClass; } /** * whether or not this concept is a set */ public Boolean isSet() { return set; } /** * @param set whether or not this concept is a set */ @Attribute public void setSet(Boolean set) { this.set = set; } @Attribute public Boolean getSet() { return isSet(); } /** * @return Returns the conceptDatatype. */ @Element public ConceptDatatype getDatatype() { return datatype; } /** * @param conceptDatatype The conceptDatatype to set. */ @Element public void setDatatype(ConceptDatatype conceptDatatype) { this.datatype = conceptDatatype; } /** * @return Returns the conceptId. */ @Attribute(required = true) public Integer getConceptId() { return conceptId; } /** * @param conceptId The conceptId to set. */ @Attribute(required = true) public void setConceptId(Integer conceptId) { this.conceptId = conceptId; } /** * @return Returns the creator. */ @Element public User getCreator() { return creator; } /** * @param creator The creator to set. */ @Element public void setCreator(User creator) { this.creator = creator; } /** * @return Returns the dateChanged. */ @Element(required = false) public Date getDateChanged() { return dateChanged; } /** * @param dateChanged The dateChanged to set. */ @Element(required = false) public void setDateChanged(Date dateChanged) { this.dateChanged = dateChanged; } /** * @return Returns the dateCreated. */ @Element public Date getDateCreated() { return dateCreated; } /** * @param dateCreated The dateCreated to set. */ @Element public void setDateCreated(Date dateCreated) { this.dateCreated = dateCreated; } /** * Sets the preferred name /in this locale/ to the specified conceptName and its Locale, if * there is an existing preferred name for this concept in the same locale, this one will * replace the old preferred name. Also, the name is added to the concept if it is not already * among the concept names. 
* * @param preferredName The name to be marked as preferred in its locale * @should only allow one preferred name * @should add the name to the list of names if it not among them before * @should fail if the preferred name to set to is an index term */ public void setPreferredName(ConceptName preferredName) { if (preferredName == null || preferredName.isVoided() || preferredName.isIndexTerm()) { throw new APIException("Concept.error.preferredName.null", (Object[]) null); } else if (preferredName.getLocale() == null) { throw new APIException("Concept.name.locale.null", (Object[]) null); } //first revert the current preferred name(if any) from being preferred ConceptName oldPreferredName = getPreferredName(preferredName.getLocale()); if (oldPreferredName != null) { oldPreferredName.setLocalePreferred(false); } preferredName.setLocalePreferred(true); //add this name, if it is new or not among this concept's names if (preferredName.getConceptNameId() == null || !getNames().contains(preferredName)) { addName(preferredName); } } /** * A convenience method to get the concept-name (if any) which has a particular tag. This does * not guarantee that the returned name is the only one with the tag. * * @param conceptNameTag the tag for which to look * @return the tagged name, or null if no name has the tag */ public ConceptName findNameTaggedWith(ConceptNameTag conceptNameTag) { ConceptName taggedName = null; for (ConceptName possibleName : getNames()) { if (possibleName.hasTag(conceptNameTag)) { taggedName = possibleName; break; } } return taggedName; } /** * Returns a name in the given locale. If a name isn't found with an exact match, a compatible * locale match is returned. If no name is found matching either of those, the first name * defined for this concept is returned. 
 * 
 * @param locale the locale to fetch for
 * @return ConceptName attributed to the Concept in the given locale
 * @since 1.5
 * @see Concept#getNames(Locale) to get all the names for a locale,
 * @see Concept#getPreferredName(Locale) for the preferred name (if any)
 */
public ConceptName getName(Locale locale) {
	// Delegates with exact=false, i.e. a compatible-locale fallback is allowed.
	return getName(locale, false);
}

/**
 * Returns concept name, the look up for the appropriate name is done in the following order;
 * <ul>
 * <li>First name found in any locale that is explicitly marked as preferred while searching
 * available locales in order of preference (the locales are traversed in their order as they
 * are listed in the 'locale.allowed.list' including english global property).</li>
 * <li>First "Fully Specified" name found while searching available locales in order of
 * preference.</li>
 * <li>The first fully specified name found while searching through all names for the concept</li>
 * <li>The first synonym found while searching through all names for the concept.</li>
 * <li>The first random name found(except index terms) while searching through all names.</li>
 * </ul>
 * 
 * @return {@link ConceptName} in the current locale or any locale if none found
 * @since 1.5
 * @see Concept#getNames(Locale) to get all the names for a locale
 * @see Concept#getPreferredName(Locale) for the preferred name (if any)
 * @should return the name explicitly marked as locale preferred if any is present
 * @should return the fully specified name in a locale if no preferred name is set
 * @should return null if the only added name is an index term
 * @should return name in broader locale incase none is found in specific one
 */
public ConceptName getName() {
	// No names at all: nothing to return.
	if (getNames().size() == 0) {
		if (log.isDebugEnabled()) {
			log.debug("there are no names defined for: " + conceptId);
		}
		return null;
	}
	
	// Pass 1: walk the configured locales in preference order; within each locale
	// a preferred name wins over a fully specified one.
	for (Locale currentLocale : LocaleUtility.getLocalesInOrder()) {
		ConceptName preferredName = getPreferredName(currentLocale);
		if (preferredName != null) {
			return preferredName;
		}
		
		ConceptName fullySpecifiedName = getFullySpecifiedName(currentLocale);
		if (fullySpecifiedName != null) {
			return fullySpecifiedName;
		}
		
		//if the locale has variants e.g en_GB, try names in the locale excluding the country code i.e en
		if (!StringUtils.isBlank(currentLocale.getCountry()) || !StringUtils.isBlank(currentLocale.getVariant())) {
			Locale broaderLocale = new Locale(currentLocale.getLanguage());
			ConceptName prefNameInBroaderLoc = getPreferredName(broaderLocale);
			if (prefNameInBroaderLoc != null) {
				return prefNameInBroaderLoc;
			}
			
			ConceptName fullySpecNameInBroaderLoc = getFullySpecifiedName(broaderLocale);
			if (fullySpecNameInBroaderLoc != null) {
				return fullySpecNameInBroaderLoc;
			}
		}
	}
	
	// Pass 2: no locale-based match; fall back to the first fully specified name
	// in any locale, then the first synonym in any locale.
	for (ConceptName cn : getNames()) {
		if (cn.isFullySpecifiedName()) {
			return cn;
		}
	}
	
	if (getSynonyms().size() > 0) {
		return getSynonyms().iterator().next();
	}
	
	//we dont expect to get here since every concept name must have atleast
	//one fully specified name, but just in case(probably inconsistent data)
	return null;
}

/**
 * Checks whether this concept has the given string in any of the names in the given locale
 * already.
 *
 * @param name the ConceptName.name to compare to
 * @param locale the locale to look in (null to check all locales)
 * @return true/false whether the name exists already
 */
public boolean hasName(String name, Locale locale) {
	if (name == null) {
		return false;
	}
	
	// null locale means "search every locale"; otherwise restrict to that locale.
	Collection<ConceptName> currentNames = null;
	if (locale == null) {
		currentNames = getNames();
	} else {
		currentNames = getNames(locale);
	}
	
	// Comparison is case-insensitive.
	for (ConceptName currentName : currentNames) {
		if (name.equalsIgnoreCase(currentName.getName())) {
			return true;
		}
	}
	
	return false;
}

/**
 * Returns concept name depending of locale, type (short, fully specified, etc) and tag.
 * Searches in the locale, and then the locale's parent if nothing is found.
* * @param ofType find a name of this type (optional) * @param havingTag find a name with this tag (optional) * @param locale find a name with this locale (required) * @return a name that matches the arguments, or null if none is found. If there are multiple * matches and one is locale_preferred, that will be returned, otherwise a random one of * the matches will be returned. * @since 1.9 **/ public ConceptName getName(Locale locale, ConceptNameType ofType, ConceptNameTag havingTag) { Collection<ConceptName> namesInLocale = getNames(locale); if (!namesInLocale.isEmpty()) { List<ConceptName> matches = new ArrayList<ConceptName>(); for (ConceptName candidate : namesInLocale) { if ((ofType == null || ofType.equals(candidate.getConceptNameType())) && (havingTag == null || candidate.hasTag(havingTag))) { matches.add(candidate); } } // if we have any matches, we'll return one of them if (matches.size() == 1) { return matches.get(0); } else if (matches.size() > 1) { for (ConceptName match : matches) { if (match.isLocalePreferred()) { return match; } } // none was explicitly marked as preferred return matches.get(0); } } // if we reach here, there were no matching names, so try to look in the parent locale Locale parent = new Locale(locale.getLanguage()); if (!parent.equals(locale)) { return getName(parent, ofType, havingTag); } else { return null; } } /** * Returns a name in the given locale. If a name isn't found with an exact match, a compatible * locale match is returned. If no name is found matching either of those, the first name * defined for this concept is returned. 
* * @param locale the language and country in which the name is used * @param exact true/false to return only exact locale (no default locale) * @return the closest name in the given locale, or the first name * @see Concept#getNames(Locale) to get all the names for a locale, * @see Concept#getPreferredName(Locale) for the preferred name (if any) * @should return exact name locale match given exact equals true * @should return loose match given exact equals false * @should return null if no names are found in locale given exact equals true * @should return any name if no locale match given exact equals false * @should return name in broader locale incase none is found in specific one */ public ConceptName getName(Locale locale, boolean exact) { // fail early if this concept has no names defined if (getNames().size() == 0) { if (log.isDebugEnabled()) { log.debug("there are no names defined for: " + conceptId); } return null; } if (log.isDebugEnabled()) { log.debug("Getting conceptName for locale: " + locale); } ConceptName exactName = getNameInLocale(locale); if (exactName != null) { return exactName; } if (!exact) { Locale broaderLocale = new Locale(locale.getLanguage()); ConceptName name = getNameInLocale(broaderLocale); return name; } return null; } /** * Gets the best name in the specified locale. * * @param locale * @return null if name in given locale doesn't exist */ private ConceptName getNameInLocale(Locale locale) { ConceptName preferredName = getPreferredName(locale); if (preferredName != null) { return preferredName; } ConceptName fullySpecifiedName = getFullySpecifiedName(locale); if (fullySpecifiedName != null) { return fullySpecifiedName; } else if (getSynonyms(locale).size() > 0) { return getSynonyms(locale).iterator().next(); } return null; } /** * Returns the name which is explicitly marked as preferred for a given locale. 
* * @param forLocale locale for which to return a preferred name * @return preferred name for the locale, or null if no preferred name is specified * @should return the concept name explicitly marked as locale preferred * @should return the fully specified name if no name is explicitly marked as locale preferred */ public ConceptName getPreferredName(Locale forLocale) { if (log.isDebugEnabled()) { log.debug("Getting preferred conceptName for locale: " + forLocale); } // fail early if this concept has no names defined if (getNames(forLocale).size() == 0) { if (log.isDebugEnabled()) { log.debug("there are no names defined for concept with id: " + conceptId + " in the locale: " + forLocale); } return null; } else if (forLocale == null) { log.warn("Locale cannot be null"); return null; } for (ConceptName nameInLocale : getNames(forLocale)) { if (ObjectUtils.nullSafeEquals(nameInLocale.isLocalePreferred(), true)) { return nameInLocale; } } // look for partially locale match - any language matches takes precedence over country matches. 
ConceptName bestMatch = null; for (ConceptName nameInLocale : getPartiallyCompatibleNames(forLocale)) { if (ObjectUtils.nullSafeEquals(nameInLocale.isLocalePreferred(), true)) { Locale nameLocale = nameInLocale.getLocale(); if (forLocale.getLanguage().equals(nameLocale.getLanguage())) { return nameInLocale; } else { bestMatch = nameInLocale; } } } if (bestMatch != null) { return bestMatch; } return getFullySpecifiedName(forLocale); } /** * Convenience method that returns the fully specified name in the locale * * @param locale locale from which to look up the fully specified name * @return the name explicitly marked as fully specified for the locale * @should return the name marked as fully specified for the given locale */ public ConceptName getFullySpecifiedName(Locale locale) { if (locale != null && getNames(locale).size() > 0) { //get the first fully specified name, since every concept must have a fully specified name, //then, this loop will have to return a name for (ConceptName conceptName : getNames(locale)) { if (ObjectUtils.nullSafeEquals(conceptName.isFullySpecifiedName(), true)) { return conceptName; } } // look for partially locale match - any language matches takes precedence over country matches. ConceptName bestMatch = null; for (ConceptName conceptName : getPartiallyCompatibleNames(locale)) { if (ObjectUtils.nullSafeEquals(conceptName.isFullySpecifiedName(), true)) { Locale nameLocale = conceptName.getLocale(); if (locale.getLanguage().equals(nameLocale.getLanguage())) { return conceptName; } bestMatch = conceptName; } } return bestMatch; } return null; } /** * Returns all names available in a specific locale. <br> * <br> * This is recommended when managing the concept dictionary. 
* * @param locale locale for which names should be returned * @return Collection of ConceptNames with the given locale */ public Collection<ConceptName> getNames(Locale locale) { Collection<ConceptName> localeNames = new Vector<ConceptName>(); for (ConceptName possibleName : getNames()) { if (possibleName.getLocale().equals(locale)) { localeNames.add(possibleName); } } return localeNames; } /** * Returns all names available for locale langueage "or" country. <br> * <br> * * @param locale locale for which names should be returned * @return Collection of ConceptNames with the given locale langueage or country */ private Collection<ConceptName> getPartiallyCompatibleNames(Locale locale) { Collection<ConceptName> localeNames = new Vector<ConceptName>(); String language = locale.getLanguage(); String country = locale.getCountry(); for (ConceptName possibleName : getNames()) { Locale possibleLocale = possibleName.getLocale(); if (language.equals(possibleLocale.getLanguage()) || (StringUtils.isNotBlank(country) && country.equals(possibleLocale.getCountry()))) { localeNames.add(possibleName); } } return localeNames; } /** * Returns all names from compatible locales. A locale is considered compatible if it is exactly * the same locale, or if either locale has no country specified and the language matches. <br> * <br> * This is recommended when presenting possible names to the use. 
* * @param desiredLocale locale with which the names should be compatible * @return Collection of compatible names * @should exclude incompatible country locales * @should exclude incompatible language locales */ public List<ConceptName> getCompatibleNames(Locale desiredLocale) { // lazy create the cache List<ConceptName> compatibleNames = null; if (compatibleCache == null) { compatibleCache = new HashMap<Locale, List<ConceptName>>(); } else { compatibleNames = compatibleCache.get(desiredLocale); } if (compatibleNames == null) { compatibleNames = new Vector<ConceptName>(); for (ConceptName possibleName : getNames()) { if (LocaleUtility.areCompatible(possibleName.getLocale(), desiredLocale)) { compatibleNames.add(possibleName); } } compatibleCache.put(desiredLocale, compatibleNames); } return compatibleNames; } /** * Sets the specified name as the fully specified name for the locale and the current fully * specified (if any) ceases to be the fully specified name for the locale. * * @param fullySpecifiedName the new fully specified name to set * @should set the concept name type of the specified name to fully specified * @should convert the previous fully specified name if any to a synonym * @should add the name to the list of names if it not among them before */ public void setFullySpecifiedName(ConceptName fullySpecifiedName) { if (fullySpecifiedName == null || fullySpecifiedName.getLocale() == null) { throw new APIException("Concept.name.locale.null", (Object[]) null); } else if (fullySpecifiedName.isVoided()) { throw new APIException("Concept.error.fullySpecifiedName.null", (Object[]) null); } ConceptName oldFullySpecifiedName = getFullySpecifiedName(fullySpecifiedName.getLocale()); if (oldFullySpecifiedName != null) { oldFullySpecifiedName.setConceptNameType(null); } fullySpecifiedName.setConceptNameType(ConceptNameType.FULLY_SPECIFIED); //add this name, if it is new or not among this concept's names if (fullySpecifiedName.getConceptNameId() == null || 
!getNames().contains(fullySpecifiedName)) { addName(fullySpecifiedName); } } /** * Sets the specified name as the short name for the locale and the current shortName(if any) * ceases to be the short name for the locale. * * @param shortName the new shortName to set * @should set the concept name type of the specified name to short * @should convert the previous shortName if any to a synonym * @should add the name to the list of names if it not among them before * @should void old short name if new one is blank (do not save blanks!) */ public void setShortName(ConceptName shortName) { if (shortName != null) { if (shortName.getLocale() == null) { throw new APIException("Concept.name.locale.null", (Object[]) null); } ConceptName oldShortName = getShortNameInLocale(shortName.getLocale()); if (oldShortName != null) { oldShortName.setConceptNameType(null); } shortName.setConceptNameType(ConceptNameType.SHORT); if (StringUtils.isNotBlank(shortName.getName()) && (shortName.getConceptNameId() == null || !getNames().contains(shortName))) { //add this name, if it is new or not among this concept's names addName(shortName); } } else { throw new APIException("Concept.error.shortName.null", (Object[]) null); } } /** * Gets the explicitly specified short name for a locale. * * @param locale locale for which to find a short name * @return the short name, or null if none has been explicitly set */ public ConceptName getShortNameInLocale(Locale locale) { ConceptName bestMatch = null; if (locale != null && getShortNames().size() > 0) { for (ConceptName shortName : getShortNames()) { Locale nameLocale = shortName.getLocale(); if (nameLocale.equals(locale)) { return shortName; } // test for partially locale match - any language matches takes precedence over country matches. 
if (OpenmrsUtil.nullSafeEquals(locale.getLanguage(), nameLocale.getLanguage())) { bestMatch = shortName; } else if (bestMatch == null && StringUtils.isNotBlank(locale.getCountry()) && locale.getCountry().equals(nameLocale.getCountry())) { bestMatch = shortName; } } } return bestMatch; } /** * Gets a collection of short names for this concept from all locales. * * @return a collection of all short names for this concept */ public Collection<ConceptName> getShortNames() { Vector<ConceptName> shortNames = new Vector<ConceptName>(); if (getNames().size() == 0) { if (log.isDebugEnabled()) { log.debug("The Concept with id: " + conceptId + " has no names"); } } else { for (ConceptName name : getNames()) { if (name.isShort()) { shortNames.add(name); } } } return shortNames; } /** * Returns the short form name for a locale, or if none has been identified, the shortest name * available in the locale. If exact is false, the shortest name from any locale is returned * * @param locale the language and country in which the short name is used * @param exact true/false to return only exact locale (no default locale) * @return the appropriate short name, or null if not found * @should return the name marked as the shortName for the locale if it is present * @should return the shortest name in a given locale for a concept if exact is true * @should return the shortest name for the concept from any locale if exact is false * @should return null if their are no names in the specified locale and exact is true */ public ConceptName getShortestName(Locale locale, Boolean exact) { if (log.isDebugEnabled()) { log.debug("Getting shortest conceptName for locale: " + locale); } ConceptName shortNameInLocale = getShortNameInLocale(locale); if (shortNameInLocale != null) { return shortNameInLocale; } ConceptName shortestNameForLocale = null; ConceptName shortestNameForConcept = null; if (locale != null) { for (ConceptName possibleName : getNames()) { if (possibleName.getLocale().equals(locale) 
&& ((shortestNameForLocale == null) || (possibleName.getName().length() < shortestNameForLocale .getName().length()))) { shortestNameForLocale = possibleName; } if ((shortestNameForConcept == null) || (possibleName.getName().length() < shortestNameForConcept.getName().length())) { shortestNameForConcept = possibleName; } } } if (exact) { if (shortestNameForLocale == null) { log.warn("No short concept name found for concept id " + conceptId + " for locale " + locale.getDisplayName()); } return shortestNameForLocale; } return shortestNameForConcept; } /** * @param name A name * @return whether this concept has the given name in any locale */ public boolean isNamed(String name) { for (ConceptName cn : getNames()) { if (name.equals(cn.getName())) { return true; } } return false; } /** * Gets the list of all non-retired concept names which are index terms for this concept * * @return a collection of concept names which are index terms for this concept * @since 1.7 */ public Collection<ConceptName> getIndexTerms() { Collection<ConceptName> indexTerms = new Vector<ConceptName>(); for (ConceptName name : getNames()) { if (name.isIndexTerm()) { indexTerms.add(name); } } return indexTerms; } /** * Gets the list of all non-retired concept names which are index terms in a given locale * * @param locale the locale for the index terms to return * @return a collection of concept names which are index terms in the given locale * @since 1.7 */ public Collection<ConceptName> getIndexTermsForLocale(Locale locale) { Vector<ConceptName> indexTermsForLocale = new Vector<ConceptName>(); if (getIndexTerms().size() > 0) { for (ConceptName name : getIndexTerms()) { if (name.getLocale().equals(locale)) { indexTermsForLocale.add(name); } } } return indexTermsForLocale; } /** * @return Returns the names. */ @ElementList public Collection<ConceptName> getNames() { return getNames(false); } /** * @return Returns the names. * @param includeVoided Include voided ConceptNames if true. 
 */
public Collection<ConceptName> getNames(boolean includeVoided) {
	Collection<ConceptName> ret = new HashSet<ConceptName>();
	if (includeVoided) {
		// NOTE: when voided names are included, the LIVE backing collection is
		// returned (not a copy), so callers can mutate this concept's names.
		if (names != null) {
			return names;
		} else {
			return ret;
		}
	} else {
		// Otherwise copy only the non-voided names into a fresh set.
		if (names != null) {
			for (ConceptName cn : names) {
				if (!cn.isVoided()) {
					ret.add(cn);
				}
			}
		}
		return ret;
	}
}

/**
 * @param names The names to set.
 */
@ElementList
public void setNames(Collection<ConceptName> names) {
	this.names = names;
}

/**
 * Add the given ConceptName to the list of names for this Concept, maintaining the
 * per-locale invariants: the first name ever added is forced to be fully specified,
 * and adding a preferred/fully-specified/short name demotes the current holder of
 * that role in the same locale.
 *
 * @param conceptName the name to add; a null argument is silently ignored
 * @should replace the old preferred name with a current one
 * @should replace the old fully specified name with a current one
 * @should replace the old short name with a current one
 * @should mark the first name added as fully specified
 */
public void addName(ConceptName conceptName) {
	if (conceptName != null) {
		// Establish the back-reference before any lookups below run.
		conceptName.setConcept(this);
		if (names == null) {
			names = new HashSet<ConceptName>();
		}
		if (!names.contains(conceptName)) {
			// Invariant: every concept's very first name must be fully specified,
			// regardless of the type the caller set on it.
			if (getNames().size() == 0
			        && !OpenmrsUtil.nullSafeEquals(conceptName.getConceptNameType(), ConceptNameType.FULLY_SPECIFIED)) {
				conceptName.setConceptNameType(ConceptNameType.FULLY_SPECIFIED);
			} else {
				// Only one preferred name per locale: demote the existing one.
				// (Index terms are never treated as preferred.)
				if (conceptName.isPreferred() && !conceptName.isIndexTerm() && conceptName.getLocale() != null) {
					ConceptName prefName = getPreferredName(conceptName.getLocale());
					if (prefName != null) {
						prefName.setLocalePreferred(false);
					}
				}
				// Only one fully specified (or short) name per locale: the previous
				// holder loses its type and effectively becomes a synonym.
				if (conceptName.isFullySpecifiedName() && conceptName.getLocale() != null) {
					ConceptName fullySpecName = getFullySpecifiedName(conceptName.getLocale());
					if (fullySpecName != null) {
						fullySpecName.setConceptNameType(null);
					}
				} else if (conceptName.isShort() && conceptName.getLocale() != null) {
					ConceptName shortName = getShortNameInLocale(conceptName.getLocale());
					if (shortName != null) {
						shortName.setConceptNameType(null);
					}
				}
			}
			names.add(conceptName);
			if (compatibleCache != null) {
				compatibleCache.clear(); // clear the locale cache, forcing it to be rebuilt
			}
		}
	}
}

/**
 * Remove
the given name from the list of names for this Concept * * @param conceptName * @return true if the entity was removed, false otherwise */ public boolean removeName(ConceptName conceptName) { if (names != null) { return names.remove(conceptName); } else { return false; } } /** * Finds the description of the concept using the current locale in Context.getLocale(). Returns * null if none found. * * @return ConceptDescription attributed to the Concept in the given locale */ public ConceptDescription getDescription() { return getDescription(Context.getLocale()); } /** * Finds the description of the concept in the given locale. Returns null if none found. * * @param locale * @return ConceptDescription attributed to the Concept in the given locale */ public ConceptDescription getDescription(Locale locale) { return getDescription(locale, false); } /** * Returns the preferred description for a locale. * * @param locale the language and country in which the description is used * @param exact true/false to return only exact locale (no default locale) * @return the appropriate description, or null if not found * @should return match on locale exactly * @should return match on language only * @should not return match on language only if exact match exists * @should not return language only match for exact matches */ public ConceptDescription getDescription(Locale locale, boolean exact) { log.debug("Getting ConceptDescription for locale: " + locale); ConceptDescription foundDescription = null; if (locale == null) { locale = LocaleUtility.getDefaultLocale(); } Locale desiredLocale = locale; ConceptDescription defaultDescription = null; for (Iterator<ConceptDescription> i = getDescriptions().iterator(); i.hasNext();) { ConceptDescription availableDescription = i.next(); Locale availableLocale = availableDescription.getLocale(); if (availableLocale.equals(desiredLocale)) { foundDescription = availableDescription; break; // skip out now because we found an exact locale match } if 
(!exact && LocaleUtility.areCompatible(availableLocale, desiredLocale)) { foundDescription = availableDescription; } if (availableLocale.equals(LocaleUtility.getDefaultLocale())) { defaultDescription = availableDescription; } } if (foundDescription == null) { // no description with the given locale was found. // return null if exact match desired if (exact) { log.debug("No concept description found for concept id " + conceptId + " for locale " + desiredLocale.toString()); } else { // returning default description locale ("en") if exact match // not desired if (defaultDescription == null) { log.debug("No concept description found for default locale for concept id " + conceptId); } else { foundDescription = defaultDescription; } } } return foundDescription; } /** * @return the retiredBy */ public User getRetiredBy() { return retiredBy; } /** * @param retiredBy the retiredBy to set */ public void setRetiredBy(User retiredBy) { this.retiredBy = retiredBy; } /** * @return the dateRetired */ public Date getDateRetired() { return dateRetired; } /** * @param dateRetired the dateRetired to set */ public void setDateRetired(Date dateRetired) { this.dateRetired = dateRetired; } /** * @return the retireReason */ public String getRetireReason() { return retireReason; } /** * @param retireReason the retireReason to set */ public void setRetireReason(String retireReason) { this.retireReason = retireReason; } /** * @return Returns the descriptions. */ @ElementList public Collection<ConceptDescription> getDescriptions() { return descriptions; } /** * Sets the collection of descriptions for this Concept. 
* * @param descriptions the collection of descriptions */ @ElementList public void setDescriptions(Collection<ConceptDescription> descriptions) { this.descriptions = descriptions; } /** * Add the given description to the list of descriptions for this Concept * * @param description the description to add */ public void addDescription(ConceptDescription description) { if (description != null) { if (getDescriptions() == null) { descriptions = new HashSet<ConceptDescription>(); description.setConcept(this); descriptions.add(description); } else if (!descriptions.contains(description)) { description.setConcept(this); descriptions.add(description); } } } /** * Remove the given description from the list of descriptions for this Concept * * @param description the description to remove * @return true if the entity was removed, false otherwise */ public boolean removeDescription(ConceptDescription description) { if (getDescriptions() != null) { return descriptions.remove(description); } else { return false; } } /** * @return Returns the retired. */ public Boolean isRetired() { return retired; } /** * This method delegates to {@link #isRetired()}. This is only needed for jstl syntax like * ${concept.retired} because the return type is a Boolean object instead of a boolean * primitive type. * * @see org.openmrs.Retireable#isRetired() * @since 2.0.0 */ public Boolean getRetired() { return isRetired(); } /** * @param retired The retired to set. */ @Attribute public void setRetired(Boolean retired) { this.retired = retired; } /** * Gets the synonyms in the given locale. Returns a list of names from the same language with * the preferred synonym sorted first, or an empty list if none found. 
* * @param locale * @return Collection of ConceptNames which are synonyms for the Concept in the given locale */ public Collection<ConceptName> getSynonyms(Locale locale) { List<ConceptName> syns = new Vector<ConceptName>(); ConceptName preferredConceptName = null; for (ConceptName possibleSynonymInLoc : getSynonyms()) { if (locale.equals(possibleSynonymInLoc.getLocale())) { if (possibleSynonymInLoc.isPreferred()) { preferredConceptName = possibleSynonymInLoc; } else { syns.add(possibleSynonymInLoc); } } } // Add preferred name first in the list. if (preferredConceptName != null) { syns.add(0, preferredConceptName); } log.debug("returning: " + syns); return syns; } /** * Gets all the non-retired synonyms. * * @return Collection of ConceptNames which are synonyms for the Concept or an empty list if * none is found * @since 1.7 */ public Collection<ConceptName> getSynonyms() { Collection<ConceptName> synonyms = new Vector<ConceptName>(); for (ConceptName possibleSynonym : getNames()) { if (possibleSynonym.isSynonym()) { synonyms.add(possibleSynonym); } } log.debug("returning: " + synonyms); return synonyms; } /** * @return Returns the version. */ @Attribute(required = false) public String getVersion() { return version; } /** * @param version The version to set. */ @Attribute(required = false) public void setVersion(String version) { this.version = version; } /** * @return Returns the conceptSets. */ @ElementList(required = false) public Collection<ConceptSet> getConceptSets() { return conceptSets; } /** * @param conceptSets The conceptSets to set. */ @ElementList(required = false) public void setConceptSets(Collection<ConceptSet> conceptSets) { this.conceptSets = conceptSets; } /** * Whether this concept is numeric or not. This will <i>always</i> return false for concept * objects. ConceptNumeric.isNumeric() will then <i>always</i> return true. 
* * @return false */ public boolean isNumeric() { return false; } /** * @return the conceptMappings for this concept */ @ElementList(required = false) public Collection<ConceptMap> getConceptMappings() { if (conceptMappings == null) { conceptMappings = new HashSet<ConceptMap>(); } return conceptMappings; } /** * @param conceptMappings the conceptMappings to set */ @ElementList(required = false) public void setConceptMappings(Collection<ConceptMap> conceptMappings) { this.conceptMappings = conceptMappings; } /** * Add the given ConceptMap object to this concept's list of concept mappings. If there is * already a corresponding ConceptMap object for this concept already, this one will not be * added. * * @param newConceptMap */ public void addConceptMapping(ConceptMap newConceptMap) { if (conceptMappings == null) { conceptMappings = new HashSet<ConceptMap>(); } if (newConceptMap != null) { newConceptMap.setConcept(this); } if (newConceptMap != null && !conceptMappings.contains(newConceptMap)) { if (newConceptMap.getConceptMapType() == null) { newConceptMap.setConceptMapType(Context.getConceptService().getDefaultConceptMapType()); } conceptMappings.add(newConceptMap); } } /** * Child Class ConceptComplex overrides this method and returns true. See * {@link org.openmrs.ConceptComplex#isComplex()}. Otherwise this method returns false. 
* * @return false * @since 1.5 */ public boolean isComplex() { return false; } /** * Remove the given ConceptMap from the list of mappings for this Concept * * @param conceptMap * @return true if the entity was removed, false otherwise */ public boolean removeConceptMapping(ConceptMap conceptMap) { return getConceptMappings().remove(conceptMap); } /** * @see java.lang.Object#toString() */ @Override public String toString() { if (conceptId == null) { return ""; } return conceptId.toString(); } /** * @see org.openmrs.Attributable#findPossibleValues(java.lang.String) */ public List<Concept> findPossibleValues(String searchText) { List<Concept> concepts = new Vector<Concept>(); try { for (ConceptSearchResult searchResult : Context.getConceptService().getConcepts(searchText, Collections.singletonList(Context.getLocale()), false, null, null, null, null, null, null, null)) { concepts.add(searchResult.getConcept()); } } catch (Exception e) { // pass } return concepts; } /** * @see org.openmrs.Attributable#getPossibleValues() */ public List<Concept> getPossibleValues() { try { return Context.getConceptService().getConceptsByName(""); } catch (Exception e) { // pass } return Collections.emptyList(); } /** * @see org.openmrs.Attributable#hydrate(java.lang.String) */ public Concept hydrate(String s) { try { return Context.getConceptService().getConcept(Integer.valueOf(s)); } catch (Exception e) { // pass } return null; } /** * Turns this concept into a very very simple serialized string * * @see org.openmrs.Attributable#serialize() */ public String serialize() { if (this.getConceptId() == null) { return ""; } return "" + this.getConceptId(); } /** * @see org.openmrs.Attributable#getDisplayString() */ public String getDisplayString() { if (getName() == null) { return toString(); } else { return getName().getName(); } } /** * Convenience method that returns a set of all the locales in which names have been added for * this concept. 
* * @return a set of all locales for names for this concept * @since 1.7 * @should return all locales for conceptNames for this concept without duplicates */ public Set<Locale> getAllConceptNameLocales() { if (getNames().size() == 0) { if (log.isDebugEnabled()) { log.debug("The Concept with id: " + conceptId + " has no names"); } return null; } Set<Locale> locales = new HashSet<Locale>(); for (ConceptName cn : getNames()) { locales.add(cn.getLocale()); } return locales; } /** * @since 1.5 * @see org.openmrs.OpenmrsObject#getId() */ public Integer getId() { return getConceptId(); } /** * @since 1.5 * @see org.openmrs.OpenmrsObject#setId(java.lang.Integer) */ public void setId(Integer id) { setConceptId(id); } /** * Sort the ConceptSet based on the weight * * @return sortedConceptSet Collection&lt;ConceptSet&gt; */ private List<ConceptSet> getSortedConceptSets() { List<ConceptSet> cs = new Vector<ConceptSet>(); if (conceptSets != null) { cs.addAll(conceptSets); Collections.sort(cs); } return cs; } /** * Get all the concept members of current concept * * @since 1.7 * @return List&lt;Concept&gt; the Concepts that are members of this Concept's set * @should return concept set members sorted according to the sort weight * @should return all the conceptMembers of current Concept * @should return unmodifiable list of conceptMember list * @should return concept set members sorted with retired last */ public List<Concept> getSetMembers() { List<Concept> conceptMembers = new Vector<Concept>(); Collection<ConceptSet> sortedConceptSet = getSortedConceptSets(); for (ConceptSet conceptSet : sortedConceptSet) { conceptMembers.add(conceptSet.getConcept()); } return Collections.unmodifiableList(conceptMembers); } /** * Appends the concept to the end of the existing list of concept members for this Concept * * @since 1.7 * @param setMember Concept to add to the * @should add concept as a conceptSet * @should append concept to the existing list of conceptSet * @should place the new 
concept last in the list * @should assign the calling component as parent to the ConceptSet */ public void addSetMember(Concept setMember) { addSetMember(setMember, -1); } /** * Add the concept to the existing member to the list of set members in the given location. <br> * <br> * index of 0 is before the first concept<br> * index of -1 is after last.<br> * index of 1 is after the first but before the second, etc<br> * * @param setMember the Concept to add as a child of this Concept * @param index where in the list of set members to put this setMember * @since 1.7 * @should assign the given concept as a ConceptSet * @should insert the concept before the first with zero index * @should insert the concept at the end with negative one index * @should insert the concept in the third slot * @should assign the calling component as parent to the ConceptSet * @should add the concept to the current list of conceptSet * @see #getSortedConceptSets() */ public void addSetMember(Concept setMember, int index) { List<ConceptSet> sortedConceptSets = getSortedConceptSets(); int setsSize = sortedConceptSets.size(); double weight; if (sortedConceptSets.isEmpty()) { weight = 1000.0; } else if (index == -1 || index >= setsSize) { // deals with list size of 1 and any large index given by dev weight = sortedConceptSets.get(setsSize - 1).getSortWeight() + 10.0; } else if (index == 0) { weight = sortedConceptSets.get(0).getSortWeight() - 10.0; } else { // put the weight between two double prevSortWeight = sortedConceptSets.get(index - 1).getSortWeight(); double nextSortWeight = sortedConceptSets.get(index).getSortWeight(); weight = (prevSortWeight + nextSortWeight) / 2; } ConceptSet conceptSet = new ConceptSet(setMember, weight); conceptSet.setConceptSet(this); conceptSets.add(conceptSet); } }
package org.jenkinsci.plugins.pretestedintegration.scm.git;

import hudson.Extension;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Launcher.ProcStarter;
import hudson.model.BuildListener;
import hudson.model.TaskListener;
import hudson.model.AbstractBuild;
import hudson.plugins.git.Branch;
import hudson.plugins.git.GitSCM;
import hudson.plugins.git.util.BuildData;
import hudson.scm.SCM;
import hudson.util.ArgumentListBuilder;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.commons.lang.StringUtils;
import org.jenkinsci.plugins.gitclient.Git;
import org.jenkinsci.plugins.gitclient.GitClient;
import org.jenkinsci.plugins.pretestedintegration.AbstractSCMBridge;
import org.jenkinsci.plugins.pretestedintegration.Commit;
import org.jenkinsci.plugins.pretestedintegration.exceptions.EstablishWorkspaceException;
import org.jenkinsci.plugins.pretestedintegration.PretestedIntegrationAction;
import org.jenkinsci.plugins.pretestedintegration.SCMBridgeDescriptor;
import org.jenkinsci.plugins.pretestedintegration.IntegrationStrategy;
import org.jenkinsci.plugins.pretestedintegration.IntegrationStrategyDescriptor;
import org.jenkinsci.plugins.pretestedintegration.exceptions.CommitChangesFailureException;
import org.jenkinsci.plugins.pretestedintegration.exceptions.DeleteIntegratedBranchException;
import org.jenkinsci.plugins.pretestedintegration.exceptions.NextCommitFailureException;
import org.jenkinsci.plugins.pretestedintegration.exceptions.RollbackFailureException;
import org.kohsuke.stapler.DataBoundConstructor;

/**
 * Git implementation of the pretested-integration SCM bridge. Drives the
 * integration workflow (checkout, merge result push, rollback, branch
 * cleanup) by invoking the {@code git} command line in the build workspace.
 */
public class GitBridge extends AbstractSCMBridge {

    // NOTE(review): never assigned anywhere in this class, so getRevId()
    // currently always returns null — confirm whether a subclass/reflection sets it.
    private String revId;

    @DataBoundConstructor
    public GitBridge(IntegrationStrategy integrationStrategy, final String branch) {
        super(integrationStrategy);
        this.branch = branch;
    }

    /**
     * @return the configured integration branch, defaulting to "master" when
     *         the configured value is blank or null.
     */
    @Override
    public String getBranch() {
        return StringUtils.isBlank(this.branch) ? "master" : this.branch;
    }

    public String getRevId() {
        return this.revId;
    }

    public void setWorkingDirectory(FilePath workingDirectory) {
        this.workingDirectory = workingDirectory;
    }

    public FilePath getWorkingDirectory() {
        return this.workingDirectory;
    }

    /**
     * Resolves the build's configured SCM as Git.
     *
     * @throws InterruptedException when the project is configured with a non-Git SCM
     */
    private GitSCM findScm(AbstractBuild<?, ?> build) throws InterruptedException {
        try {
            SCM scm = build.getProject().getScm();
            GitSCM git = (GitSCM) scm;
            return git;
        } catch (ClassCastException e) {
            throw new InterruptedException("Configured scm is not Git");
        }
    }

    /**
     * Builds a ProcStarter for "git &lt;cmds...&gt;" using the node's configured
     * git executable, with the build workspace as working directory.
     */
    private ProcStarter buildCommand(AbstractBuild<?, ?> build, Launcher launcher, TaskListener listener, String... cmds) throws IOException, InterruptedException {
        GitSCM scm = findScm(build);
        String gitExe = scm.getGitExe(build.getBuiltOn(), listener);
        ArgumentListBuilder b = new ArgumentListBuilder();
        b.add(gitExe);
        b.add(cmds);
        return launcher.launch().cmds(b).pwd(build.getWorkspace());
    }

    /**
     * Invoke a command with git
     *
     * @param build
     * @param launcher
     * @param listener
     * @param cmds
     * @return The exitcode of command
     * @throws IOException
     * @throws InterruptedException
     */
    public int git(AbstractBuild<?, ?> build, Launcher launcher, TaskListener listener, String... cmds) throws IOException, InterruptedException {
        ProcStarter git = buildCommand(build, launcher, listener, cmds);
        int exitCode = git.join();
        return exitCode;
    }

    /**
     * Invoke a command with git, capturing standard output.
     * (FIX: javadoc previously said "mercurial" — copy-paste error.)
     *
     * @param build
     * @param launcher
     * @param listener
     * @param out stream receiving the command's stdout
     * @param cmds
     * @return The exitcode of command
     * @throws IOException
     * @throws InterruptedException
     */
    public int git(AbstractBuild<?, ?> build, Launcher launcher, TaskListener listener, OutputStream out, String... cmds) throws IOException, InterruptedException {
        ProcStarter git = buildCommand(build, launcher, listener, cmds);
        int exitCode = git.stdout(out).join();
        return exitCode;
    }

    /**
     * Checks out the integration target branch and pulls the latest changes,
     * wrapping any failure in an EstablishWorkspaceException.
     */
    @Override
    public void ensureBranch(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener, String branch) throws EstablishWorkspaceException {
        listener.getLogger().println(String.format("Checking out integration target branch %s and pulling latest changes", getBranch()));
        try {
            git(build, launcher, listener, "checkout", getBranch());
            update(build, launcher, listener);
        } catch (IOException ex) {
            throw new EstablishWorkspaceException(ex);
        } catch (InterruptedException ex) {
            throw new EstablishWorkspaceException(ex);
        }
    }

    // NOTE(review): currently unused inside this class; kept in case external
    // callers or subclasses rely on it.
    private GitClient getGitClient(AbstractBuild<?, ?> build, Launcher launcher, TaskListener listener) throws InterruptedException, IOException {
        GitClient client = Git.with(listener, build.getEnvironment(listener)).getClient();
        return client;
    }

    /** Pulls the integration branch from origin into the current workspace. */
    protected void update(AbstractBuild<?, ?> build, Launcher launcher, TaskListener listener) throws IOException, InterruptedException {
        git(build, launcher, listener, "pull", "origin", branch);
    }

    /**
     * 1. Convert the stuff in the commit to Map&lt;String,String&gt;
     * 2. Check the current working branch if there are any more commits in that branch
     * 3. Check the next branch round-robin
     *
     * @return the SHA1 of the branch tip recorded in the build's git BuildData
     * @throws NextCommitFailureException when the build has no usable git build data
     */
    @Override
    public Commit<String> nextCommit(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener, Commit<?> commit) throws NextCommitFailureException {
        logger.finest("Git plugin, nextCommit invoked");
        Commit<String> next = null;
        try {
            BuildData gitBuildData = build.getAction(BuildData.class);
            Branch gitDataBranch = gitBuildData.lastBuild.revision.getBranches().iterator().next();
            next = new Commit<String>(gitDataBranch.getSHA1String());
        } catch (Exception e) {
            logger.finest("Failed to find next commit");
            throw new NextCommitFailureException(e);
        }
        logger.finest("Git plugin, nextCommit returning");
        return next;
    }

    /**
     * Publishes the integrated changes by pushing the integration branch to origin.
     *
     * @throws CommitChangesFailureException when the push fails (non-zero exit code)
     */
    @Override
    public void commit(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws CommitChangesFailureException {
        // Sentinel: stays -99999 if the git invocation itself threw.
        int returncode = -99999;
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        try {
            returncode = git(build, launcher, listener, bos, "push", "origin", getBranch());
        } catch (Exception ex) {
            logger.log(Level.WARNING, "Failed to commit changes to integration branch", ex);
        }
        if (returncode != 0) {
            throw new CommitChangesFailureException(
                    String.format("Failed to commit integrated changes, message was:%n%s", bos.toString()));
        }
    }

    /**
     * Hard-resets the workspace to the last recorded integration tip, undoing a
     * failed integration attempt. No-op when there is no previous pre-test action.
     *
     * @throws RollbackFailureException when the reset runs but exits non-zero
     */
    @Override
    public void rollback(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws RollbackFailureException {
        // Sentinel meaning "reset never attempted" (no previous integration tip).
        int returncode = -9999;
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        Commit<?> lastIntegration = build.getAction(PretestedIntegrationAction.class).getCurrentIntegrationTip();
        try {
            if (lastIntegration != null) {
                returncode = git(build, launcher, listener, bos, "reset", "--hard", (String) lastIntegration.getId());
            }
        } catch (Exception ex) {
            logger.log(Level.WARNING, "Failed to roll back", ex);
        }
        // If the return code is -9999 that means no previous pre-test action
        if (returncode != 0 && returncode != -9999) {
            throw new RollbackFailureException(
                    String.format("Failed to rollback changes, message was:%n%s", bos.toString()));
        }
    }

    /**
     * Deletes the remote development branch that was just integrated, but only
     * when the build result meets the configured required result.
     *
     * @throws DeleteIntegratedBranchException when the remote delete exits non-zero
     */
    @Override
    public void deleteIntegratedBranch(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws DeleteIntegratedBranchException {
        BuildData gitBuildData = build.getAction(BuildData.class);
        Branch gitDataBranch = gitBuildData.lastBuild.revision.getBranches().iterator().next();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        int delRemote = -99999;
        // NOTE(review): build.getResult() can be null while a build is running — confirm
        // this method is only ever invoked after the result is set.
        if (build.getResult().isBetterOrEqualTo(getRequiredResult())) {
            try {
                // "push origin :branch" deletes the branch on the remote.
                delRemote = git(build, launcher, listener, out, "push", "origin", ":" + removeOrigin(gitDataBranch.getName()));
            } catch (Exception ex) {
                logger.log(Level.WARNING, "Failure to delete branch", ex);
            }
            if (delRemote != 0) {
                throw new DeleteIntegratedBranchException(String.format(
                        "Failed to delete the remote branch %s with the following error:%n%s", gitDataBranch.getName(), out.toString()));
            }
        }
    }

    /**
     * Appends "Branch: &lt;name&gt;" to the build description so the integrated
     * branch is visible in the Jenkins UI. Failures are deliberately ignored.
     */
    @Override
    public void updateBuildDescription(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) {
        BuildData gitBuildData = build.getAction(BuildData.class);
        if (gitBuildData != null) {
            Branch gitDataBranch = gitBuildData.lastBuild.revision.getBranches().iterator().next();
            String text = "";
            if (!StringUtils.isBlank(build.getDescription())) {
                text = String.format("%s<br/>Branch: %s", build.getDescription(), gitDataBranch.getName());
            } else {
                text = String.format("Branch: %s", gitDataBranch.getName());
            }
            try {
                build.setDescription(text);
            } catch (Exception ex) {
                logger.log(Level.FINE, "Failed to update description", ex); /* Dont care */
            }
        }
    }

    /**
     * Strips the leading remote segment ("origin/") from a branch name.
     * Returns the name unchanged (minus nothing) semantics-wise when no '/' is present?
     * No: indexOf returning -1 yields substring(0), i.e. the whole name.
     */
    private String removeOrigin(String branchName) {
        // substring(begin) is equivalent to the old substring(begin, length()).
        return branchName.substring(branchName.indexOf("/") + 1);
    }

    /**
     * Finds the current tip of the integration branch by scanning the local
     * branches for one whose name contains the configured branch name.
     *
     * @return the matching branch tip, or null when none matches or git access fails
     */
    @Override
    protected Commit<?> determineIntegrationHead(AbstractBuild<?, ?> build, Launcher launcher, TaskListener listener) {
        Commit<?> commit = null;
        try {
            GitClient client = Git.with(listener, build.getEnvironment(listener)).in(build.getWorkspace()).getClient();
            for (Branch b : client.getBranches()) {
                if (b.getName().contains(getBranch())) {
                    // Parameterized type instead of the raw 'new Commit(...)'.
                    commit = new Commit<String>(b.getSHA1String());
                }
            }
        } catch (IOException ex) {
            // Use the class logger (same logger name as before) for consistency.
            logger.log(Level.SEVERE, null, ex);
        } catch (InterruptedException ex) {
            logger.log(Level.SEVERE, null, ex);
        }
        return commit;
    }

    @Extension
    public static final class DescriptorImpl extends SCMBridgeDescriptor<GitBridge> {

        public DescriptorImpl() {
            load();
        }

        public String getDisplayName() {
            return "Git";
        }

        /** @return the integration strategies applicable to the Git bridge. */
        public List<IntegrationStrategyDescriptor<?>> getIntegrationStrategies() {
            List<IntegrationStrategyDescriptor<?>> list = new ArrayList<IntegrationStrategyDescriptor<?>>();
            for (IntegrationStrategyDescriptor<?> descr : IntegrationStrategy.all()) {
                if (descr.isApplicable(this.clazz)) {
                    list.add(descr);
                }
            }
            return list;
        }

        public IntegrationStrategy getDefaultStrategy() {
            return new SquashCommitStrategy();
        }
    }

    private FilePath workingDirectory = null;

    final static String LOG_PREFIX = "[PREINT-GIT] ";

    private static final Logger logger = Logger.getLogger(GitBridge.class.getName());
}
package org.liquidengine.legui.system.handler;

import org.joml.Vector2f;
import org.liquidengine.legui.component.Component;
import org.liquidengine.legui.component.Frame;
import org.liquidengine.legui.component.Layer;
import org.liquidengine.legui.component.Widget;
import org.liquidengine.legui.event.FocusEvent;
import org.liquidengine.legui.event.MouseClickEvent;
import org.liquidengine.legui.input.Mouse;
import org.liquidengine.legui.listener.processor.EventProcessorProvider;
import org.liquidengine.legui.style.Style.DisplayType;
import org.liquidengine.legui.system.context.Context;
import org.liquidengine.legui.system.event.SystemMouseClickEvent;
import org.lwjgl.glfw.GLFW;

import java.util.Collections;
import java.util.List;

import static org.liquidengine.legui.event.MouseClickEvent.MouseClickAction.*;
import static org.lwjgl.glfw.GLFW.GLFW_RELEASE;

/**
 * Translates raw GLFW mouse-button events into LEGUI component events:
 * PRESS / RELEASE / CLICK MouseClickEvents plus the matching FocusEvents,
 * and keeps the context's focused component and button state in sync.
 */
public class MouseClickEventHandler implements SystemEventHandler<SystemMouseClickEvent> {

    @Override
    public void handle(SystemMouseClickEvent event, Frame frame, Context ctx) {
        // Mirror the hardware state on the Mouse singleton first.
        Mouse.MouseButton btn = Mouse.MouseButton.getByCode(event.button);
        btn.setPressed(event.action != GLFW_RELEASE);
        Vector2f cursorPos = Mouse.getCursorPosition();
        btn.setPressPosition(cursorPos);

        // Hit-test layers topmost-first (getAllLayers() is bottom-up, so reverse).
        List<Layer> layers = frame.getAllLayers();
        Collections.reverse(layers);

        Component focusedGui = ctx.getFocusedGui();
        Component target = null;
        for (Layer layer : layers) {
            if (layer.isEventReceivable()) {
                // Skip layers whose container cannot currently take input.
                if (!layer.getContainer().isVisible() || !layer.getContainer().isEnabled()) {
                    continue;
                }
                target = SehUtil.getTargetComponent(layer, cursorPos);
                if (target != null) {
                    break;
                }
            }
            // A non-passable layer blocks the search even when it yields no target.
            if (!layer.isEventPassable()) {
                break;
            }
        }

        if (target == null) {
            // Click landed on empty space.
            if (event.action == GLFW_RELEASE) {
                updateReleasePosAndFocusedGui(btn, cursorPos, focusedGui);
            } else {
                // Pressing on empty space clears focus.
                ctx.setFocusedGui(null);
            }
        } else {
            int mods = event.mods;
            if (event.action == GLFW.GLFW_PRESS) {
                btn.setPressPosition(cursorPos);
                // Unfocus everything except the new target BEFORE focusing it,
                // so FocusEvents fire in lost-then-gained order.
                removeFocus(target, frame, ctx);
                target.setPressed(true);
                if (focusedGui != target) {
                    target.setFocused(true);
                    ctx.setFocusedGui(target);
                }
                // Cursor position relative to the target component.
                Vector2f position = target.getAbsolutePosition().sub(cursorPos).negate();
                EventProcessorProvider.getInstance().pushEvent(new MouseClickEvent<>(target, ctx, frame, PRESS, btn, position, cursorPos, mods));
                if (focusedGui != target) {
                    EventProcessorProvider.getInstance().pushEvent(new FocusEvent<>(target, ctx, frame, target, true));
                }
            } else {
                updateReleasePosAndFocusedGui(btn, cursorPos, focusedGui);
                Vector2f pos = target.getAbsolutePosition().sub(cursorPos).negate();
                // CLICK only fires when press and release happened on the same component.
                if (focusedGui != null && focusedGui == target) {
                    EventProcessorProvider.getInstance().pushEvent(new MouseClickEvent<>(target, ctx, frame, CLICK, btn, pos, cursorPos, mods));
                }
                // NOTE: RELEASE is deliberately delivered to the component that was
                // pressed (focusedGui), not necessarily the component under the cursor.
                EventProcessorProvider.getInstance().pushEvent(new MouseClickEvent<>(focusedGui, ctx, frame, RELEASE, btn, pos, cursorPos, mods));
            }
            // Any interaction with a widget raises it above its MANUAL-layout siblings.
            pushWidgetsUp(target);
        }
    }

    /** Records the release position and clears the pressed flag of the previously focused component. */
    private void updateReleasePosAndFocusedGui(Mouse.MouseButton button, Vector2f cursorPosition, Component focusedGui) {
        button.setReleasePosition(cursorPosition);
        if (focusedGui != null) {
            focusedGui.setPressed(false);
        }
    }

    /** Removes focus from every component in every layer except the given target. */
    private void removeFocus(Component targetComponent, Frame frame, Context context) {
        List<Layer> allLayers = frame.getAllLayers();
        for (Layer layer : allLayers) {
            List<Component> childComponents = layer.getContainer().getChildComponents();
            for (Component child : childComponents) {
                removeFocus(targetComponent, child, context, frame);
            }
        }
    }

    /** Depth-first unfocus of a component subtree, emitting a focus-lost event per component. */
    private void removeFocus(Component focused, Component component, Context context, Frame frame) {
        if (component != focused && component.isVisible() && component.isFocused()) {
            component.setFocused(false);
            component.setPressed(false);
            EventProcessorProvider.getInstance().pushEvent(new FocusEvent<>(component, context, frame, focused, false));
        }
        List<? extends Component> childComponents = component.getChildComponents();
        for (Component child : childComponents) {
            removeFocus(focused, child, context, frame);
        }
    }

    /**
     * Walks up the ancestor chain from the clicked component; when a Widget is
     * found whose parent uses MANUAL display, the widget is re-added to its
     * parent so it renders on top (remove + add moves it to the end).
     */
    private void pushWidgetsUp(Component gui) {
        Component parent = gui.getParent();
        Component current = gui;
        if (parent != null) {
            boolean push = false;
            while (parent != null) {
                push = (parent instanceof Widget) && (parent.getParent() != null) && (parent.getParent().getStyle().getDisplay() == DisplayType.MANUAL);
                // After the loop: 'current' is the widget to raise, 'parent' its container.
                current = parent;
                parent = parent.getParent();
                if (push) {
                    break;
                }
            }
            if (push) {
                parent.remove(current);
                parent.add(current);
            }
        }
    }
}
package org.percepta.mgrankvi.periodic.gwt.client.periodic;

import com.google.gwt.canvas.client.Canvas;
import com.google.gwt.canvas.dom.client.Context2d;
import com.google.gwt.canvas.dom.client.CssColor;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.dom.client.*;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Timer;
import com.google.gwt.user.client.ui.*;
import com.vaadin.client.VConsole;
import org.percepta.mgrankvi.periodic.gwt.client.PeriodicMovable;
import org.percepta.mgrankvi.periodic.gwt.client.PeriodicPaintable;
import org.percepta.mgrankvi.periodic.gwt.client.PeriodicalItem;

import java.util.LinkedList;
import java.util.List;

/**
 * Canvas-backed widget that draws "periodical" items along a scaled axis,
 * supports dragging them horizontally, and shows a period-length tooltip on
 * hover (immediately, or after a short delay when not in immediate mode).
 */
public class CPeriodic extends Composite implements MouseDownHandler, MouseMoveHandler, MouseUpHandler, MouseOutHandler {

    private static final String CLASSNAME = "c-periodic";

    protected final Canvas canvas;           // main drawing surface (null when Canvas unsupported)
    protected final Canvas tooltipCanvas;    // overlay surface for the hover tooltip
    private final FlowPanel content;
    private AbsolutePanel panel = null;

    private int width = 400;
    private int height = 300;
    private int scale = 31;
    private boolean animate = true;
    private int animationTime = 3000;
    private boolean immediate = false;
    private boolean move = false;            // true while a drag is in progress
    private int down = 0;                    // last client X during a drag

    private final PeriodicScaleAxis scaleAxis = new PeriodicScaleAxis(scale, width, height);
    private final List<PeriodicPaintable> paintable = new LinkedList<PeriodicPaintable>();
    private PeriodicalItem lastPeriodical = null;
    private Timer hold = null;               // pending delayed-tooltip timer

    public CPeriodic() {
        content = new FlowPanel();
        content.setSize(width + "px", height + "px");
        SimplePanel baseContent = new SimplePanel();
        baseContent.add(content);
        initWidget(baseContent);
        setStyleName(CLASSNAME);
        addDomHandler(this, MouseDownEvent.getType());
        addDomHandler(this, MouseMoveEvent.getType());
        addDomHandler(this, MouseUpEvent.getType());
        // FIX: the class implements MouseOutHandler but never registered it, so
        // onMouseOut could not fire and drags were not cancelled on mouse-out.
        addDomHandler(this, MouseOutEvent.getType());
        canvas = Canvas.createIfSupported();
        tooltipCanvas = Canvas.createIfSupported();
        if (canvas != null) {
            panel = new AbsolutePanel();
            panel.add(canvas, 0, 0);
            panel.add(tooltipCanvas, 0, 0);
            content.add(panel);
            if (animate) {
                scaleAxis.animate(3000);
            }
            scaleAxis.setSize(canvas.getContext2d());
            clearCanvas();
        } else {
            getElement().setInnerHTML("Canvas not supported");
        }
    }

    /** Resizes widget, axis and canvases; also clears the drawing surfaces. */
    protected void setSize(int width, int height) {
        this.width = width;
        this.height = height;
        scaleAxis.setWidth(width);
        scaleAxis.setHeight(height);
        if (panel != null) {
            panel.setSize(width + "px", height + "px");
        }
        content.setSize(width + "px", height + "px");
        clearCanvas();
    }

    /** Changes the axis scale and propagates the new step size to all items. */
    protected void setScale(int scale) {
        this.scale = scale;
        scaleAxis.setScale(scale);
        clearCanvas();
        for (PeriodicPaintable item : paintable) {
            item.setStepSize(scaleAxis.perStep);
        }
        paint();
    }

    protected void setImmediate(boolean immediate) {
        this.immediate = immediate;
    }

    /** Clears both canvases by resetting their coordinate spaces. */
    protected void clearCanvas() {
        // FIX: guard against unsupported Canvas — setSize()/setScale() call this
        // unconditionally and would otherwise NPE when createIfSupported() returned null.
        if (canvas == null || tooltipCanvas == null) {
            return;
        }
        canvas.setCoordinateSpaceWidth(width);
        canvas.setCoordinateSpaceHeight(height);
        tooltipCanvas.setCoordinateSpaceWidth(width);
        tooltipCanvas.setCoordinateSpaceHeight(height);
    }

    /** Runs the entry animation on the axis and every item. */
    protected void animate(int time) {
        // 'animate' is raised only for the duration of this call so that items
        // added afterwards do not re-animate (preserved original behavior).
        this.animate = true;
        animationTime = time;
        for (PeriodicPaintable item : paintable) {
            item.animate(time);
        }
        scaleAxis.animate(time);
        this.animate = false;
    }

    /** Repaints the axis and every item right of the vertical scale. */
    protected void paint() {
        scaleAxis.paint(canvas.getContext2d());
        for (PeriodicPaintable item : paintable) {
            if (item.getPosition() > scaleAxis.verticalScaleWidth) {
                item.paint(canvas.getContext2d());
            }
        }
    }

    public void clearChildItems() {
        paintable.clear();
    }

    /**
     * Adds a child widget. PeriodicPaintable children are positioned on the
     * axis and linked into the previous/next chain; others go into the content panel.
     */
    public void add(Widget widget) {
        if (widget instanceof PeriodicPaintable) {
            PeriodicPaintable item = (PeriodicPaintable) widget;
            item.setHeight(scaleAxis.getActiveHeight());
            item.setStepSize(scaleAxis.perStep);
            paintable.add(item);
            // Fan items out horizontally, 20px apart, after the vertical scale.
            item.setPosition(scaleAxis.verticalScaleWidth + (paintable.size() * 20));
            item.setLow(paintable.size() % 2 == 0);
            if (animate) {
                item.animate(animationTime);
            }
            if (widget instanceof PeriodicalItem) {
                // Maintain a doubly linked chain of periodical items.
                if (lastPeriodical != null) {
                    lastPeriodical.addNext((PeriodicalItem) widget);
                    ((PeriodicalItem) widget).addPrevious(lastPeriodical);
                }
                lastPeriodical = (PeriodicalItem) widget;
            }
        } else {
            content.add(widget);
        }
    }

    /**
     * Mouse handlers
     */
    @Override
    public void onMouseUp(MouseUpEvent event) {
        move = false;
        down = 0;
    }

    @Override
    public void onMouseMove(MouseMoveEvent event) {
        // Any movement cancels a pending delayed tooltip.
        if (hold != null) {
            hold.cancel();
            hold = null;
        }
        if (move) {
            // Dragging: shift all movable items by the horizontal delta.
            for (PeriodicPaintable item : paintable) {
                if (item instanceof PeriodicMovable) {
                    ((PeriodicMovable) item).move(event.getClientX() - down);
                }
            }
            down = event.getClientX();
            clearCanvas();
            paint();
        } else {
            int x = event.getRelativeX(this.getElement());
            int y = event.getRelativeY(this.getElement());
            clearCanvas();
            paint();
            // Only hover-test within the plotted area (right of the scale, above the axis).
            if (x > scaleAxis.verticalScaleWidth + 5 && x < width && y < scaleAxis.getActiveHeight()) {
                PeriodicPaintable periodicPaintable = null;
                // (removed unused local 'position')
                for (PeriodicPaintable item : paintable) {
                    // 10px hit slop on either side of the item's x position.
                    if (item.getPosition() > x - 10 && item.getPosition() < x + 10) {
                        periodicPaintable = item;
                        break;
                    }
                }
                // instanceof is false for null, so this also covers "nothing hit".
                if (!(periodicPaintable instanceof PeriodicalItem)) {
                    return;
                }
                if (periodicPaintable.getPosition() > x - 10 && periodicPaintable.getPosition() < x + 10) {
                    if (immediate) {
                        int length = ((PeriodicalItem) periodicPaintable).getTypeLength(event.getRelativeY(this.getElement()));
                        //paintTooltip(event.getRelativeX(this.getElement()), event.getRelativeY(this.getElement()), length);
                        paintTooltip((int) periodicPaintable.getPosition(), event.getRelativeY(this.getElement()), length);
                    } else {
                        // Delayed tooltip: show after hovering still for 250 ms.
                        int length = ((PeriodicalItem) periodicPaintable).getTypeLength(event.getRelativeY(this.getElement()));
                        hold = new DelayedTimer((int) periodicPaintable.getPosition(), event.getRelativeY(this.getElement()), length);
                        hold.schedule(250);
                    }
                }
            }
        }
    }

    /** Timer that paints the tooltip after the hover delay elapses. */
    private class DelayedTimer extends Timer {
        private int relativeX, relativeY, length;

        public DelayedTimer(int relativeX, int relativeY, int length) {
            this.length = length;
            this.relativeX = relativeX;
            this.relativeY = relativeY;
        }

        @Override
        public void run() {
            paintTooltip(relativeX, relativeY, length);
        }
    }

    /**
     * Draws a callout tooltip near (relativeX, relativeY), flipping its
     * direction near the right and top edges so it stays inside the widget.
     */
    private void paintTooltip(int relativeX, int relativeY, int length) {
        int directionX = 1;
        int directionY = 1;
        Context2d tooltip = tooltipCanvas.getContext2d();
        // FIX: measure the same string that is drawn below ("Period: "), not
        // "Length: " — otherwise the box width is computed from the wrong text.
        int tooltipTextWidth = (int) tooltip.measureText("Period: " + length).getWidth() + 15;
        if (relativeX >= (width - tooltipTextWidth)) {
            directionX = -1;   // flip left near the right edge
        }
        if (relativeY <= 15) {
            directionY = -1;   // flip down near the top edge
        }
        tooltip.moveTo(relativeX, relativeY);
        tooltip.beginPath();
        tooltip.lineTo(relativeX + (5 * directionX), relativeY - (5 * directionY));
        tooltip.lineTo(relativeX + (5 * directionX), relativeY - (15 * directionY));
        tooltip.lineTo(relativeX + (tooltipTextWidth * directionX), relativeY - (15 * directionY));
        tooltip.lineTo(relativeX + (tooltipTextWidth * directionX), relativeY - (3 * directionY));
        tooltip.lineTo(relativeX + (5 * directionX), relativeY - (3 * directionY));
        tooltip.lineTo(relativeX, relativeY);
        tooltip.closePath();
        tooltip.save();
        tooltip.setFillStyle(CssColor.make("LIGHTGREEN"));
        tooltip.stroke();
        tooltip.fill();
        tooltip.restore();
        int textOffset = -5;
        if (directionY == -1) {
            textOffset = 11;
        }
        if (directionX == -1) {
            directionX = (int) -(tooltipTextWidth * 0.1);
        }
        tooltip.fillText("Period: " + length, relativeX + (10 * directionX), relativeY + textOffset);
    }

    @Override
    public void onMouseDown(MouseDownEvent event) {
        move = true;
        down = event.getClientX();
    }

    @Override
    public void onMouseOut(MouseOutEvent event) {
        // Cancel any in-progress drag when the pointer leaves the widget.
        move = false;
        down = 0;
    }
}
package org.spongepowered.asm.mixin.injection.struct;

import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;

import net.minecraft.launchwrapper.Launch;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

/**
 * Maps obfuscated references per owning class. Backed by a two-level map:
 * class name -&gt; (original reference -&gt; remapped reference), typically
 * loaded from a JSON "refmap" resource on the classpath.
 */
public final class ReferenceMapper implements Serializable {

    private static final long serialVersionUID = 1L;

    /** Default classpath location of the refmap resource. */
    public static final String DEFAULT_RESOURCE = "mixin.refmap.json";

    /** class name -&gt; (reference -&gt; remapped reference) */
    private final Map<String, Map<String, String>> mappings;

    public ReferenceMapper() {
        this(new HashMap<String, Map<String, String>>());
    }

    public ReferenceMapper(Map<String, Map<String, String>> mappings) {
        this.mappings = mappings;
    }

    /**
     * Remaps a reference for the given owning class.
     *
     * @param className owning class, or null to search every class's mappings
     * @param reference the reference to remap
     * @return the remapped reference, or the input unchanged when no mapping exists
     */
    public String remap(String className, String reference) {
        if (className == null) {
            // No owning class supplied: return the first match from any class map.
            // (Renamed loop variable — it previously shadowed the 'mappings' field.)
            for (Map<String, String> classMap : this.mappings.values()) {
                if (classMap.containsKey(reference)) {
                    return classMap.get(reference);
                }
            }
        }
        Map<String, String> classMappings = this.mappings.get(className);
        if (classMappings == null) {
            return reference;
        }
        String remappedReference = classMappings.get(reference);
        return remappedReference != null ? remappedReference : reference;
    }

    /** Registers (or replaces) a mapping for the given class and reference. */
    public void addMapping(String className, String reference, String newReference) {
        Map<String, String> classMappings = this.mappings.get(className);
        if (classMappings == null) {
            classMappings = new HashMap<String, String>();
            this.mappings.put(className, classMappings);
        }
        classMappings.put(reference, newReference);
    }

    /** Serializes this mapper as pretty-printed JSON to the supplied writer. */
    public void write(Appendable writer) {
        new GsonBuilder().setPrettyPrinting().create().toJson(this, writer);
    }

    /**
     * Loads a mapper from a classpath resource; any failure (missing resource,
     * malformed JSON) yields an empty mapper rather than propagating.
     */
    public static ReferenceMapper read(String resource) {
        Reader reader = null;
        try {
            // FIX: decode explicitly as UTF-8 (the refmap is JSON) instead of the
            // platform default charset. getResourceAsStream may return null for a
            // missing resource; the resulting NPE is caught below like any other failure.
            reader = new InputStreamReader(Launch.classLoader.getResourceAsStream(resource), "UTF-8");
            // NOTE(review): Gson returns null for an empty stream, so this can
            // still return null to callers — confirm whether a guard is wanted here.
            return ReferenceMapper.read(reader);
        } catch (Exception ex) {
            return new ReferenceMapper();
        } finally {
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException ex) {
                    // don't really care
                }
            }
        }
    }

    /** Deserializes a mapper from a JSON character stream. */
    public static ReferenceMapper read(Reader reader) {
        return new Gson().fromJson(reader, ReferenceMapper.class);
    }
}
package org.spongepowered.common.mixin.core.inventory; import it.unimi.dsi.fastutil.ints.Int2ObjectArrayMap; import net.minecraft.entity.item.EntityItem; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.entity.player.EntityPlayerMP; import net.minecraft.entity.player.InventoryPlayer; import net.minecraft.inventory.ClickType; import net.minecraft.inventory.Container; import net.minecraft.inventory.IContainerListener; import net.minecraft.inventory.IInventory; import net.minecraft.inventory.InventoryCraftResult; import net.minecraft.inventory.InventoryCrafting; import net.minecraft.inventory.Slot; import net.minecraft.inventory.SlotCrafting; import net.minecraft.item.ItemStack; import net.minecraft.item.crafting.CraftingManager; import net.minecraft.item.crafting.IRecipe; import net.minecraft.network.play.server.SPacketSetSlot; import net.minecraft.util.NonNullList; import net.minecraft.world.World; import org.spongepowered.api.event.item.inventory.CraftItemEvent; import org.spongepowered.api.item.inventory.Carrier; import org.spongepowered.api.item.inventory.Inventory; import org.spongepowered.api.item.inventory.InventoryArchetype; import org.spongepowered.api.item.inventory.ItemStackSnapshot; import org.spongepowered.api.item.inventory.crafting.CraftingInventory; import org.spongepowered.api.item.inventory.crafting.CraftingOutput; import org.spongepowered.api.item.inventory.query.QueryOperationTypes; import org.spongepowered.api.item.inventory.transaction.SlotTransaction; import org.spongepowered.api.item.inventory.type.CarriedInventory; import org.spongepowered.api.item.recipe.crafting.CraftingRecipe; import org.spongepowered.api.util.annotation.NonnullByDefault; import org.spongepowered.api.world.Location; import org.spongepowered.asm.mixin.Mixin; import org.spongepowered.asm.mixin.Overwrite; import org.spongepowered.asm.mixin.Shadow; import org.spongepowered.asm.mixin.injection.At; import org.spongepowered.asm.mixin.injection.Inject; import 
org.spongepowered.asm.mixin.injection.Redirect; import org.spongepowered.asm.mixin.injection.callback.CallbackInfo; import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable; import org.spongepowered.common.SpongeImpl; import org.spongepowered.common.bridge.entity.player.EntityPlayerBridge; import org.spongepowered.common.bridge.inventory.ContainerBridge; import org.spongepowered.common.bridge.inventory.LensProviderBridge; import org.spongepowered.common.bridge.inventory.TrackedInventoryBridge; import org.spongepowered.common.bridge.item.inventory.InventoryAdapterBridge; import org.spongepowered.common.event.SpongeCommonEventFactory; import org.spongepowered.common.event.tracking.phase.packet.PacketPhaseUtil; import org.spongepowered.common.item.inventory.adapter.InventoryAdapter; import org.spongepowered.common.item.inventory.adapter.impl.SlotCollectionIterator; import org.spongepowered.common.item.inventory.adapter.impl.slots.SlotAdapter; import org.spongepowered.common.item.inventory.lens.Fabric; import org.spongepowered.common.item.inventory.lens.Lens; import org.spongepowered.common.item.inventory.lens.SlotProvider; import org.spongepowered.common.item.inventory.lens.impl.DefaultEmptyLens; import org.spongepowered.common.item.inventory.util.ContainerUtil; import org.spongepowered.common.item.inventory.util.ItemStackUtil; import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.function.Predicate; import javax.annotation.Nullable; @NonnullByDefault @Mixin(value = Container.class, priority = 998) public abstract class ContainerMixin implements ContainerBridge, InventoryAdapter, TrackedInventoryBridge, InventoryAdapterBridge { @Shadow public List<Slot> inventorySlots; @Shadow public NonNullList<ItemStack> inventoryItemStacks; @Shadow protected List<IContainerListener> listeners; @Shadow public abstract 
NonNullList<ItemStack> getInventory(); @Shadow public abstract Slot shadow$getSlot(int slotId); @Shadow public ItemStack slotClick(final int slotId, final int dragType, final ClickType clickTypeIn, final EntityPlayer player) { throw new IllegalStateException("Shadowed."); } private boolean impl$spectatorChest; private boolean impl$dropCancelled = false; private ItemStackSnapshot impl$itemStackSnapshot = ItemStackSnapshot.NONE; @Nullable private Slot impl$lastSlotUsed = null; @Nullable private CraftItemEvent.Craft impl$lastCraft = null; @Nullable private Location<org.spongepowered.api.world.World> impl$lastOpenLocation; private boolean impl$firePreview = true; private boolean impl$inUse = false; private boolean impl$captureSuccess = false; private boolean impl$captureInventory = false; private boolean impl$shiftCraft = false; //private boolean postPreCraftEvent = true; // used to prevent multiple craft events to fire when setting multiple slots simultaneously private List<SlotTransaction> impl$capturedSlotTransactions = new ArrayList<>(); private List<SlotTransaction> impl$capturedCraftShiftTransactions = new ArrayList<>(); private List<SlotTransaction> impl$capturedCraftPreviewTransactions = new ArrayList<>(); private boolean impl$isLensInitialized; @Nullable private Map<Integer, SlotAdapter> impl$adapters; @Nullable private InventoryArchetype impl$archetype; @Nullable private Carrier impl$carrier; @Nullable Predicate<EntityPlayer> impl$canInteractWithPredicate; @Nullable private LinkedHashMap<IInventory, Set<Slot>> impl$allInventories; @Nullable private ItemStack impl$previousCursor; @Override public SlotProvider bridge$generateSlotProvider() { return ContainerUtil.countSlots((Container) (Object) this, bridge$getFabric()); } @SuppressWarnings("ConstantConditions") @Override public Lens bridge$generateLens(SlotProvider slots) { if (this.impl$isLensInitialized) { return null; // Means that we've tried to generate a lens before, but it was null. 
// NOTE(review): fragment — tail of a lens-initialization method whose signature
// begins before this chunk. Code tokens preserved as-is.
        // And because the lens is null,
        // the generate will try again. So, we stop trying to generate it.
        }
        this.impl$isLensInitialized = true;
        final Fabric fabric = bridge$getFabric();
        final Lens lens;
        if (this.impl$spectatorChest) {
            lens = null;
        } else {
            if (this instanceof LensProviderBridge) {
                // TODO LensProviders for all Vanilla Containers
                lens = ((LensProviderBridge) this).bridge$rootLens(fabric, this);
            } else if (getInventory().size() == 0) {
                lens = new DefaultEmptyLens(this); // Empty Container
            } else {
                lens = ContainerUtil.generateLens((Container) (Object) this, slots);
            }
        }
        return lens;
    }

    // Lazily builds and caches the slot-index -> SlotAdapter map from the root lens.
    // Returns an empty (but non-null) map when no lens is available yet.
    @SuppressWarnings("ConstantConditions")
    private Map<Integer, SlotAdapter> impl$getAdapters() {
        if (this.impl$adapters == null) {
            this.impl$adapters = new Int2ObjectArrayMap<>();
            // If we know the lens, we can cache the adapters now
            final Lens lens = bridge$getRootLens();
            if (lens != null) {
                final SlotCollectionIterator iter = new SlotCollectionIterator((Inventory) this, bridge$getFabric(), lens, bridge$getSlotProvider());
                for (final org.spongepowered.api.item.inventory.Slot slot : iter) {
                    this.impl$adapters.put(((SlotAdapter) slot).slotNumber, (SlotAdapter) slot);
                }
            }
        }
        return this.impl$adapters;
    }

    // Lazily resolves and caches this container's InventoryArchetype.
    @Override
    public InventoryArchetype bridge$getArchetype() {
        if (this.impl$archetype == null) {
            this.impl$archetype = ContainerUtil.getArchetype((Container) (Object) this);
        }
        return this.impl$archetype;
    }

    // Lazily resolves and caches the Carrier; empty Optional when none exists.
    @Override
    public Optional<Carrier> bridge$getCarrier() {
        if (this.impl$carrier == null) {
            this.impl$carrier = ContainerUtil.getCarrier((org.spongepowered.api.item.inventory.Container) this);
        }
        return Optional.ofNullable(this.impl$carrier);
    }

    // Lazily groups this container's slots by their backing IInventory,
    // preserving slot iteration order via LinkedHashMap.
    @SuppressWarnings("unused")
    @Override
    public LinkedHashMap<IInventory, Set<Slot>> bridge$getInventories() {
        if (this.impl$allInventories == null) {
            this.impl$allInventories = new LinkedHashMap<>();
            this.inventorySlots.forEach(slot -> this.impl$allInventories.computeIfAbsent(slot.inventory, (i) -> new HashSet<>()).add(slot));
        }
        return this.impl$allInventories;
    }

    /**
     * @author bloodmc
     * @reason If listener already exists, avoid firing an exception
     * and simply send the inventory changes to client.
     */
    @Overwrite
    public void addListener(final IContainerListener listener) {
        final Container container = (Container) (Object) this;
        if (this.listeners.contains(listener)) {
            // Sponge start
            // Duplicate registration: vanilla would throw; instead just resync the client.
            listener.sendAllContents(container, this.getInventory());
            container.detectAndSendChanges();
            // Sponge end
        } else {
            this.listeners.add(listener);
            listener.sendAllContents(container, this.getInventory());
            container.detectAndSendChanges();
        }
    }

    /**
     * @author bloodmc
     * @reason All player fabric changes that need to be synced to
     * client flow through this method. Overwrite is used as no mod
     * should be touching this method.
     *
     */
    @Overwrite
    public void detectAndSendChanges() {
        this.bridge$detectAndSendChanges(false);
        this.impl$captureSuccess = true; // Detect mod overrides
    }

    // True when the vanilla detectAndSendChanges path above actually ran
    // (i.e. no mod replaced it) and capture is therefore possible.
    @Override
    public boolean bridge$capturePossible() {
        return this.impl$captureSuccess;
    }

    // Core diff loop: compares each slot's live stack against the cached copy,
    // records SlotTransactions when capturing, and (unless captureOnly) pushes
    // the change to all listeners.
    @Override
    public void bridge$detectAndSendChanges(final boolean captureOnly) {
        for (int i = 0; i < this.inventorySlots.size(); ++i) {
            final Slot slot = this.inventorySlots.get(i);
            final ItemStack itemstack = slot.getStack();
            ItemStack itemstack1 = this.inventoryItemStacks.get(i);
            if (!ItemStack.areItemStacksEqual(itemstack1, itemstack)) {
                // Sponge start
                if (this.impl$captureInventory) {
                    final ItemStackSnapshot originalItem = itemstack1.isEmpty() ? ItemStackSnapshot.NONE
                            : ((org.spongepowered.api.item.inventory.ItemStack) itemstack1).createSnapshot();
                    final ItemStackSnapshot newItem = itemstack.isEmpty() ? ItemStackSnapshot.NONE
                            : ((org.spongepowered.api.item.inventory.ItemStack) itemstack).createSnapshot();
                    org.spongepowered.api.item.inventory.Slot adapter = null;
                    try {
                        adapter = this.bridge$getContainerSlot(i);
                        SlotTransaction newTransaction = new SlotTransaction(adapter, originalItem, newItem);
                        if (this.impl$shiftCraft) {
                            this.impl$capturedCraftShiftTransactions.add(newTransaction);
                        } else {
                            if (!this.impl$capturedCraftPreviewTransactions.isEmpty()) {
                                // Check if Preview transaction is this transaction
                                final SlotTransaction previewTransaction = this.impl$capturedCraftPreviewTransactions.get(0);
                                if (previewTransaction.equals(newTransaction)) {
                                    newTransaction = null;
                                }
                            }
                            if (newTransaction != null) {
                                this.impl$capturedSlotTransactions.add(newTransaction);
                            }
                        }
                    } catch (IndexOutOfBoundsException e) {
                        SpongeImpl.getLogger().error("SlotIndex out of LensBounds! Did the Container change after creation?", e);
                    }
                    // This flag is set only when the client sends an invalid CPacketWindowClickItem packet.
                    // We simply capture in order to send the proper changes back to client.
                    if (captureOnly) {
                        continue;
                    }
                }
                // Sponge end
                itemstack1 = itemstack.copy();
                this.inventoryItemStacks.set(i, itemstack1);
                for (final IContainerListener listener : this.listeners) {
                    listener.sendSlotContents((Container) (Object) this, i, itemstack1);
                }
            }
        }
        this.impl$markClean();
    }

    // Hook for subclasses/mixins; intentionally a no-op here.
    protected void impl$markClean() {
    }

    // Adding a slot invalidates the cached lens, slot provider, and adapter map.
    @Inject(method = "addSlotToContainer", at = @At(value = "HEAD"))
    private void impl$onAddSlotToContainer(final Slot slotIn, final CallbackInfoReturnable<Slot> cir) {
        this.impl$isLensInitialized = false;
        // Reset the lense and slot provider
        bridge$setSlotProvider(null);
        bridge$setLens(null);
        this.impl$adapters = null;
    }

    // Records a SlotTransaction for direct putStackInSlot calls while capturing.
    @Inject(method = "putStackInSlot", at = @At(value = "HEAD") )
    private void impl$addTransaction(final int slotId, final ItemStack itemstack, final CallbackInfo ci) {
        if (this.impl$captureInventory) {
            final Slot slot = shadow$getSlot(slotId);
            if (slot != null) {
                final ItemStackSnapshot originalItem = slot.getStack().isEmpty() ? ItemStackSnapshot.NONE
                        : ((org.spongepowered.api.item.inventory.ItemStack) slot.getStack()).createSnapshot();
                final ItemStackSnapshot newItem = itemstack.isEmpty() ? ItemStackSnapshot.NONE
                        : ((org.spongepowered.api.item.inventory.ItemStack) itemstack).createSnapshot();
                final org.spongepowered.api.item.inventory.Slot adapter = this.bridge$getContainerSlot(slotId);
                this.impl$capturedSlotTransactions.add(new SlotTransaction(adapter, originalItem, newItem));
            }
        }
    }

    // If the drop was cancelled (dropItem returned null) and the player's
    // inventory should be restored, resync the cursor stack to the client.
    @Nullable
    @Redirect(method = "slotClick", at = @At(
            value = "INVOKE",
            target = "Lnet/minecraft/entity/player/EntityPlayer;dropItem(Lnet/minecraft/item/ItemStack;Z)Lnet/minecraft/entity/item/EntityItem;",
            ordinal = 0))
    private EntityItem impl$RestoreOnDrag(final EntityPlayer player, final ItemStack itemStackIn, final boolean unused) {
        final ItemStackSnapshot original = ItemStackUtil.snapshotOf(itemStackIn);
        final EntityItem entityItem = player.dropItem(itemStackIn, unused);
        if (!((EntityPlayerBridge) player).bridge$shouldRestoreInventory()) {
            return entityItem;
        }
        if (entityItem == null) {
            this.impl$dropCancelled = true;
            PacketPhaseUtil.handleCustomCursor((EntityPlayerMP) player, original);
        }
        return entityItem;
    }

    // Cancelled single-item (split) drop: put the item back on the cursor
    // (re-growing the remaining cursor stack when non-empty) and resync.
    @Redirect(method = "slotClick", at = @At(
            value = "INVOKE",
            target = "Lnet/minecraft/entity/player/EntityPlayer;dropItem(Lnet/minecraft/item/ItemStack;Z)Lnet/minecraft/entity/item/EntityItem;",
            ordinal = 1))
    @Nullable
    private EntityItem impl$restoreOnDragSplit(final EntityPlayer player, final ItemStack itemStackIn, final boolean unused) {
        final EntityItem entityItem = player.dropItem(itemStackIn, unused);
        if (!((EntityPlayerBridge) player).bridge$shouldRestoreInventory()) {
            return entityItem;
        }
        if (entityItem == null) {
            ItemStack original = null;
            if (player.inventory.getItemStack().isEmpty()) {
                original = itemStackIn;
            } else {
                player.inventory.getItemStack().grow(1);
                original = player.inventory.getItemStack();
            }
            player.inventory.setItemStack(original);
            // Window id -1 / slot -1 targets the cursor stack on the client.
            ((EntityPlayerMP) player).connection.sendPacket(new SPacketSetSlot(-1, -1, original));
        }
        ((EntityPlayerBridge) player).bridge$shouldRestoreInventory(false);
        return entityItem;
    }

    // Skips clearing the cursor when a cancelled drop already restored it.
    @Redirect(method = "slotClick", at = @At(
            value = "INVOKE",
            target = "Lnet/minecraft/entity/player/InventoryPlayer;setItemStack(Lnet/minecraft/item/ItemStack;)V",
            ordinal = 1))
    private void impl$ClearOnSlot(final InventoryPlayer inventoryPlayer, final ItemStack itemStackIn) {
        if (!this.impl$dropCancelled || !((EntityPlayerBridge) inventoryPlayer.player).bridge$shouldRestoreInventory()) {
            inventoryPlayer.setItemStack(itemStackIn);
        }
        ((EntityPlayerBridge) inventoryPlayer.player).bridge$shouldRestoreInventory(false);
        this.impl$dropCancelled = false;
    }

    // Snapshots the slot's stack before a "take" so a cancelled throw can restore it.
    @Redirect(method = "slotClick", at = @At(value = "INVOKE", target = "Lnet/minecraft/inventory/Slot;canTakeStack(Lnet/minecraft/entity/player/EntityPlayer;)Z", ordinal = 4))
    public boolean onCanTakeStack(final Slot slot, final EntityPlayer playerIn) {
        final boolean result = slot.canTakeStack(playerIn);
        if (result) {
            this.impl$itemStackSnapshot = ItemStackUtil.snapshotOf(slot.getStack());
            this.impl$lastSlotUsed = slot;
        } else {
            this.impl$itemStackSnapshot = ItemStackSnapshot.NONE;
            this.impl$lastSlotUsed = null;
        }
        return result;
    }

    // Cancelled throw-click: restore the snapshotted stack into the last used
    // slot and resync that slot to the client.
    @Nullable
    @Redirect(method = "slotClick", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/EntityPlayer;dropItem(Lnet/minecraft/item/ItemStack;Z)Lnet/minecraft/entity/item/EntityItem;", ordinal = 3))
    private EntityItem onThrowClick(final EntityPlayer player, final ItemStack itemStackIn, final boolean unused) {
        final EntityItem entityItem = player.dropItem(itemStackIn, true);
        if (entityItem == null && ((EntityPlayerBridge) player).bridge$shouldRestoreInventory()) {
            final ItemStack original = ItemStackUtil.toNative(this.impl$itemStackSnapshot.createStack());
            this.impl$lastSlotUsed.putStack(original);
            player.openContainer.detectAndSendChanges();
            ((EntityPlayerMP) player).isChangingQuantityOnly = false;
            ((EntityPlayerMP) player).connection.sendPacket(new SPacketSetSlot(player.openContainer.windowId, this.impl$lastSlotUsed.slotNumber, original));
        }
        this.impl$itemStackSnapshot = ItemStackSnapshot.NONE;
        this.impl$lastSlotUsed = null;
        ((EntityPlayerBridge) player).bridge$shouldRestoreInventory(false);
        return entityItem;
    }

    // Captures the crafting-result slot change as a preview transaction.
    @Redirect(method = "slotChangedCraftingGrid", at = @At(
            value = "INVOKE",
            target = "Lnet/minecraft/inventory/InventoryCraftResult;setInventorySlotContents(ILnet/minecraft/item/ItemStack;)V"))
    private void beforeSlotChangedCraftingGrid(final InventoryCraftResult output, final int index, final ItemStack itemstack) {
        if (!this.impl$captureInventory) {
            // Capture Inventory is true when caused by a vanilla inventory packet
            // This is to prevent infinite loops when a client mod re-requests the recipe result after we modified/cancelled it
            output.setInventorySlotContents(index, itemstack);
            return;
        }
        this.impl$capturedCraftPreviewTransactions.clear();
        final ItemStackSnapshot orig = ItemStackUtil.snapshotOf(output.getStackInSlot(index));
        output.setInventorySlotContents(index, itemstack);
        final ItemStackSnapshot repl = ItemStackUtil.snapshotOf(output.getStackInSlot(index));
        final SlotAdapter slot = this.impl$getAdapters().get(index);
        this.impl$capturedCraftPreviewTransactions.add(new SlotTransaction(slot, orig, repl));
    }

    // Fires the CraftItemEvent.Preview (via callCraftEventPre) for the captured
    // preview transaction just before the result packet is sent.
    @Inject(method = "slotChangedCraftingGrid", cancellable = true,
            at = @At(value = "INVOKE", target = "Lnet/minecraft/network/NetHandlerPlayServer;sendPacket(Lnet/minecraft/network/Packet;)V"))
    private void afterSlotChangedCraftingGrid(
            final World world, final EntityPlayer player, final InventoryCrafting craftingInventory, final InventoryCraftResult output, final CallbackInfo ci) {
        if (this.impl$firePreview && !this.impl$capturedCraftPreviewTransactions.isEmpty()) {
            final Inventory inv = ((CarriedInventory<?>) this).query(QueryOperationTypes.INVENTORY_TYPE.of(CraftingInventory.class));
            if (!(inv instanceof CraftingInventory)) {
                SpongeImpl.getLogger().warn("Detected crafting but Sponge could not get a CraftingInventory for " + this.getClass().getName());
                return;
            }
            final SlotTransaction previewTransaction = this.impl$capturedCraftPreviewTransactions.get(this.impl$capturedCraftPreviewTransactions.size() - 1);
            final IRecipe recipe = CraftingManager.findMatchingRecipe(craftingInventory, world);
            SpongeCommonEventFactory.callCraftEventPre(player, ((CraftingInventory) inv), previewTransaction, ((CraftingRecipe) recipe),
                    ((Container)(Object) this), this.impl$capturedCraftPreviewTransactions);
            this.impl$capturedCraftPreviewTransactions.clear();
        }
    }

    // Cursor stack as it was before the current take/craft interaction.
    @Override
    public ItemStack bridge$getPreviousCursor() {
        return this.impl$previousCursor;
    }

    @Inject(method = "slotClick", at = @At(value = "INVOKE", target = "Lnet/minecraft/item/ItemStack;grow(I)V", ordinal = 1))
    private void beforeOnTakeClickWithItem(
            final int slotId, final int dragType, final ClickType clickTypeIn, final EntityPlayer player, final CallbackInfoReturnable<Integer> cir) {
        this.impl$previousCursor = player.inventory.getItemStack().copy(); // capture previous cursor for CraftItemEvent.Craft
    }

    @Inject(method = "slotClick", at = @At(value = "INVOKE", target = "Lnet/minecraft/entity/player/InventoryPlayer;setItemStack(Lnet/minecraft/item/ItemStack;)V", ordinal = 3))
    private void beforeOnTakeClick(
            final int slotId, final int dragType, final ClickType clickTypeIn, final EntityPlayer player, final CallbackInfoReturnable<Integer> cir) {
        this.impl$previousCursor = player.inventory.getItemStack().copy(); // capture previous cursor for CraftItemEvent.Craft
    }

    // If the craft event set during onTake was cancelled, zero the cursor stack
    // so the crafted item is not thrown out.
    @Redirect(method = "slotClick", at = @At(value = "INVOKE", target = "Lnet/minecraft/inventory/Slot;onTake(Lnet/minecraft/entity/player/EntityPlayer;Lnet/minecraft/item/ItemStack;)Lnet/minecraft/item/ItemStack;", ordinal = 5))
    private ItemStack redirectOnTakeThrow(final Slot slot, final EntityPlayer player, final ItemStack stackOnCursor) {
        this.impl$lastCraft = null;
        final ItemStack result = slot.onTake(player, stackOnCursor);
        if (this.impl$lastCraft != null) {
            if (slot instanceof SlotCrafting) {
                if (this.impl$lastCraft.isCancelled()) {
                    stackOnCursor.setCount(0); // do not drop crafted item when cancelled
                }
            }
        }
        return result;
    }

    @Inject(method = "slotClick", at = @At("RETURN"))
    private void onReturn(final int slotId, final int dragType, final ClickType clickTypeIn, final EntityPlayer player, final CallbackInfoReturnable<ItemStack> cir) {
        // Reset variables needed for CraftItemEvent.Craft
        this.impl$lastCraft = null;
        this.impl$previousCursor = null;
    }

    // Shift-click crafting: flags impl$shiftCraft around the transfer, aborts the
    // repeat-craft loop when the craft event was cancelled, and seeds a preview
    // transaction for the crafting output slot when none was captured.
    @Redirect(method = "slotClick", at = @At(value = "INVOKE", target = "Lnet/minecraft/inventory/Container;transferStackInSlot(Lnet/minecraft/entity/player/EntityPlayer;I)Lnet/minecraft/item/ItemStack;"))
    private ItemStack redirectTransferStackInSlot(final Container thisContainer, final EntityPlayer player, final int slotId) {
        final Slot slot = thisContainer.getSlot(slotId);
        if (!(slot instanceof SlotCrafting)) {
            return thisContainer.transferStackInSlot(player, slotId);
        }
        this.impl$lastCraft = null;
        this.impl$shiftCraft = true;
        ItemStack result = thisContainer.transferStackInSlot(player, slotId);
        if (this.impl$lastCraft != null) {
            if (this.impl$lastCraft.isCancelled()) {
                result = ItemStack.EMPTY; // Return empty to stop shift-crafting
            }
        }
        final Inventory craftInv = ((Inventory) thisContainer).query(QueryOperationTypes.INVENTORY_TYPE.of(CraftingInventory.class));
        if (craftInv instanceof CraftingInventory) {
            List<SlotTransaction> previewTransactions = ((ContainerBridge) thisContainer).bridge$getPreviewTransactions();
            if (previewTransactions.isEmpty()) {
                final CraftingOutput outSlot = ((CraftingInventory) craftInv).getResult();
                final SlotTransaction st = new SlotTransaction(outSlot, ItemStackSnapshot.NONE,
                        ItemStackUtil.snapshotOf(outSlot.peek().orElse(org.spongepowered.api.item.inventory.ItemStack.empty())));
                previewTransactions.add(st);
            }
        }
        this.impl$shiftCraft = false;
        return result;
    }

    // --- simple bridge accessors for capture/craft state below ---

    @Override
    public boolean bridge$capturingInventory() {
        return this.impl$captureInventory;
    }

    @Override
    public void bridge$setCaptureInventory(final boolean flag) {
        this.impl$captureInventory = flag;
    }

    @Override
    public void bridge$setSpectatorChest(final boolean spectatorChest) {
        this.impl$spectatorChest = spectatorChest;
    }

    @Override
    public List<SlotTransaction> bridge$getCapturedSlotTransactions() {
        return this.impl$capturedSlotTransactions;
    }

    @Override
    public List<SlotTransaction> bridge$getPreviewTransactions() {
        return this.impl$capturedCraftPreviewTransactions;
    }

    @Override
    public void bridge$setLastCraft(final CraftItemEvent.Craft event) {
        this.impl$lastCraft = event;
    }

    @Override
    public void bridge$setFirePreview(final boolean firePreview) {
        this.impl$firePreview = firePreview;
    }

    @Override
    public void bridge$setShiftCrafting(final boolean flag) {
        this.impl$shiftCraft = flag;
    }

    @Override
    public boolean bridge$isShiftCrafting() {
        return this.impl$shiftCraft;
    }

    @Override
    public void bridge$setCanInteractWith(@Nullable final Predicate<EntityPlayer> predicate) {
        this.impl$canInteractWithPredicate = predicate;
    }

    // Resolves a container slot index to its Sponge Slot adapter, falling back
    // to the raw vanilla slot when the lens does not cover the index.
    @Override
    public org.spongepowered.api.item.inventory.Slot bridge$getContainerSlot(final int slot) {
        final org.spongepowered.api.item.inventory.Slot adapter = this.impl$getAdapters().get(slot);
        if (adapter == null) // Slot is not in Lens
        {
            if (slot >= this.inventorySlots.size()) {
                SpongeImpl.getLogger().warn("Could not find slot #{} in Container {}", slot, getClass().getName());
                return null;
            }
            final Slot mcSlot = this.inventorySlots.get(slot); // Try falling back to vanilla slot
            if (mcSlot == null) {
                SpongeImpl.getLogger().warn("Could not find slot #{} in Container {}", slot, getClass().getName());
                return null;
            }
            return ((org.spongepowered.api.item.inventory.Slot) mcSlot);
        }
        return adapter;
    }

    @Override
    public Location<org.spongepowered.api.world.World> bridge$getOpenLocation() {
        return this.impl$lastOpenLocation;
    }

    @Override
    public void bridge$setOpenLocation(final Location<org.spongepowered.api.world.World> loc) {
        this.impl$lastOpenLocation = loc;
    }

    @Override
    public void bridge$setInUse(final boolean inUse) {
        this.impl$inUse = inUse;
    }

    @Override
    public boolean bridge$isInUse() {
        return this.impl$inUse;
    }
}
package uk.ac.ebi.ddi.similarityCalculator.utils;

/*
 * @author Gaurhari Dass gdass@ebi.ac.uk
 *
 */

/**
 * String constants naming the data domains/resources used by the similarity
 * calculator.
 *
 * <p>Fix: removed a stray {@code ,} after the {@code METABOLIGHTS} declaration
 * which made the class fail to compile.</p>
 */
public class SimilarityConstants {

    /** The "omics" domain name. */
    public static final String OMICS_DOMAIN = "omics";

    /** The "atlas-genes" resource name. */
    public static final String ATLAS_GENES = "atlas-genes";

    /** The "atlas-genes-differential" resource name. */
    public static final String ATLAS_GENES_DIFFERENTIAL = "atlas-genes-differential";

    /** The "metabolights" resource name. */
    public static final String METABOLIGHTS = "metabolights";

    // Constants holder — not meant to be instantiated.
    private SimilarityConstants() {
    }
}
package uk.ac.ebi.phenotype.service;

import net.sf.json.JSONArray;
import net.sf.json.JSONObject;
import org.apache.commons.lang.StringUtils;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.HttpSolrServer;
import org.apache.solr.client.solrj.response.FacetField.Count;
import org.apache.solr.client.solrj.response.Group;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import uk.ac.ebi.generic.util.JSONRestUtil;
import uk.ac.ebi.phenotype.analytics.bean.AggregateCountXYBean;
import uk.ac.ebi.phenotype.dao.PhenotypePipelineDAO;
import uk.ac.ebi.phenotype.pojo.*;
import uk.ac.ebi.phenotype.service.dto.GenotypePhenotypeDTO;
import uk.ac.ebi.phenotype.util.PhenotypeFacetResult;
import uk.ac.ebi.phenotype.web.controller.OverviewChartsController;
import uk.ac.ebi.phenotype.web.pojo.BasicBean;
import uk.ac.ebi.phenotype.web.pojo.GeneRowForHeatMap;
import uk.ac.ebi.phenotype.web.pojo.HeatMapCell;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;

/**
 * Abstract base service that answers genotype-phenotype questions by querying a
 * Solr core (via SolrJ and, for some methods, raw HTTP/JSON requests).
 */
public abstract class AbstractGenotypePhenotypeService extends BasicService {

    // DAO used to resolve pipeline Parameter objects by stable id.
    protected PhenotypePipelineDAO pipelineDAO;

    // Handle to the genotype-phenotype Solr core queried by every method here.
    protected HttpSolrServer solr;

    // Flag forwarded to createPhenotypeResultFromSolrResponse(..) — presumably
    // distinguishes the pre-QC core from the post-QC one; TODO confirm with subclasses.
    protected Boolean isPreQc;

    /**
     * @param zygosity - optional (pass null if not needed)
     * @return Map <String, Long> : <top_level_mp_name, number_of_annotations>
     * @author tudose
     */
    public Map <String, Long> getDistributionOfAnnotationsByMPTopLevel(ZygosityType zygosity){

        SolrQuery query = new SolrQuery();

        // Restrict to the requested zygosity when given; otherwise match everything.
        if (zygosity != null){
            query.setQuery(GenotypePhenotypeDTO.ZYGOSITY + ":" + zygosity.getName());
        } else {
            query.setQuery("*:*");
        }

        // Facet-only query: no documents returned, unlimited facet values.
        query.setFacet(true);
        query.setFacetLimit(-1);
        query.setRows(0);
        query.addFacetField(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_NAME);

        try {
            QueryResponse response = solr.query(query);
            return
getFacets(response).get(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_NAME); } catch (SolrServerException e) { e.printStackTrace(); } return null; } public List<AggregateCountXYBean> getAggregateCountXYBean(Map <String, Long> map ){ List<AggregateCountXYBean> res = new ArrayList<>(); for (String key : map.keySet()){ // new AggregateCountXYBean(aggregateCount, xValue, xName, xAttribute, yValue, yName, yAttribute); AggregateCountXYBean bean = new AggregateCountXYBean( Integer.getInteger(map.get(key).toString()), key, key, "xAttribute", "Total", "Total", "yAttribute"); res.add(bean); } return res; } /** * Returns a list of a all colonies * * @param phenotypeResourceName * @return * @throws SolrServerException */ public List<GenotypePhenotypeDTO> getAllMPByPhenotypingCenterAndColonies(String phenotypeResourceName, String mpTermAcc, String mpTermName) throws SolrServerException { List<String> fields = Arrays.asList(GenotypePhenotypeDTO.PHENOTYPING_CENTER, mpTermAcc, mpTermName, GenotypePhenotypeDTO.COLONY_ID, GenotypePhenotypeDTO.MARKER_SYMBOL, GenotypePhenotypeDTO.MARKER_ACCESSION_ID); SolrQuery query = new SolrQuery() .setQuery("*:*") .addFilterQuery(GenotypePhenotypeDTO.RESOURCE_NAME + ":" + phenotypeResourceName) .setRows(MAX_NB_DOCS) .setFields(StringUtils.join(fields, ",")); QueryResponse response = solr.query(query); return response.getBeans(GenotypePhenotypeDTO.class); // SolrQuery query = new SolrQuery().setQuery("*:*").addFilterQuery(GenotypePhenotypeDTO.RESOURCE_NAME + ":" + phenotypeResourceName).setRows(MAX_NB_DOCS).setFields(GenotypePhenotypeDTO.PHENOTYPING_CENTER + "," + mpTermAcc + "," + mpTermName + "," + GenotypePhenotypeDTO.COLONY_ID + "," + GenotypePhenotypeDTO.MARKER_SYMBOL + "," + GenotypePhenotypeDTO.MARKER_ACCESSION_ID); // QueryResponse response = solr.query(query); // SolrDocumentList results = response.getResults(); // List<Map<String, String>> lmap = new ArrayList<Map<String, String>>(); // for (SolrDocument doc : results) { // String 
phenotypingCenter = (String) doc.getFieldValue(GenotypePhenotypeDTO.PHENOTYPING_CENTER); // String colonyID = (String) doc.getFieldValue(GenotypePhenotypeDTO.COLONY_ID); // String markerSymbol = (String) doc.getFieldValue(GenotypePhenotypeDTO.MARKER_SYMBOL); // String markerAccession = (String) doc.getFieldValue(GenotypePhenotypeDTO.MARKER_ACCESSION_ID); // if (mpTermAcc.equals(GenotypePhenotypeDTO.MP_TERM_ID)) { // Map<String, String> r = new HashMap<String, String>(); // r.put(GenotypePhenotypeDTO.PHENOTYPING_CENTER, phenotypingCenter); // r.put(GenotypePhenotypeDTO.COLONY_ID, colonyID); // r.put(GenotypePhenotypeDTO.MARKER_SYMBOL, markerSymbol); // r.put(GenotypePhenotypeDTO.MARKER_ACCESSION_ID, markerAccession); // r.put(mpTermAcc, (String) doc.getFieldValue(GenotypePhenotypeDTO.MP_TERM_ID)); // r.put(mpTermName, (String) doc.getFieldValue(GenotypePhenotypeDTO.MP_TERM_NAME)); // lmap.add(r); // } else { // ArrayList<String> mpTermIds = (ArrayList<String>) doc.getFieldValue(mpTermAcc); // ArrayList<String> mpTermNames = (ArrayList<String>) doc.getFieldValue(mpTermName); // for (int i = 0; i < mpTermIds.size(); i++) { // Map<String, String> r = new HashMap<String, String>(); // r.put(GenotypePhenotypeDTO.PHENOTYPING_CENTER, phenotypingCenter); // r.put(GenotypePhenotypeDTO.COLONY_ID, colonyID); // r.put(GenotypePhenotypeDTO.MARKER_SYMBOL, markerSymbol); // r.put(GenotypePhenotypeDTO.MARKER_ACCESSION_ID, markerAccession); // r.put(mpTermAcc, mpTermIds.get(i)); // r.put(mpTermName, mpTermNames.get(i)); // lmap.add(r); // return lmap; } /** * * @param mpId * @return List of parameters that led to at least one association to the * given parameter or some class in its subtree * @throws SolrServerException * @author tudose */ public ArrayList<Parameter> getParametersForPhenotype(String mpId) throws SolrServerException { ArrayList<Parameter> res = new ArrayList<>(); SolrQuery q = new SolrQuery().setQuery("(" + GenotypePhenotypeDTO.MP_TERM_ID + ":\"" + mpId + "\" OR " + 
GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID + ":\"" + mpId + "\" OR " + GenotypePhenotypeDTO.INTERMEDIATE_MP_TERM_ID + ":\"" + mpId
                + "\") AND (" + GenotypePhenotypeDTO.STRAIN_ACCESSION_ID + ":\""
                + StringUtils.join(OverviewChartsController.OVERVIEW_STRAINS, "\" OR " + GenotypePhenotypeDTO.STRAIN_ACCESSION_ID + ":\"") + "\")").setRows(0);
        // Facet-only query on parameter stable id; unlimited values, non-empty only.
        q.set("facet.field", "" + GenotypePhenotypeDTO.PARAMETER_STABLE_ID);
        q.set("facet", true);
        q.set("facet.limit", -1);
        q.set("facet.mincount", 1);

        QueryResponse response = solr.query(q);

        for (Count parameter : response.getFacetField(GenotypePhenotypeDTO.PARAMETER_STABLE_ID).getValues()) {
            // fill genes for each of them
            // if (parameter.getCount() > 0){
            res.add(pipelineDAO.getParameterByStableId(parameter.getName()));
        }
        return res;
    }

    // Groups documents matching the MP term (at any ontology level, restricted to
    // the overview strains) by marker symbol; optional sex filter.
    public List<Group> getGenesBy(String mpId, String sex) throws SolrServerException {

        // males only
        SolrQuery q = new SolrQuery().setQuery("(" + GenotypePhenotypeDTO.MP_TERM_ID + ":\"" + mpId + "\" OR " + GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID
                + ":\"" + mpId + "\" OR " + GenotypePhenotypeDTO.INTERMEDIATE_MP_TERM_ID + ":\"" + mpId + "\") AND ("
                + GenotypePhenotypeDTO.STRAIN_ACCESSION_ID + ":\""
                + StringUtils.join(OverviewChartsController.OVERVIEW_STRAINS, "\" OR " + GenotypePhenotypeDTO.STRAIN_ACCESSION_ID + ":\"")
                + "\")").setRows(10000);
        q.set("group.field", "" + GenotypePhenotypeDTO.MARKER_SYMBOL);
        q.set("group", true);
        q.set("group.limit", 0);

        if (sex != null) {
            q.addFilterQuery(GenotypePhenotypeDTO.SEX + ":" + sex);
        }

        QueryResponse results = solr.query(q);

        return results.getGroupResponse().getValues().get(0).getValues();
    }

    // Distinct marker accession ids associated with the given MP term (any level)
    // AND the given parameter, restricted to the overview strains.
    // NOTE(review): setRows(-1) is unusual for Solr — confirm the server treats it
    // as "unlimited" rather than an error.
    public List<String> getGenesAssocByParamAndMp(String parameterStableId, String phenotype_id) throws SolrServerException {

        List<String> res = new ArrayList<String>();
        SolrQuery query = new SolrQuery().setQuery("(" + GenotypePhenotypeDTO.MP_TERM_ID + ":\"" + phenotype_id + "\" OR "
                + GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID + ":\"" + phenotype_id + "\" OR " + GenotypePhenotypeDTO.INTERMEDIATE_MP_TERM_ID + ":\""
                + phenotype_id + "\") AND (" + GenotypePhenotypeDTO.STRAIN_ACCESSION_ID + ":\""
                + StringUtils.join(OverviewChartsController.OVERVIEW_STRAINS, "\" OR " + GenotypePhenotypeDTO.STRAIN_ACCESSION_ID + ":\"")
                + "\") AND " + GenotypePhenotypeDTO.PARAMETER_STABLE_ID + ":\"" + parameterStableId + "\"").setRows(-1);
        query.set("group.field", GenotypePhenotypeDTO.MARKER_ACCESSION_ID);
        query.set("group", true);

        List<Group> groups = solr.query(query).getGroupResponse().getValues().get(0).getValues();
        for (Group gr : groups) {
            if (!res.contains((String) gr.getGroupValue())) {
                res.add((String) gr.getGroupValue());
            }
        }
        return res;
    }

    /**
     * Returns a set of MARKER_ACCESSION_ID strings of all genes that have
     * phenotype associations.
     *
     * @return a set of MARKER_ACCESSION_ID strings of all genes that have
     * phenotype associations.
     * @throws SolrServerException
     */
    public Set<String> getAllGenesWithPhenotypeAssociations() throws SolrServerException {

        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setQuery(GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":*");
        solrQuery.setRows(1000000);
        solrQuery.setFields(GenotypePhenotypeDTO.MARKER_ACCESSION_ID);
        QueryResponse rsp = null;
        rsp = solr.query(solrQuery);
        SolrDocumentList res = rsp.getResults();
        HashSet<String> allGenes = new HashSet<String>();
        for (SolrDocument doc : res) {
            allGenes.add((String) doc.getFieldValue(GenotypePhenotypeDTO.MARKER_ACCESSION_ID));
        }
        return allGenes;
    }

    /**
     * Returns a set of MP_TERM_ID strings of all phenotypes that have gene
     * associations.
     *
     * @return a set of MP_TERM_ID strings of all phenotypes that have gene
     * associations.
     * @throws SolrServerException
     */
    public Set<String> getAllPhenotypesWithGeneAssociations() throws SolrServerException {

        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setQuery(GenotypePhenotypeDTO.MP_TERM_ID + ":*");
        solrQuery.setRows(1000000);
        solrQuery.setFields(GenotypePhenotypeDTO.MP_TERM_ID);
        QueryResponse rsp = solr.query(solrQuery);
        SolrDocumentList res = rsp.getResults();
        HashSet<String> allPhenotypes = new HashSet<String>();
        for (SolrDocument doc : res) {
            allPhenotypes.add((String) doc.getFieldValue(GenotypePhenotypeDTO.MP_TERM_ID));
        }
        return allPhenotypes;
    }

    /**
     * Returns a set of MP_TERM_ID strings of all top-level phenotypes.
     *
     * @return a set of MP_TERM_ID strings of all top-level phenotypes.
     * @throws SolrServerException
     */
    public Set<String> getAllTopLevelPhenotypes() throws SolrServerException {

        SolrQuery solrQuery = new SolrQuery();
        solrQuery.setQuery(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID + ":*");
        solrQuery.setRows(1000000);
        solrQuery.setFields(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID);
        QueryResponse rsp = solr.query(solrQuery);
        SolrDocumentList res = rsp.getResults();
        HashSet<String> allTopLevelPhenotypes = new HashSet<String>();
        for (SolrDocument doc : res) {
            // Top-level term id is a multi-valued field.
            ArrayList<String> ids = (ArrayList<String>) doc.getFieldValue(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID);
            for (String id : ids) {
                allTopLevelPhenotypes.add(id);
            }
        }
        return allTopLevelPhenotypes;
    }

    /**
     * Returns a set of MP_TERM_ID strings of all intermediate-level phenotypes.
     *
     * @return a set of MP_TERM_ID strings of all intermediate-level phenotypes.
* @throws SolrServerException */ public Set<String> getAllIntermediateLevelPhenotypes() throws SolrServerException { SolrQuery solrQuery = new SolrQuery(); solrQuery.setQuery(GenotypePhenotypeDTO.INTERMEDIATE_MP_TERM_ID + ":*"); solrQuery.setRows(1000000); solrQuery.setFields(GenotypePhenotypeDTO.INTERMEDIATE_MP_TERM_ID); QueryResponse rsp = solr.query(solrQuery); SolrDocumentList res = rsp.getResults(); HashSet<String> allIntermediateLevelPhenotypes = new HashSet<String>(); for (SolrDocument doc : res) { ArrayList<String> ids = (ArrayList<String>) doc.getFieldValue(GenotypePhenotypeDTO.INTERMEDIATE_MP_TERM_ID); for (String id : ids) { allIntermediateLevelPhenotypes.add(id); } } return allIntermediateLevelPhenotypes; } /* * Methods used by PhenotypeSummaryDAO */ public SolrDocumentList getPhenotypesForTopLevelTerm(String gene, String mpID, ZygosityType zygosity) throws SolrServerException { String query; if (gene.equalsIgnoreCase("*")) { query = GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":" + gene + " AND "; } else { query = GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":\"" + gene + "\" AND "; } SolrQuery solrQuery = new SolrQuery(); solrQuery.setQuery(query+GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID + ":\"" + mpID + "\""); solrQuery.setRows(1000000); if (zygosity != null){ solrQuery.setFilterQueries(GenotypePhenotypeDTO.ZYGOSITY + ":" + zygosity.getName() ); } SolrDocumentList result = solr.query(solrQuery).getResults(); // mpID might be in mp_id instead of top level field if (result.size() == 0 || result == null) // result = runQuery("marker_accession_id:" + gene.replace(":", // "\\:") + " AND mp_term_id:" + mpID.replace(":", "\\:")); result = runQuery(query + GenotypePhenotypeDTO.MP_TERM_ID + ":\"" + mpID + "\"");// AND // -" + GenotypePhenotypeDTO.RESOURCE_NAME + ":IMPC"); return result; } public SolrDocumentList getPhenotypes(String gene) throws SolrServerException { SolrDocumentList result = runQuery(GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":\"" + gene + 
"\""); return result; } private SolrDocumentList runQuery(String q) throws SolrServerException { SolrQuery solrQuery = new SolrQuery().setQuery(q); solrQuery.setRows(1000000); QueryResponse rsp = null; rsp = solr.query(solrQuery); return rsp.getResults(); } public HashMap<String, String> getTopLevelMPTerms(String gene, ZygosityType zyg) throws SolrServerException { HashMap<String, String> tl = new HashMap<String, String>(); SolrQuery query = new SolrQuery(); if (gene.equalsIgnoreCase("*")) { query.setQuery(GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":" + gene); } else { query.setQuery(GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":\"" + gene + "\""); } query.setRows(10000000); if (zyg != null){ query.setFilterQueries(GenotypePhenotypeDTO.ZYGOSITY + ":" + zyg.getName()); } SolrDocumentList result = solr.query(query).getResults(); if (result.size() > 0) { for (int i = 0; i < result.size(); i++) { SolrDocument doc = result.get(i); if (doc.getFieldValue(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID) != null) { ArrayList<String> tlTermIDs = (ArrayList<String>) doc.getFieldValue(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID); ArrayList<String> tlTermNames = (ArrayList<String>) doc.getFieldValue(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_NAME); int len = tlTermIDs.size(); for (int k = 0; k < len; k++) { tl.put(tlTermIDs.get(k), tlTermNames.get(k)); } } else {// it seems that when the term id is a top level term // itself the top level term field tl.put((String) doc.getFieldValue(GenotypePhenotypeDTO.MP_TERM_ID), (String) doc.getFieldValue(GenotypePhenotypeDTO.MP_TERM_NAME)); } } } return tl; } /* * End of Methods for PhenotypeSummaryDAO */ /* * Methods for PipelineSolrImpl */ public Parameter getParameterByStableId(String paramStableId, String queryString) throws IOException, URISyntaxException { String solrUrl = solr.getBaseURL() + "/select/?q=" + GenotypePhenotypeDTO.PARAMETER_STABLE_ID + ":\"" + paramStableId + "\"&rows=10000000&version=2.2&start=0&indent=on&wt=json"; if 
(queryString.startsWith("&")) { solrUrl += queryString; } else {// add an ampersand parameter splitter if not one as we need // one to add to the already present solr query string solrUrl += "&" + queryString; } return createParameter(solrUrl); } private Parameter createParameter(String url) throws IOException, URISyntaxException { Parameter parameter = new Parameter(); JSONObject results = null; results = JSONRestUtil.getResults(url); JSONArray docs = results.getJSONObject("response").getJSONArray("docs"); for (Object doc : docs) { JSONObject paramDoc = (JSONObject) doc; String isDerivedInt = paramDoc.getString("parameter_derived"); boolean derived = false; if (isDerivedInt.equals("true")) { derived = true; } parameter.setDerivedFlag(derived); parameter.setName(paramDoc.getString(GenotypePhenotypeDTO.PARAMETER_NAME)); // we need to set is derived in the solr core! // pipeline core parameter_derived field parameter.setStableId(paramDoc.getString("" + GenotypePhenotypeDTO.PARAMETER_STABLE_ID + "")); if (paramDoc.containsKey(GenotypePhenotypeDTO.PROCEDURE_STABLE_KEY)) { parameter.setStableKey(Integer.parseInt(paramDoc.getString(GenotypePhenotypeDTO.PROCEDURE_STABLE_KEY))); } } return parameter; } /* * End of method for PipelineSolrImpl */ /* * Methods used by PhenotypeCallSummarySolrImpl */ public List<? 
extends StatisticalResult> getStatsResultFor(String accession, String parameterStableId, ObservationType observationType, String strainAccession, String alleleAccession) throws IOException, URISyntaxException { String solrUrl = solr.getBaseURL(); solrUrl += "/select/?q=" + GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":\"" + accession + "\"" + "&fq=" + GenotypePhenotypeDTO.PARAMETER_STABLE_ID + ":" + parameterStableId + "&fq=" + GenotypePhenotypeDTO.STRAIN_ACCESSION_ID + ":\"" + strainAccession + "\"" + "&fq=" + GenotypePhenotypeDTO.ALLELE_ACCESSION_ID + ":\"" + alleleAccession + "\"&rows=10000000&version=2.2&start=0&indent=on&wt=json"; System.out.println("solr url for stats results=" + solrUrl); List<? extends StatisticalResult> statisticalResult = this.createStatsResultFromSolr(solrUrl, observationType); return statisticalResult; } /** * Returns a PhenotypeFacetResult object given a list of genes * @param genomicFeatures list of marker accession * @return * @throws IOException * @throws URISyntaxException */ public PhenotypeFacetResult getPhenotypeFacetResultByGenomicFeatures(Set<String> genomicFeatures) throws IOException, URISyntaxException { String solrUrl = solr.getBaseURL(); System.out.println("SOLR URL = " + solrUrl); // build OR query from a list of genes (assuming they have MGI ids StringBuilder geneClause = new StringBuilder(genomicFeatures.size()*15); boolean start = true; for (String genomicFeatureAcc: genomicFeatures) { geneClause.append((start)?genomicFeatureAcc:"\" OR \""+genomicFeatureAcc); start = false; } solrUrl += "/select/?q=" + GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":(\"" + geneClause.toString() + "\")" + "&facet=true" + "&facet.field=" + GenotypePhenotypeDTO.RESOURCE_FULLNAME + "&facet.field=" + GenotypePhenotypeDTO.PROCEDURE_NAME + "&facet.field=" + GenotypePhenotypeDTO.MARKER_SYMBOL + "&facet.field=" + GenotypePhenotypeDTO.MP_TERM_NAME + "&sort=p_value%20asc" + "&rows=10000000&version=2.2&start=0&indent=on&wt=json"; 
		System.out.println("\n\n\n SOLR URL = " + solrUrl);
		return this.createPhenotypeResultFromSolrResponse(solrUrl, isPreQc);
	}

	/**
	 * Returns a PhenotypeFacetResult object given a phenotyping center and a
	 * pipeline stable id
	 *
	 * @param phenotypingCenter
	 *            a short name for a phenotyping center
	 * @param pipelineStableId
	 *            a stable pipeline id
	 * @return a PhenotypeFacetResult instance containing a list of
	 *         PhenotypeCallSummary objects.
	 * @throws IOException
	 * @throws URISyntaxException
	 */
	public PhenotypeFacetResult getPhenotypeFacetResultByPhenotypingCenterAndPipeline(String phenotypingCenter, String pipelineStableId) throws IOException, URISyntaxException {
		String solrUrl = solr.getBaseURL();
		System.out.println("SOLR URL = " + solrUrl);
		solrUrl += "/select/?q=" + GenotypePhenotypeDTO.PHENOTYPING_CENTER + ":\"" + phenotypingCenter + "\"" + "&fq=" + GenotypePhenotypeDTO.PIPELINE_STABLE_ID + ":" + pipelineStableId + "&facet=true" + "&facet.field=" + GenotypePhenotypeDTO.RESOURCE_FULLNAME + "&facet.field=" + GenotypePhenotypeDTO.PROCEDURE_NAME + "&facet.field=" + GenotypePhenotypeDTO.MARKER_SYMBOL + "&facet.field=" + GenotypePhenotypeDTO.MP_TERM_NAME + "&sort=p_value%20asc" + "&rows=10000000&version=2.2&start=0&indent=on&wt=json";
		System.out.println("SOLR URL = " + solrUrl);
		return this.createPhenotypeResultFromSolrResponse(solrUrl, isPreQc);
	}

	/**
	 * Faceted MP (mammalian phenotype) calls for one gene accession, with an
	 * extra caller-supplied solr query fragment appended. Results are sorted
	 * by p-value ascending.
	 *
	 * @param accId marker accession id
	 * @param queryString extra solr parameters; a leading "&" is added if missing
	 * @return faceted phenotype calls for the gene
	 * @throws IOException on solr I/O failure
	 * @throws URISyntaxException if the built URL is malformed
	 */
	public PhenotypeFacetResult getMPByGeneAccessionAndFilter(String accId, String queryString) throws IOException, URISyntaxException {
		String solrUrl = solr.getBaseURL() + "/select/?q=" + GenotypePhenotypeDTO.MARKER_ACCESSION_ID + ":\"" + accId + "\""
				// + "&fq=-" + GenotypePhenotypeDTO.RESOURCE_NAME + ":IMPC"
				+ "&rows=10000000&version=2.2&start=0&indent=on&wt=json&facet=true&facet.field=" + GenotypePhenotypeDTO.RESOURCE_FULLNAME + "&facet.field=" + GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_NAME + "";
		if (queryString.startsWith("&")) {
			solrUrl += queryString;
		} else {
			// add an ampersand parameter splitter if not one as we need
			// one to add to the already present solr query string
			solrUrl += "&" + queryString;
		}
		// sort by pValue by default so we get most sig calls at top of tables
		solrUrl += "&sort=p_value%20asc";
		System.out.println("Solr url in getMPByGeneAccessionAndFilter " + solrUrl);
		return createPhenotypeResultFromSolrResponse(solrUrl, isPreQc);
	}

	/**
	 * Faceted calls for an MP term matched at any level (direct, top level or
	 * intermediate), with an extra caller-supplied solr query fragment
	 * appended. Results are sorted by p-value ascending.
	 *
	 * @param phenotype_id MP term accession id
	 * @param queryString extra solr parameters; a leading "&" is added if missing
	 * @return faceted phenotype calls for the MP term
	 * @throws IOException on solr I/O failure
	 * @throws URISyntaxException if the built URL is malformed
	 */
	public PhenotypeFacetResult getMPCallByMPAccessionAndFilter(String phenotype_id, String queryString) throws IOException, URISyntaxException {
		String solrUrl = solr.getBaseURL() + "/select/?q=(" + GenotypePhenotypeDTO.MP_TERM_ID + ":\"" + phenotype_id + "\"+OR+" + GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID + ":\"" + phenotype_id + "\"+OR+" + GenotypePhenotypeDTO.INTERMEDIATE_MP_TERM_ID + ":\"" + phenotype_id + "\")"
				// + "&fq=-" + GenotypePhenotypeDTO.RESOURCE_NAME + ":IMPC" +
				+ "&rows=1000000&version=2.2&start=0&indent=on&wt=json&facet=true&facet.field=" + GenotypePhenotypeDTO.RESOURCE_FULLNAME + "&facet.field=" + GenotypePhenotypeDTO.PROCEDURE_NAME + "&facet.field=" + GenotypePhenotypeDTO.MARKER_SYMBOL + "&facet.field=" + GenotypePhenotypeDTO.MP_TERM_NAME + "";
		// if (!filterString.equals("")) {
		if (queryString.startsWith("&")) {
			solrUrl += queryString;
		} else {
			// add an ampersand parameter splitter if not one as we need
			// one to add to the already present solr query string
			solrUrl += "&" + queryString;
		}
		solrUrl += "&sort=p_value%20asc";
		System.out.println("solr url for sorting pvalues=" + solrUrl);
		return createPhenotypeResultFromSolrResponse(solrUrl, isPreQc);
	}

	/**
	 * Runs the given solr query and converts the response into typed
	 * StatisticalResult beans (unidimensional or categorical, chosen by
	 * observationType); continues on the next chunk.
	 */
	private List<?
	extends StatisticalResult> createStatsResultFromSolr(String url, ObservationType observationType) throws IOException, URISyntaxException {
		// need some way of determining what type of data and therefor what type
		// of stats result object to create default to unidimensional for now
		List<StatisticalResult> results = new ArrayList<>();
		// StatisticalResult statisticalResult=new StatisticalResult();
		JSONObject resultsj = null;
		resultsj = JSONRestUtil.getResults(url);
		JSONArray docs = resultsj.getJSONObject("response").getJSONArray("docs");
		if (observationType == ObservationType.unidimensional) {
			for (Object doc : docs) {
				UnidimensionalResult unidimensionalResult = new UnidimensionalResult();
				JSONObject phen = (JSONObject) doc;
				String pValue = phen.getString(GenotypePhenotypeDTO.P_VALUE);
				String sex = phen.getString(GenotypePhenotypeDTO.SEX);
				String zygosity = phen.getString(GenotypePhenotypeDTO.ZYGOSITY);
				String effectSize = phen.getString(GenotypePhenotypeDTO.EFFECT_SIZE);
				String phenoCallSummaryId = phen.getString(GenotypePhenotypeDTO.ID);
				// System.out.println("pValue="+pValue);
				if (pValue != null) {
					// one id for each document and for each sex
					unidimensionalResult.setId(Integer.parseInt(phenoCallSummaryId));
					unidimensionalResult.setpValue(Double.valueOf(pValue));
					unidimensionalResult.setZygosityType(ZygosityType.valueOf(zygosity));
					unidimensionalResult.setEffectSize(new Double(effectSize));
					unidimensionalResult.setSexType(SexType.valueOf(sex));
				}
				// NOTE(review): the bean is added even when pValue is null,
				// i.e. callers may receive mostly-empty results — confirm intended.
				results.add(unidimensionalResult);
			}
			return results;
		}
		if (observationType == ObservationType.categorical) {
			for (Object doc : docs) {
				CategoricalResult catResult = new CategoricalResult();
				JSONObject phen = (JSONObject) doc;
				// System.out.println("pValue="+pValue);
				String pValue = phen.getString(GenotypePhenotypeDTO.P_VALUE);
				String sex = phen.getString(GenotypePhenotypeDTO.SEX);
				String zygosity = phen.getString(GenotypePhenotypeDTO.ZYGOSITY);
				String effectSize = phen.getString(GenotypePhenotypeDTO.EFFECT_SIZE);
				String phenoCallSummaryId = phen.getString(GenotypePhenotypeDTO.ID);
				// System.out.println("pValue="+pValue);
				// if(pValue!=null) {
				// one id for each document and for each sex
				catResult.setId(Integer.parseInt(phenoCallSummaryId));
				catResult.setpValue(Double.valueOf(pValue));
				catResult.setZygosityType(ZygosityType.valueOf(zygosity));
				catResult.setEffectSize(new Double(Double.valueOf(effectSize)));
				catResult.setSexType(SexType.valueOf(sex));
				// System.out.println("adding sex="+SexType.valueOf(sex));
				results.add(catResult);
			}
			return results;
		}
		// any other observation type: nothing is mapped
		return results;
	}

	/**
	 * Runs the given faceted solr query and converts the response into a
	 * PhenotypeFacetResult: one PhenotypeCallSummary per doc, plus a per-facet
	 * map of facet value to count (zero counts are dropped).
	 *
	 * @param url fully-built faceted solr select URL (wt=json expected)
	 * @param isPreQc forwarded to each created summary's preQC flag
	 */
	private PhenotypeFacetResult createPhenotypeResultFromSolrResponse(String url, Boolean isPreQc) throws IOException, URISyntaxException {
		PhenotypeFacetResult facetResult = new PhenotypeFacetResult();
		List<PhenotypeCallSummary> list = new ArrayList<PhenotypeCallSummary>();
		JSONObject results = new JSONObject();
		results = JSONRestUtil.getResults(url);
		JSONArray docs = results.getJSONObject("response").getJSONArray("docs");
		for (Object doc : docs) {
			list.add(createSummaryCall(doc, isPreQc));
		}
		// get the facet information that we can use to create the buttons /
		// dropdowns/ checkboxes
		JSONObject facets = results.getJSONObject("facet_counts").getJSONObject("facet_fields");
		Iterator<String> ite = facets.keys();
		Map<String, Map<String, Integer>> dropdowns = new HashMap<String, Map<String, Integer>>();
		while (ite.hasNext()) {
			Map<String, Integer> map = new HashMap<String, Integer>();
			String key = (String) ite.next();
			// solr facet arrays alternate: value, count, value, count, ...
			JSONArray array = (JSONArray) facets.get(key);
			int i = 0;
			while (i + 1 < array.size()) {
				String facetString = array.get(i).toString();
				int number = array.getInt(i + 1);
				if (number != 0) {
					// only add if some counts to filter on!
					map.put(facetString, number);
				}
				i += 2;
				// System.out.println("i="+i);
			}
			dropdowns.put(key, map);
		}
		facetResult.setFacetResults(dropdowns);
		facetResult.setPhenotypeCallSummaries(list);
		return facetResult;
	}

	/**
	 * Converts one solr genotype-phenotype doc into a PhenotypeCallSummary,
	 * resolving MP term, top-level terms, allele/gene, centre, datasource,
	 * parameter, pipeline, project, procedure and p-value/effect size.
	 * Continues on the next chunk.
	 *
	 * @param doc a JSONObject from the solr "docs" array
	 * @param preQc whether the call comes from pre-QC data
	 */
	public PhenotypeCallSummary createSummaryCall(Object doc, Boolean preQc){
		JSONObject phen = (JSONObject) doc;
		JSONArray topLevelMpTermNames;
		JSONArray topLevelMpTermIDs;
		String mpTerm = phen.getString(GenotypePhenotypeDTO.MP_TERM_NAME);
		String mpId = phen.getString(GenotypePhenotypeDTO.MP_TERM_ID);
		PhenotypeCallSummary sum = new PhenotypeCallSummary();
		OntologyTerm phenotypeTerm = new OntologyTerm();
		DatasourceEntityId mpEntity = new DatasourceEntityId();
		mpEntity.setAccession(mpId);
		phenotypeTerm.setId(mpEntity);
		phenotypeTerm.setName(mpTerm);
		phenotypeTerm.setDescription(mpTerm);
		sum.setPhenotypeTerm(phenotypeTerm);
		// Set the Gid field required for linking to phenoview, which is stored in the
		// datafile in the external id field
		if (phen.containsKey(GenotypePhenotypeDTO.EXTERNAL_ID)){
			sum.setgId(phen.getString(GenotypePhenotypeDTO.EXTERNAL_ID));
		}
		sum.setPreQC(preQc);
		// check the top level categories
		if (phen.containsKey(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID)){
			topLevelMpTermNames = phen.getJSONArray(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_NAME);
			topLevelMpTermIDs = phen.getJSONArray(GenotypePhenotypeDTO.TOP_LEVEL_MP_TERM_ID);
		}else {
			// a top level term is directly associated
			topLevelMpTermNames = new JSONArray();
			topLevelMpTermNames.add(phen.getString(GenotypePhenotypeDTO.MP_TERM_NAME));
			topLevelMpTermIDs = new JSONArray();
			topLevelMpTermIDs.add(phen.getString(GenotypePhenotypeDTO.MP_TERM_ID));
		}
		List<OntologyTerm> topLevelPhenotypeTerms = new ArrayList<OntologyTerm>();
		for (int i = 0; i < topLevelMpTermNames.size(); i++) {
			OntologyTerm toplevelTerm = new OntologyTerm();
			toplevelTerm.setName(topLevelMpTermNames.getString(i));
			toplevelTerm.setDescription(topLevelMpTermNames.getString(i));
			DatasourceEntityId tlmpEntity = new DatasourceEntityId();
tlmpEntity.setAccession(topLevelMpTermIDs.getString(i)); toplevelTerm.setId(tlmpEntity); topLevelPhenotypeTerms.add(toplevelTerm); } sum.setTopLevelPhenotypeTerms(topLevelPhenotypeTerms); sum.setPhenotypingCenter(phen.getString(GenotypePhenotypeDTO.PHENOTYPING_CENTER)); if (phen.containsKey(GenotypePhenotypeDTO.ALLELE_SYMBOL)) { Allele allele = new Allele(); allele.setSymbol(phen.getString(GenotypePhenotypeDTO.ALLELE_SYMBOL)); GenomicFeature alleleGene = new GenomicFeature(); DatasourceEntityId alleleEntity = new DatasourceEntityId(); alleleEntity.setAccession(phen.getString(GenotypePhenotypeDTO.ALLELE_ACCESSION_ID)); allele.setId(alleleEntity); alleleGene.setId(alleleEntity); alleleGene.setSymbol(phen.getString(GenotypePhenotypeDTO.MARKER_SYMBOL)); allele.setGene(alleleGene); sum.setAllele(allele); } if (phen.containsKey(GenotypePhenotypeDTO.MARKER_SYMBOL)) { GenomicFeature gf = new GenomicFeature(); gf.setSymbol(phen.getString(GenotypePhenotypeDTO.MARKER_SYMBOL)); DatasourceEntityId geneEntity = new DatasourceEntityId(); geneEntity.setAccession(phen.getString(GenotypePhenotypeDTO.MARKER_ACCESSION_ID)); gf.setId(geneEntity); sum.setGene(gf); } if (phen.containsKey(GenotypePhenotypeDTO.PHENOTYPING_CENTER)) { sum.setPhenotypingCenter(phen.getString(GenotypePhenotypeDTO.PHENOTYPING_CENTER)); } // GenomicFeature gene=new GenomicFeature(); // gene. 
// allele.setGene(gene); String zygosity = phen.getString(GenotypePhenotypeDTO.ZYGOSITY); ZygosityType zyg = ZygosityType.valueOf(zygosity); sum.setZygosity(zyg); String sex = phen.getString(GenotypePhenotypeDTO.SEX); SexType sexType = SexType.valueOf(sex); sum.setSex(sexType); String provider = phen.getString(GenotypePhenotypeDTO.RESOURCE_NAME); Datasource datasource = new Datasource(); datasource.setName(provider); sum.setDatasource(datasource); // "parameter_stable_id":"557", // "parameter_name":"Bone Mineral Content", // "procedure_stable_id":"41", // "procedure_stable_key":"41", Parameter parameter = new Parameter(); if (phen.containsKey(GenotypePhenotypeDTO.PARAMETER_STABLE_ID)) { parameter = pipelineDAO.getParameterByStableId(phen.getString(GenotypePhenotypeDTO.PARAMETER_STABLE_ID)); } else { System.err.println("parameter_stable_id missing"); } sum.setParameter(parameter); Pipeline pipeline = new Pipeline(); if (phen.containsKey(GenotypePhenotypeDTO.PARAMETER_STABLE_ID)) { pipeline = pipelineDAO.getPhenotypePipelineByStableId(phen.getString(GenotypePhenotypeDTO.PIPELINE_STABLE_ID)); } else { System.err.println("pipeline stable_id missing"); } sum.setPipeline(pipeline); Project project = new Project(); project.setName(phen.getString(GenotypePhenotypeDTO.PROJECT_NAME)); //TODO remove comment out// project.setDescription(phen.getString(GenotypePhenotypeDTO.PROJECT_FULLNAME)); if (phen.containsKey(GenotypePhenotypeDTO.PROJECT_EXTERNAL_ID)) { sum.setExternalId(phen.getInt(GenotypePhenotypeDTO.PROJECT_EXTERNAL_ID)); } if (phen.containsKey(GenotypePhenotypeDTO.P_VALUE)) { sum.setpValue(new Float(phen.getString(GenotypePhenotypeDTO.P_VALUE))); // get the effect size too sum.setEffectSize(new Float(phen.getString(GenotypePhenotypeDTO.EFFECT_SIZE))); } sum.setProject(project); // "procedure_stable_id":"77", // "procedure_stable_key":"77", // "procedure_name":"Plasma Chemistry", Procedure procedure = new Procedure(); if 
(phen.containsKey(GenotypePhenotypeDTO.PROCEDURE_STABLE_ID)) { procedure.setStableId(phen.getString(GenotypePhenotypeDTO.PROCEDURE_STABLE_ID)); //TODO remove comment out // procedure.setStableKey(Integer.valueOf(phen.getString(GenotypePhenotypeDTO.PROCEDURE_STABLE_KEY))); procedure.setName(phen.getString(GenotypePhenotypeDTO.PROCEDURE_NAME)); sum.setProcedure(procedure); } else { System.err.println("procedure_stable_id"); } return sum; } /* * End of method for PhenotypeCallSummarySolrImpl */ public GeneRowForHeatMap getResultsForGeneHeatMap(String accession, GenomicFeature gene, List<BasicBean> xAxisBeans, Map<String, List<String>> geneToTopLevelMpMap) { GeneRowForHeatMap row = new GeneRowForHeatMap(accession); if (gene != null) { row.setSymbol(gene.getSymbol()); } else { System.err.println("error no symbol for gene " + accession); } Map<String, HeatMapCell> xAxisToCellMap = new HashMap<>(); for (BasicBean xAxisBean : xAxisBeans) { HeatMapCell cell = new HeatMapCell(); if (geneToTopLevelMpMap.containsKey(accession)) { List<String> mps = geneToTopLevelMpMap.get(accession); // cell.setLabel("No Phenotype Detected"); if (mps != null && !mps.isEmpty()) { if (mps.contains(xAxisBean.getId())) { cell.setxAxisKey(xAxisBean.getId()); cell.setLabel("Data Available"); cell.setStatus("Data Available"); } else { cell.setStatus("No MP"); } } else { // System.err.println("mps are null or empty"); cell.setStatus("No MP"); } } else { // if no doc found for the gene then no data available cell.setStatus("No Data Available"); } xAxisToCellMap.put(xAxisBean.getId(), cell); } row.setXAxisToCellMap(xAxisToCellMap); return row; } }
package org.ensembl.healthcheck.configurationmanager;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.logging.Logger;

import org.ensembl.healthcheck.ConfigurableTestRunner;
import org.ensembl.healthcheck.configurationmanager.AbstractAliasAwareConfigurationBacking.configurationDataType;

import uk.co.flamingpenguin.jewel.cli.OptionNotPresentException;

/**
 * A configuration object that gets its information from a property file.
 *
 * Use ConfigurationFactory for instantiation or one of the many constructors
 *
 * Acts as a dynamic-proxy InvocationHandler backing for a user-supplied
 * configuration interface: getter calls on the proxy are answered from the
 * loaded {@link Properties} via {@link #mockDirectGetMethod(String)}.
 */
public class ConfigurationByProperties<T> extends AbstractAliasAwareWithStanardInvocationHanderConfigurationBacking<T> {

	static final Logger log = Logger.getLogger(ConfigurationByProperties.class.getCanonicalName());

	/**
	 * A list of strings may be expected as a configuration value. In
	 * properties file however, there are only strings. In case a list
	 * of strings is requested in the configuration interface the string
	 * found in the property will be split on the value in
	 * the listSeparatorInProperyFile variable to generate a list of values.
	 *
	 * Update 11/10/2010: Changed from comma (",") to space (" ") as requested
	 * by Dan
	 *
	 */
	final static String listSeparatorInProperyFile = " ";

	// Awesome for debugging
	private final String type = "My type is ConfigurationByProperties.";

	// The backing store every proxied getter reads from
	private final Properties properties;

	// canonical parameter name -> set of accepted aliases (built by superclass helper)
	protected Map<String,Set<String>> parameterAliasesMap;

	/**
	 *
	 * Construct a ConfigurationByProperties object that proxies a
	 * configurationInterfaceToProxy. Initialised by passing properties.
	 *
	 * @param configurationInterfaceToProxy
	 * @param properties
	 * @return a proxy implementing configurationInterfaceToProxy
	 */
	public static Object newInstance(Class configurationInterfaceToProxy, Properties properties) {
		return createProxyUsingConfigurationObject(
			configurationInterfaceToProxy,
			new ConfigurationByProperties(configurationInterfaceToProxy, properties)
		);
	}

	/**
	 *
	 * Construct a ConfigurationByProperties object that proxies a
	 * configurationInterfaceToProxy. Initialised by passing the name of a
	 * property file.
	 *
	 * @param configurationInterfaceToProxy
	 * @param propertyFile
	 * @return a proxy implementing configurationInterfaceToProxy
	 * @throws IOException
	 */
	public static Object newInstance(Class configurationInterfaceToProxy, String propertyFile) throws IOException {
		return createProxyUsingConfigurationObject(
			configurationInterfaceToProxy,
			new ConfigurationByProperties(configurationInterfaceToProxy, propertyFile)
		);
	}

	/**
	 * Creates a proxy of type configurationInterfaceToProxy for a handler
	 * which is an instance of this object.
	 *
	 * @param configurationInterfaceToProxy
	 * @param handler
	 * @return the dynamic proxy
	 */
	private static Object createProxyUsingConfigurationObject(
		Class configurationInterfaceToProxy,
		InvocationHandler handler
	) {
		Object configuration = java.lang.reflect.Proxy.newProxyInstance(
			configurationInterfaceToProxy.getClassLoader(),
			new Class[] { configurationInterfaceToProxy },
			handler
		);
		return configuration;
	}

	/**
	 * Constructor using a filename for finding the properties file
	 *
	 * @param propertyFile
	 * @throws IOException
	 *
	 */
	public ConfigurationByProperties(Class<T> configurationClass, String propertyFile) throws IOException {
		this(configurationClass, new File(propertyFile));
	}

	/**
	 * Constructor using a File object for finding the properties file
	 *
	 * @param propertyFile
	 * @throws IOException
	 *
	 */
	public ConfigurationByProperties(Class<T> configurationClass, File propertyFile) throws IOException {
		this(configurationClass, new FileInputStream(propertyFile));
	}

	/**
	 * Constructor using a FileInputStream object for reading the properties
	 * file.
	 *
	 * @param propertyFis
	 * @throws IOException
	 *
	 */
	public ConfigurationByProperties(Class<T> configurationClass, FileInputStream propertyFis) throws IOException {
		super(configurationClass);
		Properties properties = new Properties();
		properties.load(propertyFis);
		// stream is consumed eagerly, so it can be closed right away
		propertyFis.close();
		this.properties = properties;
		parameterAliasesMap = this.createParameterAliasesMap(configurationClass);
	}

	/**
	 * @param properties
	 *
	 * Constructor that takes the properties directly.
	 *
	 */
	public ConfigurationByProperties(Class<T> configurationClass, Properties properties) {
		super(configurationClass);
		this.properties = properties;
		parameterAliasesMap = this.createParameterAliasesMap(configurationClass);
	}

	/**
	 * Answers a proxied getter: looks the variable up in the properties and
	 * converts it to the declared return type (String, or a List of Strings
	 * split on listSeparatorInProperyFile).
	 *
	 * @param varRequested name of the configuration variable
	 * @return the String value, or a List of Strings
	 * @throws OptionNotPresentException when the property is not set
	 * @throws NullPointerException when the variable's return type is unknown
	 */
	protected Object mockDirectGetMethod(String varRequested) {

		// Result may be null
		String configValue = this.properties.getProperty(varRequested);

		// Requested property might not have been set. If so
		if (configValue == null) {
			throw new OptionNotPresentException("No configuration setting found for " + varRequested);
		}

		log.fine("varRequested: " + varRequested);

		// canonicalVarName2DataType / alias2CanonicalVarName are inherited
		// lookup tables mapping an alias to its canonical name and on to the
		// declared return type of the interface method
		configurationDataType dataTypeExpected = canonicalVarName2DataType.get(alias2CanonicalVarName.get(varRequested));

		if (dataTypeExpected==null) {
			throw new NullPointerException("Unknown return data type for " + varRequested);
		}
		if (dataTypeExpected==configurationDataType.String) {
			return configValue;
		}
		if (dataTypeExpected==configurationDataType.List_Of_Strings) {
			List<String> returnValue = new ArrayList<String>();
			for (String currentValue : configValue.split(listSeparatorInProperyFile)) {
				returnValue.add(currentValue);
			}
			return returnValue;
		}
		throw new RuntimeException("Unknown return type " + dataTypeExpected + " for " + varRequested + "!");
	}

	/**
	 * Answers a proxied isXxx() presence check by attempting the get and
	 * interpreting OptionNotPresentException as "not set".
	 * NOTE: exception-driven control flow is deliberate here — it reuses the
	 * exact lookup/conversion path of mockDirectGetMethod.
	 */
	protected boolean mockDirectIsMethod(String varRequested) {
		try {
			//mockGetMethod(varRequested);
			mockDirectGetMethod(varRequested);
		} catch (OptionNotPresentException e) {
			return false;
		}
		return true;
	}

	public String toString() {
		return type + " " + " " + properties.toString();
	}
}
/*
 * @author <a href="mailto:novotny@aei.mpg.de">Jason Novotny</a>
 * @version $Id$
 */
package org.gridlab.gridsphere.portlet.service.spi.impl;

import org.gridlab.gridsphere.portlet.*;
import org.gridlab.gridsphere.portlet.impl.SportletLog;
import org.gridlab.gridsphere.portlet.service.PortletService;
import org.gridlab.gridsphere.portlet.service.PortletServiceNotFoundException;
import org.gridlab.gridsphere.portlet.service.PortletServiceUnavailableException;
import org.gridlab.gridsphere.portlet.service.spi.PortletServiceAuthorizer;
import org.gridlab.gridsphere.portlet.service.spi.PortletServiceConfig;
import org.gridlab.gridsphere.portlet.service.spi.PortletServiceFactory;
import org.gridlab.gridsphere.portlet.service.spi.PortletServiceProvider;
import org.gridlab.gridsphere.portlet.service.spi.impl.descriptor.SportletServiceCollection;
import org.gridlab.gridsphere.portlet.service.spi.impl.descriptor.SportletServiceDefinition;
import org.gridlab.gridsphere.portlet.service.spi.impl.descriptor.SportletServiceDescriptor;
import org.gridlab.gridsphere.portletcontainer.GridSphereConfig;
import org.gridlab.gridsphere.portletcontainer.GridSphereConfigProperties;
import org.gridlab.gridsphere.portletcontainer.PortletSessionManager;
import org.gridlab.gridsphere.services.core.user.impl.GridSphereUserManager;
import org.gridlab.gridsphere.services.core.user.UserSessionManager;
import org.gridlab.gridsphere.core.persistence.PersistenceManagerException;

import javax.servlet.ServletConfig;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.util.*;

/**
 * The <code>SportletServiceFactory</code> provides a factory for the creation
 * of portlet services. The <code>SportletServiceFactory</code> is also
 * responsible for portlet service lifecycle management including
 * initialization and shutdown.
 */
public class SportletServiceFactory implements PortletServiceFactory, PortletSessionListener {

    private static PortletLog log = SportletLog.getInstance(SportletServiceFactory.class);
    private static SportletServiceFactory instance = null;
    private static GridSphereUserManager userManager = GridSphereUserManager.getInstance();
    private static PortletSessionManager portletSessionManager = PortletSessionManager.getInstance();
    private static UserSessionManager userSessionManager = UserSessionManager.getInstance();

    // Maintain a single copy of each service instantiated
    // as a classname and PortletServiceProvider pair
    private Hashtable initServices = new Hashtable();

    // Hash of all services key = service interface name, value = SportletServiceDefinition
    private Hashtable allServices = new Hashtable();

    // Hash of all user services: userid -> (service interface name -> provider)
    private static Hashtable userServices = new Hashtable();

    // List of all guest cached guest services
    private Hashtable guestServices = new Hashtable();

    /**
     * Private constructor. Use getInstance() instead.
     */
    private SportletServiceFactory() {
        // Reads in the service definitions from the xml file and stores them in allServices
        // organized according to service interface keys and service definition values
        String servicesPath = GridSphereConfig.getServletContext().getRealPath("/WEB-INF/PortletMaster.xml");
        String servicesMappingPath = GridSphereConfig.getServletContext().getRealPath("/WEB-INF/mapping/portlet-services-mapping.xml");
        addServices(servicesPath, servicesMappingPath);
    }

    public void login(PortletRequest req) throws PortletException {
        // nothing to do on login; user service caches are built lazily
    }

    /**
     * Drops any cached user-scoped services for the user owning the session.
     */
    public void logout(PortletSession session) throws PortletException {
        String userid = userSessionManager.getUserIdFromSession(session);
        if ((userid != null) && (userServices.containsKey(userid))) {
            log.debug("Removing services for userid: " + userid);
            userServices.remove(userid);
        }
    }

    /**
     * Umarshalls services from the descriptor file found in servicesPath
     * using the mapping file specified
     *
     * @param servicesPath the path to the portlet services descriptor file
     * @param mappingPath  the path to the portlet services mapping file
     */
    public synchronized void addServices(String servicesPath, String mappingPath) {
        SportletServiceDescriptor descriptor = null;
        try {
            descriptor = new SportletServiceDescriptor(servicesPath, mappingPath);
        } catch (IOException e) {
            log.error("IO error unmarshalling " + servicesPath + " using " + mappingPath + " : " + e.getMessage());
            return;
        } catch (PersistenceManagerException e) {
            log.error("Unable to unmarshall " + servicesPath + " using " + mappingPath + " : " + e.getMessage());
            return;
        }
        SportletServiceCollection serviceCollection = descriptor.getServiceCollection();
        List services = serviceCollection.getPortletServicesList();
        Iterator it = services.iterator();
        while (it.hasNext()) {
            SportletServiceDefinition serviceDef = (SportletServiceDefinition) it.next();
            allServices.put(serviceDef.getServiceInterface(), serviceDef);
            log.debug("adding service: " + serviceDef.getServiceInterface() + " service def: " + serviceDef.toString());
        }
    }

    /**
     * Returns the singleton factory, creating (and loading the descriptors)
     * on first use.
     */
    public static synchronized SportletServiceFactory getInstance() {
        if (instance == null) {
            instance = new SportletServiceFactory();
        }
        return instance;
    }

    /**
     * createPortletServiceFactory instantiates the given class and initializes it. The portlet service properties
     * file must be specified in the ServletContext as an InitParameter with the "PortletServices.properties" key.
     * If no properties file is found or any error occurs an exception is thrown.
     *
     * @param service the class of the service
     * @param servletConfig the servlet configuration
     * @param useCachedService reuse a previously initialized instance when <code>true</code>
     * @return the instantiated portlet service
     * @throws PortletServiceUnavailableException if the portlet service is unavailable
     * @throws PortletServiceNotFoundException if the PortletService is not found
     */
    public PortletService createPortletService(Class service,
                                               ServletConfig servletConfig,
                                               boolean useCachedService)
            throws PortletServiceUnavailableException, PortletServiceNotFoundException {
        PortletServiceProvider psp = null;

        // see if we already have an instance of this service
        if (service == null) {
            throw new PortletServiceUnavailableException("Received null service class");
        }

        // if init'ed service exists then use it
        if (useCachedService) {
            psp = (PortletServiceProvider) initServices.get(service);
            if (psp != null) return psp;
        }

        String serviceName = service.getName();
        SportletServiceDefinition def = (SportletServiceDefinition) allServices.get(serviceName);
        if (def == null) {
            log.error("Unable to find portlet service interface: " + serviceName + " . Please check PortletServices.xml file for proper service entry");
            throw new PortletServiceNotFoundException("Unable to find portlet service: " + serviceName);
        }

        // if user is required then pass in Guest user privileges
        if (def.getUserRequired()) {
            return createUserPortletService(service, GuestUser.getInstance(), servletConfig, useCachedService);
        }

        /* Create the service implementation */
        String serviceImpl = def.getServiceImplementation();
        if (serviceImpl == null) {
            log.error("Unable to find implementing portlet service: " + serviceName + " . Please check PortletServices.xml file for proper service entry");
            throw new PortletServiceNotFoundException("Unable to find implementing portlet service for interface: " + serviceName);
        }

        Properties configProperties = def.getConfigProperties();
        PortletServiceConfig portletServiceConfig =
                new SportletServiceConfig(service, configProperties, servletConfig);

        try {
            psp = (PortletServiceProvider) Class.forName(serviceImpl).newInstance();
        } catch (Exception e) {
            log.error("Unable to create portlet service: " + serviceImpl, e);
            throw new PortletServiceNotFoundException("Unable to create portlet service: " + serviceImpl, e);
        }
        try {
            psp.init(portletServiceConfig);
        } catch (PortletServiceUnavailableException e) {
            log.error("Unable to initialize portlet service: " + serviceImpl, e);
            throw new PortletServiceNotFoundException("Unable to initialize portlet service: " + serviceImpl, e);
        }
        initServices.put(service, psp);
        return psp;
    }

    /**
     * Creates a user specific portlet service. If no instance exists, the service
     * will be initialized before it is returned to the client.
     *
     * @param service the class of the service
     * @param user the User
     * @param servletConfig the servlet configuration
     * @param useCachedService reuse a previous initialized service if <code>true</code>,
     *                         otherwise create a new service instance if <code>false</code>
     * @return the instantiated portlet service
     * @throws PortletServiceUnavailableException if the portlet service is unavailable
     * @throws PortletServiceNotFoundException if the PortletService is not found
     */
    public PortletService createUserPortletService(Class service,
                                                   User user,
                                                   ServletConfig servletConfig,
                                                   boolean useCachedService)
            throws PortletServiceUnavailableException, PortletServiceNotFoundException {
        //PortletServiceProvider psp = null;
        String serviceName = service.getName();
        SportletServiceDefinition def = (SportletServiceDefinition) allServices.get(serviceName);
        if (def == null) {
            log.error("Unable to find portlet service interface: " + serviceName + " . Please check PortletServices.xml file for proper service entry");
            throw new PortletServiceNotFoundException("Unable to find portlet service: " + serviceName);
        }
        if (!def.getUserRequired()) {
            return createPortletService(service, servletConfig, useCachedService);
        } else if (user == null) {
            throw new PortletServiceNotFoundException("Unable to create service: " + serviceName + " user is null");
        }
        if (useCachedService) {
            Map userServiceMap = (Map)userServices.get(user.getID());
            if (userServiceMap != null) {
                if (userServiceMap.containsKey(serviceName)) {
                    return (PortletService)userServiceMap.get(serviceName);
                }
            }
        }
        if ((user instanceof GuestUser) && (guestServices.containsKey(serviceName))) {
            return (PortletService)guestServices.get(serviceName);
        }

        /* Create the service implementation */
        String serviceImpl = def.getServiceImplementation();
        if (serviceImpl == null) {
            log.error("Unable to find implementing portlet service: " + serviceName + " . Please check PortletServices.xml file for proper service entry");
            throw new PortletServiceNotFoundException("Unable to find implementing portlet service for interface: " + serviceName);
        }
        Properties configProperties = def.getConfigProperties();
        PortletServiceConfig portletServiceConfig =
                new SportletServiceConfig(service, configProperties, servletConfig);

        // Create an authroizer for the secure service
        PortletServiceAuthorizer auth = new SportletServiceAuthorizer(user, userManager);

        // instantiate wrapper with user and impl
        PortletServiceProvider psp = null;
        //PortletServiceProvider psp = (PortletServiceProvider)initServices.get(service);
        //if (psp == null) {
        try {
            Class c = Class.forName(serviceImpl);
            Class[] parameterTypes = new Class[]{PortletServiceAuthorizer.class};
            Object[] obj = new Object[]{auth};
            Constructor con = c.getConstructor(parameterTypes);
            psp = (PortletServiceProvider) con.newInstance(obj);
        } catch (Exception e) {
            log.error("Unable to create portlet service wrapper: " + serviceImpl, e);
            throw new PortletServiceNotFoundException("Unable to create portlet service: " + serviceName, e);
        }
        try {
            psp.init(portletServiceConfig);
            // NOTE(review): a user-scoped provider is also cached in the
            // shared initServices table here — later createPortletService
            // calls could hand it out for any caller; confirm intended.
            initServices.put(service, psp);
        } catch (PortletServiceUnavailableException e) {
            log.error("Unable to initialize portlet service: " + serviceImpl, e);
            throw new PortletServiceNotFoundException("The SportletServiceFactory was unable to initialize the portlet service: " + serviceImpl, e);
        }
        if (user instanceof GuestUser) {
            guestServices.put(serviceName, psp);
            return psp;
        }
        Map userServiceMap = (Map)userServices.get(user.getID());
        if (userServiceMap == null) userServiceMap = new HashMap();
        userServiceMap.put(serviceName, psp);
        userServices.put(user.getID(), userServiceMap);

        log.debug("printing user services " + user.getID());
        // BUGFIX: locals were previously named "enum", which is a reserved
        // keyword since Java 5 and fails to compile on modern JDKs.
        Enumeration en = userServices.keys();
        while (en.hasMoreElements()) {
            String u = (String)en.nextElement();
            Map l = (Map)userServices.get(u);
            Iterator it = l.keySet().iterator();
            while (it.hasNext()) {
                log.debug("service: " + (String)it.next());
            }
        }
        log.debug("printing guest services");
        en = guestServices.keys();
        while (en.hasMoreElements()) {
            String s = (String)en.nextElement();
            log.debug("service: " + s);
        }

        // register as a session listener so the user cache is cleaned on logout
        List sessions = userSessionManager.getSessions(user);
        if (sessions != null) {
            Iterator it = sessions.iterator();
            while (it.hasNext()) {
                PortletSession session = (PortletSession)it.next();
                if (session != null) portletSessionManager.addSessionListener(session.getId(), this);
            }
        }
        return psp;
    }

    /**
     * Returns an enumaration of the active services (services that have been
     * initialized)
     *
     * @return an enumaration of the active services
     */
    public Enumeration getActiveServices() {
        return initServices.keys();
    }

    /**
     * Destroys a portlet service identified by its class
     *
     * @param service the service class to shutdown
     */
    public void shutdownService(Class service) {
        if (initServices.containsKey(service)) {
            log.info("Shutting down service: " + service.getName());
            PortletServiceProvider psp = (PortletServiceProvider) initServices.get(service);
            psp.destroy();
        }
    }

    /**
     * Shuts down all portlet services managed by this factory.
     * NOTE(review): destroyed services are left in initServices, so
     * getActiveServices() still reports them afterwards — confirm intended.
     */
    public void shutdownServices() {
        // Calls destroy() on all services we know about
        Enumeration keys = initServices.keys();
        log.info("Shutting down all services:");
        while (keys.hasMoreElements()) {
            Class service = (Class) keys.nextElement();
            PortletServiceProvider psp = (PortletServiceProvider) initServices.get(service);
            log.info("Shutting down service: " + service.getName() + " impl: " + psp.getClass().getName());
            psp.destroy();
        }
    }
}
package org.mtransit.parser.ca_gtha_go_transit_bus;

import java.util.HashSet;
import java.util.regex.Pattern;

import org.apache.commons.lang3.StringUtils;
import org.mtransit.parser.CleanUtils;
import org.mtransit.parser.DefaultAgencyTools;
import org.mtransit.parser.Utils;
import org.mtransit.parser.gtfs.data.GCalendar;
import org.mtransit.parser.gtfs.data.GCalendarDate;
import org.mtransit.parser.gtfs.data.GRoute;
import org.mtransit.parser.gtfs.data.GSpec;
import org.mtransit.parser.gtfs.data.GStop;
import org.mtransit.parser.gtfs.data.GTrip;
import org.mtransit.parser.mt.data.MAgency;
import org.mtransit.parser.mt.data.MRoute;
import org.mtransit.parser.mt.data.MTrip;

/**
 * GTFS-to-MTransit parser for GO Transit (GTHA) buses.
 *
 * Converts the agency's GTFS feed into the MTransit data format: filters out
 * useless service IDs, assigns route/stop IDs and colors, and normalizes trip
 * head-signs and stop names via the regex cleaners below.
 */
public class GTHAGOTransitBusAgencyTools extends DefaultAgencyTools {

	/**
	 * Entry point; fills in default arguments (GTFS zip path, output directory,
	 * files prefix) when none are supplied, then runs the parser.
	 */
	public static void main(String[] args) {
		if (args == null || args.length == 0) {
			args = new String[3];
			args[0] = "input/gtfs.zip";
			args[1] = "../../mtransitapps/ca-gtha-go-transit-bus-android/res/raw/";
			args[2] = ""; // files-prefix
		}
		new GTHAGOTransitBusAgencyTools().start(args);
	}

	// Service IDs worth keeping; null means "no filtering" (see excludeCalendar & co).
	private HashSet<String> serviceIds;

	@Override
	public void start(String[] args) {
		System.out.printf("\nGenerating GO Transit bus data...");
		long start = System.currentTimeMillis();
		// Pre-scan the feed for the useful service IDs before the real parse.
		this.serviceIds = extractUsefulServiceIds(args, this, true);
		super.start(args);
		System.out.printf("\nGenerating GO Transit bus data... DONE in %s.\n", Utils.getPrettyDuration(System.currentTimeMillis() - start));
	}

	@Override
	public boolean excludeCalendar(GCalendar gCalendar) {
		if (this.serviceIds != null) {
			return excludeUselessCalendar(gCalendar, this.serviceIds);
		}
		return super.excludeCalendar(gCalendar);
	}

	@Override
	public boolean excludeCalendarDate(GCalendarDate gCalendarDates) {
		if (this.serviceIds != null) {
			return excludeUselessCalendarDate(gCalendarDates, this.serviceIds);
		}
		return super.excludeCalendarDate(gCalendarDates);
	}

	@Override
	public boolean excludeTrip(GTrip gTrip) {
		if (this.serviceIds != null) {
			return excludeUselessTrip(gTrip, this.serviceIds);
		}
		return super.excludeTrip(gTrip);
	}

	@Override
	public Integer getAgencyRouteType() {
		return MAgency.ROUTE_TYPE_BUS;
	}

	@Override
	public long getRouteId(GRoute gRoute) {
		return Long.parseLong(gRoute.getRouteShortName()); // use route short name as route ID
	}

	private static final String AGENCY_COLOR = "387C2B"; // GREEN (AGENCY WEB SITE CSS)

	@Override
	public String getAgencyColor() {
		return AGENCY_COLOR;
	}

	/**
	 * Supplies a hard-coded color for the few routes missing one in the feed;
	 * aborts the export on any other route without a color so the gap is noticed.
	 */
	@Override
	public String getRouteColor(GRoute gRoute) {
		if (StringUtils.isEmpty(gRoute.getRouteColor())) {
			int rsn = Integer.parseInt(gRoute.getRouteShortName());
			switch (rsn) {
			// @formatter:off
			case 11: return "98002e"; // St. Catharines / Niagara on the Lake
			case 70: return "794500";
			// @formatter:on
			}
			System.out.printf("\nUnexpected route color for %s!\n", gRoute);
			System.exit(-1);
			return null;
		}
		return super.getRouteColor(gRoute);
	}

	private static final String UNIVERSITY_SHORT = "U";
	private static final String BARRIE = "Barrie";
	private static final String BEAVERTON = "Beaverton";
	private static final String BOLTON = "Bolton";
	private static final String BRAMPTON = "Brampton";
	private static final String BURLINGTON_CARPOOL = "Burlington Carpool";
	private static final String COOKSVILLE = "Cooksville";
	private static final String EAST_GWILLIMBURY = "East Gwillimbury";
	private static final String FINCH = "Finch";
	private static final String GUELPH_CENTRAL = "Guelph Central";
	private static final String HAMILTON = "Hamilton";
	private static final String KING_CITY = "King City";
	private static final String KITCHENER = "Kitchener";
	private static final String MILTON = "Milton";
	private static final String MT_JOY = "Mt Joy";
	private static final String NEWCASTLE = "Newcastle";
	private static final String NEWMARKET = "Newmarket";
	private static final String NIAGARA_FALLS = "Niagara Falls";
	private static final String OAKVILLE = "Oakville";
	private static final String ORANGEVILLE = "Orangeville";
	private static final String OSHAWA = "Oshawa";
	private static final String RICHMOND_HILL_CENTER = "Richmond Hl Ctr";
	private static final String SCARBORO = "Scarboro";
	private static final String STREETSVILLE = "Streetsville";
	private static final String U_OF_GUELPH = UNIVERSITY_SHORT + " of Guelph";
	private static final String U_OF_WATERLOO = UNIVERSITY_SHORT + " Of Waterloo"; // NOTE(review): capital "Of" differs from U_OF_GUELPH — kept as-is (displayed label)
	private static final String UNION = "Union";
	private static final String UNIONVILLE = "Unionville";
	private static final String UOIT_D_C = "UOIT / D.C.";
	private static final String UXBRIDGE = "Uxbridge";
	private static final String WHITBY = "Whitby";
	private static final String WOODBINE_404 = "Woodbine / 404";
	private static final String YORK_MILLS = "York Mills";
	private static final String YORK_U = "York " + UNIVERSITY_SHORT;
	private static final String YORKDALE = "Yorkdale";

	/**
	 * Sets the head-sign for a trip; route 11 gets explicit direction labels,
	 * everything else uses the cleaned GTFS trip head-sign.
	 */
	@Override
	public void setTripHeadsign(MRoute mRoute, MTrip mTrip, GTrip gTrip, GSpec gtfs) {
		// @formatter:off
		if (mRoute.getId() == 11L) {
			if (gTrip.getDirectionId() == 0) { mTrip.setHeadsignString("Niagara-On-The-Lk", gTrip.getDirectionId()); return; }
			if (gTrip.getDirectionId() == 1) { mTrip.setHeadsignString("St. Catharines", gTrip.getDirectionId()); return; }
		}
		// @formatter:on
		mTrip.setHeadsignString(cleanTripHeadsign(gTrip.getTripHeadsign()), gTrip.getDirectionId());
	}

	/**
	 * Resolves head-sign conflicts when two trips of the same route/direction
	 * merge: picks the canonical terminus label per (route, direction).
	 * Aborts the export on an unknown combination so the table gets extended.
	 *
	 * @return true when the conflict was resolved
	 */
	@Override
	public boolean mergeHeadsign(MTrip mTrip, MTrip mTripToMerge) {
		// @formatter:off
		if (mTrip.getRouteId() == 12L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(NIAGARA_FALLS, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(BURLINGTON_CARPOOL, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 16L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(HAMILTON, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 18L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(HAMILTON, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(UNION, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 19L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(OAKVILLE, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(FINCH, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 21L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(MILTON, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(UNION, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 25L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(U_OF_WATERLOO, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(YORK_U, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 27L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(MILTON, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(FINCH, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 29L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(GUELPH_CENTRAL, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(COOKSVILLE, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 30L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(KITCHENER, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 31L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(U_OF_GUELPH, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(UNION, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 32L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(BRAMPTON, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(YORK_MILLS, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 33L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(U_OF_GUELPH, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(YORK_MILLS, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 36L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(BRAMPTON, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(YORK_MILLS, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 37L) {
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(ORANGEVILLE, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 38L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(YORK_MILLS, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(BOLTON, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 40L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(HAMILTON, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(RICHMOND_HILL_CENTER, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 45L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(STREETSVILLE, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(YORK_U, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 46L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(OAKVILLE, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(YORK_U, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 47L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(HAMILTON, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(YORK_U, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 48L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(U_OF_GUELPH, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(YORK_U, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 51L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(YORK_U, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString("Pickering", mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 52L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(YORK_U, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(OSHAWA, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 54L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(YORK_U, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(MT_JOY, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 61L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(UNION, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 63L) {
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(KING_CITY, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 65L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(UNION, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(EAST_GWILLIMBURY, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 66L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(YORKDALE, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(NEWMARKET, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 67L) {
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(WOODBINE_404, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 68L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(NEWMARKET, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(BARRIE, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 70L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(UNIONVILLE, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(UXBRIDGE, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 71L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(UNION, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(UXBRIDGE, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 81L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(WHITBY, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(BEAVERTON, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 90L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(UNION, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(NEWCASTLE, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 91L) {
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(NEWCASTLE, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 92L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(YORKDALE, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(OSHAWA, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 93L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(SCARBORO, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(UOIT_D_C, mTrip.getHeadsignId()); return true; }
		} else if (mTrip.getRouteId() == 96L) {
			if (mTrip.getHeadsignId() == 0) { mTrip.setHeadsignString(FINCH, mTrip.getHeadsignId()); return true; }
			if (mTrip.getHeadsignId() == 1) { mTrip.setHeadsignString(OSHAWA, mTrip.getHeadsignId()); return true; }
		}
		// @formatter:on
		System.out.printf("\n%s: Unexpected trips to merge: %s AND %s!\n", mTrip.getRouteId(), mTrip, mTripToMerge);
		System.exit(-1);
		return false;
	}

	// "12 - " style route-number prefix at the start of a head-sign.
	private static final Pattern STARTS_WITH_RSN = Pattern.compile("(^[0-9]{2,3}([A-Z]?)(\\s+)\\- )", Pattern.CASE_INSENSITIVE);
	private static final String STARTS_WITH_RSN_REPLACEMENT = "$2$3";

	private static final Pattern STATION = Pattern.compile("((^|\\W){1}(station|sta|stn)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
	private static final String STATION_REPLACEMENT = "$2$4";

	private static final Pattern PARK_AND_RIDE = Pattern.compile("((^|\\W){1}(park & ride)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
	private static final String PARK_AND_RIDE_REPLACEMENT = "$2P&R$4";

	private static final Pattern UNIVERSITY = Pattern.compile("((^|\\W){1}(university)(\\W|$){1})", Pattern.CASE_INSENSITIVE);
	private static final String UNIVERSITY_REPLACEMENT = "$2" + UNIVERSITY_SHORT + "$4";

	// Leading/trailing dash left over after other substitutions.
	private static final Pattern CLEAN_DASH = Pattern.compile("(^[\\s]*\\-[\\s]*|[\\s]*\\-[\\s]*$)", Pattern.CASE_INSENSITIVE);

	private static final Pattern BUS_TERMINAL = Pattern.compile("( bus loop| bus terminal| bus term[\\.]?| terminal| term[\\.]?)", Pattern.CASE_INSENSITIVE);

	/**
	 * Normalizes a GTFS trip head-sign: strips route-number prefixes, "GO",
	 * station/terminal words, abbreviates "university" / "park & ride",
	 * drops everything after a " - " separator, then applies the shared
	 * MTransit cleaners.
	 */
	@Override
	public String cleanTripHeadsign(String tripHeadsign) {
		tripHeadsign = STARTS_WITH_RSN.matcher(tripHeadsign).replaceAll(STARTS_WITH_RSN_REPLACEMENT);
		tripHeadsign = GO.matcher(tripHeadsign).replaceAll(GO_REPLACEMENT);
		tripHeadsign = STATION.matcher(tripHeadsign).replaceAll(STATION_REPLACEMENT);
		tripHeadsign = PARK_AND_RIDE.matcher(tripHeadsign).replaceAll(PARK_AND_RIDE_REPLACEMENT);
		tripHeadsign = UNIVERSITY.matcher(tripHeadsign).replaceAll(UNIVERSITY_REPLACEMENT);
		tripHeadsign = BUS_TERMINAL.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
		tripHeadsign = CLEAN_DASH.matcher(tripHeadsign).replaceAll(StringUtils.EMPTY);
		int indexOfDash = tripHeadsign.indexOf("- ");
		if (indexOfDash >= 0) {
			tripHeadsign = tripHeadsign.substring(0, indexOfDash);
		}
		tripHeadsign = CleanUtils.CLEAN_AND.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AND_REPLACEMENT);
		tripHeadsign = CleanUtils.CLEAN_AT.matcher(tripHeadsign).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
		tripHeadsign = CleanUtils.cleanSlashes(tripHeadsign);
		tripHeadsign = CleanUtils.cleanStreetTypes(tripHeadsign);
		tripHeadsign = CleanUtils.cleanNumbers(tripHeadsign);
		return CleanUtils.cleanLabel(tripHeadsign);
	}

	private static final Pattern GO = Pattern.compile("(^|\\s){1}(go)($|\\s){1}", Pattern.CASE_INSENSITIVE);
	private static final String GO_REPLACEMENT = " ";

	private static final Pattern VIA = Pattern.compile("(^|\\s){1}(via)($|\\s){1}", Pattern.CASE_INSENSITIVE);
	private static final String VIA_REPLACEMENT = " ";

	// "St." / "Ave." etc. abbreviation dots.
	private static final Pattern POINT = Pattern.compile("((^|\\W){1}(st|ave|blvd|hwy|rd|dr)\\.(\\W|$){1})", Pattern.CASE_INSENSITIVE);
	private static final String POINT_REPLACEMENT = "$2$3$4";

	// "N." / "S." etc. compass abbreviation dots.
	private static final Pattern DIRECTION = Pattern.compile("((^|\\W){1}(s|n|e|w)\\.(\\W|$){1})", Pattern.CASE_INSENSITIVE);
	private static final String DIRECTION_REPLACEMENT = "$2$3$4";

	/**
	 * Normalizes a GTFS stop name: strips "via"/"GO", removes abbreviation
	 * dots, then applies the shared MTransit cleaners.
	 */
	@Override
	public String cleanStopName(String gStopName) {
		gStopName = CleanUtils.CLEAN_AT.matcher(gStopName).replaceAll(CleanUtils.CLEAN_AT_REPLACEMENT);
		gStopName = VIA.matcher(gStopName).replaceAll(VIA_REPLACEMENT);
		gStopName = GO.matcher(gStopName).replaceAll(GO_REPLACEMENT);
		gStopName = POINT.matcher(gStopName).replaceAll(POINT_REPLACEMENT);
		gStopName = DIRECTION.matcher(gStopName).replaceAll(DIRECTION_REPLACEMENT);
		gStopName = CleanUtils.cleanStreetTypes(gStopName);
		gStopName = CleanUtils.cleanNumbers(gStopName);
		return CleanUtils.cleanLabel(gStopName);
	}

	// Alphabetic GTFS stop codes (mostly GO rail stations) mapped to stable numeric stop IDs.
	private static final String SID_UN = "UN";
	private static final int UN_SID = 9021;
	private static final String SID_EX = "EX";
	private static final int EX_SID = 9022;
	private static final String SID_MI = "MI";
	private static final int MI_SID = 9031;
	private static final String SID_LO = "LO";
	private static final int LO_SID = 9033;
	private static final String SID_DA = "DA";
	private static final int DA_SID = 9061;
	private static final String SID_SC = "SC";
	private static final int SC_SID = 9062;
	private static final String SID_EG = "EG";
	private static final int EG_SID = 9063;
	private static final String SID_GU = "GU";
	private static final int GU_SID = 9081;
	private static final String SID_RO = "RO";
	private static final int RO_SID = 9091;
	private static final String SID_PO = "PO";
	private static final int PO_SID = 9111;
	private static final String SID_CL = "CL";
	private static final int CL_SID = 9121;
	private static final String SID_OA = "OA";
	private static final int OA_SID = 9131;
	private static final String SID_BO = "BO";
	private static final int BO_SID = 9141;
	private static final String SID_AP = "AP";
	private static final int AP_SID = 9151;
	private static final String SID_BU = "BU";
	private static final int BU_SID = 9161;
	private static final String SID_AL = "AL";
	private static final int AL_SID = 9171;
	private static final String SID_PIN = "PIN";
	private static final int PIN_SID = 9911;
	private static final String SID_AJ = "AJ";
	private static final int AJ_SID = 9921;
	private static final String SID_WH = "WH";
	private static final int WH_SID = 9939;
	private static final String SID_OS = "OS";
	private static final int OS_SID = 9941;
	private static final String SID_BL = "BL";
	private static final int BL_SID = 9023;
	private static final String SID_KP = "KP";
	private static final int KP_SID = 9032;
	private static final String SID_WE = "WE";
	private static final int WE_SID = 9041;
	private static final String SID_ET = "ET";
	private static final int ET_SID = 9042;
	private static final String SID_OR = "OR";
	private static final int OR_SID = 9051;
	private static final String SID_OL = "OL";
	private static final int OL_SID = 9052;
	private static final String SID_AG = "AG";
	private static final int AG_SID = 9071;
	private static final String SID_DI = "DI";
	private static final int DI_SID = 9113;
	private static final String SID_CO = "CO";
	private static final int CO_SID = 9114;
	private static final String SID_ER = "ER";
	private static final int ER_SID = 9123;
	private static final String SID_HA = "HA";
	private static final int HA_SID = 9181;
	private static final String SID_YO = "YO";
	private static final int YO_SID = 9191;
	private static final String SID_SR = "SR";
	private static final int SR_SID = 9211;
	private static final String SID_ME = "ME";
	private static final int ME_SID = 9221;
	private static final String SID_LS = "LS";
	private static final int LS_SID = 9231;
	private static final String SID_ML = "ML";
	private static final int ML_SID = 9241;
	private static final String SID_KI = "KI";
	private static final int KI_SID = 9271;
	private static final String SID_MA = "MA";
	private static final int MA_SID = 9311;
	private static final String SID_BE = "BE";
	private static final int BE_SID = 9321;
	private static final String SID_BR = "BR";
	private static final int BR_SID = 9331;
	private static final String SID_MO = "MO";
	private static final int MO_SID = 9341;
	private static final String SID_GE = "GE";
	private static final int GE_SID = 9351;
	private static final String SID_AC = "AC";
	private static final int AC_SID = 9371;
	private static final String SID_GL = "GL";
	private static final int GL_SID = 9391;
	private static final String SID_EA = "EA";
	private static final int EA_SID = 9441;
	private static final String SID_LA = "LA";
	private static final int LA_SID = 9601;
	private static final String SID_RI = "RI";
	private static final int RI_SID = 9612;
	private static final String SID_MP = "MP";
	private static final int MP_SID = 9613;
	private static final String SID_RU = "RU";
	private static final int RU_SID = 9614;
	private static final String SID_KC = "KC";
	private static final int KC_SID = 9621;
	private static final String SID_AU = "AU";
	private static final int AU_SID = 9631;
	private static final String SID_NE = "NE";
	private static final int NE_SID = 9641;
	private static final String SID_BD = "BD";
	private static final int BD_SID = 9651;
	private static final String SID_BA = "BA";
	private static final int BA_SID = 9681;
	private static final String SID_AD = "AD";
	private static final int AD_SID = 9691;
	private static final String SID_MK = "MK";
	private static final int MK_SID = 9701;
	private static final String SID_UI = "UI";
	private static final int UI_SID = 9712;
	private static final String SID_MR = "MR";
	private static final int MR_SID = 9721;
	private static final String SID_CE = "CE";
	private static final int CE_SID = 9722;
	private static final String SID_MJ = "MJ";
	private static final int MJ_SID = 9731;
	private static final String SID_ST = "ST";
	private static final int ST_SID = 9741;
	private static final String SID_LI = "LI";
	private static final int LI_SID = 9742;
	private static final String SID_KE = "KE";
	private static final int KE_SID = 9771;
	private static final String SID_JAMES_STR = "JAMES STR";
	private static final int JAMES_STR_SID = 100001;
	private static final String SID_USBT = "USBT";
	private static final int USBT_SID = 52;
	private static final String SID_NI = "NI";
	private static final int NI_SID = 100003;
	private static final String SID_PA = "PA";
	private static final int PA_SID = 311;
	private static final String SID_SCTH = "SCTH";
	private static final int SCTH_SID = 100005;

	/**
	 * Maps a GTFS stop ID to a numeric MTransit stop ID: numeric IDs pass
	 * through (via super), alphabetic station codes use the table above, and
	 * anything unknown aborts the export so the table gets extended.
	 */
	@Override
	public int getStopId(GStop gStop) {
		if (!Utils.isDigitsOnly(gStop.getStopId())) {
			if (SID_UN.equals(gStop.getStopId())) { return UN_SID;
			} else if (SID_EX.equals(gStop.getStopId())) { return EX_SID;
			} else if (SID_MI.equals(gStop.getStopId())) { return MI_SID;
			} else if (SID_LO.equals(gStop.getStopId())) { return LO_SID;
			} else if (SID_DA.equals(gStop.getStopId())) { return DA_SID;
			} else if (SID_SC.equals(gStop.getStopId())) { return SC_SID;
			} else if (SID_EG.equals(gStop.getStopId())) { return EG_SID;
			} else if (SID_GU.equals(gStop.getStopId())) { return GU_SID;
			} else if (SID_RO.equals(gStop.getStopId())) { return RO_SID;
			} else if (SID_PO.equals(gStop.getStopId())) { return PO_SID;
			} else if (SID_CL.equals(gStop.getStopId())) { return CL_SID;
			} else if (SID_OA.equals(gStop.getStopId())) { return OA_SID;
			} else if (SID_BO.equals(gStop.getStopId())) { return BO_SID;
			} else if (SID_AP.equals(gStop.getStopId())) { return AP_SID;
			} else if (SID_BU.equals(gStop.getStopId())) { return BU_SID;
			} else if (SID_AL.equals(gStop.getStopId())) { return AL_SID;
			} else if (SID_PIN.equals(gStop.getStopId())) { return PIN_SID;
			} else if (SID_AJ.equals(gStop.getStopId())) { return AJ_SID;
			} else if (SID_WH.equals(gStop.getStopId())) { return WH_SID;
			} else if (SID_OS.equals(gStop.getStopId())) { return OS_SID;
			} else if (SID_BL.equals(gStop.getStopId())) { return BL_SID;
			} else if (SID_KP.equals(gStop.getStopId())) { return KP_SID;
			} else if (SID_WE.equals(gStop.getStopId())) { return WE_SID;
			} else if (SID_ET.equals(gStop.getStopId())) { return ET_SID;
			} else if (SID_OR.equals(gStop.getStopId())) { return OR_SID;
			} else if (SID_OL.equals(gStop.getStopId())) { return OL_SID;
			} else if (SID_AG.equals(gStop.getStopId())) { return AG_SID;
			} else if (SID_DI.equals(gStop.getStopId())) { return DI_SID;
			} else if (SID_CO.equals(gStop.getStopId())) { return CO_SID;
			} else if (SID_ER.equals(gStop.getStopId())) { return ER_SID;
			} else if (SID_HA.equals(gStop.getStopId())) { return HA_SID;
			} else if (SID_YO.equals(gStop.getStopId())) { return YO_SID;
			} else if (SID_SR.equals(gStop.getStopId())) { return SR_SID;
			} else if (SID_ME.equals(gStop.getStopId())) { return ME_SID;
			} else if (SID_LS.equals(gStop.getStopId())) { return LS_SID;
			} else if (SID_ML.equals(gStop.getStopId())) { return ML_SID;
			} else if (SID_KI.equals(gStop.getStopId())) { return KI_SID;
			} else if (SID_MA.equals(gStop.getStopId())) { return MA_SID;
			} else if (SID_BE.equals(gStop.getStopId())) { return BE_SID;
			} else if (SID_BR.equals(gStop.getStopId())) { return BR_SID;
			} else if (SID_MO.equals(gStop.getStopId())) { return MO_SID;
			} else if (SID_GE.equals(gStop.getStopId())) { return GE_SID;
			} else if (SID_AC.equals(gStop.getStopId())) { return AC_SID;
			} else if (SID_GL.equals(gStop.getStopId())) { return GL_SID;
			} else if (SID_EA.equals(gStop.getStopId())) { return EA_SID;
			} else if (SID_LA.equals(gStop.getStopId())) { return LA_SID;
			} else if (SID_RI.equals(gStop.getStopId())) { return RI_SID;
			} else if (SID_MP.equals(gStop.getStopId())) { return MP_SID;
			} else if (SID_RU.equals(gStop.getStopId())) { return RU_SID;
			} else if (SID_KC.equals(gStop.getStopId())) { return KC_SID;
			} else if (SID_AU.equals(gStop.getStopId())) { return AU_SID;
			} else if (SID_NE.equals(gStop.getStopId())) { return NE_SID;
			} else if (SID_BD.equals(gStop.getStopId())) { return BD_SID;
			} else if (SID_BA.equals(gStop.getStopId())) { return BA_SID;
			} else if (SID_AD.equals(gStop.getStopId())) { return AD_SID;
			} else if (SID_MK.equals(gStop.getStopId())) { return MK_SID;
			} else if (SID_UI.equals(gStop.getStopId())) { return UI_SID;
			} else if (SID_MR.equals(gStop.getStopId())) { return MR_SID;
			} else if (SID_CE.equals(gStop.getStopId())) { return CE_SID;
			} else if (SID_MJ.equals(gStop.getStopId())) { return MJ_SID;
			} else if (SID_ST.equals(gStop.getStopId())) { return ST_SID;
			} else if (SID_LI.equals(gStop.getStopId())) { return LI_SID;
			} else if (SID_KE.equals(gStop.getStopId())) { return KE_SID;
			} else if (SID_JAMES_STR.equals(gStop.getStopId())) { return JAMES_STR_SID;
			} else if (SID_USBT.equals(gStop.getStopId())) { return USBT_SID;
			} else if (SID_NI.equals(gStop.getStopId())) { return NI_SID;
			} else if (SID_PA.equals(gStop.getStopId())) { return PA_SID;
			} else if (SID_SCTH.equals(gStop.getStopId())) { return SCTH_SID;
			} else {
				System.out.println("Unexpected stop ID " + gStop);
				System.exit(-1);
				return -1;
			}
		}
		return super.getStopId(gStop);
	}
}
package org.pentaho.di.trans.steps.synchronizeaftermerge;

import java.sql.BatchUpdateException;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.pentaho.di.core.Const;
import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseBatchException;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;

/**
 * Performs an insert/update/delete depending on the value of a field.
 *
 * <p>For each incoming row the value of the configured "operation order" field is compared
 * against the configured insert/update/delete marker values, and the matching SQL statement
 * is executed against the target table. Supports JDBC batch mode (with commit counting per
 * prepared statement / table), per-row savepoints for databases that support error handling
 * on batch updates (see {@code data.specialErrorHandling}), dynamic table names taken from
 * an input field, and routing of failed rows to the step's error handling.
 *
 * @author Samatar
 * @since 13-10-2008
 */
public class SynchronizeAfterMerge extends BaseStep implements StepInterface {

  // i18n message bundle anchor class.
  private static Class<?> PKG = SynchronizeAfterMergeMeta.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

  // Step configuration (table name, key/update fields, operation markers, commit size, ...).
  private SynchronizeAfterMergeMeta meta;
  // Per-execution state: DB connection, prepared statements, row metas, batch buffer, counters.
  private SynchronizeAfterMergeData data;

  /**
   * Standard Kettle step constructor; simply delegates to {@link BaseStep}.
   */
  public SynchronizeAfterMerge(StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans) {
    super(stepMeta, stepDataInterface, copyNr, transMeta, trans);
  }

  /**
   * Processes a single row: reads the operation field and performs the corresponding
   * INSERT, UPDATE or DELETE against the target table.
   *
   * <p>Responsibilities visible in this method:
   * <ul>
   *   <li>resolving a dynamic table name when {@code meta.istablenameInField()} is set,
   *       and lazily preparing per-table statements in {@code data.preparedStatements};</li>
   *   <li>optionally looking up the existing row first ({@code meta.isPerformLookup()}) to
   *       skip updates whose values are unchanged;</li>
   *   <li>setting a JDBC savepoint before each row when {@code data.specialErrorHandling}
   *       is on, and releasing / rolling back to it afterwards;</li>
   *   <li>maintaining a commit counter per statement key and committing every
   *       {@code data.commitSize} rows (executing the batch first in batch mode);</li>
   *   <li>buffering rows in {@code data.batchBuffer} while in batch mode and flushing them
   *       to the output once a commit made them safe, or routing them to the error stream
   *       via {@link #processBatchException(String, int[], List)} on batch failure.</li>
   * </ul>
   *
   * @param row the input row; field positions were cached in {@code processRow} on the first row
   * @throws KettleException if the operation field is empty, the dynamic table name is missing,
   *         a lookup finds no matching row, or any database error occurs outside error handling
   */
  private synchronized void lookupValues(Object[] row) throws KettleException {
    // get operation for the current
    // do we insert, update or delete ?
    String operation = data.inputRowMeta.getString(row, data.indexOfOperationOrderField);
    boolean rowIsSafe = false;
    boolean sendToErrorRow = false;
    String errorMessage = null;
    int[] updateCounts = null;
    List<Exception> exceptionsList = null;
    boolean batchProblem = false;
    data.lookupFailure = false;
    boolean performInsert = false;
    boolean performUpdate = false;
    boolean performDelete = false;
    boolean lineSkipped = false;
    try {
      if (operation == null)
        throw new KettleException(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.OperationFieldEmpty", meta.getOperationOrderField()));
      if (meta.istablenameInField()) {
        // get dynamic table name
        data.realTableName = data.inputRowMeta.getString(row, data.indexOfTableNameField);
        if (Const.isEmpty(data.realTableName))
          throw new KettleStepException("The name of the table is not specified!");
        data.realSchemaTable = data.db.getDatabaseMeta().getQuotedSchemaTableCombination(data.realSchemaName, data.realTableName);
      }
      if (operation.equals(data.insertValue)) {
        // directly insert data into table
        /*
         *
         * INSERT ROW
         *
         */
        if (log.isRowLevel())
          logRowlevel(BaseMessages.getString(PKG, "SynchronizeAfterMerge.InsertRow", row.toString())); //$NON-NLS-1$
        // The values to insert are those in the update section
        Object[] insertRowData = new Object[data.valuenrs.length];
        for (int i = 0; i < data.valuenrs.length; i++) {
          insertRowData[i] = row[data.valuenrs[i]];
        }
        if (meta.istablenameInField()) {
          // Lazily prepare one insert statement per (schema.table) combination.
          data.insertStatement = (PreparedStatement) data.preparedStatements.get(data.realSchemaTable + "insert");
          if (data.insertStatement == null) {
            String sql = data.db.getInsertStatement(data.realSchemaName, data.realTableName, data.insertRowMeta);
            if (log.isDebug()) logDebug("Preparation of the insert SQL statement: " + sql);
            data.insertStatement = data.db.prepareSQL(sql);
            data.preparedStatements.put(data.realSchemaTable + "insert", data.insertStatement);
          }
        }
        // For PG & GP, we add a savepoint before the row.
        // Then revert to the savepoint afterwards... (not a transaction, so hopefully still fast)
        if (data.specialErrorHandling) {
          data.savepoint = data.db.setSavepoint();
        }
        // Set the values on the prepared statement...
        data.db.setValues(data.insertRowMeta, insertRowData, data.insertStatement);
        data.db.insertRow(data.insertStatement, data.batchMode);
        performInsert = true;
        incrementLinesOutput();
        if (log.isRowLevel()) logRowlevel("Written row: " + data.insertRowMeta.getString(insertRowData));
      } else {
        // UPDATE or DELETE path: first build the key (WHERE clause) values.
        Object[] lookupRow = new Object[data.keynrs.length];
        int lookupIndex = 0;
        for (int i = 0; i < meta.getKeyStream().length; i++) {
          if (data.keynrs[i] >= 0) {
            lookupRow[lookupIndex] = row[data.keynrs[i]];
            lookupIndex++;
          }
          // Second key field is only present for BETWEEN conditions.
          if (data.keynrs2[i] >= 0) {
            lookupRow[lookupIndex] = row[data.keynrs2[i]];
            lookupIndex++;
          }
        }
        boolean updateorDelete = false;
        if (meta.isPerformLookup()) {
          // LOOKUP
          if (meta.istablenameInField()) {
            // Prepare Lookup statement
            data.lookupStatement = (PreparedStatement) data.preparedStatements.get(data.realSchemaTable + "lookup");
            if (data.lookupStatement == null) {
              String sql = getLookupStatement(data.inputRowMeta);
              if (log.isDebug()) logDebug("Preparating SQL for insert: " + sql);
              data.lookupStatement = data.db.prepareSQL(sql);
              data.preparedStatements.put(data.realSchemaTable + "lookup", data.lookupStatement);
            }
          }
          data.db.setValues(data.lookupParameterRowMeta, lookupRow, data.lookupStatement);
          if (log.isRowLevel())
            logRowlevel(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.ValuesSetForLookup", data.lookupParameterRowMeta.getString(lookupRow))); //$NON-NLS-1$
          Object[] add = data.db.getLookup(data.lookupStatement);
          incrementLinesInput();
          if (add == null) {
            // nothing was found:
            // Cache the "key not found" message + field-name list once; reused for error rows.
            if (data.stringErrorKeyNotFound == null) {
              data.stringErrorKeyNotFound = BaseMessages.getString(PKG, "SynchronizeAfterMerge.Exception.KeyCouldNotFound") + data.lookupParameterRowMeta.getString(lookupRow);
              data.stringFieldnames = "";
              for (int i = 0; i < data.lookupParameterRowMeta.size(); i++) {
                if (i > 0) data.stringFieldnames += ", ";
                data.stringFieldnames += data.lookupParameterRowMeta.getValueMeta(i).getName();
              }
            }
            data.lookupFailure = true;
            throw new KettleDatabaseException(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Exception.KeyCouldNotFound", data.lookupParameterRowMeta.getString(lookupRow)));
          } else {
            if (log.isRowLevel())
              logRowlevel(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.FoundRowForUpdate", data.insertRowMeta.getString(row))); //$NON-NLS-1$
            // Compare incoming values with the looked-up row; only update when something changed.
            for (int i = 0; i < data.valuenrs.length; i++) {
              if (meta.getUpdate()[i].booleanValue()) {
                ValueMetaInterface valueMeta = data.inputRowMeta.getValueMeta(data.valuenrs[i]);
                ValueMetaInterface retMeta = data.db.getReturnRowMeta().getValueMeta(i);
                Object rowvalue = row[data.valuenrs[i]];
                Object retvalue = add[i];
                if (valueMeta.compare(rowvalue, retMeta, retvalue) != 0) {
                  updateorDelete = true;
                }
              }
            }
          }
        } // end if perform lookup
        if (operation.equals(data.updateValue)) {
          if (!meta.isPerformLookup() || updateorDelete) {
            // UPDATE :
            if (meta.istablenameInField()) {
              data.updateStatement = (PreparedStatement) data.preparedStatements.get(data.realSchemaTable + "update");
              if (data.updateStatement == null) {
                String sql = getUpdateStatement(data.inputRowMeta);
                data.updateStatement = data.db.prepareSQL(sql);
                data.preparedStatements.put(data.realSchemaTable + "update", data.updateStatement);
                if (log.isDebug()) logDebug("Preparation of the Update SQL statement : " + sql);
              }
            }
            // Create the update row...
            Object[] updateRow = new Object[data.updateParameterRowMeta.size()];
            int j = 0;
            for (int i = 0; i < data.valuenrs.length; i++) {
              if (meta.getUpdate()[i].booleanValue()) {
                updateRow[j] = row[data.valuenrs[i]]; // the setters
                j++;
              }
            }
            // add the where clause parameters, they are exactly the same for lookup and update
            for (int i = 0; i < lookupRow.length; i++) {
              updateRow[j + i] = lookupRow[i];
            }
            // For PG & GP, we add a savepoint before the row.
            // Then revert to the savepoint afterwards... (not a transaction, so hopefully still fast)
            if (data.specialErrorHandling) {
              data.savepoint = data.db.setSavepoint();
            }
            data.db.setValues(data.updateParameterRowMeta, updateRow, data.updateStatement);
            if (log.isRowLevel())
              logRowlevel(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.SetValuesForUpdate", data.updateParameterRowMeta.getString(updateRow), data.inputRowMeta.getString(row)));
            // Database.insertRow is used for execution here as well (it executes/batches the statement).
            data.db.insertRow(data.updateStatement, data.batchMode);
            performUpdate = true;
            incrementLinesUpdated();
          } // end if operation update
          else {
            // Lookup found no difference: nothing to update for this row.
            incrementLinesSkipped();
            lineSkipped = true;
          }
        } else if (operation.equals(data.deleteValue)) {
          // DELETE
          if (meta.istablenameInField()) {
            data.deleteStatement = (PreparedStatement) data.preparedStatements.get(data.realSchemaTable + "delete");
            if (data.deleteStatement == null) {
              String sql = getDeleteStatement(data.inputRowMeta);
              data.deleteStatement = data.db.prepareSQL(sql);
              data.preparedStatements.put(data.realSchemaTable + "delete", data.deleteStatement);
              if (log.isDebug()) logDebug("Preparation of the Delete SQL statement : " + sql);
            }
          }
          Object[] deleteRow = new Object[data.deleteParameterRowMeta.size()];
          int deleteIndex = 0;
          for (int i = 0; i < meta.getKeyStream().length; i++) {
            if (data.keynrs[i] >= 0) {
              deleteRow[deleteIndex] = row[data.keynrs[i]];
              deleteIndex++;
            }
            if (data.keynrs2[i] >= 0) {
              deleteRow[deleteIndex] = row[data.keynrs2[i]];
              deleteIndex++;
            }
          }
          // For PG & GP, we add a savepoint before the row.
          // Then revert to the savepoint afterwards... (not a transaction, so hopefully still fast)
          if (data.specialErrorHandling) {
            data.savepoint = data.db.setSavepoint();
          }
          data.db.setValues(data.deleteParameterRowMeta, deleteRow, data.deleteStatement);
          if (log.isRowLevel())
            logRowlevel(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.SetValuesForDelete", data.deleteParameterRowMeta.getString(deleteRow), data.inputRowMeta.getString(row))); //$NON-NLS-1$
          data.db.insertRow(data.deleteStatement, data.batchMode);
          performDelete = true;
          // NOTE(review): deletes are counted via incrementLinesUpdated() — presumably intentional
          // (no dedicated "lines deleted" counter in this step); confirm against step metrics docs.
          incrementLinesUpdated();
        } // endif operation delete
        else {
          // Operation value matched neither update nor delete marker: skip the row.
          incrementLinesSkipped();
          lineSkipped = true;
        }
      } // endif operation insert

      // If we skip a line we need to empty the buffer and skip the line in question.
      // The skipped line is never added to the buffer!
      if (performInsert || performUpdate || performDelete || (data.batchBuffer.size() > 0 && lineSkipped)) {
        // Get a commit counter per prepared statement to keep track of separate tables, etc.
        String tableName = data.realTableName;
        if (performInsert) tableName += "insert";
        else if (performUpdate) tableName += "update";
        // NOTE(review): this 'if' is not chained with 'else' like the two above; harmless as long
        // as performInsert/performUpdate/performDelete are mutually exclusive (they are set in
        // mutually exclusive branches above), but worth confirming the intent.
        if (performDelete) tableName += "delete";
        Integer commitCounter = data.commitCounterMap.get(tableName);
        if (commitCounter == null) commitCounter = Integer.valueOf(0);
        data.commitCounterMap.put(tableName, Integer.valueOf(commitCounter.intValue() + 1));
        // Release the savepoint if needed
        if (data.specialErrorHandling) {
          data.db.releaseSavepoint(data.savepoint);
        }
        // Perform a commit if needed
        if (commitCounter > 0 && (commitCounter % data.commitSize) == 0) {
          if (data.batchMode) {
            // Flush the pending JDBC batch for whichever statement this row used, then commit.
            try {
              if (performInsert) {
                data.insertStatement.executeBatch();
                data.db.commit();
                data.insertStatement.clearBatch();
              } else if (performUpdate) {
                data.updateStatement.executeBatch();
                data.db.commit();
                data.updateStatement.clearBatch();
              } else if (performDelete) {
                data.deleteStatement.executeBatch();
                data.db.commit();
                data.deleteStatement.clearBatch();
              }
            } catch (BatchUpdateException ex) {
              // Wrap the JDBC batch failure, preserving per-row update counts and the full
              // SQLException chain so processBatchException can route individual rows.
              KettleDatabaseBatchException kdbe = new KettleDatabaseBatchException(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Error.UpdatingBatch"), ex);
              kdbe.setUpdateCounts(ex.getUpdateCounts());
              List<Exception> exceptions = new ArrayList<Exception>();
              // 'seed' the loop with the root exception
              SQLException nextException = ex;
              do {
                exceptions.add(nextException);
                // while current exception has next exception, add to list
              } while ((nextException = nextException.getNextException()) != null);
              kdbe.setExceptionsList(exceptions);
              throw kdbe;
            } catch (SQLException ex) {
              throw new KettleDatabaseException(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Error.InsertingRow"), ex);
            } catch (Exception ex) {
              throw new KettleDatabaseException("Unexpected error inserting row", ex);
            }
          } else {
            // insertRow normal commit
            data.db.commit();
          }
          // Clear the batch/commit counter...
          data.commitCounterMap.put(tableName, Integer.valueOf(0));
          rowIsSafe = true;
        } else {
          rowIsSafe = false;
        }
      }
    } catch (KettleDatabaseBatchException be) {
      // A batch flush failed: remember the details for per-row error routing below.
      errorMessage = be.toString();
      batchProblem = true;
      sendToErrorRow = true;
      updateCounts = be.getUpdateCounts();
      exceptionsList = be.getExceptionsList();
      if (data.insertStatement != null) data.db.clearBatch(data.insertStatement);
      if (data.updateStatement != null) data.db.clearBatch(data.updateStatement);
      if (data.deleteStatement != null) data.db.clearBatch(data.deleteStatement);
      if (getStepMeta().isDoingErrorHandling()) {
        // Commit what did succeed; failing rows go to the error stream later.
        data.db.commit(true);
      } else {
        data.db.rollback();
        StringBuffer msg = new StringBuffer("Error batch inserting rows into table [" + data.realTableName + "].");
        msg.append(Const.CR);
        msg.append("Errors encountered (first 10):").append(Const.CR);
        for (int x = 0; x < be.getExceptionsList().size() && x < 10; x++) {
          Exception exception = be.getExceptionsList().get(x);
          if (exception.getMessage() != null) msg.append(exception.getMessage()).append(Const.CR);
        }
        throw new KettleException(msg.toString(), be);
      }
    } catch (KettleDatabaseException dbe) {
      if (getStepMeta().isDoingErrorHandling()) {
        if (log.isRowLevel()) {
          logRowlevel("Written row to error handling : " + getInputRowMeta().getString(row));
        }
        // Undo just this row via the savepoint, then hand the row to the error stream.
        if (data.specialErrorHandling) {
          data.db.rollback(data.savepoint);
          data.db.releaseSavepoint(data.savepoint);
        }
        sendToErrorRow = true;
        errorMessage = dbe.toString();
      } else {
        setErrors(getErrors() + 1);
        data.db.rollback();
        throw new KettleException("Error inserting row into table [" + data.realTableName + "] with values: " + data.inputRowMeta.getString(row), dbe);
      }
    }

    if (data.batchMode) {
      if (sendToErrorRow) {
        if (batchProblem) {
          // The whole buffered batch failed; decide row-by-row what goes forward vs. to error.
          data.batchBuffer.add(row);
          processBatchException(errorMessage, updateCounts, exceptionsList);
        } else {
          // Simply add this row to the error row
          putError(data.inputRowMeta, row, 1L, errorMessage, null, "SUYNC002");
        }
      } else {
        if (!lineSkipped) {
          data.batchBuffer.add(row);
        }
        if (rowIsSafe) // A commit was done and the rows are all safe (no error)
        {
          for (int i = 0; i < data.batchBuffer.size(); i++) {
            Object[] rowb = (Object[]) data.batchBuffer.get(i);
            putRow(data.outputRowMeta, rowb);
            incrementLinesOutput();
          }
          // Clear the buffer
          data.batchBuffer.clear();
        }
        // Don't forget to pass this line to the following steps
        if (lineSkipped) {
          putRow(data.outputRowMeta, row);
        }
      }
    } else {
      if (sendToErrorRow) {
        // Non-batch mode: error rows are emitted immediately, with a richer message on lookup failure.
        if (data.lookupFailure)
          putError(data.inputRowMeta, row, 1, data.stringErrorKeyNotFound, data.stringFieldnames, "SUYNC001");
        else
          putError(data.inputRowMeta, row, 1, errorMessage, null, "SUYNC001");
      }
    }
  }

  /**
   * Distributes the rows of a failed JDBC batch between the output stream and the error stream.
   *
   * <p>When per-row {@code updateCounts} are available, rows with a positive count are passed
   * forward and the rest are sent to the error stream, using the matching {@link SQLException}
   * from {@code exceptionsList} as the message when one exists. When the driver provides no
   * update counts, every buffered row is treated as an error row. The batch buffer is cleared
   * in both cases.
   *
   * @param errorMessage   fallback error message when no per-row exception is available
   * @param updateCounts   per-row results from {@link BatchUpdateException#getUpdateCounts()}, or null
   * @param exceptionsList chained SQL exceptions collected from the batch failure
   * @throws KettleException if forwarding rows to the output or error stream fails
   */
  private void processBatchException(String errorMessage, int[] updateCounts, List<Exception> exceptionsList) throws KettleException {
    // There was an error with the commit
    // We should put all the failing rows out there...
    if (updateCounts != null) {
      int errNr = 0;
      for (int i = 0; i < updateCounts.length; i++) {
        Object[] row = (Object[]) data.batchBuffer.get(i);
        if (updateCounts[i] > 0) {
          // send the error forward
          putRow(data.outputRowMeta, row);
          incrementLinesOutput();
        } else {
          String exMessage = errorMessage;
          if (errNr < exceptionsList.size()) {
            SQLException se = (SQLException) exceptionsList.get(errNr);
            errNr++;
            exMessage = se.toString();
          }
          putError(data.outputRowMeta, row, 1L, exMessage, null, "SUYNC002");
        }
      }
    } else {
      // If we don't have update counts, it probably means the DB doesn't support it.
      // In this case we don't have a choice but to consider all inserted rows to be error rows.
      for (int i = 0; i < data.batchBuffer.size(); i++) {
        Object[] row = (Object[]) data.batchBuffer.get(i);
        putError(data.outputRowMeta, row, 1L, errorMessage, null, "SUYNC003");
      }
    }
    // Clear the buffer afterwards...
    data.batchBuffer.clear();
  }

  // Lookup certain fields in a table
  /**
   * Builds the parameterized SELECT used to look up the current values of the update columns
   * for a given key, and (as a side effect) populates {@code data.lookupParameterRowMeta}
   * (WHERE-clause parameter metadata) and {@code data.lookupReturnRowMeta} (selected columns).
   *
   * <p>BETWEEN conditions consume two parameters; IS NULL / IS NOT NULL consume none.
   *
   * @param rowMeta metadata of the input row, used to resolve stream field types
   * @return the SQL text with {@code ?} placeholders
   * @throws KettleDatabaseException declared for database metadata access
   */
  public String getLookupStatement(RowMetaInterface rowMeta) throws KettleDatabaseException {
    data.lookupParameterRowMeta = new RowMeta();
    data.lookupReturnRowMeta = new RowMeta();
    DatabaseMeta databaseMeta = meta.getDatabaseMeta();
    String sql = "SELECT ";
    for (int i = 0; i < meta.getUpdateLookup().length; i++) {
      if (i != 0) sql += ", ";
      sql += databaseMeta.quoteField(meta.getUpdateLookup()[i]);
      data.lookupReturnRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getUpdateStream()[i]).clone());
    }
    sql += " FROM " + data.realSchemaTable + " WHERE ";
    for (int i = 0; i < meta.getKeyLookup().length; i++) {
      if (i != 0) sql += " AND ";
      sql += databaseMeta.quoteField(meta.getKeyLookup()[i]);
      if ("BETWEEN".equalsIgnoreCase(meta.getKeyCondition()[i])) {
        sql += " BETWEEN ? AND ? ";
        data.lookupParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream()[i]));
        data.lookupParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream2()[i]));
      } else {
        if ("IS NULL".equalsIgnoreCase(meta.getKeyCondition()[i]) || "IS NOT NULL".equalsIgnoreCase(meta.getKeyCondition()[i])) {
          // No placeholder needed for null checks.
          sql += " " + meta.getKeyCondition()[i] + " ";
        } else {
          sql += " " + meta.getKeyCondition()[i] + " ? ";
          data.lookupParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream()[i]));
        }
      }
    }
    return sql;
  }

  // Lookup certain fields in a table
  /**
   * Builds the parameterized UPDATE statement for the configured update columns and key
   * conditions, and (as a side effect) populates {@code data.updateParameterRowMeta} with
   * the SET parameters followed by the WHERE parameters — the same ordering
   * {@code lookupValues} uses when filling the update row.
   *
   * @param rowMeta metadata of the input row, used to resolve stream field types
   * @return the SQL text with {@code ?} placeholders
   * @throws KettleDatabaseException declared for database metadata access
   */
  public String getUpdateStatement(RowMetaInterface rowMeta) throws KettleDatabaseException {
    DatabaseMeta databaseMeta = meta.getDatabaseMeta();
    data.updateParameterRowMeta = new RowMeta();
    String sql = "UPDATE " + data.realSchemaTable + Const.CR;
    sql += "SET ";
    boolean comma = false;
    for (int i = 0; i < meta.getUpdateLookup().length; i++) {
      // Only columns flagged for update appear in the SET clause.
      if (meta.getUpdate()[i].booleanValue()) {
        if (comma) sql += ", ";
        else comma = true;
        sql += databaseMeta.quoteField(meta.getUpdateLookup()[i]);
        sql += " = ?" + Const.CR;
        data.updateParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getUpdateStream()[i]).clone());
      }
    }
    sql += "WHERE ";
    for (int i = 0; i < meta.getKeyLookup().length; i++) {
      if (i != 0) sql += "AND ";
      sql += databaseMeta.quoteField(meta.getKeyLookup()[i]);
      if ("BETWEEN".equalsIgnoreCase(meta.getKeyCondition()[i])) {
        sql += " BETWEEN ? AND ? ";
        data.updateParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream()[i]));
        data.updateParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream2()[i]));
      } else if ("IS NULL".equalsIgnoreCase(meta.getKeyCondition()[i]) || "IS NOT NULL".equalsIgnoreCase(meta.getKeyCondition()[i])) {
        sql += " " + meta.getKeyCondition()[i] + " ";
      } else {
        sql += " " + meta.getKeyCondition()[i] + " ? ";
        data.updateParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream()[i]).clone());
      }
    }
    return sql;
  }

  /**
   * Builds the parameterized DELETE statement for the configured key conditions, and
   * (as a side effect) populates {@code data.deleteParameterRowMeta} with the WHERE
   * parameter metadata. Same condition handling as {@link #getLookupStatement}.
   *
   * @param rowMeta metadata of the input row, used to resolve stream field types
   * @return the SQL text with {@code ?} placeholders
   * @throws KettleDatabaseException declared for database metadata access
   */
  public String getDeleteStatement(RowMetaInterface rowMeta) throws KettleDatabaseException {
    DatabaseMeta databaseMeta = meta.getDatabaseMeta();
    data.deleteParameterRowMeta = new RowMeta();
    String sql = "DELETE FROM " + data.realSchemaTable + Const.CR;
    sql += "WHERE ";
    for (int i = 0; i < meta.getKeyLookup().length; i++) {
      if (i != 0) sql += "AND ";
      sql += databaseMeta.quoteField(meta.getKeyLookup()[i]);
      if ("BETWEEN".equalsIgnoreCase(meta.getKeyCondition()[i])) {
        sql += " BETWEEN ? AND ? ";
        data.deleteParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream()[i]));
        data.deleteParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream2()[i]));
      } else if ("IS NULL".equalsIgnoreCase(meta.getKeyCondition()[i]) || "IS NOT NULL".equalsIgnoreCase(meta.getKeyCondition()[i])) {
        sql += " " + meta.getKeyCondition()[i] + " ";
      } else {
        sql += " " + meta.getKeyCondition()[i] + " ? ";
        data.deleteParameterRowMeta.addValueMeta(rowMeta.searchValueMeta(meta.getKeyStream()[i]));
      }
    }
    return sql;
  }

  /**
   * Main step loop entry point: reads one input row, performs first-row initialization
   * (field index caching, operation marker resolution, statement preparation for a static
   * table name), then delegates the actual insert/update/delete to {@link #lookupValues}.
   *
   * @return true while more rows may follow; false when input is exhausted or a fatal error occurred
   * @throws KettleException on configuration errors detected during first-row initialization
   */
  public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
    meta = (SynchronizeAfterMergeMeta) smi;
    data = (SynchronizeAfterMergeData) sdi;

    Object[] r = getRow(); // Get row from input rowset & set row busy!
    if (r == null) // no more input to be expected...
    {
      setOutputDone();
      return false;
    }

    if (first) {
      first = false;
      data.outputRowMeta = getInputRowMeta().clone();
      data.inputRowMeta = data.outputRowMeta;
      meta.getFields(data.outputRowMeta, getStepname(), null, null, this);
      if (meta.istablenameInField()) {
        // Cache the position of the table name field
        if (data.indexOfTableNameField < 0) {
          data.indexOfTableNameField = data.inputRowMeta.indexOfValue(meta.gettablenameField());
          if (data.indexOfTableNameField < 0) {
            String message = "It was not possible to find table [" + meta.gettablenameField() + "] in the input fields.";
            logError(message);
            throw new KettleStepException(message);
          }
        }
      } else {
        // Static table name: resolve it (with variable substitution) once.
        data.realTableName = environmentSubstitute(meta.getTableName());
        if (Const.isEmpty(data.realTableName))
          throw new KettleStepException("The table name is not specified (or the input field is empty)");
        data.realSchemaTable = data.db.getDatabaseMeta().getQuotedSchemaTableCombination(data.realSchemaName, data.realTableName);
      }
      // Cache the position of the operation order field
      if (data.indexOfOperationOrderField < 0) {
        data.indexOfOperationOrderField = data.inputRowMeta.indexOfValue(meta.getOperationOrderField());
        if (data.indexOfOperationOrderField < 0) {
          String message = "It was not possible to find operation field [" + meta.getOperationOrderField() + "] in the input stream!";
          logError(message);
          throw new KettleStepException(message);
        }
      }
      // Resolve the marker values that identify each operation in the operation field.
      data.insertValue = environmentSubstitute(meta.getOrderInsert());
      data.updateValue = environmentSubstitute(meta.getOrderUpdate());
      data.deleteValue = environmentSubstitute(meta.getOrderDelete());

      data.insertRowMeta = new RowMeta();

      // lookup the values!
      if (log.isDebug())
        logDebug(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.CheckingRow") + r.toString()); //$NON-NLS-1$

      // Cache key field positions (second field only required for BETWEEN conditions).
      data.keynrs = new int[meta.getKeyStream().length];
      data.keynrs2 = new int[meta.getKeyStream().length];
      for (int i = 0; i < meta.getKeyStream().length; i++) {
        data.keynrs[i] = data.inputRowMeta.indexOfValue(meta.getKeyStream()[i]);
        if (data.keynrs[i] < 0 && // couldn't find field!
            !"IS NULL".equalsIgnoreCase(meta.getKeyCondition()[i]) && // No field needed! //$NON-NLS-1$
            !"IS NOT NULL".equalsIgnoreCase(meta.getKeyCondition()[i]) // No field needed! //$NON-NLS-1$
        ) {
          throw new KettleStepException(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Exception.FieldRequired", meta.getKeyStream()[i])); //$NON-NLS-1$ //$NON-NLS-2$
        }
        data.keynrs2[i] = data.inputRowMeta.indexOfValue(meta.getKeyStream2()[i]);
        if (data.keynrs2[i] < 0 && // couldn't find field!
            "BETWEEN".equalsIgnoreCase(meta.getKeyCondition()[i]) // 2 fields needed! //$NON-NLS-1$
        ) {
          throw new KettleStepException(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Exception.FieldRequired", meta.getKeyStream2()[i])); //$NON-NLS-1$ //$NON-NLS-2$
        }
        if (log.isDebug())
          logDebug(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.FieldHasDataNumbers", meta.getKeyStream()[i]) + data.keynrs[i]); //$NON-NLS-1$ //$NON-NLS-2$
      }

      // Insert the update fields: just names. Type doesn't matter!
      for (int i = 0; i < meta.getUpdateLookup().length; i++) {
        ValueMetaInterface insValue = data.insertRowMeta.searchValueMeta(meta.getUpdateLookup()[i]);
        if (insValue == null) // Don't add twice!
        {
          // we already checked that this value exists so it's probably safe to ignore lookup failure...
          ValueMetaInterface insertValue = data.inputRowMeta.searchValueMeta(meta.getUpdateStream()[i]).clone();
          insertValue.setName(meta.getUpdateLookup()[i]);
          data.insertRowMeta.addValueMeta(insertValue);
        } else {
          throw new KettleStepException(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Error.SameColumnInsertedTwice", insValue.getName()));
        }
      }

      // Cache the position of the compare fields in Row row
      data.valuenrs = new int[meta.getUpdateLookup().length];
      for (int i = 0; i < meta.getUpdateLookup().length; i++) {
        data.valuenrs[i] = data.inputRowMeta.indexOfValue(meta.getUpdateStream()[i]);
        if (data.valuenrs[i] < 0) // couldn't find field!
        {
          throw new KettleStepException(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Exception.FieldRequired", meta.getUpdateStream()[i])); //$NON-NLS-1$ //$NON-NLS-2$
        }
        if (log.isDebug())
          logDebug(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.FieldHasDataNumbers", meta.getUpdateStream()[i]) + data.valuenrs[i]); //$NON-NLS-1$ //$NON-NLS-2$
      }

      if (!meta.istablenameInField()) {
        // Static table name: all statements can be prepared once, up front.
        // Prepare Lookup statement
        if (meta.isPerformLookup()) {
          data.lookupStatement = (PreparedStatement) data.preparedStatements.get(data.realSchemaTable + "lookup");
          if (data.lookupStatement == null) {
            String sql = getLookupStatement(data.inputRowMeta);
            if (log.isDebug()) logDebug("Preparation of the lookup SQL statement : " + sql);
            data.lookupStatement = data.db.prepareSQL(sql);
            data.preparedStatements.put(data.realSchemaTable + "lookup", data.lookupStatement);
          }
        }
        // Prepare Insert statement
        data.insertStatement = (PreparedStatement) data.preparedStatements.get(data.realSchemaTable + "insert");
        if (data.insertStatement == null) {
          String sql = data.db.getInsertStatement(data.realSchemaName, data.realTableName, data.insertRowMeta);
          if (log.isDebug()) logDebug("Preparation of the Insert SQL statement : " + sql);
          data.insertStatement = data.db.prepareSQL(sql);
          data.preparedStatements.put(data.realSchemaTable + "insert", data.insertStatement);
        }
        // Prepare Update Statement
        data.updateStatement = (PreparedStatement) data.preparedStatements.get(data.realSchemaTable + "update");
        if (data.updateStatement == null) {
          String sql = getUpdateStatement(data.inputRowMeta);
          data.updateStatement = data.db.prepareSQL(sql);
          data.preparedStatements.put(data.realSchemaTable + "update", data.updateStatement);
          if (log.isDebug()) logDebug("Preparation of the Update SQL statement : " + sql);
        }
        // Prepare delete statement
        data.deleteStatement = (PreparedStatement) data.preparedStatements.get(data.realSchemaTable + "delete");
        if (data.deleteStatement == null) {
          String sql = getDeleteStatement(data.inputRowMeta);
          data.deleteStatement = data.db.prepareSQL(sql);
          data.preparedStatements.put(data.realSchemaTable + "delete", data.deleteStatement);
          if (log.isDebug()) logDebug("Preparation of the Delete SQL statement : " + sql);
        }
      }
    }// end if first

    try {
      lookupValues(r); // add new values to the row in rowset[0].
      if (!data.batchMode) {
        putRow(data.outputRowMeta, r); // copy row to output rowset(s);
      }
      if (checkFeedback(getLinesRead())) {
        if (log.isDetailed())
          logDetailed(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.LineNumber") + getLinesRead()); //$NON-NLS-1$
      }
    } catch (KettleException e) {
      logError("Because of an error, this step can't continue: ", e);
      setErrors(1);
      stopAll();
      setOutputDone(); // signal end to receiver(s)
      return false;
    }
    return true;
  }

  /**
   * Initializes the step: validates the table-name-in-field configuration, derives commit
   * size / batch mode, disables batch mode when per-row savepoint error handling is active
   * (they are mutually exclusive, see PDI-366), and opens the database connection
   * (shared/unique per transformation when configured).
   *
   * @return true on successful initialization, false otherwise
   */
  public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (SynchronizeAfterMergeMeta) smi;
    data = (SynchronizeAfterMergeData) sdi;
    if (super.init(smi, sdi)) {
      try {
        data.realSchemaName = environmentSubstitute(meta.getSchemaName());
        if (meta.istablenameInField()) {
          if (Const.isEmpty(meta.gettablenameField())) {
            logError(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.Error.TableFieldnameEmpty"));
            return false;
          }
        }
        data.databaseMeta = meta.getDatabaseMeta();
        data.commitSize = Integer.parseInt(environmentSubstitute("" + meta.getCommitSize()));
        data.batchMode = data.commitSize > 0 && meta.useBatchUpdate();
        // Batch updates are not supported on PostgreSQL (and look-a-likes) together with error handling (PDI-366)
        data.specialErrorHandling = getStepMeta().isDoingErrorHandling() && meta.getDatabaseMeta().supportsErrorHandlingOnBatchUpdates();
        if (data.batchMode && data.specialErrorHandling) {
          data.batchMode = false;
          if (log.isBasic()) logBasic(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.BatchModeDisabled"));
        }
        data.db = new Database(this, meta.getDatabaseMeta());
        data.db.shareVariablesWith(this);
        if (getTransMeta().isUsingUniqueConnections()) {
          // Serialize connects on the shared transformation connection.
          synchronized (getTrans()) {
            data.db.connect(getTrans().getThreadName(), getPartitionID());
          }
        } else {
          data.db.connect(getPartitionID());
        }
        data.db.setCommit(meta.getCommitSize());
        return true;
      } catch (KettleException ke) {
        logError(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.ErrorOccurredDuringStepInitialize") + ke.getMessage()); //$NON-NLS-1$
      }
    }
    return false;
  }

  /**
   * Cleans up at the end of the step: flushes and commits every prepared statement,
   * emits any rows still sitting in the batch buffer, handles a final batch failure
   * via {@link #processBatchException} (when error handling is on), and — in all cases —
   * signals output done, rolls back on error, and disconnects the database.
   */
  public void dispose(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (SynchronizeAfterMergeMeta) smi;
    data = (SynchronizeAfterMergeData) sdi;
    try {
      for (String schemaTable : data.preparedStatements.keySet()) {
        // Get a commit counter per prepared statement to keep track of separate tables, etc.
        Integer batchCounter = data.commitCounterMap.get(schemaTable);
        if (batchCounter == null) {
          batchCounter = 0;
        }
        PreparedStatement insertStatement = data.preparedStatements.get(schemaTable);
        data.db.emptyAndCommit(insertStatement, data.batchMode, batchCounter);
      }
      // Whatever survived the final commit is safe: pass it downstream.
      for (int i = 0; i < data.batchBuffer.size(); i++) {
        Object[] row = (Object[]) data.batchBuffer.get(i);
        putRow(data.outputRowMeta, row);
        incrementLinesOutput();
      }
      // Clear the buffer
      data.batchBuffer.clear();
    } catch (KettleDatabaseBatchException be) {
      if (getStepMeta().isDoingErrorHandling()) {
        // Right at the back we are experiencing a batch commit problem...
        // OK, we have the numbers...
        try {
          processBatchException(be.toString(), be.getUpdateCounts(), be.getExceptionsList());
        } catch (KettleException e) {
          logError("Unexpected error processing batch error", e);
          setErrors(1);
          stopAll();
        }
      } else {
        logError("Unexpected batch update error committing the database connection.", be);
        setErrors(1);
        stopAll();
      }
    } catch (Exception dbe) {
      logError("Unexpected error committing the database connection.", dbe);
      logError(Const.getStackTracker(dbe));
      setErrors(1);
      stopAll();
    } finally {
      setOutputDone();
      if (getErrors() > 0) {
        try {
          data.db.rollback();
        } catch (KettleDatabaseException e) {
          logError("Unexpected error rolling back the database connection.", e);
        }
      }
      if (data.db != null) {
        data.db.disconnect();
      }
      super.dispose(smi, sdi);
    }
  }
}
package org.vitrivr.cineast.core.run.filehandler; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.vitrivr.cineast.core.config.Config; import org.vitrivr.cineast.core.config.IdConfig; import org.vitrivr.cineast.core.data.SegmentContainer; import org.vitrivr.cineast.core.data.entities.MultimediaMetadataDescriptor; import org.vitrivr.cineast.core.data.entities.MultimediaObjectDescriptor; import org.vitrivr.cineast.core.data.entities.SegmentDescriptor; import org.vitrivr.cineast.core.db.MultimediaObjectLookup; import org.vitrivr.cineast.core.db.PersistencyWriterSupplier; import org.vitrivr.cineast.core.db.SegmentLookup; import org.vitrivr.cineast.core.db.dao.MultimediaMetadataWriter; import org.vitrivr.cineast.core.db.dao.MultimediaObjectWriter; import org.vitrivr.cineast.core.db.dao.SegmentWriter; import org.vitrivr.cineast.core.decode.general.Decoder; import org.vitrivr.cineast.core.features.extractor.DefaultExtractorInitializer; import org.vitrivr.cineast.core.idgenerator.ObjectIdGenerator; import org.vitrivr.cineast.core.metadata.MetadataExtractor; import org.vitrivr.cineast.core.run.ExtractionContextProvider; import org.vitrivr.cineast.core.runtime.ExtractionPipeline; import org.vitrivr.cineast.core.segmenter.general.Segmenter; import javax.activation.MimetypesFileTypeMap; import java.io.IOException; import java.nio.file.Path; import java.time.Duration; import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; /** * Abstract implementation of ExtractionFileHandler. This class should fit most media-types. However, * a concrete implementation must provide the correct decoder and segmenter classes. 
** * @see ExtractionFileHandler * @see org.vitrivr.cineast.core.run.ExtractionDispatcher * * @author rgasser * @version 1.0 * @created 14.01.17 */ public abstract class AbstractExtractionFileHandler<T> implements ExtractionFileHandler<T> { private static final Logger LOGGER = LogManager.getLogger(); /** MultimediaObjectWriter used to persist MultimediaObjectDescriptors created during the extraction. */ private final MultimediaObjectWriter objectWriter; /** SegmentWriter used to persist SegmentDescriptors created during the extraction. */ private final SegmentWriter segmentWriter; /** SegmentWriter used to persist SegmentDescriptors created during the extraction. */ private final MultimediaMetadataWriter metadataWriter; /** Deque of files that are being extracted. */ private final Deque<Path> files = new ArrayDeque<>(); /** ExtractionContextProvider that is used to configure the extraction. */ private final ExtractionContextProvider context; /** ExecutorService used to run the ExtractionPipeline and the Segmenter. */ private final ExecutorService executorService = Executors.newFixedThreadPool(2, r -> { Thread thread = new Thread(r); if (r instanceof ExtractionPipeline) { thread.setName("extraction-pipeline-thread"); } else if (r instanceof Segmenter) { thread.setName("extraction-segmenter-thread"); } return thread; }); /** List of MetadataExtractors that should be executed as part of the Extraction. */ private final List<MetadataExtractor> metadataExtractors; /** ExtractionPipeline that extracts features from the segments. */ private final ExtractionPipeline pipeline; /** Used to measure the duration of an extraction run. */ private long start_timestamp; /** Total number of files that were queued for processing. */ private long count_files = 0; /** Total number of files that were effectively processed. */ private long count_processed = 0; /** * Default constructor used to initialize the class. * * @param files List of files that should be extracted. 
* @param context ExtractionContextProvider that holds extraction specific configurations. */ public AbstractExtractionFileHandler(List<Path> files, ExtractionContextProvider context) throws IOException { /* Loads the files into the Deque. */ this.preprocess(files); /* Setup all the required helper classes. */ PersistencyWriterSupplier writerSupplier = context.persistencyWriter(); this.objectWriter = new MultimediaObjectWriter(writerSupplier.get(),10); this.segmentWriter = new SegmentWriter(writerSupplier.get(),10); this.metadataWriter = new MultimediaMetadataWriter(writerSupplier.get(),10); this.pipeline = new ExtractionPipeline(context, new DefaultExtractorInitializer(writerSupplier)); this.metadataExtractors = context.metadataExtractors(); this.context = context; } /** * When an object implementing interface <code>Runnable</code> is used * to create a thread, starting the thread causes the object's * <code>run</code> method to be called in that separately executing * thread. * <p> * The general contract of the method <code>run</code> is that it may * take any action whatsoever. * * @see Thread#run() */ @Override public void run() { /* Get start_timestamp-timestamp. */ this.start_timestamp = System.currentTimeMillis(); /* Create new, initial decoder and segmenter. */ Decoder<T> decoder = this.newDecoder(); Segmenter<T> segmenter = this.newSegmenter(); LOGGER.info("Starting image extraction with {} files.", this.files.size()); /* Submit the ExtractionPipeline to the executor-service. */ this.executorService.execute(pipeline); /* Instantiates some of the helper classes required by this class. */ final ObjectIdGenerator generator = this.context.objectIdGenerator(); Path path = null; /* Process every file in the list. */ while ((path = this.files.poll()) != null) { LOGGER.info("Processing file {}.", path); /* Create new MultimediaObjectDescriptor for new file. 
*/ MultimediaObjectDescriptor descriptor = MultimediaObjectDescriptor.newMultimediaObjectDescriptor(generator, path, context.sourceType()); if (!this.checkAndPersistMultimediaObject(descriptor)) continue; /* Pass file to decoder and decoder to segmenter. */ decoder.init(path, Config.sharedConfig().getDecoders().get(this.context.sourceType())); segmenter.init(decoder); /* Store objectId for further reference and initialize a new segment number. */ String objectId = descriptor.getObjectId(); int segmentNumber = 1; /* Pass segmenter (runnable) to executor service. */ this.executorService.execute(segmenter); /* Poll for output from the segmenter until that segmenter reports that no more output * is going to be generated. * * For every segment: Increase the segment-number, persist a segment descriptor and emit the segment * to the ExtractionPipeline! */ while (!segmenter.complete()) { try { SegmentContainer container = segmenter.getNext(); if (container != null) { /* Create segment-descriptor and try to persist it. */ SegmentDescriptor segmentDescriptor = SegmentDescriptor.newSegmentDescriptor(objectId, segmentNumber, container.getStart(), container.getEnd()); if (!this.checkAndPersistSegment(segmentDescriptor)) continue; /* Update container ID's. */ container.setId(segmentDescriptor.getSegmentId()); container.setSuperId(segmentDescriptor.getObjectId()); /* Emit container to extraction pipeline. */ this.pipeline.emit(container); /* Increase the segment number. */ segmentNumber+=1; } } catch (InterruptedException e) { LOGGER.log(Level.ERROR, "Thread was interrupted while the extraction process was running. Aborting..."); break; } } /* Extract metadata. */ this.extractAndPersistMetadata(path, objectId); /* Increment the files counter. */ this.count_processed += 1; /* Create new decoder pair for a new file if the decoder reports that it cannot be reused.*/ if (!decoder.canBeReused()) { decoder.close(); decoder = this.newDecoder(); } } /* Shutdown the FileHandler. 
*/ this.shutdown(); } /** * Stops the ExtractionPipeline and relinquishing all resources. */ private void shutdown() { try { /* Wait a few seconds for the ExtractionPipeline to submit remaining tasks to the queue. */ Thread.sleep(5000); /* Now shutdown the ExecutorService and tell the pipeline to stop. */ LOGGER.info("File decoding and segmenting complete! Shutting down..."); this.executorService.shutdown(); this.pipeline.stop(); /* Wait for pipeline to complete. */ LOGGER.info("Waiting for ExtractionPipeline to terminate! This could take a while."); this.executorService.awaitTermination(30, TimeUnit.MINUTES); } catch (InterruptedException e) { LOGGER.warn("Interrupted while waiting for ExtractionPipeline to shutdown!"); } finally { this.segmentWriter.close(); this.objectWriter.close(); Duration duration = Duration.ofMillis(System.currentTimeMillis()-this.start_timestamp); LOGGER.info("File extraction complete! It took me {} to extract {} out of {} files.", duration.toString(), this.count_processed, this.count_files); } } /** * Pre-processes the list of files by filtering unsupported types. The remaining files are * added to the * * ID's are generated b the ObjectIdGenerator configured. * * @return List of Pairs mapping the new objectId to the Path. */ private void preprocess(List<Path> files) throws IOException { final MimetypesFileTypeMap filetypes = new MimetypesFileTypeMap("mime.types"); final Decoder<T> decoder = this.newDecoder(); files.stream().filter( path -> { Set<String> supportedFiles = decoder.supportedFiles(); if (supportedFiles != null) { String type = filetypes.getContentType(path.toString()); return decoder.supportedFiles().contains(type); } else { return true; } }).forEach(this.files::push); this.count_files = this.files.size(); } /** * Persists a MultimediaObjectDescriptor and performs an existence check before, if so configured. 
Based * on the outcome of that persistence check and the settings in the ExtractionContext this method * returns true if object should be processed further or false otherwise. * * @param descriptor MultimediaObjectDescriptor that should be persisted. * @return true if object should be processed further or false if it should be skipped. */ private boolean checkAndPersistMultimediaObject(MultimediaObjectDescriptor descriptor) { if (descriptor.getObjectId() == null) { LOGGER.warn("The objectId that was generated for {} is empty. This object cannot be persisted and will be skipped.", descriptor.getPath()); return false; } MultimediaObjectLookup mlookup = new MultimediaObjectLookup(); if (this.context.existenceCheck() != IdConfig.ExistenceCheck.NOCHECK) { if (!mlookup.lookUpObjectById(descriptor.getObjectId()).exists()) { this.objectWriter.write(descriptor); return true; } else if (this.context.existenceCheck() == IdConfig.ExistenceCheck.CHECK_SKIP) { LOGGER.warn("MultimediaObject {} (name: {}) already exists. This object will be skipped.", descriptor.getObjectId(), descriptor.getName()); return false; } else { LOGGER.warn("MultimediaObject {} (name: {}) already exists. Proceeding anyway...", descriptor.getObjectId(), descriptor.getName()); return true; } } else { this.objectWriter.write(descriptor); return true; } } /** * Persists a SegmentDescriptor and performs an existence check before, if so configured. Based * on the outcome of that persistence check and the settings in the ExtractionContext this method * returns true if segment should be processed further or false otherwise. * * @param descriptor SegmentDescriptor that should be persisted. * @return true if segment should be processed further or false if it should be skipped. 
*/ private boolean checkAndPersistSegment(SegmentDescriptor descriptor) { if (this.context.existenceCheck() != IdConfig.ExistenceCheck.NOCHECK) { SegmentLookup slookup = new SegmentLookup(); if (!slookup.lookUpShot(descriptor.getSegmentId()).exists()) { this.segmentWriter.write(descriptor); return true; } else if (this.context.existenceCheck() == IdConfig.ExistenceCheck.CHECK_SKIP) { LOGGER.warn("Segment {} already exists. This segment will be skipped.", descriptor.getSegmentId()); return false; } else { LOGGER.warn("Segment {} already exists. Proceeding anyway...", descriptor.getSegmentId()); return true; } } else { this.segmentWriter.write(descriptor); return true; } } /** * * @param path * @param objectId */ private void extractAndPersistMetadata(Path path, String objectId) { for (MetadataExtractor extractor : this.metadataExtractors) { List<MultimediaMetadataDescriptor> metadata = extractor.extract(objectId, path); if (metadata.size() > 0) { this.metadataWriter.write(metadata); } } } }
package org.yeastrc.proxl.gen_import_xml.xquest.builder;

import java.io.File;
import java.math.BigInteger;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.yeastrc.fasta.FASTAEntry;
import org.yeastrc.fasta.FASTAHeader;
import org.yeastrc.fasta.FASTAReader;
import org.yeastrc.proxl_import.api.xml_dto.MatchedProteins;
import org.yeastrc.proxl_import.api.xml_dto.Peptide;
import org.yeastrc.proxl_import.api.xml_dto.Peptides;
import org.yeastrc.proxl_import.api.xml_dto.Protein;
import org.yeastrc.proxl_import.api.xml_dto.ProteinAnnotation;
import org.yeastrc.proxl_import.api.xml_dto.ProxlInput;
import org.yeastrc.proxl_import.api.xml_dto.ReportedPeptide;
import org.yeastrc.proxl_import.api.xml_dto.ReportedPeptides;
import org.yeastrc.taxonomy.main.GetTaxonomyId;

/**
 * Build the MatchedProteins section of the ProXL XML docs. This is done by finding all proteins in the FASTA
 * file that contains any of the peptide sequences found in the experiment.
 *
 * This is generalized enough to be usable by any pipeline
 *
 * @author mriffle
 *
 */
public class MatchedProteinsBuilder {

    public static MatchedProteinsBuilder getInstance() {
        return new MatchedProteinsBuilder();
    }

    /**
     * Add all target proteins from the FASTA file that contain any of the peptides found in the experiment
     * to the proxl xml document in the matched proteins section.
     *
     * @param proxlInputRoot Root element of the proxl XML document being built.
     * @param fastaFile FASTA file to scan for matching protein sequences.
     * @param decoyIdentifiers Substrings identifying decoy entries; matching entries are skipped.
     * @throws Exception If reading the FASTA file or building the XML fails.
     */
    public void buildMatchedProteins( ProxlInput proxlInputRoot, File fastaFile, Collection<String> decoyIdentifiers ) throws Exception {

        // get all distinct peptides found in this search
        Collection<String> allPeptideSequences = getDistinctPeptides( proxlInputRoot );

        // the proteins we've found
        Map<String, Collection<FastaProteinAnnotation>> proteins = getProteins( allPeptideSequences, fastaFile, decoyIdentifiers );

        // create the XML and add to root element
        buildAndAddMatchedProteinsToXML( proxlInputRoot, proteins );
    }

    /**
     * Do the work of building the matched peptides element and adding to proxl xml root.
     * Sequences with no annotations are skipped entirely.
     *
     * @param proxlInputRoot Root element the MatchedProteins section is attached to.
     * @param proteins Map of protein sequence to the annotations found for that sequence.
     * @throws Exception If building the XML fails.
     */
    private void buildAndAddMatchedProteinsToXML( ProxlInput proxlInputRoot, Map<String, Collection<FastaProteinAnnotation>> proteins ) throws Exception {

        MatchedProteins xmlMatchedProteins = new MatchedProteins();
        proxlInputRoot.setMatchedProteins( xmlMatchedProteins );

        for( Map.Entry<String, Collection<FastaProteinAnnotation>> entry : proteins.entrySet() ) {

            if( entry.getValue().isEmpty() ) continue;

            Protein xmlProtein = new Protein();
            xmlMatchedProteins.getProtein().add( xmlProtein );

            xmlProtein.setSequence( entry.getKey() );

            for( FastaProteinAnnotation anno : entry.getValue() ) {
                ProteinAnnotation xmlProteinAnnotation = new ProteinAnnotation();
                xmlProtein.getProteinAnnotation().add( xmlProteinAnnotation );

                xmlProteinAnnotation.setName( anno.getName() );

                if( anno.getDescription() != null )
                    xmlProteinAnnotation.setDescription( anno.getDescription() );

                if( anno.getTaxonomId() != null )
                    xmlProteinAnnotation.setNcbiTaxonomyId( BigInteger.valueOf( anno.getTaxonomId().longValue() ) );
            }
        }
    }

    /**
     * Get a map of the distinct target protein sequences mapped to a collection of target annotations for that sequence
     * from the given fasta file, where the sequence contains any of the supplied peptide sequences.
     *
     * NOTE(review): the supplied peptide sequences are not currently used to filter the FASTA entries —
     * every non-decoy entry is returned. Confirm whether containment filtering was intended here.
     *
     * @param allPeptideSequences Distinct peptide sequences found in the experiment.
     * @param fastaFile FASTA file to read.
     * @param decoyIdentifiers Substrings identifying decoy entries.
     * @return Map of protein sequence to the annotations parsed from its FASTA headers.
     * @throws Exception If reading the FASTA file fails.
     */
    private Map<String, Collection<FastaProteinAnnotation>> getProteins( Collection<String> allPeptideSequences, File fastaFile, Collection<String> decoyIdentifiers ) throws Exception {

        Map<String, Collection<FastaProteinAnnotation>> proteinAnnotations = new HashMap<>();

        FASTAReader fastaReader = null;

        try {
            fastaReader = FASTAReader.getInstance( fastaFile );

            for( FASTAEntry entry = fastaReader.readNext(); entry != null; entry = fastaReader.readNext() ) {

                if( isDecoyFastaEntry( entry, decoyIdentifiers ) )
                    continue;

                // an entry may carry several headers; collect an annotation per header
                for( FASTAHeader header : entry.getHeaders() ) {

                    if( !proteinAnnotations.containsKey( entry.getSequence() ) )
                        proteinAnnotations.put( entry.getSequence(), new HashSet<FastaProteinAnnotation>() );

                    FastaProteinAnnotation anno = new FastaProteinAnnotation();
                    anno.setName( header.getName() );
                    anno.setDescription( header.getDescription() );

                    Integer taxId = GetTaxonomyId.getInstance().getTaxonomyId( header.getName(), header.getDescription() );
                    if( taxId != null )
                        anno.setTaxonomId( taxId );

                    proteinAnnotations.get( entry.getSequence() ).add( anno );
                }
            }

        } finally {
            if( fastaReader != null ) {
                fastaReader.close();
                fastaReader = null;
            }
        }

        return proteinAnnotations;
    }

    /**
     * Return true if the supplied FASTA entry is a decoy entry. False otherwise.
     * An entry is considered a decoy if any of the supplied decoy identifiers are present
     * anywhere in the header line (case-insensitive).
     *
     * @param entry FASTA entry to test.
     * @param decoyIdentifiers Substrings identifying decoy entries.
     * @return true if the entry is a decoy.
     */
    private boolean isDecoyFastaEntry( FASTAEntry entry, Collection<String> decoyIdentifiers ) {

        // lower-case the header line once instead of once per decoy identifier
        String headerLine = entry.getHeaderLine().toLowerCase();

        for( String decoyId : decoyIdentifiers ) {
            if( headerLine.contains( decoyId.toLowerCase() ) )
                return true;
        }

        return false;
    }

    /**
     * Get all distinct peptides from a proxlxml doc's reported peptide section.
     *
     * @param proxlInputRoot Root element of the proxl XML document.
     * @return Distinct peptide sequences; empty if the document has no reported peptides.
     * @throws Exception Declared for API compatibility.
     */
    private Collection<String> getDistinctPeptides( ProxlInput proxlInputRoot ) throws Exception {

        Collection<String> allPeptideSequences = new HashSet<>();

        ReportedPeptides reportedPeptides = proxlInputRoot.getReportedPeptides();
        if ( reportedPeptides != null ) {

            List<ReportedPeptide> reportedPeptideList = reportedPeptides.getReportedPeptide();
            if ( reportedPeptideList != null && ( ! reportedPeptideList.isEmpty() ) ) {

                for ( ReportedPeptide reportedPeptide : reportedPeptideList ) {

                    // original code re-tested the outer reportedPeptides here; test the loop element instead
                    if ( reportedPeptide == null )
                        continue;

                    Peptides peptidesProxlXML = reportedPeptide.getPeptides();
                    if ( peptidesProxlXML == null )
                        continue;

                    List<Peptide> peptideProxlXMLList = peptidesProxlXML.getPeptide();
                    if ( peptideProxlXMLList != null && ( ! peptideProxlXMLList.isEmpty() ) ) {

                        for ( Peptide peptideProxlXML : peptideProxlXMLList ) {
                            allPeptideSequences.add( peptideProxlXML.getSequence() );
                        }
                    }
                }
            }
        }

        return allPeptideSequences;
    }

    /**
     * An annotation for a protein in a Fasta file. Two annotations are equal when
     * name, description and taxonomy id are all equal (nulls compare equal to nulls).
     *
     * @author mriffle
     *
     */
    private static class FastaProteinAnnotation {

        private String name;
        private String description;
        private Integer taxonomId;

        @Override
        public int hashCode() {
            // null-safe (original NPE'd on a null name with no description/taxonomy)
            return Objects.hash( name, description, taxonomId );
        }

        /**
         * Return true if name, description and taxonomy are all the same, false otherwise.
         * Original implementation NPE'd (and thus returned false via its catch-all) when
         * both descriptions or both taxonomy ids were null, so such annotations were
         * never deduplicated.
         */
        @Override
        public boolean equals( Object o ) {
            if( this == o ) return true;
            if( !( o instanceof FastaProteinAnnotation ) ) return false;

            FastaProteinAnnotation otherAnno = (FastaProteinAnnotation)o;
            return Objects.equals( this.name, otherAnno.name )
                && Objects.equals( this.description, otherAnno.description )
                && Objects.equals( this.taxonomId, otherAnno.taxonomId );
        }

        public String getName() {
            return name;
        }
        public void setName(String name) {
            this.name = name;
        }
        public String getDescription() {
            return description;
        }
        public void setDescription(String description) {
            this.description = description;
        }
        public Integer getTaxonomId() {
            return taxonomId;
        }
        public void setTaxonomId(Integer taxonomId) {
            this.taxonomId = taxonomId;
        }
    }
}
package com.lazerycode.jmeter.configuration; import com.lazerycode.jmeter.UtilityFunctions; import org.apache.maven.plugin.MojoExecutionException; import org.junit.Before; import org.junit.Test; import java.io.File; import java.net.URISyntaxException; import java.net.URL; import java.util.HashMap; import java.util.Map; import java.util.Set; import static org.hamcrest.core.Is.is; import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsNot.not; import static org.junit.Assert.assertThat; public class JMeterArgumentsArrayTest { private final URL testFile = this.getClass().getResource("/test.jmx"); private String testFilePath; @Before public void setTestFileAbsolutePath() throws URISyntaxException { testFilePath = new File(this.testFile.toURI()).getAbsolutePath(); } public String argumentsMapToString(Map<String, String> value, JMeterCommandLineArguments type) { String arguments = ""; Set<String> globalPropertySet = value.keySet(); for (String property : globalPropertySet) { arguments += type.getCommandLineArgument() + " "; arguments += property + "=" + value.get(property) + " "; } return arguments.trim(); } @Test(expected = MojoExecutionException.class) public void noTestSpecified() throws MojoExecutionException { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.buildArgumentsArray(); } @Test(expected = MojoExecutionException.class) public void propertiesFileNotSet() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.buildArgumentsArray(); } @Test(expected = MojoExecutionException.class) public void jMeterHomeNotSet() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.buildArgumentsArray(); } @Test public void validateDefaultCommandLineOutput() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); 
testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); assertThat(testArgs.getResultsFileName(), is(not(equalTo("")))); assertThat(testArgs.getResultsFileName(), is(not(equalTo(null)))); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/"))); } @Test public void validateJMeterCustomPropertiesFile() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); File testPropFile = new File("test.properties"); testArgs.setACustomPropertiesFile(testPropFile); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -q " + testPropFile.getAbsolutePath()))); } @Test public void validateJMeterChangeRootLogLevel() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); testArgs.setLogRootOverride("DEBUG"); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -L DEBUG"))); } @Test public void validateJMeterChangeIndividualLogLevels() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); Map<String, String> logLevels = new HashMap<String, String>(); logLevels.put("jorphan", "INFO"); logLevels.put("jmeter.UtilityFunctions", "DEBUG"); testArgs.setLogCategoriesOverrides(logLevels); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n 
-t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ " + argumentsMapToString(logLevels, JMeterCommandLineArguments.LOGLEVEL)))); } @Test public void validateJMeterSetProxyHost() throws Exception { ProxyConfiguration proxyConfiguration = new ProxyConfiguration(); proxyConfiguration.setHost("http://10.10.50.43"); proxyConfiguration.setPort(8080); JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); testArgs.setProxyConfig(proxyConfiguration); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -H http://10.10.50.43 -P 8080"))); } @Test public void validateJMeterSetProxyUsername() throws Exception { ProxyConfiguration proxyConfiguration = new ProxyConfiguration(); proxyConfiguration.setUsername("god"); JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); testArgs.setProxyConfig(proxyConfiguration); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -u god"))); } @Test public void validateJMeterSetProxyPassword() throws Exception { ProxyConfiguration proxyConfiguration = new ProxyConfiguration(); proxyConfiguration.setPassword("changeme"); JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); testArgs.setProxyConfig(proxyConfiguration); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -a changeme"))); } @Test public void 
validateSetNonProxyHosts() throws Exception { ProxyConfiguration proxyConfiguration = new ProxyConfiguration(); proxyConfiguration.setHostExclusions("localhost|*.lazerycode.com"); JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); testArgs.setProxyConfig(proxyConfiguration); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -N localhost|*.lazerycode.com"))); } @Test public void validateSetRemoteStop() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); testArgs.setRemoteStop(true); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -X"))); } @Test public void validateSetRemoteStartAll() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); testArgs.setRemoteStartAll(true); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -r"))); } @Test public void validateSetRemoteStart() throws Exception { JMeterArgumentsArray testArgs = new JMeterArgumentsArray(); testArgs.setTestFile(new File(this.testFile.toURI())); testArgs.setJMeterHome("target/jmeter/"); testArgs.setRemoteStart("server1, server2"); assertThat(UtilityFunctions.humanReadableCommandLineOutput(testArgs.buildArgumentsArray()), is(equalTo("-n -t " + testFilePath + " -l " + testArgs.getResultsFileName() + " -d target/jmeter/ -R server1, server2"))); } }
package com.typesafe.netty;

import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelMetadata;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalEventLoopGroup;
import org.reactivestreams.Subscriber;
import org.reactivestreams.tck.SubscriberWhiteboxVerification;
import org.reactivestreams.tck.TestEnvironment;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

/**
 * Reactive Streams TCK whitebox verification of {@code HandlerSubscriber}: each created
 * subscriber is installed into the pipeline of a fresh Netty {@code LocalChannel}, behind a
 * {@code ProbeHandler} that reports the observed signals to the TCK probe.
 */
public class HandlerSubscriberWhiteboxVerificationTest extends SubscriberWhiteboxVerification<Long> {

    // When true, createSubscriber() blocks until the channel pipeline is fully set up before
    // returning. Only enabled for the two overridden TCK tests below, which send elements the
    // subscriber has not requested yet. NOTE(review): the name presumably refers to
    // reactive-streams issue #277 — confirm against the upstream tracker.
    private boolean workAroundIssue277;

    public HandlerSubscriberWhiteboxVerificationTest() {
        super(new TestEnvironment());
    }

    // Fresh event loop per test method; see the comment below for why this is not per-class.
    private LocalEventLoopGroup eventLoop;

    // I tried making this before/after class, but encountered a strange error where after 32 publishers were created,
    // the following tests complained about the executor being shut down when I registered the channel. Though, it
    // doesn't happen if you create 32 publishers in a single test.
    @BeforeMethod
    public void startEventLoop() {
        // Reset the workaround flag so only the overriding tests opt in.
        workAroundIssue277 = false;
        eventLoop = new LocalEventLoopGroup();
    }

    @AfterMethod
    public void stopEventLoop() {
        eventLoop.shutdownGracefully();
        eventLoop = null;
    }

    /**
     * Creates the subscriber under test, wired into a new LocalChannel pipeline behind a
     * ProbeHandler that forwards the observed signals to the supplied TCK probe.
     */
    @Override
    public Subscriber<Long> createSubscriber(WhiteboxSubscriberProbe<Long> probe) {
        final HandlerSubscriber<Long> subscriber = new HandlerSubscriber<>(2, 4);
        final ProbeHandler<Long> probeHandler = new ProbeHandler<>(probe, Long.class);
        final LocalChannel channel = new LocalChannel() {
            // NOTE(review): the boolean passed to ChannelMetadata appears to be Netty's
            // hasDisconnect flag — confirm against the Netty API docs for the version in use.
            private final ChannelMetadata metadata = new ChannelMetadata(true);
            @Override
            public ChannelMetadata metadata() {
                return metadata;
            }
        };
        // Pipeline setup happens asynchronously on the event loop, after registration completes.
        // The probe handler must sit in front of the subscriber so it observes every signal.
        ChannelFuture future = eventLoop.register(channel).addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                channel.pipeline().addLast("probe", probeHandler);
                channel.pipeline().addLast("subscriber", subscriber);
            }
        });
        if (workAroundIssue277) {
            try {
                // Wait for the pipeline to be setup, so we're ready to receive elements even if they aren't requested,
                future.await();
            } catch (InterruptedException e) {
                throw new RuntimeException(e);
            }
        }
        return probeHandler.wrap(subscriber);
    }

    // This TCK test delivers onNext signals that were never requested, so the pipeline must be
    // fully registered before the test starts — enable the synchronous-setup workaround.
    @Override
    public void required_spec208_mustBePreparedToReceiveOnNextSignalsAfterHavingCalledSubscriptionCancel() throws Throwable {
        workAroundIssue277 = true;
        super.required_spec208_mustBePreparedToReceiveOnNextSignalsAfterHavingCalledSubscriptionCancel();
    }

    // Same reasoning as spec208 above: the element accounting requires the pipeline to exist
    // before the test begins signalling.
    @Override
    public void required_spec308_requestMustRegisterGivenNumberElementsToBeProduced() throws Throwable {
        workAroundIssue277 = true;
        super.required_spec308_requestMustRegisterGivenNumberElementsToBeProduced();
    }

    @Override
    public Long createElement(int element) {
        return (long) element;
    }
}
package am.userInterface;

import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.StringTokenizer;
import java.util.Vector;
import javax.swing.JMenu;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import am.GlobalStaticVariables;
import am.Utility;
import am.app.Core;
import am.app.mappingEngine.AbstractMatcher;
import am.app.mappingEngine.Alignment;
import am.app.mappingEngine.AlignmentSet;
import am.app.ontology.Node;
import am.app.ontology.Ontology;
import am.app.ontology.ontologyParser.TreeBuilder;
import am.userInterface.vertex.Vertex;
import am.userInterface.vertex.VertexDescriptionPane;
import am.userInterface.vertex.VertexLine;

/**
 * Canvas class is responsible for all the contents inside the canvas such as
 * displaying the tree, connecting the lines, mappingByUser nodes, highlighting the nodes.
 * This class implements mouse listener for mouse events such as mouse clicks,
 * and also implements action listener for menu items.
 *
 * @author ADVIS Laboratory
 * @version 12/5/2004
 */
public class Canvas extends VisualizationPanel implements MouseListener, ActionListener {

	private static final long serialVersionUID = -7531606228579063838L;

	// Main variables and Tree structure
	private double canvasHeight;   // height of the canvas
	private double canvasWidth;    // width of the canvas
	private double sourcePaneHeight; // computed height needed by the source (left) tree
	private double targetPaneHeight; // computed height needed by the target (right) tree
	//private int countStat = 0;
	private Vertex globalTreeRoot; // root of global (source) tree
	private Vertex localTreeRoot;  // root of local (target) tree
	private UI myUI;               // UI variable
	/** Reference to the core instance, it's set in the canvas constructor; we could also avoid keeping it,
	 * but then we should always get it via Core.getInstance(); */
	private Core core;

	// POPUP MENUS VARIABLES
	// right-click popup
	private JPopupMenu popup;        // popup menu
	private JMenuItem cancelPopup;   // cancel the right click pop up
	private JMenuItem desc;          // desc JMenuItem
	//private JMenuItem mappingInfo; // mappingByUser information of the node

	// mapping popup
	private JPopupMenu mappingPopup;       // mappingByUser popup menu
	private JMenuItem standardAlignment;   // mappingByUser create an exact 100% alignment
	private JMenu manualAlignment;
	private JMenuItem deleteAlignment;     // mappingByUser type menu
	private JMenuItem cancel;              // cancel the mappingByUser
	private JMenuItem exact;               // exact mappingByUser
	private JMenuItem other;
	private JMenuItem subset;              // subset mappingByUser
	private JMenuItem subsetComplete;      // subset complete mappingByUser
	private JMenuItem superset;            // superset mappingByUser
	private JMenuItem supersetComplete;    // superset complete mappingByUser
	private JMenuItem comparativeExact;    // comparativeExact mappingByUser RIGHT NOW I'm NOT CONSIDERING THIS
	private JMenuItem comparativeSubset;   // comparativeSubset mappingByUser RIGHT NOW I'm NOT CONSIDERING THIS
	private JMenuItem comparativeSuperset; // comparativeSuperset mappingByUser RIGHT NOW I'm NOT CONSIDERING THIS

	// Structures to manage selection and highlighting
	private ArrayList<Vertex> localClickedNodeList;  // this list does not contain the whole list of local selected nodes, but the ordered list of clickedNodes
	private ArrayList<Vertex> globalClickedNodeList; // this list does not contain the whole list of global selected nodes, but the ordered list of clickedNodes
	private ArrayList<Vertex> localNodesSelected;    // the local nodes which are selected (so all clicked and also others selected via shifting)
	private ArrayList<Vertex> globalNodesSelected;   // the global nodes which are selected (so all clicked and also others selected via shifting)
	private Vertex rightClickedNode; // right clicked node
	private Vertex displayedNode;    // the last vertex selected — one of the clicked nodes; being the last one clicked, it is the one displayed
	private Vector<VertexLine> selectedLines; // All global and local nodes to be highlighted, i.e. matched with any selected node; this set gets created and calculated only during matchings display
	//private int oldY; // the previous y location of left clicked node
	private boolean disableVisualization;
	private boolean smoMode; // true or false, depending whether the user is viewing the canvas in Selected Matchings Only mode.
	private boolean showLabel;     // render node labels
	private boolean showLocalName; // render node local names

	/*******************************************************************************************
	 * Default constructor for myCanvas class.
	 */
	public Canvas(JScrollPane s) {
		super(s);
		// do nothing
	}

	/**
	 * one argument constructor for myCanvas class.
	 * @param ui UI class
	 */
	@Deprecated
	public Canvas(UI ui){
		super(new JScrollPane());
		// initialize the global and local selected nodes vector
		globalNodesSelected = new ArrayList<Vertex>();
		localNodesSelected = new ArrayList<Vertex>();
		localClickedNodeList = new ArrayList<Vertex>();
		globalClickedNodeList = new ArrayList<Vertex>();
		// add the mouse listener
		addMouseListener(this);
		// assign the UI taken as argument to the myUI variable
		myUI = ui;
		// create pop up menu shown when the user right-clicks
		createPopupMenu();
		// create mappingByUser pop up menu shown when the user maps nodes
		createMappingPopupMenu();
		// Init Core
		core = Core.getInstance();
		// Now that we have more than one visualization system in the AgreementMaker, we need to check if the ontologies are loaded beforehand.
		if( core.sourceIsLoaded() ) setTree( core.getSourceOntology().getDeepRoot(), core.getSourceOntology(), core.getSourceOntology().getTreeCount() );
		if( core.targetIsLoaded() ) setTree( core.getTargetOntology().getDeepRoot(), core.getTargetOntology(), core.getTargetOntology().getTreeCount() );
		// get whether the user had SMO enabled
		disableVisualization = ui.getAppPreferences().getDisableVisualization();
		smoMode = ui.getAppPreferences().getSelectedMatchingsOnly();
		showLocalName = ui.getAppPreferences().getShowLocalname();
		showLabel = ui.getAppPreferences().getShowLabel();
		// repaint the canvas
		repaint();
	}

	/**
	 * This function implements the actionperformed.
* @param e MouseEvent */ public void actionPerformed(ActionEvent e) { // get the object which was clicked on Object obj = e.getSource(); if (obj == desc) { //TODO:Edit to work differently for OWL files Vertex node; node = getRightClickedNode(); if(node.getOntNode()==GlobalStaticVariables.XMLFILE){// this means XML file StringTokenizer st = new StringTokenizer(node.getDesc()); String descript=""; int maxChar = 50; while (st.hasMoreTokens()){ for(int i=0; i<maxChar && st.hasMoreTokens(); i++){ String tok = st.nextToken(); descript += tok + " "; i += tok.length(); } descript += '\n'; } JOptionPane.showMessageDialog(null,"Node Name:\n" +node.getName()+"\n\n"+"Description:\n" + descript+"\n\n","Node Info", JOptionPane.PLAIN_MESSAGE); }else{ JOptionPane.showMessageDialog(null,"Node Name:\n" +node.getName()+"\n\n"+"Description:\n" + node.getOWLDesc()+"\n\n","Node Info", JOptionPane.PLAIN_MESSAGE); } } // if the user clicked on any of the mappingByUser types if ( (obj == standardAlignment) || (obj == deleteAlignment) || (obj == exact) || (obj == subset) || (obj == subsetComplete) || (obj == superset) || (obj == supersetComplete) || (obj == comparativeExact) || (obj == comparativeSubset) || (obj == comparativeSuperset) || (obj == other)) { createManualAlignment(obj); clearAllSelections(); } if ((obj == cancel) || (obj == cancelPopup)){ //clearAllSelections(); } repaint(); } public void createManualAlignment(Object obj) { String relation = Alignment.EQUIVALENCE;; double sim = 0; boolean abort = false; if(obj == standardAlignment) { relation = Alignment.EQUIVALENCE; sim = 1; } else if(obj == deleteAlignment) { relation = Alignment.EQUIVALENCE; sim = 0; } else { boolean correct = false; while(!correct && !abort) { String x = JOptionPane.showInputDialog(null, "Insert the similarity value.\nInsert a number between 0 and 100 using only numeric digits.\n Warning: the similarity should be higher than the threshold value.\nIf not, the similarity matrix will be modified but the 
alignment won't be selected and visualized."); try { if(x == null) abort = true;//USER SELECTED CANCEL else { sim = Double.parseDouble(x); if(sim >= 0 && sim <= 100) { correct = true; sim = sim/100; } } } catch(Exception ex) {//WRONG INPUT, ASK INPUT AGAIN } } if(obj == other){ correct = false; while(!correct && !abort) { String x = JOptionPane.showInputDialog(null, "Insert the relation type:"); try { if(x == null) abort = true;//USER SELECTED CANCEL else { relation = x; correct = true; } } catch(Exception ex) {//WRONG INPUT, ASK INPUT AGAIN } } } else if (obj == exact) relation = Alignment.EQUIVALENCE; else if (obj == subset) relation = Alignment.SUBSET; else if (obj == subsetComplete) relation = Alignment.SUBSETCOMPLETE; else if (obj == superset) relation = Alignment.SUPERSET; else if (obj == supersetComplete) relation = Alignment.SUPERSETCOMPLETE; /* else if (obj == comparativeExact) map.setMappingType("Comparative exact"); else if (obj == comparativeSubset) map.setMappingType("Comparative subset"); else if (obj == comparativeSuperset) map.setMappingType("Comparative superset"); */ } if(!abort) { Vertex global; Vertex local; ArrayList<Alignment> alignments = new ArrayList<Alignment>(); Alignment align; for (int i =0; i < globalNodesSelected.size(); i++){ global = globalNodesSelected.get(i); if(!global.isFake()) { for(int j= 0; j < localNodesSelected.size();j++) { local = localNodesSelected.get(j); if(!local.isFake()) { align = new Alignment(global.getNode(), local.getNode(), sim, relation); if(!alignments.contains(align)) { alignments.add(align); } } } } } myUI.getControlPanel().userMatching(alignments); } } /** * This function returns the width of the canvas * @return canvasWidth width of canvas */ public double getCanvasWidth() { return this.canvasWidth; } /** * This function returns the global tree root * @return globalTreeRoot the global tree root */ public Vertex getGlobalTreeRoot() { return globalTreeRoot; } /** * This function returns the local tree root 
* @return localTreeRoot the local tree root */ public Vertex getLocalTreeRoot() { return localTreeRoot; } /** * This function sets the global tree root * * @param node global tree root of type Vertex */ public void setGlobalTreeRoot(Vertex node) { globalTreeRoot = node; } /** * This function sets the local tree root * * @param node local tree root of type Vertex */ public void setLocalTreeRoot(Vertex node) { localTreeRoot = node; } /** Set the visualization option */ public void setDisableVisualization( boolean disable ) { disableVisualization = disable; } /** Change whether we are in "Selected Matchings Only" (SMO) view mode. */ public void setSMO ( boolean smoEnabled ) { smoMode = smoEnabled; } /** Change whether we are showing labels and/or localnames. */ public boolean isShowLabel() { return showLabel; } public void setShowLabel(boolean showLabel) { this.showLabel = showLabel; } public boolean isShowLocalName() { return showLocalName; } public void setShowLocalName(boolean showLocalName) { this.showLocalName = showLocalName; } public String getVertexLabelAndOrName(Vertex v){ if(!v.isFake()){ Node n = v.getNode(); String middle = " || ";; if(showLabel && showLocalName) return n.getLocalName()+middle+n.getLabel(); else if(showLabel) return n.getLabel(); else if(showLocalName) return n.getLocalName(); else return ""; } else{ return v.getName(); } } /* Wrapper method. 
	 */
	public void setTree(TreeBuilder tb) {
		Vertex treeRoot = tb.getTreeRoot();
		Ontology o = tb.getOntology();
		setTree( treeRoot, o, tb.getTreeCount() );
	}

	/** Installs a tree root for the given ontology (global slot if source, local slot if target),
	 *  resizes the canvas accordingly, and repaints. */
	public void setTree( Vertex treeRoot, Ontology o, int totalNodes ) {
		if(o.isSource()) {
			setGlobalTreeRoot(treeRoot);
		}
		else {
			setLocalTreeRoot(treeRoot);
		}
		updateSize( treeRoot, o, totalNodes);
		repaint();
	}

	/** Recomputes the preferred canvas height (70 + 25 per node) for the given ontology's pane.
	 *  NOTE(review): the treeRoot parameter is unused here; height depends only on totalNodes. */
	private void updateSize( Vertex treeRoot, Ontology o, int totalNodes ) {
		//int totalNodes = tb.getTreeCount(); // number of nodes created in global tree
		Dimension dim;   // dimension of the panel
		double height;   // height of the canvas
		// get the dimension of the panel
		dim = getPreferredSize();
		// figure out what the canvas height should be
		height = 70+25*totalNodes;
		// Assign height to source or target to keep track
		if(o.isSource()){
			sourcePaneHeight = height;
		}
		else{
			targetPaneHeight = height;
		}
		// if the current tree height is smaller than the panel's height
		// set the new height to be height of the tree depending on the ontologies loaded
		// NOTE(review): the first three branches below all set the same dimension; only the
		// both-loaded branch differs (it takes the max of the two pane heights).
		if(dim.getHeight() > height){
			// if none of the ontologies loaded
			if(core.getSourceOntology() == null && core.getTargetOntology() == null){
				setPreferredSize(new Dimension((int)canvasWidth,(int)height));
			}
			// if only source is loaded
			if(core.getSourceOntology() != null && core.getTargetOntology() == null ) {
				setPreferredSize(new Dimension((int)canvasWidth,(int)height));
			}
			// if only target is loaded
			if(core.getSourceOntology() == null && core.getTargetOntology() != null ){
				setPreferredSize(new Dimension((int)canvasWidth,(int)height));
			}
			// if both loaded, then max of two is the height
			if(core.getSourceOntology() != null && core.getTargetOntology() != null){
				if(sourcePaneHeight > targetPaneHeight){
					setPreferredSize(new Dimension((int)canvasWidth,(int)sourcePaneHeight));
				}
				else{
					setPreferredSize(new Dimension((int)canvasWidth,(int)targetPaneHeight));
				}
			}
		}
		else {
			setPreferredSize(new Dimension((int)canvasWidth,(int)height));
		}
	}

	/**
	 * This function paints the background and displays
	 * global and local trees and mappings
	 * This function is fundamental, everytime there is change in the graphics
	 * the system has to invoke canvas.redisplay() (inherited by component)
	 * which invokes canvas.update() which invokes paint(Graphic g)
	 * In the system only use repaint() which invokes this method
	 *
	 * @param graphic of type Graphics
	 */
	public void paint(Graphics graphic) {
		super.paint(graphic);
		Dimension dim; // dimension of the canvas
		// get the dimension of the canvas
		dim = this.getSize();
		// get the width and height of the canvas
		canvasWidth = dim.getWidth();
		canvasHeight = dim.getHeight();
		// get the middle location of the canvas screen
		int middle = (int)canvasWidth/2;
		// paint the whole background by selecting color
		graphic.setColor(Colors.background);
		// Fill the whole screen (rectangle)
		graphic.fillRect(0,0,(int)canvasWidth,(int)canvasHeight);
		if ((!core.sourceIsLoaded() && !core.targetIsLoaded()) || disableVisualization) {
			// Nothing to display: just draw the empty split panes with their captions.
			graphic.setColor(Colors.dividers);
			// draw a dividing line
			graphic.fillRect(middle-2,0,2,(int)canvasHeight);
			// draw a horizontal line
			graphic.fillRect(0,19,(int)canvasWidth,2);
			graphic.setColor(Colors.foreground);
			graphic.setFont(new Font("Arial", Font.BOLD,12));
			// Label the divided screens
			graphic.drawString("Global (Source) Ontology", 10,15);
			graphic.drawString("Local (Target) Ontology",(int)(canvasWidth/2)+10, 15);
		}
		else {
			if (core.sourceIsLoaded())
				displayTree(graphic, true);//global
			if (core.targetIsLoaded())
				displayTree(graphic, false);//local
			if ((core.sourceIsLoaded() ) && (core.targetIsLoaded() )){
				selectedLines = new Vector<VertexLine>();//lines to be highlighted if user is not creating a manual matching
				// displayAllMatchings fills up selectedLines as a side effect, that's why we have
				// to invoke this method also in SMO mode.
				displayAllMatchings(graphic);
				// If the user has selected nodes in both trees, display red lines for the manual
				// mappings about to be created; otherwise highlight matched vertices.
				if(globalNodesSelected.size()>0 && localNodesSelected.size()>0) {
					drawManualRedLines(graphic);
				}else {
					displayHighlightedVertex(graphic);
				}
			}
		}
		this.revalidate();
	}

	/**
	 * This function displays the tree. This function is called from paint method.
	 *
	 * @param graphic Graphics
	 * @param isGlobal boolean value indicating if the tree is global
	 */
	@SuppressWarnings("unchecked")
	public void displayTree(Graphics graphic, boolean isGlobal) {
		Vertex treeRoot;
		int middle = (int)canvasWidth/2;
		int x, starting_X_Value;
		int y = 0;
		int oldY = 10;
		String name;
		int width, height, arcWidth, arcHeight;
		// int nodeType;
		graphic.setColor(Colors.dividers);
		// draw a dividing line
		graphic.fillRect(middle-2,0,2,(int)canvasHeight);
		// draw a horizontal line
		graphic.fillRect(0,19,(int)canvasWidth,2);
		graphic.setColor(Colors.foreground);
		graphic.setFont(new Font("Arial", Font.BOLD,12));
		// Label the divided screens
		graphic.drawString("Global (Source) Ontology", 10,15);
		graphic.drawString("Local (Target) Ontology",(int)(canvasWidth/2)+10, 15);
		graphic.setColor(Colors.foreground);
		graphic.setFont(new Font("Lucida S",Font.PLAIN,12));
		if (isGlobal == true) {
			// get the global root
			treeRoot = getGlobalTreeRoot();
			// if the file is the global ontology, start displaying the tree from x = 15
			starting_X_Value = 15;
		}
		else {
			// get the local root
			treeRoot = getLocalTreeRoot();
			// if the file is the local ontology, start displaying the tree from (width/2)+15
			starting_X_Value = (int)(canvasWidth/2)+15;
		}
		arcWidth = 10;
		arcHeight = 10;
		height = 20;
		// displaying the tree as text
		Vertex node;
		if( treeRoot != null )
		for (Enumeration<Vertex> e = treeRoot.preorderEnumeration(); e.hasMoreElements() ;) {
			// get the node
			node = e.nextElement();
			//System.out.println("Name: " +node.getName()+".");
			//System.out.println("Key: " + node.getID());
			//else
			//System.out.println("Desc: " +node.getDesc()+".");
			name = getVertexLabelAndOrName(node);
			x = starting_X_Value+(node.getLevel())*20; // indent 20px per tree level
			y = oldY +25;                              // 25px of vertical space per visible node
			width = 20+(name.length())*7;              // rough width from the label length
			// set the coordinate, width, height, arcwidth, archeight to the node (Vertex)
			node.setX(x);
			node.setX2(x+width);
			node.setY(y);
			node.setY2(y+height);
			node.setWidth(width);
			node.setHeight(height);
			node.setArcWidth(arcWidth);
			node.setArcHeight(arcHeight);
			//nodeType = node.getNodeType();
			if (node.isVisible() == true) {
				//System.out.println( node.getIsMappedByDef() + " " + mapByDefn );
				if (node.getIsSelected() == true) {
					// change the color to node selection color
					graphic.setColor(Colors.selected);
					graphic.fillRoundRect(node.getX(),node.getY(),node.getWidth(),node.getHeight(), node.getArcWidth(),node.getArcHeight());
				}
				// change the color to foreground color
				graphic.setColor(Colors.foreground);
				// draw a round rectangle resembling the node
				graphic.drawRoundRect(x,y,width,height, arcWidth, arcHeight);
				// display the node name inside the round rectangle
				graphic.setFont(new Font("Lucida Sans Regular", Font.PLAIN, 12));
				graphic.drawString(getVertexLabelAndOrName(node),x+5,y+15);
				// keep track of the previous y to display the next obj
				oldY = y;
			}
		}
		// display the lines
		displayLines(graphic, isGlobal);
	}

	/**
	 * This function displays the lines of the trees
	 *
	 * @param graphic of type Graphics
	 * @param isGlobal boolean value indicating if it is global
	 */
	@SuppressWarnings("unchecked")
	public void displayLines(Graphics graphic, boolean isGlobal) {
		Vertex root, node;
		Vertex lastChild, parent;
		int x1,y1,x2,y2;
		// get the root of the tree
		if (isGlobal == true)
			root = getGlobalTreeRoot();
		else root = getLocalTreeRoot();
		if( root != null )
		for (Enumeration<Vertex> e = root.preorderEnumeration(); e.hasMoreElements() ;) {
			// get the node
			node = e.nextElement();
			// if the node is visible then draw the lines
			if (node.isVisible() == true) {
				x1 = node.getX()-7;
				y1 = node.getY()+(node.getY2()-node.getY())/2;
				x2 = node.getX();
				y2 = node.getY()+(node.getY2()-node.getY())/2;
				// draw a horizontal line to the node
				graphic.drawLine(x1,y1 ,x2, y2);
				// draw the vertical line
				if (node.isLeaf() == false) {
					// but first find where the line has to be drawn up to
					// by finding the last child of the node
					lastChild = (Vertex) node.getLastChild();
					// draw oval to indicate that the node is expanded or collapsed
					graphic.drawOval(x1-4,y1-4,8,8);
					if (lastChild.isVisible() == true) {
						x2 = x1;
						y2 = lastChild.getY()+(lastChild.getY2()-lastChild.getY())/2;
						graphic.drawLine(x1,y1+4,x2,y2);
					}
					else {
						// place a | bar inside the circle to indicate that
						// the node can be expanded
						graphic.drawLine(x1,y1-4,x1,y1+4);
					}
				}
				if (node != root) {
					// this is a leaf node, so get the parent of this node.
					parent = (Vertex) node.getParent();
					x1 = parent.getX()-7;
					y1 = node.getY()+(node.getY2()-node.getY())/2;
					x2 = node.getX();
					y2 = y1;
					// draw the horizontal line
					graphic.drawLine(x1,y1,x2,y2);
				}
			} // end of if the node is visible
		} // end of enumeration
	}

	/**
	 * Scan the Matchers Instances to display all classes and properties alignmentSet
	 * display a matcher only if it's isShown();
	 */
	public void displayAllMatchings(Graphics g) {
		ArrayList<AbstractMatcher> alist = core.getMatcherInstances();
		if(alist != null) {
			Iterator<AbstractMatcher> it = alist.iterator();
			AbstractMatcher a = null;
			while(it.hasNext()) {
				a = it.next();
				if(a.isShown()) {
					if(a.areClassesAligned()) {
						displayAlignmentSet(g, a, a.getClassAlignmentSet());
						//a.getClassAlignmentSet().show();
					}
					if(a.arePropertiesAligned()) {
						displayAlignmentSet(g, a, a.getPropertyAlignmentSet());
					}
				}
			}
		}
	}

	/** Draws every alignment of the given set for the given matcher. */
	private void displayAlignmentSet(Graphics g, AbstractMatcher matcher, AlignmentSet<Alignment> aset) {
		if(aset != null) {
			Alignment a = null;
			for(int i = 0; i < aset.size(); i++) {
				a = aset.getAlignment(i);
				displayAlignment(g, matcher, a);
			}
		}
	}

	/** Draws one alignment: a line between every visible source vertex and every visible target
	 *  vertex of the two aligned entities. */
	private void displayAlignment(Graphics graphic, AbstractMatcher m, Alignment a) {
		Vertex source, target;
		ArrayList<Vertex> sourceVertexes = a.getEntity1().getVertexList();
		ArrayList<Vertex> targetVertexes = a.getEntity2().getVertexList();
		Iterator<Vertex> itsource = sourceVertexes.iterator();
		Iterator<Vertex> ittarget;
		while(itsource.hasNext()) {
			source = itsource.next();
			if(source.isVisible()) {
				ittarget = targetVertexes.iterator();
				while(ittarget.hasNext()) {
					target = ittarget.next();
					if(target.isVisible()) {
						displayLine(graphic,m, a, source, target);
					}
				}
			}
		}
	}

	/** Draws the mapping line between one source and one target vertex, records it in
	 *  selectedLines when exactly one endpoint is selected, and repaints both endpoint nodes. */
	private void displayLine(Graphics graphic, AbstractMatcher m, Alignment a, Vertex source, Vertex target) {
		// if needed, add the line to selected lines; we must do this both in SMO mode and normal mode
		VertexLine line = new VertexLine();
		line.source = source;
		line.target = target;
		line.alignment = a;
		// XOR: exactly one endpoint selected means the other one should be highlighted later.
		if( (source.getIsSelected() && !target.getIsSelected()) || (!source.getIsSelected() && target.getIsSelected()) ) {
			selectedLines.add(line);
		}
		// DRAW VERTEX AND MAPPING LINES; IF VERTICES ARE SELECTED (NOT HIGHLIGHTED) COLOR THEM AS SELECTED;
		// highlighted lines and vertices are not colored as selected here — that is done later scanning selectedLines
		if( !smoMode || (globalNodesSelected.size() ==0 && localNodesSelected.size() == 0)){
			Color linecolor = m.getColor();
			Color scolor = m.getColor();
			Color tcolor = m.getColor();
			if(target.getIsSelected()) {
				tcolor = Colors.selected;
			}
			if(source.getIsSelected()) {
				scolor = Colors.selected;
			}
			// DRAW LINE
			graphic.setColor(linecolor);
			int x1 = source.getX2();               // starting point of the line is the end of the left vertex
			int y1 = (source.getY()+source.getY2())/2; // from the middle of the left vertex
			int x2 = target.getX();                // ending point of the line is the beginning of the right vertex
			int y2 = (target.getY()+target.getY2())/2; // to the middle of the right vertex
			graphic.drawLine(x1,y1,x2,y2);
			// DRAW STRING ON LINE
			graphic.setFont(new Font("Arial Unicode MS", Font.PLAIN, 12));
			graphic.drawString(a.getRelation()+" "+Utility.getNoDecimalPercentFromDouble(a.getSimilarity()),(x1+x2)/2,((y1+y2)/2) -5);
			// FILL THE VERTEX NODE; this will cancel the name of the vertex and the shape so we have to rewrite both.
			// This colors the node if it's selected but it doesn't color it if it's highlighted.
			graphic.setColor(scolor);
			graphic.fillRoundRect(source.getX(),source.getY(),source.getWidth(),source.getHeight(), source.getArcWidth(),source.getArcHeight());
			graphic.setColor(tcolor);
			graphic.fillRoundRect(target.getX(),target.getY(),target.getWidth(),target.getHeight(), target.getArcWidth(),target.getArcHeight());
			// change the color to foreground color
			graphic.setColor(Colors.foreground);
			// Draw shape
			graphic.drawRoundRect(source.getX(),source.getY(),source.getWidth(),source.getHeight(), source.getArcWidth(),source.getArcHeight());
			graphic.drawRoundRect(target.getX(),target.getY(),target.getWidth(),target.getHeight(), target.getArcWidth(),target.getArcHeight());
			// display the node name inside the round rectangle
			graphic.drawString(getVertexLabelAndOrName(source),source.getX()+5,source.getY()+15);
			graphic.drawString(getVertexLabelAndOrName(target),target.getX()+5,target.getY()+15);
		}
	}

	/** Highlights every recorded selected line and the non-selected endpoint of each one. */
	public void displayHighlightedVertex(Graphics graphic) {
		// it colors the line and the not-selected node between the two
		VertexLine line;
		Vertex highlightedNode;
		Vertex source;
		Vertex target;
		Alignment a;
		for(int i = 0; i < selectedLines.size(); i++) {
			line = selectedLines.get(i);
			highlightedNode = line.getHighlightedNode();
			source = line.source;
			target = line.target;
			a = line.alignment;
			// HIGHLIGHT LINE
			graphic.setColor(Colors.selected);
			// DRAW LINE
			int x1 = source.getX2();               // starting point of the line is the end of the left vertex
			int y1 = (source.getY()+source.getY2())/2; // from the middle of the left vertex
			int x2 = target.getX();                // ending point of the line is the beginning of the right vertex
			int y2 = (target.getY()+target.getY2())/2; // to the middle of the right vertex
			graphic.drawLine(x1,y1,x2,y2);
			// DRAW STRING ON LINE
			graphic.setFont(new Font("Arial Unicode MS", Font.PLAIN, 12));
			graphic.drawString(a.getRelation()+" "+Utility.getNoDecimalPercentFromDouble(a.getSimilarity()),(x1+x2)/2,((y1+y2)/2) -5);
			// COLOR HIGHLIGHT VERTEX
			graphic.setColor(Colors.highlighted);
			graphic.fillRoundRect(highlightedNode.getX(),highlightedNode.getY(),highlightedNode.getWidth(),highlightedNode.getHeight(), highlightedNode.getArcWidth(),highlightedNode.getArcHeight());
			graphic.setColor(Colors.foreground);
			graphic.drawRoundRect(highlightedNode.getX(),highlightedNode.getY(),highlightedNode.getWidth(),highlightedNode.getHeight(), highlightedNode.getArcWidth(),highlightedNode.getArcHeight());
			graphic.setFont(new Font("Arial Unicode MS", Font.PLAIN, 12));
			graphic.drawString(getVertexLabelAndOrName(highlightedNode),highlightedNode.getX()+5,highlightedNode.getY()+15);
		}
	}

	/**
	 * THIS METHOD MANAGES THE CREATION OF MANUAL MAPPING RED LINES, but it doesn't show the popup.
	 * This function maps the global nodes with local nodes.
	 *
	 * @param graphic of type Graphics
	 */
	public void drawManualRedLines(Graphics graphic) {
		// There are 4 cases of mappingByUser (1-to-1, many-to-1, 1-to-many, and many-to-many)
		int x1,y1,x2,y2;
		Vertex global,local;
		int []x;
		int []y;
		global = null;
		local = null;
		graphic.setColor(Colors.lineColor);
		if ((globalNodesSelected.size() == 1) && (localNodesSelected.size() == 1)) {
			// ONE-TO-ONE MAPPING
			// get the local and global node selected
			global = globalNodesSelected.get(0);
			local = localNodesSelected.get(0);
			// get their location on their canvas
			x1 = global.getX2();
			y1 = (global.getY()+global.getY2())/2;
			x2 = local.getX();
			y2 = (local.getY()+local.getY2())/2;
			// draw the connecting line between the two nodes
			graphic.drawLine(x1,y1,x2,y2);
		}
		else if ((globalNodesSelected.size() > 1) && (localNodesSelected.size() > 1)) {
			// MANY-TO-MANY MAPPING: bracket each side with a vertical bar and join the bars.
			int [] globalX; // keeps track of global nodes x values
			int [] globalY; // keeps track of global nodes y values
			int [] localX;  // keeps track of local nodes x values
			int [] localY;  // keeps track of local nodes y values
			int minX1 = 99999999;
			int minY1 = 99999999;
			int maxX1 = 0;
			int maxY1 = 0;
			int minX2 = 99999999;
			int minY2 = 99999999;
			int maxX2 = 0;
			int maxY2 = 0;
			// initialize the arrays
			globalX = new int[globalNodesSelected.size()];
			globalY = new int[globalNodesSelected.size()];
			localX = new int[localNodesSelected.size()];
			localY = new int[localNodesSelected.size()];
			// for each global node selected, get the node and its location
			for (int i =0; i< globalNodesSelected.size(); i++) {
				// get the global node
				global = globalNodesSelected.get(i);
				// get the location of the global node
				x1 = global.getX2();
				y1 = (global.getY()+global.getY2())/2;
				// place all the x1's in the globalX vector
				globalX[i] = x1;
				// place all the y1's in the globalY vector
				globalY[i] = y1;
				// keep track of the max x1 and y1
				if (x1 > maxX1) maxX1 = x1;
				if (y1 > maxY1) maxY1 = y1;
				// keep track of the min x1 and y1
				if (x1 < minX1) minX1 = x1;
				if (y1 < minY1) minY1 = y1;
			}
			// for each local node selected, get the node and its location
			for (int i =0; i<localNodesSelected.size(); i++) {
				// get the local node
				local = localNodesSelected.get(i);
				// get the location of the local node
				x2 = local.getX();
				y2 = (local.getY()+local.getY2())/2;
				// place all the x2's in the localX vector
				localX[i] = x2;
				// place all the y2's in the localY vector
				localY[i] = y2;
				// keep track of the maximum x2 and y2
				if (x2 > maxX2) maxX2 = x2;
				if (y2 > maxY2) maxY2 = y2;
				// keep track of the minimum x2 and y2
				if (x2 < minX2) minX2 = x2;
				if (y2 < minY2) minY2 = y2;
			}
			// draw the horizontal line from the global node x location to the max
			// global node x location
			for (int j=0;j<globalNodesSelected.size();j++) {
				// draw the horizontal line
				graphic.drawLine(globalX[j],globalY[j],maxX1+20,globalY[j]);
			}
			// draw the horizontal line from the local node x location to the max
			// local node x location
			for (int j=0;j<localNodesSelected.size();j++) {
				// draw the horizontal line
				graphic.drawLine(localX[j],localY[j],minX2-20,localY[j]);
			}
			// draw the vertical line for global nodes
			graphic.drawLine(maxX1+20,minY1,maxX1+20,maxY1);
			// draw the vertical line for local nodes
			graphic.drawLine(minX2-20,minY2,minX2-20,maxY2);
			// draw the mappingByUser line between global nodes and local nodes
			graphic.drawLine(maxX1+20,(minY1+maxY1)/2, minX2-20,(minY2+maxY2)/2);
			// display the popup menu
			//mappingPopup.show(this,maxX1+((maxX1+minX2)/2),(minY1+maxY1)/2);
		}
		else if ((globalNodesSelected.size() >1) && (localNodesSelected.size() ==1)) {
			// MANY-TO-ONE MAPPING
			int minX=99999999;
			int minY=99999999;
			int maxX=0;
			int maxY=0;
			x = new int[globalNodesSelected.size()];
			y = new int[globalNodesSelected.size()];
			// first get the single local node
			local = localNodesSelected.get(0);
			// get the location of the local node
			x2 = local.getX();
			y2 = (local.getY()+local.getY2())/2;
			// for each global node get the node and its location
			for (int i =0; i< globalNodesSelected.size(); i++) {
				// get the global node
				global = globalNodesSelected.get(i);
				// get the location of the global node
				x1 = global.getX2();
				y1 = (global.getY()+global.getY2())/2;
				// place all the x1's in the x vector
				x[i] = x1;
				// place all the y1's in the y vector
				y[i] = y1;
				// keep track of the max x1 and y1
				if (x1 > maxX) maxX = x1;
				if (y1 > maxY) maxY = y1;
				// keep track of the min x1 and y1
				if (x1 < minX) minX = x1;
				if (y1 < minY) minY = y1;
			}
			// draw the horizontal line from each global node to the
			// max x location of the global node
			for (int j=0;j<globalNodesSelected.size();j++) {
				// draw the horizontal line
				graphic.drawLine(x[j],y[j],maxX+10,y[j]);
			}
			// draw the vertical line
			graphic.drawLine(maxX+10,minY,maxX+10,maxY);
			// aim the joining line at the local node when it lies within the bracket's span
			if (y2 <=maxY && y2 >=minY)
				y1 = y2;
			else y1 = (minY+maxY)/2;
			// draw the mappingByUser line between global nodes and local node
			graphic.drawLine(maxX+10,y1, x2,y2);
			// display the popup menu
			//mappingPopup.show(this,x2,y2);
		}
		else if ((globalNodesSelected.size() ==1) && (localNodesSelected.size() > 1)) {
			// ONE-TO-MANY MAPPING
			int minX=99999999;
			int minY=99999999;
			int maxX=0;
			int maxY=0;
			x = new int[localNodesSelected.size()];
			y = new int[localNodesSelected.size()];
			// first get the single global node
			global = globalNodesSelected.get(0);
			// get the location of the global node
			x1 = global.getX2();
			y1 = (global.getY()+global.getY2())/2;
			// for each local node get the node and its location
			for (int i =0; i<localNodesSelected.size(); i++) {
				// get the local node
				local = localNodesSelected.get(i);
				// get the location of the local node
				x2 = local.getX();
				y2 = (local.getY()+local.getY2())/2;
				// place all the x2's in the x vector
				x[i] = x2;
				// place all the y2's in the y vector
				y[i] = y2;
				// keep track of the maximum x2 and y2
				if (x2 > maxX) maxX = x2;
				if (y2 > maxY) maxY = y2;
				// keep track of the minimum x2 and y2
				if (x2 < minX) minX = x2;
				if (y2 < minY) minY = y2;
			}
			// draw a horizontal line from each local node to the min x location of the local nodes
			for (int j=0;j<localNodesSelected.size();j++) {
				// draw the horizontal line
				graphic.drawLine(x[j],y[j],minX-20,y[j]);
			}
			// draw the vertical line
			graphic.drawLine(minX-20,minY,minX-20,maxY);
			// aim the joining line at the global node when it lies within the bracket's span
			if (y1 <=maxY && y1 >=minY)
				y2 = y1;
			else y2 = (minY+maxY)/2;
			// draw the mappingByUser line from global node to local nodes
			graphic.drawLine(x1,y1, minX-20,y2);
			// display the popup menu
			//mappingPopup.show(this,x1,y1);
		}
	}

	/**Select all nodes with height between min and max in the selected root*/
	/**single node clicking invoked in mouseclick during a single click or ctrl click*/
	/**single node clicking invoked in mouseclick during a single click or ctrl click but also a shift click in this case*/
	/**Select and unselect functions are the basic function to select or deselect any nodes used in the mouse clicked funct and manageSelection, is used for single click but also shift*/
	/**Select and unselect functions are the basic function to select or deselect any nodes used in the mouse clicked funct and manageSelection*/

	/**
	 * This function returns the node to expand or contract based on the location of the mouseclick
	 *
	 * @param x the x location of mouseclick
	 * @param y the y location of mouseclick
	 */
	@SuppressWarnings("unchecked")
	public boolean expandOrContract(int x, int y) {
		Vertex root,node,
			expandOrContractNode=null;
		int x1,y1,x2,y2;
		// Check to see if the area clicked was next to the node
		// get the root of the tree based on the mouseclick (left half = global, right half = local)
		if (x < (canvasWidth/2))
			root = getGlobalTreeRoot();
		else root = getLocalTreeRoot();
		if (root != null) {
			for (Enumeration<Vertex> e = root.preorderEnumeration(); e.hasMoreElements() ;) {
				// get the node
				node = e.nextElement();
				// get the location of the node
				// and set an area left of the node which indicates expand or contract
				x1 = node.getX()-12;
				y1 = (node.getY()+(node.getY2()-node.getY())/2)-4;
				x2 = node.getX()-4;
				y2 = (node.getY()+(node.getY2()-node.getY())/2)+4;
				if ((x >= x1) && (x <= x2)) {
					if ((y >= y1) && (y <= y2)) {
						expandOrContractNode = node;
					}
				}
			}
		}
		boolean hasDoneSomething = false;
		// if the mouseclick is to the left of some node
		if (expandOrContractNode != null) {
			hasDoneSomething = true;
			// If the children are visible then collapse the tree
			Vertex child;
			if(!expandOrContractNode.isLeaf()) {
				child = (Vertex)expandOrContractNode.getFirstChild();
				// check to see if the node is already visible
				// if the node is already visible then make it not visible and vice versa
				if (child.isVisible() == true) {
					expandOrContractNode.setShouldCollapse(true);
					recurseOnNode(expandOrContractNode,0);
				}
				else {
					expandOrContractNode.setShouldCollapse(false);
					recurseOnNode(expandOrContractNode,1);
				}
			}
		}
		return hasDoneSomething;
	}

	/**
	 * This function recursively sets the Vertex and its descendants to be visible or invisible
	 * based on the int; If int is 0 the node will collapse, else it will expand
	 *
	 * @param targetNode node to collapse or expand
	 * @param expandOrCollapse value indicating to expand (1) or collapse (0) the node
	 */
	@SuppressWarnings("unchecked")
	public void recurseOnNode(Vertex targetNode, int expandOrCollapse) {
		Vertex node;
		// Hide (or show) the children of the target node's children
		for (Enumeration<Vertex> e = targetNode.children(); e.hasMoreElements(); ) {
			node = (Vertex) e.nextElement();
			if (expandOrCollapse == 0) {
				// set the child to be invisible
				node.setIsVisible(false);
			}
			else if (expandOrCollapse == 1) {
				// set the child to be visible
				node.setIsVisible(true);
			}
			// only descend into subtrees not marked as collapsed, so a previously
			// collapsed subtree stays collapsed when its ancestor is re-expanded
			if (node.getShouldCollapse() == false) {
				// if the child has its own children
				if (!node.isLeaf()) {
					// recursively set the child's children to be invisible/visible
					recurseOnNode(node, expandOrCollapse);
				}
			}
		}
	}

	/**
	 * This function sets the rightClickedNode
	 *
	 * @param node right clicked vertex
	 */
	public void setRightClickedNode(Vertex node) {
		rightClickedNode = node;
	}

	/**
	 * This function returns the rightClickedNode
	 * @return rightClickedNode the node which was right clicked
	 */
	public Vertex getRightClickedNode() {
		return rightClickedNode;
	}

	/**
	 * This function figures out which node was clicked on.
	 *
	 * @param x the x location of mouse click
	 * @param y the y location of mouse click
	 * @return Vertex the node which was clicked on, or null if none
	 */
	@SuppressWarnings("unchecked")
	public Vertex getNodeClicked(int x,int y) {
		// figure out which type of node was clicked (global or local)
		// if the click was within the left half of the canvas then it is global
		// else it is within the local side
		Dimension dim;
		Vertex root;
		double canvasWidth;//, canvasHeight; -- NOTE(review): this local shadows the canvasWidth field
		dim = getSize();
		canvasWidth = dim.getWidth();
		// get the root of the tree based on the mouseclick
		if (x < (canvasWidth/2))
			root = getGlobalTreeRoot();
		else root = getLocalTreeRoot();
		if (root != null) {
			Vertex node;
			for (Enumeration<Vertex> e = root.preorderEnumeration(); e.hasMoreElements() ;) {
				// get the node
				node = e.nextElement();
				if (node.isVisible() == true) {
					// hit-test the node's bounding box
					if ((x <= node.getX2()) && (x >= node.getX())) {
						if ((y <= node.getY2()) && (y >= node.getY())) {
							return node;
						}
					}
				}
			}
		}
		return null;
	}

	/**
	 * Creates a popupmenu when the user maps nodes
	 */
	public void createMappingPopupMenu() {
		mappingPopup = new JPopupMenu();
		//First popup: 4 rows, manualAlignment open a submenu
		standardAlignment = new JMenuItem("Create/Update standard alignment");
		manualAlignment = new JMenu("Create/Update
alignment manually"); deleteAlignment = new JMenuItem("Delete alignment"); cancel = new JMenuItem("Cancel"); //submenu manual alignment exact = new JMenuItem("Equivalence"); other = new JMenuItem("Other"); subset = new JMenuItem("Subset"); subsetComplete = new JMenuItem("Subset Complete"); superset = new JMenuItem("Superset"); supersetComplete = new JMenuItem("Superset Complete"); //I'm not considering this for now comparativeExact = new JMenuItem("Comparitive Exact"); comparativeSubset = new JMenuItem("Comparitive Subset"); comparativeSuperset = new JMenuItem("Comparitive Superset"); // add exact, subset, subsetComplete, superset, // supersetComplete, comparitive menu items to mappingType menu manualAlignment.add(exact); manualAlignment.add(superset); manualAlignment.add(subsetComplete); manualAlignment.add(supersetComplete); manualAlignment.add(other); /* manualAlignment.add(comparativeExact); manualAlignment.add(comparativeSubset); manualAlignment.add(comparativeSuperset); */ // add them to mappingPopup menu mappingPopup.add(standardAlignment); mappingPopup.add(manualAlignment); mappingPopup.add(deleteAlignment); mappingPopup.add(cancel); // add Listener to menuItems standardAlignment.addActionListener(this); deleteAlignment.addActionListener(this); //manual alignment needs listeners only on submenu items cancel.addActionListener(this); exact.addActionListener(this); other.addActionListener(this); subset.addActionListener(this); subsetComplete.addActionListener(this); superset.addActionListener(this); supersetComplete.addActionListener(this); comparativeExact.addActionListener(this); comparativeSubset.addActionListener(this); comparativeSuperset.addActionListener(this); } /** * Creates a popupmenu when the user right clicks on a vertex */ public void createPopupMenu() { popup = new JPopupMenu(); desc = new JMenuItem("Node Details"); //mappingInfo = new JMenuItem("Mapping Info"); cancelPopup = new JMenuItem("Cancel"); // add the description to popup menu 
popup.add(desc); // add the mappingByUser information //popup.add(mappingInfo); // add the cancel menu item popup.add(cancelPopup); // add listener desc.addActionListener(this); //mappingInfo.addActionListener(this); cancelPopup.addActionListener(this); } /* private void computeCanvasWidth(int ontoType) { Vertex node = new Vertex(null); int x=0;//initial value int y=0; if(ontoType == GSM.SOURCENODE) node = getGlobalTreeRoot(); else if(ontoType == GSM.TARGETNODE) node = getLocalTreeRoot(); if(node != null) for (Enumeration e = node.preorderEnumeration(); e.hasMoreElements() ;) { // get the node node = (Vertex) e.nextElement(); y = node.getLevel()*20 + node.getName().length()*7; if(y>x) x=y; } x+=30; if(x*2 > canvasWidth) canvasWidth=x*2; } */ /** * @param root * / public void showAll(Vertex root) { performShowAll(root); } */ public void mouseEntered(MouseEvent e) { // TODO Auto-generated method stub } public void mouseExited(MouseEvent e) { // TODO Auto-generated method stub } public void mousePressed(MouseEvent e) { // TODO Auto-generated method stub } public void mouseReleased(MouseEvent e) { // TODO Auto-generated method stub } }
package org.jdesktop.swingx.renderer; import java.awt.Component; import javax.swing.JList; import javax.swing.ListCellRenderer; /** * Adapter to glue SwingX renderer support to core API. It has convenience * constructors to create a LabelProvider, optionally configured with a * StringValue and horizontal alignment. Typically, client code does not * interact with this class except at instantiation time. * <p> * * Note: core DefaultListCellRenderer shows either an icon or the element's * toString representation, depending on whether or not the given value * is of type icon or implementors. This renderer's empty/null provider * constructor takes care of configuring the default provider with a converter * which mimics that behaviour. When instantiating this renderer with * any of the constructors which have converters as parameters, * it's up to the client code to supply the appropriate converter, if needed: * * * <pre><code> * StringValue sv = new StringValue() { * * public String getString(Object value) { * if (value instanceof Icon) { * return &quot;&quot;; * } * return StringValue.TO_STRING.getString(value); * } * * }; * StringValue lv = new MappedValue(sv, IconValue.ICON); * listRenderer = new DefaultListRenderer(lv, alignment); * * </code></pre> * * <p> * * * @author Jeanette Winzenburg * * @see ComponentProvider * @see StringValue * @see IconValue * @see MappedValue * * */ public class DefaultListRenderer extends AbstractRenderer implements ListCellRenderer { protected ListCellContext cellContext; /** * Instantiates a default list renderer with the default component * provider. * */ public DefaultListRenderer() { this((ComponentProvider<?>) null); } /** * Instantiates a ListCellRenderer with the given ComponentProvider. * If the provider is null, creates and uses a default. 
The default * provider is of type <code>LabelProvider</code><p> * * Note: the default provider is configured with a custom StringValue * which behaves exactly as core DefaultListCellRenderer: depending on * whether or not given value is of type icon or implementors, it shows * either the icon or the element's toString. * * @param componentProvider the provider of the configured component to * use for cell rendering */ public DefaultListRenderer(ComponentProvider<?> componentProvider) { super(componentProvider); this.cellContext = new ListCellContext(); } /** * Instantiates a default table renderer with a default component controller * using the given converter.<p> * * PENDING JW: how to guarantee core consistent icon handling? Leave to * client code? * * @param converter the converter to use for mapping the content value to a * String representation. * */ public DefaultListRenderer(StringValue converter) { this(new LabelProvider(converter)); } /** * Instantiates a default list renderer with a default component * controller using the given converter and horizontal * alignment. * * PENDING JW: how to guarantee core consistent icon handling? Leave to * client code? * * * @param converter the converter to use for mapping the * content value to a String representation. * @param alignment the horizontal alignment. */ public DefaultListRenderer(StringValue converter, int alignment) { this(new LabelProvider(converter, alignment)); } /** * Instantiates a default list renderer with default component provider * using both converters. * * @param stringValue the converter to use for the string representation * @param iconValue the converter to use for the icon representation */ public DefaultListRenderer(StringValue stringValue, IconValue iconValue) { this(new MappedValue(stringValue, iconValue)); } /** * Instantiates a default list renderer with default component provider * using both converters and the given alignment. 
* * @param stringValue the converter to use for the string representation * @param iconValue the converter to use for the icon representation * @param alignment the rendering component's horizontal alignment */ public DefaultListRenderer(StringValue stringValue, IconValue iconValue, int alignment) { this(new MappedValue(stringValue, iconValue), alignment); } /** * * Returns a configured component, appropriate to render the given * list cell. <p> * * Note: The component's name is set to "List.cellRenderer" for the sake * of Synth-based LAFs. * * @param list the <code>JList</code> to render on * @param value the value to assign to the cell * @param isSelected true if cell is selected * @param cellHasFocus true if cell has focus * @param index the row index (in view coordinates) of the cell to render * @return a component to render the given list cell. */ @Override public Component getListCellRendererComponent(JList list, Object value, int index, boolean isSelected, boolean cellHasFocus) { cellContext.installContext(list, value, index, 0, isSelected, cellHasFocus, true, true); Component comp = componentController.getRendererComponent(cellContext); // fix issue #1040-swingx: memory leak if value not released cellContext.replaceValue(null); return comp; } /** * {@inheritDoc} */ @Override protected ComponentProvider<?> createDefaultComponentProvider() { return new LabelProvider(createDefaultStringValue()); } /** * Creates and returns the default StringValue for a JList.<p> * This is added to keep consistent with core list rendering which * shows either the Icon (for Icon value types) or the default * to-string for non-icon types. * * @return the StringValue to use by default. */ private StringValue createDefaultStringValue() { return MappedValues.STRING_OR_ICON_ONLY; } }
package com.exedio.cope; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import com.exedio.cope.util.ConnectionPoolInfo; import com.exedio.cope.util.PoolCounter; import com.exedio.dsmf.ConnectionProvider; import com.exedio.dsmf.SQLRuntimeException; final class ConnectionPool implements ConnectionProvider { interface Factory { Connection createConnection() throws SQLException; } // TODO: allow changing pool size // TODO: gather pool effectivity statistics // TODO: use a ring buffer instead of a stack // to avoid connections at the bottom of the stack // staying idle for a very long time and possibly // running into some idle timeout implemented by the // jdbc driver or the database itself. // TODO: implement idle timout // ensure, that idle connections in the pool do // not stay idle for a indefinite time, // but are closed after a certain time to avoid // running into some idle timeout implemented by the // jdbc driver or the database itself. // maybe then no ring buffer is needed. private final Factory factory; private final PoolCounter counter; private final Connection[] idle; private int idleCount; private final Object lock = new Object(); ConnectionPool(final Factory factory, final int idleLimit, final int idleInitial) { assert factory!=null; assert idleLimit>=0; assert idleInitial>=0; this.factory = factory; // TODO: make this customizable and disableable this.counter = new PoolCounter(new int[]{0,1,2,4,6,8,10,15,20,25,30,40,50,60,70,80,90,100}); this.idle = idleLimit>0 ? 
new Connection[idleLimit] : null; assert idleInitial<=idleLimit; this.idleCount = idleInitial; if(idleInitial>0) { try { for(int i = 0; i<idleInitial; i++) idle[i] = factory.createConnection(); } catch(SQLException e) { throw new RuntimeException(e); } } } //private static long timeInChecks = 0; //private static long numberOfChecks = 0; public Connection getConnection(final boolean autoCommit) throws SQLException { counter.incrementGet(); Connection result = null; do { synchronized(lock) { if(idle!=null && idleCount>0) { //System.out.println("connection pool: fetch "+(size-1)); result = idle[--idleCount]; idle[idleCount] = null; // do not reference active connections } } if(result==null) break; // Important to do this outside the synchronized block! result = checkWhetherConnectionIsStillValid(result); if(result!=null) break; } while(true); //System.out.println("connection pool: CREATE"); // Important to do this outside the synchronized block! if(result==null) result = factory.createConnection(); result.setAutoCommit(autoCommit); return result; } /** * One important reason to have this functionality in a dedicated method is to * put the name of the method into exception stacktraces. */ private static final Connection checkWhetherConnectionIsStillValid(final Connection result) { try { //final long start = System.currentTimeMillis(); // probably not the best idea final ResultSet rs = result.getMetaData().getTables(null, null, "zack", null); rs.next(); rs.close(); //timeInChecks += (System.currentTimeMillis()-start); //numberOfChecks++; return result; } catch(SQLException e) { System.out.println("warning: pooled connection invalid: " + e.getMessage()); return null; } } /** * TODO: If we want to implement changing connection parameters on-the-fly * somewhere in the future, it's important, that client return connections * to exactly the same instance of ConnectionPool. 
*/ public void putConnection(final Connection connection) throws SQLException { if(connection==null) throw new NullPointerException(); counter.incrementPut(); // IMPORTANT: // Do not let a closed connection be put back into the pool. if(connection.isClosed()) throw new IllegalArgumentException("unexpected closed connection"); synchronized(lock) { if(idle!=null && idleCount<idle.length) { //System.out.println("connection pool: store "+idleCount); idle[idleCount++] = connection; return; } } //System.out.println("connection pool: CLOSE "); // Important to do this outside the synchronized block! connection.close(); } void flush() { if(idle!=null) { // make a copy of idle to avoid closing idle connections // inside the synchronized block final ArrayList<Connection> copyOfIdle = new ArrayList<Connection>(idle.length); synchronized(lock) { if(idleCount==0) return; //System.out.println("connection pool: FLUSH "+size); for(int i = 0; i<idleCount; i++) { copyOfIdle.add(idle[i]); idle[i] = null; // do not reference closed connections } idleCount = 0; } try { for(final Connection c : copyOfIdle) c.close(); } catch(SQLException e) { throw new SQLRuntimeException(e, "close()"); } } } ConnectionPoolInfo getInfo() { return new ConnectionPoolInfo(idleCount, new PoolCounter(counter)); } }
import static org.junit.Assert.*; import org.junit.After; import org.junit.Before; import org.junit.Test; /** * The test class FlotaTest. * * @author (your name) * @version (a version number or a date) */ public class FlotaTest { /** * Default constructor for test class FlotaTest */ public FlotaTest() { } /** * Sets up the test fixture. * * Called before every test case method. */ @Before public void setUp() { } /** * Tears down the test fixture. * * Called after every test case method. */ @After public void tearDown() { } @Test public void testFlota01() { Vehiculo coche1 = new Coche(Marca.FIAT, 8, 2000, 4); Vehiculo furgonet1 = new FurgonetaPequena(Marca.FORD, 2, 456, 6); Vehiculo furgonet2 = new FurgonetaGrande(Marca.OPEL, 5, 20000, 1000); Vehiculo camion1 = new Camion(Marca.CITROEN, 6, 10000, 1500); Flota flota1 = new Flota(); flota1.addVehiculo(coche1); flota1.addVehiculo(furgonet1); flota1.addVehiculo(furgonet2); flota1.addVehiculo(camion1); assertEquals(true, flota1.cumplenMedidas()); assertNull(flota1.marcaMayoritaria()); int[] arrayEsperado0 = {0, 8, 2000, 4, 1}; assertArrayEquals(arrayEsperado0, flota1.getDatosVehiculo(0)); int[] arrayEsperado1 = {1, 2, 456, 6}; assertArrayEquals(arrayEsperado1, flota1.getDatosVehiculo(1)); int[] arrayEsperado2 = {2, 5, 20000, 1000}; assertArrayEquals(arrayEsperado2, flota1.getDatosVehiculo(2)); int[] arrayEsperado3 = {3, 6, 10000, 1500}; assertArrayEquals(arrayEsperado3, flota1.getDatosVehiculo(3)); Vehiculo coche2 = new Coche(Marca.FORD, 10, 300000, 4); flota1.addVehiculo(coche2); assertEquals(false, flota1.cumplenMedidas()); assertEquals(Marca.FORD, flota1.marcaMayoritaria()); int[] arrayEsperado4 = {4, 10, 300000, 4, 2}; assertArrayEquals(arrayEsperado4, flota1.getDatosVehiculo(4)); flota1.removeVehiculo(4); assertEquals(true, flota1.cumplenMedidas()); Vehiculo camion2 = new Camion(Marca.CITROEN, 7, 206000, 4000); flota1.addVehiculo(camion2); assertEquals(Marca.CITROEN, flota1.marcaMayoritaria()); assertEquals(false, 
flota1.cumplenMedidas()); flota1.removeVehiculo(5); assertEquals(true, flota1.cumplenMedidas()); } }
package org.jboss.as.threads; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import org.jboss.msc.inject.Injector; import org.jboss.msc.service.Service; import org.jboss.msc.service.StartContext; import org.jboss.msc.service.StartException; import org.jboss.msc.service.StopContext; import org.jboss.msc.value.InjectedValue; import org.jboss.threads.EventListener; import org.jboss.threads.JBossThreadPoolExecutor; /** * Service responsible for creating, starting and stopping a thread pool executor with an unbounded queue. * * @author John E. Bailey */ public class UnboundedQueueThreadPoolService implements Service<ManagedJBossThreadPoolExecutorService> { private final InjectedValue<ThreadFactory> threadFactoryValue = new InjectedValue<ThreadFactory>(); private ManagedJBossThreadPoolExecutorService executor; private int maxThreads; private TimeSpec keepAlive; public UnboundedQueueThreadPoolService(int maxThreads, TimeSpec keepAlive) { this.maxThreads = maxThreads; this.keepAlive = keepAlive; } public synchronized void start(final StartContext context) throws StartException { final TimeSpec keepAliveSpec = keepAlive; long keepAliveTime = keepAliveSpec == null ? 
Long.MAX_VALUE : keepAliveSpec.getUnit().toNanos(keepAliveSpec.getDuration()); final JBossThreadPoolExecutor jbossExecutor = new JBossThreadPoolExecutor(maxThreads, maxThreads, keepAliveTime, TimeUnit.NANOSECONDS, new LinkedBlockingQueue<Runnable>(), threadFactoryValue.getValue()); executor = new ManagedJBossThreadPoolExecutorService(jbossExecutor); } public synchronized void stop(final StopContext context) { final ManagedJBossThreadPoolExecutorService executor = getValue(); context.asynchronous(); executor.internalShutdown(); executor.addShutdownListener(new EventListener<StopContext>() { public void handleEvent(final StopContext stopContext) { stopContext.complete(); } }, context); this.executor = null; } public synchronized ManagedJBossThreadPoolExecutorService getValue() throws IllegalStateException { final ManagedJBossThreadPoolExecutorService value = this.executor; if (value == null) { throw ThreadsMessages.MESSAGES.unboundedQueueThreadPoolExecutorUninitialized(); } return value; } public Injector<ThreadFactory> getThreadFactoryInjector() { return threadFactoryValue; } public synchronized void setMaxThreads(final int maxThreads) { boolean decreased = false; if (maxThreads < this.maxThreads) decreased = true; this.maxThreads = maxThreads; final ManagedJBossThreadPoolExecutorService executor = this.executor; if(executor != null) { if (decreased) { executor.setCoreThreads(maxThreads); executor.setMaxThreads(maxThreads); } else { executor.setMaxThreads(maxThreads); executor.setCoreThreads(maxThreads); } } } public synchronized void setKeepAlive(final TimeSpec keepAlive) { this.keepAlive = keepAlive; final ManagedJBossThreadPoolExecutorService executor = this.executor; if(executor != null) { executor.setKeepAlive(keepAlive); } } public int getActiveCount() { final ManagedJBossThreadPoolExecutorService executor = getValue(); return executor.getActiveCount(); } public long getCompletedTaskCount() { final ManagedJBossThreadPoolExecutorService executor = getValue(); 
return executor.getCompletedTaskCount(); } public int getCurrentThreadCount() { final ManagedJBossThreadPoolExecutorService executor = getValue(); return executor.getCurrentThreadCount(); } public int getLargestPoolSize() { final ManagedJBossThreadPoolExecutorService executor = getValue(); return executor.getLargestPoolSize(); } public int getLargestThreadCount() { final ManagedJBossThreadPoolExecutorService executor = getValue(); return executor.getLargestThreadCount(); } public int getRejectedCount() { final ManagedJBossThreadPoolExecutorService executor = getValue(); return executor.getRejectedCount(); } public long getTaskCount() { final ManagedJBossThreadPoolExecutorService executor = getValue(); return executor.getTaskCount(); } public int getQueueSize() { final ManagedJBossThreadPoolExecutorService executor = getValue(); return executor.getQueueSize(); } TimeUnit getKeepAliveUnit() { return keepAlive == null ? TimeSpec.DEFAULT_KEEPALIVE.getUnit() : keepAlive.getUnit(); } }