repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
open-health-hub/openMAXIMS
openmaxims_workspace/ValueObjects/src/ims/RefMan/vo/beans/TheatreListBookingDetailVoBean.java
2025
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751) // Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved. // WARNING: DO NOT MODIFY the content of this file package ims.RefMan.vo.beans; public class TheatreListBookingDetailVoBean extends ims.vo.ValueObjectBean { public TheatreListBookingDetailVoBean() { } public TheatreListBookingDetailVoBean(ims.RefMan.vo.TheatreListBookingDetailVo vo) { this.tcitime = vo.getTCITime(); this.proceduretext = vo.getProcedureText(); this.theatretext = vo.getTheatreText(); this.los = vo.getLOS(); } public void populate(ims.vo.ValueObjectBeanMap map, ims.RefMan.vo.TheatreListBookingDetailVo vo) { this.tcitime = vo.getTCITime(); this.proceduretext = vo.getProcedureText(); this.theatretext = vo.getTheatreText(); this.los = vo.getLOS(); } public ims.RefMan.vo.TheatreListBookingDetailVo buildVo() { return this.buildVo(new ims.vo.ValueObjectBeanMap()); } public ims.RefMan.vo.TheatreListBookingDetailVo buildVo(ims.vo.ValueObjectBeanMap map) { ims.RefMan.vo.TheatreListBookingDetailVo vo = null; if(map != null) vo = (ims.RefMan.vo.TheatreListBookingDetailVo)map.getValueObject(this); if(vo == null) { vo = new ims.RefMan.vo.TheatreListBookingDetailVo(); map.addValueObject(this, vo); vo.populate(map, this); } return vo; } public String getTCITime() { return this.tcitime; } public void setTCITime(String value) { this.tcitime = value; } public String getProcedureText() { return this.proceduretext; } public void setProcedureText(String value) { this.proceduretext = value; } public String getTheatreText() { return this.theatretext; } public void setTheatreText(String value) { this.theatretext = value; } public Integer getLOS() { return this.los; } public void setLOS(Integer value) { this.los = value; } private String tcitime; private String proceduretext; private String theatretext; private Integer los; }
agpl-3.0
vladimir-bukhtoyarov/jagger
dbapi/src/main/java/com/griddynamics/jagger/dbapi/dto/PointDto.java
1093
package com.griddynamics.jagger.dbapi.dto; import java.io.Serializable; /** * @author "Artem Kirillov" (akirillov@griddynamics.com) * @since 5/30/12 */ public class PointDto implements Serializable { private double x; private double y; public PointDto() { } public PointDto(double x, double y) { this.x = x; this.y = y; } public double getX() { return x; } public double getY() { return y; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof PointDto)) return false; PointDto pointDto = (PointDto) o; if (Double.compare(pointDto.x, x) != 0) return false; if (Double.compare(pointDto.y, y) != 0) return false; return true; } @Override public int hashCode() { return new Double(x).hashCode() >> 13 ^ new Double(y).hashCode(); } @Override public String toString() { return "PointDto{" + "x=" + x + ", y=" + y + '}'; } }
lgpl-2.1
joshkh/intermine
imbuild/im-ant-tasks/src/org/intermine/task/FileName.java
1098
package org.intermine.task; /* * Copyright (C) 2002-2015 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.io.File; import org.apache.tools.ant.BuildException; import org.apache.tools.ant.Task; /** * * * @author Thmoas Riley */ public class FileName extends Task { private String propName; private File file; /** * @param file filename */ public void setFile(File file) { this.file = file; } /** * @throws BuildException if can't build */ public void execute() throws BuildException { if (file == null) { throw new BuildException("file attribute required"); } this.getProject().setProperty(propName, file.getName()); } /** * @param propName property name */ public void setProperty(String propName) { this.propName = propName; } }
lgpl-2.1
wolfgangmm/exist
exist-core/src/test/java/org/exist/numbering/DLNTest.java
8703
/* * eXist-db Open Source Native XML Database * Copyright (C) 2001 The eXist-db Authors * * info@exist-db.org * http://www.exist-db.org * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ package org.exist.numbering; import com.googlecode.junittoolbox.ParallelRunner; import org.junit.Test; import org.junit.runner.RunWith; import java.util.Arrays; import java.util.Random; import static org.junit.Assert.*; @RunWith(ParallelRunner.class) public class DLNTest { private class TestItem implements Comparable<TestItem> { int id; NodeId dln; public TestItem(int id, DLN dln) { this.id = id; this.dln = dln; } public String toString() { StringBuffer buf = new StringBuffer(); buf.append(id); buf.append(" = "); buf.append(dln.toString()); return buf.toString(); } public int compareTo(TestItem other) { return dln.compareTo(other.dln); } } private final static int ITEMS_TO_TEST = 10000; @Test public void singleId() { Random rand = new Random(); TestItem items[] = new TestItem[ITEMS_TO_TEST]; for (int i = 0; i < ITEMS_TO_TEST; i++) { int next = rand.nextInt(5000000); DLN dln = new DLN(); dln.setLevelId(0, next); items[i] = new TestItem(next, dln); } Arrays.sort(items); for (int i = 0; i < ITEMS_TO_TEST; i++) { assertEquals("Item: " + i, items[i].id, ((DLN)items[i].dln).getLevelId(0)); if (i + 1 < ITEMS_TO_TEST) 
assertTrue(items[i].id <= items[i + 1].id); if (i > 0) assertTrue(items[i].id >= items[i - 1].id); } } @Test public void sort() { Random rand = new Random(); DLN items[] = new DLN[ITEMS_TO_TEST]; for (int i = 0; i < ITEMS_TO_TEST; i++) { int next = rand.nextInt(5000000); DLN dln = new DLN(); dln.setLevelId(0, next); items[i] = dln; } Arrays.sort(items); } @Test public void create() { DLN dln = new DLN(); for (int i = 1; i < 500000; i++) { dln.incrementLevelId(); } assertEquals(500000, dln.getLevelId(0)); } @Test public void levelIds() { DLN dln = new DLN("1.33.56.2.98.1.27");; assertEquals("1.33.56.2.98.1.27", dln.toString()); dln = new DLN("1.56.4.33.30.11.9.40.3.2"); assertEquals("1.56.4.33.30.11.9.40.3.2", dln.toString()); assertEquals(10, dln.getLevelCount(0)); dln = new DLN("1.8000656.40.3.2"); assertEquals("1.8000656.40.3.2", dln.toString()); assertEquals(5, dln.getLevelCount(0)); dln = new DLN("1.1"); assertEquals("1.1", dln.toString()); assertEquals(2, dln.getLevelCount(0)); dln.incrementLevelId(); assertEquals("1.2", dln.toString()); assertEquals(2, dln.getLevelCount(0)); assertEquals("1", dln.getParentId().toString()); dln = new DLN("1"); assertEquals("1", dln.toString()); assertEquals(1, dln.getLevelCount(0)); assertSame(NodeId.DOCUMENT_NODE, dln.getParentId()); dln = new DLN("1.72"); assertEquals("1.72", dln.toString()); dln = new DLN("1.7.3/1.34"); assertEquals("1.7.3/1.34", dln.toString()); assertEquals(4, dln.getLevelCount(0)); dln = new DLN("1.7.3.1/34"); assertEquals("1.7.3.1/34", dln.toString()); assertEquals(4, dln.getLevelCount(0)); dln.incrementLevelId(); assertEquals("1.7.3.1/35", dln.toString()); assertEquals(4, dln.getLevelCount(0)); dln = new DLN("1.2.1/2/3"); assertEquals(3, dln.getSubLevelCount(dln.lastLevelOffset())); dln = new DLN("1/2/3"); assertEquals(3, dln.getSubLevelCount(dln.lastLevelOffset())); int[] id0 = new int[] { 1, 33, 56, 2, 98, 1, 27 }; dln = new DLN(); for (int i = 0; i < id0.length; i++) { if (i > 0) dln.addLevelId(1, 
false); for (int j = 1; j < id0[i]; j++) dln.incrementLevelId(); } assertEquals("1.33.56.2.98.1.27", dln.toString()); assertEquals(7, dln.getLevelCount(0)); } @Test public void relations() { DLN root = new DLN("1.3"); DLN descendant = new DLN("1.3.1"); assertTrue(descendant.isDescendantOf(root)); assertTrue(descendant.isChildOf(root)); assertTrue(root.equals(descendant.getParentId())); descendant = new DLN("1.3.2.5.6"); assertTrue(descendant.isDescendantOf(root)); assertFalse(descendant.isChildOf(root)); assertTrue(descendant.isDescendantOrSelfOf(root)); descendant = new DLN("1.4"); assertFalse(descendant.isDescendantOf(root)); descendant = new DLN("1.3"); assertFalse(descendant.isDescendantOf(root)); assertTrue(descendant.isDescendantOrSelfOf(root)); root = new DLN("1.3.2.5.6"); descendant = new DLN("1.3.2.5.6.7777"); assertTrue(descendant.isDescendantOf(root)); assertTrue(descendant.isChildOf(root)); assertTrue(root.equals(descendant.getParentId())); descendant = new DLN("1.3.2.5.6.7777.1"); assertTrue(descendant.isDescendantOf(root)); assertFalse(descendant.isChildOf(root)); root = new DLN("1.3.1"); descendant = new DLN("1.3.2"); assertFalse(descendant.isDescendantOf(root)); root = new DLN("1.6.6.66"); descendant = new DLN("1.6.6.65.1"); assertFalse(descendant.isChildOf(root)); descendant = new DLN("1.6.6.66"); assertFalse(descendant.isChildOf(root)); root = new DLN("1.3.1/1"); descendant = new DLN("1.3.1/1.1"); assertTrue(descendant.isChildOf(root)); descendant = (DLN) root.newChild(); assertEquals("1.3.1/1.1", descendant.toString()); descendant.incrementLevelId(); assertEquals("1.3.1/1.2", descendant.toString()); assertTrue(root.equals(descendant.getParentId())); descendant = new DLN("1.3.1/1.2.2"); assertFalse(descendant.isChildOf(root)); assertTrue(descendant.isDescendantOf(root)); NodeId left = new DLN("1.3.1"); NodeId dln = new DLN("1.3.1/1"); NodeId right = new DLN("1.3.2"); assertTrue(dln.compareTo(right) < 0); assertTrue(dln.compareTo(left) > 0); 
assertTrue(left.compareTo(dln) < 0); assertTrue(right.compareTo(dln) > 0); assertTrue(left.compareTo(right) < 0); DLN id0 = new DLN("1.1.7"); DLN id1 = new DLN("1.1.6"); DLN id2 = new DLN("1.1.7.1"); DLN id3 = new DLN("1.1.7/1"); assertTrue(id0.isSiblingOf(id1)); assertTrue(id1.isSiblingOf(id0)); assertFalse(id0.isSiblingOf(id2)); assertFalse(id2.isSiblingOf(id0)); assertTrue(id3.isSiblingOf(id0)); assertTrue(id0.isSiblingOf(id3)); } @Test public void insertion() { DLN left = new DLN("1.1"); DLN right = (DLN) left.insertNode(null); assertEquals("1.2", right.toString()); DLN dln = (DLN) left.insertNode(right); assertEquals("1.1/1", dln.toString()); left = dln; dln = (DLN) left.insertNode(right); assertEquals("1.1/2", dln.toString()); right = dln; dln = (DLN) left.insertNode(right); assertEquals("1.1/1/1", dln.toString()); right = new DLN("1.1/1"); left = new DLN("1.1"); dln = (DLN) left.insertNode(right); assertEquals("1.1/0/35", dln.toString()); right = dln; dln = (DLN) left.insertNode(right); assertEquals("1.1/0/34", dln.toString()); } }
lgpl-2.1
paulklinkenberg/Lucee4
lucee-java/lucee-core/src/lucee/transformer/bytecode/visitor/ParseBodyVisitor.java
1975
/** * * Copyright (c) 2014, the Railo Company Ltd. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library. If not, see <http://www.gnu.org/licenses/>. * **/ package lucee.transformer.bytecode.visitor; import lucee.transformer.bytecode.BytecodeContext; import lucee.transformer.bytecode.BytecodeException; import lucee.transformer.bytecode.util.Types; import org.objectweb.asm.Type; import org.objectweb.asm.commons.GeneratorAdapter; import org.objectweb.asm.commons.Method; public final class ParseBodyVisitor { // void outputStart() public final static Method OUTPUT_START = new Method("outputStart",Types.VOID,new Type[]{}); // void outputEnd() public final static Method OUTPUT_END = new Method("outputEnd",Types.VOID,new Type[]{}); private TryFinallyVisitor tfv; public void visitBegin(BytecodeContext bc) { GeneratorAdapter adapter = bc.getAdapter(); tfv=new TryFinallyVisitor(new OnFinally() { public void _writeOut(BytecodeContext bc) { //ExpressionUtil.visitLine(bc, line); bc.getAdapter().loadArg(0); bc.getAdapter().invokeVirtual(Types.PAGE_CONTEXT,OUTPUT_END); } },null); //ExpressionUtil.visitLine(bc, line); adapter.loadArg(0); adapter.invokeVirtual(Types.PAGE_CONTEXT,OUTPUT_START); tfv.visitTryBegin(bc); } public void visitEnd(BytecodeContext bc) throws BytecodeException { tfv.visitTryEnd(bc); } }
lgpl-2.1
elsiklab/intermine
intermine/web/main/src/org/intermine/webservice/server/core/WebServiceServlet.java
4673
package org.intermine.webservice.server.core; /* * Copyright (C) 2002-2017 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.intermine.api.InterMineAPI; import org.intermine.web.context.InterMineContext; import org.intermine.webservice.server.WebService; /** * A servlet which can be easily configured to define how to route requests, using * standard RESTful semantics. * @author Alex Kalderimis * */ public abstract class WebServiceServlet extends HttpServlet { private static final long serialVersionUID = 3419034521176834088L; protected final InterMineAPI api; public static enum Method { /** GET **/ GET, /** POST **/ POST, /** PUT **/ PUT, /** DELETE **/ DELETE }; /** Constructor **/ public WebServiceServlet() { super(); api = InterMineContext.getInterMineAPI(); } /** * Respond to a request. * @param method The current method. * @param request The request. * @param response The response. * @throws ServletException Well it could I suppose. * @throws IOException Entirely possible really. */ protected void respond( Method method, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { try { WebService service = getService(method); // ugly, but better safe than sorry, since null is the bottom type; // but strictly speaking, the getService method should throw a // NoServiceException instead of returning null. 
if (service == null) { throw new NoServiceException(); } service.service(request, response); } catch (NoServiceException e) { sendNoMethodError(method.toString(), request, response); } } private void sendNoMethodError(String method, HttpServletRequest request, HttpServletResponse response) throws IOException { // The default no-op servlet behaviour. String protocol = request.getProtocol(); String msg = "The " + method + " method is not supported"; if (protocol.endsWith("1.1")) { response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, msg); } else { response.sendError(HttpServletResponse.SC_BAD_REQUEST, msg); } } /** * Implement this to route requests. * @param method The current method. * @return A webservice handler. * @throws NoServiceException If no handler matches the method. */ protected abstract WebService getService(Method method) throws NoServiceException; @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { String tunnelledMethod = request.getParameter("method"); if (tunnelledMethod != null && !"".equals(tunnelledMethod.trim())) { // This a fake tunnelled request, probably from IE, but possibly json-p Method tm; try { tm = Method.valueOf(tunnelledMethod); } catch (IllegalArgumentException e) { sendNoMethodError(tunnelledMethod, request, response); return; } respond(tm, request, response); } else { respond(Method.GET, request, response); } } @Override public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { if ("PUT".equals(request.getParameter("method"))) { // This a fake tunnelled request, probably from IE. 
doPut(request, response); } else { respond(Method.POST, request, response); } } @Override public void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { respond(Method.PUT, request, response); } @Override public void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { respond(Method.DELETE, request, response); } }
lgpl-2.1
elsiklab/intermine
intermine/objectstore/test/src/org/intermine/modelproduction/MetadataManagerTest.java
2328
package org.intermine.modelproduction; /* * Copyright (C) 2002-2017 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.io.InputStream; import java.io.OutputStream; import junit.framework.TestCase; import org.intermine.sql.Database; import org.intermine.sql.DatabaseFactory; public class MetadataManagerTest extends TestCase { private Database db; @Override public void setUp() throws Exception { db = DatabaseFactory.getDatabase("db.unittest"); } public void testValue() throws Exception { MetadataManager.store(db, "test_key", "Hello"); assertEquals("Hello", MetadataManager.retrieve(db, "test_key")); } public void testBinaryValue() throws Exception { byte[] expected = "Hello".getBytes(); MetadataManager.storeBinary(db, "test_key_bin", expected); InputStream is = MetadataManager.retrieveBLOBInputStream(db, "test_key_bin"); byte[] got = new byte[expected.length + 1]; assertEquals(expected.length, is.read(got)); for (int i = 0; i < expected.length; i++) { assertEquals(expected[i], got[i]); } } public void testLargeValue() throws Exception { byte[] expected = "Hello".getBytes(); OutputStream os = MetadataManager.storeLargeBinary(db, "test_key_large"); os.write(expected); os.close(); InputStream is = MetadataManager.readLargeBinary(db, "test_key_large"); byte[] got = new byte[expected.length + 1]; assertEquals(expected.length, is.read(got)); for (int i = 0; i < expected.length; i++) { assertEquals(expected[i], got[i]); } } public void testDeleteLargeBinary() throws Exception { byte[] expected = "Hello".getBytes(); String key = "test_key_large"; OutputStream os = MetadataManager.storeLargeBinary(db, key); os.write(expected); os.close(); assertNotNull(MetadataManager.retrieve(db, key)); MetadataManager.deleteLargeBinary(db, key); 
assertNull(MetadataManager.retrieve(db, key)); } }
lgpl-2.1
jcarvalho/fenixedu-academic
src/main/java/org/fenixedu/academic/dto/accounting/sibsPaymentFileProcessReport/SibsPaymentFileProcessReportDTO.java
19011
/** * Copyright © 2002 Instituto Superior Técnico * * This file is part of FenixEdu Academic. * * FenixEdu Academic is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * FenixEdu Academic is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>. */ package org.fenixedu.academic.dto.accounting.sibsPaymentFileProcessReport; import org.fenixedu.academic.domain.GratuitySituation; import org.fenixedu.academic.domain.accounting.Event; import org.fenixedu.academic.domain.accounting.PaymentCode; import org.fenixedu.academic.domain.accounting.ResidenceEvent; import org.fenixedu.academic.domain.accounting.events.AdministrativeOfficeFeeAndInsuranceEvent; import org.fenixedu.academic.domain.accounting.events.candidacy.DegreeCandidacyForGraduatedPersonEvent; import org.fenixedu.academic.domain.accounting.events.candidacy.DegreeChangeIndividualCandidacyEvent; import org.fenixedu.academic.domain.accounting.events.candidacy.DegreeTransferIndividualCandidacyEvent; import org.fenixedu.academic.domain.accounting.events.candidacy.Over23IndividualCandidacyEvent; import org.fenixedu.academic.domain.accounting.events.candidacy.SecondCycleIndividualCandidacyEvent; import org.fenixedu.academic.domain.accounting.events.gratuity.DfaGratuityEvent; import org.fenixedu.academic.domain.accounting.events.gratuity.GratuityEventWithPaymentPlan; import org.fenixedu.academic.domain.accounting.events.gratuity.StandaloneEnrolmentGratuityEvent; import 
org.fenixedu.academic.domain.accounting.events.insurance.InsuranceEvent; import org.fenixedu.academic.domain.accounting.paymentCodes.AccountingEventPaymentCode; import org.fenixedu.academic.domain.accounting.paymentCodes.GratuitySituationPaymentCode; import org.fenixedu.academic.domain.accounting.paymentCodes.MasterDegreeInsurancePaymentCode; import org.fenixedu.academic.domain.phd.candidacy.PhdProgramCandidacyEvent; import org.fenixedu.academic.util.Money; import org.fenixedu.academic.util.sibs.incomming.SibsIncommingPaymentFile; import org.fenixedu.academic.util.sibs.incomming.SibsIncommingPaymentFileDetailLine; import org.joda.time.YearMonthDay; public class SibsPaymentFileProcessReportDTO { private String filename; private YearMonthDay whenProcessedBySibs; private Integer fileVersion; private Money degreeGratuityTotalAmount; private Money bolonhaDegreeGratuityTotalAmount; private Money integratedMasterDegreeGratuityTotalAmount; private Money integratedBolonhaMasterDegreeGratuityTotalAmount; private Money administrativeOfficeTaxTotalAmount; private Money graduationInsuranceTotalAmount; private Money specializationGratuityTotalAmount; private Money masterDegreeGratuityTotalAmount; private Money bolonhaMasterDegreeGratuityTotalAmount; private Money dfaGratuityTotalAmount; private Money afterGraduationInsuranceTotalAmount; private Money phdGratuityTotalAmout; private Money transactionsTotalAmount; private Money residenceAmount; private Money degreeChangeIndividualCandidacyAmount; private Money degreeTransferIndividualCandidacyAmount; private Money secondCycleIndividualCandidacyAmount; private Money degreeCandidacyForGraduatedPersonAmount; private Money totalCost; private Money standaloneEnrolmentGratuityEventAmount; private Money over23IndividualCandidacyEventAmount; private Money institutionAffiliationEventAmount; private Money phdProgramCandidacyEventAmount; private Money rectorateAmount; public SibsPaymentFileProcessReportDTO() { super(); 
this.degreeGratuityTotalAmount = Money.ZERO; this.bolonhaDegreeGratuityTotalAmount = Money.ZERO; this.integratedMasterDegreeGratuityTotalAmount = Money.ZERO; this.integratedBolonhaMasterDegreeGratuityTotalAmount = Money.ZERO; this.administrativeOfficeTaxTotalAmount = Money.ZERO; this.graduationInsuranceTotalAmount = Money.ZERO; this.specializationGratuityTotalAmount = Money.ZERO; this.masterDegreeGratuityTotalAmount = Money.ZERO; this.bolonhaMasterDegreeGratuityTotalAmount = Money.ZERO; this.dfaGratuityTotalAmount = Money.ZERO; this.afterGraduationInsuranceTotalAmount = Money.ZERO; this.phdGratuityTotalAmout = Money.ZERO; this.transactionsTotalAmount = Money.ZERO; this.totalCost = Money.ZERO; this.residenceAmount = Money.ZERO; this.degreeChangeIndividualCandidacyAmount = Money.ZERO; this.degreeTransferIndividualCandidacyAmount = Money.ZERO; this.secondCycleIndividualCandidacyAmount = Money.ZERO; this.degreeCandidacyForGraduatedPersonAmount = Money.ZERO; this.standaloneEnrolmentGratuityEventAmount = Money.ZERO; this.over23IndividualCandidacyEventAmount = Money.ZERO; this.institutionAffiliationEventAmount = Money.ZERO; this.phdProgramCandidacyEventAmount = Money.ZERO; this.rectorateAmount = Money.ZERO; } public SibsPaymentFileProcessReportDTO(final SibsIncommingPaymentFile sibsIncomingPaymentFile) { this(); setWhenProcessedBySibs(sibsIncomingPaymentFile.getHeader().getWhenProcessedBySibs()); setFilename(sibsIncomingPaymentFile.getFilename()); setTransactionsTotalAmount(sibsIncomingPaymentFile.getFooter().getTransactionsTotalAmount()); setTotalCost(sibsIncomingPaymentFile.getFooter().getTotalCost()); setFileVersion(sibsIncomingPaymentFile.getHeader().getVersion()); } private void addAdministrativeOfficeTaxAmount(final Money amount) { this.administrativeOfficeTaxTotalAmount = this.administrativeOfficeTaxTotalAmount.add(amount); } public Money getAdministrativeOfficeTaxTotalAmount() { return administrativeOfficeTaxTotalAmount; } private void 
addBolonhaDegreeGratuityAmount(final Money amount) { this.bolonhaDegreeGratuityTotalAmount = this.bolonhaDegreeGratuityTotalAmount.add(amount); } public Money getBolonhaDegreeGratuityTotalAmount() { return bolonhaDegreeGratuityTotalAmount; } private void addDegreeGratuityAmount(final Money amount) { this.degreeGratuityTotalAmount = this.degreeGratuityTotalAmount.add(amount); } public Money getDegreeGratuityTotalAmount() { return degreeGratuityTotalAmount; } public Money getGraduationInsuranceTotalAmount() { return graduationInsuranceTotalAmount; } private void addGraduationInsuranceAmount(Money amount) { this.graduationInsuranceTotalAmount = this.graduationInsuranceTotalAmount.add(amount); } private void addDfaGratuityAmount(final Money amount) { this.dfaGratuityTotalAmount = this.dfaGratuityTotalAmount.add(amount); } public Money getDfaGratuityTotalAmount() { return dfaGratuityTotalAmount; } public String getFilename() { return filename; } public void setFilename(String filename) { this.filename = filename; } public Integer getFileVersion() { return fileVersion; } public void setFileVersion(Integer fileVersion) { this.fileVersion = fileVersion; } private void addAfterGraduationInsuranceAmount(final Money amount) { this.afterGraduationInsuranceTotalAmount = this.afterGraduationInsuranceTotalAmount.add(amount); } public Money getAfterGraduationInsuranceTotalAmount() { return afterGraduationInsuranceTotalAmount; } private void addIntegratedBolonhaMasterDegreeGratuityAmount(final Money amount) { this.integratedBolonhaMasterDegreeGratuityTotalAmount = this.integratedBolonhaMasterDegreeGratuityTotalAmount.add(amount); } public Money getIntegratedBolonhaMasterDegreeGratuityTotalAmount() { return integratedBolonhaMasterDegreeGratuityTotalAmount; } private void addIntegratedMasterDegreeGratuityAmount(final Money amount) { this.integratedMasterDegreeGratuityTotalAmount = this.integratedMasterDegreeGratuityTotalAmount.add(amount); } public Money 
getIntegratedMasterDegreeGratuityTotalAmount() { return integratedMasterDegreeGratuityTotalAmount; } private void addMasterDegreeGratuityAmount(final Money amount) { this.masterDegreeGratuityTotalAmount = this.masterDegreeGratuityTotalAmount.add(amount); } public Money getMasterDegreeGratuityTotalAmount() { return masterDegreeGratuityTotalAmount; } private void addBolonhaMasterDegreGratuityTotalAmount(final Money amount) { this.bolonhaMasterDegreeGratuityTotalAmount = this.bolonhaMasterDegreeGratuityTotalAmount.add(amount); } public Money getBolonhaMasterDegreeGratuityTotalAmount() { return bolonhaMasterDegreeGratuityTotalAmount; } private void addSpecializationGratuityAmount(final Money amount) { this.specializationGratuityTotalAmount = this.specializationGratuityTotalAmount.add(amount); } public Money getSpecializationGratuityTotalAmount() { return specializationGratuityTotalAmount; } private void addPhdGratuityAmount(final Money amount) { this.phdGratuityTotalAmout = this.phdGratuityTotalAmout.add(amount); } public Money getPhdGratuityTotalAmout() { return phdGratuityTotalAmout; } public Money getOver23IndividualCandidacyEventAmount() { return over23IndividualCandidacyEventAmount; } public void addOver23IndividualCandidacyEventAmount(final Money amount) { this.over23IndividualCandidacyEventAmount = this.over23IndividualCandidacyEventAmount.add(amount); } public Money getInstitutionAffiliationEventAmount() { return institutionAffiliationEventAmount; } public void addInstitutionAffiliationEventAmount(final Money amount) { this.institutionAffiliationEventAmount = this.institutionAffiliationEventAmount.add(amount); } public Money getPhdProgramCandidacyEventAmount() { return phdProgramCandidacyEventAmount; } public void addPhdProgramCandidacyEventAmount(final Money amount) { this.phdProgramCandidacyEventAmount = this.phdProgramCandidacyEventAmount.add(amount); } public Money getTotalCost() { return totalCost; } public void setTotalCost(Money totalCost) { 
this.totalCost = totalCost; } public Money getTransactionsTotalAmount() { return transactionsTotalAmount; } public void setTransactionsTotalAmount(Money transactionsTotalAmount) { this.transactionsTotalAmount = transactionsTotalAmount; } public YearMonthDay getWhenProcessedBySibs() { return whenProcessedBySibs; } public void setWhenProcessedBySibs(YearMonthDay whenProcessedBySibs) { this.whenProcessedBySibs = whenProcessedBySibs; } public Money getResidenceAmount() { return residenceAmount; } public void addResidenceAmount(Money money) { this.residenceAmount = this.residenceAmount.add(money); } public void addAmount(final SibsIncommingPaymentFileDetailLine detailLine, final PaymentCode paymentCode) { if (paymentCode.isForRectorate()) { addAmountForRectorate(detailLine.getAmount()); } else if (paymentCode instanceof AccountingEventPaymentCode) { addAmountForEvent(detailLine, paymentCode); } else if (paymentCode instanceof GratuitySituationPaymentCode) { addAmountForGratuitySituation(detailLine, (GratuitySituationPaymentCode) paymentCode); } else if (paymentCode instanceof MasterDegreeInsurancePaymentCode) { addAfterGraduationInsuranceAmount(detailLine.getAmount()); } else { throw new UnsupportedOperationException("Unknown payment code type"); } } private void addAmountForEvent(final SibsIncommingPaymentFileDetailLine detailLine, final PaymentCode paymentCode) { final Event event = ((AccountingEventPaymentCode) paymentCode).getAccountingEvent(); if (event instanceof GratuityEventWithPaymentPlan) { addAmountForGratuityEvent(detailLine, (GratuityEventWithPaymentPlan) event); } else if (event instanceof AdministrativeOfficeFeeAndInsuranceEvent) { addAmountForAdministrativeOfficeAndInsuranceEvent(detailLine, (AdministrativeOfficeFeeAndInsuranceEvent) event); } else if (event instanceof DfaGratuityEvent) { addDfaGratuityAmount(detailLine.getAmount()); } else if (event instanceof InsuranceEvent) { addAfterGraduationInsuranceAmount(detailLine.getAmount()); } else if (event 
instanceof ResidenceEvent) { addResidenceAmount(detailLine.getAmount()); } else if (event instanceof SecondCycleIndividualCandidacyEvent) { addSecondCycleIndividualCandidacyAmount(detailLine.getAmount()); } else if (event instanceof DegreeChangeIndividualCandidacyEvent) { addDegreeChangeIndividualCandidacyAmount(detailLine.getAmount()); } else if (event instanceof DegreeCandidacyForGraduatedPersonEvent) { addDegreeCandidacyForGraduatedPersonAmount(detailLine.getAmount()); } else if (event instanceof DegreeTransferIndividualCandidacyEvent) { addDegreeTransferIndividualCandidacyAmount(detailLine.getAmount()); } else if (event instanceof StandaloneEnrolmentGratuityEvent) { addStandaloneEnrolmentGratuityEventAmount(detailLine.getAmount()); } else if (event instanceof Over23IndividualCandidacyEvent) { addOver23IndividualCandidacyEventAmount(detailLine.getAmount()); } else if (event instanceof PhdProgramCandidacyEvent) { addPhdProgramCandidacyEventAmount(detailLine.getAmount()); } else { throw new IllegalArgumentException("Unknown accounting event " + event.getClass().getName()); } } private void addAmountForGratuityEvent(final SibsIncommingPaymentFileDetailLine detailLine, final GratuityEventWithPaymentPlan gratuityEventWithPaymentPlan) { if (gratuityEventWithPaymentPlan.getDegree().getDegreeType().isPreBolonhaDegree()) { addDegreeGratuityAmount(detailLine.getAmount()); return; } if (gratuityEventWithPaymentPlan.getDegree().getDegreeType().isBolonhaMasterDegree()) { addBolonhaMasterDegreGratuityTotalAmount(detailLine.getAmount()); return; } if (gratuityEventWithPaymentPlan.getDegree().getDegreeType().isBolonhaDegree()) { addBolonhaDegreeGratuityAmount(detailLine.getAmount()); return; } if (gratuityEventWithPaymentPlan.getDegree().getDegreeType().isIntegratedMasterDegree()) { addIntegratedBolonhaMasterDegreeGratuityAmount(detailLine.getAmount()); return; } throw new IllegalArgumentException("unknown degree type for gratuity event"); } private void 
addAmountForAdministrativeOfficeAndInsuranceEvent(final SibsIncommingPaymentFileDetailLine detailLine, final AdministrativeOfficeFeeAndInsuranceEvent administrativeOfficeFeeAndInsuranceEvent) { if (detailLine.getAmount().greaterOrEqualThan(administrativeOfficeFeeAndInsuranceEvent.getAmountToPay())) { addGraduationInsuranceAmount(administrativeOfficeFeeAndInsuranceEvent.getInsuranceAmount()); addAdministrativeOfficeTaxAmount(detailLine.getAmount().subtract( administrativeOfficeFeeAndInsuranceEvent.getInsuranceAmount())); } else { addAdministrativeOfficeTaxAmount(detailLine.getAmount()); } } private void addAmountForGratuitySituation(final SibsIncommingPaymentFileDetailLine detailLine, GratuitySituationPaymentCode paymentCode) { final GratuitySituation gratuitySituation = paymentCode.getGratuitySituation(); switch (gratuitySituation.getStudentCurricularPlan().getSpecialization()) { case STUDENT_CURRICULAR_PLAN_MASTER_DEGREE: addMasterDegreeGratuityAmount(detailLine.getAmount()); break; case STUDENT_CURRICULAR_PLAN_INTEGRATED_MASTER_DEGREE: addIntegratedMasterDegreeGratuityAmount(detailLine.getAmount()); break; case STUDENT_CURRICULAR_PLAN_SPECIALIZATION: addSpecializationGratuityAmount(detailLine.getAmount()); break; default: throw new RuntimeException("Unknown specialization " + gratuitySituation.getStudentCurricularPlan().getSpecialization().name()); } } public Money getDegreeChangeIndividualCandidacyAmount() { return degreeChangeIndividualCandidacyAmount; } public Money getDegreeTransferIndividualCandidacyAmount() { return degreeTransferIndividualCandidacyAmount; } public Money getSecondCycleIndividualCandidacyAmount() { return secondCycleIndividualCandidacyAmount; } public Money getDegreeCandidacyForGraduatedPersonAmount() { return degreeCandidacyForGraduatedPersonAmount; } public Money getRectorateAmount() { return this.rectorateAmount; } public void addDegreeChangeIndividualCandidacyAmount(Money money) { this.degreeChangeIndividualCandidacyAmount = 
this.degreeChangeIndividualCandidacyAmount.add(money); } public void addDegreeTransferIndividualCandidacyAmount(Money money) { this.degreeTransferIndividualCandidacyAmount = this.degreeTransferIndividualCandidacyAmount.add(money); } public void addSecondCycleIndividualCandidacyAmount(Money money) { this.secondCycleIndividualCandidacyAmount = this.secondCycleIndividualCandidacyAmount.add(money); } public void addDegreeCandidacyForGraduatedPersonAmount(Money money) { this.degreeCandidacyForGraduatedPersonAmount = this.degreeCandidacyForGraduatedPersonAmount.add(money); } public Money getStandaloneEnrolmentGratuityEventAmount() { return this.standaloneEnrolmentGratuityEventAmount; } public void addStandaloneEnrolmentGratuityEventAmount(Money amount) { this.standaloneEnrolmentGratuityEventAmount = this.standaloneEnrolmentGratuityEventAmount.add(amount); } public void addAmountForRectorate(Money amount) { this.rectorateAmount = this.rectorateAmount.add(amount); } }
lgpl-3.0
yangcwGIT/simpleimage
simpleimage.core/src/test/java/com/alibaba/simpleimage/ColorConvertTest.java
2325
/**
 * Project: simple-image-1.0 File Created at 2010-7-9 $Id$ Copyright 2008 Alibaba.com Croporation Limited. All rights
 * reserved. This software is the confidential and proprietary information of Alibaba Company.
 * ("Confidential Information"). You shall not disclose such Confidential Information and shall use it only in
 * accordance with the terms of the license agreement you entered into with Alibaba.com.
 */
package com.alibaba.simpleimage;

import java.io.File;

/**
 * Verifies that CMYK-to-RGB conversion preserves colors, and that non-standard RGB images carrying
 * an embedded ICC profile are converted to sRGB without visible distortion.
 *
 * @author wendell
 */
public class ColorConvertTest extends BaseTest {

    static File cmykDir = new File("./src/test/resources/conf.test/simpleimage/cmyk");
    static File rgbDir  = new File("./src/test/resources/conf.test/simpleimage/rgb");
    static File gifDir  = new File("./src/test/resources/conf.test/simpleimage/gif");

    /** Converts every CMYK JPEG fixture to RGB JPEG. */
    public void testCMYK2RGB() throws Exception {
        convertAll(cmykDir, "jpg", "CMYK2RGB_", ImageFormat.JPEG);
    }

    /** Converts every non-standard RGB JPEG fixture to sRGB JPEG. */
    public void testRGB2sRGB() throws Exception {
        convertAll(rgbDir, "jpg", "RGB2RGB_", ImageFormat.JPEG);
    }

    /** Converts every indexed-color GIF fixture to RGB. */
    public void testIndexColor2RGB() throws Exception {
        convertAll(gifDir, "gif", "GIF2RGB_", ImageFormat.GIF);
    }

    /** Converts a single grayscale JPEG fixture to RGB JPEG. */
    public void testGray2RGB() throws Exception {
        File in = new File("./src/test/resources/conf.test/simpleimage/gray/gray.jpg");
        File out = new File(resultDir, "GRAY2RGB_" + in.getName());
        doReadWrite(in, out, ImageFormat.JPEG);
    }

    /**
     * Reads every file in {@code srcDir} whose lower-cased name ends with {@code suffix} and writes
     * it back in {@code format}, prefixing the output file name with {@code outPrefix}.
     *
     * @param srcDir    directory holding the input fixtures
     * @param suffix    lower-case file-name suffix to select (e.g. "jpg")
     * @param outPrefix prefix for the generated file placed in {@code resultDir}
     * @param format    output image format
     * @throws IllegalStateException if {@code srcDir} cannot be listed; {@link File#listFiles()}
     *                               returns {@code null} for a missing/unreadable directory, which
     *                               previously surfaced as an opaque NullPointerException
     */
    private void convertAll(File srcDir, String suffix, String outPrefix, ImageFormat format) throws Exception {
        File[] files = srcDir.listFiles();
        if (files == null) {
            throw new IllegalStateException("Cannot list fixture directory: " + srcDir);
        }
        for (File src : files) {
            String fileName = src.getName().toLowerCase();
            if (fileName.endsWith(suffix)) {
                File out = new File(resultDir, outPrefix + fileName);
                doReadWrite(src, out, format);
            }
        }
    }
}
apache-2.0
barmstrong/bitcoin-android
src/com/google/bitcoin/bouncycastle/math/ec/SimpleBigDecimal.java
6890
package com.google.bitcoin.bouncycastle.math.ec;

import java.math.BigInteger;

/**
 * Class representing a simple version of a big decimal. A
 * <code>SimpleBigDecimal</code> is basically a
 * {@link java.math.BigInteger BigInteger} with a few digits on the right of
 * the decimal point. The number of (binary) digits on the right of the decimal
 * point is called the <code>scale</code> of the <code>SimpleBigDecimal</code>.
 * Unlike in {@link java.math.BigDecimal BigDecimal}, the scale is not adjusted
 * automatically, but must be set manually. All <code>SimpleBigDecimal</code>s
 * taking part in the same arithmetic operation must have equal scale. The
 * result of a multiplication of two <code>SimpleBigDecimal</code>s returns a
 * <code>SimpleBigDecimal</code> with double scale.
 */
class SimpleBigDecimal
    //extends Number   // not in J2ME - add compatibility class?
{
    private static final long serialVersionUID = 1L;

    // Internal representation: the numeric value is bigInt / 2^scale.
    private final BigInteger bigInt;
    private final int scale;

    /**
     * Returns a <code>SimpleBigDecimal</code> representing the same numerical
     * value as <code>value</code>.
     * @param value The value of the <code>SimpleBigDecimal</code> to be
     * created.
     * @param scale The scale of the <code>SimpleBigDecimal</code> to be
     * created.
     * @return The such created <code>SimpleBigDecimal</code>.
     */
    public static SimpleBigDecimal getInstance(BigInteger value, int scale)
    {
        return new SimpleBigDecimal(value.shiftLeft(scale), scale);
    }

    /**
     * Constructor for <code>SimpleBigDecimal</code>. The value of the
     * constructed <code>SimpleBigDecimal</code> equals <code>bigInt /
     * 2<sup>scale</sup></code>.
     * @param bigInt The <code>bigInt</code> value parameter.
     * @param scale The scale of the constructed <code>SimpleBigDecimal</code>.
     */
    public SimpleBigDecimal(BigInteger bigInt, int scale)
    {
        if (scale < 0)
        {
            throw new IllegalArgumentException("scale may not be negative");
        }

        this.bigInt = bigInt;
        this.scale = scale;
    }

    /** Copy constructor. */
    private SimpleBigDecimal(SimpleBigDecimal limBigDec)
    {
        bigInt = limBigDec.bigInt;
        scale = limBigDec.scale;
    }

    /**
     * Ensures <code>b</code> has the same scale as this instance; arithmetic
     * between mismatched scales would silently produce wrong results.
     */
    private void checkScale(SimpleBigDecimal b)
    {
        if (scale != b.scale)
        {
            throw new IllegalArgumentException("Only SimpleBigDecimal of " +
                "same scale allowed in arithmetic operations");
        }
    }

    /**
     * Returns a copy of this value re-expressed with <code>newScale</code>
     * fractional binary digits. Reducing the scale truncates (shifts right).
     */
    public SimpleBigDecimal adjustScale(int newScale)
    {
        if (newScale < 0)
        {
            throw new IllegalArgumentException("scale may not be negative");
        }

        if (newScale == scale)
        {
            return new SimpleBigDecimal(this);
        }

        return new SimpleBigDecimal(bigInt.shiftLeft(newScale - scale),
                newScale);
    }

    /** Adds another value of the same scale. */
    public SimpleBigDecimal add(SimpleBigDecimal b)
    {
        checkScale(b);
        return new SimpleBigDecimal(bigInt.add(b.bigInt), scale);
    }

    /** Adds an integer value (scaled up to match this instance's scale). */
    public SimpleBigDecimal add(BigInteger b)
    {
        return new SimpleBigDecimal(bigInt.add(b.shiftLeft(scale)), scale);
    }

    /** Returns the negation of this value. */
    public SimpleBigDecimal negate()
    {
        return new SimpleBigDecimal(bigInt.negate(), scale);
    }

    /** Subtracts another value of the same scale. */
    public SimpleBigDecimal subtract(SimpleBigDecimal b)
    {
        return add(b.negate());
    }

    /** Subtracts an integer value (scaled up to match this instance's scale). */
    public SimpleBigDecimal subtract(BigInteger b)
    {
        return new SimpleBigDecimal(bigInt.subtract(b.shiftLeft(scale)),
                scale);
    }

    /**
     * Multiplies by another value of the same scale. Note the result carries
     * DOUBLE scale (see class javadoc).
     */
    public SimpleBigDecimal multiply(SimpleBigDecimal b)
    {
        checkScale(b);
        return new SimpleBigDecimal(bigInt.multiply(b.bigInt), scale + scale);
    }

    /** Multiplies by an integer; scale is unchanged. */
    public SimpleBigDecimal multiply(BigInteger b)
    {
        return new SimpleBigDecimal(bigInt.multiply(b), scale);
    }

    /**
     * Divides by another value of the same scale, truncating toward zero;
     * the dividend is pre-shifted so the result keeps this scale.
     */
    public SimpleBigDecimal divide(SimpleBigDecimal b)
    {
        checkScale(b);
        BigInteger dividend = bigInt.shiftLeft(scale);
        return new SimpleBigDecimal(dividend.divide(b.bigInt), scale);
    }

    /** Divides by an integer, truncating toward zero; scale is unchanged. */
    public SimpleBigDecimal divide(BigInteger b)
    {
        return new SimpleBigDecimal(bigInt.divide(b), scale);
    }

    /** Multiplies by 2<sup>n</sup>; scale is unchanged. */
    public SimpleBigDecimal shiftLeft(int n)
    {
        return new SimpleBigDecimal(bigInt.shiftLeft(n), scale);
    }

    /** Compares to another value of the same scale. */
    public int compareTo(SimpleBigDecimal val)
    {
        checkScale(val);
        return bigInt.compareTo(val.bigInt);
    }

    /** Compares to an integer value (scaled up to match this instance's scale). */
    public int compareTo(BigInteger val)
    {
        return bigInt.compareTo(val.shiftLeft(scale));
    }

    /** Returns the largest integer not greater than this value. */
    public BigInteger floor()
    {
        return bigInt.shiftRight(scale);
    }

    /** Returns this value rounded to the nearest integer (half rounds up). */
    public BigInteger round()
    {
        SimpleBigDecimal oneHalf = new SimpleBigDecimal(ECConstants.ONE, 1);
        return add(oneHalf.adjustScale(scale)).floor();
    }

    public int intValue()
    {
        return floor().intValue();
    }

    public long longValue()
    {
        return floor().longValue();
    }

          /* NON-J2ME compliant.
    public double doubleValue()
    {
        return Double.valueOf(toString()).doubleValue();
    }

    public float floatValue()
    {
        return Float.valueOf(toString()).floatValue();
    }
       */

    public int getScale()
    {
        return scale;
    }

    /**
     * Renders the value as "integerPart.binaryFraction" where the fractional
     * part is printed in base 2, left-padded with zeroes to exactly
     * <code>scale</code> digits.
     */
    public String toString()
    {
        if (scale == 0)
        {
            return bigInt.toString();
        }

        BigInteger floorBigInt = floor();

        BigInteger fract = bigInt.subtract(floorBigInt.shiftLeft(scale));
        if (bigInt.signum() == -1)
        {
            fract = ECConstants.ONE.shiftLeft(scale).subtract(fract);
        }

        if ((floorBigInt.signum() == -1) && (!(fract.equals(ECConstants.ZERO))))
        {
            floorBigInt = floorBigInt.add(ECConstants.ONE);
        }
        String leftOfPoint = floorBigInt.toString();

        char[] fractCharArr = new char[scale];
        String fractStr = fract.toString(2);
        int fractLen = fractStr.length();
        int zeroes = scale - fractLen;
        for (int i = 0; i < zeroes; i++)
        {
            fractCharArr[i] = '0';
        }
        for (int j = 0; j < fractLen; j++)
        {
            fractCharArr[zeroes + j] = fractStr.charAt(j);
        }
        String rightOfPoint = new String(fractCharArr);

        // StringBuilder: no synchronization needed for this local-only buffer
        // (was StringBuffer).
        StringBuilder sb = new StringBuilder(leftOfPoint);
        sb.append(".");
        sb.append(rightOfPoint);

        return sb.toString();
    }

    public boolean equals(Object o)
    {
        if (this == o)
        {
            return true;
        }

        if (!(o instanceof SimpleBigDecimal))
        {
            return false;
        }

        SimpleBigDecimal other = (SimpleBigDecimal)o;
        return ((bigInt.equals(other.bigInt)) && (scale == other.scale));
    }

    public int hashCode()
    {
        return bigInt.hashCode() ^ scale;
    }

}
apache-2.0
objectiser/camel
docs/user-manual/modules/ROOT/examples/core/camel-core/src/test/java/org/apache/camel/processor/TransformTest.java
2205
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.processor;

import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.Before;
import org.junit.Test;

/**
 * Verifies that an inline {@link Processor} on a route can transform the
 * in-message body before it reaches the destination endpoint.
 */
public class TransformTest extends ContextTestSupport {
    protected MockEndpoint resultEndpoint;

    @Test
    public void testSendingAMessageUsingMulticastReceivesItsOwnExchange() throws Exception {
        // The route's processor appends " World!" to the body sent below.
        resultEndpoint.expectedBodiesReceived("Hello World!");

        sendBody("direct:start", "Hello");

        resultEndpoint.assertIsSatisfied();
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        resultEndpoint = getMockEndpoint("mock:result");
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            public void configure() {
                // NOTE: the tag::example[]/end::example[] markers below are
                // consumed by the asciidoc docs build — do not remove them.
                // tag::example[]
                from("direct:start").process(new Processor() {
                    public void process(Exchange exchange) {
                        Message in = exchange.getIn();
                        in.setBody(in.getBody(String.class) + " World!");
                    }
                }).to("mock:result");
                // end::example[]
            }
        };
    }
}
apache-2.0
goodwinnk/intellij-community
platform/lang-api/src/com/intellij/execution/dashboard/RunDashboardGroupingRule.java
2047
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.execution.dashboard;

import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.smartTree.TreeAction;
import com.intellij.openapi.extensions.ExtensionPointName;
import org.jetbrains.annotations.Nullable;

import java.util.Comparator;

/**
 * Action for grouping items in a run dashboard tree.
 * Grouping rules are applied to dashboard nodes according to their order defined in plug-in configuration.
 *
 * @author konstantin.aleev
 */
public interface RunDashboardGroupingRule extends TreeAction {
  ExtensionPointName<RunDashboardGroupingRule> EP_NAME = ExtensionPointName.create("com.intellij.runDashboardGroupingRule");

  /** Default ordering for groups: alphabetical by display name. */
  Comparator<RunDashboardGroup> GROUP_NAME_COMPARATOR = Comparator.comparing(RunDashboardGroup::getName);

  /**
   * @return {@code true} if this grouping rule should always be applied to dashboard nodes.
   */
  boolean isAlwaysEnabled();

  /**
   * @return {@code false} if groups containing a single node should not be added to the dashboard tree,
   * keeping such nodes ungrouped.
   */
  boolean shouldGroupSingleNodes();

  /**
   * @param node node which should be grouped by this grouping rule.
   * @return a group which the node belongs to, or {@code null} if the node could not be grouped by this rule.
   */
  @Nullable
  RunDashboardGroup getGroup(AbstractTreeNode<?> node);

  /**
   * @return comparator used to order the groups produced by this rule; defaults to name ordering.
   */
  default Comparator<RunDashboardGroup> getGroupComparator() {
    return GROUP_NAME_COMPARATOR;
  }
}
apache-2.0
stoksey69/googleads-java-lib
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201502/CreativeWrapperError.java
1712
package com.google.api.ads.dfp.jaxws.v201502;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;

/**
 *
 *             Errors specific to creative wrappers.
 *
 *
 * <p>Java class for CreativeWrapperError complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="CreativeWrapperError">
 *   &lt;complexContent>
 *     &lt;extension base="{https://www.google.com/apis/ads/publisher/v201502}ApiError">
 *       &lt;sequence>
 *         &lt;element name="reason" type="{https://www.google.com/apis/ads/publisher/v201502}CreativeWrapperError.Reason" minOccurs="0"/>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
// NOTE(review): JAXB-generated from the DFP v201502 WSDL — regenerate from the
// schema rather than hand-editing.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "CreativeWrapperError", propOrder = {
    "reason"
})
public class CreativeWrapperError
    extends ApiError
{

    // Optional in the schema (minOccurs="0"), hence nullable.
    @XmlSchemaType(name = "string")
    protected CreativeWrapperErrorReason reason;

    /**
     * Gets the value of the reason property.
     *
     * @return
     *     possible object is
     *     {@link CreativeWrapperErrorReason }
     *
     */
    public CreativeWrapperErrorReason getReason() {
        return reason;
    }

    /**
     * Sets the value of the reason property.
     *
     * @param value
     *     allowed object is
     *     {@link CreativeWrapperErrorReason }
     *
     */
    public void setReason(CreativeWrapperErrorReason value) {
        this.reason = value;
    }

}
apache-2.0
shyTNT/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201502/UnknownPremiumFeature.java
2591
/**
 * UnknownPremiumFeature.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

package com.google.api.ads.dfp.axis.v201502;

/**
 * The {@link PremiumFeature} returned if the actual feature is not
 *             exposed by
 *             the requested API version.
 */
// NOTE(review): Axis-generated code — regenerate from the WSDL rather than
// hand-editing. The equals/hashCode cycle guards below are the standard
// WSDL2Java pattern for object graphs with back-references.
public class UnknownPremiumFeature  extends com.google.api.ads.dfp.axis.v201502.PremiumFeature  implements java.io.Serializable {
    public UnknownPremiumFeature() {
    }

    // Cycle guard: holds the object currently being compared so recursive
    // equals() calls on a cyclic graph terminate.
    private java.lang.Object __equalsCalc = null;
    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof UnknownPremiumFeature)) return false;
        UnknownPremiumFeature other = (UnknownPremiumFeature) obj;
        // NOTE(review): dead code — a null obj cannot pass the instanceof
        // check above. Harmless artifact of the Axis code generator.
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj);
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard for hashCode(): a re-entrant call returns 0 instead of
    // recursing forever on a cyclic graph.
    private boolean __hashCodeCalc = false;
    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(UnknownPremiumFeature.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201502", "UnknownPremiumFeature"));
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
apache-2.0
ZhenyaM/veraPDF-pdfbox
examples/src/main/java/org/apache/pdfbox/examples/acroforms/package-info.java
894
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * AcroForms samples for PDFBox. * */ package org.apache.pdfbox.examples.acroforms;
apache-2.0
Apache9/netty
transport/src/test/java/io/netty/channel/pool/FixedChannelPoolTest.java
12947
/*
 * Copyright 2015 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.channel.pool;

import io.netty.bootstrap.Bootstrap;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.DefaultEventLoopGroup;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.local.LocalAddress;
import io.netty.channel.local.LocalChannel;
import io.netty.channel.local.LocalServerChannel;
import io.netty.channel.pool.FixedChannelPool.AcquireTimeoutAction;
import io.netty.util.concurrent.Future;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

import static org.junit.Assert.*;

/**
 * Tests for {@link FixedChannelPool}: acquire limits, acquire-timeout actions,
 * release validation and pool-close semantics, all exercised over the in-VM
 * local transport.
 */
public class FixedChannelPoolTest {
    private static final String LOCAL_ADDR_ID = "test.id";

    private static EventLoopGroup group;

    @BeforeClass
    public static void createEventLoop() {
        // DefaultEventLoopGroup replaces the deprecated LocalEventLoopGroup
        // (identical behavior; the import was already present).
        group = new DefaultEventLoopGroup();
    }

    @AfterClass
    public static void destroyEventLoop() {
        if (group != null) {
            group.shutdownGracefully();
        }
    }

    /**
     * A second acquire on a pool of size 1 must not complete until the first
     * channel is released, and must then hand back the very same channel.
     */
    @Test
    public void testAcquire() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        CountingChannelPoolHandler handler = new CountingChannelPoolHandler();

        ChannelPool pool = new FixedChannelPool(cb, handler, 1, Integer.MAX_VALUE);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        Future<Channel> future = pool.acquire();
        assertFalse(future.isDone());

        pool.release(channel).syncUninterruptibly();
        assertTrue(future.await(1, TimeUnit.SECONDS));

        Channel channel2 = future.getNow();
        assertSame(channel, channel2);
        assertEquals(1, handler.channelCount());
        assertEquals(1, handler.acquiredCount());
        assertEquals(1, handler.releasedCount());

        sc.close().syncUninterruptibly();
        channel2.close().syncUninterruptibly();
    }

    /**
     * With {@link AcquireTimeoutAction#FAIL}, an acquire that cannot be
     * satisfied within the timeout must fail with a {@link TimeoutException}.
     */
    @Test(expected = TimeoutException.class)
    public void testAcquireTimeout() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();
        ChannelPool pool = new FixedChannelPool(cb, handler, ChannelHealthChecker.ACTIVE,
                                                AcquireTimeoutAction.FAIL, 500, 1, Integer.MAX_VALUE);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        Future<Channel> future = pool.acquire();
        try {
            future.syncUninterruptibly();
        } finally {
            sc.close().syncUninterruptibly();
            channel.close().syncUninterruptibly();
        }
    }

    /**
     * With {@link AcquireTimeoutAction#NEW}, an acquire that times out must
     * be satisfied by creating a fresh connection beyond the pool limit.
     */
    @Test
    public void testAcquireNewConnection() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();
        ChannelPool pool = new FixedChannelPool(cb, handler, ChannelHealthChecker.ACTIVE,
                                                AcquireTimeoutAction.NEW, 500, 1, Integer.MAX_VALUE);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        Channel channel2 = pool.acquire().syncUninterruptibly().getNow();
        assertNotSame(channel, channel2);
        sc.close().syncUninterruptibly();
        channel.close().syncUninterruptibly();
        channel2.close().syncUninterruptibly();
    }

    /**
     * Tests that the acquiredChannelCount is not added up several times for the same channel acquire request.
     * @throws Exception
     */
    @Test
    public void testAcquireNewConnectionWhen() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();
        ChannelPool pool = new FixedChannelPool(cb, handler, 1);
        Channel channel1 = pool.acquire().syncUninterruptibly().getNow();
        channel1.close().syncUninterruptibly();
        // Release of the already-closed channel is deliberately not awaited;
        // the subsequent acquire must still produce a new, distinct channel.
        pool.release(channel1);

        Channel channel2 = pool.acquire().syncUninterruptibly().getNow();

        assertNotSame(channel1, channel2);
        sc.close().syncUninterruptibly();
        channel2.close().syncUninterruptibly();
    }

    /**
     * When the pending-acquire queue is bounded (size 1), a third acquire
     * must fail immediately with {@link IllegalStateException}.
     */
    @Test(expected = IllegalStateException.class)
    public void testAcquireBoundQueue() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();
        ChannelPool pool = new FixedChannelPool(cb, handler, 1, 1);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        Future<Channel> future = pool.acquire();
        assertFalse(future.isDone());

        try {
            pool.acquire().syncUninterruptibly();
        } finally {
            sc.close().syncUninterruptibly();
            channel.close().syncUninterruptibly();
        }
    }

    /**
     * Releasing a channel into a pool that did not create it must fail with
     * {@link IllegalArgumentException}.
     */
    @Test(expected = IllegalArgumentException.class)
    public void testReleaseDifferentPool() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group)
          .channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        ChannelPoolHandler handler = new TestChannelPoolHandler();
        ChannelPool pool = new FixedChannelPool(cb, handler, 1, 1);
        ChannelPool pool2 = new FixedChannelPool(cb, handler, 1, 1);

        Channel channel = pool.acquire().syncUninterruptibly().getNow();

        try {
            pool2.release(channel).syncUninterruptibly();
        } finally {
            sc.close().syncUninterruptibly();
            channel.close().syncUninterruptibly();
        }
    }

    /**
     * A release after {@link FixedChannelPool#close()} must fail with the
     * pool's dedicated IllegalStateException and close the channel.
     */
    @Test
    public void testReleaseAfterClosePool() throws Exception {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group).channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        FixedChannelPool pool = new FixedChannelPool(cb, new TestChannelPoolHandler(), 2);
        final Future<Channel> acquire = pool.acquire();
        final Channel channel = acquire.get();
        pool.close();
        // Run an empty task on the event loop so the close has fully taken
        // effect before we attempt the release.
        group.submit(new Runnable() {
            @Override
            public void run() {
                // NOOP
            }
        }).syncUninterruptibly();
        try {
            pool.release(channel).syncUninterruptibly();
            fail();
        } catch (IllegalStateException e) {
            assertSame(FixedChannelPool.POOL_CLOSED_ON_RELEASE_EXCEPTION, e);
        }
        // Since the pool is closed, the Channel should have been closed as well.
        channel.closeFuture().syncUninterruptibly();
        assertFalse("Unexpected open channel", channel.isOpen());
        sc.close().syncUninterruptibly();
    }

    /** Releasing an already-closed channel must still complete successfully. */
    @Test
    public void testReleaseClosed() {
        LocalAddress addr = new LocalAddress(LOCAL_ADDR_ID);
        Bootstrap cb = new Bootstrap();
        cb.remoteAddress(addr);
        cb.group(group).channel(LocalChannel.class);

        ServerBootstrap sb = new ServerBootstrap();
        sb.group(group)
          .channel(LocalServerChannel.class)
          .childHandler(new ChannelInitializer<LocalChannel>() {
              @Override
              public void initChannel(LocalChannel ch) throws Exception {
                  ch.pipeline().addLast(new ChannelInboundHandlerAdapter());
              }
          });

        // Start server
        Channel sc = sb.bind(addr).syncUninterruptibly().channel();
        FixedChannelPool pool = new FixedChannelPool(cb, new TestChannelPoolHandler(), 2);
        Channel channel = pool.acquire().syncUninterruptibly().getNow();
        channel.close().syncUninterruptibly();
        pool.release(channel).syncUninterruptibly();

        sc.close().syncUninterruptibly();
    }

    /** Pool handler that needs no per-channel setup. */
    private static final class TestChannelPoolHandler extends AbstractChannelPoolHandler {
        @Override
        public void channelCreated(Channel ch) throws Exception {
            // NOOP
        }
    }
}
apache-2.0
lukecwik/incubator-beam
sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSourceDef.java
4796
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.io.gcp.bigquery;

import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkNotNull;
import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkState;

import com.google.api.services.bigquery.model.TableReference;
import com.google.api.services.bigquery.model.TableSchema;
import java.io.IOException;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.annotations.Experimental.Kind;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.options.ValueProvider;
import org.apache.beam.sdk.schemas.Schema;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * A {@link BigQuerySourceDef} that reads directly from a BigQuery table.
 *
 * <p>The table reference is held as a JSON-serialized {@link ValueProvider} so it can be resolved
 * lazily at pipeline execution time.
 */
@SuppressWarnings({
  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
class BigQueryTableSourceDef implements BigQuerySourceDef {
  private static final Logger LOG = LoggerFactory.getLogger(BigQueryTableSourceDef.class);

  private final BigQueryServices bqServices;
  private final ValueProvider<String> jsonTable;

  /**
   * Creates a table source definition.
   *
   * @param bqServices BigQuery service facade used for schema lookups and reads
   * @param table the table to read; must not be null
   */
  static BigQueryTableSourceDef create(
      BigQueryServices bqServices, ValueProvider<TableReference> table) {
    // Serialize the table reference to JSON lazily so runtime-provided values work.
    return new BigQueryTableSourceDef(
        bqServices,
        ValueProvider.NestedValueProvider.of(
            checkNotNull(table, "table"), new BigQueryHelpers.TableRefToJson()));
  }

  private BigQueryTableSourceDef(BigQueryServices bqServices, ValueProvider<String> jsonTable) {
    this.bqServices = bqServices;
    this.jsonTable = jsonTable;
  }

  /**
   * Resolves the JSON table reference, filling in the default project if none was given.
   *
   * @throws IOException if the stored JSON cannot be parsed
   */
  TableReference getTableReference(BigQueryOptions bqOptions) throws IOException {
    TableReference parsed =
        BigQueryIO.JSON_FACTORY.fromString(jsonTable.get(), TableReference.class);
    return setDefaultProjectIfAbsent(bqOptions, parsed);
  }

  /**
   * Sets the {@link TableReference#projectId} of the provided table reference to the id of the
   * default project if the table reference does not have a project ID specified.
   */
  private TableReference setDefaultProjectIfAbsent(
      BigQueryOptions bqOptions, TableReference tableRef) {
    if (!Strings.isNullOrEmpty(tableRef.getProjectId())) {
      return tableRef; // Project explicitly specified; nothing to do.
    }
    checkState(
        !Strings.isNullOrEmpty(bqOptions.getProject()),
        "No project ID set in %s or %s, cannot construct a complete %s",
        TableReference.class.getSimpleName(),
        BigQueryOptions.class.getSimpleName(),
        TableReference.class.getSimpleName());
    LOG.info(
        "Project ID not set in {}. Using default project from {}.",
        TableReference.class.getSimpleName(),
        BigQueryOptions.class.getSimpleName());
    // Prefer the dedicated BigQuery project option; fall back to the general project.
    String project = bqOptions.getBigQueryProject();
    if (project == null) {
      project = bqOptions.getProject();
    }
    tableRef.setProjectId(project);
    return tableRef;
  }

  /** Returns the JSON-serialized table reference provider. */
  ValueProvider<String> getJsonTable() {
    return jsonTable;
  }

  /** {@inheritDoc} */
  @Override
  public <T> BigQuerySourceBase<T> toSource(
      String stepUuid,
      Coder<T> coder,
      SerializableFunction<SchemaAndRecord, T> parseFn,
      boolean useAvroLogicalTypes) {
    return BigQueryTableSource.create(
        stepUuid, this, bqServices, coder, parseFn, useAvroLogicalTypes);
  }

  /** {@inheritDoc} */
  @Experimental(Kind.SCHEMAS)
  @Override
  public Schema getBeamSchema(BigQueryOptions bqOptions) {
    try {
      TableReference tableRef = getTableReference(bqOptions);
      TableSchema tableSchema =
          bqServices.getDatasetService(bqOptions).getTable(tableRef).getSchema();
      return BigQueryUtils.fromTableSchema(tableSchema);
      // NPE is caught deliberately: getTable() may return null for a missing table.
    } catch (IOException | InterruptedException | NullPointerException e) {
      throw new BigQuerySchemaRetrievalException("Exception while trying to retrieve schema", e);
    }
  }
}
apache-2.0
DevStreet/FinanceAnalytics
projects/OG-Financial/src/main/java/com/opengamma/financial/analytics/volatility/cube/rest/DataVolatilityCubeSpecificationSourceResource.java
3146
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.financial.analytics.volatility.cube.rest;

import java.net.URI;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;

import org.threeten.bp.Instant;

import com.opengamma.financial.analytics.volatility.cube.VolatilityCubeSpecification;
import com.opengamma.financial.analytics.volatility.cube.VolatilityCubeSpecificationSource;
import com.opengamma.id.VersionCorrection;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.rest.AbstractDataResource;

/**
 * RESTful resource for the volatility cube specification source.
 * <p>
 * This resource receives and processes RESTful calls to the source.
 */
@Path("volatilityCubeSpecificationSource")
public class DataVolatilityCubeSpecificationSourceResource extends AbstractDataResource {

  /**
   * The source.
   */
  private final VolatilityCubeSpecificationSource _source;

  /**
   * Creates the resource, exposing the underlying source over REST.
   *
   * @param source the underlying source, not null
   */
  public DataVolatilityCubeSpecificationSourceResource(final VolatilityCubeSpecificationSource source) {
    ArgumentChecker.notNull(source, "source");
    _source = source;
  }

  //-------------------------------------------------------------------------
  /**
   * Gets the source.
   *
   * @return the source, not null
   */
  public VolatilityCubeSpecificationSource getVolatilityCubeSpecificationSource() {
    return _source;
  }

  //-------------------------------------------------------------------------
  /**
   * Hateoas entry point describing the resource.
   *
   * @param uriInfo the caller's URI information, not null
   * @return the response, not null
   */
  @GET
  public Response getHateaos(@Context final UriInfo uriInfo) {
    return hateoasResponse(uriInfo);
  }

  /**
   * Searches for a single specification by name, optionally at a given version.
   * <p>
   * BUG FIX: the path was previously {@code definitions/searchSingle}, which did not match
   * the {@code /specifications/searchSingle} path built by {@link #uriSearchSingle}, so
   * client calls constructed via that helper returned 404. This resource serves
   * specifications, so the path is now {@code specifications/searchSingle}.
   *
   * @param versionAsOfStr the version instant to fetch as a string, null means latest
   * @param name the specification name, not null
   * @return the response containing the specification, not null
   */
  @GET
  @Path("specifications/searchSingle")
  public Response searchSingle(
      @QueryParam("versionAsOf") final String versionAsOfStr,
      @QueryParam("name") final String name) {
    if (versionAsOfStr != null) {
      // Parse with null correction: locate the version as-of the given instant.
      final VersionCorrection versionCorrection = VersionCorrection.parse(versionAsOfStr, null);
      final VolatilityCubeSpecification result =
          getVolatilityCubeSpecificationSource().getSpecification(name, versionCorrection);
      return responseOkFudge(result);
    }
    final VolatilityCubeSpecification result = getVolatilityCubeSpecificationSource().getSpecification(name);
    return responseOkFudge(result);
  }

  /**
   * Builds a URI.
   *
   * @param baseUri the base URI, not null
   * @param name the name, not null
   * @param versionAsOf the version to fetch, null means latest
   * @return the URI, not null
   */
  public static URI uriSearchSingle(final URI baseUri, final String name, final Instant versionAsOf) {
    final UriBuilder bld = UriBuilder.fromUri(baseUri).path("/specifications/searchSingle");
    bld.queryParam("name", name);
    if (versionAsOf != null) {
      bld.queryParam("versionAsOf", versionAsOf.toString());
    }
    return bld.build();
  }

}
apache-2.0
stoksey69/googleads-java-lib
modules/adwords_axis/src/main/java/com/google/api/ads/adwords/axis/v201506/o/TrafficEstimatorError.java
4379
/**
 * TrafficEstimatorError.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

package com.google.api.ads.adwords.axis.v201506.o;

/**
 * Base error class for
 * {@link com.google.ads.api.services.trafficestimator.TrafficEstimatorService}.
 */
// NOTE: machine-generated Axis bean — do not hand-edit the code; regenerate from the WSDL.
public class TrafficEstimatorError extends com.google.api.ads.adwords.axis.v201506.cm.ApiError implements java.io.Serializable {
    // The service-reported reason for this error (optional in the schema, minOccurs=0).
    private com.google.api.ads.adwords.axis.v201506.o.TrafficEstimatorErrorReason reason;

    public TrafficEstimatorError() {
    }

    public TrafficEstimatorError(
           java.lang.String fieldPath,
           java.lang.String trigger,
           java.lang.String errorString,
           java.lang.String apiErrorType,
           com.google.api.ads.adwords.axis.v201506.o.TrafficEstimatorErrorReason reason) {
        super(
            fieldPath,
            trigger,
            errorString,
            apiErrorType);
        this.reason = reason;
    }

    /**
     * Gets the reason value for this TrafficEstimatorError.
     *
     * @return reason
     */
    public com.google.api.ads.adwords.axis.v201506.o.TrafficEstimatorErrorReason getReason() {
        return reason;
    }

    /**
     * Sets the reason value for this TrafficEstimatorError.
     *
     * @param reason
     */
    public void setReason(com.google.api.ads.adwords.axis.v201506.o.TrafficEstimatorErrorReason reason) {
        this.reason = reason;
    }

    // Re-entrancy guard used by equals() to terminate on cyclic object graphs.
    private java.lang.Object __equalsCalc = null;

    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof TrafficEstimatorError)) return false;
        TrafficEstimatorError other = (TrafficEstimatorError) obj;
        if (obj == null) return false;
        if (this == obj) return true;
        // If we are already inside an equals() call for this object, compare by identity.
        if (__equalsCalc != null) {
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) &&
            ((this.reason==null && other.getReason()==null) ||
             (this.reason!=null &&
              this.reason.equals(other.getReason())));
        __equalsCalc = null;
        return _equals;
    }

    // Re-entrancy guard used by hashCode() to terminate on cyclic object graphs.
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getReason() != null) {
            _hashCode += getReason().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata describing how this bean maps to/from the WSDL-defined XML type.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(TrafficEstimatorError.class, true);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201506", "TrafficEstimatorError"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("reason");
        elemField.setXmlName(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201506", "reason"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://adwords.google.com/api/adwords/o/v201506", "TrafficEstimatorError.Reason"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new  org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
apache-2.0
AnuchitPrasertsang/dropwizard
dropwizard-jersey/src/main/java/io/dropwizard/jersey/gzip/ConfiguredGZipEncoder.java
1983
package io.dropwizard.jersey.gzip;

import java.io.IOException;
import java.util.zip.GZIPOutputStream;

import javax.annotation.Priority;
import javax.ws.rs.Priorities;
import javax.ws.rs.WebApplicationException;
import javax.ws.rs.client.ClientRequestContext;
import javax.ws.rs.client.ClientRequestFilter;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.ext.Provider;
import javax.ws.rs.ext.WriterInterceptor;
import javax.ws.rs.ext.WriterInterceptorContext;

/**
 * GZIP encoding support. Writer interceptor that encodes the output if
 * {@link HttpHeaders#CONTENT_ENCODING Content-Encoding header} value equals
 * to {@code gzip} or {@code x-gzip}.
 *
 * If so configured, it will encode the output even if the {@code gzip} and {@code x-gzip}
 * {@link HttpHeaders#CONTENT_ENCODING Content-Encoding header} is missing, and insert a value
 * of {@code gzip} for that header.
 */
@Provider
@Priority(Priorities.ENTITY_CODER)
public class ConfiguredGZipEncoder implements WriterInterceptor, ClientRequestFilter {
    // When true, a missing Content-Encoding header on an entity-bearing request
    // is set to "gzip", forcing the interceptor below to compress the body.
    private boolean forceEncoding = false;

    public ConfiguredGZipEncoder(boolean forceEncoding) {
        this.forceEncoding = forceEncoding;
    }

    @Override
    public void filter(ClientRequestContext context) throws IOException {
        if (!this.forceEncoding) {
            return;
        }
        if (!context.hasEntity()) {
            return;
        }
        // Only inject the header when the caller has not set one already.
        if (context.getHeaders().getFirst(HttpHeaders.CONTENT_ENCODING) == null) {
            context.getHeaders().add(HttpHeaders.CONTENT_ENCODING, "gzip");
        }
    }

    @Override
    public final void aroundWriteTo(WriterInterceptorContext context) throws IOException, WebApplicationException {
        final String contentEncoding = (String) context.getHeaders().getFirst(HttpHeaders.CONTENT_ENCODING);
        // "literal".equals(x) is null-safe, so no separate null check is needed.
        final boolean gzipRequested = "gzip".equals(contentEncoding) || "x-gzip".equals(contentEncoding);
        if (gzipRequested) {
            context.setOutputStream(new GZIPOutputStream(context.getOutputStream()));
        }
        context.proceed();
    }
}
apache-2.0
shyamalschandra/flex-sdk
modules/debugger/src/java/flash/tools/debugger/threadsafe/ThreadSafeValueExp.java
2232
/*
 *
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */

package flash.tools.debugger.threadsafe;

import flash.tools.debugger.PlayerDebugException;
import flash.tools.debugger.expression.Context;
import flash.tools.debugger.expression.NoSuchVariableException;
import flash.tools.debugger.expression.PlayerFaultException;
import flash.tools.debugger.expression.ValueExp;

/**
 * Thread-safe wrapper for flash.tools.debugger.expression.ValueExp.
 *
 * Every delegating call is serialized on the shared sync object supplied at
 * construction time, so concurrent debugger threads never touch the wrapped
 * expression simultaneously.
 */
public class ThreadSafeValueExp extends ThreadSafeDebuggerObject implements ValueExp {
	// The wrapped, non-thread-safe expression; all access goes through getSyncObject().
	private final ValueExp m_valueExp;

	public ThreadSafeValueExp(Object syncObj, ValueExp valueExp) {
		super(syncObj);
		m_valueExp = valueExp;
	}

	/**
	 * Wraps a ValueExp inside a ThreadSafeValueExp.  If the passed-in
	 * ValueExp is null, then this function returns null.
	 */
	public static ThreadSafeValueExp wrap(Object syncObj, ValueExp valueExp) {
		// Null-propagating factory: never wrap a null delegate.
		return (valueExp == null) ? null : new ThreadSafeValueExp(syncObj, valueExp);
	}

	/** Evaluates the expression under the shared lock. */
	public Object evaluate(Context context) throws NumberFormatException,
			NoSuchVariableException, PlayerFaultException, PlayerDebugException {
		synchronized (getSyncObject()) {
			return m_valueExp.evaluate(context);
		}
	}

	/** Returns whether the expression contains an assignment, under the shared lock. */
	public boolean containsAssignment() {
		synchronized (getSyncObject()) {
			return m_valueExp.containsAssignment();
		}
	}

	/** Returns whether member lookup is enabled, under the shared lock. */
	public boolean isLookupMembers() {
		synchronized (getSyncObject()) {
			return m_valueExp.isLookupMembers();
		}
	}
}
apache-2.0
shun634501730/java_source_cn
src_en/com/sun/jmx/snmp/IPAcl/JDMInformCommunity.java
806
/*
 * Copyright (c) 2000, 2003, Oracle and/or its affiliates. All rights reserved.
 * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 *
 */

/* Generated By:JJTree: Do not edit this line. JDMInformCommunity.java */

package com.sun.jmx.snmp.IPAcl;

// AST node produced by the JJTree-generated IP ACL parser, holding the SNMP
// community string of an "inform" clause. Machine-generated: do not hand-edit.
class JDMInformCommunity extends SimpleNode {
  // Community string parsed from the ACL file; empty until set by the parser.
  protected String community= "";

  JDMInformCommunity(int id) {
    super(id);
  }

  JDMInformCommunity(Parser p, int id) {
    super(p, id);
  }

  // Factory used by the generated parser to build tree nodes.
  public static Node jjtCreate(int id) {
      return new JDMInformCommunity(id);
  }

  public static Node jjtCreate(Parser p, int id) {
      return new JDMInformCommunity(p, id);
  }

  // Returns the community string for this inform clause.
  public String getCommunity() {
    return community;
  }
}
apache-2.0
RobinUS2/presto
presto-main/src/test/java/com/facebook/presto/sql/planner/optimizations/TestSetFlatteningOptimizer.java
5541
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.sql.planner.optimizations;

import com.facebook.presto.sql.planner.StatsRecorder;
import com.facebook.presto.sql.planner.assertions.BasePlanTest;
import com.facebook.presto.sql.planner.assertions.PlanMatchPattern;
import com.facebook.presto.sql.planner.iterative.IterativeOptimizer;
import com.facebook.presto.sql.planner.iterative.rule.RemoveRedundantIdentityProjections;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.testng.annotations.Test;

import java.util.List;

import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.anyTree;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.except;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.intersect;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.tableScan;
import static com.facebook.presto.sql.planner.assertions.PlanMatchPattern.union;

/**
 * Tests for {@code SetFlatteningOptimizer}: verifies that nested UNION/INTERSECT
 * set operations are collapsed into a single multi-input node where legal, and
 * left nested where flattening would change semantics (EXCEPT right side, mixed
 * operation kinds, UNION under UNION ALL).
 */
public class TestSetFlatteningOptimizer
        extends BasePlanTest
{
    // Nested UNIONs of the same kind collapse into one four-input union.
    @Test
    public void testFlattensUnion()
    {
        assertPlan(
                "(SELECT * FROM nation UNION SELECT * FROM nation)" +
                        "UNION (SELECT * FROM nation UNION SELECT * FROM nation)",
                anyTree(
                        union(
                                tableScan("nation"),
                                tableScan("nation"),
                                tableScan("nation"),
                                tableScan("nation"))));
    }

    // UNION ALL nests flatten the same way as UNION.
    @Test
    public void testFlattensUnionAll()
    {
        assertPlan(
                "(SELECT * FROM nation UNION ALL SELECT * FROM nation)" +
                        "UNION ALL (SELECT * FROM nation UNION ALL SELECT * FROM nation)",
                anyTree(
                        union(
                                tableScan("nation"),
                                tableScan("nation"),
                                tableScan("nation"),
                                tableScan("nation"))));
    }

    // A distinct UNION below a UNION ALL must keep its own (deduplicating) node;
    // only the inner UNION/UNION ALL pair merges.
    @Test
    public void testFlattensUnionAndUnionAllWhenAllowed()
    {
        assertPlan(
                "SELECT * FROM nation " +
                        "UNION ALL (SELECT * FROM nation " +
                        "UNION (SELECT * FROM nation UNION ALL select * FROM nation))",
                anyTree(
                        union(
                                tableScan("nation"),
                                anyTree(
                                        union(
                                                tableScan("nation"),
                                                tableScan("nation"),
                                                tableScan("nation"))))));
    }

    // Nested INTERSECTs collapse like UNIONs.
    @Test
    public void testFlattensIntersect()
    {
        assertPlan(
                "(SELECT * FROM nation INTERSECT SELECT * FROM nation)" +
                        "INTERSECT (SELECT * FROM nation INTERSECT SELECT * FROM nation)",
                anyTree(
                        intersect(
                                tableScan("nation"),
                                tableScan("nation"),
                                tableScan("nation"),
                                tableScan("nation"))));
    }

    // EXCEPT is not associative on its right side, so only the left input flattens.
    @Test
    public void testFlattensOnlyFirstInputOfExcept()
    {
        assertPlan(
                "(SELECT * FROM nation EXCEPT SELECT * FROM nation)" +
                        "EXCEPT (SELECT * FROM nation EXCEPT SELECT * FROM nation)",
                anyTree(
                        except(
                                tableScan("nation"),
                                tableScan("nation"),
                                except(
                                        tableScan("nation"),
                                        tableScan("nation")))));
    }

    // Different set-operation kinds never merge into one node.
    @Test
    public void testDoesNotFlattenDifferentSetOperations()
    {
        assertPlan(
                "(SELECT * FROM nation EXCEPT SELECT * FROM nation)" +
                        "UNION (SELECT * FROM nation INTERSECT SELECT * FROM nation)",
                anyTree(
                        union(
                                except(
                                        tableScan("nation"),
                                        tableScan("nation")),
                                intersect(
                                        tableScan("nation"),
                                        tableScan("nation")))));
    }

    // Runs the minimal optimizer pipeline needed for the flattener to see a
    // canonical plan, then matches the result against the expected pattern.
    public void assertPlan(String sql, PlanMatchPattern pattern)
    {
        List<PlanOptimizer> optimizers = ImmutableList.of(
                new UnaliasSymbolReferences(),
                new PruneUnreferencedOutputs(),
                new IterativeOptimizer(new StatsRecorder(), ImmutableSet.of(new RemoveRedundantIdentityProjections())),
                new SetFlatteningOptimizer());
        assertPlanWithOptimizers(sql, pattern, optimizers);
    }
}
apache-2.0
x-hansong/aSpice
src/com/iiordanov/bVNC/ISentText.java
1339
/**
 * Copyright (C) 2012 Iordan Iordanov
 * Copyright (C) 2010 Michael A. MacDonald
 *
 * This is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this software; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
 * USA.
 */
package com.iiordanov.bVNC;

import com.antlersoft.android.db.FieldAccessor;
import com.antlersoft.android.db.TableInterface;

/**
 * Interface specification for table storing sent text; the last N text items sent are stored in a table
 * and will be recalled on demand.
 * <p>
 * The {@code @TableInterface} annotation drives code generation of the concrete
 * {@code SentTextBean} class backed by the SENT_TEXT table; each {@code @FieldAccessor}
 * getter maps to a column.
 *
 * @author Michael A. MacDonald
 *
 */
@TableInterface(TableName="SENT_TEXT",ImplementingIsAbstract=false,ImplementingClassName="SentTextBean")
public interface ISentText {
    // Primary-key column accessor.
    @FieldAccessor
    long get_Id();

    // The stored text item.
    @FieldAccessor
    String getSentText();
}
apache-2.0
lgobinath/carbon-analytics-common
components/data-bridge/org.wso2.carbon.databridge.core/src/main/java/org/wso2/carbon/databridge/core/internal/queue/EventBlockingQueue.java
3653
/*
 * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.databridge.core.internal.queue;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.databridge.core.Utils.DataBridgeUtils;
import org.wso2.carbon.databridge.core.Utils.EventComposite;

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * This class provides the blocking implementation based on the size of event composite in the queue. Also it make sure
 * the queue doesn't grow beyond the determined size.
 * <p>
 * Fixes over the previous revision:
 * <ul>
 *   <li>{@link #poll()} no longer throws {@link NullPointerException} when the queue is empty —
 *       {@link ArrayBlockingQueue#poll()} returns {@code null} in that case and the old code
 *       dereferenced the result unconditionally.</li>
 *   <li>{@link InterruptedException} handlers now restore the thread's interrupt status instead
 *       of silently swallowing it.</li>
 * </ul>
 */
public class EventBlockingQueue extends ArrayBlockingQueue<EventComposite> {
    private static final Log log = LogFactory.getLog(EventBlockingQueue.class);

    // Guards double-checked semaphore release; never exposed.
    private final Object lock = new Object();
    // Aggregate size (bytes) of all composites currently queued.
    private final AtomicInteger currentSize;
    // Size of the composite most recently offered via put().
    // NOTE(review): read by poll() without synchronization — presumably a tolerated race; confirm.
    private int currentEventCompositeSize;
    // Byte-capacity threshold beyond which producers are throttled.
    private final int maxSize;
    private final Semaphore semaphore;

    /**
     * @param maxQueueSize    maximum number of queued composites (element capacity)
     * @param maxSizeCapacity maximum aggregate size in bytes before producers block
     */
    public EventBlockingQueue(int maxQueueSize, int maxSizeCapacity) {
        super(maxQueueSize);
        this.currentSize = new AtomicInteger(0);
        this.maxSize = maxSizeCapacity;
        this.semaphore = new Semaphore(1);
    }

    /**
     * Inserts the composite, blocking (via the semaphore) while the aggregate
     * byte size is at or above {@code maxSize}.
     */
    public synchronized void put(EventComposite eventComposite) {
        eventComposite.setSize(DataBridgeUtils.getSize(eventComposite));
        currentEventCompositeSize = eventComposite.getSize();
        if (currentSize.get() >= maxSize) {
            try {
                semaphore.acquire();
                // If a concurrent poll() already freed capacity, give the permit back
                // so this producer is not parked forever.
                if (semaphore.availablePermits() == 0) {
                    synchronized (lock) {
                        if (semaphore.availablePermits() == 0) {
                            semaphore.release();
                        }
                    }
                }
            } catch (InterruptedException ignored) {
                // Restore interrupt status so callers can observe the interruption.
                Thread.currentThread().interrupt();
            }
        }
        try {
            super.put(eventComposite);
            if (currentSize.addAndGet(eventComposite.getSize()) >= maxSize) {
                try {
                    // Capacity exhausted: take the permit so the next producer blocks.
                    semaphore.acquire();
                } catch (InterruptedException ignored) {
                    Thread.currentThread().interrupt();
                }
            }
            if (log.isDebugEnabled()) {
                log.debug("current queue size in bytes : " + currentSize + " , elements : " + size());
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            String logMessage = "Failure to insert event into queue";
            log.warn(logMessage);
        }
    }

    /**
     * Retrieves and removes the head of the queue, or returns {@code null} if
     * the queue is empty, updating the byte accounting and unblocking a
     * throttled producer when capacity is available again.
     */
    public EventComposite poll() {
        EventComposite eventComposite = super.poll();
        if (eventComposite == null) {
            // Queue is empty: nothing to subtract. Still run the release check —
            // the original release condition included "|| isEmpty()".
            releaseSemaphoreIfCapacityAvailable();
            return null;
        }
        currentSize.addAndGet(-eventComposite.getSize());
        releaseSemaphoreIfCapacityAvailable();
        return eventComposite;
    }

    // Releases the producer-throttle permit when the next composite would fit,
    // or when the queue has drained completely (double-checked under lock).
    private void releaseSemaphoreIfCapacityAvailable() {
        if (semaphore.availablePermits() == 0
                && ((currentEventCompositeSize + currentSize.get()) < maxSize) || isEmpty()) {
            synchronized (lock) {
                if (semaphore.availablePermits() == 0
                        && ((currentEventCompositeSize + currentSize.get()) < maxSize) || isEmpty()) {
                    semaphore.release();
                }
            }
        }
    }
}
apache-2.0
vega113/incubator-wave
wave/src/test/java/org/waveprotocol/wave/model/document/util/XmlStringBuilderTest.java
6201
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.waveprotocol.wave.model.document.util;

import junit.framework.TestCase;

import org.waveprotocol.wave.model.document.operation.automaton.DocumentSchema.PermittedCharacters;
import org.waveprotocol.wave.model.document.raw.impl.Element;
import org.waveprotocol.wave.model.document.raw.impl.RawDocumentImpl;
import org.waveprotocol.wave.model.util.CollectionUtils;
import org.waveprotocol.wave.model.util.StringMap;

/**
 * Test for {@link XmlStringBuilder}
 * @author danilatos@google.com (Daniel Danilatos)
 */
public class XmlStringBuilderTest extends TestCase {
  // Four independent builders; each test uses as many as it needs.
  XmlStringBuilder a = XmlStringBuilder.createEmpty();
  XmlStringBuilder b = XmlStringBuilder.createEmpty();
  XmlStringBuilder c = XmlStringBuilder.createEmpty();
  XmlStringBuilder d = XmlStringBuilder.createEmpty();

  // Plain text with no markup passes through unchanged.
  public void testTextIsJustText() {
    checkText("", a);
    a.appendText("");
    checkText("", a);
    a.appendText("blah");
    checkText("blah", a);
  }

  // XML-special characters in appended text are entity-escaped.
  public void testEscapesText() {
    String text = "the <p> & \n <q/> tags", expected = "the &lt;p&gt; &amp; \n &lt;q/&gt; tags";
    a.appendText(text, PermittedCharacters.ANY);
    check(text.length(), expected, a);

    text = "the <p> & <q/> tags";
    expected = "the &lt;p&gt; &amp; &lt;q/&gt; tags";
    check(text.length(), expected, XmlStringBuilder.createText(text));
  }

  // Reported length counts unescaped characters, not escaped output size.
  public void testTextLengthIsUnescapedStringLength() {
    check(5, "hello", a.appendText("hello"));
    check(9, "hello&lt;p&gt;&amp;", a.appendText("<p>&"));
  }

  /**
   * test whether createFromXmlString works by converting some fragments to
   * and from an XmlStringBuilder. We also check that invalid text doesn't
   * work.
   */
  public void testCreateFromXmlString() {
    String xmlText = "<p>Hello world</p><p>How are things</p>";
    XmlStringBuilder builder = XmlStringBuilder.createFromXmlString(xmlText);
    assertEquals(xmlText, builder.toString());
  }

  // Wrapping in an element adds 2 to the length (open + close tags).
  public void testWrapMakesLengthOne() {
    checkLength(2, a.wrap("abc"));
    checkLength(6, a.appendText("hi").wrap("Yo", "x", "y"));
    checkLength(9, b.appendText("<h&ello").wrap("xx"));
  }

  public void testLengthIsCumulative() {
    checkLength(9, a.appendText("blah").wrap("ab").appendText("abc"));
  }

  // Attribute values are escaped when wrapping with name/value varargs.
  public void testWrapEscapesAttributes() {
    check(4, "<ab cd=\"e&apos;f&lt;\" gh=\"&amp;i&quot;&gt;j\" xy=\"zz\">kl</ab>",
        a.appendText("kl").wrap("ab", "cd", "e'f<", "gh", "&i\">j", "xy", "zz"));
  }

  // TODO(danilatos): Sort keys, or don't compare just strings
  public void testWrapWithAttributeMap() {
    StringMap<String> attribs = CollectionUtils.createStringMap();
    attribs.put("t", "li");
    attribs.put("i", "2");
    check(2, "<line t=\"li\" i=\"2\"></line>", XmlStringBuilder.createEmpty().wrap("line", attribs));
  }

  // TODO(danilatos): Sort keys, or don't compare just strings
  public void testWrapWithAttributeMapWithEmptyValues() {
    StringMap<String> attribs = CollectionUtils.createStringMap();
    attribs.put("a", "1");
    attribs.put("b", "");
    check(2, "<x b=\"\" a=\"1\"></x>", XmlStringBuilder.createEmpty().wrap("x", attribs));

    attribs = CollectionUtils.createStringMap();
    attribs.put("a", "");
    attribs.put("b", "");
    check(2, "<x b=\"\" a=\"\"></x>", XmlStringBuilder.createEmpty().wrap("x", attribs));

    attribs = CollectionUtils.createStringMap();
    attribs.put("a", "");
    attribs.put("b", "2");
    attribs.put("c", "");
    check(2, "<x b=\"2\" c=\"\" a=\"\"></x>", XmlStringBuilder.createEmpty().wrap("x", attribs));
  }

  // Round-trips: parse XML into a raw document, rebuild via the builder, compare.
  public void testConstructsFromNodes() {
    checkConstruction("some text");
    checkConstruction("some text &gt; &amp; &lt;");
    checkConstruction("<p>blah</p>");
    checkConstruction("<p a=\"b\" c=\"d\">blah</p>");
    checkConstruction("<p a=\"b\" c=\"d&apos;e\" f=\"&gt;g\">blah</p>");
  }

  // equals() tracks the XML built so far: builders are equal iff their content is.
  public void testEqualityCorrespondsToXmlBeingBuilt() {
    assertEquals(a, b);
    a.appendText("hello");
    b.appendText("hello");
    assertEquals(a, b);
    assertTrue(!a.appendText("x").equals(b));
    b.appendText("x");

    c.appendText("hi").wrap("x");
    d.appendText("hi").wrap("x");
    assertEquals(c, d);
    assertTrue(!c.wrap("y").equals(d));
    d.wrap("y");

    a.append(c);
    assertTrue(!a.equals(b));
    b.append(d);
    assertEquals(a, b);

    assertEquals(a.wrap("blah"), b.wrap("blah"));

    // TODO(danilatos): Test other methods
  }

  protected void checkText(String expected, XmlStringBuilder xml) {
    check(expected.length(), expected, xml);
  }

  // Asserts the length, then compares document structure.
  // NOTE(review): the return value of DocCompare.equivalent is ignored — if it
  // returns a boolean rather than throwing, this structural check can never
  // fail the test; confirm and wrap in assertTrue if so.
  protected void check(int length, String expected, XmlStringBuilder xml) {
    checkLength(length, xml);
    DocCompare.equivalent(DocCompare.STRUCTURE, expected, xml.getXmlString());
  }

  protected void checkLength(int length, XmlStringBuilder xml) {
    assertEquals(length, xml.getLength());
  }

  // Parses xml inside a wrapper element and verifies that rebuilding the
  // children (and the whole node) reproduces the original strings and sizes.
  protected void checkConstruction(String xml) {
    try {
      String outerXml = "<DOC a=\"b\">" + xml + "</DOC>";
      RawDocumentImpl doc = RawDocumentImpl.PROVIDER.parse(outerXml);
      Element element = doc.getDocumentElement();
      int size = element.calculateSize();
      check(size - 2, xml, XmlStringBuilder.createChildren(doc, element));
      check(size, outerXml, XmlStringBuilder.createNode(doc, element));
    } catch (Exception e) {
      e.printStackTrace();
      fail(e.getMessage());
    }
  }
}
apache-2.0
q474818917/solr-5.2.0
lucene/codecs/src/java/org/apache/lucene/codecs/simpletext/SimpleTextStoredFieldsWriter.java
5785
package org.apache.lucene.codecs.simpletext;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;

import org.apache.lucene.codecs.StoredFieldsWriter;
import org.apache.lucene.index.FieldInfo;
import org.apache.lucene.index.FieldInfos;
import org.apache.lucene.index.IndexFileNames;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.IOUtils;

/**
 * Writes plain-text stored fields.
 * <p>
 * Each document is emitted as a {@code doc N} line followed by one
 * {@code field / name / type / value} record per stored field, and the whole
 * file is terminated by an {@code END} marker plus a checksum line.
 * <p>
 * <b>FOR RECREATIONAL USE ONLY</b>
 * @lucene.experimental
 */
public class SimpleTextStoredFieldsWriter extends StoredFieldsWriter {
  // Count of documents written so far; doubles as the doc id written on each "doc" line
  // and is validated against the expected doc count in finish().
  private int numDocsWritten = 0;
  private final Directory directory;
  private final String segment;
  private IndexOutput out;

  // File extension for the plain-text stored-fields file.
  final static String FIELDS_EXTENSION = "fld";

  // Type tags written after "type"; readers dispatch on these exact byte sequences.
  final static BytesRef TYPE_STRING = new BytesRef("string");
  final static BytesRef TYPE_BINARY = new BytesRef("binary");
  final static BytesRef TYPE_INT    = new BytesRef("int");
  final static BytesRef TYPE_LONG   = new BytesRef("long");
  final static BytesRef TYPE_FLOAT  = new BytesRef("float");
  final static BytesRef TYPE_DOUBLE = new BytesRef("double");

  // Structural markers of the text format (note the significant leading/trailing spaces).
  final static BytesRef END   = new BytesRef("END");
  final static BytesRef DOC   = new BytesRef("doc ");
  final static BytesRef FIELD = new BytesRef("  field ");
  final static BytesRef NAME  = new BytesRef("    name ");
  final static BytesRef TYPE  = new BytesRef("    type ");
  final static BytesRef VALUE = new BytesRef("    value ");

  // Scratch buffer reused by SimpleTextUtil for UTF-8 encoding; not thread-safe.
  private final BytesRefBuilder scratch = new BytesRefBuilder();

  /**
   * Opens the segment's ".fld" output. If creation fails the writer is closed
   * to release any partially-acquired resources.
   */
  public SimpleTextStoredFieldsWriter(Directory directory, String segment, IOContext context) throws IOException {
    this.directory = directory;
    this.segment = segment;
    boolean success = false;
    try {
      out = directory.createOutput(IndexFileNames.segmentFileName(segment, "", FIELDS_EXTENSION), context);
      success = true;
    } finally {
      if (!success) {
        IOUtils.closeWhileHandlingException(this);
      }
    }
  }

  /** Starts a new document record and advances the running doc counter. */
  @Override
  public void startDocument() throws IOException {
    write(DOC);
    write(Integer.toString(numDocsWritten));
    newLine();

    numDocsWritten++;
  }

  /**
   * Writes one stored field: field number, name, then a type tag and value.
   * Numeric values are checked first; otherwise binary, then string. A stored
   * field with none of the three value kinds is a caller error.
   */
  @Override
  public void writeField(FieldInfo info, IndexableField field) throws IOException {
    write(FIELD);
    write(Integer.toString(info.number));
    newLine();

    write(NAME);
    write(field.name());
    newLine();

    write(TYPE);
    final Number n = field.numericValue();

    if (n != null) {
      // byte/short are widened and stored under the "int" tag
      if (n instanceof Byte || n instanceof Short || n instanceof Integer) {
        write(TYPE_INT);
        newLine();

        write(VALUE);
        write(Integer.toString(n.intValue()));
        newLine();
      } else if (n instanceof Long) {
        write(TYPE_LONG);
        newLine();

        write(VALUE);
        write(Long.toString(n.longValue()));
        newLine();
      } else if (n instanceof Float) {
        write(TYPE_FLOAT);
        newLine();

        write(VALUE);
        write(Float.toString(n.floatValue()));
        newLine();
      } else if (n instanceof Double) {
        write(TYPE_DOUBLE);
        newLine();

        write(VALUE);
        write(Double.toString(n.doubleValue()));
        newLine();
      } else {
        throw new IllegalArgumentException("cannot store numeric type " + n.getClass());
      }
    } else {
      BytesRef bytes = field.binaryValue();
      if (bytes != null) {
        write(TYPE_BINARY);
        newLine();

        write(VALUE);
        write(bytes);
        newLine();
      } else if (field.stringValue() == null) {
        throw new IllegalArgumentException("field " + field.name() + " is stored but does not have binaryValue, stringValue nor numericValue");
      } else {
        write(TYPE_STRING);
        newLine();

        write(VALUE);
        write(field.stringValue());
        newLine();
      }
    }
  }

  /**
   * Validates that exactly {@code numDocs} documents were written (a mismatch
   * indicates an aborted/buggy merge), then writes the END marker and checksum.
   */
  @Override
  public void finish(FieldInfos fis, int numDocs) throws IOException {
    if (numDocsWritten != numDocs) {
      throw new RuntimeException("mergeFields produced an invalid result: docCount is " + numDocs
          + " but only saw " + numDocsWritten + " file=" + out.toString()
          + "; now aborting this merge to prevent index corruption");
    }
    write(END);
    newLine();
    SimpleTextUtil.writeChecksum(out, scratch);
  }

  /** Closes the output; the field is nulled even if close throws. */
  @Override
  public void close() throws IOException {
    try {
      IOUtils.close(out);
    } finally {
      out = null;
    }
  }

  // Writes s as UTF-8 via the shared scratch buffer.
  private void write(String s) throws IOException {
    SimpleTextUtil.write(out, s, scratch);
  }

  // Writes raw bytes verbatim.
  private void write(BytesRef bytes) throws IOException {
    SimpleTextUtil.write(out, bytes);
  }

  // Writes the format's newline marker.
  private void newLine() throws IOException {
    SimpleTextUtil.writeNewline(out);
  }
}
apache-2.0
shun634501730/java_source_cn
src_en/com/sun/corba/se/spi/monitoring/StringMonitoredAttributeBase.java
1406
/* * Copyright (c) 2003, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package com.sun.corba.se.spi.monitoring; import java.util.*; /** * <p> * * @author Hemanth Puttaswamy * </p> * <p> * A Convenient Abstraction to present String type Monitored Attribute. One * of the examples of StringMonitoredAttribute is the State information. * </p> */ public abstract class StringMonitoredAttributeBase extends MonitoredAttributeBase { /////////////////////////////////////// // operations /** * <p> * Constructs StringMonitoredAttribute with the MonitoredAttributeInfo * built with the class type of String. * </p> * <p> * * @param name of this attribute * </p> * <p> * @param description of this attribute * </p> * <p> * @return a StringMonitoredAttributeBase * </p> */ public StringMonitoredAttributeBase(String name, String description) { super( name ); MonitoredAttributeInfoFactory f = MonitoringFactories.getMonitoredAttributeInfoFactory(); MonitoredAttributeInfo maInfo = f.createMonitoredAttributeInfo( description, String.class, false, false ); this.setMonitoredAttributeInfo( maInfo ); } // end StringMonitoredAttributeBase } // end StringMonitoredAttributeBase
apache-2.0
NSAmelchev/ignite
modules/indexing/src/test/java/org/apache/ignite/internal/metric/SqlStatisticsUserQueriesFastTest.java
10960
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.metric;

import java.sql.BatchUpdateException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Objects;
import javax.cache.CacheException;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.SqlQuery;
import org.apache.ignite.internal.processors.query.RunningQueryManager;
import org.apache.ignite.testframework.GridTestUtils;
import org.apache.ignite.transactions.TransactionDuplicateKeyException;
import org.junit.Test;

import static org.apache.ignite.internal.util.IgniteUtils.resolveIgnitePath;

/**
 * Tests for statistics of user initiated queries execution, that can be run without grid restart.
 *
 * @see RunningQueryManager
 */
public class SqlStatisticsUserQueriesFastTest extends UserQueriesTestBase {
    /** Subdirectory with CSV files */
    private static final String CSV_FILE_SUBDIR = "/modules/indexing/src/test/resources/";

    /**
     * A CSV file with two records, that could NOT be inserted to the test table, because it have been generated for
     * different table.
     */
    private static final String COPY_CMD_BAD_FORMATED_FILE =
        Objects.requireNonNull(resolveIgnitePath(CSV_FILE_SUBDIR + "bulkload_bad.csv")).getAbsolutePath();

    /**
     * A CSV file with two records, that could be upload to the test table.
     */
    private static final String COPY_CMD_OK_FORMATED_FILE =
        Objects.requireNonNull(resolveIgnitePath(CSV_FILE_SUBDIR + "bulkload_ok.csv")).getAbsolutePath();

    /** Cache with a tested table, created and populated only once. */
    private static IgniteCache cache;

    /**
     * Setup: starts a two-node grid once for the whole class and creates the test cache
     * on the reducer node.
     */
    @Override protected void beforeTestsStarted() throws Exception {
        SuspendQuerySqlFunctions.refresh();

        startGrids(2);

        cache = createCacheFrom(grid(REDUCER_IDX));
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }

    /**
     * Sanity check for selects: both a plain select and one with a subquery should bump
     * only the reducer-side "success" metric.
     */
    @Test
    public void testSanitySelectSuccess() {
        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("SELECT * FROM TAB")).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("SELECT * FROM TAB WHERE ID = (SELECT AVG(ID) FROM TAB WHERE ID < 20)")).getAll(),
            "success");
    }

    /**
     * Check that metrics work for DDL statements: successful CREATE/DROP increment "success",
     * while duplicate CREATEs increment "failed".
     */
    @Test
    public void testDdlSuccess() {
        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("CREATE INDEX myidx ON TAB(ID)")).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(() -> GridTestUtils.assertThrows(
            log,
            () -> cache.query(new SqlFieldsQuery("CREATE INDEX myidx ON TAB(ID)")).getAll(),
            CacheException.class,
            "Index already exists"),
            "failed");

        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("DROP INDEX myidx")).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("CREATE TABLE ANOTHER_TAB (ID INT PRIMARY KEY, VAL VARCHAR)")
                .setSchema("PUBLIC")).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(() -> GridTestUtils.assertThrows(
            log,
            () -> cache.query(new SqlFieldsQuery("CREATE TABLE ANOTHER_TAB (ID INT PRIMARY KEY, VAL VARCHAR)")
                .setSchema("PUBLIC")).getAll(),
            CacheException.class,
            "Table already exists"),
            "failed");
    }

    /**
     * Check that metrics work for DML statements: DELETE/INSERT/MERGE increment "success";
     * a duplicate-key INSERT increments "failed".
     */
    @Test
    public void testDmlSuccess() {
        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("DELETE FROM TAB WHERE ID = 5")).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("DELETE FROM TAB WHERE ID < (SELECT AVG(ID) FROM TAB WHERE ID < 20)")).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("INSERT INTO TAB VALUES(5, 'Name')")).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("MERGE INTO TAB(ID, NAME) VALUES(5, 'NewerName')")).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(() -> GridTestUtils.assertThrowsAnyCause(
            log,
            () -> cache.query(new SqlFieldsQuery("INSERT INTO TAB VALUES(5, 'I will NOT be inserted')")).getAll(),
            TransactionDuplicateKeyException.class,
            "Duplicate key during INSERT"),
            "failed");
    }

    /**
     * Check that metrics work for statements in streaming mode. Each streamed INSERT is counted
     * in addition to the "SET STREAMING" command itself, hence two expected metric updates per case.
     */
    @Test
    public void testStreaming() {
        final Integer okId = 42;
        // Null primary key is rejected by the table, so this INSERT must fail.
        final Integer badId = null;

        cache.query(new SqlFieldsQuery("DELETE FROM TAB WHERE ID = ?").setArgs(okId)).getAll();

        assertMetricsIncrementedOnlyOnReducer(
            () -> insertWithStreaming(okId, "Succesfully inserted name"),
            "success", "success");

        assertMetricsIncrementedOnlyOnReducer(() -> GridTestUtils.assertThrowsAnyCause(
            log,
            () -> insertWithStreaming(badId, "I will NOT be inserted"),
            BatchUpdateException.class,
            "Null value is not allowed for column"),
            "success", "failed");
    }

    /**
     * Insert row using streaming mode of the Thin JDBC client.
     *
     * @param id Id.
     * @param name Name.
     * @return update count.
     */
    private int insertWithStreaming(Integer id, String name) {
        try (Connection conn = GridTestUtils.connect(grid(REDUCER_IDX), null)) {
            conn.setSchema('"' + DEFAULT_CACHE_NAME + '"');

            try (Statement stat = conn.createStatement()) {
                stat.execute("SET STREAMING ON ALLOW_OVERWRITE OFF");

                try (PreparedStatement ins = conn.prepareStatement("INSERT INTO TAB VALUES(?, ?)")) {
                    ins.setObject(1, id);
                    ins.setString(2, name);

                    return ins.executeUpdate();
                }
            }
        }
        catch (Exception e) {
            throw new RuntimeException("Streaming upload failed", e);
        }
    }

    /**
     * Check that metrics work for COPY statement: a well-formed CSV bumps "success",
     * a CSV generated for a different table bumps "failed".
     */
    @Test
    public void testCopyComand() {
        // Remove the two rows the OK file re-inserts, so the upload does not collide.
        cache.query(new SqlFieldsQuery("DELETE FROM TAB WHERE ID = 1 or ID = 2 ")).getAll();

        assertMetricsIncrementedOnlyOnReducer(
            () -> doCopyCommand(COPY_CMD_OK_FORMATED_FILE),
            "success");

        assertMetricsIncrementedOnlyOnReducer(() -> GridTestUtils.assertThrowsAnyCause(
            log,
            () -> doCopyCommand(COPY_CMD_BAD_FORMATED_FILE),
            SQLException.class,
            "Value conversion failed"),
            "failed");
    }

    /**
     * Perform copy command: upload file using thin jdbc client.
     *
     * @param pathToCsv Path to csv file to upload.
     * @return update count reported by the COPY statement.
     */
    private int doCopyCommand(String pathToCsv) {
        try (Connection conn = GridTestUtils.connect(grid(REDUCER_IDX), null)) {
            conn.setSchema('"' + DEFAULT_CACHE_NAME + '"');

            try (Statement copy = conn.createStatement()) {
                return copy.executeUpdate("copy from '" + pathToCsv + "' into TAB (ID, NAME) format csv");
            }
        }
        catch (Exception e) {
            throw new RuntimeException("COPY upload from " + pathToCsv + " failed", e);
        }
    }

    /**
     * Sanity test for deprecated, but still supported by metrics, sql queries.
     *
     * @throws Exception if failed.
     */
    @Test
    public void testSanityDeprecatedSqlQueryMetrics() throws Exception {
        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlQuery(String.class, "ID < 5").setLocal(false)).getAll(),
            "success");

        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlQuery(String.class, "ID < 5").setLocal(true)).getAll(),
            "success");
    }

    /**
     * Check that unparseable query doesn't affect any metric value.
     *
     * @throws Exception if failed.
     */
    @Test
    public void testUnparseableQueriesAreNotCounted() throws Exception {
        assertMetricsRemainTheSame(() -> {
            GridTestUtils.assertThrows(
                log,
                () -> cache.query(new SqlFieldsQuery("THIS IS NOT A SQL STATEMENT")).getAll(),
                CacheException.class,
                "Failed to parse query");
        });
    }

    /**
     * Check success metric in case of local select.
     */
    @Test
    public void testLocalSelectSuccess() {
        assertMetricsIncrementedOnlyOnReducer(
            () -> cache.query(new SqlFieldsQuery("SELECT * FROM TAB WHERE ID < 100").setLocal(true)).getAll(),
            "success");
    }

    /**
     * Check general failure metric if local select failed.
     */
    @Test
    public void testLocalSelectFailed() {
        assertMetricsIncrementedOnlyOnReducer(() -> GridTestUtils.assertThrows(
            log,
            () -> cache.query(new SqlFieldsQuery("SELECT * FROM TAB WHERE ID = failFunction()").setLocal(true)).getAll(),
            CacheException.class,
            null),
            "failed");
    }

    /**
     * Check cancel metric if local select cancelled.
     */
    @Test
    public void testLocalSelectCanceled() {
        assertMetricsIncrementedOnlyOnReducer(() ->
            startAndKillQuery(new SqlFieldsQuery("SELECT * FROM TAB WHERE ID <> suspendHook(ID)").setLocal(true)),
            2,
            "success", "failed", "canceled");
    }
}
apache-2.0
williamchengit/TestRepo
solr-4.9.0/lucene/core/src/java/org/apache/lucene/store/RateLimiter.java
4851
package org.apache.lucene.store;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import org.apache.lucene.util.ThreadInterruptedException;

/** Abstract base class to rate limit IO.  Typically implementations are
 *  shared across multiple IndexInputs or IndexOutputs (for example
 *  those involved all merging).  Those IndexInputs and
 *  IndexOutputs would call {@link #pause} whenever they have read
 *  or written more than {@link #getMinPauseCheckBytes} bytes. */
public abstract class RateLimiter {

  /**
   * Sets an updated mb per second rate limit.
   */
  public abstract void setMbPerSec(double mbPerSec);

  /**
   * The current mb per second rate limit.
   */
  public abstract double getMbPerSec();

  /** Pauses, if necessary, to keep the instantaneous IO
   *  rate at or below the target.
   *  <p>
   *  Note: the implementation is thread-safe
   *  </p>
   *  @return the pause time in nano seconds
   * */
  public abstract long pause(long bytes);

  /** How many bytes caller should add up itself before invoking {@link #pause}. */
  public abstract long getMinPauseCheckBytes();

  /**
   * Simple class to rate limit IO.
   */
  public static class SimpleRateLimiter extends RateLimiter {

    // Minimum pause resolution; getMinPauseCheckBytes is derived from this so
    // callers don't invoke pause() for intervals Thread.sleep can't honor.
    private final static int MIN_PAUSE_CHECK_MSEC = 5;

    private volatile double mbPerSec;
    private volatile long minPauseCheckBytes;

    // Absolute (System.nanoTime-based) time up to which IO credit has been spent;
    // guarded by synchronized(this) in pause().
    private long lastNS;

    // TODO: we could also allow eg a sub class to dynamically
    // determine the allowed rate, eg if an app wants to
    // change the allowed rate over time or something

    /** mbPerSec is the MB/sec max IO rate */
    public SimpleRateLimiter(double mbPerSec) {
      setMbPerSec(mbPerSec);
    }

    /**
     * Sets an updated mb per second rate limit.
     */
    @Override
    public void setMbPerSec(double mbPerSec) {
      this.mbPerSec = mbPerSec;
      // Bytes the target rate allows in MIN_PAUSE_CHECK_MSEC.
      minPauseCheckBytes = (long) ((MIN_PAUSE_CHECK_MSEC / 1000.0) * mbPerSec * 1024 * 1024);
    }

    @Override
    public long getMinPauseCheckBytes() {
      return minPauseCheckBytes;
    }

    /**
     * The current mb per second rate limit.
     */
    @Override
    public double getMbPerSec() {
      return this.mbPerSec;
    }

    /** Pauses, if necessary, to keep the instantaneous IO
     *  rate at or below the target.  Be sure to only call
     *  this method when bytes &gt; {@link #getMinPauseCheckBytes},
     *  otherwise it will pause way too long!
     *
     *  @return the pause time in nano seconds */
    @Override
    public long pause(long bytes) {

      long startNS = System.nanoTime();

      double secondsToPause = (bytes/1024./1024.) / mbPerSec;

      long targetNS;

      // Sync'd to read + write lastNS:
      synchronized (this) {

        // Time we should sleep until; this is purely instantaneous
        // rate (just adds seconds onto the last time we had paused to);
        // maybe we should also offer decayed recent history one?
        targetNS = lastNS + (long) (1000000000 * secondsToPause);

        if (startNS >= targetNS) {
          // OK, current time is already beyond the target sleep time,
          // no pausing to do.

          // Set to startNS, not targetNS, to enforce the instant rate, not
          // the "averaged over all history" rate:
          lastNS = startNS;
          return 0;
        }

        lastNS = targetNS;
      }

      long curNS = startNS;

      // While loop because Thread.sleep doesn't always sleep
      // enough:
      while (true) {
        final long pauseNS = targetNS - curNS;
        if (pauseNS > 0) {
          try {
            // NOTE: except maybe on real-time JVMs, minimum realistic sleep time
            // is 1 msec; if you pass just 1 nsec the default impl rounds
            // this up to 1 msec:
            Thread.sleep((int) (pauseNS/1000000), (int) (pauseNS % 1000000));
          } catch (InterruptedException ie) {
            throw new ThreadInterruptedException(ie);
          }
          curNS = System.nanoTime();
          continue;
        }
        break;
      }

      return curNS - startNS;
    }
  }
}
apache-2.0
hugesu/elasticsearch-knapsack
src/main/java/org/xbib/elasticsearch/knapsack/KnapsackRequest.java
905
/* * Copyright (C) 2014 Jörg Prante * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.xbib.elasticsearch.knapsack; import org.elasticsearch.common.unit.TimeValue; import java.util.Map; public interface KnapsackRequest { String getCluster(); String getHost(); int getPort(); boolean getSniff(); TimeValue getTimeout(); Map getIndexTypeNames(); }
apache-2.0
mbiarnes/drools-wb
drools-wb-screens/drools-wb-dtable-xls-editor/drools-wb-dtable-xls-editor-client/src/test/java/org/drools/workbench/screens/dtablexls/client/editor/DecisionTableXLSEditorPresenterValidateTest.java
3912
/*
 * Copyright 2018 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.workbench.screens.dtablexls.client.editor;

import java.util.ArrayList;

import com.google.gwtmockito.GwtMockitoTestRunner;
import com.google.gwtmockito.WithClassesToStub;
import org.drools.workbench.screens.dtablexls.client.type.DecisionTableXLSResourceType;
import org.drools.workbench.screens.dtablexls.client.type.DecisionTableXLSXResourceType;
import org.drools.workbench.screens.dtablexls.service.DecisionTableXLSService;
import org.guvnor.common.services.shared.metadata.MetadataService;
import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.gwtbootstrap3.client.ui.Modal;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.test.MockProvider;
import org.mockito.Mock;
import org.uberfire.backend.vfs.Path;
import org.uberfire.ext.editor.commons.client.history.VersionRecordManager;
import org.uberfire.ext.widgets.common.client.common.BusyIndicatorView;
import org.uberfire.java.nio.IOException;
import org.uberfire.mocks.CallerMock;
import org.uberfire.mvp.Command;

import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Verifies that DecisionTableXLSEditorPresenter#onValidate always invokes the
 * after-validation callback — both when the validation service succeeds and
 * when it throws.
 */
@RunWith(GwtMockitoTestRunner.class)
@WithClassesToStub({Modal.class})
public class DecisionTableXLSEditorPresenterValidateTest {

    @Mock
    DecisionTableXLSService decisionTableXLSService;

    @Mock
    MetadataService metadataService;

    DecisionTableXLSEditorPresenter presenter;

    @Before
    public void setUp() throws Exception {
        // Anonymous subclass: the initializer block injects a mocked
        // VersionRecordManager, which the base class normally wires via CDI.
        presenter = new DecisionTableXLSEditorPresenter(mock(DecisionTableXLSEditorView.class),
                                                        mock(DecisionTableXLSResourceType.class),
                                                        mock(DecisionTableXLSXResourceType.class),
                                                        mock(BusyIndicatorView.class),
                                                        MockProvider.getMockValidationPopup(),
                                                        new CallerMock<>(decisionTableXLSService),
                                                        new CallerMock<>(metadataService)) {
            {
                versionRecordManager = mock(VersionRecordManager.class);
            }
        };
    }

    @Test
    public void commandIsCalled() throws Exception {
        // Service returns no validation messages -> callback must still run.
        doReturn(new ArrayList<ValidationMessage>()).when(decisionTableXLSService).validate(any(Path.class),
                                                                                            any(Path.class));

        final Command afterValidation = mock(Command.class);

        presenter.onValidate(afterValidation);

        verify(afterValidation).execute();
    }

    @Test
    public void callFailsAndCommandIsCalled() throws Exception {
        // Service throws -> callback must run anyway.
        doThrow(new IOException()).when(decisionTableXLSService).validate(any(Path.class),
                                                                          any(Path.class));

        final Command afterValidation = mock(Command.class);

        presenter.onValidate(afterValidation);

        verify(afterValidation).execute();
    }
}
apache-2.0
Fantast/mvel
src/test/java/org/mvel2/tests/core/RegularExpressionTests.java
2900
package org.mvel2.tests.core;

import org.mvel2.MVEL;

import java.util.HashMap;
import java.util.Map;

import static org.mvel2.MVEL.compileExpression;
import static org.mvel2.MVEL.eval;
import static org.mvel2.MVEL.executeExpression;

/**
 * Regression tests for MVEL's regular-expression match operator ({@code ~=}),
 * covering interpreted vs. pre-compiled evaluation and interaction with
 * boolean operators.
 *
 * @author Mike Brock .
 */
public class RegularExpressionTests extends AbstractTest {
  public void testRegExpOK() throws Exception {
    // This works OK interpreted
    assertEquals(Boolean.TRUE,
        MVEL.eval("'Hello'.toUpperCase() ~= '[A-Z]{0,5}'"));
    assertEquals(Boolean.TRUE,
        MVEL.eval("1 == 0 || ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')"));
    // This works OK if toUpperCase() is avoided in pre-compiled
    assertEquals(Boolean.TRUE,
        executeExpression(compileExpression("'Hello' ~= '[a-zA-Z]{0,5}'")));
  }

  public void testRegExpPreCompiledBug() throws Exception {
    // Historic bug: if toUpperCase() is used in the expression then the
    // pre-compiled form returned null instead of a boolean.
    Object ser = compileExpression("'Hello'.toUpperCase() ~= '[a-zA-Z]{0,5}'");
    assertEquals(Boolean.TRUE,
        executeExpression(ser));
  }

  public void testRegExpOrBug() throws Exception {
    // Historic bug: '~=' on the RHS of '||' returned null in compiled mode.
    assertEquals(Boolean.TRUE,
        executeExpression(compileExpression("1 == 0 || ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')")));
  }

  public void testRegExpAndBug() throws Exception {
    // Historic bug: same null-result issue with '&&'.
    //    Object ser = MVEL.compileExpression("1 == 1 && ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')");
    assertEquals(Boolean.TRUE,
        executeExpression(compileExpression("1 == 1 && ('Hello'.toUpperCase() ~= '[A-Z]{0,5}')")));
  }

  public void testRegExSurroundedByBrackets() {
    Map<String, Object> map = new HashMap<String, Object>();
    map.put("x", "foobie");

    assertEquals(Boolean.TRUE,
        MVEL.eval("x ~= ('f.*')", map));
  }

  public void testMVEL231() {
    // Regression for MVEL-231: variable assignment followed by '~=' in one script.
    System.out.println(MVEL.eval("Q8152405_A35423077=\"1\"; Q8152405_A35423077!=null && (Q8152405_A35423077~=\"^[0-9]$\");",
        new HashMap()));
  }

  public void testParsingStability4() {
    assertEquals(true,
        test("vv=\"Edson\"; !(vv ~= \"Mark\")"));
  }

  /**
   * Submitted by: Dimitar Dimitrov
   */
  public void testRegExOR() {
    Map<String, Object> map = new HashMap<String, Object>();
    map.put("os", "windows");
    assertTrue((Boolean) eval("os ~= 'windows|unix'", map));
  }

  public void testRegExOR2() {
    Map<String, Object> map = new HashMap<String, Object>();
    map.put("os", "windows");
    assertTrue((Boolean) eval("'windows' ~= 'windows|unix'", map));
    // 'time' resolves against a java.util.Date context object here; no match expected.
    assertFalse((Boolean) eval("time ~= 'windows|unix'", new java.util.Date()));
  }

  public void testRegExMatch() {
    assertEquals(true,
        MVEL.eval("$test = 'foo'; $ex = 'f.*'; $test ~= $ex", new HashMap()));
  }
}
apache-2.0
meteorcloudy/bazel
src/main/java/com/google/devtools/build/lib/bazel/rules/java/proto/BazelJavaProtoLibraryRule.java
3481
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.devtools.build.lib.bazel.rules.java.proto;

import static com.google.devtools.build.lib.packages.Attribute.attr;
import static com.google.devtools.build.lib.packages.BuildType.LABEL_LIST;
import static com.google.devtools.build.lib.packages.Type.BOOLEAN;

import com.google.devtools.build.lib.analysis.BaseRuleClasses;
import com.google.devtools.build.lib.analysis.RuleDefinition;
import com.google.devtools.build.lib.analysis.RuleDefinitionEnvironment;
import com.google.devtools.build.lib.packages.RuleClass;
import com.google.devtools.build.lib.packages.StarlarkProviderIdentifier;
import com.google.devtools.build.lib.rules.java.JavaConfiguration;
import com.google.devtools.build.lib.rules.java.JavaInfo;
import com.google.devtools.build.lib.rules.java.proto.JavaProtoLibrary;
import com.google.devtools.build.lib.rules.proto.ProtoConfiguration;

/** Declaration of the {@code java_proto_library} rule. */
public class BazelJavaProtoLibraryRule implements RuleDefinition {

  // Aspect attached to "deps" so Java code generation propagates through the
  // proto_library dependency graph.
  private final BazelJavaProtoAspect javaProtoAspect;

  public BazelJavaProtoLibraryRule(BazelJavaProtoAspect javaProtoAspect) {
    this.javaProtoAspect = javaProtoAspect;
  }

  @Override
  public RuleClass build(RuleClass.Builder builder, RuleDefinitionEnvironment environment) {
    return builder
        .requiresConfigurationFragments(JavaConfiguration.class, ProtoConfiguration.class)
        /* <!-- #BLAZE_RULE(java_proto_library).ATTRIBUTE(deps) -->
        The list of <a href="protocol-buffer.html#proto_library"><code>proto_library</code></a>
        rules to generate Java code for.
        <!-- #END_BLAZE_RULE.ATTRIBUTE --> */
        .override(
            attr("deps", LABEL_LIST)
                .allowedRuleClasses("proto_library")
                .allowedFileTypes()
                .aspect(javaProtoAspect))
        .add(attr("strict_deps", BOOLEAN).value(true).undocumented("for migration"))
        .advertiseStarlarkProvider(StarlarkProviderIdentifier.forKey(JavaInfo.PROVIDER.getKey()))
        .build();
  }

  @Override
  public Metadata getMetadata() {
    return RuleDefinition.Metadata.builder()
        .name("java_proto_library")
        .factoryClass(JavaProtoLibrary.class)
        .ancestors(BaseRuleClasses.NativeActionCreatingRule.class)
        .build();
  }
}

/*<!-- #BLAZE_RULE (NAME = java_proto_library, TYPE = LIBRARY, FAMILY = Java) -->

<p>
<code>java_proto_library</code> generates Java code from <code>.proto</code> files.
</p>

<p>
<code>deps</code> must point to <a href="protocol-buffer.html#proto_library"><code>proto_library
</code></a> rules.
</p>

<p>
Example:
</p>

<pre class="code">
java_library(
    name = "lib",
    deps = [":foo_java_proto"],
)

java_proto_library(
    name = "foo_java_proto",
    deps = [":foo_proto"],
)

proto_library(
    name = "foo_proto",
)
</pre>

<!-- #END_BLAZE_RULE -->*/
apache-2.0
joansmith/pdfbox
pdfbox/src/main/java/org/apache/pdfbox/pdmodel/interactive/annotation/PDAppearanceCharacteristicsDictionary.java
6439
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.pdfbox.pdmodel.interactive.annotation;

import org.apache.pdfbox.cos.COSArray;
import org.apache.pdfbox.cos.COSBase;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.cos.COSName;
import org.apache.pdfbox.cos.COSStream;
import org.apache.pdfbox.pdmodel.common.COSObjectable;
import org.apache.pdfbox.pdmodel.graphics.color.PDColor;
import org.apache.pdfbox.pdmodel.graphics.color.PDColorSpace;
import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceCMYK;
import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceGray;
import org.apache.pdfbox.pdmodel.graphics.color.PDDeviceRGB;
import org.apache.pdfbox.pdmodel.graphics.form.PDFormXObject;

/**
 * This class represents an appearance characteristics dictionary (the /MK
 * entry of a widget annotation). It wraps a {@link COSDictionary} and exposes
 * typed accessors for the rotation, colors, captions and icons.
 */
public class PDAppearanceCharacteristicsDictionary implements COSObjectable
{

    private final COSDictionary dictionary;

    /**
     * Constructor.
     *
     * @param dict dictionary
     */
    public PDAppearanceCharacteristicsDictionary(COSDictionary dict)
    {
        this.dictionary = dict;
    }

    /**
     * returns the dictionary.
     *
     * @return the dictionary
     */
    @Override
    public COSDictionary getCOSObject()
    {
        return this.dictionary;
    }

    /**
     * This will retrieve the rotation of the annotation widget.
     * It must be a multiple of 90. Default is 0
     *
     * @return the rotation
     */
    public int getRotation()
    {
        return this.getCOSObject().getInt(COSName.R, 0);
    }

    /**
     * This will set the rotation.
     *
     * @param rotation the rotation as a multiple of 90
     */
    public void setRotation(int rotation)
    {
        this.getCOSObject().setInt(COSName.R, rotation);
    }

    /**
     * This will retrieve the border color (/BC entry).
     *
     * @return the border color.
     */
    public PDColor getBorderColour()
    {
        return getColor(COSName.BC);
    }

    /**
     * This will set the border color.
     *
     * @param c the border color
     */
    public void setBorderColour(PDColor c)
    {
        this.getCOSObject().setItem(COSName.BC, c.toCOSArray());
    }

    /**
     * This will retrieve the background color (/BG entry).
     *
     * @return the background color.
     */
    public PDColor getBackground()
    {
        return getColor(COSName.BG);
    }

    /**
     * This will set the background color.
     *
     * @param c the background color
     */
    public void setBackground(PDColor c)
    {
        this.getCOSObject().setItem(COSName.BG, c.toCOSArray());
    }

    /**
     * This will retrieve the normal caption (/CA entry).
     *
     * @return the normal caption.
     */
    public String getNormalCaption()
    {
        return this.getCOSObject().getString("CA");
    }

    /**
     * This will set the normal caption.
     *
     * @param caption the normal caption
     */
    public void setNormalCaption(String caption)
    {
        this.getCOSObject().setString("CA", caption);
    }

    /**
     * This will retrieve the rollover caption (/RC entry).
     *
     * @return the rollover caption.
     */
    public String getRolloverCaption()
    {
        return this.getCOSObject().getString("RC");
    }

    /**
     * This will set the rollover caption.
     *
     * @param caption the rollover caption
     */
    public void setRolloverCaption(String caption)
    {
        this.getCOSObject().setString("RC", caption);
    }

    /**
     * This will retrieve the alternate caption (/AC entry).
     *
     * @return the alternate caption.
     */
    public String getAlternateCaption()
    {
        return this.getCOSObject().getString("AC");
    }

    /**
     * This will set the alternate caption.
     *
     * @param caption the alternate caption
     */
    public void setAlternateCaption(String caption)
    {
        this.getCOSObject().setString("AC", caption);
    }

    /**
     * This will retrieve the normal icon (/I entry), or null if it is absent
     * or not a stream.
     *
     * @return the normal icon.
     */
    public PDFormXObject getNormalIcon()
    {
        COSBase i = this.getCOSObject().getDictionaryObject("I");
        if (i instanceof COSStream)
        {
            return new PDFormXObject((COSStream)i);
        }
        return null;
    }

    /**
     * This will retrieve the rollover icon (/RI entry), or null if it is
     * absent or not a stream.
     *
     * @return the rollover icon
     */
    public PDFormXObject getRolloverIcon()
    {
        COSBase i = this.getCOSObject().getDictionaryObject("RI");
        if (i instanceof COSStream)
        {
            return new PDFormXObject((COSStream)i);
        }
        return null;
    }

    /**
     * This will retrieve the alternate icon (/IX entry), or null if it is
     * absent or not a stream.
     *
     * @return the alternate icon.
     */
    public PDFormXObject getAlternateIcon()
    {
        COSBase i = this.getCOSObject().getDictionaryObject("IX");
        if (i instanceof COSStream)
        {
            return new PDFormXObject((COSStream)i);
        }
        return null;
    }

    // Reads a color array entry and infers its color space from the number of
    // components: 1 = DeviceGray, 3 = DeviceRGB, 4 = DeviceCMYK; any other
    // size yields a PDColor with a null color space. Returns null if the entry
    // is missing or not an array.
    private PDColor getColor(COSName itemName)
    {
        COSBase c = this.getCOSObject().getItem(itemName);
        if (c instanceof COSArray)
        {
            PDColorSpace colorSpace = null;
            switch (((COSArray) c).size())
            {
            case 1:
                colorSpace = PDDeviceGray.INSTANCE;
                break;
            case 3:
                colorSpace = PDDeviceRGB.INSTANCE;
                break;
            case 4:
                colorSpace = PDDeviceCMYK.INSTANCE;
                break;
            default:
                break;
            }
            return new PDColor((COSArray) c, colorSpace);
        }
        return null;
    }

}
apache-2.0
dawidmalina/pinpoint
plugins/dubbo/src/main/java/com/navercorp/pinpoint/plugin/dubbo/DubboPlugin.java
2928
package com.navercorp.pinpoint.plugin.dubbo; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass; import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException; import com.navercorp.pinpoint.bootstrap.instrument.Instrumentor; import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformCallback; import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplate; import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplateAware; import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPlugin; import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPluginSetupContext; import java.security.ProtectionDomain; /** * @author Jinkai.Ma */ public class DubboPlugin implements ProfilerPlugin, TransformTemplateAware { private TransformTemplate transformTemplate; @Override public void setup(ProfilerPluginSetupContext context) { this.addApplicationTypeDetector(context); this.addTransformers(); } private void addTransformers() { transformTemplate.transform("com.alibaba.dubbo.rpc.cluster.support.AbstractClusterInvoker", new TransformCallback() { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader loader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = instrumentor.getInstrumentClass(loader, className, classfileBuffer); target.getDeclaredMethod("invoke", "com.alibaba.dubbo.rpc.Invocation").addInterceptor("com.navercorp.pinpoint.plugin.dubbo.interceptor.DubboConsumerInterceptor"); return target.toBytecode(); } }); transformTemplate.transform("com.alibaba.dubbo.rpc.proxy.AbstractProxyInvoker", new TransformCallback() { @Override public byte[] doInTransform(Instrumentor instrumentor, ClassLoader loader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException { InstrumentClass target = 
instrumentor.getInstrumentClass(loader, className, classfileBuffer); target.getDeclaredMethod("invoke", "com.alibaba.dubbo.rpc.Invocation").addInterceptor("com.navercorp.pinpoint.plugin.dubbo.interceptor.DubboProviderInterceptor"); return target.toBytecode(); } }); } /** * Pinpoint profiler agent uses this detector to find out the service type of current application. */ private void addApplicationTypeDetector(ProfilerPluginSetupContext context) { context.addApplicationTypeDetector(new DubboProviderDetector()); } @Override public void setTransformTemplate(TransformTemplate transformTemplate) { this.transformTemplate = transformTemplate; } }
apache-2.0
mtkocak/intellij-haxe
src/common/com/intellij/plugins/haxe/runner/NMERunningState.java
4628
/* * Copyright 2000-2013 JetBrains s.r.o. * Copyright 2014-2014 AS3Boyan * Copyright 2014-2014 Elias Ku * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.plugins.haxe.runner; import com.intellij.execution.ExecutionException; import com.intellij.execution.configurations.CommandLineState; import com.intellij.execution.configurations.GeneralCommandLine; import com.intellij.execution.filters.TextConsoleBuilder; import com.intellij.execution.filters.TextConsoleBuilderFactory; import com.intellij.execution.process.ColoredProcessHandler; import com.intellij.execution.process.ProcessHandler; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.openapi.module.Module; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.plugins.haxe.HaxeCommonBundle; import com.intellij.plugins.haxe.config.sdk.HaxeSdkData; import com.intellij.plugins.haxe.ide.module.HaxeModuleSettings; import com.intellij.util.PathUtil; import com.intellij.util.text.StringTokenizer; import org.jetbrains.annotations.NotNull; /** * @author: Fedor.Korotkov */ public class NMERunningState extends CommandLineState { private final Module module; private final boolean myRunInTest; private final boolean myDebug; private final int myDebugPort; public NMERunningState(ExecutionEnvironment env, Module module, boolean runInTest) { this(env, module, runInTest, false, 0); } public NMERunningState(ExecutionEnvironment env, 
Module module, boolean runInTest, boolean debug) { this(env, module, runInTest, debug, 6972); } public NMERunningState(ExecutionEnvironment env, Module module, boolean runInTest, boolean debug, int debugPort) { super(env); this.module = module; myRunInTest = runInTest; myDebug = debug; myDebugPort = debugPort; } @NotNull @Override protected ProcessHandler startProcess() throws ExecutionException { final HaxeModuleSettings settings = HaxeModuleSettings.getInstance(module); final Sdk sdk = ModuleRootManager.getInstance(module).getSdk(); assert sdk != null; GeneralCommandLine commandLine = getCommandForNeko(sdk, settings); return new ColoredProcessHandler(commandLine.createProcess(), commandLine.getCommandLineString()); } private GeneralCommandLine getCommandForNeko(Sdk sdk, HaxeModuleSettings settings) throws ExecutionException { final HaxeSdkData sdkData = sdk.getSdkAdditionalData() instanceof HaxeSdkData ? (HaxeSdkData)sdk.getSdkAdditionalData() : null; if (sdkData == null) { throw new ExecutionException(HaxeCommonBundle.message("invalid.haxe.sdk")); } final GeneralCommandLine commandLine = new GeneralCommandLine(); commandLine.setWorkDirectory(PathUtil.getParentPath(module.getModuleFilePath())); final String haxelibPath = sdkData.getHaxelibPath(); if (haxelibPath == null || haxelibPath.isEmpty()) { throw new ExecutionException(HaxeCommonBundle.message("no.haxelib.for.sdk", sdk.getName())); } commandLine.setExePath(haxelibPath); commandLine.addParameter("run"); commandLine.addParameter("nme"); commandLine.addParameter(myRunInTest ? 
"test" : "run"); commandLine.addParameter(settings.getNmmlPath()); for (String flag : settings.getNmeTarget().getFlags()) { commandLine.addParameter(flag); } if (myDebug) { commandLine.addParameter("-debug"); commandLine.addParameter("-Ddebug"); commandLine.addParameter("-args"); commandLine.addParameter("-start_debugger"); commandLine.addParameter("-debugger_host=localhost:" + myDebugPort); } final StringTokenizer flagsTokenizer = new StringTokenizer(settings.getNmeFlags()); while (flagsTokenizer.hasMoreTokens()) { commandLine.addParameter(flagsTokenizer.nextToken()); } final TextConsoleBuilder consoleBuilder = TextConsoleBuilderFactory.getInstance().createBuilder(module.getProject()); setConsoleBuilder(consoleBuilder); return commandLine; } }
apache-2.0
laki88/carbon-registry
components/registry/org.wso2.carbon.registry.extensions/src/main/java/org/wso2/carbon/registry/extensions/handlers/ZipWSDLMediaTypeHandler.java
53013
/* * Copyright (c) 2005-2009, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.carbon.registry.extensions.handlers; import java.io.BufferedOutputStream; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.*; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.zip.ZipEntry; import java.util.zip.ZipInputStream; import javax.xml.namespace.QName; import javax.xml.stream.XMLStreamException; import org.apache.axiom.om.OMAbstractFactory; import org.apache.axiom.om.OMElement; import org.apache.axiom.om.OMFactory; import org.apache.axiom.om.util.AXIOMUtil; import org.apache.commons.io.FileUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.xerces.xni.parser.XMLInputSource; import org.uddi.api_v3.AuthToken; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.registry.core.Registry; import org.wso2.carbon.registry.core.RegistryConstants; import org.wso2.carbon.registry.core.Resource; import org.wso2.carbon.registry.core.ResourcePath; import org.wso2.carbon.registry.core.config.RegistryContext; import 
org.wso2.carbon.registry.core.exceptions.RegistryException; import org.wso2.carbon.registry.core.internal.RegistryCoreServiceComponent; import org.wso2.carbon.registry.core.jdbc.handlers.RequestContext; import org.wso2.carbon.registry.core.jdbc.utils.Transaction; import org.wso2.carbon.registry.core.session.CurrentSession; import org.wso2.carbon.registry.core.session.UserRegistry; import org.wso2.carbon.registry.core.utils.RegistryUtils; import org.wso2.carbon.registry.extensions.beans.BusinessServiceInfo; import org.wso2.carbon.registry.extensions.handlers.utils.*; import org.wso2.carbon.registry.extensions.utils.CommonConstants; import org.wso2.carbon.registry.extensions.utils.CommonUtil; import org.wso2.carbon.registry.extensions.utils.WSDLValidationInfo; import org.wso2.carbon.registry.uddi.utils.UDDIUtil; import org.wso2.carbon.user.core.UserRealm; @SuppressWarnings({"unused", "UnusedAssignment"}) public class ZipWSDLMediaTypeHandler extends WSDLMediaTypeHandler { // <handler class="org.wso2.carbon.registry.extensions.handlers.ZipWSDLMediaTypeHandler"> // <property name="wsdlMediaType">application/wsdl+xml</property> // <property name="schemaMediaType">application/xsd+xml</property> // <property name="threadPoolSize">50</property> // <property name="useOriginalSchema">true</property> // <!--property name="disableWSDLValidation">true</property> // <property name="disableSchemaValidation">true</property> // <property name="wsdlExtension">.wsdl</property> // <property name="schemaExtension">.xsd</property> // <property name="archiveExtension">.gar</property> // <property name="tempFilePrefix">wsdl</property--> // <property name="schemaLocationConfiguration" type="xml"> // <location>/governance/schemas/</location> // </property> // <property name="wsdlLocationConfiguration" type="xml"> // <location>/governance/wsdls/</location> // </property> // <filter class="org.wso2.carbon.registry.core.jdbc.handlers.filters.MediaTypeMatcher"> // <property 
name="mediaType">application/vnd.wso2.governance-archive</property>
    // </filter>
    // </handler>

    // Media types and file extensions recognised inside the uploaded archive.
    private String wsdlMediaType = "application/wsdl+xml";
    private String wsdlExtension = ".wsdl";
    private String xsdMediaType = "application/xsd+xml";
    private String xsdExtension = ".xsd";
    private String swaggerMediaType = "application/swagger+json";
    private String swaggerExtension = ".json";
    private String swaggerLocation = "/swagger/";
    private String wadlMediaType = "application/wadl+xml";
    private String wadlExtension = ".wadl";
    private String wadlLocation = "/wadl/";
    private OMElement wadlLocationConfiguration;
    private String archiveExtension = ".gar";
    private String tempFilePrefix = "wsdl";
    private boolean disableWSDLValidation = false;
    private boolean disableSchemaValidation = false;
    private boolean useOriginalSchema = false;
    private boolean createService = true;
    private boolean disableSymlinkCreation = true;
    // NOTE(review): static state written through an instance setter below — shared across handler instances.
    private static int numberOfRetry = 5;
    private boolean disableWADLValidation = false;
    // NOTE(review): also static and mutated via an instance setter (setSkipFileExtensions).
    private static List<String> skipFileExtensions = new ArrayList<String>();
    private String extensionsSeparator = ",";

    public void setNumberOfRetry(String numberOfRetry) {
        ZipWSDLMediaTypeHandler.numberOfRetry = Integer.parseInt(numberOfRetry);
    }

    public boolean isDisableSymlinkCreation() {
        return disableSymlinkCreation;
    }

    // Enabled only when the configured value is exactly "true".
    public void setDisableSymlinkCreation(String disableSymlinkCreation) {
        this.disableSymlinkCreation = Boolean.toString(true).equals(disableSymlinkCreation);
    }

    private int threadPoolSize = 50;

    private static final Log log = LogFactory.getLog(ZipWSDLMediaTypeHandler.class);

    public void setThreadPoolSize(String threadPoolSize) {
        this.threadPoolSize = Integer.parseInt(threadPoolSize);
    }

    public OMElement getWADLLocationConfiguration() {
        return wadlLocationConfiguration;
    }

    /**
     * Reads the WADL storage location from the handler configuration and normalises
     * it to carry a leading, but no trailing, path separator.
     */
    public void setWadlLocationConfiguration(OMElement locationConfiguration) throws RegistryException {
        Iterator confElements = locationConfiguration.getChildElements();
        while (confElements.hasNext()) {
            OMElement confElement = (OMElement)confElements.next();
            if (confElement.getQName().equals(new QName(locationTag))) {
                wadlLocation = confElement.getText();
                if(!wadlLocation.startsWith(RegistryConstants.PATH_SEPARATOR)){
                    wadlLocation = RegistryConstants.PATH_SEPARATOR + wadlLocation;
                }
                if(wadlLocation.endsWith(RegistryConstants.PATH_SEPARATOR)){
                    wadlLocation = wadlLocation.substring(0, wadlLocation.length() - 1);
                }
            }
        }
        this.wadlLocationConfiguration = locationConfiguration;
    }

    /**
     * @return createService
     */
    public boolean isCreateService() {
        return createService;
    }

    /**
     * Extracts createService property from the registry.xml
     *
     * @param createService createService property.
     */
    public void setCreateService(String createService) {
        this.createService = Boolean.valueOf(createService);
    }

    /**
     * Handles a PUT of a governance archive (.gar): extracts the archive to a temp
     * directory, classifies entries by extension (WSDL/XSD/WADL/Swagger/other), and
     * schedules one upload task per entry, run either inline or on a thread pool.
     *
     * @param requestContext the request being processed; its resource content is the archive bytes
     * @throws RegistryException if extraction fails, the archive is empty, or a nested
     *                           transaction was rolled back
     */
    public void put(RequestContext requestContext) throws RegistryException {
        if (!CommonUtil.isUpdateLockAvailable()) {
            return;
        }
        CommonUtil.acquireUpdateLock();
        // setting up session local path map for mounted setup.
        boolean pathMapSet = setSessionLocalPathMap(requestContext);
        try {
            Resource resource = requestContext.getResource();
            String path = requestContext.getResourcePath().getPath();
            try {
                // If the WSDL is already there, we don't need to re-run this handler unless the content is changed.
                // Re-running this handler causes issues with downstream handlers and other behaviour (ex:- lifecycles).
                // If you need to do a replace programatically, delete-then-replace.
                if (requestContext.getRegistry().resourceExists(path)) {
                    // TODO: Add logic to compare content, and return only if the content didn't change.
                    return;
                }
            } catch (Exception ignore) { }
            try {
                if (resource != null) {
                    Object resourceContent = resource.getContent();
                    InputStream in = new ByteArrayInputStream((byte[]) resourceContent);
                    Stack<File> fileList = new Stack<File>();
                    List<String> uriList = new LinkedList<String>();
                    List<UploadTask> tasks = new LinkedList<UploadTask>();
                    int threadPoolSize = this.threadPoolSize;
                    File tempFile = File.createTempFile(tempFilePrefix, archiveExtension);
                    // tempDir is tempFile's path with the archive extension stripped.
                    File tempDir = new File(tempFile.getAbsolutePath().substring(0,
                            tempFile.getAbsolutePath().length() - archiveExtension.length()));
                    try {
                        // Dump the in-memory archive bytes to the temp file.
                        BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(tempFile));
                        try {
                            byte[] contentChunk = new byte[1024];
                            int byteCount;
                            while ((byteCount = in.read(contentChunk)) != -1) {
                                out.write(contentChunk, 0, byteCount);
                            }
                            out.flush();
                        } finally {
                            out.close();
                        }
                        ZipEntry entry;
                        makeDir(tempDir);
                        ZipInputStream zs;
                        List<String> wsdlUriList = new LinkedList<String>();
                        List<String> xsdUriList = new LinkedList<String>();
                        List<String> wadlUriList = new LinkedList<String>();
                        List<String> swaggerUriList = new LinkedList<String>();
                        zs = new ZipInputStream(new FileInputStream(tempFile));
                        try {
                            entry = zs.getNextEntry();
                            while (entry != null) {
                                String entryName = entry.getName();
                                FileOutputStream os;
                                // NOTE(review): entryName is used unsanitised to build the extraction
                                // path — a crafted archive with "../" entries could escape tempDir
                                // (Zip Slip). Verify and normalise before release.
                                File file = new File(tempFile.getAbsolutePath().substring(0,
                                        tempFile.getAbsolutePath().length() - archiveExtension.length())
                                        + File.separator + entryName);
                                if (entry.isDirectory()) {
                                    if (!file.exists()) {
                                        makeDirs(file);
                                        fileList.push(file);
                                    }
                                    entry = zs.getNextEntry();
                                    continue;
                                }
                                File parentFile = file.getParentFile();
                                if (!parentFile.exists()) {
                                    makeDirs(parentFile);
                                }
                                os = new FileOutputStream(file);
                                try {
                                    fileList.push(file);
                                    byte[] contentChunk = new byte[1024];
                                    int byteCount;
                                    while ((byteCount = zs.read(contentChunk)) != -1) {
                                        os.write(contentChunk, 0, byteCount);
                                    }
                                } finally {
                                    os.close();
                                }
                                zs.closeEntry();
                                entry = zs.getNextEntry();
                                // Classify the extracted entry by extension and record a
                                // normalised file:/// URI pointing at the extracted copy.
                                if (entryName != null &&
                                        entryName.toLowerCase().endsWith(wsdlExtension)) {
                                    String uri = tempFile.toURI().toString();
                                    uri = uri.substring(0, uri.length() - archiveExtension.length()) + "/" + entryName;
                                    if (uri.startsWith("file:")) {
                                        uri = uri.substring(5);
                                    }
                                    while (uri.startsWith("/")) {
                                        uri = uri.substring(1);
                                    }
                                    uri = "file:///" + uri;
                                    if (uri.endsWith("/")) {
                                        uri = uri.substring(0, uri.length() -1);
                                    }
                                    wsdlUriList.add(uri);
                                } else if (entryName != null && entryName.toLowerCase().endsWith(xsdExtension)) {
                                    String uri = tempFile.toURI().toString();
                                    uri = uri.substring(0, uri.length() - archiveExtension.length()) + "/" + entryName;
                                    if (uri.startsWith("file:")) {
                                        uri = uri.substring(5);
                                    }
                                    while (uri.startsWith("/")) {
                                        uri = uri.substring(1);
                                    }
                                    uri = "file:///" + uri;
                                    if (uri.endsWith("/")) {
                                        uri = uri.substring(0, uri.length() -1);
                                    }
                                    xsdUriList.add(uri);
                                } else if(entryName != null && entryName.toLowerCase().endsWith(wadlExtension)){
                                    String uri = tempFile.toURI().toString();
                                    uri = uri.substring(0, uri.length() - archiveExtension.length()) + "/" + entryName;
                                    if (uri.startsWith("file:")) {
                                        uri = uri.substring(5);
                                    }
                                    while (uri.startsWith("/")) {
                                        uri = uri.substring(1);
                                    }
                                    uri = "file:///" + uri;
                                    if (uri.endsWith("/")) {
                                        uri = uri.substring(0, uri.length() -1);
                                    }
                                    wadlUriList.add(uri);
                                } else if(entryName != null && entryName.toLowerCase().endsWith(swaggerExtension)){
                                    String uri = tempFile.toURI().toString();
                                    uri = uri.substring(0, uri.length() - archiveExtension.length()) + "/" + entryName;
                                    if (uri.startsWith("file:")) {
                                        uri = uri.substring(5);
                                    }
                                    while (uri.startsWith("/")) {
                                        uri = uri.substring(1);
                                    }
                                    uri = "file:///" + uri;
                                    if (uri.endsWith("/")) {
                                        uri = uri.substring(0, uri.length() -1);
                                    }
                                    swaggerUriList.add(uri);
                                } else if (entryName != null) {
                                    // Any other entry is uploaded as a plain file unless its
                                    // extension is in the configured skip list.
                                    boolean isSkipFileExtension = false;
                                    for (String extension : skipFileExtensions) {
                                        if (entryName.toLowerCase().endsWith(extension.toLowerCase())) {
                                            isSkipFileExtension = true;
                                            break;
                                        }
                                    }
                                    if(!isSkipFileExtension){
                                        String uri = tempFile.toURI().toString();
                                        uri = uri.substring(0, uri.length() - archiveExtension.length()) + "/" + entryName;
                                        if (uri.startsWith("file:")) {
                                            uri = uri.substring(5);
                                        }
                                        while (uri.startsWith("/")) {
                                            uri = uri.substring(1);
                                        }
                                        uri = "file:///" + uri;
                                        if (uri.endsWith("/")) {
                                            uri = uri.substring(0, uri.length() -1);
                                        }
                                        uriList.add(uri);
                                    }
                                }
                            }
                        } finally {
                            zs.close();
                        }
                        Map<String, String> localPathMap = null;
                        if (CurrentSession.getLocalPathMap() != null) {
                            localPathMap = Collections.unmodifiableMap(CurrentSession.getLocalPathMap());
                        }
                        if (wsdlUriList.isEmpty() && xsdUriList.isEmpty() && wadlUriList.isEmpty()
                                && uriList.isEmpty() && swaggerUriList.isEmpty()) {
                            throw new RegistryException(
                                    "No Files found in the given archive");
                        }
                        for (String uri : wsdlUriList) {
                            tasks.add(new UploadWSDLTask(requestContext, uri,
                                    CurrentSession.getTenantId(),
                                    CurrentSession.getUserRegistry(), CurrentSession.getUserRealm(),
                                    CurrentSession.getUser(), CurrentSession.getCallerTenantId(),
                                    localPathMap));
                        }
                        for (String uri : xsdUriList) {
                            tasks.add(new UploadXSDTask(requestContext, uri,
                                    CurrentSession.getTenantId(),
                                    CurrentSession.getUserRegistry(), CurrentSession.getUserRealm(),
                                    CurrentSession.getUser(), CurrentSession.getCallerTenantId(),
                                    localPathMap));
                        }
                        for (String uri : wadlUriList) {
                            tasks.add(new UploadWadlTask(requestContext, uri,
                                    CurrentSession.getTenantId(),
                                    CurrentSession.getUserRegistry(), CurrentSession.getUserRealm(),
                                    CurrentSession.getUser(), CurrentSession.getCallerTenantId(),
                                    localPathMap));
                        }
                        for (String uri : swaggerUriList) {
                            tasks.add(new UploadSwaggerTask(requestContext, uri,
                                    CurrentSession.getTenantId(),
                                    CurrentSession.getUserRegistry(), CurrentSession.getUserRealm(),
                                    CurrentSession.getUser(), CurrentSession.getCallerTenantId(),
                                    localPathMap));
                        }
                        String mediaType = resource.getProperty("registry.mediaType");
                        if (mediaType != null) {
                            for (String uri : uriList) {
                                tasks.add(new UploadFileTask(requestContext, uri,
                                        CurrentSession.getTenantId(),
                                        CurrentSession.getUserRegistry(),
                                        CurrentSession.getUserRealm(), CurrentSession.getUser(),
                                        CurrentSession.getCallerTenantId(), localPathMap, mediaType));
                            }
                            uriList.clear();
                        }
                        // calculate thread pool size for efficient use of resources in concurrent
                        // update scenarios.
                        int toAdd = wsdlUriList.size() + xsdUriList.size();
                        if (toAdd < threadPoolSize) {
                            if (toAdd < (threadPoolSize / 8)) {
                                threadPoolSize = 0;
                            } else if (toAdd < (threadPoolSize / 2)) {
                                threadPoolSize = (threadPoolSize / 8);
                            } else {
                                threadPoolSize = (threadPoolSize / 4);
                            }
                        }
                    } finally {
                        in.close();
                        resourceContent = null;
                        resource.setContent(null);
                    }
                    uploadFiles(tasks, tempFile, fileList, tempDir, threadPoolSize, path, uriList, requestContext);
                }
            } catch (IOException e) {
                throw new RegistryException("Error occurred while unpacking Governance Archive", e);
            }
            if (Transaction.isRollbacked()) {
                throw new RegistryException("A nested transaction was rollbacked and therefore " +
                        "cannot proceed with this action.");
            }
            requestContext.setProcessingComplete(true);
        } finally {
            CommonUtil.releaseUpdateLock();
            removeSessionLocalPathMap(pathMapSet);
        }
    }

    /**
     * remove the Local PathMap from the CurrentSession
     * @param pathMapSet whether pathMap is set or not
     */
    private void removeSessionLocalPathMap(boolean pathMapSet) {
        if (pathMapSet) {
            CurrentSession.removeLocalPathMap();
        }
    }

    /**
     * Method will add Local PathMap to the CurrentSession, if it is not exists.
     * Set ARCHIEVE_UPLOAD param to true, it param is accessed to calculate registry path in mounted env.
* @param requestContext the request context to get mount points * @return whether pathMap is set or not */ private boolean setSessionLocalPathMap(RequestContext requestContext) { boolean pathMapSet = false; if (CurrentSession.getLocalPathMap() == null) { RegistryContext registryContext = requestContext.getRegistry().getRegistryContext(); if (registryContext != null && registryContext.getMounts() != null && !registryContext.getMounts().isEmpty()) { Map<String, String> localPathMap = new HashMap<String, String>(); CurrentSession.setLocalPathMap(localPathMap); CurrentSession.getLocalPathMap().put(CommonConstants.ARCHIEVE_UPLOAD, "true"); pathMapSet = true; } } else { CurrentSession.getLocalPathMap().put(CommonConstants.ARCHIEVE_UPLOAD, "true"); } return pathMapSet; } /** * Method that runs the WSDL upload procedure. * * @param requestContext the request context for the import/put operation * @param uri the URL from which the WSDL is imported * * @return the path at which the WSDL was uploaded to * * @throws RegistryException if the operation failed. 
*/
    protected String addWSDLFromZip(RequestContext requestContext, String uri)
            throws RegistryException {
        if (uri != null) {
            Resource local = requestContext.getRegistry().newResource();
            String version = requestContext.getResource().getProperty("version");
            local.setMediaType(wsdlMediaType);
            local.setProperty("version", version);
            // Unlike addWADLFromZip/addSwaggerFromZip, all archive resource properties are carried over.
            local.setProperties(requestContext.getResource().getProperties());
            requestContext.setSourceURL(uri);
            requestContext.setResource(local);
            // Replace the archive's resource name with the WSDL's own file name.
            String path = requestContext.getResourcePath().getPath();
            if (path.lastIndexOf("/") != -1) {
                path = path.substring(0, path.lastIndexOf("/"));
            } else {
                path = "";
            }
            String wsdlName = uri;
            if (wsdlName.lastIndexOf("/") != -1) {
                wsdlName = wsdlName.substring(wsdlName.lastIndexOf("/"));
            } else {
                wsdlName = "/" + wsdlName;
            }
            path = path + wsdlName;
            requestContext.setResourcePath(new ResourcePath(path));
            WSDLProcessor wsdlProcessor = buildWSDLProcessor(requestContext, this.useOriginalSchema);
            String addedPath = wsdlProcessor.addWSDLToRegistry(requestContext, uri, local, false, true,
                    disableWSDLValidation, disableSymlinkCreation);
            // Optionally publish the service to UDDI when enabled via system property.
            if (CommonConstants.ENABLE.equals(System.getProperty(
                    CommonConstants.UDDI_SYSTEM_PROPERTY))) {
                AuthToken authToken = UDDIUtil.getPublisherAuthToken();
                if (authToken != null) {
                    BusinessServiceInfo businessServiceInfo = new BusinessServiceInfo();
                    WSDLInfo wsdlInfo = wsdlProcessor.getMasterWSDLInfo();
                    businessServiceInfo.setServiceWSDLInfo(wsdlInfo);
                    UDDIPublisher publisher = new UDDIPublisher();
                    publisher.publishBusinessService(authToken, businessServiceInfo);
                }
            }
            log.debug("WSDL : " + addedPath);
            return addedPath;
        }
        return null;
    }

    /**
     * Method that runs the WADL upload procedure.
     *
     * @param requestContext requestContext the request context for the import/put operation
     * @param uri            the URL from which the WADL is imported
     *
     * @return the path at which the WADL was uploaded to
     *
     * @throws RegistryException if the operation failed.
*/ protected String addWADLFromZip(RequestContext requestContext, String uri) throws RegistryException { if (uri != null) { Resource local = requestContext.getRegistry().newResource(); String version = requestContext.getResource().getProperty("version"); local.setMediaType(wadlMediaType); local.setProperty("version",version); requestContext.setSourceURL(uri); requestContext.setResource(local); String path = requestContext.getResourcePath().getPath(); if (path.lastIndexOf("/") != -1) { path = path.substring(0, path.lastIndexOf("/")); } else { path = ""; } String wadlName = uri; if (wadlName.lastIndexOf("/") != -1) { wadlName = wadlName.substring(wadlName.lastIndexOf("/")); } else { wadlName = "/" + wadlName; } path = path + wadlName; requestContext.setResourcePath(new ResourcePath(path)); WADLProcessor wadlProcessor = new WADLProcessor (requestContext); wadlProcessor.setCreateService(isCreateService()); return wadlProcessor.importWADLToRegistry(requestContext, disableWADLValidation); } return null; } /** * Method that runs the Swagger upload procedure. * * @param requestContext requestContext the request context for the import/put operation * @param uri the URL from which the swagger is imported * * @return the path at which the Swagger was uploaded to * * @throws RegistryException if the operation failed. 
*/ protected String addSwaggerFromZip(RequestContext requestContext, String uri) throws RegistryException { if (uri != null) { Resource local = requestContext.getRegistry().newResource(); String version = requestContext.getResource().getProperty("version"); local.setMediaType(swaggerMediaType); local.setProperty("version",version); requestContext.setSourceURL(uri); requestContext.setResource(local); String path = requestContext.getResourcePath().getPath(); if (path.lastIndexOf("/") != -1) { path = path.substring(0, path.lastIndexOf("/")); } else { path = ""; } String swaggerName = uri; if (swaggerName.lastIndexOf("/") != -1) { swaggerName = swaggerName.substring(swaggerName.lastIndexOf("/")); } else { swaggerName = "/" + swaggerName; } path = path + swaggerName; requestContext.setResourcePath(new ResourcePath(path)); SwaggerProcessor swaggerProcessor = new SwaggerProcessor (requestContext, isCreateService()); InputStream inputStream = null; try { inputStream = new URL(uri).openStream(); return swaggerProcessor.processSwagger(inputStream, getChrootedSwaggerLocation(requestContext.getRegistryContext()), uri); } catch (IOException e) { throw new RegistryException("Swagger URI is invalid", e); } finally { if (inputStream != null){ try { inputStream.close(); } catch (IOException e) { log.warn("Error while deleting Swagger temp files"); } } } } return null; } /** * Method that runs the Schema upload procedure. * * @param requestContext the request context for the import/put operation * @param uri the URL from which the Schema is imported * * @return the path at which the schema was uploaded to * * @throws RegistryException if the operation failed. 
*/
    protected String addSchemaFromZip(RequestContext requestContext, String uri)
            throws RegistryException {
        if (uri != null) {
            Resource local = requestContext.getRegistry().newResource();
            String version = requestContext.getResource().getProperty("version");
            local.setMediaType(xsdMediaType);
            local.setProperty("version", version);
            local.setProperties(requestContext.getResource().getProperties());
            requestContext.setSourceURL(uri);
            requestContext.setResource(local);
            // Replace the archive's resource name with the schema's own file name.
            String path = requestContext.getResourcePath().getPath();
            if (path.lastIndexOf("/") != -1) {
                path = path.substring(0, path.lastIndexOf("/"));
            } else {
                path = "";
            }
            String xsdName = uri;
            if (xsdName.lastIndexOf("/") != -1) {
                xsdName = xsdName.substring(xsdName.lastIndexOf("/"));
            } else {
                xsdName = "/" + xsdName;
            }
            path = path + xsdName;
            requestContext.setResourcePath(new ResourcePath(path));
            WSDLValidationInfo validationInfo = null;
            try {
                if (!disableSchemaValidation) {
                    validationInfo = SchemaValidator.validate(new XMLInputSource(null, uri, null));
                }
            } catch (Exception e) {
                throw new RegistryException("Exception occured while validating the schema" , e);
            }
            SchemaProcessor schemaProcessor =
                    buildSchemaProcessor(requestContext, validationInfo, this.useOriginalSchema);
            String addedPath = schemaProcessor
                    .importSchemaToRegistry(requestContext, path,
                            getChrootedSchemaLocation(requestContext.getRegistryContext()), true,
                            disableSymlinkCreation);
            requestContext.setActualPath(addedPath);
            log.debug("XSD : " + addedPath);
            return addedPath;
        }
        return null;
    }

    // Absolute (chrooted) governance path for stored schemas.
    private String getChrootedSchemaLocation(RegistryContext registryContext) {
        return RegistryUtils.getAbsolutePath(registryContext,
                RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH + schemaLocation);
    }

    // Absolute (chrooted) governance path for stored WADLs.
    private String getChrootedWADLLocation(RegistryContext registryContext) {
        return RegistryUtils.getAbsolutePath(registryContext,
                RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH + wadlLocation);
    }

    // Absolute (chrooted) governance path for stored Swagger documents.
    private String getChrootedSwaggerLocation(RegistryContext registryContext) {
        return RegistryUtils.getAbsolutePath(registryContext,
                RegistryConstants.GOVERNANCE_REGISTRY_BASE_PATH + swaggerLocation);
    }

    public void importResource(RequestContext context) {
        // We don't support importing .gar files. This is meant only for uploading WSDL files
        // and imports from the local filesystem.
        log.warn("The imported Governance Web Archive will not be extracted. To extract the content" +
                " upload the archive from the file system.");
    }

    public void setWsdlMediaType(String wsdlMediaType) {
        this.wsdlMediaType = wsdlMediaType;
    }

    public void setWsdlExtension(String wsdlExtension) {
        this.wsdlExtension = wsdlExtension;
    }

    public void setSchemaMediaType(String xsdMediaType) {
        this.xsdMediaType = xsdMediaType;
    }

    public void setSchemaExtension(String xsdExtension) {
        this.xsdExtension = xsdExtension;
    }

    public void setArchiveExtension(String archiveExtension) {
        this.archiveExtension = archiveExtension;
    }

    public void setTempFilePrefix(String tempFilePrefix) {
        this.tempFilePrefix = tempFilePrefix;
    }

    public void setDisableWSDLValidation(String disableWSDLValidation) {
        this.disableWSDLValidation = Boolean.toString(true).equals(disableWSDLValidation);
    }

    public void setDisableSchemaValidation(String disableSchemaValidation) {
        this.disableSchemaValidation = Boolean.toString(true).equals(disableSchemaValidation);
    }

    public void setDisableWADLValidation(String disableWADLValidation) {
        // NOTE(review): Boolean.getBoolean reads a SYSTEM PROPERTY named by its argument,
        // so this is effectively always false for values like "true". The sibling setters
        // use "true".equals(...) — this one likely should too; confirm intent.
        this.disableWADLValidation = Boolean.getBoolean(disableWADLValidation);
    }

    public void setUseOriginalSchema(String useOriginalSchema) {
        this.useOriginalSchema = Boolean.toString(true).equals(useOriginalSchema);
    }

    public void setSkipFileExtensions(String skipFileExtensions) {
        // NOTE(review): writes a static field through an instance setter; Arrays.asList
        // returns a fixed-size list — fine here since it is only iterated.
        if (skipFileExtensions != null) {
            String[] extensions = skipFileExtensions.split(this.extensionsSeparator);
            this.skipFileExtensions = Arrays.asList(extensions);
        }
    }

    /**
     * {@inheritDoc}
     */
    protected void onPutCompleted(String path, Map<String, String> addedResources,
                                  List<String> otherResources, RequestContext requestContext)
//Final result printing in console. throws RegistryException { Registry configRegistry = RegistryCoreServiceComponent.getRegistryService().getConfigSystemRegistry(); String resourceName = RegistryUtils.getResourceName(requestContext.getResourcePath().getPath()); OMFactory factory = OMAbstractFactory.getOMFactory(); OMElement garElement = factory.createOMElement( new QName(CommonConstants.REG_GAR_PATH_MAPPING_RESOURCE)); garElement.addAttribute(factory.createOMAttribute( CommonConstants.REG_GAR_PATH_MAPPING_RESOURCE_ATTR_PATH, null, requestContext.getResourcePath().getPath())); log.info("Total Number of Files Uploaded: " + addedResources.size()); List<String> failures = new LinkedList<String>(); for (Map.Entry<String, String> e : addedResources.entrySet()) { if (e.getValue() == null) { failures.add(e.getKey()); log.info("Failure " + failures.size() + ": " + e.getKey()); } else { OMElement targetElement = factory.createOMElement( new QName(CommonConstants.REG_GAR_PATH_MAPPING_RESOURCE_TARGET)); targetElement.setText(e.getValue()); garElement.addChild(targetElement); } } String pathMappingResourceName = resourceName; if (resourceName.contains(".")){ pathMappingResourceName = resourceName.substring(0, resourceName.lastIndexOf(".")); } String pathMappingResourcePath = CommonConstants.REG_GAR_PATH_MAPPING + pathMappingResourceName; boolean garMappingExists = configRegistry.resourceExists(pathMappingResourcePath); if (garMappingExists) { Resource pathMappingResource = configRegistry.get(pathMappingResourcePath); try { OMElement garMappingElement = AXIOMUtil.stringToOM( new String((byte[]) pathMappingResource.getContent())); garMappingElement.addChild(garElement); pathMappingResource.setContent(garMappingElement.toString()); configRegistry.put(pathMappingResourcePath, pathMappingResource); } catch (XMLStreamException e) { log.warn("Error occurred while retrieving the content of GAR mapping file ", e); } } log.info("Total Number of Files Failed to Upload: " + 
failures.size()); if (otherResources.size() > 0) { log.info("Total Number of Files Not-Uploaded: " + otherResources.size()); } } protected void uploadFiles(List<UploadTask> tasks, File tempFile, Stack<File> fileList, File tempDir, int poolSize, String path, List<String> uriList, RequestContext requestContext) throws RegistryException { CommonUtil.loadImportedArtifactMap(); try { if (poolSize <= 0) { boolean updateLockAvailable = CommonUtil.isUpdateLockAvailable(); if (!updateLockAvailable) { CommonUtil.releaseUpdateLock(); } try { for (UploadTask task : tasks) { task.run(); } } finally { if (!updateLockAvailable) { CommonUtil.acquireUpdateLock(); } } } else { ExecutorService executorService = Executors.newFixedThreadPool(poolSize); if (!CommonUtil.isArtifactIndexMapExisting()) { CommonUtil.createArtifactIndexMap(); } if (!CommonUtil.isSymbolicLinkMapExisting()) { CommonUtil.createSymbolicLinkMap(); } for (UploadTask task : tasks) { executorService.submit(task); } executorService.shutdown(); while (!executorService.isTerminated()) { } } } finally { CommonUtil.clearImportedArtifactMap(); } try { if (CommonUtil.isArtifactIndexMapExisting()) { Map<String, String> artifactIndexMap = CommonUtil.getAndRemoveArtifactIndexMap(); if (log.isDebugEnabled()) { for (Map.Entry<String, String> entry : artifactIndexMap.entrySet()) { log.debug("Added Artifact Entry: " + entry.getKey()); } } // CommonUtil.addGovernanceArtifactEntriesWithRelativeValues( // CommonUtil.getUnchrootedSystemRegistry(requestContext), artifactIndexMap); } Registry registry = requestContext.getRegistry(); if (!isDisableSymlinkCreation() && CommonUtil.isSymbolicLinkMapExisting()) { Map<String, String> symbolicLinkMap = CommonUtil.getAndRemoveSymbolicLinkMap(); for (Map.Entry<String, String> entry : symbolicLinkMap.entrySet()) { if (log.isDebugEnabled()) { log.debug("Added Symbolic Link: " + entry.getKey()); } try { if (registry.resourceExists(entry.getKey())) { registry.removeLink(entry.getKey()); } } catch 
(RegistryException ignored) { // we are not bothered above errors in getting rid of symbolic links. } requestContext.getSystemRegistry().createLink(entry.getKey(), entry.getValue()); } } } catch (RegistryException e) { log.error("Unable to build artifact index.", e); } Map<String, String> taskResults = new LinkedHashMap<String, String>(); for (UploadTask task : tasks) { if (task.getFailed()) { taskResults.put(task.getUri(), null); } else { taskResults.put(task.getUri(), task.getResult()); } } onPutCompleted(path, taskResults, uriList, requestContext); try { delete(tempFile); while (!fileList.isEmpty()) { delete(fileList.pop()); } FileUtils.deleteDirectory(tempDir); } catch (IOException e) { log.error("Unable to cleanup temporary files", e); } log.info("Completed uploading files from archive file"); } protected static abstract class UploadTask implements Runnable { private String uri; private RequestContext requestContext; private int tenantId = -1; private UserRegistry userRegistry; private UserRealm userRealm; private String userId; private int callerTenantId; private Map<String, String> localPathMap; private Random random = new Random(10); protected String result = null; protected boolean failed = false; protected int retries = 0; public UploadTask(RequestContext requestContext, String uri, int tenantId, UserRegistry userRegistry, UserRealm userRealm, String userId, int callerTenantId, Map<String, String> localPathMap) { this.userRegistry = userRegistry; this.userRealm = userRealm; this.tenantId = tenantId; this.requestContext = requestContext; this.uri = uri; this.userId = userId; this.callerTenantId = callerTenantId; this.localPathMap = localPathMap; } public void run() { try { PrivilegedCarbonContext.startTenantFlow(); //This is for fixing CARBON-14469. 
PrivilegedCarbonContext.getThreadLocalCarbonContext().setTenantId(tenantId, true); //set user name to set in swagger import provider PrivilegedCarbonContext.getThreadLocalCarbonContext().setUsername(userId); // File is already uploaded via wsdl or xsd imports those are skip if (CommonUtil.isImportedArtifactExisting(new File(uri).toString())) { failed = false; result = "added from import"; return; } doWork(); } finally { PrivilegedCarbonContext.endTenantFlow(); } } protected void retry() { //Number of retry can be configurable via handler configuration (<property name="numberOfRetry">1</property>) if (retries < ZipWSDLMediaTypeHandler.numberOfRetry) { ++retries; log.info("Retrying to upload resource: " + uri); int i = random.nextInt(10); if (log.isDebugEnabled()) { log.debug("Waiting for " + i + " seconds"); } try { Thread.sleep(1000 * i); } catch (InterruptedException ignored) { } doWork(); } else { failed = true; } } private void doWork() { CurrentSession.setTenantId(tenantId); CurrentSession.setUserRegistry(userRegistry); CurrentSession.setUserRealm(userRealm); CurrentSession.setUser(userId); CurrentSession.setCallerTenantId(callerTenantId); if (localPathMap != null) { CurrentSession.setLocalPathMap(localPathMap); } try { if (CommonUtil.isUpdateLockAvailable()) { CommonUtil.acquireUpdateLock(); try { RequestContext requestContext = new RequestContext(this.requestContext.getRegistry(), this.requestContext.getRepository(), this.requestContext.getVersionRepository()); requestContext.setResourcePath(this.requestContext.getResourcePath()); requestContext.setResource(this.requestContext.getResource()); requestContext.setOldResource(this.requestContext.getOldResource()); doProcessing(requestContext, uri); } finally { CommonUtil.releaseUpdateLock(); } } } catch (RegistryException e) { log.error("An error occurred while uploading "+uri, e); retry(); } catch (RuntimeException e) { log.error("An unhandled exception occurred while uploading " + uri, e); retry(); } finally { 
CurrentSession.removeUser(); CurrentSession.removeUserRealm(); CurrentSession.removeUserRegistry(); CurrentSession.removeTenantId(); CurrentSession.removeCallerTenantId(); if (localPathMap != null) { CurrentSession.removeLocalPathMap(); } // get rid of the reference to the request context at the end. requestContext = null; } } protected abstract void doProcessing(RequestContext requestContext, String uri) throws RegistryException; public String getUri() { return uri; } public String getResult() { return result; } public boolean getFailed() { return failed; } } protected class UploadFileTask extends UploadTask { String mediaType; public UploadFileTask(RequestContext requestContext, String uri, int tenantId, UserRegistry userRegistry, UserRealm userRealm, String userId, int callerTenantId, Map<String, String> localPathMap, String mediaType) { super(requestContext, uri, tenantId, userRegistry, userRealm, userId, callerTenantId, localPathMap); this.mediaType = mediaType; } protected void doProcessing(RequestContext requestContext, String uri) throws RegistryException { Registry registry = requestContext.getRegistry(); Resource resource = registry.newResource(); String version = requestContext.getResource().getProperty("version"); if (resource.getUUID() == null) { resource.setUUID(UUID.randomUUID().toString()); } if (version != null) { resource.setProperty("version", version); } resource.setMediaType(this.mediaType); InputStream inputStream; try { inputStream = new URL(uri).openStream(); } catch (IOException e) { throw new RegistryException("The URL " + uri + " is incorrect.", e); } ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); int nextChar; try { while ((nextChar = inputStream.read()) != -1) { outputStream.write(nextChar); } outputStream.flush(); } catch (IOException e) { throw new RegistryException("Failed to read content from URL " + uri, e); } resource.setContent(outputStream.toByteArray()); String path = 
RegistryUtils.getParentPath(requestContext.getResourcePath().getPath()) + RegistryConstants.PATH_SEPARATOR + RegistryUtils.getResourceName(uri); result = registry.put(path, resource); } } protected class UploadXSDTask extends UploadTask { public UploadXSDTask(RequestContext requestContext, String uri, int tenantId, UserRegistry userRegistry, UserRealm userRealm, String userId, int callerTenantId, Map<String, String> localPathMap) { super(requestContext, uri, tenantId, userRegistry, userRealm, userId, callerTenantId, localPathMap); } protected void doProcessing(RequestContext requestContext, String uri) throws RegistryException { result = addSchemaFromZip(requestContext, uri); } } protected class UploadWSDLTask extends UploadTask { public UploadWSDLTask(RequestContext requestContext, String uri, int tenantId, UserRegistry userRegistry, UserRealm userRealm, String userId, int callerTenantId, Map<String, String> localPathMap) { super(requestContext, uri, tenantId, userRegistry, userRealm, userId, callerTenantId, localPathMap); } protected void doProcessing(RequestContext requestContext, String uri) throws RegistryException { result = addWSDLFromZip(requestContext, uri); } } protected class UploadWadlTask extends UploadTask { public UploadWadlTask(RequestContext requestContext, String uri, int tenantId, UserRegistry userRegistry, UserRealm userRealm, String userId, int callerTenantId, Map<String, String> localPathMap) { super(requestContext, uri, tenantId, userRegistry, userRealm, userId, callerTenantId, localPathMap); } protected void doProcessing(RequestContext requestContext, String uri) throws RegistryException { result = addWADLFromZip(requestContext, uri); } } protected class UploadSwaggerTask extends UploadTask { public UploadSwaggerTask(RequestContext requestContext, String uri, int tenantId, UserRegistry userRegistry, UserRealm userRealm, String userId, int callerTenantId, Map<String, String> localPathMap) { super(requestContext, uri, tenantId, userRegistry, 
userRealm, userId, callerTenantId, localPathMap); } protected void doProcessing(RequestContext requestContext, String uri) throws RegistryException { result = addSwaggerFromZip(requestContext, uri); } } }
apache-2.0
tectronics/scalaris
contrib/wikipedia/src/de/zib/scalaris/examples/wikipedia/bliki/WikiServletScalaris.java
26708
/** * Copyright 2011-2013 Zuse Institute Berlin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.zib.scalaris.examples.wikipedia.bliki; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.math.BigInteger; import java.util.Arrays; import java.util.Calendar; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Random; import java.util.TreeSet; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.xml.sax.InputSource; import org.xml.sax.XMLReader; import org.xml.sax.helpers.XMLReaderFactory; import de.zib.scalaris.Connection; import de.zib.scalaris.ConnectionFactory; import de.zib.scalaris.ConnectionPool; import de.zib.scalaris.NodeDiscovery; import de.zib.scalaris.TransactionSingleOp; import de.zib.scalaris.examples.wikipedia.Options; import de.zib.scalaris.examples.wikipedia.PageHistoryResult; import de.zib.scalaris.examples.wikipedia.RevisionResult; import de.zib.scalaris.examples.wikipedia.SavePageResult; import de.zib.scalaris.examples.wikipedia.ScalarisDataHandlerNormalised; import de.zib.scalaris.examples.wikipedia.ScalarisDataHandlerUnnormalised; import de.zib.scalaris.examples.wikipedia.ValueResult; import de.zib.scalaris.examples.wikipedia.data.Contribution; import 
de.zib.scalaris.examples.wikipedia.data.Revision; import de.zib.scalaris.examples.wikipedia.data.SiteInfo; import de.zib.scalaris.examples.wikipedia.data.xml.SAXParsingInterruptedException; import de.zib.scalaris.examples.wikipedia.data.xml.WikiDump; import de.zib.scalaris.examples.wikipedia.data.xml.WikiDumpHandler; import de.zib.scalaris.examples.wikipedia.data.xml.WikiDumpPreparedSQLiteToScalaris; import de.zib.scalaris.examples.wikipedia.data.xml.WikiDumpToScalarisHandler; import de.zib.tools.CircularByteArrayOutputStream; /** * Wiki servlet connecting to Scalaris. * * @author Nico Kruber, kruber@zib.de */ public class WikiServletScalaris extends WikiServlet<Connection> { private static final long serialVersionUID = 1L; private static final int CONNECTION_POOL_SIZE = 200; private static final int MAX_WAIT_FOR_CONNECTION = 10000; // 10s private ConnectionPool cPool; protected NodeDiscovery nodeDiscovery; private boolean autoImport; /** * Default constructor creating the servlet. */ public WikiServletScalaris() { super(); } /** * Servlet initialisation: creates the connection to the erlang node and * imports site information. 
*/ @Override public void init2(ServletConfig config) throws ServletException { super.init2(config); Properties properties = new Properties(); try { InputStream fis = config.getServletContext().getResourceAsStream("/WEB-INF/scalaris.properties"); if (fis != null) { properties.load(fis); properties.setProperty("PropertyLoader.loadedfile", "/WEB-INF/scalaris.properties"); fis.close(); } else { properties = null; } } catch (IOException e) { // e.printStackTrace(); properties = null; } ConnectionFactory cFactory; if (properties != null) { cFactory = new ConnectionFactory(properties); } else { cFactory = new ConnectionFactory(); cFactory.setClientName("wiki"); } Random random = new Random(); String clientName = new BigInteger(128, random).toString(16); cFactory.setClientName(cFactory.getClientName() + '_' + clientName); cFactory.setClientNameAppendUUID(true); // cFactory.setConnectionPolicy(new RoundRobinConnectionPolicy(cFactory.getNodes())); cPool = new ConnectionPool(cFactory, CONNECTION_POOL_SIZE); if (Options.getInstance().SCALARIS_NODE_DISCOVERY > 0) { nodeDiscovery = new NodeDiscovery(cPool); nodeDiscovery.startWithFixedDelay(Options.getInstance().SCALARIS_NODE_DISCOVERY); } } @Override protected void startAutoImport() { String dumpsPath = getServletContext().getRealPath("/WEB-INF/dumps"); if (!initialized && !loadSiteInfo() || !currentImport.isEmpty()) { String req_import = null; // get auto-import dumps: File dumpsDir = new File(dumpsPath); if (dumpsDir.isDirectory()) { List<String> autoImportFiles = Arrays.asList(dumpsDir.list(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return MATCH_WIKI_AUTOIMPORT_FILE.matcher(name).matches(); } })); if (!autoImportFiles.isEmpty()) { // use the first auto-import file req_import = autoImportFiles.get(0); // remove .auto from filename: req_import = req_import.substring(0, req_import.length() - ".auto".length()); startImport(dumpsPath, req_import, 2, null); autoImport = true; } } } } /** * 
Loads the siteinfo object from Scalaris. * * @return <tt>true</tt> on success, * <tt>false</tt> if not found or no connection available */ @Override protected synchronized boolean loadSiteInfo() { TransactionSingleOp scalaris_single; try { Connection conn = cPool.getConnection(MAX_WAIT_FOR_CONNECTION); if (conn == null) { System.err.println("Could not get a connection to Scalaris for siteinfo, waited " + MAX_WAIT_FOR_CONNECTION + "ms"); return false; } scalaris_single = new TransactionSingleOp(conn); try { siteinfo = scalaris_single.read("siteinfo").jsonValue(SiteInfo.class); // TODO: fix siteinfo's base url namespace = new MyNamespace(siteinfo); initialized = true; setLocalisedSpecialPageNames(); } catch (Exception e) { // no warning here - this probably is an empty wiki return false; } } catch (Exception e) { System.out.println(e); e.printStackTrace(); return false; } return true; } /** * Sets up the connection to the Scalaris erlang node once on the server. * * In case of errors, the <tt>error</tt> and <tt>notice</tt> attributes of * the <tt>request</tt> object are set appropriately if not <tt>null</tt>. 
* * @param request * the request to the servlet (may be <tt>null</tt>) * * @return a valid connection of <tt>null</tt> if an error occurred */ @Override protected Connection getConnection(HttpServletRequest request) { try { Connection conn = cPool.getConnection(MAX_WAIT_FOR_CONNECTION); if (conn == null) { System.err.println("Could not get a connection to Scalaris, waited " + MAX_WAIT_FOR_CONNECTION + "ms"); if (request != null) { setParam_error(request, "ERROR: DB unavailable"); addToParam_notice(request, "error: <pre>Could not get a connection to Scalaris, waited " + MAX_WAIT_FOR_CONNECTION + "ms</pre>"); } return null; } return conn; } catch (Exception e) { if (request != null) { setParam_error(request, "ERROR: DB unavailable"); addToParam_notice(request, "error: <pre>" + e.getMessage() + "</pre>"); } else { System.out.println(e); e.printStackTrace(); } return null; } } /** * Releases the connection back into the Scalaris connection pool. * * @param request * the request to the servlet or <tt>null</tt> if there is none * @param conn * the connection to release */ @Override protected void releaseConnection(HttpServletRequest request, Connection conn) { cPool.releaseConnection(conn); } /** * Shows a page for importing a DB dump. * * @param request * the request of the current operation * @param response * the response of the current operation * * @throws IOException * @throws ServletException */ @Override protected synchronized void showImportPage(HttpServletRequest request, HttpServletResponse response, Connection connection, WikiPageBean page) throws ServletException, IOException { page.setNotAvailable(true); StringBuilder content = new StringBuilder(); String dumpsPath = getServletContext().getRealPath("/WEB-INF/dumps"); final String serviceUser = page.getServiceUser().isEmpty() ? 
"" : "&service_user=" + page.getServiceUser(); if (currentImport.isEmpty() && importHandler == null) { TreeSet<String> availableDumps = new TreeSet<String>(); File dumpsDir = new File(dumpsPath); if (dumpsDir.isDirectory()) { availableDumps.addAll(Arrays.asList(dumpsDir.list(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return MATCH_WIKI_IMPORT_FILE.matcher(name).matches(); } }))); } // get parameters: String req_import = request.getParameter("import"); if (req_import == null || !availableDumps.contains(req_import)) { content.append("<h2>Please select a wiki dump to import</h2>\n"); content.append("<form method=\"get\" action=\"wiki\">\n"); if (!page.getServiceUser().isEmpty()) { content.append("<input type=\"hidden\" value=\"" + page.getServiceUser() + "\" name=\"service_user\"/>"); } content.append("<p>\n"); content.append(" <select name=\"import\" size=\"10\" style=\"width:500px;\">\n"); for (String dump: availableDumps) { content.append(" <option>" + dump + "</option>\n"); } content.append(" </select>\n"); content.append(" </p>\n"); content.append(" <p>Maximum number of revisions per page: <input name=\"max_revisions\" size=\"2\" value=\"2\" /></br><span style=\"font-size:80%\">(<tt>-1</tt> to import everything)</span></p>\n"); content.append(" <p>No entry newer than: <input name=\"max_time\" size=\"20\" value=\"\" /></br><span style=\"font-size:80%\">(ISO8601 format, e.g. 
<tt>2004-01-07T08:09:29Z</tt> - leave empty to import everything)</span></p>\n"); content.append(" <input type=\"submit\" value=\"Import\" />\n"); content.append("</form>\n"); content.append("<p>Note: You will be re-directed to the main page when the import finishes.</p>"); } else { content.append("<h2>Importing \"" + req_import + "\"...</h2>\n"); try { int maxRevisions = parseInt(request.getParameter("max_revisions"), 2); Calendar maxTime = parseDate(request.getParameter("max_time"), null); startImport(dumpsPath, req_import, maxRevisions, maxTime); response.setHeader("Refresh", "2; url = wiki?import=" + currentImport + serviceUser + "#refresh"); content.append("<p>Current log file (refreshed automatically every " + IMPORT_REDIRECT_EVERY + " seconds):</p>\n"); content.append("<pre>"); content.append("starting import...\n"); content.append("</pre>"); content.append("<p><a name=\"refresh\" href=\"wiki?import=" + currentImport + serviceUser + "#refresh\">refresh</a></p>"); if (importHandler.hasStopSupport()) { content.append("<p><a href=\"wiki?stop_import=" + currentImport + serviceUser + "\">stop</a> (WARNING: pages may be incomplete due to missing templates)</p>"); } } catch (Exception e) { setParam_error(request, "ERROR: import failed"); addToParam_notice(request, "error: <pre>" + e.getMessage() + "</pre>"); currentImport = ""; } } } else if (!currentImport.isEmpty() && importHandler != null) { content.append("<h2>Importing \"" + currentImport + "\"...</h2>\n"); String req_stop_import = request.getParameter("stop_import"); boolean stopImport = false; if (importHandler.hasStopSupport() && req_stop_import != null && !req_stop_import.isEmpty()) { stopImport = true; importHandler.stopParsing(); content.append("<p>Current log file:</p>\n"); } else { response.setHeader("Refresh", IMPORT_REDIRECT_EVERY + "; url = wiki?import=" + currentImport + serviceUser + "#refresh"); content.append("<p>Current log file (refreshed automatically every " + IMPORT_REDIRECT_EVERY + " 
seconds):</p>\n"); } content.append("<pre>"); String log = importLog.toString(); int start = log.indexOf("\n"); if (start != -1) { content.append(log.substring(start)); } content.append("</pre>"); if (!stopImport) { content.append("<p><a name=\"refresh\" href=\"wiki?import=" + currentImport + serviceUser + "#refresh\">refresh</a></p>"); if (importHandler.hasStopSupport()) { content.append("<p><a href=\"wiki?stop_import=" + currentImport + serviceUser + "\">stop</a> (WARNING: pages may be incomplete due to missing templates)</p>"); } } else { content.append("<p>Import has been stopped by the user. Return to <a href=\"wiki?title=" + MAIN_PAGE + serviceUser + "\">" + MAIN_PAGE + "</a>.</p>"); } } else if (!currentImport.isEmpty() && importHandler == null) { content.append("<h2>Import of \"" + currentImport + "\" finished</h2>\n"); content.append("<p>Current log file:</p>\n"); content.append("<pre>"); String log = importLog.toString(); int start = log.indexOf("\n"); if (start != -1) { content.append(log.substring(start)); } content.append("</pre>"); String req_stop_import = request.getParameter("stop_import"); if (req_stop_import != null && !req_stop_import.isEmpty()) { synchronized (WikiServletScalaris.this) { importLog.close(); WikiServletScalaris.this.currentImport = ""; } response.setHeader("Refresh", "1; url = wiki?title=" + MAIN_PAGE + serviceUser + ""); content.append("<p>If not re-directed automatically: Return to <a href=\"wiki?title=" + MAIN_PAGE + serviceUser + "\">" + MAIN_PAGE + "</a></p>\n"); } else { content.append("<p><a href=\"wiki?stop_import=" + currentImport + serviceUser + "\">clear log and return to Main Page</a></p>"); } } page.setNotice(WikiServlet.getParam_notice(request)); page.setError(getParam_error(request)); page.setTitle("Import Wiki dump"); page.setPage(content.toString()); forwardToPageJsp(request, response, connection, page, "page.jsp"); } private void startImport(String dumpsPath, String req_import, int maxRevisions, Calendar maxTime) 
throws RuntimeException { currentImport = req_import; importLog = new CircularByteArrayOutputStream(1024 * 1024); PrintStream ps = new PrintStream(importLog); ps.println("starting import..."); String fileName = dumpsPath + File.separator + req_import; if (fileName.endsWith(".db")) { importHandler = new WikiDumpPreparedSQLiteToScalaris(fileName, Options.getInstance(), 1, 1, cPool.getConnectionFactory()); } else { importHandler = new WikiDumpToScalarisHandler( de.zib.scalaris.examples.wikipedia.data.xml.Main.blacklist, null, maxRevisions, null, maxTime, cPool.getConnectionFactory()); } importHandler.setMsgOut(ps); this.new ImportThread(importHandler, fileName, ps).start(); } private class ImportThread extends Thread { private WikiDump handler; private String fileName; private PrintStream ps; public ImportThread(WikiDump handler, String fileName, PrintStream ps) { this.handler = handler; this.fileName = fileName; this.ps = ps; } /* (non-Javadoc) * @see java.lang.Thread#run() */ @Override public void run() { InputSource[] is = null; try { handler.setUp(); if (handler instanceof WikiDumpHandler) { WikiDumpHandler xmlHandler = (WikiDumpHandler) handler; XMLReader reader = XMLReaderFactory.createXMLReader(); reader.setContentHandler(xmlHandler); is = de.zib.scalaris.examples.wikipedia.data.xml.Main.getFileReader(fileName); for (InputSource source : is) { reader.parse(source); } xmlHandler.new ReportAtShutDown().reportAtEnd(); ps.println("import finished"); } else if (handler instanceof WikiDumpPreparedSQLiteToScalaris) { WikiDumpPreparedSQLiteToScalaris sqlHandler = (WikiDumpPreparedSQLiteToScalaris) handler; sqlHandler.writeToScalaris(); sqlHandler.new ReportAtShutDown().reportAtEnd(); } } catch (Exception e) { if (e instanceof SAXParsingInterruptedException) { // this is ok - we told the parser to stop } else { e.printStackTrace(ps); } } finally { handler.tearDown(); if (is != null) { try { for (InputSource source : is) { source.getCharacterStream().close(); } } catch 
(IOException e) { // don't care } } } synchronized (WikiServletScalaris.this) { WikiServletScalaris.this.importHandler = null; WikiServletScalaris.this.updateExistingPages(); if (WikiServletScalaris.this.autoImport) { WikiServletScalaris.this.currentImport = ""; } } } } @Override protected MyScalarisWikiModel getWikiModel(Connection connection, WikiPageBeanBase page) { final MyScalarisWikiModel model = new MyScalarisWikiModel(getImagebaseurl(page), getLinkbaseurl(page), connection, namespace); model.setExistingPages(existingPages); return model; } @Override public String getSiteInfoKey() { return ScalarisDataHandlerUnnormalised.getSiteInfoKey(); } @Override public String getPageListKey(int namespace) { return ScalarisDataHandlerUnnormalised.getPageListKey(namespace); } @Override public String getPageCountKey(int namespace) { return ScalarisDataHandlerUnnormalised.getPageCountKey(namespace); } @Override public String getArticleCountKey() { return ScalarisDataHandlerUnnormalised.getArticleCountKey(); } @Override public String getRevKey(String title, int id, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getRevKey(title, id, nsObject); } @Override public String getPageKey(String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getPageKey(title, nsObject); } @Override public String getRevListKey(String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getRevListKey(title, nsObject); } @Override public String getCatPageListKey(String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getCatPageListKey(title, nsObject); } @Override public String getCatPageCountKey(String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getCatPageCountKey(title, nsObject); } @Override public String getTplPageListKey(String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getTplPageListKey(title, nsObject); } @Override public String 
getBackLinksPageListKey(String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getBackLinksPageListKey(title, nsObject); } @Override public String getStatsPageEditsKey() { return ScalarisDataHandlerUnnormalised.getStatsPageEditsKey(); } @Override public String getContributionListKey(String contributor) { return ScalarisDataHandlerUnnormalised.getContributionListKey(contributor); } @Override public ValueResult<String> getDbVersion(Connection connection) { return ScalarisDataHandlerUnnormalised.getDbVersion(connection); } @Override public PageHistoryResult getPageHistory(Connection connection, String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getPageHistory(connection, title, nsObject); } @Override public RevisionResult getRevision(Connection connection, String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getRevision(connection, title, nsObject); } @Override public RevisionResult getRevision(Connection connection, String title, int id, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getRevision(connection, title, id, nsObject); } @Override public ValueResult<List<NormalisedTitle>> getPageList(Connection connection) { return ScalarisDataHandlerUnnormalised.getPageList(connection); } @Override public ValueResult<List<NormalisedTitle>> getPageList(int namespace, Connection connection) { return ScalarisDataHandlerUnnormalised.getPageList(namespace, connection); } @Override public ValueResult<List<NormalisedTitle>> getPagesInCategory(Connection connection, NormalisedTitle title) { return ScalarisDataHandlerNormalised.getPagesInCategory(connection, title); } @Override public ValueResult<List<NormalisedTitle>> getPagesInTemplate(Connection connection, NormalisedTitle title) { return ScalarisDataHandlerNormalised.getPagesInTemplate(connection, title); } @Override public ValueResult<List<NormalisedTitle>> getPagesInTemplates(Connection connection, 
List<NormalisedTitle> titles, String pageTitle) { return ScalarisDataHandlerNormalised.getPagesInTemplates(connection, titles, pageTitle); } @Override public ValueResult<List<NormalisedTitle>> getPagesLinkingTo(Connection connection, String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getPagesLinkingTo(connection, title, nsObject); } @Override public ValueResult<List<Contribution>> getContributions( Connection connection, String contributor) { return ScalarisDataHandlerUnnormalised.getContributions(connection, contributor); } @Override public ValueResult<BigInteger> getPageCount(Connection connection) { return ScalarisDataHandlerUnnormalised.getPageCount(connection); } @Override public ValueResult<BigInteger> getPageCount(int namespace, Connection connection) { return ScalarisDataHandlerUnnormalised.getPageCount(namespace, connection); } @Override public ValueResult<BigInteger> getArticleCount(Connection connection) { return ScalarisDataHandlerUnnormalised.getArticleCount(connection); } @Override public ValueResult<BigInteger> getPagesInCategoryCount(Connection connection, String title, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.getPagesInCategoryCount(connection, title, nsObject); } @Override public ValueResult<BigInteger> getStatsPageEdits(Connection connection) { return ScalarisDataHandlerUnnormalised.getStatsPageEdits(connection); } @Override public ValueResult<NormalisedTitle> getRandomArticle(Connection connection, Random random) { return ScalarisDataHandlerUnnormalised.getRandomArticle(connection, random); } @Override public SavePageResult savePage(Connection connection, String title, Revision newRev, int prevRevId, Map<String, String> restrictions, SiteInfo siteinfo, String username, final MyNamespace nsObject) { return ScalarisDataHandlerUnnormalised.savePage(connection, title, newRev, prevRevId, restrictions, siteinfo, username, nsObject); } }
apache-2.0
MetSystem/fixflow
modules/fixflow-core/src/main/java/org/eclipse/bpmn2/di/BPMNDiagram.java
3011
/**
 * <copyright>
 * 
 * Copyright (c) 2010 SAP AG.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 * 
 * Contributors:
 * Reiner Hille-Doering (SAP AG) - initial API and implementation and/or initial documentation
 * 
 * </copyright>
 * 
 */
package org.eclipse.bpmn2.di;

import java.util.List;

import org.eclipse.dd.di.Diagram;

/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>BPMN Diagram</b></em>'.
 *
 * A BPMNDiagram is the root of one BPMN DI diagram: it owns exactly one
 * {@link BPMNPlane} containing the diagram's graphical elements, plus an
 * unordered list of {@link BPMNLabelStyle} entries.
 * <!-- end-user-doc -->
 *
 * <p>
 * The following features are supported:
 * <ul>
 *   <li>{@link org.eclipse.bpmn2.di.BPMNDiagram#getPlane <em>Plane</em>}</li>
 *   <li>{@link org.eclipse.bpmn2.di.BPMNDiagram#getLabelStyle <em>Label Style</em>}</li>
 * </ul>
 * </p>
 *
 * @see org.eclipse.bpmn2.di.BpmnDiPackage#getBPMNDiagram()
 * @model extendedMetaData="name='BPMNDiagram' kind='elementOnly'"
 * @generated
 */
public interface BPMNDiagram extends Diagram {
    /**
     * Returns the value of the '<em><b>Plane</b></em>' containment reference.
     * <!-- begin-user-doc -->
     * <p>
     * The single, required plane of this diagram. Per the extendedMetaData below
     * it is serialized as the 'BPMNPlane' element in the BPMN DI namespace.
     * </p>
     * <!-- end-user-doc -->
     * @return the value of the '<em>Plane</em>' containment reference.
     * @see #setPlane(BPMNPlane)
     * @see org.eclipse.bpmn2.di.BpmnDiPackage#getBPMNDiagram_Plane()
     * @model containment="true" required="true" ordered="false"
     *        extendedMetaData="kind='element' name='BPMNPlane' namespace='http://www.omg.org/spec/BPMN/20100524/DI'"
     * @generated
     */
    BPMNPlane getPlane();

    /**
     * Sets the value of the '{@link org.eclipse.bpmn2.di.BPMNDiagram#getPlane <em>Plane</em>}' containment reference.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @param value the new value of the '<em>Plane</em>' containment reference.
     * @see #getPlane()
     * @generated
     */
    void setPlane(BPMNPlane value);

    /**
     * Returns the value of the '<em><b>Label Style</b></em>' containment reference list.
     * The list contents are of type {@link org.eclipse.bpmn2.di.BPMNLabelStyle}.
     * <!-- begin-user-doc -->
     * <p>
     * The label styles owned by this diagram. NOTE(review): presumably these are
     * shared styles referenced by the diagram's labels — confirm against the
     * BPMN DI specification.
     * </p>
     * <!-- end-user-doc -->
     * @return the value of the '<em>Label Style</em>' containment reference list.
     * @see org.eclipse.bpmn2.di.BpmnDiPackage#getBPMNDiagram_LabelStyle()
     * @model containment="true" ordered="false"
     *        extendedMetaData="kind='element' name='BPMNLabelStyle' namespace='http://www.omg.org/spec/BPMN/20100524/DI'"
     * @generated
     */
    List<BPMNLabelStyle> getLabelStyle();

} // BPMNDiagram
apache-2.0
pkocandr/indy
models/core-java/src/main/java/org/commonjava/indy/model/spi/IndyAddOnID.java
4082
/**
 * Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.commonjava.indy.model.spi;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;

import java.util.ArrayList;
import java.util.List;

/**
 * Identity/descriptor of an Indy add-on: its name, description, and the UI wiring
 * (routes, menu sections, init Javascript) the add-on contributes.
 *
 * <p>Identity (equals/hashCode/compareTo) is based solely on {@link #getName()},
 * and all three tolerate a {@code null} name.</p>
 */
@ApiModel( description = "Description of an add-on" )
public final class IndyAddOnID
    implements Comparable<IndyAddOnID>
{

    private String name;

    private String description;

    @ApiModelProperty( "List of router entries (mainly for use in the UI)" )
    private List<UIRoute> routes;

    @ApiModelProperty( "List of menu sections (mainly for use in the UI)" )
    private List<UISection> sections;

    @ApiModelProperty( "Path to the Javascript needed to drive the UI for this add-on" )
    private String initJavascriptHref;

    public String getInitJavascriptHref()
    {
        return initJavascriptHref;
    }

    public void setInitJavascriptHref( final String initJavascriptHref )
    {
        this.initJavascriptHref = initJavascriptHref;
    }

    /** Fluent variant of {@link #setInitJavascriptHref(String)}. */
    public IndyAddOnID withInitJavascriptHref( final String initJavascriptHref )
    {
        this.initJavascriptHref = initJavascriptHref;
        return this;
    }

    public String getName()
    {
        return name;
    }

    public void setName( final String name )
    {
        this.name = name;
    }

    /** Fluent variant of {@link #setName(String)}. */
    public IndyAddOnID withName( final String name )
    {
        this.name = name;
        return this;
    }

    public List<UISection> getSections()
    {
        return sections;
    }

    public void setSections( final List<UISection> sections )
    {
        this.sections = sections;
    }

    /** Appends one section, lazily creating the backing list. */
    public IndyAddOnID withSection( final UISection section )
    {
        if ( sections == null )
        {
            sections = new ArrayList<>();
        }

        sections.add( section );

        return this;
    }

    public List<UIRoute> getRoutes()
    {
        return routes;
    }

    public void setRoutes( final List<UIRoute> routes )
    {
        this.routes = routes;
    }

    /** Appends one route, lazily creating the backing list. */
    public IndyAddOnID withRoute( final UIRoute route )
    {
        if ( routes == null )
        {
            routes = new ArrayList<>();
        }

        routes.add( route );

        return this;
    }

    @Override
    public String toString()
    {
        return "IndyAddOnID [" + name + "]";
    }

    /**
     * Orders by name. Null-safe, consistent with the null-tolerant
     * {@link #equals(Object)} / {@link #hashCode()}: a {@code null} name sorts
     * before any non-null name. (Previously this threw NPE for a null name.)
     */
    @Override
    public int compareTo( final IndyAddOnID other )
    {
        if ( name == null )
        {
            return other.name == null ? 0 : -1;
        }
        if ( other.name == null )
        {
            return 1;
        }
        return name.compareTo( other.name );
    }

    @Override
    public int hashCode()
    {
        final int prime = 31;
        int result = 1;
        result = prime * result + ( ( name == null ) ? 0 : name.hashCode() );
        return result;
    }

    @Override
    public boolean equals( final Object obj )
    {
        if ( this == obj )
        {
            return true;
        }
        if ( obj == null )
        {
            return false;
        }
        if ( getClass() != obj.getClass() )
        {
            return false;
        }
        final IndyAddOnID other = (IndyAddOnID) obj;
        if ( name == null )
        {
            if ( other.name != null )
            {
                return false;
            }
        }
        else if ( !name.equals( other.name ) )
        {
            return false;
        }
        return true;
    }

    public String getDescription()
    {
        return description;
    }

    public void setDescription( final String description )
    {
        this.description = description;
    }

}
apache-2.0
BeatCoding/droolsjbpm-integration
kie-server-parent/kie-server-remote/kie-server-client/src/main/java/org/kie/server/client/RuleServicesClient.java
856
/* * Copyright 2015 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.server.client; import org.kie.api.command.Command; import org.kie.server.api.model.ServiceResponse; public interface RuleServicesClient { ServiceResponse<String> executeCommands(String id, String payload); ServiceResponse<String> executeCommands(String id, Command<?> cmd); }
apache-2.0
apache/olingo-odata2
odata2-jpa-processor/jpa-api/src/main/java/org/apache/olingo/odata2/jpa/processor/api/access/JPAEdmBuilder.java
3126
/*******************************************************************************
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * 
 *   http://www.apache.org/licenses/LICENSE-2.0
 * 
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 ******************************************************************************/
package org.apache.olingo.odata2.jpa.processor.api.access;

import org.apache.olingo.odata2.jpa.processor.api.exception.ODataJPAModelException;
import org.apache.olingo.odata2.jpa.processor.api.exception.ODataJPARuntimeException;

/**
 * JPAEdmBuilder interface provides methods for building elements of an Entity Data Model (EDM) from
 * a Java Persistence Model.
 * 
 * 
 * 
 */
public interface JPAEdmBuilder {
  /**
   * The Method builds EDM Elements by transforming JPA MetaModel. The method
   * processes EDM JPA Containers which could be accessed using the following
   * views,
   * <ul>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmAssociationSetView} </li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmAssociationView}</li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmBaseView}</li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmComplexPropertyView} </li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmComplexTypeView}</li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmEntityContainerView} </li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmEntitySetView}</li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmEntityTypeView}</li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmKeyView}</li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmModelView}</li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmNavigationPropertyView} </li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmPropertyView}</li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmReferentialConstraintRoleView} </li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmReferentialConstraintView} </li>
   * <li> {@link org.apache.olingo.odata2.jpa.processor.api.model.JPAEdmSchemaView}</li>
   * </ul>
   * 
   * @throws ODataJPAModelException if transforming the JPA model into EDM elements fails
   * @throws ODataJPARuntimeException if a runtime error occurs during the build
   **/
  public void build() throws ODataJPAModelException, ODataJPARuntimeException;
}
apache-2.0
thc202/zap-extensions
addOns/openapi/src/test/java/org/zaproxy/zap/extension/openapi/AbstractOpenApiTest.java
1239
/* * Zed Attack Proxy (ZAP) and its related class files. * * ZAP is an HTTP/HTTPS proxy for assessing web application security. * * Copyright 2018 The ZAP Development Team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.zaproxy.zap.extension.openapi; import org.junit.jupiter.api.BeforeEach; import org.zaproxy.zap.testutils.TestUtils; /** * Base class for OpenAPI tests. * * <p>It's responsible for {@link #setUpZap() setup ZAP} for each test method. */ public abstract class AbstractOpenApiTest extends TestUtils { @Override protected void setUpMessages() { mockMessages(new ExtensionOpenApi()); } @BeforeEach void setup() throws Exception { setUpZap(); } }
apache-2.0
DevStreet/FinanceAnalytics
projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/model/option/definition/ForwardStartOptionDefinition.java
4716
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 * 
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.model.option.definition;

import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.ZonedDateTime;

import com.opengamma.analytics.financial.model.option.Moneyness;
import com.opengamma.util.ArgumentChecker;
import com.opengamma.util.time.DateUtils;
import com.opengamma.util.time.Expiry;

/**
 * Definition of a European forward-start option: the strike is not known at trade time
 * but is set at a future start time as a multiple {@code alpha} of the spot.
 *
 * <p>{@code alpha} is derived from the requested {@link Moneyness} and {@code percent}
 * in the constructor (ITM call: {@code 1 - percent}; ITM put: {@code 1 + percent};
 * OTM call: {@code percent + 1}; OTM put: {@code 1 - percent}; ATM: {@code 1}).
 */
public class ForwardStartOptionDefinition extends OptionDefinition {
  private static final Logger s_logger = LoggerFactory.getLogger(ForwardStartOptionDefinition.class);
  // Forward-start options are European-exercise: exercise only at expiry.
  private final OptionExerciseFunction<StandardOptionDataBundle> _exerciseFunction = new EuropeanExerciseFunction<>();
  private final OptionPayoffFunction<StandardOptionDataBundle> _payoffFunction = new OptionPayoffFunction<StandardOptionDataBundle>() {

    @Override
    public double getPayoff(final StandardOptionDataBundle data, final Double optionPrice) {
      final ZonedDateTime date = data.getDate();
      // Before the start time the strike is undefined, so the payoff cannot be evaluated.
      if (date.isBefore(getStartTime().getExpiry())) {
        throw new IllegalArgumentException("Cannot get strike before start time: it has not been defined");
      }
      final double spot = data.getSpot();
      final double alpha = getAlpha();
      // NOTE(review): the strike is computed as alpha * spot taken from the data bundle at the
      // valuation date — verify callers only evaluate this with the spot observed at the start time.
      final double strike = spot * alpha;
      return isCall() ? Math.max(0, spot - strike) : Math.max(0, strike - spot);
    }
  };
  /** Time at which the strike is fixed (must not be after expiry). */
  private final Expiry _startTime;
  /** Moneyness percentage used to derive alpha (ignored for ATM). */
  private final double _percent;
  /** Requested moneyness of the option at its start time. */
  private final Moneyness _moneyness;
  /** Strike multiplier: strike = alpha * spot at the start time. */
  private final double _alpha;

  /**
   * Creates an ATM forward-start option (alpha = 1).
   *
   * @param expiry the option expiry, not null
   * @param isCall true for a call, false for a put
   * @param startTime the forward start time, not null, not after expiry
   */
  public ForwardStartOptionDefinition(final Expiry expiry, final Boolean isCall, final Expiry startTime) {
    this(expiry, isCall, startTime, 1, Moneyness.ATM);
  }

  /**
   * Creates a forward-start option with the given moneyness.
   *
   * @param expiry the option expiry, not null
   * @param isCall true for a call, false for a put
   * @param startTime the forward start time, not null, not after expiry
   * @param percent the moneyness percentage, not negative; ignored (with an info log) when moneyness is ATM
   * @param moneyness the moneyness (ITM, OTM or ATM), not null
   */
  public ForwardStartOptionDefinition(final Expiry expiry, final Boolean isCall, final Expiry startTime, final double percent, final Moneyness moneyness) {
    super(null, expiry, isCall); // strike is unknown at construction, hence null
    Validate.notNull(startTime);
    ArgumentChecker.notNegative(percent, "percent");
    Validate.notNull(moneyness, "moneyness");
    if (expiry.getExpiry().isBefore(startTime.getExpiry())) {
      throw new IllegalArgumentException("The forward start time must be before the expiry of the option");
    }
    if (moneyness == Moneyness.ATM && percent != 1) {
      s_logger.info("Option is ATM but percentage is not one; ignoring value for percent");
    }
    _startTime = startTime;
    _percent = percent;
    _moneyness = moneyness;
    // Map moneyness + option direction to the strike multiplier alpha.
    switch (moneyness) {
      case ITM:
        _alpha = isCall ? 1 - percent : 1 + percent;
        break;
      case OTM:
        _alpha = isCall ? percent + 1 : 1 - percent;
        break;
      case ATM:
        _alpha = 1;
        break;
      default:
        throw new IllegalArgumentException("Can only handle ITM, OTM and ATM");
    }
  }

  /** @return the forward start time at which the strike is fixed */
  public Expiry getStartTime() {
    return _startTime;
  }

  /** @return the strike multiplier alpha (strike = alpha * spot at the start time) */
  public double getAlpha() {
    return _alpha;
  }

  /** @return the moneyness percentage supplied at construction */
  public double getPercent() {
    return _percent;
  }

  /** @return the moneyness supplied at construction */
  public Moneyness getMoneyness() {
    return _moneyness;
  }

  @Override
  public OptionExerciseFunction<StandardOptionDataBundle> getExerciseFunction() {
    return _exerciseFunction;
  }

  @Override
  public OptionPayoffFunction<StandardOptionDataBundle> getPayoffFunction() {
    return _payoffFunction;
  }

  /**
   * Returns the year fraction from the given date to the start time.
   *
   * @param date the valuation date, must not be after the start time
   * @return the time to start in years
   * @throws IllegalArgumentException if {@code date} is after the start time
   */
  public double getTimeToStart(final ZonedDateTime date) {
    if (date.isAfter(getStartTime().getExpiry())) {
      throw new IllegalArgumentException("Date " + date + " is after startTime " + getStartTime());
    }
    return DateUtils.getDifferenceInYears(date, getStartTime().getExpiry());
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = super.hashCode();
    result = prime * result + ((_moneyness == null) ? 0 : _moneyness.hashCode());
    long temp;
    temp = Double.doubleToLongBits(_percent);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    result = prime * result + ((_startTime == null) ? 0 : _startTime.hashCode());
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (!super.equals(obj)) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final ForwardStartOptionDefinition other = (ForwardStartOptionDefinition) obj;
    if (!ObjectUtils.equals(_moneyness, other._moneyness)) {
      return false;
    }
    if (Double.doubleToLongBits(_percent) != Double.doubleToLongBits(other._percent)) {
      return false;
    }
    return ObjectUtils.equals(_startTime, other._startTime);
  }
}
apache-2.0
jeorme/OG-Platform
projects/OG-Analytics/src/test/java/com/opengamma/analytics/financial/interestrate/bond/provider/BondTransactionDiscountingMethodTest.java
29558
/** * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.interestrate.bond.provider; import static org.testng.AssertJUnit.assertEquals; import static org.testng.AssertJUnit.assertFalse; import org.testng.annotations.Test; import org.threeten.bp.Period; import org.threeten.bp.ZonedDateTime; import com.opengamma.analytics.financial.instrument.bond.BondFixedSecurityDefinition; import com.opengamma.analytics.financial.instrument.bond.BondFixedTransactionDefinition; import com.opengamma.analytics.financial.instrument.bond.BondIborSecurityDefinition; import com.opengamma.analytics.financial.instrument.bond.BondIborTransactionDefinition; import com.opengamma.analytics.financial.instrument.index.IborIndex; import com.opengamma.analytics.financial.interestrate.annuity.derivative.Annuity; import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityCouponFixed; import com.opengamma.analytics.financial.interestrate.annuity.derivative.AnnuityPaymentFixed; import com.opengamma.analytics.financial.interestrate.bond.definition.BondFixedTransaction; import com.opengamma.analytics.financial.interestrate.bond.definition.BondIborTransaction; import com.opengamma.analytics.financial.interestrate.payments.derivative.Payment; import com.opengamma.analytics.financial.interestrate.payments.derivative.PaymentFixed; import com.opengamma.analytics.financial.provider.calculator.discounting.PresentValueCurveSensitivityDiscountingCalculator; import com.opengamma.analytics.financial.provider.calculator.discounting.PresentValueDiscountingCalculator; import com.opengamma.analytics.financial.provider.calculator.issuer.ParSpreadRateCurveSensitivityIssuerDiscountingCalculator; import com.opengamma.analytics.financial.provider.calculator.issuer.ParSpreadRateIssuerDiscountingCalculator; import 
com.opengamma.analytics.financial.provider.calculator.issuer.PresentValueCurveSensitivityIssuerCalculator; import com.opengamma.analytics.financial.provider.calculator.issuer.PresentValueIssuerCalculator; import com.opengamma.analytics.financial.provider.description.IssuerProviderDiscountDataSets; import com.opengamma.analytics.financial.provider.description.interestrate.IssuerProviderDiscount; import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderDiscountingDecoratedIssuer; import com.opengamma.analytics.financial.provider.description.interestrate.MulticurveProviderInterface; import com.opengamma.analytics.financial.provider.description.interestrate.ParameterIssuerProviderInterface; import com.opengamma.analytics.financial.provider.sensitivity.issuer.SimpleParameterSensitivityIssuerCalculator; import com.opengamma.analytics.financial.provider.sensitivity.issuer.SimpleParameterSensitivityIssuerDiscountInterpolatedFDCalculator; import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MulticurveSensitivity; import com.opengamma.analytics.financial.provider.sensitivity.multicurve.MultipleCurrencyMulticurveSensitivity; import com.opengamma.analytics.financial.provider.sensitivity.multicurve.SimpleParameterSensitivity; import com.opengamma.analytics.financial.schedule.ScheduleCalculator; import com.opengamma.analytics.financial.util.AssertSensitivityObjects; import com.opengamma.analytics.util.time.TimeCalculator; import com.opengamma.financial.convention.businessday.BusinessDayConvention; import com.opengamma.financial.convention.businessday.BusinessDayConventions; import com.opengamma.financial.convention.calendar.Calendar; import com.opengamma.financial.convention.calendar.MondayToFridayCalendar; import com.opengamma.financial.convention.daycount.DayCount; import com.opengamma.financial.convention.daycount.DayCounts; import com.opengamma.financial.convention.yield.YieldConvention; import 
com.opengamma.financial.convention.yield.YieldConventionFactory; import com.opengamma.timeseries.DoubleTimeSeries; import com.opengamma.timeseries.precise.zdt.ImmutableZonedDateTimeDoubleTimeSeries; import com.opengamma.util.money.Currency; import com.opengamma.util.money.MultipleCurrencyAmount; import com.opengamma.util.test.TestGroup; import com.opengamma.util.time.DateUtils; /** * Test. */ @Test(groups = TestGroup.UNIT) public class BondTransactionDiscountingMethodTest { private final static IssuerProviderDiscount ISSUER_MULTICURVES = IssuerProviderDiscountDataSets.getIssuerSpecificProvider(); private final static String[] ISSUER_NAMES = IssuerProviderDiscountDataSets.getIssuerNames(); private static final Currency CUR = Currency.EUR; private static final Calendar CALENDAR = new MondayToFridayCalendar("A"); // to derivatives: first coupon private static final ZonedDateTime REFERENCE_DATE = DateUtils.getUTCDate(2011, 8, 18); private static final ZonedDateTime REFERENCE_DATE_2 = DateUtils.getUTCDate(2012, 7, 12); //Fixed Coupon Semi-annual 5Y private static final Period PAYMENT_TENOR_FIXED = Period.ofMonths(6); private static final DayCount DAY_COUNT_FIXED = DayCounts.ACT_ACT_ICMA; private static final BusinessDayConvention BUSINESS_DAY_FIXED = BusinessDayConventions.FOLLOWING; private static final boolean IS_EOM_FIXED = false; private static final Period BOND_TENOR_FIXED = Period.ofYears(5); private static final int SETTLEMENT_DAYS_FIXED = 3; private static final ZonedDateTime START_ACCRUAL_DATE_FIXED = DateUtils.getUTCDate(2011, 7, 13); private static final ZonedDateTime MATURITY_DATE_FIXED = START_ACCRUAL_DATE_FIXED.plus(BOND_TENOR_FIXED); private static final double RATE_FIXED = 0.0325; private static final YieldConvention YIELD_CONVENTION_FIXED = YieldConventionFactory.INSTANCE.getYieldConvention("STREET CONVENTION"); private static final BondFixedSecurityDefinition BOND_DESCRIPTION_DEFINITION_FIXED = BondFixedSecurityDefinition.from(CUR, MATURITY_DATE_FIXED, 
START_ACCRUAL_DATE_FIXED, PAYMENT_TENOR_FIXED, RATE_FIXED, SETTLEMENT_DAYS_FIXED, CALENDAR, DAY_COUNT_FIXED, BUSINESS_DAY_FIXED, YIELD_CONVENTION_FIXED, IS_EOM_FIXED, ISSUER_NAMES[1]); // Transaction fixed private static final double PRICE_CLEAN_FIXED = 0.90; //clean price private static final double QUANTITY_FIXED = 100000000; //100m // Transaction past private static final ZonedDateTime BOND_SETTLEMENT_DATE_FIXED_1 = DateUtils.getUTCDate(2011, 8, 16); private static final AnnuityCouponFixed COUPON_TR_FIXED_1 = BOND_DESCRIPTION_DEFINITION_FIXED.getCoupons().toDerivative(REFERENCE_DATE); private static final AnnuityPaymentFixed NOMINAL_TR_FIXED_1 = (AnnuityPaymentFixed) BOND_DESCRIPTION_DEFINITION_FIXED.getNominal().toDerivative(REFERENCE_DATE); private static final BondFixedTransactionDefinition BOND_TRANSACTION_DEFINITION_FIXED_1 = new BondFixedTransactionDefinition(BOND_DESCRIPTION_DEFINITION_FIXED, QUANTITY_FIXED, BOND_SETTLEMENT_DATE_FIXED_1, PRICE_CLEAN_FIXED); private static final BondFixedTransaction BOND_TRANSACTION_FIXED_1 = BOND_TRANSACTION_DEFINITION_FIXED_1.toDerivative(REFERENCE_DATE); // Transaction today private static final ZonedDateTime BOND_SETTLEMENT_DATE_FIXED_2 = DateUtils.getUTCDate(2011, 8, 18); private static final double BOND_SETTLEMENT_TIME_FIXED_2 = TimeCalculator.getTimeBetween(REFERENCE_DATE, BOND_SETTLEMENT_DATE_FIXED_2); private static final AnnuityCouponFixed COUPON_TR_FIXED_2 = BOND_DESCRIPTION_DEFINITION_FIXED.getCoupons().toDerivative(REFERENCE_DATE).trimBefore(BOND_SETTLEMENT_TIME_FIXED_2); private static final AnnuityPaymentFixed NOMINAL_TR_FIXED_2 = (AnnuityPaymentFixed) BOND_DESCRIPTION_DEFINITION_FIXED.getNominal().toDerivative(REFERENCE_DATE) .trimBefore(BOND_SETTLEMENT_TIME_FIXED_2); private static final BondFixedTransactionDefinition BOND_TRANSACTION_DEFINITION_FIXED_2 = new BondFixedTransactionDefinition(BOND_DESCRIPTION_DEFINITION_FIXED, QUANTITY_FIXED, BOND_SETTLEMENT_DATE_FIXED_2, PRICE_CLEAN_FIXED); private static 
final PaymentFixed BOND_SETTLEMENT_FIXED_2 = new PaymentFixed(CUR, BOND_SETTLEMENT_TIME_FIXED_2, -(PRICE_CLEAN_FIXED + BOND_TRANSACTION_DEFINITION_FIXED_2.getAccruedInterestAtSettlement()) * QUANTITY_FIXED); private static final BondFixedTransaction BOND_TRANSACTION_FIXED_2 = BOND_TRANSACTION_DEFINITION_FIXED_2.toDerivative(REFERENCE_DATE); // Transaction future private static final ZonedDateTime BOND_SETTLEMENT_DATE_FIXED_3 = DateUtils.getUTCDate(2011, 8, 24); private static final double BOND_SETTLEMENT_TIME_FIXED_3 = TimeCalculator.getTimeBetween(REFERENCE_DATE, BOND_SETTLEMENT_DATE_FIXED_3); private static final AnnuityCouponFixed COUPON_TR_FIXED_3 = BOND_DESCRIPTION_DEFINITION_FIXED.getCoupons().toDerivative(REFERENCE_DATE).trimBefore(BOND_SETTLEMENT_TIME_FIXED_3); private static final AnnuityPaymentFixed NOMINAL_TR_FIXED_3 = (AnnuityPaymentFixed) BOND_DESCRIPTION_DEFINITION_FIXED.getNominal().toDerivative(REFERENCE_DATE) .trimBefore(BOND_SETTLEMENT_TIME_FIXED_3); private static final BondFixedTransactionDefinition BOND_TRANSACTION_DEFINITION_FIXED_3 = new BondFixedTransactionDefinition(BOND_DESCRIPTION_DEFINITION_FIXED, QUANTITY_FIXED, BOND_SETTLEMENT_DATE_FIXED_3, PRICE_CLEAN_FIXED); private static final PaymentFixed BOND_SETTLEMENT_FIXED_3 = new PaymentFixed(CUR, BOND_SETTLEMENT_TIME_FIXED_3, -(PRICE_CLEAN_FIXED + BOND_TRANSACTION_DEFINITION_FIXED_3.getAccruedInterestAtSettlement()) * QUANTITY_FIXED); private static final BondFixedTransaction BOND_TRANSACTION_FIXED_3 = BOND_TRANSACTION_DEFINITION_FIXED_3.toDerivative(REFERENCE_DATE); private static final BondFixedTransaction BOND_TRANSACTION_FIXED_4 = BOND_TRANSACTION_DEFINITION_FIXED_3.toDerivative(REFERENCE_DATE_2); private static final ZonedDateTime SETTLE_DATE_STD = ScheduleCalculator.getAdjustedDate(REFERENCE_DATE, BOND_DESCRIPTION_DEFINITION_FIXED.getSettlementDays(), CALENDAR); private static final BondFixedTransactionDefinition BOND_FIXED_STD_DEFINITION = new 
BondFixedTransactionDefinition(BOND_DESCRIPTION_DEFINITION_FIXED, QUANTITY_FIXED, SETTLE_DATE_STD, PRICE_CLEAN_FIXED); private static final BondFixedTransaction BOND_FIXED_STD = BOND_FIXED_STD_DEFINITION.toDerivative(REFERENCE_DATE); // Ibor coupon Quarterly 2Y private static final DayCount DAY_COUNT_FRN = DayCounts.ACT_ACT_ISDA; private static final BusinessDayConvention BUSINESS_DAY_FRN = BusinessDayConventions.FOLLOWING; private static final boolean IS_EOM_FRN = false; private static final Period IBOR_TENOR = Period.ofMonths(3); private static final DayCount IBOR_DAY_COUNT = DayCounts.ACT_360; private static final int IBOR_SPOT_LAG = 2; private static final BusinessDayConvention IBOR_BUSINESS_DAY = BusinessDayConventions.MODIFIED_FOLLOWING; private static final boolean IBOR_IS_EOM = false; private static final IborIndex IBOR_INDEX = new IborIndex(CUR, IBOR_TENOR, IBOR_SPOT_LAG, IBOR_DAY_COUNT, IBOR_BUSINESS_DAY, IBOR_IS_EOM, "Ibor"); private static final Period BOND_TENOR_FRN = Period.ofYears(2); private static final int SETTLEMENT_DAYS_FRN = 3; // Standard for euro-bonds. 
private static final ZonedDateTime START_ACCRUAL_DATE_FRN = DateUtils.getUTCDate(2011, 7, 13); private static final ZonedDateTime MATURITY_DATE_FRN = START_ACCRUAL_DATE_FRN.plus(BOND_TENOR_FRN); private static final BondIborSecurityDefinition BOND_DESCRIPTION_DEFINITION_FRN = BondIborSecurityDefinition.from(MATURITY_DATE_FRN, START_ACCRUAL_DATE_FRN, IBOR_INDEX, SETTLEMENT_DAYS_FRN, DAY_COUNT_FRN, BUSINESS_DAY_FRN, IS_EOM_FRN, ISSUER_NAMES[1], CALENDAR); // Transaction FRN private static final double FIRST_FIXING = 0.02; private static final double PRICE_FRN = 0.99; private static final ZonedDateTime BOND_SETTLEMENT_DATE_FRN = DateUtils.getUTCDate(2011, 8, 24); private static final double BOND_SETTLEMENT_TIME_FRN = TimeCalculator.getTimeBetween(REFERENCE_DATE, BOND_SETTLEMENT_DATE_FRN); private static final double QUANTITY_FRN = 100000000; //100m private static final DoubleTimeSeries<ZonedDateTime> FIXING_TS = ImmutableZonedDateTimeDoubleTimeSeries.ofUTC(new ZonedDateTime[] {BOND_DESCRIPTION_DEFINITION_FRN.getCoupons().getNthPayment(0) .getFixingDate() }, new double[] {FIRST_FIXING }); private static final AnnuityPaymentFixed NOMINAL_TR_1_FRN = (AnnuityPaymentFixed) BOND_DESCRIPTION_DEFINITION_FRN.getNominal().toDerivative(REFERENCE_DATE) .trimBefore(BOND_SETTLEMENT_TIME_FRN); private static final Annuity<? 
extends Payment> COUPON_TR_1_FRN = BOND_DESCRIPTION_DEFINITION_FRN.getCoupons().toDerivative(REFERENCE_DATE, FIXING_TS) .trimBefore(BOND_SETTLEMENT_TIME_FRN); private static final BondIborTransactionDefinition BOND_TRANSACTION_DEFINITION_FRN = new BondIborTransactionDefinition(BOND_DESCRIPTION_DEFINITION_FRN, QUANTITY_FRN, BOND_SETTLEMENT_DATE_FRN, PRICE_FRN); private static final PaymentFixed BOND_SETTLEMENT_FRN = new PaymentFixed(CUR, BOND_SETTLEMENT_TIME_FRN, -PRICE_FRN * QUANTITY_FRN); private static final BondIborTransaction BOND_TRANSACTION_FRN = BOND_TRANSACTION_DEFINITION_FRN.toDerivative(REFERENCE_DATE, FIXING_TS); // Calculators private static final PresentValueDiscountingCalculator PVDC = PresentValueDiscountingCalculator.getInstance(); private static final PresentValueIssuerCalculator PVIC = PresentValueIssuerCalculator.getInstance(); private static final PresentValueCurveSensitivityIssuerCalculator PVCSIC = PresentValueCurveSensitivityIssuerCalculator.getInstance(); private static final PresentValueCurveSensitivityDiscountingCalculator PVCSDC = PresentValueCurveSensitivityDiscountingCalculator.getInstance(); private static final ParSpreadRateIssuerDiscountingCalculator PSRIDC = ParSpreadRateIssuerDiscountingCalculator.getInstance(); private static final ParSpreadRateCurveSensitivityIssuerDiscountingCalculator PSRCSIDC = ParSpreadRateCurveSensitivityIssuerDiscountingCalculator.getInstance(); private static final double SHIFT_FD = 1.0E-6; private static final SimpleParameterSensitivityIssuerCalculator<ParameterIssuerProviderInterface> PS_I_AD = new SimpleParameterSensitivityIssuerCalculator<>(PSRCSIDC); private static final SimpleParameterSensitivityIssuerDiscountInterpolatedFDCalculator PS_I_FD = new SimpleParameterSensitivityIssuerDiscountInterpolatedFDCalculator(PSRIDC, SHIFT_FD); private static final BondTransactionDiscountingMethod METHOD_BOND_TR = BondTransactionDiscountingMethod.getInstance(); private static final BondSecurityDiscountingMethod 
METHOD_BOND_SEC = BondSecurityDiscountingMethod.getInstance(); private static final double TOLERANCE_PV_DELTA = 1.0E-2; private static final double TOLERANCE_PV = 1.0E-2; private static final double TOLERANCE_PRICE = 1.0E-8; private static final double TOLERANCE_PRICE_DELTA = 1.0E-8; @Test public void testPVFixedBondSettlePast() { final MultipleCurrencyAmount pv = METHOD_BOND_TR.presentValue(BOND_TRANSACTION_FIXED_1, ISSUER_MULTICURVES); final MulticurveProviderInterface multicurvesDecorated = new MulticurveProviderDiscountingDecoratedIssuer(ISSUER_MULTICURVES, CUR, BOND_TRANSACTION_FIXED_1.getBondTransaction().getIssuerEntity()); final MultipleCurrencyAmount pvNominal = NOMINAL_TR_FIXED_1.accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvCoupon = COUPON_TR_FIXED_1.accept(PVDC, multicurvesDecorated); assertEquals("Fixed bond present value", (pvNominal.getAmount(CUR) + pvCoupon.getAmount(CUR)) * QUANTITY_FIXED, pv.getAmount(CUR)); } @Test public void testPVCleanPriceFixedBondSettlePast() { final MultipleCurrencyAmount pv = METHOD_BOND_TR.presentValueFromCleanPrice(BOND_TRANSACTION_FIXED_1, ISSUER_MULTICURVES, PRICE_CLEAN_FIXED); final MultipleCurrencyAmount pvSec = METHOD_BOND_SEC.presentValueFromCleanPrice(BOND_TRANSACTION_FIXED_1.getBondStandard(), ISSUER_MULTICURVES.getMulticurveProvider(), PRICE_CLEAN_FIXED); final double df = ISSUER_MULTICURVES.getMulticurveProvider().getDiscountFactor(CUR, BOND_TRANSACTION_FIXED_1.getBondStandard().getSettlementTime()); final double pvExpected = (PRICE_CLEAN_FIXED * BOND_TRANSACTION_FIXED_1.getNotionalStandard() + BOND_TRANSACTION_FIXED_1.getBondStandard().getAccruedInterest()) * df * QUANTITY_FIXED; assertEquals("Fixed bond present value", pvSec.getAmount(CUR) * QUANTITY_FIXED, pv.getAmount(CUR), TOLERANCE_PV_DELTA); assertEquals("Fixed bond present value", pvExpected, pv.getAmount(CUR), TOLERANCE_PV_DELTA); } @Test public void testPVYieldFixedBondSettlePast() { final double yield = 0.05; final 
MultipleCurrencyAmount pv = METHOD_BOND_TR.presentValueFromYield(BOND_TRANSACTION_FIXED_1, ISSUER_MULTICURVES, yield); final double dirtyPrice = METHOD_BOND_SEC.dirtyPriceFromYield(BOND_TRANSACTION_FIXED_1.getBondStandard(), yield); final double df = ISSUER_MULTICURVES.getMulticurveProvider().getDiscountFactor(CUR, BOND_TRANSACTION_FIXED_1.getBondStandard().getSettlementTime()); final double pvExpected = (dirtyPrice) * df * QUANTITY_FIXED; assertEquals("Fixed bond present value", pvExpected, pv.getAmount(CUR), TOLERANCE_PV_DELTA); } @Test public void testPVFixedBondSettleToday() { final MultipleCurrencyAmount pv = METHOD_BOND_TR.presentValue(BOND_TRANSACTION_FIXED_2, ISSUER_MULTICURVES); final MulticurveProviderInterface multicurvesDecorated = new MulticurveProviderDiscountingDecoratedIssuer(ISSUER_MULTICURVES, CUR, BOND_TRANSACTION_FIXED_1.getBondTransaction().getIssuerEntity()); final MultipleCurrencyAmount pvNominal = NOMINAL_TR_FIXED_2.accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvCoupon = COUPON_TR_FIXED_2.accept(PVDC, multicurvesDecorated); final double pvSettlement = BOND_SETTLEMENT_FIXED_2.getAmount(); assertEquals("Fixed bond present value", (pvNominal.getAmount(CUR) + pvCoupon.getAmount(CUR)) * QUANTITY_FIXED + pvSettlement, pv.getAmount(CUR)); } @Test public void testPVFixedBondSettleFuture() { final MultipleCurrencyAmount pv = METHOD_BOND_TR.presentValue(BOND_TRANSACTION_FIXED_3, ISSUER_MULTICURVES); final MulticurveProviderInterface multicurvesDecorated = new MulticurveProviderDiscountingDecoratedIssuer(ISSUER_MULTICURVES, CUR, BOND_TRANSACTION_FIXED_1.getBondTransaction().getIssuerEntity()); final MultipleCurrencyAmount pvNominal = NOMINAL_TR_FIXED_3.accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvCoupon = COUPON_TR_FIXED_3.accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvSettlement = BOND_SETTLEMENT_FIXED_3.accept(PVDC, ISSUER_MULTICURVES.getMulticurveProvider()); assertEquals("Fixed bond 
present value", (pvNominal.getAmount(CUR) + pvCoupon.getAmount(CUR)) * QUANTITY_FIXED + pvSettlement.getAmount(CUR), pv.getAmount(CUR)); } @Test /** * Test the PV when a coupon payment is between today and standard settlement date and pv is computed from conventional clean price. */ public void testPVCleanPriceFixedBondCouponBeforeSettle() { final MultipleCurrencyAmount pv = METHOD_BOND_TR.presentValueFromCleanPrice(BOND_TRANSACTION_FIXED_4, ISSUER_MULTICURVES, PRICE_CLEAN_FIXED); final MultipleCurrencyAmount pvSec = METHOD_BOND_SEC.presentValueFromCleanPrice(BOND_TRANSACTION_FIXED_4.getBondStandard(), ISSUER_MULTICURVES.getMulticurveProvider(), PRICE_CLEAN_FIXED); final MulticurveProviderInterface multicurvesDecorated = new MulticurveProviderDiscountingDecoratedIssuer(ISSUER_MULTICURVES, CUR, BOND_TRANSACTION_FIXED_4.getBondTransaction().getIssuerEntity()); final MultipleCurrencyAmount pvNominalStandard = BOND_TRANSACTION_FIXED_4.getBondStandard().getNominal().accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvCouponStandard = BOND_TRANSACTION_FIXED_4.getBondStandard().getCoupon().accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvDiscountingStandard = pvNominalStandard.plus(pvCouponStandard); final MultipleCurrencyAmount pvNominalTransaction = BOND_TRANSACTION_FIXED_4.getBondTransaction().getNominal().accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvCouponTransaction = BOND_TRANSACTION_FIXED_4.getBondTransaction().getCoupon().accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvDiscountingTransaction = pvNominalTransaction.plus(pvCouponTransaction); final double pvExpected = (pvDiscountingTransaction.getAmount(CUR) - pvDiscountingStandard.getAmount(CUR) + pvSec.getAmount(CUR)) * QUANTITY_FIXED; assertEquals("Fixed coupon bond present value", pvExpected, pv.getAmount(CUR), TOLERANCE_PV_DELTA); assertFalse("Fixed coupon bond present value", Math.abs(pvSec.getAmount(CUR) * QUANTITY_FIXED - 
pv.getAmount(CUR)) < TOLERANCE_PV_DELTA); } @Test /** * Test the PV when a coupon payment is between today and standard settlement date and pv is computed from conventional yield. */ public void testPVYieldFixedBondCouponBeforeSettle() { final double yield = 0.05; final MultipleCurrencyAmount pv = METHOD_BOND_TR.presentValueFromYield(BOND_TRANSACTION_FIXED_4, ISSUER_MULTICURVES, yield); final double dirtyPrice = METHOD_BOND_SEC.dirtyPriceFromYield(BOND_TRANSACTION_FIXED_4.getBondStandard(), yield); final double df = ISSUER_MULTICURVES.getMulticurveProvider().getDiscountFactor(CUR, BOND_TRANSACTION_FIXED_4.getBondStandard().getSettlementTime()); final MultipleCurrencyAmount pvSec = MultipleCurrencyAmount.of(CUR, dirtyPrice * df); final MulticurveProviderInterface multicurvesDecorated = new MulticurveProviderDiscountingDecoratedIssuer(ISSUER_MULTICURVES, CUR, BOND_TRANSACTION_FIXED_4.getBondTransaction().getIssuerEntity()); final MultipleCurrencyAmount pvNominalStandard = BOND_TRANSACTION_FIXED_4.getBondStandard().getNominal().accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvCouponStandard = BOND_TRANSACTION_FIXED_4.getBondStandard().getCoupon().accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvDiscountingStandard = pvNominalStandard.plus(pvCouponStandard); final MultipleCurrencyAmount pvNominalTransaction = BOND_TRANSACTION_FIXED_4.getBondTransaction().getNominal().accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvCouponTransaction = BOND_TRANSACTION_FIXED_4.getBondTransaction().getCoupon().accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvDiscountingTransaction = pvNominalTransaction.plus(pvCouponTransaction); final double pvExpected = (pvDiscountingTransaction.getAmount(CUR) - pvDiscountingStandard.getAmount(CUR) + pvSec.getAmount(CUR)) * QUANTITY_FIXED; assertEquals("Fixed coupon bond present value", pvExpected, pv.getAmount(CUR), TOLERANCE_PV_DELTA); assertFalse("Fixed coupon bond present 
value", Math.abs(pvSec.getAmount(CUR) * QUANTITY_FIXED - pv.getAmount(CUR)) < TOLERANCE_PV_DELTA); } @Test public void testPVCSFixedBond() { final MultipleCurrencyMulticurveSensitivity pvs = METHOD_BOND_TR.presentValueCurveSensitivity(BOND_TRANSACTION_FIXED_3, ISSUER_MULTICURVES); final MulticurveProviderInterface multicurvesDecorated = new MulticurveProviderDiscountingDecoratedIssuer(ISSUER_MULTICURVES, CUR, BOND_TRANSACTION_FIXED_1.getBondTransaction().getIssuerEntity()); final MultipleCurrencyMulticurveSensitivity pvsNominal = NOMINAL_TR_FIXED_3.accept(PVCSDC, multicurvesDecorated); final MultipleCurrencyMulticurveSensitivity pvsCoupon = COUPON_TR_FIXED_3.accept(PVCSDC, multicurvesDecorated); final MultipleCurrencyMulticurveSensitivity pvsSettlement = BOND_SETTLEMENT_FIXED_3.accept(PVCSDC, ISSUER_MULTICURVES.getMulticurveProvider()); final MultipleCurrencyMulticurveSensitivity expectedPvs = pvsNominal.plus(pvsCoupon).multipliedBy(QUANTITY_FRN).plus(pvsSettlement).cleaned(); assertEquals("Fixed bond present value sensitivity", expectedPvs, pvs.cleaned()); } @Test public void testPVCSFixedBondMethodCalculator() { final MultipleCurrencyAmount pvMethod = METHOD_BOND_TR.presentValue(BOND_TRANSACTION_FIXED_3, ISSUER_MULTICURVES); final MultipleCurrencyAmount pvCalculator = BOND_TRANSACTION_FIXED_3.accept(PVIC, ISSUER_MULTICURVES); assertEquals("Fixed bond present value: Method vs Calculator", pvMethod, pvCalculator); final MultipleCurrencyMulticurveSensitivity pvsMethod = METHOD_BOND_TR.presentValueCurveSensitivity(BOND_TRANSACTION_FIXED_3, ISSUER_MULTICURVES); final MultipleCurrencyMulticurveSensitivity pvsCalculator = BOND_TRANSACTION_FIXED_3.accept(PVCSIC, ISSUER_MULTICURVES); AssertSensitivityObjects.assertEquals("Fixed bond present value sensitivity: Method vs Calculator", pvsMethod, pvsCalculator, TOLERANCE_PV_DELTA); } @Test public void parSpreadMarketQuote() { final double parSpreadMarketQuote = METHOD_BOND_TR.parSpread(BOND_FIXED_STD, ISSUER_MULTICURVES); 
final BondFixedTransaction bond0 = new BondFixedTransaction(BOND_FIXED_STD.getBondTransaction(), QUANTITY_FIXED, PRICE_CLEAN_FIXED + parSpreadMarketQuote, BOND_FIXED_STD.getBondStandard(), BOND_FIXED_STD.getNotionalStandard()); final MultipleCurrencyAmount pv0 = METHOD_BOND_TR.presentValue(bond0, ISSUER_MULTICURVES); assertEquals("Fixed bond: par spread market quote", pv0.getAmount(CUR), 0, TOLERANCE_PV); } @Test public void parSpreadYield() { final ZonedDateTime settleDate = ScheduleCalculator.getAdjustedDate(REFERENCE_DATE, BOND_DESCRIPTION_DEFINITION_FIXED.getSettlementDays(), CALENDAR); final BondFixedTransactionDefinition bondTrDefinition = new BondFixedTransactionDefinition(BOND_DESCRIPTION_DEFINITION_FIXED, QUANTITY_FIXED, settleDate, PRICE_CLEAN_FIXED); final BondFixedTransaction bondTr = bondTrDefinition.toDerivative(REFERENCE_DATE); final double parSpreadYield = METHOD_BOND_TR.parSpreadYield(bondTr, ISSUER_MULTICURVES); final double yield = METHOD_BOND_SEC.yieldFromCleanPrice(bondTr.getBondStandard(), PRICE_CLEAN_FIXED); final BondFixedTransactionDefinition bond0Definition = BondFixedTransactionDefinition.fromYield(BOND_DESCRIPTION_DEFINITION_FIXED, QUANTITY_FIXED, settleDate, yield + parSpreadYield); final BondFixedTransaction bond0 = bond0Definition.toDerivative(REFERENCE_DATE); final MultipleCurrencyAmount pv0 = METHOD_BOND_TR.presentValue(bond0, ISSUER_MULTICURVES); assertEquals("Fixed bond: par spread yield", pv0.getAmount(CUR), 0, TOLERANCE_PV); final Double parSpreadYieldCalculator = bondTr.accept(PSRIDC, ISSUER_MULTICURVES); assertEquals("Fixed bond: par spread yield", parSpreadYieldCalculator, parSpreadYield, TOLERANCE_PRICE); } @Test /** * Tests parSpreadYield curve sensitivity: explicit formula versus finite difference. 
*/ public void parSpreadYieldCurveSensitivityMethodVsCalculator() { final SimpleParameterSensitivity pspsDepositExact = PS_I_AD.calculateSensitivity(BOND_FIXED_STD, ISSUER_MULTICURVES, ISSUER_MULTICURVES.getAllNames()); final SimpleParameterSensitivity pspsDepositFD = PS_I_FD.calculateSensitivity(BOND_FIXED_STD, ISSUER_MULTICURVES); AssertSensitivityObjects.assertEquals("BondTransactionDiscountingMethod: parSpreadYield curve sensitivity", pspsDepositExact, pspsDepositFD, TOLERANCE_PV_DELTA); } @Test public void parSpreadYieldCurveSensitivity() { final MulticurveSensitivity pscsyCalculator = BOND_FIXED_STD.accept(PSRCSIDC, ISSUER_MULTICURVES); final MulticurveSensitivity psycsMethod = METHOD_BOND_TR.parSpreadYieldCurveSensitivity(BOND_FIXED_STD, ISSUER_MULTICURVES); AssertSensitivityObjects.assertEquals("BondTransactionDiscountingMethod: parSpreadYield curve sensitivity", psycsMethod, pscsyCalculator, TOLERANCE_PRICE_DELTA); } @Test(enabled = false) //FIXME change the test and the pv method with correct accrual interests mechanism. public void testPVIborBond() { final MultipleCurrencyAmount pv = METHOD_BOND_TR.presentValue(BOND_TRANSACTION_FRN, ISSUER_MULTICURVES); final MulticurveProviderInterface multicurvesDecorated = new MulticurveProviderDiscountingDecoratedIssuer(ISSUER_MULTICURVES, CUR, BOND_TRANSACTION_FIXED_1.getBondTransaction().getIssuerEntity()); final MultipleCurrencyAmount pvNominal = NOMINAL_TR_1_FRN.accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvCoupon = COUPON_TR_1_FRN.accept(PVDC, multicurvesDecorated); final MultipleCurrencyAmount pvSettlement = BOND_SETTLEMENT_FRN.accept(PVDC, multicurvesDecorated); assertEquals("FRN present value", (pvNominal.getAmount(CUR) + pvCoupon.getAmount(CUR)) * QUANTITY_FRN + pvSettlement.getAmount(CUR), pv.getAmount(CUR)); } @Test(enabled = false) //FIXME change the test and the pv method with correct accrual interests mechanism. 
public void testPVSIborBond() { final MultipleCurrencyMulticurveSensitivity pvs = METHOD_BOND_TR.presentValueCurveSensitivity(BOND_TRANSACTION_FRN, ISSUER_MULTICURVES); final MultipleCurrencyMulticurveSensitivity pvsNominal = NOMINAL_TR_1_FRN.accept(PVCSIC, ISSUER_MULTICURVES); final MultipleCurrencyMulticurveSensitivity pvsCoupon = COUPON_TR_1_FRN.accept(PVCSIC, ISSUER_MULTICURVES); final MultipleCurrencyMulticurveSensitivity pvsSettlement = BOND_SETTLEMENT_FRN.accept(PVCSIC, ISSUER_MULTICURVES); final MultipleCurrencyMulticurveSensitivity expectedPvs = pvsNominal.plus(pvsCoupon).multipliedBy(QUANTITY_FRN).plus(pvsSettlement).cleaned(); assertEquals("FRN present value sensitivity", expectedPvs, pvs.cleaned()); } }
apache-2.0
manolama/YCSB
core/src/main/java/com/yahoo/ycsb/CommandLine.java
11056
/**
 * Copyright (c) 2010 Yahoo! Inc. All rights reserved.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License"); you
 * may not use this file except in compliance with the License. You
 * may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License. See accompanying
 * LICENSE file.
 */
package com.yahoo.ycsb;

import com.yahoo.ycsb.workloads.CoreWorkload;

import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;

/**
 * A simple command line client to a database, using the appropriate com.yahoo.ycsb.DB implementation.
 */
public final class CommandLine {

  private CommandLine() {
    //not used
  }

  /** DB implementation used when the "db" property is not set. */
  public static final String DEFAULT_DB = "com.yahoo.ycsb.BasicDB";

  /** Prints the command-line options accepted at startup. */
  public static void usageMessage() {
    System.out.println("YCSB Command Line Client");
    System.out.println("Usage: java com.yahoo.ycsb.CommandLine [options]");
    System.out.println("Options:");
    System.out.println("  -P filename: Specify a property file");
    System.out.println("  -p name=value: Specify a property value");
    System.out.println("  -db classname: Use a specified DB class (can also set the \"db\" property)");
    System.out.println("  -table tablename: Use the table name instead of the default \"" +
        CoreWorkload.TABLENAME_PROPERTY_DEFAULT + "\"");
    System.out.println();
  }

  /** Prints the interactive commands understood by the REPL loop. */
  public static void help() {
    System.out.println("Commands:");
    System.out.println("  read key [field1 field2 ...] - Read a record");
    System.out.println("  scan key recordcount [field1 field2 ...] - Scan starting at key");
    System.out.println("  insert key name1=value1 [name2=value2 ...] - Insert a new record");
    System.out.println("  update key name1=value1 [name2=value2 ...] - Update a record");
    System.out.println("  delete key - Delete a record");
    System.out.println("  table [tablename] - Get or [set] the name of the table");
    System.out.println("  quit - Quit");
  }

  /**
   * Entry point: parses arguments, instantiates the configured DB, and runs a
   * read-eval-print loop dispatching each command to the matching handler.
   */
  public static void main(String[] args) {
    Properties props = new Properties();
    Properties fileprops = new Properties();

    parseArguments(args, props, fileprops);

    // Command-line properties (-p/-db/-table) override anything loaded from -P files.
    for (String prop : props.stringPropertyNames()) {
      fileprops.setProperty(prop, props.getProperty(prop));
    }
    props = fileprops;

    System.out.println("YCSB Command Line client");
    System.out.println("Type \"help\" for command line help");
    System.out.println("Start with \"-help\" for usage info");

    String table = props.getProperty(CoreWorkload.TABLENAME_PROPERTY, CoreWorkload.TABLENAME_PROPERTY_DEFAULT);

    //create a DB
    String dbname = props.getProperty(Client.DB_PROPERTY, DEFAULT_DB);
    ClassLoader classLoader = CommandLine.class.getClassLoader();

    DB db = null;
    try {
      // Class<?> instead of a raw Class; getDeclaredConstructor().newInstance()
      // replaces the deprecated Class.newInstance() (which also swallowed
      // checked constructor exceptions).
      Class<?> dbclass = classLoader.loadClass(dbname);
      db = (DB) dbclass.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
      e.printStackTrace();
      System.exit(0); // exit code kept as 0 for compatibility with existing scripts
    }

    db.setProperties(props);
    try {
      db.init();
    } catch (DBException e) {
      e.printStackTrace();
      System.exit(0); // exit code kept as 0 for compatibility with existing scripts
    }

    System.out.println("Connected.");

    //main loop
    BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
    for (;;) {
      //get user input
      System.out.print("> ");

      String input = null;
      try {
        input = br.readLine();
      } catch (IOException e) {
        e.printStackTrace();
        System.exit(1);
      }

      if (input == null) {
        // EOF (e.g. input piped from a file). The old code dereferenced the
        // null and threw a NullPointerException; exit the loop cleanly instead.
        break;
      }
      if (input.isEmpty()) {
        continue;
      }
      if ("help".equals(input)) {
        help();
        continue;
      }
      if ("quit".equals(input)) {
        break;
      }

      String[] tokens = input.split(" ");
      long st = System.currentTimeMillis();

      //handle commands
      if ("table".equals(tokens[0])) {
        // BUG FIX: capture the returned table name. The old code assigned to
        // the String parameter inside handleTable, which never propagated back
        // here, so "table foo" silently had no effect on subsequent commands.
        table = handleTable(tokens, table);
      } else if ("read".equals(tokens[0])) {
        handleRead(tokens, table, db);
      } else if ("scan".equals(tokens[0])) {
        handleScan(tokens, table, db);
      } else if ("update".equals(tokens[0])) {
        handleUpdate(tokens, table, db);
      } else if ("insert".equals(tokens[0])) {
        handleInsert(tokens, table, db);
      } else if ("delete".equals(tokens[0])) {
        handleDelete(tokens, table, db);
      } else {
        System.out.println("Error: unknown command \"" + tokens[0] + "\"");
      }

      System.out.println((System.currentTimeMillis() - st) + " ms");
    }
  }

  /**
   * Parses command-line options into {@code props} (explicit -p/-db/-table
   * settings) and {@code fileprops} (properties loaded from -P files).
   * Prints usage and exits on any malformed option.
   */
  private static void parseArguments(String[] args, Properties props, Properties fileprops) {
    int argindex = 0;
    while ((argindex < args.length) && (args[argindex].startsWith("-"))) {
      if ((args[argindex].compareTo("-help") == 0) ||
          (args[argindex].compareTo("--help") == 0) ||
          (args[argindex].compareTo("-?") == 0) ||
          (args[argindex].compareTo("--?") == 0)) {
        usageMessage();
        System.exit(0);
      }

      if (args[argindex].compareTo("-db") == 0) {
        argindex++;
        if (argindex >= args.length) {
          usageMessage();
          System.exit(0);
        }
        props.setProperty(Client.DB_PROPERTY, args[argindex]);
        argindex++;
      } else if (args[argindex].compareTo("-P") == 0) {
        argindex++;
        if (argindex >= args.length) {
          usageMessage();
          System.exit(0);
        }
        String propfile = args[argindex];
        argindex++;

        Properties myfileprops = new Properties();
        try {
          myfileprops.load(new FileInputStream(propfile));
        } catch (IOException e) {
          System.out.println(e.getMessage());
          System.exit(0);
        }

        for (String prop : myfileprops.stringPropertyNames()) {
          fileprops.setProperty(prop, myfileprops.getProperty(prop));
        }
      } else if (args[argindex].compareTo("-p") == 0) {
        argindex++;
        if (argindex >= args.length) {
          usageMessage();
          System.exit(0);
        }
        int eq = args[argindex].indexOf('=');
        if (eq < 0) {
          usageMessage();
          System.exit(0);
        }
        String name = args[argindex].substring(0, eq);
        String value = args[argindex].substring(eq + 1);
        props.put(name, value);
        argindex++;
      } else if (args[argindex].compareTo("-table") == 0) {
        argindex++;
        if (argindex >= args.length) {
          usageMessage();
          System.exit(0);
        }
        props.put(CoreWorkload.TABLENAME_PROPERTY, args[argindex]);
        argindex++;
      } else {
        System.out.println("Unknown option " + args[argindex]);
        usageMessage();
        System.exit(0);
      }

      if (argindex >= args.length) {
        break;
      }
    }

    if (argindex != args.length) {
      usageMessage();
      System.exit(0);
    }
  }

  /** Handles "delete key": removes one record and prints the status. */
  private static void handleDelete(String[] tokens, String table, DB db) {
    if (tokens.length != 2) {
      System.out.println("Error: syntax is \"delete keyname\"");
    } else {
      Status ret = db.delete(table, tokens[1]);
      System.out.println("Return result: " + ret.getName());
    }
  }

  /** Handles "insert key name1=value1 ...": inserts one record and prints the status. */
  private static void handleInsert(String[] tokens, String table, DB db) {
    if (tokens.length < 3) {
      System.out.println("Error: syntax is \"insert keyname name1=value1 [name2=value2 ...]\"");
    } else {
      HashMap<String, ByteIterator> values = new HashMap<>();
      for (int i = 2; i < tokens.length; i++) {
        String[] nv = tokens[i].split("=");
        values.put(nv[0], new StringByteIterator(nv[1]));
      }
      Status ret = db.insert(table, tokens[1], values);
      System.out.println("Result: " + ret.getName());
    }
  }

  /** Handles "update key name1=value1 ...": updates one record and prints the status. */
  private static void handleUpdate(String[] tokens, String table, DB db) {
    if (tokens.length < 3) {
      System.out.println("Error: syntax is \"update keyname name1=value1 [name2=value2 ...]\"");
    } else {
      HashMap<String, ByteIterator> values = new HashMap<>();
      for (int i = 2; i < tokens.length; i++) {
        String[] nv = tokens[i].split("=");
        values.put(nv[0], new StringByteIterator(nv[1]));
      }
      Status ret = db.update(table, tokens[1], values);
      System.out.println("Result: " + ret.getName());
    }
  }

  /** Handles "scan key recordcount [fields...]": scans and prints each record. */
  private static void handleScan(String[] tokens, String table, DB db) {
    if (tokens.length < 3) {
      System.out.println("Error: syntax is \"scan keyname scanlength [field1 field2 ...]\"");
    } else {
      Set<String> fields = null;   // null means "all fields" to the DB layer
      if (tokens.length > 3) {
        fields = new HashSet<>();
        fields.addAll(Arrays.asList(tokens).subList(3, tokens.length));
      }
      Vector<HashMap<String, ByteIterator>> results = new Vector<>();
      Status ret = db.scan(table, tokens[1], Integer.parseInt(tokens[2]), fields, results);
      System.out.println("Result: " + ret.getName());
      int record = 0;
      if (results.isEmpty()) {
        System.out.println("0 records");
      } else {
        System.out.println("--------------------------------");
      }
      for (Map<String, ByteIterator> result : results) {
        System.out.println("Record " + (record++));
        for (Map.Entry<String, ByteIterator> ent : result.entrySet()) {
          System.out.println(ent.getKey() + "=" + ent.getValue());
        }
        System.out.println("--------------------------------");
      }
    }
  }

  /** Handles "read key [fields...]": reads one record and prints its fields. */
  private static void handleRead(String[] tokens, String table, DB db) {
    if (tokens.length == 1) {
      System.out.println("Error: syntax is \"read keyname [field1 field2 ...]\"");
    } else {
      Set<String> fields = null;   // null means "all fields" to the DB layer
      if (tokens.length > 2) {
        fields = new HashSet<>();
        fields.addAll(Arrays.asList(tokens).subList(2, tokens.length));
      }
      HashMap<String, ByteIterator> result = new HashMap<>();
      Status ret = db.read(table, tokens[1], fields, result);
      System.out.println("Return code: " + ret.getName());
      for (Map.Entry<String, ByteIterator> ent : result.entrySet()) {
        System.out.println(ent.getKey() + "=" + ent.getValue());
      }
    }
  }

  /**
   * Handles the "table" command: with no argument prints the current table
   * name; with one argument switches to that table.
   *
   * @return the table name to use from now on (BUG FIX: the old version
   *         assigned to its parameter and returned nothing, so setting the
   *         table never took effect in the caller)
   */
  private static String handleTable(String[] tokens, String table) {
    if (tokens.length == 1) {
      System.out.println("Using table \"" + table + "\"");
    } else if (tokens.length == 2) {
      table = tokens[1];
      System.out.println("Using table \"" + table + "\"");
    } else {
      System.out.println("Error: syntax is \"table tablename\"");
    }
    return table;
  }
}
apache-2.0
jeorme/OG-Platform
projects/OG-Analytics/src/test/java/com/opengamma/analytics/financial/volatilityswap/VolatilitySwapCalculatorResultTest.java
6557
/**
 * Copyright (C) 2014 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.volatilityswap;

import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertTrue;

import java.util.ArrayList;
import java.util.List;

import org.testng.annotations.Test;

import com.opengamma.util.test.TestGroup;

/**
 * Tests for {@link VolatilitySwapCalculatorResult} and
 * {@link VolatilitySwapCalculatorResultWithStrikes}: accessors, derived totals,
 * and the equals/hashCode contract.
 */
@Test(groups = TestGroup.UNIT)
public class VolatilitySwapCalculatorResultTest {

  private static final double[] putWeights = new double[] {0., -1., 1.5, };
  private static final double straddleWeight = 1.e2;
  private static final double[] callWeights = new double[] {11. / 7. };
  private static final double[] putPrices = new double[] {3.1, 4., 5.214 };
  private static final double straddlePrice = 2.2;
  private static final double[] callPrices = new double[] {33. };
  private static final double cash = 11.3;
  private static final double[] putStrikes = new double[] {1.1, 1.3, 1.4 };
  private static final double[] callStrikes = new double[] {1.6 };

  /**
   * Checks that every accessor echoes the constructor argument, and that the
   * option total / fair value are the expected weighted sums.
   */
  @Test
  public void accessTest() {
    final VolatilitySwapCalculatorResult res = new VolatilitySwapCalculatorResult(putWeights, straddleWeight,
        callWeights, putPrices, straddlePrice, callPrices, cash);
    final VolatilitySwapCalculatorResultWithStrikes resStrikes = new VolatilitySwapCalculatorResultWithStrikes(
        putStrikes, callStrikes, putWeights, straddleWeight, callWeights, putPrices, straddlePrice, callPrices, cash);

    final int nPuts = putWeights.length;
    final int nCalls = callWeights.length;

    // Accumulate the expected option total while checking the per-option accessors.
    double optionTotal = straddleWeight * straddlePrice;
    for (int i = 0; i < nPuts; ++i) {
      assertEquals(putWeights[i], res.getPutWeights()[i]);
      assertEquals(putPrices[i], res.getPutPrices()[i]);
      assertEquals(putStrikes[i], resStrikes.getPutStrikes()[i]);
      optionTotal += putWeights[i] * putPrices[i];
    }
    assertEquals(straddleWeight, res.getStraddleWeight());
    assertEquals(straddlePrice, res.getStraddlePrice());
    for (int i = 0; i < nCalls; ++i) {
      assertEquals(callWeights[i], res.getCallWeights()[i]);
      assertEquals(callPrices[i], res.getCallPrices()[i]);
      assertEquals(callStrikes[i], resStrikes.getCallStrikes()[i]);
      optionTotal += callWeights[i] * callPrices[i];
    }

    assertEquals(cash, res.getCash());
    assertEquals(optionTotal, res.getOptionTotal());
    assertEquals(optionTotal + cash, res.getFairValue());

    // withStrikes(..) must produce an object equal to one built directly.
    final VolatilitySwapCalculatorResultWithStrikes resStrikesFrom = res.withStrikes(putStrikes, callStrikes);
    assertEquals(resStrikes.hashCode(), resStrikesFrom.hashCode());
    assertEquals(resStrikes, resStrikesFrom);
  }

  /**
   * Equals and hashcode are tested.
   * Each of res3..res9 differs from res1 in exactly one constructor argument;
   * the copy-pasted pairwise assertions of the original (which also checked the
   * res1/res3 pair twice) are replaced by a single loop.
   */
  @Test
  public void hashEqualsTest() {
    final VolatilitySwapCalculatorResult res1 = new VolatilitySwapCalculatorResult(putWeights, straddleWeight,
        callWeights, putPrices, straddlePrice, callPrices, cash);
    final VolatilitySwapCalculatorResult res2 = new VolatilitySwapCalculatorResult(putWeights, straddleWeight,
        callWeights, putPrices, straddlePrice, callPrices, cash);
    final VolatilitySwapCalculatorResult res3 = new VolatilitySwapCalculatorResult(new double[] {0., 1., 1.5, },
        straddleWeight, callWeights, putPrices, straddlePrice, callPrices, cash);
    final VolatilitySwapCalculatorResult res4 = new VolatilitySwapCalculatorResult(putWeights, straddleWeight + 2.,
        callWeights, putPrices, straddlePrice, callPrices, cash);
    final VolatilitySwapCalculatorResult res5 = new VolatilitySwapCalculatorResult(putWeights, straddleWeight,
        new double[] {1.5, }, putPrices, straddlePrice, callPrices, cash);
    final VolatilitySwapCalculatorResult res6 = new VolatilitySwapCalculatorResult(putWeights, straddleWeight,
        callWeights, new double[] {1., 1., 1.5, }, straddlePrice, callPrices, cash);
    final VolatilitySwapCalculatorResult res7 = new VolatilitySwapCalculatorResult(putWeights, straddleWeight,
        callWeights, putPrices, straddlePrice + 1., callPrices, cash);
    final VolatilitySwapCalculatorResult res8 = new VolatilitySwapCalculatorResult(putWeights, straddleWeight,
        callWeights, putPrices, straddlePrice, new double[] {2. }, cash);
    final VolatilitySwapCalculatorResult res9 = new VolatilitySwapCalculatorResult(putWeights, straddleWeight,
        callWeights, putPrices, straddlePrice, callPrices, cash + 1.);

    // Reflexivity, symmetry on an equal pair, and the hashCode contract.
    assertTrue(res1.equals(res1));
    assertTrue(res1.equals(res2));
    assertTrue(res2.equals(res1));
    assertEquals(res1.hashCode(), res2.hashCode());

    final List<VolatilitySwapCalculatorResult> unequal = new ArrayList<>();
    unequal.add(res3);
    unequal.add(res4);
    unequal.add(res5);
    unequal.add(res6);
    unequal.add(res7);
    unequal.add(res8);
    unequal.add(res9);
    for (final VolatilitySwapCalculatorResult other : unequal) {
      // Inequality must hold in both directions.
      assertTrue(!(res1.equals(other)));
      assertTrue(!(other.equals(res1)));
      // Contrapositive of the hashCode contract: different hash implies not equal.
      if (res1.hashCode() != other.hashCode()) {
        assertTrue(!(res1.equals(other)));
      }
    }

    // Not equal to null or to an unrelated type.
    assertTrue(!(res1.equals(null)));
    assertTrue(!(res1.equals(new CarrLeeSeasonedSyntheticVolatilitySwapCalculator())));

    // The with-strikes variant: equal only when strikes and base data all match.
    final VolatilitySwapCalculatorResultWithStrikes resStrikesFrom1 = res1.withStrikes(putStrikes, callStrikes);
    final VolatilitySwapCalculatorResultWithStrikes resStrikesFrom3 = res3.withStrikes(putStrikes, callStrikes);
    final VolatilitySwapCalculatorResultWithStrikes resStrikesFrom11 = res1.withStrikes(putStrikes, callWeights);
    final VolatilitySwapCalculatorResultWithStrikes resStrikesFrom12 = res1.withStrikes(putWeights, callStrikes);
    assertTrue(resStrikesFrom1.equals(resStrikesFrom1));
    assertTrue(!(resStrikesFrom1.equals(null)));
    assertTrue(!(resStrikesFrom1.equals(res1)));
    assertTrue(!(resStrikesFrom1.equals(resStrikesFrom3)));
    assertTrue(!(resStrikesFrom1.equals(resStrikesFrom11)));
    assertTrue(!(resStrikesFrom1.equals(resStrikesFrom12)));
  }
}
apache-2.0
JayanthyChengan/dataverse
src/main/java/edu/harvard/iq/dataverse/mydata/RoleTagRetriever.java
24567
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package edu.harvard.iq.dataverse.mydata; import edu.harvard.iq.dataverse.DvObject; import edu.harvard.iq.dataverse.DvObjectServiceBean; import edu.harvard.iq.dataverse.RoleAssigneeServiceBean; import edu.harvard.iq.dataverse.search.SolrQueryResponse; import edu.harvard.iq.dataverse.search.SolrSearchResult; import edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper; import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser; import edu.harvard.iq.dataverse.engine.command.DataverseRequest; import edu.harvard.iq.dataverse.search.SearchConstants; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import javax.json.Json; import javax.json.JsonArrayBuilder; import org.apache.commons.lang.StringUtils; /** * Input: dvObject id, parent Id, and dvObject type (from Solr) * Output: For each dvObject id, a list of Role names * * @author rmp553 */ public class RoleTagRetriever { private static final Logger logger = Logger.getLogger(RoleTagRetriever.class.getCanonicalName()); private final DataverseRolePermissionHelper rolePermissionHelper; private final RoleAssigneeServiceBean roleAssigneeService; private final DvObjectServiceBean dvObjectServiceBean; public boolean errorFound = false; public String errorMessage = null; //public Map<Long, String> roleNameLookup = new HashMap<>(); // { role id : role name } private Map<Long, List<Long>> idToRoleListHash; // { dvobject id : [role id, role id] } private Map<Long, Long> childToParentIdHash; // { dvobject id : parent id } private Map<Long, String> idToDvObjectType; // { dvobject id : dvobject type } private List<Long> 
datasetIdsNeedingParentIds; private List<Long> finalCardIds; private Map<Long, List<String>> finalIdToRolesHash; // { dvobject id : [role name, role name] } // ---------------------------------- // Constructor // ---------------------------------- public RoleTagRetriever(DataverseRolePermissionHelper rolePermissionHelper , RoleAssigneeServiceBean roleAssigneeService , DvObjectServiceBean dvObjectServiceBean){ this.rolePermissionHelper = rolePermissionHelper; this.roleAssigneeService = roleAssigneeService; this.dvObjectServiceBean = dvObjectServiceBean; } public void loadRoles(DataverseRequest dataverseRequest , SolrQueryResponse solrQueryResponse){ if (dataverseRequest == null){ throw new NullPointerException("RoleTagRetriever.constructor. dataverseRequest cannot be null"); } AuthenticatedUser au = dataverseRequest.getAuthenticatedUser(); if (au == null){ throw new NullPointerException("RoleTagRetriever.constructor. au cannot be null"); } String userIdentifier = au.getUserIdentifier(); if (userIdentifier == null){ throw new NullPointerException("RoleTagRetriever.constructor. userIdentifier cannot be null"); } if (solrQueryResponse == null){ throw new NullPointerException("RoleTagRetriever.constructor. 
solrQueryResponse cannot be null"); } // (1) Reset variables initLookups(); // (2) Load roles from solr docs loadInfoFromSolrResponseDocs(solrQueryResponse); // (3) Load grandparent ids, if needed findDataverseIdsForFiles(); // (4) Retrieve the role ids retrieveRoleIdsForDvObjects(dataverseRequest, au); // (5) Prepare final role lists prepareFinalRoleLists(); //showRoleListHash(); } private void initLookups(){ this.errorFound = false; this.errorMessage = null; this.idToRoleListHash = new HashMap<>(); // { dvobject id : [role id, role id] } this.childToParentIdHash = new HashMap<>(); // { dvobject id : parent id } this.idToDvObjectType = new HashMap<>(); // { dvobject id : dvobject type } this.finalIdToRolesHash = new HashMap<>(); this.datasetIdsNeedingParentIds = new ArrayList<>(); this.finalCardIds = new ArrayList<>(); } private void addIdNeedingRoleRetrieval(Long dvObjectId){ if (dvObjectId == null){ return; } // initialize with dvObject id and empty list of role ids // if (!this.idToRoleListHash.containsKey(dvObjectId)){ this.idToRoleListHash.put(dvObjectId, new ArrayList<>()); } } public void showRoleListHash(){ msgt("showRoleListHash"); for (Map.Entry<Long, List<Long>> entry : idToRoleListHash.entrySet()) { msg("id: " + entry.getKey() + " | values: " + entry.getValue().toString()); } msgt("show idToDvObjectType"); for (Map.Entry<Long, String> entry : idToDvObjectType.entrySet()) { msg("dv id: " + entry.getKey() + " | type: " + entry.getValue()); } for (Map.Entry<Long, List<String>> entry : finalIdToRolesHash.entrySet()) { msg("id: " + entry.getKey() + " | values: " + entry.getValue().toString()); } } private void addRoleIdForHash(Long dvObjectId, Long roleId){ if ((dvObjectId == null)||(roleId == null)){ return; } if (!this.idToRoleListHash.containsKey(dvObjectId)){ logger.warning("DvObject id not found in hash (shouldn't happen): " + dvObjectId); return; } List<Long> roldIdList = this.idToRoleListHash.get(dvObjectId); roldIdList.add(roleId); 
this.idToRoleListHash.put(dvObjectId, roldIdList); } /** * Iterate through the Solr Cards and collect * - DvObject Id + Parent ID * - Dtype for object and parent * - Whether a "grandparent id" is needed for a file object * * @param solrQueryResponse */ private void loadInfoFromSolrResponseDocs(SolrQueryResponse solrQueryResponse){ if (solrQueryResponse == null){ throw new NullPointerException("RoleTagRetriever.constructor. solrQueryResponse cannot be null"); } // ---------------------------------- // Load initial data // ---------------------------------- msgt("load initial data"); // Iterate through Solr cards // for (SolrSearchResult doc : solrQueryResponse.getSolrSearchResults()){ // ------------------------------------------------- // (a) retrieve Card Id and DvObject type // ------------------------------------------------- finalCardIds.add(doc.getEntityId()); String dtype = doc.getType(); Long entityId = doc.getEntityId(); if (dtype == null){ throw new NullPointerException("The dvobject type cannot be null for SolrSearchResult"); } logger.fine("\nid: " + doc.getEntityId() + " dtype: " + dtype); // ------------------------------------------------- // (b) Populate dict of { dvObject id : dtype } // e.g. 
{ 3 : 'Dataverse' } // ------------------------------------------------- this.idToDvObjectType.put(entityId, dtype); // ------------------------------------------------- // (c) initialize dict of { dvObject id : [ (empty list for role ids) ] } // ------------------------------------------------- addIdNeedingRoleRetrieval(entityId); Long parentId = doc.getParentIdAsLong(); // ------------------------------------------------- // For datasets and files, check parents // ------------------------------------------------- if (!(dtype.equals(SearchConstants.SOLR_DATAVERSES))){ // ------------------------------------------------- // (d) Add to the childToParentIdHash { child id : parent id } // ------------------------------------------------- if (parentId == null){ throw new NullPointerException("A dataset or file parent cannot be null for SolrSearchResult"); } logger.fine("\nparentId: " + parentId); this.childToParentIdHash.put(doc.getEntityId(), parentId); // ------------------------------------------------- // (e) For the parent, add to dict of // { dvObject id : [ (empty list for role ids) ] } // - similar to (c) above // ------------------------------------------------- addIdNeedingRoleRetrieval(parentId); // ------------------------------------------------- // (f) Add the parent to the DvObject type lookup { dvObject id : dtype } // - similar to (b) above // ------------------------------------------------- if (doc.getType().equals(SearchConstants.SOLR_FILES)){ logger.fine("It's a file"); // ------------------------------------------------- // (f1) This is a file, we know the parent is a Dataset // ------------------------------------------------- this.idToDvObjectType.put(parentId, SearchConstants.SOLR_DATASETS); // ------------------------------------------------- // (g) For files, we'll need to get roles from the grandparent--e.g., the dataverse // ------------------------------------------------- this.datasetIdsNeedingParentIds.add(parentId); }if 
(dtype.equals(SearchConstants.SOLR_DATASETS)){ logger.fine("It's a dataset"); // ------------------------------------------------- // (f2) This is a Dataset, we know the parent is a Dataverse // ------------------------------------------------- this.idToDvObjectType.put(parentId, SearchConstants.SOLR_DATAVERSES); } } // ------------------------------------------------- // initialize final hash of dvObject id and empty list of role names // { dvObject id : [ (empty list for role nams) ] } // ------------------------------------------------- this.finalIdToRolesHash.put(doc.getEntityId(), new ArrayList<>()); } } /** * From the Cards, we know the Parent Ids of all the DvObjects * * However, for files, the roles may trickle down from the Dataverses * * Dataverse (file downloader) -> Dataset (file downloader) -> File (file downloader) * * Grandparent -> Parent -> Child * * Therefore, we need the File's "grandparent id" -- the Dataverse ID * * File (from card) -> Parent (from card) -> Grandparent (NEED TO FIND) * * */ private void findDataverseIdsForFiles(){ msgt("findDataverseIdsForFiles: " + datasetIdsNeedingParentIds.toString()); // ------------------------------------- // (1) Do we have any dataset Ids where we need to find the parent dataverse? // ------------------------------------- if (this.datasetIdsNeedingParentIds == null){ throw new NullPointerException("findDataverseIdsForFiles should not be null"); } if (this.datasetIdsNeedingParentIds.isEmpty()){ logger.fine("No ids found!"); return; } // ------------------------------------- // (2) Do we have any dataset Ids where we need to find the parent dataverse? 
// ------------------------------------- List<Object[]> results = this.dvObjectServiceBean.getDvObjectInfoForMyData(this.datasetIdsNeedingParentIds); logger.fine("findDataverseIdsForFiles results count: " + results.size()); // ------------------------------------- // (2a) Nope, return // ------------------------------------- if (results.isEmpty()){ return; } // ------------------------------------- // (3) Process the results -- the parent ID is the Dataverse that we're interested in // ------------------------------------- Integer dvIdAsInteger; Long dvId; String dtype; Long parentId; // ------------------------------------- // Iterate through object list // ------------------------------------- for (Object[] ra : results) { dvIdAsInteger = (Integer)ra[0]; // ?? Why, should be a Long dvId = new Long(dvIdAsInteger); dtype = (String)ra[1]; parentId = (Long)ra[2]; //msg("result: dvId: " + dvId + " |dtype: " + dtype + " |parentId: " + parentId); // Should ALWAYS be a Dataset! if (dtype.equals(DvObject.DATASET_DTYPE_STRING)){ this.childToParentIdHash.put(dvId, parentId); // Store the parent child relation this.addIdNeedingRoleRetrieval(parentId); // We need the roles for this dataverse this.idToDvObjectType.put(parentId, SearchConstants.SOLR_DATAVERSES); // store the dv object type } } } private boolean retrieveRoleIdsForDvObjects(DataverseRequest dataverseRequest, AuthenticatedUser au){ String userIdentifier = au.getUserIdentifier(); if (userIdentifier == null){ throw new NullPointerException("RoleTagRetriever.constructor. 
userIdentifier cannot be null"); } if (this.idToRoleListHash.isEmpty()){ return true; } List<Long> dvObjectIdList = new ArrayList<>(this.idToRoleListHash.keySet()); if (dvObjectIdList.isEmpty()){ return true; } //msg("dvObjectIdList: " + dvObjectIdList.toString()); List<Object[]> results = this.roleAssigneeService.getRoleIdsFor(dataverseRequest, dvObjectIdList); //msgt("runStep1RoleAssignments results: " + results.toString()); if (results == null){ this.addErrorMessage("Sorry, the roleAssigneeService isn't working."); return false; }else if (results.isEmpty()){ logger.log(Level.WARNING, "No roles were found for user {0} with ids {1}", new Object[]{userIdentifier, dvObjectIdList.toString()}); this.addErrorMessage("Sorry, no roles were found."); return false; } // Iterate through assigned objects, a single object may end up in // multiple "buckets" for (Object[] ra : results) { Long dvId = (Long)ra[0]; Long roleId = (Long)ra[1]; this.addRoleIdForHash(dvId, roleId); //msg("dv id: " + dvId + "(" + this.idToDvObjectType.get(dvId) + ") | roleId: " // + roleId + "(" + this.rolePermissionHelper.getRoleName(roleId)+")"); } return true; } private List<String> getFormattedRoleListForId(Long dvId){ if (dvId==null){ return null; } if (!this.idToRoleListHash.containsKey(dvId)){ return null; } List<String> roleNames = new ArrayList<>(); for (Long roleId : this.idToRoleListHash.get(dvId) ){ String roleName = this.rolePermissionHelper.getRoleName(roleId); if (roleName != null){ roleNames.add(roleName); } } return roleNames; } private List<String> getFormattedRoleListForId(Long dvId, boolean withDatasetPerms, boolean withFilePerms){ if (dvId==null){ return null; } if (!this.idToRoleListHash.containsKey(dvId)){ return null; } List<String> roleNames = new ArrayList<>(); for (Long roleId : this.idToRoleListHash.get(dvId) ){ if ((withDatasetPerms && this.rolePermissionHelper.hasDatasetPermissions(roleId)) || (withFilePerms && this.rolePermissionHelper.hasFilePermissions(roleId))) { 
String roleName = this.rolePermissionHelper.getRoleName(roleId); if (roleName != null){ roleNames.add(roleName); } } } return roleNames; } public boolean hasRolesForCard(Long dvObjectId){ if (dvObjectId == null){ return false; } return this.finalIdToRolesHash.containsKey(dvObjectId); } public List<String> getRolesForCard(Long dvObjectId){ if (!this.hasRolesForCard(dvObjectId)){ return null; } return this.finalIdToRolesHash.get(dvObjectId); } public JsonArrayBuilder getRolesForCardAsJSON(Long dvObjectId){ if (!this.hasRolesForCard(dvObjectId)){ return null; } JsonArrayBuilder jsonArray = Json.createArrayBuilder(); for (String roleName : this.finalIdToRolesHash.get(dvObjectId)){ jsonArray.add(roleName); } return jsonArray; } /** * For the cards, make a dict of { dv object id : [role name, role name, etc ]} * */ public void prepareFinalRoleLists(){ msgt("prepareFinalRoleLists"); if (finalCardIds.isEmpty()){ return; } List<String> formattedRoleNames; List<String> finalRoleNames; for (Long dvIdForCard : this.finalCardIds) { //msgt("dvIdForCard: " + dvIdForCard + "(" + this.idToDvObjectType.get(dvIdForCard) + ")"); // ------------------------------------------------- // (a) Make a new array with the role names for the card // ------------------------------------------------- finalRoleNames = new ArrayList<>(); if (!this.idToDvObjectType.containsKey(dvIdForCard)){ throw new IllegalStateException("All dvObject ids from solr should have their dvObject types in this hash"); } // ------------------------------------------------- // (b) Add direct role assignments -- may be empty // ------------------------------------------------- formattedRoleNames = getFormattedRoleListForId(dvIdForCard); //msg("(a) direct assignments: " + StringUtils.join(formattedRoleNames, ", ")); if (formattedRoleNames != null){ finalRoleNames.addAll(formattedRoleNames); } //msg("Roles so far: " + finalRoleNames.toString()); // ------------------------------------------------- // (c) get parent id // 
------------------------------------------------- Long parentId = null; if (this.childToParentIdHash.containsKey(dvIdForCard)){ parentId = this.childToParentIdHash.get(dvIdForCard); //msg("(b) parentId: " + parentId); }else{ // ------------------------------------------------- // No parent! Store roles and move to next id // ------------------------------------------------- finalIdToRolesHash.put(dvIdForCard, this.formatRoleNames(finalRoleNames)); continue; } // ------------------------------------------------- // (d) get dtype // ------------------------------------------------- String dtype = this.idToDvObjectType.get(dvIdForCard); switch(dtype){ //case(SearchConstants.SOLR_DATAVERSES // No indirect assignments case(SearchConstants.SOLR_DATASETS): // ------------------------------------------------- // (d1) May have indirect assignments re: dataverse // ------------------------------------------------- formattedRoleNames = getFormattedRoleListForId(parentId, true, true); if (formattedRoleNames != null){ //msg("(d) indirect assignments: " + StringUtils.join(formattedRoleNames, ", ")); finalRoleNames.addAll(formattedRoleNames); //msg("Roles from dataverse: " + finalRoleNames.toString()); } break; case(SearchConstants.SOLR_FILES): //msg("(c) FILES"); // ------------------------------------------------- // (d2) May have indirect assignments re: dataset // ------------------------------------------------- formattedRoleNames = getFormattedRoleListForId(parentId, false, true); if (formattedRoleNames != null){ //msg("(d) indirect assignments: " + StringUtils.join(formattedRoleNames, ", ")); finalRoleNames.addAll(formattedRoleNames); } // May have indirect assignments re: dataverse // if (this.childToParentIdHash.containsKey(parentId)){ Long grandparentId = this.childToParentIdHash.get(parentId); formattedRoleNames = getFormattedRoleListForId(grandparentId, false, true); if (formattedRoleNames != null){ //msg("(e) 2-step indirect assignments: " + 
StringUtils.join(formattedRoleNames, ", ")); finalRoleNames.addAll(formattedRoleNames); } } break; } // end switch //msg("Roles from dataverse: " + formattedRoleNames.toString()); finalIdToRolesHash.put(dvIdForCard, formatRoleNames(finalRoleNames)); //String key = //Object value = entry.getValue(); // ... } } private List<String> formatRoleNames(List<String> roleNames){ if (roleNames==null){ return null; } // remove duplicates Set<String> distinctRoleNames = new HashSet<>(roleNames); // back to list roleNames = new ArrayList<>(distinctRoleNames); // sort list Collections.sort(roleNames); return roleNames; } public boolean hasError(){ return this.errorFound; } public String getErrorMessage(){ return this.errorMessage; } private void addErrorMessage(String s){ this.errorFound = true; this.errorMessage = s; } private void msg(String s){ //System.out.println(s); } private void msgt(String s){ msg("-------------------------------"); msg(s); msg("-------------------------------"); } }
apache-2.0
jeorme/OG-Platform
projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/model/option/pricing/tree/EuropeanExchangeOptionFunctionProvider.java
4090
/** * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies * * Please see distribution for license. */ package com.opengamma.analytics.financial.model.option.pricing.tree; import com.google.common.primitives.Doubles; import com.opengamma.util.ArgumentChecker; /** * The payoff of European exchange-one-asset-for-another option is max(Q1 * S1 - Q2 * S2, 0) at expiration, * where Q1 is the quantity of asset S1 and Q2 is the quantity of asset S2. */ public class EuropeanExchangeOptionFunctionProvider extends OptionFunctionProvider2D { private double _quantity1; private double _quantity2; /** * @param timeToExpiry Time to expiry * @param steps Number of steps * @param quantity1 Quantity of asset 1 * @param quantity2 Quantity of asset 2 */ public EuropeanExchangeOptionFunctionProvider(final double timeToExpiry, final int steps, final double quantity1, final double quantity2) { super(0., timeToExpiry, steps, true); ArgumentChecker.isTrue(quantity1 > 0., "quantity1 should be positive"); ArgumentChecker.isTrue(Doubles.isFinite(quantity1), "quantity1 should be finite"); ArgumentChecker.isTrue(quantity2 > 0., "quantity2 should be positive"); ArgumentChecker.isTrue(Doubles.isFinite(quantity2), "quantity2 should be finite"); _quantity1 = quantity1; _quantity2 = quantity2; } @Override public double[][] getPayoffAtExpiry(final double assetPrice1, final double assetPrice2, final double upOverDown1, final double upOverDown2) { final int nStepsP = getNumberOfSteps() + 1; final double[][] values = new double[nStepsP][nStepsP]; double priceTmp1 = assetPrice1; for (int i = 0; i < nStepsP; ++i) { double priceTmp2 = assetPrice2; for (int j = 0; j < nStepsP; ++j) { values[i][j] = Math.max(_quantity1 * priceTmp1 - _quantity2 * priceTmp2, 0.); priceTmp2 *= upOverDown2; } priceTmp1 *= upOverDown1; } return values; } public double[][] getPayoffAtExpiryTrinomial(final double assetPrice1, final double assetPrice2, final double middleOverDown1, final double 
middleOverDown2) { final int nNodes = 2 * getNumberOfSteps() + 1; final double[][] values = new double[nNodes][nNodes]; double priceTmp1 = assetPrice1; for (int i = 0; i < nNodes; ++i) { double priceTmp2 = assetPrice2; for (int j = 0; j < nNodes; ++j) { values[i][j] = Math.max(_quantity1 * priceTmp1 - _quantity2 * priceTmp2, 0.); priceTmp2 *= middleOverDown2; } priceTmp1 *= middleOverDown1; } return values; } @Override public double getSign() { throw new IllegalArgumentException("Call/put is not relevant for this option"); } @Override public double getStrike() { throw new IllegalArgumentException("Strike is not relavant for this option"); } /** * Access quantity of asset 1 * @return _quantity1 */ public double getQuantity1() { return _quantity1; } /** * Access quantity of asset 2 * @return _quantity2 */ public double getQuantity2() { return _quantity2; } @Override public int hashCode() { final int prime = 31; int result = super.hashCode(); long temp; temp = Double.doubleToLongBits(_quantity1); result = prime * result + (int) (temp ^ (temp >>> 32)); temp = Double.doubleToLongBits(_quantity2); result = prime * result + (int) (temp ^ (temp >>> 32)); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!super.equals(obj)) { return false; } if (!(obj instanceof EuropeanExchangeOptionFunctionProvider)) { return false; } EuropeanExchangeOptionFunctionProvider other = (EuropeanExchangeOptionFunctionProvider) obj; if (Double.doubleToLongBits(_quantity1) != Double.doubleToLongBits(other._quantity1)) { return false; } if (Double.doubleToLongBits(_quantity2) != Double.doubleToLongBits(other._quantity2)) { return false; } return true; } }
apache-2.0
paradisecr/ONOS-OXP
core/api/src/test/java/org/onosproject/net/optical/impl/DefaultOmsPortTest.java
4995
/*
 * Copyright 2016-present Open Networking Laboratory
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.net.optical.impl;

import static org.hamcrest.Matchers.*;
import static org.junit.Assert.*;
import static org.onosproject.net.PortNumber.portNumber;

import org.junit.Test;
import org.onlab.packet.ChassisId;
import org.onlab.util.Frequency;
import org.onosproject.net.Annotations;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.DefaultDevice;
import org.onosproject.net.DefaultPort;
import org.onosproject.net.Device;
import org.onosproject.net.Device.Type;
import org.onosproject.net.DeviceId;
import org.onosproject.net.Port;
import org.onosproject.net.PortNumber;
import org.onosproject.net.optical.OmsPort;
import org.onosproject.net.provider.ProviderId;

import com.google.common.testing.EqualsTester;

/**
 * Tests for {@link DefaultOmsPort}.
 */
public class DefaultOmsPortTest {

    // Fixture: a ROADM device that every test port is attached to.
    private static final ProviderId PID = new ProviderId("test", "id");
    private static final DeviceId DID = DeviceId.deviceId("test:00123");
    private static final String MFC = "MFC";
    private static final String HW = "HW V";
    private static final String SW = "SW V";
    private static final String SER = "SER";
    private static final ChassisId CHS = new ChassisId(42);
    private static final Annotations DEV_ANON = DefaultAnnotations.EMPTY;
    private static final Device DEV = new DefaultDevice(PID, DID, Type.ROADM, MFC, HW, SW, SER, CHS, DEV_ANON);

    /**
     * Verifies the equals/hashCode contract: equality must depend on port
     * number, enabled state, annotations, and the three OMS frequencies --
     * but not on the base port's type or speed.
     */
    @Test
    public void testEquality() {
        PortNumber pn = PortNumber.portNumber(4900);
        Annotations an = DefaultAnnotations.builder()
                .set("Base", "value")
                .build();
        Annotations an2 = DefaultAnnotations.builder()
                .set("Base", "value2")
                .build();
        Port base = new DefaultPort(DEV, pn, true, Port.Type.VIRTUAL, 2, an);

        Frequency minF = Frequency.ofGHz(3);
        Frequency maxF = Frequency.ofGHz(33);
        Frequency grid = Frequency.ofGHz(2);
        // reference OMS port
        OmsPort oms = new DefaultOmsPort(base, minF, maxF, grid);

        // Each addEqualityGroup differs from the reference in exactly one aspect.
        new EqualsTester()
            .addEqualityGroup(oms,
                    // different base port type or portspeed is ignored
                    new DefaultOmsPort(new DefaultPort(DEV, pn, true, an), minF, maxF, grid))
            // different port number
            .addEqualityGroup(new DefaultOmsPort(new DefaultPort(DEV, portNumber(1), true, an), minF, maxF, grid))
            // different isEnabled
            .addEqualityGroup(new DefaultOmsPort(new DefaultPort(DEV, pn, false, an), minF, maxF, grid))
            // different annotation
            .addEqualityGroup(new DefaultOmsPort(new DefaultPort(DEV, pn, true, an2), minF, maxF, grid))
            // different minFreq
            .addEqualityGroup(new DefaultOmsPort(base, Frequency.ofKHz(3), maxF, grid))
            // different maxFreq
            .addEqualityGroup(new DefaultOmsPort(base, minF, Frequency.ofKHz(33), grid))
            // different grid
            .addEqualityGroup(new DefaultOmsPort(base, minF, maxF, Frequency.ofKHz(2)))
            .testEquals();
    }

    /**
     * Verifies accessor behavior: base attributes are inherited, the type is
     * forced to OMS, port speed is 0, and totalChannels is derived from the
     * frequency range and grid.
     */
    @Test
    public void basicTests() {
        PortNumber pn = PortNumber.portNumber(4900);
        Annotations annotations = DefaultAnnotations.builder()
                .set("Base", "value")
                .build();
        boolean isEnabled = true;
        Port base = new DefaultPort(DEV, pn, isEnabled, Port.Type.VIRTUAL, 2, annotations);

        Frequency minFrequency = Frequency.ofGHz(3);
        Frequency maxFrequency = Frequency.ofGHz(33);
        Frequency grid = Frequency.ofGHz(2);
        OmsPort oms = new DefaultOmsPort(base, minFrequency, maxFrequency, grid);

        // basic attributes and annotations are inherited from base
        assertThat(oms.element(), is(DEV));
        assertThat(oms.isEnabled(), is(isEnabled));
        assertThat(oms.number(), is(pn));
        assertThat(oms.annotations(), is(annotations));

        assertThat("type is always OMS", oms.type(), is(Port.Type.OMS));
        assertThat("port speed is undefined", oms.portSpeed(), is(equalTo(0L)));

        assertThat(oms.maxFrequency(), is(maxFrequency));
        assertThat(oms.minFrequency(), is(minFrequency));
        assertThat(oms.grid(), is(grid));
        assertThat("(33-3)/2 = 15", oms.totalChannels(), is((short) 15));
    }
}
apache-2.0
S-Bartfast/cgeo
main/src/cgeo/geocaching/connector/tc/package-info.java
121
/** * <a href="https://www.terracaching.com">Terracaching</a> implementation. */ package cgeo.geocaching.connector.tc;
apache-2.0
nafae/developer
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201411/ServerErrorReason.java
1374
package com.google.api.ads.dfp.jaxws.v201411; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for ServerError.Reason. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="ServerError.Reason"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="SERVER_ERROR"/> * &lt;enumeration value="SERVER_BUSY"/> * &lt;enumeration value="UNKNOWN"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "ServerError.Reason") @XmlEnum public enum ServerErrorReason { /** * * Indicates that an unexpected error occured. * * */ SERVER_ERROR, /** * * Indicates that the server is currently experiencing a high load. Please * wait and try your request again. * * */ SERVER_BUSY, /** * * The value returned if the actual value is not exposed by the requested API version. * * */ UNKNOWN; public String value() { return name(); } public static ServerErrorReason fromValue(String v) { return valueOf(v); } }
apache-2.0
nafae/developer
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201408/ProposalLineItemPremiumStatus.java
1349
package com.google.api.ads.dfp.jaxws.v201408; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for ProposalLineItemPremiumStatus. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="ProposalLineItemPremiumStatus"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="INCLUDED"/> * &lt;enumeration value="EXCLUDED"/> * &lt;enumeration value="UNKNOWN"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "ProposalLineItemPremiumStatus") @XmlEnum public enum ProposalLineItemPremiumStatus { /** * * Indicating the premium is included in the pricing. * * */ INCLUDED, /** * * Indicating the premium is excluded from the pricing. * * */ EXCLUDED, /** * * The value returned if the actual value is not exposed by the requested API version. * * */ UNKNOWN; public String value() { return name(); } public static ProposalLineItemPremiumStatus fromValue(String v) { return valueOf(v); } }
apache-2.0
smgoller/geode
geode-management/src/main/java/org/apache/geode/management/api/ClusterManagementRealizationException.java
1633
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.management.api; import java.util.List; /** * Thrown by {@link ClusterManagementService#create(AbstractConfiguration)}, update, and delete * operations * which have a possibility of "partial" failure. */ public class ClusterManagementRealizationException extends ClusterManagementException { private final ClusterManagementRealizationResult result; /** * for internal use only */ public ClusterManagementRealizationException(ClusterManagementRealizationResult result) { super(result); this.result = result; } /** * A {@link ClusterManagementService#create(AbstractConfiguration)} operation may fail on all * members or * only some. This will return the per-member status. */ public List<RealizationResult> getMemberStatuses() { return result.getMemberStatuses(); } }
apache-2.0
cmorty/avrora
src/avrora/test/sim/legacy/LegacyTester.java
2319
/**
 * Copyright (c) 2005, Regents of the University of California
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * Redistributions of source code must retain the above copyright notice,
 * this list of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * Neither the name of the University of California, Los Angeles nor the
 * names of its contributors may be used to endorse or promote products
 * derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Creation date: Dec 6, 2005
 */
package avrora.test.sim.legacy;

import avrora.Defaults;
import avrora.arch.AbstractArchitecture;
import avrora.arch.legacy.LegacyArchitecture;
import avrora.core.Program;
import avrora.sim.Simulator;
import avrora.test.sim.StateAccessor;
import avrora.test.sim.Tester;

/**
 * Simulation-test harness for the legacy AVR architecture: supplies the
 * simulator, architecture singleton, and state accessor used by the generic
 * {@code Tester} machinery.
 *
 * @author Ben L. Titzer
 */
public class LegacyTester extends Tester {

    /**
     * Builds a default simulator (node id 0) for the given program.
     */
    public Simulator newSimulator(Program p) {
        return Defaults.newSimulator(0, p);
    }

    /**
     * Returns the legacy AVR architecture singleton under test.
     */
    public AbstractArchitecture getArchitecture() {
        return LegacyArchitecture.INSTANCE;
    }

    /**
     * Wraps the simulator in a legacy-specific accessor so tests can read
     * registers and memory by name.
     */
    public StateAccessor getAccessor(Simulator sim) {
        return new LegacyStateAccessor(sim.getProgram(), sim);
    }
}
bsd-3-clause
cmorty/avrora
src/avrora/arch/avr/AVRState.java
8918
/** * Copyright (c) 2005, Regents of the University of California * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of California, Los Angeles nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * * Creation date: Sep 7, 2005 */ package avrora.arch.avr; import avrora.sim.*; import avrora.sim.clock.DeltaQueue; /** * The <code>AVRState</code> class represents an instance of the internal state of an <code>AVRInstrInterpreter</code> * instance. This class allows access to the state of the interpreter without exposing the details of the implementation * or jeopardizing the soundness of the simulation. 
* <p/> * </p> An <code>AVRState</code> instance contains the state of registers, memory, the code segment, and the IO * registers, as well as the interrupt table and <code>MainClock</code> instance. It provides a public interface through * the <code>get_XXX()</code> methods and a protected interface used in <code>AVRInstrInterpreter</code> that allows direct * access to the fields representing the actual state. * * @author Ben L. Titzer */ public abstract class AVRState implements State { public static int NUM_REGS = 32; protected Simulator simulator; protected InterruptTable interrupts; protected int pc; protected int nextpc; protected boolean I, T, H, V, S, N, Z, C; protected AVRDataSegment sram; protected byte[] regs; protected CodeSegment flash; protected ActiveRegister[] ioregs; protected DeltaQueue queue; protected ActiveRegister SREG_reg; protected int SREG; protected int cycles; protected boolean justReturnedFromInterrupt; /** * The <code>getPC()</code> retrieves the current program counter. * * @return the program counter as a byte address */ public int getPC() { return pc; } /** * The <code>getSP()</code> method reads the current value of the stack pointer. Since the stack pointer is stored * in two IO registers, this method will cause the invocation of the <code>.read()</code> method on each of the * <code>IOReg</code> objects that store these values. * * @return the value of the stack pointer as a byte address */ public abstract int getSP(); /** * The <code>getSRAM()</code> method reads a byte value from the data memory (SRAM) at the specified address. This * method is intended for use by probes and watches; thus, it does not trigger any watches that may be installed at * the memory address specified, since doing so could lead to infinite recursion (if a watch attempts to get the * value of the byte at the location where it itself is installed) or alter the metrics being measured by the * instrumentation at that address. 
* * @param address the byte address to read * @return the value of the data memory at the specified address * @throws ArrayIndexOutOfBoundsException if the specified address is not the valid memory range */ public byte getSRAM(int address) { return sram.get(address); } /** * The <code>getFlash()</code> method reads a byte value from the program (Flash) memory. The flash memory generally * stores read-only values and the instructions of the program. This method is intended for use by probes and * watches; thus, it does not trigger any watches or probes that may be installed at the memory address specified, * since doing so could lead to infinite recursion (if a watch attempts to get the value of the byte at the location * where it itself is installed) or alter the metrics being measured by the instrumentation at that address. * * @param address the byte address at which to read * @return the byte value of the program memory at the specified address * @throws ArrayIndexOutOfBoundsException if the specified address is not the valid program memory range */ public byte getFlash(int address) { return flash.get(address); } /** * The <code>getIOReg()</code> method reads the value of an IO register as a byte. Invocation of this method causes * an invocation of the <code>.read()</code> method on the corresponding internal <code>IOReg</code> object, and its * value returned. Very few devices have behavior that is triggered by a read from an IO register, but care should * be taken when calling this method for one of those IO registers. * * @param ior the IO register number * @return the value of the IO register */ public byte getIOReg(int ior) { return ioregs[ior].read(); } /** * The <code>getRegister()</code> method reads a general purpose register's current value as a byte. 
* * @param reg the register to read * @return the current value of the specified register as a byte */ public byte getRegister(AVRSymbol.GPR reg) { return regs[reg.value]; } /** * The <code>getCycles()</code> method returns the clock cycle count recorded so far in the simulation. * * @return the number of clock cycles elapsed in the simulation */ public long getCycles() { return queue.getCount(); } /** * The <code>getSREG()</code> method reads the value of the status register. The status register contains the I, T, * H, S, V, N, Z, and C flags, in order from highest-order to lowest-order. * * @return the value of the status register as a byte. */ public byte getSREG() { return SREG_reg.read(); } /** * The <code>isEnabled()</code> method checks whether the specified interrupt is currently enabled. * * @param inum the interrupt number to check * @return true if the specified interrupt is currently enabled; false otherwise */ public boolean isEnabled(int inum) { return interrupts.isEnabled(inum); } /** * The <code>isPosted()</code> method checks whether the specified interrupt is currently posted. * * @param inum the interrupt number to check * @return true if the specified interrupt is currently posted; false otherwise */ public boolean isPosted(int inum) { return interrupts.isPosted(inum); } /** * The <code>isPending()</code> method checks whether the specified interrupt is currently pending. * * @param inum the interrupt number to check * @return true if the specified interrupt is currently pending; false otherwise */ public boolean isPending(int inum) { return interrupts.isPending(inum); } /** * The <code>getSimulator()</code> method returns the simulator associated with this state instance. * * @return a reference to the simulator associated with this state instance. 
*/ public Simulator getSimulator() { return simulator; } protected static int map_get(byte[] a, int indx) { return a[indx]; } protected static void map_set(byte[] a, int indx, int val) { a[indx] = (byte)val; } protected static int map_get(Segment s, int addr) { return s.read(addr); } protected static void map_set(Segment s, int addr, int val) { s.write(addr, (byte)val); } protected static int map_get(ActiveRegister[] s, int addr) { return s[addr].read(); } protected static void map_set(ActiveRegister[] s, int addr, int val) { s[addr].write((byte)val); } }
bsd-3-clause
sharadagarwal/autorest
AutoRest/Generators/Java/Java.Tests/src/main/java/fixtures/validation/AutoRestValidationTest.java
8604
/** * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ package fixtures.validation; import java.util.List; import okhttp3.Interceptor; import okhttp3.logging.HttpLoggingInterceptor.Level; import com.microsoft.rest.AutoRestBaseUrl; import com.microsoft.rest.serializer.JacksonMapperAdapter; import com.microsoft.rest.ServiceCall; import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceException; import com.microsoft.rest.ServiceResponse; import fixtures.validation.models.ErrorException; import fixtures.validation.models.Product; import java.io.IOException; /** * The interface for AutoRestValidationTest class. */ public interface AutoRestValidationTest { /** * Gets the URL used as the base for all cloud service requests. * * @return the BaseUrl object. */ AutoRestBaseUrl getBaseUrl(); /** * Gets the list of interceptors the OkHttp client will execute. * * @return the list of interceptors. */ List<Interceptor> getClientInterceptors(); /** * Sets the logging level for OkHttp client. * * @param logLevel the logging level enum. */ void setLogLevel(Level logLevel); /** * Gets the adapter for {@link com.fasterxml.jackson.databind.ObjectMapper} for serialization * and deserialization operations.. * * @return the adapter. */ JacksonMapperAdapter getMapperAdapter(); /** * Gets Subscription ID.. * * @return the subscriptionId value. */ String getSubscriptionId(); /** * Sets Subscription ID.. * * @param subscriptionId the subscriptionId value. */ void setSubscriptionId(String subscriptionId); /** * Gets Required string following pattern \d{2}-\d{2}-\d{4}. * * @return the apiVersion value. */ String getApiVersion(); /** * Sets Required string following pattern \d{2}-\d{2}-\d{4}. 
* * @param apiVersion the apiVersion value. */ void setApiVersion(String apiVersion); /** * Validates input parameters on the method. See swagger for details. * * @param resourceGroupName Required string between 3 and 10 chars with pattern [a-zA-Z0-9]+. * @param id Required int multiple of 10 from 100 to 1000. * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the Product object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<Product> validationOfMethodParameters(String resourceGroupName, int id) throws ErrorException, IOException, IllegalArgumentException; /** * Validates input parameters on the method. See swagger for details. * * @param resourceGroupName Required string between 3 and 10 chars with pattern [a-zA-Z0-9]+. * @param id Required int multiple of 10 from 100 to 1000. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if callback is null * @return the {@link ServiceCall} object */ ServiceCall validationOfMethodParametersAsync(String resourceGroupName, int id, final ServiceCallback<Product> serviceCallback) throws IllegalArgumentException; /** * Validates body parameters on the method. See swagger for details. * * @param resourceGroupName Required string between 3 and 10 chars with pattern [a-zA-Z0-9]+. * @param id Required int multiple of 10 from 100 to 1000. * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the Product object wrapped in {@link ServiceResponse} if successful. 
*/ ServiceResponse<Product> validationOfBody(String resourceGroupName, int id) throws ErrorException, IOException, IllegalArgumentException; /** * Validates body parameters on the method. See swagger for details. * * @param resourceGroupName Required string between 3 and 10 chars with pattern [a-zA-Z0-9]+. * @param id Required int multiple of 10 from 100 to 1000. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if callback is null * @return the {@link ServiceCall} object */ ServiceCall validationOfBodyAsync(String resourceGroupName, int id, final ServiceCallback<Product> serviceCallback) throws IllegalArgumentException; /** * Validates body parameters on the method. See swagger for details. * * @param resourceGroupName Required string between 3 and 10 chars with pattern [a-zA-Z0-9]+. * @param id Required int multiple of 10 from 100 to 1000. * @param body the Product value * @throws ErrorException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @throws IllegalArgumentException exception thrown from invalid parameters * @return the Product object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<Product> validationOfBody(String resourceGroupName, int id, Product body) throws ErrorException, IOException, IllegalArgumentException; /** * Validates body parameters on the method. See swagger for details. * * @param resourceGroupName Required string between 3 and 10 chars with pattern [a-zA-Z0-9]+. * @param id Required int multiple of 10 from 100 to 1000. * @param body the Product value * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if callback is null * @return the {@link ServiceCall} object */ ServiceCall validationOfBodyAsync(String resourceGroupName, int id, Product body, final ServiceCallback<Product> serviceCallback) throws IllegalArgumentException; /** * * @throws ServiceException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the {@link ServiceResponse} object if successful. */ ServiceResponse<Void> getWithConstantInPath() throws ServiceException, IOException; /** * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if callback is null * @return the {@link ServiceCall} object */ ServiceCall getWithConstantInPathAsync(final ServiceCallback<Void> serviceCallback) throws IllegalArgumentException; /** * * @throws ServiceException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the Product object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<Product> postWithConstantInBody() throws ServiceException, IOException; /** * * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if callback is null * @return the {@link ServiceCall} object */ ServiceCall postWithConstantInBodyAsync(final ServiceCallback<Product> serviceCallback) throws IllegalArgumentException; /** * * @param body the Product value * @throws ServiceException exception thrown from REST call * @throws IOException exception thrown from serialization/deserialization * @return the Product object wrapped in {@link ServiceResponse} if successful. */ ServiceResponse<Product> postWithConstantInBody(Product body) throws ServiceException, IOException; /** * * @param body the Product value * @param serviceCallback the async ServiceCallback to handle successful and failed responses. 
* @throws IllegalArgumentException thrown if callback is null * @return the {@link ServiceCall} object */ ServiceCall postWithConstantInBodyAsync(Product body, final ServiceCallback<Product> serviceCallback) throws IllegalArgumentException; }
mit
thedevicers/w.draft
app/src/main/java/com/architjn/materialicons/others/SpacesItemDecoration.java
809
package com.architjn.materialicons.others; import android.graphics.Rect; import android.support.v7.widget.RecyclerView; import android.view.View; public class SpacesItemDecoration extends RecyclerView.ItemDecoration { private int space, columns; public SpacesItemDecoration(int space, int columns) { this.space = space; this.columns = columns; } @Override public void getItemOffsets(Rect outRect, View view, RecyclerView parent, RecyclerView.State state) { outRect.left = space / 2; outRect.right = space / 2; outRect.bottom = space; // Add top margin only for the first item to avoid double space between items if (parent.getChildPosition(view) < columns) outRect.top = space; } }
mit
WelcomeHUME/svn-caucho-com-resin
modules/kernel/src/com/caucho/vfs/FilesystemPath.java
9063
/* * Copyright (c) 1998-2012 Caucho Technology -- all rights reserved * * This file is part of Resin(R) Open Source * * Each copy or derived work must preserve the copyright notice and this * notice unmodified. * * Resin Open Source is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * Resin Open Source is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty * of NON-INFRINGEMENT. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with Resin Open Source; if not, write to the * * Free Software Foundation, Inc. * 59 Temple Place, Suite 330 * Boston, MA 02111-1307 USA * * @author Scott Ferguson */ package com.caucho.vfs; import com.caucho.util.CharBuffer; import java.util.Map; /** * Abstract FilesystemPath, the parent of hierarchical Paths like * FilePath or HttpPath. 
*/ abstract public class FilesystemPath extends Path { protected FilesystemPath _root; protected BindPath _bindRoot; protected String _pathname; protected String _userPath; /** * Create a new filesystemPath * * @param root Root of url space * @param userPath the user's path * @param pathname Canonical path */ protected FilesystemPath(FilesystemPath root, String userPath, String pathname) { super(root); init(root, userPath, pathname); } protected void init(FilesystemPath root, String userPath, String pathname) { if (pathname == null) throw new NullPointerException(); _userPath = userPath; _pathname = pathname; if (root != null) { _root = root; _bindRoot = root._bindRoot; } } /** * Return the parent Path */ @Override public Path getParent() { if (_pathname.length() <= 1) return lookup("/"); int length = _pathname.length(); int lastSlash = _pathname.lastIndexOf('/'); if (lastSlash < 1) return lookup("/"); if (lastSlash == length - 1) { lastSlash = _pathname.lastIndexOf('/', length - 2); if (lastSlash < 1) return lookup("/"); } return lookup(_pathname.substring(0, lastSlash)); } /** * schemeWalk is called by Path for a scheme lookup like file:/tmp/foo * * @param userPath the user's lookup() path * @param attributes the user's attributes * @param filePath the actual lookup() path * @param offset offset into filePath */ @Override public Path schemeWalk(String userPath, Map<String,Object> attributes, String filePath, int offset) { String canonicalPath; if (filePath.length() > offset && (filePath.charAt(offset) == '/' || filePath.charAt(offset) == _separatorChar)) canonicalPath = normalizePath("/", filePath, offset, _separatorChar); else canonicalPath = normalizePath(_pathname, filePath, offset, _separatorChar); return fsWalk(userPath, attributes, canonicalPath); } /** * Lookup a path relative to the current filesystem's root. * Filesystems will specialize fsWalk. 
* * @param userPath the exact string passed by the user's lookup() * @param newAttributes the user's new attributes * @param newPath the normalized real path * * @return the matching path */ abstract public Path fsWalk(String userPath, Map<String,Object> newAttributes, String newPath); /** * wrapper for the real normalize path routine to use CharBuffer. * * @param oldPath The parent Path's path * @param newPath The user's new path * @param offset Offset into the user path * * @return the normalized path */ static protected String normalizePath(String oldPath, String newPath, int offset, char separatorChar) { CharBuffer cb = new CharBuffer(); normalizePath(cb, oldPath, newPath, offset, separatorChar); return cb.toString(); } /** * Normalizes a filesystemPath path. * * <ul> * <li>foo//bar -> foo/bar * <li>foo/./bar -> foo/bar * <li>foo/../bar -> bar * <li>/../bar -> /bar * </ul> * * @param cb charBuffer holding the normalized result * @param oldPath the parent path * @param newPath the relative path * @param offset where in the child path to start */ static protected void normalizePath(CharBuffer cb, String oldPath, String newPath, int offset, char separatorChar) { cb.clear(); cb.append(oldPath); if (cb.length() == 0 || cb.getLastChar() != '/') cb.append('/'); int length = newPath.length(); int i = offset; while (i < length) { char ch = newPath.charAt(i); char ch2; switch (ch) { default: if (ch != separatorChar) { cb.append(ch); i++; break; } // the separator character falls through to be treated as '/' case '/': // "//" -> "/" if (cb.getLastChar() != '/') cb.append('/'); i++; break; case '.': if (cb.getLastChar() != '/') { cb.append('.'); i++; break; } // "/." -> "" if (i + 1 >= length) { i += 2; break; } switch (newPath.charAt(i + 1)) { default: if (newPath.charAt(i + 1) != separatorChar) { cb.append('.'); i++; break; } // the separator falls through to be treated as '/' // "/./" -> "/" case '/': i += 2; break; // "foo/.." 
-> "" case '.': if ((i + 2 >= length || (ch2 = newPath.charAt(i + 2)) == '/' || ch2 == separatorChar) && cb.getLastChar() == '/') { int segment = cb.lastIndexOf('/', cb.length() - 2); if (segment == -1) { cb.clear(); cb.append('/'); } else cb.setLength(segment + 1); i += 3; } else { cb.append('.'); i++; } break; } } } // strip trailing "/" /* if (cb.length() > 1 && cb.getLastChar() == '/') cb.setLength(cb.length() - 1); */ } /** * Returns the root. */ public FilesystemPath getRoot() { return _root; } /** * Returns the path portion of the URL. */ public String getPath() { return _pathname; } /** * Return's the application's name for the path, e.g. for * a relative path. */ public String getUserPath() { return _userPath != null ? _userPath : _pathname; } public void setUserPath(String path) { _userPath = path; } /** * For chrooted filesystems return the real system path. */ public String getFullPath() { if (_root == this || _root == null) return getPath(); String rootPath = _root.getFullPath(); String path = getPath(); if (rootPath.length() <= 1) return path; else if (path.length() <= 1) return rootPath; else return rootPath + path; } public String getTail() { String path = getPath(); int length = path.length(); int p = path.lastIndexOf('/'); if (p == -1) return ""; else if (p < length - 1) return path.substring(p + 1); else { p = path.lastIndexOf('/', length - 2); if (p < 0) return ""; return path.substring(p + 1, length - 1); } } /** * Essentially chroot */ public Path createRoot(SchemeMap schemeMap) { FilesystemPath restriction = (FilesystemPath) copy(); restriction._schemeMap = schemeMap; restriction._root = this; restriction._pathname = "/"; restriction._userPath = "/"; return restriction; } public void bind(Path context) { if (_bindRoot == null) _bindRoot = _root._bindRoot; if (_bindRoot == null) { _bindRoot = new BindPath(_root); _root._bindRoot = _bindRoot; } _bindRoot.bind(getPath(), context); } public int hashCode() { return getURL().hashCode(); } public 
boolean equals(Object b) { if (this == b) return true; else if (b == null || ! getClass().equals(b.getClass())) return false; Path bPath = (Path) b; return getURL().equals(bPath.getURL()); } }
gpl-2.0
mdaniel/svn-caucho-com-resin
modules/kernel/src/com/caucho/bytecode/ByteCodeParser.java
13508
/* * Copyright (c) 1998-2012 Caucho Technology -- all rights reserved * * This file is part of Resin(R) Open Source * * Each copy or derived work must preserve the copyright notice and this * notice unmodified. * * Resin Open Source is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * Resin Open Source is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty * of NON-INFRINGEMENT. See the GNU General Public License for more * details. * * You should have received a copy of the GNU General Public License * along with Resin Open Source; if not, write to the * * Free Software Foundation, Inc. * 59 Temple Place, Suite 330 * Boston, MA 02111-1307 USA * * @author Scott Ferguson */ package com.caucho.bytecode; import com.caucho.util.CharBuffer; import com.caucho.util.L10N; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; /** * Interface to the bytecode parser. 
*/ public class ByteCodeParser { private static final L10N L = new L10N(ByteCode.class); static final int CP_CLASS = 7; static final int CP_FIELD_REF = 9; static final int CP_METHOD_REF = 10; static final int CP_INTERFACE_METHOD_REF = 11; static final int CP_STRING = 8; static final int CP_INTEGER = 3; static final int CP_FLOAT = 4; static final int CP_LONG = 5; static final int CP_DOUBLE = 6; static final int CP_NAME_AND_TYPE = 12; static final int CP_UTF8 = 1; static final int CP_METHOD_HANDLE = 15; static final int CP_METHOD_TYPE = 16; static final int CP_INVOKE_DYNAMIC = 18; private JavaClassLoader _loader; private InputStream _is; private JavaClass _class; private ConstantPool _cp; /** * Sets the JClassLoader */ public void setClassLoader(JavaClassLoader loader) { _loader = loader; } /** * Sets the class. */ public void setJavaClass(JavaClass javaClass) { _class = javaClass; } /** * Parses the .class file. */ public JavaClass parse(InputStream is) throws IOException { _is = is; if (_loader == null) _loader = new JavaClassLoader(); if (_class == null) _class = new JavaClass(_loader); _cp = _class.getConstantPool(); parseClass(); return _class; } /** * Returns the constant pool. */ public ConstantPool getConstantPool() { return _cp; } /** * Returns a UTF8 String from the constant pool. 
*/ public String getUTF8(int index) { return getConstantPool().getUtf8AsString(index); } /** * Parses the ClassFile construct */ private void parseClass() throws IOException { int magic = readInt(); if (magic != JavaClass.MAGIC) throw error(L.l("bad magic number in class file")); int minor = readShort(); int major = readShort(); _class.setMajor(major); _class.setMinor(minor); parseConstantPool(); int accessFlags = readShort(); _class.setAccessFlags(accessFlags); int thisClassIndex = readShort(); _class.setThisClass(_cp.getClass(thisClassIndex).getName()); int superClassIndex = readShort(); if (superClassIndex > 0) _class.setSuperClass(_cp.getClass(superClassIndex).getName()); int interfaceCount = readShort(); for (int i = 0; i < interfaceCount; i++) { int classIndex = readShort(); _class.addInterface(_cp.getClass(classIndex).getName()); } int fieldCount = readShort(); for (int i = 0; i < fieldCount; i++) { parseField(); } int methodCount = readShort(); for (int i = 0; i < methodCount; i++) parseMethod(); int attrCount = readShort(); for (int i = 0; i < attrCount; i++) { Attribute attr = parseAttribute(); _class.addAttribute(attr); } } /** * Parses the constant pool. */ public void parseConstantPool() throws IOException { int count = readShort(); for (int i = 1; i < count; i++) { ConstantPoolEntry entry = parseConstantPoolEntry(i); _cp.addConstant(entry); if (entry instanceof DoubleConstant || entry instanceof LongConstant) { i++; _cp.addConstant(null); } } } /** * Parses a constant pool entry. 
*/ private ConstantPoolEntry parseConstantPoolEntry(int index) throws IOException { int tag = read(); switch (tag) { case CP_CLASS: return parseClassConstant(index); case CP_FIELD_REF: return parseFieldRefConstant(index); case CP_METHOD_REF: return parseMethodRefConstant(index); case CP_INTERFACE_METHOD_REF: return parseInterfaceMethodRefConstant(index); case CP_STRING: return parseStringConstant(index); case CP_INTEGER: return parseIntegerConstant(index); case CP_FLOAT: return parseFloatConstant(index); case CP_LONG: return parseLongConstant(index); case CP_DOUBLE: return parseDoubleConstant(index); case CP_NAME_AND_TYPE: return parseNameAndTypeConstant(index); case CP_UTF8: return parseUtf8Constant(index); default: throw error(L.l("'{0}' is an unknown constant pool type.", tag)); } } /** * Parses a class constant pool entry. */ private ClassConstant parseClassConstant(int index) throws IOException { int nameIndex = readShort(); return new ClassConstant(_class.getConstantPool(), index, nameIndex); } /** * Parses a field ref constant pool entry. */ private FieldRefConstant parseFieldRefConstant(int index) throws IOException { int classIndex = readShort(); int nameAndTypeIndex = readShort(); return new FieldRefConstant(_class.getConstantPool(), index, classIndex, nameAndTypeIndex); } /** * Parses a method ref constant pool entry. */ private MethodRefConstant parseMethodRefConstant(int index) throws IOException { int classIndex = readShort(); int nameAndTypeIndex = readShort(); return new MethodRefConstant(_class.getConstantPool(), index, classIndex, nameAndTypeIndex); } /** * Parses an interface method ref constant pool entry. */ private InterfaceMethodRefConstant parseInterfaceMethodRefConstant(int index) throws IOException { int classIndex = readShort(); int nameAndTypeIndex = readShort(); return new InterfaceMethodRefConstant(_class.getConstantPool(), index, classIndex, nameAndTypeIndex); } /** * Parses a string constant pool entry. 
*/ private StringConstant parseStringConstant(int index) throws IOException { int stringIndex = readShort(); return new StringConstant(_class.getConstantPool(), index, stringIndex); } /** * Parses an integer constant pool entry. */ private IntegerConstant parseIntegerConstant(int index) throws IOException { int value = readInt(); return new IntegerConstant(_class.getConstantPool(), index, value); } /** * Parses a float constant pool entry. */ private FloatConstant parseFloatConstant(int index) throws IOException { int bits = readInt(); float value = Float.intBitsToFloat(bits); return new FloatConstant(_class.getConstantPool(), index, value); } /** * Parses a long constant pool entry. */ private LongConstant parseLongConstant(int index) throws IOException { long value = readLong(); return new LongConstant(_class.getConstantPool(), index, value); } /** * Parses a double constant pool entry. */ private DoubleConstant parseDoubleConstant(int index) throws IOException { long bits = readLong(); double value = Double.longBitsToDouble(bits); return new DoubleConstant(_class.getConstantPool(), index, value); } /** * Parses a name and type pool entry. */ private NameAndTypeConstant parseNameAndTypeConstant(int index) throws IOException { int nameIndex = readShort(); int descriptorIndex = readShort(); return new NameAndTypeConstant(_class.getConstantPool(), index, nameIndex, descriptorIndex); } /** * Parses a utf-8 constant pool entry. 
*/ private Utf8Constant parseUtf8Constant(int index) throws IOException { int length = readShort(); CharBuffer cb = CharBuffer.allocate(); for (int i = 0; i < length; i++) { int ch = read(); if (ch < 0x80) { cb.append((char) ch); } else if ((ch & 0xe0) == 0xc0) { int ch2 = read(); i++; cb.append((char) (((ch & 0x1f) << 6)+ (ch2 & 0x3f))); } else { int ch2 = read(); int ch3 = read(); i += 2; cb.append((char) (((ch & 0xf) << 12)+ ((ch2 & 0x3f) << 6) + ((ch3 & 0x3f)))); } } return new Utf8Constant(_class.getConstantPool(), index, cb.close()); } /** * Parses a field entry. */ private void parseField() throws IOException { int accessFlags = readShort(); int nameIndex = readShort(); int descriptorIndex = readShort(); JavaField field = new JavaField(); field.setJavaClass(_class); field.setName(_cp.getUtf8(nameIndex).getValue()); field.setDescriptor(_cp.getUtf8(descriptorIndex).getValue()); field.setAccessFlags(accessFlags); int attributesCount = readShort(); for (int i = 0; i < attributesCount; i++) { Attribute attr = parseAttribute(); field.addAttribute(attr); } _class.addField(field); } /** * Parses a method entry. 
*/ private void parseMethod() throws IOException { int accessFlags = readShort(); int nameIndex = readShort(); int descriptorIndex = readShort(); JavaMethod method = new JavaMethod(_loader); method.setJavaClass(_class); method.setName(_cp.getUtf8(nameIndex).getValue()); method.setDescriptor(_cp.getUtf8(descriptorIndex).getValue()); method.setAccessFlags(accessFlags); int attributesCount = readShort(); for (int i = 0; i < attributesCount; i++) { Attribute attr = parseAttribute(); method.addAttribute(attr); if (attr instanceof ExceptionsAttribute) { ExceptionsAttribute exn = (ExceptionsAttribute) attr; ArrayList<String> exnNames = exn.getExceptionList(); if (exnNames.size() > 0) { JClass []exnClasses = new JClass[exnNames.size()]; for (int j = 0; j < exnNames.size(); j++) { String exnName = exnNames.get(j).replace('/', '.'); exnClasses[j] = _loader.forName(exnName); } method.setExceptionTypes(exnClasses); } } } _class.addMethod(method); } /** * Parses an attribute. */ Attribute parseAttribute() throws IOException { int nameIndex = readShort(); String name = _cp.getUtf8(nameIndex).getValue(); if (name.equals("Code")) { CodeAttribute code = new CodeAttribute(name); code.read(this); return code; } else if (name.equals("Exceptions")) { ExceptionsAttribute code = new ExceptionsAttribute(name); code.read(this); return code; } else if (name.equals("Signature")) { SignatureAttribute code = new SignatureAttribute(); code.read(this); return code; } OpaqueAttribute attr = new OpaqueAttribute(name); int length = readInt(); byte []bytes = new byte[length]; read(bytes, 0, bytes.length); attr.setValue(bytes); return attr; } /** * Parses a 64-bit int. */ long readLong() throws IOException { return (((long) _is.read() << 56) | ((long) _is.read() << 48) | ((long) _is.read() << 40) | ((long) _is.read() << 32) | ((long) _is.read() << 24) | ((long) _is.read() << 16) | ((long) _is.read() << 8) | ((long) _is.read())); } /** * Parses a 32-bit int. 
*/ int readInt() throws IOException { return ((_is.read() << 24) | (_is.read() << 16) | (_is.read() << 8) | (_is.read())); } /** * Parses a 16-bit int. */ int readShort() throws IOException { int c1 = _is.read(); int c2 = _is.read(); return ((c1 << 8) | c2); } /** * Parses a byte */ int read() throws IOException { return _is.read(); } /** * Reads a chunk */ int read(byte []buffer, int offset, int length) throws IOException { int readLength = 0; while (length > 0) { int sublen = _is.read(buffer, offset, length); if (sublen < 0) return readLength == 0 ? -1 : readLength; offset += sublen; length -= sublen; readLength += sublen; } return readLength; } /** * Returns an error message. */ private IOException error(String message) { return new IOException(message); } }
gpl-2.0
haitaoyao/btrace
src/share/classes/com/sun/btrace/annotations/ProbeClassName.java
1746
/* * Copyright 2008-2010 Sun Microsystems, Inc. All Rights Reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Sun designates this * particular file as subject to the "Classpath" exception as provided * by Sun in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara, * CA 95054 USA or visit www.sun.com if you need additional information or * have any questions. */ package com.sun.btrace.annotations; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; /** * It is used to mark a probe method argument as the receiver of the probe target class name<br> * Applicable only for {@linkplain OnMethod} annotation * * @author Jaroslav Bachorik <jaroslav.bachorik@sun.com> * @since 1.1 */ @Target(ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface ProbeClassName { }
gpl-2.0
geosolutions-it/geoserver-exts
web/app/src/test/java/org/opengeo/geoserver/StartCluster.java
1787
package org.opengeo.geoserver;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.bio.SocketConnector;
import org.mortbay.jetty.webapp.WebAppContext;

/**
 * Developer utility that launches two embedded Jetty instances (ports 8000
 * and 8001), each serving the GeoServer webapp from src/main/webapp with a
 * node-specific work and log directory under target/, for clustering tests.
 */
public class StartCluster {

    public static void main(String[] args) throws Exception {
        // Disable GWC metastore/diskquota so the nodes do not contend over
        // shared on-disk state.
        System.setProperty("GWC_METASTORE_DISABLED", "true");
        System.setProperty("GWC_DISKQUOTA_DISABLED", "true");

        for (int i = 0; i < 2; i++) {
            startJettyServer("jetty" + i, 8000 + i);
        }
    }

    /**
     * Starts a single Jetty server serving GeoServer at /geoserver.
     *
     * @param name node name, used for the per-node base directory target/&lt;name&gt;
     * @param port TCP port for the blocking socket connector
     * @return the started server
     * @throws Exception if a directory cannot be created or the server fails to start
     */
    static Server startJettyServer(String name, int port) throws Exception {
        Server jettyServer = new Server();

        SocketConnector conn = new SocketConnector();
        conn.setPort(port);
        conn.setAcceptQueueSize(100);
        conn.setMaxIdleTime(1000 * 60 * 60);
        conn.setSoLingerTime(-1);

        WebAppContext wah = new WebAppContext();
        wah.setContextPath("/geoserver");
        wah.setWar("src/main/webapp");

        // create a node specific base directory; fail fast instead of
        // silently ignoring the mkdirs() result as before
        File nodeDir = new File("target/" + name);
        ensureDirectory(nodeDir);
        wah.setTempDirectory(new File(nodeDir, "work"));

        // this allows to send large SLD's from the styles form
        wah.getServletContext().getContextHandler().setMaxFormContentSize(1024 * 1024 * 2);

        File logDir = new File(nodeDir, "logs");
        ensureDirectory(logDir);

        // was a raw Map/HashMap; parameterized for type safety
        Map<String, String> initParams = new HashMap<String, String>();
        initParams.put("GEOSERVER_LOG_LOCATION", logDir.getPath() + "/geoserver.log");
        wah.setInitParams(initParams);

        jettyServer.setHandler(wah);
        jettyServer.setConnectors(new Connector[] { conn });
        jettyServer.start();

        return jettyServer;
    }

    /** Creates dir (and parents) if missing; throws if it cannot be created. */
    private static void ensureDirectory(File dir) throws IOException {
        if (!dir.mkdirs() && !dir.isDirectory()) {
            throw new IOException("Unable to create directory: " + dir);
        }
    }
}
gpl-2.0
mohlerm/hotspot
test/compiler/codecache/stress/RandomAllocationTest.java
3072
/*
 * Copyright (c) 2014, 2015, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 *
 */

import java.util.ArrayList;

import sun.hotspot.code.BlobType;

/*
 * @test RandomAllocationTest
 * @library /testlibrary /../../test/lib
 * @modules java.base/sun.misc
 *          java.management
 * @build RandomAllocationTest
 * @run main ClassFileInstaller sun.hotspot.WhiteBox
 *                              sun.hotspot.WhiteBox$WhiteBoxPermission
 * @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions
 *                   -XX:CompileCommand=dontinline,Helper$TestCase::method
 *                   -XX:+WhiteBoxAPI -XX:-SegmentedCodeCache RandomAllocationTest
 * @run main/othervm -Xbootclasspath/a:. -XX:+UnlockDiagnosticVMOptions
 *                   -XX:CompileCommand=dontinline,Helper$TestCase::method
 *                   -XX:+WhiteBoxAPI -XX:+SegmentedCodeCache RandomAllocationTest
 * @summary stressing code cache by allocating randomly sized "dummy" code blobs
 */
public class RandomAllocationTest implements Runnable {

    private static final long CODE_CACHE_SIZE
            = Helper.WHITE_BOX.getUintxVMFlag("ReservedCodeCacheSize");
    // cap each dummy blob at 1/128th of the reserved code cache
    private static final int MAX_BLOB_SIZE = (int) (CODE_CACHE_SIZE >> 7);
    private static final BlobType[] BLOB_TYPES
            = BlobType.getAvailable().toArray(new BlobType[0]);

    public static void main(String[] args) {
        new CodeCacheStressRunner(new RandomAllocationTest()).runTest();
    }

    // addresses of the dummy blobs currently alive
    private final ArrayList<Long> blobs = new ArrayList<>();

    /**
     * One stress step: flips a coin between allocating a randomly sized
     * blob of a random type and freeing a random live blob.  Always
     * allocates when nothing is alive yet.
     */
    @Override
    public void run() {
        boolean doAllocate = blobs.isEmpty() || Helper.RNG.nextBoolean();

        if (!doAllocate) {
            int victim = Helper.RNG.nextInt(blobs.size());
            Helper.WHITE_BOX.freeCodeBlob(blobs.remove(victim));
            return;
        }

        int typeIndex = Helper.RNG.nextInt(BLOB_TYPES.length);
        int blobSize = Helper.RNG.nextInt(MAX_BLOB_SIZE);
        long blobAddress
                = Helper.WHITE_BOX.allocateCodeBlob(blobSize, BLOB_TYPES[typeIndex].id);
        // 0 means the allocation failed (e.g. the code cache is full)
        if (blobAddress != 0) {
            blobs.add(blobAddress);
        }
    }

}
gpl-2.0
biddyweb/checker-framework
checker/jdk/javari/src/com/sun/source/tree/EmptyStatementTree.java
142
package com.sun.source.tree;

import org.checkerframework.checker.javari.qual.*;

/**
 * A tree node for an empty (skip) statement, i.e. a lone ";".
 *
 * <p>Checker Framework (Javari) annotated copy of the JDK Compiler Tree API
 * interface; adds no members of its own.
 */
public interface EmptyStatementTree extends StatementTree {}
gpl-2.0
syslover33/ctank
java/android-sdk-linux_r24.4.1_src/sources/android-23/com/android/framework/multidexlegacytestservices/manymethods/Big003.java
53491
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.android.framework.multidexlegacytestservices.manymethods; public class Big003 { public int get0() { return 0; } public int get1() { return 1; } public int get2() { return 2; } public int get3() { return 3; } public int get4() { return 4; } public int get5() { return 5; } public int get6() { return 6; } public int get7() { return 7; } public int get8() { return 8; } public int get9() { return 9; } public int get10() { return 10; } public int get11() { return 11; } public int get12() { return 12; } public int get13() { return 13; } public int get14() { return 14; } public int get15() { return 15; } public int get16() { return 16; } public int get17() { return 17; } public int get18() { return 18; } public int get19() { return 19; } public int get20() { return 20; } public int get21() { return 21; } public int get22() { return 22; } public int get23() { return 23; } public int get24() { return 24; } public int get25() { return 25; } public int get26() { return 26; } public int get27() { return 27; } public int get28() { return 28; } public int get29() { return 29; } public int get30() { return 30; } public int get31() { return 31; } public int get32() { return 32; } public int get33() { return 33; } public int get34() { return 34; } public int get35() { return 35; } public int get36() { return 36; } public int get37() { return 37; } public int get38() { return 38; } 
public int get39() { return 39; } public int get40() { return 40; } public int get41() { return 41; } public int get42() { return 42; } public int get43() { return 43; } public int get44() { return 44; } public int get45() { return 45; } public int get46() { return 46; } public int get47() { return 47; } public int get48() { return 48; } public int get49() { return 49; } public int get50() { return 50; } public int get51() { return 51; } public int get52() { return 52; } public int get53() { return 53; } public int get54() { return 54; } public int get55() { return 55; } public int get56() { return 56; } public int get57() { return 57; } public int get58() { return 58; } public int get59() { return 59; } public int get60() { return 60; } public int get61() { return 61; } public int get62() { return 62; } public int get63() { return 63; } public int get64() { return 64; } public int get65() { return 65; } public int get66() { return 66; } public int get67() { return 67; } public int get68() { return 68; } public int get69() { return 69; } public int get70() { return 70; } public int get71() { return 71; } public int get72() { return 72; } public int get73() { return 73; } public int get74() { return 74; } public int get75() { return 75; } public int get76() { return 76; } public int get77() { return 77; } public int get78() { return 78; } public int get79() { return 79; } public int get80() { return 80; } public int get81() { return 81; } public int get82() { return 82; } public int get83() { return 83; } public int get84() { return 84; } public int get85() { return 85; } public int get86() { return 86; } public int get87() { return 87; } public int get88() { return 88; } public int get89() { return 89; } public int get90() { return 90; } public int get91() { return 91; } public int get92() { return 92; } public int get93() { return 93; } public int get94() { return 94; } public int get95() { return 95; } public int get96() { return 96; } public int get97() { return 
97; } public int get98() { return 98; } public int get99() { return 99; } public int get100() { return 100; } public int get101() { return 101; } public int get102() { return 102; } public int get103() { return 103; } public int get104() { return 104; } public int get105() { return 105; } public int get106() { return 106; } public int get107() { return 107; } public int get108() { return 108; } public int get109() { return 109; } public int get110() { return 110; } public int get111() { return 111; } public int get112() { return 112; } public int get113() { return 113; } public int get114() { return 114; } public int get115() { return 115; } public int get116() { return 116; } public int get117() { return 117; } public int get118() { return 118; } public int get119() { return 119; } public int get120() { return 120; } public int get121() { return 121; } public int get122() { return 122; } public int get123() { return 123; } public int get124() { return 124; } public int get125() { return 125; } public int get126() { return 126; } public int get127() { return 127; } public int get128() { return 128; } public int get129() { return 129; } public int get130() { return 130; } public int get131() { return 131; } public int get132() { return 132; } public int get133() { return 133; } public int get134() { return 134; } public int get135() { return 135; } public int get136() { return 136; } public int get137() { return 137; } public int get138() { return 138; } public int get139() { return 139; } public int get140() { return 140; } public int get141() { return 141; } public int get142() { return 142; } public int get143() { return 143; } public int get144() { return 144; } public int get145() { return 145; } public int get146() { return 146; } public int get147() { return 147; } public int get148() { return 148; } public int get149() { return 149; } public int get150() { return 150; } public int get151() { return 151; } public int get152() { return 152; } public int 
get153() { return 153; } public int get154() { return 154; } public int get155() { return 155; } public int get156() { return 156; } public int get157() { return 157; } public int get158() { return 158; } public int get159() { return 159; } public int get160() { return 160; } public int get161() { return 161; } public int get162() { return 162; } public int get163() { return 163; } public int get164() { return 164; } public int get165() { return 165; } public int get166() { return 166; } public int get167() { return 167; } public int get168() { return 168; } public int get169() { return 169; } public int get170() { return 170; } public int get171() { return 171; } public int get172() { return 172; } public int get173() { return 173; } public int get174() { return 174; } public int get175() { return 175; } public int get176() { return 176; } public int get177() { return 177; } public int get178() { return 178; } public int get179() { return 179; } public int get180() { return 180; } public int get181() { return 181; } public int get182() { return 182; } public int get183() { return 183; } public int get184() { return 184; } public int get185() { return 185; } public int get186() { return 186; } public int get187() { return 187; } public int get188() { return 188; } public int get189() { return 189; } public int get190() { return 190; } public int get191() { return 191; } public int get192() { return 192; } public int get193() { return 193; } public int get194() { return 194; } public int get195() { return 195; } public int get196() { return 196; } public int get197() { return 197; } public int get198() { return 198; } public int get199() { return 199; } public int get200() { return 200; } public int get201() { return 201; } public int get202() { return 202; } public int get203() { return 203; } public int get204() { return 204; } public int get205() { return 205; } public int get206() { return 206; } public int get207() { return 207; } public int get208() { return 
208; } public int get209() { return 209; } public int get210() { return 210; } public int get211() { return 211; } public int get212() { return 212; } public int get213() { return 213; } public int get214() { return 214; } public int get215() { return 215; } public int get216() { return 216; } public int get217() { return 217; } public int get218() { return 218; } public int get219() { return 219; } public int get220() { return 220; } public int get221() { return 221; } public int get222() { return 222; } public int get223() { return 223; } public int get224() { return 224; } public int get225() { return 225; } public int get226() { return 226; } public int get227() { return 227; } public int get228() { return 228; } public int get229() { return 229; } public int get230() { return 230; } public int get231() { return 231; } public int get232() { return 232; } public int get233() { return 233; } public int get234() { return 234; } public int get235() { return 235; } public int get236() { return 236; } public int get237() { return 237; } public int get238() { return 238; } public int get239() { return 239; } public int get240() { return 240; } public int get241() { return 241; } public int get242() { return 242; } public int get243() { return 243; } public int get244() { return 244; } public int get245() { return 245; } public int get246() { return 246; } public int get247() { return 247; } public int get248() { return 248; } public int get249() { return 249; } public int get250() { return 250; } public int get251() { return 251; } public int get252() { return 252; } public int get253() { return 253; } public int get254() { return 254; } public int get255() { return 255; } public int get256() { return 256; } public int get257() { return 257; } public int get258() { return 258; } public int get259() { return 259; } public int get260() { return 260; } public int get261() { return 261; } public int get262() { return 262; } public int get263() { return 263; } public int 
get264() { return 264; } public int get265() { return 265; } public int get266() { return 266; } public int get267() { return 267; } public int get268() { return 268; } public int get269() { return 269; } public int get270() { return 270; } public int get271() { return 271; } public int get272() { return 272; } public int get273() { return 273; } public int get274() { return 274; } public int get275() { return 275; } public int get276() { return 276; } public int get277() { return 277; } public int get278() { return 278; } public int get279() { return 279; } public int get280() { return 280; } public int get281() { return 281; } public int get282() { return 282; } public int get283() { return 283; } public int get284() { return 284; } public int get285() { return 285; } public int get286() { return 286; } public int get287() { return 287; } public int get288() { return 288; } public int get289() { return 289; } public int get290() { return 290; } public int get291() { return 291; } public int get292() { return 292; } public int get293() { return 293; } public int get294() { return 294; } public int get295() { return 295; } public int get296() { return 296; } public int get297() { return 297; } public int get298() { return 298; } public int get299() { return 299; } public int get300() { return 300; } public int get301() { return 301; } public int get302() { return 302; } public int get303() { return 303; } public int get304() { return 304; } public int get305() { return 305; } public int get306() { return 306; } public int get307() { return 307; } public int get308() { return 308; } public int get309() { return 309; } public int get310() { return 310; } public int get311() { return 311; } public int get312() { return 312; } public int get313() { return 313; } public int get314() { return 314; } public int get315() { return 315; } public int get316() { return 316; } public int get317() { return 317; } public int get318() { return 318; } public int get319() { return 
319; } public int get320() { return 320; } public int get321() { return 321; } public int get322() { return 322; } public int get323() { return 323; } public int get324() { return 324; } public int get325() { return 325; } public int get326() { return 326; } public int get327() { return 327; } public int get328() { return 328; } public int get329() { return 329; } public int get330() { return 330; } public int get331() { return 331; } public int get332() { return 332; } public int get333() { return 333; } public int get334() { return 334; } public int get335() { return 335; } public int get336() { return 336; } public int get337() { return 337; } public int get338() { return 338; } public int get339() { return 339; } public int get340() { return 340; } public int get341() { return 341; } public int get342() { return 342; } public int get343() { return 343; } public int get344() { return 344; } public int get345() { return 345; } public int get346() { return 346; } public int get347() { return 347; } public int get348() { return 348; } public int get349() { return 349; } public int get350() { return 350; } public int get351() { return 351; } public int get352() { return 352; } public int get353() { return 353; } public int get354() { return 354; } public int get355() { return 355; } public int get356() { return 356; } public int get357() { return 357; } public int get358() { return 358; } public int get359() { return 359; } public int get360() { return 360; } public int get361() { return 361; } public int get362() { return 362; } public int get363() { return 363; } public int get364() { return 364; } public int get365() { return 365; } public int get366() { return 366; } public int get367() { return 367; } public int get368() { return 368; } public int get369() { return 369; } public int get370() { return 370; } public int get371() { return 371; } public int get372() { return 372; } public int get373() { return 373; } public int get374() { return 374; } public int 
get375() { return 375; } public int get376() { return 376; } public int get377() { return 377; } public int get378() { return 378; } public int get379() { return 379; } public int get380() { return 380; } public int get381() { return 381; } public int get382() { return 382; } public int get383() { return 383; } public int get384() { return 384; } public int get385() { return 385; } public int get386() { return 386; } public int get387() { return 387; } public int get388() { return 388; } public int get389() { return 389; } public int get390() { return 390; } public int get391() { return 391; } public int get392() { return 392; } public int get393() { return 393; } public int get394() { return 394; } public int get395() { return 395; } public int get396() { return 396; } public int get397() { return 397; } public int get398() { return 398; } public int get399() { return 399; } public int get400() { return 400; } public int get401() { return 401; } public int get402() { return 402; } public int get403() { return 403; } public int get404() { return 404; } public int get405() { return 405; } public int get406() { return 406; } public int get407() { return 407; } public int get408() { return 408; } public int get409() { return 409; } public int get410() { return 410; } public int get411() { return 411; } public int get412() { return 412; } public int get413() { return 413; } public int get414() { return 414; } public int get415() { return 415; } public int get416() { return 416; } public int get417() { return 417; } public int get418() { return 418; } public int get419() { return 419; } public int get420() { return 420; } public int get421() { return 421; } public int get422() { return 422; } public int get423() { return 423; } public int get424() { return 424; } public int get425() { return 425; } public int get426() { return 426; } public int get427() { return 427; } public int get428() { return 428; } public int get429() { return 429; } public int get430() { return 
430; } public int get431() { return 431; } public int get432() { return 432; } public int get433() { return 433; } public int get434() { return 434; } public int get435() { return 435; } public int get436() { return 436; } public int get437() { return 437; } public int get438() { return 438; } public int get439() { return 439; } public int get440() { return 440; } public int get441() { return 441; } public int get442() { return 442; } public int get443() { return 443; } public int get444() { return 444; } public int get445() { return 445; } public int get446() { return 446; } public int get447() { return 447; } public int get448() { return 448; } public int get449() { return 449; } public int get450() { return 450; } public int get451() { return 451; } public int get452() { return 452; } public int get453() { return 453; } public int get454() { return 454; } public int get455() { return 455; } public int get456() { return 456; } public int get457() { return 457; } public int get458() { return 458; } public int get459() { return 459; } public int get460() { return 460; } public int get461() { return 461; } public int get462() { return 462; } public int get463() { return 463; } public int get464() { return 464; } public int get465() { return 465; } public int get466() { return 466; } public int get467() { return 467; } public int get468() { return 468; } public int get469() { return 469; } public int get470() { return 470; } public int get471() { return 471; } public int get472() { return 472; } public int get473() { return 473; } public int get474() { return 474; } public int get475() { return 475; } public int get476() { return 476; } public int get477() { return 477; } public int get478() { return 478; } public int get479() { return 479; } public int get480() { return 480; } public int get481() { return 481; } public int get482() { return 482; } public int get483() { return 483; } public int get484() { return 484; } public int get485() { return 485; } public int 
get486() { return 486; } public int get487() { return 487; } public int get488() { return 488; } public int get489() { return 489; } public int get490() { return 490; } public int get491() { return 491; } public int get492() { return 492; } public int get493() { return 493; } public int get494() { return 494; } public int get495() { return 495; } public int get496() { return 496; } public int get497() { return 497; } public int get498() { return 498; } public int get499() { return 499; } public int get500() { return 500; } public int get501() { return 501; } public int get502() { return 502; } public int get503() { return 503; } public int get504() { return 504; } public int get505() { return 505; } public int get506() { return 506; } public int get507() { return 507; } public int get508() { return 508; } public int get509() { return 509; } public int get510() { return 510; } public int get511() { return 511; } public int get512() { return 512; } public int get513() { return 513; } public int get514() { return 514; } public int get515() { return 515; } public int get516() { return 516; } public int get517() { return 517; } public int get518() { return 518; } public int get519() { return 519; } public int get520() { return 520; } public int get521() { return 521; } public int get522() { return 522; } public int get523() { return 523; } public int get524() { return 524; } public int get525() { return 525; } public int get526() { return 526; } public int get527() { return 527; } public int get528() { return 528; } public int get529() { return 529; } public int get530() { return 530; } public int get531() { return 531; } public int get532() { return 532; } public int get533() { return 533; } public int get534() { return 534; } public int get535() { return 535; } public int get536() { return 536; } public int get537() { return 537; } public int get538() { return 538; } public int get539() { return 539; } public int get540() { return 540; } public int get541() { return 
541; } public int get542() { return 542; } public int get543() { return 543; } public int get544() { return 544; } public int get545() { return 545; } public int get546() { return 546; } public int get547() { return 547; } public int get548() { return 548; } public int get549() { return 549; } public int get550() { return 550; } public int get551() { return 551; } public int get552() { return 552; } public int get553() { return 553; } public int get554() { return 554; } public int get555() { return 555; } public int get556() { return 556; } public int get557() { return 557; } public int get558() { return 558; } public int get559() { return 559; } public int get560() { return 560; } public int get561() { return 561; } public int get562() { return 562; } public int get563() { return 563; } public int get564() { return 564; } public int get565() { return 565; } public int get566() { return 566; } public int get567() { return 567; } public int get568() { return 568; } public int get569() { return 569; } public int get570() { return 570; } public int get571() { return 571; } public int get572() { return 572; } public int get573() { return 573; } public int get574() { return 574; } public int get575() { return 575; } public int get576() { return 576; } public int get577() { return 577; } public int get578() { return 578; } public int get579() { return 579; } public int get580() { return 580; } public int get581() { return 581; } public int get582() { return 582; } public int get583() { return 583; } public int get584() { return 584; } public int get585() { return 585; } public int get586() { return 586; } public int get587() { return 587; } public int get588() { return 588; } public int get589() { return 589; } public int get590() { return 590; } public int get591() { return 591; } public int get592() { return 592; } public int get593() { return 593; } public int get594() { return 594; } public int get595() { return 595; } public int get596() { return 596; } public int 
get597() { return 597; } public int get598() { return 598; } public int get599() { return 599; } public int get600() { return 600; } public int get601() { return 601; } public int get602() { return 602; } public int get603() { return 603; } public int get604() { return 604; } public int get605() { return 605; } public int get606() { return 606; } public int get607() { return 607; } public int get608() { return 608; } public int get609() { return 609; } public int get610() { return 610; } public int get611() { return 611; } public int get612() { return 612; } public int get613() { return 613; } public int get614() { return 614; } public int get615() { return 615; } public int get616() { return 616; } public int get617() { return 617; } public int get618() { return 618; } public int get619() { return 619; } public int get620() { return 620; } public int get621() { return 621; } public int get622() { return 622; } public int get623() { return 623; } public int get624() { return 624; } public int get625() { return 625; } public int get626() { return 626; } public int get627() { return 627; } public int get628() { return 628; } public int get629() { return 629; } public int get630() { return 630; } public int get631() { return 631; } public int get632() { return 632; } public int get633() { return 633; } public int get634() { return 634; } public int get635() { return 635; } public int get636() { return 636; } public int get637() { return 637; } public int get638() { return 638; } public int get639() { return 639; } public int get640() { return 640; } public int get641() { return 641; } public int get642() { return 642; } public int get643() { return 643; } public int get644() { return 644; } public int get645() { return 645; } public int get646() { return 646; } public int get647() { return 647; } public int get648() { return 648; } public int get649() { return 649; } public int get650() { return 650; } public int get651() { return 651; } public int get652() { return 
652; } public int get653() { return 653; } public int get654() { return 654; } public int get655() { return 655; } public int get656() { return 656; } public int get657() { return 657; } public int get658() { return 658; } public int get659() { return 659; } public int get660() { return 660; } public int get661() { return 661; } public int get662() { return 662; } public int get663() { return 663; } public int get664() { return 664; } public int get665() { return 665; } public int get666() { return 666; } public int get667() { return 667; } public int get668() { return 668; } public int get669() { return 669; } public int get670() { return 670; } public int get671() { return 671; } public int get672() { return 672; } public int get673() { return 673; } public int get674() { return 674; } public int get675() { return 675; } public int get676() { return 676; } public int get677() { return 677; } public int get678() { return 678; } public int get679() { return 679; } public int get680() { return 680; } public int get681() { return 681; } public int get682() { return 682; } public int get683() { return 683; } public int get684() { return 684; } public int get685() { return 685; } public int get686() { return 686; } public int get687() { return 687; } public int get688() { return 688; } public int get689() { return 689; } public int get690() { return 690; } public int get691() { return 691; } public int get692() { return 692; } public int get693() { return 693; } public int get694() { return 694; } public int get695() { return 695; } public int get696() { return 696; } public int get697() { return 697; } public int get698() { return 698; } public int get699() { return 699; } public int get700() { return 700; } public int get701() { return 701; } public int get702() { return 702; } public int get703() { return 703; } public int get704() { return 704; } public int get705() { return 705; } public int get706() { return 706; } public int get707() { return 707; } public int 
get708() { return 708; } public int get709() { return 709; } public int get710() { return 710; } public int get711() { return 711; } public int get712() { return 712; } public int get713() { return 713; } public int get714() { return 714; } public int get715() { return 715; } public int get716() { return 716; } public int get717() { return 717; } public int get718() { return 718; } public int get719() { return 719; } public int get720() { return 720; } public int get721() { return 721; } public int get722() { return 722; } public int get723() { return 723; } public int get724() { return 724; } public int get725() { return 725; } public int get726() { return 726; } public int get727() { return 727; } public int get728() { return 728; } public int get729() { return 729; } public int get730() { return 730; } public int get731() { return 731; } public int get732() { return 732; } public int get733() { return 733; } public int get734() { return 734; } public int get735() { return 735; } public int get736() { return 736; } public int get737() { return 737; } public int get738() { return 738; } public int get739() { return 739; } public int get740() { return 740; } public int get741() { return 741; } public int get742() { return 742; } public int get743() { return 743; } public int get744() { return 744; } public int get745() { return 745; } public int get746() { return 746; } public int get747() { return 747; } public int get748() { return 748; } public int get749() { return 749; } public int get750() { return 750; } public int get751() { return 751; } public int get752() { return 752; } public int get753() { return 753; } public int get754() { return 754; } public int get755() { return 755; } public int get756() { return 756; } public int get757() { return 757; } public int get758() { return 758; } public int get759() { return 759; } public int get760() { return 760; } public int get761() { return 761; } public int get762() { return 762; } public int get763() { return 
763; } public int get764() { return 764; } public int get765() { return 765; } public int get766() { return 766; } public int get767() { return 767; } public int get768() { return 768; } public int get769() { return 769; } public int get770() { return 770; } public int get771() { return 771; } public int get772() { return 772; } public int get773() { return 773; } public int get774() { return 774; } public int get775() { return 775; } public int get776() { return 776; } public int get777() { return 777; } public int get778() { return 778; } public int get779() { return 779; } public int get780() { return 780; } public int get781() { return 781; } public int get782() { return 782; } public int get783() { return 783; } public int get784() { return 784; } public int get785() { return 785; } public int get786() { return 786; } public int get787() { return 787; } public int get788() { return 788; } public int get789() { return 789; } public int get790() { return 790; } public int get791() { return 791; } public int get792() { return 792; } public int get793() { return 793; } public int get794() { return 794; } public int get795() { return 795; } public int get796() { return 796; } public int get797() { return 797; } public int get798() { return 798; } public int get799() { return 799; } public int get800() { return 800; } public int get801() { return 801; } public int get802() { return 802; } public int get803() { return 803; } public int get804() { return 804; } public int get805() { return 805; } public int get806() { return 806; } public int get807() { return 807; } public int get808() { return 808; } public int get809() { return 809; } public int get810() { return 810; } public int get811() { return 811; } public int get812() { return 812; } public int get813() { return 813; } public int get814() { return 814; } public int get815() { return 815; } public int get816() { return 816; } public int get817() { return 817; } public int get818() { return 818; } public int 
get819() { return 819; } public int get820() { return 820; } public int get821() { return 821; } public int get822() { return 822; } public int get823() { return 823; } public int get824() { return 824; } public int get825() { return 825; } public int get826() { return 826; } public int get827() { return 827; } public int get828() { return 828; } public int get829() { return 829; } public int get830() { return 830; } public int get831() { return 831; } public int get832() { return 832; } public int get833() { return 833; } public int get834() { return 834; } public int get835() { return 835; } public int get836() { return 836; } public int get837() { return 837; } public int get838() { return 838; } public int get839() { return 839; } public int get840() { return 840; } public int get841() { return 841; } public int get842() { return 842; } public int get843() { return 843; } public int get844() { return 844; } public int get845() { return 845; } public int get846() { return 846; } public int get847() { return 847; } public int get848() { return 848; } public int get849() { return 849; } public int get850() { return 850; } public int get851() { return 851; } public int get852() { return 852; } public int get853() { return 853; } public int get854() { return 854; } public int get855() { return 855; } public int get856() { return 856; } public int get857() { return 857; } public int get858() { return 858; } public int get859() { return 859; } public int get860() { return 860; } public int get861() { return 861; } public int get862() { return 862; } public int get863() { return 863; } public int get864() { return 864; } public int get865() { return 865; } public int get866() { return 866; } public int get867() { return 867; } public int get868() { return 868; } public int get869() { return 869; } public int get870() { return 870; } public int get871() { return 871; } public int get872() { return 872; } public int get873() { return 873; } public int get874() { return 
874; } public int get875() { return 875; } public int get876() { return 876; } public int get877() { return 877; } public int get878() { return 878; } public int get879() { return 879; } public int get880() { return 880; } public int get881() { return 881; } public int get882() { return 882; } public int get883() { return 883; } public int get884() { return 884; } public int get885() { return 885; } public int get886() { return 886; } public int get887() { return 887; } public int get888() { return 888; } public int get889() { return 889; } public int get890() { return 890; } public int get891() { return 891; } public int get892() { return 892; } public int get893() { return 893; } public int get894() { return 894; } public int get895() { return 895; } public int get896() { return 896; } public int get897() { return 897; } public int get898() { return 898; } public int get899() { return 899; } public int get900() { return 900; } public int get901() { return 901; } public int get902() { return 902; } public int get903() { return 903; } public int get904() { return 904; } public int get905() { return 905; } public int get906() { return 906; } public int get907() { return 907; } public int get908() { return 908; } public int get909() { return 909; } public int get910() { return 910; } public int get911() { return 911; } public int get912() { return 912; } public int get913() { return 913; } public int get914() { return 914; } public int get915() { return 915; } public int get916() { return 916; } public int get917() { return 917; } public int get918() { return 918; } public int get919() { return 919; } public int get920() { return 920; } public int get921() { return 921; } public int get922() { return 922; } public int get923() { return 923; } public int get924() { return 924; } public int get925() { return 925; } public int get926() { return 926; } public int get927() { return 927; } public int get928() { return 928; } public int get929() { return 929; } public int 
get930() { return 930; } public int get931() { return 931; } public int get932() { return 932; } public int get933() { return 933; } public int get934() { return 934; } public int get935() { return 935; } public int get936() { return 936; } public int get937() { return 937; } public int get938() { return 938; } public int get939() { return 939; } public int get940() { return 940; } public int get941() { return 941; } public int get942() { return 942; } public int get943() { return 943; } public int get944() { return 944; } public int get945() { return 945; } public int get946() { return 946; } public int get947() { return 947; } public int get948() { return 948; } public int get949() { return 949; } public int get950() { return 950; } public int get951() { return 951; } public int get952() { return 952; } public int get953() { return 953; } public int get954() { return 954; } public int get955() { return 955; } public int get956() { return 956; } public int get957() { return 957; } public int get958() { return 958; } public int get959() { return 959; } public int get960() { return 960; } public int get961() { return 961; } public int get962() { return 962; } public int get963() { return 963; } public int get964() { return 964; } public int get965() { return 965; } public int get966() { return 966; } public int get967() { return 967; } public int get968() { return 968; } public int get969() { return 969; } public int get970() { return 970; } public int get971() { return 971; } public int get972() { return 972; } public int get973() { return 973; } public int get974() { return 974; } public int get975() { return 975; } public int get976() { return 976; } public int get977() { return 977; } public int get978() { return 978; } public int get979() { return 979; } public int get980() { return 980; } public int get981() { return 981; } public int get982() { return 982; } public int get983() { return 983; } public int get984() { return 984; } public int get985() { return 
985; } public int get986() { return 986; } public int get987() { return 987; } public int get988() { return 988; } public int get989() { return 989; } public int get990() { return 990; } public int get991() { return 991; } public int get992() { return 992; } public int get993() { return 993; } public int get994() { return 994; } public int get995() { return 995; } public int get996() { return 996; } public int get997() { return 997; } public int get998() { return 998; } public int get999() { return 999; } }
gpl-3.0
srnsw/xena
xena/src/nu/xom/JaxenConnector.java
2596
/* Copyright 2005 Elliotte Rusty Harold
 *
 * This library is free software; you can redistribute it and/or modify
 * it under the terms of version 2.1 of the GNU Lesser General Public
 * License as published by the Free Software Foundation.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 * USA
 *
 * You can contact Elliotte Rusty Harold by sending e-mail to
 * elharo@metalab.unc.edu. Please include the word "XOM" in the subject
 * line. The XOM home page is located at http://www.xom.nu/
 */

package nu.xom;

import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;

import org.jaxen.BaseXPath;
import org.jaxen.FunctionContext;
import org.jaxen.JaxenException;
import org.jaxen.XPathFunctionContext;

/**
 * Connects XOM's object model to the Jaxen XPath engine by pairing a
 * compiled XPath expression with a {@code JaxenNavigator}, and
 * post-processing Jaxen's results so callers see individual nodes rather
 * than Jaxen's list-wrapped text results.
 *
 * @author Elliotte Rusty Harold
 * @version 1.1.1b1
 *
 */
class JaxenConnector extends BaseXPath {

    private static final long serialVersionUID = 9025734269448515308L;

    // Shared across all instances; XPathFunctionContext(false) presumably
    // omits extension functions -- see Jaxen docs to confirm.
    private static FunctionContext functionContext = new XPathFunctionContext(false);

    /**
     * Compiles the given XPath expression against the XOM navigator.
     *
     * @param expression the XPath expression to compile
     * @throws JaxenException if the expression is malformed
     */
    JaxenConnector(String expression) throws JaxenException {
        super(expression, new JaxenNavigator());
        // possible thread-safety issue???? (functionContext is shared
        // mutable static state set on every construction)
        this.setFunctionContext(functionContext);
    }

    /**
     * Evaluates this expression against the supplied context and returns
     * the matching nodes, flattening Jaxen's List-wrapped Text results
     * into individual entries.
     *
     * @param expression the evaluation context node
     * @return the list of matching nodes
     * @throws JaxenException if evaluation fails
     */
    public List selectNodes(Object expression) throws JaxenException {

        List result = super.selectNodes(expression);

        // Text objects are returned wrapped in a List.
        // We need to unwrap them here.
        ListIterator iterator = result.listIterator();
        while (iterator.hasNext()) {
            Object next = iterator.next();
            if (next instanceof List) {
                List list = (List) next;
                // replace the list with the first item in the list
                iterator.set(list.get(0));
                // insert any subsequent Text objects into the list
                // (ListIterator.add inserts before the next element, so
                // relative order of the unwrapped items is preserved)
                if (list.size() > 1) {
                    Iterator texts = list.listIterator(1);
                    while (texts.hasNext()) {
                        iterator.add(texts.next());
                    }
                }
            }
        }
        return result;

    }

}
gpl-3.0
mateor/pdroid
android-2.3.4_r1/tags/1.32/frameworks/base/tests/CoreTests/android/core/ByteArrayOutputStreamTest.java
1403
/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.core;

import junit.framework.TestCase;

import java.io.ByteArrayOutputStream;

import android.test.suitebuilder.annotation.SmallTest;

/**
 * A basic test for ByteArrayOutputStraem.
 */
public class ByteArrayOutputStreamTest extends TestCase {

    @SmallTest
    public void testByteArrayOutputStream() throws Exception {
        final String alphabet = "AbCdEfGhIjKlMnOpQrStUvWxYz";

        // One stream with the default capacity, one deliberately undersized
        // so writeTo() must grow it.
        final ByteArrayOutputStream source = new ByteArrayOutputStream();
        final ByteArrayOutputStream copy = new ByteArrayOutputStream(10);

        // Write the 26 alphabet bytes plus one single-byte write, then
        // mirror the whole buffer into the second stream.
        source.write(alphabet.getBytes(), 0, 26);
        source.write('X');
        source.writeTo(copy);

        // Both streams must now hold the identical 27-byte content.
        assertEquals(27, source.size());
        assertEquals("AbCdEfGhIjKlMnOpQrStUvWxYzX", source.toString());
        assertEquals("AbCdEfGhIjKlMnOpQrStUvWxYzX", copy.toString());
    }
}
gpl-3.0
mnip91/proactive-component-monitoring
src/Examples/org/objectweb/proactive/examples/masterworker/nqueens/query/FirstDiagQuery.java
2355
/*
 * ################################################################
 *
 * ProActive Parallel Suite(TM): The Java(TM) library for
 *    Parallel, Distributed, Multi-Core Computing for
 *    Enterprise Grids & Clouds
 *
 * Copyright (C) 1997-2012 INRIA/University of
 *                 Nice-Sophia Antipolis/ActiveEon
 * Contact: proactive@ow2.org or contact@activeeon.com
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Affero General Public License
 * as published by the Free Software Foundation; version 3 of
 * the License.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this library; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 * USA
 *
 * If needed, contact us to obtain a release under GPL Version 2 or 3
 * or a different license than the AGPL.
 *
 * Initial developer(s): The ProActive Team
 *                       http://proactive.inria.fr/team_members.htm
 * Contributor(s):
 *
 * ################################################################
 * $$PROACTIVE_INITIAL_DEV$$
 */
package org.objectweb.proactive.examples.masterworker.nqueens.query;

import java.util.Vector;

/**
 * N-queens sub-problem that seeds the search along the first diagonal:
 * it generates one {@code DiagQuery} child per candidate column of the
 * first row and sums their solution counts.
 */
public class FirstDiagQuery extends Query {

    // Forwarded unchanged to each DiagQuery child; its meaning is defined
    // by DiagQuery, which is not visible here -- TODO confirm.
    int scale;

    /**
     * @param n board size (number of queens)
     * @param s scale value passed through to the generated DiagQuery children
     */
    public FirstDiagQuery(int n, int s) {
        super(n, 1, 2, 1, 0);
        // Mark every column bit outside the n-bit board as permanently
        // occupied so the bitmask search never places a queen there.
        down |= ~((1 << n) - 1);
        scale = s;
    }

    /**
     * Splits this query into its sub-queries and returns the sum of the
     * solution counts computed by running each of them sequentially.
     */
    @Override
    public long run() {
        Vector v = split(new Vector());
        int n = v.size(); // NOTE(review): shadows the inherited field 'n'
        long r = 0;
        for (int i = 0; i < n; i++) {
            r += ((Query) v.get(i)).run();
        }
        return (r);
    }

    // Builds the child query obtained by occupying column bit 'q' on the
    // current row: the left/right attack masks shift by one per row, the
    // down (column) mask accumulates, as usual in bitmask n-queens.
    private DiagQuery next(int q, int sym) {
        int l = (left | q) << 1;
        int d = (down | q);
        int r = (right | q) >> 1;
        return (new DiagQuery(n, 2, sym, scale, l, d, r));
    }

    /**
     * Appends one child query per candidate column (columns 2 .. n-2) to
     * {@code v} and returns it. The symmetry argument (nq1 - i) is
     * interpreted by DiagQuery -- TODO confirm against that class.
     */
    @Override
    public Vector split(Vector v) {
        int nq1 = n - 1;
        for (int i = 2; i < nq1; i++)
            v.add(next(1 << i, nq1 - i));
        return (v);
    }
}
agpl-3.0
kumarrus/voltdb
tests/testprocs/org/voltdb_testprocs/regressionsuites/matviewprocs/SelectAllPeople.java
1528
/* This file is part of VoltDB.
 * Copyright (C) 2008-2015 VoltDB Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice shall be
 * included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

package org.voltdb_testprocs.regressionsuites.matviewprocs;

import org.voltdb.*;

/**
 * Single-partition test procedure that returns every row of the PEOPLE
 * table from the partition the invocation was routed to.
 */
@ProcInfo (
    partitionInfo = "PEOPLE.PARTITION: 0",
    singlePartition = true
)
public class SelectAllPeople extends VoltProcedure {

    /** The one statement this procedure runs. */
    public final SQLStmt get = new SQLStmt("SELECT * FROM PEOPLE;");

    /**
     * Queues and executes the SELECT.
     *
     * @param id partitioning parameter (see @ProcInfo, parameter index 0);
     *           used only to route the call to a partition, not in the query
     * @return a single VoltTable holding all PEOPLE rows of this partition
     */
    public VoltTable[] run(int id) {
        voltQueueSQL(get);
        return voltExecuteSQL();
    }
}
agpl-3.0
sternze/CurrentTopics_JFreeChart
source/org/jfree/chart/plot/dial/DialTextAnnotation.java
12359
/* ===========================================================
 * JFreeChart : a free chart library for the Java(tm) platform
 * ===========================================================
 *
 * (C) Copyright 2000-2013, by Object Refinery Limited and Contributors.
 *
 * Project Info:  http://www.jfree.org/jfreechart/index.html
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation; either version 2.1 of the License, or
 * (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
 * License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
 * USA.
 *
 * [Oracle and Java are registered trademarks of Oracle and/or its affiliates.
 * Other names may be trademarks of their respective owners.]
 *
 * -----------------------
 * DialTextAnnotation.java
 * -----------------------
 * (C) Copyright 2006-2013, by Object Refinery Limited.
 *
 * Original Author:  David Gilbert (for Object Refinery Limited);
 * Contributor(s):   -;
 *
 * Changes
 * -------
 * 03-Nov-2006 : Version 1 (DG);
 * 08-Mar-2007 : Fix in hashCode() (DG);
 * 17-Oct-2007 : Updated equals() (DG);
 * 24-Oct-2007 : Added getAnchor() and setAnchor() methods (DG);
 * 03-Jul-2013 : Use ParamChecks (DG);
 *
 */

package org.jfree.chart.plot.dial;

import java.awt.Color;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.geom.Arc2D;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

import org.jfree.chart.HashUtilities;
import org.jfree.chart.util.ParamChecks;
import org.jfree.io.SerialUtilities;
import org.jfree.text.TextUtilities;
import org.jfree.ui.TextAnchor;
import org.jfree.util.PaintUtilities;
import org.jfree.util.PublicCloneable;

/**
 * A text annotation for a {@link DialPlot}.  The annotation's anchor point
 * is defined in polar terms (an angle in degrees plus a radius expressed as
 * a percentage of the dial's framing rectangle).
 *
 * @since 1.0.7
 */
public class DialTextAnnotation extends AbstractDialLayer implements DialLayer,
        Cloneable, PublicCloneable, Serializable {

    /** For serialization. */
    static final long serialVersionUID = 3065267524054428071L;

    /** The label text. */
    private String label;

    /** The font. */
    private Font font;

    /**
     * The paint for the label.  This field is transient because it requires
     * special handling for serialization.
     */
    private transient Paint paint;

    /** The angle that defines the anchor point for the annotation. */
    private double angle;

    /** The radius that defines the anchor point for the annotation. */
    private double radius;

    /** The text anchor to be aligned to the annotation's anchor point. */
    private TextAnchor anchor;

    /**
     * Creates a new instance of <code>DialTextAnnotation</code>.
     *
     * @param label  the label (<code>null</code> not permitted).
     */
    public DialTextAnnotation(String label) {
        ParamChecks.nullNotPermitted(label, "label");
        // Defaults: anchor at -90 degrees (top of the dial), 30% of the
        // framing rectangle's radius, bold 14pt Dialog font, black text.
        this.angle = -90.0;
        this.radius = 0.3;
        this.font = new Font("Dialog", Font.BOLD, 14);
        this.paint = Color.black;
        this.label = label;
        this.anchor = TextAnchor.TOP_CENTER;
    }

    /**
     * Returns the label text.
     *
     * @return The label text (never <code>null</code>).
     *
     * @see #setLabel(String)
     */
    public String getLabel() {
        return this.label;
    }

    /**
     * Sets the label and sends a {@link DialLayerChangeEvent} to all
     * registered listeners.
     *
     * @param label  the label (<code>null</code> not permitted).
     *
     * @see #getLabel()
     */
    public void setLabel(String label) {
        ParamChecks.nullNotPermitted(label, "label");
        this.label = label;
        notifyListeners(new DialLayerChangeEvent(this));
    }

    /**
     * Returns the font used to display the label.
     *
     * @return The font (never <code>null</code>).
     *
     * @see #setFont(Font)
     */
    public Font getFont() {
        return this.font;
    }

    /**
     * Sets the font used to display the label and sends a
     * {@link DialLayerChangeEvent} to all registered listeners.
     *
     * @param font  the font (<code>null</code> not permitted).
     *
     * @see #getFont()
     */
    public void setFont(Font font) {
        ParamChecks.nullNotPermitted(font, "font");
        this.font = font;
        notifyListeners(new DialLayerChangeEvent(this));
    }

    /**
     * Returns the paint used to display the label.
     *
     * @return The paint (never <code>null</code>).
     *
     * @see #setPaint(Paint)
     */
    public Paint getPaint() {
        return this.paint;
    }

    /**
     * Sets the paint used to display the label and sends a
     * {@link DialLayerChangeEvent} to all registered listeners.
     *
     * @param paint  the paint (<code>null</code> not permitted).
     *
     * @see #getPaint()
     */
    public void setPaint(Paint paint) {
        ParamChecks.nullNotPermitted(paint, "paint");
        this.paint = paint;
        notifyListeners(new DialLayerChangeEvent(this));
    }

    /**
     * Returns the angle used to calculate the anchor point.
     *
     * @return The angle (in degrees).
     *
     * @see #setAngle(double)
     * @see #getRadius()
     */
    public double getAngle() {
        return this.angle;
    }

    /**
     * Sets the angle used to calculate the anchor point and sends a
     * {@link DialLayerChangeEvent} to all registered listeners.
     *
     * @param angle  the angle (in degrees).
     *
     * @see #getAngle()
     * @see #setRadius(double)
     */
    public void setAngle(double angle) {
        this.angle = angle;
        notifyListeners(new DialLayerChangeEvent(this));
    }

    /**
     * Returns the radius used to calculate the anchor point.  This is
     * specified as a percentage relative to the dial's framing rectangle.
     *
     * @return The radius.
     *
     * @see #setRadius(double)
     * @see #getAngle()
     */
    public double getRadius() {
        return this.radius;
    }

    /**
     * Sets the radius used to calculate the anchor point and sends a
     * {@link DialLayerChangeEvent} to all registered listeners.
     *
     * @param radius  the radius (as a percentage of the dial's framing
     *                rectangle).
     *
     * @see #getRadius()
     * @see #setAngle(double)
     */
    public void setRadius(double radius) {
        if (radius < 0.0) {
            throw new IllegalArgumentException(
                    "The 'radius' cannot be negative.");
        }
        this.radius = radius;
        notifyListeners(new DialLayerChangeEvent(this));
    }

    /**
     * Returns the text anchor point that will be aligned to the position
     * specified by {@link #getAngle()} and {@link #getRadius()}.
     *
     * @return The anchor point.
     *
     * @see #setAnchor(TextAnchor)
     */
    public TextAnchor getAnchor() {
        return this.anchor;
    }

    /**
     * Sets the text anchor point and sends a {@link DialLayerChangeEvent} to
     * all registered listeners.
     *
     * @param anchor  the anchor point (<code>null</code> not permitted).
     *
     * @see #getAnchor()
     */
    public void setAnchor(TextAnchor anchor) {
        ParamChecks.nullNotPermitted(anchor, "anchor");
        this.anchor = anchor;
        notifyListeners(new DialLayerChangeEvent(this));
    }

    /**
     * Returns <code>true</code> to indicate that this layer should be
     * clipped within the dial window.
     *
     * @return <code>true</code>.
     */
    @Override
    public boolean isClippedToWindow() {
        return true;
    }

    /**
     * Draws the background to the specified graphics device.  If the dial
     * frame specifies a window, the clipping region will already have been
     * set to this window before this method is called.
     *
     * @param g2  the graphics device (<code>null</code> not permitted).
     * @param plot  the plot (ignored here).
     * @param frame  the dial frame (ignored here).
     * @param view  the view rectangle (<code>null</code> not permitted).
     */
    @Override
    public void draw(Graphics2D g2, DialPlot plot, Rectangle2D frame,
            Rectangle2D view) {

        // work out the anchor point: a zero-extent arc at 'angle' on the
        // circle sized by 'radius' yields the polar->cartesian conversion
        Rectangle2D f = DialPlot.rectangleByRadius(frame, this.radius,
                this.radius);
        Arc2D arc = new Arc2D.Double(f, this.angle, 0.0, Arc2D.OPEN);
        Point2D pt = arc.getStartPoint();
        g2.setPaint(this.paint);
        g2.setFont(this.font);
        TextUtilities.drawAlignedString(this.label, g2, (float) pt.getX(),
                (float) pt.getY(), this.anchor);

    }

    /**
     * Tests this instance for equality with an arbitrary object.
     *
     * @param obj  the object (<code>null</code> permitted).
     *
     * @return A boolean.
     */
    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (!(obj instanceof DialTextAnnotation)) {
            return false;
        }
        DialTextAnnotation that = (DialTextAnnotation) obj;
        if (!this.label.equals(that.label)) {
            return false;
        }
        if (!this.font.equals(that.font)) {
            return false;
        }
        if (!PaintUtilities.equal(this.paint, that.paint)) {
            return false;
        }
        if (this.radius != that.radius) {
            return false;
        }
        if (this.angle != that.angle) {
            return false;
        }
        if (!this.anchor.equals(that.anchor)) {
            return false;
        }
        return super.equals(obj);
    }

    /**
     * Returns a hash code for this instance.
     *
     * @return The hash code.
     */
    @Override
    public int hashCode() {
        int result = 193;
        result = 37 * result + HashUtilities.hashCodeForPaint(this.paint);
        result = 37 * result + this.font.hashCode();
        result = 37 * result + this.label.hashCode();
        result = 37 * result + this.anchor.hashCode();
        long temp = Double.doubleToLongBits(this.angle);
        result = 37 * result + (int) (temp ^ (temp >>> 32));
        temp = Double.doubleToLongBits(this.radius);
        result = 37 * result + (int) (temp ^ (temp >>> 32));
        return result;
    }

    /**
     * Returns a clone of this instance.
     *
     * @return The clone.
     *
     * @throws CloneNotSupportedException if some attribute of this instance
     *     cannot be cloned.
     */
    @Override
    public Object clone() throws CloneNotSupportedException {
        return super.clone();
    }

    /**
     * Provides serialization support.  Writes the transient 'paint' field
     * via SerialUtilities because Paint is not generally Serializable.
     *
     * @param stream  the output stream.
     *
     * @throws IOException  if there is an I/O error.
     */
    private void writeObject(ObjectOutputStream stream) throws IOException {
        stream.defaultWriteObject();
        SerialUtilities.writePaint(this.paint, stream);
    }

    /**
     * Provides serialization support.
     *
     * @param stream  the input stream.
     *
     * @throws IOException  if there is an I/O error.
     * @throws ClassNotFoundException  if there is a classpath problem.
     */
    private void readObject(ObjectInputStream stream) throws IOException,
            ClassNotFoundException {
        stream.defaultReadObject();
        this.paint = SerialUtilities.readPaint(stream);
    }

}
lgpl-2.1
kigsmtua/SMSSync
smssync/src/main/java/org/addhen/smssync/data/exception/MessageNotFoundException.java
1219
/* * Copyright (c) 2010 - 2015 Ushahidi Inc * All rights reserved * Contact: team@ushahidi.com * Website: http://www.ushahidi.com * GNU Lesser General Public License Usage * This file may be used under the terms of the GNU Lesser * General Public License version 3 as published by the Free Software * Foundation and appearing in the file LICENSE.LGPL included in the * packaging of this file. Please review the following information to * ensure the GNU Lesser General Public License version 3 requirements * will be met: http://www.gnu.org/licenses/lgpl.html. * * If you have questions regarding the use of this file, please contact * Ushahidi developers at team@ushahidi.com. */ package org.addhen.smssync.data.exception; /** * @author Ushahidi Team <team@ushahidi.com> */ public class MessageNotFoundException extends Exception { public MessageNotFoundException() { super(); } public MessageNotFoundException(final String message) { super(message); } public MessageNotFoundException(final String message, final Throwable cause) { super(message, cause); } public MessageNotFoundException(final Throwable cause) { super(cause); } }
lgpl-3.0
FranciscoJavierPRamos/vget
src/main/java/com/github/axet/vget/vhs/VimeoInfo.java
473
package com.github.axet.vget.vhs; import java.net.URL; import com.github.axet.vget.info.VideoInfo; public class VimeoInfo extends VideoInfo { // keep it in order hi->lo public enum VimeoQuality { pHi, pLow } private VimeoQuality vq; public VimeoInfo(URL web) { super(web); } public VimeoQuality getVideoQuality() { return vq; } public void setVideoQuality(VimeoQuality vq) { this.vq = vq; } }
lgpl-3.0
nguyentienlong/community-edition
projects/repository/source/java/org/alfresco/repo/nodelocator/CompanyHomeNodeLocator.java
1720
/* * Copyright (C) 2005-2011 Alfresco Software Limited. * * This file is part of Alfresco * * Alfresco is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Alfresco is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Alfresco. If not, see <http://www.gnu.org/licenses/>. */ package org.alfresco.repo.nodelocator; import java.io.Serializable; import java.util.Map; import org.alfresco.repo.model.Repository; import org.alfresco.service.cmr.repository.NodeRef; /** * Locates the Company Home {@link NodeRef}. * * @author Nick Smith * @since 4.0 */ public class CompanyHomeNodeLocator extends AbstractNodeLocator { public static final String NAME = "companyhome"; private Repository repoHelper; /** * {@inheritDoc} */ public NodeRef getNode(NodeRef source, Map<String, Serializable> params) { return repoHelper.getCompanyHome(); } /** * {@inheritDoc} */ @Override public String getName() { return NAME; } /** * @param repoHelper the repoHelper to set */ public void setRepositoryHelper(Repository repoHelper) { this.repoHelper = repoHelper; } }
lgpl-3.0
alizelzele/SeLion
server/src/main/java/com/paypal/selion/grid/RunnableLauncher.java
2841
/*-------------------------------------------------------------------------------------------------------------------*\
|  Copyright (C) 2015 PayPal                                                                                          |
|                                                                                                                     |
|  Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance     |
|  with the License.                                                                                                  |
|                                                                                                                     |
|  You may obtain a copy of the License at                                                                            |
|                                                                                                                     |
|       http://www.apache.org/licenses/LICENSE-2.0                                                                    |
|                                                                                                                     |
|  Unless required by applicable law or agreed to in writing, software distributed under the License is distributed   |
|  on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the License for |
|  the specific language governing permissions and limitations under the License.                                     |
\*-------------------------------------------------------------------------------------------------------------------*/

package com.paypal.selion.grid;

/**
 * Interface which extends {@link Runnable} and adds some additional methods
 */
public interface RunnableLauncher extends Runnable {

    /**
     * Shutdown the launcher
     */
    void shutdown();

    /**
     * @return Whether the launcher has completed its initialization phase which includes downloading dependencies.
     */
    boolean isInitialized();

    /**
     * @return Whether the {@link InstanceType} for this launcher is up and running.
     */
    boolean isRunning();

    /**
     * Maps to a type of WebDriver instance.
     */
    enum InstanceType {
        SELENIUM_HUB("hub"),
        SELENIUM_NODE("node"),
        SELENIUM_STANDALONE("standalone"),
        SELION_SAUCE_HUB("sauce"),
        IOS_DRIVER("ios-driver"),
        SELENDROID("selendroid"),
        APPIUM("appium");

        // Friendly name for the instance type; final because enum state is immutable.
        private final String value;

        InstanceType(String value) {
            this.value = value;
        }

        /**
         * @return the friendly {@link String} representation of this {@link InstanceType}
         */
        String getFriendlyName() {
            return this.value;
        }
    }
}
apache-2.0
mingfly/opencloudb
src/main/java/org/opencloudb/mysql/MySQLMessage.java
9795
/*
 * Copyright 2012-2015 org.opencloudb.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.opencloudb.mysql;

import java.io.UnsupportedEncodingException;
import java.math.BigDecimal;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Arrays;
import java.util.Calendar;

/**
 * Cursor-style reader over a raw MySQL protocol packet.  Multi-byte integers
 * are decoded little-endian, as the wire protocol requires.
 *
 * <p>Not thread-safe: each instance carries a mutable read position.</p>
 *
 * @author mycat
 */
public class MySQLMessage {

    /** Sentinel returned by {@link #readLength()} for the NULL column marker (0xfb). */
    public static final long NULL_LENGTH = -1;

    private static final byte[] EMPTY_BYTES = new byte[0];

    private final byte[] data;
    private final int length;
    private int position; // index of the next byte to read

    public MySQLMessage(byte[] data) {
        this.data = data;
        this.length = data.length;
        this.position = 0;
    }

    /** @return total packet length in bytes */
    public int length() {
        return length;
    }

    /** @return current read position */
    public int position() {
        return position;
    }

    /** @return the backing array (not a copy) */
    public byte[] bytes() {
        return data;
    }

    /** Advances the read position by {@code i} bytes without reading. */
    public void move(int i) {
        position += i;
    }

    /** Moves the read position to the absolute index {@code i}. */
    public void position(int i) {
        this.position = i;
    }

    /** @return true if at least one unread byte remains */
    public boolean hasRemaining() {
        return length > position;
    }

    /** Reads the byte at absolute index {@code i} without moving the cursor. */
    public byte read(int i) {
        return data[i];
    }

    /** Reads the next byte and advances the cursor. */
    public byte read() {
        return data[position++];
    }

    /** Reads a 2-byte little-endian unsigned integer. */
    public int readUB2() {
        final byte[] b = this.data;
        int i = b[position++] & 0xff;
        i |= (b[position++] & 0xff) << 8;
        return i;
    }

    /** Reads a 3-byte little-endian unsigned integer. */
    public int readUB3() {
        final byte[] b = this.data;
        int i = b[position++] & 0xff;
        i |= (b[position++] & 0xff) << 8;
        i |= (b[position++] & 0xff) << 16;
        return i;
    }

    /** Reads a 4-byte little-endian unsigned integer (returned as long to stay unsigned). */
    public long readUB4() {
        final byte[] b = this.data;
        long l = (long) (b[position++] & 0xff);
        l |= (long) (b[position++] & 0xff) << 8;
        l |= (long) (b[position++] & 0xff) << 16;
        l |= (long) (b[position++] & 0xff) << 24;
        return l;
    }

    /** Reads a 4-byte little-endian signed integer. */
    public int readInt() {
        final byte[] b = this.data;
        int i = b[position++] & 0xff;
        i |= (b[position++] & 0xff) << 8;
        i |= (b[position++] & 0xff) << 16;
        i |= (b[position++] & 0xff) << 24;
        return i;
    }

    /** Reads a 4-byte IEEE-754 float. */
    public float readFloat() {
        return Float.intBitsToFloat(readInt());
    }

    /** Reads an 8-byte little-endian signed integer. */
    public long readLong() {
        final byte[] b = this.data;
        long l = (long) (b[position++] & 0xff);
        l |= (long) (b[position++] & 0xff) << 8;
        l |= (long) (b[position++] & 0xff) << 16;
        l |= (long) (b[position++] & 0xff) << 24;
        l |= (long) (b[position++] & 0xff) << 32;
        l |= (long) (b[position++] & 0xff) << 40;
        l |= (long) (b[position++] & 0xff) << 48;
        l |= (long) (b[position++] & 0xff) << 56;
        return l;
    }

    /** Reads an 8-byte IEEE-754 double. */
    public double readDouble() {
        return Double.longBitsToDouble(readLong());
    }

    /**
     * Reads a MySQL length-encoded integer.
     *
     * @return the decoded length, or {@link #NULL_LENGTH} for the NULL marker (251)
     */
    public long readLength() {
        int length = data[position++] & 0xff;
        switch (length) {
        case 251:
            return NULL_LENGTH;
        case 252:
            return readUB2();
        case 253:
            return readUB3();
        case 254:
            return readLong();
        default:
            return length;
        }
    }

    /** Reads all remaining bytes; returns an empty array when exhausted. */
    public byte[] readBytes() {
        if (position >= length) {
            return EMPTY_BYTES;
        }
        byte[] ab = new byte[length - position];
        System.arraycopy(data, position, ab, 0, ab.length);
        position = length;
        return ab;
    }

    /** Reads exactly {@code length} bytes. */
    public byte[] readBytes(int length) {
        byte[] ab = new byte[length];
        System.arraycopy(data, position, ab, 0, length);
        position += length;
        return ab;
    }

    /**
     * Reads bytes up to (and consuming) a NUL terminator, or to end of packet
     * if no terminator is present.
     */
    public byte[] readBytesWithNull() {
        final byte[] b = this.data;
        if (position >= length) {
            return EMPTY_BYTES;
        }
        int offset = -1;
        for (int i = position; i < length; i++) {
            if (b[i] == 0) {
                offset = i;
                break;
            }
        }
        switch (offset) {
        case -1: // no terminator: take everything that remains
            byte[] ab1 = new byte[length - position];
            System.arraycopy(b, position, ab1, 0, ab1.length);
            position = length;
            return ab1;
        case 0: // terminator is the very first byte of the packet
            position++;
            return EMPTY_BYTES;
        default:
            byte[] ab2 = new byte[offset - position];
            System.arraycopy(b, position, ab2, 0, ab2.length);
            position = offset + 1; // skip the NUL
            return ab2;
        }
    }

    /** Reads a length-encoded byte string; NULL and zero-length both yield an empty array. */
    public byte[] readBytesWithLength() {
        int length = (int) readLength();
        if (length <= 0) {
            return EMPTY_BYTES;
        }
        byte[] ab = new byte[length];
        System.arraycopy(data, position, ab, 0, ab.length);
        position += length;
        return ab;
    }

    /** Reads all remaining bytes as a String in the platform default charset. */
    public String readString() {
        if (position >= length) {
            return null;
        }
        String s = new String(data, position, length - position);
        position = length;
        return s;
    }

    /** Reads all remaining bytes as a String in the given charset. */
    public String readString(String charset) throws UnsupportedEncodingException {
        if (position >= length) {
            return null;
        }
        String s = new String(data, position, length - position, charset);
        position = length;
        return s;
    }

    /** Reads a NUL-terminated String in the platform default charset. */
    public String readStringWithNull() {
        final byte[] b = this.data;
        if (position >= length) {
            return null;
        }
        int offset = -1;
        for (int i = position; i < length; i++) {
            if (b[i] == 0) {
                offset = i;
                break;
            }
        }
        if (offset == -1) { // no terminator: take everything that remains
            String s = new String(b, position, length - position);
            position = length;
            return s;
        }
        if (offset > position) {
            String s = new String(b, position, offset - position);
            position = offset + 1;
            return s;
        } else { // empty string: terminator immediately at the cursor
            position++;
            return null;
        }
    }

    /** Reads a NUL-terminated String in the given charset. */
    public String readStringWithNull(String charset) throws UnsupportedEncodingException {
        final byte[] b = this.data;
        if (position >= length) {
            return null;
        }
        int offset = -1;
        for (int i = position; i < length; i++) {
            if (b[i] == 0) {
                offset = i;
                break;
            }
        }
        switch (offset) {
        case -1:
            String s1 = new String(b, position, length - position, charset);
            position = length;
            return s1;
        case 0:
            position++;
            return null;
        default:
            String s2 = new String(b, position, offset - position, charset);
            position = offset + 1;
            return s2;
        }
    }

    /** Reads a length-encoded String in the platform default charset; NULL/empty yield null. */
    public String readStringWithLength() {
        int length = (int) readLength();
        if (length <= 0) {
            return null;
        }
        String s = new String(data, position, length);
        position += length;
        return s;
    }

    /** Reads a length-encoded String in the given charset; NULL/empty yield null. */
    public String readStringWithLength(String charset) throws UnsupportedEncodingException {
        int length = (int) readLength();
        if (length <= 0) {
            return null;
        }
        String s = new String(data, position, length, charset);
        position += length;
        return s;
    }

    /**
     * Decodes a binary-protocol TIME value into a {@link Time}.
     */
    public java.sql.Time readTime() {
        move(6); // skip the leading length/sign/day fields
        int hour = read();
        int minute = read();
        int second = read();
        Calendar cal = getLocalCalendar();
        // FIX: the calendar is cached per thread; without clear() it keeps stale
        // field state (notably MILLISECOND) from its previous use, skewing results.
        cal.clear();
        cal.set(0, 0, 0, hour, minute, second);
        return new Time(cal.getTimeInMillis());
    }

    /**
     * Decodes a binary-protocol DATETIME/TIMESTAMP value.
     *
     * <p>NOTE(review): assumes at least a 7-byte payload (year..second); a
     * 4-byte date-only payload would over-read — confirm against callers.
     * The trailing 4-byte field of an 11-byte payload is stored as nanos via
     * {@link Timestamp#setNanos(int)}; the wire format presumably carries
     * microseconds — behavior preserved as-is, TODO confirm.</p>
     */
    public java.util.Date readDate() {
        byte length = read();
        int year = readUB2();
        byte month = read();
        byte date = read();
        int hour = read();
        int minute = read();
        int second = read();
        if (length == 11) {
            long nanos = readUB4();
            Calendar cal = getLocalCalendar();
            cal.clear(); // FIX: drop stale fields from the thread-cached calendar
            cal.set(year, --month, date, hour, minute, second);
            Timestamp time = new Timestamp(cal.getTimeInMillis());
            time.setNanos((int) nanos);
            return time;
        } else {
            Calendar cal = getLocalCalendar();
            cal.clear(); // FIX: drop stale fields from the thread-cached calendar
            cal.set(year, --month, date, hour, minute, second);
            return new java.sql.Date(cal.getTimeInMillis());
        }
    }

    /** Reads a length-encoded decimal string; NULL yields null. */
    public BigDecimal readBigDecimal() {
        String src = readStringWithLength();
        return src == null ? null : new BigDecimal(src);
    }

    @Override
    public String toString() {
        return Arrays.toString(data); // simplified: the StringBuilder wrapper added nothing
    }

    private static final ThreadLocal<Calendar> localCalendar = new ThreadLocal<Calendar>();

    /** Lazily creates and caches one Calendar per thread (Calendar is not thread-safe). */
    private static Calendar getLocalCalendar() {
        Calendar cal = localCalendar.get();
        if (cal == null) {
            cal = Calendar.getInstance();
            localCalendar.set(cal);
        }
        return cal;
    }
}
apache-2.0
onders86/camel
components/camel-netty4/src/test/java/org/apache/camel/component/netty4/Netty2978IssueTest.java
4373
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.netty4; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import org.apache.camel.CamelContext; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.Processor; import org.apache.camel.ProducerTemplate; import org.apache.camel.builder.RouteBuilder; import org.junit.Ignore; import org.junit.Test; /** * @version */ @Ignore("This test can cause CI servers to hang") public class Netty2978IssueTest extends BaseNettyTest { @Test public void testNetty2978() throws Exception { CamelClient client = new CamelClient(context); try { for (int i = 0; i < 1000; i++) { Object reply = client.lookup(i); assertEquals("Bye " + i, reply); } } finally { client.close(); } } @Test public void testNetty2978Concurrent() throws Exception { final CamelClient client = new CamelClient(context); try { final List<Callable<String>> callables = new ArrayList<>(); for (int count = 0; count < 1000; count++) { final int i 
= count; callables.add(new Callable<String>() { public String call() { return client.lookup(i); } }); } final ExecutorService executorService = Executors.newFixedThreadPool(10); final List<Future<String>> results = executorService.invokeAll(callables); final Set<String> replies = new HashSet<>(); for (Future<String> future : results) { // wait at most 60 sec to not hang test String reply = future.get(60, TimeUnit.SECONDS); assertTrue(reply.startsWith("Bye ")); replies.add(reply); } // should be 1000 unique replies assertEquals(1000, replies.size()); executorService.shutdownNow(); } finally { client.close(); } } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("netty4:tcp://localhost:{{port}}?sync=true") .process(new Processor() { public void process(final Exchange exchange) { String body = exchange.getIn().getBody(String.class); exchange.getOut().setBody("Bye " + body); } }); } }; } private static final class CamelClient { private final Endpoint endpoint; private final ProducerTemplate producerTemplate; CamelClient(CamelContext camelContext) { this.endpoint = camelContext.getEndpoint("netty4:tcp://localhost:{{port}}?sync=true"); this.producerTemplate = camelContext.createProducerTemplate(); } public void close() throws Exception { producerTemplate.stop(); } public String lookup(int num) { return producerTemplate.requestBody(endpoint, num, String.class); } } }
apache-2.0
xli/gocd
common/test/unit/com/thoughtworks/go/domain/materials/svn/SvnCommandRemoteTest.java
15850
/*************************GO-LICENSE-START********************************* * Copyright 2016 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.go.domain.materials.svn; import java.io.File; import java.io.IOException; import java.util.List; import com.googlecode.junit.ext.JunitExtRunner; import com.thoughtworks.go.domain.materials.Modification; import com.thoughtworks.go.domain.materials.ValidationBean; import com.thoughtworks.go.helper.SvnRemoteRepository; import com.thoughtworks.go.util.FileUtil; import com.thoughtworks.go.util.TestFileUtil; import com.thoughtworks.go.util.command.InMemoryStreamConsumer; import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer; import org.apache.commons.io.FileUtils; import org.hamcrest.core.Is; import org.jdom.input.SAXBuilder; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @RunWith(JunitExtRunner.class) public class SvnCommandRemoteTest { public SvnRemoteRepository repository; private static final String HARRY = "harry"; private static final String HARRYS_PASSWORD = "harryssecret"; public SvnCommand command; public File workingDir; private 
InMemoryStreamConsumer outputStreamConsumer; @Before public void startRepo() throws Exception { repository = new SvnRemoteRepository(); repository.addUser(HARRY, HARRYS_PASSWORD); repository.start(); command = new SvnCommand(null, repository.getUrl(), HARRY, HARRYS_PASSWORD, true); workingDir = TestFileUtil.createTempFolder("workingDir" + System.currentTimeMillis()); outputStreamConsumer = inMemoryConsumer(); } @After public void stopRepo() throws Exception { if (repository!=null) repository.stop(); FileUtil.deleteFolder(workingDir); } @Test public void shouldSupportSvnInfo() throws Exception { SvnCommand.SvnInfo info = command.remoteInfo(new SAXBuilder()); assertThat(info.getUrl(), is(repository.getUrl())); } @Test public void shouldSupportSvnLog() throws Exception { List<Modification> info = command.latestModification(); assertThat(info.get(0).getComment(), is("Added simple build shell to dump the environment to console.")); } @Test public void shouldSupportModificationsSince() throws Exception { List<Modification> info = command.modificationsSince(new SubversionRevision(2)); assertThat(info.size(), is(2)); assertThat(info.get(0).getRevision(), is("4")); assertThat(info.get(1).getRevision(), is("3")); } @Test public void shouldSupportLocalSvnInfoWithoutPassword() throws Exception { command.checkoutTo(ProcessOutputStreamConsumer.inMemoryConsumer(), workingDir, new SubversionRevision(4)); SvnCommand commandWithoutPassword = new SvnCommand(null, repository.getUrl(), null, null, true); SvnCommand.SvnInfo info = commandWithoutPassword.workingDirInfo(workingDir); assertThat(info.getUrl(), is(repository.getUrl())); } @Test public void shouldMaskPassword_CheckConnection() { ValidationBean goodResponse = command.checkConnection(); assertThat(goodResponse.isValid(), Is.is(true)); assertThat("Plain text password detected!", goodResponse.getError().contains(HARRYS_PASSWORD), Is.is(false)); ValidationBean badResponse = badUserNameCommand().checkConnection(); 
assertThat(badResponse.isValid(), Is.is(false)); assertThat("Plain text password detected!", badResponse.getError().contains(HARRYS_PASSWORD), Is.is(false)); badResponse = badPasswordCommand().checkConnection(); assertThat(badResponse.isValid(), Is.is(false)); assertThat("Plain text password detected!", badResponse.getError().contains("some_bad_password"), Is.is(false)); badResponse = badUrlCommand().checkConnection(); assertThat(badResponse.isValid(), Is.is(false)); assertThat("Plain text password detected!", badResponse.getError().contains(HARRYS_PASSWORD), Is.is(false)); } @Test public void shouldMaskPassword_UpdateTo() { command.checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); command.updateTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false)); try { badUserNameCommand().updateTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false)); } try { badPasswordCommand().updateTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false)); assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains("some_bad_password"), Is.is(false)); } try { badUrlCommand().updateTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); assertThat("Plain text password detected!", 
outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false)); } } @Test public void shouldMaskPassword_CheckoutTo() { command.checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false)); try { FileUtil.deleteFolder(workingDir); badUserNameCommand().checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false)); } try { FileUtil.deleteFolder(workingDir); badPasswordCommand().checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false)); assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains("some_bad_password"), Is.is(false)); } try { FileUtil.deleteFolder(workingDir); badUrlCommand().checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false)); } } @Test public void shouldMaskPassword_getAllExternalURLs() { try { badUserNameCommand().getAllExternalURLs(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } try { badPasswordCommand().getAllExternalURLs(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", 
e.getMessage().contains("some_bad_password"), Is.is(false)); } try { badUrlCommand().getAllExternalURLs(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } } @Test public void shouldMaskPassword_latestModification() { try { badUserNameCommand().latestModification(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } try { badPasswordCommand().latestModification(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false)); } try { badUrlCommand().latestModification(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } } @Test public void shouldMaskPassword_modificationsSince() { try { badUserNameCommand().latestModification(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } try { badPasswordCommand().latestModification(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false)); } try { badUrlCommand().latestModification(); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } } @Test public void shouldMaskPassword_remoteInfo() { try { badUserNameCommand().remoteInfo(new SAXBuilder()); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } try { badPasswordCommand().remoteInfo(new SAXBuilder()); fail("should have failed"); } catch (Exception e) { 
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false)); } try { badUrlCommand().remoteInfo(new SAXBuilder()); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } } @Test public void shouldMaskPassword_workingDirInfo() { try { badUserNameCommand().workingDirInfo(workingDir); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } try { badPasswordCommand().workingDirInfo(workingDir); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false)); } try { badUrlCommand().workingDirInfo(workingDir); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } } @Test public void shouldMaskPassword_commit() throws IOException { command.checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2)); File newFile = new File(workingDir.getAbsolutePath() + "/foo"); FileUtils.writeStringToFile(newFile, "content"); command.add(outputStreamConsumer, newFile); try { badUserNameCommand().commit(outputStreamConsumer, workingDir, "message"); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false)); assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } try { badPasswordCommand().commit(outputStreamConsumer, workingDir, "message"); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains("some_bad_password"), Is.is(false)); assertThat("Plain text password detected!", 
e.getMessage().contains("some_bad_password"), Is.is(false)); } try { badUrlCommand().commit(outputStreamConsumer, workingDir, "message"); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false)); assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } } @Test public void shouldMaskPassword_propset() throws IOException { try { badUserNameCommand().propset(workingDir, "svn:ignore", "*.foo"); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } try { badPasswordCommand().propset(workingDir, "svn:ignore", "*.foo"); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false)); } try { badUrlCommand().propset(workingDir, "svn:ignore", "*.foo"); fail("should have failed"); } catch (Exception e) { assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false)); } } private SvnCommand badUrlCommand() { return new SvnCommand(null, "https://invalid", "blrstdcrspair", HARRYS_PASSWORD, false); } private SvnCommand badUserNameCommand() { return new SvnCommand(null, repository.getUrl(), "some_bad_user", HARRYS_PASSWORD, false); } private SvnCommand badPasswordCommand() { return new SvnCommand(null, repository.getUrl(), HARRY, "some_bad_password", false); } }
apache-2.0
PGer/incubator-hawq
pxf/pxf-service/src/main/java/com/pivotal/pxf/service/utilities/CustomWebappLoader.java
7022
package com.pivotal.pxf.service.utilities;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;

import org.apache.catalina.LifecycleException;
import org.apache.catalina.loader.WebappLoader;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;

/**
 * A WebappLoader that allows a customized classpath to be added through configuration in context xml.
 * Any additional classpath entry will be added to the default webapp classpath.
 * <p/>
 * <pre>
 * &lt;Context>
 *   &lt;Loader className="com.pivotal.pxf.service.utilities.CustomWebappLoader"
 *           classpathFiles="/somedir/classpathFile1;/somedir/classpathFile2"/>
 * &lt;/Context>
 * </pre>
 */
public class CustomWebappLoader extends WebappLoader {

	/**
	 * Because this class belongs in tcServer itself, logs go into tcServer's log facility that is separate
	 * from the web app's log facility.
	 *
	 * Logs are directed to catalina.log file. By default only INFO or higher messages are logged.
	 * To change log level, add the following line to {catalina.base}/conf/logging.properties
	 * <code>com.pivotal.pxf.level = FINE/INFO/WARNING</code> (FINE = debug).
	 */
	private static final Log LOG = LogFactory.getLog(CustomWebappLoader.class);

	/**
	 * Classpath files containing path entries, separated by new line.
	 * Globbing is supported for the file name.
	 * e.g:
	 * somedir
	 * anotherdir/somejar.jar
	 * anotherone/hadoop*.jar
	 * anotherone/pxf*[0-9].jar
	 * Unix wildcard convention can be used to match a number of files
	 * (e.g. <code>*</code>, <code>[0-9]</code>, <code>?</code>), but not a number of directories.
	 *
	 * The files specified under classpathFiles must exist - if they can't be read an exception will be thrown.
	 */
	private String classpathFiles;

	/**
	 * Secondary classpath files - if these files are unavailable only a warning will be logged.
	 */
	private String secondaryClasspathFiles;

	/**
	 * Constructs a WebappLoader with no defined parent class loader (actual parent will be the system class loader).
	 */
	public CustomWebappLoader() {
		super();
	}

	/**
	 * Constructs a WebappLoader with the specified class loader to be defined as the parent for this ClassLoader.
	 *
	 * @param parent The parent class loader
	 */
	public CustomWebappLoader(ClassLoader parent) {
		super(parent);
	}

	/**
	 * <code>classpathFiles</code> attribute is automatically set from the context xml file.
	 *
	 * @param classpathFiles Files separated by <code>;</code> Which contains <code>;</code> separated list of path entries.
	 */
	public void setClasspathFiles(String classpathFiles) {
		this.classpathFiles = classpathFiles;
	}

	/**
	 * <code>secondaryClasspathFiles</code> attribute is automatically set from the context xml file.
	 *
	 * @param secondaryClasspathFiles Files separated by <code>;</code> Which contains <code>;</code> separated list of path entries.
	 */
	public void setSecondaryClasspathFiles(String secondaryClasspathFiles) {
		this.secondaryClasspathFiles = secondaryClasspathFiles;
	}

	/**
	 * Implements {@link org.apache.catalina.util.LifecycleBase#startInternal()}.
	 *
	 * Loads the mandatory classpath files first, then the optional (secondary) ones,
	 * and finally delegates to the default WebappLoader startup.
	 *
	 * @throws LifecycleException if this component detects a fatal error that prevents this component from being used.
	 */
	@Override
	protected void startInternal() throws LifecycleException {

		addRepositories(classpathFiles, true);
		addRepositories(secondaryClasspathFiles, false);

		super.startInternal();
	}

	/**
	 * Adds all repositories described by the given <code>;</code>-separated list of classpath files.
	 *
	 * @param classpathFiles <code>;</code>-separated list of classpath file names; may be null/empty
	 *                       when the corresponding attribute was not configured in context xml
	 * @param throwException whether a missing/unreadable classpath file is fatal
	 * @throws LifecycleException when throwException is true and a classpath file cannot be processed
	 */
	private void addRepositories(String classpathFiles, boolean throwException) throws LifecycleException {

		// Guard against an unset attribute. Previously a null value (e.g. secondaryClasspathFiles
		// not configured in context xml) caused a NullPointerException on split() below.
		if (classpathFiles == null || classpathFiles.trim().isEmpty()) {
			if (throwException) {
				throw new LifecycleException("Classpath files attribute is not set");
			}
			LOG.info("No secondary classpath files configured, skipping");
			return;
		}

		for (String classpathFile : classpathFiles.split(";")) {

			String classpath = readClasspathFile(classpathFile, throwException);
			if (classpath == null) {
				continue;
			}

			ArrayList<String> classpathEntries = trimEntries(classpath.split("[\\r\\n]+"));
			LOG.info("Classpath file " + classpathFile + " has " + classpathEntries.size() + " entries");

			for (String entry : classpathEntries) {
				LOG.debug("Trying to load entry " + entry);

				int repositoriesCount = 0;
				Path pathEntry = Paths.get(entry);
				/*
				 * For each entry, we look at the parent directory and try to match each of the files
				 * inside it to the file name or pattern in the file name (the last part of the path).
				 * E.g., for path '/some/path/with/pattern*', the parent directory will be '/some/path/with/'
				 * and the file name will be 'pattern*'. Each file under that directory matching
				 * this pattern will be added to the class loader repository.
				 */
				Path parentDir = pathEntry.getParent();
				if (parentDir == null) {
					// A bare file name (e.g. 'some.jar') has no parent directory to glob over.
					// Previously this caused an uncaught NullPointerException inside
					// Files.newDirectoryStream (NPE is not an IOException, so it escaped the catch).
					LOG.warn("Entry " + entry + " has no parent directory, skipping");
					continue;
				}
				try (DirectoryStream<Path> repositories = Files.newDirectoryStream(parentDir,
						pathEntry.getFileName().toString())) {
					for (Path repository : repositories) {
						if (addPathToRepository(repository, entry)) {
							repositoriesCount++;
						}
					}
				} catch (IOException e) {
					LOG.warn("Failed to load entry " + entry + ": " + e);
				}
				if (repositoriesCount == 0) {
					LOG.warn("Entry " + entry + " doesn't match any files");
				}
				LOG.debug("Loaded " + repositoriesCount + " repositories from entry " + entry);
			}
		}
	}

	/**
	 * Reads the content of a single classpath file.
	 *
	 * @param classpathFile  file to read
	 * @param throwException whether an unreadable file is fatal
	 * @return the file content, or null when unreadable and throwException is false
	 * @throws LifecycleException when the file is unreadable and throwException is true
	 */
	private String readClasspathFile(String classpathFile, boolean throwException) throws LifecycleException {
		String classpath = null;
		try {
			LOG.info("Trying to read classpath file " + classpathFile);
			classpath = new String(Files.readAllBytes(Paths.get(classpathFile)));
		} catch (IOException ioe) {
			LOG.warn("Failed to read classpath file: " + ioe);
			if (throwException) {
				throw new LifecycleException("Failed to read classpath file: " + ioe, ioe);
			}
		}
		return classpath;
	}

	/**
	 * Returns a list of valid classpath entries, excluding null, empty and comment lines.
	 *
	 * @param classpathEntries original entries
	 * @return valid entries
	 */
	private ArrayList<String> trimEntries(String[] classpathEntries) {

		ArrayList<String> trimmed = new ArrayList<String>();
		int line = 0;
		for (String entry : classpathEntries) {
			line++;
			if (entry == null) {
				LOG.debug("Skipping entry #" + line + " (null)");
				continue;
			}
			entry = entry.trim();
			if (entry.isEmpty() || entry.startsWith("#")) {
				LOG.debug("Skipping entry #" + line + " (" + entry + ")");
				continue;
			}
			trimmed.add(entry);
		}
		return trimmed;
	}

	/**
	 * Adds a single path to the class loader repository, verifying it is readable first.
	 *
	 * @param path  path to add
	 * @param entry originating classpath entry (for logging only)
	 * @return true when the path was added, false otherwise
	 */
	private boolean addPathToRepository(Path path, String entry) {

		try {
			URI pathUri = path.toUri();
			String pathUriStr = pathUri.toString();
			File file = new File(pathUri);
			if (!file.canRead()) {
				throw new FileNotFoundException(pathUriStr + " cannot be read");
			}
			addRepository(pathUriStr);
			LOG.debug("Repository " + pathUriStr + " added from entry " + entry);
			return true;
		} catch (Exception e) {
			LOG.warn("Failed to load path " + path + " to repository: " + e);
		}
		return false;
	}
}
apache-2.0
nchandrappa/incubator-geode
gemfire-core/src/test/java/com/gemstone/gemfire/cache/query/internal/index/ConcurrentIndexInitOnOverflowRegionDUnitTest.java
15454
/*=========================================================================
 * Copyright (c) 2010-2014 Pivotal Software, Inc. All Rights Reserved.
 * This product is protected by U.S. and international copyright
 * and intellectual property laws. Pivotal products are covered by
 * one or more patents listed at http://www.pivotal.io/patents.
 *=========================================================================
 */
package com.gemstone.gemfire.cache.query.internal.index;

import java.io.IOException;

import com.gemstone.gemfire.cache.AttributesFactory;
import com.gemstone.gemfire.cache.Cache;
import com.gemstone.gemfire.cache.CacheException;
import com.gemstone.gemfire.cache.DataPolicy;
import com.gemstone.gemfire.cache.DiskStore;
import com.gemstone.gemfire.cache.EvictionAction;
import com.gemstone.gemfire.cache.EvictionAlgorithm;
import com.gemstone.gemfire.cache.Region;
import com.gemstone.gemfire.cache.RegionFactory;
import com.gemstone.gemfire.cache.RegionShortcut;
import com.gemstone.gemfire.cache.client.ClientCache;
import com.gemstone.gemfire.cache.client.ClientCacheFactory;
import com.gemstone.gemfire.cache.client.ClientRegionShortcut;
import com.gemstone.gemfire.cache.query.Index;
import com.gemstone.gemfire.cache.query.QueryService;
import com.gemstone.gemfire.cache.query.data.Portfolio;
import com.gemstone.gemfire.cache.query.data.PortfolioData;
import com.gemstone.gemfire.cache.query.internal.index.IndexManager.TestHook;
import com.gemstone.gemfire.cache.query.partitioned.PRQueryDUnitHelper;
import com.gemstone.gemfire.cache.util.BridgeServer;
import com.gemstone.gemfire.cache30.CacheSerializableRunnable;
import com.gemstone.gemfire.cache30.CacheTestCase;
import com.gemstone.gemfire.internal.cache.EvictionAttributesImpl;

import dunit.AsyncInvocation;
import dunit.DistributedTestCase;
import dunit.Host;
import dunit.VM;

/**
 * Distributed regression tests checking that concurrent index initialization
 * on an overflow-to-disk region does not deadlock with concurrent entry
 * updates/destroys, and that index updates are blocked while a region.clear()
 * reinitializes the indexes. Timing is coordinated through
 * {@link IndexManager#testHook} and the static {@code hooked} flag.
 *
 * NOTE(review): helper methods such as pause(), getLogWriter(),
 * addExpectedException(), getServerHostName() and disconnectFromDS() are
 * presumably inherited from the dunit/CacheTestCase base classes — not
 * visible in this file.
 *
 * @author shobhit
 */
public class ConcurrentIndexInitOnOverflowRegionDUnitTest extends CacheTestCase {

  // Helper used to obtain the Cache instance inside each member VM.
  PRQueryDUnitHelper PRQHelp = new PRQueryDUnitHelper("");

  // Region name; set by each test method before the VMs use it.
  String name;

  final int redundancy = 0; // NOTE(review): appears unused in this class

  private int cnt = 0; // NOTE(review): appears unused in this class

  private int cntDest = 1; // NOTE(review): appears unused in this class

  // Set by the test hooks while index maintenance is paused; the test threads
  // spin on this flag to synchronize with the hooked maintenance thread.
  public static volatile boolean hooked = false;

  // Port of the bridge (cache) server started in vm0; read back through
  // getCacheServerPort() via VM.invokeInt().
  private static int bridgeServerPort;

  /**
   * @param name test method name, passed to the dunit base class
   */
  public ConcurrentIndexInitOnOverflowRegionDUnitTest(String name) {
    super(name);
  }

  /**
   * Verifies that creating an index while an entry destroy is paused inside
   * index maintenance (via IndexManagerTestHook) does not deadlock on a
   * replicated overflow region. A join timeout of 30s is treated as deadlock.
   */
  public void testAsyncIndexInitDuringEntryDestroyAndQueryOnRR() {
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    name = "PartionedPortfoliosPR";
    // Create Overflow Persistent Partition Region
    vm0.invoke(new CacheSerializableRunnable(
        "Create local region with synchronous index maintenance") {
      @Override
      public void run2() throws CacheException {
        Cache cache = PRQHelp.getCache();
        Region partitionRegion = null;
        IndexManager.testHook = null;
        try {
          DiskStore ds = cache.findDiskStore("disk");
          if (ds == null) {
            ds = cache.createDiskStoreFactory().setDiskDirs(getDiskDirs())
                .create("disk");
          }

          AttributesFactory attr = new AttributesFactory();
          attr.setValueConstraint(PortfolioData.class);
          attr.setIndexMaintenanceSynchronous(true);
          // Evict beyond a single entry so that most values live on disk.
          EvictionAttributesImpl evicAttr = new EvictionAttributesImpl()
              .setAction(EvictionAction.OVERFLOW_TO_DISK);
          evicAttr.setAlgorithm(EvictionAlgorithm.LRU_ENTRY).setMaximum(1);
          attr.setEvictionAttributes(evicAttr);
          attr.setDataPolicy(DataPolicy.REPLICATE);
          // attr.setPartitionAttributes(new
          // PartitionAttributesFactory().setTotalNumBuckets(1).create());
          attr.setDiskStoreName("disk");
          RegionFactory regionFactory = cache.createRegionFactory(attr.create());
          partitionRegion = regionFactory.create(name);
        } catch (IllegalStateException ex) {
          getLogWriter().warning("Creation caught IllegalStateException", ex);
        }
        assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
        assertNotNull("Region ref null", partitionRegion);
        assertTrue("Region ref claims to be destroyed",
            !partitionRegion.isDestroyed());
        // Create Indexes
        try {
          Index index = cache.getQueryService().createIndex("statusIndex",
              "p.status", "/" + name + " p");
          assertNotNull(index);
        } catch (Exception e1) {
          e1.printStackTrace();
          fail("Index creation failed");
        }
      }
    });

    // Start changing the value in Region which should turn into a deadlock if
    // the fix is not there
    AsyncInvocation asyncInv1 = vm0.invokeAsync(new CacheSerializableRunnable(
        "Change value in region") {
      @Override
      public void run2() throws CacheException {
        Cache cache = PRQHelp.getCache();

        // Do a put in region.
        Region r = PRQHelp.getCache().getRegion(name);
        for (int i = 0; i < 100; i++) {
          r.put(i, new PortfolioData(i));
        }

        assertNull(IndexManager.testHook);
        // Pause index maintenance on the next destroy via the hook.
        IndexManager.testHook = new IndexManagerTestHook();

        // Destroy one of the values.
        PRQHelp.getCache().getLogger().fine("Destroying the value");
        r.destroy(1);

        IndexManager.testHook = null;
      }
    });

    AsyncInvocation asyncInv2 = vm0.invokeAsync(new CacheSerializableRunnable(
        "Run query on region") {
      @Override
      public void run2() throws CacheException {
        Cache cache = PRQHelp.getCache();

        // Wait until the destroy above is paused inside index maintenance.
        while (!hooked) {
          pause(100);
        }
        // Create and hence initialize Index
        try {
          Index index = cache.getQueryService().createIndex("idIndex", "p.ID",
              "/" + name + " p");
          assertNotNull(index);
        } catch (Exception e1) {
          e1.printStackTrace();
          fail("Index creation failed");
        }
      }
    });

    // If we take more than 30 seconds then its a deadlock.
    DistributedTestCase.join(asyncInv2, 30 * 1000, PRQHelp.getCache()
        .getLogger());
    DistributedTestCase.join(asyncInv1, 30 * 1000, PRQHelp.getCache()
        .getLogger());
  }

  /**
   * Same deadlock scenario as above, but the entry updates/destroys originate
   * from a client VM (vm1) through a bridge server hosted in vm0.
   */
  public void testAsyncIndexInitDuringEntryPutUsingClientOnRR() {
    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);
    VM vm1 = host.getVM(1);

    addExpectedException("Unexpected IOException:");
    addExpectedException("java.net.SocketException");

    name = "PartionedPortfoliosPR";
    // Create Overflow Persistent Partition Region
    vm0.invoke(new CacheSerializableRunnable(
        "Create local region with synchronous index maintenance") {
      @Override
      public void run2() throws CacheException {
        Cache cache = PRQHelp.getCache();
        Region partitionRegion = null;
        IndexManager.testHook = null;
        try {
          // Host a bridge server so the client in vm1 can connect.
          BridgeServer bridge = cache.addBridgeServer();
          bridge.setPort(0);
          bridge.start();
          bridgeServerPort = bridge.getPort();

          DiskStore ds = cache.findDiskStore("disk");
          if (ds == null) {
            ds = cache.createDiskStoreFactory().setDiskDirs(getDiskDirs())
                .create("disk");
          }

          AttributesFactory attr = new AttributesFactory();
          attr.setValueConstraint(PortfolioData.class);
          attr.setIndexMaintenanceSynchronous(true);
          EvictionAttributesImpl evicAttr = new EvictionAttributesImpl()
              .setAction(EvictionAction.OVERFLOW_TO_DISK);
          evicAttr.setAlgorithm(EvictionAlgorithm.LRU_ENTRY).setMaximum(1);
          attr.setEvictionAttributes(evicAttr);
          attr.setDataPolicy(DataPolicy.REPLICATE);
          // attr.setPartitionAttributes(new
          // PartitionAttributesFactory().setTotalNumBuckets(1).create());
          attr.setDiskStoreName("disk");
          RegionFactory regionFactory = cache.createRegionFactory(attr.create());
          partitionRegion = regionFactory.create(name);
        } catch (IllegalStateException ex) {
          getLogWriter().warning("Creation caught IllegalStateException", ex);
        } catch (IOException e) {
          e.printStackTrace();
        }
        assertNotNull("Region " + name + " not in cache", cache.getRegion(name));
        assertNotNull("Region ref null", partitionRegion);
        assertTrue("Region ref claims to be destroyed",
            !partitionRegion.isDestroyed());
        // Create Indexes
        try {
          Index index = cache.getQueryService().createIndex("idIndex", "p.ID",
              "/" + name + " p");
          assertNotNull(index);
        } catch (Exception e1) {
          e1.printStackTrace();
          fail("Index creation failed");
        }
      }
    });

    final int port = vm0.invokeInt(ConcurrentIndexInitOnOverflowRegionDUnitTest.class,
        "getCacheServerPort");
    final String host0 = getServerHostName(vm0.getHost());

    // Start changing the value in Region which should turn into a deadlock if
    // the fix is not there
    vm1.invoke(new CacheSerializableRunnable(
        "Change value in region") {
      @Override
      public void run2() throws CacheException {
        disconnectFromDS();
        ClientCache clientCache = new ClientCacheFactory().addPoolServer(host0, port).create();

        // Do a put in region.
        Region r = clientCache.createClientRegionFactory(ClientRegionShortcut.PROXY).create(name);
        for (int i = 0; i < 100; i++) {
          r.put(i, new PortfolioData(i));
        }
      }
    });

    vm0.invoke(new CacheSerializableRunnable("Set Test Hook") {
      @Override
      public void run2() throws CacheException {
        // Set test hook before client operation
        assertNull(IndexManager.testHook);
        IndexManager.testHook = new IndexManagerTestHook();
      }
    });

    AsyncInvocation asyncInv1 = vm1.invokeAsync(new CacheSerializableRunnable("Change value in region") {
      @Override
      public void run2() throws CacheException {
        ClientCache clientCache = ClientCacheFactory.getAnyInstance();

        // Do a put in region.
        Region r = clientCache.getRegion(name);

        // Destroy one of the values.
        clientCache.getLogger().fine("Destroying the value");
        r.destroy(1);
      }
    });

    AsyncInvocation asyncInv2 = vm0.invokeAsync(new CacheSerializableRunnable(
        "Run query on region") {
      @Override
      public void run2() throws CacheException {
        Cache cache = PRQHelp.getCache();

        // Wait until the client-driven destroy is paused inside index maintenance.
        while (!hooked) {
          pause(100);
        }
        // Create Indexes
        try {
          Index index = cache.getQueryService().createIndex("statusIndex",
              "p.status", "/" + name + " p");
          assertNotNull(index);
        } catch (Exception e1) {
          e1.printStackTrace();
          fail("Index creation failed");
        }
      }
    });

    // If we take more than 30 seconds then its a deadlock.
    DistributedTestCase.join(asyncInv2, 30 * 1000, PRQHelp.getCache()
        .getLogger());
    DistributedTestCase.join(asyncInv1, 30 * 1000, PRQHelp.getCache()
        .getLogger());

    vm0.invoke(new CacheSerializableRunnable("Set Test Hook") {
      @Override
      public void run2() throws CacheException {
        assertNotNull(IndexManager.testHook);
        IndexManager.testHook = null;
      }
    });
  }

  /**
   * This tests if index updates are blocked while region.clear() is
   * called and indexes are being reinitialized.
   */
  public void testIndexUpdateWithRegionClear() {

    Host host = Host.getHost(0);
    VM vm0 = host.getVM(0);

    final String regionName = "portfolio";

    hooked = false;

    // Create region and an index on it
    vm0.invoke(new CacheSerializableRunnable("Create region and index") {
      @Override
      public void run2() throws CacheException {
        Cache cache = PRQHelp.getCache();
        Region region = cache.createRegionFactory(RegionShortcut.LOCAL).create(regionName);
        QueryService qService = cache.getQueryService();

        try {
          qService.createIndex("idIndex", "ID", "/"+regionName);
          qService.createIndex("secIdIndex", "pos.secId", "/" + regionName + " p, p.positions.values pos");
        } catch (Exception e) {
          fail("Index creation failed." + e);
        }
      }
    });

    // Hook that pauses index maintenance at spot 6 (processAction) until the
    // clear-region runnable below unhooks it.
    final class LocalTestHook implements TestHook {

      @Override
      public void hook(int spot) throws RuntimeException {
        switch (spot) {
        case 6: // processAction in IndexManager
          hooked = true;
          // wait untill some thread unhooks.
          while (hooked) {
            pause(20);
          }
          break;
        default:
          break;
        }
      }
    }

    // Asynch invocation for continuous index updates
    AsyncInvocation indexUpdateAsysnch = vm0.invokeAsync(new CacheSerializableRunnable("index updates") {

      @Override
      public void run2() throws CacheException {
        Region region = PRQHelp.getCache().getRegion(regionName);
        for (int i=0; i<100; i++) {
          if (i == 50)
            IndexManager.testHook = new LocalTestHook();
          region.put(i, new Portfolio(i));
          if (i == 50)
            pause(20);
        }
      }
    });

    // Region.clear() which should block other region updates.
    vm0.invoke(new CacheSerializableRunnable("Clear the region") {

      @Override
      public void run2() throws CacheException {
        Region region = PRQHelp.getCache().getRegion(regionName);
        while(!hooked) {
          pause(100);
        }
        if (hooked) {
          hooked = false;
          IndexManager.testHook = null;
          region.clear();
        }

        try {
          QueryService qservice = PRQHelp.getCache().getQueryService();
          Index index = qservice.getIndex(region, "idIndex");
          if (((CompactRangeIndex)index).getIndexStorage().size() > 1) {
            fail("After clear region size is supposed to be zero as all index updates are blocked. Current region size is: "+ region.size());
          }
        } finally {
          IndexManager.testHook = null;
        }
      }
    });

    // Kill asynch thread
    DistributedTestCase.join(indexUpdateAsysnch, 20000, PRQHelp.getCache()
        .getLogger());

    //Verify region size which must be 50
    vm0.invoke(new CacheSerializableRunnable("Check region size") {

      @Override
      public void run2() throws CacheException {
        Region region = PRQHelp.getCache().getRegion(regionName);
        if (region.size() > 50) {
          fail("After clear region size is supposed to be 50 as all index updates are blocked " + region.size());
        }
      }
    });
  }

  /**
   * Test hook that pauses the index maintenance thread for 10 seconds at
   * spot 6, signalling the pause through the static {@code hooked} flag.
   */
  public class IndexManagerTestHook implements
      com.gemstone.gemfire.cache.query.internal.index.IndexManager.TestHook {
    public void hook(final int spot) throws RuntimeException {
      switch (spot) {
      case 6: // Before Index update and after region entry lock.
        hooked = true;
        getLogWriter().fine("IndexManagerTestHook is hooked.");
        pause(10000);
        hooked = false;
        break;
      default:
        break;
      }
    }
  }

  // Invoked remotely via VM.invokeInt() to fetch the bridge server port.
  private static int getCacheServerPort() {
    return bridgeServerPort;
  }
}
apache-2.0
DariusX/camel
core/camel-api/src/main/java/org/apache/camel/FailedToCreateRouteException.java
2401
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel; import org.apache.camel.util.URISupport; /** * Exception when failing to create a {@link org.apache.camel.Route}. */ public class FailedToCreateRouteException extends RuntimeCamelException { private final String routeId; public FailedToCreateRouteException(String routeId, String route, Throwable cause) { super("Failed to create route " + routeId + ": " + getRouteMessage(route) + " because of " + getExceptionMessage(cause), cause); this.routeId = routeId; } public FailedToCreateRouteException(String routeId, String route, String at, Throwable cause) { super("Failed to create route " + routeId + " at: >>> " + at + " <<< in route: " + getRouteMessage(route) + " because of " + getExceptionMessage(cause), cause); this.routeId = routeId; } public String getRouteId() { return routeId; } protected static String getExceptionMessage(Throwable cause) { return cause.getMessage() != null ? 
cause.getMessage() : cause.getClass().getSimpleName(); } protected static String getRouteMessage(String route) { // ensure to sanitize uri's in the route so we do not show sensitive information such as passwords route = URISupport.sanitizeUri(route); // cut the route after 60 chars so it won't be too big in the message // users just need to be able to identify the route so they know where to look if (route.length() > 60) { return route.substring(0, 60) + "..."; } else { return route; } } }
apache-2.0
goodwinnk/intellij-community
platform/lang-impl/src/com/intellij/codeInsight/template/impl/TemplateSegments.java
3790
/*
 * Copyright 2000-2014 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.template.impl;

import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;

import java.util.ArrayList;

/**
 * Bookkeeping for the editable segments of a live template: each segment is
 * backed by a {@link RangeMarker} in the editor's document, addressed by index.
 */
class TemplateSegments {
  private final ArrayList<RangeMarker> mySegments = new ArrayList<>();
  private final Editor myEditor;

  TemplateSegments(Editor editor) {
    myEditor = editor;
  }

  /** Start offset of segment {@code i} in the document. */
  int getSegmentStart(int i) {
    return mySegments.get(i).getStartOffset();
  }

  /** End offset of segment {@code i} in the document. */
  int getSegmentEnd(int i) {
    return mySegments.get(i).getEndOffset();
  }

  /** Whether the marker backing segment {@code i} is still valid. */
  boolean isValid(int i) {
    return mySegments.get(i).isValid();
  }

  /** Disposes every marker and forgets all segments. */
  void removeAll() {
    for (RangeMarker marker : mySegments) {
      marker.dispose();
    }
    mySegments.clear();
  }

  /** Appends a new segment covering [start, end). */
  void addSegment(int start, int end) {
    mySegments.add(myEditor.getDocument().createRangeMarker(start, end));
  }

  /** Sets greediness on both sides of every segment. */
  void setSegmentsGreedy(boolean greedy) {
    for (RangeMarker marker : mySegments) {
      marker.setGreedyToRight(greedy);
      marker.setGreedyToLeft(greedy);
    }
  }

  /** True when at least one segment's marker has been invalidated. */
  boolean isInvalid() {
    for (RangeMarker marker : mySegments) {
      if (!marker.isValid()) {
        return true;
      }
    }
    return false;
  }

  void replaceSegmentAt(int index, int start, int end) {
    replaceSegmentAt(index, start, end, false);
  }

  /**
   * Replaces the marker at {@code index} with a fresh one over [start, end).
   * When {@code preserveGreediness} is false the new marker is made greedy on
   * both sides; otherwise the old marker's greediness is carried over.
   */
  void replaceSegmentAt(int index, int start, int end, boolean preserveGreediness) {
    RangeMarker old = mySegments.get(index);
    boolean wasGreedyLeft = old.isGreedyToLeft();
    boolean wasGreedyRight = old.isGreedyToRight();
    old.dispose();

    Document document = myEditor.getDocument();
    RangeMarker replacement = document.createRangeMarker(start, end);
    replacement.setGreedyToLeft(wasGreedyLeft || !preserveGreediness);
    replacement.setGreedyToRight(wasGreedyRight || !preserveGreediness);
    mySegments.set(index, replacement);
  }

  /** Sets greediness on both sides of the segments adjacent to {@code segmentNumber}. */
  void setNeighboursGreedy(int segmentNumber, boolean greedy) {
    if (segmentNumber > 0) {
      RangeMarker before = mySegments.get(segmentNumber - 1);
      before.setGreedyToLeft(greedy);
      before.setGreedyToRight(greedy);
    }
    int next = segmentNumber + 1;
    if (next < mySegments.size()) {
      RangeMarker after = mySegments.get(next);
      after.setGreedyToLeft(greedy);
      after.setGreedyToRight(greedy);
    }
  }

  /**
   * IDEADEV-13618
   *
   * Prevents two different segments from growing simultaneously when they both
   * start at the same offset: every other segment sharing the start offset of
   * segment {@code number} loses its left greediness. A value of -1 is a no-op.
   */
  void lockSegmentAtTheSameOffsetIfAny(int number) {
    if (number == -1) {
      return;
    }

    int offset = mySegments.get(number).getStartOffset();
    for (int i = 0; i < mySegments.size(); i++) {
      if (i == number) {
        continue;
      }
      RangeMarker other = mySegments.get(i);
      if (other.getStartOffset() == offset) {
        other.setGreedyToLeft(false);
      }
    }
  }

  /** Number of segments currently tracked. */
  int getSegmentsCount() {
    return mySegments.size();
  }
}
apache-2.0
ollie314/spring-security
web/src/test/java/org/springframework/security/web/authentication/rememberme/JdbcTokenRepositoryImplTests.java
7343
/*
 * Copyright 2002-2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.springframework.security.web.authentication.rememberme;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

import java.sql.Timestamp;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Map;

import org.apache.commons.logging.Log;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.dao.EmptyResultDataAccessException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.datasource.SingleConnectionDataSource;
import org.springframework.test.util.ReflectionTestUtils;

/**
 * Tests for {@code JdbcTokenRepositoryImpl} against an in-memory HSQLDB.
 * The {@code persistent_logins} table is created before and dropped after
 * every test, so each test starts from an empty repository.
 *
 * @author Luke Taylor
 */
@RunWith(MockitoJUnitRunner.class)
public class JdbcTokenRepositoryImplTests {

	// Mock logger injected into the repository by reflection so that logging
	// behavior can be verified (see retrievingTokenWithNoSeriesReturnsNull).
	@Mock
	private Log logger;

	// Single shared in-memory HSQLDB connection for the whole test class.
	private static SingleConnectionDataSource dataSource;

	private JdbcTokenRepositoryImpl repo;

	private JdbcTemplate template;

	@BeforeClass
	public static void createDataSource() {
		dataSource = new SingleConnectionDataSource("jdbc:hsqldb:mem:tokenrepotest", "sa", "", true);
		dataSource.setDriverClassName("org.hsqldb.jdbc.JDBCDriver");
	}

	@AfterClass
	public static void clearDataSource() throws Exception {
		dataSource.destroy();
		dataSource = null;
	}

	/**
	 * Wires up a fresh repository (with the mock logger) and creates the
	 * persistent_logins table used by the remember-me token storage.
	 */
	@Before
	public void populateDatabase() {
		repo = new JdbcTokenRepositoryImpl();
		ReflectionTestUtils.setField(repo, "logger", logger);
		repo.setDataSource(dataSource);
		repo.initDao();
		template = repo.getJdbcTemplate();
		template.execute("create table persistent_logins (username varchar(100) not null, "
				+ "series varchar(100) not null, token varchar(500) not null, last_used timestamp not null)");
	}

	@After
	public void clearData() {
		template.execute("drop table persistent_logins");
	}

	// createNewToken must persist all four token attributes verbatim.
	@Test
	public void createNewTokenInsertsCorrectData() {
		Timestamp currentDate = new Timestamp(Calendar.getInstance().getTimeInMillis());
		PersistentRememberMeToken token = new PersistentRememberMeToken("joeuser", "joesseries", "atoken", currentDate);
		repo.createNewToken(token);

		Map<String, Object> results = template.queryForMap("select * from persistent_logins");

		assertThat(results.get("last_used")).isEqualTo(currentDate);
		assertThat(results.get("username")).isEqualTo("joeuser");
		assertThat(results.get("series")).isEqualTo("joesseries");
		assertThat(results.get("token")).isEqualTo("atoken");
	}

	// getTokenForSeries must map the stored row back to a token object.
	@Test
	public void retrievingTokenReturnsCorrectData() {

		template.execute("insert into persistent_logins (series, username, token, last_used) values "
				+ "('joesseries', 'joeuser', 'atoken', '2007-10-09 18:19:25.000000000')");
		PersistentRememberMeToken token = repo.getTokenForSeries("joesseries");

		assertThat(token.getUsername()).isEqualTo("joeuser");
		assertThat(token.getSeries()).isEqualTo("joesseries");
		assertThat(token.getTokenValue()).isEqualTo("atoken");
		assertThat(token.getDate()).isEqualTo(Timestamp.valueOf("2007-10-09 18:19:25.000000000"));
	}

	// A duplicate series makes the lookup ambiguous, so null is expected.
	@Test
	public void retrievingTokenWithDuplicateSeriesReturnsNull() {
		template.execute("insert into persistent_logins (series, username, token, last_used) values "
				+ "('joesseries', 'joeuser', 'atoken2', '2007-10-19 18:19:25.000000000')");
		template.execute("insert into persistent_logins (series, username, token, last_used) values "
				+ "('joesseries', 'joeuser', 'atoken', '2007-10-09 18:19:25.000000000')");

		// List results =
		// template.queryForList("select * from persistent_logins where series =
		// 'joesseries'");

		assertThat(repo.getTokenForSeries("joesseries")).isNull();
	}

	// SEC-1964
	// A missing series must return null and log the empty result at debug level.
	@Test
	public void retrievingTokenWithNoSeriesReturnsNull() {
		when(logger.isDebugEnabled()).thenReturn(true);

		assertThat(repo.getTokenForSeries("missingSeries")).isNull();

		verify(logger).isDebugEnabled();
		verify(logger).debug(eq("Querying token for series 'missingSeries' returned no results."),
				any(EmptyResultDataAccessException.class));
		verifyNoMoreInteractions(logger);
	}

	// removeUserTokens must delete every row for the given username.
	@Test
	public void removingUserTokensDeletesData() {
		template.execute("insert into persistent_logins (series, username, token, last_used) values "
				+ "('joesseries2', 'joeuser', 'atoken2', '2007-10-19 18:19:25.000000000')");
		template.execute("insert into persistent_logins (series, username, token, last_used) values "
				+ "('joesseries', 'joeuser', 'atoken', '2007-10-09 18:19:25.000000000')");

		// List results =
		// template.queryForList("select * from persistent_logins where series =
		// 'joesseries'");

		repo.removeUserTokens("joeuser");

		List<Map<String, Object>> results = template
				.queryForList("select * from persistent_logins where username = 'joeuser'");

		assertThat(results).isEmpty();
	}

	// updateToken must replace the token value and advance last_used,
	// leaving username and series untouched.
	@Test
	public void updatingTokenModifiesTokenValueAndLastUsed() {
		Timestamp ts = new Timestamp(System.currentTimeMillis() - 1);
		template.execute("insert into persistent_logins (series, username, token, last_used) values "
				+ "('joesseries', 'joeuser', 'atoken', '" + ts.toString() + "')");
		repo.updateToken("joesseries", "newtoken", new Date());

		Map<String, Object> results = template
				.queryForMap("select * from persistent_logins where series = 'joesseries'");

		assertThat(results.get("username")).isEqualTo("joeuser");
		assertThat(results.get("series")).isEqualTo("joesseries");
		assertThat(results.get("token")).isEqualTo("newtoken");
		Date lastUsed = (Date) results.get("last_used");
		assertThat(lastUsed.getTime() > ts.getTime()).isTrue();
	}

	// With createTableOnStartup the repository must create a table with the
	// expected four columns itself (the @Before-created table is dropped first).
	@Test
	public void createTableOnStartupCreatesCorrectTable() {
		template.execute("drop table persistent_logins");
		repo = new JdbcTokenRepositoryImpl();
		repo.setDataSource(dataSource);
		repo.setCreateTableOnStartup(true);
		repo.initDao();

		template.queryForList("select username,series,token,last_used from persistent_logins");
	}

	// SEC-2879
	// The caller-supplied last-used Date must be passed through to the update
	// statement, not replaced by "now".
	@Test
	public void updateUsesLastUsed() {
		JdbcTemplate template = mock(JdbcTemplate.class);
		Date lastUsed = new Date(1424841314059L);
		JdbcTokenRepositoryImpl repository = new JdbcTokenRepositoryImpl();
		repository.setJdbcTemplate(template);

		repository.updateToken("series", "token", lastUsed);

		verify(template).update(anyString(), anyString(), eq(lastUsed), anyString());
	}
}
apache-2.0
jbertouch/elasticsearch
core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorBuilder.java
5194
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.search.aggregations.bucket.histogram;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric;

import java.io.IOException;
import java.util.Objects;

/**
 * Builder for the {@code date_histogram} aggregation. Extends the plain
 * histogram builder with an optional calendar-aware
 * {@link DateHistogramInterval} and string-based offset parsing.
 */
public class DateHistogramAggregatorBuilder extends AbstractHistogramBuilder<DateHistogramAggregatorBuilder> {

    // Stateless prototype used by the registration/deserialization machinery
    // (see createFactoryFromStream below).
    public static final DateHistogramAggregatorBuilder PROTOTYPE = new DateHistogramAggregatorBuilder("");

    // When null, the numeric interval from AbstractHistogramBuilder is used
    // instead (see doXContentInterval / writeFactoryToStream).
    private DateHistogramInterval dateHistogramInterval;

    /**
     * Creates a builder for an aggregation with the given name.
     *
     * @param name the aggregation name
     */
    public DateHistogramAggregatorBuilder(String name) {
        super(name, InternalDateHistogram.HISTOGRAM_FACTORY);
    }

    /**
     * Sets the calendar-aware interval (e.g. {@code 1d}, {@code month}).
     *
     * @param dateHistogramInterval the interval, must not be null
     * @return this builder, for chaining
     * @throws IllegalArgumentException if the interval is null
     */
    public DateHistogramAggregatorBuilder dateHistogramInterval(DateHistogramInterval dateHistogramInterval) {
        if (dateHistogramInterval == null) {
            throw new IllegalArgumentException("[dateHistogramInterval] must not be null: [" + name + "]");
        }
        this.dateHistogramInterval = dateHistogramInterval;
        return this;
    }

    /**
     * Sets the bucket offset from a time-value string such as {@code "1h"} or
     * {@code "-30m"}.
     *
     * @param offset the offset expression, must not be null
     * @return this builder, for chaining
     * @throws IllegalArgumentException if the offset is null
     */
    public DateHistogramAggregatorBuilder offset(String offset) {
        if (offset == null) {
            throw new IllegalArgumentException("[offset] must not be null: [" + name + "]");
        }
        return offset(parseStringOffset(offset));
    }

    /**
     * Parses an offset string into milliseconds. A leading '-' negates the
     * value; a leading '+' is accepted and skipped.
     *
     * @param offset the offset expression, e.g. "+1h" or "-30m"
     * @return the offset in milliseconds
     */
    protected static long parseStringOffset(String offset) {
        if (offset.charAt(0) == '-') {
            return -TimeValue
                    .parseTimeValue(offset.substring(1), null,
                            DateHistogramAggregatorBuilder.class.getSimpleName() + ".parseOffset")
                    .millis();
        }
        int beginIndex = offset.charAt(0) == '+' ? 1 : 0;
        return TimeValue
                .parseTimeValue(offset.substring(beginIndex), null,
                        DateHistogramAggregatorBuilder.class.getSimpleName() + ".parseOffset")
                .millis();
    }

    /** Returns the configured calendar interval, or null if none was set. */
    public DateHistogramInterval dateHistogramInterval() {
        return dateHistogramInterval;
    }

    @Override
    protected DateHistogramAggregatorFactory innerBuild(AggregationContext context, ValuesSourceConfig<Numeric> config,
            AggregatorFactory<?> parent, Builder subFactoriesBuilder) throws IOException {
        return new DateHistogramAggregatorFactory(name, type, config, interval, dateHistogramInterval, offset, order,
                keyed, minDocCount, extendedBounds, context, parent, subFactoriesBuilder, metaData);
    }

    @Override
    public String getWriteableName() {
        return InternalDateHistogram.TYPE.name();
    }

    @Override
    protected XContentBuilder doXContentInterval(XContentBuilder builder, Params params) throws IOException {
        // Prefer the calendar interval when present; otherwise fall back to
        // the superclass's numeric interval rendering.
        if (dateHistogramInterval == null) {
            super.doXContentInterval(builder, params);
        } else {
            builder.value(dateHistogramInterval.toString());
        }
        return builder;
    }

    @Override
    protected DateHistogramAggregatorBuilder createFactoryFromStream(String name, StreamInput in) throws IOException {
        // Wire format (must mirror writeFactoryToStream): a boolean flag,
        // followed by the interval only when the flag is true.
        DateHistogramAggregatorBuilder factory = new DateHistogramAggregatorBuilder(name);
        if (in.readBoolean()) {
            factory.dateHistogramInterval = DateHistogramInterval.readFromStream(in);
        }
        return factory;
    }

    @Override
    protected void writeFactoryToStream(StreamOutput out) throws IOException {
        boolean hasDateInterval = dateHistogramInterval != null;
        out.writeBoolean(hasDateInterval);
        if (hasDateInterval) {
            dateHistogramInterval.writeTo(out);
        }
    }

    @Override
    protected int innerHashCode() {
        return Objects.hash(super.innerHashCode(), dateHistogramInterval);
    }

    @Override
    protected boolean innerEquals(Object obj) {
        // Callers (the superclass equals) guarantee obj is of this class.
        DateHistogramAggregatorBuilder other = (DateHistogramAggregatorBuilder) obj;
        return super.innerEquals(obj) && Objects.equals(dateHistogramInterval, other.dateHistogramInterval);
    }
}
apache-2.0
robin13/elasticsearch
server/src/main/java/org/elasticsearch/action/support/single/instance/InstanceShardOperationRequestBuilder.java
1908
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.action.support.single.instance; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.common.unit.TimeValue; public abstract class InstanceShardOperationRequestBuilder<Request extends InstanceShardOperationRequest<Request>, Response extends ActionResponse, RequestBuilder extends InstanceShardOperationRequestBuilder<Request, Response, RequestBuilder>> extends ActionRequestBuilder<Request, Response> { protected InstanceShardOperationRequestBuilder(ElasticsearchClient client, ActionType<Response> action, Request request) { super(client, action, request); } @SuppressWarnings("unchecked") public final RequestBuilder setIndex(String index) { request.index(index); return (RequestBuilder) this; } /** * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. */ @SuppressWarnings("unchecked") public final RequestBuilder setTimeout(TimeValue timeout) { request.timeout(timeout); return (RequestBuilder) this; } /** * A timeout to wait if the index operation can't be performed immediately. Defaults to {@code 1m}. */ @SuppressWarnings("unchecked") public final RequestBuilder setTimeout(String timeout) { request.timeout(timeout); return (RequestBuilder) this; } }
apache-2.0
keshvari/cas
cas-server-core/src/main/java/org/jasig/cas/authentication/principal/PersistentIdGenerator.java
1427
/*
 * Licensed to Apereo under one or more contributor license
 * agreements. See the NOTICE file distributed with this work
 * for additional information regarding copyright ownership.
 * Apereo licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file
 * except in compliance with the License.  You may obtain a
 * copy of the License at the following location:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.jasig.cas.authentication.principal;

import java.io.Serializable;

/**
 * Generates a unique consistent Id based on the principal.
 * <p>
 * Implementations are expected to be deterministic: the same
 * principal/service pair should always yield the same id, so that the id can
 * be used as a stable pseudonymous identifier. (Serializable so instances can
 * travel with other serialized CAS state.)
 *
 * @author Scott Battaglia
 * @since 3.1
 */
public interface PersistentIdGenerator extends Serializable {

    /**
     * Generates a PersistentId based on some algorithm plus the principal.
     *
     * @param principal the principal to generate the id for.
     * @param service the service for which the id may be generated.
     * @return the generated persistent id.
     */
    String generate(Principal principal, Service service);
}
apache-2.0
dsyer/spring-security
samples/gae/src/test/java/samples/gae/users/GaeDataStoreUserRegistryTests.java
1658
package samples.gae.users; import static org.junit.Assert.assertEquals; import java.util.EnumSet; import java.util.Set; import com.google.appengine.tools.development.testing.LocalDatastoreServiceTestConfig; import com.google.appengine.tools.development.testing.LocalServiceTestHelper; import org.junit.After; import org.junit.Before; import org.junit.Test; import samples.gae.security.AppRole; /** * @author Luke Taylor */ public class GaeDataStoreUserRegistryTests { private final LocalServiceTestHelper helper = new LocalServiceTestHelper(new LocalDatastoreServiceTestConfig()); @Before public void setUp() throws Exception { helper.setUp(); } @After public void tearDown() throws Exception { helper.tearDown(); } @Test public void correctDataIsRetrievedAfterInsert() { GaeDatastoreUserRegistry registry = new GaeDatastoreUserRegistry(); Set<AppRole> roles = EnumSet.of(AppRole.ADMIN, AppRole.USER); String userId = "someUserId"; GaeUser origUser = new GaeUser(userId, "nick", "nick@blah.com", "Forename", "Surname", roles, true); registry.registerUser(origUser); GaeUser loadedUser = registry.findUser(userId); assertEquals(loadedUser.getUserId(), origUser.getUserId()); assertEquals(true, loadedUser.isEnabled()); assertEquals(roles, loadedUser.getAuthorities()); assertEquals("nick", loadedUser.getNickname()); assertEquals("nick@blah.com", loadedUser.getEmail()); assertEquals("Forename", loadedUser.getForename()); assertEquals("Surname", loadedUser.getSurname()); } }
apache-2.0
McLeodMoores/starling
projects/time-series/src/main/java/com/opengamma/timeseries/precise/instant/InstantDoubleTimeSeries.java
7327
/**
 * Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.timeseries.precise.instant;

import org.threeten.bp.Instant;

import com.opengamma.timeseries.DoubleTimeSeries;
import com.opengamma.timeseries.DoubleTimeSeriesOperators.BinaryOperator;
import com.opengamma.timeseries.DoubleTimeSeriesOperators.UnaryOperator;
import com.opengamma.timeseries.precise.PreciseDoubleTimeSeries;
import com.opengamma.timeseries.precise.PreciseTimeSeries;

/**
 * A time series that stores {@code double} data values against {@code Instant} times.
 * <p>
 * The "time" key to the time-series is an {@code Instant}.
 * See {@link PreciseTimeSeries} for details about the "time" represented as a {@code long}.
 * <p>
 * This interface adds no new operations over {@link PreciseDoubleTimeSeries}; every
 * declaration below narrows an inherited method's return type to the
 * {@code Instant}-specific series type so callers can chain calls without casting.
 */
public interface InstantDoubleTimeSeries
    extends PreciseDoubleTimeSeries<Instant> {

  /**
   * Gets an iterator over the instant-value pairs.
   * <p>
   * Although the pairs are expressed as instances of {@code Map.Entry},
   * it is recommended to use the primitive methods on {@code InstantDoubleIterator}.
   *
   * @return the iterator, not null
   */
  @Override  // override for covariant return type
  InstantDoubleEntryIterator iterator();

  //-------------------------------------------------------------------------
  // Sub-series and slicing: all return the Instant-typed series.
  @Override  // override for covariant return type
  InstantDoubleTimeSeries subSeries(Instant startTime, Instant endTime);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries subSeries(Instant startTime, boolean includeStart, Instant endTime, boolean includeEnd);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries subSeriesFast(long startTime, long endTime);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries subSeriesFast(long startTime, boolean includeStart, long endTime, boolean includeEnd);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries head(int numItems);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries tail(int numItems);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries lag(int lagCount);

  //-------------------------------------------------------------------------
  // Generic operator application.
  @Override  // override for covariant return type
  InstantDoubleTimeSeries operate(UnaryOperator operator);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries operate(double other, BinaryOperator operator);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries operate(PreciseDoubleTimeSeries<?> otherTimeSeries, BinaryOperator operator);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionOperate(PreciseDoubleTimeSeries<?> otherTimeSeries, BinaryOperator operator);

  //-------------------------------------------------------------------------
  // Arithmetic: the scalar form applies to each point; the series form pairs
  // points by time (the "union" variants include unmatched times as well).
  @Override  // override for covariant return type
  InstantDoubleTimeSeries add(double amountToAdd);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries add(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionAdd(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  @Override  // override for covariant return type
  InstantDoubleTimeSeries subtract(double amountToSubtract);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries subtract(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionSubtract(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  @Override  // override for covariant return type
  InstantDoubleTimeSeries multiply(double amountToMultiplyBy);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries multiply(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionMultiply(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  @Override  // override for covariant return type
  InstantDoubleTimeSeries divide(double amountToDivideBy);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries divide(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionDivide(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  @Override  // override for covariant return type
  InstantDoubleTimeSeries power(double power);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries power(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionPower(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  @Override  // override for covariant return type
  InstantDoubleTimeSeries minimum(double minValue);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries minimum(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionMinimum(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  @Override  // override for covariant return type
  InstantDoubleTimeSeries maximum(double maxValue);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries maximum(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionMaximum(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  @Override  // override for covariant return type
  InstantDoubleTimeSeries average(double value);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries average(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries unionAverage(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  // Intersection selectors: keep only times present in both series.
  @Override  // override for covariant return type
  InstantDoubleTimeSeries intersectionFirstValue(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries intersectionSecondValue(DoubleTimeSeries<?> other);

  @Override  // override for covariant return type
  InstantDoubleTimeSeries noIntersectionOperation(DoubleTimeSeries<?> other);

  //-------------------------------------------------------------------------
  // Point-wise unary transforms.
  @Override  // override for covariant return type
  InstantDoubleTimeSeries negate();

  @Override  // override for covariant return type
  InstantDoubleTimeSeries reciprocal();

  @Override  // override for covariant return type
  InstantDoubleTimeSeries log();

  @Override  // override for covariant return type
  InstantDoubleTimeSeries log10();

  @Override  // override for covariant return type
  InstantDoubleTimeSeries abs();

  //-------------------------------------------------------------------------
  @Override  // override for covariant return type
  InstantDoubleTimeSeriesBuilder toBuilder();

}
apache-2.0
robertwb/incubator-beam
runners/flink/src/main/java/org/apache/beam/runners/flink/translation/types/EncodedValueComparator.java
5784
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.runners.flink.translation.types;

import java.io.IOException;
import java.util.Arrays;
import org.apache.beam.sdk.coders.Coder;
import org.apache.flink.api.common.typeutils.TypeComparator;
import org.apache.flink.core.memory.DataInputView;
import org.apache.flink.core.memory.DataOutputView;
import org.apache.flink.core.memory.MemorySegment;

/**
 * Flink {@link org.apache.flink.api.common.typeutils.TypeComparator} for Beam values that have been
 * encoded to byte data by a {@link Coder}. Ordering is lexicographic over the signed byte values,
 * with shorter arrays ordering before longer ones that share a prefix.
 */
@SuppressWarnings({
  "rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
})
public class EncodedValueComparator extends TypeComparator<byte[]> {

  /** For storing the Reference in encoded form; set via {@link #setReference(byte[])}. */
  private transient byte[] encodedReferenceKey;

  /** When false, all comparison results are inverted to produce descending order. */
  private final boolean ascending;

  public EncodedValueComparator(boolean ascending) {
    this.ascending = ascending;
  }

  @Override
  public int hash(byte[] record) {
    return Arrays.hashCode(record);
  }

  @Override
  public void setReference(byte[] toCompare) {
    this.encodedReferenceKey = toCompare;
  }

  @Override
  public boolean equalToReference(byte[] candidate) {
    // Arrays.equals performs the same length check plus element-wise
    // comparison as the original hand-rolled loop.
    return Arrays.equals(encodedReferenceKey, candidate);
  }

  @Override
  public int compareToReference(TypeComparator<byte[]> other) {
    // VERY IMPORTANT: compareToReference does not behave like Comparable.compare
    // the meaning of the return value is inverted.
    EncodedValueComparator otherEncodedValueComparator = (EncodedValueComparator) other;

    int len =
        Math.min(
            encodedReferenceKey.length, otherEncodedValueComparator.encodedReferenceKey.length);

    for (int i = 0; i < len; i++) {
      byte b1 = encodedReferenceKey[i];
      byte b2 = otherEncodedValueComparator.encodedReferenceKey[i];
      int result = (b1 < b2 ? -1 : (b1 == b2 ? 0 : 1));
      if (result != 0) {
        // Sign flipped relative to compare(), per the inverted contract above.
        return ascending ? -result : result;
      }
    }
    // Equal prefixes: the shorter array is "smaller" (sign flipped here too).
    int result =
        encodedReferenceKey.length - otherEncodedValueComparator.encodedReferenceKey.length;
    return ascending ? -result : result;
  }

  @Override
  public int compare(byte[] first, byte[] second) {
    // Lexicographic comparison over signed bytes; ties broken by length.
    int len = Math.min(first.length, second.length);
    for (int i = 0; i < len; i++) {
      byte b1 = first[i];
      byte b2 = second[i];
      int result = (b1 < b2 ? -1 : (b1 == b2 ? 0 : 1));
      if (result != 0) {
        return ascending ? result : -result;
      }
    }
    int result = first.length - second.length;
    return ascending ? result : -result;
  }

  @Override
  public int compareSerialized(DataInputView firstSource, DataInputView secondSource)
      throws IOException {
    // Same ordering as compare(), reading length-prefixed bytes directly from
    // the serialized form to avoid materializing the arrays.
    int lengthFirst = firstSource.readInt();
    int lengthSecond = secondSource.readInt();

    int len = Math.min(lengthFirst, lengthSecond);

    for (int i = 0; i < len; i++) {
      byte b1 = firstSource.readByte();
      byte b2 = secondSource.readByte();
      int result = (b1 < b2 ? -1 : (b1 == b2 ? 0 : 1));
      if (result != 0) {
        return ascending ? result : -result;
      }
    }

    int result = lengthFirst - lengthSecond;
    return ascending ? result : -result;
  }

  @Override
  public boolean supportsNormalizedKey() {
    // disabled because this seems to not work with some coders,
    // such as the AvroCoder
    return false;
  }

  @Override
  public boolean supportsSerializationWithKeyNormalization() {
    return false;
  }

  @Override
  public int getNormalizeKeyLen() {
    return Integer.MAX_VALUE;
  }

  @Override
  public boolean isNormalizedKeyPrefixOnly(int keyBytes) {
    return true;
  }

  @Override
  public void putNormalizedKey(byte[] record, MemorySegment target, int offset, int numBytes) {
    // Copy as much of the record as fits, then zero-pad up to numBytes.
    final int limit = offset + numBytes;

    target.put(offset, record, 0, Math.min(numBytes, record.length));

    offset += record.length;

    while (offset < limit) {
      target.put(offset++, (byte) 0);
    }
  }

  @Override
  public void writeWithKeyNormalization(byte[] record, DataOutputView target) throws IOException {
    throw new UnsupportedOperationException();
  }

  @Override
  public byte[] readWithKeyDenormalization(byte[] reuse, DataInputView source) throws IOException {
    throw new UnsupportedOperationException();
  }

  @Override
  public boolean invertNormalizedKey() {
    return !ascending;
  }

  @Override
  public TypeComparator<byte[]> duplicate() {
    return new EncodedValueComparator(ascending);
  }

  @Override
  public int extractKeys(Object record, Object[] target, int index) {
    target[index] = record;
    return 1;
  }

  @Override
  public TypeComparator[] getFlatComparators() {
    return new TypeComparator[] {this.duplicate()};
  }
}
apache-2.0
subhrajyotim/camunda-bpm-platform
engine/src/main/java/org/camunda/bpm/engine/impl/migration/MigrationPlanBuilderImpl.java
3273
/* Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.camunda.bpm.engine.impl.migration;

import java.util.ArrayList;
import java.util.List;

import org.camunda.bpm.engine.migration.MigrationPlanBuilder;
import org.camunda.bpm.engine.impl.cmd.CreateMigrationPlanCmd;
import org.camunda.bpm.engine.impl.interceptor.CommandExecutor;
import org.camunda.bpm.engine.migration.MigrationInstructionBuilder;
import org.camunda.bpm.engine.migration.MigrationInstructionsBuilder;
import org.camunda.bpm.engine.migration.MigrationPlan;

/**
 * Fluent builder that collects migration instructions between a source and a
 * target process definition and hands them to {@link CreateMigrationPlanCmd}
 * on {@link #build()}.
 *
 * @author Thorben Lindhauer
 */
public class MigrationPlanBuilderImpl implements MigrationInstructionBuilder, MigrationInstructionsBuilder {

  protected CommandExecutor commandExecutor;

  protected String sourceProcessDefinitionId;
  protected String targetProcessDefinitionId;
  // Instructions added explicitly via mapActivities(); generated instructions
  // (from mapEqualActivities) are produced later by the command.
  protected List<MigrationInstructionImpl> explicitMigrationInstructions;

  protected boolean mapEqualActivities = false;
  protected boolean updateEventTriggersForGeneratedInstructions = false;

  public MigrationPlanBuilderImpl(CommandExecutor commandExecutor, String sourceProcessDefinitionId,
      String targetProcessDefinitionId) {
    this.commandExecutor = commandExecutor;
    this.sourceProcessDefinitionId = sourceProcessDefinitionId;
    this.targetProcessDefinitionId = targetProcessDefinitionId;
    this.explicitMigrationInstructions = new ArrayList<MigrationInstructionImpl>();
  }

  /** Requests automatic mapping of activities with equal ids. */
  public MigrationInstructionsBuilder mapEqualActivities() {
    this.mapEqualActivities = true;
    return this;
  }

  /**
   * Adds an explicit instruction mapping a source activity to a target activity.
   *
   * @param sourceActivityId id of the activity in the source process definition
   * @param targetActivityId id of the activity in the target process definition
   */
  public MigrationInstructionBuilder mapActivities(String sourceActivityId, String targetActivityId) {
    this.explicitMigrationInstructions.add(
      new MigrationInstructionImpl(sourceActivityId, targetActivityId)
    );
    return this;
  }

  /**
   * Marks the most recently added explicit instruction to update its event trigger.
   *
   * @throws IllegalStateException if no instruction has been added yet; the
   *         original code threw a confusing {@code IndexOutOfBoundsException}
   *         ({@code get(-1)}) for this misuse
   */
  public MigrationInstructionBuilder updateEventTrigger() {
    if (explicitMigrationInstructions.isEmpty()) {
      throw new IllegalStateException(
          "updateEventTrigger() can only be called after a preceding mapActivities(...) call");
    }
    explicitMigrationInstructions
      .get(explicitMigrationInstructions.size() - 1)
      .setUpdateEventTrigger(true);
    return this;
  }

  /** Requests event-trigger updates for all generated (mapEqualActivities) instructions. */
  public MigrationInstructionsBuilder updateEventTriggers() {
    this.updateEventTriggersForGeneratedInstructions = true;
    return this;
  }

  public String getSourceProcessDefinitionId() {
    return sourceProcessDefinitionId;
  }

  public String getTargetProcessDefinitionId() {
    return targetProcessDefinitionId;
  }

  public boolean isMapEqualActivities() {
    return mapEqualActivities;
  }

  public boolean isUpdateEventTriggersForGeneratedInstructions() {
    return updateEventTriggersForGeneratedInstructions;
  }

  public List<MigrationInstructionImpl> getExplicitMigrationInstructions() {
    return explicitMigrationInstructions;
  }

  /** Executes the plan-creation command with the collected configuration. */
  public MigrationPlan build() {
    return commandExecutor.execute(new CreateMigrationPlanCmd(this));
  }

}
apache-2.0
vincent99/cattle
code/framework/server/src/main/java/io/cattle/platform/server/context/ServerContext.java
5080
package io.cattle.platform.server.context;

import io.cattle.platform.archaius.util.ArchaiusUtil;

import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.NetworkInterface;
import java.net.SocketException;
import java.util.Collections;

import org.apache.commons.lang.StringUtils;

import com.netflix.config.DynamicIntProperty;
import com.netflix.config.DynamicStringProperty;

/**
 * Static accessors for this server's identity and base URLs, derived from
 * Archaius configuration properties and the local network interfaces.
 */
public class ServerContext {

    public static final DynamicIntProperty HTTP_PORT = ArchaiusUtil.getInt("cattle.http.port");
    public static final DynamicIntProperty HTTPS_PORT = ArchaiusUtil.getInt("cattle.https.port");
    public static final DynamicStringProperty URL_PATH = ArchaiusUtil.getString("cattle.url.path");
    public static final DynamicStringProperty SERVER_IP = ArchaiusUtil.getString("cattle.server.ip");
    public static final DynamicStringProperty SERVER_ID = ArchaiusUtil.getString("cattle.server.id");
    public static final DynamicStringProperty HOST = ArchaiusUtil.getString("api.host");

    // Resolved once at class load; SERVER_IP overrides it when set.
    private static final String FOUND_SERVER_IP = lookupServerIp();
    private static final String SERVER_ID_FORMAT = System.getProperty("cattle.server.id.format", "%s");

    public static final String HOST_API_PROXY_MODE_OFF = "off";
    public static final String HOST_API_PROXY_MODE_EMBEDDED = "embedded";
    public static final String HOST_API_PROXY_MODE_HA = "ha";

    /** True when an explicit API host has been configured via "api.host". */
    public static boolean isCustomApiHost() {
        return !StringUtils.isBlank(HOST.get());
    }

    public enum BaseProtocol {
        HTTP,
        WEBSOCKET
    }

    /**
     * Returns the URL for reaching this server on localhost, preferring HTTPS
     * when an HTTPS port is configured.
     *
     * @param proto HTTP yields an http(s) URL including the API path;
     *              WEBSOCKET yields a ws(s) URL without it
     */
    public static String getLocalhostUrl(BaseProtocol proto) {
        return applyProtocol(buildLocalUrl("localhost"), proto);
    }

    /**
     * Returns the externally reachable base URL for the host API: the custom
     * "api.host" value when configured, otherwise a URL built from this
     * server's IP and configured port.
     *
     * @param proto HTTP yields an http(s) URL including the API path;
     *              WEBSOCKET yields a ws(s) URL without it
     */
    public static String getHostApiBaseUrl(BaseProtocol proto) {
        String url;
        if (isCustomApiHost()) {
            String apiHost = HOST.get();
            if (!apiHost.startsWith("http")) {
                apiHost = "http://" + apiHost;
            }
            url = apiHost;
        } else {
            url = buildLocalUrl(getServerIp());
        }
        return applyProtocol(url, proto);
    }

    /** Builds "http(s)://{host}:{port}" using the configured ports, preferring HTTPS. */
    private static String buildLocalUrl(String host) {
        StringBuilder buffer = new StringBuilder();
        if (HTTPS_PORT.get() > 0) {
            buffer.append("https://").append(host).append(":").append(HTTPS_PORT.get());
        } else {
            buffer.append("http://").append(host).append(":").append(HTTP_PORT.get());
        }
        return buffer.toString();
    }

    /** Rewrites http->ws for websocket use, or appends the API path for plain HTTP. */
    private static String applyProtocol(String url, BaseProtocol proto) {
        if (BaseProtocol.WEBSOCKET.equals(proto)) {
            // "https" becomes "wss", "http" becomes "ws" (first occurrence only).
            return url.replaceFirst("http", "ws");
        }
        // websocket endpoints don't follow same pathing as rest of api
        return url + URL_PATH.get();
    }

    /** Returns this server's id: the configured id/format with the IP substituted in. */
    public static String getServerId() {
        String id = SERVER_ID.get();
        String ip = getServerIp();
        if (id != null) {
            return String.format(id, ip);
        }
        return String.format(SERVER_ID_FORMAT, ip);
    }

    /** Proxy mode from env CATTLE_HOST_API_PROXY_MODE, else system property, else "off". */
    public static String getHostApiProxyMode() {
        String embedded = System.getenv("CATTLE_HOST_API_PROXY_MODE");
        if (StringUtils.isEmpty(embedded)) {
            embedded = System.getProperty("host.api.proxy.mode", "off");
        }
        return embedded;
    }

    protected static String getServerIp() {
        String ip = SERVER_IP.get();
        return ip == null ? FOUND_SERVER_IP : ip;
    }

    /**
     * Scans local network interfaces for a usable address: prefers a non-loopback
     * IPv4 address (non-site-local over site-local), then any IPv6 address,
     * falling back to "localhost".
     *
     * @throws IllegalStateException if interface enumeration fails
     */
    protected static String lookupServerIp() {
        String address = null;
        String v6Address = null;

        try {
            for (NetworkInterface iface : Collections.list(NetworkInterface.getNetworkInterfaces())) {
                for (InetAddress addr : Collections.list(iface.getInetAddresses())) {
                    if (addr instanceof Inet6Address) {
                        v6Address = addr.getHostAddress();
                    } else {
                        // Later non-site-local addresses replace an earlier pick.
                        if (!addr.isLoopbackAddress() && (address == null || !addr.isSiteLocalAddress())) {
                            address = addr.getHostAddress();
                        }
                    }
                }
            }

            if (address != null) {
                return address;
            } else if (v6Address != null) {
                return v6Address;
            } else {
                return "localhost";
            }
        } catch (SocketException e) {
            throw new IllegalStateException("Failed to lookup IP of server", e);
        }
    }

}
apache-2.0
shirshanka/gobblin
gobblin-modules/gobblin-elasticsearch/src/test/java/org/apache/gobblin/elasticsearch/writer/TestClient.java
1301
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.gobblin.elasticsearch.writer;

import java.io.Closeable;
import java.io.IOException;

import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;

/**
 * An interface to describe a functional Elasticsearch client to aid in verification
 * of test results. Extends {@link Closeable} so tests can release the underlying
 * connection when done.
 */
public interface TestClient extends Closeable {

  /**
   * Fetches a single document.
   *
   * @param getRequest identifies the index/document to fetch
   * @return the Elasticsearch get response
   * @throws IOException on communication failure
   */
  GetResponse get(GetRequest getRequest) throws IOException;

  /**
   * Drops and recreates the named index so each test starts from a clean slate.
   *
   * @param indexName the index to recreate
   * @throws IOException on communication failure
   */
  void recreateIndex(String indexName) throws IOException;
}
apache-2.0
amasta/java-driver
driver-core/src/test/java/com/datastax/driver/core/RefreshConnectedHostTest.java
4839
/*
 * Copyright (C) 2012-2015 DataStax Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.datastax.driver.core;

import static java.util.concurrent.TimeUnit.SECONDS;

import org.mockito.Mockito;
import org.testng.annotations.Test;

import com.datastax.driver.core.Host.State;
import com.datastax.driver.core.policies.ConstantReconnectionPolicy;
import com.datastax.driver.core.policies.LimitingLoadBalancingPolicy;
import com.datastax.driver.core.policies.RoundRobinPolicy;

import static com.datastax.driver.core.Assertions.assertThat;

/**
 * Integration test verifying that the driver re-creates a connection pool when a
 * host's distance changes from IGNORED to LOCAL (i.e. when a previously ignored
 * node is promoted by the load balancing policy after another node goes down).
 */
public class RefreshConnectedHostTest {

    /**
     * Tests {@link PoolingOptions#refreshConnectedHost(Host)} through a custom load balancing policy.
     */
    @Test(groups = "long")
    public void should_refresh_single_connected_host() {
        CCMBridge ccm = null;
        Cluster cluster = null;
        try {
            // This will make the driver use at most 2 hosts, the others will be ignored
            LimitingLoadBalancingPolicy loadBalancingPolicy = new LimitingLoadBalancingPolicy(new RoundRobinPolicy(), 2, 1);

            // Setup a 3-host cluster, start only two hosts so that we know in advance which ones the policy will use
            ccm = CCMBridge.builder("test").withNodes(3).notStarted().build();
            ccm.start(1);
            ccm.start(2);
            ccm.waitForUp(1);
            ccm.waitForUp(2);

            // Spy on PoolingOptions so we can later verify that refreshConnectedHost was invoked
            PoolingOptions poolingOptions = Mockito.spy(new PoolingOptions());
            cluster = Cluster.builder()
                .addContactPoint(CCMBridge.ipOfNode(1))
                .withPoolingOptions(poolingOptions)
                .withLoadBalancingPolicy(loadBalancingPolicy)
                .withReconnectionPolicy(new ConstantReconnectionPolicy(1000))
                .build();
            Session session = cluster.connect();

            // Initial state: hosts 1 and 2 are the two LOCAL hosts chosen by the limiting policy
            assertThat(cluster).usesControlHost(1);
            assertThat(cluster).host(1)
                .hasState(State.UP)
                .isAtDistance(HostDistance.LOCAL);
            // Wait for the node to be up, because apparently on Jenkins it's still only ADDED when we reach this line
            // Waiting for NEW_NODE_DELAY_SECONDS+1 allows the driver to create a connection pool and mark the node up
            assertThat(cluster).host(2)
                .comesUpWithin(Cluster.NEW_NODE_DELAY_SECONDS+1, SECONDS)
                .isAtDistance(HostDistance.LOCAL);

            // Bring host 3 up, its presence should be acknowledged but it should be ignored
            ccm.start(3);
            ccm.waitForUp(3);

            assertThat(cluster).host(1)
                .hasState(State.UP)
                .isAtDistance(HostDistance.LOCAL);
            assertThat(cluster).host(2)
                .hasState(State.UP)
                .isAtDistance(HostDistance.LOCAL);
            assertThat(cluster).host(3)
                .comesUpWithin(Cluster.NEW_NODE_DELAY_SECONDS+1, SECONDS)
                .isAtDistance(HostDistance.IGNORED);
            assertThat(session).hasNoPoolFor(3);

            // Kill host 2, host 3 should take its place
            ccm.stop(2);
            TestUtils.waitFor(CCMBridge.ipOfNode(3), cluster);

            assertThat(cluster).host(1)
                .hasState(State.UP)
                .isAtDistance(HostDistance.LOCAL);
            assertThat(cluster).host(2)
                .hasState(State.DOWN);
            assertThat(cluster).host(3)
                .hasState(State.UP)
                .isAtDistance(HostDistance.LOCAL);
            assertThat(session).hasPoolFor(3);

            // This is when refreshConnectedHost should have been invoked, it triggers pool creation when
            // we switch the node from IGNORED to UP:
            Mockito.verify(poolingOptions)
                .refreshConnectedHost(TestUtils.findHost(cluster, 3));

        } finally {
            // Always tear down the cluster and the CCM bridge, even when assertions fail
            if (cluster != null)
                cluster.close();
            if (ccm != null)
                ccm.remove();
        }
    }
}
apache-2.0
ricepanda/rice-git3
rice-middleware/sampleapp/src/main/java/org/kuali/rice/config/SampleAppPSC.java
3941
/**
 * Copyright 2005-2014 The Kuali Foundation
 *
 * Licensed under the Educational Community License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.opensource.org/licenses/ecl2.php
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.kuali.rice.config;

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import javax.servlet.ServletContext;

import org.kuali.common.jdbc.project.spring.JdbcPropertyLocationsConfig;
import org.kuali.common.util.log.LoggerUtils;
import org.kuali.common.util.properties.Location;
import org.kuali.common.util.properties.PropertiesService;
import org.kuali.common.util.properties.spring.DefaultPropertiesServiceConfig;
import org.kuali.common.util.spring.service.PropertySourceConfig;
import org.kuali.rice.core.api.config.property.Config;
import org.kuali.rice.core.api.config.property.ConfigContext;
import org.kuali.rice.core.api.config.property.ConfigPropertySource;
import org.kuali.rice.sql.spring.SourceSqlPropertyLocationsConfig;
import org.kuali.rice.xml.ingest.RiceConfigUtils;
import org.kuali.rice.xml.spring.IngestXmlPropertyLocationsConfig;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.core.env.PropertySource;

/**
 * Holds the property source for all of the different properties needed for starting up the KRAD
 * Sample App.
 *
 * @author Kuali Rice Team (rice.collab@kuali.org)
 */
@Configuration
@Import({SampleAppProjectConfig.class, JdbcPropertyLocationsConfig.class, DefaultPropertiesServiceConfig.class,
        SourceSqlPropertyLocationsConfig.class, IngestXmlPropertyLocationsConfig.class})
public class SampleAppPSC implements PropertySourceConfig {

    // Classpath location of the Rice XML configuration that seeds the root config
    private static final String KR_SAMPLE_APP_CONFIG = "classpath:META-INF/sample-app-config.xml";

    private static final Logger logger = LoggerUtils.make();

    // Imported configs supplying the individual property file locations
    @Autowired
    JdbcPropertyLocationsConfig jdbcConfig;

    @Autowired
    SourceSqlPropertyLocationsConfig sourceSqlConfig;

    @Autowired
    IngestXmlPropertyLocationsConfig ingestXmlConfig;

    // Service that loads, decrypts, and resolves the properties from the locations
    @Autowired
    PropertiesService service;

    @Autowired
    ServletContext servletContext;

    /**
     * Builds the single {@link PropertySource} backing Spring's environment. The
     * returned source wraps the Rice root {@link Config} so that Spring and Rice
     * both read property values from the exact same object.
     */
    @Override
    @Bean
    public PropertySource<?> propertySource() {

        // Combine locations making sure Rice properties go in last
        List<Location> locations = new ArrayList<Location>();
        locations.addAll(jdbcConfig.jdbcPropertyLocations());
        locations.addAll(sourceSqlConfig.riceSourceSqlPropertyLocations());
        locations.addAll(ingestXmlConfig.riceIngestXmlPropertyLocations());

        // Default behavior is load->decrypt->resolve
        // -Dproperties.resolve=false turns off placeholder resolution
        Properties properties = service.getProperties(locations);
        logger.info("Loaded {} regular properties", properties.size());

        // Combine normal properties with Rice properties using Rice's custom placeholder resolution logic to resolve everything
        Config rootCfg = RiceConfigUtils.getRootConfig(properties, KR_SAMPLE_APP_CONFIG, servletContext);

        // Make sure ConfigContext.getCurrentContextConfig() return's the rootCfg object
        ConfigContext.init(rootCfg);

        // Make Spring and Rice use the exact same source for obtaining property values
        return new ConfigPropertySource("riceConfig", rootCfg);
    }

}
apache-2.0
vineetgarg02/hive
accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/mr/HiveAccumuloTableInputFormat.java
18098
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.accumulo.mr;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.client.mapred.AccumuloInputFormat;
import org.apache.accumulo.core.client.mapred.AccumuloRowInputFormat;
import org.apache.accumulo.core.client.mapred.RangeInputSplit;
import org.apache.accumulo.core.client.mapreduce.lib.impl.ConfiguratorBase;
import org.apache.accumulo.core.client.mock.MockInstance;
import org.apache.accumulo.core.client.security.tokens.AuthenticationToken;
import org.apache.accumulo.core.client.security.tokens.KerberosToken;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;
import org.apache.accumulo.core.util.Pair;
import org.apache.accumulo.core.util.PeekingIterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.accumulo.AccumuloConnectionParameters;
import org.apache.hadoop.hive.accumulo.AccumuloHiveRow;
import org.apache.hadoop.hive.accumulo.HiveAccumuloHelper;
import org.apache.hadoop.hive.accumulo.columns.ColumnMapper;
import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloColumnMapping;
import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloMapColumnMapping;
import org.apache.hadoop.hive.accumulo.predicate.AccumuloPredicateHandler;
import org.apache.hadoop.hive.accumulo.serde.AccumuloSerDeParameters;
import org.apache.hadoop.hive.accumulo.serde.TooManyAccumuloColumnsException;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Level;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Wraps older InputFormat for use with Hive.
 *
 * Configure input scan with proper ranges, iterators, and columns based on serde properties for
 * Hive table.
 */
public class HiveAccumuloTableInputFormat implements
    org.apache.hadoop.mapred.InputFormat<Text,AccumuloHiveRow> {
  private static final Logger log = LoggerFactory.getLogger(HiveAccumuloTableInputFormat.class);

  // Visible for testing
  protected AccumuloRowInputFormat accumuloInputFormat = new AccumuloRowInputFormat();
  protected AccumuloPredicateHandler predicateHandler = AccumuloPredicateHandler.getInstance();
  protected HiveAccumuloHelper helper = new HiveAccumuloHelper();

  /**
   * Computes the input splits for the job: resolves the Accumulo connection (handling SASL/Kerberos
   * delegation tokens when present), pushes ranges/iterators/columns derived from the Hive query
   * into the Configuration, then delegates split computation to the wrapped AccumuloRowInputFormat.
   *
   * @param jobConf the job configuration carrying serde and connection properties
   * @param numSplits hint for the number of splits, forwarded to the underlying InputFormat
   * @return one HiveAccumuloSplit per Accumulo RangeInputSplit (empty when predicates exclude all rows)
   * @throws IOException on column-mapping, connection, or Accumulo configuration failures
   */
  @Override
  public InputSplit[] getSplits(JobConf jobConf, int numSplits) throws IOException {
    final AccumuloConnectionParameters accumuloParams = new AccumuloConnectionParameters(jobConf);
    final Instance instance = accumuloParams.getInstance();
    final ColumnMapper columnMapper;
    try {
      columnMapper = getColumnMapper(jobConf);
    } catch (TooManyAccumuloColumnsException e) {
      throw new IOException(e);
    }

    JobContext context = ShimLoader.getHadoopShims().newJobContext(Job.getInstance(jobConf));
    Path[] tablePaths = FileInputFormat.getInputPaths(context);

    try {
      Connector connector = null;
      // Need to get a Connector so we look up the user's authorizations if not otherwise specified
      if (accumuloParams.useSasl()) {
        log.info("Current user: " + UserGroupInformation.getCurrentUser());
        // In a YARN/Tez job, don't have the Kerberos credentials anymore, use the delegation token
        AuthenticationToken token = ConfiguratorBase.getAuthenticationToken(
            AccumuloInputFormat.class, jobConf);
        if (null != token && !jobConf.getCredentials().getAllTokens().isEmpty()) {
          // Convert the stub from the configuration back into a normal Token
          log.info("Found authentication token in Configuration: " + token);
          log.info("Job credential tokens: " + jobConf.getCredentials().getAllTokens());
          AuthenticationToken unwrappedToken = ConfiguratorBase.unwrapAuthenticationToken(jobConf,
              token);
          log.info("Converted authentication token from Configuration into: " + unwrappedToken);
          // It's possible that the Job doesn't have the token in its credentials. In this case,
          // unwrapAuthenticationToken will return back the original token (which we know is
          // insufficient)
          if (unwrappedToken != token) {
            log.info("Creating Accumulo Connector with unwrapped delegation token");
            connector = instance.getConnector(accumuloParams.getAccumuloUserName(), unwrappedToken);
          } else {
            log.info("Job credentials did not contain delegation token, fetching new token");
          }
        }

        if (connector == null) {
          log.info("Obtaining Accumulo Connector using KerberosToken");
          // Construct a KerberosToken -- relies on ProxyUser configuration. Will be the client
          // making the request on top of the HS2's user. Accumulo will require proper proxy-user
          // auth configs.
          connector = instance.getConnector(accumuloParams.getAccumuloUserName(),
              new KerberosToken(accumuloParams.getAccumuloUserName()));
        }
      } else {
        // Still in the local JVM, use the username+password or Kerberos credentials
        connector = accumuloParams.getConnector(instance);
      }
      final List<ColumnMapping> columnMappings = columnMapper.getColumnMappings();
      final List<IteratorSetting> iterators = predicateHandler.getIterators(jobConf, columnMapper);
      final Collection<Range> ranges = predicateHandler.getRanges(jobConf, columnMapper);

      // Setting an empty collection of ranges will, unexpectedly, scan all data
      // We don't want that.
      if (null != ranges && ranges.isEmpty()) {
        return new InputSplit[0];
      }

      // Set the relevant information in the Configuration for the AccumuloInputFormat
      configure(jobConf, instance, connector, accumuloParams, columnMapper, iterators, ranges);

      int numColumns = columnMappings.size();

      List<Integer> readColIds = ColumnProjectionUtils.getReadColumnIDs(jobConf);

      // Sanity check: every projected Hive column must have a corresponding mapping
      if (numColumns < readColIds.size())
        throw new IOException("Number of column mappings (" + numColumns + ")"
            + " numbers less than the hive table columns. (" + readColIds.size() + ")");

      // get splits from Accumulo
      InputSplit[] splits = accumuloInputFormat.getSplits(jobConf, numSplits);

      HiveAccumuloSplit[] hiveSplits = new HiveAccumuloSplit[splits.length];
      for (int i = 0; i < splits.length; i++) {
        RangeInputSplit ris = (RangeInputSplit) splits[i];
        ris.setLogLevel(Level.DEBUG);
        hiveSplits[i] = new HiveAccumuloSplit(ris, tablePaths[0]);
      }

      return hiveSplits;
    } catch (AccumuloException e) {
      log.error("Could not configure AccumuloInputFormat", e);
      throw new IOException(StringUtils.stringifyException(e));
    } catch (AccumuloSecurityException e) {
      log.error("Could not configure AccumuloInputFormat", e);
      throw new IOException(StringUtils.stringifyException(e));
    } catch (SerDeException e) {
      log.error("Could not configure AccumuloInputFormat", e);
      throw new IOException(StringUtils.stringifyException(e));
    }
  }

  /**
   * Setup accumulo input format from conf properties. Delegates to final RecordReader from mapred
   * package.
   */
  @Override
  public RecordReader<Text,AccumuloHiveRow> getRecordReader(InputSplit inputSplit,
      final JobConf jobConf, final Reporter reporter) throws IOException {
    final ColumnMapper columnMapper;
    try {
      columnMapper = getColumnMapper(jobConf);
    } catch (TooManyAccumuloColumnsException e) {
      throw new IOException(e);
    }

    try {
      final AccumuloConnectionParameters accumuloParams = new AccumuloConnectionParameters(
          jobConf);
      final List<IteratorSetting> iterators = predicateHandler.getIterators(jobConf, columnMapper);

      HiveAccumuloSplit hiveSplit = (HiveAccumuloSplit) inputSplit;
      RangeInputSplit rangeSplit = hiveSplit.getSplit();

      log.info("Split: " + rangeSplit);

      // The RangeInputSplit *should* have all of the necessary information contained in it
      // which alleviates us from re-parsing our configuration from the AccumuloStorageHandler
      // and re-setting it into the Configuration (like we did in getSplits(...)). Thus, it should
      // be unnecessary to re-invoke configure(...)

      // ACCUMULO-2962 Iterators weren't getting serialized into the InputSplit, but we can
      // compensate because we still have that info.
      // Should be fixed in Accumulo 1.5.2 and 1.6.1
      if (null == rangeSplit.getIterators()
          || (rangeSplit.getIterators().isEmpty() && !iterators.isEmpty())) {
        log.debug("Re-setting iterators on InputSplit due to Accumulo bug.");
        rangeSplit.setIterators(iterators);
      }

      // ACCUMULO-3015 Like the above, RangeInputSplit should have the table name
      // but we want it to, so just re-set it if it's null.
      if (null == rangeSplit.getTableName()) {
        rangeSplit.setTableName(accumuloParams.getAccumuloTableName());
      }

      // ACCUMULO-4670 RangeInputSplit doesn't preserve useSasl on the
      // ClientConfiguration/ZooKeeperInstance
      // We have to manually re-set it in the JobConf to make sure it gets picked up.
      if (accumuloParams.useSasl()) {
        helper.setInputFormatZooKeeperInstance(jobConf, accumuloParams.getAccumuloInstanceName(),
            accumuloParams.getZooKeepers(), accumuloParams.useSasl());
      }

      final RecordReader<Text,PeekingIterator<Map.Entry<Key,Value>>> recordReader = accumuloInputFormat
          .getRecordReader(rangeSplit, jobConf, reporter);

      return new HiveAccumuloRecordReader(recordReader, iterators.size());
    } catch (SerDeException e) {
      throw new IOException(StringUtils.stringifyException(e));
    }
  }

  /**
   * Builds the Hive-column-to-Accumulo-column mapper from serde properties in the Configuration.
   *
   * @param conf configuration carrying the column names, types, and mapping definition
   * @return a ColumnMapper over the configured Hive columns
   * @throws IOException when the column names or types are missing from the Configuration
   * @throws TooManyAccumuloColumnsException when the mapping declares more columns than supported
   */
  protected ColumnMapper getColumnMapper(Configuration conf) throws IOException,
      TooManyAccumuloColumnsException {
    final String defaultStorageType = conf.get(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE);

    String[] columnNamesArr = conf.getStrings(serdeConstants.LIST_COLUMNS);
    if (null == columnNamesArr) {
      throw new IOException(
          "Hive column names must be provided to InputFormat in the Configuration");
    }
    List<String> columnNames = Arrays.asList(columnNamesArr);

    String serializedTypes = conf.get(serdeConstants.LIST_COLUMN_TYPES);
    if (null == serializedTypes) {
      throw new IOException(
          "Hive column types must be provided to InputFormat in the Configuration");
    }
    ArrayList<TypeInfo> columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(serializedTypes);

    return new ColumnMapper(conf.get(AccumuloSerDeParameters.COLUMN_MAPPINGS), defaultStorageType,
        columnNames, columnTypes);
  }

  /**
   * Configure the underlying AccumuloInputFormat
   *
   * @param conf
   *          Job configuration
   * @param instance
   *          Accumulo instance
   * @param connector
   *          Accumulo connector
   * @param accumuloParams
   *          Connection information to the Accumulo instance
   * @param columnMapper
   *          Configuration of Hive to Accumulo columns
   * @param iterators
   *          Any iterators to be configured server-side
   * @param ranges
   *          Accumulo ranges on for the query
   */
  protected void configure(JobConf conf, Instance instance, Connector connector,
      AccumuloConnectionParameters accumuloParams, ColumnMapper columnMapper,
      List<IteratorSetting> iterators, Collection<Range> ranges) throws AccumuloSecurityException,
      AccumuloException, SerDeException, IOException {

    // Handle implementation of Instance and invoke appropriate InputFormat method
    if (instance instanceof MockInstance) {
      getHelper().setInputFormatMockInstance(conf, instance.getInstanceName());
    } else {
      getHelper().setInputFormatZooKeeperInstance(conf, instance.getInstanceName(),
          instance.getZooKeepers(), accumuloParams.useSasl());
    }

    // Set the username/passwd for the Accumulo connection
    if (accumuloParams.useSasl()) {
      getHelper().updateInputFormatConfWithAccumuloToken(conf,
          UserGroupInformation.getCurrentUser(), accumuloParams);
    } else {
      getHelper().setInputFormatConnectorInfo(conf, accumuloParams.getAccumuloUserName(),
          new PasswordToken(accumuloParams.getAccumuloPassword()));
    }

    // Read from the given Accumulo table
    setInputTableName(conf, accumuloParams.getAccumuloTableName());

    // Check Configuration for any user-provided Authorization definition
    Authorizations auths = AccumuloSerDeParameters.getAuthorizationsFromConf(conf);

    if (null == auths) {
      // Default to all of user's authorizations when no configuration is provided
      auths = connector.securityOperations().getUserAuthorizations(
          accumuloParams.getAccumuloUserName());
    }

    // Implicitly handles users providing invalid authorizations
    setScanAuthorizations(conf, auths);

    // restrict with any filters found from WHERE predicates.
    addIterators(conf, iterators);

    // restrict with any ranges found from WHERE predicates.
    // not setting ranges scans the entire table
    if (null != ranges) {
      log.info("Setting ranges: " + ranges);
      setRanges(conf, ranges);
    }

    // Restrict the set of columns that we want to read from the Accumulo table
    HashSet<Pair<Text,Text>> pairs = getPairCollection(columnMapper.getColumnMappings());
    if (null != pairs && !pairs.isEmpty()) {
      fetchColumns(conf, pairs);
    }
  }

  // Wrap the static AccumuloInputFormat methods with methods that we can
  // verify were correctly called via Mockito
  protected void setInputTableName(JobConf conf, String tableName) {
    AccumuloInputFormat.setInputTableName(conf, tableName);
  }

  protected void setScanAuthorizations(JobConf conf, Authorizations auths) {
    AccumuloInputFormat.setScanAuthorizations(conf, auths);
  }

  protected void addIterators(JobConf conf, List<IteratorSetting> iterators) {
    for (IteratorSetting is : iterators) {
      AccumuloInputFormat.addIterator(conf, is);
    }
  }

  protected void setRanges(JobConf conf, Collection<Range> ranges) {
    AccumuloInputFormat.setRanges(conf, ranges);
  }

  protected void fetchColumns(JobConf conf, Set<Pair<Text,Text>> cfCqPairs) {
    AccumuloInputFormat.fetchColumns(conf, cfCqPairs);
  }

  /**
   * Create col fam/qual pairs from pipe separated values, usually from config object. Ignores
   * rowID.
   *
   * @param columnMappings
   *          The list of ColumnMappings for the given query
   * @return a Set of Pairs of colfams and colquals
   */
  protected HashSet<Pair<Text,Text>> getPairCollection(List<ColumnMapping> columnMappings) {
    final HashSet<Pair<Text,Text>> pairs = new HashSet<Pair<Text,Text>>();

    for (ColumnMapping columnMapping : columnMappings) {
      if (columnMapping instanceof HiveAccumuloColumnMapping) {
        HiveAccumuloColumnMapping accumuloColumnMapping = (HiveAccumuloColumnMapping) columnMapping;

        Text cf = new Text(accumuloColumnMapping.getColumnFamily());
        Text cq = null;

        // A null cq implies an empty column qualifier
        if (null != accumuloColumnMapping.getColumnQualifier()) {
          cq = new Text(accumuloColumnMapping.getColumnQualifier());
        }

        pairs.add(new Pair<Text,Text>(cf, cq));
      } else if (columnMapping instanceof HiveAccumuloMapColumnMapping) {
        HiveAccumuloMapColumnMapping mapMapping = (HiveAccumuloMapColumnMapping) columnMapping;

        // Can't fetch prefix on colqual, must pull the entire qualifier
        // TODO use an iterator to do the filter, server-side.
        pairs.add(new Pair<Text,Text>(new Text(mapMapping.getColumnFamily()), null));
      }
    }

    log.info("Computed columns to fetch (" + pairs + ") from " + columnMappings);

    return pairs;
  }

  // Accessor for the helper so tests can substitute a mock
  HiveAccumuloHelper getHelper() {
    return helper;
  }
}
apache-2.0
douglasjunior/android-validation-komensky
library/src/main/java/eu/inmite/android/lib/validations/form/annotations/MinLength.java
991
/*
 * Copyright (c) 2013, Inmite s.r.o. (www.inmite.eu).
 *
 * All rights reserved. This source code can be used only for purposes specified
 * by the given license contract signed by the rightful deputy of Inmite s.r.o.
 * This source code can be used only by the owner of the license.
 *
 * Any disputes arising in respect of this agreement (license) shall be brought
 * before the Municipal Court of Prague.
 */
package eu.inmite.android.lib.validations.form.annotations;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Validate for minimal length of input string.
 * @author Tomas Vondracek
 */
@Target(value= ElementType.FIELD)
@Retention(value= RetentionPolicy.RUNTIME)
public @interface MinLength {

	/**
	 * minimal length
	 */
	int value();

	/**
	 * Whether the minimal length itself counts as valid (INCLUSIVE) or not.
	 * NOTE(review): exact semantics depend on ComparingPolicy — confirm against the validator.
	 */
	ComparingPolicy policy() default ComparingPolicy.INCLUSIVE;

	/**
	 * Resource id of the message to show when validation fails;
	 * 0 presumably means "use the library default" — TODO confirm.
	 */
	int messageId() default 0;

	/**
	 * Ordering weight of this validation relative to others on the same form.
	 */
	int order() default 1000;
}
apache-2.0
apache/oodt
filemgr/src/main/java/org/apache/oodt/cas/filemgr/structs/exceptions/ValidationLayerException.java
1680
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.oodt.cas.filemgr.structs.exceptions; /** * @author mattmann * @version $Revision$ * * <p>An exception thrown by the validation layer.</p> * */ public class ValidationLayerException extends Exception { /* serial version UID */ private static final long serialVersionUID = -929293299393L; /** * */ public ValidationLayerException() { super(); // TODO Auto-generated constructor stub } /** * @param arg0 */ public ValidationLayerException(String arg0) { super(arg0); // TODO Auto-generated constructor stub } /** * @param arg0 * @param arg1 */ public ValidationLayerException(String arg0, Throwable arg1) { super(arg0, arg1); // TODO Auto-generated constructor stub } /** * @param arg0 */ public ValidationLayerException(Throwable arg0) { super(arg0); // TODO Auto-generated constructor stub } }
apache-2.0
sommerc/bioformats
components/forks/poi/src/loci/poi/util/LittleEndianConsts.java
2296
/*
 * #%L
 * Fork of Apache Jakarta POI.
 * %%
 * Copyright (C) 2008 - 2015 Open Microscopy Environment:
 *   - Board of Regents of the University of Wisconsin-Madison
 *   - Glencoe Software, Inc.
 *   - University of Dundee
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package loci.poi.util;

/**
 * a repository for constants shared by classes within this package
 *
 * <p>NOTE(review): this is a "constant interface"; kept as-is because
 * existing classes may implement it to inherit the constants.
 *
 * @author Marc Johnson
 * @author Andrew C. Oliver (acoliver at apache dot org)
 */
public interface LittleEndianConsts {

    // sizes of various numbers in this environment, in bytes
    public static final int BYTE_SIZE   = 1;
    public static final int SHORT_SIZE  = 2;
    public static final int INT_SIZE    = 4;
    public static final int LONG_SIZE   = 8;
    public static final int DOUBLE_SIZE = 8;
} // end public interface LittleEndianConsts
gpl-2.0
DmitryRendov/AuthMeReloaded
src/test/java/fr/xephi/authme/settings/properties/TestConfiguration.java
1649
package fr.xephi.authme.settings.properties; import ch.jalu.configme.SettingsHolder; import ch.jalu.configme.properties.Property; import java.util.List; import static ch.jalu.configme.properties.PropertyInitializer.newListProperty; import static ch.jalu.configme.properties.PropertyInitializer.newProperty; /** * Sample properties for testing purposes. */ public final class TestConfiguration implements SettingsHolder { public static final Property<Integer> DURATION_IN_SECONDS = newProperty("test.duration", 4); public static final Property<String> SYSTEM_NAME = newProperty("test.systemName", "[TestDefaultValue]"); public static final Property<TestEnum> RATIO_ORDER = newProperty(TestEnum.class, "sample.ratio.order", TestEnum.SECOND); public static final Property<List<String>> RATIO_FIELDS = newListProperty("sample.ratio.fields", "a", "b", "c"); public static final Property<Integer> VERSION_NUMBER = newProperty("version", 32046); public static final Property<Boolean> SKIP_BORING_FEATURES = newProperty("features.boring.skip", false); public static final Property<List<String>> BORING_COLORS = newListProperty("features.boring.colors"); public static final Property<Integer> DUST_LEVEL = newProperty("features.boring.dustLevel", -1); public static final Property<Boolean> USE_COOL_FEATURES = newProperty("features.cool.enabled", false); public static final Property<List<String>> COOL_OPTIONS = newListProperty("features.cool.options", "Sparks", "Sprinkles"); private TestConfiguration() { } }
gpl-3.0
bhutchinson/kfs
kfs-core/src/main/java/org/kuali/kfs/sys/document/validation/impl/AccountingLineCheckValidationHutch.java
5269
/* * The Kuali Financial System, a comprehensive financial management system for higher education. * * Copyright 2005-2014 The Kuali Foundation * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.kuali.kfs.sys.document.validation.impl; import org.apache.commons.lang.StringUtils; import org.kuali.kfs.sys.businessobject.AccountingLine; import org.kuali.kfs.sys.document.AccountingDocument; import org.kuali.kfs.sys.document.validation.Validation; import org.kuali.kfs.sys.document.validation.event.AttributedDocumentEvent; import org.kuali.rice.krad.util.ObjectUtils; /** * */ public class AccountingLineCheckValidationHutch implements Validation { protected Validation lineAmountValidation; protected Validation lineCheckValidation; protected Validation lineValuesAllowedValidation; protected String accountingDocumentParameterPropertyName; protected String accountingLineParameterPropertyName; protected AccountingDocument accountingDocumentForValidation; protected AccountingLine accountingLineForValidation; protected boolean quitOnFail; /** * @see org.kuali.kfs.sys.document.validation.Validation#shouldQuitOnFail() */ public boolean shouldQuitOnFail() { return quitOnFail; } /** * Sets whether the validation hutch should quit on the failure of the entire validation case failing. 
* @param b */ public void setShouldQuitOnFail(boolean b) { quitOnFail = b; } /** * @see org.kuali.kfs.sys.document.validation.Validation#stageValidation(org.kuali.kfs.sys.document.validation.event.AttributedDocumentEvent) */ public boolean stageValidation(AttributedDocumentEvent event) { grabDocumentAndLineForValidationFromEvent(event); updateValidationsWithParameters(); return validate(event); } /** * Using the parameter property names set, finds the accounting document and accounting line to be validate * from the property. * @param event the event to take properties from */ protected void grabDocumentAndLineForValidationFromEvent(AttributedDocumentEvent event) { if (StringUtils.isNotBlank(accountingDocumentParameterPropertyName)) { accountingDocumentForValidation = (AccountingDocument)ObjectUtils.getPropertyValue(event, accountingDocumentParameterPropertyName); } if (StringUtils.isNotBlank(accountingLineParameterPropertyName)) { accountingLineForValidation = (AccountingLine)ObjectUtils.getPropertyValue(event, accountingLineParameterPropertyName); } } /** * Updates the child validations with accounting document and accounting line information. */ protected void updateValidationsWithParameters() { } /** * * @see org.kuali.kfs.sys.document.validation.Validation#validate(org.kuali.kfs.sys.document.validation.event.AttributedDocumentEvent) */ public boolean validate(AttributedDocumentEvent event) { // TODO Auto-generated method stub return false; } /** * Gets the lineAmountValidation attribute. * @return Returns the lineAmountValidation. */ public Validation getLineAmountValidation() { return lineAmountValidation; } /** * Sets the lineAmountValidation attribute value. * @param lineAmountValidation The lineAmountValidation to set. */ public void setLineAmountValidation(Validation lineAmountValidation) { this.lineAmountValidation = lineAmountValidation; } /** * Gets the lineCheckValidation attribute. * @return Returns the lineCheckValidation. 
*/ public Validation getLineCheckValidation() { return lineCheckValidation; } /** * Sets the lineCheckValidation attribute value. * @param lineCheckValidation The lineCheckValidation to set. */ public void setLineCheckValidation(Validation lineCheckValidation) { this.lineCheckValidation = lineCheckValidation; } /** * Gets the lineValuesAllowedValidation attribute. * @return Returns the lineValuesAllowedValidation. */ public Validation getLineValuesAllowedValidation() { return lineValuesAllowedValidation; } /** * Sets the lineValuesAllowedValidation attribute value. * @param lineValuesAllowedValidation The lineValuesAllowedValidation to set. */ public void setLineValuesAllowedValidation(Validation lineValuesAllowedValidation) { this.lineValuesAllowedValidation = lineValuesAllowedValidation; } }
agpl-3.0