repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
lpxz/grail-derby104
java/engine/org/apache/derby/impl/sql/execute/IndexConstantAction.java
2362
/* Derby - Class org.apache.derby.impl.sql.execute.IndexConstantAction Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.sql.execute; import org.apache.derby.catalog.UUID; import org.apache.derby.iapi.services.sanity.SanityManager; /** * This class is the superclass for the classes that describe actions * that are ALWAYS performed for a CREATE/DROP INDEX Statement at Execution time. * */ public abstract class IndexConstantAction extends DDLSingleTableConstantAction { String indexName; String tableName; String schemaName; // CONSTRUCTORS /** * Make the ConstantAction for a CREATE/DROP INDEX statement. * * @param tableId The table uuid * @param indexName Index name. * @param tableName The table name * @param schemaName Schema that index lives in. * */ protected IndexConstantAction( UUID tableId, String indexName, String tableName, String schemaName) { super(tableId); this.indexName = indexName; this.tableName = tableName; this.schemaName = schemaName; if (SanityManager.DEBUG) { SanityManager.ASSERT(schemaName != null, "Schema name is null"); } } // CLASS METHODS /** * Get the index name. * * @return the name of the index */ public String getIndexName() { return indexName; } /** * Set the index name at execution time. 
* Useful for unnamed constraints which have a backing index. * * @param indexName The (generated) index name. */ public void setIndexName(String indexName) { this.indexName = indexName; } }
apache-2.0
oneops/OneOps
crawler/src/generated-sources/java/com/oneops/crawler/jooq/cms/routines/DjRmRfcRel.java
1169
/*
 * This file is generated by jOOQ.
 */
// NOTE(review): generated source — do not hand-edit; change the schema /
// generator configuration and regenerate instead.
package com.oneops.crawler.jooq.cms.routines;


import com.oneops.crawler.jooq.cms.Kloopzcm;

import javax.annotation.Generated;

import org.jooq.Parameter;
import org.jooq.impl.AbstractRoutine;


/**
 * This class is generated by jOOQ.
 */
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.10.0"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class DjRmRfcRel extends AbstractRoutine<java.lang.Void> {

    private static final long serialVersionUID = 1124531785;

    /**
     * The parameter <code>kloopzcm.dj_rm_rfc_rel.p_rfc_id</code>.
     */
    public static final Parameter<Long> P_RFC_ID = createParameter("p_rfc_id", org.jooq.impl.SQLDataType.BIGINT, false, false);

    /**
     * Create a new routine call instance
     */
    public DjRmRfcRel() {
        super("dj_rm_rfc_rel", Kloopzcm.KLOOPZCM);

        addInParameter(P_RFC_ID);
    }

    /**
     * Set the <code>p_rfc_id</code> parameter IN value to the routine
     */
    public void setPRfcId(Long value) {
        setValue(P_RFC_ID, value);
    }
}
apache-2.0
NCIP/cadsr-sentinel
software/src/java/gov/nih/nci/cadsr/sentinel/ui/Edit.java
7029
/*L
 * Copyright ScenPro Inc, SAIC-F
 *
 * Distributed under the OSI-approved BSD 3-Clause License.
 * See http://ncip.github.com/cadsr-sentinal/LICENSE.txt for details.
 */

// Copyright (c) 2004 ScenPro, Inc.

// $Header: /share/content/gforge/sentinel/sentinel/src/gov/nih/nci/cadsr/sentinel/ui/Edit.java,v 1.3 2007-07-19 15:26:45 hebell Exp $
// $Name: not supported by cvs2svn $

package gov.nih.nci.cadsr.sentinel.ui;

import gov.nih.nci.cadsr.sentinel.database.DBAlert;
import gov.nih.nci.cadsr.sentinel.database.DBAlertUtil;
import gov.nih.nci.cadsr.sentinel.tool.AlertRec;
import gov.nih.nci.cadsr.sentinel.tool.Constants;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;

/**
 * Process the edit.jsp page.
 *
 * @author Larry Hebel
 */
public class Edit extends Action
{
    /**
     * Constructor.
     */
    public Edit()
    {
    }

    /**
     * Action process to Edit an Alert Definition.
     *
     * Dispatches on the form's "next screen" value: Back cancels the edit,
     * Run stashes the edited record for execution, Save persists it
     * (update when the record already has a database id, insert otherwise).
     *
     * @param mapping_
     *        The action map from the struts-config.xml.
     * @param form_
     *        The form bean for the edit.jsp page.
     * @param request_
     *        The servlet request object.
     * @param response_
     *        The servlet response object.
     * @return The action to continue processing.
     */
    public ActionForward execute(ActionMapping mapping_, ActionForm form_,
        HttpServletRequest request_, HttpServletResponse response_)
    {
        // Get data.
        EditForm form = (EditForm) form_;
        AlertBean ub = (AlertBean) request_.getSession().getAttribute(
            AlertBean._SESSIONNAME);
        ub.setRunPrev(Constants._ACTEDIT);

        // If we are going back this will effectively cancel any edit operation.
        if (form.getNextScreen().equals(Constants._ACTBACK))
        {
            form.setNextScreen(ub.getEditPrev());
        }

        // The user requests a Run.
        else if (form.getNextScreen().equals(Constants._ACTRUN))
        {
            // Get the data from the form.
            AlertRec rec = readForm(ub, form);
            ub.setWorking(rec);
        }

        // We must save the edits made.
        else if (form.getNextScreen().equals(Constants._ACTSAVE))
        {
            // We will return to the edit screen to display the message box.
            form.setNextScreen(Constants._ACTEDIT);

            // Get the data from the form.
            AlertRec rec = readForm(ub, form);

            // Connect to the database.
            DBAlert db = DBAlertUtil.factory();
            if (db.open(request_, ub.getUser()) == 0)
            {
                // If we started with a database record this is an update
                // operation.
                if (rec.getAlertRecNum() != null)
                {
                    // This should always work but you never know.
                    // A zero return indicates success; "Y" tells the JSP the
                    // save worked, otherwise the DB error text is surfaced.
                    if (db.updateAlert(rec) == 0)
                    {
                        request_.setAttribute(Constants._ACTSAVE, "Y");
                        form.setNextScreen(Constants._ACTLIST);
                    }
                    else
                        request_.setAttribute(Constants._ACTSAVE, db
                            .getErrorMsg(true));
                }

                // This is a new alert definition.
                else
                {
                    // Of course it should work.
                    if (db.insertAlert(rec) == 0)
                    {
                        request_.setAttribute(Constants._ACTSAVE, "Y");
                        form.setNextScreen(Constants._ACTLIST);
                    }
                    else
                        request_.setAttribute(Constants._ACTSAVE, db
                            .getErrorMsg(true));
                }
            }
            else
            {
                // We couldn't connect.
                request_.setAttribute(Constants._ACTSAVE, db.getErrorMsg(true));
            }

            // Always close the connection.
            db.close();
            ub.setWorking(rec);
        }

        // On to the next page.
        return mapping_.findForward(form.getNextScreen());
    }

    /**
     * Move the data from the form into the standard AlertRec object.
     *
     * Starts from a copy of the session's working record so fields not on
     * the edit form are preserved, then overwrites every editable property.
     *
     * @param ub_
     *        The session bean.
     * @param form_
     *        The edit form from the request.
     * @return An AlertRec object.
     */
    private AlertRec readForm(AlertBean ub_, EditForm form_)
    {
        AlertRec rec = new AlertRec(ub_.getWorking());
        rec.setName(form_.getPropName());
        rec.setSummary(form_.getPropDesc(), true);
        rec.setFreq(form_.getFreqUnit(), form_.getFreqWeekly(), form_
            .getFreqMonthly());
        rec.setActive(form_.getPropStatus(), form_.getPropBeginDate(), form_
            .getPropEndDate());
        rec.setInactiveReason(form_.getPropStatusReason());
        rec.setInfoVerNum(form_.getInfoVerNum());
        rec.setActVerNum(form_.getActVerNum());
        rec.setIntro(form_.getPropIntro(), true);
        rec.setIncPropSect(form_.getRepIncProp());
        rec.setReportStyle(form_.getRepStyle());
        rec.setReportEmpty(form_.getFreqEmpty());
        rec.setReportAck(form_.getFreqAck());
        rec.setVDTE(form_.getInfoVDTE());
        rec.setVDTN(form_.getInfoVDTN());
        rec.setIUse(form_.getInfoContextUse());
        rec.setAUse(form_.getActContextUse());
        rec.setTerm(form_.getInfoSearchTerm());
        rec.setIVersion(form_.getInfoVersion());
        rec.setAVersion(form_.getActVersion());
        rec.setRelated(form_.getActDependantChg());
        rec.setAdminChg(form_.getActAdminChg());
        rec.setAdminCopy(form_.getActAdminCopy());
        rec.setAdminNew(form_.getActAdminNew());
        rec.setAdminDel(form_.getActAdminDel());
        rec.setAVDT(form_.getActVDT());
        rec.setRecipients(form_.getPropRecipients());
        rec.setAttrs(form_.getRepAttributes());
        rec.setSearchIn(form_.getInfoSearchIn());
        rec.setAWorkflow(form_.getActWorkflowStatus());
        rec.setCWorkflow(form_.getInfoWorkflow());
        rec.setARegis(form_.getActRegStatus());
        rec.setCRegStatus(form_.getInfoRegStatus());
        rec.setCreators(form_.getInfoCreator());
        rec.setModifiers(form_.getInfoModifier());
        rec.setSearchAC(form_.getInfoSearchFor());
        rec.setSchemes(form_.getInfoSchemes());
        rec.setSchemeItems(form_.getInfoSchemeItems());
        rec.setDomains(form_.getInfoConceptDomain());
        rec.setContexts(form_.getInfoContext());
        rec.setProtocols(form_.getInfoProtos());
        rec.setForms(form_.getInfoForms());
        rec.setDateFilter((form_.getInfoDateFilter()));
        rec.setACTypes(form_.getInfoACTypes());
        rec.setIAssocLvl(form_.getInfoAssocLvl());
        return rec;
    }
}
bsd-3-clause
MarinnaCole/LightZone
lightcrafts/src/com/lightcrafts/image/export/BlackPointCompensationOption.java
881
/* Copyright (C) 2005-2011 Fabio Riccardi */

package com.lightcrafts.image.export;

/**
 * A <code>BlackPointCompensationOption</code> is-a
 * {@link BooleanExportOption} for storing a boolean value representing whether
 * black-point compensation is enabled.
 *
 * @author Paul J. Lucas [paul@lightcrafts.com]
 */
public final class BlackPointCompensationOption extends BooleanExportOption {

    public static final boolean DEFAULT_VALUE = false;

    public static final String NAME = BlackPointCompensationOption.class.getName();

    /**
     * Construct a <code>BlackPointCompensationOption</code>.
     *
     * @param options The {@link ImageExportOptions} of which this option is a
     * member.
     */
    public BlackPointCompensationOption( ImageExportOptions options ) {
        super( NAME, DEFAULT_VALUE, options );
    }
}
/* vim:set et sw=4 ts=4: */
bsd-3-clause
consulo/lombokOld
test/transform/resource/after-delombok/SetterOnClass.java
1462
// NOTE(review): per its path ("after-delombok") this appears to be an
// expected-output fixture compared textually by lombok's test harness —
// confirm before changing any token below, including comments.
class SetterOnClass1 {
    boolean isNone;
    boolean isPublic;
    @java.lang.SuppressWarnings("all")
    public void setPublic(final boolean isPublic) {
        this.isPublic = isPublic;
    }
}
class SetterOnClass2 {
    boolean isNone;
    boolean isProtected;
    boolean isPackage;
    @java.lang.SuppressWarnings("all")
    protected void setProtected(final boolean isProtected) {
        this.isProtected = isProtected;
    }
    @java.lang.SuppressWarnings("all")
    void setPackage(final boolean isPackage) {
        this.isPackage = isPackage;
    }
}
class SetterOnClass3 {
    boolean isNone;
    boolean isPackage;
    @java.lang.SuppressWarnings("all")
    void setPackage(final boolean isPackage) {
        this.isPackage = isPackage;
    }
}
class SetterOnClass4 {
    boolean isNone;
    boolean isPrivate;
    @java.lang.SuppressWarnings("all")
    private void setPrivate(final boolean isPrivate) {
        this.isPrivate = isPrivate;
    }
}
class SetterOnClass5 {
    boolean isNone;
    boolean isPublic;
    @java.lang.SuppressWarnings("all")
    public void setPublic(final boolean isPublic) {
        this.isPublic = isPublic;
    }
}
class SetterOnClass6 {
    String couldBeNull;
    @lombok.NonNull
    String nonNull;
    @java.lang.SuppressWarnings("all")
    public void setCouldBeNull(final String couldBeNull) {
        this.couldBeNull = couldBeNull;
    }
    @java.lang.SuppressWarnings("all")
    public void setNonNull(@lombok.NonNull final String nonNull) {
        if (nonNull == null) {
            throw new java.lang.NullPointerException("nonNull");
        }
        this.nonNull = nonNull;
    }
}
mit
computergeek1507/openhab
bundles/binding/org.openhab.binding.tinkerforge/src/main/java/org/openhab/binding/tinkerforge/internal/model/TFServoConfiguration.java
9011
/**
 * Copyright (c) 2010-2019 Contributors to the openHAB project
 *
 * See the NOTICE file(s) distributed with this work for additional
 * information.
 *
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License 2.0 which is available at
 * http://www.eclipse.org/legal/epl-2.0
 *
 * SPDX-License-Identifier: EPL-2.0
 */
// NOTE(review): EMF-generated model interface (see @generated tags) — edits
// here are overwritten on model regeneration; change the .ecore model instead.
package org.openhab.binding.tinkerforge.internal.model;

/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>TF Servo Configuration</b></em>'.
 *
 * @author Theo Weiss
 * @since 1.3.0
 *        <!-- end-user-doc -->
 *
 *        <p>
 *        The following features are supported:
 *        </p>
 *        <ul>
 *        <li>{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getVelocity
 *        <em>Velocity</em>}</li>
 *        <li>{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getAcceleration
 *        <em>Acceleration</em>}</li>
 *        <li>{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getServoVoltage <em>Servo
 *        Voltage</em>}</li>
 *        <li>{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getPulseWidthMin <em>Pulse
 *        Width Min</em>}</li>
 *        <li>{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getPulseWidthMax <em>Pulse
 *        Width Max</em>}</li>
 *        <li>{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getPeriod <em>Period</em>}</li>
 *        <li>{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getOutputVoltage <em>Output
 *        Voltage</em>}</li>
 *        </ul>
 *
 * @see org.openhab.binding.tinkerforge.internal.model.ModelPackage#getTFServoConfiguration()
 * @model
 * @generated
 */
public interface TFServoConfiguration extends DimmableConfiguration {
    /**
     * Returns the value of the '<em><b>Velocity</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of the '<em>Velocity</em>' attribute isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     *
     * @return the value of the '<em>Velocity</em>' attribute.
     * @see #setVelocity(int)
     * @see org.openhab.binding.tinkerforge.internal.model.ModelPackage#getTFServoConfiguration_Velocity()
     * @model unique="false"
     * @generated
     */
    int getVelocity();

    /**
     * Sets the value of the '{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getVelocity
     * <em>Velocity</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param value the new value of the '<em>Velocity</em>' attribute.
     * @see #getVelocity()
     * @generated
     */
    void setVelocity(int value);

    /**
     * Returns the value of the '<em><b>Acceleration</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of the '<em>Acceleration</em>' attribute isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     *
     * @return the value of the '<em>Acceleration</em>' attribute.
     * @see #setAcceleration(int)
     * @see org.openhab.binding.tinkerforge.internal.model.ModelPackage#getTFServoConfiguration_Acceleration()
     * @model unique="false"
     * @generated
     */
    int getAcceleration();

    /**
     * Sets the value of the
     * '{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getAcceleration
     * <em>Acceleration</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param value the new value of the '<em>Acceleration</em>' attribute.
     * @see #getAcceleration()
     * @generated
     */
    void setAcceleration(int value);

    /**
     * Returns the value of the '<em><b>Servo Voltage</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of the '<em>Servo Voltage</em>' attribute isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     *
     * @return the value of the '<em>Servo Voltage</em>' attribute.
     * @see #setServoVoltage(int)
     * @see org.openhab.binding.tinkerforge.internal.model.ModelPackage#getTFServoConfiguration_ServoVoltage()
     * @model unique="false"
     * @generated
     */
    int getServoVoltage();

    /**
     * Sets the value of the
     * '{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getServoVoltage
     * <em>Servo Voltage</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param value the new value of the '<em>Servo Voltage</em>' attribute.
     * @see #getServoVoltage()
     * @generated
     */
    void setServoVoltage(int value);

    /**
     * Returns the value of the '<em><b>Pulse Width Min</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of the '<em>Pulse Width Min</em>' attribute isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     *
     * @return the value of the '<em>Pulse Width Min</em>' attribute.
     * @see #setPulseWidthMin(int)
     * @see org.openhab.binding.tinkerforge.internal.model.ModelPackage#getTFServoConfiguration_PulseWidthMin()
     * @model unique="false"
     * @generated
     */
    int getPulseWidthMin();

    /**
     * Sets the value of the
     * '{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getPulseWidthMin <em>Pulse Width
     * Min</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param value the new value of the '<em>Pulse Width Min</em>' attribute.
     * @see #getPulseWidthMin()
     * @generated
     */
    void setPulseWidthMin(int value);

    /**
     * Returns the value of the '<em><b>Pulse Width Max</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of the '<em>Pulse Width Max</em>' attribute isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     *
     * @return the value of the '<em>Pulse Width Max</em>' attribute.
     * @see #setPulseWidthMax(int)
     * @see org.openhab.binding.tinkerforge.internal.model.ModelPackage#getTFServoConfiguration_PulseWidthMax()
     * @model unique="false"
     * @generated
     */
    int getPulseWidthMax();

    /**
     * Sets the value of the
     * '{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getPulseWidthMax <em>Pulse Width
     * Max</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param value the new value of the '<em>Pulse Width Max</em>' attribute.
     * @see #getPulseWidthMax()
     * @generated
     */
    void setPulseWidthMax(int value);

    /**
     * Returns the value of the '<em><b>Period</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of the '<em>Period</em>' attribute isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     *
     * @return the value of the '<em>Period</em>' attribute.
     * @see #setPeriod(int)
     * @see org.openhab.binding.tinkerforge.internal.model.ModelPackage#getTFServoConfiguration_Period()
     * @model unique="false"
     * @generated
     */
    int getPeriod();

    /**
     * Sets the value of the '{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getPeriod
     * <em>Period</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param value the new value of the '<em>Period</em>' attribute.
     * @see #getPeriod()
     * @generated
     */
    void setPeriod(int value);

    /**
     * Returns the value of the '<em><b>Output Voltage</b></em>' attribute.
     * <!-- begin-user-doc -->
     * <p>
     * If the meaning of the '<em>Output Voltage</em>' attribute isn't clear,
     * there really should be more of a description here...
     * </p>
     * <!-- end-user-doc -->
     *
     * @return the value of the '<em>Output Voltage</em>' attribute.
     * @see #setOutputVoltage(int)
     * @see org.openhab.binding.tinkerforge.internal.model.ModelPackage#getTFServoConfiguration_OutputVoltage()
     * @model unique="false"
     * @generated
     */
    int getOutputVoltage();

    /**
     * Sets the value of the
     * '{@link org.openhab.binding.tinkerforge.internal.model.TFServoConfiguration#getOutputVoltage <em>Output
     * Voltage</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     *
     * @param value the new value of the '<em>Output Voltage</em>' attribute.
     * @see #getOutputVoltage()
     * @generated
     */
    void setOutputVoltage(int value);

} // TFServoConfiguration
epl-1.0
shitalm/jsignpdf2
src/main/java/com/lowagie/text/pdf/PdfCopyFields.java
9897
/*
 * Copyright 2004 by Paulo Soares.
 *
 * The contents of this file are subject to the Mozilla Public License Version 1.1
 * (the "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the License.
 *
 * The Original Code is 'iText, a free JAVA-PDF library'.
 *
 * The Initial Developer of the Original Code is Bruno Lowagie. Portions created by
 * the Initial Developer are Copyright (C) 1999, 2000, 2001, 2002 by Bruno Lowagie.
 * All Rights Reserved.
 * Co-Developer of the code is Paulo Soares. Portions created by the Co-Developer
 * are Copyright (C) 2000, 2001, 2002 by Paulo Soares. All Rights Reserved.
 *
 * Contributor(s): all the names of the contributors are added in the source code
 * where applicable.
 *
 * Alternatively, the contents of this file may be used under the terms of the
 * LGPL license (the "GNU LIBRARY GENERAL PUBLIC LICENSE"), in which case the
 * provisions of LGPL are applicable instead of those above. If you wish to
 * allow use of your version of this file only under the terms of the LGPL
 * License and not to allow others to use your version of this file under
 * the MPL, indicate your decision by deleting the provisions above and
 * replace them with the notice and other provisions required by the LGPL.
 * If you do not delete the provisions above, a recipient may use your version
 * of this file under either the MPL or the GNU LIBRARY GENERAL PUBLIC LICENSE.
 *
 * This library is free software; you can redistribute it and/or modify it
 * under the terms of the MPL as stated above or under the terms of the GNU
 * Library General Public License as published by the Free Software Foundation;
 * either version 2 of the License, or any later version.
 *
 * This library is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
 * FOR A PARTICULAR PURPOSE. See the GNU Library general Public License for more
 * details.
 *
 * If you didn't download this code from the following link, you should check if
 * you aren't using an obsolete version:
 * http://www.lowagie.com/iText/
 */

package com.lowagie.text.pdf;

import java.io.IOException;
import java.io.OutputStream;
import java.security.cert.Certificate;
import java.util.List;

import com.lowagie.text.DocWriter;
import com.lowagie.text.DocumentException;
import com.lowagie.text.pdf.interfaces.PdfEncryptionSettings;
import com.lowagie.text.pdf.interfaces.PdfViewerPreferences;

/**
 * Concatenates PDF documents including form fields. The rules for the form field
 * concatenation are the same as in Acrobat. All the documents are kept in memory unlike
 * PdfCopy.
 * <p>
 * This class is a thin facade: every method delegates to the internal
 * {@link PdfCopyFieldsImp} instance {@code fc}.
 * @author Paulo Soares (psoares@consiste.pt)
 */
public class PdfCopyFields implements PdfViewerPreferences, PdfEncryptionSettings {

    // The implementation object that does all the real work.
    private PdfCopyFieldsImp fc;

    /**
     * Creates a new instance.
     * @param os the output stream
     * @throws DocumentException on error
     */
    public PdfCopyFields(OutputStream os) throws DocumentException {
        fc = new PdfCopyFieldsImp(os);
    }

    /**
     * Creates a new instance.
     * @param os the output stream
     * @param pdfVersion the pdf version the output will have
     * @throws DocumentException on error
     */
    public PdfCopyFields(OutputStream os, char pdfVersion) throws DocumentException {
        fc = new PdfCopyFieldsImp(os, pdfVersion);
    }

    /**
     * Concatenates a PDF document.
     * @param reader the PDF document
     * @throws DocumentException on error
     */
    public void addDocument(PdfReader reader) throws DocumentException, IOException {
        fc.addDocument(reader);
    }

    /**
     * Concatenates a PDF document selecting the pages to keep. The pages are described as a
     * <CODE>List</CODE> of <CODE>Integer</CODE>. The page ordering can be changed but
     * no page repetitions are allowed.
     * @param reader the PDF document
     * @param pagesToKeep the pages to keep
     * @throws DocumentException on error
     */
    public void addDocument(PdfReader reader, List pagesToKeep) throws DocumentException, IOException {
        fc.addDocument(reader, pagesToKeep);
    }

    /**
     * Concatenates a PDF document selecting the pages to keep. The pages are described as
     * ranges. The page ordering can be changed but
     * no page repetitions are allowed.
     * @param reader the PDF document
     * @param ranges the comma separated ranges as described in {@link SequenceList}
     * @throws DocumentException on error
     */
    public void addDocument(PdfReader reader, String ranges) throws DocumentException, IOException {
        fc.addDocument(reader, SequenceList.expand(ranges, reader.getNumberOfPages()));
    }

    /** Sets the encryption options for this document. The userPassword and the
     * ownerPassword can be null or have zero length. In this case the ownerPassword
     * is replaced by a random string. The open permissions for the document can be
     * AllowPrinting, AllowModifyContents, AllowCopy, AllowModifyAnnotations,
     * AllowFillIn, AllowScreenReaders, AllowAssembly and AllowDegradedPrinting.
     * The permissions can be combined by ORing them.
     * @param userPassword the user password. Can be null or empty
     * @param ownerPassword the owner password. Can be null or empty
     * @param permissions the user permissions
     * @param strength128Bits <code>true</code> for 128 bit key length, <code>false</code> for 40 bit key length
     * @throws DocumentException if the document is already open
     */
    public void setEncryption(byte userPassword[], byte ownerPassword[], int permissions, boolean strength128Bits) throws DocumentException {
        // Translate the boolean strength flag into the writer's encryption-type constant.
        fc.setEncryption(userPassword, ownerPassword, permissions, strength128Bits ? PdfWriter.STANDARD_ENCRYPTION_128 : PdfWriter.STANDARD_ENCRYPTION_40);
    }

    /**
     * Sets the encryption options for this document. The userPassword and the
     * ownerPassword can be null or have zero length. In this case the ownerPassword
     * is replaced by a random string. The open permissions for the document can be
     * AllowPrinting, AllowModifyContents, AllowCopy, AllowModifyAnnotations,
     * AllowFillIn, AllowScreenReaders, AllowAssembly and AllowDegradedPrinting.
     * The permissions can be combined by ORing them.
     * @param strength true for 128 bit key length. false for 40 bit key length
     * @param userPassword the user password. Can be null or empty
     * @param ownerPassword the owner password. Can be null or empty
     * @param permissions the user permissions
     * @throws DocumentException if the document is already open
     */
    public void setEncryption(boolean strength, String userPassword, String ownerPassword, int permissions) throws DocumentException {
        setEncryption(DocWriter.getISOBytes(userPassword), DocWriter.getISOBytes(ownerPassword), permissions, strength);
    }

    /**
     * Closes the output document.
     */
    public void close() {
        fc.close();
    }

    /**
     * Opens the document. This is usually not needed as addDocument() will do it
     * automatically.
     */
    public void open() {
        fc.openDoc();
    }

    /**
     * Adds JavaScript to the global document
     * @param js the JavaScript
     */
    public void addJavaScript(String js) {
        fc.addJavaScript(js, !PdfEncodings.isPdfDocEncoding(js));
    }

    /**
     * Sets the bookmarks. The list structure is defined in
     * <CODE>SimpleBookmark#</CODE>.
     * @param outlines the bookmarks or <CODE>null</CODE> to remove any
     */
    public void setOutlines(List outlines) {
        fc.setOutlines(outlines);
    }

    /** Gets the underlying PdfWriter.
     * @return the underlying PdfWriter
     */
    public PdfWriter getWriter() {
        return fc;
    }

    /**
     * Gets the 1.5 compression status.
     * @return <code>true</code> if the 1.5 compression is on
     */
    public boolean isFullCompression() {
        return fc.isFullCompression();
    }

    /**
     * Sets the document's compression to the new 1.5 mode with object streams and xref
     * streams. It can be set at any time but once set it can't be unset.
     * <p>
     * If set before opening the document it will also set the pdf version to 1.5.
     */
    public void setFullCompression() {
        fc.setFullCompression();
    }

    /**
     * @see com.lowagie.text.pdf.interfaces.PdfEncryptionSettings#setEncryption(byte[], byte[], int, int)
     */
    public void setEncryption(byte[] userPassword, byte[] ownerPassword, int permissions, int encryptionType) throws DocumentException {
        fc.setEncryption(userPassword, ownerPassword, permissions, encryptionType);
    }

    /**
     * @see com.lowagie.text.pdf.interfaces.PdfViewerPreferences#addViewerPreference(com.lowagie.text.pdf.PdfName, com.lowagie.text.pdf.PdfObject)
     */
    public void addViewerPreference(PdfName key, PdfObject value) {
        fc.addViewerPreference(key, value);
    }

    /**
     * @see com.lowagie.text.pdf.interfaces.PdfViewerPreferences#setViewerPreferences(int)
     */
    public void setViewerPreferences(int preferences) {
        fc.setViewerPreferences(preferences);
    }

    /**
     * @see com.lowagie.text.pdf.interfaces.PdfEncryptionSettings#setEncryption(java.security.cert.Certificate[], int[], int)
     */
    public void setEncryption(Certificate[] certs, int[] permissions, int encryptionType) throws DocumentException {
        fc.setEncryption(certs, permissions, encryptionType);
    }
}
gpl-2.0
mdaniel/svn-caucho-com-resin
modules/resin/src/com/caucho/network/balance/ClientNetworkState.java
6946
/*
 * Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
 *
 * This file is part of Resin(R) Open Source
 *
 * Each copy or derived work must preserve the copyright notice and this
 * notice unmodified.
 *
 * Resin Open Source is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * Resin Open Source is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
 * of NON-INFRINGEMENT.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Resin Open Source; if not, write to the
 *
 *   Free Software Foundation, Inc.
 *   59 Temple Place, Suite 330
 *   Boston, MA 02111-1307  USA
 *
 * @author Scott Ferguson
 */

package com.caucho.network.balance;

import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;

import com.caucho.util.CurrentTime;

/**
 * State of a network connection to a target server.
 *
 * <h3>Fail Recover Time</h3>
 *
 * The fail recover time is dynamic. The first timeout is 1s. After the 1s,
 * the client tries again. If that fails, the timeout is doubled until
 * reaching the maximum _loadBalanceRecoverTime.
 */
// NOTE(review): the javadoc above says the timeout is "doubled", but onFail()
// below grows it linearly (+1000ms per failure, capped at _recoverTimeout) —
// confirm which is intended and align doc or code.
public class ClientNetworkState {
  private final String _id;

  // Maximum recovery timeout; the dynamic timeout never exceeds this.
  private final long _recoverTimeout;

  // Lock-free state machine; all transitions go through toState()'s CAS loop.
  private final AtomicReference<State> _state
    = new AtomicReference<State>(State.ACTIVE);

  // Current backoff before a failed server is probed again, in ms.
  private AtomicLong _dynamicRecoverTimeout = new AtomicLong();

  // Number of in-flight connections; used to allow only one probe while failed.
  private AtomicInteger _connectionCount = new AtomicInteger();

  // load management data
  private volatile long _firstFailTime;
  private volatile long _lastFailTime;
  private volatile long _firstSuccessTime;

  public ClientNetworkState(String id,
                            long recoverTimeout)
  {
    _id = id;

    _recoverTimeout = recoverTimeout;
  }

  /**
   * Returns the user-readable id of the target server.
   */
  public String getId()
  {
    return _id;
  }

  /**
   * Return the max recover time.
   */
  public long getRecoverTimeout()
  {
    return _recoverTimeout;
  }

  /**
   * Returns the lifecycle state.
   */
  public final String getState()
  {
    // updateWarmup();

    return String.valueOf(_state);
  }

  /**
   * Returns true if the server is active.
   */
  public final boolean isActive()
  {
    return _state.get().isActive();
  }

  /**
   * Return true if enabled.
   */
  public boolean isEnabled()
  {
    return _state.get().isEnabled();
  }

  /**
   * Returns true if the server is dead.
   */
  public final boolean isDead()
  {
    return ! isActive();
  }

  //
  // action callbacks
  //

  /**
   * Enable the client.
   */
  public void enable()
  {
    toState(State.ENABLED);
  }

  /**
   * Disable the client.
   */
  public void disable()
  {
    toState(State.DISABLED);

    _firstSuccessTime = 0;
  }

  /**
   * Called when the server has a successful response
   */
  public void onSuccess()
  {
    if (_firstSuccessTime <= 0) {
      _firstSuccessTime = CurrentTime.getCurrentTime();
    }

    // reset the connection fail recover time
    _dynamicRecoverTimeout.set(1000L);
    _firstFailTime = 0;
  }

  /**
   * Called when the connection fails.
   */
  public void onFail()
  {
    _lastFailTime = CurrentTime.getCurrentTime();

    if (_firstFailTime == 0) {
      _firstFailTime = _lastFailTime;
    }

    _firstSuccessTime = 0;

    toState(State.FAIL);

    // Grow the backoff by 1s per failure, capped at the configured maximum.
    long recoverTimeout = _dynamicRecoverTimeout.get();
    long nextRecoverTimeout = Math.min(recoverTimeout + 1000L, _recoverTimeout);

    _dynamicRecoverTimeout.compareAndSet(recoverTimeout, nextRecoverTimeout);
  }

  /**
   * Start a new connection.  Returns true if the connection can be
   * started.
   */
  public boolean startConnection()
  {
    State state = _state.get();

    if (state.isActive()) {
      // when active, always start a connection
      _connectionCount.incrementAndGet();

      return true;
    }

    long now = CurrentTime.getCurrentTime();
    long lastFailTime = _lastFailTime;
    long recoverTimeout = _dynamicRecoverTimeout.get();

    if (now < lastFailTime + recoverTimeout) {
      // if the fail recover hasn't timed out, return false
      return false;
    }

    // when fail, only start a single connection
    // (CAS loop so that exactly one caller wins the 0 -> 1 transition)
    int count;

    do {
      count = _connectionCount.get();

      if (count > 0) {
        return false;
      }
    } while (! _connectionCount.compareAndSet(count, count + 1));

    return true;
  }

  public void completeConnection()
  {
    _connectionCount.decrementAndGet();
  }

  /**
   * Close the client
   */
  public void close()
  {
    toState(State.CLOSED);
  }

  // Applies a transition request; each state decides the resulting state,
  // retried with CAS until no concurrent transition interferes.
  private State toState(State targetState)
  {
    State oldState;
    State newState;

    do {
      oldState = _state.get();
      newState = targetState.toState(oldState);
    } while (! _state.compareAndSet(oldState, newState));

    return _state.get();
  }

  @Override
  public String toString()
  {
    return (getClass().getSimpleName() + "[" + getId() + "]");
  }

  // State machine: ACTIVE <-> FAIL, either can be DISABLED; ENABLED and
  // CLOSED are transition requests; CLOSED is terminal.
  enum State {
    ACTIVE {
      boolean isActive() { return true; }
      boolean isEnabled() { return true; }

      State toState(State state) { return state.toActive(); }

      State toFail() { return FAIL; }
      State toDisable() { return DISABLED; }
    },

    FAIL {
      boolean isEnabled() { return true; }

      State toState(State state) { return state.toFail(); }

      State toActive() { return ACTIVE; }
      State toDisable() { return DISABLED; }
    },

    DISABLED {
      State toState(State state) { return state.toDisable(); }

      State toActive() { return this; }
      State toFail() { return this; }
      State toEnable() { return State.ACTIVE; }
    },

    ENABLED {
      State toState(State state) { return state.toEnable(); }
    },

    CLOSED {
      boolean isClosed() { return true; }

      State toState(State state) { return CLOSED; }

      State toActive() { return this; }
      State toFail() { return this; }
      State toEnable() { return this; }
      State toDisable() { return this; }
    };

    boolean isActive() { return false; }
    boolean isEnabled() { return false; }
    boolean isClosed() { return false; }

    State toActive() { return this; }
    State toFail() { return this; }
    State toEnable() { return this; }
    State toDisable() { return DISABLED; }

    State toState(State state)
    {
      throw new UnsupportedOperationException(toString());
    }
  }
}
gpl-2.0
khmarbaise/jqassistant
plugin/java/src/test/java/com/buschmais/jqassistant/plugin/java/test/set/scanner/annotation/NestedAnnotation.java
325
package com.buschmais.jqassistant.plugin.java.test.set.scanner.annotation;

import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;

/**
 * An annotation which will be nested in {@link Annotation}.
 * <p>
 * Retained at runtime so the scanner under test can observe it via reflection.
 */
@Retention(RetentionPolicy.RUNTIME)
public @interface NestedAnnotation {

    /**
     * The single value carried by this nested annotation.
     */
    String value();
}
gpl-3.0
cascheberg/Signal-Android
core-util/src/main/java/org/signal/core/util/GraphemeClusterLimitFilter.java
1805
package org.signal.core.util; import android.text.InputFilter; import android.text.Spanned; import org.signal.core.util.logging.Log; /** * This filter will constrain edits not to make the number of character breaks of the text * greater than the specified maximum. * <p> * This means it will limit to a maximum number of grapheme clusters. */ public final class GraphemeClusterLimitFilter implements InputFilter { private static final String TAG = Log.tag(GraphemeClusterLimitFilter.class); private final BreakIteratorCompat breakIteratorCompat; private final int max; public GraphemeClusterLimitFilter(int max) { this.breakIteratorCompat = BreakIteratorCompat.getInstance(); this.max = max; } @Override public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) { CharSequence sourceFragment = source.subSequence(start, end); CharSequence head = dest.subSequence(0, dstart); CharSequence tail = dest.subSequence(dend, dest.length()); breakIteratorCompat.setText(String.format("%s%s%s", head, sourceFragment, tail)); int length = breakIteratorCompat.countBreaks(); if (length > max) { breakIteratorCompat.setText(sourceFragment); int sourceLength = breakIteratorCompat.countBreaks(); CharSequence trimmedSource = breakIteratorCompat.take(sourceLength - (length - max)); breakIteratorCompat.setText(String.format("%s%s%s", head, trimmedSource, tail)); int newExpectedCount = breakIteratorCompat.countBreaks(); if (newExpectedCount > max) { Log.w(TAG, "Failed to create string under the required length " + newExpectedCount); return ""; } return trimmedSource; } return source; } }
gpl-3.0
IdentityAutomation/jcifs-idautopatch
src/jcifs/smb/NtlmPasswordAuthentication.java
22939
/* jcifs smb client library in Java * Copyright (C) 2002 "Michael B. Allen" <jcifs at samba dot org> * "Eric Glass" <jcifs at samba dot org> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package jcifs.smb; import java.io.UnsupportedEncodingException; import java.io.Serializable; import java.security.Principal; import java.security.MessageDigest; import java.security.GeneralSecurityException; import java.util.Random; import java.util.Arrays; import jcifs.Config; import jcifs.util.*; /** * This class stores and encrypts NTLM user credentials. The default * credentials are retrieved from the <tt>jcifs.smb.client.domain</tt>, * <tt>jcifs.smb.client.username</tt>, and <tt>jcifs.smb.client.password</tt> * properties. * <p> * Read <a href="../../../authhandler.html">jCIFS Exceptions and * NtlmAuthenticator</a> for related information. 
*/
public final class NtlmPasswordAuthentication implements Principal, Serializable {

    // LM compatibility level (jcifs.smb.lmCompatibility): 0-2 select classic LM/NTLM
    // responses below, 3-5 select the LMv2/NTLMv2-style paths.
    private static final int LM_COMPATIBILITY =
            Config.getInt("jcifs.smb.lmCompatibility", 3);

    // NOTE(review): java.util.Random is not cryptographically strong; the client
    // challenges generated below presumably should use SecureRandom — confirm.
    private static final Random RANDOM = new Random();

    private static LogStream log = LogStream.getInstance();

    // KGS!@#$% -- the ASCII bytes of the well-known constant string "KGS!@#$%"
    // that is DES-encrypted with the password to form the LM hash.
    private static final byte[] S8 = {
        (byte)0x4b, (byte)0x47, (byte)0x53, (byte)0x21,
        (byte)0x40, (byte)0x23, (byte)0x24, (byte)0x25
    };

    /* Accepts key multiple of 7
     * Returns enc multiple of 8
     * Multiple is the same like: 21 byte key gives 24 byte result
     *
     * Each 7-byte slice of the key is used as a DES key to encrypt the same
     * 8-byte data block; the 8-byte ciphertexts are concatenated into e.
     */
    private static void E( byte[] key, byte[] data, byte[] e ) {
        byte[] key7 = new byte[7];
        byte[] e8 = new byte[8];
        for( int i = 0; i < key.length / 7; i++ ) {
            System.arraycopy( key, i * 7, key7, 0, 7 );
            DES des = new DES( key7 );
            des.encrypt( data, e8 );
            System.arraycopy( e8, 0, e, i * 8, 8 );
        }
    }

    // Defaults sourced lazily from jcifs.smb.client.* properties in initDefaults().
    static String DEFAULT_DOMAIN;
    static String DEFAULT_USERNAME;
    static String DEFAULT_PASSWORD;
    static final String BLANK = "";

    public static final NtlmPasswordAuthentication ANONYMOUS =
            new NtlmPasswordAuthentication("", "", "");

    static void initDefaults() {
        // Idempotent: first caller wins; subsequent calls are no-ops.
        if (DEFAULT_DOMAIN != null) return;
        DEFAULT_DOMAIN = Config.getProperty("jcifs.smb.client.domain", "?");
        DEFAULT_USERNAME = Config.getProperty("jcifs.smb.client.username", "GUEST");
        DEFAULT_PASSWORD = Config.getProperty("jcifs.smb.client.password", BLANK);
    }

    /**
     * Generate the ANSI DES hash for the password associated with these credentials.
     */
    static public byte[] getPreNTLMResponse( String password, byte[] challenge ) {
        byte[] p14 = new byte[14];
        byte[] p21 = new byte[21];
        byte[] p24 = new byte[24];
        byte[] passwordBytes;
        try {
            passwordBytes = password.toUpperCase().getBytes( ServerMessageBlock.OEM_ENCODING );
        } catch( UnsupportedEncodingException uee ) {
            throw new RuntimeException("Try setting jcifs.encoding=US-ASCII", uee);
        }
        int passwordLength = passwordBytes.length;
        // Only encrypt the first 14 bytes of the password for Pre 0.12 NT LM
        if( passwordLength > 14) {
            passwordLength = 14;
        }
        System.arraycopy( passwordBytes, 0, p14, 0, passwordLength );
        E( p14, S8, p21);
        E( p21, challenge, p24);
        return p24;
    }

    /**
     * Generate the Unicode MD4 hash for the password associated with these credentials.
     */
    static public byte[] getNTLMResponse( String password, byte[] challenge ) {
        byte[] uni = null;
        byte[] p21 = new byte[21];
        byte[] p24 = new byte[24];
        try {
            uni = password.getBytes( SmbConstants.UNI_ENCODING );
        } catch( UnsupportedEncodingException uee ) {
            if( log.level > 0 )
                uee.printStackTrace( log );
        }
        MD4 md4 = new MD4();
        md4.update( uni );
        try {
            md4.digest(p21, 0, 16);
        } catch (Exception ex) {
            if( log.level > 0 )
                ex.printStackTrace( log );
        }
        E( p21, challenge, p24 );
        return p24;
    }

    /**
     * Creates the LMv2 response for the supplied information.
     *
     * @param domain The domain in which the username exists.
     * @param user The username.
     * @param password The user's password.
     * @param challenge The server challenge.
     * @param clientChallenge The client challenge (nonce).
     */
    public static byte[] getLMv2Response(String domain, String user, String password,
            byte[] challenge, byte[] clientChallenge) {
        try {
            byte[] hash = new byte[16];
            byte[] response = new byte[24];
            // The next 2-1/2 lines of this should be placed with nTOWFv1 in place of password
            MD4 md4 = new MD4();
            md4.update(password.getBytes(SmbConstants.UNI_ENCODING));
            HMACT64 hmac = new HMACT64(md4.digest());
            hmac.update(user.toUpperCase().getBytes(SmbConstants.UNI_ENCODING));
            hmac.update(domain.toUpperCase().getBytes(SmbConstants.UNI_ENCODING));
            hmac = new HMACT64(hmac.digest());
            hmac.update(challenge);
            hmac.update(clientChallenge);
            // Response = 16-byte HMAC followed by the 8-byte client challenge.
            hmac.digest(response, 0, 16);
            System.arraycopy(clientChallenge, 0, response, 16, 8);
            return response;
        } catch (Exception ex) {
            if( log.level > 0 )
                ex.printStackTrace( log );
            return null;
        }
    }

    public static byte[] getNTLM2Response(byte[] nTOWFv1,
            byte[] serverChallenge, byte[] clientChallenge) {
        // Session hash = first 8 bytes of MD5(serverChallenge || clientChallenge[0..8)).
        byte[] sessionHash = new byte[8];
        try {
            MessageDigest md5;
            md5 = MessageDigest.getInstance("MD5");
            md5.update(serverChallenge);
            md5.update(clientChallenge, 0, 8);
            System.arraycopy(md5.digest(), 0, sessionHash, 0, 8);
        } catch (GeneralSecurityException gse) {
            if (log.level > 0)
                gse.printStackTrace(log);
            throw new RuntimeException("MD5", gse);
        }
        byte[] key = new byte[21];
        System.arraycopy(nTOWFv1, 0, key, 0, 16);
        byte[] ntResponse = new byte[24];
        E(key, sessionHash, ntResponse);
        return ntResponse;
    }

    public static byte[] nTOWFv1(String password) {
        if (password == null)
            throw new RuntimeException("Password parameter is required");
        try {
            MD4 md4 = new MD4();
            md4.update(password.getBytes(SmbConstants.UNI_ENCODING));
            return md4.digest();
        } catch (UnsupportedEncodingException uee) {
            // NOTE(review): loses the original exception as cause — consider
            // new RuntimeException(uee) instead.
            throw new RuntimeException(uee.getMessage());
        }
    }

    public static byte[] nTOWFv2(String domain, String username, String password) {
        try {
            MD4 md4 = new MD4();
            md4.update(password.getBytes(SmbConstants.UNI_ENCODING));
            HMACT64 hmac = new HMACT64(md4.digest());
            // Note: username is upper-cased here but domain is NOT (unlike
            // getLMv2Response above) — this matches the MS-NLMP NTOWFv2 definition.
            hmac.update(username.toUpperCase().getBytes(SmbConstants.UNI_ENCODING));
            hmac.update(domain.getBytes(SmbConstants.UNI_ENCODING));
            return hmac.digest();
        } catch (UnsupportedEncodingException uee) {
            throw new RuntimeException(uee.getMessage());
        }
    }

    // Core v2 helper: returns HMAC(responseKey, serverChallenge || clientData)
    // concatenated with clientData itself.
    static byte[] computeResponse(byte[] responseKey, byte[] serverChallenge,
            byte[] clientData, int offset, int length) {
        HMACT64 hmac = new HMACT64(responseKey);
        hmac.update(serverChallenge);
        hmac.update(clientData, offset, length);
        byte[] mac = hmac.digest();
        byte[] ret = new byte[mac.length + clientData.length];
        System.arraycopy(mac, 0, ret, 0, mac.length);
        System.arraycopy(clientData, 0, ret, mac.length, clientData.length);
        return ret;
    }

    public static byte[] getLMv2Response( byte[] responseKeyLM,
            byte[] serverChallenge, byte[] clientChallenge) {
        return NtlmPasswordAuthentication.computeResponse(responseKeyLM,
                serverChallenge, clientChallenge, 0, clientChallenge.length);
    }

    public static byte[] getNTLMv2Response( byte[] responseKeyNT,
            byte[] serverChallenge, byte[] clientChallenge,
            long nanos1601, byte[] targetInfo) {
        int targetInfoLength = targetInfo != null ? targetInfo.length : 0;
        // Build the NTLMv2 "blob": header, reserved, timestamp, client nonce,
        // unknown, target info, trailing zeros.
        byte[] temp = new byte[28 + targetInfoLength + 4];
        Encdec.enc_uint32le(0x00000101, temp, 0); // Header
        Encdec.enc_uint32le(0x00000000, temp, 4); // Reserved
        Encdec.enc_uint64le(nanos1601, temp, 8);
        System.arraycopy(clientChallenge, 0, temp, 16, 8);
        Encdec.enc_uint32le(0x00000000, temp, 24); // Unknown
        if (targetInfo != null)
            System.arraycopy(targetInfo, 0, temp, 28, targetInfoLength);
        Encdec.enc_uint32le(0x00000000, temp, 28 + targetInfoLength); // mystery bytes!
        return NtlmPasswordAuthentication.computeResponse(responseKeyNT,
                serverChallenge, temp, 0, temp.length);
    }

    static final NtlmPasswordAuthentication NULL =
            new NtlmPasswordAuthentication( "", "", "" );
    static final NtlmPasswordAuthentication GUEST =
            new NtlmPasswordAuthentication( "?", "GUEST", "" );
    static final NtlmPasswordAuthentication DEFAULT =
            new NtlmPasswordAuthentication( null );

    String domain;
    String username;
    String password;
    // When hashesExternal is true the hashes below were supplied by the caller
    // (NTLM HTTP auth) and password is null.
    byte[] ansiHash;
    byte[] unicodeHash;
    boolean hashesExternal = false;
    byte[] clientChallenge = null;
    byte[] challenge = null;

    /**
     * Create an <tt>NtlmPasswordAuthentication</tt> object from the userinfo
     * component of an SMB URL like "<tt>domain;user:pass</tt>". This constructor
     * is used internally by jCIFS when parsing SMB URLs.
     */
    public NtlmPasswordAuthentication( String userInfo ) {
        domain = username = password = null;
        if( userInfo != null ) {
            try {
                userInfo = unescape( userInfo );
            } catch( UnsupportedEncodingException uee ) {
            }
            int i, u, end;
            char c;
            end = userInfo.length();
            // Scan for the first ';' (domain separator) and ':' (password separator).
            for( i = 0, u = 0; i < end; i++ ) {
                c = userInfo.charAt( i );
                if( c == ';' ) {
                    domain = userInfo.substring( 0, i );
                    u = i + 1;
                } else if( c == ':' ) {
                    password = userInfo.substring( i + 1 );
                    break;
                }
            }
            username = userInfo.substring( u, i );
        }
        initDefaults();
        if( domain == null ) this.domain = DEFAULT_DOMAIN;
        if( username == null ) this.username = DEFAULT_USERNAME;
        if( password == null ) this.password = DEFAULT_PASSWORD;
    }

    /**
     * Create an <tt>NtlmPasswordAuthentication</tt> object from a
     * domain, username, and password. Parameters that are <tt>null</tt>
     * will be substituted with <tt>jcifs.smb.client.domain</tt>,
     * <tt>jcifs.smb.client.username</tt>, <tt>jcifs.smb.client.password</tt>
     * property values.
     */
    public NtlmPasswordAuthentication( String domain, String username, String password ) {
        int ci;
        if (username != null) {
            // Accept "user@DOMAIN" and "DOMAIN\user" forms in the username argument.
            ci = username.indexOf('@');
            if (ci > 0) {
                domain = username.substring(ci + 1);
                username = username.substring(0, ci);
            } else {
                ci = username.indexOf('\\');
                if (ci > 0) {
                    domain = username.substring(0, ci);
                    username = username.substring(ci + 1);
                }
            }
        }
        this.domain = domain;
        this.username = username;
        this.password = password;
        initDefaults();
        if( domain == null ) this.domain = DEFAULT_DOMAIN;
        if( username == null ) this.username = DEFAULT_USERNAME;
        if( password == null ) this.password = DEFAULT_PASSWORD;
    }

    /**
     * Create an <tt>NtlmPasswordAuthentication</tt> object with raw password
     * hashes. This is used exclusively by the <tt>jcifs.http.NtlmSsp</tt>
     * class which is in turn used by NTLM HTTP authentication functionality.
     */
    public NtlmPasswordAuthentication( String domain, String username,
            byte[] challenge, byte[] ansiHash, byte[] unicodeHash ) {
        if( domain == null || username == null ||
                ansiHash == null || unicodeHash == null ) {
            throw new IllegalArgumentException( "External credentials cannot be null" );
        }
        this.domain = domain;
        this.username = username;
        this.password = null;
        this.challenge = challenge;
        this.ansiHash = ansiHash;
        this.unicodeHash = unicodeHash;
        hashesExternal = true;
    }

    /**
     * Returns the domain.
     */
    public String getDomain() {
        return domain;
    }

    /**
     * Returns the username.
     */
    public String getUsername() {
        return username;
    }

    /**
     * Returns the password in plain text or <tt>null</tt> if the raw password
     * hashes were used to construct this <tt>NtlmPasswordAuthentication</tt>
     * object which will be the case when NTLM HTTP Authentication is
     * used. There is no way to retrieve a users password in plain text unless
     * it is supplied by the user at runtime.
     */
    public String getPassword() {
        return password;
    }

    /**
     * Return the domain and username in the format:
     * <tt>domain\\username</tt>. This is equivalent to <tt>toString()</tt>.
     */
    public String getName() {
        // Omit the domain when it is empty or the "?" placeholder default.
        boolean d = domain.length() > 0 && domain.equals( "?" ) == false;
        return d ? domain + "\\" + username : username;
    }

    /**
     * Computes the 24 byte ANSI password hash given the 8 byte server challenge.
     */
    public byte[] getAnsiHash( byte[] challenge ) {
        if( hashesExternal ) {
            return ansiHash;
        }
        switch (LM_COMPATIBILITY) {
        case 0:
        case 1:
            return getPreNTLMResponse( password, challenge );
        case 2:
            return getNTLMResponse( password, challenge );
        case 3:
        case 4:
        case 5:
            // Lazily generate and cache the client nonce for the v2 response.
            if( clientChallenge == null ) {
                clientChallenge = new byte[8];
                RANDOM.nextBytes( clientChallenge );
            }
            return getLMv2Response(domain, username, password, challenge, clientChallenge);
        default:
            return getPreNTLMResponse( password, challenge );
        }
    }

    /**
     * Computes the 24 byte Unicode password hash given the 8 byte server challenge.
     */
    public byte[] getUnicodeHash( byte[] challenge ) {
        if( hashesExternal ) {
            return unicodeHash;
        }
        switch (LM_COMPATIBILITY) {
        case 0:
        case 1:
        case 2:
            return getNTLMResponse( password, challenge );
        case 3:
        case 4:
        case 5:
            /*
            if( clientChallenge == null ) {
                clientChallenge = new byte[8];
                RANDOM.nextBytes( clientChallenge );
            }
            return getNTLMv2Response(domain, username, password, null, challenge, clientChallenge);
            */
            // v2 levels deliberately return an empty Unicode hash here (see the
            // commented-out code above); extended security is used instead.
            return new byte[0];
        default:
            return getNTLMResponse( password, challenge );
        }
    }

    public byte[] getSigningKey(byte[] challenge) throws SmbException {
        switch (LM_COMPATIBILITY) {
        case 0:
        case 1:
        case 2:
            // 40-byte key: 16-byte user session key + 24-byte Unicode response.
            byte[] signingKey = new byte[40];
            getUserSessionKey(challenge, signingKey, 0);
            System.arraycopy(getUnicodeHash(challenge), 0, signingKey, 16, 24);
            return signingKey;
        case 3:
        case 4:
        case 5:
            /* This code is only called if extended security is not on. This will
             * all be cleaned up and normalized in JCIFS 2.x.
             */
            throw new SmbException("NTLMv2 requires extended security (jcifs.smb.client.useExtendedSecurity must be true if jcifs.smb.lmCompatibility >= 3)");
        }
        return null;
    }

    /**
     * Returns the effective user session key.
     *
     * @param challenge The server challenge.
     * @return A <code>byte[]</code> containing the effective user session key,
     * used in SMB MAC signing and NTLMSSP signing and sealing.
     */
    public byte[] getUserSessionKey(byte[] challenge) {
        // No session key can be derived from externally supplied hashes.
        if (hashesExternal) return null;
        byte[] key = new byte[16];
        try {
            getUserSessionKey(challenge, key, 0);
        } catch (Exception ex) {
            if( log.level > 0 )
                ex.printStackTrace( log );
        }
        return key;
    }

    /**
     * Calculates the effective user session key.
     *
     * @param challenge The server challenge.
     * @param dest The destination array in which the user session key will be
     * placed.
     * @param offset The offset in the destination array at which the
     * session key will start.
     */
    void getUserSessionKey(byte[] challenge, byte[] dest, int offset) throws SmbException {
        if (hashesExternal) return;
        try {
            MD4 md4 = new MD4();
            md4.update(password.getBytes(SmbConstants.UNI_ENCODING));
            switch (LM_COMPATIBILITY) {
            case 0:
            case 1:
            case 2:
                // Session key = MD4(MD4(password bytes)).
                md4.update(md4.digest());
                md4.digest(dest, offset, 16);
                break;
            case 3:
            case 4:
            case 5:
                if( clientChallenge == null ) {
                    clientChallenge = new byte[8];
                    RANDOM.nextBytes( clientChallenge );
                }
                // v2: HMAC chain over upper-cased user, upper-cased domain,
                // server challenge and client challenge.
                HMACT64 hmac = new HMACT64(md4.digest());
                hmac.update(username.toUpperCase().getBytes( SmbConstants.UNI_ENCODING));
                hmac.update(domain.toUpperCase().getBytes( SmbConstants.UNI_ENCODING));
                byte[] ntlmv2Hash = hmac.digest();
                hmac = new HMACT64(ntlmv2Hash);
                hmac.update(challenge);
                hmac.update(clientChallenge);
                HMACT64 userKey = new HMACT64(ntlmv2Hash);
                userKey.update(hmac.digest());
                userKey.digest(dest, offset, 16);
                break;
            default:
                md4.update(md4.digest());
                md4.digest(dest, offset, 16);
                break;
            }
        } catch (Exception e) {
            throw new SmbException("", e);
        }
    }

    /**
     * Compares two <tt>NtlmPasswordAuthentication</tt> objects for
     * equality. Two <tt>NtlmPasswordAuthentication</tt> objects are equal if
     * their caseless domain and username fields are equal and either both hashes
     * are external and they are equal or both internally supplied passwords are equal.
     * If one <tt>NtlmPasswordAuthentication</tt> object has external hashes
     * (meaning negotiated via NTLM HTTP Authentication) and the other does not
     * they will not be equal. This is technically not correct however the server
     * 8 byte challenge would be required to compute and compare the password
     * hashes but that is not available with this method.
     */
    public boolean equals( Object obj ) {
        if( obj instanceof NtlmPasswordAuthentication ) {
            NtlmPasswordAuthentication ntlm = (NtlmPasswordAuthentication)obj;
            if( ntlm.domain.toUpperCase().equals( domain.toUpperCase() ) &&
                        ntlm.username.toUpperCase().equals( username.toUpperCase() )) {
                if( hashesExternal && ntlm.hashesExternal ) {
                    return Arrays.equals( ansiHash, ntlm.ansiHash ) &&
                                Arrays.equals( unicodeHash, ntlm.unicodeHash );
                    /* This still isn't quite right. If one npa object does not have external
                     * hashes and the other does then they will not be considered equal even
                     * though they may be.
                     */
                } else if( !hashesExternal && password.equals( ntlm.password )) {
                    return true;
                }
            }
        }
        return false;
    }

    /**
     * Return the upcased username hash code.
     */
    public int hashCode() {
        return getName().toUpperCase().hashCode();
    }

    /**
     * Return the domain and username in the format:
     * <tt>domain\\username</tt>. This is equivalent to <tt>getName()</tt>.
     */
    public String toString() {
        return getName();
    }

    // Decodes %XX percent-escapes in the SMB URL userinfo component.
    static String unescape( String str ) throws NumberFormatException, UnsupportedEncodingException {
        char ch;
        int i, j, state, len;
        char[] out;
        byte[] b = new byte[1];

        if( str == null ) {
            return null;
        }

        len = str.length();
        out = new char[len];
        state = 0;
        for( i = j = 0; i < len; i++ ) {
            switch( state ) {
                case 0:
                    ch = str.charAt( i );
                    if( ch == '%' ) {
                        state = 1;
                    } else {
                        out[j++] = ch;
                    }
                    break;
                case 1:
                    /* Get ASCII hex value and convert to platform dependant
                     * encoding like EBCDIC perhaps
                     */
                    b[0] = (byte)(Integer.parseInt( str.substring( i, i + 2 ), 16 ) & 0xFF);
                    out[j++] = (new String( b, 0, 1, "ASCII" )).charAt( 0 );
                    i++;
                    state = 0;
            }
        }

        return new String( out, 0, j );
    }
}
lgpl-2.1
Tybion/community-edition
projects/repository/source/java/org/alfresco/repo/module/AbstractModuleComponent.java
11020
/* * Copyright (C) 2005-2013 Alfresco Software Limited. * * This file is part of Alfresco * * Alfresco is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Alfresco is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with Alfresco. If not, see <http://www.gnu.org/licenses/>. */ package org.alfresco.repo.module; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.alfresco.error.AlfrescoRuntimeException; import org.alfresco.repo.security.authentication.AuthenticationComponent; import org.alfresco.repo.tenant.TenantAdminService; import org.alfresco.service.ServiceRegistry; import org.alfresco.service.cmr.module.ModuleService; import org.alfresco.util.EqualsHelper; import org.alfresco.util.PropertyCheck; import org.springframework.beans.factory.BeanNameAware; import org.springframework.extensions.surf.util.I18NUtil; /** * Implementation of a {@link org.alfresco.repo.module.ModuleComponent} to provide * the basic necessities. 
 *
 * @see #executeInternal()
 *
 * @author Roy Wetherall
 * @author Derek Hulley
 * @since 2.0
 */
public abstract class AbstractModuleComponent implements ModuleComponent, BeanNameAware
{
    private static final String ERR_ALREADY_EXECUTED = "module.err.already_executed";
    private static final String ERR_EXECUTION_FAILED = "module.err.execution_failed";

    // Supporting components
    protected ServiceRegistry serviceRegistry;
    protected AuthenticationComponent authenticationComponent;
    protected ModuleService moduleService;
    private TenantAdminService tenantAdminService;

    private String moduleId;
    private String name;
    private String description;
    private ModuleVersionNumber sinceVersion;
    private ModuleVersionNumber appliesFromVersion;
    private ModuleVersionNumber appliesToVersion;
    private List<ModuleComponent> dependsOn;
    /** Defaults to <tt>true</tt> */
    private boolean executeOnceOnly;
    // Per-tenant execution flag, keyed by tenant domain (see execute()).
    private Map<String, Boolean> executed;

    public AbstractModuleComponent()
    {
        sinceVersion = ModuleVersionNumber.VERSION_ZERO;
        appliesFromVersion = ModuleVersionNumber.VERSION_ZERO;
        appliesToVersion = ModuleVersionNumber.VERSION_BIG;
        dependsOn = new ArrayList<ModuleComponent>(0);
        executeOnceOnly = true;
        executed = new HashMap<String, Boolean>(1);
    }

    /**
     * Checks for the presence of all generally-required properties.
     */
    protected void checkProperties()
    {
        PropertyCheck.mandatory(this, "serviceRegistry", serviceRegistry);
        PropertyCheck.mandatory(this, "authenticationComponent", authenticationComponent);
        PropertyCheck.mandatory(this, "moduleId", moduleId);
        PropertyCheck.mandatory(this, "name", name);
        PropertyCheck.mandatory(this, "sinceVersion", sinceVersion);
        PropertyCheck.mandatory(this, "appliesFromVersion", appliesFromVersion);
        PropertyCheck.mandatory(this, "appliesToVersion", appliesToVersion);
    }

    /**
     * @see #getModuleId()
     * @see #getName()
     */
    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder(128);
        sb.append("ModuleComponent")
          .append("[ module=").append(moduleId)
          .append(", name=").append(name)
          .append(", since=").append(sinceVersion)
          .append(", appliesFrom=").append(appliesFromVersion)
          .append(", appliesTo=").append(appliesToVersion)
          .append(", onceOnly=").append(executeOnceOnly)
          .append("]");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj)
        {
            return true;
        }
        // Equality is defined against the ModuleComponent interface, so any
        // implementation with the same moduleId and name compares equal.
        if (false == obj instanceof ModuleComponent)
        {
            return false;
        }
        ModuleComponent that = (ModuleComponent) obj;
        return (EqualsHelper.nullSafeEquals(this.moduleId, that.getModuleId()) &&
                EqualsHelper.nullSafeEquals(this.name, that.getName()));
    }

    @Override
    public int hashCode()
    {
        return moduleId.hashCode() + 17 * name.hashCode();
    }

    public void setAuthenticationComponent(AuthenticationComponent authenticationComponent)
    {
        this.authenticationComponent = authenticationComponent;
    }

    /**
     * Set the module service to register with. If not set, the component will not be
     * automatically started.
     *
     * @param moduleService the service to register against. This is optional.
     */
    public void setModuleService(ModuleService moduleService)
    {
        this.moduleService = moduleService;
    }

    public void setServiceRegistry(ServiceRegistry serviceRegistry)
    {
        this.serviceRegistry = serviceRegistry;
    }

    public void setTenantAdminService(TenantAdminService tenantAdminService)
    {
        this.tenantAdminService = tenantAdminService;
    }

    /**
     * {@inheritDoc}
     */
    public String getModuleId()
    {
        return moduleId;
    }

    /**
     * @param moduleId the globally unique module name.
     */
    public void setModuleId(String moduleId)
    {
        this.moduleId = moduleId;
    }

    /**
     * {@inheritDoc}
     */
    public String getName()
    {
        return name;
    }

    /**
     * Set the component name, which must be unique within the context of the
     * module. If this is not set, then the bean name will be used.
     *
     * @param name the name of the component within the module.
     *
     * @see #setBeanName(String)
     */
    public void setName(String name)
    {
        this.name = name;
    }

    /**
     * Convenience method that will set the name of the component to
     * match the bean name, unless the {@link #setName(String) name} has
     * been explicitly set.
     */
    public void setBeanName(String name)
    {
        setName(name);
    }

    /**
     * {@inheritDoc}
     */
    public String getDescription()
    {
        return description;
    }

    /**
     * Set the component's description. This will automatically be I18N'ized, so it may just
     * be a resource bundle key.
     *
     * @param description a description of the component.
     */
    public void setDescription(String description)
    {
        this.description = description;
    }

    /**
     * {@inheritDoc}
     */
    public ModuleVersionNumber getSinceVersionNumber()
    {
        return sinceVersion;
    }

    /**
     * Set the version number for which this component was added.
     */
    public void setSinceVersion(String version)
    {
        this.sinceVersion = new ModuleVersionNumber(version);
    }

    /**
     * {@inheritDoc}
     */
    public ModuleVersionNumber getAppliesFromVersionNumber()
    {
        return appliesFromVersion;
    }

    /**
     * Set the minimum module version number to which this component applies.
     * Default <b>0.0</b>.
     */
    public void setAppliesFromVersion(String version)
    {
        this.appliesFromVersion = new ModuleVersionNumber(version);
    }

    /**
     * {@inheritDoc}
     */
    public ModuleVersionNumber getAppliesToVersionNumber()
    {
        return appliesToVersion;
    }

    /**
     * Set the minimum module version number to which this component applies.
     * Default <b>999.0</b>.
     */
    public void setAppliesToVersion(String version)
    {
        this.appliesToVersion = new ModuleVersionNumber(version);
    }

    /**
     * {@inheritDoc}
     */
    public List<ModuleComponent> getDependsOn()
    {
        return dependsOn;
    }

    /**
     * @param dependsOn a list of modules that must be executed before this one
     */
    public void setDependsOn(List<ModuleComponent> dependsOn)
    {
        this.dependsOn = dependsOn;
    }

    /**
     * {@inheritDoc}
     *
     * @return Returns <tt>true</tt> always. Override as required.
     */
    public boolean isExecuteOnceOnly()
    {
        return executeOnceOnly;
    }

    /**
     * @param executeOnceOnly <tt>true</tt> to force execution of this component with
     * each startup or <tt>false</tt> if it must only be executed once.
     */
    public void setExecuteOnceOnly(boolean executeOnceOnly)
    {
        this.executeOnceOnly = executeOnceOnly;
    }

    public void init()
    {
        // Ensure that the description gets I18N'ized
        description = I18NUtil.getMessage(description);
        // Register the component with the service
        if (moduleService != null)      // Allows optional registration of the component
        {
            moduleService.registerComponent(this);
        }
    }

    /**
     * The method that performs the actual work. For the most part, derived classes will
     * only have to override this method to be fully functional.
     *
     * @throws Throwable any problems, just throw them
     */
    protected abstract void executeInternal() throws Throwable;

    /**
     * {@inheritDoc}
     *
     * @see #executeInternal() the abstract method to be implemented by subclasses
     */
    public final synchronized void execute()
    {
        // ensure that this has not been executed already (per tenant)
        String tenantDomain = tenantAdminService.getCurrentUserDomain();
        if (!executed.containsKey(tenantDomain))
        {
            executed.put(tenantDomain, false);
        }
        if (executed.get(tenantDomain))
        {
            throw AlfrescoRuntimeException.create(ERR_ALREADY_EXECUTED, moduleId, name);
        }
        // Ensure properties have been set
        checkProperties();
        // Execute
        try
        {
            executeInternal();
        }
        catch (Throwable e)
        {
            throw AlfrescoRuntimeException.create(e, ERR_EXECUTION_FAILED, name, e.getMessage());
        }
        finally
        {
            // There are no second chances: marked executed even on failure.
            executed.put(tenantDomain, true);
        }
    }

    // from Thor
    public final synchronized void shutdown()
    {
        // Reset the executed flag for the current tenant so execute() may run again.
        String tenantDomain = tenantAdminService.getCurrentUserDomain();
        executed.put(tenantDomain, false);
    }
}
lgpl-3.0
SeaCloudsEU/SoftCare-Case-Study
softcare-gui/src/eu/ehealth/ws_client/storagecomponent/UpdateSystemParameterResponse.java
1583
package eu.ehealth.ws_client.storagecomponent; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlElement; import javax.xml.bind.annotation.XmlRootElement; import javax.xml.bind.annotation.XmlType; import eu.ehealth.ws_client.xsd.OperationResult; /** * <p>Clase Java para anonymous complex type. * * <p>El siguiente fragmento de esquema especifica el contenido que se espera que haya en esta clase. * * <pre> * &lt;complexType> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="out" type="{http://aladdin-project.eu/xsd}OperationResult"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "", propOrder = { "out" }) @XmlRootElement(name = "UpdateSystemParameterResponse") public class UpdateSystemParameterResponse { @XmlElement(required = true) protected OperationResult out; /** * Obtiene el valor de la propiedad out. * * @return * possible object is * {@link OperationResult } * */ public OperationResult getOut() { return out; } /** * Define el valor de la propiedad out. * * @param value * allowed object is * {@link OperationResult } * */ public void setOut(OperationResult value) { this.out = value; } }
apache-2.0
andreagenso/java2scala
test/J2s/java/openjdk-6-src-b27/jdk/src/share/classes/java/awt/peer/LabelPeer.java
1615
/* * Copyright (c) 1995, 2007, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.awt.peer; /** * The peer interfaces are intended only for use in porting * the AWT. They are not intended for use by application * developers, and developers should not implement peers * nor invoke any of the peer methods directly on the peer * instances. */ public interface LabelPeer extends ComponentPeer { void setText(String label); void setAlignment(int alignment); }
apache-2.0
oujesky/closure-templates
java/tests/com/google/template/soy/soytree/MsgSubstUnitBaseVarNameUtilsTest.java
7432
/* * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.template.soy.soytree; import static com.google.common.truth.Truth.assertThat; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import com.google.template.soy.FormattingErrorReporter; import com.google.template.soy.base.SourceLocation; import com.google.template.soy.error.ErrorReporter; import com.google.template.soy.error.ExplodingErrorReporter; import com.google.template.soy.exprparse.ExpressionParser; import com.google.template.soy.exprtree.ExprNode; import junit.framework.TestCase; import java.util.List; /** * Unit tests for {@link MsgSubstUnitBaseVarNameUtils}. 
* */ public final class MsgSubstUnitBaseVarNameUtilsTest extends TestCase { private static final ErrorReporter FAIL = ExplodingErrorReporter.get(); public void testGenBaseNames() { String exprText = "$aaBb"; assertNaiveBaseNameForExpr("AA_BB", exprText); assertShortestBaseNameForExpr("AA_BB", exprText); assertCandidateBaseNamesForExpr(ImmutableList.of("AA_BB"), exprText); exprText = "$aaBb.ccDd"; assertNaiveBaseNameForExpr("CC_DD", exprText); assertShortestBaseNameForExpr("CC_DD", exprText); assertCandidateBaseNamesForExpr(ImmutableList.of("CC_DD", "AA_BB_CC_DD"), exprText); exprText = "$ij.aaBb.ccDd"; assertNaiveBaseNameForExpr("CC_DD", exprText); assertShortestBaseNameForExpr("CC_DD", exprText); assertCandidateBaseNamesForExpr(ImmutableList.of("CC_DD", "AA_BB_CC_DD"), exprText); exprText = "$aaBb?.ccDd0"; assertNaiveBaseNameForExpr("CC_DD_0", exprText); assertShortestBaseNameForExpr("CC_DD_0", exprText); assertCandidateBaseNamesForExpr(ImmutableList.of("CC_DD_0", "AA_BB_CC_DD_0"), exprText); exprText = "aa_._bb._CC_DD_"; assertNaiveBaseNameForExpr("CC_DD", exprText); assertShortestBaseNameForExpr("CC_DD", exprText); assertCandidateBaseNamesForExpr( ImmutableList.of("CC_DD", "BB_CC_DD", "AA_BB_CC_DD"), exprText); exprText = "length($aaBb)"; assertNaiveBaseNameForExpr("FALLBACK", exprText); assertShortestBaseNameForExpr("FALLBACK", exprText); assertCandidateBaseNamesForExpr(ImmutableList.<String>of(), exprText); exprText = "$aaBb + 1"; assertNaiveBaseNameForExpr("FALLBACK", exprText); assertShortestBaseNameForExpr("FALLBACK", exprText); assertCandidateBaseNamesForExpr(ImmutableList.<String>of(), exprText); exprText = "$aaBb0.1.2.ccDd.5"; assertNaiveBaseNameForExpr("FALLBACK", exprText); assertShortestBaseNameForExpr("CC_DD_5", exprText); assertCandidateBaseNamesForExpr(ImmutableList.of("CC_DD_5", "AA_BB_0_1_2_CC_DD_5"), exprText); exprText = "$aa0_0bb[1][2]?.cc_dd.ee?[5]"; assertNaiveBaseNameForExpr("FALLBACK", exprText); assertShortestBaseNameForExpr("EE_5", 
exprText); assertCandidateBaseNamesForExpr( ImmutableList.of("EE_5", "CC_DD_EE_5", "AA_0_0_BB_1_2_CC_DD_EE_5"), exprText); exprText = "$aa0_0bb['foo'][2]?.cc_dd.ee?[5]"; assertNaiveBaseNameForExpr("FALLBACK", exprText); assertShortestBaseNameForExpr("EE_5", exprText); assertCandidateBaseNamesForExpr( ImmutableList.of("EE_5", "CC_DD_EE_5"), exprText); } private void assertNaiveBaseNameForExpr(String expected, String exprText) { ExprNode exprRoot = new ExpressionParser( exprText, SourceLocation.UNKNOWN, FAIL) .parseExpression(); String actual = MsgSubstUnitBaseVarNameUtils.genNaiveBaseNameForExpr(exprRoot, "FALLBACK"); MsgNodeTest.assertEquals(expected, actual); } private void assertShortestBaseNameForExpr(String expected, String exprText) { ExprNode exprRoot = new ExpressionParser( exprText, SourceLocation.UNKNOWN, FAIL) .parseExpression(); String actual = MsgSubstUnitBaseVarNameUtils.genShortestBaseNameForExpr( exprRoot, "FALLBACK"); MsgNodeTest.assertEquals(expected, actual); } private void assertCandidateBaseNamesForExpr(List<String> expected, String exprText) { ExprNode exprRoot = new ExpressionParser( exprText, SourceLocation.UNKNOWN, FAIL) .parseExpression(); List<String> actual = MsgSubstUnitBaseVarNameUtils.genCandidateBaseNamesForExpr( exprRoot); MsgNodeTest.assertEquals(expected, actual); } public void testGenNoncollidingBaseNames() { assertNoncollidingBaseNamesForExprs( ImmutableList.of("GENDER"), "$user.gender"); assertErrorMsgWhenGenNoncollidingBaseNamesForExprs( "Cannot generate noncolliding base names for vars. " + "Colliding expressions: '$gender' and '$ij.gender'.", "$gender, $ij.gender"); assertErrorMsgWhenGenNoncollidingBaseNamesForExprs( "Cannot generate noncolliding base names for vars. 
" + "Colliding expressions: '$ij.gender' and '$userGender'.", "$userGender, $ij.gender"); assertNoncollidingBaseNamesForExprs( ImmutableList.of("USERGENDER", "GENDER"), "$usergender, $ij.gender"); assertNoncollidingBaseNamesForExprs( ImmutableList.of("USER_GENDER", "TARGET_GENDER"), "$userGender, $target.gender"); assertNoncollidingBaseNamesForExprs( ImmutableList.of("USER_GENDER", "TARGET_GENDER"), "$user.gender, $target.gender"); assertNoncollidingBaseNamesForExprs( ImmutableList.of("USER_GENDER", "TARGET_0_GENDER", "TARGET_1_GENDER"), "$ij.userGender, $target.0?.gender, $target.1?.gender"); assertNoncollidingBaseNamesForExprs( ImmutableList.of("USER_GENDER", "TARGET_0_GENDER", "TARGET_1_GENDER"), "$ij.user.gender, $target[0]?.gender, $target[1]?.gender"); assertNoncollidingBaseNamesForExprs( ImmutableList.of("OWNER_GENDER", "ACTOR_GENDER", "TARGET_GENDER"), "$owner.gender, $actor.gender, $target.gender"); } private void assertNoncollidingBaseNamesForExprs(List<String> expected, String exprListText) { List<ExprNode> exprRoots = new ExpressionParser(exprListText, SourceLocation.UNKNOWN, FAIL) .parseExpressionList(); List<String> actual = MsgSubstUnitBaseVarNameUtils.genNoncollidingBaseNamesForExprs(exprRoots, "FALLBACK", FAIL); MsgNodeTest.assertEquals(expected, actual); } private void assertErrorMsgWhenGenNoncollidingBaseNamesForExprs( String expectedErrorMsg, String exprListText) { List<ExprNode> exprRoots = new ExpressionParser(exprListText, SourceLocation.UNKNOWN, FAIL) .parseExpressionList(); FormattingErrorReporter errorReporter = new FormattingErrorReporter(); MsgSubstUnitBaseVarNameUtils.genNoncollidingBaseNamesForExprs( exprRoots, "FALLBACK", errorReporter); assertThat(errorReporter.getErrorMessages()).hasSize(1); assertThat(Iterables.getOnlyElement(errorReporter.getErrorMessages())) .contains(expectedErrorMsg); } }
apache-2.0
hgschmie/presto
presto-main/src/test/java/io/prestosql/sql/planner/iterative/rule/TestPruneJoinColumns.java
5504
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.sql.planner.iterative.rule; import com.google.common.base.Predicates; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import io.prestosql.sql.planner.Symbol; import io.prestosql.sql.planner.assertions.PlanMatchPattern; import io.prestosql.sql.planner.iterative.rule.test.BaseRuleTest; import io.prestosql.sql.planner.iterative.rule.test.PlanBuilder; import io.prestosql.sql.planner.plan.Assignments; import io.prestosql.sql.planner.plan.JoinNode; import io.prestosql.sql.planner.plan.PlanNode; import org.testng.annotations.Test; import java.util.List; import java.util.Optional; import java.util.function.Predicate; import static com.google.common.collect.ImmutableList.toImmutableList; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.equiJoinClause; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.join; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.strictProject; import static io.prestosql.sql.planner.assertions.PlanMatchPattern.values; public class TestPruneJoinColumns extends BaseRuleTest { @Test public void testNotAllOutputsReferenced() { tester().assertThat(new PruneJoinColumns()) .on(p -> buildProjectedJoin(p, symbol -> symbol.getName().equals("rightValue"))) .matches( strictProject( ImmutableMap.of("rightValue", PlanMatchPattern.expression("rightValue")), join( JoinNode.Type.INNER, 
ImmutableList.of(equiJoinClause("leftKey", "rightKey")), Optional.empty(), values(ImmutableList.of("leftKey", "leftValue")), values(ImmutableList.of("rightKey", "rightValue"))) .withExactOutputs("rightValue"))); } @Test public void testAllInputsReferenced() { tester().assertThat(new PruneJoinColumns()) .on(p -> buildProjectedJoin(p, Predicates.alwaysTrue())) .doesNotFire(); } @Test public void testCrossJoin() { tester().assertThat(new PruneJoinColumns()) .on(p -> { Symbol leftValue = p.symbol("leftValue"); Symbol rightValue = p.symbol("rightValue"); return p.project( Assignments.of(), p.join( JoinNode.Type.INNER, p.values(leftValue), p.values(rightValue), ImmutableList.of(), ImmutableList.of(leftValue), ImmutableList.of(rightValue), Optional.empty(), Optional.empty(), Optional.empty())); }) .matches( strictProject( ImmutableMap.of(), join( JoinNode.Type.INNER, ImmutableList.of(), Optional.empty(), values(ImmutableList.of("leftValue")), values(ImmutableList.of("rightValue"))) .withExactOutputs())); } private static PlanNode buildProjectedJoin(PlanBuilder p, Predicate<Symbol> projectionFilter) { Symbol leftKey = p.symbol("leftKey"); Symbol leftValue = p.symbol("leftValue"); Symbol rightKey = p.symbol("rightKey"); Symbol rightValue = p.symbol("rightValue"); List<Symbol> leftOutputs = ImmutableList.of(leftKey, leftValue); List<Symbol> rightOutputs = ImmutableList.of(rightKey, rightValue); return p.project( Assignments.identity( ImmutableList.of(leftKey, leftValue, rightKey, rightValue).stream() .filter(projectionFilter) .collect(toImmutableList())), p.join( JoinNode.Type.INNER, p.values(leftKey, leftValue), p.values(rightKey, rightValue), ImmutableList.of(new JoinNode.EquiJoinClause(leftKey, rightKey)), leftOutputs, rightOutputs, Optional.empty(), Optional.empty(), Optional.empty())); } }
apache-2.0
StrategyObject/fop
src/java/org/apache/fop/afp/modca/IMImageObject.java
4580
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.apache.fop.afp.modca; import java.io.IOException; import java.io.OutputStream; import org.apache.fop.afp.ioca.ImageCellPosition; import org.apache.fop.afp.ioca.ImageInputDescriptor; import org.apache.fop.afp.ioca.ImageOutputControl; import org.apache.fop.afp.ioca.ImageRasterData; /** * An IM image data object specifies the contents of a raster image and * its placement on a page, overlay, or page segment. An IM image can be * either simple or complex. A simple image is composed of one or more Image * Raster Data (IRD) structured fields that define the raster pattern for the * entire image. A complex image is divided into regions called image cells. * Each image cell is composed of one or more IRD structured fields that define * the raster pattern for the image cell, and one Image Cell Position (ICP) * structured field that defines the position of the image cell relative to * the origin of the entire image. Each ICP also specifies the size of the * image cell and a fill rectangle into which the cell is replicated. 
*/ public class IMImageObject extends AbstractNamedAFPObject { /** * The image output control */ private ImageOutputControl imageOutputControl; /** * The image input descriptor */ private ImageInputDescriptor imageInputDescriptor; /** * The image cell position */ private ImageCellPosition imageCellPosition; /** * The image rastor data */ private ImageRasterData imageRasterData; /** * Constructor for the image object with the specified name, * the name must be a fixed length of eight characters. * * @param name The name of the image. */ public IMImageObject(String name) { super(name); } /** * Sets the ImageOutputControl. * * @param imageOutputControl The imageOutputControl to set */ public void setImageOutputControl(ImageOutputControl imageOutputControl) { this.imageOutputControl = imageOutputControl; } /** * Sets the ImageCellPosition. * * @param imageCellPosition The imageCellPosition to set */ public void setImageCellPosition(ImageCellPosition imageCellPosition) { this.imageCellPosition = imageCellPosition; } /** * Sets the ImageInputDescriptor. * * @param imageInputDescriptor The imageInputDescriptor to set */ public void setImageInputDescriptor(ImageInputDescriptor imageInputDescriptor) { this.imageInputDescriptor = imageInputDescriptor; } /** * Sets the ImageRastorData. 
* * @param imageRasterData The imageRasterData to set */ public void setImageRasterData(ImageRasterData imageRasterData) { this.imageRasterData = imageRasterData; } /** {@inheritDoc} */ protected void writeContent(OutputStream os) throws IOException { super.writeContent(os); if (imageOutputControl != null) { imageOutputControl.writeToStream(os); } if (imageInputDescriptor != null) { imageInputDescriptor.writeToStream(os); } if (imageCellPosition != null) { imageCellPosition.writeToStream(os); } if (imageRasterData != null) { imageRasterData.writeToStream(os); } } /** {@inheritDoc} */ protected void writeStart(OutputStream os) throws IOException { byte[] data = new byte[17]; copySF(data, Type.BEGIN, Category.IM_IMAGE); os.write(data); } /** {@inheritDoc} */ protected void writeEnd(OutputStream os) throws IOException { byte[] data = new byte[17]; copySF(data, Type.END, Category.IM_IMAGE); os.write(data); } }
apache-2.0
wildwind/incubator-rocketmq
test/src/test/java/org/apache/rocketmq/test/client/producer/exception/msg/MessageUserPropIT.java
3558
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rocketmq.test.client.producer.exception.msg; import org.apache.log4j.Logger; import org.apache.rocketmq.common.message.Message; import org.apache.rocketmq.test.base.BaseConf; import org.apache.rocketmq.test.client.consumer.balance.NormalMsgStaticBalanceIT; import org.apache.rocketmq.test.client.rmq.RMQNormalConsumer; import org.apache.rocketmq.test.client.rmq.RMQNormalProducer; import org.apache.rocketmq.test.factory.MessageFactory; import org.apache.rocketmq.test.listener.rmq.concurrent.RMQNormalListner; import org.junit.After; import org.junit.Before; import org.junit.Test; import static com.google.common.truth.Truth.assertThat; public class MessageUserPropIT extends BaseConf { private static Logger logger = Logger.getLogger(NormalMsgStaticBalanceIT.class); private RMQNormalProducer producer = null; private String topic = null; @Before public void setUp() { topic = initTopic(); logger.info(String.format("use topic: %s !", topic)); producer = getProducer(nsAddr, topic); } @After public void tearDown() { super.shutDown(); } /** * @since version3.4.6 */ @Test public void testSendEnglishUserProp() { Message msg = MessageFactory.getRandomMessage(topic); String msgKey = "jueyinKey"; 
String msgValue = "jueyinValue"; msg.putUserProperty(msgKey, msgValue); RMQNormalConsumer consumer = getConsumer(nsAddr, topic, "*", new RMQNormalListner()); producer.send(msg, null); assertThat(producer.getAllMsgBody().size()).isEqualTo(1); consumer.getListner().waitForMessageConsume(producer.getAllMsgBody(), consumeTime); Message sendMsg = (Message) producer.getFirstMsg(); Message recvMsg = (Message) consumer.getListner().getFirstMsg(); assertThat(recvMsg.getUserProperty(msgKey)).isEqualTo(sendMsg.getUserProperty(msgKey)); } /** * @since version3.4.6 */ @Test public void testSendChinaUserProp() { Message msg = MessageFactory.getRandomMessage(topic); String msgKey = "jueyinKey"; String msgValue = "jueyinzhi"; msg.putUserProperty(msgKey, msgValue); RMQNormalConsumer consumer = getConsumer(nsAddr, topic, "*", new RMQNormalListner()); producer.send(msg, null); assertThat(producer.getAllMsgBody().size()).isEqualTo(1); consumer.getListner().waitForMessageConsume(producer.getAllMsgBody(), consumeTime); Message sendMsg = (Message) producer.getFirstMsg(); Message recvMsg = (Message) consumer.getListner().getFirstMsg(); assertThat(recvMsg.getUserProperty(msgKey)).isEqualTo(sendMsg.getUserProperty(msgKey)); } }
apache-2.0
drmaas/resilience4j
resilience4j-spring/src/test/java/io/github/resilience4j/bulkhead/configure/ReactorBulkheadAspectExtTest.java
2068
/* * Copyright 2019 Mahmoud Romeh * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.github.resilience4j.bulkhead.configure; import io.github.resilience4j.bulkhead.Bulkhead; import org.aspectj.lang.ProceedingJoinPoint; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.InjectMocks; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.when; /** * aspect unit test */ @RunWith(MockitoJUnitRunner.class) public class ReactorBulkheadAspectExtTest { @Mock ProceedingJoinPoint proceedingJoinPoint; @InjectMocks ReactorBulkheadAspectExt reactorBulkheadAspectExt; @Test public void testCheckTypes() { assertThat(reactorBulkheadAspectExt.canHandleReturnType(Mono.class)).isTrue(); assertThat(reactorBulkheadAspectExt.canHandleReturnType(Flux.class)).isTrue(); } @Test public void testReactorTypes() throws Throwable { Bulkhead bulkhead = Bulkhead.ofDefaults("test"); when(proceedingJoinPoint.proceed()).thenReturn(Mono.just("Test")); assertThat(reactorBulkheadAspectExt.handle(proceedingJoinPoint, bulkhead, "testMethod")) .isNotNull(); when(proceedingJoinPoint.proceed()).thenReturn(Flux.just("Test")); assertThat(reactorBulkheadAspectExt.handle(proceedingJoinPoint, bulkhead, "testMethod")) .isNotNull(); } }
apache-2.0
rpiotti/Web-Karma
karma-common/src/main/java/edu/isi/karma/rep/model/ClassAtom.java
2147
/******************************************************************************* * Copyright 2012 University of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * This code was developed by the Information Integration Group as part * of the Karma project at the Information Sciences Institute of the * University of Southern California. For more information, publications, * and related projects, please see: http://www.isi.edu/integration ******************************************************************************/ package edu.isi.karma.rep.model; import edu.isi.karma.rep.alignment.Label; /** * A class atom consists of an OWL named class or class expression and a single argument representing an OWL individual. 
* @author mohsen * */ public class ClassAtom extends Atom { private Label classPredicate; private Argument argument1; public ClassAtom(Label classPredicate, Argument argument1) { this.classPredicate = classPredicate; this.argument1 = argument1; } public Label getClassPredicate() { return classPredicate; } public Argument getArgument1() { return argument1; } public void setClassPredicate(Label classPredicate) { this.classPredicate = classPredicate; } public void setArgument1(Argument argument1) { this.argument1 = argument1; } public void print() { System.out.println("class predicate uri: " + classPredicate.getUri()); System.out.println("class predicate ns: " + classPredicate.getNs()); System.out.println("class predicate prefix: " + classPredicate.getPrefix()); System.out.println("argument1: " + argument1.getId()); } }
apache-2.0
apache/incubator-myriad
myriad-scheduler/src/main/java/org/apache/myriad/scheduler/MyriadScheduler.java
6867
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.myriad.scheduler; import java.util.List; import javax.inject.Inject; import org.apache.mesos.Protos; import org.apache.mesos.Scheduler; import org.apache.mesos.SchedulerDriver; import org.apache.myriad.configuration.MyriadConfiguration; import org.apache.myriad.scheduler.event.DisconnectedEvent; import org.apache.myriad.scheduler.event.ErrorEvent; import org.apache.myriad.scheduler.event.ExecutorLostEvent; import org.apache.myriad.scheduler.event.FrameworkMessageEvent; import org.apache.myriad.scheduler.event.OfferRescindedEvent; import org.apache.myriad.scheduler.event.ReRegisteredEvent; import org.apache.myriad.scheduler.event.RegisteredEvent; import org.apache.myriad.scheduler.event.ResourceOffersEvent; import org.apache.myriad.scheduler.event.SlaveLostEvent; import org.apache.myriad.scheduler.event.StatusUpdateEvent; import com.lmax.disruptor.EventTranslator; /** * The Myriad implementation of the Mesos Scheduler callback interface, where the method implementations * publish Myriad framework events corresponding to the Mesos callbacks. 
*/
public class MyriadScheduler implements Scheduler {

  // Per-event-type Disruptor ring buffers; every Mesos scheduler callback below is
  // converted into an event object and published onto the matching disruptor.
  private org.apache.myriad.DisruptorManager disruptorManager;

  /**
   * Injected constructor.
   *
   * @param cfg              Myriad configuration (not read here; kept for injection wiring)
   * @param disruptorManager supplier of the per-event Disruptor instances used below
   */
  @Inject
  public MyriadScheduler(final MyriadConfiguration cfg, final org.apache.myriad.DisruptorManager disruptorManager) {
    this.disruptorManager = disruptorManager;
  }

  /**
   * Publishes a RegisteredEvent carrying the driver, framework id and master info.
   */
  @Override
  public void registered(final SchedulerDriver driver, final Protos.FrameworkID frameworkId, final Protos.MasterInfo masterInfo) {
    disruptorManager.getRegisteredEventDisruptor().publishEvent(new EventTranslator<RegisteredEvent>() {
      @Override
      public void translateTo(RegisteredEvent event, long sequence) {
        event.setDriver(driver);
        event.setFrameworkId(frameworkId);
        event.setMasterInfo(masterInfo);
      }
    });
  }

  /**
   * Publishes a ReRegisteredEvent.
   */
  @Override
  public void reregistered(final SchedulerDriver driver, final Protos.MasterInfo masterInfo) {
    disruptorManager.getReRegisteredEventDisruptor().publishEvent(new EventTranslator<ReRegisteredEvent>() {
      @Override
      public void translateTo(ReRegisteredEvent event, long sequence) {
        event.setDriver(driver);
        event.setMasterInfo(masterInfo);
      }
    });
  }

  /**
   * Publishes a ResourceOffersEvent with the offers received from the master.
   */
  @Override
  public void resourceOffers(final SchedulerDriver driver, final List<Protos.Offer> offers) {
    disruptorManager.getResourceOffersEventDisruptor().publishEvent(new EventTranslator<ResourceOffersEvent>() {
      @Override
      public void translateTo(ResourceOffersEvent event, long sequence) {
        event.setDriver(driver);
        event.setOffers(offers);
      }
    });
  }

  /**
   * Publishes an OfferRescindedEvent.
   */
  @Override
  public void offerRescinded(final SchedulerDriver driver, final Protos.OfferID offerId) {
    disruptorManager.getOfferRescindedEventDisruptor().publishEvent(new EventTranslator<OfferRescindedEvent>() {
      @Override
      public void translateTo(OfferRescindedEvent event, long sequence) {
        event.setDriver(driver);
        event.setOfferId(offerId);
      }
    });
  }

  /**
   * Publishes a StatusUpdateEvent for a task status change.
   */
  @Override
  public void statusUpdate(final SchedulerDriver driver, final Protos.TaskStatus status) {
    disruptorManager.getStatusUpdateEventDisruptor().publishEvent(new EventTranslator<StatusUpdateEvent>() {
      @Override
      public void translateTo(StatusUpdateEvent event, long sequence) {
        event.setDriver(driver);
        event.setStatus(status);
      }
    });
  }

  /**
   * Publishes a FrameworkMessageEvent carrying the raw executor message bytes.
   */
  @Override
  public void frameworkMessage(final SchedulerDriver driver, final Protos.ExecutorID executorId, final Protos.SlaveID slaveId,
                               final byte[] bytes) {
    disruptorManager.getFrameworkMessageEventDisruptor().publishEvent(new EventTranslator<FrameworkMessageEvent>() {
      @Override
      public void translateTo(FrameworkMessageEvent event, long sequence) {
        event.setDriver(driver);
        event.setBytes(bytes);
        event.setExecutorId(executorId);
        event.setSlaveId(slaveId);
      }
    });
  }

  /**
   * Publishes a DisconnectedEvent.
   */
  @Override
  public void disconnected(final SchedulerDriver driver) {
    disruptorManager.getDisconnectedEventDisruptor().publishEvent(new EventTranslator<DisconnectedEvent>() {
      @Override
      public void translateTo(DisconnectedEvent event, long sequence) {
        event.setDriver(driver);
      }
    });
  }

  /**
   * Publishes a SlaveLostEvent.
   */
  @Override
  public void slaveLost(final SchedulerDriver driver, final Protos.SlaveID slaveId) {
    disruptorManager.getSlaveLostEventDisruptor().publishEvent(new EventTranslator<SlaveLostEvent>() {
      @Override
      public void translateTo(SlaveLostEvent event, long sequence) {
        event.setDriver(driver);
        event.setSlaveId(slaveId);
      }
    });
  }

  /**
   * Publishes an ExecutorLostEvent including the executor's exit status.
   */
  @Override
  public void executorLost(final SchedulerDriver driver, final Protos.ExecutorID executorId, final Protos.SlaveID slaveId,
                           final int exitStatus) {
    disruptorManager.getExecutorLostEventDisruptor().publishEvent(new EventTranslator<ExecutorLostEvent>() {
      @Override
      public void translateTo(ExecutorLostEvent event, long sequence) {
        event.setDriver(driver);
        event.setExecutorId(executorId);
        event.setSlaveId(slaveId);
        event.setExitStatus(exitStatus);
      }
    });
  }

  /**
   * Publishes an ErrorEvent with the error message reported by the driver.
   */
  @Override
  public void error(final SchedulerDriver driver, final String message) {
    disruptorManager.getErrorEventDisruptor().publishEvent(new EventTranslator<ErrorEvent>() {
      @Override
      public void translateTo(ErrorEvent event, long sequence) {
        event.setDriver(driver);
        event.setMessage(message);
      }
    });
  }
}
apache-2.0
kishorvpatil/incubator-storm
storm-client/src/jvm/org/apache/storm/utils/StormBoundedExponentialBackoffRetry.java
3581
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version
 * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */

package org.apache.storm.utils;

import java.util.Random;
import org.apache.storm.shade.org.apache.curator.retry.BoundedExponentialBackoffRetry;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class StormBoundedExponentialBackoffRetry extends BoundedExponentialBackoffRetry {
    private static final Logger LOG = LoggerFactory.getLogger(StormBoundedExponentialBackoffRetry.class);
    private final Random random = new Random();
    // Base sleep used once the linear phase starts: baseSleepTimeMs + 2^expRetriesThreshold.
    private final int linearBaseSleepMs;
    // Per-retry increment applied during the linear phase.
    private final int stepSize;
    // Retry count at which backoff switches from exponential to linear growth.
    private int expRetriesThreshold;

    /**
     * The class provides generic exponential-linear backoff retry strategy for storm. It calculates threshold for exponentially increasing
     * sleeptime for retries. Beyond this threshold, the sleeptime increase is linear.
     *
     * <p>Also adds jitter for exponential/linear retry. It guarantees `currSleepTimeMs >= prevSleepTimeMs` and `baseSleepTimeMs <=
     * currSleepTimeMs <= maxSleepTimeMs`
     */
    public StormBoundedExponentialBackoffRetry(int baseSleepTimeMs, int maxSleepTimeMs, int maxRetries) {
        super(baseSleepTimeMs, maxSleepTimeMs, maxRetries);
        // Find the largest threshold t such that 2^(t+1) still fits inside half of the
        // configured sleep range; exponential growth stops once it would overshoot.
        expRetriesThreshold = 1;
        while ((1 << (expRetriesThreshold + 1)) < ((maxSleepTimeMs - baseSleepTimeMs) / 2)) {
            expRetriesThreshold++;
        }
        LOG.debug("The baseSleepTimeMs [{}] the maxSleepTimeMs [{}] the maxRetries [{}]", baseSleepTimeMs, maxSleepTimeMs, maxRetries);
        if (baseSleepTimeMs > maxSleepTimeMs) {
            // Misconfiguration is only warned about, not rejected, to preserve old behavior.
            LOG.warn("Misconfiguration: the baseSleepTimeMs [" + baseSleepTimeMs + "] can't be greater than "
                     + "the maxSleepTimeMs [" + maxSleepTimeMs + "].");
        }
        if (maxRetries > 0 && maxRetries > expRetriesThreshold) {
            // Spread the remaining sleep range evenly over the retries left after the
            // exponential phase; never less than 1 ms per step.
            this.stepSize = Math.max(1, (maxSleepTimeMs - (1 << expRetriesThreshold)) / (maxRetries - expRetriesThreshold));
        } else {
            this.stepSize = 1;
        }
        this.linearBaseSleepMs = super.getBaseSleepTimeMs() + (1 << expRetriesThreshold);
    }

    /**
     * Returns the sleep time for the given retry.
     *
     * <p>Exponential phase (retryCount < threshold): base + 2^retryCount plus a random
     * jitter in [0, 2^retryCount). Linear phase: linearBaseSleepMs plus stepSize per
     * extra retry plus jitter in [0, stepSize), capped at maxSleepTimeMs.
     */
    @Override
    public long getSleepTimeMs(int retryCount, long elapsedTimeMs) {
        if (retryCount < expRetriesThreshold) {
            int exp = 1 << retryCount;
            int jitter = random.nextInt(exp);
            long sleepTimeMs = super.getBaseSleepTimeMs() + exp + jitter;
            LOG.debug("WILL SLEEP FOR {}ms (NOT MAX)", sleepTimeMs);
            return sleepTimeMs;
        } else {
            // stepSize >= 1 by construction, so nextInt never receives a non-positive bound.
            int stepJitter = random.nextInt(stepSize);
            long sleepTimeMs = Math.min(super.getMaxSleepTimeMs(),
                                        (linearBaseSleepMs + (stepSize * (retryCount - expRetriesThreshold)) + stepJitter));
            LOG.debug("WILL SLEEP FOR {}ms (MAX)", sleepTimeMs);
            return sleepTimeMs;
        }
    }
}
apache-2.0
YangliAtGitHub/HanLP
src/main/java/com/hankcs/hanlp/model/trigram/frequency/Probability.java
4118
/*
 * <summary></summary>
 * <author>hankcs</author>
 * <email>me@hankcs.com</email>
 * <create-date>2015/5/6 19:57</create-date>
 *
 * <copyright file="Probability.java">
 * Copyright (c) 2003-2015, hankcs. All Right Reserved, http://www.hankcs.com/
 * </copyright>
 */
package com.hankcs.hanlp.model.trigram.frequency;

import com.hankcs.hanlp.collection.trie.bintrie.BaseNode;
import com.hankcs.hanlp.collection.trie.bintrie.BinTrie;
import com.hankcs.hanlp.collection.trie.bintrie._ValueArray;
import com.hankcs.hanlp.corpus.io.ByteArray;
import com.hankcs.hanlp.corpus.io.ICacheAble;

import java.io.DataOutputStream;
import java.util.Collection;
import java.util.Set;

/**
 * Frequency-counting utility ("probability statistics tool"): maps string keys to
 * integer counts in a binary trie and exposes relative frequencies over the running
 * total. Keys built from char[] pairs concatenate the first two chars of each pair.
 *
 * @author hankcs
 */
public class Probability implements ICacheAble
{
    // Trie from key to its observed count.
    public BinTrie<Integer> d;
    // Sum of all counts ever added; denominator for freq(...).
    int total;

    public Probability()
    {
        d = new BinTrie<Integer>(){
            @Override
            public boolean load(ByteArray byteArray, _ValueArray valueArray)
            {
                // Shrink the child array by one slot for compatibility with old models
                // (older serialized tries used one fewer child entry).
                BaseNode<Integer>[] nchild = new BaseNode[child.length - 1];
                System.arraycopy(child, 0, nchild, 0, nchild.length);
                child = nchild;
                return super.load(byteArray, valueArray);
            }
        };
    }

    /** Returns true when the key has a recorded count. */
    public boolean exists(String key)
    {
        return d.containsKey(key);
    }

    /** Returns the grand total of all added counts. */
    public int getsum()
    {
        return total;
    }

    // Raw lookup; null when the key was never added.
    Integer get(String key)
    {
        return d.get(key);
    }

    /** Count for a key built from char[] pairs; 0 when absent. */
    public int get(char[]... keyArray)
    {
        Integer f = get(convert(keyArray));
        if (f == null) return 0;
        return f;
    }

    /** Count for a raw char key; 0 when absent. */
    public int get(char... key)
    {
        Integer f = d.get(key);
        if (f == null) return 0;
        return f;
    }

    /** Relative frequency of the key: count / total. */
    public double freq(String key)
    {
        Integer f = get(key);
        if (f == null) f = 0;
        return f / (double) total;
    }

    /** Relative frequency of a key built from char[] pairs. */
    public double freq(char[]... keyArray)
    {
        return freq(convert(keyArray));
    }

    /** Relative frequency of a raw char key. */
    public double freq(char... keyArray)
    {
        Integer f = d.get(keyArray);
        if (f == null) f = 0;
        return f / (double) total;
    }

    /** All keys that have a recorded count. */
    public Set<String> samples()
    {
        return d.keySet();
    }

    // Adds value to the key's count and to the running total.
    void add(String key, int value)
    {
        Integer f = get(key);
        if (f == null) f = 0;
        f += value;
        d.put(key, f);
        total += value;
    }

    // Same as add(String, int) but keyed by raw chars.
    void add(int value, char... key)
    {
        Integer f = d.get(key);
        if (f == null) f = 0;
        f += value;
        d.put(key, f);
        total += value;
    }

    /** Adds value under a key built from char[] pairs. */
    public void add(int value, char[]... keyArray)
    {
        add(convert(keyArray), value);
    }

    /** Adds value under a key built from a collection of char[] pairs. */
    public void add(int value, Collection<char[]> keyArray)
    {
        add(convert(keyArray), value);
    }

    // Concatenates the first two chars of each element into a single key string.
    private String convert(Collection<char[]> keyArray)
    {
        StringBuilder sbKey = new StringBuilder(keyArray.size() * 2);
        for (char[] key : keyArray)
        {
            sbKey.append(key[0]);
            sbKey.append(key[1]);
        }
        return sbKey.toString();
    }

    // Varargs twin of convert(Collection) — same two-chars-per-element scheme.
    static private String convert(char[]... keyArray)
    {
        StringBuilder sbKey = new StringBuilder(keyArray.length * 2);
        for (char[] key : keyArray)
        {
            sbKey.append(key[0]);
            sbKey.append(key[1]);
        }
        return sbKey.toString();
    }

    /**
     * Serializes total, then the value array (length-prefixed), then the trie structure.
     * load(ByteArray) must read fields back in exactly this order.
     */
    @Override
    public void save(DataOutputStream out) throws Exception
    {
        out.writeInt(total);
        Integer[] valueArray = d.getValueArray(new Integer[0]);
        out.writeInt(valueArray.length);
        for (Integer v : valueArray)
        {
            out.writeInt(v);
        }
        d.save(out);
    }

    /** Mirror of save(DataOutputStream); always reports success. */
    @Override
    public boolean load(ByteArray byteArray)
    {
        total = byteArray.nextInt();
        int size = byteArray.nextInt();
        Integer[] valueArray = new Integer[size];
        for (int i = 0; i < valueArray.length; ++i)
        {
            valueArray[i] = byteArray.nextInt();
        }
        d.load(byteArray, valueArray);
        return true;
    }
}
apache-2.0
shs96c/buck
test/com/facebook/buck/event/listener/BuildThreadStateRendererTest.java
10279
/*
 * Copyright 2015-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.event.listener;

import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;

import com.facebook.buck.core.build.event.BuildRuleEvent;
import com.facebook.buck.core.build.stats.BuildRuleDurationTracker;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.BuildTargetFactory;
import com.facebook.buck.core.rulekey.RuleKey;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.core.rules.impl.FakeBuildRule;
import com.facebook.buck.event.LeafEvent;
import com.facebook.buck.event.TestEventConfigurator;
import com.facebook.buck.rules.keys.FakeRuleKeyFactory;
import com.facebook.buck.step.StepEvent;
import com.facebook.buck.util.Ansi;
import com.facebook.buck.util.timing.ClockDuration;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.hash.HashCode;
import java.util.Locale;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
import org.junit.Test;

/**
 * Unit tests for BuildThreadStateRenderer: feeds it synthetic per-thread build-rule
 * and step events and checks the rendered status lines / short-status glyphs.
 */
public class BuildThreadStateRendererTest {

  private static final Ansi ANSI = Ansi.withoutTty();

  // Renders millisecond timestamps as e.g. "4.4s"; Locale.US keeps the decimal point stable.
  private static final Function<Long, String> FORMAT_TIME_FUNCTION =
      timeMs -> String.format(Locale.US, "%.1fs", timeMs / 1000.0);

  private static final BuildTarget TARGET1 = BuildTargetFactory.newInstance("//:target1");
  private static final BuildTarget TARGET2 = BuildTargetFactory.newInstance("//:target2");
  private static final BuildTarget TARGET3 = BuildTargetFactory.newInstance("//:target3");
  private static final BuildTarget TARGET4 = BuildTargetFactory.newInstance("//:target4");
  private static final BuildRule RULE1 = createFakeRule(TARGET1);
  private static final BuildRule RULE2 = createFakeRule(TARGET2);
  private static final BuildRule RULE3 = createFakeRule(TARGET3);
  private static final BuildRule RULE4 = createFakeRule(TARGET4);

  /** No events at all: nothing is rendered in either mode. */
  @Test
  public void emptyInput() {
    BuildThreadStateRenderer renderer =
        createRenderer(2100, ImmutableMap.of(), ImmutableMap.of(), 80 /* outputMaxColumns */);
    assertThat(renderLines(renderer, true), is(equalTo(ImmutableList.<String>of())));
    assertThat(renderLines(renderer, false), is(equalTo(ImmutableList.<String>of())));
    assertThat(renderShortStatus(renderer, true), is(equalTo(ImmutableList.<String>of())));
    assertThat(renderShortStatus(renderer, false), is(equalTo(ImmutableList.<String>of())));
  }

  /** Typical mix of running rules, an idle thread, and two running steps. */
  @Test
  public void commonCase() {
    BuildThreadStateRenderer renderer =
        createRenderer(
            4200,
            ImmutableMap.of(
                1L, createRuleBeginningEventOptional(1, 1200, 1400, RULE2),
                3L, createRuleBeginningEventOptional(3, 2300, 700, RULE3),
                4L, createRuleBeginningEventOptional(4, 1100, 200, RULE1),
                5L, Optional.empty(),
                8L, createRuleBeginningEventOptional(6, 3000, 0, RULE4)),
            ImmutableMap.of(
                1L, createStepStartedEventOptional(1, 1500, "step A"),
                3L, Optional.empty(),
                4L, Optional.empty(),
                5L, Optional.empty(),
                8L, createStepStartedEventOptional(1, 3700, "step B")),
            80 /* outputMaxColumns */);
    // sortByTime=true orders threads by accumulated time, longest first.
    assertThat(
        renderLines(renderer, true),
        is(
            equalTo(
                ImmutableList.of(
                    " - //:target2... 4.4s (running step A[2.7s])",
                    " - //:target1... 3.3s (preparing)",
                    " - //:target3... 2.6s (preparing)",
                    " - //:target4... 1.2s (running step B[0.5s])",
                    " - IDLE"))));
    // sortByTime=false keeps thread-id order instead.
    assertThat(
        renderLines(renderer, false),
        is(
            equalTo(
                ImmutableList.of(
                    " - //:target2... 4.4s (running step A[2.7s])",
                    " - //:target3... 2.6s (preparing)",
                    " - //:target1... 3.3s (preparing)",
                    " - IDLE",
                    " - //:target4... 1.2s (running step B[0.5s])"))));
    assertThat(
        renderShortStatus(renderer, true),
        is(equalTo(ImmutableList.of("[:]", "[:]", "[:]", "[:]", "[ ]"))));
    assertThat(
        renderShortStatus(renderer, false),
        is(equalTo(ImmutableList.of("[:]", "[:]", "[:]", "[ ]", "[:]"))));
  }

  /** With a narrow console the target names and step details are truncated. */
  @Test
  public void testSmallWidth() {
    BuildThreadStateRenderer renderer =
        createRenderer(
            4200,
            ImmutableMap.of(
                1L, createRuleBeginningEventOptional(1, 1200, 1400, RULE2),
                3L, createRuleBeginningEventOptional(3, 2300, 700, RULE3),
                4L, createRuleBeginningEventOptional(4, 1100, 200, RULE1),
                5L, Optional.empty(),
                8L, createRuleBeginningEventOptional(6, 3000, 0, RULE4)),
            ImmutableMap.of(
                1L, createStepStartedEventOptional(1, 1500, "step A"),
                3L, Optional.empty(),
                4L, Optional.empty(),
                5L, Optional.empty(),
                8L, createStepStartedEventOptional(1, 3700, "step B")),
            20 /* outputMaxColumns */);
    // Output is truncated.
    assertThat(
        renderLines(renderer, true),
        is(
            equalTo(
                ImmutableList.of(
                    " - //:target... 4.4s",
                    " - //:target... 3.3s",
                    " - //:target... 2.6s",
                    " - //:target... 1.2s",
                    " - IDLE"))));
  }

  @Test
  public void withMissingInformation() {
    // SuperConsoleEventBusListener stores the data it passes to the renderer in a map that might
    // be concurrently modified from other threads. It is important that the renderer can handle
    // data containing inconsistencies.
    BuildThreadStateRenderer renderer =
        createRenderer(
            4200,
            ImmutableMap.of(
                3L, createRuleBeginningEventOptional(3, 2300, 700, RULE3),
                5L, Optional.empty(),
                8L, createRuleBeginningEventOptional(6, 3000, 0, RULE4)),
            ImmutableMap.of(
                1L, createStepStartedEventOptional(1, 1500, "step A"),
                4L, Optional.empty(),
                5L, Optional.empty(),
                8L, createStepStartedEventOptional(1, 3700, "step B")),
            80 /* outputMaxColumns */);
    assertThat(
        renderLines(renderer, true),
        is(
            equalTo(
                ImmutableList.of(
                    // one missing build rule - no output
                    " - //:target3... 2.6s (preparing)",
                    // missing step information
                    " - //:target4... 1.2s (running step B[0.5s])",
                    " - IDLE")))); // missing accumulated time - show as IDLE
    assertThat(
        renderShortStatus(renderer, true), is(equalTo(ImmutableList.of("[:]", "[:]", "[ ]"))));
  }

  // Minimal rule with no dependencies, just enough for the renderer to show its target name.
  private static BuildRule createFakeRule(BuildTarget target) {
    return new FakeBuildRule(target, ImmutableSortedSet.of());
  }

  /**
   * Builds a "rule resumed" event on the given thread at timeMs, with durationMs of
   * previously-accumulated build time registered in the duration tracker.
   */
  private static Optional<? extends BuildRuleEvent.BeginningBuildRuleEvent>
      createRuleBeginningEventOptional(long threadId, long timeMs, long durationMs, BuildRule rule) {
    BuildRuleDurationTracker durationTracker = new BuildRuleDurationTracker();
    durationTracker.setDuration(rule, new ClockDuration(durationMs, 0, 0));
    RuleKey ruleKey = new RuleKey(HashCode.fromString("aa"));
    return Optional.of(
        TestEventConfigurator.configureTestEventAtTime(
            BuildRuleEvent.resumed(
                rule,
                durationTracker,
                new FakeRuleKeyFactory(ImmutableMap.of(rule.getBuildTarget(), ruleKey))),
            timeMs,
            TimeUnit.MILLISECONDS,
            threadId));
  }

  /** Builds a "step started" event with the given name on the given thread. */
  private static Optional<? extends LeafEvent> createStepStartedEventOptional(
      long threadId, long timeMs, String name) {
    return Optional.of(
        TestEventConfigurator.configureTestEventAtTime(
            StepEvent.started(name, name + " description", UUID.randomUUID()),
            timeMs,
            TimeUnit.MILLISECONDS,
            threadId));
  }

  // Assembles a renderer over the synthetic event maps; 0 disables the minimum-duration filter.
  private BuildThreadStateRenderer createRenderer(
      long timeMs,
      Map<Long, Optional<? extends BuildRuleEvent.BeginningBuildRuleEvent>> buildEvents,
      Map<Long, Optional<? extends LeafEvent>> runningSteps,
      int outputMaxColumns) {
    return new BuildThreadStateRenderer(
        ANSI,
        FORMAT_TIME_FUNCTION,
        timeMs,
        outputMaxColumns, /* outputMaxColumns */
        0, /* minimumDurationMillis */
        runningSteps,
        new BuildRuleThreadTracker(buildEvents, ImmutableMap.of()));
  }

  // Renders one status line per executor thread, in the renderer's chosen order.
  private ImmutableList<String> renderLines(BuildThreadStateRenderer renderer, boolean sortByTime) {
    ImmutableList.Builder<String> lines = ImmutableList.builder();
    StringBuilder lineBuilder = new StringBuilder();
    for (long threadId : renderer.getSortedExecutorIds(sortByTime)) {
      lineBuilder.delete(0, lineBuilder.length());
      lines.add(renderer.renderStatusLine(threadId, lineBuilder));
    }
    return lines.build();
  }

  // Renders the compact "[:]"/"[ ]" glyph per executor thread.
  private ImmutableList<String> renderShortStatus(
      BuildThreadStateRenderer renderer, boolean sortByTime) {
    ImmutableList.Builder<String> status = ImmutableList.builder();
    for (long threadId : renderer.getSortedExecutorIds(sortByTime)) {
      status.add(renderer.renderShortStatus(threadId));
    }
    return status.build();
  }
}
apache-2.0
rockmkd/datacollector
spark-processor-protolib/src/test/java/com/streamsets/pipeline/stage/processor/spark/MethodCallCountingTransformer.java
1546
/* * Copyright 2017 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.stage.processor.spark; import com.streamsets.pipeline.api.Record; import com.streamsets.pipeline.spark.api.SparkTransformer; import com.streamsets.pipeline.spark.api.TransformResult; import org.apache.spark.api.java.JavaRDD; import org.apache.spark.api.java.JavaSparkContext; import java.util.List; public class MethodCallCountingTransformer extends SparkTransformer { static int initCallCount = 0; static int transformCallCount = 0; static int destroyCallCount = 0; public MethodCallCountingTransformer() { initCallCount = transformCallCount = destroyCallCount = 0; } @Override public void init(JavaSparkContext jsc, List<String> params) { initCallCount++; } @Override public TransformResult transform(JavaRDD<Record> recordRDD) { transformCallCount++; return new TransformResult(recordRDD, null); } @Override public void destroy() { destroyCallCount++; } }
apache-2.0
davinash/geode
geode-redis/src/distributedTest/java/org/apache/geode/redis/session/springRedisTestApplication/config/SessionListener.java
1967
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.redis.session.springRedisTestApplication.config; import java.util.HashSet; import java.util.concurrent.atomic.AtomicLong; import javax.servlet.http.HttpSessionEvent; import javax.servlet.http.HttpSessionListener; import org.apache.logging.log4j.Logger; import org.springframework.context.annotation.Configuration; import org.apache.geode.logging.internal.log4j.api.LogService; @Configuration public class SessionListener implements HttpSessionListener { public static AtomicLong sessionCount = new AtomicLong(0); public static HashSet<String> sessionIds = new HashSet<>(); private static final Logger logger = LogService.getLogger(); @Override public void sessionCreated(HttpSessionEvent event) { sessionCount.getAndIncrement(); sessionIds.add(event.getSession().getId()); logger.info("session created: " + event.getSession().getId()); event.getSession().setMaxInactiveInterval(15); } @Override public void sessionDestroyed(HttpSessionEvent event) { sessionIds.remove(event.getSession().getId()); sessionCount.getAndDecrement(); logger.info("session destroyed: " + event.getSession().getId()); } }
apache-2.0
objectiser/camel
core/camel-core-engine/src/main/java/org/apache/camel/model/rest/RestOperationParamDefinition.java
9322
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.model.rest;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;

import org.apache.camel.spi.Metadata;
import org.apache.camel.util.StringHelper;

/**
 * To specify the rest operation parameters using Swagger.
 * <p/>
 * This maps to the Swagger Parameter Message Object. Getters substitute the
 * documented Swagger defaults when a field was never set.
 */
@Metadata(label = "rest")
@XmlRootElement(name = "param")
@XmlAccessorType(XmlAccessType.FIELD)
public class RestOperationParamDefinition {

    // Back-reference to the owning verb; not serialized to XML.
    @XmlTransient
    private VerbDefinition verb;

    @XmlAttribute(required = true)
    private String name;

    @XmlAttribute(required = true)
    @Metadata(defaultValue = "path")
    private RestParamType type;

    @XmlAttribute
    @Metadata(defaultValue = "")
    private String description;

    @XmlAttribute
    @Metadata(defaultValue = "")
    private String defaultValue;

    @XmlAttribute
    @Metadata(defaultValue = "true")
    private Boolean required;

    @XmlAttribute
    @Metadata(defaultValue = "csv")
    private CollectionFormat collectionFormat;

    @XmlAttribute
    @Metadata(defaultValue = "string")
    private String arrayType;

    @XmlAttribute
    @Metadata(defaultValue = "string")
    private String dataType;

    @XmlAttribute
    private String dataFormat;

    @XmlElementWrapper(name = "allowableValues")
    @XmlElement(name = "value")
    private List<String> allowableValues;

    @XmlElement(name = "examples")
    private List<RestPropertyDefinition> examples;

    public RestOperationParamDefinition() {
    }

    public RestOperationParamDefinition(VerbDefinition verb) {
        this.verb = verb;
    }

    /** Gets the Swagger Parameter type; defaults to {@code path} when unset. */
    public RestParamType getType() {
        return type != null ? type : RestParamType.path;
    }

    /**
     * Sets the Swagger Parameter type.
     */
    public void setType(RestParamType type) {
        this.type = type;
    }

    public String getName() {
        return name;
    }

    /**
     * Sets the Swagger Parameter name.
     */
    public void setName(String name) {
        this.name = name;
    }

    /** Gets the Swagger Parameter description; empty string when unset. */
    public String getDescription() {
        return description != null ? description : "";
    }

    /**
     * Sets the Swagger Parameter description.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /** Gets the Swagger Parameter default value; empty string when unset. */
    public String getDefaultValue() {
        return defaultValue != null ? defaultValue : "";
    }

    /**
     * Sets the Swagger Parameter default value.
     */
    public void setDefaultValue(String defaultValue) {
        this.defaultValue = defaultValue;
    }

    /** Gets the required flag; defaults to {@code true} when unset. */
    public Boolean getRequired() {
        return required != null ? required : true;
    }

    /**
     * Sets the Swagger Parameter required flag.
     */
    public void setRequired(Boolean required) {
        this.required = required;
    }

    public CollectionFormat getCollectionFormat() {
        return collectionFormat;
    }

    /**
     * Sets the Swagger Parameter collection format.
     */
    public void setCollectionFormat(CollectionFormat collectionFormat) {
        this.collectionFormat = collectionFormat;
    }

    public String getArrayType() {
        return arrayType;
    }

    /**
     * Sets the Swagger Parameter array type. Required if data type is "array".
     * Describes the type of items in the array.
     */
    public void setArrayType(String arrayType) {
        this.arrayType = arrayType;
    }

    /** Gets the Swagger Parameter data type; defaults to {@code "string"} when unset. */
    public String getDataType() {
        return dataType != null ? dataType : "string";
    }

    /**
     * Sets the Swagger Parameter data type.
     */
    public void setDataType(String dataType) {
        this.dataType = dataType;
    }

    public String getDataFormat() {
        return dataFormat;
    }

    /**
     * Sets the Swagger Parameter data format.
     */
    public void setDataFormat(String dataFormat) {
        this.dataFormat = dataFormat;
    }

    /** Gets the allowable values; a fresh empty list (not null) when unset. */
    public List<String> getAllowableValues() {
        if (allowableValues != null) {
            return allowableValues;
        }
        return new ArrayList<>();
    }

    /**
     * Sets the Swagger Parameter list of allowable values (enum).
     */
    public void setAllowableValues(List<String> allowableValues) {
        this.allowableValues = allowableValues;
    }

    public List<RestPropertyDefinition> getExamples() {
        return examples;
    }

    /**
     * Sets the Swagger Parameter examples.
     */
    public void setExamples(List<RestPropertyDefinition> examples) {
        this.examples = examples;
    }

    /**
     * Name of the parameter.
     * <p/>
     * This option is mandatory.
     */
    public RestOperationParamDefinition name(String name) {
        setName(name);
        return this;
    }

    /**
     * Description of the parameter.
     */
    public RestOperationParamDefinition description(String name) {
        setDescription(name);
        return this;
    }

    /**
     * The default value of the parameter.
     */
    public RestOperationParamDefinition defaultValue(String name) {
        setDefaultValue(name);
        return this;
    }

    /**
     * Whether the parameter is required
     */
    public RestOperationParamDefinition required(Boolean required) {
        setRequired(required);
        return this;
    }

    /**
     * Sets the collection format.
     */
    public RestOperationParamDefinition collectionFormat(CollectionFormat collectionFormat) {
        setCollectionFormat(collectionFormat);
        return this;
    }

    /**
     * The data type of the array data type
     */
    public RestOperationParamDefinition arrayType(String arrayType) {
        setArrayType(arrayType);
        return this;
    }

    /**
     * The data type of the parameter such as <tt>string</tt>, <tt>integer</tt>,
     * <tt>boolean</tt>
     */
    public RestOperationParamDefinition dataType(String type) {
        setDataType(type);
        return this;
    }

    /**
     * The data format of the parameter such as <tt>binary</tt>, <tt>date</tt>,
     * <tt>date-time</tt>, <tt>password</tt>. The format is usually derived from
     * the dataType alone. However you can set this option for more fine grained
     * control of the format in use.
     */
    public RestOperationParamDefinition dataFormat(String type) {
        setDataFormat(type);
        return this;
    }

    /**
     * Allowed values of the parameter when its an enum type
     */
    public RestOperationParamDefinition allowableValues(List<String> allowableValues) {
        setAllowableValues(allowableValues);
        return this;
    }

    /**
     * Allowed values of the parameter when its an enum type
     */
    public RestOperationParamDefinition allowableValues(String... allowableValues) {
        setAllowableValues(Arrays.asList(allowableValues));
        return this;
    }

    /**
     * Allowed values of the parameter when its an enum type; the string is split on
     * commas (values are not trimmed).
     */
    public RestOperationParamDefinition allowableValues(String allowableValues) {
        setAllowableValues(Arrays.asList(allowableValues.split(",")));
        return this;
    }

    /**
     * The parameter type such as body, form, header, path, query
     */
    public RestOperationParamDefinition type(RestParamType type) {
        setType(type);
        return this;
    }

    /**
     * Adds a body example with the given content-type
     */
    public RestOperationParamDefinition example(String contentType, String example) {
        if (examples == null) {
            examples = new ArrayList<>();
        }
        examples.add(new RestPropertyDefinition(contentType, example));
        return this;
    }

    /**
     * Adds a single example
     */
    public RestOperationParamDefinition example(String example) {
        if (examples == null) {
            examples = new ArrayList<>();
        }
        examples.add(new RestPropertyDefinition("", example));
        return this;
    }

    /**
     * Ends the configuration of this parameter: validates the mandatory name,
     * registers this param on the owning verb, and returns to the rest DSL.
     */
    public RestDefinition endParam() {
        // name is mandatory
        StringHelper.notEmpty(name, "name");
        verb.getParams().add(this);
        return verb.getRest();
    }
}
apache-2.0
torito/wisdom
extensions/wisdom-filters/src/test/java/org/wisdom/framework/filters/test/RedirectFilterTest.java
7830
/*
 * #%L
 * Wisdom-Framework
 * %%
 * Copyright (C) 2013 - 2014 Wisdom Framework
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package org.wisdom.framework.filters.test;

import com.google.common.net.HttpHeaders;
import org.junit.Test;
import org.wisdom.api.configuration.Configuration;
import org.wisdom.api.http.*;
import org.wisdom.api.interception.RequestContext;
import org.wisdom.api.router.Route;
import org.wisdom.framework.filters.RedirectFilter;
import org.wisdom.test.parents.FakeContext;
import org.wisdom.test.parents.FakeRequest;
import org.wisdom.test.parents.WisdomUnitTest;

import java.net.URI;
import java.net.URISyntaxException;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Checks the {@link RedirectFilter} behavior: redirection for GET/POST, query string
 * preservation, rewrite-failure handling, configuration-driven setup and path rewriting.
 */
public class RedirectFilterTest extends WisdomUnitTest {

    /**
     * Builds a filter redirecting to the given location for requests under the given prefix.
     */
    private RedirectFilter newFilter(final String redirectTo, final String prefix) {
        return new RedirectFilter() {
            @Override
            protected String getRedirectTo() {
                return redirectTo;
            }

            @Override
            protected String getPrefix() {
                return prefix;
            }
        };
    }

    /**
     * Builds a mocked {@link RequestContext} whose context/request expose the given
     * path, HTTP method and URI, with a keep-alive connection header.
     */
    private RequestContext mockRequestContext(String path, HttpMethod method, String uri) {
        RequestContext rc = mock(RequestContext.class);
        FakeContext context = new FakeContext();
        context.setPath(path).setHeader(HttpHeaders.CONNECTION, "keep-alive");
        FakeRequest request = new FakeRequest(context).method(method).uri(uri);
        when(rc.context()).thenReturn(context);
        when(rc.request()).thenReturn(request);
        return rc;
    }

    @Test
    public void testRedirection() throws Exception {
        RedirectFilter filter = newFilter("http://perdu.com", "/");
        Route route = mock(Route.class);
        RequestContext rc = mockRequestContext("/", HttpMethod.GET, "/");

        Result result = filter.call(route, rc);
        assertThat(result).isNotNull();
        assertThat(result.getStatusCode()).isEqualTo(Status.SEE_OTHER);
        assertThat(result.getHeaders().get(HeaderNames.LOCATION)).isEqualTo("http://perdu.com");
    }

    @Test
    public void testRedirectionWithQuery() throws Exception {
        RedirectFilter filter = newFilter("http://perdu.com", "/");
        Route route = mock(Route.class);
        RequestContext rc = mockRequestContext("/", HttpMethod.GET, "/?foo=bar");

        Result result = filter.call(route, rc);
        assertThat(result).isNotNull();
        assertThat(result.getStatusCode()).isEqualTo(Status.SEE_OTHER);
        // The query string must be carried over to the redirect target.
        assertThat(result.getHeaders().get(HeaderNames.LOCATION)).isEqualTo("http://perdu.com?foo=bar");
    }

    @Test
    public void testRedirectionForPostRequests() throws Exception {
        RedirectFilter filter = newFilter("http://perdu.com", "/");
        Route route = mock(Route.class);
        RequestContext rc = mockRequestContext("/", HttpMethod.POST, "/");

        Result result = filter.call(route, rc);
        assertThat(result).isNotNull();
        assertThat(result.getStatusCode()).isEqualTo(Status.SEE_OTHER);
        assertThat(result.getHeaders().get(HeaderNames.LOCATION)).isEqualTo("http://perdu.com");
    }

    @Test
    public void testFailedRewriting() throws Exception {
        // This filter needs extra overrides, so it cannot use the newFilter helper.
        RedirectFilter filter = new RedirectFilter() {
            @Override
            protected String getRedirectTo() {
                return "http://perdu.com";
            }

            @Override
            protected String getPrefix() {
                return "/";
            }

            @Override
            protected Result onRewriteFailed(RequestContext context) {
                return Results.badRequest();
            }

            @Override
            public URI rewriteURI(Request request) throws URISyntaxException {
                // return null on purpose to simulate an error while rewriting the url.
                return null;
            }
        };
        Route route = mock(Route.class);
        RequestContext rc = mockRequestContext("/", HttpMethod.GET, "/");

        Result result = filter.call(route, rc);
        // (A duplicated isNotNull assertion was removed here.)
        assertThat(result).isNotNull();
        assertThat(result.getStatusCode()).isEqualTo(Status.BAD_REQUEST);
    }

    @Test
    public void testConfiguration() throws Exception {
        Configuration configuration = mock(Configuration.class);
        when(configuration.get("prefix")).thenReturn("/redirected");
        when(configuration.get("redirectTo")).thenReturn("http://perdu.com");
        RedirectFilter filter = new RedirectFilter(configuration);
        Route route = mock(Route.class);
        RequestContext rc = mockRequestContext("/redirected", HttpMethod.GET, "/redirected");

        Result result = filter.call(route, rc);
        assertThat(result).isNotNull();
        assertThat(result.getStatusCode()).isEqualTo(Status.SEE_OTHER);
        assertThat(result.getHeaders().get(HeaderNames.LOCATION)).isEqualTo("http://perdu.com");
    }

    @Test
    public void testPathComputation() throws Exception {
        RedirectFilter filter = newFilter("http://perdu.com", "/proxy");
        RequestContext rc = mockRequestContext("/proxy/foo/bar", HttpMethod.GET, "/proxy/foo/bar");

        // Everything after the prefix must be preserved in the rewritten URI.
        URI uri = filter.rewriteURI(rc.request());
        assertThat(uri.toString()).endsWith("/foo/bar");
    }
}
apache-2.0
ilantukh/ignite
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/IgnitePdsDynamicCacheTest.java
6883
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.persistence; import java.io.Serializable; import org.apache.ignite.Ignite; import org.apache.ignite.IgniteCache; import org.apache.ignite.cache.CacheAtomicityMode; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.cache.CacheRebalanceMode; import org.apache.ignite.cache.CacheWriteSynchronizationMode; import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction; import org.apache.ignite.cache.query.annotations.QuerySqlField; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.DataRegionConfiguration; import org.apache.ignite.configuration.DataStorageConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.configuration.WALMode; import org.apache.ignite.internal.processors.database.IgniteDbDynamicCacheSelfTest; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.testframework.MvccFeatureChecker; import org.junit.Test; /** * */ public class IgnitePdsDynamicCacheTest extends IgniteDbDynamicCacheSelfTest { /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String gridName) 
throws Exception { IgniteConfiguration cfg = super.getConfiguration(gridName); DataStorageConfiguration memCfg = new DataStorageConfiguration() .setDefaultDataRegionConfiguration( new DataRegionConfiguration().setMaxSize(200L * 1024 * 1024).setPersistenceEnabled(true)) .setWalMode(WALMode.LOG_ONLY); cfg.setDataStorageConfiguration(memCfg); if ("client".equals(gridName)) cfg.setClientMode(true); return cfg; } /** {@inheritDoc} */ @Override protected void beforeTest() throws Exception { System.setProperty(GridCacheDatabaseSharedManager.IGNITE_PDS_CHECKPOINT_TEST_SKIP_SYNC, "true"); super.beforeTest(); } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { super.beforeTestsStarted(); cleanPersistenceDir(); } /** {@inheritDoc} */ @Override protected void afterTest() throws Exception { super.afterTest(); System.clearProperty(GridCacheDatabaseSharedManager.IGNITE_PDS_CHECKPOINT_TEST_SKIP_SYNC); cleanPersistenceDir(); } /** * @throws Exception If failed. */ @Test public void testRestartAndCreate() throws Exception { startGrids(3); Ignite ignite = ignite(0); ignite.active(true); CacheConfiguration ccfg1 = new CacheConfiguration(); ccfg1.setName("cache1"); ccfg1.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL); ccfg1.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC); ccfg1.setAffinity(new RendezvousAffinityFunction(false, 32)); if (MvccFeatureChecker.forcedMvcc()) ccfg1.setRebalanceDelay(Long.MAX_VALUE); else ccfg1.setRebalanceMode(CacheRebalanceMode.NONE); CacheConfiguration ccfg2 = new CacheConfiguration(); ccfg2.setName("cache2"); ccfg2.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL); ccfg2.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC); ccfg2.setAffinity(new RendezvousAffinityFunction(false, 32)); ccfg2.setIndexedTypes(Integer.class, Value.class); if (MvccFeatureChecker.forcedMvcc()) ccfg2.setRebalanceDelay(Long.MAX_VALUE); else ccfg2.setRebalanceMode(CacheRebalanceMode.NONE); 
CacheConfiguration ccfg3 = new CacheConfiguration(); ccfg3.setName("cache3"); ccfg3.setAtomicityMode(CacheAtomicityMode.ATOMIC); ccfg3.setCacheMode(CacheMode.LOCAL); ignite.createCache(ccfg1); ignite.createCache(ccfg2); ignite.createCache(ccfg3).put(2, 3); int iterations = 20; long stopTime = U.currentTimeMillis() + 20_000; for (int k = 0; k < iterations && U.currentTimeMillis() < stopTime; k++) { log.info("Iteration: " + k); stopAllGrids(); startGrids(3); ignite = ignite(0); ignite.active(true); ignite.getOrCreateCache(ccfg1); ignite.getOrCreateCache(ccfg2); assertEquals(1, ignite.cache(ccfg3.getName()).size()); assertEquals(3, ignite.cache(ccfg3.getName()).get(2)); ignite.destroyCache(ccfg2.getName()); ignite.getOrCreateCache(ccfg2); ignite.destroyCache(ccfg1.getName()); } } /** * @throws Exception If failed. */ @Test public void testDynamicCacheSavingOnNewNode() throws Exception { Ignite ignite = startGrid(0); ignite.active(true); CacheConfiguration ccfg = new CacheConfiguration(DEFAULT_CACHE_NAME); ccfg.setAtomicityMode(CacheAtomicityMode.TRANSACTIONAL); ccfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC); ccfg.setRebalanceMode(CacheRebalanceMode.SYNC); ccfg.setAffinity(new RendezvousAffinityFunction(false, 32)); IgniteCache cache = ignite.getOrCreateCache(ccfg); for (int i = 0; i < 160; i++) cache.put(i, i); ignite = startGrid(1); awaitPartitionMapExchange(); cache = ignite.cache(DEFAULT_CACHE_NAME); for (int i = 0; i < 160; i++) assertEquals(i, cache.get(i)); stopAllGrids(true); startGrid(0); ignite = startGrid(1); ignite.active(true); cache = ignite.cache(DEFAULT_CACHE_NAME); for (int i = 0; i < 160; i++) assertEquals(i, cache.get(i)); } /** * */ static class Value implements Serializable { /** */ @QuerySqlField(index = true, groups = "full_name") String fName; /** */ @QuerySqlField(index = true, groups = "full_name") String lName; } }
apache-2.0
gmarz/elasticsearch
core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java
8723
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.suggest; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; import org.junit.BeforeClass; import java.io.IOException; import static java.util.Collections.emptyList; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; public abstract class AbstractSuggestionBuilderTestCase<SB extends SuggestionBuilder<SB>> extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; protected static NamedWriteableRegistry 
namedWriteableRegistry; protected static IndicesQueriesRegistry queriesRegistry; protected static ParseFieldMatcher parseFieldMatcher; protected static Suggesters suggesters; /** * setup for the whole base test class */ @BeforeClass public static void init() throws IOException { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); queriesRegistry = searchModule.getQueryParserRegistry(); suggesters = searchModule.getSuggesters(); parseFieldMatcher = ParseFieldMatcher.STRICT; } @AfterClass public static void afterClass() throws Exception { namedWriteableRegistry = null; suggesters = null; queriesRegistry = null; } /** * Test serialization and deserialization of the suggestion builder */ public void testSerialization() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SB original = randomTestBuilder(); SB deserialized = copy(original); assertEquals(deserialized, original); assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); } } /** * returns a random suggestion builder, setting the common options randomly */ protected SB randomTestBuilder() { SB randomSuggestion = randomSuggestionBuilder(); return randomSuggestion; } public static void setCommonPropertiesOnRandomBuilder(SuggestionBuilder<?> randomSuggestion) { randomSuggestion.text(randomAsciiOfLengthBetween(2, 20)); // have to set the text because we don't know if the global text was set maybeSet(randomSuggestion::prefix, randomAsciiOfLengthBetween(2, 20)); maybeSet(randomSuggestion::regex, randomAsciiOfLengthBetween(2, 20)); maybeSet(randomSuggestion::analyzer, randomAsciiOfLengthBetween(2, 20)); maybeSet(randomSuggestion::size, randomIntBetween(1, 20)); maybeSet(randomSuggestion::shardSize, randomIntBetween(1, 20)); } /** * create a randomized {@link SuggestBuilder} that is used in further tests */ protected abstract SB 
randomSuggestionBuilder(); /** * Test equality and hashCode properties */ public void testEqualsAndHashcode() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { checkEqualsAndHashCode(randomTestBuilder(), this::copy, this::mutate); } } /** * creates random suggestion builder, renders it to xContent and back to new * instance that should be equal to original */ public void testFromXContent() throws IOException { for (int runs = 0; runs < NUMBER_OF_TESTBUILDERS; runs++) { SB suggestionBuilder = randomTestBuilder(); XContentBuilder xContentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); if (randomBoolean()) { xContentBuilder.prettyPrint(); } xContentBuilder.startObject(); suggestionBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); xContentBuilder.endObject(); XContentBuilder shuffled = shuffleXContent(xContentBuilder, shuffleProtectedFields()); XContentParser parser = XContentHelper.createParser(shuffled.bytes()); QueryParseContext context = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); // we need to skip the start object and the name, those will be parsed by outer SuggestBuilder parser.nextToken(); SuggestionBuilder<?> secondSuggestionBuilder = SuggestionBuilder.fromXContent(context, suggesters); assertNotSame(suggestionBuilder, secondSuggestionBuilder); assertEquals(suggestionBuilder, secondSuggestionBuilder); assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); } } /** * Subclasses can override this method and return a set of fields which should be protected from * recursive random shuffling in the {@link #testFromXContent()} test case */ protected String[] shuffleProtectedFields() { return new String[0]; } private SB mutate(SB firstBuilder) throws IOException { SB mutation = copy(firstBuilder); assertNotSame(mutation, firstBuilder); // change ither one of the shared SuggestionBuilder parameters, or delegate to the specific tests mutate method if 
(randomBoolean()) { switch (randomIntBetween(0, 5)) { case 0: mutation.text(randomValueOtherThan(mutation.text(), () -> randomAsciiOfLengthBetween(2, 20))); break; case 1: mutation.prefix(randomValueOtherThan(mutation.prefix(), () -> randomAsciiOfLengthBetween(2, 20))); break; case 2: mutation.regex(randomValueOtherThan(mutation.regex(), () -> randomAsciiOfLengthBetween(2, 20))); break; case 3: mutation.analyzer(randomValueOtherThan(mutation.analyzer(), () -> randomAsciiOfLengthBetween(2, 20))); break; case 4: mutation.size(randomValueOtherThan(mutation.size(), () -> randomIntBetween(1, 20))); break; case 5: mutation.shardSize(randomValueOtherThan(mutation.shardSize(), () -> randomIntBetween(1, 20))); break; } } else { mutateSpecificParameters(firstBuilder); } return mutation; } /** * take and input {@link SuggestBuilder} and return another one that is * different in one aspect (to test non-equality) */ protected abstract void mutateSpecificParameters(SB firstBuilder) throws IOException; @SuppressWarnings("unchecked") protected SB copy(SB original) throws IOException { return copyWriteable(original, namedWriteableRegistry, (Writeable.Reader<SB>) namedWriteableRegistry.getReader(SuggestionBuilder.class, original.getWriteableName())); } protected static QueryParseContext newParseContext(final String xcontent) throws IOException { XContentParser parser = XContentFactory.xContent(xcontent).createParser(xcontent); final QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); return parseContext; } }
apache-2.0
trekawek/jackrabbit-oak
oak-run-elastic/src/main/java/org/apache/jackrabbit/oak/run/AvailableElasticModes.java
1438
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.run; import com.google.common.collect.ImmutableMap; import org.apache.jackrabbit.oak.index.ElasticIndexCommand; import org.apache.jackrabbit.oak.run.commons.Command; import org.apache.jackrabbit.oak.run.commons.Modes; /* Avaialble modes for elastic. Add new elastic operations/commands to be supported here. */ public final class AvailableElasticModes { // list of available Modes for the tool public static final Modes MODES = new Modes( ImmutableMap.<String, Command>builder() .put("index", new ElasticIndexCommand()) .build()); }
apache-2.0
apixandru/intellij-community
platform/lang-impl/src/com/intellij/openapi/module/WebModuleBuilder.java
4022
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.module; import com.intellij.icons.AllIcons; import com.intellij.ide.util.projectWizard.ModuleBuilder; import com.intellij.ide.util.projectWizard.ModuleWizardStep; import com.intellij.ide.util.projectWizard.SettingsStep; import com.intellij.ide.util.projectWizard.WebProjectTemplate; import com.intellij.openapi.options.ConfigurationException; import com.intellij.openapi.project.Project; import com.intellij.openapi.roots.ModifiableRootModel; import com.intellij.openapi.roots.ModuleRootManager; import com.intellij.openapi.ui.ValidationInfo; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.platform.ProjectGeneratorPeer; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import javax.swing.*; /** * @author Dmitry Avdeev * Date: 9/27/12 */ public class WebModuleBuilder<T> extends ModuleBuilder { public static final String GROUP_NAME = "Static Web"; public static final Icon ICON = AllIcons.Nodes.PpWeb; private final WebProjectTemplate<T> myTemplate; private final ProjectGeneratorPeer<T> myGeneratorPeer; public WebModuleBuilder(@NotNull WebProjectTemplate<T> template) { myTemplate = template; myGeneratorPeer = myTemplate.createLazyPeer().getValue(); } public WebModuleBuilder() { myTemplate = null; myGeneratorPeer = null; } @Override public void setupRootModel(ModifiableRootModel modifiableRootModel) throws 
ConfigurationException { doAddContentEntry(modifiableRootModel); } @Override public ModuleType getModuleType() { return WebModuleType.getInstance(); } @Override public String getPresentableName() { return getGroupName(); } @Override public boolean isTemplateBased() { return true; } @Override public String getGroupName() { return GROUP_NAME; } @Override public Icon getNodeIcon() { return myTemplate != null ? myTemplate.getIcon() : ICON; } @Nullable @Override public Module commitModule(@NotNull Project project, @Nullable ModifiableModuleModel model) { Module module = super.commitModule(project, model); if (module != null && myTemplate != null) { doGenerate(myTemplate, module); } return module; } private void doGenerate(@NotNull WebProjectTemplate<T> template, @NotNull Module module) { ModuleRootManager moduleRootManager = ModuleRootManager.getInstance(module); VirtualFile[] contentRoots = moduleRootManager.getContentRoots(); VirtualFile dir = module.getProject().getBaseDir(); if (contentRoots.length > 0 && contentRoots[0] != null) { dir = contentRoots[0]; } template.generateProject(module.getProject(), dir, myGeneratorPeer.getSettings(), module); } @Nullable @Override public ModuleWizardStep modifySettingsStep(@NotNull SettingsStep settingsStep) { if (myTemplate == null) { return super.modifySettingsStep(settingsStep); } myGeneratorPeer.buildUI(settingsStep); return new ModuleWizardStep() { @Override public JComponent getComponent() { return null; } @Override public void updateDataModel() { } @Override public boolean validate() throws ConfigurationException { ValidationInfo info = myGeneratorPeer.validate(); if (info != null) throw new ConfigurationException(info.message); return true; } }; } }
apache-2.0
madhav123/gkmaster
appdomain/src/main/java/org/mifos/accounts/financial/business/service/activity/WriteOffFinancialActivity.java
1615
/* * Copyright (c) 2005-2011 Grameen Foundation USA * All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * * See also http://www.apache.org/licenses/LICENSE-2.0.html for an * explanation of the license and how it is applied. */ package org.mifos.accounts.financial.business.service.activity; import java.util.ArrayList; import java.util.List; import org.mifos.accounts.business.AccountTrxnEntity; import org.mifos.accounts.financial.business.service.activity.accountingentry.BaseAccountingEntry; import org.mifos.accounts.financial.business.service.activity.accountingentry.WriteOffAccountingEntry; public class WriteOffFinancialActivity extends BaseFinancialActivity { public WriteOffFinancialActivity(AccountTrxnEntity accountTrxn) { super(accountTrxn); } @Override protected List<BaseAccountingEntry> getFinancialActionEntry() { List<BaseAccountingEntry> financialActionEntryList = new ArrayList<BaseAccountingEntry>(); financialActionEntryList.add(new WriteOffAccountingEntry()); return financialActionEntryList; } }
apache-2.0
mbaechler/james-mailbox
store/src/test/java/org/apache/james/mailbox/store/TestId.java
2228
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.mailbox.store; import org.apache.james.mailbox.store.mail.model.MailboxId; public class TestId implements MailboxId { public static TestId of(long id) { return new TestId(id); } public final Long id; private TestId(long id) { this.id = id; } @Override public String serialize() { return String.valueOf(id); } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((id == null) ? 0 : id.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (getClass() != obj.getClass()) return false; TestId other = (TestId) obj; if (id == null) { if (other.id != null) return false; } else if (!id.equals(other.id)) return false; return true; } }
apache-2.0
TiVo/kafka
trogdor/src/main/java/org/apache/kafka/trogdor/agent/AgentClient.java
12854
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.trogdor.agent; import com.fasterxml.jackson.core.type.TypeReference; import net.sourceforge.argparse4j.ArgumentParsers; import net.sourceforge.argparse4j.inf.ArgumentParser; import net.sourceforge.argparse4j.inf.Namespace; import net.sourceforge.argparse4j.inf.Subparser; import net.sourceforge.argparse4j.inf.Subparsers; import org.apache.kafka.common.utils.Exit; import org.apache.kafka.trogdor.common.JsonUtil; import org.apache.kafka.trogdor.common.StringFormatter; import org.apache.kafka.trogdor.rest.AgentStatusResponse; import org.apache.kafka.trogdor.rest.CreateWorkerRequest; import org.apache.kafka.trogdor.rest.DestroyWorkerRequest; import org.apache.kafka.trogdor.rest.Empty; import org.apache.kafka.trogdor.rest.JsonRestServer; import org.apache.kafka.trogdor.rest.JsonRestServer.HttpResponse; import org.apache.kafka.trogdor.rest.StopWorkerRequest; import org.apache.kafka.trogdor.rest.WorkerState; import org.apache.kafka.trogdor.task.TaskSpec; import org.apache.kafka.trogdor.rest.UptimeResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.ws.rs.core.UriBuilder; import java.time.OffsetDateTime; import java.time.ZoneOffset; import 
java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import static net.sourceforge.argparse4j.impl.Arguments.store; import static net.sourceforge.argparse4j.impl.Arguments.storeTrue; import static org.apache.kafka.trogdor.common.StringFormatter.dateString; import static org.apache.kafka.trogdor.common.StringFormatter.durationString; /** * A client for the Trogdor agent. */ public class AgentClient { private final Logger log; /** * The maximum number of tries to make. */ private final int maxTries; /** * The URL target. */ private final String target; public static class Builder { private Logger log = LoggerFactory.getLogger(AgentClient.class); private int maxTries = 1; private String target = null; public Builder() { } public Builder log(Logger log) { this.log = log; return this; } public Builder maxTries(int maxTries) { this.maxTries = maxTries; return this; } public Builder target(String target) { this.target = target; return this; } public Builder target(String host, int port) { this.target = String.format("%s:%d", host, port); return this; } public AgentClient build() { if (target == null) { throw new RuntimeException("You must specify a target."); } return new AgentClient(log, maxTries, target); } } private AgentClient(Logger log, int maxTries, String target) { this.log = log; this.maxTries = maxTries; this.target = target; } public String target() { return target; } public int maxTries() { return maxTries; } private String url(String suffix) { return String.format("http://%s%s", target, suffix); } public AgentStatusResponse status() throws Exception { HttpResponse<AgentStatusResponse> resp = JsonRestServer.<AgentStatusResponse>httpRequest(url("/agent/status"), "GET", null, new TypeReference<AgentStatusResponse>() { }, maxTries); return resp.body(); } public UptimeResponse uptime() throws Exception { HttpResponse<UptimeResponse> resp = JsonRestServer.httpRequest(url("/agent/uptime"), "GET", null, new 
TypeReference<UptimeResponse>() { }, maxTries); return resp.body(); } public void createWorker(CreateWorkerRequest request) throws Exception { HttpResponse<Empty> resp = JsonRestServer.<Empty>httpRequest( url("/agent/worker/create"), "POST", request, new TypeReference<Empty>() { }, maxTries); resp.body(); } public void stopWorker(StopWorkerRequest request) throws Exception { HttpResponse<Empty> resp = JsonRestServer.<Empty>httpRequest(url( "/agent/worker/stop"), "PUT", request, new TypeReference<Empty>() { }, maxTries); resp.body(); } public void destroyWorker(DestroyWorkerRequest request) throws Exception { UriBuilder uriBuilder = UriBuilder.fromPath(url("/agent/worker")); uriBuilder.queryParam("workerId", request.workerId()); HttpResponse<Empty> resp = JsonRestServer.<Empty>httpRequest(uriBuilder.build().toString(), "DELETE", null, new TypeReference<Empty>() { }, maxTries); resp.body(); } public void invokeShutdown() throws Exception { HttpResponse<Empty> resp = JsonRestServer.<Empty>httpRequest(url( "/agent/shutdown"), "PUT", null, new TypeReference<Empty>() { }, maxTries); resp.body(); } private static void addTargetArgument(ArgumentParser parser) { parser.addArgument("--target", "-t") .action(store()) .required(true) .type(String.class) .dest("target") .metavar("TARGET") .help("A colon-separated host and port pair. 
For example, example.com:8888"); } private static void addJsonArgument(ArgumentParser parser) { parser.addArgument("--json") .action(storeTrue()) .dest("json") .metavar("JSON") .help("Show the full response as JSON."); } private static void addWorkerIdArgument(ArgumentParser parser, String help) { parser.addArgument("--workerId") .action(storeTrue()) .type(Long.class) .dest("workerId") .metavar("WORKER_ID") .help(help); } public static void main(String[] args) throws Exception { ArgumentParser rootParser = ArgumentParsers .newArgumentParser("trogdor-agent-client") .defaultHelp(true) .description("The Trogdor agent client."); Subparsers subParsers = rootParser.addSubparsers(). dest("command"); Subparser uptimeParser = subParsers.addParser("uptime") .help("Get the agent uptime."); addTargetArgument(uptimeParser); addJsonArgument(uptimeParser); Subparser statusParser = subParsers.addParser("status") .help("Get the agent status."); addTargetArgument(statusParser); addJsonArgument(statusParser); Subparser createWorkerParser = subParsers.addParser("createWorker") .help("Create a new worker."); addTargetArgument(createWorkerParser); addWorkerIdArgument(createWorkerParser, "The worker ID to create."); createWorkerParser.addArgument("--taskId") .action(store()) .required(true) .type(String.class) .dest("taskId") .metavar("TASK_ID") .help("The task ID to create."); createWorkerParser.addArgument("--spec", "-s") .action(store()) .required(true) .type(String.class) .dest("taskSpec") .metavar("TASK_SPEC") .help("The task spec to create, or a path to a file containing the task spec."); Subparser stopWorkerParser = subParsers.addParser("stopWorker") .help("Stop a worker."); addTargetArgument(stopWorkerParser); addWorkerIdArgument(stopWorkerParser, "The worker ID to stop."); Subparser destroyWorkerParser = subParsers.addParser("destroyWorker") .help("Destroy a worker."); addTargetArgument(destroyWorkerParser); addWorkerIdArgument(destroyWorkerParser, "The worker ID to destroy."); 
Subparser shutdownParser = subParsers.addParser("shutdown") .help("Shut down the agent."); addTargetArgument(shutdownParser); Namespace res = rootParser.parseArgsOrFail(args); String target = res.getString("target"); AgentClient client = new Builder(). maxTries(3). target(target). build(); ZoneOffset localOffset = OffsetDateTime.now().getOffset(); switch (res.getString("command")) { case "uptime": { UptimeResponse uptime = client.uptime(); if (res.getBoolean("json")) { System.out.println(JsonUtil.toJsonString(uptime)); } else { System.out.printf("Agent is running at %s.%n", target); System.out.printf("\tStart time: %s%n", dateString(uptime.serverStartMs(), localOffset)); System.out.printf("\tCurrent server time: %s%n", dateString(uptime.nowMs(), localOffset)); System.out.printf("\tUptime: %s%n", durationString(uptime.nowMs() - uptime.serverStartMs())); } break; } case "status": { AgentStatusResponse status = client.status(); if (res.getBoolean("json")) { System.out.println(JsonUtil.toJsonString(status)); } else { System.out.printf("Agent is running at %s.%n", target); System.out.printf("\tStart time: %s%n", dateString(status.serverStartMs(), localOffset)); List<List<String>> lines = new ArrayList<>(); List<String> header = new ArrayList<>( Arrays.asList("WORKER_ID", "TASK_ID", "STATE", "TASK_TYPE")); lines.add(header); for (Map.Entry<Long, WorkerState> entry : status.workers().entrySet()) { List<String> cols = new ArrayList<>(); cols.add(Long.toString(entry.getKey())); cols.add(entry.getValue().taskId()); cols.add(entry.getValue().getClass().getSimpleName()); cols.add(entry.getValue().spec().getClass().getCanonicalName()); lines.add(cols); } System.out.print(StringFormatter.prettyPrintGrid(lines)); } break; } case "createWorker": { long workerId = res.getLong("workerId"); String taskId = res.getString("taskId"); TaskSpec taskSpec = JsonUtil. 
objectFromCommandLineArgument(res.getString("taskSpec"), TaskSpec.class); CreateWorkerRequest req = new CreateWorkerRequest(workerId, taskId, taskSpec); client.createWorker(req); System.out.printf("Sent CreateWorkerRequest for worker %d%n.", req.workerId()); break; } case "stopWorker": { long workerId = res.getLong("workerId"); client.stopWorker(new StopWorkerRequest(workerId)); System.out.printf("Sent StopWorkerRequest for worker %d%n.", workerId); break; } case "destroyWorker": { long workerId = res.getLong("workerId"); client.destroyWorker(new DestroyWorkerRequest(workerId)); System.out.printf("Sent DestroyWorkerRequest for worker %d%n.", workerId); break; } case "shutdown": { client.invokeShutdown(); System.out.println("Sent ShutdownRequest."); break; } default: { System.out.println("You must choose an action. Type --help for help."); Exit.exit(1); } } } }
apache-2.0
trekawek/jackrabbit-oak
oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/accesscontrol/AdminPrincipalsBaseTest.java
5800
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.security.authorization.accesscontrol;

import java.security.Principal;
import javax.jcr.RepositoryException;
import javax.jcr.security.AccessControlException;
import javax.jcr.security.AccessControlList;
import javax.jcr.security.AccessControlManager;
import javax.jcr.security.AccessControlPolicy;
import javax.jcr.security.AccessControlPolicyIterator;

import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.api.security.user.Authorizable;
import org.apache.jackrabbit.oak.AbstractSecurityTest;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;
import org.apache.jackrabbit.oak.spi.security.principal.AdminPrincipal;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalImpl;
import org.apache.jackrabbit.oak.spi.security.principal.SystemPrincipal;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants;
import org.junit.Test;

/**
 * Base class for tests that verify how the ACL implementation handles
 * access-control entries created for "administrative" principals (admin,
 * system and configured administrative principals). Subclasses decide,
 * via {@link #assertResult(boolean)} and {@link #assertException()},
 * whether adding such entries is expected to succeed, be ignored or fail.
 */
public abstract class AdminPrincipalsBaseTest extends AbstractSecurityTest {

    static final String ADMINISTRATORS_PRINCIPAL_NAME = "administrators";

    // Applicable ACL for /testNode, resolved in before().
    AccessControlList acl;

    // Principal of the "administrators" group created in before(); removed again in after().
    private Principal administrativePrincipal;

    @Override
    public void before() throws Exception {
        super.before();

        // Create the target node the ACL will be bound to.
        Tree rootNode = root.getTree(PathUtils.ROOT_PATH);
        TreeUtil.addChild(rootNode, "testNode", JcrConstants.NT_UNSTRUCTURED);

        // Create the group whose principal is used as the configured
        // administrative principal in the subclass scenarios.
        administrativePrincipal = getUserManager(root).createGroup(new PrincipalImpl(ADMINISTRATORS_PRINCIPAL_NAME)).getPrincipal();
        root.commit();

        // Pick the first applicable AccessControlList policy for /testNode.
        AccessControlManager acMgr = getAccessControlManager(root);
        AccessControlPolicyIterator itr = acMgr.getApplicablePolicies("/testNode");
        while (itr.hasNext() && acl == null) {
            AccessControlPolicy policy = itr.nextAccessControlPolicy();
            if (policy instanceof AccessControlList) {
                acl = (AccessControlList) policy;
            }
        }
        if (acl == null) {
            throw new RepositoryException("No applicable policy found.");
        }
    }

    @Override
    public void after() throws Exception {
        try {
            // Discard any pending transient changes before cleaning up the
            // test node and group created in before().
            root.refresh();
            root.getTree("/testNode").remove();
            Authorizable gr = getUserManager(root).getAuthorizable(administrativePrincipal);
            if (gr != null) {
                gr.remove();
            }
            root.commit();
        } finally {
            super.after();
        }
    }

    /**
     * Called when adding the entry did not throw; {@code success} is the
     * return value of {@link AccessControlList#addAccessControlEntry}.
     */
    abstract void assertResult(boolean success) throws Exception;

    /**
     * Called when adding the entry threw an {@link AccessControlException}.
     */
    abstract void assertException() throws Exception;

    /**
     * Test if the ACL code properly deals the creation of ACEs for administrative
     * principals which have full access anyway.
     *
     * @since Oak 1.1.1
     * @see <a href="https://issues.apache.org/jira/browse/OAK-2158">OAK-2158</a>
     */
    @Test
    public void testAdminPrincipal() throws Exception {
        try {
            // Ad-hoc AdminPrincipal implemented as a lambda returning the name "admin".
            boolean success = acl.addAccessControlEntry((AdminPrincipal) () -> "admin", privilegesFromNames(PrivilegeConstants.JCR_READ));
            assertResult(success);
        } catch (AccessControlException e) {
            assertException();
        }
    }

    /**
     * Same as {@link #testAdminPrincipal()} but using the AdminPrincipal
     * instances actually present in the admin session's AuthInfo.
     */
    @Test
    public void testAdminAuthInfoPrincipals() throws Exception {
        try {
            for (Principal p : adminSession.getAuthInfo().getPrincipals()) {
                if (p instanceof AdminPrincipal) {
                    boolean success = acl.addAccessControlEntry(p, privilegesFromNames(PrivilegeConstants.JCR_READ));
                    assertResult(success);
                }
            }
        } catch (AccessControlException e) {
            assertException();
        }
    }

    /**
     * Test if the ACL code properly deals the creation of ACEs for system
     * principals which have full access anyway.
     *
     * @since Oak 1.3.0
     * @see <a href="https://issues.apache.org/jira/browse/OAK-2955">OAK-2955</a>
     */
    @Test
    public void testSystemPrincipal() throws Exception {
        try {
            boolean success = acl.addAccessControlEntry(SystemPrincipal.INSTANCE, privilegesFromNames(PrivilegeConstants.JCR_READ));
            assertResult(success);
        } catch (AccessControlException e) {
            assertException();
        }
    }

    /**
     * Test if the ACL code properly deals the creation of ACEs for configured
     * admin-principals, which have full access anyway.
     *
     * @since Oak 1.3.0
     * @see <a href="https://issues.apache.org/jira/browse/OAK-2955">OAK-2955</a>
     */
    @Test
    public void testConfiguredAdministrativePrincipal() throws Exception {
        try {
            boolean success = acl.addAccessControlEntry(administrativePrincipal, privilegesFromNames(PrivilegeConstants.JCR_READ));
            assertResult(success);
        } catch (AccessControlException e) {
            assertException();
        }
    }
}
apache-2.0
mduerig/jackrabbit-oak
oak-segment-tar/src/main/java/org/apache/jackrabbit/oak/segment/SegmentNodeState.java
25564
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.jackrabbit.oak.segment;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Suppliers.memoize;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static java.util.Collections.emptyList;
import static java.util.Collections.singletonList;
import static org.apache.jackrabbit.JcrConstants.JCR_MIXINTYPES;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.oak.api.Type.BOOLEAN;
import static org.apache.jackrabbit.oak.api.Type.LONG;
import static org.apache.jackrabbit.oak.api.Type.NAME;
import static org.apache.jackrabbit.oak.api.Type.NAMES;
import static org.apache.jackrabbit.oak.api.Type.STRING;
import static org.apache.jackrabbit.oak.api.Type.STRINGS;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.MISSING_NODE;
import static org.apache.jackrabbit.oak.spi.state.AbstractNodeState.checkValidName;

import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.List;
import java.util.UUID;

import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import com.google.common.base.Supplier;
import com.google.common.base.Suppliers;

import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState;
import org.apache.jackrabbit.oak.plugins.memory.MemoryChildNodeEntry;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.state.AbstractNodeState;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.oak.spi.state.NodeStateDiff;

/**
 * A record of type "NODE". This class can read a node record from a segment. It
 * currently doesn't cache data (but the template is fully loaded).
 */
public class SegmentNodeState extends Record implements NodeState {

    @Nonnull
    private final SegmentReader reader;

    @Nullable
    private final BlobStore blobStore;

    // Memoized so the writer is only materialized when first needed.
    @Nonnull
    private final Supplier<SegmentWriter> writer;

    // Lazily resolved caches of this node's template record id and template.
    // volatile: written at most to a single stable value, so racy
    // initialization is benign (see getTemplateId()/getTemplate()).
    private volatile RecordId templateId = null;

    private volatile Template template = null;

    SegmentNodeState(
            @Nonnull SegmentReader reader,
            @Nonnull Supplier<SegmentWriter> writer,
            @Nullable BlobStore blobStore,
            @Nonnull RecordId id) {
        super(id);
        this.reader = checkNotNull(reader);
        this.writer = checkNotNull(memoize(writer));
        this.blobStore = blobStore;
    }

    public SegmentNodeState(
            @Nonnull SegmentReader reader,
            @Nonnull SegmentWriter writer,
            @Nullable BlobStore blobStore,
            @Nonnull RecordId id) {
        this(reader, Suppliers.ofInstance(writer), blobStore, id);
    }

    RecordId getTemplateId() {
        if (templateId == null) {
            // no problem if updated concurrently,
            // as each concurrent thread will just get the same value
            templateId = getSegment().readRecordId(getRecordNumber(), 0, 1);
        }
        return templateId;
    }

    Template getTemplate() {
        if (template == null) {
            // no problem if updated concurrently,
            // as each concurrent thread will just get the same value
            template = reader.readTemplate(getTemplateId());
        }
        return template;
    }

    // Reads the map record holding this node's children. Only valid when the
    // template reports MANY_CHILD_NODES (callers check this first).
    MapRecord getChildNodeMap() {
        Segment segment = getSegment();
        return reader.readMap(segment.readRecordId(getRecordNumber(), 0, 2));
    }

    /**
     * Formats a serialized stable id (msb, lsb, offset) as "uuid:offset".
     */
    @Nonnull
    static String getStableId(@Nonnull ByteBuffer stableId) {
        ByteBuffer buffer = stableId.duplicate();
        long msb = buffer.getLong();
        long lsb = buffer.getLong();
        int offset = buffer.getInt();
        return new UUID(msb, lsb) + ":" + offset;
    }

    /**
     * Returns the stable id of this node. In contrast to the node's record id
     * (which is technically the node's address) the stable id doesn't change
     * after an online gc cycle. It might though change after an offline gc cycle.
     *
     * @return  stable id
     */
    public String getStableId() {
        return getStableId(getStableIdBytes());
    }

    /**
     * Returns the stable ID of this node, non parsed. In contrast to the node's
     * record id (which is technically the node's address) the stable id doesn't
     * change after an online gc cycle. It might though change after an offline
     * gc cycle.
     *
     * @return the stable ID of this node.
     */
    public ByteBuffer getStableIdBytes() {
        // The first record id of this node points to the stable id.
        RecordId id = getSegment().readRecordId(getRecordNumber());

        if (id.equals(getRecordId())) {
            // If that id is equal to the record id of this node then the stable
            // id is the string representation of the record id of this node.
            // See RecordWriters.NodeStateWriter.writeRecordContent()
            return id.getBytes();
        } else {
            // Otherwise that id points to the serialised (msb, lsb, offset)
            // stable id.
            return id.getSegment().readBytes(id.getRecordNumber(), 0, RecordId.SERIALIZED_RECORD_ID_BYTES);
        }
    }

    @Override
    public boolean exists() {
        return true;
    }

    @Override
    public long getPropertyCount() {
        Template template = getTemplate();
        long count = template.getPropertyTemplates().length;
        // jcr:primaryType and jcr:mixinTypes are stored in the template
        // itself, not in the property list, so they are counted separately.
        if (template.getPrimaryType() != null) {
            count++;
        }
        if (template.getMixinTypes() != null) {
            count++;
        }
        return count;
    }

    @Override
    public boolean hasProperty(@Nonnull String name) {
        checkNotNull(name);
        Template template = getTemplate();
        switch (name) {
            case JCR_PRIMARYTYPE:
                return template.getPrimaryType() != null;
            case JCR_MIXINTYPES:
                return template.getMixinTypes() != null;
            default:
                return template.getPropertyTemplate(name) != null;
        }
    }

    @Override @CheckForNull
    public PropertyState getProperty(@Nonnull String name) {
        checkNotNull(name);
        Template template = getTemplate();
        PropertyState property = null;
        // Type properties are available directly from the template.
        if (JCR_PRIMARYTYPE.equals(name)) {
            property = template.getPrimaryType();
        } else if (JCR_MIXINTYPES.equals(name)) {
            property = template.getMixinTypes();
        }
        if (property != null) {
            return property;
        }

        PropertyTemplate propertyTemplate = template.getPropertyTemplate(name);
        if (propertyTemplate != null) {
            Segment segment = getSegment();
            RecordId id = getRecordId(segment, template, propertyTemplate);
            return reader.readProperty(id, propertyTemplate);
        } else {
            return null;
        }
    }

    // Resolves the record id of a single property value. The property list
    // record id sits after the stable-id and template record ids, plus one
    // more slot when a child-node record id is present (hence ids 2 or 3 —
    // presumably matching the layout written by RecordWriters; verify there).
    private RecordId getRecordId(Segment segment, Template template,
                                 PropertyTemplate propertyTemplate) {
        int ids = 2;
        if (template.getChildName() != Template.ZERO_CHILD_NODES) {
            ids++;
        }
        RecordId rid = segment.readRecordId(getRecordNumber(), 0, ids);
        ListRecord pIds = new ListRecord(rid,
                template.getPropertyTemplates().length);
        return pIds.getEntry(propertyTemplate.getIndex());
    }

    @Override @Nonnull
    public Iterable<PropertyState> getProperties() {
        Template template = getTemplate();
        PropertyTemplate[] propertyTemplates = template.getPropertyTemplates();
        List<PropertyState> list =
                newArrayListWithCapacity(propertyTemplates.length + 2);

        PropertyState primaryType = template.getPrimaryType();
        if (primaryType != null) {
            list.add(primaryType);
        }

        PropertyState mixinTypes = template.getMixinTypes();
        if (mixinTypes != null) {
            list.add(mixinTypes);
        }

        Segment segment = getSegment();
        int ids = 2;
        if (template.getChildName() != Template.ZERO_CHILD_NODES) {
            ids++;
        }

        if (propertyTemplates.length > 0) {
            ListRecord pIds = new ListRecord(
                    segment.readRecordId(getRecordNumber(), 0, ids),
                    propertyTemplates.length);
            for (int i = 0; i < propertyTemplates.length; i++) {
                RecordId propertyId = pIds.getEntry(i);
                list.add(reader.readProperty(propertyId, propertyTemplates[i]));
            }
        }

        return list;
    }

    @Override
    public boolean getBoolean(@Nonnull String name) {
        return Boolean.TRUE.toString().equals(getValueAsString(name, BOOLEAN));
    }

    @Override
    public long getLong(String name) {
        String value = getValueAsString(name, LONG);
        if (value != null) {
            return Long.parseLong(value);
        } else {
            return 0;
        }
    }

    @Override @CheckForNull
    public String getString(String name) {
        return getValueAsString(name, STRING);
    }

    @Override @Nonnull
    public Iterable<String> getStrings(@Nonnull String name) {
        return getValuesAsStrings(name, STRINGS);
    }

    @Override @CheckForNull
    public String getName(@Nonnull String name) {
        return getValueAsString(name, NAME);
    }

    @Override @Nonnull
    public Iterable<String> getNames(@Nonnull String name) {
        return getValuesAsStrings(name, NAMES);
    }

    /**
     * Optimized value access method. Returns the string value of a property
     * of a given non-array type. Returns {@code null} if the named property
     * does not exist, or is of a different type than given.
     *
     * @param name property name
     * @param type property type
     * @return string value of the property, or {@code null}
     */
    @CheckForNull
    private String getValueAsString(String name, Type<?> type) {
        checkArgument(!type.isArray());

        Template template = getTemplate();
        if (JCR_PRIMARYTYPE.equals(name)) {
            PropertyState primary = template.getPrimaryType();
            if (primary != null) {
                if (type == NAME) {
                    return primary.getValue(NAME);
                } else {
                    return null;
                }
            }
        } else if (JCR_MIXINTYPES.equals(name)
                && template.getMixinTypes() != null) {
            // mixin types are multi-valued, so they never match a non-array type
            return null;
        }

        PropertyTemplate propertyTemplate =
                template.getPropertyTemplate(name);
        if (propertyTemplate == null
                || propertyTemplate.getType() != type) {
            return null;
        }

        Segment segment = getSegment();
        RecordId id = getRecordId(segment, template, propertyTemplate);
        return reader.readString(id);
    }

    /**
     * Optimized value access method. Returns the string values of a property
     * of a given array type. Returns an empty iterable if the named property
     * does not exist, or is of a different type than given.
     *
     * @param name property name
     * @param type property type
     * @return string values of the property, or an empty iterable
     */
    @Nonnull
    private Iterable<String> getValuesAsStrings(String name, Type<?> type) {
        checkArgument(type.isArray());

        Template template = getTemplate();
        if (JCR_MIXINTYPES.equals(name)) {
            PropertyState mixin = template.getMixinTypes();
            if (type == NAMES && mixin != null) {
                return mixin.getValue(NAMES);
            } else if (type == NAMES || mixin != null) {
                return emptyList();
            }
        } else if (JCR_PRIMARYTYPE.equals(name)
                && template.getPrimaryType() != null) {
            // primary type is single-valued, so it never matches an array type
            return emptyList();
        }

        PropertyTemplate propertyTemplate =
                template.getPropertyTemplate(name);
        if (propertyTemplate == null
                || propertyTemplate.getType() != type) {
            return emptyList();
        }

        Segment segment = getSegment();
        RecordId id = getRecordId(segment, template, propertyTemplate);
        segment = id.getSegment();
        // Multi-valued property record: an int count, followed by either a
        // single value record id (count == 1) or a list record of value ids.
        int size = segment.readInt(id.getRecordNumber());
        if (size == 0) {
            return emptyList();
        }

        id = segment.readRecordId(id.getRecordNumber(), 4);
        if (size == 1) {
            return singletonList(reader.readString(id));
        }

        List<String> values = newArrayListWithCapacity(size);
        ListRecord list = new ListRecord(id, size);
        for (RecordId value : list.getEntries()) {
            values.add(reader.readString(value));
        }
        return values;
    }

    @Override
    public long getChildNodeCount(long max) {
        // NOTE: identity (==) comparison against the interned sentinel names
        // is intentional here and below.
        String childName = getTemplate().getChildName();
        if (childName == Template.ZERO_CHILD_NODES) {
            return 0;
        } else if (childName == Template.MANY_CHILD_NODES) {
            return getChildNodeMap().size();
        } else {
            return 1;
        }
    }

    @Override
    public boolean hasChildNode(@Nonnull String name) {
        String childName = getTemplate().getChildName();
        if (childName == Template.ZERO_CHILD_NODES) {
            return false;
        } else if (childName == Template.MANY_CHILD_NODES) {
            return getChildNodeMap().getEntry(name) != null;
        } else {
            return childName.equals(name);
        }
    }

    @Override @Nonnull
    public NodeState getChildNode(@Nonnull String name) {
        String childName = getTemplate().getChildName();
        if (childName == Template.MANY_CHILD_NODES) {
            MapEntry child = getChildNodeMap().getEntry(name);
            if (child != null) {
                return child.getNodeState();
            }
        } else if (childName != Template.ZERO_CHILD_NODES
                && childName.equals(name)) {
            RecordId childNodeId = getSegment().readRecordId(getRecordNumber(), 0, 2);
            return reader.readNode(childNodeId);
        }
        // Name validation happens only on the miss path; found names were
        // valid by construction.
        checkValidName(name);
        return MISSING_NODE;
    }

    @Override @Nonnull
    public Iterable<String> getChildNodeNames() {
        String childName = getTemplate().getChildName();
        if (childName == Template.ZERO_CHILD_NODES) {
            return Collections.emptyList();
        } else if (childName == Template.MANY_CHILD_NODES) {
            return getChildNodeMap().getKeys();
        } else {
            return Collections.singletonList(childName);
        }
    }

    @Override @Nonnull
    public Iterable<? extends ChildNodeEntry> getChildNodeEntries() {
        String childName = getTemplate().getChildName();
        if (childName == Template.ZERO_CHILD_NODES) {
            return Collections.emptyList();
        } else if (childName == Template.MANY_CHILD_NODES) {
            return getChildNodeMap().getEntries();
        } else {
            RecordId childNodeId = getSegment().readRecordId(getRecordNumber(), 0, 2);
            return Collections.singletonList(new MemoryChildNodeEntry(
                    childName, reader.readNode(childNodeId)));
        }
    }

    @Override @Nonnull
    public SegmentNodeBuilder builder() {
        return new SegmentNodeBuilder(this, blobStore, reader, writer.get());
    }

    @Override
    public boolean compareAgainstBaseState(NodeState base, NodeStateDiff diff) {
        if (this == base || fastEquals(this, base)) {
            return true; // no changes
        } else if (base == EMPTY_NODE || !base.exists()) { // special case
            return EmptyNodeState.compareAgainstEmptyState(this, diff);
        } else if (!(base instanceof SegmentNodeState)) { // fallback
            return AbstractNodeState.compareAgainstBaseState(this, base, diff);
        }

        SegmentNodeState that = (SegmentNodeState) base;

        Template beforeTemplate = that.getTemplate();
        RecordId beforeId = that.getRecordId();

        Template afterTemplate = getTemplate();
        RecordId afterId = getRecordId();

        // Compare type properties
        if (!compareProperties(
                beforeTemplate.getPrimaryType(), afterTemplate.getPrimaryType(), diff)) {
            return false;
        }
        if (!compareProperties(
                beforeTemplate.getMixinTypes(), afterTemplate.getMixinTypes(), diff)) {
            return false;
        }

        // Compare other properties, leveraging the ordering
        int beforeIndex = 0;
        int afterIndex = 0;
        PropertyTemplate[] beforeProperties = beforeTemplate.getPropertyTemplates();
        PropertyTemplate[] afterProperties = afterTemplate.getPropertyTemplates();
        while (beforeIndex < beforeProperties.length
                && afterIndex < afterProperties.length) {
            // Templates order properties by (hashCode, name); merge-walk both
            // arrays in that order to pair up matching properties.
            int d = Integer.valueOf(afterProperties[afterIndex].hashCode())
                    .compareTo(beforeProperties[beforeIndex].hashCode());
            if (d == 0) {
                d = afterProperties[afterIndex].getName().compareTo(
                        beforeProperties[beforeIndex].getName());
            }
            PropertyState beforeProperty = null;
            PropertyState afterProperty = null;
            if (d < 0) {
                afterProperty =
                        afterTemplate.getProperty(afterId, afterIndex++);
            } else if (d > 0) {
                beforeProperty =
                        beforeTemplate.getProperty(beforeId, beforeIndex++);
            } else {
                afterProperty =
                        afterTemplate.getProperty(afterId, afterIndex++);
                beforeProperty =
                        beforeTemplate.getProperty(beforeId, beforeIndex++);
            }
            if (!compareProperties(beforeProperty, afterProperty, diff)) {
                return false;
            }
        }
        // Remaining after-properties were added; remaining before-properties
        // were deleted.
        while (afterIndex < afterProperties.length) {
            if (!diff.propertyAdded(
                    afterTemplate.getProperty(afterId, afterIndex++))) {
                return false;
            }
        }
        while (beforeIndex < beforeProperties.length) {
            PropertyState beforeProperty =
                    beforeTemplate.getProperty(beforeId, beforeIndex++);
            if (!diff.propertyDeleted(beforeProperty)) {
                return false;
            }
        }

        // Child-node diff: branch on the (zero / one / many) child layout of
        // each side; sentinel names are compared by identity on purpose.
        String beforeChildName = beforeTemplate.getChildName();
        String afterChildName = afterTemplate.getChildName();
        if (afterChildName == Template.ZERO_CHILD_NODES) {
            if (beforeChildName != Template.ZERO_CHILD_NODES) {
                for (ChildNodeEntry entry
                        : beforeTemplate.getChildNodeEntries(beforeId)) {
                    if (!diff.childNodeDeleted(
                            entry.getName(), entry.getNodeState())) {
                        return false;
                    }
                }
            }
        } else if (afterChildName != Template.MANY_CHILD_NODES) {
            NodeState afterNode =
                    afterTemplate.getChildNode(afterChildName, afterId);
            NodeState beforeNode =
                    beforeTemplate.getChildNode(afterChildName, beforeId);
            if (!beforeNode.exists()) {
                if (!diff.childNodeAdded(afterChildName, afterNode)) {
                    return false;
                }
            } else if (!fastEquals(afterNode, beforeNode)) {
                if (!diff.childNodeChanged(
                        afterChildName, beforeNode, afterNode)) {
                    return false;
                }
            }
            if (beforeChildName == Template.MANY_CHILD_NODES
                    || (beforeChildName != Template.ZERO_CHILD_NODES
                        && !beforeNode.exists())) {
                for (ChildNodeEntry entry
                        : beforeTemplate.getChildNodeEntries(beforeId)) {
                    if (!afterChildName.equals(entry.getName())) {
                        if (!diff.childNodeDeleted(
                                entry.getName(), entry.getNodeState())) {
                            return false;
                        }
                    }
                }
            }
        } else if (beforeChildName == Template.ZERO_CHILD_NODES) {
            for (ChildNodeEntry entry
                    : afterTemplate.getChildNodeEntries(afterId)) {
                if (!diff.childNodeAdded(
                        entry.getName(), entry.getNodeState())) {
                    return false;
                }
            }
        } else if (beforeChildName != Template.MANY_CHILD_NODES) {
            boolean beforeChildRemoved = true;
            NodeState beforeChild =
                    beforeTemplate.getChildNode(beforeChildName, beforeId);
            for (ChildNodeEntry entry
                    : afterTemplate.getChildNodeEntries(afterId)) {
                String childName = entry.getName();
                NodeState afterChild = entry.getNodeState();
                if (beforeChildName.equals(childName)) {
                    beforeChildRemoved = false;
                    if (!fastEquals(afterChild, beforeChild)
                            && !diff.childNodeChanged(
                                    childName, beforeChild, afterChild)) {
                        return false;
                    }
                } else if (!diff.childNodeAdded(childName, afterChild)) {
                    return false;
                }
            }
            if (beforeChildRemoved) {
                if (!diff.childNodeDeleted(beforeChildName, beforeChild)) {
                    return false;
                }
            }
        } else {
            // many -> many: delegate to the map records' own diff.
            MapRecord afterMap = afterTemplate.getChildNodeMap(afterId);
            MapRecord beforeMap = beforeTemplate.getChildNodeMap(beforeId);
            return afterMap.compare(beforeMap, diff);
        }

        return true;
    }

    // Reports a property change to the diff handler; null means absent on
    // that side. Returns false as soon as the handler aborts.
    private static boolean compareProperties(
            PropertyState before, PropertyState after, NodeStateDiff diff) {
        if (before == null) {
            return after == null || diff.propertyAdded(after);
        } else if (after == null) {
            return diff.propertyDeleted(before);
        } else {
            return before.equals(after) || diff.propertyChanged(before, after);
        }
    }

    //------------------------------------------------------------< Object >--

    /**
     * Indicates whether two {@link NodeState} instances are equal to each
     * other. A return value of {@code true} clearly means that the instances
     * are equal, while a return value of {@code false} doesn't necessarily mean
     * the instances are not equal. These "false negatives" are an
     * implementation detail and callers cannot rely on them being stable.
     *
     * @param a
     *         the first {@link NodeState} instance
     * @param b
     *         the second {@link NodeState} instance
     * @return {@code true}, if these two instances are equal.
     */
    public static boolean fastEquals(NodeState a, NodeState b) {
        if (Record.fastEquals(a, b)) {
            return true;
        }

        if (a instanceof SegmentNodeState && b instanceof SegmentNodeState
                && ((SegmentNodeState) a).getStableId().equals(((SegmentNodeState) b).getStableId())) {
            return true;
        }

        return false;
    }

    @Override
    public int hashCode() {
        // Consistent with equals(): equal stable ids imply equal states.
        return getStableId().hashCode();
    }

    @Override
    public boolean equals(Object object) {
        if (object instanceof SegmentNodeState) {
            SegmentNodeState that = (SegmentNodeState) object;
            if (fastEquals(this, that)) {
                return true;
            } else {
                // Fall back to a structural comparison via the templates.
                Template template = getTemplate();
                return template.equals(that.getTemplate())
                        && template.compare(getRecordId(), that.getRecordId());
            }
        } else {
            return object instanceof NodeState
                    && AbstractNodeState.equals(this, (NodeState) object);
        }
    }

    @Override
    public String toString() {
        return AbstractNodeState.toString(this);
    }

}
apache-2.0
haint/jgentle
src/org/jgentleframework/core/intercept/JGentleFastClass.java
1617
/*
 * Copyright 2007-2009 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * Project: JGentleFramework
 */
package org.jgentleframework.core.intercept;

import net.sf.cglib.reflect.FastClass;

/**
 * Gives JGentle classes custom names.
 *
 * @author LE QUOC CHUNG - mailto: <a
 *         href="mailto:skydunkpro@yahoo.com">skydunkpro@yahoo.com</a>
 * @date Aug 1, 2008
 */
public class JGentleFastClass {
	/**
	 * Creates a {@link FastClass} for the given type, using the type's own
	 * class loader.
	 *
	 * @param type
	 *            the type to generate a fast class for
	 * @return the generated {@link FastClass}
	 */
	public static FastClass create(Class<?> type) {
		ClassLoader typeLoader = type.getClassLoader();
		return create(typeLoader, type);
	}

	/**
	 * Creates a {@link FastClass} for the given type, loading the generated
	 * class through the supplied class loader and naming it with the JGentle
	 * naming policy.
	 *
	 * @param loader
	 *            the class loader used to define the generated class
	 * @param type
	 *            the type to generate a fast class for
	 * @return the generated {@link FastClass}
	 */
	public static FastClass create(ClassLoader loader, Class<?> type) {
		FastClass.Generator gen = new FastClass.Generator();
		gen.setClassLoader(loader);
		gen.setNamingPolicy(new JGentleNamingPolicy());
		gen.setType(type);
		return gen.create();
	}
}
apache-2.0
henakamaMSFT/elasticsearch
core/src/test/java/org/elasticsearch/get/GetActionIT.java
53425
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.get; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.get.GetRequestBuilder; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.get.MultiGetRequestBuilder; import org.elasticsearch.action.get.MultiGetResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.test.ESIntegTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashSet; import java.util.Set; import static 
java.util.Collections.singleton;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;

/**
 * Integration tests for the GET and multi-GET APIs: realtime vs. non-realtime
 * gets, stored-field retrieval, _source filtering, versioned gets, and
 * generated/metadata field behavior before and after refresh/flush.
 */
public class GetActionIT extends ESIntegTestCase {

    // Verifies basic GET behavior against both the index name and an alias:
    // a realtime get sees an indexed doc before refresh while a non-realtime
    // get does not; stored-field and _source filtering work; updates and a
    // delete are reflected by subsequent gets.
    public void testSimpleGet() {
        assertAcked(prepareCreate("test")
            .addMapping("type1", "field1", "type=keyword,store=true", "field2", "type=keyword,store=true")
            // refresh disabled so docs are only visible to realtime gets until an explicit flush/refresh
            .setSettings(Settings.builder().put("index.refresh_interval", -1))
            .addAlias(new Alias("alias")));
        ensureGreen();
        GetResponse response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(false));
        logger.info("--> index doc 1");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();
        logger.info("--> non realtime get 1");
        // non-realtime get bypasses the translog, so the unrefreshed doc is invisible
        response = client().prepareGet(indexOrAlias(), "type1", "1").setRealtime(false).get();
        assertThat(response.isExists(), equalTo(false));
        logger.info("--> realtime get 1");
        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
        logger.info("--> realtime get 1 (no source, implicit)");
        // empty stored-fields list implicitly disables _source in the response
        response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields(Strings.EMPTY_ARRAY).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        Set<String> fields = new HashSet<>(response.getFields().keySet());
        assertThat(fields, equalTo(Collections.<String>emptySet()));
        assertThat(response.getSourceAsBytes(), nullValue());
        logger.info("--> realtime get 1 (no source, explicit)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setFetchSource(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        fields = new HashSet<>(response.getFields().keySet());
        assertThat(fields, equalTo(Collections.<String>emptySet()));
        assertThat(response.getSourceAsBytes(), nullValue());
        logger.info("--> realtime get 1 (no type)");
        // null type matches any type
        response = client().prepareGet(indexOrAlias(), null, "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
        logger.info("--> realtime fetch of field");
        // fetching stored fields suppresses _source unless explicitly requested
        response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytes(), nullValue());
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());
        logger.info("--> realtime fetch of field & source");
        response = client().prepareGet(indexOrAlias(), "type1", "1")
            .setStoredFields("field1").setFetchSource("field1", null).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap(), hasKey("field1"));
        assertThat(response.getSourceAsMap(), not(hasKey("field2")));
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());
        logger.info("--> flush the index, so we load it from it");
        flush();
        // Same sequence of checks, now served from the Lucene index rather than the translog.
        logger.info("--> realtime get 1 (loaded from index)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
        logger.info("--> non realtime get 1 (loaded from index)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2"));
        logger.info("--> realtime fetch of field (loaded from index)");
        response = client().prepareGet(indexOrAlias(), "type1", "1").setStoredFields("field1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytes(), nullValue());
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());
        logger.info("--> realtime fetch of field & source (loaded from index)");
        response = client().prepareGet(indexOrAlias(), "type1", "1")
            .setStoredFields("field1").setFetchSource(true).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsBytes(), not(nullValue()));
        assertThat(response.getField("field1").getValues().get(0).toString(), equalTo("value1"));
        assertThat(response.getField("field2"), nullValue());
        logger.info("--> update doc 1");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1_1", "field2", "value2_1").get();
        logger.info("--> realtime get 1");
        // realtime get reflects the update immediately, without a refresh
        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_1"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_1"));
        logger.info("--> update doc 1 again");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1_2", "field2", "value2_2").get();
        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getSourceAsMap().get("field1").toString(), equalTo("value1_2"));
        assertThat(response.getSourceAsMap().get("field2").toString(), equalTo("value2_2"));
        DeleteResponse deleteResponse = client().prepareDelete("test", "type1", "1").get();
        assertEquals(DocWriteResponse.Result.DELETED, deleteResponse.getResult());
        // delete is also visible to a realtime get right away
        response = client().prepareGet(indexOrAlias(), "type1", "1").get();
        assertThat(response.isExists(), equalTo(false));
    }

    // Randomly targets the concrete index or its alias so every test exercises both paths.
    private static String indexOrAlias() {
        return randomBoolean() ?
"test" : "alias"; } public void testSimpleMultiGet() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .addMapping("type1", "field", "type=keyword,store=true") .setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "type1", "1").get(); assertThat(response.getResponses().length, equalTo(1)); assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false)); for (int i = 0; i < 10; i++) { client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get(); } response = client().prepareMultiGet() .add(indexOrAlias(), "type1", "1") .add(indexOrAlias(), "type1", "15") .add(indexOrAlias(), "type1", "3") .add(indexOrAlias(), "type1", "9") .add(indexOrAlias(), "type1", "11").get(); assertThat(response.getResponses().length, equalTo(5)); assertThat(response.getResponses()[0].getId(), equalTo("1")); assertThat(response.getResponses()[0].getIndex(), equalTo("test")); assertThat(response.getResponses()[0].getResponse().getIndex(), equalTo("test")); assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true)); assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1")); assertThat(response.getResponses()[1].getId(), equalTo("15")); assertThat(response.getResponses()[1].getIndex(), equalTo("test")); assertThat(response.getResponses()[1].getResponse().getIndex(), equalTo("test")); assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(false)); assertThat(response.getResponses()[2].getId(), equalTo("3")); assertThat(response.getResponses()[2].getIndex(), equalTo("test")); assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true)); assertThat(response.getResponses()[3].getId(), equalTo("9")); assertThat(response.getResponses()[3].getIndex(), equalTo("test")); 
assertThat(response.getResponses()[3].getResponse().getIndex(), equalTo("test")); assertThat(response.getResponses()[3].getResponse().isExists(), equalTo(true)); assertThat(response.getResponses()[4].getId(), equalTo("11")); assertThat(response.getResponses()[4].getIndex(), equalTo("test")); assertThat(response.getResponses()[4].getResponse().getIndex(), equalTo("test")); assertThat(response.getResponses()[4].getResponse().isExists(), equalTo(false)); // multi get with specific field response = client().prepareMultiGet() .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").storedFields("field")) .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "3").storedFields("field")) .get(); assertThat(response.getResponses().length, equalTo(2)); assertThat(response.getResponses()[0].getResponse().getSourceAsBytes(), nullValue()); assertThat(response.getResponses()[0].getResponse().getField("field").getValues().get(0).toString(), equalTo("value1")); } public void testGetDocWithMultivaluedFields() throws Exception { String mapping1 = XContentFactory.jsonBuilder().startObject().startObject("type1") .startObject("properties") .startObject("field").field("type", "text").field("store", true).endObject() .endObject() .endObject().endObject().string(); String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type2") .startObject("properties") .startObject("field").field("type", "text").field("store", true).endObject() .endObject() .endObject().endObject().string(); assertAcked(prepareCreate("test") .addMapping("type1", mapping1) .addMapping("type2", mapping2) .setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); GetResponse response = client().prepareGet("test", "type1", "1").get(); assertThat(response.isExists(), equalTo(false)); response = client().prepareGet("test", "type2", "1").get(); assertThat(response.isExists(), equalTo(false)); client().prepareIndex("test", "type1", "1") 
.setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get(); client().prepareIndex("test", "type2", "1") .setSource(jsonBuilder().startObject().array("field", "1", "2").endObject()).get(); response = client().prepareGet("test", "type1", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getType(), equalTo("type1")); Set<String> fields = new HashSet<>(response.getFields().keySet()); assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); response = client().prepareGet("test", "type2", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getType(), equalTo("type2")); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); // Now test values being fetched from stored fields. 
refresh(); response = client().prepareGet("test", "type1", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); response = client().prepareGet("test", "type2", "1").setStoredFields("field").get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); fields = new HashSet<>(response.getFields().keySet()); assertThat(fields, equalTo(singleton("field"))); assertThat(response.getFields().get("field").getValues().size(), equalTo(2)); assertThat(response.getFields().get("field").getValues().get(0).toString(), equalTo("1")); assertThat(response.getFields().get("field").getValues().get(1).toString(), equalTo("2")); } public void testGetWithVersion() { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .setSettings(Settings.builder().put("index.refresh_interval", -1))); ensureGreen(); GetResponse response = client().prepareGet("test", "type1", "1").get(); assertThat(response.isExists(), equalTo(false)); logger.info("--> index doc 1"); client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get(); // From translog: response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getVersion(), equalTo(1L)); response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get(); assertThat(response.isExists(), equalTo(true)); assertThat(response.getId(), equalTo("1")); assertThat(response.getVersion(), 
equalTo(1L));
        // requesting a future version must conflict
        try {
            client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
            fail();
        } catch (VersionConflictEngineException e) {
            //all good
        }
        // From Lucene index:
        refresh();
        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(1L));
        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(1L));
        try {
            client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
            fail();
        } catch (VersionConflictEngineException e) {
            //all good
        }
        logger.info("--> index doc 1 again, so increasing the version");
        client().prepareIndex("test", "type1", "1").setSource("field1", "value1", "field2", "value2").get();
        // From translog:
        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2L));
        // now the OLD version must conflict
        try {
            client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).get();
            fail();
        } catch (VersionConflictEngineException e) {
            //all good
        }
        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2L));
        // From Lucene index:
        refresh();
        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2L));
        try {
            client().prepareGet(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
            fail();
        } catch (VersionConflictEngineException e) {
            //all good
        }
        response = client().prepareGet(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
        assertThat(response.isExists(), equalTo(true));
        assertThat(response.getId(), equalTo("1"));
        assertThat(response.getIndex(), equalTo("test"));
        assertThat(response.getVersion(), equalTo(2L));
    }

    // Versioned multi-GET: per-item version checks succeed/fail independently;
    // a conflicting item carries a failure (with id and message) while the
    // others still return documents. Checked from translog and from the index.
    public void testMultiGetWithVersion() throws Exception {
        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
            .setSettings(Settings.builder().put("index.refresh_interval", -1)));
        ensureGreen();
        MultiGetResponse response = client().prepareMultiGet().add(indexOrAlias(), "type1", "1").get();
        assertThat(response.getResponses().length, equalTo(1));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));
        for (int i = 0; i < 3; i++) {
            client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get();
        }
        // Version from translog
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(Versions.MATCH_ANY))
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(1))
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(2))
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("1"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[1].getId(), equalTo("1"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure(), nullValue());
        assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[1].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        // [2] requested version 2 but the doc is at version 1 -> per-item failure
        assertThat(response.getResponses()[2].getFailure(), notNullValue());
        assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
        assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("[type1][1]: version conflict"));
        assertThat(response.getResponses()[2].getFailure().getFailure(), instanceOf(VersionConflictEngineException.class));
        //Version from Lucene index
        refresh();
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(Versions.MATCH_ANY))
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(1))
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "1").version(2))
            .setRealtime(false)
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("1"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[1].getId(), equalTo("1"));
        assertThat(response.getResponses()[1].getFailure(), nullValue());
        assertThat(response.getResponses()[1].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[1].getResponse().getSourceAsMap().get("field").toString(), equalTo("value1"));
        assertThat(response.getResponses()[2].getFailure(), notNullValue());
        assertThat(response.getResponses()[2].getFailure().getId(), equalTo("1"));
        assertThat(response.getResponses()[2].getFailure().getMessage(), startsWith("[type1][1]: version conflict"));
        assertThat(response.getResponses()[2].getFailure().getFailure(), instanceOf(VersionConflictEngineException.class));
        // re-index docs 0..2, bumping doc "2" to version 2
        for (int i = 0; i < 3; i++) {
            client().prepareIndex("test", "type1", Integer.toString(i)).setSource("field", "value" + i).get();
        }
        // Version from translog
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(Versions.MATCH_ANY))
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(1))
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(2))
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("2"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
        // [1] stale version 1 now conflicts
        assertThat(response.getResponses()[1].getFailure(), notNullValue());
        assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("[type1][2]: version conflict"));
        assertThat(response.getResponses()[2].getId(), equalTo("2"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getFailure(), nullValue());
        assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[2].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
        //Version from Lucene index
        refresh();
        response = client().prepareMultiGet()
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(Versions.MATCH_ANY))
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(1))
            .add(new MultiGetRequest.Item(indexOrAlias(), "type1", "2").version(2))
            .setRealtime(false)
            .get();
        assertThat(response.getResponses().length, equalTo(3));
        // [0] version doesn't matter, which is the default
        assertThat(response.getResponses()[0].getFailure(), nullValue());
        assertThat(response.getResponses()[0].getId(), equalTo("2"));
        assertThat(response.getResponses()[0].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[0].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
        assertThat(response.getResponses()[1].getFailure(), notNullValue());
        assertThat(response.getResponses()[1].getFailure().getId(), equalTo("2"));
        assertThat(response.getResponses()[1].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[1].getFailure().getMessage(), startsWith("[type1][2]: version conflict"));
        assertThat(response.getResponses()[2].getId(), equalTo("2"));
        assertThat(response.getResponses()[2].getIndex(), equalTo("test"));
        assertThat(response.getResponses()[2].getFailure(), nullValue());
        assertThat(response.getResponses()[2].getResponse().isExists(), equalTo(true));
        assertThat(response.getResponses()[2].getResponse().getSourceAsMap().get("field").toString(), equalTo("value2"));
    }

    // Metadata fields (_routing, _parent) come back flagged as metadata while a
    // regular stored field does not — both before (translog) and after flush.
    public void testGetFieldsMetaData() throws Exception {
        assertAcked(prepareCreate("test")
            .addMapping("parent")
            .addMapping("my-type1", "_parent", "type=parent", "field1", "type=keyword,store=true")
            .addAlias(new Alias("alias"))
            .setSettings(Settings.builder().put("index.refresh_interval", -1)));
        client().prepareIndex("test", "my-type1", "1")
            .setRouting("1")
            .setParent("parent_1")
            .setSource(jsonBuilder().startObject().field("field1", "value").endObject())
            .get();
        GetResponse getResponse = client().prepareGet(indexOrAlias(),
"my-type1", "1") .setRouting("1") .setStoredFields("field1") .get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false)); assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value")); assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true)); assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1")); assertThat(getResponse.getField("_parent").isMetadataField(), equalTo(true)); assertThat(getResponse.getField("_parent").getValue().toString(), equalTo("parent_1")); flush(); getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1") .setStoredFields("field1") .setRouting("1") .get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField("field1").isMetadataField(), equalTo(false)); assertThat(getResponse.getField("field1").getValue().toString(), equalTo("value")); assertThat(getResponse.getField("_routing").isMetadataField(), equalTo(true)); assertThat(getResponse.getField("_routing").getValue().toString(), equalTo("1")); assertThat(getResponse.getField("_parent").isMetadataField(), equalTo(true)); assertThat(getResponse.getField("_parent").getValue().toString(), equalTo("parent_1")); } public void testGetFieldsNonLeafField() throws Exception { assertAcked(prepareCreate("test").addAlias(new Alias("alias")) .addMapping("my-type1", jsonBuilder().startObject().startObject("my-type1").startObject("properties") .startObject("field1").startObject("properties") .startObject("field2").field("type", "text").endObject() .endObject().endObject() .endObject().endObject().endObject()) .setSettings(Settings.builder().put("index.refresh_interval", -1))); client().prepareIndex("test", "my-type1", "1") .setSource(jsonBuilder().startObject().startObject("field1").field("field2", "value1").endObject().endObject()) .get(); IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> 
client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get()); assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field")); flush(); exc = expectThrows(IllegalArgumentException.class, () -> client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("field1").get()); assertThat(exc.getMessage(), equalTo("field [field1] isn't a leaf field")); } public void testGetFieldsComplexField() throws Exception { assertAcked(prepareCreate("my-index") .setSettings(Settings.builder().put("index.refresh_interval", -1)) .addMapping("my-type2", jsonBuilder().startObject().startObject("my-type2").startObject("properties") .startObject("field1").field("type", "object").startObject("properties") .startObject("field2").field("type", "object").startObject("properties") .startObject("field3").field("type", "object").startObject("properties") .startObject("field4").field("type", "text").field("store", true) .endObject().endObject() .endObject().endObject() .endObject().endObject().endObject() .endObject().endObject().endObject())); BytesReference source = jsonBuilder().startObject() .startArray("field1") .startObject() .startObject("field2") .startArray("field3") .startObject() .field("field4", "value1") .endObject() .endArray() .endObject() .endObject() .startObject() .startObject("field2") .startArray("field3") .startObject() .field("field4", "value2") .endObject() .endArray() .endObject() .endObject() .endArray() .endObject().bytes(); logger.info("indexing documents"); client().prepareIndex("my-index", "my-type1", "1").setSource(source).get(); client().prepareIndex("my-index", "my-type2", "1").setSource(source).get(); logger.info("checking real time retrieval"); String field = "field1.field2.field3.field4"; GetResponse getResponse = client().prepareGet("my-index", "my-type1", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); 
assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); getResponse = client().prepareGet("my-index", "my-type2", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); logger.info("waiting for recoveries to complete"); // Flush fails if shard has ongoing recoveries, make sure the cluster is settled down ensureGreen(); logger.info("flushing"); FlushResponse flushResponse = client().admin().indices().prepareFlush("my-index").setForce(true).get(); if (flushResponse.getSuccessfulShards() == 0) { StringBuilder sb = new StringBuilder("failed to flush at least one shard. 
total shards [") .append(flushResponse.getTotalShards()).append("], failed shards: [").append(flushResponse.getFailedShards()).append("]"); for (ShardOperationFailedException failure: flushResponse.getShardFailures()) { sb.append("\nShard failure: ").append(failure); } fail(sb.toString()); } logger.info("checking post-flush retrieval"); getResponse = client().prepareGet("my-index", "my-type1", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); getResponse = client().prepareGet("my-index", "my-type2", "1").setStoredFields(field).get(); assertThat(getResponse.isExists(), equalTo(true)); assertThat(getResponse.getField(field).isMetadataField(), equalTo(false)); assertThat(getResponse.getField(field).getValues().size(), equalTo(2)); assertThat(getResponse.getField(field).getValues().get(0).toString(), equalTo("value1")); assertThat(getResponse.getField(field).getValues().get(1).toString(), equalTo("value2")); } public void testGetAllField() throws Exception { assertAcked(prepareCreate("test") .addAlias(new Alias("alias")) .addMapping("my-type1", jsonBuilder() .startObject() .startObject("my-type1") .startObject("_all") .field("store", true) .endObject() .startObject("properties") .startObject("some_field") .field("type", "text") .endObject() .endObject() .endObject() .endObject())); index("test", "my-type1", "1", "some_field", "some text"); refresh(); GetResponse getResponse = client().prepareGet(indexOrAlias(), "my-type1", "1").setStoredFields("_all").get(); assertNotNull(getResponse.getField("_all").getValue()); assertThat(getResponse.getField("_all").getValue().toString(), equalTo("some text")); } public void 
testUngeneratedFieldsThatAreNeverStored() throws IOException {
        // huge flush threshold + disabled refresh keep the doc translog-only until we refresh/flush
        String createIndexSource = "{\n" +
            " \"settings\": {\n" +
            " \"index.translog.flush_threshold_size\": \"1pb\",\n" +
            " \"refresh_interval\": \"-1\"\n" +
            " },\n" +
            " \"mappings\": {\n" +
            " \"doc\": {\n" +
            " \"properties\": {\n" +
            " \"suggest\": {\n" +
            " \"type\": \"completion\"\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}";
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
        ensureGreen();
        String doc = "{\n" +
            " \"suggest\": {\n" +
            " \"input\": [\n" +
            " \"Nevermind\",\n" +
            " \"Nirvana\"\n" +
            " ]\n" +
            " }\n" +
            "}";
        index("test", "doc", "1", doc);
        // a completion field is never returned as a stored field, in any engine state
        String[] fieldsList = {"suggest"};
        // before refresh - document is only in translog
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
        refresh();
        // after refresh - document is in translog and also indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
    }

    // _parent is always retrievable as a field, in every engine state.
    public void testUngeneratedFieldsThatAreAlwaysStored() throws IOException {
        String createIndexSource = "{\n" +
            " \"settings\": {\n" +
            " \"index.translog.flush_threshold_size\": \"1pb\",\n" +
            " \"refresh_interval\": \"-1\"\n" +
            " },\n" +
            " \"mappings\": {\n" +
            " \"parentdoc\": {\n" +
            " },\n" +
            " \"doc\": {\n" +
            " \"_parent\": {\n" +
            " \"type\": \"parentdoc\"\n" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}";
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
        ensureGreen();
        client().prepareIndex("test", "doc").setId("1").setSource("{}").setParent("1").get();
        String[] fieldsList = {"_parent"};
        // before refresh - document is only in translog
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
        refresh();
        // after refresh - document is in translog and also indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
    }

    // _routing (not part of _source) is always retrievable, in every engine state.
    public void testUngeneratedFieldsNotPartOfSourceStored() throws IOException {
        String createIndexSource = "{\n" +
            " \"settings\": {\n" +
            " \"index.translog.flush_threshold_size\": \"1pb\",\n" +
            " \"refresh_interval\": \"-1\"\n" +
            " }\n" +
            "}";
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
        ensureGreen();
        String doc = "{\n" +
            " \"text\": \"some text.\"\n" +
            "}\n";
        client().prepareIndex("test", "doc").setId("1").setSource(doc).setRouting("1").get();
        String[] fieldsList = {"_routing"};
        // before refresh - document is only in translog
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
        refresh();
        // after refresh - document is in translog and also indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList, "1");
    }

    // Unstored generated string fields (_all, _field_names) are never returned.
    public void testGeneratedStringFieldsUnstored() throws IOException {
        indexSingleDocumentWithStringFieldsGeneratedFromText(false, randomBoolean());
        String[] fieldsList = {"_all", "_field_names"};
        // before refresh - document is only in translog
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
        refresh();
        // after refresh - document is in translog and also indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
    }

    // A stored _all is returned, but _field_names is never stored regardless.
    public void testGeneratedStringFieldsStored() throws IOException {
        indexSingleDocumentWithStringFieldsGeneratedFromText(true, randomBoolean());
        String[] fieldsList = {"_all"};
        String[] alwaysNotStoredFieldsList = {"_field_names"};
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
        assertGetFieldsNull(indexOrAlias(), "doc", "1", alwaysNotStoredFieldsList);
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
        assertGetFieldsNull(indexOrAlias(), "doc", "1", alwaysNotStoredFieldsList);
    }

    // Indexes one doc into an index whose _all storage and _source can be toggled.
    void indexSingleDocumentWithStringFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
        String storedString = stored ? "true" : "false";
        String createIndexSource = "{\n" +
            " \"settings\": {\n" +
            " \"index.translog.flush_threshold_size\": \"1pb\",\n" +
            " \"refresh_interval\": \"-1\"\n" +
            " },\n" +
            " \"mappings\": {\n" +
            " \"doc\": {\n" +
            " \"_source\" : {\"enabled\" : " + sourceEnabled + "}," +
            " \"_all\" : {\"enabled\" : true, \"store\":\"" + storedString + "\" }" +
            " }\n" +
            " }\n" +
            "}";
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
        ensureGreen();
        String doc = "{\n" +
            " \"text1\": \"some text.\"\n," +
            " \"text2\": \"more text.\"\n" +
            "}\n";
        index("test", "doc", "1", doc);
    }

    // Unstored token_count fields (top-level and multi-field) are never returned.
    public void testGeneratedNumberFieldsUnstored() throws IOException {
        indexSingleDocumentWithNumericFieldsGeneratedFromText(false, randomBoolean());
        String[] fieldsList = {"token_count", "text.token_count"};
        // before refresh - document is only in translog
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
        refresh();
        // after refresh - document is in translog and also indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
    }

    // Stored token_count fields are returned both before and after flush.
    public void testGeneratedNumberFieldsStored() throws IOException {
        indexSingleDocumentWithNumericFieldsGeneratedFromText(true, randomBoolean());
        String[] fieldsList = {"token_count", "text.token_count"};
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
        flush();
        // after flush - document is no longer in the translog - only indexed
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
    }

    // Indexes one doc with token_count fields whose storage and _source can be toggled.
    void indexSingleDocumentWithNumericFieldsGeneratedFromText(boolean stored, boolean sourceEnabled) {
        String storedString = stored ? "true" : "false";
        String createIndexSource = "{\n" +
            " \"settings\": {\n" +
            " \"index.translog.flush_threshold_size\": \"1pb\",\n" +
            " \"refresh_interval\": \"-1\"\n" +
            " },\n" +
            " \"mappings\": {\n" +
            " \"doc\": {\n" +
            " \"_source\" : {\"enabled\" : " + sourceEnabled + "}," +
            " \"properties\": {\n" +
            " \"token_count\": {\n" +
            " \"type\": \"token_count\",\n" +
            " \"analyzer\": \"standard\",\n" +
            " \"store\": \"" + storedString + "\"" +
            " },\n" +
            " \"text\": {\n" +
            " \"type\": \"text\",\n" +
            " \"fields\": {\n" +
            " \"token_count\": {\n" +
            " \"type\": \"token_count\",\n" +
            " \"analyzer\": \"standard\",\n" +
            " \"store\": \"" + storedString + "\"" +
            " }\n" +
            " }\n" +
            " }" +
            " }\n" +
            " }\n" +
            " }\n" +
            "}";
        assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSource(createIndexSource));
        ensureGreen();
        String doc = "{\n" +
            " \"token_count\": \"A text with five words.\",\n" +
            " \"text\": \"A text with five words.\"\n" +
            "}\n";
        index("test", "doc", "1", doc);
    }

    private void assertGetFieldsAlwaysWorks(String index, String type, String docId, String[] fields) {
        assertGetFieldsAlwaysWorks(index, type, docId, fields, null);
    }

    // Each field is checked twice to exercise any caching between identical gets.
    private void assertGetFieldsAlwaysWorks(String index, String type, String docId, String[] fields, @Nullable String routing) {
        for (String field : fields) {
            assertGetFieldWorks(index, type, docId, field, routing);
            assertGetFieldWorks(index, type, docId, field, routing);
        }
    }

    // Asserts the field is present via both single GET and multi-GET.
    private void assertGetFieldWorks(String index, String type, String docId, String field, @Nullable String routing) {
        GetResponse response = getDocument(index, type, docId, field, routing);
        assertThat(response.getId(), equalTo(docId));
        assertTrue(response.isExists());
        assertNotNull(response.getField(field));
        response = multiGetDocument(index, type, docId, field, routing);
        assertThat(response.getId(), equalTo(docId));
        assertTrue(response.isExists());
        assertNotNull(response.getField(field));
    }

    // Asserts fetching the field fails (refresh required) for both GET and multi-GET.
    private void assertGetFieldException(String index, String type, String docId, String field) {
        try {
            client().prepareGet().setIndex(index).setType(type).setId(docId).setStoredFields(field);
            fail();
        } catch (ElasticsearchException e) {
            assertTrue(e.getMessage().contains("You can only get this field after refresh() has been called."));
        }
        MultiGetResponse multiGetResponse = client().prepareMultiGet().add(new MultiGetRequest.Item(index, type, docId).storedFields(field)).get();
        assertNull(multiGetResponse.getResponses()[0].getResponse());
        assertTrue(multiGetResponse.getResponses()[0].getFailure().getMessage().contains("You can only get this field after refresh() has been called."));
    }

    protected void assertGetFieldsNull(String index, String type, String docId, String[] fields) {
        assertGetFieldsNull(index, type, docId, fields, null);
    }

    protected void assertGetFieldsNull(String index, String type, String docId, String[] fields, @Nullable String routing) {
        for (String field : fields) {
            assertGetFieldNull(index, type, docId, field, routing);
        }
    }

    protected void assertGetFieldsAlwaysNull(String index, String type, String docId, String[] fields) {
        assertGetFieldsAlwaysNull(index, type, docId, fields, null);
    }

    // Checked twice per field, mirroring assertGetFieldsAlwaysWorks.
    protected void assertGetFieldsAlwaysNull(String index, String type, String docId, String[] fields, @Nullable String routing) {
        for (String field : fields) {
            assertGetFieldNull(index, type, docId, field, routing);
            assertGetFieldNull(index, type, docId, field, routing);
        }
    }

    // Asserts the doc exists but the field is absent, via both GET and multi-GET.
    protected void assertGetFieldNull(String index, String type, String docId, String field, @Nullable String routing) {
        //for get
        GetResponse response = getDocument(index, type, docId, field, routing);
        assertTrue(response.isExists());
        assertNull(response.getField(field));
        assertThat(response.getId(), equalTo(docId));
        //same for multi get
        response = multiGetDocument(index, type, docId,
field, routing); assertNull(response.getField(field)); assertThat(response.getId(), equalTo(docId)); assertTrue(response.isExists()); } private GetResponse multiGetDocument(String index, String type, String docId, String field, @Nullable String routing) { MultiGetRequest.Item getItem = new MultiGetRequest.Item(index, type, docId).storedFields(field); if (routing != null) { getItem.routing(routing); } MultiGetRequestBuilder multiGetRequestBuilder = client().prepareMultiGet().add(getItem); MultiGetResponse multiGetResponse = multiGetRequestBuilder.get(); assertThat(multiGetResponse.getResponses().length, equalTo(1)); return multiGetResponse.getResponses()[0].getResponse(); } private GetResponse getDocument(String index, String type, String docId, String field, @Nullable String routing) { GetRequestBuilder getRequestBuilder = client().prepareGet().setIndex(index).setType(type).setId(docId).setStoredFields(field); if (routing != null) { getRequestBuilder.setRouting(routing); } return getRequestBuilder.get(); } }
apache-2.0
smgoller/geode
geode-pulse/src/main/java/org/apache/geode/tools/pulse/internal/service/MemberRegionsService.java
4769
/* * * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. * */ package org.apache.geode.tools.pulse.internal.service; import static org.apache.geode.tools.pulse.internal.data.PulseConstants.FOUR_PLACE_DECIMAL_FORMAT; import static org.apache.geode.tools.pulse.internal.util.NameUtil.makeCompliantName; import javax.servlet.http.HttpServletRequest; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.ObjectNode; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.springframework.stereotype.Service; import org.apache.geode.tools.pulse.internal.data.Cluster; import org.apache.geode.tools.pulse.internal.data.Repository; /** * Class MemberRegionsService * * This class contains implementations of getting Member's Regions details. 
* * @since GemFire version 7.5 */ @Component @Service("MemberRegions") @Scope("singleton") public class MemberRegionsService implements PulseService { private final ObjectMapper mapper = new ObjectMapper(); // String constants used for forming a json response private static final String NAME = "name"; private static final String ENTRY_SIZE = "entrySize"; private static final String DISC_STORE_NAME = "diskStoreName"; private static final String DISC_SYNCHRONOUS = "diskSynchronous"; private final Repository repository; @Autowired public MemberRegionsService(Repository repository) { this.repository = repository; } @Override public ObjectNode execute(final HttpServletRequest request) throws Exception { // get cluster object Cluster cluster = repository.getCluster(); // json object to be sent as response ObjectNode responseJSON = mapper.createObjectNode(); JsonNode requestDataJSON = mapper.readTree(request.getParameter("pulseData")); String memberName = requestDataJSON.get("MemberRegions").get("memberName").textValue(); Cluster.Member clusterMember = cluster.getMember(makeCompliantName(memberName)); if (clusterMember != null) { responseJSON.put("memberId", clusterMember.getId()); responseJSON.put(NAME, clusterMember.getName()); responseJSON.put("host", clusterMember.getHost()); // member's regions Cluster.Region[] memberRegions = clusterMember.getMemberRegionsList(); ArrayNode regionsListJson = mapper.createArrayNode(); for (Cluster.Region memberRegion : memberRegions) { ObjectNode regionJSON = mapper.createObjectNode(); regionJSON.put(NAME, memberRegion.getName()); regionJSON.put("fullPath", memberRegion.getFullPath()); regionJSON.put("type", memberRegion.getRegionType()); regionJSON.put("entryCount", memberRegion.getSystemRegionEntryCount()); long entrySize = memberRegion.getEntrySize(); String entrySizeInMB = FOUR_PLACE_DECIMAL_FORMAT.format(entrySize / (1024f * 1024f)); if (entrySize < 0) { regionJSON.put(ENTRY_SIZE, VALUE_NA); } else { regionJSON.put(ENTRY_SIZE, 
entrySizeInMB); } regionJSON.put("scope", memberRegion.getScope()); String diskStoreName = memberRegion.getDiskStoreName(); if (StringUtils.isNotBlank(diskStoreName)) { regionJSON.put(DISC_STORE_NAME, diskStoreName); regionJSON.put(DISC_SYNCHRONOUS, memberRegion.isDiskSynchronous()); } else { regionJSON.put(DISC_SYNCHRONOUS, VALUE_NA); regionJSON.put(DISC_STORE_NAME, ""); } regionJSON.put("gatewayEnabled", memberRegion.getWanEnabled()); regionsListJson.add(regionJSON); } responseJSON.set("memberRegions", regionsListJson); // response responseJSON.put("status", "Normal"); } // Send json response return responseJSON; } }
apache-2.0
tdopires/cJUnit-mc626
toStringBuilder/src/main/java/org/apache/commons/lang3/math/package-info.java
2112
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * <p>Extends {@link java.math} for business mathematical classes. * This package is intended for business mathematical use, not scientific use. * See <a href="http://commons.apache.org/math/">Commons Math</a> for a more complete set of mathematical classes. * These classes are immutable, and therefore thread-safe.</p> * * <p>Although Commons Math also exists, some basic mathematical functions are contained within Lang. * These include classes to a {@link org.apache.commons.lang3.math.Fraction} class, various utilities for random numbers, and the flagship class, {@link org.apache.commons.lang3.math.NumberUtils} which contains a handful of classic number functions.</p> * * <p>There are two aspects of this package that should be highlighted. * The first is {@link org.apache.commons.lang3.math.NumberUtils#createNumber(String)}, a method which does its best to convert a String into a {@link java.lang.Number} object. * You have no idea what type of Number it will return, so you should call the relevant <code>xxxValue</code> method when you reach the point of needing a number. 
* NumberUtils also has a related {@link org.apache.commons.lang3.math.NumberUtils#isNumber(String) isNumber(String)} method.</p> * * @since 2.0 * @version $Id$ */ package org.apache.commons.lang3.math;
apache-2.0
lgrill-pentaho/big-data-plugin
legacy/src/test/java/org/pentaho/hadoop/PluginPropertiesUtilTest.java
1763
/******************************************************************************* * * Pentaho Big Data * * Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com * ******************************************************************************* * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * ******************************************************************************/ package org.pentaho.hadoop; import static org.junit.Assert.*; import org.junit.Test; public class PluginPropertiesUtilTest { @Test public void getVersion() { // This test will only success if using classes produced by the ant build PluginPropertiesUtil util = new PluginPropertiesUtil(); assertNotNull( "Should never be null", util.getVersion() ); } @Test public void testGetVersionFromNonDefaultLocation() { PluginPropertiesUtil ppu = new PluginPropertiesUtil( "test-version.properties" ); String version = ppu.getVersion(); assertEquals( "X.Y.Z-TEST", version ); } @Test public void testGetVersionFromNonExistingLocation() { PluginPropertiesUtil ppu = new PluginPropertiesUtil( "non-existing-version.properties" ); String version = ppu.getVersion(); assertEquals( "@VERSION@", version ); } }
apache-2.0
JSDemos/android-sdk-20
src/jsr166/CopyOnWriteArraySetTest.java
10588
/*
 * Written by Doug Lea with assistance from members of JCP JSR-166
 * Expert Group and released to the public domain, as explained at
 * http://creativecommons.org/publicdomain/zero/1.0/
 * Other contributors include Andrew Wright, Jeffrey Hayes,
 * Pat Fisher, Mike Judd.
 */

package jsr166;

import junit.framework.*;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.Vector;
import java.util.concurrent.CopyOnWriteArraySet;

/**
 * JSR-166 TCK tests for {@link CopyOnWriteArraySet}. Constants such as SIZE, one/two/...,
 * m1, shouldThrow() and serialClone() come from the JSR166TestCase base class.
 */
public class CopyOnWriteArraySetTest extends JSR166TestCase {

    /** Returns a set populated with the integers 0..n-1, asserting size along the way. */
    static CopyOnWriteArraySet<Integer> populatedSet(int n) {
        CopyOnWriteArraySet<Integer> a = new CopyOnWriteArraySet<Integer>();
        assertTrue(a.isEmpty());
        for (int i = 0; i < n; i++)
            a.add(i);
        assertFalse(a.isEmpty());
        assertEquals(n, a.size());
        return a;
    }

    /** Returns a set populated with the given (distinct) elements in order. */
    static CopyOnWriteArraySet populatedSet(Integer[] elements) {
        CopyOnWriteArraySet<Integer> a = new CopyOnWriteArraySet<Integer>();
        assertTrue(a.isEmpty());
        for (int i = 0; i < elements.length; i++)
            a.add(elements[i]);
        assertFalse(a.isEmpty());
        assertEquals(elements.length, a.size());
        return a;
    }

    /**
     * Default-constructed set is empty
     */
    public void testConstructor() {
        CopyOnWriteArraySet a = new CopyOnWriteArraySet();
        assertTrue(a.isEmpty());
    }

    /**
     * Collection-constructed set holds all of its elements
     */
    public void testConstructor3() {
        Integer[] ints = new Integer[SIZE];
        // NOTE(review): only SIZE-1 slots are filled, so ints[SIZE-1] stays null and the
        // final contains() check below is contains(null) - verify this is intentional.
        for (int i = 0; i < SIZE-1; ++i)
            ints[i] = new Integer(i);
        CopyOnWriteArraySet a = new CopyOnWriteArraySet(Arrays.asList(ints));
        for (int i = 0; i < SIZE; ++i)
            assertTrue(a.contains(ints[i]));
    }

    /**
     * addAll adds each element from the given collection
     */
    public void testAddAll() {
        CopyOnWriteArraySet full = populatedSet(3);
        Vector v = new Vector();
        v.add(three);
        v.add(four);
        v.add(five);
        full.addAll(v);
        assertEquals(6, full.size());
    }

    /**
     * addAll adds each element from the given collection that did not
     * already exist in the set
     */
    public void testAddAll2() {
        CopyOnWriteArraySet full = populatedSet(3);
        Vector v = new Vector();
        v.add(three);
        v.add(four);
        v.add(one); // will not add this element
        full.addAll(v);
        assertEquals(5, full.size());
    }

    /**
     * add will not add the element if it already exists in the set
     */
    public void testAdd2() {
        CopyOnWriteArraySet full = populatedSet(3);
        full.add(one);
        assertEquals(3, full.size());
    }

    /**
     * add adds the element when it does not exist in the set
     */
    public void testAdd3() {
        CopyOnWriteArraySet full = populatedSet(3);
        full.add(three);
        assertTrue(full.contains(three));
    }

    /**
     * clear removes all elements from the set
     */
    public void testClear() {
        CopyOnWriteArraySet full = populatedSet(3);
        full.clear();
        assertEquals(0, full.size());
    }

    /**
     * contains returns true for added elements
     */
    public void testContains() {
        CopyOnWriteArraySet full = populatedSet(3);
        assertTrue(full.contains(one));
        assertFalse(full.contains(five));
    }

    /**
     * Sets with equal elements are equal
     */
    public void testEquals() {
        CopyOnWriteArraySet a = populatedSet(3);
        CopyOnWriteArraySet b = populatedSet(3);
        assertTrue(a.equals(b));
        assertTrue(b.equals(a));
        assertEquals(a.hashCode(), b.hashCode());
        // adding to one side breaks equality in both directions
        a.add(m1);
        assertFalse(a.equals(b));
        assertFalse(b.equals(a));
        // adding the same element to the other side restores it
        b.add(m1);
        assertTrue(a.equals(b));
        assertTrue(b.equals(a));
        assertEquals(a.hashCode(), b.hashCode());
    }

    /**
     * containsAll returns true for collections with subset of elements
     */
    public void testContainsAll() {
        CopyOnWriteArraySet full = populatedSet(3);
        Vector v = new Vector();
        v.add(one);
        v.add(two);
        assertTrue(full.containsAll(v));
        v.add(six);
        assertFalse(full.containsAll(v));
    }

    /**
     * isEmpty is true when empty, else false
     */
    public void testIsEmpty() {
        CopyOnWriteArraySet empty = new CopyOnWriteArraySet();
        CopyOnWriteArraySet full = populatedSet(3);
        assertTrue(empty.isEmpty());
        assertFalse(full.isEmpty());
    }

    /**
     * iterator() returns an iterator containing the elements of the
     * set in insertion order
     */
    public void testIterator() {
        Collection empty = new CopyOnWriteArraySet();
        assertFalse(empty.iterator().hasNext());
        try {
            empty.iterator().next();
            shouldThrow();
        } catch (NoSuchElementException success) {}

        Integer[] elements = new Integer[SIZE];
        for (int i = 0; i < SIZE; i++)
            elements[i] = i;
        // shuffle so insertion order differs from natural order
        Collections.shuffle(Arrays.asList(elements));
        Collection<Integer> full = populatedSet(elements);

        Iterator it = full.iterator();
        for (int j = 0; j < SIZE; j++) {
            assertTrue(it.hasNext());
            assertEquals(elements[j], it.next());
        }
        assertFalse(it.hasNext());
        try {
            it.next();
            shouldThrow();
        } catch (NoSuchElementException success) {}
    }

    /**
     * iterator remove is unsupported
     */
    public void testIteratorRemove() {
        CopyOnWriteArraySet full = populatedSet(3);
        Iterator it = full.iterator();
        it.next();
        try {
            it.remove();
            shouldThrow();
        } catch (UnsupportedOperationException success) {}
    }

    /**
     * toString holds toString of elements
     */
    public void testToString() {
        assertEquals("[]", new CopyOnWriteArraySet().toString());
        CopyOnWriteArraySet full = populatedSet(3);
        String s = full.toString();
        for (int i = 0; i < 3; ++i)
            assertTrue(s.contains(String.valueOf(i)));
        assertEquals(new ArrayList(full).toString(),
                     full.toString());
    }

    /**
     * removeAll removes all elements from the given collection
     */
    public void testRemoveAll() {
        CopyOnWriteArraySet full = populatedSet(3);
        Vector v = new Vector();
        v.add(one);
        v.add(two);
        full.removeAll(v);
        assertEquals(1, full.size());
    }

    /**
     * remove removes an element
     */
    public void testRemove() {
        CopyOnWriteArraySet full = populatedSet(3);
        full.remove(one);
        assertFalse(full.contains(one));
        assertEquals(2, full.size());
    }

    /**
     * size returns the number of elements
     */
    public void testSize() {
        CopyOnWriteArraySet empty = new CopyOnWriteArraySet();
        CopyOnWriteArraySet full = populatedSet(3);
        assertEquals(3, full.size());
        assertEquals(0, empty.size());
    }

    /**
     * toArray() returns an Object array containing all elements from
     * the set in insertion order
     */
    public void testToArray() {
        Object[] a = new CopyOnWriteArraySet().toArray();
        assertTrue(Arrays.equals(new Object[0], a));
        assertSame(Object[].class, a.getClass());

        Integer[] elements = new Integer[SIZE];
        for (int i = 0; i < SIZE; i++)
            elements[i] = i;
        Collections.shuffle(Arrays.asList(elements));
        Collection<Integer> full = populatedSet(elements);

        assertTrue(Arrays.equals(elements, full.toArray()));
        assertSame(Object[].class, full.toArray().getClass());
    }

    /**
     * toArray(Integer array) returns an Integer array containing all
     * elements from the set in insertion order
     */
    public void testToArray2() {
        Collection empty = new CopyOnWriteArraySet();
        Integer[] a;

        a = new Integer[0];
        assertSame(a, empty.toArray(a));

        // a larger-than-needed array gets a null terminator at index 0;
        // the remaining slots are left untouched
        a = new Integer[SIZE/2];
        Arrays.fill(a, 42);
        assertSame(a, empty.toArray(a));
        assertNull(a[0]);
        for (int i = 1; i < a.length; i++)
            assertEquals(42, (int) a[i]);

        Integer[] elements = new Integer[SIZE];
        for (int i = 0; i < SIZE; i++)
            elements[i] = i;
        Collections.shuffle(Arrays.asList(elements));
        Collection<Integer> full = populatedSet(elements);

        // array too small: a fresh array is allocated, the argument is untouched
        Arrays.fill(a, 42);
        assertTrue(Arrays.equals(elements, full.toArray(a)));
        for (int i = 0; i < a.length; i++)
            assertEquals(42, (int) a[i]);
        assertSame(Integer[].class, full.toArray(a).getClass());

        // exact-sized array is reused
        a = new Integer[SIZE];
        Arrays.fill(a, 42);
        assertSame(a, full.toArray(a));
        assertTrue(Arrays.equals(elements, a));

        // oversized array is reused with a null terminator after the elements
        a = new Integer[2*SIZE];
        Arrays.fill(a, 42);
        assertSame(a, full.toArray(a));
        assertTrue(Arrays.equals(elements, Arrays.copyOf(a, SIZE)));
        assertNull(a[SIZE]);
        for (int i = SIZE + 1; i < a.length; i++)
            assertEquals(42, (int) a[i]);
    }

    /**
     * toArray throws an ArrayStoreException when the given array can
     * not store the objects inside the set
     */
    public void testToArray_ArrayStoreException() {
        try {
            CopyOnWriteArraySet c = new CopyOnWriteArraySet();
            c.add("zfasdfsdf");
            c.add("asdadasd");
            c.toArray(new Long[5]);
            shouldThrow();
        } catch (ArrayStoreException success) {}
    }

    /**
     * A deserialized serialized set is equal
     */
    public void testSerialization() throws Exception {
        Set x = populatedSet(SIZE);
        Set y = serialClone(x);

        assertNotSame(y, x);
        assertEquals(x.size(), y.size());
        assertEquals(x.toString(), y.toString());
        assertTrue(Arrays.equals(x.toArray(), y.toArray()));
        assertEquals(x, y);
        assertEquals(y, x);
    }

    /**
     * addAll is idempotent
     */
    public void testAddAll_idempotent() throws Exception {
        Set x = populatedSet(SIZE);
        Set y = new CopyOnWriteArraySet(x);
        y.addAll(x);
        assertEquals(x, y);
        assertEquals(y, x);
    }
}
apache-2.0
alanfgates/hive
llap-server/src/java/org/apache/hadoop/hive/llap/cli/service/LlapServiceCommandLine.java
15416
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.llap.cli.service; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableSet; import jline.TerminalFactory; import java.util.Arrays; import java.util.Properties; import java.util.Set; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.llap.log.LogHelpers; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.util.StringUtils.TraditionalBinaryPrefix; @SuppressWarnings("static-access") class LlapServiceCommandLine { private static final Logger LOG = LoggerFactory.getLogger(LlapServiceCommandLine.class.getName()); private static final Option DIRECTORY = OptionBuilder .withLongOpt("directory") .withDescription("Temp directory for jars etc.") .withArgName("directory") .hasArg() .create('d'); private static final Option NAME = OptionBuilder .withLongOpt("name") .withDescription("Cluster 
name for YARN registry") .withArgName("name") .hasArg() .create('n'); private static final Option EXECUTORS = OptionBuilder .withLongOpt("executors") .withDescription("executor per instance") .withArgName("executors") .hasArg() .create('e'); private static final Option IO_THREADS = OptionBuilder .withLongOpt("iothreads") .withDescription("iothreads per instance") .withArgName("iothreads") .hasArg() .create('t'); private static final Option CACHE = OptionBuilder .withLongOpt("cache") .withDescription("cache size per instance") .withArgName("cache") .hasArg() .create('c'); private static final Option SIZE = OptionBuilder .withLongOpt("size") .withDescription("cache size per instance") .withArgName("size") .hasArg() .create('s'); private static final Option XMX = OptionBuilder .withLongOpt("xmx") .withDescription("working memory size") .withArgName("xmx") .hasArg() .create('w'); private static final Option AUXJARS = OptionBuilder .withLongOpt("auxjars") .withDescription("additional jars to package (by default, JSON SerDe jar is packaged if available)") .withArgName("auxjars") .hasArg() .create('j'); private static final Option AUXHBASE = OptionBuilder .withLongOpt("auxhbase") .withDescription("whether to package the HBase jars (true by default)") .withArgName("auxhbase") .hasArg() .create('h'); private static final Option HIVECONF = OptionBuilder .withLongOpt("hiveconf") .withDescription("Use value for given property. Overridden by explicit parameters") .withArgName("property=value") .hasArgs(2) .withValueSeparator() .create(); private static final Option JAVAHOME = OptionBuilder .withLongOpt("javaHome") .withDescription("Path to the JRE/JDK. 
This should be installed at the same location on all cluster nodes " + "($JAVA_HOME, java.home by default)") .withArgName("javaHome") .hasArg() .create(); private static final Option QUEUE = OptionBuilder .withLongOpt("queue") .withDescription("The queue within which LLAP will be started") .withArgName("queue") .hasArg() .create('q'); private static final Set<String> VALID_LOGGERS = ImmutableSet.of(LogHelpers.LLAP_LOGGER_NAME_RFA.toLowerCase(), LogHelpers.LLAP_LOGGER_NAME_QUERY_ROUTING.toLowerCase(), LogHelpers.LLAP_LOGGER_NAME_CONSOLE.toLowerCase()); private static final Option LOGGER = OptionBuilder .withLongOpt("logger") .withDescription("logger for llap instance ([" + VALID_LOGGERS + "]") .withArgName("logger") .hasArg() .create(); private static final Option START = OptionBuilder .withLongOpt("startImmediately") .withDescription("immediately start the cluster") .withArgName("startImmediately") .hasArg(false) .create('z'); private static final Option OUTPUT = OptionBuilder .withLongOpt("output") .withDescription("Output directory for the generated scripts") .withArgName("output") .hasArg() .create(); private static final Option AUXHIVE = OptionBuilder .withLongOpt("auxhive") .withDescription("whether to package the Hive aux jars (true by default)") .withArgName("auxhive") .hasArg() .create("auxhive"); private static final Option HELP = OptionBuilder .withLongOpt("help") .withDescription("Print help information") .withArgName("help") .hasArg(false) .create('H'); // Options for the python script that are here because our option parser cannot ignore the unknown ones private static final String OPTION_INSTANCES = "instances"; private static final String OPTION_ARGS = "args"; private static final String OPTION_LOGLEVEL = "loglevel"; private static final String OPTION_SERVICE_KEYTAB_DIR = "service-keytab-dir"; private static final String OPTION_SERVICE_KEYTAB = "service-keytab"; private static final String OPTION_SERVICE_PRINCIPAL = "service-principal"; private 
static final String OPTION_SERVICE_PLACEMENT = "service-placement"; private static final String OPTION_SERVICE_DEFAULT_KEYTAB = "service-default-keytab"; private static final String OPTION_HEALTH_PERCENT = "health-percent"; private static final String OPTION_HEALTH_TIME_WINDOW_SECS = "health-time-window-secs"; private static final String OPTION_HEALTH_INIT_DELAY_SECS = "health-init-delay-secs"; private static final String OPTION_SERVICE_AM_CONTAINER_MB = "service-am-container-mb"; private static final String OPTION_SERVICE_APPCONFIG_GLOBAL = "service-appconfig-global"; private static final Options OPTIONS = new Options(); static { OPTIONS.addOption(DIRECTORY); OPTIONS.addOption(NAME); OPTIONS.addOption(EXECUTORS); OPTIONS.addOption(IO_THREADS); OPTIONS.addOption(CACHE); OPTIONS.addOption(SIZE); OPTIONS.addOption(XMX); OPTIONS.addOption(AUXJARS); OPTIONS.addOption(AUXHBASE); OPTIONS.addOption(HIVECONF); OPTIONS.addOption(JAVAHOME); OPTIONS.addOption(QUEUE); OPTIONS.addOption(LOGGER); OPTIONS.addOption(START); OPTIONS.addOption(OUTPUT); OPTIONS.addOption(AUXHIVE); OPTIONS.addOption(HELP); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_INSTANCES) .withDescription("Specify the number of instances to run this on") .withArgName(OPTION_INSTANCES) .hasArg() .create('i')); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_ARGS) .withDescription("java arguments to the llap instance") .withArgName(OPTION_ARGS) .hasArg() .create('a')); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_LOGLEVEL) .withDescription("log levels for the llap instance") .withArgName(OPTION_LOGLEVEL) .hasArg() .create('l')); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_SERVICE_KEYTAB_DIR) .withDescription("Service AM keytab directory on HDFS (where the headless user keytab is stored by Service " + "keytab installation, e.g. 
.yarn/keytabs/llap)") .withArgName(OPTION_SERVICE_KEYTAB_DIR) .hasArg() .create()); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_SERVICE_KEYTAB) .withDescription("Service AM keytab file name inside " + OPTION_SERVICE_KEYTAB_DIR) .withArgName(OPTION_SERVICE_KEYTAB) .hasArg() .create()); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_SERVICE_PRINCIPAL) .withDescription("Service AM principal; should be the user running the cluster, e.g. hive@EXAMPLE.COM") .withArgName(OPTION_SERVICE_PRINCIPAL) .hasArg() .create()); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_SERVICE_PLACEMENT) .withDescription("Service placement policy; see YARN documentation at " + "https://issues.apache.org/jira/browse/YARN-1042. This is unnecessary if LLAP is going to take more than " + "half of the YARN capacity of a node.") .withArgName(OPTION_SERVICE_PLACEMENT) .hasArg() .create()); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_SERVICE_DEFAULT_KEYTAB) .withDescription("try to set default settings for Service AM keytab; mostly for dev testing") .withArgName(OPTION_SERVICE_DEFAULT_KEYTAB) .hasArg(false) .create()); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_HEALTH_PERCENT) .withDescription("Percentage of running containers after which LLAP application is considered healthy" + " (Default: 80)") .withArgName(OPTION_HEALTH_PERCENT) .hasArg() .create()); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_HEALTH_TIME_WINDOW_SECS) .withDescription("Time window in seconds (after initial delay) for which LLAP application is allowed to be " + "in unhealthy state before being killed (Default: 300)") .withArgName(OPTION_HEALTH_TIME_WINDOW_SECS) .hasArg() .create()); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_HEALTH_INIT_DELAY_SECS) .withDescription("Delay in seconds after which health percentage is monitored (Default: 400)") .withArgName(OPTION_HEALTH_INIT_DELAY_SECS) .hasArg() .create()); OPTIONS.addOption(OptionBuilder 
.withLongOpt(OPTION_SERVICE_AM_CONTAINER_MB) .withDescription("The size of the service AppMaster container in MB") .withArgName("b") .hasArg() .create('b')); OPTIONS.addOption(OptionBuilder .withLongOpt(OPTION_SERVICE_APPCONFIG_GLOBAL) .withDescription("Property (key=value) to be set in the global section of the Service appConfig") .withArgName("property=value") .hasArgs(2) .withValueSeparator() .create()); } private String[] args; private String directory; private String name; private int executors; private int ioThreads; private long cache; private long size; private long xmx; private String jars; private boolean isHbase; private Properties conf = new Properties(); private String javaPath = null; private String llapQueueName; private String logger = null; private boolean isStarting; private String output; private boolean isHiveAux; private boolean isHelp; static LlapServiceCommandLine parseArguments(String[] args) { LlapServiceCommandLine cl = null; try { cl = new LlapServiceCommandLine(args); } catch (Exception e) { LOG.error("Parsing the command line arguments failed", e); printUsage(); System.exit(1); } if (cl.isHelp) { printUsage(); System.exit(0); } return cl; } LlapServiceCommandLine(String[] args) throws ParseException { LOG.info("LLAP invoked with arguments = {}", Arrays.toString(args)); this.args = args; parseCommandLine(args); } private void parseCommandLine(String[] args) throws ParseException { CommandLine cl = new GnuParser().parse(OPTIONS, args); if (cl.hasOption(HELP.getOpt())) { isHelp = true; return; } if (!cl.hasOption(OPTION_INSTANCES)) { printUsage(); throw new ParseException("instance must be set"); } int instances = Integer.parseInt(cl.getOptionValue(OPTION_INSTANCES)); if (instances <= 0) { throw new ParseException("Invalid configuration: " + instances + " (should be greater than 0)"); } directory = cl.getOptionValue(DIRECTORY.getOpt()); name = cl.getOptionValue(NAME.getOpt()); executors = 
Integer.parseInt(cl.getOptionValue(EXECUTORS.getOpt(), "-1")); ioThreads = Integer.parseInt(cl.getOptionValue(IO_THREADS.getOpt(), Integer.toString(executors))); cache = TraditionalBinaryPrefix.string2long(cl.getOptionValue(CACHE.getOpt(), "-1")); size = TraditionalBinaryPrefix.string2long(cl.getOptionValue(SIZE.getOpt(), "-1")); xmx = TraditionalBinaryPrefix.string2long(cl.getOptionValue(XMX.getOpt(), "-1")); jars = cl.getOptionValue(AUXJARS.getOpt()); isHbase = Boolean.parseBoolean(cl.getOptionValue(AUXHBASE.getOpt(), "true")); if (cl.hasOption(HIVECONF.getLongOpt())) { conf = cl.getOptionProperties(HIVECONF.getLongOpt()); } if (cl.hasOption(JAVAHOME.getLongOpt())) { javaPath = cl.getOptionValue(JAVAHOME.getLongOpt()); } llapQueueName = cl.getOptionValue(QUEUE.getOpt(), ConfVars.LLAP_DAEMON_QUEUE_NAME.getDefaultValue()); if (cl.hasOption(LOGGER.getLongOpt())) { logger = cl.getOptionValue(LOGGER.getLongOpt()); Preconditions.checkArgument(VALID_LOGGERS.contains(logger.toLowerCase())); } isStarting = cl.hasOption(START.getOpt()); output = cl.getOptionValue(OUTPUT.getLongOpt()); isHiveAux = Boolean.parseBoolean(cl.getOptionValue(AUXHIVE.getOpt(), "true")); } private static void printUsage() { HelpFormatter hf = new HelpFormatter(); try { int width = hf.getWidth(); int jlineWidth = TerminalFactory.get().getWidth(); width = Math.min(160, Math.max(jlineWidth, width)); hf.setWidth(width); } catch (Throwable t) { // Ignore } hf.printHelp("llap", OPTIONS); } String[] getArgs() { return args; } String getDirectory() { return directory; } String getName() { return name; } int getExecutors() { return executors; } int getIoThreads() { return ioThreads; } long getCache() { return cache; } long getSize() { return size; } long getXmx() { return xmx; } String getAuxJars() { return jars; } boolean getIsHBase() { return isHbase; } boolean getIsHiveAux() { return isHiveAux; } Properties getConfig() { return conf; } String getJavaPath() { return javaPath; } String getLlapQueueName() { 
return llapQueueName;
  }

  // Logger name selected via --logger, or null when the option was not given.
  String getLogger() {
    return logger;
  }

  // True when -z/--startImmediately was passed (the option takes no argument).
  boolean isStarting() {
    return isStarting;
  }

  // Output directory for the generated scripts (--output), or null when unset.
  String getOutput() {
    return output;
  }
}
apache-2.0
linqingyicen/jdonframework
example/cqrs+dci/robot/src/main/java/sample/repository/RobotRepository.java
242
package sample.repository;

import java.util.HashMap;

import sample.domain.Robot;

/**
 * Persistence abstraction for {@link Robot} aggregates.
 *
 * <p>Implementations decide where robots are stored; the only contract visible
 * here is lookup by id and save.
 */
public interface RobotRepository {

    /**
     * Finds a robot by its identifier.
     *
     * @param id the robot's identifier
     * @return the matching robot, or {@code null} if none is stored under
     *         {@code id} — TODO confirm null-vs-exception contract against
     *         implementations, it is not visible from this interface
     */
    Robot find(String id);

    /**
     * Persists the given robot.
     *
     * @param robot the robot to store
     */
    void save(Robot robot);

    // Restored as a standalone comment: in the collapsed source "// for test"
    // had swallowed the rest of the line. The redundant "public" modifier is
    // dropped (interface members are implicitly public).
    /**
     * Exposes the backing in-memory map, keyed by robot id.
     * Intended for tests only; production code should not depend on it.
     *
     * @return the live backing map (mutations affect the repository)
     */
    HashMap<String, Robot> getMemDB();
}
apache-2.0
prasi-in/geode
geode-core/src/test/java/org/apache/geode/cache/query/internal/index/MultiIndexCreationDUnitTest.java
7057
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.geode.cache.query.internal.index;

import org.junit.experimental.categories.Category;
import org.junit.Test;

import static org.junit.Assert.*;

import org.apache.geode.test.dunit.cache.internal.JUnit4CacheTestCase;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.DistributedTest;

import java.util.Collection;
import java.util.List;

import org.apache.geode.cache.Region;
import org.apache.geode.cache.RegionShortcut;
import org.apache.geode.cache.query.Index;
import org.apache.geode.cache.query.QueryService;
import org.apache.geode.cache.query.SelectResults;
import org.apache.geode.cache.query.data.Portfolio;
import org.apache.geode.cache.query.internal.QueryObserver;
import org.apache.geode.cache.query.internal.QueryObserverAdapter;
import org.apache.geode.cache.query.internal.QueryObserverHolder;
import org.apache.geode.cache.query.internal.index.IndexManager.TestHook;
import org.apache.geode.cache30.CacheTestCase;
import org.apache.geode.test.dunit.AsyncInvocation;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.Invoke;
import org.apache.geode.test.dunit.LogWriterUtils;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.ThreadUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.Wait;

/**
 * Distributed test verifying that queries executed while indexes are still being created via
 * {@code createDefinedIndexes()} do NOT use those half-built indexes, and that the same queries
 * DO use the indexes once creation completes.
 *
 * <p>Choreography: thread a1 creates the region and blocks inside index creation on
 * {@link MultiIndexCreationTestHook} (hook spot 13 sets {@code hooked}); thread a2 waits for
 * {@code hooked}, runs the queries asserting no index lookup happens, then clears {@code hooked}
 * to release a1. A final invocation re-runs the queries asserting index lookups do happen.
 */
@Category(DistributedTest.class)
public class MultiIndexCreationDUnitTest extends JUnit4CacheTestCase {
  private final String regionName = "MultiIndexCreationDUnitTest";

  // Cross-thread handshake flag between the test hook (index-creation thread)
  // and the querying thread; volatile so both see updates promptly.
  public static volatile boolean hooked = false;

  public MultiIndexCreationDUnitTest() {
    super();
  }

  @Test
  public void testConcurrentMultiIndexCreationAndQuery() throws Exception {
    final Host host = Host.getHost(0);
    final VM server1 = host.getVM(1);
    final int numberOfEntries = 10;
    final String name = "/" + regionName;

    // Start server1: populate the region, then create two defined indexes;
    // the test hook blocks this thread mid-creation until a2 releases it.
    AsyncInvocation a1 = server1.invokeAsync(new SerializableCallable("Create Server1") {
      @Override
      public Object call() throws Exception {
        Region r = getCache().createRegionFactory(RegionShortcut.REPLICATE).create(regionName);
        for (int i = 0; i < numberOfEntries; i++) {
          Portfolio p = new Portfolio(i);
          r.put("key-" + i, p);
        }
        IndexManager.testHook = new MultiIndexCreationTestHook();
        QueryService qs = getCache().getQueryService();
        qs.defineIndex("statusIndex", "status", r.getFullPath());
        qs.defineIndex("IDIndex", "ID", r.getFullPath());
        List<Index> indexes = qs.createDefinedIndexes();
        assertEquals("Only 2 indexes should have been created. ", 2, indexes.size());
        return null;
      }
    });

    // Each query matches 5 of the 10 Portfolio entries.
    final String[] queries = {"select * from " + name + " where status = 'active'",
        "select * from " + name + " where ID > 4"};

    // a2: wait until index creation is parked on the hook, then verify the
    // queries run WITHOUT touching the in-progress indexes.
    AsyncInvocation a2 = server1.invokeAsync(new SerializableCallable("Create Server1") {
      @Override
      public Object call() throws Exception {
        long giveupTime = System.currentTimeMillis() + 60000;
        while (!hooked && System.currentTimeMillis() < giveupTime) {
          LogWriterUtils.getLogWriter().info("Query Waiting for index hook.");
          Wait.pause(100);
        }
        assertTrue(hooked);

        QueryObserver old = QueryObserverHolder.setInstance(new QueryObserverAdapter() {
          private boolean indexCalled = false;

          public void afterIndexLookup(Collection results) {
            indexCalled = true;
          }

          public void endQuery() {
            assertFalse("Index should not have been used. ", indexCalled);
          }
        });

        SelectResults sr = null;
        for (int i = 0; i < queries.length; i++) {
          try {
            sr = (SelectResults) getCache().getQueryService().newQuery(queries[i]).execute();
          } catch (Exception e) {
            fail("QueryExecution failed, " + e);
          }
          assertEquals(5, sr.size());
        }
        QueryObserverHolder.setInstance(old);
        // Release the index-creation thread parked in the test hook.
        hooked = false;
        return null;
      }
    });

    ThreadUtils.join(a1, 120000);
    if (a1.exceptionOccurred()) {
      fail(a1.getException().getMessage());
    }
    ThreadUtils.join(a2, 120000);
    if (a2.exceptionOccurred()) {
      fail(a2.getException().getMessage());
    }

    // Indexes are fully built now: the same queries must use them.
    server1.invoke(new SerializableCallable("Create Server1") {
      @Override
      public Object call() throws Exception {
        IndexManager.testHook = null;
        QueryObserver old = QueryObserverHolder.setInstance(new QueryObserverAdapter() {
          private boolean indexCalled = false;

          public void afterIndexLookup(Collection results) {
            indexCalled = true;
          }

          public void endQuery() {
            assertTrue("Index should have been used. ", indexCalled);
          }
        });

        SelectResults sr = null;
        for (int i = 0; i < queries.length; i++) {
          try {
            sr = (SelectResults) getCache().getQueryService().newQuery(queries[i]).execute();
          } catch (Exception e) {
            fail("QueryExecution failed, " + e);
          }
          assertEquals(5, sr.size());
        }
        QueryObserverHolder.setInstance(old);
        return null;
      }
    });
  }

  @Override
  public final void preTearDownCacheTestCase() throws Exception {
    hooked = false;
    Invoke.invokeInEveryVM(() -> disconnectFromDS());
  }

  @Override
  public final void postTearDownCacheTestCase() throws Exception {
    Invoke.invokeInEveryVM(() -> QueryObserverHolder.reset());
  }

  /**
   * Index-creation hook: at spot 13 (inside createDefinedIndexes) it sets {@code hooked} and
   * spins until the querying thread clears the flag (or the 60s give-up window elapses).
   */
  private static class MultiIndexCreationTestHook implements TestHook {
    @Override
    public void hook(int spot) throws RuntimeException {
      long giveupTime = System.currentTimeMillis() + 60000;
      if (spot == 13) {
        hooked = true;
        LogWriterUtils.getLogWriter()
            .info("MultiIndexCreationTestHook is hooked in create defined indexes.");
        while (hooked && System.currentTimeMillis() < giveupTime) {
          LogWriterUtils.getLogWriter().info("MultiIndexCreationTestHook waiting.");
          Wait.pause(100);
        }
        // FIX: was assertEquals(hooked, false) — JUnit's expected argument comes
        // first, so the arguments were reversed; assertFalse states the intent.
        assertFalse(hooked);
      }
    }
  }
}
apache-2.0
mxm/incubator-beam
runners/google-cloud-dataflow-java/worker/src/main/java/org/apache/beam/runners/dataflow/worker/util/common/worker/WorkProgressUpdater.java
14378
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow.worker.util.common.worker; import com.google.api.client.util.Clock; import com.google.common.annotations.VisibleForTesting; import com.google.common.util.concurrent.ThreadFactoryBuilder; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import javax.annotation.concurrent.GuardedBy; import javax.annotation.concurrent.NotThreadSafe; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * WorkProgressUpdater allows a work executor to send work progress updates to the worker service. * The life-cycle of the WorkProgressUpdater is controlled externally through its {@link * #startReportingProgress()} and {@link #stopReportingProgress()} methods. The updater queries the * worker for progress updates and sends the updates to the worker service. The interval between two * consecutive updates is controlled by the worker service through reporting interval hints sent * back in the update response messages. 
To avoid update storms and monitoring staleness, the * interval between two consecutive updates is also bound by {@link #getMinReportingInterval} and * {@link #getMaxReportingInterval}. */ @NotThreadSafe public abstract class WorkProgressUpdater { private static final Logger LOG = LoggerFactory.getLogger(WorkProgressUpdater.class); /** The default lease duration to request from the external worker service (3 minutes). */ public static final long DEFAULT_LEASE_DURATION_MILLIS = 3 * 60 * 1000; /** The lease renewal RPC latency margin (5 seconds). */ private static final long DEFAULT_LEASE_RENEWAL_LATENCY_MARGIN = 5000; /** * The minimum period between two consecutive progress updates. Ensures the {@link * WorkProgressUpdater} does not generate update storms (5 seconds). */ private static final long DEFAULT_MIN_REPORTING_INTERVAL_MILLIS = 5000; /** * The maximum period between two consecutive progress updates. Ensures the {@link * WorkProgressUpdater} does not cause monitoring staleness (10 minutes). */ private static final long DEFAULT_MAX_REPORTING_INTERVAL_MILLIS = 10 * 60 * 1000; /** * Worker providing the work progress updates. This is a volatile variable because the worker * thread sets it while the progress updater thread reads it. */ protected volatile WorkExecutor worker = null; /** Requested periodic checkpoint period. */ private final int checkpointPeriodSec; /** * The time when the next periodic checkpoint should occur. In the same units as {@code * Clock.currentTimeMillis()}. */ private long nextPeriodicCheckpointTimeMs; /** Executor used to schedule work progress updates. */ private final ScheduledExecutorService executor; /** Clock used to either provide real system time or mocked to virtualize time for testing. */ private final Clock clock; /** The lease duration to request from the external worker service. */ protected long requestedLeaseDurationMs; /** The time period until the next work progress update. 
*/ protected long progressReportIntervalMs; /** The state of worker checkpointing. */ protected enum CheckpointState { /** No checkpoint has yet been requested. */ CHECKPOINT_NOT_REQUESTED, /** A checkpoint has been requested but not yet done successfully. */ CHECKPOINT_REQUESTED, /** A successful checkpoint has been done. */ CHECKPOINT_SUCCESSFUL } @GuardedBy("executor") protected CheckpointState checkpointState = CheckpointState.CHECKPOINT_NOT_REQUESTED; /** * The {@link NativeReader.DynamicSplitResult} to report to the service in the next progress * update, or {@code null} if there is nothing to report (if no dynamic split happened since the * last progress update). */ protected NativeReader.DynamicSplitResult dynamicSplitResultToReport; /** * @param checkpointPeriodSec the desired amount of time in seconds between periodic checkpoints; * if no periodic checkpoints are desired then pass {@link Integer#MAX_VALUE} */ public WorkProgressUpdater(WorkExecutor worker, int checkpointPeriodSec) { this( worker, checkpointPeriodSec, Executors.newSingleThreadScheduledExecutor( new ThreadFactoryBuilder() .setDaemon(true) .setNameFormat("WorkProgressUpdater-%d") .build()), Clock.SYSTEM); } /** * @param checkpointPeriodSec the desired amount of time in seconds between periodic checkpoints; * if no periodic checkpoints are desired then pass {@link Integer#MAX_VALUE} * @param executor the desired executor, can be used to inject a executor for testing * @param clock the desired clock, can be used to inject a mock clock for testing */ @VisibleForTesting protected WorkProgressUpdater( WorkExecutor worker, int checkpointPeriodSec, ScheduledExecutorService executor, Clock clock) { this.worker = worker; this.checkpointPeriodSec = checkpointPeriodSec; this.executor = executor; this.clock = clock; } /** @param worker workexecutor for the updater. */ public void setWorker(WorkExecutor worker) { this.worker = worker; } /** Starts sending work progress updates to the worker service. 
*/ public void startReportingProgress() { // The initial work progress report is sent according to hints from the service if any. // Otherwise the default is half-way through the lease. long leaseRemainingTime = leaseRemainingTime(getWorkUnitLeaseExpirationTimestamp()); progressReportIntervalMs = nextProgressReportInterval(getWorkUnitSuggestedReportingInterval(), leaseRemainingTime); requestedLeaseDurationMs = DEFAULT_LEASE_DURATION_MILLIS; nextPeriodicCheckpointTimeMs = clock.currentTimeMillis() + ((long) checkpointPeriodSec) * 1000; LOG.debug("Started reporting progress for work item: {}", workString()); scheduleNextUpdate(); } /** Requests that a checkpoint be done. */ public void requestCheckpoint() { synchronized (executor) { LOG.debug("Asynchronous checkpoint for work item {}.", workString()); if (checkpointState == CheckpointState.CHECKPOINT_NOT_REQUESTED) { checkpointState = CheckpointState.CHECKPOINT_REQUESTED; } if (tryCheckpointIfNeeded()) { reportProgress(); } } } /** * Stops sending work progress updates to the worker service. It may throw an exception if the * final progress report fails to be sent for some reason. */ public void stopReportingProgress() throws Exception { // Wait until there are no more progress updates in progress, then shut down. synchronized (executor) { executor.shutdownNow(); // We send a final progress report in case there was an unreported dynamic split. if (dynamicSplitResultToReport != null) { LOG.debug( "Sending final progress update with unreported split: {} " + "for work item: {}", dynamicSplitResultToReport, workString()); reportProgressHelper(); // This call can fail with an exception } } LOG.debug("Stopped reporting progress for work item: {}", workString()); } /** * Computes the time before sending the next work progress update making sure that it falls * between the [{@link #getMinReportingInterval}, {@link #getMaxReportingInterval}] interval. 
* Makes an attempt to bound the result by the remaining lease time, with an RPC latency margin of * {@link #getLeaseRenewalLatencyMargin}. * * @param suggestedInterval the suggested progress report interval * @param leaseRemainingTime milliseconds left before the work lease expires * @return the time in milliseconds before sending the next progress update */ protected final long nextProgressReportInterval(long suggestedInterval, long leaseRemainingTime) { // Try to send the next progress update before the next lease expiration // allowing some RPC latency margin. suggestedInterval = Math.min(suggestedInterval, leaseRemainingTime - getLeaseRenewalLatencyMargin()); // Bound reporting interval to avoid staleness and progress update storms. return Math.min( Math.max(getMinReportingInterval(), suggestedInterval), getMaxReportingInterval()); } /** Schedules the next work progress update or periodic checkpoint. */ @SuppressWarnings("FutureReturnValueIgnored") private void scheduleNextUpdate() { if (executor.isShutdown()) { return; } long delay = Math.min( progressReportIntervalMs, nextPeriodicCheckpointTimeMs - clock.currentTimeMillis()); executor.schedule( new Runnable() { @Override public void run() { doNextUpdate(); } }, delay, TimeUnit.MILLISECONDS); LOG.debug( "Next work progress update for work item {} scheduled to occur in {} ms.", workString(), progressReportIntervalMs); } /** Does the next work progress update or periodic checkpoint. */ private void doNextUpdate() { // Don't shut down while reporting progress. synchronized (executor) { if (executor.isShutdown()) { return; } try { checkForPeriodicCheckpoint(); tryCheckpointIfNeeded(); reportProgress(); } finally { scheduleNextUpdate(); } } } /** If it is time for a periodic checkpoint then requests it. 
*/ @GuardedBy("executor") private void checkForPeriodicCheckpoint() { if (clock.currentTimeMillis() >= nextPeriodicCheckpointTimeMs) { LOG.debug("Periodic checkpoint for work item {}.", workString()); if (checkpointState == CheckpointState.CHECKPOINT_NOT_REQUESTED) { checkpointState = CheckpointState.CHECKPOINT_REQUESTED; } nextPeriodicCheckpointTimeMs = Long.MAX_VALUE; } } /** * If a checkpoint has been requested but not yet done, tries to do it. Returns whether a * successful checkpoint was done. */ @GuardedBy("executor") protected boolean tryCheckpointIfNeeded() { if (checkpointState == CheckpointState.CHECKPOINT_REQUESTED && worker != null) { LOG.debug("Trying to checkpoint for work item {}.", workString()); try { NativeReader.DynamicSplitResult checkpointPos = worker.requestCheckpoint(); if (checkpointPos != null) { LOG.debug("Successful checkpoint for work item {} at {}.", workString(), checkpointPos); dynamicSplitResultToReport = checkpointPos; checkpointState = CheckpointState.CHECKPOINT_SUCCESSFUL; return true; } } catch (Throwable e) { LOG.warn("Error trying to checkpoint the worker: ", e); } } return false; } /** Reports the current work progress to the worker service. */ @GuardedBy("executor") private void reportProgress() { LOG.debug("Updating progress on work item {}", workString()); try { reportProgressHelper(); } catch (InterruptedException e) { LOG.info("Cancelling workitem execution: {}", workString(), e); worker.abort(); } catch (Throwable e) { LOG.warn("Error reporting workitem progress update to Dataflow service: ", e); } } /** * Computes the amount of time left, in milliseconds, before a lease with the specified expiration * timestamp expires. Returns zero if the lease has already expired. 
*/ protected long leaseRemainingTime(long leaseExpirationTimestamp) { long now = clock.currentTimeMillis(); if (leaseExpirationTimestamp < now) { LOG.debug("Lease remaining time for {} is 0 ms.", workString()); return 0; } LOG.debug( "Lease remaining time for {} is {} ms.", workString(), leaseExpirationTimestamp - now); return leaseExpirationTimestamp - now; } @VisibleForTesting public NativeReader.DynamicSplitResult getDynamicSplitResultToReport() { return dynamicSplitResultToReport; } /** * Reports the current work progress to the worker service. Holds lock on executor during call so * that checkpointState can be accessed. * * @throws an InterruptedException to indicate that the WorkItem has been aborted. */ @GuardedBy("executor") protected abstract void reportProgressHelper() throws Exception; /** Returns the current work item's lease expiration timestamp. */ protected abstract long getWorkUnitLeaseExpirationTimestamp(); /** Returns the current work item's suggested progress reporting interval. */ protected long getWorkUnitSuggestedReportingInterval() { return leaseRemainingTime(getWorkUnitLeaseExpirationTimestamp()) / 2; } /** Returns the minimum allowed time between two periodic progress updates. */ protected long getMinReportingInterval() { return DEFAULT_MIN_REPORTING_INTERVAL_MILLIS; } /** Returns the maximum allowed time between two periodic progress updates. */ protected long getMaxReportingInterval() { return DEFAULT_MAX_REPORTING_INTERVAL_MILLIS; } /** * Returns the maximum allowed time between a periodic progress update and the moment the current * lease expires. */ protected long getLeaseRenewalLatencyMargin() { return DEFAULT_LEASE_RENEWAL_LATENCY_MARGIN; } /** * Returns a string representation of the work item whose progress is being updated, for use in * logging messages. */ protected abstract String workString(); }
apache-2.0
smmribeiro/intellij-community
plugins/groovy/groovy-psi/src/org/jetbrains/plugins/groovy/dsl/GdslUtil.java
804
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package org.jetbrains.plugins.groovy.dsl; import com.intellij.openapi.fileTypes.FileTypeRegistry; import com.intellij.openapi.util.Condition; import com.intellij.openapi.util.Key; import com.intellij.openapi.vfs.VirtualFile; import org.jetbrains.plugins.groovy.GdslFileType; public final class GdslUtil { public static final Key<GroovyClassDescriptor> INITIAL_CONTEXT = Key.create("gdsl.initialContext"); public static final Condition<VirtualFile> GDSL_FILTER = file -> FileTypeRegistry.getInstance().isFileOfType(file, GdslFileType.INSTANCE); static volatile boolean ourGdslStopped = false; static void stopGdsl() { ourGdslStopped = true; } }
apache-2.0
kishorvpatil/incubator-storm
external/storm-hdfs/src/test/java/org/apache/storm/hdfs/spout/TestHdfsSpout.java
31487
/** * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version * 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package org.apache.storm.hdfs.spout; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.lang.reflect.Field; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.util.ReflectionUtils; import org.apache.storm.Config; import org.apache.storm.hdfs.common.HdfsUtils; import org.apache.storm.hdfs.common.HdfsUtils.Pair; import org.apache.storm.hdfs.testing.MiniDFSClusterRule; import org.apache.storm.spout.SpoutOutputCollector; import 
org.apache.storm.task.TopologyContext; import org.junit.After; import org.junit.AfterClass; import org.junit.Assert; import org.junit.Before; import org.junit.BeforeClass; import org.junit.ClassRule; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; public class TestHdfsSpout { private static final Configuration conf = new Configuration(); @ClassRule public static MiniDFSClusterRule DFS_CLUSTER_RULE = new MiniDFSClusterRule(); private static DistributedFileSystem fs; @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); public File baseFolder; private Path source; private Path archive; private Path badfiles; @BeforeClass public static void setupClass() throws IOException { fs = DFS_CLUSTER_RULE.getDfscluster().getFileSystem(); } @AfterClass public static void teardownClass() throws IOException { fs.close(); } private static <T> T getField(HdfsSpout spout, String fieldName) throws NoSuchFieldException, IllegalAccessException { Field readerFld = HdfsSpout.class.getDeclaredField(fieldName); readerFld.setAccessible(true); return (T) readerFld.get(spout); } private static boolean getBoolField(HdfsSpout spout, String fieldName) throws NoSuchFieldException, IllegalAccessException { Field readerFld = HdfsSpout.class.getDeclaredField(fieldName); readerFld.setAccessible(true); return readerFld.getBoolean(spout); } private static List<String> readTextFile(FileSystem fs, String f) throws IOException { Path file = new Path(f); FSDataInputStream x = fs.open(file); BufferedReader reader = new BufferedReader(new InputStreamReader(x)); String line = null; ArrayList<String> result = new ArrayList<>(); while ((line = reader.readLine()) != null) { result.add(line); } return result; } private static void createSeqFile(FileSystem fs, Path file, int rowCount) throws IOException { Configuration conf = new Configuration(); try { if (fs.exists(file)) { fs.delete(file, false); } SequenceFile.Writer w = SequenceFile.createWriter(fs, conf, file, 
IntWritable.class, Text.class); for (int i = 0; i < rowCount; i++) { w.append(new IntWritable(i), new Text("line " + i)); } w.close(); System.out.println("done"); } catch (IOException e) { e.printStackTrace(); } } @Before public void setup() throws Exception { baseFolder = tempFolder.newFolder("hdfsspout"); source = new Path(baseFolder.toString() + "/source"); fs.mkdirs(source); archive = new Path(baseFolder.toString() + "/archive"); fs.mkdirs(archive); badfiles = new Path(baseFolder.toString() + "/bad"); fs.mkdirs(badfiles); } @After public void shutDown() throws IOException { fs.delete(new Path(baseFolder.toString()), true); } @Test public void testSimpleText_noACK() throws Exception { Path file1 = new Path(source.toString() + "/file1.txt"); createTextFile(file1, 5); Path file2 = new Path(source.toString() + "/file2.txt"); createTextFile(file2, 5); try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; spout.setCommitFrequencyCount(1); spout.setCommitFrequencySec(1); Map<String, Object> conf = getCommonConfigs(); openSpout(spout, 0, conf); runSpout(spout, "r11"); Path arc1 = new Path(archive.toString() + "/file1.txt"); Path arc2 = new Path(archive.toString() + "/file2.txt"); checkCollectorOutput_txt((MockCollector) spout.getCollector(), arc1, arc2); } } @Test public void testSimpleText_ACK() throws Exception { Path file1 = new Path(source.toString() + "/file1.txt"); createTextFile(file1, 5); Path file2 = new Path(source.toString() + "/file2.txt"); createTextFile(file2, 5); try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; spout.setCommitFrequencyCount(1); spout.setCommitFrequencySec(1); Map<String, Object> conf = getCommonConfigs(); conf.put(Config.TOPOLOGY_ACKER_EXECUTORS, "1"); // enable ACKing openSpout(spout, 0, conf); // consume file 1 runSpout(spout, "r6", "a0", "a1", "a2", "a3", 
"a4"); Path arc1 = new Path(archive.toString() + "/file1.txt"); checkCollectorOutput_txt((MockCollector) spout.getCollector(), arc1); // consume file 2 runSpout(spout, "r6", "a5", "a6", "a7", "a8", "a9"); Path arc2 = new Path(archive.toString() + "/file2.txt"); checkCollectorOutput_txt((MockCollector) spout.getCollector(), arc1, arc2); } } @Test public void testEmptySimpleText_ACK() throws Exception { Path file1 = new Path(source.toString() + "/file_empty.txt"); createTextFile(file1, 0); //Ensure the second file has a later modified timestamp, as the spout should pick the first file first. Thread.sleep(2); Path file2 = new Path(source.toString() + "/file.txt"); createTextFile(file2, 5); try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; spout.setCommitFrequencyCount(1); Map<String, Object> conf = getCommonConfigs(); conf.put(Config.TOPOLOGY_ACKER_EXECUTORS, "1"); // enable ACKing openSpout(spout, 0, conf); // Read once. 
Since the first file is empty, the spout should continue with file 2 runSpout(spout, "r6", "a0", "a1", "a2", "a3", "a4"); //File 1 should be moved to archive assertThat(fs.isFile(new Path(archive.toString() + "/file_empty.txt")), is(true)); //File 2 should be read Path arc2 = new Path(archive.toString() + "/file.txt"); checkCollectorOutput_txt((MockCollector) spout.getCollector(), arc2); } } @Test public void testResumeAbandoned_Text_NoAck() throws Exception { Path file1 = new Path(source.toString() + "/file1.txt"); createTextFile(file1, 6); final Integer lockExpirySec = 1; try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; spout.setCommitFrequencyCount(1); spout.setCommitFrequencySec(1000); // effectively disable commits based on time spout.setLockTimeoutSec(lockExpirySec); try (AutoCloseableHdfsSpout closeableSpout2 = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout2 = closeableSpout2.spout; spout2.setCommitFrequencyCount(1); spout2.setCommitFrequencySec(1000); // effectively disable commits based on time spout2.setLockTimeoutSec(lockExpirySec); Map<String, Object> conf = getCommonConfigs(); openSpout(spout, 0, conf); openSpout(spout2, 1, conf); // consume file 1 partially List<String> res = runSpout(spout, "r2"); Assert.assertEquals(2, res.size()); // abandon file FileLock lock = getField(spout, "lock"); TestFileLock.closeUnderlyingLockFile(lock); Thread.sleep(lockExpirySec * 2 * 1000); // check lock file presence Assert.assertTrue(fs.exists(lock.getLockFile())); // create another spout to take over processing and read a few lines List<String> res2 = runSpout(spout2, "r3"); Assert.assertEquals(3, res2.size()); // check lock file presence Assert.assertTrue(fs.exists(lock.getLockFile())); // check lock file contents List<String> contents = readTextFile(fs, lock.getLockFile().toString()); Assert.assertFalse(contents.isEmpty()); // finish up reading the 
file res2 = runSpout(spout2, "r2"); Assert.assertEquals(4, res2.size()); // check lock file is gone Assert.assertFalse(fs.exists(lock.getLockFile())); FileReader rdr = getField(spout2, "reader"); Assert.assertNull(rdr); Assert.assertTrue(getBoolField(spout2, "fileReadCompletely")); } } } @Test public void testResumeAbandoned_Seq_NoAck() throws Exception { Path file1 = new Path(source.toString() + "/file1.seq"); createSeqFile(fs, file1, 6); final Integer lockExpirySec = 1; try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.SEQ, SequenceFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; spout.setCommitFrequencyCount(1); spout.setCommitFrequencySec(1000); // effectively disable commits based on time spout.setLockTimeoutSec(lockExpirySec); try (AutoCloseableHdfsSpout closeableSpout2 = makeSpout(Configs.SEQ, SequenceFileReader.defaultFields)) { HdfsSpout spout2 = closeableSpout2.spout; spout2.setCommitFrequencyCount(1); spout2.setCommitFrequencySec(1000); // effectively disable commits based on time spout2.setLockTimeoutSec(lockExpirySec); Map<String, Object> conf = getCommonConfigs(); openSpout(spout, 0, conf); openSpout(spout2, 1, conf); // consume file 1 partially List<String> res = runSpout(spout, "r2"); Assert.assertEquals(2, res.size()); // abandon file FileLock lock = getField(spout, "lock"); TestFileLock.closeUnderlyingLockFile(lock); Thread.sleep(lockExpirySec * 2 * 1000); // check lock file presence Assert.assertTrue(fs.exists(lock.getLockFile())); // create another spout to take over processing and read a few lines List<String> res2 = runSpout(spout2, "r3"); Assert.assertEquals(3, res2.size()); // check lock file presence Assert.assertTrue(fs.exists(lock.getLockFile())); // check lock file contents List<String> contents = getTextFileContents(fs, lock.getLockFile()); Assert.assertFalse(contents.isEmpty()); // finish up reading the file res2 = runSpout(spout2, "r3"); Assert.assertEquals(4, res2.size()); // check lock file is gone 
Assert.assertFalse(fs.exists(lock.getLockFile())); FileReader rdr = getField(spout2, "reader"); Assert.assertNull(rdr); Assert.assertTrue(getBoolField(spout2, "fileReadCompletely")); } } } private void checkCollectorOutput_txt(MockCollector collector, Path... txtFiles) throws IOException { ArrayList<String> expected = new ArrayList<>(); for (Path txtFile : txtFiles) { List<String> lines = getTextFileContents(fs, txtFile); expected.addAll(lines); } List<String> actual = new ArrayList<>(); for (Pair<HdfsSpout.MessageId, List<Object>> item : collector.items) { actual.add(item.getValue().get(0).toString()); } Assert.assertEquals(expected, actual); } private List<String> getTextFileContents(FileSystem fs, Path txtFile) throws IOException { ArrayList<String> result = new ArrayList<>(); FSDataInputStream istream = fs.open(txtFile); InputStreamReader isreader = new InputStreamReader(istream, "UTF-8"); BufferedReader reader = new BufferedReader(isreader); for (String line = reader.readLine(); line != null; line = reader.readLine()) { result.add(line); } isreader.close(); return result; } private void checkCollectorOutput_seq(MockCollector collector, Path... seqFiles) throws IOException { ArrayList<String> expected = new ArrayList<>(); for (Path seqFile : seqFiles) { List<String> lines = getSeqFileContents(fs, seqFile); expected.addAll(lines); } Assert.assertTrue(expected.equals(collector.lines)); } private List<String> getSeqFileContents(FileSystem fs, Path... 
seqFiles) throws IOException { ArrayList<String> result = new ArrayList<>(); for (Path seqFile : seqFiles) { Path file = new Path(fs.getUri().toString() + seqFile.toString()); SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file)); try { Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf); Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf); while (reader.next(key, value)) { String keyValStr = Arrays.asList(key, value).toString(); result.add(keyValStr); } } finally { reader.close(); } }// for return result; } private List<String> listDir(Path p) throws IOException { ArrayList<String> result = new ArrayList<>(); RemoteIterator<LocatedFileStatus> fileNames = fs.listFiles(p, false); while (fileNames.hasNext()) { LocatedFileStatus fileStatus = fileNames.next(); result.add(Path.getPathWithoutSchemeAndAuthority(fileStatus.getPath()).toString()); } return result; } @Test public void testMultipleFileConsumption_Ack() throws Exception { Path file1 = new Path(source.toString() + "/file1.txt"); createTextFile(file1, 5); try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; spout.setCommitFrequencyCount(1); spout.setCommitFrequencySec(1); Map<String, Object> conf = getCommonConfigs(); conf.put(Config.TOPOLOGY_ACKER_EXECUTORS, "1"); // enable ACKing openSpout(spout, 0, conf); // read few lines from file1 dont ack runSpout(spout, "r3"); FileReader reader = getField(spout, "reader"); Assert.assertNotNull(reader); Assert.assertEquals(false, getBoolField(spout, "fileReadCompletely")); // read remaining lines runSpout(spout, "r3"); reader = getField(spout, "reader"); Assert.assertNotNull(reader); Assert.assertEquals(true, getBoolField(spout, "fileReadCompletely")); // ack few runSpout(spout, "a0", "a1", "a2"); reader = getField(spout, "reader"); Assert.assertNotNull(reader); 
Assert.assertEquals(true, getBoolField(spout, "fileReadCompletely")); //ack rest runSpout(spout, "a3", "a4"); reader = getField(spout, "reader"); Assert.assertNull(reader); Assert.assertEquals(true, getBoolField(spout, "fileReadCompletely")); // go to next file Path file2 = new Path(source.toString() + "/file2.txt"); createTextFile(file2, 5); // Read 1 line runSpout(spout, "r1"); Assert.assertNotNull(getField(spout, "reader")); Assert.assertEquals(false, getBoolField(spout, "fileReadCompletely")); // ack 1 tuple runSpout(spout, "a5"); Assert.assertNotNull(getField(spout, "reader")); Assert.assertEquals(false, getBoolField(spout, "fileReadCompletely")); // read and ack remaining lines runSpout(spout, "r5", "a6", "a7", "a8", "a9"); Assert.assertNull(getField(spout, "reader")); Assert.assertEquals(true, getBoolField(spout, "fileReadCompletely")); } } @Test public void testSimpleSequenceFile() throws Exception { //1) create a couple files to consume source = new Path("/tmp/hdfsspout/source"); fs.mkdirs(source); archive = new Path("/tmp/hdfsspout/archive"); fs.mkdirs(archive); Path file1 = new Path(source + "/file1.seq"); createSeqFile(fs, file1, 5); Path file2 = new Path(source + "/file2.seq"); createSeqFile(fs, file2, 5); try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.SEQ, SequenceFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; Map<String, Object> conf = getCommonConfigs(); openSpout(spout, 0, conf); // consume both files List<String> res = runSpout(spout, "r11"); Assert.assertEquals(10, res.size()); Assert.assertEquals(2, listDir(archive).size()); Path f1 = new Path(archive + "/file1.seq"); Path f2 = new Path(archive + "/file2.seq"); checkCollectorOutput_seq((MockCollector) spout.getCollector(), f1, f2); } } @Test public void testReadFailures() throws Exception { // 1) create couple of input files to read Path file1 = new Path(source.toString() + "/file1.txt"); Path file2 = new Path(source.toString() + "/file2.txt"); 
createTextFile(file1, 6); createTextFile(file2, 7); Assert.assertEquals(2, listDir(source).size()); // 2) run spout try ( AutoCloseableHdfsSpout closeableSpout = makeSpout(MockTextFailingReader.class.getName(), MockTextFailingReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; Map<String, Object> conf = getCommonConfigs(); openSpout(spout, 0, conf); List<String> res = runSpout(spout, "r11"); String[] expected = new String[]{ "[line 0]", "[line 1]", "[line 2]", "[line 0]", "[line 1]", "[line 2]" }; Assert.assertArrayEquals(expected, res.toArray()); // 3) make sure 6 lines (3 from each file) were read in all Assert.assertEquals(((MockCollector) spout.getCollector()).lines.size(), 6); ArrayList<Path> badFiles = HdfsUtils.listFilesByModificationTime(fs, badfiles, 0); Assert.assertEquals(badFiles.size(), 2); } } // check lock creation/deletion and contents @Test public void testLocking() throws Exception { Path file1 = new Path(source.toString() + "/file1.txt"); createTextFile(file1, 10); // 0) config spout to log progress in lock file for each tuple try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; spout.setCommitFrequencyCount(1); spout.setCommitFrequencySec(1000); // effectively disable commits based on time Map<String, Object> conf = getCommonConfigs(); openSpout(spout, 0, conf); // 1) read initial lines in file, then check if lock exists List<String> res = runSpout(spout, "r5"); Assert.assertEquals(5, res.size()); List<String> lockFiles = listDir(spout.getLockDirPath()); Assert.assertEquals(1, lockFiles.size()); // 2) check log file content line count == tuples emitted + 1 List<String> lines = readTextFile(fs, lockFiles.get(0)); Assert.assertEquals(lines.size(), res.size() + 1); // 3) read remaining lines in file, then ensure lock is gone runSpout(spout, "r6"); lockFiles = listDir(spout.getLockDirPath()); Assert.assertEquals(0, lockFiles.size()); // 4) --- 
Create another input file and reverify same behavior --- Path file2 = new Path(source.toString() + "/file2.txt"); createTextFile(file2, 10); // 5) read initial lines in file, then check if lock exists res = runSpout(spout, "r5"); Assert.assertEquals(15, res.size()); lockFiles = listDir(spout.getLockDirPath()); Assert.assertEquals(1, lockFiles.size()); // 6) check log file content line count == tuples emitted + 1 lines = readTextFile(fs, lockFiles.get(0)); Assert.assertEquals(6, lines.size()); // 7) read remaining lines in file, then ensure lock is gone runSpout(spout, "r6"); lockFiles = listDir(spout.getLockDirPath()); Assert.assertEquals(0, lockFiles.size()); } } @Test public void testLockLoggingFreqCount() throws Exception { Path file1 = new Path(source.toString() + "/file1.txt"); createTextFile(file1, 10); // 0) config spout to log progress in lock file for each tuple try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; spout.setCommitFrequencyCount(2); // 1 lock log entry every 2 tuples spout.setCommitFrequencySec(1000); // Effectively disable commits based on time Map<String, Object> conf = getCommonConfigs(); openSpout(spout, 0, conf); // 1) read 5 lines in file, runSpout(spout, "r5"); // 2) check log file contents String lockFile = listDir(spout.getLockDirPath()).get(0); List<String> lines = readTextFile(fs, lockFile); Assert.assertEquals(lines.size(), 3); // 3) read 6th line and see if another log entry was made runSpout(spout, "r1"); lines = readTextFile(fs, lockFile); Assert.assertEquals(lines.size(), 4); } } @Test public void testLockLoggingFreqSec() throws Exception { Path file1 = new Path(source.toString() + "/file1.txt"); createTextFile(file1, 10); // 0) config spout to log progress in lock file for each tuple try (AutoCloseableHdfsSpout closeableSpout = makeSpout(Configs.TEXT, TextFileReader.defaultFields)) { HdfsSpout spout = closeableSpout.spout; 
spout.setCommitFrequencyCount(0); // disable it spout.setCommitFrequencySec(2); // log every 2 sec Map<String, Object> conf = getCommonConfigs(); openSpout(spout, 0, conf); // 1) read 5 lines in file runSpout(spout, "r5"); // 2) check log file contents String lockFile = listDir(spout.getLockDirPath()).get(0); List<String> lines = readTextFile(fs, lockFile); Assert.assertEquals(lines.size(), 1); Thread.sleep(3000); // allow freq_sec to expire // 3) read another line and see if another log entry was made runSpout(spout, "r1"); lines = readTextFile(fs, lockFile); Assert.assertEquals(2, lines.size()); } } private Map<String, Object> getCommonConfigs() { Map<String, Object> topoConf = new HashMap<>(); topoConf.put(Config.TOPOLOGY_ACKER_EXECUTORS, "0"); return topoConf; } private AutoCloseableHdfsSpout makeSpout(String readerType, String[] outputFields) { HdfsSpout spout = new HdfsSpout().withOutputFields(outputFields) .setReaderType(readerType) .setHdfsUri(DFS_CLUSTER_RULE.getDfscluster().getURI().toString()) .setSourceDir(source.toString()) .setArchiveDir(archive.toString()) .setBadFilesDir(badfiles.toString()); return new AutoCloseableHdfsSpout(spout); } private void openSpout(HdfsSpout spout, int spoutId, Map<String, Object> topoConf) { MockCollector collector = new MockCollector(); spout.open(topoConf, new MockTopologyContext(spoutId, topoConf), collector); } /** * Execute a sequence of calls on HdfsSpout. * * @param cmds: set of commands to run, e.g. "r,r,r,r,a1,f2,...". The commands are: r[N] - receive() called N times aN - ack, item * number: N fN - fail, item number: N */ private List<String> runSpout(HdfsSpout spout, String... 
cmds) { MockCollector collector = (MockCollector) spout.getCollector(); for (String cmd : cmds) { if (cmd.startsWith("r")) { int count = 1; if (cmd.length() > 1) { count = Integer.parseInt(cmd.substring(1)); } for (int i = 0; i < count; ++i) { spout.nextTuple(); } } else if (cmd.startsWith("a")) { int n = Integer.parseInt(cmd.substring(1)); Pair<HdfsSpout.MessageId, List<Object>> item = collector.items.get(n); spout.ack(item.getKey()); } else if (cmd.startsWith("f")) { int n = Integer.parseInt(cmd.substring(1)); Pair<HdfsSpout.MessageId, List<Object>> item = collector.items.get(n); spout.fail(item.getKey()); } } return collector.lines; } private void createTextFile(Path file, int lineCount) throws IOException { FSDataOutputStream os = fs.create(file); for (int i = 0; i < lineCount; i++) { os.writeBytes("line " + i + System.lineSeparator()); } os.close(); } private static class AutoCloseableHdfsSpout implements AutoCloseable { private final HdfsSpout spout; public AutoCloseableHdfsSpout(HdfsSpout spout) { this.spout = spout; } @Override public void close() throws Exception { spout.close(); } } static class MockCollector extends SpoutOutputCollector { //comma separated offsets public ArrayList<String> lines; public ArrayList<Pair<HdfsSpout.MessageId, List<Object>>> items; public MockCollector() { super(null); lines = new ArrayList<>(); items = new ArrayList<>(); } @Override public List<Integer> emit(List<Object> tuple, Object messageId) { lines.add(tuple.toString()); items.add(HdfsUtils.Pair.of(messageId, tuple)); return null; } @Override public List<Integer> emit(String streamId, List<Object> tuple, Object messageId) { return emit(tuple, messageId); } @Override public void emitDirect(int arg0, String arg1, List<Object> arg2, Object arg3) { throw new UnsupportedOperationException("NOT Implemented"); } @Override public void reportError(Throwable arg0) { throw new UnsupportedOperationException("NOT Implemented"); } @Override public long getPendingCount() { return 0; } 
} // class MockCollector // Throws IOExceptions for 3rd & 4th call to next(), succeeds on 5th, thereafter // throws ParseException. Effectively produces 3 lines (1,2 & 3) from each file read static class MockTextFailingReader extends TextFileReader { public static final String[] defaultFields = { "line" }; int readAttempts = 0; public MockTextFailingReader(FileSystem fs, Path file, Map<String, Object> conf) throws IOException { super(fs, file, conf); } @Override public List<Object> next() throws IOException, ParseException { readAttempts++; if (readAttempts == 3 || readAttempts == 4) { throw new IOException("mock test exception"); } else if (readAttempts > 5) { throw new ParseException("mock test exception", null); } return super.next(); } } static class MockTopologyContext extends TopologyContext { private final int componentId; public MockTopologyContext(int componentId, Map<String, Object> topoConf) { super(null, topoConf, null, null, null, null, null, null, null, 0, 0, null, null, null, null, null, null, null); this.componentId = componentId; } @Override public String getThisComponentId() { return Integer.toString(componentId); } } }
apache-2.0
sflyphotobooks/crp-batik
sources/org/apache/batik/css/engine/value/URIValue.java
1476
/*

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */
package org.apache.batik.css.engine.value;

import org.w3c.dom.css.CSSPrimitiveValue;

/**
 * This class represents uri values, serialized in the {@code url(...)} form.
 *
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @version $Id: URIValue.java 478283 2006-11-22 18:53:40Z dvholten $
 */
public class URIValue extends StringValue {

    // The raw text that appears between the parentheses when this value
    // is serialized back to CSS; kept separate from the resolved uri
    // passed to the superclass.
    String cssText;

    /**
     * Creates a new uri value.
     *
     * @param cssText the text used when serializing this value back to CSS
     * @param uri     the resolved URI this value denotes
     */
    public URIValue(String cssText, String uri) {
        super(CSSPrimitiveValue.CSS_URI, uri);
        this.cssText = cssText;
    }

    /**
     * A string representation of the current value, wrapped in
     * {@code url(...)}.
     */
    public String getCssText() {
        StringBuilder buf = new StringBuilder("url(");
        buf.append(cssText).append(')');
        return buf.toString();
    }
}
apache-2.0
robin13/elasticsearch
libs/x-content/src/main/java/org/elasticsearch/common/xcontent/support/AbstractXContentParser.java
17378
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.common.xcontent.support; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.RestApiVersion; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParseException; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.math.BigDecimal; import java.math.BigInteger; import java.nio.CharBuffer; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.function.Supplier; public abstract class AbstractXContentParser implements XContentParser { // Currently this is not a setting that can be changed and is a policy // that relates to how parsing of things like "boost" are done across // the whole of Elasticsearch (eg if String "1.0" is a valid float). // The idea behind keeping it as a constant is that we can track // references to this policy decision throughout the codebase and find // and change any code that needs to apply an alternative policy. public static final boolean DEFAULT_NUMBER_COERCE_POLICY = true; private static void checkCoerceString(boolean coerce, Class<? 
extends Number> clazz) { if (coerce == false) { //Need to throw type IllegalArgumentException as current catch logic in //NumberFieldMapper.parseCreateField relies on this for "malformed" value detection throw new IllegalArgumentException(clazz.getSimpleName() + " value passed as String"); } } private final NamedXContentRegistry xContentRegistry; private final DeprecationHandler deprecationHandler; private final RestApiVersion restApiVersion; public AbstractXContentParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler, RestApiVersion restApiVersion) { this.xContentRegistry = xContentRegistry; this.deprecationHandler = deprecationHandler; this.restApiVersion = restApiVersion; } public AbstractXContentParser(NamedXContentRegistry xContentRegistry, DeprecationHandler deprecationHandler) { this(xContentRegistry, deprecationHandler, RestApiVersion.current()); } // The 3rd party parsers we rely on are known to silently truncate fractions: see // http://fasterxml.github.io/jackson-core/javadoc/2.3.0/com/fasterxml/jackson/core/JsonParser.html#getShortValue() // If this behaviour is flagged as undesirable and any truncation occurs // then this method is called to trigger the"malformed" handling logic void ensureNumberConversion(boolean coerce, long result, Class<? 
extends Number> clazz) throws IOException { if (coerce == false) { double fullVal = doDoubleValue(); if (result != fullVal) { // Need to throw type IllegalArgumentException as current catch // logic in NumberFieldMapper.parseCreateField relies on this // for "malformed" value detection throw new IllegalArgumentException(fullVal + " cannot be converted to " + clazz.getSimpleName() + " without data loss"); } } } @Override public boolean isBooleanValue() throws IOException { switch (currentToken()) { case VALUE_BOOLEAN: return true; case VALUE_STRING: return Booleans.isBoolean(textCharacters(), textOffset(), textLength()); default: return false; } } @Override public boolean booleanValue() throws IOException { Token token = currentToken(); if (token == Token.VALUE_STRING) { return Booleans.parseBoolean(textCharacters(), textOffset(), textLength(), false /* irrelevant */); } return doBooleanValue(); } protected abstract boolean doBooleanValue() throws IOException; @Override public short shortValue() throws IOException { return shortValue(DEFAULT_NUMBER_COERCE_POLICY); } @Override public short shortValue(boolean coerce) throws IOException { Token token = currentToken(); if (token == Token.VALUE_STRING) { checkCoerceString(coerce, Short.class); double doubleValue = Double.parseDouble(text()); if (doubleValue < Short.MIN_VALUE || doubleValue > Short.MAX_VALUE) { throw new IllegalArgumentException("Value [" + text() + "] is out of range for a short"); } return (short) doubleValue; } short result = doShortValue(); ensureNumberConversion(coerce, result, Short.class); return result; } protected abstract short doShortValue() throws IOException; @Override public int intValue() throws IOException { return intValue(DEFAULT_NUMBER_COERCE_POLICY); } @Override public int intValue(boolean coerce) throws IOException { Token token = currentToken(); if (token == Token.VALUE_STRING) { checkCoerceString(coerce, Integer.class); double doubleValue = Double.parseDouble(text()); if 
(doubleValue < Integer.MIN_VALUE || doubleValue > Integer.MAX_VALUE) { throw new IllegalArgumentException("Value [" + text() + "] is out of range for an integer"); } return (int) doubleValue; } int result = doIntValue(); ensureNumberConversion(coerce, result, Integer.class); return result; } protected abstract int doIntValue() throws IOException; private static BigInteger LONG_MAX_VALUE_AS_BIGINTEGER = BigInteger.valueOf(Long.MAX_VALUE); private static BigInteger LONG_MIN_VALUE_AS_BIGINTEGER = BigInteger.valueOf(Long.MIN_VALUE); // weak bounds on the BigDecimal representation to allow for coercion private static BigDecimal BIGDECIMAL_GREATER_THAN_LONG_MAX_VALUE = BigDecimal.valueOf(Long.MAX_VALUE).add(BigDecimal.ONE); private static BigDecimal BIGDECIMAL_LESS_THAN_LONG_MIN_VALUE = BigDecimal.valueOf(Long.MIN_VALUE).subtract(BigDecimal.ONE); /** Return the long that {@code stringValue} stores or throws an exception if the * stored value cannot be converted to a long that stores the exact same * value and {@code coerce} is false. */ private static long toLong(String stringValue, boolean coerce) { try { return Long.parseLong(stringValue); } catch (NumberFormatException e) { // we will try again with BigDecimal } final BigInteger bigIntegerValue; try { final BigDecimal bigDecimalValue = new BigDecimal(stringValue); if (bigDecimalValue.compareTo(BIGDECIMAL_GREATER_THAN_LONG_MAX_VALUE) >= 0 || bigDecimalValue.compareTo(BIGDECIMAL_LESS_THAN_LONG_MIN_VALUE) <= 0) { throw new IllegalArgumentException("Value [" + stringValue + "] is out of range for a long"); } bigIntegerValue = coerce ? 
bigDecimalValue.toBigInteger() : bigDecimalValue.toBigIntegerExact(); } catch (ArithmeticException e) { throw new IllegalArgumentException("Value [" + stringValue + "] has a decimal part"); } catch (NumberFormatException e) { throw new IllegalArgumentException("For input string: \"" + stringValue + "\""); } if (bigIntegerValue.compareTo(LONG_MAX_VALUE_AS_BIGINTEGER) > 0 || bigIntegerValue.compareTo(LONG_MIN_VALUE_AS_BIGINTEGER) < 0) { throw new IllegalArgumentException("Value [" + stringValue + "] is out of range for a long"); } assert bigIntegerValue.longValueExact() <= Long.MAX_VALUE; // asserting that no ArithmeticException is thrown return bigIntegerValue.longValue(); } @Override public long longValue() throws IOException { return longValue(DEFAULT_NUMBER_COERCE_POLICY); } @Override public long longValue(boolean coerce) throws IOException { Token token = currentToken(); if (token == Token.VALUE_STRING) { checkCoerceString(coerce, Long.class); return toLong(text(), coerce); } long result = doLongValue(); ensureNumberConversion(coerce, result, Long.class); return result; } protected abstract long doLongValue() throws IOException; @Override public float floatValue() throws IOException { return floatValue(DEFAULT_NUMBER_COERCE_POLICY); } @Override public float floatValue(boolean coerce) throws IOException { Token token = currentToken(); if (token == Token.VALUE_STRING) { checkCoerceString(coerce, Float.class); return Float.parseFloat(text()); } return doFloatValue(); } protected abstract float doFloatValue() throws IOException; @Override public double doubleValue() throws IOException { return doubleValue(DEFAULT_NUMBER_COERCE_POLICY); } @Override public double doubleValue(boolean coerce) throws IOException { Token token = currentToken(); if (token == Token.VALUE_STRING) { checkCoerceString(coerce, Double.class); return Double.parseDouble(text()); } return doDoubleValue(); } protected abstract double doDoubleValue() throws IOException; @Override public final String 
textOrNull() throws IOException { if (currentToken() == Token.VALUE_NULL) { return null; } return text(); } @Override public CharBuffer charBufferOrNull() throws IOException { if (currentToken() == Token.VALUE_NULL) { return null; } return charBuffer(); } @Override public Map<String, Object> map() throws IOException { return readMapSafe(this, SIMPLE_MAP_FACTORY); } @Override public Map<String, Object> mapOrdered() throws IOException { return readMapSafe(this, ORDERED_MAP_FACTORY); } @Override public Map<String, String> mapStrings() throws IOException { return map(HashMap::new, XContentParser::text); } @Override public <T> Map<String, T> map( Supplier<Map<String, T>> mapFactory, CheckedFunction<XContentParser, T, IOException> mapValueParser) throws IOException { final Map<String, T> map = mapFactory.get(); if (findNonEmptyMapStart(this) == false) { return map; } assert currentToken() == Token.FIELD_NAME : "Expected field name but saw [" + currentToken() + "]"; do { // Must point to field name String fieldName = currentName(); // And then the value... nextToken(); T value = mapValueParser.apply(this); map.put(fieldName, value); } while (nextToken() == XContentParser.Token.FIELD_NAME); return map; } @Override public List<Object> list() throws IOException { skipToListStart(this); return readListUnsafe(this, SIMPLE_MAP_FACTORY); } @Override public List<Object> listOrderedMap() throws IOException { skipToListStart(this); return readListUnsafe(this, ORDERED_MAP_FACTORY); } private static final Supplier<Map<String, Object>> SIMPLE_MAP_FACTORY = HashMap::new; private static final Supplier<Map<String, Object>> ORDERED_MAP_FACTORY = LinkedHashMap::new; private static Map<String, Object> readMapSafe(XContentParser parser, Supplier<Map<String, Object>> mapFactory) throws IOException { final Map<String, Object> map = mapFactory.get(); return findNonEmptyMapStart(parser) ? 
readMapEntries(parser, mapFactory, map) : map; } // Read a map without bounds checks from a parser that is assumed to be at the map's first field's name token private static Map<String, Object> readMapEntries(XContentParser parser, Supplier<Map<String, Object>> mapFactory, Map<String, Object> map) throws IOException { assert parser.currentToken() == Token.FIELD_NAME : "Expected field name but saw [" + parser.currentToken() + "]"; do { // Must point to field name String fieldName = parser.currentName(); // And then the value... Object value = readValueUnsafe(parser.nextToken(), parser, mapFactory); map.put(fieldName, value); } while (parser.nextToken() == Token.FIELD_NAME); return map; } /** * Checks if the next current token in the supplied parser is a map start for a non-empty map. * Skips to the next token if the parser does not yet have a current token (i.e. {@link #currentToken()} returns {@code null}) and then * checks it. * * @return true if a map start for a non-empty map is found */ private static boolean findNonEmptyMapStart(XContentParser parser) throws IOException { Token token = parser.currentToken(); if (token == null) { token = parser.nextToken(); } if (token == XContentParser.Token.START_OBJECT) { token = parser.nextToken(); } return token == Token.FIELD_NAME; } // Skips the current parser to the next array start. Assumes that the parser is either positioned before an array field's name token or // on the start array token. 
private static void skipToListStart(XContentParser parser) throws IOException { Token token = parser.currentToken(); if (token == null) { token = parser.nextToken(); } if (token == XContentParser.Token.FIELD_NAME) { token = parser.nextToken(); } if (token != XContentParser.Token.START_ARRAY) { throw new XContentParseException(parser.getTokenLocation(), "Failed to parse list: expecting " + XContentParser.Token.START_ARRAY + " but got " + token); } } // read a list without bounds checks, assuming the the current parser is always on an array start private static List<Object> readListUnsafe(XContentParser parser, Supplier<Map<String, Object>> mapFactory) throws IOException { assert parser.currentToken() == Token.START_ARRAY; ArrayList<Object> list = new ArrayList<>(); for (Token token = parser.nextToken(); token != null && token != XContentParser.Token.END_ARRAY; token = parser.nextToken()) { list.add(readValueUnsafe(token, parser, mapFactory)); } return list; } public static Object readValue(XContentParser parser, Supplier<Map<String, Object>> mapFactory) throws IOException { return readValueUnsafe(parser.currentToken(), parser, mapFactory); } /** * Reads next value from the parser that is assumed to be at the given current token without any additional checks. 
* * @param currentToken current token that the parser is at * @param parser parser to read from * @param mapFactory map factory to use for reading objects */ private static Object readValueUnsafe(Token currentToken, XContentParser parser, Supplier<Map<String, Object>> mapFactory) throws IOException { assert currentToken == parser.currentToken() : "Supplied current token [" + currentToken + "] is different from actual parser current token [" + parser.currentToken() + "]"; switch (currentToken) { case VALUE_STRING: return parser.text(); case VALUE_NUMBER: return parser.numberValue(); case VALUE_BOOLEAN: return parser.booleanValue(); case START_OBJECT: { final Map<String, Object> map = mapFactory.get(); return parser.nextToken() != Token.FIELD_NAME ? map : readMapEntries(parser, mapFactory, map); } case START_ARRAY: return readListUnsafe(parser, mapFactory); case VALUE_EMBEDDED_OBJECT: return parser.binaryValue(); case VALUE_NULL: default: return null; } } @Override public <T> T namedObject(Class<T> categoryClass, String name, Object context) throws IOException { return xContentRegistry.parseNamedObject(categoryClass, name, this, context); } @Override public NamedXContentRegistry getXContentRegistry() { return xContentRegistry; } @Override public abstract boolean isClosed(); @Override public RestApiVersion getRestApiVersion() { return restApiVersion; } @Override public DeprecationHandler getDeprecationHandler() { return deprecationHandler; } }
apache-2.0
zhouluoyang/openfire
src/plugins/websocket/src/java/org/jivesoftware/openfire/websocket/XMPPPPacketReaderFactory.java
2313
/** * Copyright (C) 2015 Tom Evans. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.openfire.websocket; import org.apache.commons.pool2.BasePooledObjectFactory; import org.apache.commons.pool2.PooledObject; import org.apache.commons.pool2.impl.DefaultPooledObject; import org.dom4j.io.XMPPPacketReader; import org.jivesoftware.openfire.net.MXParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xmlpull.v1.XmlPullParserException; import org.xmlpull.v1.XmlPullParserFactory; public class XMPPPPacketReaderFactory extends BasePooledObjectFactory<XMPPPacketReader> { private static Logger Log = LoggerFactory.getLogger( XMPPPPacketReaderFactory.class ); private static XmlPullParserFactory xppFactory = null; static { try { xppFactory = XmlPullParserFactory.newInstance(MXParser.class.getName(), null); xppFactory.setNamespaceAware(true); } catch (XmlPullParserException e) { Log.error("Error creating a parser factory", e); } } //-- BasePooledObjectFactory implementation @Override public XMPPPacketReader create() throws Exception { XMPPPacketReader parser = new XMPPPacketReader(); parser.setXPPFactory( xppFactory ); return parser; } @Override public PooledObject<XMPPPacketReader> wrap(XMPPPacketReader reader) { return new DefaultPooledObject<XMPPPacketReader>(reader); } @Override public boolean validateObject(PooledObject<XMPPPacketReader> po) { // reset the input for the pooled parser try { 
po.getObject().getXPPParser().resetInput(); return true; } catch (XmlPullParserException xppe) { Log.error("Failed to reset pooled parser; evicting from pool", xppe); return false; } } }
apache-2.0
VibyJocke/gocd
config/config-api/src/com/thoughtworks/go/util/ConfigUtil.java
4246
/*************************GO-LICENSE-START********************************* * Copyright 2014 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.go.util; import java.util.ArrayList; import java.util.List; import com.thoughtworks.go.config.ConfigAttribute; import com.thoughtworks.go.config.ConfigTag; import com.thoughtworks.go.config.registry.ConfigElementImplementationRegistry; import com.thoughtworks.go.domain.Task; import org.jdom.Attribute; import org.jdom.Element; import org.jdom.Namespace; import org.jdom.output.XMLOutputter; import static com.thoughtworks.go.util.ExceptionUtils.bomb; import static com.thoughtworks.go.util.ObjectUtil.defaultIfNull; public class ConfigUtil { private final String configFile; public ConfigUtil(String configFile) { this.configFile = defaultIfNull(configFile, "<no config file specified>"); } public static List<String> allTasks(ConfigElementImplementationRegistry registry) { List<String> allTasks = new ArrayList<>(); for (Class<? 
extends Task> task : registry.implementersOf(Task.class)) { ConfigTag tag = task.getAnnotation(ConfigTag.class); allTasks.add(tag.value()); } return allTasks; } public Element getChild(Element e, ConfigTag tag) { Element child = child(e, tag); if (child == null) { throw bomb("Error finding child '" + tag + "' in config: " + configFile + elementOutput(e)); } return child; } private Element child(Element e, ConfigTag tag) { return e.getChild(tag.value(), Namespace.getNamespace(tag.namespacePrefix(), tag.namespaceURI())); } public String getAttribute(Element e, String attribute) { Attribute attr = e.getAttribute(attribute); if(attr == null) { throw bomb("Error finding attribute '" + attribute + "' in config: " + configFile + elementOutput(e)); } return attr.getValue(); } public String elementOutput(Element e) { return "\n\t" + new XMLOutputter().outputString(e); } public boolean hasChild(Element e, ConfigTag tag) { return child(e, tag) != null; } public String getAttribute(Element e, String attribute, String defaultValue) { if (!hasAttribute(e, attribute)) { return defaultValue; } return getAttribute(e, attribute); } public boolean hasAttribute(Element e, String attribute) { return e.getAttribute(attribute) != null; } public boolean atTag(Element e, String tag) { return e.getName().equals(tag); } public boolean optionalAndMissingAttribute(Element e, ConfigAttribute attribute) { boolean optional = attribute.optional(); boolean isMissingAttribute = !hasAttribute(e, attribute.value()); if (!optional && isMissingAttribute) { throw bomb("Non optional attribute '" + attribute.value() + "' is not in element: " + elementOutput(e)); } return optional && isMissingAttribute; } public Object getAttribute(Element e, ConfigAttribute attribute) { if (optionalAndMissingAttribute(e, attribute)) { return null; } return getAttribute(e, attribute.value()); } public boolean optionalAndMissingTag(Element e, ConfigTag tag, boolean optional) { boolean isMissingElement = !hasChild(e, tag); if 
(!optional && isMissingElement) { throw bomb("Non optional tag '" + tag + "' is not in config file. Found: " + elementOutput(e)); } return optional && isMissingElement; } }
apache-2.0
guozhangwang/kafka
clients/src/test/java/org/apache/kafka/common/utils/ImplicitLinkedHashMultiCollectionTest.java
7105
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.common.utils; import org.apache.kafka.common.utils.ImplicitLinkedHashCollectionTest.TestElement; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; import java.util.Iterator; import java.util.LinkedList; import java.util.Random; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; /** * A unit test for ImplicitLinkedHashMultiCollection. 
*/ @Timeout(120) public class ImplicitLinkedHashMultiCollectionTest { @Test public void testNullForbidden() { ImplicitLinkedHashMultiCollection<TestElement> multiSet = new ImplicitLinkedHashMultiCollection<>(); assertFalse(multiSet.add(null)); } @Test public void testFindFindAllContainsRemoveOnEmptyCollection() { ImplicitLinkedHashMultiCollection<TestElement> coll = new ImplicitLinkedHashMultiCollection<>(); assertNull(coll.find(new TestElement(2))); assertFalse(coll.contains(new TestElement(2))); assertFalse(coll.remove(new TestElement(2))); assertTrue(coll.findAll(new TestElement(2)).isEmpty()); } @Test public void testInsertDelete() { ImplicitLinkedHashMultiCollection<TestElement> multiSet = new ImplicitLinkedHashMultiCollection<>(100); TestElement e1 = new TestElement(1); TestElement e2 = new TestElement(1); TestElement e3 = new TestElement(2); multiSet.mustAdd(e1); multiSet.mustAdd(e2); multiSet.mustAdd(e3); assertFalse(multiSet.add(e3)); assertEquals(3, multiSet.size()); expectExactTraversal(multiSet.findAll(e1).iterator(), e1, e2); expectExactTraversal(multiSet.findAll(e3).iterator(), e3); multiSet.remove(e2); expectExactTraversal(multiSet.findAll(e1).iterator(), e1); assertTrue(multiSet.contains(e2)); } @Test public void testTraversal() { ImplicitLinkedHashMultiCollection<TestElement> multiSet = new ImplicitLinkedHashMultiCollection<>(); expectExactTraversal(multiSet.iterator()); TestElement e1 = new TestElement(1); TestElement e2 = new TestElement(1); TestElement e3 = new TestElement(2); assertTrue(multiSet.add(e1)); assertTrue(multiSet.add(e2)); assertTrue(multiSet.add(e3)); expectExactTraversal(multiSet.iterator(), e1, e2, e3); assertTrue(multiSet.remove(e2)); expectExactTraversal(multiSet.iterator(), e1, e3); assertTrue(multiSet.remove(e1)); expectExactTraversal(multiSet.iterator(), e3); } static void expectExactTraversal(Iterator<TestElement> iterator, TestElement... 
sequence) { int i = 0; while (iterator.hasNext()) { TestElement element = iterator.next(); assertTrue(i < sequence.length, "Iterator yieled " + (i + 1) + " elements, but only " + sequence.length + " were expected."); if (sequence[i] != element) { fail("Iterator value number " + (i + 1) + " was incorrect."); } i = i + 1; } assertTrue(i == sequence.length, "Iterator yieled " + (i + 1) + " elements, but " + sequence.length + " were expected."); } @Test public void testEnlargement() { ImplicitLinkedHashMultiCollection<TestElement> multiSet = new ImplicitLinkedHashMultiCollection<>(5); assertEquals(11, multiSet.numSlots()); TestElement[] testElements = { new TestElement(100), new TestElement(101), new TestElement(102), new TestElement(100), new TestElement(101), new TestElement(105) }; for (int i = 0; i < testElements.length; i++) { assertTrue(multiSet.add(testElements[i])); } for (int i = 0; i < testElements.length; i++) { assertFalse(multiSet.add(testElements[i])); } assertEquals(23, multiSet.numSlots()); assertEquals(testElements.length, multiSet.size()); expectExactTraversal(multiSet.iterator(), testElements); multiSet.remove(testElements[1]); assertEquals(23, multiSet.numSlots()); assertEquals(5, multiSet.size()); expectExactTraversal(multiSet.iterator(), testElements[0], testElements[2], testElements[3], testElements[4], testElements[5]); } @Test public void testManyInsertsAndDeletes() { Random random = new Random(123); LinkedList<TestElement> existing = new LinkedList<>(); ImplicitLinkedHashMultiCollection<TestElement> multiSet = new ImplicitLinkedHashMultiCollection<>(); for (int i = 0; i < 100; i++) { for (int j = 0; j < 4; j++) { TestElement testElement = new TestElement(random.nextInt()); multiSet.mustAdd(testElement); existing.add(testElement); } int elementToRemove = random.nextInt(multiSet.size()); Iterator<TestElement> iter1 = multiSet.iterator(); Iterator<TestElement> iter2 = existing.iterator(); for (int j = 0; j <= elementToRemove; j++) { iter1.next(); 
iter2.next(); } iter1.remove(); iter2.remove(); expectTraversal(multiSet.iterator(), existing.iterator()); } } void expectTraversal(Iterator<TestElement> iter, Iterator<TestElement> expectedIter) { int i = 0; while (iter.hasNext()) { TestElement element = iter.next(); assertTrue(expectedIter.hasNext(), "Iterator yieled " + (i + 1) + " elements, but only " + i + " were expected."); TestElement expected = expectedIter.next(); assertTrue(expected == element, "Iterator value number " + (i + 1) + " was incorrect."); i = i + 1; } assertFalse(expectedIter.hasNext(), "Iterator yieled " + i + " elements, but at least " + (i + 1) + " were expected."); } }
apache-2.0
tryleung/energy
renren-api/src/main/java/io/renren/service/impl/UserServiceImpl.java
1817
package io.renren.service.impl; import io.renren.dao.UserDao; import io.renren.entity.UserEntity; import io.renren.service.UserService; import io.renren.utils.RRException; import io.renren.validator.Assert; import org.apache.commons.codec.digest.DigestUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.Date; import java.util.List; import java.util.Map; @Service("userService") public class UserServiceImpl implements UserService { @Autowired private UserDao userDao; @Override public UserEntity queryObject(Long userId){ return userDao.queryObject(userId); } @Override public List<UserEntity> queryList(Map<String, Object> map){ return userDao.queryList(map); } @Override public int queryTotal(Map<String, Object> map){ return userDao.queryTotal(map); } @Override public void save(String mobile, String password){ UserEntity user = new UserEntity(); user.setMobile(mobile); user.setUsername(mobile); user.setPassword(DigestUtils.sha256Hex(password)); user.setCreateTime(new Date()); userDao.save(user); } @Override public void update(UserEntity user){ userDao.update(user); } @Override public void delete(Long userId){ userDao.delete(userId); } @Override public void deleteBatch(Long[] userIds){ userDao.deleteBatch(userIds); } @Override public UserEntity queryByMobile(String mobile) { return userDao.queryByMobile(mobile); } @Override public long login(String mobile, String password) { UserEntity user = queryByMobile(mobile); Assert.isNull(user, "手机号或密码错误"); //密码错误 if(!user.getPassword().equals(DigestUtils.sha256Hex(password))){ throw new RRException("手机号或密码错误"); } return user.getUserId(); } }
apache-2.0
rasika90/iot-server-extensions
components/iotserver-api/org.wso2.carbon.device.mgt.iot.common.api/src/main/java/org/wso2/carbon/device/mgt/iot/common/api/PolicyManagementService.java
17705
/* * Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.wso2.carbon.device.mgt.iot.common.api; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.wso2.carbon.context.CarbonContext; import org.wso2.carbon.context.PrivilegedCarbonContext; import org.wso2.carbon.device.mgt.common.DeviceIdentifier; import org.wso2.carbon.device.mgt.common.DeviceManagementException; import org.wso2.carbon.policy.mgt.common.Policy; import org.wso2.carbon.policy.mgt.common.PolicyAdministratorPoint; import org.wso2.carbon.policy.mgt.common.PolicyManagementException; import org.wso2.carbon.policy.mgt.common.PolicyMonitoringTaskException; import org.wso2.carbon.policy.mgt.common.monitor.ComplianceData; import org.wso2.carbon.policy.mgt.common.monitor.PolicyComplianceException; import org.wso2.carbon.policy.mgt.core.PolicyManagerService; import org.wso2.carbon.policy.mgt.core.task.TaskScheduleService; import javax.jws.WebService; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.*; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import java.util.List; @WebService public class PolicyManagementService { private static Log log = LogFactory.getLog(PolicyManagementService.class); @Context //injected response proxy supporting multiple thread private HttpServletResponse response; private PrivilegedCarbonContext ctx; 
private PolicyManagerService getPolicyServiceProvider() throws DeviceManagementException { String tenantDomain = CarbonContext.getThreadLocalCarbonContext().getTenantDomain(); PrivilegedCarbonContext.startTenantFlow(); ctx = PrivilegedCarbonContext.getThreadLocalCarbonContext(); ctx.setTenantDomain(tenantDomain, true); if (log.isDebugEnabled()) { log.debug("Getting thread local carbon context for tenant domain: " + tenantDomain); } PolicyManagerService policyManagerService = (PolicyManagerService) ctx.getOSGiService( PolicyManagerService.class, null); if (policyManagerService == null) { String msg = "Policy Management service not initialized"; log.error(msg); throw new DeviceManagementException(msg); } return policyManagerService; } private void endTenantFlow() { PrivilegedCarbonContext.endTenantFlow(); ctx = null; if (log.isDebugEnabled()) { log.debug("Tenant flow ended"); } } @POST @Path("/inactive-policy") @Produces("application/json") public boolean addPolicy(Policy policy) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint pap = policyManagerService.getPAP(); pap.addPolicy(policy); response.setStatus(Response.Status.CREATED.getStatusCode()); if (log.isDebugEnabled()) { log.debug("Policy has been added successfully."); } return true; } catch (PolicyManagementException e) { String error = "Policy Management related exception."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @POST @Path("/active-policy") @Produces("application/json") public boolean addActivePolicy(Policy policy) { policy.setActive(true); try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); 
PolicyAdministratorPoint pap = policyManagerService.getPAP(); pap.addPolicy(policy); response.setStatus(Response.Status.CREATED.getStatusCode()); if (log.isDebugEnabled()) { log.debug("Policy has been added successfully."); } return true; } catch (PolicyManagementException e) { String error = "Policy Management related exception."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @GET @Produces("application/json") public Policy[] getAllPolicies() { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint policyAdministratorPoint = policyManagerService.getPAP(); List<Policy> policies = policyAdministratorPoint.getPolicies(); return policyAdministratorPoint.getPolicies().toArray(new Policy[policies.size()]); } catch (PolicyManagementException e) { String error = "Policy Management related exception"; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return null; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return null; } finally { this.endTenantFlow(); } } @GET @Produces("application/json") @Path("/{id}") public Policy getPolicy(@PathParam("id") int policyId) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint policyAdministratorPoint = policyManagerService.getPAP(); Policy policy = policyAdministratorPoint.getPolicy(policyId); if (policy != null) { if (log.isDebugEnabled()) { log.debug("Sending policy for ID " + policyId); } return policy; } 
else { log.error("Policy for ID " + policyId + " not found."); response.setStatus(Response.Status.NOT_FOUND.getStatusCode()); return null; } } catch (PolicyManagementException e) { String error = "Policy Management related exception"; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return null; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return null; } finally { this.endTenantFlow(); } } @GET @Path("/count") public int getPolicyCount() { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint policyAdministratorPoint = policyManagerService.getPAP(); return policyAdministratorPoint.getPolicyCount(); } catch (PolicyManagementException e) { String error = "Policy Management related exception"; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return -1; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return -1; } finally { this.endTenantFlow(); } } @PUT @Path("/{id}") @Produces("application/json") public boolean updatePolicy(Policy policy, @PathParam("id") int policyId) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint pap = policyManagerService.getPAP(); org.wso2.carbon.policy.mgt.common.Policy previousPolicy = pap.getPolicy(policyId); policy.setProfile(pap.getProfile(previousPolicy.getProfileId())); policy.setPolicyName(previousPolicy.getPolicyName()); pap.updatePolicy(policy); if (log.isDebugEnabled()) { log.debug("Policy with ID " + policyId + " has been updated successfully."); } return true; } catch (PolicyManagementException e) { String error = 
"Policy Management related exception"; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @PUT @Path("/priorities") @Consumes("application/json") @Produces("application/json") public boolean updatePolicyPriorities(List<Policy> priorityUpdatedPolicies) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint pap = policyManagerService.getPAP(); boolean policiesUpdated = pap.updatePolicyPriorities(priorityUpdatedPolicies); if (policiesUpdated) { if (log.isDebugEnabled()) { log.debug("Policy Priorities successfully updated."); } return true; } else { if (log.isDebugEnabled()) { log.debug("Policy priorities did not update. Bad Request."); } response.setStatus(Response.Status.BAD_REQUEST.getStatusCode()); return false; } } catch (PolicyManagementException e) { String error = "Exception in updating policy priorities."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @DELETE @Path("/{id}") @Produces("application/json") public boolean deletePolicy(@PathParam("id") int policyId) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint pap = policyManagerService.getPAP(); org.wso2.carbon.policy.mgt.common.Policy policy = pap.getPolicy(policyId); boolean policyDeleted = pap.deletePolicy(policy); if (policyDeleted) { if (log.isDebugEnabled()) { 
log.debug("Policy by id:" + policyId + " has been successfully deleted."); } return true; } else { if (log.isDebugEnabled()) { log.debug("Policy by id:" + policyId + " does not exist."); } response.setStatus(Response.Status.NOT_FOUND.getStatusCode()); return false; } } catch (PolicyManagementException e) { String error = "Exception in deleting policy by id:" + policyId; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @PUT @Produces("application/json") @Path("/activate/{id}") public boolean activatePolicy(@PathParam("id") int policyId) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint pap = policyManagerService.getPAP(); pap.activatePolicy(policyId); if (log.isDebugEnabled()) { log.debug("Policy by id:" + policyId + " has been successfully activated."); } return true; } catch (PolicyManagementException e) { String error = "Exception in activating policy by id:" + policyId; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @PUT @Produces("application/json") @Path("/inactivate/{id}") public boolean inactivatePolicy(@PathParam("id") int policyId) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint pap = policyManagerService.getPAP(); pap.inactivatePolicy(policyId); if (log.isDebugEnabled()) { log.debug("Policy by id:" + 
policyId + " has been successfully inactivated."); } return true; } catch (PolicyManagementException e) { String error = "Exception in inactivating policy by id:" + policyId; log.error(error, e); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @PUT @Produces("application/json") @Path("/apply-changes") public boolean applyChanges() { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); PolicyAdministratorPoint pap = policyManagerService.getPAP(); pap.publishChanges(); if (log.isDebugEnabled()) { log.debug("Changes have been successfully updated."); } return true; } catch (PolicyManagementException e) { String error = "Exception in applying changes."; log.error(error, e); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @GET @Path("/start-task/{milliseconds}") public boolean startTaskService(@PathParam("milliseconds") int monitoringFrequency) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); TaskScheduleService taskScheduleService = policyManagerService.getTaskScheduleService(); taskScheduleService.startTask(monitoringFrequency); if (log.isDebugEnabled()) { log.debug("Policy monitoring service started successfully."); } return true; } catch (PolicyMonitoringTaskException e) { String error = "Policy Management related exception."; log.error(error, e); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); 
response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @GET @Path("/update-task/{milliseconds}") public boolean updateTaskService(@PathParam("milliseconds") int monitoringFrequency) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); TaskScheduleService taskScheduleService = policyManagerService.getTaskScheduleService(); taskScheduleService.updateTask(monitoringFrequency); if (log.isDebugEnabled()) { log.debug("Policy monitoring service updated successfully."); } return true; } catch (PolicyMonitoringTaskException e) { String error = "Policy Management related exception."; log.error(error, e); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @GET @Path("/stop-task") public boolean stopTaskService() { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); TaskScheduleService taskScheduleService = policyManagerService.getTaskScheduleService(); taskScheduleService.stopTask(); if (log.isDebugEnabled()) { log.debug("Policy monitoring service stopped successfully."); } return true; } catch (PolicyMonitoringTaskException e) { String error = "Policy Management related exception."; log.error(error, e); return false; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return false; } finally { this.endTenantFlow(); } } @GET @Path("/{type}/{id}") public ComplianceData getComplianceDataOfDevice(@PathParam("id") String deviceId, @PathParam("type") String deviceType) { try { PolicyManagerService policyManagerService = getPolicyServiceProvider(); DeviceIdentifier deviceIdentifier 
= new DeviceIdentifier(); deviceIdentifier.setType(deviceType); deviceIdentifier.setId(deviceId); return policyManagerService.getDeviceCompliance(deviceIdentifier); } catch (PolicyComplianceException e) { String error = "Error occurred while getting the compliance data."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return null; } catch (DeviceManagementException e) { String error = "Error occurred while invoking Policy Management Service."; log.error(error, e); response.setStatus(Response.Status.INTERNAL_SERVER_ERROR.getStatusCode()); return null; } finally { this.endTenantFlow(); } } }
apache-2.0
horzelski/orbit
commons/src/main/java/com/ea/orbit/exception/UncheckedException.java
2515
/* Copyright (C) 2015 Electronic Arts Inc. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Electronic Arts, Inc. ("EA") nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY ELECTRONIC ARTS AND ITS CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.ea.orbit.exception; /** * A runtime exception that dodges some warnings about using * the class RuntimeException directly. * <p/> * This is useful for those cases where checked exceptions are a nuisance * and no matter the error you just want to rethrow the exception without * having to add it to your api signature. 
*/ public class UncheckedException extends RuntimeException { private static final long serialVersionUID = 1L; public UncheckedException() { super(); } public UncheckedException(String message, Throwable cause, boolean enableSuppression, boolean writableStackTrace) { super(message, cause, enableSuppression, writableStackTrace); } public UncheckedException(String message, Throwable cause) { super(message, cause); } public UncheckedException(String message) { super(message); } public UncheckedException(Throwable cause) { super(cause); } }
bsd-3-clause
exponentjs/exponent
android/ReactAndroid/src/main/java/com/facebook/react/fabric/mounting/mountitems/SendAccessibilityEvent.java
1704
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.react.fabric.mounting.mountitems; import androidx.annotation.NonNull; import com.facebook.react.bridge.ReactSoftException; import com.facebook.react.bridge.RetryableMountingLayerException; import com.facebook.react.fabric.mounting.MountingManager; public class SendAccessibilityEvent implements MountItem { private final String TAG = "Fabric.SendAccessibilityEvent"; private final int mReactTag; private final int mEventType; public SendAccessibilityEvent(int reactTag, int eventType) { mReactTag = reactTag; mEventType = eventType; } @Override public void execute(@NonNull MountingManager mountingManager) { try { mountingManager.sendAccessibilityEvent(mReactTag, mEventType); } catch (RetryableMountingLayerException e) { // Accessibility events are similar to commands in that they're imperative // calls from JS, disconnected from the commit lifecycle, and therefore // inherently unpredictable and dangerous. If we encounter a "retryable" // error, that is, a known category of errors that this is likely to hit // due to race conditions (like the view disappearing after the event is // queued and before it executes), we log a soft exception and continue along. // Other categories of errors will still cause a hard crash. ReactSoftException.logSoftException(TAG, e); } } @Override public String toString() { return "SendAccessibilityEvent [" + mReactTag + "] " + mEventType; } }
bsd-3-clause
hejunbinlan/Qiitanium
qiitanium/src/main/java/com/ogaclejapan/qiitanium/util/CrashlyticsTree.java
433
package com.ogaclejapan.qiitanium.util; import android.util.Log; import com.crashlytics.android.Crashlytics; import timber.log.Timber; public class CrashlyticsTree extends Timber.Tree { @Override protected void log(int priority, String tag, String message, Throwable t) { if (priority != Log.ERROR) { return; } Crashlytics.log(message); if (t != null) { Crashlytics.logException(t); } } }
mit
guliashvili/Sums-and-Products
src/main/java/org/uncommons/watchmaker/framework/EvolutionaryOperator.java
3228
//=============================================================================
// Copyright 2006-2010 Daniel W. Dyer
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//=============================================================================
package org.uncommons.watchmaker.framework;

import java.util.List;
import java.util.Random;

/**
 * <p>An evolutionary operator is a function that takes a population of
 * candidates as an argument and returns a new population that is the
 * result of applying a transformation to the original population.</p>
 * <p><strong>An implementation of this class must not modify any of
 * the selected candidate objects passed in.</strong>  Doing so will
 * affect the correct operation of the {@link EvolutionEngine}.  Instead
 * the operator should create and return new candidate objects.  The
 * operator is not required to create copies of unmodified individuals
 * (for efficiency these may be returned directly).</p>
 * @param <T> The type of evolvable entity that this operator accepts.
 * @author Daniel Dyer
 */
public interface EvolutionaryOperator<T>
{
    /**
     * <p>Apply the operation to each entry in the list of selected
     * candidates.  It is important to note that this method operates on
     * the list of candidates returned by the selection strategy and not
     * on the current population.  Each entry in the list (not each
     * individual - the list may contain the same individual more than
     * once) must be operated on exactly once.</p>
     *
     * <p>Implementing classes should not assume any particular ordering
     * (or lack of ordering) for the selection.  If ordering or
     * shuffling is required, it should be performed by the implementing
     * class.  The implementation should not re-order the list provided
     * but instead should make a copy of the list and re-order that.
     * The ordering of the selection should be totally irrelevant for
     * operators that process each candidate in isolation, such as mutation.
     * It should only be an issue for operators, such as cross-over, that
     * deal with multiple candidates in a single operation.</p>
     * <p><strong>The operator must not modify any of the candidates passed
     * in</strong>.  Instead it should return a list that contains evolved
     * copies of those candidates (unmodified candidates can be included in
     * the results without having to be copied).</p>
     * @param selectedCandidates The individuals to evolve.
     * @param rng A source of randomness for stochastic operators (most
     * operators will be stochastic).
     * @return The evolved individuals.
     */
    List<T> apply(List<T> selectedCandidates, Random rng);
}
mit
feiyue/maven-framework-project
commons-dbutils-tutorial/src/main/java/org/commons/dbutils/datasource/tutorial/BoneCPDataSource.java
1798
package org.commons.dbutils.datasource.tutorial; import java.beans.PropertyVetoException; import java.io.IOException; import java.sql.Connection; import java.sql.SQLException; import javax.sql.DataSource; import com.jolbox.bonecp.BoneCP; import com.jolbox.bonecp.BoneCPConfig; public class BoneCPDataSource { private DataSource dataSource; private static BoneCPDataSource boneCPDataSource; private BoneCP connectionPool; private BoneCPDataSource() throws IOException, SQLException, PropertyVetoException { try { // load the database driver (make sure this is in your classpath!) Class.forName("org.h2.Driver"); } catch (Exception e) { e.printStackTrace(); return; } try { // setup the connection pool using BoneCP Configuration BoneCPConfig config = new BoneCPConfig(); // jdbc url specific to your database, eg // jdbc:mysql://127.0.0.1/yourdb config.setJdbcUrl("jdbc:h2:./target/test;AUTO_SERVER=TRUE"); config.setUsername("sa"); config.setPassword(""); config.setMinConnectionsPerPartition(5); config.setMaxConnectionsPerPartition(10); config.setPartitionCount(1); // setup the connection pool connectionPool = new BoneCP(config); } catch (Exception e) { e.printStackTrace(); return; } } public static BoneCPDataSource getInstance() throws IOException, SQLException, PropertyVetoException { if (boneCPDataSource == null) { boneCPDataSource = new BoneCPDataSource(); return boneCPDataSource; } else { return boneCPDataSource; } } public Connection getConnection() throws SQLException { return this.connectionPool.getConnection(); } public DataSource getDataSource() { return dataSource; } public void setDataSource(DataSource dataSource) { this.dataSource = dataSource; } }
mit
ControlSystemStudio/cs-studio
applications/appunorganized/appunorganized-plugins/org.csstudio.swt.rtplot/src/org/csstudio/swt/rtplot/internal/util/IntList.java
1873
/******************************************************************************* * Copyright (c) 2014 Oak Ridge National Laboratory. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html ******************************************************************************/ package org.csstudio.swt.rtplot.internal.util; import java.util.Arrays; /** List of <code>int</code> * * <p>Fundamentally like <code>List<Integer></code>, * but avoids boxing operations. * * @author Kay Kasemir */ public class IntList { private int[] data; private int size; /** @param capacity Initial capacity */ public IntList(final int capacity) { data = new int[capacity]; size = 0; } /** @return Size */ final public int size() { return size; } /** @param value Value to add */ final public void add(final int value) { if (size == data.length) data = Arrays.copyOf(data, 2*data.length); data[size++] = value; } /** @param index Valid index 0 .. size()-1 * @param value Value for that array element */ public void set(final int index, final int value) { data[index] = value; } /** @param index Index of value to get * @return Value at that index */ final public int get(final int index) { return data[index]; } /** Clear array elements */ final public void clear() { size = 0; } /** @return Plain <code>int</code> array */ final public int[] toArray() { final int[] copy = new int[size]; System.arraycopy(data, 0, copy, 0, size); return copy; } }
epl-1.0
veresh/tempo
ui-fw/src/test/java/org/intalio/tempo/portlet/SecuredControllerTest.java
7820
package org.intalio.tempo.portlet;

import java.util.ArrayList;
import java.util.Collection;

import javax.portlet.ActionResponse;
import javax.portlet.RenderResponse;
import javax.servlet.http.HttpSession;

import junit.framework.TestCase;

import org.apache.pluto.wrappers.ActionRequestWrapper;
import org.apache.pluto.wrappers.PortletRequestWrapper;
import org.intalio.tempo.security.Property;
import org.intalio.tempo.security.token.TokenService;
import org.intalio.tempo.uiframework.UIFWApplicationState;
import org.intalio.tempo.web.ApplicationState;
import org.intalio.tempo.web.User;
import org.intalio.tempo.web.controller.ActionDef;
import org.jmock.Expectations;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.validation.BindException;

import com.googlecode.instinct.expect.ExpectThat;
import com.googlecode.instinct.expect.ExpectThatImpl;
import com.googlecode.instinct.integrate.junit4.InstinctRunner;
import com.googlecode.instinct.marker.annotate.Mock;
import com.googlecode.instinct.marker.annotate.Specification;
import com.googlecode.instinct.marker.annotate.Stub;
import com.googlecode.instinct.marker.annotate.Subject;

import edu.yale.its.tp.cas.client.CASReceipt;

/**
 * Specification-style tests for SecuredController, using the Instinct runner
 * with jMock {@link Expectations} for the portlet/session/security
 * collaborators. Each test wires just the mock interactions the exercised
 * code path touches, so the expectation blocks double as a description of
 * that path.
 */
@RunWith(InstinctRunner.class)
public class SecuredControllerTest extends TestCase {
    protected transient Logger _log = LoggerFactory.getLogger(getClass());
    final static ExpectThat expect = new ExpectThatImpl();

    // Subject under test plus the mocked collaborators shared by the specs.
    @Subject SecuredController sc;
    @Mock TokenService ts;
    @Mock PortletRequestWrapper request;
    @Mock HttpSession s;
    @Mock ApplicationState st;
    @Mock User user;

    /**
     * getCurrentUserName() should walk request -> session -> application
     * state -> current user and return that user's name.
     */
    @Specification
    public void testGetCurrentUserName() {
        sc = new SecuredController(ts);
        expect.that(new Expectations() {{
            atLeast(1).of(request).getSession(); will(returnValue(s));
            atLeast(1).of(s).getAttribute("APPLICATION_STATE"); will(returnValue(st));
            atLeast(1).of(st).getCurrentUser(); will(returnValue(user));
            atLeast(1).of(user).getName(); will(returnValue("user1"));
        }});
        assertTrue(SecuredController.getCurrentUserName(request).equals("user1"));
    }

    @Mock RenderResponse rr;
    // @Mock TestRenderRequest rrequest;
    @Mock BindException be;
    @Mock CASReceipt receipt;
    @Stub Property[] props;

    /**
     * showForm() with an empty token-property array. The expectation block
     * scripts TWO passes: first with a current user present, then with no
     * current user so the CAS receipt/token lookup path runs; hence the two
     * showForm calls at the end.
     */
    @Specification
    public void testShowForm() throws Exception {
        props = new Property[0];
        final String serviceURL = "dummyServiceURL";
        sc = new SecuredController(ts, serviceURL);
        expect.that(new Expectations() {{
            one(s).getAttribute("edu.yale.its.tp.cas.client.filter.receipt"); will(returnValue(receipt));
            atLeast(1).of(receipt).getPgtIou(); will(returnValue("dummy"));
            atLeast(1).of(s).getAttribute("APPLICATION_STATE"); will(returnValue(st));
            one(st).getCurrentUser(); will(returnValue(user));
            one(st).getCurrentUser(); will(returnValue(null));
            one(s).getAttribute("edu.yale.its.tp.cas.client.filter.receipt"); will(returnValue(receipt));
            one(st).getCurrentUser(); will(returnValue(null));
            one(ts).getTokenFromTicket(null, serviceURL); will(returnValue(null));
            one(ts).getTokenProperties(null); will(returnValue(props));
            ignoring(st);
            one(s).setAttribute("APPLICATION_STATE", st);
        }});
        MockRenderRequest rrequest = new MockRenderRequest(s);
        assertNotNull(sc.showForm(rrequest, rr, be));
        assertNotNull(sc.showForm(rrequest, rr, be)); // travel different branch
    }

    // @Mock UIFWApplicationState st2;

    /**
     * showForm() again, but with populated token properties (user + roles)
     * so the branch that builds a user from the token is exercised.
     */
    // travel different branch
    @Specification
    public void testShowForm2() throws Exception {
        props = new Property[4];
        props[0] = new Property("a", "b");
        props[1] = new Property("c", "d");
        props[2] = new Property("user", "dummyUser");
        props[3] = new Property("roles", "role1,role2,role3");
        final String serviceURL = "dummyServiceURL";
        sc = new SecuredController(ts, serviceURL);
        expect.that(new Expectations() {{
            one(s).getAttribute("edu.yale.its.tp.cas.client.filter.receipt"); will(returnValue(receipt));
            atLeast(1).of(receipt).getPgtIou(); will(returnValue("dummy"));
            atLeast(1).of(s).getAttribute("APPLICATION_STATE"); will(returnValue(st));
            one(st).getCurrentUser(); will(returnValue(null));
            one(ts).getTokenFromTicket(null, serviceURL); will(returnValue(null));
            one(ts).getTokenProperties(null); will(returnValue(props));
            ignoring(st).getClass();
            one(s).setAttribute("APPLICATION_STATE", st);
        }});
        MockRenderRequest rrequest = new MockRenderRequest(s);
        assertNotNull(sc.showForm(rrequest, rr, be)); // travel different branch
    }

    @Mock ActionRequestWrapper areq;
    @Mock ActionResponse ares;
    @Mock UIFWApplicationState st1;

    // Disabled spec kept for reference: exercised handleRenderRequest /
    // handleActionRequest end-to-end with a scripted action dispatch.
//    @Specification
//    public void testHandleRequest() throws Exception{
//        props = new Property[0];
//        final String serviceURL = "http://localhost:8080/dummy";
//        sc = new SecuredController(ts, serviceURL);
//        expect.that(new Expectations(){{
//            one(rr).setProperty("portlet.expiration-cache", "0");
//            one(s).getAttribute("edu.yale.its.tp.cas.client.filter.receipt");will(returnValue(receipt));
//            atLeast(1).of(receipt).getPgtIou();will(returnValue("dummy"));
//            atLeast(1).of(s).getAttribute("APPLICATION_STATE");will(returnValue(st1));
//            atLeast(1).of(st1).getCurrentUser();will(returnValue(user));
////            one(st).getCurrentUser();will(returnValue(null));
//            atLeast(1).of(areq).getSession();will(returnValue(s));
////            one(st).getCurrentUser();will(returnValue(user));
//            one(areq).getParameter("actionName");will(returnValue("testAction"));
//            one(areq).getParameter("update");will(returnValue("true"));
//            one(areq).getParameter("page");
//            one(areq).getParameter("rp");
//            one(areq).getParameter("sortname");
//            one(areq).getScheme();
//            one(areq).getServerName();
//            one(areq).getServerPort();
//
//            one(user).getToken();will(returnValue("token"));
//            one(user).getName();will(returnValue("user1"));
////
////
////            one(s).getAttribute("edu.yale.its.tp.cas.client.filter.receipt");will(returnValue(receipt));
////            one(st).getCurrentUser();will(returnValue(null));
////            one(ts).getTokenFromTicket(null, serviceURL);will(returnValue(null));
////            one(ts).getTokenProperties(null);will(returnValue(props));
//
////            ignoring(st);
////
//            one(s).setAttribute("APPLICATION_STATE", st);
//        }});
//        MockRenderRequest rrequest = new MockRenderRequest(s);
////        ActionRequestImpl a = new ActionRequestImpl();
//        assertNotNull(sc.handleRenderRequest(rrequest, rr));
//        Collection<ActionDef> col = new ArrayList<ActionDef>();
//        ActionDef ad = new ActionDef();
//        ad.setActionName("testAction");
//        ad.setActionClass("org.intalio.tempo.portlet.TasksAction");
//        col.add(ad);
//        sc.setActionDefs(col);
//        sc.setDefaultAction(ad);
//
//        sc.handleActionRequest(areq, ares);
//        assertTrue(sc.getDefaultAction() == ad);
//        assertTrue(sc.getActionDefs() == col);
//        Action<Object> ac = sc.instantiateDefaultAction();
//        assertNotNull(ac);
//    }
}
epl-1.0
RallySoftware/eclipselink.runtime
foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/performance/concurrent/ReadAnyObjectIsolatedConcurrentTest.java
2001
/*******************************************************************************
 * Copyright (c) 1998, 2015 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.testing.tests.performance.concurrent;

import java.util.*;

import org.eclipse.persistence.sessions.*;
import org.eclipse.persistence.testing.models.performance.toplink.*;

/**
 * This test compares the concurrency of read object cache hits.
 * This test must be run on a multi-CPU machine to be meaningful.
 */
public class ReadAnyObjectIsolatedConcurrentTest extends IsolatedConcurrentTest {
    // Round-robin cursor into allObjects. NOTE(review): incremented without
    // synchronization by concurrent tasks, so indices may be skipped or
    // repeated under contention — presumably acceptable for a throughput
    // test where any cached employee will do; confirm.
    protected int index;
    // Employees preloaded once in setup() and re-read repeatedly in runTask().
    protected List allObjects;

    public ReadAnyObjectIsolatedConcurrentTest() {
        setDescription("This tests the concurrency of read-object cache hits.");
    }

    /**
     * Find all employees.
     */
    public void setup() {
        super.setup();
        // Copy into an ArrayList so runTask() indexes a stable local list.
        allObjects = new ArrayList(getServerSession().acquireClientSession().readAllObjects(Employee.class));
    }

    /**
     * Cached read-object.
     */
    public void runTask() throws Exception {
        // Advance the shared cursor, wrapping to 0 once past the end.
        int currentIndex = index;
        if (currentIndex >= allObjects.size()) {
            index = 0;
            currentIndex = 0;
        }
        index++;
        Object employee = allObjects.get(currentIndex);
        // Acquire a fresh client session per task and release it when done.
        Session client = getServerSession().acquireClientSession();
        client.readObject(employee);
        client.release();
    }
}
epl-1.0
elucash/eclipse-oxygen
org.eclipse.jdt.core/src/org/eclipse/jdt/internal/core/util/ILRUCacheable.java
1044
/*******************************************************************************
 * Copyright (c) 2000, 2006 IBM Corporation and others.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 *
 * Contributors:
 *     IBM Corporation - initial API and implementation
 *******************************************************************************/
package org.eclipse.jdt.internal.core.util;

/**
 * Implemented by cache entries whose footprint in an LRUCache varies from
 * entry to entry. Any cached item that does not implement this interface is
 * treated as occupying exactly one unit of cache space.
 *
 * @see LRUCache
 */
public interface ILRUCacheable {

    /**
     * Returns how many units of LRU cache space this entry consumes.
     * Plain (non-ILRUCacheable) entries count as 1.
     *
     * @return the cache footprint of the receiver
     */
    int getCacheFootprint();
}
epl-1.0
RallySoftware/eclipselink.runtime
jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/config/locking/OptimisticLockingImpl.java
2036
/*******************************************************************************
 * Copyright (c) 2013, 2015 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Guy Pelletier - initial API and implementation
 ******************************************************************************/
package org.eclipse.persistence.internal.jpa.config.locking;

import java.util.ArrayList;

import org.eclipse.persistence.internal.jpa.config.MetadataImpl;
import org.eclipse.persistence.internal.jpa.config.columns.ColumnImpl;
import org.eclipse.persistence.internal.jpa.metadata.columns.ColumnMetadata;
import org.eclipse.persistence.internal.jpa.metadata.locking.OptimisticLockingMetadata;
import org.eclipse.persistence.jpa.config.Column;
import org.eclipse.persistence.jpa.config.OptimisticLocking;

/**
 * JPA scripting API implementation.
 *
 * <p>Fluent configuration wrapper around {@link OptimisticLockingMetadata}:
 * the setters return {@code this} for chaining, and
 * {@link #addSelectedColumn()} appends to the metadata's selected-column
 * list initialized by the constructor.
 *
 * @author Guy Pelletier
 * @since EclipseLink 2.5.1
 */
public class OptimisticLockingImpl extends MetadataImpl<OptimisticLockingMetadata> implements OptimisticLocking {

    /** Starts with an empty selected-columns list so addSelectedColumn can append. */
    public OptimisticLockingImpl() {
        super(new OptimisticLockingMetadata());
        // Diamond operator; was new ArrayList<ColumnMetadata>().
        getMetadata().setSelectedColumns(new ArrayList<>());
    }

    /**
     * Adds a new selected column to the metadata and returns its
     * configuration wrapper so the caller can populate it.
     */
    @Override
    public Column addSelectedColumn() {
        ColumnImpl column = new ColumnImpl();
        getMetadata().getSelectedColumns().add(column.getMetadata());
        return column;
    }

    /** Sets the cascade flag on the underlying metadata; returns this for chaining. */
    @Override
    public OptimisticLocking setCascade(Boolean cascade) {
        getMetadata().setCascade(cascade);
        return this;
    }

    /** Sets the locking type on the underlying metadata; returns this for chaining. */
    @Override
    public OptimisticLocking setType(String type) {
        getMetadata().setType(type);
        return this;
    }
}
epl-1.0
RallySoftware/eclipselink.runtime
jpa/eclipselink.jpa.test/src/org/eclipse/persistence/testing/models/jpa/inheritance/Lawyer.java
920
/*******************************************************************************
 * Copyright (c) 1998, 2015 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.testing.models.jpa.inheritance;

import javax.persistence.*;

/**
 * JPA test entity: a {@code Person} subtype mapped to the shared
 * CMP3_PERSON table with discriminator value "3". It declares no state of
 * its own — it exists to exercise single-table inheritance in the tests.
 */
@Entity
@Table(name="CMP3_PERSON")
@DiscriminatorValue("3")
public class Lawyer extends Person {
}
epl-1.0
RallySoftware/eclipselink.runtime
foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/models/inheritance/SoftwareEngineer.java
964
/*******************************************************************************
 * Copyright (c) 1998, 2015 Oracle and/or its affiliates. All rights reserved.
 * This program and the accompanying materials are made available under the
 * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
 * which accompanies this distribution.
 * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
 * and the Eclipse Distribution License is available at
 * http://www.eclipse.org/org/documents/edl-v10.php.
 *
 * Contributors:
 *     Oracle - initial API and implementation from Oracle TopLink
 ******************************************************************************/
package org.eclipse.persistence.testing.models.inheritance;

/**
 * Test model class on the Engineer branch of the inheritance test domain.
 * Fields are public, matching the direct-field-access style of this model.
 */
public class SoftwareEngineer extends Engineer {
    // "T"/"F" string flag — presumably mapped to a boolean-ish database
    // column by the test project's descriptors; TODO confirm against the
    // inheritance model's mapping definitions.
    public String isExperiencedInJava = "F";
    // Reference to another Engineer acting as this engineer's boss.
    public Engineer boss;

    public SoftwareEngineer() {
        super();
    }
}
epl-1.0
qoswork/opennmszh
opennms-provision/opennms-provisiond/src/main/java/org/opennms/netmgt/provision/service/operations/DeleteOperation.java
2351
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2008-2012 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2012 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <license@opennms.org> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.netmgt.provision.service.operations; import org.opennms.netmgt.provision.service.ProvisionService; public class DeleteOperation extends ImportOperation { private Integer m_nodeId; /** * <p>Constructor for DeleteOperation.</p> * * @param nodeId a {@link java.lang.Integer} object. * @param foreignSource a {@link java.lang.String} object. * @param foreignId a {@link java.lang.String} object. * @param provisionService a {@link org.opennms.netmgt.provision.service.ProvisionService} object. */ public DeleteOperation(Integer nodeId, String foreignSource, String foreignId, ProvisionService provisionService) { super(provisionService); m_nodeId = nodeId; } /** * <p>toString</p> * * @return a {@link java.lang.String} object. 
*/ @Override public String toString() { return "DELETE: Node "+m_nodeId; } /** * <p>scan</p> */ @Override public void scan() { // no additional data to gather } /** {@inheritDoc} */ @Override protected void doPersist() { getProvisionService().deleteNode(m_nodeId); } }
gpl-2.0
arodchen/MaxSim
graal/graal/com.oracle.graal.java.decompiler/src/com/oracle/graal/java/decompiler/lines/DecompilerIfLine.java
2627
/* * Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.oracle.graal.java.decompiler.lines; import com.oracle.graal.graph.*; import com.oracle.graal.java.decompiler.block.*; public class DecompilerIfLine extends DecompilerSyntaxLine { private final Node condition; private DecompilerAssignmentLine mergedCondition; public DecompilerIfLine(DecompilerBlock block, Node node, Node condition) { super(block, node); this.condition = condition; } public void setMergedCondition(DecompilerAssignmentLine mergedCondition) { this.mergedCondition = mergedCondition; } @Override public String getAsString() { if (mergedCondition == null) { return "IF (" + getStringRepresentation(condition) + ") " + block.getBlock().getSuccessors().get(0) + ":" + block.getBlock().getSuccessors().get(1); } else { return "IF (" + mergedCondition.getStatement() + ") " + block.getBlock().getSuccessors().get(0) + ":" + block.getBlock().getSuccessors().get(1); } } public String getIfStatement() { if (mergedCondition == null) { return "IF (" + getStringRepresentation(condition) + ")"; } else { return "IF (" + mergedCondition.getStatement() + ")"; } } public String getIfNegStatement() { if (mergedCondition == null) { return "IF (!(" + getStringRepresentation(condition) + "))"; } else { return "IF (!(" + mergedCondition.getStatement() + "))"; } } public Node getCondition() { return condition; } }
gpl-2.0
imoseyon/leanKernel-d2usc-deprecated
vendor/samsung/common/packages/apps/Email/lib_Src/activation-1.1.1/source/gnu/activation/viewers/TextViewer.java
2284
/*
 * TextViewer.java
 * Copyright (C) 2004 The Free Software Foundation
 *
 * This file is part of GNU Java Activation Framework (JAF), a library.
 *
 * GNU JAF is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * GNU JAF is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 *
 * As a special exception, if you link this library with other files to
 * produce an executable, this library does not by itself cause the
 * resulting executable to be covered by the GNU General Public License.
 * This exception does not however invalidate any other reasons why the
 * executable file might be covered by the GNU General Public License.
 */
package gnu.activation.viewers;

import java.awt.Dimension;
import java.awt.TextArea;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.IOException;

import javax.activation.CommandObject;
import javax.activation.DataHandler;

/**
 * Simple text display component.
 *
 * @author <a href='mailto:dog@gnu.org'>Chris Burdess</a>
 * @version 1.0.2
 */
public class TextViewer extends TextArea
  implements CommandObject
{

  /** Creates a read-only 24x80 text area with scrollbars. */
  public TextViewer()
  {
    super("", 24, 80, 1);
    setEditable(false);
  }

  public Dimension getPreferredSize()
  {
    return getMinimumSize(24, 80);
  }

  /**
   * Reads the handler's entire input stream and shows it as this
   * component's text.
   *
   * <p>Fix: the stream is now closed in a finally block; previously an
   * IOException mid-read skipped {@code in.close()} and leaked the stream.
   *
   * @param verb the command verb (unused)
   * @param dh the data to display
   * @throws IOException if the stream cannot be read
   */
  public void setCommandContext(String verb, DataHandler dh)
    throws IOException
  {
    InputStream in = dh.getInputStream();
    try
      {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        byte[] buf = new byte[4096];
        for (int len = in.read(buf); len != -1; len = in.read(buf))
          bytes.write(buf, 0, len);
        // NOTE(review): decodes with the platform default charset, exactly
        // as before — confirm against the handler's charset if non-ASCII
        // content matters.
        setText(bytes.toString());
      }
    finally
      {
        in.close();
      }
  }
}
gpl-2.0
wdd1990/J2EE
src/main/java/javaeetutorial/web/websocketbot/messages/UsersMessage.java
811
/**
 * Copyright (c) 2014 Oracle and/or its affiliates. All rights reserved.
 *
 * You may not modify, use, reproduce, or distribute this software except in
 * compliance with the terms of the License at:
 * http://java.net/projects/javaeetutorial/pages/BerkeleyLicense
 */
package javaeetutorial.web.websocketbot.messages;

import java.util.List;

/* Represents the list of users currently connected to the chat */
public class UsersMessage extends Message {

    // Assigned once in the constructor, so marked final. The list object
    // itself is the caller's: getUserList() exposes it as-is (no defensive
    // copy), which is kept for backward compatibility.
    private final List<String> userlist;

    /**
     * @param userlist names of the currently connected users; stored by
     *                 reference, so later caller-side mutations are visible
     */
    public UsersMessage(List<String> userlist) {
        this.userlist = userlist;
    }

    /** @return the list supplied to the constructor (not a copy) */
    public List<String> getUserList() {
        return userlist;
    }

    /* For logging purposes */
    @Override
    public String toString() {
        return "[UsersMessage] " + userlist.toString();
    }
}
gpl-2.0
ninneko/velocity-edit
src/com/hudson/velocityweb/util/StringUtil.java
1041
package com.hudson.velocityweb.util; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.StringWriter; /** * @author <a href="mailto: jhudson8@users.sourceforge.net">Joe Hudson</a> */ public class StringUtil { /** * Return the contents of the Stream as a String. * Note: If the InputStream represents a null String, the Java implementation will try to read from the stream for a certain amount of time * before timing out. * @param is the InputStream to transform into a String * @return the String representation of the Stream */ public static String getStringFromStream (InputStream is) throws IOException { try { InputStreamReader reader = new InputStreamReader(is); char[] buffer = new char[1024]; StringWriter writer = new StringWriter(); int bytes_read; while ((bytes_read = reader.read(buffer)) != -1) { writer.write(buffer, 0, bytes_read); } return (writer.toString()); } finally { if (null != is) is.close(); } } }
gpl-2.0
intfloat/CoreNLP
src/edu/stanford/nlp/pipeline/CoreMapAttributeAggregator.java
11080
package edu.stanford.nlp.pipeline;

import edu.stanford.nlp.ling.CoreAnnotations;
import edu.stanford.nlp.ling.CoreLabel;
import edu.stanford.nlp.stats.IntCounter;
import edu.stanford.nlp.util.ArrayMap;
import edu.stanford.nlp.util.CoreMap;
import edu.stanford.nlp.util.Generics;

import java.util.*;

/**
 * Functions for aggregating token attributes.
 *
 * <p>Each aggregator collapses the values stored under one annotation key
 * across a list of CoreMaps into a single value (first, last, sum, concat,
 * most frequent, ...). Named aggregators are registered in
 * AGGREGATOR_LOOKUP, and per-annotation-key defaults live in the
 * DEFAULT_* maps built in the static initializer at the bottom.
 *
 * @author Angel Chang
 */
public abstract class CoreMapAttributeAggregator {

  /** Default annotation-key to aggregator table (unmodifiable). */
  public static Map<Class, CoreMapAttributeAggregator> getDefaultAggregators() {
    return DEFAULT_AGGREGATORS;
  }

  /** Looks up an aggregator by registered name (e.g. "FIRST", "SUM"); null if unknown. */
  public static CoreMapAttributeAggregator getAggregator(String str) {
    return AGGREGATOR_LOOKUP.get(str);
  }

  /** Combines the values stored under {@code key} across {@code in} into one value. */
  public abstract Object aggregate(Class key, List<? extends CoreMap> in);

  /** First non-null value of the key, or null if none (or input is null). */
  public static final CoreMapAttributeAggregator FIRST_NON_NIL = new CoreMapAttributeAggregator() {
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        if (obj != null) {
          return obj;
        }
      }
      return null;
    }
  };

  /** Value of the key on the first element (may be null); the loop returns on iteration one. */
  public static final CoreMapAttributeAggregator FIRST = new CoreMapAttributeAggregator() {
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        return obj;
      }
      return null;
    }
  };

  /** Last non-null value of the key, scanning backwards; null if none. */
  public static final CoreMapAttributeAggregator LAST_NON_NIL = new CoreMapAttributeAggregator() {
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      for (int i = in.size()-1; i >= 0; i--) {
        CoreMap cm = in.get(i);
        Object obj = cm.get(key);
        if (obj != null) {
          return obj;
        }
      }
      return null;
    }
  };

  /** Value of the key on the last element (may be null); returns on the first backwards step. */
  public static final CoreMapAttributeAggregator LAST = new CoreMapAttributeAggregator() {
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      for (int i = in.size()-1; i >= 0; i--) {
        CoreMap cm = in.get(i);
        return cm.get(key);
      }
      return null;
    }
  };

  /** Flattens per-element List values of the key into one list; non-list values are dropped. */
  public static final class ConcatListAggregator<T> extends CoreMapAttributeAggregator {
    public ConcatListAggregator() {
    }
    @Override
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      List<T> res = new ArrayList<>();
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        if (obj != null) {
          if (obj instanceof List) {
            res.addAll( (List<T>) obj);
          }
        }
      }
      return res;
    }
  }

  /**
   * Like ConcatListAggregator, but when an element has no list under the key
   * (and concatSelf is set) the element itself is appended instead.
   */
  public static final class ConcatCoreMapListAggregator<T extends CoreMap> extends CoreMapAttributeAggregator {
    // When true, a CoreMap lacking a list value contributes itself.
    boolean concatSelf = false;
    public ConcatCoreMapListAggregator() {
    }
    public ConcatCoreMapListAggregator(boolean concatSelf) {
      this.concatSelf = concatSelf;
    }
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      List<T> res = new ArrayList<>();
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        boolean added = false;
        if (obj != null) {
          if (obj instanceof List) {
            res.addAll( (List<T>) obj);
            added = true;
          }
        }
        if (!added && concatSelf) {
          res.add((T) cm);
        }
      }
      return res;
    }
  }

  public static final ConcatCoreMapListAggregator<CoreLabel> CONCAT_TOKENS = new ConcatCoreMapListAggregator<>(true);
  public static final ConcatCoreMapListAggregator<CoreMap> CONCAT_COREMAP = new ConcatCoreMapListAggregator<>(true);

  /** Joins the string form of each non-null value with a delimiter. */
  public static final class ConcatAggregator extends CoreMapAttributeAggregator {
    String delimiter;
    public ConcatAggregator(String delimiter) {
      this.delimiter = delimiter;
    }
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      StringBuilder sb = new StringBuilder();
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        if (obj != null) {
          if (sb.length() > 0) {
            sb.append(delimiter);
          }
          sb.append(obj);
        }
      }
      return sb.toString();
    }
  }

  /**
   * Produces the token text for the span via ChunkAnnotationUtils.
   * NOTE(review): the delimiter field is not used here — presumably
   * getTokenText applies its own spacing; confirm.
   */
  public static final class ConcatTextAggregator extends CoreMapAttributeAggregator {
    String delimiter;
    public ConcatTextAggregator(String delimiter) {
      this.delimiter = delimiter;
    }
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      String text = ChunkAnnotationUtils.getTokenText(in, key);
      return text;
    }
  }

  public static final CoreMapAttributeAggregator CONCAT = new ConcatAggregator(" ");
  public static final CoreMapAttributeAggregator CONCAT_TEXT = new ConcatTextAggregator(" ");

  /** Number of elements in the input (the key is ignored). */
  public static final CoreMapAttributeAggregator COUNT = new CoreMapAttributeAggregator() {
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      return in.size();
    }
  };

  /**
   * Sums numeric values of the key; String values are parsed as doubles
   * (NumberFormatException propagates), other types throw RuntimeException.
   */
  public static final CoreMapAttributeAggregator SUM = new CoreMapAttributeAggregator() {
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      double sum = 0;
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        if (obj != null) {
          if (obj instanceof Number) {
            sum += ((Number) obj).doubleValue();
          } else if (obj instanceof String) {
            sum += Double.parseDouble((String) obj);
          } else {
            throw new RuntimeException("Cannot sum attribute " + key + ", object of type: " + obj.getClass());
          }
        }
      }
      return sum;
    }
  };

  /** Minimum Comparable value of the key; non-Comparable values throw. */
  public static final CoreMapAttributeAggregator MIN = new CoreMapAttributeAggregator() {
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      Comparable min = null;
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        if (obj != null) {
          if (obj instanceof Comparable) {
            Comparable c = (Comparable) obj;
            if (min == null) {
              min = c;
            } else if (c.compareTo(min) < 0) {
              min = c;
            }
          } else {
            throw new RuntimeException("Cannot get min of attribute " + key + ", object of type: " + obj.getClass());
          }
        }
      }
      return min;
    }
  };

  /** Maximum Comparable value of the key; non-Comparable values throw. */
  public static final CoreMapAttributeAggregator MAX = new CoreMapAttributeAggregator() {
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      Comparable max = null;
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        if (obj != null) {
          if (obj instanceof Comparable) {
            Comparable c = (Comparable) obj;
            if (max == null) {
              max = c;
            } else if (c.compareTo(max) > 0) {
              max = c;
            }
          } else {
            throw new RuntimeException("Cannot get max of attribute " + key + ", object of type: " + obj.getClass());
          }
        }
      }
      return max;
    }
  };

  /**
   * Most frequent value of the key (ties broken by IntCounter.argmax),
   * optionally skipping values in ignoreSet; null when nothing counted.
   */
  public static final class MostFreqAggregator extends CoreMapAttributeAggregator {
    Set<Object> ignoreSet;
    public MostFreqAggregator() {
    }
    public MostFreqAggregator(Set<Object> set) {
      ignoreSet = set;
    }
    public Object aggregate(Class key, List<? extends CoreMap> in) {
      if (in == null) return null;
      IntCounter<Object> counter = new IntCounter<>();
      for (CoreMap cm:in) {
        Object obj = cm.get(key);
        if (obj != null && (ignoreSet == null || !ignoreSet.contains(obj))) {
          counter.incrementCount(obj);
        }
      }
      if (counter.size() > 0) {
        return counter.argmax();
      } else {
        return null;
      }
    }
  }

  public static final CoreMapAttributeAggregator MOST_FREQ = new MostFreqAggregator();

  // Name -> aggregator registry consulted by getAggregator(String).
  private static final Map<String, CoreMapAttributeAggregator> AGGREGATOR_LOOKUP = Generics.newHashMap();
  static {
    AGGREGATOR_LOOKUP.put("FIRST", FIRST);
    AGGREGATOR_LOOKUP.put("FIRST_NON_NIL", FIRST_NON_NIL);
    AGGREGATOR_LOOKUP.put("LAST", LAST);
    AGGREGATOR_LOOKUP.put("LAST_NON_NIL", LAST_NON_NIL);
    AGGREGATOR_LOOKUP.put("MIN", MIN);
    AGGREGATOR_LOOKUP.put("MAX", MAX);
    AGGREGATOR_LOOKUP.put("COUNT", COUNT);
    AGGREGATOR_LOOKUP.put("SUM", SUM);
    AGGREGATOR_LOOKUP.put("CONCAT", CONCAT);
    AGGREGATOR_LOOKUP.put("CONCAT_TEXT", CONCAT_TEXT);
    AGGREGATOR_LOOKUP.put("CONCAT_TOKENS", CONCAT_TOKENS);
    AGGREGATOR_LOOKUP.put("MOST_FREQ", MOST_FREQ);
  }

  public static final Map<Class, CoreMapAttributeAggregator> DEFAULT_AGGREGATORS;
  public static final Map<Class, CoreMapAttributeAggregator> DEFAULT_NUMERIC_AGGREGATORS;
  public static final Map<Class, CoreMapAttributeAggregator> DEFAULT_NUMERIC_TOKENS_AGGREGATORS;
  static {
    // Base defaults: text is concatenated, offsets/token indices come from
    // the span ends, and token lists are flattened.
    Map<Class, CoreMapAttributeAggregator> defaultAggr = new ArrayMap<>();
    defaultAggr.put(CoreAnnotations.TextAnnotation.class, CoreMapAttributeAggregator.CONCAT_TEXT);
    defaultAggr.put(CoreAnnotations.CharacterOffsetBeginAnnotation.class, CoreMapAttributeAggregator.FIRST);
    defaultAggr.put(CoreAnnotations.CharacterOffsetEndAnnotation.class, CoreMapAttributeAggregator.LAST);
    defaultAggr.put(CoreAnnotations.TokenBeginAnnotation.class, CoreMapAttributeAggregator.FIRST);
    defaultAggr.put(CoreAnnotations.TokenEndAnnotation.class, CoreMapAttributeAggregator.LAST);
    defaultAggr.put(CoreAnnotations.TokensAnnotation.class, CoreMapAttributeAggregator.CONCAT_TOKENS);
    defaultAggr.put(CoreAnnotations.BeforeAnnotation.class, CoreMapAttributeAggregator.FIRST);
    defaultAggr.put(CoreAnnotations.AfterAnnotation.class, CoreMapAttributeAggregator.LAST);
    DEFAULT_AGGREGATORS = Collections.unmodifiableMap(defaultAggr);
    // Numeric defaults extend the base map with numeric/NER annotations.
    Map<Class, CoreMapAttributeAggregator> defaultNumericAggr = new ArrayMap<>(DEFAULT_AGGREGATORS);
    defaultNumericAggr.put(CoreAnnotations.NumericCompositeTypeAnnotation.class, CoreMapAttributeAggregator.FIRST_NON_NIL);
    defaultNumericAggr.put(CoreAnnotations.NumericCompositeValueAnnotation.class, CoreMapAttributeAggregator.FIRST_NON_NIL);
    defaultNumericAggr.put(CoreAnnotations.NamedEntityTagAnnotation.class, CoreMapAttributeAggregator.FIRST_NON_NIL);
    defaultNumericAggr.put(CoreAnnotations.NormalizedNamedEntityTagAnnotation.class, CoreMapAttributeAggregator.FIRST_NON_NIL);
    DEFAULT_NUMERIC_AGGREGATORS = Collections.unmodifiableMap(defaultNumericAggr);
    // Numeric-tokens defaults additionally flatten numerized token lists.
    Map<Class, CoreMapAttributeAggregator> defaultNumericTokensAggr = new ArrayMap<>(DEFAULT_NUMERIC_AGGREGATORS);
    defaultNumericTokensAggr.put(CoreAnnotations.NumerizedTokensAnnotation.class, CoreMapAttributeAggregator.CONCAT_COREMAP);
    DEFAULT_NUMERIC_TOKENS_AGGREGATORS = Collections.unmodifiableMap(defaultNumericTokensAggr);
  }
}
gpl-2.0
rex-xxx/mt6572_x201
device/google/accessory/demokit/app/src/com/google/android/DemoKit/DemoKitLaunch.java
1150
package com.google.android.DemoKit; import android.app.Activity; import android.content.ActivityNotFoundException; import android.content.Intent; import android.os.Bundle; import android.util.Log; import android.view.Display; public class DemoKitLaunch extends Activity { static final String TAG = "DemoKitLaunch"; static Intent createIntent(Activity activity) { Display display = activity.getWindowManager().getDefaultDisplay(); int maxExtent = Math.max(display.getWidth(), display.getHeight()); Intent intent; if (maxExtent > 1200) { Log.i(TAG, "starting tablet ui"); intent = new Intent(activity, DemoKitTablet.class); } else { Log.i(TAG, "starting phone ui"); intent = new Intent(activity, DemoKitPhone.class); } return intent; } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Intent intent = createIntent(this); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP); try { startActivity(intent); } catch (ActivityNotFoundException e) { Log.e(TAG, "unable to start DemoKit activity", e); } finish(); } }
gpl-2.0
s20121035/rk3288_android5.1_repo
external/javassist/src/main/javassist/bytecode/ExceptionTable.java
8995
/*
 * Javassist, a Java-bytecode translator toolkit.
 * Copyright (C) 1999-2007 Shigeru Chiba. All Rights Reserved.
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License.  Alternatively, the contents of this file may be used under
 * the terms of the GNU Lesser General Public License Version 2.1 or later.
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 */

package javassist.bytecode;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;

/**
 * One row of <code>exception_table[]</code>: the protected bytecode range
 * [startPc, endPc), the handler's pc, and the catch type as a constant-pool
 * index (0 means the handler catches all exceptions).
 */
class ExceptionTableEntry {
    int startPc;
    int endPc;
    int handlerPc;
    int catchType;

    ExceptionTableEntry(int start, int end, int handle, int type) {
        startPc = start;
        endPc = end;
        handlerPc = handle;
        catchType = type;
    }
}

/**
 * <code>exception_table[]</code> of <code>Code_attribute</code>.
 */
public class ExceptionTable implements Cloneable {
    private ConstPool constPool;
    private ArrayList entries;      // elements are ExceptionTableEntry

    /**
     * Constructs an <code>exception_table[]</code>.
     *
     * @param cp        constant pool table.
     */
    public ExceptionTable(ConstPool cp) {
        constPool = cp;
        entries = new ArrayList();
    }

    /**
     * Reads <code>exception_table_length</code> followed by that many
     * four-short entries from the class-file stream.
     */
    ExceptionTable(ConstPool cp, DataInputStream in) throws IOException {
        constPool = cp;
        int length = in.readUnsignedShort();
        ArrayList list = new ArrayList(length);
        for (int i = 0; i < length; ++i) {
            int start = in.readUnsignedShort();
            int end = in.readUnsignedShort();
            int handle = in.readUnsignedShort();
            int type = in.readUnsignedShort();
            list.add(new ExceptionTableEntry(start, end, handle, type));
        }

        entries = list;
    }

    /**
     * Creates and returns a copy of this object.
     * The constant pool object is shared between this object
     * and the cloned object.  Each table entry, however, is copied, so
     * that later edits on the clone (e.g. {@link #setStartPc(int, int)}
     * or pc shifting) do not mutate this table.
     */
    public Object clone() throws CloneNotSupportedException {
        ExceptionTable r = (ExceptionTable)super.clone();
        // Bug fix: the previous implementation copied only the list, leaving
        // the mutable ExceptionTableEntry objects shared between the clone
        // and the original.  Deep-copy the entries instead.
        int len = entries.size();
        ArrayList copy = new ArrayList(len);
        for (int i = 0; i < len; ++i) {
            ExceptionTableEntry e = (ExceptionTableEntry)entries.get(i);
            copy.add(new ExceptionTableEntry(e.startPc, e.endPc,
                                             e.handlerPc, e.catchType));
        }

        r.entries = copy;
        return r;
    }

    /**
     * Returns <code>exception_table_length</code>, which is the number
     * of entries in the <code>exception_table[]</code>.
     */
    public int size() {
        return entries.size();
    }

    /**
     * Returns <code>startPc</code> of the <i>n</i>-th entry.
     *
     * @param nth               the <i>n</i>-th (&gt;= 0).
     */
    public int startPc(int nth) {
        ExceptionTableEntry e = (ExceptionTableEntry)entries.get(nth);
        return e.startPc;
    }

    /**
     * Sets <code>startPc</code> of the <i>n</i>-th entry.
     *
     * @param nth               the <i>n</i>-th (&gt;= 0).
     * @param value             new value.
     */
    public void setStartPc(int nth, int value) {
        ExceptionTableEntry e = (ExceptionTableEntry)entries.get(nth);
        e.startPc = value;
    }

    /**
     * Returns <code>endPc</code> of the <i>n</i>-th entry.
     *
     * @param nth               the <i>n</i>-th (&gt;= 0).
     */
    public int endPc(int nth) {
        ExceptionTableEntry e = (ExceptionTableEntry)entries.get(nth);
        return e.endPc;
    }

    /**
     * Sets <code>endPc</code> of the <i>n</i>-th entry.
     *
     * @param nth               the <i>n</i>-th (&gt;= 0).
     * @param value             new value.
     */
    public void setEndPc(int nth, int value) {
        ExceptionTableEntry e = (ExceptionTableEntry)entries.get(nth);
        e.endPc = value;
    }

    /**
     * Returns <code>handlerPc</code> of the <i>n</i>-th entry.
     *
     * @param nth               the <i>n</i>-th (&gt;= 0).
     */
    public int handlerPc(int nth) {
        ExceptionTableEntry e = (ExceptionTableEntry)entries.get(nth);
        return e.handlerPc;
    }

    /**
     * Sets <code>handlerPc</code> of the <i>n</i>-th entry.
     *
     * @param nth               the <i>n</i>-th (&gt;= 0).
     * @param value             new value.
     */
    public void setHandlerPc(int nth, int value) {
        ExceptionTableEntry e = (ExceptionTableEntry)entries.get(nth);
        e.handlerPc = value;
    }

    /**
     * Returns <code>catchType</code> of the <i>n</i>-th entry.
     *
     * @param nth               the <i>n</i>-th (&gt;= 0).
     * @return    an index into the <code>constant_pool</code> table,
     *            or zero if this exception handler is for all exceptions.
     */
    public int catchType(int nth) {
        ExceptionTableEntry e = (ExceptionTableEntry)entries.get(nth);
        return e.catchType;
    }

    /**
     * Sets <code>catchType</code> of the <i>n</i>-th entry.
     *
     * @param nth               the <i>n</i>-th (&gt;= 0).
     * @param value             new value.
     */
    public void setCatchType(int nth, int value) {
        ExceptionTableEntry e = (ExceptionTableEntry)entries.get(nth);
        e.catchType = value;
    }

    /**
     * Copies the given exception table at the specified position
     * in the table.  Each copied entry's pcs are shifted by
     * <code>offset</code>; iteration runs backwards so that inserting at a
     * fixed index preserves the source table's order.
     *
     * @param index             index (&gt;= 0) at which the entry is to be
     *                          inserted.
     * @param offset            the offset added to the code position.
     */
    public void add(int index, ExceptionTable table, int offset) {
        int len = table.size();
        while (--len >= 0) {
            ExceptionTableEntry e
                = (ExceptionTableEntry)table.entries.get(len);
            add(index, e.startPc + offset, e.endPc + offset,
                e.handlerPc + offset, e.catchType);
        }
    }

    /**
     * Adds a new entry at the specified position in the table.
     * An entry with an empty protected range (start &gt;= end) is
     * silently ignored.
     *
     * @param index             index (&gt;= 0) at which the entry is to be
     *                          inserted.
     * @param start             <code>startPc</code>
     * @param end               <code>endPc</code>
     * @param handler           <code>handlerPc</code>
     * @param type              <code>catchType</code>
     */
    public void add(int index, int start, int end, int handler, int type) {
        if (start < end)
            entries.add(index,
                        new ExceptionTableEntry(start, end, handler, type));
    }

    /**
     * Appends a new entry at the end of the table.
     * An entry with an empty protected range (start &gt;= end) is
     * silently ignored.
     *
     * @param start             <code>startPc</code>
     * @param end               <code>endPc</code>
     * @param handler           <code>handlerPc</code>
     * @param type              <code>catchType</code>
     */
    public void add(int start, int end, int handler, int type) {
        if (start < end)
            entries.add(new ExceptionTableEntry(start, end, handler, type));
    }

    /**
     * Removes the entry at the specified position in the table.
     *
     * @param index             the index of the removed entry.
     */
    public void remove(int index) {
        entries.remove(index);
    }

    /**
     * Makes a copy of this <code>exception_table[]</code>.
     * Class names are replaced according to the
     * given <code>Map</code> object.
     *
     * @param newCp     the constant pool table used by the new copy.
     * @param classnames        pairs of replaced and substituted
     *                          class names.
     */
    public ExceptionTable copy(ConstPool newCp, Map classnames) {
        ExceptionTable et = new ExceptionTable(newCp);
        ConstPool srcCp = constPool;
        int len = size();
        for (int i = 0; i < len; ++i) {
            ExceptionTableEntry e = (ExceptionTableEntry)entries.get(i);
            // catchType is a constant-pool index; translate it into newCp.
            int type = srcCp.copy(e.catchType, newCp, classnames);
            et.add(e.startPc, e.endPc, e.handlerPc, type);
        }

        return et;
    }

    /**
     * Shifts every pc at or after <code>where</code> by
     * <code>gapLength</code>.  When <code>exclusive</code> is true a pc
     * equal to <code>where</code> is shifted as well.
     */
    void shiftPc(int where, int gapLength, boolean exclusive) {
        int len = size();
        for (int i = 0; i < len; ++i) {
            ExceptionTableEntry e = (ExceptionTableEntry)entries.get(i);
            e.startPc = shiftPc(e.startPc, where, gapLength, exclusive);
            e.endPc = shiftPc(e.endPc, where, gapLength, exclusive);
            e.handlerPc = shiftPc(e.handlerPc, where, gapLength, exclusive);
        }
    }

    private static int shiftPc(int pc, int where, int gapLength,
                               boolean exclusive) {
        if (pc > where || (exclusive && pc == where))
            pc += gapLength;

        return pc;
    }

    /**
     * Writes <code>exception_table_length</code> followed by all entries
     * in class-file format.
     */
    void write(DataOutputStream out) throws IOException {
        int len = size();
        out.writeShort(len);            // exception_table_length
        for (int i = 0; i < len; ++i) {
            ExceptionTableEntry e = (ExceptionTableEntry)entries.get(i);
            out.writeShort(e.startPc);
            out.writeShort(e.endPc);
            out.writeShort(e.handlerPc);
            out.writeShort(e.catchType);
        }
    }
}
gpl-3.0
saifrahmed/Whereabouts
src/test/org/jnativehook/mouse/NativeMouseMotionListenerTest.java
2247
/* JNativeHook: Global keyboard and mouse hooking for Java. * Copyright (C) 2006-2015 Alexander Barker. All Rights Received. * https://github.com/kwhat/jnativehook/ * * JNativeHook is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * JNativeHook is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.jnativehook.mouse; // Imports. import org.jnativehook.mouse.listeners.NativeMouseInputListenerImpl; import org.junit.Test; import static org.junit.Assert.assertEquals; /** * * @author abarker */ public class NativeMouseMotionListenerTest { /** * Test of nativeMouseMoved method, of class NativeMouseMotionListener. */ @Test public void testNativeMouseMoved() { System.out.println("nativeMouseMoved"); NativeMouseEvent event = new NativeMouseEvent( NativeMouseEvent.NATIVE_MOUSE_MOVED, System.currentTimeMillis(), 0x00, // Modifiers 50, // X 75, // Y 0, // Click Count NativeMouseEvent.NOBUTTON); NativeMouseInputListenerImpl listener = new NativeMouseInputListenerImpl(); listener.nativeMouseMoved(event); assertEquals(event, listener.getLastEvent()); } /** * Test of nativeMouseDragged method, of class NativeMouseMotionListener. 
*/ @Test public void testNativeMouseDragged() { System.out.println("nativeMouseDragged"); NativeMouseEvent event = new NativeMouseEvent( NativeMouseEvent.NATIVE_MOUSE_DRAGGED, System.currentTimeMillis(), NativeMouseEvent.BUTTON1_MASK, 50, // X 75, // Y 0, // Click Count NativeMouseEvent.NOBUTTON); NativeMouseInputListenerImpl listener = new NativeMouseInputListenerImpl(); listener.nativeMouseDragged(event); assertEquals(event, listener.getLastEvent()); } }
gpl-3.0
open-health-hub/openMAXIMS
openmaxims_workspace/ValueObjects/src/ims/vo/interfaces/IHotlistItem.java
2137
//############################################################################# //# # //# Copyright (C) <2014> <IMS MAXIMS> # //# # //# This program is free software: you can redistribute it and/or modify # //# it under the terms of the GNU Affero General Public License as # //# published by the Free Software Foundation, either version 3 of the # //# License, or (at your option) any later version. # //# # //# This program is distributed in the hope that it will be useful, # //# but WITHOUT ANY WARRANTY; without even the implied warranty of # //# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # //# GNU Affero General Public License for more details. # //# # //# You should have received a copy of the GNU Affero General Public License # //# along with this program. If not, see <http://www.gnu.org/licenses/>. # //# # //############################################################################# //#EOH // This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751) // Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved. // WARNING: DO NOT MODIFY the content of this file package ims.vo.interfaces; /* * Interface Functions for the Hotlist Item Vos */ public interface IHotlistItem extends Comparable { /* * gets the IGenericItem contained in the Hotlist Item vo */ ims.vo.interfaces.IGenericItem getIGenericItem(); /* * returns the ID of the hotlist item */ Integer getIHotlistItemID(); /* * sets the IGenericItem to be the given parameter */ void setIGenericItem(ims.vo.interfaces.IGenericItem iGenericItem); }
agpl-3.0
open-health-hub/openMAXIMS
openmaxims_workspace/OCRR/src/ims/ocrr/domain/impl/MyOrderImpl.java
66882
//############################################################################# //# # //# Copyright (C) <2014> <IMS MAXIMS> # //# # //# This program is free software: you can redistribute it and/or modify # //# it under the terms of the GNU Affero General Public License as # //# published by the Free Software Foundation, either version 3 of the # //# License, or (at your option) any later version. # //# # //# This program is distributed in the hope that it will be useful, # //# but WITHOUT ANY WARRANTY; without even the implied warranty of # //# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # //# GNU Affero General Public License for more details. # //# # //# You should have received a copy of the GNU Affero General Public License # //# along with this program. If not, see <http://www.gnu.org/licenses/>. # //# # //############################################################################# //#EOH // This code was generated by Marius Mihalec using IMS Development Environment (version 1.35 build 2102.13596) // Copyright (C) 1995-2005 IMS MAXIMS plc. All rights reserved. 
package ims.ocrr.domain.impl; import ims.admin.domain.HcpAdmin; import ims.admin.domain.MosAdmin; import ims.admin.domain.OrganisationAndLocation; import ims.admin.domain.impl.HcpAdminImpl; import ims.admin.domain.impl.MosAdminImpl; import ims.admin.domain.impl.OrganisationAndLocationImpl; import ims.RefMan.domain.objects.CatsReferral; import ims.RefMan.domain.objects.OrderInvAppt; import ims.RefMan.vo.CatsReferralListVo; import ims.RefMan.vo.CatsReferralRefVo; import ims.RefMan.vo.OrderInvWithStatusApptVo; import ims.RefMan.vo.domain.CatsReferralListVoAssembler; import ims.RefMan.vo.domain.OrderInvWithStatusApptVoAssembler; import ims.chooseandbook.vo.lookups.ActionRequestType; import ims.clinical.domain.ClinicalNotesList; import ims.clinical.domain.impl.ClinicalNotesListImpl; import ims.clinical.domain.objects.SummaryClinicalInformation; import ims.clinical.vo.SummaryClinicalInformationVo; import ims.clinical.vo.domain.SummaryClinicalInformationVoAssembler; import ims.configuration.gen.ConfigFlag; import ims.core.admin.domain.objects.CareContext; import ims.core.admin.pas.domain.objects.InpatientEpisode; import ims.core.admin.pas.domain.objects.OutpatientAttendance; import ims.core.admin.pas.domain.objects.PASEvent; import ims.core.admin.pas.vo.PASEventRefVo; import ims.core.admin.vo.CareContextRefVo; import ims.core.clinical.vo.ServiceRefVo; import ims.core.patient.vo.PatientRefVo; import ims.core.resource.people.domain.objects.Gp; import ims.core.resource.people.domain.objects.Hcp; import ims.core.resource.people.vo.MemberOfStaffRefVo; import ims.core.resource.place.domain.objects.Clinic; import ims.core.resource.place.domain.objects.LocSite; import ims.core.resource.place.domain.objects.Location; import ims.core.resource.place.vo.ClinicRefVo; import ims.core.resource.place.vo.LocationRefVo; import ims.core.vo.CareContextShortVo; import ims.core.vo.ClinicLiteVo; import ims.core.vo.ClinicLiteVoCollection; import ims.core.vo.ClinicalNotesFilterVo; import 
ims.core.vo.ClinicalNotesLiteVoCollection; import ims.core.vo.GeneralQuestionAnswerVoCollection; import ims.core.vo.GpLiteVo; import ims.core.vo.GpShortVoCollection; import ims.core.vo.HcpCollection; import ims.core.vo.HcpFilter; import ims.core.vo.HcpLiteVo; import ims.core.vo.HcpLiteVoCollection; import ims.core.vo.LocShortMappingsVoCollection; import ims.core.vo.LocShortVoCollection; import ims.core.vo.LocSiteLiteVo; import ims.core.vo.LocSiteShortVo; import ims.core.vo.LocationLiteVo; import ims.core.vo.LocationLiteVoCollection; import ims.core.vo.MemberOfStaffLiteVoCollection; import ims.core.vo.MemberOfStaffShortVo; import ims.core.vo.MemberOfStaffShortVoCollection; import ims.core.vo.PasEventShortVo; import ims.core.vo.PersonName; import ims.core.vo.domain.CareContextShortVoAssembler; import ims.core.vo.domain.ClinicLiteVoAssembler; import ims.core.vo.domain.GeneralQuestionAnswerVoAssembler; import ims.core.vo.domain.GpLiteVoAssembler; import ims.core.vo.domain.GpShortVoAssembler; import ims.core.vo.domain.HcpLiteVoAssembler; import ims.core.vo.domain.LocSiteShortVoAssembler; import ims.core.vo.domain.LocationLiteVoAssembler; import ims.core.vo.domain.PasEventShortVoAssembler; import ims.core.vo.lookups.ClinicalNoteType; import ims.core.vo.lookups.GPStatus; import ims.core.vo.lookups.LocationType; import ims.core.vo.lookups.PollStatus; import ims.domain.DomainFactory; import ims.domain.DomainObject; import ims.domain.exceptions.DomainRuntimeException; import ims.domain.exceptions.ForeignKeyViolationException; import ims.domain.exceptions.StaleObjectException; import ims.domain.impl.DomainImpl; import ims.domain.lookups.LookupInstance; import ims.framework.exceptions.CodingRuntimeException; import ims.framework.interfaces.ILocation; import ims.framework.utils.Date; import ims.framework.utils.DateTime; import ims.framework.utils.Time; import ims.ocrr.configuration.domain.objects.Container; import ims.ocrr.configuration.domain.objects.DFTCollectionTypesConfig; 
import ims.ocrr.configuration.domain.objects.Investigation; import ims.ocrr.configuration.domain.objects.InvestigationIndex; import ims.ocrr.configuration.domain.objects.Specimen; import ims.ocrr.configuration.vo.InvestigationIndexRefVo; import ims.ocrr.configuration.vo.InvestigationRefVo; import ims.ocrr.domain.CategoryQuestions; import ims.ocrr.domain.DFTCollectionTypeConfigurations; import ims.ocrr.domain.ServiceQuestions; import ims.ocrr.domain.SpecimenCollectionConfig; import ims.ocrr.helper.IOCRRSchedulingHelper; import ims.ocrr.orderingresults.domain.objects.OcsOrderSession; import ims.ocrr.orderingresults.domain.objects.OrderInvestigation; import ims.ocrr.orderingresults.domain.objects.OrderSpecimen; import ims.ocrr.orderingresults.domain.objects.OrderedInvestigationStatus; import ims.ocrr.orderingresults.domain.objects.SpecimenWorkListItem; import ims.ocrr.orderingresults.vo.OcsOrderSessionRefVo; import ims.ocrr.orderingresults.vo.OrderInvestigationRefVo; import ims.ocrr.orderingresults.vo.OrderInvestigationRefVoCollection; import ims.ocrr.vo.CategoryQuestionShortVoCollection; import ims.ocrr.vo.DFTCollectionTypesConfigVo; import ims.ocrr.vo.InvestigationHelpTextVo; import ims.ocrr.vo.InvestigationQuestionAnswerVoCollection; import ims.ocrr.vo.InvestigationUnderSpecimenVo; import ims.ocrr.vo.MyOrderInpatEpisVo; import ims.ocrr.vo.MyOrderOutpatAttendVo; import ims.ocrr.vo.MyOrderOutpatAttendVoCollection; import ims.ocrr.vo.OcsOrderVo; import ims.ocrr.vo.OrderInvestigationBookingWithStatusVo; import ims.ocrr.vo.OrderInvestigationForStatusChangeVo; import ims.ocrr.vo.OrderInvestigationLiteVo; import ims.ocrr.vo.OrderInvestigationVo; import ims.ocrr.vo.OrderSpecimenVo; import ims.ocrr.vo.OrderSpecimenVoCollection; import ims.ocrr.vo.PathInvDetailsVo; import ims.ocrr.vo.PathSpecimenContainerVo; import ims.ocrr.vo.PathSpecimenContainerVoCollection; import ims.ocrr.vo.PhlebotomyRoundShortVoCollection; import ims.ocrr.vo.ServiceQuestionShortVoCollection; import 
ims.ocrr.vo.SpecimenCollectionListConfigDetailsVoCollection; import ims.ocrr.vo.SpecimenWorkListItemVo; import ims.ocrr.vo.SpecimenWorkListItemVoCollection; import ims.ocrr.vo.domain.DFTCollectionTypesConfigVoAssembler; import ims.ocrr.vo.domain.InvestigationHelpTextVoAssembler; import ims.ocrr.vo.domain.InvestigationQuestionAnswerVoAssembler; import ims.ocrr.vo.domain.InvestigationQuestionsSelectOrderVoAssembler; import ims.ocrr.vo.domain.InvestigationUnderSpecimenVoAssembler; import ims.ocrr.vo.domain.MyOrderInpatEpisVoAssembler; import ims.ocrr.vo.domain.MyOrderOutpatAttendVoAssembler; import ims.ocrr.vo.domain.OcsOrderVoAssembler; import ims.ocrr.vo.domain.OrderInvestigationBookingWithStatusVoAssembler; import ims.ocrr.vo.domain.OrderInvestigationForStatusChangeVoAssembler; import ims.ocrr.vo.domain.OrderInvestigationLiteVoAssembler; import ims.ocrr.vo.domain.PathInvDetailsVoAssembler; import ims.ocrr.vo.domain.PathSpecimenContainerVoAssembler; import ims.ocrr.vo.domain.PhlebotomyRoundShortVoAssembler; import ims.ocrr.vo.domain.SpecimenWorkListItemVoAssembler; import ims.ocrr.vo.enums.ORDERSTATE; import ims.ocrr.vo.lookups.AuthorisationOrderStatus; import ims.ocrr.vo.lookups.Category; import ims.ocrr.vo.lookups.InvEventType; import ims.ocrr.vo.lookups.LookupHelper; import ims.ocrr.vo.lookups.OcsDisplayFlag; import ims.ocrr.vo.lookups.OrderCategory; import ims.ocrr.vo.lookups.OrderInvStatus; import ims.ocrr.vo.lookups.OrderPriority; import ims.ocrr.vo.lookups.OrderPriorityCollection; import ims.ocrr.vo.lookups.SpecimenCollectionMethod; import ims.ocrr.vo.lookups.SpecimenCollectionTime; import ims.ocs_if.helper.GHGWinPathNumbers; import ims.scheduling.domain.OCSExternalEvents; import ims.scheduling.domain.SessionAdmin; import ims.scheduling.domain.impl.OCSExternalEventsImpl; import ims.scheduling.domain.impl.SessionAdminImpl; import ims.scheduling.domain.objects.Booking_Appointment; import ims.scheduling.vo.Booking_AppointmentRefVo; import 
ims.scheduling.vo.Booking_AppointmentVo; import ims.scheduling.vo.lookups.Status_Reason; import ims.vo.ValueObject; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import org.apache.log4j.Logger; import com.sun.org.apache.bcel.internal.generic.NEW; public class MyOrderImpl extends DomainImpl implements ims.ocrr.domain.MyOrder, ims.domain.impl.Transactional { private static final long serialVersionUID = 1L; private static final Logger LOG = Logger.getLogger(MyOrderImpl.class); /** * Returns a list of questions */ public ims.ocrr.vo.InvestigationQuestionsSelectOrderVo getQuestions(ims.ocrr.vo.MyOrderComponentVo component) { return InvestigationQuestionsSelectOrderVoAssembler.create((Investigation) getDomainFactory().getDomainObject(Investigation.class, component.getID().intValue())); } public HcpCollection listHcp(String text) { if (text == null || text.length() == 0) throw new DomainRuntimeException("Filter not supplied"); HcpFilter filter = new HcpFilter(); filter.setQueryName(new PersonName()); filter.getQueryName().setSurname(text); HcpAdmin impl = (HcpAdmin) getDomainImpl(HcpAdminImpl.class); return impl.listHCPs(filter); } public MemberOfStaffShortVoCollection listMos(String text) { if (text == null || text.length() == 0) throw new DomainRuntimeException("Filter not supplied"); MemberOfStaffShortVo filter = new MemberOfStaffShortVo(); filter.setName(new PersonName()); filter.getName().setSurname(text); filter.setIsActive(true); MosAdmin impl = (MosAdmin) getDomainImpl(MosAdminImpl.class); return impl.listMembersOfStaffWithPrimaryLocations(filter); } public OcsOrderVo saveOcsOrder(OcsOrderVo ocsOrder, SpecimenWorkListItemVoCollection voCollWorkListItem, ORDERSTATE state, CatsReferralRefVo referral, Booking_AppointmentRefVo appt) throws StaleObjectException { if (ocsOrder == null) throw new 
DomainRuntimeException("Cannot save null value for OcsOrderVo"); if (!ocsOrder.isValidated()) throw new DomainRuntimeException("OcsOrderVo has not been validated"); DomainFactory factory = getDomainFactory(); HashMap objMap = new HashMap(); OcsOrderSession doCurrentOcsOrder = null; if(ocsOrder.getID_OcsOrderSessionIsNotNull()) { doCurrentOcsOrder = (OcsOrderSession) factory.getDomainObject(OcsOrderSession.class, ocsOrder.getID_OcsOrderSession().intValue()); //updating the order if(state == ORDERSTATE.AUTHORISING && doCurrentOcsOrder.getId() != null) { Iterator it2 = doCurrentOcsOrder.getSpecimens().iterator(); String idsValues = ""; while (it2.hasNext()) { OrderSpecimen doSpecimen = (OrderSpecimen) it2.next(); if(idsValues.length() > 0) idsValues += ","; idsValues += doSpecimen.getId(); } if(idsValues != null && !idsValues.equals("")) { try { factory.delete("from SpecimenWorkListItem item where item.specimen.id in ( " + idsValues + " ) "); } catch (ForeignKeyViolationException e) { throw new CodingRuntimeException ("delete of SpecimenWorkListItem failed - Foreign Key Violation in updateOcsOrder"); } } } } // Remove OrderSpecimen for DTF investigations // This is hack to get it to work. 
The entire MyOrder form needs to be clearOrderSpecimensFromOrderInvestigationsDFT(ocsOrder, state); // For the Specimen Worklist items processSpecimenWorklistItems(ocsOrder, voCollWorkListItem); OcsOrderSession doOcsOrder = OcsOrderVoAssembler.extractOcsOrderSession(factory, ocsOrder, objMap); setDefaults(doOcsOrder, state); //update the order factory.save(doOcsOrder); //WDEV-7493 Set<?> s =doOcsOrder.getSpecimens(); for (Object object : s) { OrderSpecimen os = (OrderSpecimen)object; if(ConfigFlag.DOM.USE_GHG_SPECIMEN_NUMBERING.getValue()) { String placerNumber = new GHGWinPathNumbers().getOCSNumber(os.getId()); os.setPlacerOrdNum(placerNumber); } else { DecimalFormat myFormatter = new DecimalFormat("000000000"); // Required for placer order number os.setPlacerOrdNum(myFormatter.format(os.getId())); } } Set<?> investigations = doOcsOrder.getInvestigations(); for (Object object : investigations) { OrderInvestigation investigation = (OrderInvestigation) object; if (Category.PATHOLOGY.equals(LookupHelper.getCategoryInstance(getLookupService(), investigation.getInvestigation().getInvestigationIndex().getCategory().getId()))) { if (InvEventType.SINGLE_EVENT.equals(LookupHelper.getInvEventTypeInstance(getLookupService(), investigation.getInvestigation().getEventType().getId())) && investigation.getSpecimen() != null) { List<?> specimens = investigation.getSpecimen(); if (specimens.size() > 0) { investigation.setPlacerOrdNum(((OrderSpecimen)specimens.get(0)).getPlacerOrdNum()); } } else { DecimalFormat timeSeriesFormatter = new DecimalFormat("G00000000"); investigation.setPlacerOrdNum(timeSeriesFormatter.format(investigation.getId())); } } else { DecimalFormat nonPathologyFormatter = new DecimalFormat("000000000"); investigation.setPlacerOrdNum(nonPathologyFormatter.format(investigation.getId())); } } factory.save(doOcsOrder); //end WDEV-7493 see change below as well if (voCollWorkListItem != null && voCollWorkListItem.size() > 0) voCollWorkListItem = 
saveWorkListItems(factory, voCollWorkListItem, objMap, state); CatsReferral doReferral = null; if(referral != null) doReferral = (CatsReferral) factory.getDomainObject(referral); //WDEV-6787 //get each OrderInvestigation record and create and save a new OrderInvAppt record for the appt //add the newly created OrdInvAppt records to the CatReferral and save and (send message - at end of method) Map extEvents = null; if(appt != null) { extEvents = new HashMap(); Booking_Appointment doAppt = (Booking_Appointment)factory.getDomainObject(appt); Iterator it = doOcsOrder.getInvestigations().iterator(); while(it.hasNext()) { OrderInvestigation doOrderInv = (OrderInvestigation) it.next(); if (!(doOrderInv.getOrdInvCurrentStatus() != null && getDomLookup(OrderInvStatus.CANCEL_REQUEST).equals(doOrderInv.getOrdInvCurrentStatus().getOrdInvStatus()))) { //dont link invs to appts that can't be scheduled if(!canInvBeScheduled(doOrderInv)) continue; OrderInvAppt doOrdInvAppt = new OrderInvAppt(); //WDEV-6967 OrderedInvestigationStatus status = new OrderedInvestigationStatus(); status.setOrdInvStatus(getDomLookup(OrderInvStatus.ORDERED)); status.setChangeDateTime(new java.util.Date()); status.setProcessedDateTime(new java.util.Date()); status.setChangeUser(getHcpLiteUser() != null ? 
((HcpLiteVo) getHcpLiteUser()).getIMosName() : null); doOrderInv.setOrdInvCurrentStatus(status); doOrderInv.getOrdInvStatusHistory().add(status); doOrdInvAppt.setOrderInvestigation(doOrderInv); doOrdInvAppt.setAppointment(doAppt); if(doAppt != null) doOrderInv.setAppointmentDate(doAppt.getAppointmentDate()); doOrdInvAppt.setStatus(getDomLookup(PollStatus.CANCELNOTPROCESSED)); if(doReferral != null) doReferral.getOrderInvAppts().add(doOrdInvAppt); if (doOrdInvAppt.getAppointment() != null && doOrdInvAppt.getOrderInvestigation() != null) { extEvents.put(new OrderInvestigationRefVo(doOrdInvAppt.getOrderInvestigation().getId(), doOrdInvAppt.getOrderInvestigation().getVersion()), new Booking_AppointmentRefVo(doOrdInvAppt.getAppointment().getId(), doOrdInvAppt.getAppointment().getVersion())); } } } } //update the CatsReferal WDEV-5560 if(doReferral != null) { doReferral.getInvestigationOrders().add(doOcsOrder); //WDEV-5727 - setHasInvestigations doReferral.setHasInvestigations(true); factory.save(doReferral); } //WDEV-6787 - send message after save as message end was causing data problems - generateNewOrderEvent method did some get calls if(extEvents != null) { OCSExternalEvents impl = (OCSExternalEvents) getDomainImpl(OCSExternalEventsImpl.class); Iterator it = extEvents.entrySet().iterator(); while(it.hasNext()) { Map.Entry entry = (Entry) it.next(); impl.generateNewOrderEvent((Booking_AppointmentRefVo )entry.getValue(), (OrderInvestigationRefVo)entry.getKey()); } } return OcsOrderVoAssembler.create(doOcsOrder); } /** * Remove OrderSpecimens from OrderInvestigation created in error for DFT * @param state */ private void clearOrderSpecimensFromOrderInvestigationsDFT(OcsOrderVo ocsOrder, ORDERSTATE state) { OrderSpecimenVoCollection specimens = new OrderSpecimenVoCollection(); OrderInvestigationRefVoCollection investigationsDFT = new OrderInvestigationRefVoCollection(); for (OrderInvestigationVo investigation : ocsOrder.getInvestigations()) { if 
(InvEventType.TIME_SERIES.equals(investigation.getInvestigation().getEventType())) { investigation.setSpecimen(null); investigationsDFT.add(investigation); // Do not change the status of Cancelled or Cancel Requests orders if (investigation.getOrdInvCurrentStatus() == null || (!OrderInvStatus.CANCEL_REQUEST.equals(investigation.getOrdInvCurrentStatus().getOrdInvStatus()) && !OrderInvStatus.CANCELLED.equals(investigation.getOrdInvCurrentStatus().getOrdInvStatus()))) { // Set status as requested if (ORDERSTATE.AUTHORISING.equals(state)) { investigation.getOrdInvCurrentStatus().setOrdInvStatus(OrderInvStatus.ORDERED); investigation.setDisplayFlag(OcsDisplayFlag.REQUESTED); } else { investigation.getOrdInvCurrentStatus().setOrdInvStatus(OrderInvStatus.AWAITING_AUTHORISATION); investigation.setDisplayFlag(OcsDisplayFlag.REQUESTED); } } } } for (OrderSpecimenVo specimen : ocsOrder.getSpecimens()) { OrderInvestigationRefVoCollection investigationNonDFT = getNonDFTInvestigation(specimen, investigationsDFT); if (investigationNonDFT != null && investigationNonDFT.size() > 0) { specimen.setInvestigations(investigationNonDFT); specimens.add(specimen); } } ocsOrder.setSpecimens(specimens); } /** * For SpecimenWorklist items created for DFT investigation remove the OrderSpecimen entry */ private void processSpecimenWorklistItems(OcsOrderVo order, SpecimenWorkListItemVoCollection specimenWorklistItems) { if (specimenWorklistItems != null) { for (SpecimenWorkListItemVo specimen : specimenWorklistItems) { if (specimen.getDFTOrderInvestigation() != null) specimen.setSpecimen(null); } } } private OrderInvestigationRefVoCollection getNonDFTInvestigation(OrderSpecimenVo specimen, OrderInvestigationRefVoCollection investigationsDFT) { OrderInvestigationRefVoCollection nonDFTInvestigations = new OrderInvestigationRefVoCollection(); if (specimen.getInvestigations() != null) { for (OrderInvestigationRefVo investigation : specimen.getInvestigations()) { if (investigationsDFT == null || 
!investigationsDFT.contains(investigation)) nonDFTInvestigations.add(investigation); } } return nonDFTInvestigations; } private boolean canInvBeScheduled(OrderInvestigation doOrderInv) { if(doOrderInv.getInvestigation() != null && doOrderInv.getInvestigation().getProviderService() != null && doOrderInv.getInvestigation().getProviderService().getLocationService() != null && doOrderInv.getInvestigation().getProviderService().getLocationService().getService() != null && doOrderInv.getInvestigation().getProviderService().getLocationService().getService().isCanBeScheduled() != null) return doOrderInv.getInvestigation().getProviderService().getLocationService().getService().isCanBeScheduled(); return false; } private void setDefaults(OcsOrderSession doOcsOrder, ORDERSTATE state) { if (doOcsOrder == null || state == null) throw new CodingRuntimeException("Coding Error - Order or state is null"); if(state.equals(ORDERSTATE.CANCELLED)) doOcsOrder.setAuthorisationOrderStatus( getDomLookup(AuthorisationOrderStatus.CANCELLED)); else if(state.equals(ORDERSTATE.REQUESTING_AUTHORISATION)) doOcsOrder.setAuthorisationOrderStatus( getDomLookup(AuthorisationOrderStatus.AWAITING_AUTHORISATION)); else if(state.equals(ORDERSTATE.AUTHORISING)) doOcsOrder.setAuthorisationOrderStatus( getDomLookup(AuthorisationOrderStatus.AUTHORISED)); //defaults Iterator it = doOcsOrder.getInvestigations().iterator(); while (it.hasNext()) { OrderInvestigation doOrderInv = (OrderInvestigation) it.next(); if (OrderInvStatus.CANCEL_REQUEST.getID() != doOrderInv.getOrdInvCurrentStatus().getOrdInvStatus().getId()) //WDEV-16998 { doOrderInv.getOrdInvStatusHistory().add(doOrderInv.getOrdInvCurrentStatus()); doOrderInv.setDisplayTimeSupplied(Boolean.FALSE); // WDEV-3602 doOrderInv.setResponsibleClinician(doOcsOrder.getResponsibleClinician()); doOrderInv.setResponsibleGp(doOcsOrder.getResponsibleGp()); doOrderInv.setPatientClinic(doOcsOrder.getPatientClinic()); 
doOrderInv.setPatientLocation(doOcsOrder.getPatientLocation());

            // WDEV-18165 set ParentLocation dependent on current location
            if (doOrderInv.getPatientLocation() != null)
                doOrderInv.setParentLocation(doOrderInv.getPatientLocation().getParentLocation());
            else if (doOrderInv.getPatientClinic() != null)
                doOrderInv.setParentLocation(doOrderInv.getPatientClinic().getClinicLocation());

            // If the OrderInvestigation locations are not set, then its outpatient department
            // so get the parent location from there.
            if (doOrderInv.getParentLocation() == null && doOcsOrder.getOutpatientDept() != null)
                doOrderInv.setParentLocation(doOcsOrder.getOutpatientDept().getParentLocation());
        }
    }

    // doOcsOrder.getSpecimens()
    //WDEV-7493
    for (Object iterable_element : doOcsOrder.getSpecimens())
    {
        OrderSpecimen spec = (OrderSpecimen) iterable_element;
        // NOTE(review): placeholder placer-order number built from Math.random(); it is neither
        // guaranteed unique nor cryptographically random - confirm nothing downstream relies on it.
        String token = "Do not use:" + (Double.valueOf(Math.random()*10000).toString().substring(0,4));
        spec.setPlacerOrdNum(token);
    }
    //End WDEV-7493
}

/**
 * Fetches an investigation by id and assembles it for display under a specimen.
 * @throws DomainRuntimeException when idInvestigation is null
 */
public InvestigationUnderSpecimenVo getInvestigationUnderSpecimen(Integer idInvestigation)
{
    if (idInvestigation == null)
        throw new DomainRuntimeException("No id provided for getInvestigationUnderSpecimen");
    DomainFactory factory = getDomainFactory();
    return InvestigationUnderSpecimenVoAssembler.create((Investigation) factory.getDomainObject(Investigation.class, idInvestigation));
}

/** Delegates to the SpecimenCollectionConfig domain implementation. */
public SpecimenCollectionListConfigDetailsVoCollection listSpecimenCollectionDetails(SpecimenCollectionMethod type, Date date)
{
    SpecimenCollectionConfig impl = (SpecimenCollectionConfig) getDomainImpl(SpecimenCollectionConfigImpl.class);
    return impl.listSpecimenCollectionDetails(type, date);
}

/** Lists lite clinical notes filtered by care context and note type. */
public ClinicalNotesLiteVoCollection listClinicalNotesByCareContextAndNoteType(CareContextRefVo careContext, ClinicalNoteType noteType)
{
    ClinicalNotesList impl = (ClinicalNotesList) getDomainImpl(ClinicalNotesListImpl.class);
    ClinicalNotesFilterVo voFilter = new ClinicalNotesFilterVo();
    voFilter.setNoteType(noteType);
voFilter.setCareContext(careContext);
    return impl.listClinicalNoteLite(voFilter);
}

/** Lists active locations whose name matches the given text. */
public LocationLiteVoCollection listActiveLocationsByName(String name)
{
    OrganisationAndLocation impl = (OrganisationAndLocation) getDomainImpl(OrganisationAndLocationImpl.class);
    return impl.listActiveLocationsByName(name);
}

/**
 * Returns the specimen containers for the first investigation of a profile.
 * NOTE(review): both branches return getPaediatricContainers() - the adult (else) branch looks
 * like a copy/paste bug; the commented-out code below suggests an adult-container accessor was
 * intended. Confirm and fix against the Specimen domain class.
 */
public PathSpecimenContainerVoCollection getAdultOrPaediatricContainerforProfileInvestigation(InvestigationIndexRefVo invIndex, Boolean isUsePaediatricContainer)
{
    if (invIndex == null)
        throw new CodingRuntimeException("No id supplied for get - method getSpecimenDetailsforProfileInvestigation()");
    DomainFactory factory = getDomainFactory();
    InvestigationIndex doInvIndex = (InvestigationIndex) factory.getDomainObject(InvestigationIndex.class, invIndex.getID_InvestigationIndex());
    Investigation doInvestigation = null;
    // Only the first investigation of the index is considered.
    if (doInvIndex.getInvestigations().iterator().hasNext())
        doInvestigation = (Investigation) doInvIndex.getInvestigations().iterator().next();
    if (doInvestigation != null)
    {
        // TODO - This needs to be updated when coding for complex investigation
        Specimen specimen = (Specimen) doInvestigation.getPathInvDetails().getSpecimens().get(0);
        if (isUsePaediatricContainer.booleanValue())
        {
            return PathSpecimenContainerVoAssembler.createPathSpecimenContainerVoCollectionFromPathSpecimenContainer(specimen.getPaediatricContainers());
            // Container paediatricContainer = (Container) specimen.getPaediatricContainers();
            // return PathSpecimenContainerVoAssembler.create(paediatricContainer.getSpecContainer());
        }
        else
        {
            return PathSpecimenContainerVoAssembler.createPathSpecimenContainerVoCollectionFromPathSpecimenContainer(specimen.getPaediatricContainers());
            // Container adultContainer = (Container) specimen.getPaediatricContainers().get(0);
            // return PathSpecimenContainerVoAssembler.create(adultContainer.getSpecContainer());
        }
    }
    return null;
}

/** Fetches the pathology investigation details for the given investigation id, or null. */
public PathInvDetailsVo getPathInvDetailsForInvestigation(Integer idInvestigation)
{
    DomainFactory factory =
getDomainFactory();
    List invDetails = factory.find("select inv.pathInvDetails from Investigation inv where inv.id = :idInv", new String[]{"idInv"}, new Object[]{idInvestigation});
    if (invDetails != null && invDetails.size() > 0)
        return PathInvDetailsVoAssembler.createPathInvDetailsVoCollectionFromPathInvDetails(invDetails).get(0);
    return null;
}

/**
 * Persists the given specimen worklist items, marking each AWAITING_AUTHORISATION while the
 * order is still requesting authorisation, and returns the re-assembled collection.
 * @throws CodingRuntimeException when workListitems is null
 * @throws StaleObjectException on optimistic-lock failure during save
 */
private SpecimenWorkListItemVoCollection saveWorkListItems(DomainFactory factory, SpecimenWorkListItemVoCollection workListitems, HashMap objMap, ORDERSTATE state) throws StaleObjectException
{
    if (workListitems == null)
        throw new CodingRuntimeException("No workListitems to save - method saveWorkListItems()");
    List items = SpecimenWorkListItemVoAssembler.extractSpecimenWorkListItemList(factory, workListitems, null, objMap);
    Iterator it = items.iterator();
    while (it.hasNext())
    {
        SpecimenWorkListItem item = (SpecimenWorkListItem) it.next();
        if(state.equals(ORDERSTATE.REQUESTING_AUTHORISATION))
            item.setCollectionStatus(getDomLookup(ims.ocrr.vo.lookups.SpecimenCollectionStatus.AWAITING_AUTHORISATION));
        factory.save(item);
    }
    workListitems = SpecimenWorkListItemVoAssembler.createSpecimenWorkListItemVoCollectionFromSpecimenWorkListItem(items);
    return workListitems;
}

/**
 * Lists active GPs whose surname starts with the given text; any '%' typed by the user is
 * stripped before the LIKE pattern is built.
 */
public GpShortVoCollection listGPsBySurname(String text)
{
    if(text == null)
        throw new CodingRuntimeException("text is mandatory in method listGPsBySurname");
    text = text.replaceAll("%", "");
    String hql = "from Gp gp where gp.name.upperSurname like :surname and gp.status = :activeStatus order by gp.name.upperSurname";
    return GpShortVoAssembler.createGpShortVoCollectionFromGp(getDomainFactory().find(hql, new String[] {"surname", "activeStatus"}, new Object[] {text.toUpperCase() + "%", getDomLookup(GPStatus.ACTIVE)}));
}

/** Lists phlebotomy rounds matching the given date, round and ward, or null when none match. */
public PhlebotomyRoundShortVoCollection listClosedRounds(Date date, SpecimenCollectionTime round, LocationRefVo ward)
{
    if (date == null || round == null || ward == null)
        return null;
    DomainFactory factory = getDomainFactory();
    String hql = "select pRound from PhlebotomyRound" + " as pRound left join pRound.wards as ward where pRound.date = :date and pRound.roundToCollect = :round and ward.id = :wardId";
    List items = factory.find(hql, new String[]{"date", "round", "wardId"}, new Object[]{date.getDate(), getDomLookup(round), ward.getID_Location()});
    if (items != null && items.size() > 0)
        return PhlebotomyRoundShortVoAssembler.createPhlebotomyRoundShortVoCollectionFromPhlebotomyRound(items);
    return null;
}

/** Lists responsible medics whose surname matches the given name. */
public HcpLiteVoCollection listResponsibleMedicsByName(String strMedicName)
{
    HcpFilter filter = new HcpFilter();
    PersonName name = new PersonName();
    name.setSurname(strMedicName);
    filter.setQueryName(name);
    HcpAdmin impl = (HcpAdmin) getDomainImpl(HcpAdminImpl.class);
    HcpLiteVoCollection voCollHcp = impl.listResponsibleMedics(filter); //WDEV-11656
    return voCollHcp;
}

/** Lists active responsible HCPs whose surname starts with the given name, sorted; null for empty input. */
public HcpLiteVoCollection listHcpLiteByName(String hcpName)
{
    if (hcpName == null || hcpName.length() == 0)
        return null;
    String query = "SELECT hcp FROM Hcp AS hcp LEFT JOIN hcp.mos AS mos WHERE hcp.isActive = 1 AND hcp.isHCPaResponsibleHCP = 1 AND mos.name.upperSurname LIKE :HCP_NAME";
    return HcpLiteVoAssembler.createHcpLiteVoCollectionFromHcp(getDomainFactory().find(query, "HCP_NAME", hcpName.toUpperCase() + "%")).sort();
}

/** Lists all active hospitals with their mappings. */
public LocShortMappingsVoCollection listActiveHospitals()
{
    OrganisationAndLocation impl = (OrganisationAndLocation) getDomainImpl(OrganisationAndLocationImpl.class);
    return impl.listActiveHospitals();
}

/** Lists the active questions configured for the given service id. */
public ServiceQuestionShortVoCollection listActiveServiceQuestions(Integer serviceId)
{
    ServiceQuestions impl = (ServiceQuestions) getDomainImpl(ServiceQuestionsImpl.class);
    ServiceRefVo voServiceRef = new ServiceRefVo();
    voServiceRef.setID_Service(serviceId);
    return impl.listServiceQuestions(voServiceRef);
}

/** Lists the active questions configured for the given category. */
public CategoryQuestionShortVoCollection listActiveCategoryQuestions(Category category)
{
    CategoryQuestions impl = (CategoryQuestions) getDomainImpl(CategoryQuestionsImpl.class);
    return
impl.listCategoryQuestions(category);
}

/**
 * Resolves the parent hospital for a ward, location site or clinic value object.
 * NOTE(review): the clinic branch returns LocationLiteVoAssembler.create(...) rather than
 * walking up via getHospital(...) like the other branches - confirm this asymmetry is intended.
 * @throws CodingRuntimeException when the argument is null
 */
public ILocation getParentHospital(ValueObject wardOrClinicOrOutpatientDepartment)
{
    if(wardOrClinicOrOutpatientDepartment == null)
        throw new CodingRuntimeException("wardOrClinic parameter is null in method getParentHospital");
    DomainFactory factory = getDomainFactory();
    if(wardOrClinicOrOutpatientDepartment instanceof LocationLiteVo)
    {
        LocationLiteVo voLocLite = (LocationLiteVo) wardOrClinicOrOutpatientDepartment;
        Location doLocation = (Location) factory.getDomainObject(Location.class, voLocLite.getID());
        return getHospital(doLocation);
    }
    else if (wardOrClinicOrOutpatientDepartment instanceof LocSiteLiteVo)
    {
        LocSiteLiteVo voLocStieLite = (LocSiteLiteVo) wardOrClinicOrOutpatientDepartment;
        Location doLocation = (Location) factory.getDomainObject(Location.class, voLocStieLite.getID_Location());
        return getHospital(doLocation);
    }
    else if(wardOrClinicOrOutpatientDepartment instanceof ClinicLiteVo)
    {
        ClinicLiteVo voClinLite = (ClinicLiteVo) wardOrClinicOrOutpatientDepartment;
        Clinic doClinic = (Clinic) factory.getDomainObject(Clinic.class, voClinLite.getID_Clinic());
        Location doLocation = doClinic.getClinicLocation();
        return LocationLiteVoAssembler.create(doLocation);
    }
    return null;
}

/** Walks up the parent-location chain until a hospital LocSite is found, or returns null. */
private LocSiteShortVo getHospital(Location doLocation)
{
    if(doLocation instanceof LocSite && doLocation.getType().equals(getDomLookup(LocationType.HOSP)))
        return LocSiteShortVoAssembler.create((LocSite) doLocation);
    while(doLocation.getParentLocation() != null)
    {
        doLocation = doLocation.getParentLocation();
        if(doLocation instanceof LocSite && doLocation.getType().equals(getDomLookup(LocationType.HOSP)))
            return LocSiteShortVoAssembler.create((LocSite) doLocation);
    }
    return null;
}

/** Lists active wards for a hospital, matching by name (lite VOs). */
public LocationLiteVoCollection listWardsForHospitalByNameLite(LocationRefVo location, String name)
{
    OrganisationAndLocation impl = (OrganisationAndLocation) getDomainImpl(OrganisationAndLocationImpl.class);
    //WDEV-6721 - if user enters more than 1 '%' - search was failing
    String[] arr = null;
    if(name.contains("%"))
    {
        arr = name.split("%");
        if(arr.length > 0)
            name = arr[0] + "%";
        else
            name = "%";
    }
    return impl.listActiveWardsForHospitalByNameLite(location, name);
}

/** Lists active clinics of a hospital whose name contains the given text (case-insensitive). */
public ClinicLiteVoCollection listClinicsForHospitalByNameLite(LocationRefVo location, String name)
{
    DomainFactory factory = getDomainFactory();
    List clinics = factory.find("from Clinic clin where clin.clinicLocation.id = :idLocation and upper(clin.clinicName) like :clinName and clin.isActive = 1 order by clin.clinicName", new String[]{"idLocation", "clinName"}, new Object[]{location.getID_Location(), "%" + name.toUpperCase() + "%"});
    return ClinicLiteVoAssembler.createClinicLiteVoCollectionFromClinic(clinics);
}

/** Lists active outpatient departments of a hospital, matching by name (lite VOs). */
public LocationLiteVoCollection listOutpatDeptsForHospitalByName(LocationRefVo hospital, String nameFilter)
{
    OrganisationAndLocation impl = (OrganisationAndLocation) getDomainImpl(OrganisationAndLocationImpl.class);
    return impl.listActiveOutpatDeptsForHospitalByNameLite(hospital, nameFilter);
}

/** Lists active wards of a hospital, matching by name (short VOs). */
public LocShortVoCollection listWardsForHospitalByNameShort(LocationRefVo hospital, String name)
{
    OrganisationAndLocation impl = (OrganisationAndLocation) getDomainImpl(OrganisationAndLocationImpl.class);
    return impl.listActiveWardsForHospitalByName(hospital, name);
}

/** Assembles the full order VO for an order-session ref, or null when no id is supplied. */
public OcsOrderVo getOcsOrder(OcsOrderSessionRefVo ocsOrder)
{
    if(ocsOrder == null || ocsOrder.getID_OcsOrderSession() == null)
        return null;
    OcsOrderSession doOcsOrder = (OcsOrderSession) getDomainFactory().getDomainObject(ocsOrder);
    return OcsOrderVoAssembler.create(doOcsOrder);
}

/**
 * Lists active members of staff by surname, derived from the HCP search (capped at 400 hits).
 * @throws CodingRuntimeException when name is null
 */
public MemberOfStaffLiteVoCollection listActiveMosByName(String name)
{
    if(name == null)
        throw new CodingRuntimeException("name parameter is null in method listActiveMosByName");
    HcpAdmin impl = (HcpAdmin) getDomainImpl(HcpAdminImpl.class);
    //WDEV-8204
    HcpLiteVoCollection hcps = impl.listHcpLiteByName(name, 400); //WDEV-11656
    if (hcps == null || hcps.size()==0)
        return null;
    MemberOfStaffLiteVoCollection result = new
MemberOfStaffLiteVoCollection();
    // Project the HCP hits down to their member-of-staff records.
    for (int i=0 ; i<hcps.size() ; i++)
    {
        result.add(hcps.get(i).getMos());
    }
    return result;
    //return MemberOfStaffLiteVoAssembler.createMemberOfStaffLiteVoCollectionFromMemberOfStaff(members).sort();
}

/** Returns the profile-level "phlebotomist may collect" flag for an investigation, or null. */
public Boolean getProfileLevelPhlebMayCollect(Integer parentInvestigationId)
{
    DomainFactory factory = getDomainFactory();
    List invDetails = factory.find("select inv.investigationIndex.phlebMayCollect from Investigation inv where inv.id = :idInv", new String[]{"idInv"}, new Object[]{parentInvestigationId});
    if (invDetails != null && invDetails.size() > 0)
        return (Boolean)invDetails.get(0);
    return null;
}

/** Lists active outpatient departments of a hospital, matching by name (short VOs). */
public LocShortVoCollection listOutpatDeptsForHospitalByNameShort(LocationRefVo hospital, String nameFilter)
{
    OrganisationAndLocation impl = (OrganisationAndLocation) getDomainImpl(OrganisationAndLocationImpl.class);
    return impl.listActiveOutpatDeptsForHospitalByName(hospital, nameFilter);
}

/**
 * Returns the first inpatient episode (with a non-null PAS-event location) for the patient,
 * optionally narrowed to a specific PAS event; null when none is found.
 * @throws CodingRuntimeException when patient is null
 */
public MyOrderInpatEpisVo getInpatientEpisode(PatientRefVo patient, PASEventRefVo pasEvent)
{
    if (patient == null)
        throw new CodingRuntimeException("patient parameter is null method getInpatientEpisode");
    DomainFactory factory = getDomainFactory();
    String hql = "";
    List inps = null;
    if(pasEvent != null)
    {
        hql = "from InpatientEpisode as inp where inp.pasEvent.patient.id = :idPatient and inp.pasEvent.location is not null and inp.pasEvent.id = :idPasEvent";
        inps = factory.find(hql, new String[]{"idPatient", "idPasEvent"}, new Object[]{patient.getID_Patient(), pasEvent.getID_PASEvent()});
    }
    else
    {
        hql = "from InpatientEpisode as inp where inp.pasEvent.patient.id = :idPatient and inp.pasEvent.location is not null";
        inps = factory.find(hql, new String[]{"idPatient"}, new Object[]{patient.getID_Patient()});
    }
    if (inps != null && inps.size() > 0)
        return MyOrderInpatEpisVoAssembler.create((InpatientEpisode) inps.get(0));
    /* WDEV-16954 for merged patients you can have more than one inpatient records
    else if (inps != null && inps.size() > 1) throw new
DomainRuntimeException("More than 1 current inpatient record found for patient with id = " + patient.getID_Patient()); */
    return null;
}

/**
 * Returns the patient's outpatient attendance: today's appointment when one exists (WDEV-16607),
 * otherwise the first non-cancelled attendance (within the last 7 days when no PAS event given).
 * @throws CodingRuntimeException when patient is null
 */
public MyOrderOutpatAttendVo getOutPatientAttendance(PatientRefVo patient, PASEventRefVo pasEvent)
{
    if (patient == null)
        throw new CodingRuntimeException("patient parameter is null method getOutPatientAttendance");
    DomainFactory factory = getDomainFactory();
    // WDEV-16607
    MyOrderOutpatAttendVo todayAppointment = findTodayAppointment(patient, pasEvent);
    if (todayAppointment != null)
        return todayAppointment;
    String hql = "";
    List outps = null;
    if(pasEvent != null)
    {
        hql = "from OutpatientAttendance as outp " + " where outp.pasEvent.patient.id = :idPatient " + " and outp.pasEvent.id = :idPasEvent " + " and outp.clinic is not null " + " and outp.appointmentStatus.id != :cancelledStatus";
        outps = factory.find(hql, new String[]{"idPatient", "idPasEvent", "cancelledStatus"}, new Object[]{patient.getID_Patient(), pasEvent.getID_PASEvent(), new Integer(Status_Reason.CANCELLED.getId())});
    }
    else
    {
        hql = "from OutpatientAttendance as outp " + " where outp.pasEvent.patient.id = :idPatient " + " and outp.clinic is not null " + " and outp.appointmentStatus.id != :cancelledStatus" + " and outp.appointmentDateTime >= :todayMinus7Days order by outp.appointmentDateTime";
        outps = factory.find(hql, new String[]{"idPatient", "todayMinus7Days", "cancelledStatus"}, new Object[]{patient.getID_Patient(), new Date().addDay(-7).getDate(), new Integer(Status_Reason.CANCELLED.getId())});
    }
    if(outps != null && outps.size() > 0)
        return MyOrderOutpatAttendVoAssembler.create((OutpatientAttendance) outps.get(0));
    return null;
}

// WDEV-16607
/**
 * Finds a non-cancelled appointment for the patient (optionally narrowed to a PAS event)
 * falling between today 00:00:00 and 23:59:59, or null when there is none.
 */
private MyOrderOutpatAttendVo findTodayAppointment(PatientRefVo patient, PASEventRefVo pasEvent)
{
    if (patient == null)
        return null;
    ArrayList<String> paramNames = new ArrayList<String>();
    ArrayList<Object> paramValues = new ArrayList<Object>();
    StringBuilder todayQuery = new StringBuilder("FROM OutpatientAttendance AS outp ");
    todayQuery.append("WHERE outp.pasEvent.patient.id = :PATIENT_ID ");
    paramNames.add("PATIENT_ID");
    paramValues.add(patient.getID_Patient());
    if (pasEvent != null)
    {
        todayQuery.append("AND outp.pasEvent.id = :PAS_EVENT_ID ");
        paramNames.add("PAS_EVENT_ID");
        paramValues.add(pasEvent.getID_PASEvent());
    }
    todayQuery.append("AND outp.clinic is not null AND outp.appointmentStatus.id != :CANCELLED_STATUS and outp.appointmentDateTime BETWEEN :TODAY AND :TODAY_MIDNIGHT");
    paramNames.add("CANCELLED_STATUS");
    paramValues.add(Status_Reason.CANCELLED.getID());
    paramNames.add("TODAY");
    paramValues.add(new DateTime(new Date(), new Time(0, 0, 0)).getJavaDate());
    paramNames.add("TODAY_MIDNIGHT");
    paramValues.add(new DateTime(new Date(), new Time(23, 59, 59)).getJavaDate());
    MyOrderOutpatAttendVoCollection outpatientAttendances = MyOrderOutpatAttendVoAssembler.createMyOrderOutpatAttendVoCollectionFromOutpatientAttendance(getDomainFactory().find(todayQuery.toString(), paramNames, paramValues));
    if (outpatientAttendances != null && outpatientAttendances.size() > 0)
        return outpatientAttendances.get(0);
    return null;
}

/**
 * Lists active, non-virtual locations offering the given service, or null when none.
 * @throws CodingRuntimeException when service is null
 */
public LocationLiteVoCollection listActiveLocationForService(ServiceRefVo service)
{
    if(service == null)
        throw new CodingRuntimeException("service param is null in method : listActiveLocationForService");
    DomainFactory factory = getDomainFactory();
    List locs = factory.find("select locService.location from LocationService as locService left join locService.location as loc where locService.service.id = :serviceId and locService.isActive = true and loc.isVirtual = false and loc.isActive = true",new String[]{"serviceId"},new Object[]{service.getID_Service()});
    if(locs != null && locs.size() > 0)
        return LocationLiteVoAssembler.createLocationLiteVoCollectionFromLocation(locs);
    return null;
}

/** Lists active A&amp;E departments of a hospital, matching by name (lite VOs). */
public LocationLiteVoCollection listAandEForHospitalByNameLite(LocationRefVo hosp, String name)
{
    OrganisationAndLocation impl = (OrganisationAndLocation)
getDomainImpl(OrganisationAndLocationImpl.class);
    return impl.listActiveAandEForHospitalByNameLite(hosp, name);
}

/**
 * Lists active, non-virtual user-entered locations under a hospital whose name starts
 * with the given text.
 * @throws CodingRuntimeException when the hospital ref or its id is null
 */
public LocationLiteVoCollection listUserEnteredLocationsForHospitalByNameLite(LocationRefVo hosp, String name)
{
    if(hosp == null || hosp.getID_Location() == null)
        throw new CodingRuntimeException("Hospital parameter value not supplied for method :listUserEnteredLocationsForHospitalNyNameLite");
    DomainFactory factory = getDomainFactory();
    List locs = factory.find("from Location loc where loc.parentLocation.id = :hospId and loc.upperName like :name and loc.type.id >= 0 and loc.isActive = true and loc.isVirtual = false", new String[]{"hospId", "name"}, new Object[] {hosp.getID_Location(), name.toUpperCase() + "%"});
    if(locs != null && locs.size() > 0)
        return LocationLiteVoAssembler.createLocationLiteVoCollectionFromLocation(locs);
    return null;
}

/**
 * Delegates to the CARE_UK scheduling helper, loaded reflectively so that non-CARE_UK
 * deployments (where the class is absent) only log an error instead of failing.
 */
public void updateCatsReferralAdditionalInvStatus(CatsReferralRefVo catsReferral) throws StaleObjectException
{
    IOCRRSchedulingHelper impl;
    try
    {
        impl = (IOCRRSchedulingHelper) getDomainImpl(Class.forName("ims.ocrr.helper.OCRRSchedulingHelper"));
        impl.updateCatsReferralAdditionalInvStatus(catsReferral);
    }
    catch (ClassNotFoundException e)
    {
        LOG.error("Attempting to Load CARE_UK code in non care uk context", e);
    }
}

/**
 * Returns the referring GP recorded on a CaTS referral, or null.
 * NOTE(review): the referral id is concatenated into the HQL string - prefer a named
 * parameter for consistency with the other queries in this class.
 */
public GpLiteVo getReferrerGP(CatsReferralRefVo catsRef)
{
    if (catsRef == null || catsRef.getID_CatsReferral() == null)
        throw new CodingRuntimeException("catsRef is null or id not provided in method getReferrerGP");
    String hql = "select refDetail.gPName from CatsReferral as cats left join cats.referralDetails as refDetail where cats.id = '" + catsRef.getID_CatsReferral() + "'";
    List gps = getDomainFactory().find(hql);
    if(gps.size() > 0)
    {
        return GpLiteVoAssembler.create((Gp)gps.get(0));
    }
    return null;
}

//WDEV-9913 - get the latest record
public HcpLiteVo getInitiallySeenByHcpFromConsultationClinicalNotesByReferral(CatsReferralRefVo referral)
{
    if (referral == null || referral.getID_CatsReferral() ==
null)
        throw new CodingRuntimeException("referral is null or id not provided in method getInitiallySeenByHcpFromConsultationClinicalNotesByReferral");
    // Latest consultation note first; the referral id is concatenated into the HQL
    // (NOTE(review): prefer a named parameter, as used elsewhere in this class).
    String hql = "select cons.initiallySeenBy.authoringHcp from ConsultationClinicalNotes as cons left join cons.catsReferral as referral where referral.id = " + referral.getID_CatsReferral() + " order by cons.systemInformation.creationDateTime desc ";
    List hcpList = getDomainFactory().find(hql);
    if(hcpList != null && hcpList.size() >= 1)
    {
        Hcp doHcp = (Hcp)hcpList.get(0);
        if(doHcp != null)
            return HcpLiteVoAssembler.create(doHcp);
    }
    return null;
}

//WDEV-11915
/** Lists all active outpatient departments for a hospital (no name filter). */
public LocationLiteVoCollection listActiveOutpatientDepartment(LocationRefVo hospital)
{
    if(hospital == null || !hospital.getID_LocationIsNotNull())
        throw new CodingRuntimeException("Can not list Outpatient Departments on null Hospital Id.");
    OrganisationAndLocation orgLoc = (OrganisationAndLocation)getDomainImpl(OrganisationAndLocationImpl.class);
    return orgLoc.listActiveOutpatDeptsForHospitalByNameLite(hospital, null);
}

//WDEV-11915
/** Returns the outpatient department linked to a clinic, or null when not exactly one match. */
public LocationLiteVo getOutpatientDepartmentByClinic(ClinicRefVo clinic)
{
    if(clinic == null || !clinic.getID_ClinicIsNotNull())
        throw new CodingRuntimeException("Can not get Outpatient Department on null Clinic Id.");
    DomainFactory factory = getDomainFactory();
    String query = "select c.outpatientDept from Clinic as c where c.id = :ClinicId";
    List<?> department = factory.find(query, new String[] {"ClinicId"}, new Object[] {clinic.getID_Clinic()});
    if(department != null && department.size() == 1)
    {
        return LocationLiteVoAssembler.create((Location) department.get(0));
    }
    return null;
}

//WDEV-11938
/** Returns the help text VO for an investigation id, or null when no id is supplied. */
public InvestigationHelpTextVo getInvestigationHelpText(Integer investigation)
{
    if(investigation == null)
        return null;
    return InvestigationHelpTextVoAssembler.create((Investigation) getDomainFactory().getDomainObject(Investigation.class, investigation));
}

//wdev-12864
public CatsReferralListVo getCatsReferralListVo(CatsReferralRefVo
catsRefVo)
{
    // Assembles the list VO straight from the referral domain object.
    return CatsReferralListVoAssembler.create((CatsReferral)getDomainFactory().getDomainObject(CatsReferral.class, catsRefVo.getID_CatsReferral()));
}

/**
 * WDEV-13890
 * Function used to retrieve inpatient episode (first non-RIE episode for the patient).
 */
public MyOrderInpatEpisVo getInpatientEpisodeForPatient(PatientRefVo patient)
{
    if (patient == null || !patient.getID_PatientIsNotNull())
        return null;
    String query = "select ie from InpatientEpisode as ie left join ie.pasEvent as pe left join pe.patient as p where p.id = :PatId and ie.isRIE is null";
    return MyOrderInpatEpisVoAssembler.create((InpatientEpisode) getDomainFactory().findFirst(query, "PatId", patient.getID_Patient()));
}

/**
 * WDEV-13890
 * Function used to retrieve the default emergency department (if there is only one it will
 * retrieve it, else it will not).
 */
public LocationLiteVoCollection getDefaultEmergencyDepartment(LocationRefVo hospital)
{
    if (hospital == null)
        return null;
    Location parentLocation = (Location) getDomainFactory().getDomainObject(Location.class, hospital.getID_Location());
    return getDefaultEmergencyDepartment(parentLocation, null);
}

/**
 * WDEV-13890
 * Function used to retrieve the default emergency department (if there is only one, if more
 * than one or none exist - null is returned). It will recursively iterate through tree and
 * search the emergency department. If it founds more than one, then it will return null.
 */
private LocationLiteVoCollection getDefaultEmergencyDepartment(Location parentLocation, LocationLiteVoCollection emergencyDepartments)
{
    if (emergencyDepartments == null)
        emergencyDepartments = new LocationLiteVoCollection();
    if (emergencyDepartments.size() > 1)
        return null;
    Iterator<?> iterator = parentLocation.getLocations().iterator();
    while (iterator.hasNext())
    {
        Location location = (Location) iterator.next();
        // Collect active, non-virtual, non-RIE A&E departments.
        if (getDomLookup(LocationType.ANE).equals(location.getType()) && Boolean.TRUE.equals(location.isIsActive()) && Boolean.FALSE.equals(location.isIsVirtual()) && location.getIsRIE() ==
null)
        {
            emergencyDepartments.add(LocationLiteVoAssembler.create(location));
        }
        // Recurse into children; the recursive return value is deliberately ignored because
        // the shared collection accumulates the hits.
        // NOTE(review): the size()>1 early null-return only takes effect on the NEXT recursive
        // call, so the top-level call can still return a collection with more than one entry -
        // confirm callers treat "more than one" as "no default".
        getDefaultEmergencyDepartment(location, emergencyDepartments);
    }
    return emergencyDepartments;
}

/**
 * WDEV-13999
 * Function used to determine if a patient has arrived for a referral (counts appointments
 * in SEEN or ARRIVED status).
 */
public Boolean hasArrived(CatsReferralRefVo referral)
{
    if (referral == null || !referral.getID_CatsReferralIsNotNull())
        return Boolean.FALSE;
    StringBuilder query = new StringBuilder();
    query.append("SELECT COUNT(appointments.id) FROM CatsReferral AS referral LEFT JOIN referral.appointments AS appointments LEFT JOIN appointments.apptStatus AS status");
    query.append(" WHERE referral.id = :REF_ID AND (status.id = :SEEN OR status.id = :ARRIVED)");
    ArrayList<String> paramNames = new ArrayList<String>();
    ArrayList<Object> paramValues = new ArrayList<Object>();
    paramNames.add("REF_ID");
    paramValues.add(referral.getID_CatsReferral());
    paramNames.add("SEEN");
    paramValues.add(Status_Reason.SEEN.getID());
    paramNames.add("ARRIVED");
    paramValues.add(Status_Reason.ARRIVAL.getID());
    List<?> result = getDomainFactory().find(query.toString(), paramNames, paramValues);
    Long count = (Long) result.iterator().next();
    if (count.longValue() > 0)
        return Boolean.TRUE;
    return Boolean.FALSE;
}

/**
 * WDEV-13999
 * Function used to retrieve the Lead Consultant for a referral speciality.
 */
public HcpLiteVo getLeadConsultantForReferral(CatsReferralRefVo referral)
{
    if (referral == null || !referral.getID_CatsReferralIsNotNull())
        return null;
    StringBuilder query = new StringBuilder();
    query.append("SELECT hcp FROM CatsReferral AS referral LEFT JOIN referral.referralDetails AS details LEFT JOIN details.service AS service, ");
    query.append(" SpecialtyLeadConsultant AS spec LEFT JOIN spec.leadConsultant AS hcp WHERE ");
    query.append(" spec.specialty.id = service.specialty.id AND referral.id = :REF_ID");
    return HcpLiteVoAssembler.create((Hcp) getDomainFactory().findFirst(query.toString(), "REF_ID", referral.getID_CatsReferral()));
}

/** *
Function used to retrieve the HCP corresponding to a member of staff */
public HcpLiteVo getHCPbyMoS(MemberOfStaffRefVo memberOfStaff)
{
    if (memberOfStaff == null || !memberOfStaff.getID_MemberOfStaffIsNotNull())
        return null;
    StringBuilder query = new StringBuilder();
    query.append("select hcp from MemberOfStaff as mos left join mos.hcp as hcp");
    query.append(" where mos.id = :MOS_ID");
    return HcpLiteVoAssembler.create((Hcp) getDomainFactory().findFirst(query.toString(), "MOS_ID", memberOfStaff.getID_MemberOfStaff()));
}

/**
 * Function used to retrieve the questions and answers for an investigation
 * within a given order session; null when either ref is null.
 */
public InvestigationQuestionAnswerVoCollection getInvestigationQuestionsAnswers(InvestigationRefVo investigation, OcsOrderSessionRefVo orderDetails)
{
    if (investigation == null || orderDetails == null)
        return null;
    StringBuilder query = new StringBuilder();
    query.append("SELECT questions FROM OcsOrderSession AS ocsorder LEFT JOIN ocsorder.clinicalInfo AS clinInfo LEFT JOIN clinInfo.categoryQuestionAnswers AS category ");
    query.append(" LEFT JOIN category.serviceQuestionAnswers AS serviceQA LEFT JOIN serviceQA.investigationQuestionAnswers AS questions ");
    query.append(" LEFT JOIN questions.investigation AS investigation");
    query.append(" WHERE ocsorder.id = :ID_ORDERDETAILS AND investigation.id = :ID_INVESTIGATION");
    ArrayList<String> paramNames = new ArrayList<String>();
    ArrayList<Object> paramValues = new ArrayList<Object>();
    paramNames.add("ID_ORDERDETAILS");
    paramValues.add(orderDetails.getID_OcsOrderSession());
    paramNames.add("ID_INVESTIGATION");
    paramValues.add(investigation.getID_Investigation());
    return InvestigationQuestionAnswerVoAssembler.createInvestigationQuestionAnswerVoCollectionFromInvestigationQuestionAnswer(getDomainFactory().find(query.toString(), paramNames, paramValues));
}

/** Function used to retrieve the OrderInvestigationAppointment for the investigation */
public OrderInvWithStatusApptVo
getOrderInvestigationAppointment(OrderInvestigationRefVo investigation)
{
    if (investigation == null || !investigation.getID_OrderInvestigationIsNotNull())
        return null;
    StringBuilder query = new StringBuilder();
    query.append("SELECT orderInv FROM OrderInvAppt AS orderInv LEFT JOIN orderInv.orderInvestigation AS investigation ");
    query.append(" WHERE investigation.id = :ID_INV");
    return OrderInvWithStatusApptVoAssembler.create((OrderInvAppt) getDomainFactory().findFirst(query.toString(), "ID_INV", investigation.getID_OrderInvestigation()));
}

/** Function used to retrieve the OrderInvestigation Booking VO */
public OrderInvestigationBookingWithStatusVo getOrderInvestigationBooking(OrderInvestigationRefVo investigation)
{
    if (investigation == null || !investigation.getID_OrderInvestigationIsNotNull())
        return null;
    return OrderInvestigationBookingWithStatusVoAssembler.create((OrderInvestigation) getDomainFactory().getDomainObject(OrderInvestigation.class, investigation.getID_OrderInvestigation()));
}

/**
 * Function used to update OrderInvestigationAppointment (with new Investigation - amended one).
 * Saves both domain objects and raises an external order-update event.
 * @throws CodingRuntimeException when either argument is null or the appointment VO is not validated
 * @throws StaleObjectException on optimistic-lock failure during save
 */
public void updateOrderInvestigationAppointment(OrderInvWithStatusApptVo orderInvestigationAppointment, OrderInvestigationLiteVo investigationToAmend) throws StaleObjectException
{
    if (orderInvestigationAppointment == null)
        throw new CodingRuntimeException("Can not save null OderInvestigationAppointemnt");
    if (orderInvestigationAppointment.isValidated() == false)
        throw new CodingRuntimeException("Can not save not validated OrderInvestigationAppointment");
    if (investigationToAmend == null)
        throw new CodingRuntimeException("Can not save null OrderInvestigation");

    // Get domain object to save
    OrderInvAppt domOrderInvestigationAppointment = OrderInvWithStatusApptVoAssembler.extractOrderInvAppt(getDomainFactory(), orderInvestigationAppointment);
    OrderInvestigation domOrderInvestigation = OrderInvestigationLiteVoAssembler.extractOrderInvestigation(getDomainFactory(),
investigationToAmend);
    getDomainFactory().save(domOrderInvestigationAppointment);
    getDomainFactory().save(domOrderInvestigation);

    // Notify the external order interface that the appointment/investigation pair changed.
    OCSExternalEvents impl = (OCSExternalEvents) getDomainImpl(OCSExternalEventsImpl.class);
    impl.generateOrderUpdateEvent(new Booking_AppointmentRefVo(domOrderInvestigationAppointment.getAppointment().getId(), domOrderInvestigationAppointment.getAppointment().getVersion()), new OrderInvestigationRefVo(domOrderInvestigationAppointment.getOrderInvestigation().getId(), domOrderInvestigationAppointment.getOrderInvestigation().getVersion()));
    return;
}

/** Function used to cancel an appointment */
public Booking_AppointmentVo cancelAppointment(Booking_AppointmentVo appointment, ActionRequestType requestType, String requestSource) throws StaleObjectException
{
    SessionAdmin impl = (SessionAdmin) getDomainImpl(SessionAdminImpl.class);
    return impl.cancelAppt(appointment, requestType, requestSource);
}

/**
 * Flags a CaTS referral as having cancelled appointments awaiting review.
 * NOTE(review): the exception message names updateCatsReferralAdditionalInvStatus - looks
 * copy/pasted from that method.
 */
public void updateCatsReferralCancelStatus(CatsReferralRefVo referral) throws StaleObjectException
{
    if(referral == null || referral.getID_CatsReferral() == null)
        throw new CodingRuntimeException("catsReferral is null or id not provided in method updateCatsReferralAdditionalInvStatus");
    DomainFactory factory = getDomainFactory();
    CatsReferral doCatsReferral = (CatsReferral) factory.getDomainObject(referral);
    doCatsReferral.setHasCancelledApptsForReview(true);
    factory.save(doCatsReferral);
}

/** Function used to retrieve Questions & Answers for the service of the amended investigation */
public GeneralQuestionAnswerVoCollection getServiceQuestionsAnswers(OcsOrderSessionRefVo orderDetails, ServiceRefVo service)
{
    if (orderDetails == null || service == null)
        return null;
    StringBuilder query = new StringBuilder();
    query.append("SELECT questions ");
    query.append(" FROM OcsOrderSession AS ocsOrder LEFT JOIN ocsOrder.clinicalInfo AS clinInfo LEFT JOIN clinInfo.categoryQuestionAnswers AS categoryQA ");
    query.append(" LEFT JOIN categoryQA.serviceQuestionAnswers AS serviceQA LEFT JOIN serviceQA.service AS service LEFT JOIN serviceQA.serviceQuestionAnswers AS questions");
    query.append(" WHERE ocsOrder.id = :ID_ORDER AND service.id = :ID_SERVICE");
    ArrayList<String> paramNames = new ArrayList<String>();
    ArrayList<Object> paramValues = new ArrayList<Object>();
    paramNames.add("ID_ORDER");
    paramValues.add(orderDetails.getID_OcsOrderSession());
    paramNames.add("ID_SERVICE");
    paramValues.add(service.getID_Service());
    return GeneralQuestionAnswerVoAssembler.createGeneralQuestionAnswerVoCollectionFromGeneralQuestionAnswer(getDomainFactory().find(query.toString(), paramNames, paramValues));
}

/** Function used to retrieve Question & Answers for the category for amended investigation */
public GeneralQuestionAnswerVoCollection getCategoryQuestionsAnswers(OcsOrderSessionRefVo orderDetails, Category category)
{
    if (orderDetails == null || category == null)
        return null;
    StringBuilder query = new StringBuilder();
    query.append("SELECT questions ");
    query.append(" FROM OcsOrderSession AS ocsOrder LEFT JOIN ocsOrder.clinicalInfo AS clinInfo LEFT JOIN clinInfo.categoryQuestionAnswers AS categoryQA ");
    query.append(" LEFT JOIN categoryQA.categoryQuestionAnswers AS questions LEFT JOIN categoryQA.category AS category ");
    query.append(" WHERE ocsOrder.id = :ID_ORDER AND category.id = :ID_CATEGORY ");
    ArrayList<String> paramNames = new ArrayList<String>();
    ArrayList<Object> paramValues = new ArrayList<Object>();
    paramNames.add("ID_ORDER");
    paramValues.add(orderDetails.getID_OcsOrderSession());
    paramNames.add("ID_CATEGORY");
    paramValues.add(category.getID());
    return GeneralQuestionAnswerVoAssembler.createGeneralQuestionAnswerVoCollectionFromGeneralQuestionAnswer(getDomainFactory().find(query.toString(), paramNames, paramValues));
}

/** Loads an order investigation for status amendment. */
public OrderInvestigationForStatusChangeVo getOrderInvestigation(OrderInvestigationRefVo investigation)
{
    if (investigation == null)
        throw new CodingRuntimeException("Error - Investigation to amend must not be null");
    return OrderInvestigationForStatusChangeVoAssembler.create((OrderInvestigation) getDomainFactory().getDomainObject(OrderInvestigation.class, investigation.getID_OrderInvestigation()));
}

/**
 * Persists a status change on an order investigation and returns the lite VO.
 * @throws StaleObjectException on optimistic-lock failure during save
 */
public OrderInvestigationLiteVo updateInvestigationStatus(OrderInvestigationForStatusChangeVo investigation) throws StaleObjectException
{
    if (investigation == null)
        throw new CodingRuntimeException("Error - Can not update the status for a null investigation");
    OrderInvestigation domInvestigation = OrderInvestigationForStatusChangeVoAssembler.extractOrderInvestigation(getDomainFactory(), investigation);
    getDomainFactory().save(domInvestigation);
    return OrderInvestigationLiteVoAssembler.create(domInvestigation);
}

/** Returns the most recently created non-RIE summary clinical information for a patient, or null. */
public SummaryClinicalInformationVo getLatestSummaryClinicalInformation(PatientRefVo patient)
{
    if (patient == null || patient.getID_Patient() == null)
        return null;
    String query = "SELECT summary FROM SummaryClinicalInformation AS summary LEFT JOIN summary.patient AS patient WHERE summary.isRIE is null AND patient.id = :ID_PATIENT ORDER BY summary.systemInformation.creationDateTime DESC";
    return SummaryClinicalInformationVoAssembler.create((SummaryClinicalInformation) getDomainFactory().findFirst(query, "ID_PATIENT", patient.getID_Patient()));
}

//WDEV-15899
/** Loads a location as a LocSite short VO (assumes the id refers to a LocSite). */
public LocSiteShortVo getTypeOfLocSite(LocationRefVo locRef)
{
    if (locRef == null)
        throw new CodingRuntimeException("Location not provided");
    DomainFactory factory = getDomainFactory();
    LocSite doLocation = (LocSite) factory.getDomainObject(LocSite.class, locRef.getID_Location());
    return LocSiteShortVoAssembler.create(doLocation);
}

//WDEV-15899
/** Lists active responsible ED clinicians whose surname starts with the given name, sorted. */
public HcpLiteVoCollection listResponsibleEdClinicians(String hcpName)
{
    if (hcpName == null || hcpName.length() == 0)
        return null;
    String query = "SELECT hcp FROM Hcp AS hcp LEFT JOIN hcp.mos AS mos WHERE hcp.isActive = 1 AND hcp.isAResponsibleEDClinician = 1 AND mos.name.upperSurname LIKE :HCP_NAME";
    return
HcpLiteVoAssembler.createHcpLiteVoCollectionFromHcp(getDomainFactory().find(query, "HCP_NAME", hcpName.toUpperCase() + "%")).sort(); } public CareContextShortVo getCareContextByPasEvent(PASEventRefVo pasEvent) { if (pasEvent == null || pasEvent.getID_PASEvent() == null) return null; StringBuilder query = new StringBuilder("SELECT context FROM CareContext AS context LEFT JOIN context.pasEvent AS pas WHERE pas.id = :PAS_EVENT_ID"); ArrayList<String> paramNames = new ArrayList<String>(); ArrayList<Object> paramValues = new ArrayList<Object>(); paramNames.add("PAS_EVENT_ID"); paramValues.add(pasEvent.getID_PASEvent()); return CareContextShortVoAssembler.create((CareContext) getDomainFactory().findFirst(query.toString(), paramNames, paramValues)); } public DFTCollectionTypesConfigVo getDFTCollectionTypes() { String query = " from DFTCollectionTypesConfig as dftConfig where (dftConfig.isRIE is null )"; return DFTCollectionTypesConfigVoAssembler.create((DFTCollectionTypesConfig) getDomainFactory().findFirst(query)); } public OrderPriorityCollection listOrderPriority(Boolean requiresPathologyMappings, Boolean requiresRadiologyMappings, Boolean requiresClinicalMappings) { OrderPriorityCollection results = new OrderPriorityCollection(); OrderPriorityCollection resultsPathology = new OrderPriorityCollection(); OrderPriorityCollection resultsRadiology = new OrderPriorityCollection(); OrderPriorityCollection resultsClinical = new OrderPriorityCollection(); String radiologySystem = "Radiology System"; String pathologySystem = "Pathology System"; String clinicalMapping = "Clinical Mapping"; StringBuilder query = new StringBuilder("SELECT orderPriority.id FROM LookupInstance AS orderPriority LEFT JOIN orderPriority.type AS lookupType LEFT JOIN orderPriority.mappings AS mappings"); query.append(" WHERE lookupType.id = :ORDER_PRIORITY_TYPE AND orderPriority.active = 1"); if (Boolean.TRUE.equals(requiresPathologyMappings)) { StringBuilder queryPathology = new 
StringBuilder(query.toString()); queryPathology.append(" AND mappings.extSystem = :PATHOLOGY_SYSTEM ORDER BY orderPriority.id"); List<?> pathologyPriorities = getDomainFactory().find(queryPathology.toString(), new String[] {"ORDER_PRIORITY_TYPE", "PATHOLOGY_SYSTEM"}, new Object[] {OrderPriority.TYPE_ID, pathologySystem}); for (int i = 0; i < pathologyPriorities.size(); i++) resultsPathology.add(LookupHelper.getOrderPriorityInstance(getLookupService(), (Integer) pathologyPriorities.get(i))); if (Boolean.FALSE.equals(requiresRadiologyMappings) && Boolean.FALSE.equals(requiresClinicalMappings)) return resultsPathology; } if (Boolean.TRUE.equals(requiresRadiologyMappings)) { StringBuilder queryRadiology = new StringBuilder(query.toString()); queryRadiology.append(" AND mappings.extSystem = :RADIOLOGY_SYSTEM ORDER BY orderPriority.id"); List<?> radiologyPriorities = getDomainFactory().find(queryRadiology.toString(), new String[] {"ORDER_PRIORITY_TYPE", "RADIOLOGY_SYSTEM"}, new Object[] {OrderPriority.TYPE_ID, radiologySystem}); for (int i = 0; i < radiologyPriorities.size(); i++) resultsRadiology.add(LookupHelper.getOrderPriorityInstance(getLookupService(), (Integer) radiologyPriorities.get(i))); if (Boolean.FALSE.equals(requiresPathologyMappings) && Boolean.FALSE.equals(requiresClinicalMappings)) return resultsRadiology; } if (Boolean.TRUE.equals(requiresClinicalMappings)) { StringBuilder queryClinical = new StringBuilder(query.toString()); queryClinical.append(" AND mappings.extSystem = :CLINICAL_SYSTEM ORDER BY orderPriority.id"); List<?> clinicalPriorities = getDomainFactory().find(queryClinical.toString(), new String[] {"ORDER_PRIORITY_TYPE", "CLINICAL_SYSTEM"}, new Object[] {OrderPriority.TYPE_ID, clinicalMapping}); for (int i = 0; i < clinicalPriorities.size(); i++) resultsClinical.add(LookupHelper.getOrderPriorityInstance(getLookupService(), (Integer) clinicalPriorities.get(i))); if (Boolean.FALSE.equals(requiresPathologyMappings) && 
Boolean.FALSE.equals(requiresRadiologyMappings)) return resultsClinical; } if (Boolean.TRUE.equals(requiresPathologyMappings)) { for (int i = 0; i < resultsPathology.size(); i++) { OrderPriority orderPriority = resultsPathology.get(i); if ((Boolean.FALSE.equals(requiresRadiologyMappings) || resultsRadiology.contains(orderPriority)) && (Boolean.FALSE.equals(requiresClinicalMappings) || resultsClinical.contains(orderPriority))) results.add(orderPriority); } } else { for (int i = 0; i < resultsRadiology.size(); i++) { OrderPriority orderPriority = resultsRadiology.get(i); if (Boolean.TRUE.equals(requiresClinicalMappings) || resultsClinical.contains(orderPriority)) results.add(orderPriority); } } return results; } public PasEventShortVo getLatestPasEvent(PatientRefVo patient) { if (patient == null) return null; StringBuilder query = new StringBuilder("SELECT pasEv FROM PASEvent AS pasEv LEFT JOIN pasEv.patient AS pat WHERE "); query.append(" pat.id = :PATIENT_ID ORDER BY pasEv.systemInformation.creationDateTime DESC"); ArrayList<String> paramNames = new ArrayList<String>(); ArrayList<Object> paramValues = new ArrayList<Object>(); paramNames.add("PATIENT_ID"); paramValues.add(patient.getID_Patient()); return PasEventShortVoAssembler.create((PASEvent) getDomainFactory().findFirst(query.toString(), paramNames, paramValues)); } //wdev-17823 public OrderCategory getOrderCategoryByMap() { DomainFactory factory = getDomainFactory(); StringBuffer hql = new StringBuffer(); hql.append("select lookInst from Lookup as look left join look.instances as lookInst left join lookInst.mappings as mappings where (look.id = 1161035 and mappings.extSystem = 'MAXIMS' and mappings.extCode like 'ALIAS_DEFAULT' and lookInst.active = 1)"); List<?> list = factory.find(hql.toString()); if (list!=null && list.size()>0) { LookupInstance doLookInst=(LookupInstance)list.get(0); ims.ocrr.vo.lookups.OrderCategory voLookup = new 
ims.ocrr.vo.lookups.OrderCategory(doLookInst.getId(),doLookInst.getText(),doLookInst.isActive(),null,doLookInst.getImage(),doLookInst.getColor()); return voLookup; } return null; } }
agpl-3.0
open-health-hub/openMAXIMS
openmaxims_workspace/SpinalInjuries/src/ims/spinalinjuries/forms/opdvisitdetails/AccessLogic.java
2313
//############################################################################# //# # //# Copyright (C) <2014> <IMS MAXIMS> # //# # //# This program is free software: you can redistribute it and/or modify # //# it under the terms of the GNU Affero General Public License as # //# published by the Free Software Foundation, either version 3 of the # //# License, or (at your option) any later version. # //# # //# This program is distributed in the hope that it will be useful, # //# but WITHOUT ANY WARRANTY; without even the implied warranty of # //# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # //# GNU Affero General Public License for more details. # //# # //# You should have received a copy of the GNU Affero General Public License # //# along with this program. If not, see <http://www.gnu.org/licenses/>. # //# # //############################################################################# //#EOH // This code was generated by Neil McAnaspie using IMS Development Environment (version 1.52 build 2497.19681) // Copyright (C) 1995-2006 IMS MAXIMS plc. All rights reserved. package ims.spinalinjuries.forms.opdvisitdetails; import ims.core.vo.lookups.ContextType; import java.io.Serializable; public final class AccessLogic extends BaseAccessLogic implements Serializable { private static final long serialVersionUID = 1L; public boolean isAccessible() { if(form.getGlobalContext().Core.getPatientShortIsNotNull() && form.getGlobalContext().Core.getCurrentCareContextIsNotNull() && form.getGlobalContext().Core.getCurrentCareContext().getContext().equals(ContextType.OUTPATIENT)) return true; return false; } public boolean isReadOnly() { if(super.isReadOnly()) return true; // TODO: Add your conditions here. return false; } }
agpl-3.0
open-health-hub/openMAXIMS
openmaxims_workspace/ValueObjects/src/ims/core/vo/SupportNetworkServicesVo.java
13813
//#############################################################################
//#                                                                           #
//#  Copyright (C) <2014>  <IMS MAXIMS>                                       #
//#                                                                           #
//#  This program is free software: you can redistribute it and/or modify     #
//#  it under the terms of the GNU Affero General Public License as           #
//#  published by the Free Software Foundation, either version 3 of the       #
//#  License, or (at your option) any later version.                          #
//#                                                                           #
//#  This program is distributed in the hope that it will be useful,          #
//#  but WITHOUT ANY WARRANTY; without even the implied warranty of           #
//#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the            #
//#  GNU Affero General Public License for more details.                      #
//#                                                                           #
//#  You should have received a copy of the GNU Affero General Public License #
//#  along with this program.  If not, see <http://www.gnu.org/licenses/>.    #
//#                                                                           #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file

package ims.core.vo;

/**
 * Linked to core.clinical.SupportNetworkServices business object (ID: 1003100011).
 *
 * Generated value object: carries support-network-service data between layers.
 * All mutators clear the isValidated flag; isBusy guards against infinite
 * recursion through cyclic VO graphs in isValidated/validate/clone.
 */
public class SupportNetworkServicesVo extends ims.core.clinical.vo.SupportNetworkServicesRefVo implements ims.vo.ImsCloneable, Comparable
{
	private static final long serialVersionUID = 1L;

	public SupportNetworkServicesVo()
	{
	}
	public SupportNetworkServicesVo(Integer id, int version)
	{
		super(id, version);
	}
	// Bean constructor without a bean map: nested VOs are built standalone.
	public SupportNetworkServicesVo(ims.core.vo.beans.SupportNetworkServicesVoBean bean)
	{
		this.id = bean.getId();
		this.version = bean.getVersion();
		this.locationservice = bean.getLocationService() == null ? null : bean.getLocationService().buildVo();
		this.frequencyofservice = bean.getFrequencyOfService();
		this.comments = bean.getComments();
		this.iscurrentlyactive = bean.getIsCurrentlyActive();
		this.sysinfo = bean.getSysInfo() == null ? null : bean.getSysInfo().buildSystemInformation();
		this.suppliedservice = bean.getSuppliedService() == null ? null : bean.getSuppliedService().buildVo();
		this.service = bean.getService() == null ? null : bean.getService().buildVo();
		this.authoringinfo = bean.getAuthoringInfo() == null ? null : bean.getAuthoringInfo().buildVo();
	}
	// Map-aware populate: nested VOs are resolved through the shared bean map so
	// object identity is preserved across the VO graph.
	public void populate(ims.vo.ValueObjectBeanMap map, ims.core.vo.beans.SupportNetworkServicesVoBean bean)
	{
		this.id = bean.getId();
		this.version = bean.getVersion();
		this.locationservice = bean.getLocationService() == null ? null : bean.getLocationService().buildVo(map);
		this.frequencyofservice = bean.getFrequencyOfService();
		this.comments = bean.getComments();
		this.iscurrentlyactive = bean.getIsCurrentlyActive();
		this.sysinfo = bean.getSysInfo() == null ? null : bean.getSysInfo().buildSystemInformation();
		this.suppliedservice = bean.getSuppliedService() == null ? null : bean.getSuppliedService().buildVo(map);
		this.service = bean.getService() == null ? null : bean.getService().buildVo(map);
		this.authoringinfo = bean.getAuthoringInfo() == null ? null : bean.getAuthoringInfo().buildVo(map);
	}
	public ims.vo.ValueObjectBean getBean()
	{
		return this.getBean(new ims.vo.ValueObjectBeanMap());
	}
	// Returns the bean for this VO, reusing one already present in the map to
	// keep cyclic graphs consistent.
	public ims.vo.ValueObjectBean getBean(ims.vo.ValueObjectBeanMap map)
	{
		ims.core.vo.beans.SupportNetworkServicesVoBean bean = null;
		if(map != null)
			bean = (ims.core.vo.beans.SupportNetworkServicesVoBean)map.getValueObjectBean(this);
		if (bean == null)
		{
			bean = new ims.core.vo.beans.SupportNetworkServicesVoBean();
			map.addValueObjectBean(this, bean);
			bean.populate(map, this);
		}
		return bean;
	}
	// Reflective accessor used by the framework; field names are matched case-insensitively.
	public Object getFieldValueByFieldName(String fieldName)
	{
		if(fieldName == null)
			throw new ims.framework.exceptions.CodingRuntimeException("Invalid field name");
		fieldName = fieldName.toUpperCase();
		if(fieldName.equals("LOCATIONSERVICE"))
			return getLocationService();
		if(fieldName.equals("FREQUENCYOFSERVICE"))
			return getFrequencyOfService();
		if(fieldName.equals("COMMENTS"))
			return getComments();
		if(fieldName.equals("ISCURRENTLYACTIVE"))
			return getIsCurrentlyActive();
		if(fieldName.equals("SYSINFO"))
			return getSysInfo();
		if(fieldName.equals("SUPPLIEDSERVICE"))
			return getSuppliedService();
		if(fieldName.equals("SERVICE"))
			return getService();
		if(fieldName.equals("AUTHORINGINFO"))
			return getAuthoringInfo();
		return super.getFieldValueByFieldName(fieldName);
	}
	// --- Generated accessors: every setter invalidates the validation flag. ---
	public boolean getLocationServiceIsNotNull()
	{
		return this.locationservice != null;
	}
	public ims.core.vo.LocationServiceVo getLocationService()
	{
		return this.locationservice;
	}
	public void setLocationService(ims.core.vo.LocationServiceVo value)
	{
		this.isValidated = false;
		this.locationservice = value;
	}
	public boolean getFrequencyOfServiceIsNotNull()
	{
		return this.frequencyofservice != null;
	}
	public String getFrequencyOfService()
	{
		return this.frequencyofservice;
	}
	public static int getFrequencyOfServiceMaxLength()
	{
		return 255;
	}
	public void setFrequencyOfService(String value)
	{
		this.isValidated = false;
		this.frequencyofservice = value;
	}
	public boolean getCommentsIsNotNull()
	{
		return this.comments != null;
	}
	public String getComments()
	{
		return this.comments;
	}
	public static int getCommentsMaxLength()
	{
		return 255;
	}
	public void setComments(String value)
	{
		this.isValidated = false;
		this.comments = value;
	}
	public boolean getIsCurrentlyActiveIsNotNull()
	{
		return this.iscurrentlyactive != null;
	}
	public Boolean getIsCurrentlyActive()
	{
		return this.iscurrentlyactive;
	}
	public void setIsCurrentlyActive(Boolean value)
	{
		this.isValidated = false;
		this.iscurrentlyactive = value;
	}
	public boolean getSysInfoIsNotNull()
	{
		return this.sysinfo != null;
	}
	public ims.vo.SystemInformation getSysInfo()
	{
		return this.sysinfo;
	}
	public void setSysInfo(ims.vo.SystemInformation value)
	{
		this.isValidated = false;
		this.sysinfo = value;
	}
	public boolean getSuppliedServiceIsNotNull()
	{
		return this.suppliedservice != null;
	}
	public ims.core.vo.SuppNetworkServiceProfessionDetailVo getSuppliedService()
	{
		return this.suppliedservice;
	}
	public void setSuppliedService(ims.core.vo.SuppNetworkServiceProfessionDetailVo value)
	{
		this.isValidated = false;
		this.suppliedservice = value;
	}
	public boolean getServiceIsNotNull()
	{
		return this.service != null;
	}
	public ims.core.vo.ServiceLiteVo getService()
	{
		return this.service;
	}
	public void setService(ims.core.vo.ServiceLiteVo value)
	{
		this.isValidated = false;
		this.service = value;
	}
	public boolean getAuthoringInfoIsNotNull()
	{
		return this.authoringinfo != null;
	}
	public ims.core.vo.AuthoringInformationVo getAuthoringInfo()
	{
		return this.authoringinfo;
	}
	public void setAuthoringInfo(ims.core.vo.AuthoringInformationVo value)
	{
		this.isValidated = false;
		this.authoringinfo = value;
	}
	// Recursively checks the cached validation state of this VO and nested VOs.
	// isBusy short-circuits cycles (a VO currently being checked reports true).
	public boolean isValidated()
	{
		if(this.isBusy)
			return true;
		this.isBusy = true;

		if(!this.isValidated)
		{
			this.isBusy = false;
			return false;
		}
		if(this.locationservice != null)
		{
			if(!this.locationservice.isValidated())
			{
				this.isBusy = false;
				return false;
			}
		}
		if(this.suppliedservice != null)
		{
			if(!this.suppliedservice.isValidated())
			{
				this.isBusy = false;
				return false;
			}
		}
		if(this.service != null)
		{
			if(!this.service.isValidated())
			{
				this.isBusy = false;
				return false;
			}
		}
		if(this.authoringinfo != null)
		{
			if(!this.authoringinfo.isValidated())
			{
				this.isBusy = false;
				return false;
			}
		}

		this.isBusy = false;
		return true;
	}
	public String[] validate()
	{
		return validate(null);
	}
	// Validates field lengths, mandatory authoring info, and nested VOs.
	// Returns null when valid, otherwise the accumulated error messages.
	public String[] validate(String[] existingErrors)
	{
		if(this.isBusy)
			return null;
		this.isBusy = true;

		java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
		if(existingErrors != null)
		{
			for(int x = 0; x < existingErrors.length; x++)
			{
				listOfErrors.add(existingErrors[x]);
			}
		}
		if(this.locationservice != null)
		{
			String[] listOfOtherErrors = this.locationservice.validate();
			if(listOfOtherErrors != null)
			{
				for(int x = 0; x < listOfOtherErrors.length; x++)
				{
					listOfErrors.add(listOfOtherErrors[x]);
				}
			}
		}
		if(this.frequencyofservice != null)
			if(this.frequencyofservice.length() > 255)
				listOfErrors.add("The length of the field [frequencyofservice] in the value object [ims.core.vo.SupportNetworkServicesVo] is too big. It should be less or equal to 255");
		if(this.comments != null)
			if(this.comments.length() > 255)
				listOfErrors.add("The length of the field [comments] in the value object [ims.core.vo.SupportNetworkServicesVo] is too big. It should be less or equal to 255");
		if(this.suppliedservice != null)
		{
			String[] listOfOtherErrors = this.suppliedservice.validate();
			if(listOfOtherErrors != null)
			{
				for(int x = 0; x < listOfOtherErrors.length; x++)
				{
					listOfErrors.add(listOfOtherErrors[x]);
				}
			}
		}
		if(this.service != null)
		{
			String[] listOfOtherErrors = this.service.validate();
			if(listOfOtherErrors != null)
			{
				for(int x = 0; x < listOfOtherErrors.length; x++)
				{
					listOfErrors.add(listOfOtherErrors[x]);
				}
			}
		}
		// Authoring info is the only mandatory field on this VO.
		if(this.authoringinfo == null)
			listOfErrors.add("Authoring HCP and Date/Time are mandatory");
		if(this.authoringinfo != null)
		{
			String[] listOfOtherErrors = this.authoringinfo.validate();
			if(listOfOtherErrors != null)
			{
				for(int x = 0; x < listOfOtherErrors.length; x++)
				{
					listOfErrors.add(listOfOtherErrors[x]);
				}
			}
		}
		int errorCount = listOfErrors.size();
		if(errorCount == 0)
		{
			this.isBusy = false;
			this.isValidated = true;
			return null;
		}
		String[] result = new String[errorCount];
		for(int x = 0; x < errorCount; x++)
			result[x] = (String)listOfErrors.get(x);
		this.isBusy = false;
		this.isValidated = false;
		return result;
	}
	public void clearIDAndVersion()
	{
		this.id = null;
		this.version = 0;
	}
	// Deep clone; returns this while busy to break clone cycles.
	public Object clone()
	{
		if(this.isBusy)
			return this;
		this.isBusy = true;

		SupportNetworkServicesVo clone = new SupportNetworkServicesVo(this.id, this.version);

		if(this.locationservice == null)
			clone.locationservice = null;
		else
			clone.locationservice = (ims.core.vo.LocationServiceVo)this.locationservice.clone();
		clone.frequencyofservice = this.frequencyofservice;
		clone.comments = this.comments;
		clone.iscurrentlyactive = this.iscurrentlyactive;
		if(this.sysinfo == null)
			clone.sysinfo = null;
		else
			clone.sysinfo = (ims.vo.SystemInformation)this.sysinfo.clone();
		if(this.suppliedservice == null)
			clone.suppliedservice = null;
		else
			clone.suppliedservice = (ims.core.vo.SuppNetworkServiceProfessionDetailVo)this.suppliedservice.clone();
		if(this.service == null)
			clone.service = null;
		else
			clone.service = (ims.core.vo.ServiceLiteVo)this.service.clone();
		if(this.authoringinfo == null)
			clone.authoringinfo = null;
		else
			clone.authoringinfo = (ims.core.vo.AuthoringInformationVo)this.authoringinfo.clone();

		clone.isValidated = this.isValidated;

		this.isBusy = false;
		return clone;
	}
	public int compareTo(Object obj)
	{
		return compareTo(obj, true);
	}
	// Orders by SysInfo only; nulls sort after non-nulls. caseInsensitive is unused
	// (kept for the generated framework signature).
	public int compareTo(Object obj, boolean caseInsensitive)
	{
		if (obj == null)
		{
			return -1;
		}
		if(caseInsensitive); // this is to avoid eclipse warning only.
		if (!(SupportNetworkServicesVo.class.isAssignableFrom(obj.getClass())))
		{
			throw new ClassCastException("A SupportNetworkServicesVo object cannot be compared an Object of type " + obj.getClass().getName());
		}
		SupportNetworkServicesVo compareObj = (SupportNetworkServicesVo)obj;
		int retVal = 0;
		if (retVal == 0)
		{
			if(this.getSysInfo() == null && compareObj.getSysInfo() != null)
				return -1;
			if(this.getSysInfo() != null && compareObj.getSysInfo() == null)
				return 1;
			if(this.getSysInfo() != null && compareObj.getSysInfo() != null)
				retVal = this.getSysInfo().compareTo(compareObj.getSysInfo());
		}
		return retVal;
	}
	public synchronized static int generateValueObjectUniqueID()
	{
		return ims.vo.ValueObject.generateUniqueID();
	}
	// Number of the 8 VO fields currently holding a non-null value.
	public int countFieldsWithValue()
	{
		int count = 0;
		if(this.locationservice != null)
			count++;
		if(this.frequencyofservice != null)
			count++;
		if(this.comments != null)
			count++;
		if(this.iscurrentlyactive != null)
			count++;
		if(this.sysinfo != null)
			count++;
		if(this.suppliedservice != null)
			count++;
		if(this.service != null)
			count++;
		if(this.authoringinfo != null)
			count++;
		return count;
	}
	public int countValueObjectFields()
	{
		return 8;
	}
	protected ims.core.vo.LocationServiceVo locationservice;
	protected String frequencyofservice;
	protected String comments;
	protected Boolean iscurrentlyactive;
	protected ims.vo.SystemInformation sysinfo;
	protected ims.core.vo.SuppNetworkServiceProfessionDetailVo suppliedservice;
	protected ims.core.vo.ServiceLiteVo service;
	protected ims.core.vo.AuthoringInformationVo authoringinfo;
	// Transient validation/cycle-guard state (not part of the VO's data).
	private boolean isValidated = false;
	private boolean isBusy = false;
}
agpl-3.0
open-health-hub/openMAXIMS
openmaxims_workspace/Clinical/src/ims/clinical/forms/patientproceduresdialog/GenForm.java
37653
//############################################################################# //# # //# Copyright (C) <2014> <IMS MAXIMS> # //# # //# This program is free software: you can redistribute it and/or modify # //# it under the terms of the GNU Affero General Public License as # //# published by the Free Software Foundation, either version 3 of the # //# License, or (at your option) any later version. # //# # //# This program is distributed in the hope that it will be useful, # //# but WITHOUT ANY WARRANTY; without even the implied warranty of # //# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # //# GNU Affero General Public License for more details. # //# # //# You should have received a copy of the GNU Affero General Public License # //# along with this program. If not, see <http://www.gnu.org/licenses/>. # //# # //############################################################################# //#EOH // This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751) // Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved. 
// WARNING: DO NOT MODIFY the content of this file package ims.clinical.forms.patientproceduresdialog; import ims.framework.*; import ims.framework.controls.*; import ims.framework.enumerations.*; import ims.framework.utils.RuntimeAnchoring; public class GenForm extends FormBridge { private static final long serialVersionUID = 1L; public boolean canProvideData(IReportSeed[] reportSeeds) { return new ReportDataProvider(reportSeeds, this.getFormReportFields()).canProvideData(); } public boolean hasData(IReportSeed[] reportSeeds) { return new ReportDataProvider(reportSeeds, this.getFormReportFields()).hasData(); } public IReportField[] getData(IReportSeed[] reportSeeds) { return getData(reportSeeds, false); } public IReportField[] getData(IReportSeed[] reportSeeds, boolean excludeNulls) { return new ReportDataProvider(reportSeeds, this.getFormReportFields(), excludeNulls).getData(); } public static class grpFilterRadioButton extends RadioButtonBridge { private static final long serialVersionUID = 1L; protected void setContext(Integer startTabIndex, ims.framework.utils.SizeInfo designSize, ims.framework.utils.SizeInfo runtimeSize) { if(startTabIndex == null) throw new RuntimeException("Invalid startTabIndex "); RuntimeAnchoring anchoringHelper1 = new RuntimeAnchoring(designSize, runtimeSize, 208, 40, 88, 16, ims.framework.enumerations.ControlAnchoring.TOPLEFT); control.addButton(0, anchoringHelper1.getX(), anchoringHelper1.getY(), anchoringHelper1.getWidth(), "All Procedures", startTabIndex.intValue() + 2); RuntimeAnchoring anchoringHelper2 = new RuntimeAnchoring(designSize, runtimeSize, 80, 40, 104, 16, ims.framework.enumerations.ControlAnchoring.TOPLEFT); control.addButton(1, anchoringHelper2.getX(), anchoringHelper2.getY(), anchoringHelper2.getWidth(), "Specialty Hotlist", startTabIndex.intValue() + 1); } public void setText(grpFilterEnumeration option, String value) { if(option != null && option.id >= 0 && value != null) control.setText(option.id, value); } public 
grpFilterEnumeration getValue() { switch (super.control.getValue()) { case -1: return grpFilterEnumeration.None; case 0: return grpFilterEnumeration.rdoAll; case 1: return grpFilterEnumeration.rdoSpecialty; } return null; } public void setValue(grpFilterEnumeration value) { if(value != null) super.control.setValue(value.id); else super.control.setValue(grpFilterEnumeration.None.id); } public boolean isEnabled(grpFilterEnumeration option) { return super.control.isEnabled(option.id); } public void setEnabled(grpFilterEnumeration option, boolean value) { super.control.setEnabled(option.id, value); } public boolean isVisible(grpFilterEnumeration option) { return super.control.isVisible(option.id); } public void setVisible(grpFilterEnumeration option, boolean value) { super.control.setVisible(option.id, value); } public void setVisible(boolean value) { super.control.setVisible(value); } public void setEnabled(boolean value) { super.control.setEnabled(value); } } public static class grpFilterEnumeration implements java.io.Serializable { private static final long serialVersionUID = 1L; public static grpFilterEnumeration None = new grpFilterEnumeration(-1); public static grpFilterEnumeration rdoAll = new grpFilterEnumeration(0); public static grpFilterEnumeration rdoSpecialty = new grpFilterEnumeration(1); private grpFilterEnumeration(int id) { this.id = id; } public boolean equals(Object o) { return this.id == ((grpFilterEnumeration)o).id; } private int id; } private void validateContext(ims.framework.Context context) { if(context == null) return; if(!context.isValidContextType(ims.core.vo.CareContextShortVo.class)) throw new ims.framework.exceptions.CodingRuntimeException("The type 'ims.core.vo.CareContextShortVo' of the global context variable 'Core.CurrentCareContext' is not supported."); if(!context.isValidContextType(ims.core.vo.EpisodeofCareShortVo.class)) throw new ims.framework.exceptions.CodingRuntimeException("The type 'ims.core.vo.EpisodeofCareShortVo' of the 
global context variable 'Core.EpisodeofCareShort' is not supported."); } private void validateMandatoryContext(Context context) { if(new ims.framework.ContextVariable("Core.EpisodeofCareShort", "_cvp_Core.EpisodeofCareShort").getValueIsNull(context)) throw new ims.framework.exceptions.FormMandatoryContextMissingException("The required context data 'Core.EpisodeofCareShort' is not available."); } public boolean supportsRecordedInError() { return false; } public ims.vo.ValueObject getRecordedInErrorVo() { return null; } protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, Context context) throws Exception { setContext(loader, form, appForm, factory, context, Boolean.FALSE, new Integer(0), null, null, new Integer(0)); } protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, Context context, Boolean skipContextValidation) throws Exception { setContext(loader, form, appForm, factory, context, skipContextValidation, new Integer(0), null, null, new Integer(0)); } protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, ims.framework.Context context, Boolean skipContextValidation, Integer startControlID, ims.framework.utils.SizeInfo runtimeSize, ims.framework.Control control, Integer startTabIndex) throws Exception { if(loader == null); // this is to avoid eclipse warning only. if(factory == null); // this is to avoid eclipse warning only. if(runtimeSize == null); // this is to avoid eclipse warning only. if(appForm == null) throw new RuntimeException("Invalid application form"); if(startControlID == null) throw new RuntimeException("Invalid startControlID"); if(control == null); // this is to avoid eclipse warning only. 
if(startTabIndex == null) throw new RuntimeException("Invalid startTabIndex"); this.context = context; this.componentIdentifier = startControlID.toString(); this.formInfo = form.getFormInfo(); this.globalContext = new GlobalContext(context); if(skipContextValidation == null || !skipContextValidation.booleanValue()) { validateContext(context); validateMandatoryContext(context); } super.setContext(form); ims.framework.utils.SizeInfo designSize = new ims.framework.utils.SizeInfo(808, 192); if(runtimeSize == null) runtimeSize = designSize; form.setWidth(runtimeSize.getWidth()); form.setHeight(runtimeSize.getHeight()); super.setFormReferences(FormReferencesFlyweightFactory.getInstance().create(Forms.class)); super.setImageReferences(ImageReferencesFlyweightFactory.getInstance().create(Images.class)); super.setGlobalContext(ContextBridgeFlyweightFactory.getInstance().create(GlobalContextBridge.class, context, false)); super.setLocalContext(new LocalContext(context, form.getFormInfo(), componentIdentifier)); // Panel Controls RuntimeAnchoring anchoringHelper3 = new RuntimeAnchoring(designSize, runtimeSize, 8, 0, 792, 32, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT); super.addControl(factory.getControl(Panel.class, new Object[] { control, new Integer(startControlID.intValue() + 1000), new Integer(anchoringHelper3.getX()), new Integer(anchoringHelper3.getY()), new Integer(anchoringHelper3.getWidth()), new Integer(anchoringHelper3.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT,"Procedures", new Integer(1), ""})); // Custom Controls ims.framework.CustomComponent instance1 = factory.getEmptyCustomComponent(); RuntimeAnchoring anchoringHelper4 = new RuntimeAnchoring(designSize, runtimeSize, 16, 64, 776, 64, ims.framework.enumerations.ControlAnchoring.TOPLEFT); ims.framework.FormUiLogic m_ccProcedureForm = loader.loadComponent(123133, appForm, startControlID * 10 + 1000, anchoringHelper4.getSize(), 
instance1, startTabIndex.intValue() + 3, skipContextValidation); //ims.framework.Control m_ccProcedureControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1001), new Integer(16), new Integer(64), new Integer(776), new Integer(64), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, new Integer(startTabIndex.intValue() + 3), m_ccProcedureForm, instance1 } ); ims.framework.Control m_ccProcedureControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1002), new Integer(anchoringHelper4.getX()), new Integer(anchoringHelper4.getY()), new Integer(anchoringHelper4.getWidth()), new Integer(anchoringHelper4.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, new Integer(startTabIndex.intValue() + 3), m_ccProcedureForm, instance1, Boolean.FALSE } ); super.addControl(m_ccProcedureControl); Menu[] menus1 = m_ccProcedureForm.getForm().getRegisteredMenus(); for(int x = 0; x < menus1.length; x++) { form.registerMenu(menus1[x]); } // Label Controls RuntimeAnchoring anchoringHelper5 = new RuntimeAnchoring(designSize, runtimeSize, 24, 136, 53, 17, ims.framework.enumerations.ControlAnchoring.BOTTOMLEFT); super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1003), new Integer(anchoringHelper5.getX()), new Integer(anchoringHelper5.getY()), new Integer(anchoringHelper5.getWidth()), new Integer(anchoringHelper5.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.BOTTOMLEFT, "Primary:", new Integer(1), null, new Integer(0)})); RuntimeAnchoring anchoringHelper6 = new RuntimeAnchoring(designSize, runtimeSize, 24, 40, 41, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT); super.addControl(factory.getControl(Label.class, new Object[] { control, new 
Integer(startControlID.intValue() + 1004), new Integer(anchoringHelper6.getX()), new Integer(anchoringHelper6.getY()), new Integer(anchoringHelper6.getWidth()), new Integer(anchoringHelper6.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Filter: ", new Integer(1), null, new Integer(0)})); // Button Controls RuntimeAnchoring anchoringHelper7 = new RuntimeAnchoring(designSize, runtimeSize, 632, 160, 75, 23, ims.framework.enumerations.ControlAnchoring.BOTTOMRIGHT); super.addControl(factory.getControl(Button.class, new Object[] { control, new Integer(startControlID.intValue() + 1005), new Integer(anchoringHelper7.getX()), new Integer(anchoringHelper7.getY()), new Integer(anchoringHelper7.getWidth()), new Integer(anchoringHelper7.getHeight()), new Integer(startTabIndex.intValue() + 1006), ControlState.HIDDEN, ControlState.ENABLED, ims.framework.enumerations.ControlAnchoring.BOTTOMRIGHT, "Save", Boolean.TRUE, null, Boolean.FALSE, Boolean.TRUE, Boolean.FALSE, null, ims.framework.utils.Color.Default, ims.framework.utils.Color.Default })); RuntimeAnchoring anchoringHelper8 = new RuntimeAnchoring(designSize, runtimeSize, 712, 160, 75, 23, ims.framework.enumerations.ControlAnchoring.BOTTOMRIGHT); super.addControl(factory.getControl(Button.class, new Object[] { control, new Integer(startControlID.intValue() + 1006), new Integer(anchoringHelper8.getX()), new Integer(anchoringHelper8.getY()), new Integer(anchoringHelper8.getWidth()), new Integer(anchoringHelper8.getHeight()), new Integer(startTabIndex.intValue() + 1008), ControlState.HIDDEN, ControlState.ENABLED, ims.framework.enumerations.ControlAnchoring.BOTTOMRIGHT, "Cancel", Boolean.FALSE, null, Boolean.FALSE, Boolean.FALSE, Boolean.FALSE, null, ims.framework.utils.Color.Default, ims.framework.utils.Color.Default })); // CheckBox Controls RuntimeAnchoring anchoringHelper9 = new RuntimeAnchoring(designSize, runtimeSize, 128, 136, 16, 16, 
ims.framework.enumerations.ControlAnchoring.TOPLEFT); super.addControl(factory.getControl(CheckBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1007), new Integer(anchoringHelper9.getX()), new Integer(anchoringHelper9.getY()), new Integer(anchoringHelper9.getWidth()), new Integer(anchoringHelper9.getHeight()), new Integer(startTabIndex.intValue() + 1004), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPLEFT ,"", Boolean.FALSE, null})); // RadioButton Controls RadioButton tmpgrpFilter = (RadioButton)factory.getControl(RadioButton.class, new Object[] { control, new Integer(startControlID.intValue() + 1008), new Integer(0), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT,Boolean.TRUE}); super.addControl(tmpgrpFilter); grpFilterRadioButton grpFilter = (grpFilterRadioButton)RadioButtonBridgeFlyweightFactory.getInstance().createRadioButtonBridge(grpFilterRadioButton.class, tmpgrpFilter); grpFilter.setContext(startTabIndex, designSize, runtimeSize); super.addRadioButton(grpFilter); } public Forms getForms() { return (Forms)super.getFormReferences(); } public Images getImages() { return (Images)super.getImageReferences(); } public ims.clinical.forms.clinicalcoding.IComponent ccProcedure() { return (ims.clinical.forms.clinicalcoding.IComponent)((ims.framework.cn.controls.CustomComponent)super.getControl(1)).getLogic(); } public void setccProcedureValueChangedEvent(ims.framework.delegates.ValueChanged delegate) { ((CustomComponent)super.getControl(1)).setValueChangedEvent(delegate); } public void setccProcedureVisible(boolean value) { ((ims.framework.Control)super.getControl(1)).setVisible(value); } public boolean isccProcedureVisible() { return ((ims.framework.Control)super.getControl(1)).isVisible(); } public void setccProcedureEnabled(boolean value) { ((ims.framework.Control)super.getControl(1)).setEnabled(value); } public boolean isccProcedureEnabled() { 
return ((ims.framework.Control)super.getControl(1)).isEnabled(); } public Button btnSave() { return (Button)super.getControl(4); } public Button btnCancel() { return (Button)super.getControl(5); } public CheckBox chkPrimary() { return (CheckBox)super.getControl(6); } public grpFilterRadioButton grpFilter() { return (grpFilterRadioButton)super.getRadioButton(0); } public static class Forms implements java.io.Serializable { private static final long serialVersionUID = 1L; protected final class LocalFormName extends FormName { private static final long serialVersionUID = 1L; private LocalFormName(int name) { super(name); } } private Forms() { Core = new CoreForms(); } public final class CoreForms implements java.io.Serializable { private static final long serialVersionUID = 1L; private CoreForms() { TaxonomySearch = new LocalFormName(104102); YesNoDialog = new LocalFormName(102107); } public final FormName TaxonomySearch; public final FormName YesNoDialog; } public CoreForms Core; } public static class Images implements java.io.Serializable { private static final long serialVersionUID = 1L; private final class ImageHelper extends ims.framework.utils.ImagePath { private static final long serialVersionUID = 1L; private ImageHelper(int id, String path, Integer width, Integer height) { super(id, path, width, height); } } private Images() { Core = new CoreImages(); } public final class CoreImages implements java.io.Serializable { private static final long serialVersionUID = 1L; private CoreImages() { Search = new ImageHelper(102120, "Images/Core/bin.gif", new Integer(15), new Integer(15)); } public final ims.framework.utils.Image Search; } public final CoreImages Core; } public GlobalContext getGlobalContext() { return this.globalContext; } public static class GlobalContextBridge extends ContextBridge { private static final long serialVersionUID = 1L; } public LocalContext getLocalContext() { return (LocalContext)super.getLocalCtx(); } public class LocalContext extends 
ContextBridge { private static final long serialVersionUID = 1L; public LocalContext(Context context, ims.framework.FormInfo formInfo, String componentIdentifier) { super.setContext(context); String prefix = formInfo.getLocalVariablesPrefix(); cxl_voPatientDiagnosis = new ims.framework.ContextVariable("voPatientDiagnosis", prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voPatientDiagnosis_" + componentIdentifier + ""); cxl_voContact = new ims.framework.ContextVariable("voContact", prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier + ""); cxl_voTaxonomyMap = new ims.framework.ContextVariable("voTaxonomyMap", prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voTaxonomyMap_" + componentIdentifier + ""); cxl_voDiagnosis = new ims.framework.ContextVariable("voDiagnosis", prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voDiagnosis_" + componentIdentifier + ""); } public boolean getvoPatientDiagnosisIsNotNull() { return !cxl_voPatientDiagnosis.getValueIsNull(context); } public ims.core.vo.PatientDiagnosisVo getvoPatientDiagnosis() { return (ims.core.vo.PatientDiagnosisVo)cxl_voPatientDiagnosis.getValue(context); } public void setvoPatientDiagnosis(ims.core.vo.PatientDiagnosisVo value) { cxl_voPatientDiagnosis.setValue(context, value); } private ims.framework.ContextVariable cxl_voPatientDiagnosis = null; public boolean getvoContactIsNotNull() { return !cxl_voContact.getValueIsNull(context); } public ims.core.vo.ClinicalContactVo getvoContact() { return (ims.core.vo.ClinicalContactVo)cxl_voContact.getValue(context); } public void setvoContact(ims.core.vo.ClinicalContactVo value) { cxl_voContact.setValue(context, value); } private ims.framework.ContextVariable cxl_voContact = null; public boolean getvoTaxonomyMapIsNotNull() { return !cxl_voTaxonomyMap.getValueIsNull(context); } public ims.core.vo.TaxonomyMap getvoTaxonomyMap() { return 
(ims.core.vo.TaxonomyMap)cxl_voTaxonomyMap.getValue(context); } public void setvoTaxonomyMap(ims.core.vo.TaxonomyMap value) { cxl_voTaxonomyMap.setValue(context, value); } private ims.framework.ContextVariable cxl_voTaxonomyMap = null; public boolean getvoDiagnosisIsNotNull() { return !cxl_voDiagnosis.getValueIsNull(context); } public ims.core.vo.DiagnosisVo getvoDiagnosis() { return (ims.core.vo.DiagnosisVo)cxl_voDiagnosis.getValue(context); } public void setvoDiagnosis(ims.core.vo.DiagnosisVo value) { cxl_voDiagnosis.setValue(context, value); } private ims.framework.ContextVariable cxl_voDiagnosis = null; } private IReportField[] getFormReportFields() { if(this.context == null) return null; if(this.reportFields == null) this.reportFields = new ReportFields(this.context, this.formInfo, this.componentIdentifier).getReportFields(); return this.reportFields; } private class ReportFields { public ReportFields(Context context, ims.framework.FormInfo formInfo, String componentIdentifier) { this.context = context; this.formInfo = formInfo; this.componentIdentifier = componentIdentifier; } public IReportField[] getReportFields() { String prefix = formInfo.getLocalVariablesPrefix(); IReportField[] fields = new IReportField[103]; fields[0] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-ID", "ID_Patient"); fields[1] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-SEX", "Sex"); fields[2] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-DOB", "Dob"); fields[3] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-DOD", "Dod"); fields[4] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-RELIGION", "Religion"); fields[5] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-ISACTIVE", "IsActive"); fields[6] = new ims.framework.ReportField(this.context, 
"_cvp_Core.PatientShort", "BO-1001100000-ETHNICORIGIN", "EthnicOrigin"); fields[7] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-MARITALSTATUS", "MaritalStatus"); fields[8] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-SCN", "SCN"); fields[9] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-SOURCEOFINFORMATION", "SourceOfInformation"); fields[10] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-TIMEOFDEATH", "TimeOfDeath"); fields[11] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-ISQUICKREGISTRATIONPATIENT", "IsQuickRegistrationPatient"); fields[12] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-CURRENTRESPONSIBLECONSULTANT", "CurrentResponsibleConsultant"); fields[13] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientFilter", "BO-1001100000-ID", "ID_Patient"); fields[14] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientFilter", "BO-1001100000-SEX", "Sex"); fields[15] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientFilter", "BO-1001100000-DOB", "Dob"); fields[16] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-ID", "ID_ClinicalContact"); fields[17] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-SPECIALTY", "Specialty"); fields[18] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-CONTACTTYPE", "ContactType"); fields[19] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-STARTDATETIME", "StartDateTime"); fields[20] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-ENDDATETIME", "EndDateTime"); fields[21] = new ims.framework.ReportField(this.context, 
"_cvp_Core.CurrentClinicalContact", "BO-1004100003-CARECONTEXT", "CareContext"); fields[22] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-ISCLINICALNOTECREATED", "IsClinicalNoteCreated"); fields[23] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-ID", "ID_Hcp"); fields[24] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-HCPTYPE", "HcpType"); fields[25] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-ISACTIVE", "IsActive"); fields[26] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-ISHCPARESPONSIBLEHCP", "IsHCPaResponsibleHCP"); fields[27] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-ISARESPONSIBLEEDCLINICIAN", "IsAResponsibleEDClinician"); fields[28] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-ID", "ID_CareContext"); fields[29] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-CONTEXT", "Context"); fields[30] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-ORDERINGHOSPITAL", "OrderingHospital"); fields[31] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-ESTIMATEDDISCHARGEDATE", "EstimatedDischargeDate"); fields[32] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-STARTDATETIME", "StartDateTime"); fields[33] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-ENDDATETIME", "EndDateTime"); fields[34] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-LOCATIONTYPE", "LocationType"); fields[35] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-RESPONSIBLEHCP", "ResponsibleHCP"); 
fields[36] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-ID", "ID_EpisodeOfCare"); fields[37] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-CARESPELL", "CareSpell"); fields[38] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-SPECIALTY", "Specialty"); fields[39] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-RELATIONSHIP", "Relationship"); fields[40] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-STARTDATE", "StartDate"); fields[41] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-ENDDATE", "EndDate"); fields[42] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-ID", "ID_ClinicalNotes"); fields[43] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-CLINICALNOTE", "ClinicalNote"); fields[44] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-NOTETYPE", "NoteType"); fields[45] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-DISCIPLINE", "Discipline"); fields[46] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-CLINICALCONTACT", "ClinicalContact"); fields[47] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-ISDERIVEDNOTE", "IsDerivedNote"); fields[48] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-FORREVIEW", "ForReview"); fields[49] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-FORREVIEWDISCIPLINE", "ForReviewDiscipline"); fields[50] = new ims.framework.ReportField(this.context, 
"_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-REVIEWINGDATETIME", "ReviewingDateTime"); fields[51] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-ISCORRECTED", "IsCorrected"); fields[52] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-ISTRANSCRIBED", "IsTranscribed"); fields[53] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-SOURCEOFNOTE", "SourceOfNote"); fields[54] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-RECORDINGDATETIME", "RecordingDateTime"); fields[55] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-INHOSPITALREPORT", "InHospitalReport"); fields[56] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-CARECONTEXT", "CareContext"); fields[57] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-NOTECLASSIFICATION", "NoteClassification"); fields[58] = new ims.framework.ReportField(this.context, "_cvp_STHK.AvailableBedsListFilter", "BO-1014100009-ID", "ID_BedSpaceState"); fields[59] = new ims.framework.ReportField(this.context, "_cvp_STHK.PendingEmergencyAdmissionsFilter", "BO-1014100011-ID", "ID_PendingEmergencyAdmission"); fields[60] = new ims.framework.ReportField(this.context, "_cvp_STHK.PendingEmergencyAdmissionsFilter", "BO-1014100011-ADMISSIONSTATUS", "AdmissionStatus"); fields[61] = new ims.framework.ReportField(this.context, "_cvp_STHK.PendingDischargesListFilter", "BO-1014100000-ID", "ID_InpatientEpisode"); fields[62] = new ims.framework.ReportField(this.context, "_cvp_STHK.PendingDischargesListFilter", "BO-1014100000-ESTDISCHARGEDATE", "EstDischargeDate"); fields[63] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-ID", "ID_ClinicalNotes"); 
fields[64] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-FORREVIEW", "ForReview"); fields[65] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-FORREVIEWDISCIPLINE", "ForReviewDiscipline"); fields[66] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-NOTECLASSIFICATION", "NoteClassification"); fields[67] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-CARECONTEXT", "CareContext"); fields[68] = new ims.framework.ReportField(this.context, "_cvp_Core.PasEvent", "BO-1014100003-ID", "ID_PASEvent"); fields[69] = new ims.framework.ReportField(this.context, "_cvp_Correspondence.CorrespondenceDetails", "BO-1052100001-ID", "ID_CorrespondenceDetails"); fields[70] = new ims.framework.ReportField(this.context, "_cvp_RefMan.CatsReferral", "BO-1004100035-ID", "ID_CatsReferral"); fields[71] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-ID", "ID_PatientProcedure"); fields[72] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-PROCEDUREDESCRIPTION", "ProcedureDescription"); fields[73] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-INFOSOURCE", "InfoSource"); fields[74] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-CARECONTEXT", "CareContext"); fields[75] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-EPISODEOFCARE", "EpisodeOfCare"); fields[76] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-ISPRIMARY", "IsPrimary"); 
fields[77] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-PROCEDURESTATUS", "ProcedureStatus"); fields[78] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-PROCEDUREOUTCOME", "ProcedureOutcome"); fields[79] = new ims.framework.ReportField(this.context, "_cv_Clinical.SelectedPatientProcedureFromSurgicalAudit", "BO-1003100017-CANCELLEDDATE", "CancelledDate"); fields[80] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-ID", "ID_PatientProcedure"); fields[81] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-PROCEDUREDESCRIPTION", "ProcedureDescription"); fields[82] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-INFOSOURCE", "InfoSource"); fields[83] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-CARECONTEXT", "CareContext"); fields[84] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-EPISODEOFCARE", "EpisodeOfCare"); fields[85] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-ISPRIMARY", "IsPrimary"); fields[86] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-PROCEDURESTATUS", "ProcedureStatus"); fields[87] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-PROCEDUREOUTCOME", "ProcedureOutcome"); fields[88] = new ims.framework.ReportField(this.context, "_cv_Clinical.PatientProcedureMarkedAsPrimary", "BO-1003100017-CANCELLEDDATE", "CancelledDate"); fields[89] = new ims.framework.ReportField(this.context, prefix + 
"_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier, "BO-1004100003-ID", "ID_ClinicalContact"); fields[90] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier, "BO-1004100003-SPECIALTY", "Specialty"); fields[91] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier, "BO-1004100003-SEENBY", "SeenBy"); fields[92] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier, "BO-1004100003-CONTACTTYPE", "ContactType"); fields[93] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier, "BO-1004100003-STARTDATETIME", "StartDateTime"); fields[94] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier, "BO-1004100003-ENDDATETIME", "EndDateTime"); fields[95] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier, "BO-1004100003-CARECONTEXT", "CareContext"); fields[96] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voContact_" + componentIdentifier, "BO-1004100003-ISCLINICALNOTECREATED", "IsClinicalNoteCreated"); fields[97] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voTaxonomyMap_" + componentIdentifier, "BO-1003100010-ID", "ID_TaxonomyMap"); fields[98] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voTaxonomyMap_" + componentIdentifier, "BO-1003100010-TAXONOMYNAME", 
"TaxonomyName"); fields[99] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voTaxonomyMap_" + componentIdentifier, "BO-1003100010-TAXONOMYCODE", "TaxonomyCode"); fields[100] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voDiagnosis_" + componentIdentifier, "BO-1003100025-ID", "ID_Diagnosis"); fields[101] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voDiagnosis_" + componentIdentifier, "BO-1003100025-DIAGNOSISNAME", "DiagnosisName"); fields[102] = new ims.framework.ReportField(this.context, prefix + "_lv_Clinical.PatientProceduresDialog.__internal_x_context__voDiagnosis_" + componentIdentifier, "BO-1003100025-ISACTIVE", "IsActive"); return fields; } protected Context context = null; protected ims.framework.FormInfo formInfo; protected String componentIdentifier; } public String getUniqueIdentifier() { return null; } private Context context = null; private ims.framework.FormInfo formInfo = null; private String componentIdentifier; private GlobalContext globalContext = null; private IReportField[] reportFields = null; }
agpl-3.0
gpickin/Lucee4
lucee-java/lucee-core/src/lucee/runtime/functions/arrays/ArrayMin.java
1429
/** * * Copyright (c) 2014, the Railo Company Ltd. All rights reserved. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library. If not, see <http://www.gnu.org/licenses/>. * **/ /** * Implements the CFML Function arraymin */ package lucee.runtime.functions.arrays; import lucee.runtime.PageContext; import lucee.runtime.exp.PageException; import lucee.runtime.functions.BIF; import lucee.runtime.op.Caster; import lucee.runtime.type.Array; import lucee.runtime.type.util.ArrayUtil; public final class ArrayMin extends BIF { private static final long serialVersionUID = 7640801691378949924L; public static double call(PageContext pc , Array array) throws PageException { return ArrayUtil.min(array); } @Override public Object invoke(PageContext pc, Object[] args) throws PageException { return call(pc,Caster.toArray(args[0])); } }
lgpl-2.1
paulklinkenberg/Lucee4
lucee-java/lucee-core/src/lucee/commons/collection/SyncMap.java
4578
/**
 *
 * Copyright (c) 2014, the Railo Company Ltd. All rights reserved.
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library. If not, see <http://www.gnu.org/licenses/>.
 *
 **/
package lucee.commons.collection;

import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Collection;
import java.util.Map;
import java.util.Set;

import lucee.runtime.exp.PageException;
import lucee.runtime.type.util.StructUtil;

/**
 * A synchronized wrapper around a {@link MapPro}, mirroring the structure of
 * {@code java.util.Collections.synchronizedMap}: every operation is guarded by
 * a single shared {@code mutex}, and the collection views returned by
 * {@link #keySet()}, {@link #entrySet()} and {@link #values()} share that same
 * mutex so iteration can be externally synchronized on this map.
 */
public class SyncMap<K,V> implements MapPro<K,V>, Serializable {

	private static final long serialVersionUID = 1978198479659022715L;

	private final MapPro<K,V> m; // Backing Map that actually stores the entries
	// Object on which to synchronize; either this map itself or a mutex
	// supplied by an enclosing synchronized wrapper (package-private ctor).
	final Serializable mutex;

	// Creates a synchronized wrapper around a fresh, empty HashMapPro.
	public SyncMap() {
		this(null);
	}

	// Wraps the given map; a null argument falls back to a new HashMapPro.
	// The wrapper itself serves as the mutex.
	public SyncMap(MapPro<K,V> m) {
		if (m==null) this.m = new HashMapPro<K, V>();
		else this.m = m;
		mutex = this;
	}

	// Package-private: wrap with an externally supplied mutex (used when the
	// wrapper must share a lock with another synchronized view).
	SyncMap(MapPro<K,V> m, Serializable mutex) {
		this.m = m;
		this.mutex = mutex;
	}

	public int size() {
		synchronized (mutex) {return m.size();}
	}

	public boolean isEmpty() {
		synchronized (mutex) {return m.isEmpty();}
	}

	public boolean containsKey(Object key) {
		synchronized (mutex) {return m.containsKey(key);}
	}

	public boolean containsValue(Object value) {
		synchronized (mutex) {return m.containsValue(value);}
	}

	public V get(Object key) {
		synchronized (mutex) {return m.get(key);}
	}

	// "g" = get that throws PageException when the key is absent (MapPro contract).
	@Override
	public V g(K key) throws PageException {
		synchronized (mutex) {return m.g(key);}
	}

	// "g" with a default value returned when the key is absent.
	@Override
	public V g(K key, V defaultValue) {
		synchronized (mutex) {return m.g(key,defaultValue);}
	}

	// "r" = remove that throws PageException when the key is absent (MapPro contract).
	@Override
	public V r(K key) throws PageException {
		synchronized (mutex) {return m.r(key);}
	}

	// "r" with a default value returned when the key is absent.
	@Override
	public V r(K key, V defaultValue) {
		synchronized (mutex) {return m.r(key,defaultValue);}
	}

	public V put(K key, V value) {
		synchronized (mutex) {return m.put(key, value);}
	}

	public V remove(Object key) {
		synchronized (mutex) {return m.remove(key);}
	}

	public void putAll(Map<? extends K, ? extends V> map) {
		synchronized (mutex) {m.putAll(map);}
	}

	public void clear() {
		synchronized (mutex) {m.clear();}
	}

	// Lazily created synchronized views; transient so deserialization
	// recreates them on first use. Guarded by the same mutex.
	private transient Set<K> keySet = null;
	private transient Set<MapPro.Entry<K,V>> entrySet = null;
	private transient Collection<V> values = null;

	public Set<K> keySet() {
		synchronized (mutex) {
			if (keySet==null) keySet = new SyncSet<K>(m.keySet(), mutex);
			return keySet;
		}
	}

	public Set<Map.Entry<K,V>> entrySet() {
		synchronized (mutex) {
			if (entrySet==null) entrySet = new SyncSet<Map.Entry<K,V>>(m.entrySet(), mutex);
			return entrySet;
		}
	}

	public Collection<V> values() {
		synchronized (mutex) {
			if (values==null) values = new SyncCollection<V>(m.values(), mutex);
			return values;
		}
	}

	public boolean equals(Object o) {
		// Fast path avoids self-deadlock concerns and a redundant lock.
		if (this == o) return true;
		synchronized (mutex) {return m.equals(o);}
	}

	public int hashCode() {
		synchronized (mutex) {return m.hashCode();}
	}

	public String toString() {
		synchronized (mutex) {return m.toString();}
	}

	// Serialize under the lock so a consistent snapshot of the backing map is written.
	private void writeObject(ObjectOutputStream s) throws IOException {
		synchronized (mutex) {s.defaultWriteObject();}
	}

	// NOTE(review): unlike the other operations this is NOT synchronized;
	// presumably StructUtil.getType only inspects the map's class — confirm.
	public int getType() {
		return StructUtil.getType(m);
	}
}
lgpl-2.1
stoksey69/googleads-java-lib
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201408/CustomTargetingServiceInterfacegetCustomTargetingKeysByStatement.java
2939
package com.google.api.ads.dfp.jaxws.v201408;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB request wrapper for the {@code getCustomTargetingKeysByStatement}
 * operation of the DFP v201408 CustomTargetingService.
 *
 * <p>The operation gets a {@link CustomTargetingKeyPage} of
 * {@link CustomTargetingKey} objects that satisfy the given
 * {@link Statement#query}. The following PQL properties are supported for
 * filtering, each mapping to the like-named {@link CustomTargetingKey} field:
 * {@code id}, {@code name}, {@code displayName} and {@code type}.
 *
 * <p>Schema: element {@code getCustomTargetingKeysByStatement} containing a
 * single optional ({@code minOccurs="0"}) child {@code filterStatement} of type
 * {@code Statement} in namespace
 * {@code https://www.google.com/apis/ads/publisher/v201408}.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "filterStatement"
})
@XmlRootElement(name = "getCustomTargetingKeysByStatement")
public class CustomTargetingServiceInterfacegetCustomTargetingKeysByStatement {

    // Optional in the schema (minOccurs="0"), so this field may be null.
    protected Statement filterStatement;

    /**
     * Gets the value of the filterStatement property.
     *
     * @return
     *     possible object is
     *     {@link Statement }
     */
    public Statement getFilterStatement() {
        return filterStatement;
    }

    /**
     * Sets the value of the filterStatement property.
     *
     * @param value
     *     allowed object is
     *     {@link Statement }
     */
    public void setFilterStatement(Statement value) {
        this.filterStatement = value;
    }
}
apache-2.0
gabby2212/gs-collections
unit-tests/src/test/java/com/gs/collections/impl/lazy/parallel/bag/ParallelCollectBagTest.java
1637
/*
 * Copyright 2014 Goldman Sachs.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.gs.collections.impl.lazy.parallel.bag;

import com.gs.collections.api.bag.MutableBag;
import com.gs.collections.api.bag.ParallelBag;
import com.gs.collections.impl.bag.mutable.HashBag;

/**
 * Exercises a ParallelBag built from two chained collect() steps:
 * Integer -> String -> Integer, where the literal string "null" maps back to
 * a null element. The round trip keeps the elements' values so the expected
 * result can be computed with the equivalent serial pipeline.
 */
public class ParallelCollectBagTest extends ParallelBagTestCase
{
    @Override
    protected ParallelBag<Integer> classUnderTest()
    {
        // Bag in which element i occurs i times, for i = 1..4.
        return this.newWith(1, 2, 2, 3, 3, 3, 4, 4, 4, 4);
    }

    @Override
    protected ParallelBag<Integer> newWith(Integer... littleElements)
    {
        // Parallel pipeline under test: executorService/batchSize come from
        // the ParallelBagTestCase base class.
        return HashBag.newBagWith(littleElements)
                .asParallel(this.executorService, this.batchSize)
                .collect(String::valueOf)
                .collect(string -> "null".equals(string) ? null : Integer.valueOf(string));
    }

    @Override
    protected MutableBag<Integer> getExpectedWith(Integer... littleElements)
    {
        // Serial equivalent of the pipeline above; must apply the exact same
        // two collect() transformations for the comparison to be fair.
        return HashBag.newBagWith(littleElements)
                .collect(String::valueOf)
                .collect(string -> "null".equals(string) ? null : Integer.valueOf(string));
    }
}
apache-2.0
Pelumi/gs-collections
serialization-tests/src/test/java/com/gs/collections/impl/bag/immutable/primitive/ImmutableDoubleSingletonBagSerializationTest.java
1192
/*
 * Copyright 2013 Goldman Sachs.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.gs.collections.impl.bag.immutable.primitive;

import com.gs.collections.impl.test.Verify;
import org.junit.Test;

/**
 * Pins the Java serialized form of ImmutableDoubleSingletonBag so that
 * incompatible serialization changes are caught at build time.
 */
public class ImmutableDoubleSingletonBagSerializationTest
{
    @Test
    public void serializedForm()
    {
        // The base64 payload below is the golden serialized form
        // (serialVersionUID = 1L). It must stay byte-exact: any change to the
        // class's serialized shape is a breaking change and will fail here.
        Verify.assertSerializedForm(
                1L,
                "rO0ABXNyAEtjb20uZ3MuY29sbGVjdGlvbnMuaW1wbC5iYWcuaW1tdXRhYmxlLnByaW1pdGl2ZS5J\n"
                        + "bW11dGFibGVEb3VibGVTaW5nbGV0b25CYWcAAAAAAAAAAQIAAUQACGVsZW1lbnQxeHBAAAAAAAAA\n"
                        + "AA==",
                new ImmutableDoubleSingletonBag(2.0));
    }
}
apache-2.0
shyTNT/googleads-java-lib
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201502/BaseDynamicAllocationCreative.java
1077
package com.google.api.ads.dfp.jaxws.v201502;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSeeAlso;
import javax.xml.bind.annotation.XmlType;

/**
 * A base class for dynamic allocation creatives (DFP v201502 JAXB model).
 *
 * <p>Java class for the {@code BaseDynamicAllocationCreative} complex type:
 * an empty extension of {@code Creative} in namespace
 * {@code https://www.google.com/apis/ads/publisher/v201502}. It exists purely
 * as a common supertype for the concrete subclasses listed in
 * {@link XmlSeeAlso} so the JAXB context can resolve them during
 * (un)marshalling.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "BaseDynamicAllocationCreative")
@XmlSeeAlso({
    AdMobBackfillCreative.class,
    HasHtmlSnippetDynamicAllocationCreative.class
})
public abstract class BaseDynamicAllocationCreative
    extends Creative
{


}
apache-2.0
dongjoon-hyun/reef
lang/java/reef-runtime-multi/src/main/java/org/apache/reef/runtime/multi/utils/MultiRuntimeDefinitionSerializer.java
2824
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.reef.runtime.multi.utils;

import org.apache.avro.io.*;
import org.apache.avro.specific.SpecificDatumReader;
import org.apache.avro.specific.SpecificDatumWriter;
import org.apache.reef.runtime.multi.utils.avro.AvroMultiRuntimeDefinition;

import java.io.ByteArrayOutputStream;
import java.io.IOException;

/**
 * Serializer for MultiRuntimeDefinition: converts the Avro record to and from
 * its JSON-encoded string form.
 */
public final class MultiRuntimeDefinitionSerializer {
  private static final String CHARSET_NAME = "UTF-8";

  /**
   * Serializes MultiRuntimeDefinition.
   *
   * @param runtimeDefinition the Avro object to toString
   * @return Serialized avro string
   */
  public String toString(final AvroMultiRuntimeDefinition runtimeDefinition){
    final DatumWriter<AvroMultiRuntimeDefinition> writer =
            new SpecificDatumWriter<>(AvroMultiRuntimeDefinition.class);
    try (ByteArrayOutputStream buffer = new ByteArrayOutputStream()) {
      // JSON (rather than binary) encoding keeps the serialized form human readable.
      final JsonEncoder encoder = EncoderFactory.get().jsonEncoder(runtimeDefinition.getSchema(), buffer);
      writer.write(runtimeDefinition, encoder);
      encoder.flush();
      buffer.flush();
      return buffer.toString(CHARSET_NAME);
    } catch (final IOException e) {
      // In-memory serialization is not expected to fail; surface as unchecked.
      throw new RuntimeException(e);
    }
  }

  /**
   * Deserializes avro definition.
   *
   * @param serializedRuntimeDefinition serialized definition
   * @return Avro object
   * @throws IOException
   */
  public AvroMultiRuntimeDefinition fromString(final String serializedRuntimeDefinition) throws IOException{
    final JsonDecoder decoder =
            DecoderFactory.get().jsonDecoder(AvroMultiRuntimeDefinition.getClassSchema(), serializedRuntimeDefinition);
    final SpecificDatumReader<AvroMultiRuntimeDefinition> reader =
            new SpecificDatumReader<>(AvroMultiRuntimeDefinition.class);
    return reader.read(null, decoder);
  }
}
apache-2.0
esaunders/autopsy
Core/src/org/sleuthkit/autopsy/datamodel/DataSourceGrouping.java
1830
/*
 * Autopsy Forensic Browser
 *
 * Copyright 2018 Basis Technology Corp.
 * Contact: carrier <at> sleuthkit <dot> org
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.sleuthkit.autopsy.datamodel;

import java.util.Objects;
import org.sleuthkit.datamodel.DataSource;

/**
 * A top level UI grouping of Files, Views, Results, Tags
 * for 'Group by Data Source' view of the tree.
 *
 * Equality is defined by the wrapped data source's id, so two groupings that
 * wrap distinct DataSource instances referring to the same data source are
 * equal.
 */
public class DataSourceGrouping implements AutopsyVisitableItem {

    private final DataSource dataSource;

    public DataSourceGrouping(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    DataSource getDataSource() {
        return this.dataSource;
    }

    @Override
    public <T> T accept(AutopsyItemVisitor<T> visitor) {
        return visitor.visit(this);
    }

    @Override
    public boolean equals(Object obj) {
        // Cheap identity fast path (behavior-compatible with the id comparison below).
        if (obj == this) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final DataSourceGrouping other = (DataSourceGrouping) obj;
        return this.dataSource.getId() == other.getDataSource().getId();
    }

    @Override
    public int hashCode() {
        // BUG FIX: previously hashed the DataSource object itself
        // (Objects.hashCode(this.dataSource)) while equals() compares by
        // DataSource.getId(). Two equal groupings wrapping different
        // DataSource instances with the same id could then land in different
        // hash buckets, breaking HashMap/HashSet lookups. Hash the id so
        // hashCode is consistent with equals.
        int hash = 7;
        hash = 17 * hash + Objects.hashCode(this.dataSource.getId());
        return hash;
    }
}
apache-2.0
suyucs/presto
presto-main/src/main/java/com/facebook/presto/operator/scalar/TypeParameterContainer.java
958
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.operator.scalar;

import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Holds multiple {@code TypeParameter} annotations on a single parameter,
 * method or constructor — presumably the containing annotation that lets
 * {@code @TypeParameter} be repeated (TODO confirm {@code @Repeatable} wiring
 * on {@code TypeParameter}).
 *
 * Runtime retention so the function-registration machinery can read it
 * reflectively.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.PARAMETER, ElementType.METHOD, ElementType.CONSTRUCTOR})
public @interface TypeParameterContainer
{
    // The grouped TypeParameter annotations.
    TypeParameter[] value();
}
apache-2.0
bclozel/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/data/redis/LettuceConnectionConfiguration.java
6397
/*
 * Copyright 2012-2017 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.data.redis;

import java.net.UnknownHostException;
import java.util.Collections;
import java.util.List;

import io.lettuce.core.RedisClient;
import io.lettuce.core.resource.ClientResources;
import io.lettuce.core.resource.DefaultClientResources;
import org.apache.commons.pool2.impl.GenericObjectPoolConfig;

import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.autoconfigure.data.redis.RedisProperties.Pool;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.redis.connection.RedisClusterConfiguration;
import org.springframework.data.redis.connection.RedisConnectionFactory;
import org.springframework.data.redis.connection.RedisSentinelConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceClientConfiguration;
import org.springframework.data.redis.connection.lettuce.LettuceClientConfiguration.LettuceClientConfigurationBuilder;
import org.springframework.data.redis.connection.lettuce.LettuceConnectionFactory;
import org.springframework.data.redis.connection.lettuce.LettucePoolingClientConfiguration;
import org.springframework.util.StringUtils;

/**
 * Redis connection configuration using Lettuce.
 *
 * Only active when the Lettuce {@code RedisClient} is on the classpath; both
 * beans back off when the user defines their own ({@code @ConditionalOnMissingBean}).
 *
 * @author Mark Paluch
 * @author Andy Wilkinson
 */
@Configuration
@ConditionalOnClass(RedisClient.class)
class LettuceConnectionConfiguration extends RedisConnectionConfiguration {

	private final RedisProperties properties;

	// User-supplied customizers applied last, so they can override anything
	// derived from the properties; empty list when none are defined.
	private final List<LettuceClientConfigurationBuilderCustomizer> builderCustomizers;

	LettuceConnectionConfiguration(RedisProperties properties,
			ObjectProvider<RedisSentinelConfiguration> sentinelConfigurationProvider,
			ObjectProvider<RedisClusterConfiguration> clusterConfigurationProvider,
			ObjectProvider<List<LettuceClientConfigurationBuilderCustomizer>> builderCustomizers) {
		super(properties, sentinelConfigurationProvider, clusterConfigurationProvider);
		this.properties = properties;
		this.builderCustomizers = builderCustomizers
				.getIfAvailable(Collections::emptyList);
	}

	// Shared Lettuce event-loop/resource holder; shut down with the context.
	@Bean(destroyMethod = "shutdown")
	@ConditionalOnMissingBean(ClientResources.class)
	public DefaultClientResources lettuceClientResources() {
		return DefaultClientResources.create();
	}

	@Bean
	@ConditionalOnMissingBean(RedisConnectionFactory.class)
	public LettuceConnectionFactory redisConnectionFactory(
			ClientResources clientResources) throws UnknownHostException {
		LettuceClientConfiguration clientConfig = getLettuceClientConfiguration(
				clientResources, this.properties.getLettuce().getPool());
		return createLettuceConnectionFactory(clientConfig);
	}

	// Precedence: sentinel, then cluster, then standalone configuration.
	private LettuceConnectionFactory createLettuceConnectionFactory(
			LettuceClientConfiguration clientConfiguration) {
		if (getSentinelConfig() != null) {
			return new LettuceConnectionFactory(getSentinelConfig(),
					clientConfiguration);
		}
		if (getClusterConfiguration() != null) {
			return new LettuceConnectionFactory(getClusterConfiguration(),
					clientConfiguration);
		}
		return new LettuceConnectionFactory(getStandaloneConfig(),
				clientConfiguration);
	}

	// Builds the client configuration in a fixed order: pooled/plain builder,
	// property-driven settings, URL-driven SSL, client resources, then user
	// customizers (last word).
	private LettuceClientConfiguration getLettuceClientConfiguration(
			ClientResources clientResources, Pool pool) {
		LettuceClientConfigurationBuilder builder = createBuilder(pool);
		applyProperties(builder);
		if (StringUtils.hasText(this.properties.getUrl())) {
			customizeConfigurationFromUrl(builder);
		}
		builder.clientResources(clientResources);
		customize(builder);
		return builder.build();
	}

	// Pooling builder only when pool properties are present.
	private LettuceClientConfigurationBuilder createBuilder(Pool pool) {
		if (pool == null) {
			return LettuceClientConfiguration.builder();
		}
		return new PoolBuilderFactory().createBuilder(pool);
	}

	private LettuceClientConfigurationBuilder applyProperties(
			LettuceClientConfiguration.LettuceClientConfigurationBuilder builder) {
		if (this.properties.isSsl()) {
			builder.useSsl();
		}
		if (this.properties.getTimeout() != null) {
			builder.commandTimeout(this.properties.getTimeout());
		}
		if (this.properties.getLettuce() != null) {
			RedisProperties.Lettuce lettuce = this.properties.getLettuce();
			// A zero shutdown timeout is treated as "not set".
			if (lettuce.getShutdownTimeout() != null
					&& !lettuce.getShutdownTimeout().isZero()) {
				builder.shutdownTimeout(
						this.properties.getLettuce().getShutdownTimeout());
			}
		}
		return builder;
	}

	// Only the SSL flag is taken from the URL here; host/port/password are
	// handled elsewhere (by the parent's standalone/sentinel/cluster config).
	private void customizeConfigurationFromUrl(
			LettuceClientConfiguration.LettuceClientConfigurationBuilder builder) {
		ConnectionInfo connectionInfo = parseUrl(this.properties.getUrl());
		if (connectionInfo.isUseSsl()) {
			builder.useSsl();
		}
	}

	private void customize(
			LettuceClientConfiguration.LettuceClientConfigurationBuilder builder) {
		for (LettuceClientConfigurationBuilderCustomizer customizer : this.builderCustomizers) {
			customizer.customize(builder);
		}
	}

	/**
	 * Inner class to allow optional commons-pool2 dependency. commons-pool2
	 * types are only touched when this class is instantiated, i.e. when pool
	 * properties are actually configured.
	 */
	private static class PoolBuilderFactory {

		public LettuceClientConfigurationBuilder createBuilder(Pool properties) {
			return LettucePoolingClientConfiguration.builder()
					.poolConfig(getPoolConfig(properties));
		}

		private GenericObjectPoolConfig getPoolConfig(Pool properties) {
			GenericObjectPoolConfig config = new GenericObjectPoolConfig();
			config.setMaxTotal(properties.getMaxActive());
			config.setMaxIdle(properties.getMaxIdle());
			config.setMinIdle(properties.getMinIdle());
			if (properties.getMaxWait() != null) {
				config.setMaxWaitMillis(properties.getMaxWait().toMillis());
			}
			return config;
		}

	}

}
apache-2.0
djimondev/Sensors-ToolKit
JJIL-Android/src/jjil/android/RgbImageAndroid.java
2333
package jjil.android;

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Locale;

import jjil.core.RgbImage;

import android.content.Context;
import android.graphics.Bitmap;

/**
 * Conversion helpers between jjil's {@link RgbImage} and Android's
 * {@link Bitmap}, plus JPEG/PNG file export.
 */
public class RgbImageAndroid
{
    /**
     * Creates an RgbImage from an Android Bitmap by copying its pixels.
     * (Rewritten javadoc: the previous comment described an older
     * byte[]-JPEG overload that no longer matches this signature.)
     *
     * @param bmp source bitmap
     * @return an RgbImage of the same dimensions holding the bitmap's pixels
     */
    static public RgbImage toRgbImage(Bitmap bmp)
    {
        int nWidth = bmp.getWidth();
        int nHeight = bmp.getHeight();
        RgbImage rgb = new RgbImage(nWidth, nHeight);
        // Copy the whole bitmap into the RgbImage's backing int[] (row stride = width).
        bmp.getPixels(rgb.getData(), 0, nWidth, 0, 0, nWidth, nHeight);
        return rgb;
    }

    /**
     * Wraps an RgbImage's pixel data in a new ARGB_8888 Bitmap.
     *
     * @param rgb source image
     * @return a Bitmap of the same dimensions
     */
    static public Bitmap toBitmap(RgbImage rgb)
    {
        return Bitmap.createBitmap(
                rgb.getData(),
                rgb.getWidth(),
                rgb.getHeight(),
                Bitmap.Config.ARGB_8888);
    }

    /**
     * Writes an RgbImage to a file, choosing JPEG or PNG from the file
     * extension (anything unrecognized falls back to JPEG, as before).
     *
     * @param context Android context (currently unused; kept for API compatibility)
     * @param rgb image to write
     * @param nQuality compression quality passed to {@link Bitmap#compress}
     * @param szPath destination path; the file is created at this exact path
     * @throws IOException if the file cannot be written
     */
    static public void toFile(Context context, RgbImage rgb, int nQuality, String szPath)
        throws IOException
    {
        // Open the stream with the caller's exact path; only the extension
        // check below uses a lower-cased copy.
        OutputStream os = new FileOutputStream(szPath);
        try {
            Bitmap bmp = toBitmap(rgb);
            // BUG FIX: the extension check previously used the default-locale
            // toLowerCase(), which mishandles the dotted/dotless 'i' under
            // e.g. the Turkish locale ("IMG.JPG" would not match "jpg").
            // Use locale-independent lower-casing for the comparison, and do
            // not clobber the szPath parameter.
            String szLower = szPath.toLowerCase(Locale.ROOT);
            Bitmap.CompressFormat format;
            if (szLower.endsWith("png")) { //$NON-NLS-1$
                format = Bitmap.CompressFormat.PNG;
            }
            else {
                // "jpg"/"jpeg" and unknown extensions: JPEG, matching the
                // original default.
                format = Bitmap.CompressFormat.JPEG;
            }
            bmp.compress(format, nQuality, os);
        } finally {
            os.close();
        }
    }
}
apache-2.0
deroneriksson/incubator-systemml
src/test/java/org/apache/sysml/test/integration/functions/misc/ScalarToMatrixInLoopTest.java
1776
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysml.test.integration.functions.misc; import org.junit.Test; import org.apache.sysml.test.integration.AutomatedTestBase; import org.apache.sysml.test.integration.TestConfiguration; public class ScalarToMatrixInLoopTest extends AutomatedTestBase { private static final String TEST_DIR = "functions/misc/"; private static final String TEST_CLASS_DIR = TEST_DIR + ScalarToMatrixInLoopTest.class.getSimpleName() + "/"; private static final String TEST_NAME = "ScalarToMatrixInLoop"; @Override public void setUp() { addTestConfiguration(TEST_NAME, new TestConfiguration(TEST_CLASS_DIR, "TestScalarToMatrixInLoop", new String[] {})); } @Test public void testScalarToMatrixInLoop() { if(shouldSkipTest()) return; int rows = 5, cols = 5; TestConfiguration config = getTestConfiguration(TEST_NAME); config.addVariable("rows", rows); config.addVariable("cols", cols); loadTestConfiguration(config); runTest(); } }
apache-2.0
mohanvive/carbon-event-processing
components/event-processor/org.wso2.carbon.event.processor.template.deployer/src/main/java/org/wso2/carbon/event/processor/template/deployer/internal/ExecutionPlanDeployerConstants.java
913
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.wso2.carbon.event.processor.template.deployer.internal;

/**
 * Constants used by the execution-plan template deployer.
 */
public class ExecutionPlanDeployerConstants {

    // Siddhi annotation that names an execution plan.
    public static final String EXECUTION_PLAN_NAME_ANNOTATION = "@Plan:name";

    // Regex (non-greedy) matching a parenthesized value, e.g. "(name)".
    public static final String REGEX_NAME_COMMENTED_VALUE = "\\(.*?\\)";

    private ExecutionPlanDeployerConstants() {
        // Constants holder; prevent instantiation (utility-class convention).
    }
}
apache-2.0
seanbright/asterisk-java
src/main/java/org/asteriskjava/manager/event/AgentsCompleteEvent.java
1630
/*
 *  Copyright 2004-2006 Stefan Reuter
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.asteriskjava.manager.event;

/**
 * An AgentsCompleteEvent is triggered after the state of all agents has been
 * reported in response to an AgentsAction.<p>
 * Available since Asterisk 1.2
 *
 * @see org.asteriskjava.manager.action.AgentsAction
 * @author srt
 * @version $Id$
 * @since 0.2
 */
public class AgentsCompleteEvent extends ResponseEvent
{
    /**
     * Serial version identifier
     */
    private static final long serialVersionUID = -1177773673509373296L;

    // Properties populated reflectively from the manager protocol's
    // "ListItems" and "EventList" headers.
    private Integer listItems;
    private String eventList;

    /**
     * Creates a new event.
     *
     * @param source the object that produced this event
     */
    public AgentsCompleteEvent(Object source)
    {
        super(source);
    }

    /**
     * Returns the number of agent events reported before this completion
     * event, or <code>null</code> if the header was not present.
     *
     * @return the number of list items
     */
    public Integer getListItems()
    {
        return listItems;
    }

    /**
     * Sets the number of list items (called by the event builder).
     *
     * @param listItems the number of list items
     */
    public void setListItems(Integer listItems)
    {
        this.listItems = listItems;
    }

    /**
     * Returns the raw "EventList" header value, or <code>null</code> if the
     * header was not present.
     *
     * @return the event list status
     */
    public String getEventList()
    {
        return eventList;
    }

    /**
     * Sets the event list status (called by the event builder).
     *
     * @param eventList the event list status
     */
    public void setEventList(String eventList)
    {
        this.eventList = eventList;
    }
}
apache-2.0
mycFelix/heron
heron/healthmgr/tests/java/org/apache/heron/healthmgr/detectors/LargeWaitQueueDetectorTest.java
3004
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.heron.healthmgr.detectors;

import java.time.Instant;
import java.util.ArrayList;
import java.util.Collection;

import com.microsoft.dhalion.core.Measurement;
import com.microsoft.dhalion.core.Symptom;
import com.microsoft.dhalion.policy.PoliciesExecutor;

import org.junit.Test;

import org.apache.heron.healthmgr.HealthPolicyConfig;

import static org.apache.heron.healthmgr.detectors.LargeWaitQueueDetector.CONF_SIZE_LIMIT;
import static org.apache.heron.healthmgr.sensors.BaseSensor.MetricName.METRIC_WAIT_Q_SIZE;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Verifies that LargeWaitQueueDetector honors the configured size limit:
 * queue-size measurements above the limit produce a symptom, measurements
 * below it do not.
 */
public class LargeWaitQueueDetectorTest {
  @Test
  public void testConfigAndFilter() {
    // Mocked policy config sets the wait-queue size limit to 20
    // (overriding the detector's default of 1000).
    HealthPolicyConfig config = mock(HealthPolicyConfig.class);
    when(config.getConfig(CONF_SIZE_LIMIT, 1000)).thenReturn(20);

    // Two consecutive measurements of 21 — both above the limit of 20 —
    // for the same instance "i1" of component "bolt".
    Measurement measurement1
        = new Measurement("bolt", "i1", METRIC_WAIT_Q_SIZE.text(),
        Instant.ofEpochSecond(1497892222), 21);
    Measurement measurement2
        = new Measurement("bolt", "i1", METRIC_WAIT_Q_SIZE.text(),
        Instant.ofEpochSecond(1497892322), 21);

    Collection<Measurement> metrics = new ArrayList<>();
    metrics.add(measurement1);
    metrics.add(measurement2);

    LargeWaitQueueDetector detector = new LargeWaitQueueDetector(config);
    PoliciesExecutor.ExecutionContext context = mock(PoliciesExecutor.ExecutionContext.class);
    when(context.checkpoint()).thenReturn(Instant.now());
    detector.initialize(context);
    Collection<Symptom> symptoms = detector.detect(metrics);

    // Above the limit: exactly one symptom, assigned to the one instance.
    assertEquals(1, symptoms.size());
    assertEquals(1, symptoms.iterator().next().assignments().size());

    // Now both measurements (11 and 10) are below the limit of 20.
    measurement1
        = new Measurement("bolt", "i1", METRIC_WAIT_Q_SIZE.text(),
        Instant.ofEpochSecond(1497892222), 11);
    measurement2
        = new Measurement("bolt", "i1", METRIC_WAIT_Q_SIZE.text(),
        Instant.ofEpochSecond(1497892322), 10);

    metrics = new ArrayList<>();
    metrics.add(measurement1);
    metrics.add(measurement2);

    detector = new LargeWaitQueueDetector(config);
    symptoms = detector.detect(metrics);

    // Below the limit: no symptoms.
    assertEquals(0, symptoms.size());
  }
}
apache-2.0
punkhorn/camel-upstream
core/camel-core/src/test/java/org/apache/camel/processor/ToDynamicIgnoreTest.java
2089
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.processor; import org.apache.camel.ContextTestSupport; import org.apache.camel.builder.RouteBuilder; import org.junit.Test; public class ToDynamicIgnoreTest extends ContextTestSupport { @Test public void testToDynamic() throws Exception { getMockEndpoint("mock:foo").expectedBodiesReceived("Hello Camel"); getMockEndpoint("mock:bar").expectedBodiesReceived("Hello World"); template.sendBodyAndHeader("direct:start", "Hello Camel", "foo", "foo"); template.sendBodyAndHeader("direct:start", "Hello World", "foo", "bar"); assertMockEndpointsSatisfied(); } @Test public void testToDynamicInvalid() throws Exception { getMockEndpoint("mock:foo").expectedBodiesReceived("Hello Camel"); template.sendBodyAndHeader("direct:start", "Hello Camel", "foo", "foo"); template.sendBody("direct:start", "Hello World"); assertMockEndpointsSatisfied(); } @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { @Override public void configure() throws Exception { from("direct:start") .toD("mock:${header.foo}", true); } }; } }
apache-2.0
DaemonSu/fuse-master
components/camel-sap/camel-sap-component/src/main/java/org/fusesource/camel/component/sap/SapTransactionalIDocListProducer.java
2423
/** * Copyright 2014 Red Hat, Inc. * * Red Hat licenses this file to you under the Apache License, version * 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. * */ package org.fusesource.camel.component.sap; import org.apache.camel.Exchange; import org.apache.camel.impl.DefaultProducer; import org.fusesource.camel.component.sap.model.idoc.DocumentList; import org.fusesource.camel.component.sap.util.IDocUtil; import org.fusesource.camel.component.sap.util.Util; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * An SAP producer sending a IDoc (Intermediate Document) list to an SAP system using * the transactional remote function call (tRFC) protocol. * * @author William Collins <punkhornsw@gmail.com> * */ public class SapTransactionalIDocListProducer extends DefaultProducer { private static final transient Logger LOG = LoggerFactory.getLogger(SapTransactionalIDocListProducer.class); public SapTransactionalIDocListProducer(SapTransactionalIDocListDestinationEndpoint endpoint) { super(endpoint); } @Override public void process(Exchange exchange) throws Exception { DocumentList documentList = exchange.getIn().getBody(DocumentList.class); if (documentList == null) { LOG.warn("Exchange input message body does not contain IDoc document list"); return; } if (LOG.isDebugEnabled()) { try { LOG.debug("Sending IDoc document list to ''{}''", getEndpoint().getEndpointUri()); LOG.debug("Document: " + (documentList == null ? 
documentList : Util.marshal(documentList))); } catch (Exception e) { LOG.warn("Failed to log request", e); } } String tid = DestinationRfcTransactionHandler.getTID(exchange, getEndpoint().getDestination()); IDocUtil.sendDocumentList(getEndpoint().getDestination(), documentList, tid); } @Override public SapTransactionalIDocListDestinationEndpoint getEndpoint() { return (SapTransactionalIDocListDestinationEndpoint) super.getEndpoint(); } }
apache-2.0
yida-lxw/solr-5.3.1
lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/stats/TaskStats.java
5752
package org.apache.lucene.benchmark.byTask.stats; /* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import org.apache.lucene.benchmark.byTask.tasks.PerfTask; /** * Statistics for a task run. * <br>The same task can run more than once, but, if that task records statistics, * each run would create its own TaskStats. */ public class TaskStats implements Cloneable { /** task for which data was collected */ private PerfTask task; /** round in which task run started */ private int round; /** task start time */ private long start; /** task elapsed time. elapsed &gt;= 0 indicates run completion! */ private long elapsed = -1; /** max tot mem during task */ private long maxTotMem; /** max used mem during task */ private long maxUsedMem; /** serial run number of this task run in the perf run */ private int taskRunNum; /** number of other tasks that started to run while this task was still running */ private int numParallelTasks; /** number of work items done by this task. * For indexing that can be number of docs added. * For warming that can be number of scanned items, etc. * For repeating tasks, this is a sum over repetitions. */ private int count; /** Number of similar tasks aggregated into this record. 
* Used when summing up on few runs/instances of similar tasks. */ private int numRuns = 1; /** * Create a run data for a task that is starting now. * To be called from Points. */ TaskStats (PerfTask task, int taskRunNum, int round) { this.task = task; this.taskRunNum = taskRunNum; this.round = round; maxTotMem = Runtime.getRuntime().totalMemory(); maxUsedMem = maxTotMem - Runtime.getRuntime().freeMemory(); start = System.currentTimeMillis(); } /** * mark the end of a task */ void markEnd (int numParallelTasks, int count) { elapsed = System.currentTimeMillis() - start; long totMem = Runtime.getRuntime().totalMemory(); if (totMem > maxTotMem) { maxTotMem = totMem; } long usedMem = totMem - Runtime.getRuntime().freeMemory(); if (usedMem > maxUsedMem) { maxUsedMem = usedMem; } this.numParallelTasks = numParallelTasks; this.count = count; } private int[] countsByTime; private long countsByTimeStepMSec; public void setCountsByTime(int[] counts, long msecStep) { countsByTime = counts; countsByTimeStepMSec = msecStep; } public int[] getCountsByTime() { return countsByTime; } public long getCountsByTimeStepMSec() { return countsByTimeStepMSec; } /** * @return the taskRunNum. */ public int getTaskRunNum() { return taskRunNum; } /* (non-Javadoc) * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder res = new StringBuilder(task.getName()); res.append(" "); res.append(count); res.append(" "); res.append(elapsed); return res.toString(); } /** * @return Returns the count. */ public int getCount() { return count; } /** * @return elapsed time. */ public long getElapsed() { return elapsed; } /** * @return Returns the maxTotMem. */ public long getMaxTotMem() { return maxTotMem; } /** * @return Returns the maxUsedMem. */ public long getMaxUsedMem() { return maxUsedMem; } /** * @return Returns the numParallelTasks. */ public int getNumParallelTasks() { return numParallelTasks; } /** * @return Returns the task. 
*/ public PerfTask getTask() { return task; } /** * @return Returns the numRuns. */ public int getNumRuns() { return numRuns; } /** * Add data from another stat, for aggregation * @param stat2 the added stat data. */ public void add(TaskStats stat2) { numRuns += stat2.getNumRuns(); elapsed += stat2.getElapsed(); maxTotMem += stat2.getMaxTotMem(); maxUsedMem += stat2.getMaxUsedMem(); count += stat2.getCount(); if (round != stat2.round) { round = -1; // no meaning if aggregating tasks of different round. } if (countsByTime != null && stat2.countsByTime != null) { if (countsByTimeStepMSec != stat2.countsByTimeStepMSec) { throw new IllegalStateException("different by-time msec step"); } if (countsByTime.length != stat2.countsByTime.length) { throw new IllegalStateException("different by-time msec count"); } for(int i=0;i<stat2.countsByTime.length;i++) { countsByTime[i] += stat2.countsByTime[i]; } } } /* (non-Javadoc) * @see java.lang.Object#clone() */ @Override public TaskStats clone() throws CloneNotSupportedException { TaskStats c = (TaskStats) super.clone(); if (c.countsByTime != null) { c.countsByTime = c.countsByTime.clone(); } return c; } /** * @return the round number. */ public int getRound() { return round; } }
apache-2.0