repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
sebbrudzinski/motech | platform/mds/mds/src/main/java/org/motechproject/mds/filter/DateFilterValue.java | 1408 | package org.motechproject.mds.filter;
import org.joda.time.DateMidnight;
import org.joda.time.DateTime;
import org.motechproject.commons.date.util.DateUtil;
import java.util.Arrays;
import java.util.List;
/**
* Represents Date values used for filtering data in MDS Data Browser.
* Provides proper value, param and operator for value.
*/
public class DateFilterValue extends FilterValue {

    /** The date-based quick-filter keywords supported by the Data Browser. */
    public static final List<String> DATE_FILTER_VALUES = Arrays.asList(TODAY, PAST_7_DAYS, THIS_MONTH, THIS_YEAR);

    public DateFilterValue(String value) {
        super.setValue(value);
    }

    /**
     * Resolves the stored filter keyword into the DateTime lower bound used by
     * the query. Keywords that are not recognized are returned unchanged.
     */
    @Override
    public Object valueForQuery() {
        switch (super.getValue()) {
            case TODAY:
                return midnightToday().toDateTime();
            case PAST_7_DAYS:
                return midnightToday().minusDays(7).toDateTime();
            case THIS_MONTH:
                return midnightToday().withDayOfMonth(1).toDateTime();
            case THIS_YEAR:
                return midnightToday().withDayOfYear(1).toDateTime();
            default:
                return super.getValue();
        }
    }

    /** Midnight at the start of the current day; base point for every date range. */
    private static DateMidnight midnightToday() {
        return new DateMidnight(DateUtil.now());
    }

    @Override
    public String paramTypeForQuery() {
        return DateTime.class.getName();
    }

    @Override
    public List<String> operatorForQueryFilter() {
        return Arrays.asList(">=");
    }
}
| bsd-3-clause |
LeonidShamis/XChange | xchange-bittrex/src/main/java/org/knowm/xchange/bittrex/dto/account/BittrexBalance.java | 2375 | package org.knowm.xchange.bittrex.dto.account;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.math.BigDecimal;
/**
 * Bittrex account balance for a single currency, deserialized from the
 * exchange's JSON response (field names map to Bittrex's PascalCase keys).
 */
public class BittrexBalance {

  /** Amount available for trading/withdrawal. */
  private BigDecimal available;
  /** Total balance (available + pending + reserved). */
  private BigDecimal balance;
  /** Deposit address for this currency, if one has been generated. */
  private String cryptoAddress;
  /** Currency code, e.g. "BTC". */
  private String currency;
  /** Amount pending confirmation. */
  private BigDecimal pending;
  /** Whether a deposit address has been requested. */
  private boolean requested;
  /** Account identifier reported by Bittrex. */
  private String uuid;

  public BittrexBalance(
      @JsonProperty("Available") BigDecimal available,
      @JsonProperty("Balance") BigDecimal balance,
      @JsonProperty("CryptoAddress") String cryptoAddress,
      @JsonProperty("Currency") String currency,
      @JsonProperty("Pending") BigDecimal pending,
      @JsonProperty("Requested") boolean requested,
      @JsonProperty("Uuid") String uuid) {
    super();
    this.available = available;
    this.balance = balance;
    this.cryptoAddress = cryptoAddress;
    this.currency = currency;
    this.pending = pending;
    this.requested = requested;
    this.uuid = uuid;
  }

  public BigDecimal getAvailable() {
    return available;
  }

  public void setAvailable(BigDecimal available) {
    this.available = available;
  }

  public BigDecimal getBalance() {
    return balance;
  }

  public void setBalance(BigDecimal balance) {
    this.balance = balance;
  }

  public String getCryptoAddress() {
    return cryptoAddress;
  }

  public void setCryptoAddress(String cryptoAddress) {
    this.cryptoAddress = cryptoAddress;
  }

  public String getCurrency() {
    return currency;
  }

  public void setCurrency(String currency) {
    this.currency = currency;
  }

  public BigDecimal getPending() {
    return pending;
  }

  public void setPending(BigDecimal pending) {
    this.pending = pending;
  }

  public boolean isRequested() {
    return requested;
  }

  public void setRequested(boolean requested) {
    this.requested = requested;
  }

  public String getUuid() {
    return uuid;
  }

  public void setUuid(String uuid) {
    this.uuid = uuid;
  }

  @Override
  public String toString() {
    // Fixed: previously printed the misspelled class name "Bittrexbalance".
    return "BittrexBalance [available="
        + available
        + ", balance="
        + balance
        + ", cryptoAddress="
        + cryptoAddress
        + ", currency="
        + currency
        + ", pending="
        + pending
        + ", requested="
        + requested
        + ", uuid="
        + uuid
        + "]";
  }
}
| mit |
sakim/snownote | org.eclipse.epf.richtext/src/org/eclipse/epf/richtext/preferences/RichTextPreferencesInitializer.java | 1240 | //------------------------------------------------------------------------------
// Copyright (c) 2005, 2006 IBM Corporation and others.
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// which accompanies this distribution, and is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// Contributors:
// IBM Corporation - initial implementation
//------------------------------------------------------------------------------
package org.eclipse.epf.richtext.preferences;
import org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer;
import org.eclipse.epf.richtext.RichTextPlugin;
import org.eclipse.jface.preference.IPreferenceStore;
/**
* The rich text preferences initializer.
*
* @author Kelvin Low
* @since 1.0
*/
public class RichTextPreferencesInitializer extends
		AbstractPreferenceInitializer {

	/**
	 * Seeds the rich text plug-in's preference store with its default values.
	 *
	 * @see org.eclipse.core.runtime.preferences.AbstractPreferenceInitializer#initializeDefaultPreferences()
	 */
	public void initializeDefaultPreferences() {
		IPreferenceStore preferenceStore =
				RichTextPlugin.getDefault().getPreferenceStore();
		RichTextPreferences.initializeDefaultPreferences(preferenceStore);
	}
}
| epl-1.0 |
evidolob/che | plugins/plugin-svn/che-plugin-svn-ext-server/src/main/java/org/eclipse/che/plugin/svn/server/upstream/CommandLineOutputProcessor.java | 1247 | /*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.plugin.svn.server.upstream;
import org.eclipse.che.api.core.util.LineConsumer;
import java.io.IOException;
import java.util.List;
/**
* Implementation of {@link LineConsumer} handling output of executing command line.
*/
/**
 * Implementation of {@link LineConsumer} handling output of executing command line.
 * Each consumed line is appended to the caller-supplied list.
 */
public class CommandLineOutputProcessor implements LineConsumer {

    // Final: the backing list reference never changes after construction;
    // the caller-supplied list itself is filled in place.
    private final List<String> output;

    /**
     * @param output the list that every consumed output line is appended to
     */
    public CommandLineOutputProcessor(final List<String> output) {
        this.output = output;
    }

    /** Appends one line of command output to the backing list. */
    @Override
    public void writeLine(String line) throws IOException {
        output.add(line);
    }

    @Override
    public void close() throws IOException {
        // nothing to close: this consumer only writes into an in-memory list
    }

    /** @return the lines consumed so far (the same list passed to the constructor) */
    public List<String> getOutput() {
        return output;
    }
}
| epl-1.0 |
md-5/jdk10 | src/java.base/share/classes/java/net/ConnectException.java | 2077 | /*
* Copyright (c) 1996, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.net;
/**
* Signals that an error occurred while attempting to connect a
* socket to a remote address and port. Typically, the connection
* was refused remotely (e.g., no process is listening on the
* remote address/port).
*
* @since 1.1
*/
public class ConnectException extends SocketException {

    // Fixed serialization version so serialized instances stay compatible
    // across JDK releases.
    @java.io.Serial
    private static final long serialVersionUID = 3831404271622369215L;

    /**
     * Constructs a new ConnectException with the specified detail
     * message as to why the connect error occurred.
     * A detail message is a String that gives a specific
     * description of this error.
     * @param msg the detail message
     */
    public ConnectException(String msg) {
        super(msg);
    }

    /**
     * Construct a new ConnectException with no detailed message.
     */
    public ConnectException() {}
}
| gpl-2.0 |
itzamnamx/AdempiereFS | base/src/org/eevolution/model/X_C_TaxGroup.java | 4645 | /******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2007 ComPiere, Inc. All Rights Reserved. *
* This program is free software, you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY, without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program, if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via info@compiere.org or http://www.compiere.org/license.html *
*****************************************************************************/
/** Generated Model - DO NOT CHANGE */
package org.eevolution.model;
import java.sql.ResultSet;
import java.util.Properties;
import org.compiere.model.*;
import org.compiere.util.KeyNamePair;
/** Generated Model for C_TaxGroup
* @author Adempiere (generated)
* @version Release 3.8.0 - $Id$ */
public class X_C_TaxGroup extends PO implements I_C_TaxGroup, I_Persistent
{
	/**
	 * Serialization version identifier, derived from the model generation date.
	 */
	private static final long serialVersionUID = 20150223L;

	/** Standard Constructor */
	public X_C_TaxGroup (Properties ctx, int C_TaxGroup_ID, String trxName)
	{
		super (ctx, C_TaxGroup_ID, trxName);
		// Generator left the mandatory-column initialization commented out.
		/** if (C_TaxGroup_ID == 0)
		{
			setC_TaxGroup_ID (0);
			setName (null);
			setValue (null);
		} */
	}

	/** Load Constructor */
	public X_C_TaxGroup (Properties ctx, ResultSet rs, String trxName)
	{
		super (ctx, rs, trxName);
	}

	/** AccessLevel
	 * @return 3 - Client - Org
	 */
	protected int get_AccessLevel()
	{
		return accessLevel.intValue();
	}

	/** Load Meta Data */
	protected POInfo initPO (Properties ctx)
	{
		POInfo poi = POInfo.getPOInfo (ctx, Table_ID, get_TrxName());
		return poi;
	}

	public String toString()
	{
		StringBuffer sb = new StringBuffer ("X_C_TaxGroup[")
			.append(get_ID()).append("]");
		return sb.toString();
	}

	/** Set Tax Group.
		@param C_TaxGroup_ID Tax Group */
	public void setC_TaxGroup_ID (int C_TaxGroup_ID)
	{
		// IDs below 1 are treated as "not set" and stored as NULL.
		if (C_TaxGroup_ID < 1)
			set_ValueNoCheck (COLUMNNAME_C_TaxGroup_ID, null);
		else
			set_ValueNoCheck (COLUMNNAME_C_TaxGroup_ID, Integer.valueOf(C_TaxGroup_ID));
	}

	/** Get Tax Group.
		@return Tax Group */
	public int getC_TaxGroup_ID ()
	{
		Integer ii = (Integer)get_Value(COLUMNNAME_C_TaxGroup_ID);
		if (ii == null)
			return 0;
		return ii.intValue();
	}

	/** Set Description.
		@param Description
		Optional short description of the record
	  */
	public void setDescription (String Description)
	{
		set_Value (COLUMNNAME_Description, Description);
	}

	/** Get Description.
		@return Optional short description of the record
	  */
	public String getDescription ()
	{
		return (String)get_Value(COLUMNNAME_Description);
	}

	/** Set Comment/Help.
		@param Help
		Comment or Hint
	  */
	public void setHelp (String Help)
	{
		set_Value (COLUMNNAME_Help, Help);
	}

	/** Get Comment/Help.
		@return Comment or Hint
	  */
	public String getHelp ()
	{
		return (String)get_Value(COLUMNNAME_Help);
	}

	/** Set Name.
		@param Name
		Alphanumeric identifier of the entity
	  */
	public void setName (String Name)
	{
		set_Value (COLUMNNAME_Name, Name);
	}

	/** Get Name.
		@return Alphanumeric identifier of the entity
	  */
	public String getName ()
	{
		return (String)get_Value(COLUMNNAME_Name);
	}

	/** Get Record ID/ColumnName
		@return ID/ColumnName pair
	  */
	public KeyNamePair getKeyNamePair()
	{
		return new KeyNamePair(get_ID(), getName());
	}

	/** Set Search Key.
		@param Value
		Search key for the record in the format required - must be unique
	  */
	public void setValue (String Value)
	{
		set_Value (COLUMNNAME_Value, Value);
	}

	/** Get Search Key.
		@return Search key for the record in the format required - must be unique
	  */
	public String getValue ()
	{
		return (String)get_Value(COLUMNNAME_Value);
	}
} | gpl-2.0 |
YouDiSN/OpenJDK-Research | jdk9/hotspot/src/jdk.internal.vm.ci/share/classes/module-info.java | 1666 | /*
* Copyright (c) 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
// JVMCI (JVM Compiler Interface) module: service hooks through which an
// external compiler implementation plugs into HotSpot.
module jdk.internal.vm.ci {
    // Qualified export: only the Graal compiler module may use the services API.
    exports jdk.vm.ci.services to jdk.internal.vm.compiler;

    uses jdk.vm.ci.services.JVMCIServiceLocator;
    uses jdk.vm.ci.hotspot.HotSpotJVMCIBackendFactory;

    // One backend factory per supported CPU architecture.
    provides jdk.vm.ci.hotspot.HotSpotJVMCIBackendFactory with
        jdk.vm.ci.hotspot.aarch64.AArch64HotSpotJVMCIBackendFactory,
        jdk.vm.ci.hotspot.amd64.AMD64HotSpotJVMCIBackendFactory,
        jdk.vm.ci.hotspot.sparc.SPARCHotSpotJVMCIBackendFactory;
}
| gpl-2.0 |
qtekfun/htcDesire820Kernel | external/proguard/src/proguard/shrink/ShortestUsagePrinter.java | 7406 | /*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2009 Eric Lafortune (eric@graphics.cornell.edu)
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.shrink;
import proguard.classfile.*;
import proguard.classfile.util.ClassUtil;
import proguard.classfile.visitor.*;
import java.io.PrintStream;
/**
* This ClassVisitor and MemberVisitor prints out the reasons why
* classes and class members have been marked as being used.
*
* @see UsageMarker
*
* @author Eric Lafortune
*/
public class ShortestUsagePrinter
implements ClassVisitor,
           MemberVisitor
{
    // Marker that recorded, for each kept class/member, the shortest chain of
    // reasons leading back to a keep rule.
    private final ShortestUsageMarker shortestUsageMarker;
    // Whether descriptors are printed in full external (human-readable) form.
    private final boolean             verbose;
    // Destination stream for the report.
    private final PrintStream         ps;

    /**
     * Creates a new UsagePrinter that prints verbosely to <code>System.out</code>.
     * @param shortestUsageMarker the usage marker that was used to mark the
     *                            classes and class members.
     */
    public ShortestUsagePrinter(ShortestUsageMarker shortestUsageMarker)
    {
        this(shortestUsageMarker, true);
    }

    /**
     * Creates a new UsagePrinter that prints to the given stream.
     * @param shortestUsageMarker the usage marker that was used to mark the
     *                            classes and class members.
     * @param verbose             specifies whether the output should be verbose.
     */
    public ShortestUsagePrinter(ShortestUsageMarker shortestUsageMarker,
                                boolean             verbose)
    {
        this(shortestUsageMarker, verbose, System.out);
    }

    /**
     * Creates a new UsagePrinter that prints to the given stream.
     * @param shortestUsageMarker the usage marker that was used to mark the
     *                            classes and class members.
     * @param verbose             specifies whether the output should be verbose.
     * @param printStream         the stream to which to print.
     */
    public ShortestUsagePrinter(ShortestUsageMarker shortestUsageMarker,
                                boolean             verbose,
                                PrintStream         printStream)
    {
        this.shortestUsageMarker = shortestUsageMarker;
        this.verbose             = verbose;
        this.ps                  = printStream;
    }

    // Implementations for ClassVisitor.

    public void visitProgramClass(ProgramClass programClass)
    {
        // Print the name of this class.
        ps.println(ClassUtil.externalClassName(programClass.getName()));

        // Print the reason for keeping this class.
        printReason(programClass);
    }

    public void visitLibraryClass(LibraryClass libraryClass)
    {
        // Print the name of this class.
        ps.println(ClassUtil.externalClassName(libraryClass.getName()));

        // Print the reason for keeping this class.
        // Library classes are never traced further; they are kept by definition.
        ps.println("  is a library class.\n");
    }

    // Implementations for MemberVisitor.

    public void visitProgramField(ProgramClass programClass, ProgramField programField)
    {
        // Print the name of this field.
        String name = programField.getName(programClass);
        String type = programField.getDescriptor(programClass);
        ps.println(ClassUtil.externalClassName(programClass.getName()) +
                   (verbose ?
                        ": " + ClassUtil.externalFullFieldDescription(0, name, type):
                        "." + name) +
                   lineNumberRange(programClass, programField));

        // Print the reason for keeping this method.
        printReason(programField);
    }

    public void visitProgramMethod(ProgramClass programClass, ProgramMethod programMethod)
    {
        // Print the name of this method.
        String name = programMethod.getName(programClass);
        String type = programMethod.getDescriptor(programClass);
        ps.println(ClassUtil.externalClassName(programClass.getName()) +
                   (verbose ?
                        ": " + ClassUtil.externalFullMethodDescription(programClass.getName(), 0, name, type):
                        "." + name) +
                   lineNumberRange(programClass, programMethod));

        // Print the reason for keeping this method.
        printReason(programMethod);
    }

    public void visitLibraryField(LibraryClass libraryClass, LibraryField libraryField)
    {
        // Print the name of this field.
        String name = libraryField.getName(libraryClass);
        String type = libraryField.getDescriptor(libraryClass);
        ps.println(ClassUtil.externalClassName(libraryClass.getName()) +
                   (verbose ?
                        ": " + ClassUtil.externalFullFieldDescription(0, name, type):
                        "." + name));

        // Print the reason for keeping this field.
        ps.println("  is a library field.\n");
    }

    public void visitLibraryMethod(LibraryClass libraryClass, LibraryMethod libraryMethod)
    {
        // Print the name of this method.
        String name = libraryMethod.getName(libraryClass);
        String type = libraryMethod.getDescriptor(libraryClass);
        ps.println(ClassUtil.externalClassName(libraryClass.getName()) +
                   (verbose ?
                        ": " + ClassUtil.externalFullMethodDescription(libraryClass.getName(), 0, name, type):
                        "." + name));

        // Print the reason for keeping this method.
        ps.println("  is a library method.\n");
    }

    // Small utility methods.

    // Prints the recorded keep reason for the given class/member, then recurses
    // into the class or member that caused it to be kept (building the chain).
    private void printReason(VisitorAccepter visitorAccepter)
    {
        if (shortestUsageMarker.isUsed(visitorAccepter))
        {
            ShortestUsageMark shortestUsageMark = shortestUsageMarker.getShortestUsageMark(visitorAccepter);

            // Print the reason for keeping this class.
            ps.print("  " + shortestUsageMark.getReason());

            // Print the class or method that is responsible, with its reasons.
            shortestUsageMark.acceptClassVisitor(this);
            shortestUsageMark.acceptMemberVisitor(this);
        }
        else
        {
            ps.println("  is not being kept.\n");
        }
    }

    /**
     * Returns the line number range of the given class member, followed by a
     * colon, or just an empty String if no range is available.
     */
    private static String lineNumberRange(ProgramClass programClass, ProgramMember programMember)
    {
        String range = programMember.getLineNumberRange(programClass);
        return range != null ?
            (" (" + range + ")") :
            "";
    }
}
| gpl-2.0 |
Jenyay/tasks | src/main/java/com/todoroo/astrid/gtasks/api/GtasksApiUtilities.java | 2230 | /**
* Copyright (c) 2012 Todoroo Inc
*
* See the file "LICENSE" for the full license governing this code.
*/
package com.todoroo.astrid.gtasks.api;
import com.google.api.client.util.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Date;
import java.util.TimeZone;
import static org.tasks.date.DateTimeUtils.newDate;
public class GtasksApiUtilities {

    private static final Logger log = LoggerFactory.getLogger(GtasksApiUtilities.class);

    /**
     * Converts a unix completion timestamp (millis) to the Google Tasks
     * DateTime form in the device's default time zone.
     *
     * @return null when {@code time} is negative (treated as "no completion time")
     */
    public static DateTime unixTimeToGtasksCompletionTime(long time) {
        if (time < 0) {
            return null;
        }
        return new DateTime(newDate(time), TimeZone.getDefault());
    }

    /** @return the completion time as unix millis, or 0 when absent */
    public static long gtasksCompletedTimeToUnixTime(DateTime gtasksCompletedTime) {
        if (gtasksCompletedTime == null) {
            return 0;
        }
        return gtasksCompletedTime.getValue();
    }

    /**
     * Google deals only in dates for due times, so on the server side they normalize to utc time
     * and then truncate h:m:s to 0. This can lead to a loss of date information for
     * us, so we adjust here by doing the normalizing/truncating ourselves and
     * then correcting the date we get back in a similar way.
     */
    public static DateTime unixTimeToGtasksDueDate(long time) {
        if (time < 0) {
            return null;
        }
        // Truncate to whole seconds, then zero the local h:m:s. The deprecated
        // Date mutators are used deliberately: truncation must happen in local time.
        Date date = newDate(time / 1000 * 1000);
        date.setHours(0);
        date.setMinutes(0);
        date.setSeconds(0);
        // Shift local midnight to UTC midnight before handing the date to Google.
        date.setTime(date.getTime() - date.getTimezoneOffset() * 60000);
        return new DateTime(date, TimeZone.getTimeZone("UTC"));
    }

    //Adjust for google's rounding
    /**
     * Inverse of {@link #unixTimeToGtasksDueDate(long)}: shifts the UTC-midnight
     * due date Google returns back to local midnight, as unix millis.
     *
     * @return the due time as unix millis, or 0 when absent or on error
     */
    public static long gtasksDueTimeToUnixTime(DateTime gtasksDueTime) {
        if (gtasksDueTime == null) {
            return 0;
        }
        try {
            long utcTime = gtasksDueTime.getValue(); //DateTime.parseRfc3339(gtasksDueTime).value;
            Date date = newDate(utcTime);
            Date returnDate = newDate(date.getTime() + date.getTimezoneOffset() * 60000);
            return returnDate.getTime();
        } catch (NumberFormatException e) {
            // NOTE(review): nothing in this try block parses a String, so this
            // catch appears to be a leftover from the commented-out
            // DateTime.parseRfc3339 call above — confirm before removing.
            log.error(e.getMessage(), e);
            return 0;
        }
    }
}
| gpl-3.0 |
mru00/jade_agents | src/jade/content/abs/AbsObject.java | 2714 | /**
* ***************************************************************
* JADE - Java Agent DEvelopment Framework is a framework to develop
* multi-agent systems in compliance with the FIPA specifications.
* Copyright (C) 2000 CSELT S.p.A.
*
* GNU Lesser General Public License
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation,
* version 2.1 of the License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
* **************************************************************
*/
package jade.content.abs;
import jade.util.leap.Serializable;
/**
* The common ancestor of all abstract descriptors
* @author Federico Bergenti - Universita` di Parma
* @author Giovanni Caire - TILAB
*/
public interface AbsObject extends Serializable {

    // Type discriminators returned by getAbsType(), identifying the concrete
    // kind of abstract descriptor.
    public static final int UNKNOWN = -1;
    public static final int ABS_PREDICATE = 1;
    public static final int ABS_CONCEPT = 2;
    public static final int ABS_AGENT_ACTION = 3;
    public static final int ABS_PRIMITIVE = 4;
    public static final int ABS_AGGREGATE = 5;
    public static final int ABS_IRE = 6;
    public static final int ABS_VARIABLE = 7;
    public static final int ABS_CONTENT_ELEMENT_LIST = 8;
    public static final int ABS_CONCEPT_SLOT_FUNCTION = 9;

    /**
     * @return The name of the type of the object held by this
     * abstract descriptor.
     */
    public String getTypeName();

    /**
     * Gets the value of an attribute of the object held by this
     * abstract descriptor.
     * @param name The name of the attribute.
     * @return value The value of the attribute.
     */
    public AbsObject getAbsObject(String name);

    /**
     * @return the name of all attributes.
     */
    public String[] getNames();

    /**
     * Tests if the object is grounded, i.e., if no one of its attributes
     * is associated with a variable
     * @return <code>true</code> if the object is grounded.
     */
    public boolean isGrounded();

    /**
     * Gets the number of attributes.
     * @return the number of attributes.
     */
    public int getCount();

    /**
     * @return one of the ABS_... constants above identifying the concrete
     * descriptor kind (presumably UNKNOWN when unclassified — implementations
     * define the exact contract).
     */
    public int getAbsType();
}
| lgpl-2.1 |
archeng504/yammp | src/org/yammp/fragment/GenreFragment.java | 5204 | /*
* YAMMP - Yet Another Multi Media Player for android
* Copyright (C) 2011-2012 Mariotaku Lee <mariotaku.lee@gmail.com>
*
* This file is part of YAMMP.
*
* YAMMP is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* YAMMP is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with YAMMP. If not, see <http://www.gnu.org/licenses/>.
*/
package org.yammp.fragment;
import org.yammp.Constants;
import org.yammp.R;
import org.yammp.YAMMPApplication;
import org.yammp.app.TrackBrowserActivity;
import org.yammp.util.MediaUtils;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.provider.MediaStore.Audio;
import android.support.v4.app.FragmentTransaction;
import android.support.v4.app.LoaderManager.LoaderCallbacks;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.support.v4.widget.CursorAdapter;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ListView;
import android.widget.TextView;
import com.actionbarsherlock.app.SherlockListFragment;
public class GenreFragment extends SherlockListFragment implements LoaderCallbacks<Cursor>,
		Constants {

	private GenresAdapter mAdapter;

	// Cursor column index of Audio.Genres.NAME, cached in onLoadFinished().
	private int mNameIdx;

	private MediaUtils mUtils;

	/** Required no-arg constructor for framework re-instantiation. */
	public GenreFragment() {

	}

	public GenreFragment(Bundle args) {
		setArguments(args);
	}

	@Override
	public void onActivityCreated(Bundle savedInstanceState) {
		super.onActivityCreated(savedInstanceState);
		mUtils = ((YAMMPApplication) getSherlockActivity().getApplication()).getMediaUtils();
		setHasOptionsMenu(true);
		// Adapter starts empty; the loader supplies the cursor asynchronously.
		mAdapter = new GenresAdapter(getActivity(), null, false);
		getLoaderManager().initLoader(0, null, this);
	}

	// Builds the cursor loader that queries the device's audio genres.
	@Override
	public Loader<Cursor> onCreateLoader(int id, Bundle args) {
		String[] cols = new String[] { Audio.Genres._ID, Audio.Genres.NAME };
		String where = mUtils.getBetterGenresWhereClause();
		Uri uri = Audio.Genres.EXTERNAL_CONTENT_URI;
		return new CursorLoader(getActivity(), uri, cols, where, null,
				Audio.Genres.DEFAULT_SORT_ORDER);
	}

	@Override
	public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
		View view = inflater.inflate(R.layout.playlists_browser, container, false);
		return view;
	}

	@Override
	public void onListItemClick(ListView listview, View view, int position, long id) {
		showDetails(position, id);
	}

	@Override
	public void onLoaderReset(Loader<Cursor> loader) {
		mAdapter.swapCursor(null);
	}

	@Override
	public void onLoadFinished(Loader<Cursor> loader, Cursor data) {
		if (data == null) {
			getActivity().finish();
			return;
		}
		mNameIdx = data.getColumnIndexOrThrow(Audio.Genres.NAME);
		mAdapter.changeCursor(data);
		setListAdapter(mAdapter);
	}

	@Override
	public void onSaveInstanceState(Bundle outState) {
		outState.putAll(getArguments() != null ? getArguments() : new Bundle());
		super.onSaveInstanceState(outState);
	}

	// Shows the track list for the selected genre: in-place in the details
	// pane when the dual-pane layout is active, otherwise in a new activity.
	private void showDetails(int index, long id) {
		View detailsFrame = getActivity().findViewById(R.id.frame_details);
		boolean mDualPane = detailsFrame != null && detailsFrame.getVisibility() == View.VISIBLE;
		Bundle bundle = new Bundle();
		bundle.putString(INTENT_KEY_TYPE, Audio.Genres.CONTENT_TYPE);
		bundle.putLong(Audio.Genres._ID, id);
		if (mDualPane) {
			TrackFragment fragment = new TrackFragment();
			fragment.setArguments(bundle);
			FragmentTransaction ft = getFragmentManager().beginTransaction();
			ft.replace(R.id.frame_details, fragment);
			ft.setTransition(FragmentTransaction.TRANSIT_FRAGMENT_FADE);
			ft.commit();
		} else {
			Intent intent = new Intent(getActivity(), TrackBrowserActivity.class);
			intent.putExtras(bundle);
			startActivity(intent);
		}
	}

	// List adapter that maps each genre cursor row to a playlist-style list item.
	private class GenresAdapter extends CursorAdapter {

		private GenresAdapter(Context context, Cursor cursor, boolean autoRequery) {
			super(context, cursor, autoRequery);
		}

		@Override
		public void bindView(View view, Context context, Cursor cursor) {
			ViewHolder viewholder = (ViewHolder) view.getTag();
			String genre_name = cursor.getString(mNameIdx);
			viewholder.genre_name.setText(mUtils.parseGenreName(genre_name));
		}

		@Override
		public View newView(Context context, Cursor cursor, ViewGroup parent) {
			View view = LayoutInflater.from(context).inflate(R.layout.playlist_list_item, null);
			ViewHolder viewholder = new ViewHolder(view);
			view.setTag(viewholder);
			return view;
		}

		// Caches child view lookups per list item (standard view-holder pattern).
		private class ViewHolder {

			TextView genre_name;

			public ViewHolder(View view) {
				genre_name = (TextView) view.findViewById(R.id.playlist_name);
			}
		}
	}
}
| lgpl-3.0 |
marieke-bijlsma/molgenis | molgenis-data/src/test/java/org/molgenis/data/support/UuidGeneratorTest.java | 483 | package org.molgenis.data.support;
import static org.testng.Assert.assertEquals;
import java.util.HashSet;
import java.util.Set;
import org.testng.annotations.Test;
public class UuidGeneratorTest
{
	// Single source of truth for the sample size: previously the literal
	// 1000000 was duplicated in the loop bound and the assertion, so changing
	// one without the other would silently weaken the uniqueness check.
	private static final int ID_COUNT = 1000000;

	private final UuidGenerator uuidGenerator = new UuidGenerator();

	/**
	 * Generates ID_COUNT ids and verifies they are all unique, even when
	 * compared case-insensitively.
	 */
	@Test
	public void generateId()
	{
		Set<String> ids = new HashSet<>();
		for (int i = 0; i < ID_COUNT; i++)
		{
			ids.add(uuidGenerator.generateId().toLowerCase());
		}
		assertEquals(ids.size(), ID_COUNT);
	}
}
| lgpl-3.0 |
thiliniish/developer-studio | esb/org.wso2.developerstudio.eclipse.gmf.esb.persistence/src/org/wso2/developerstudio/eclipse/gmf/esb/internal/persistence/BeanMediatorTransformer.java | 4887 | /*
* Copyright 2012 WSO2, Inc. (http://wso2.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence;
import java.util.List;
import java.util.Map.Entry;
import org.apache.synapse.endpoints.Endpoint;
import org.apache.synapse.mediators.Value;
import org.apache.synapse.mediators.base.SequenceMediator;
import org.apache.synapse.mediators.bean.BeanMediator.Action;
import org.apache.synapse.util.xpath.SynapseXPath;
import org.eclipse.core.runtime.Assert;
import org.eclipse.emf.ecore.EObject;
import org.wso2.developerstudio.eclipse.gmf.esb.BeanMediator;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbNode;
import org.wso2.developerstudio.eclipse.gmf.esb.NamespacedProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.PropertyValueType;
import org.wso2.developerstudio.eclipse.gmf.esb.internal.persistence.custom.BeanMediatorExt;
import org.wso2.developerstudio.eclipse.gmf.esb.persistence.TransformationInfo;
public class BeanMediatorTransformer extends AbstractEsbNodeTransformer{

    /**
     * Transforms the visual Bean mediator {@code subject} into its synapse
     * counterpart, appends it to the current parent sequence and continues
     * the transformation along the mediator's output connector.
     */
    public void transform(TransformationInfo information, EsbNode subject) throws Exception {
        information.getParentSequence().addChild(
                createBeanMediator(subject, information));
        doTransform(information,
                ((BeanMediator) subject).getOutputConnector());
    }

    /** Bean mediators produce no endpoint artifacts; intentionally a no-op. */
    public void createSynapseObject(TransformationInfo info, EObject subject,
            List<Endpoint> endPoints) {
    }

    /**
     * Same as {@link #transform}, but for a Bean mediator nested inside an
     * already-built {@link SequenceMediator}.
     */
    public void transformWithinSequence(TransformationInfo information, EsbNode subject,
            SequenceMediator sequence) throws Exception {
        sequence.addChild(createBeanMediator(subject, information));
        doTransformWithinSequence(information, ((BeanMediator) subject)
                .getOutputConnector().getOutgoingLink(), sequence);
    }

    /**
     * Maps the visual mediator's action and configuration onto a
     * {@link BeanMediatorExt}.
     *
     * @param subject must be a {@link BeanMediator}; asserted below
     * @param information current transformation context (unused here, kept
     *            for signature parity with the other transformers)
     * @throws Exception if the configured XPath expression cannot be parsed
     */
    private BeanMediatorExt createBeanMediator(
            EsbNode subject, TransformationInfo information) throws Exception {
        Assert.isTrue(subject instanceof BeanMediator, "Invalid subject.");
        BeanMediator visualBeanMediator = (BeanMediator) subject;
        BeanMediatorExt beanMediator = new BeanMediatorExt();
        setCommonProperties(beanMediator, visualBeanMediator);
        beanMediator.setVarName(visualBeanMediator.getVar());
        switch (visualBeanMediator.getAction()) {
        case CREATE:
            beanMediator.setAction(Action.CREATE);
            beanMediator.setClassName(visualBeanMediator.getClass_());
            break;
        case GET_PROPERTY:
            beanMediator.setAction(Action.GET_PROPERTY);
            beanMediator.setPropertyName(visualBeanMediator.getProperty());
            // Target may come from an XPath expression or a literal string.
            Value targetValue = buildValue(visualBeanMediator.getTargetType(),
                    visualBeanMediator.getTargetExpression(),
                    visualBeanMediator.getTargetLiteral());
            if (targetValue != null) {
                beanMediator.setTargetValue(targetValue);
            }
            break;
        case REMOVE:
            beanMediator.setAction(Action.REMOVE);
            break;
        case SET_PROPERTY:
            beanMediator.setAction(Action.SET_PROPERTY);
            beanMediator.setPropertyName(visualBeanMediator.getProperty());
            Value value = buildValue(visualBeanMediator.getValueType(),
                    visualBeanMediator.getValueExpression(),
                    visualBeanMediator.getValueLiteral());
            if (value != null) {
                beanMediator.setValue(value);
            }
            break;
        }
        return beanMediator;
    }

    /**
     * Builds a synapse {@link Value} from the visual configuration: when
     * {@code valueType} is EXPRESSION, an XPath expression (with its
     * namespace bindings) is used; otherwise the literal string. Returns
     * {@code null} when no usable value is configured, matching the original
     * behaviour of leaving the mediator untouched in that case.
     */
    private Value buildValue(PropertyValueType valueType,
            NamespacedProperty expressionProperty, String literal) throws Exception {
        if (valueType == PropertyValueType.EXPRESSION) {
            if (expressionProperty != null
                    && expressionProperty.getPropertyValue() != null) {
                SynapseXPath expression =
                        new SynapseXPath(expressionProperty.getPropertyValue());
                for (Entry<String, String> entry
                        : expressionProperty.getNamespaces().entrySet()) {
                    expression.addNamespace(entry.getKey(), entry.getValue());
                }
                return new Value(expression);
            }
            return null;
        }
        if (literal != null) {
            return new Value(literal);
        }
        return null;
    }
}
| apache-2.0 |
antz29/closure-compiler | test/com/google/javascript/jscomp/ScopedAliasesTest.java | 12851 | /*
* Copyright 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.Lists;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import java.util.Collection;
import java.util.List;
/**
* Tests for {@link ScopedAliases}
*
* @author robbyw@google.com (Robby Walker)
*/
public class ScopedAliasesTest extends CompilerTestCase {
private static String EXTERNS = "var window;";
public ScopedAliasesTest() {
super(EXTERNS);
}
private void testScoped(String code, String expected) {
test("goog.scope(function() {" + code + "});", expected);
}
private void testScopedNoChanges(String aliases, String code) {
testScoped(aliases + code, code);
}
public void testOneLevel() {
testScoped("var g = goog;g.dom.createElement(g.dom.TagName.DIV);",
"goog.dom.createElement(goog.dom.TagName.DIV);");
}
public void testTwoLevel() {
testScoped("var d = goog.dom;d.createElement(d.TagName.DIV);",
"goog.dom.createElement(goog.dom.TagName.DIV);");
}
public void testTransitive() {
testScoped("var d = goog.dom;var DIV = d.TagName.DIV;d.createElement(DIV);",
"goog.dom.createElement(goog.dom.TagName.DIV);");
}
public void testTransitiveInSameVar() {
testScoped("var d = goog.dom, DIV = d.TagName.DIV;d.createElement(DIV);",
"goog.dom.createElement(goog.dom.TagName.DIV);");
}
public void testMultipleTransitive() {
testScoped(
"var g=goog;var d=g.dom;var t=d.TagName;var DIV=t.DIV;" +
"d.createElement(DIV);",
"goog.dom.createElement(goog.dom.TagName.DIV);");
}
public void testFourLevel() {
testScoped("var DIV = goog.dom.TagName.DIV;goog.dom.createElement(DIV);",
"goog.dom.createElement(goog.dom.TagName.DIV);");
}
public void testWorksInClosures() {
testScoped(
"var DIV = goog.dom.TagName.DIV;" +
"goog.x = function() {goog.dom.createElement(DIV);};",
"goog.x = function() {goog.dom.createElement(goog.dom.TagName.DIV);};");
}
public void testOverridden() {
// Test that the alias doesn't get unaliased when it's overriden by a
// parameter.
testScopedNoChanges(
"var g = goog;", "goog.x = function(g) {g.z()};");
// Same for a local.
testScopedNoChanges(
"var g = goog;", "goog.x = function() {var g = {}; g.z()};");
}
public void testTwoScopes() {
test(
"goog.scope(function() {var g = goog;g.method()});" +
"goog.scope(function() {g.method();});",
"goog.method();g.method();");
}
public void testTwoSymbolsInTwoScopes() {
test(
"var goog = {};" +
"goog.scope(function() { var g = goog; g.Foo = function() {}; });" +
"goog.scope(function() { " +
" var Foo = goog.Foo; goog.bar = function() { return new Foo(); };" +
"});",
"var goog = {};" +
"goog.Foo = function() {};" +
"goog.bar = function() { return new goog.Foo(); };");
}
public void testAliasOfSymbolInGoogScope() {
test(
"var goog = {};" +
"goog.scope(function() {" +
" var g = goog;" +
" g.Foo = function() {};" +
" var Foo = g.Foo;" +
" Foo.prototype.bar = function() {};" +
"});",
"var goog = {}; goog.Foo = function() {};" +
"goog.Foo.prototype.bar = function() {};");
}
public void testScopedFunctionReturnThis() {
test("goog.scope(function() { " +
" var g = goog; g.f = function() { return this; };" +
"});",
"goog.f = function() { return this; };");
}
public void testScopedFunctionAssignsToVar() {
test("goog.scope(function() { " +
" var g = goog; g.f = function(x) { x = 3; return x; };" +
"});",
"goog.f = function(x) { x = 3; return x; };");
}
public void testScopedFunctionThrows() {
test("goog.scope(function() { " +
" var g = goog; g.f = function() { throw 'error'; };" +
"});",
"goog.f = function() { throw 'error'; };");
}
public void testPropertiesNotChanged() {
testScopedNoChanges("var x = goog.dom;", "y.x();");
}
public void testShadowedVar() {
test("var Popup = {};" +
"var OtherPopup = {};" +
"goog.scope(function() {" +
" var Popup = OtherPopup;" +
" Popup.newMethod = function() { return new Popup(); };" +
"});",
"var Popup = {};" +
"var OtherPopup = {};" +
"OtherPopup.newMethod = function() { return new OtherPopup(); };");
}
private void testTypes(String aliases, String code) {
testScopedNoChanges(aliases, code);
Compiler lastCompiler = getLastCompiler();
new TypeVerifyingPass(lastCompiler).process(lastCompiler.externsRoot,
lastCompiler.jsRoot);
}
public void testJsDocType() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {x} */ types.actual;"
+ "/** @type {goog.Timer} */ types.expected;");
}
public void testJsDocParameter() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @param {x} a */ types.actual;"
+ "/** @param {goog.Timer} a */ types.expected;");
}
public void testJsDocExtends() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @extends {x} */ types.actual;"
+ "/** @extends {goog.Timer} */ types.expected;");
}
public void testJsDocImplements() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @implements {x} */ types.actual;"
+ "/** @implements {goog.Timer} */ types.expected;");
}
public void testJsDocEnum() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @enum {x} */ types.actual;"
+ "/** @enum {goog.Timer} */ types.expected;");
}
public void testJsDocReturn() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @return {x} */ types.actual;"
+ "/** @return {goog.Timer} */ types.expected;");
}
public void testJsDocThis() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @this {x} */ types.actual;"
+ "/** @this {goog.Timer} */ types.expected;");
}
public void testJsDocThrows() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @throws {x} */ types.actual;"
+ "/** @throws {goog.Timer} */ types.expected;");
}
public void testJsDocSubType() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {x.Enum} */ types.actual;"
+ "/** @type {goog.Timer.Enum} */ types.expected;");
}
public void testJsDocTypedef() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @typedef {x} */ types.actual;"
+ "/** @typedef {goog.Timer} */ types.expected;");
}
public void testArrayJsDoc() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {Array.<x>} */ types.actual;"
+ "/** @type {Array.<goog.Timer>} */ types.expected;");
}
public void testObjectJsDoc() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {{someKey: x}} */ types.actual;"
+ "/** @type {{someKey: goog.Timer}} */ types.expected;");
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {{x: number}} */ types.actual;"
+ "/** @type {{x: number}} */ types.expected;");
}
public void testUnionJsDoc() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {x|Object} */ types.actual;"
+ "/** @type {goog.Timer|Object} */ types.expected;");
}
public void testFunctionJsDoc() {
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {function(x) : void} */ types.actual;"
+ "/** @type {function(goog.Timer) : void} */ types.expected;");
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {function() : x} */ types.actual;"
+ "/** @type {function() : goog.Timer} */ types.expected;");
}
public void testTestTypes() {
try {
testTypes(
"var x = goog.Timer;",
""
+ "/** @type {function() : x} */ types.actual;"
+ "/** @type {function() : wrong.wrong} */ types.expected;");
fail("Test types should fail here.");
} catch (AssertionError e) {
}
}
public void testNullType() {
testTypes(
"var x = goog.Timer;",
"/** @param draggable */ types.actual;"
+ "/** @param draggable */ types.expected;");
}
// TODO(robbyw): What if it's recursive? var goog = goog.dom;
// FAILURE CASES
private void testFailure(String code, DiagnosticType expectedError) {
test(code, null, expectedError);
}
private void testScopedFailure(String code, DiagnosticType expectedError) {
test("goog.scope(function() {" + code + "});", null, expectedError);
}
public void testScopedThis() {
testScopedFailure("this.y = 10;", ScopedAliases.GOOG_SCOPE_REFERENCES_THIS);
testScopedFailure("var x = this;",
ScopedAliases.GOOG_SCOPE_REFERENCES_THIS);
testScopedFailure("fn(this);", ScopedAliases.GOOG_SCOPE_REFERENCES_THIS);
}
public void testAliasRedefinition() {
testScopedFailure("var x = goog.dom; x = goog.events;",
ScopedAliases.GOOG_SCOPE_ALIAS_REDEFINED);
}
public void testAliasNonRedefinition() {
test("var y = {}; goog.scope(function() { goog.dom = y; });",
"var y = {}; goog.dom = y;");
}
public void testScopedReturn() {
testScopedFailure("return;", ScopedAliases.GOOG_SCOPE_USES_RETURN);
testScopedFailure("var x = goog.dom; return;",
ScopedAliases.GOOG_SCOPE_USES_RETURN);
}
public void testScopedThrow() {
testScopedFailure("throw 'error';", ScopedAliases.GOOG_SCOPE_USES_THROW);
}
public void testUsedImproperly() {
testFailure("var x = goog.scope(function() {});",
ScopedAliases.GOOG_SCOPE_USED_IMPROPERLY);
}
public void testBadParameters() {
testFailure("goog.scope()", ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
testFailure("goog.scope(10)", ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
testFailure("goog.scope(function() {}, 10)",
ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
testFailure("goog.scope(function z() {})",
ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
testFailure("goog.scope(function(a, b, c) {})",
ScopedAliases.GOOG_SCOPE_HAS_BAD_PARAMETERS);
}
public void testNonAliasLocal() {
testScopedFailure("var x = 10", ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
testScopedFailure("var x = goog.dom + 10",
ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
testScopedFailure("var x = goog['dom']",
ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
testScopedFailure("var x = goog.dom, y = 10",
ScopedAliases.GOOG_SCOPE_NON_ALIAS_LOCAL);
}
@Override
protected ScopedAliases getProcessor(Compiler compiler) {
return new ScopedAliases(compiler);
}
private static class TypeVerifyingPass
implements CompilerPass, NodeTraversal.Callback {
private final Compiler compiler;
private List<String> actualTypes = null;
public TypeVerifyingPass(Compiler compiler) {
this.compiler = compiler;
}
public void process(Node externs, Node root) {
NodeTraversal.traverse(compiler, root, this);
}
public boolean shouldTraverse(NodeTraversal nodeTraversal, Node n,
Node parent) {
return true;
}
public void visit(NodeTraversal t, Node n, Node parent) {
JSDocInfo info = n.getJSDocInfo();
if (info != null) {
Collection<Node> typeNodes = info.getTypeNodes();
if (typeNodes.size() > 0) {
if (actualTypes != null) {
List<String> expectedTypes = Lists.newArrayList();
for (Node typeNode : info.getTypeNodes()) {
expectedTypes.add(typeNode.toStringTree());
}
assertEquals(expectedTypes, actualTypes);
} else {
actualTypes = Lists.newArrayList();
for (Node typeNode : info.getTypeNodes()) {
actualTypes.add(typeNode.toStringTree());
}
}
}
}
}
}
}
| apache-2.0 |
GabrielBrascher/cloudstack | services/console-proxy/rdpconsole/src/main/java/common/opt/UnknownOptionException.java | 1025 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package common.opt;
/**
 * Thrown when a command line option is not recognized.
 */
public class UnknownOptionException extends RuntimeException {

    private static final long serialVersionUID = 1L;

    /**
     * @param message description of the unrecognized option
     */
    public UnknownOptionException(String message) {
        super(message);
    }

    /**
     * Cause-preserving variant so callers can wrap parse failures without
     * losing the original stack trace.
     *
     * @param message description of the unrecognized option
     * @param cause underlying failure, kept for diagnostics
     */
    public UnknownOptionException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
dongaihua/highlight-elasticsearch | src/main/java/org/elasticsearch/index/store/fs/NioFsIndexStore.java | 1695 | /*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.store.fs;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.indices.store.IndicesStore;
/**
*
*/
public class NioFsIndexStore extends FsIndexStore {
/**
 * Creates the NIO-filesystem index store; all wiring is delegated to the
 * {@link FsIndexStore} superclass — this subclass only selects the
 * directory implementation via {@code shardDirectory()}.
 */
@Inject
public NioFsIndexStore(Index index, @IndexSettings Settings indexSettings, IndexService indexService, IndicesStore indicesStore, NodeEnvironment nodeEnv) {
super(index, indexSettings, indexService, indicesStore, nodeEnv);
}
/**
 * @return the {@link DirectoryService} class used for each shard; this
 *         store variant backs shards with NIO filesystem directories.
 */
@Override
public Class<? extends DirectoryService> shardDirectory() {
return NioFsDirectoryService.class;
}
} | apache-2.0 |
wsargent/playframework | framework/src/play/src/main/java/play/inject/DelegateApplicationLifecycle.java | 782 | /*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.inject;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletionStage;
/**
 * Java-facing {@link ApplicationLifecycle} that forwards every operation to
 * the wrapped Scala {@code play.api.inject.ApplicationLifecycle}.
 */
@Singleton
public class DelegateApplicationLifecycle implements ApplicationLifecycle {
// The Scala lifecycle that actually stores and runs the stop hooks.
private final play.api.inject.ApplicationLifecycle delegate;
@Inject
public DelegateApplicationLifecycle(play.api.inject.ApplicationLifecycle delegate) {
this.delegate = delegate;
}
/** Registers a hook to run on application stop; forwarded to the delegate. */
@Override
public void addStopHook(final Callable<? extends CompletionStage<?>> hook) {
delegate.addStopHook(hook);
}
/** @return the underlying Scala lifecycle instance. */
@Override
public play.api.inject.ApplicationLifecycle asScala() {
return delegate;
}
}
| apache-2.0 |
lindzh/incubator-rocketmq | test/src/main/java/org/apache/rocketmq/test/clientinterface/MQConsumer.java | 985 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.test.clientinterface;
/**
 * Minimal lifecycle contract for message-queue consumers used by the tests.
 */
public interface MQConsumer {
/** Creates the underlying consumer. */
void create();
/** Creates the underlying consumer; {@code useTLS} presumably selects TLS transport — confirm against implementations. */
void create(boolean useTLS);
/** Starts the consumer; expected to be called after {@link #create()}. */
void start();
/** Stops the consumer and releases its resources. */
void shutdown();
}
| apache-2.0 |
godghdai/mp4parser-1 | isoparser/src/test/java/com/googlecode/mp4parser/authoring/tracks/DTSTrackImplTest.java | 1913 | package com.googlecode.mp4parser.authoring.tracks;
import com.coremedia.iso.IsoFile;
import com.coremedia.iso.boxes.Container;
import com.googlecode.mp4parser.FileDataSourceImpl;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.Track;
import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
import com.googlecode.mp4parser.authoring.builder.Fragmenter;
import com.googlecode.mp4parser.authoring.builder.StaticFragmentIntersectionFinderImpl;
import org.junit.Test;
import java.io.ByteArrayOutputStream;
import java.io.FileOutputStream;
import java.nio.channels.Channels;
import java.util.Collections;
public class DTSTrackImplTest {
@Test
public void checkOutputIsStable() throws Exception {
// Build a movie containing just the DTS sample track from the test resources.
Movie m = new Movie();
DTSTrackImpl dts = new DTSTrackImpl(new FileDataSourceImpl(DTSTrackImplTest.class.getProtectionDomain().getCodeSource().getLocation().getFile() + "/com/googlecode/mp4parser/authoring/tracks/dts-sample.dtshd"));
m.addTrack(dts);
// Fragment boundary at sample 1 for this single track.
Fragmenter fif = new StaticFragmentIntersectionFinderImpl(Collections.singletonMap((Track)dts, new long[]{1}));
DefaultMp4Builder mp4Builder = new DefaultMp4Builder();
mp4Builder.setFragmenter(fif);
Container c = mp4Builder.build(m);
// Kept for regenerating the golden file on a dev machine:
//c.writeContainer(new FileOutputStream("C:\\dev\\mp4parser\\isoparser\\src\\test\\resources\\com\\googlecode\\mp4parser\\authoring\\tracks\\dts-sample.mp4").getChannel());
// Serialize in-memory and compare box-by-box against the golden MP4.
ByteArrayOutputStream baos = new ByteArrayOutputStream();
c.writeContainer(Channels.newChannel(baos));
IsoFile ref = new IsoFile(DTSTrackImplTest.class.getProtectionDomain().getCodeSource().getLocation().getFile() + "/com/googlecode/mp4parser/authoring/tracks/dts-sample.mp4");
// mvhd/tkhd/mdhd are excluded from the comparison (presumably because they
// embed creation timestamps that differ per run — TODO confirm).
BoxComparator.check(ref, c, "/moov[0]/mvhd[0]", "/moov[0]/trak[0]/tkhd[0]", "/moov[0]/trak[0]/mdia[0]/mdhd[0]");
}
} | apache-2.0 |
sushilsingh94/AppLocker-1 | src/com/twinone/locker/lock/LockPreferences.java | 5562 | /*
* Copyright 2014 Luuk Willemsen (Twinone)
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twinone.locker.lock;
import java.io.Serializable;
import android.content.Context;
import com.twinone.locker.R;
import com.twinone.locker.pro.ProUtils;
import com.twinone.locker.util.PrefUtils;
public class LockPreferences implements Serializable {
/**
* Serialization id; instances of this class are serialized (e.g. passed
* between components), so keep this stable when evolving the fields.
*/
private static final long serialVersionUID = 2334826883469805015L;
public static final int TYPE_PASSWORD = 1 << 0; // 1
public static final int TYPE_PATTERN = 1 << 1; // 2
// Common
/** Whether this user has pro features enabled or no */
public boolean pro;
// Lock type: TYPE_PASSWORD or TYPE_PATTERN (see constants above).
public int type;
public String orientation;
public Boolean vibration;
public String message;
public int patternSize;
// Pro only
public String background;
public int showAnimationResId;
public int hideAnimationResId;
public int showAnimationMillis;
public int hideAnimationMillis;
// Password only
public String password;
public boolean passwordSwitchButtons;
// Pattern only
public String pattern;
public boolean patternStealth;
public boolean patternErrorStealth;
// Pro & pattern only
public int patternCircleResId;
public boolean showAds;
/**
* You should use this constructor which loads all properties into the
* object automatically
*
* @param c
*            context used to read shared preferences and resources
*/
public LockPreferences(Context c) {
final ProUtils p = new ProUtils(c);
pro = p.proFeaturesEnabled();
showAds = p.showAds();
PrefUtils prefs = new PrefUtils(c);
// Common
type = prefs.getCurrentLockTypeInt();
orientation = prefs.getString(R.string.pref_key_orientation);
vibration = prefs.getBoolean(R.string.pref_key_vibrate,
R.bool.pref_def_vibrate);
message = prefs.getString(R.string.pref_key_lock_message);
if (pro) {
// Pro users get their configured background and animations.
background = prefs.getString(R.string.pref_key_background,
R.string.pref_def_background);
// Show animation
final String showAnim = prefs.getString(
R.string.pref_key_anim_show_type,
R.string.pref_def_anim_show_type);
showAnimationResId = getAnimationResId(c, showAnim, true);
showAnimationMillis = prefs.parseInt(
R.string.pref_key_anim_show_millis,
R.string.pref_def_anim_show_millis);
// Hide animation
final String hideAnim = prefs.getString(
R.string.pref_key_anim_hide_type,
R.string.pref_def_anim_hide_type);
hideAnimationResId = getAnimationResId(c, hideAnim, false);
hideAnimationMillis = prefs.parseInt(
R.string.pref_key_anim_hide_millis,
R.string.pref_def_anim_hide_millis);
} else {
// Free users always fall back to the default resources, ignoring
// whatever was stored in preferences.
background = c.getString(R.string.pref_def_background);
// Show animation
final String showAnim = c
.getString(R.string.pref_def_anim_show_type);
showAnimationResId = getAnimationResId(c, showAnim, true);
showAnimationMillis = Integer.parseInt(c
.getString(R.string.pref_def_anim_show_millis));
// Hide animation
final String hideAnim = c
.getString(R.string.pref_def_anim_hide_type);
hideAnimationResId = getAnimationResId(c, hideAnim, false);
hideAnimationMillis = Integer.parseInt(c
.getString(R.string.pref_def_anim_hide_millis));
}
// Load both password and pattern because user could override the type
// setting
password = prefs.getString(R.string.pref_key_password);
passwordSwitchButtons = prefs.getBoolean(
R.string.pref_key_switch_buttons,
R.bool.pref_def_switch_buttons);
pattern = prefs.getString(R.string.pref_key_pattern);
patternStealth = prefs.getBoolean(R.string.pref_key_pattern_stealth,
R.bool.pref_def_pattern_stealth);
patternErrorStealth = prefs.getBoolean(
R.string.pref_key_pattern_hide_error,
R.bool.pref_def_pattern_error_stealth);
patternSize = prefs.parseInt(R.string.pref_key_pattern_size,
R.string.pref_def_pattern_size);
patternCircleResId = getPatternCircleResId(c, pro,
prefs.getString(R.string.pref_key_pattern_color));
}
/**
* @param c
*            context used to resolve the preference value strings
* @param type
*            the animation type string read from preferences (may be null)
* @param show
*            true if show animation, false if hide animation
* @return the resid to be applied (0 when no matching animation is found)
*/
private static int getAnimationResId(Context c, String type, boolean show) {
if (type != null) {
if (type.equals(c.getString(R.string.pref_val_anim_slide_left)))
return show ? R.anim.slide_in_left : R.anim.slide_out_left;
else if (type.equals(c
.getString(R.string.pref_val_anim_slide_right)))
return show ? R.anim.slide_in_right : R.anim.slide_out_right;
else if (type.equals(c.getString(R.string.pref_val_anim_fade)))
return show ? R.anim.fade_in : R.anim.fade_out;
}
return 0;
}
/**
* @return the drawable used for the pattern circles; colored variants are
*         pro-only, everyone else gets the white default.
*/
private static int getPatternCircleResId(Context c, boolean hasPro,
String setting) {
if (setting != null && hasPro) {
if (setting.equals(c
.getString(R.string.pref_val_pattern_color_blue)))
return R.drawable.pattern_circle_blue;
if (setting.equals(c
.getString(R.string.pref_val_pattern_color_green)))
return R.drawable.pattern_circle_green;
}
return R.drawable.pattern_circle_white;
}
}
| apache-2.0 |
mycFelix/heron | heron/tools/apiserver/tests/java/org/apache/heron/apiserver/resources/TopologyResourceTests.java | 7724 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.heron.apiserver.resources;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;
import javax.ws.rs.core.Response;
import org.glassfish.jersey.media.multipart.FormDataBodyPart;
import org.glassfish.jersey.media.multipart.FormDataMultiPart;
import org.junit.Before;
import org.junit.Test;
import org.apache.heron.apiserver.actions.Action;
import org.apache.heron.apiserver.actions.ActionFactory;
import org.apache.heron.apiserver.actions.ActionType;
import org.apache.heron.spi.common.Config;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class TopologyResourceTests {
private static final int HTTP_422 = 422;
private String cluster = "cluster";
private String role = "role";
private String environment = "environment";
private String topologyName = "topology";
private Action action;
private ActionFactory factory;
private TopologyResource resource;
private final List<String> requiredSubmitParamKeys = Arrays.asList(
"name", "cluster", "role", "definition", "topology"
);
@Before
public void before() {
action = mock(Action.class);
resource = spy(new TopologyResource());
factory = spy(createFactory(action));
doReturn(factory).when(resource).getActionFactory();
doReturn(Config.newBuilder().build()).when(resource).getBaseConfiguration();
doReturn(cluster).when(resource).getCluster();
}
@Test
public void testVerifyKeys() {
assertEquals(1,
TopologyResource.verifyKeys(new HashSet<>(), "key").size());
assertEquals(0,
TopologyResource.verifyKeys(new HashSet<>(Arrays.asList("key1")),
"key1").size());
}
@Test
public void testSubmitMissingParams() throws IOException {
FormDataMultiPart form = mock(FormDataMultiPart.class);
for (final String paramKey : requiredSubmitParamKeys) {
Set<String> keySet =
requiredSubmitParamKeys
.stream()
.filter(s -> s.equals(paramKey))
.collect(Collectors.toSet());
Map<String, List<FormDataBodyPart>> map = new HashMap<>();
keySet.forEach(t -> map.put(t, Collections.emptyList()));
when(form.getFields()).thenReturn(map);
Response response = resource.submit(form);
assertEquals(HTTP_422, response.getStatus());
}
}
@Test
public void testKillOk() {
final Response response = resource.kill(cluster, role, environment, topologyName);
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getMediaType());
}
@Test
public void testOnKillActionCalled() {
resource.kill(cluster, role, environment, topologyName);
verify(factory, times(1))
.createRuntimeAction(any(Config.class), eq(ActionType.KILL));
verify(action, times(1)).execute();
}
@Test
public void testActivateOk() {
final Response response = resource.activate(cluster, role, environment, topologyName);
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getMediaType());
}
@Test
public void testActivateActionCalled() {
resource.activate(cluster, role, environment, topologyName);
verify(factory, times(1))
.createRuntimeAction(any(Config.class), eq(ActionType.ACTIVATE));
verify(action, times(1)).execute();
}
@Test
public void testDeactivateOk() {
final Response response = resource.deactivate(cluster, role, environment, topologyName);
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getMediaType());
}
@Test
public void testDeactivateActionCalled() {
resource.deactivate(cluster, role, environment, topologyName);
verify(factory, times(1))
.createRuntimeAction(any(Config.class), eq(ActionType.DEACTIVATE));
verify(action, times(1)).execute();
}
@Test
public void testRestartOk() {
final Response response = resource.restart(cluster, role, environment, topologyName, -1);
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getMediaType());
}
@Test
public void testRestartActionCalled() {
resource.restart(cluster, role, environment, topologyName, -1);
verify(factory, times(1))
.createRuntimeAction(any(Config.class), eq(ActionType.RESTART));
verify(action, times(1)).execute();
}
@Test
public void testUpdateOk() {
MultivaluedMap<String, String> params = new MultivaluedHashMap<>();
params.putSingle("component_parallelism", "word:1");
Response response = resource.update(cluster, role, environment, topologyName, params);
assertEquals(Response.Status.OK.getStatusCode(), response.getStatus());
}
@Test
public void testUpdateActionCalled() {
MultivaluedMap<String, String> params = new MultivaluedHashMap<>();
params.putSingle("component_parallelism", "word:1");
resource.update(cluster, role, environment, topologyName, params);
verify(factory, times(1))
.createRuntimeAction(any(Config.class), eq(ActionType.UPDATE));
verify(action, times(1)).execute();
}
@Test
public void testUpdateMissingParams() {
Response response = resource.update(cluster, role, environment, topologyName, null);
assertEquals(HTTP_422, response.getStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getMediaType());
MultivaluedMap<String, String> params = new MultivaluedHashMap<>();
response = resource.update(cluster, role, environment, topologyName, params);
assertEquals(HTTP_422, response.getStatus());
}
  // Builds a stub factory that always hands back the supplied action, so
  // tests can assert which ActionType the resource requested of the factory.
  private ActionFactory createFactory(Action a) {
    return new TestActionFactory(a);
  }
  /**
   * Stub {@link ActionFactory} that ignores all arguments and always returns
   * the single {@link Action} supplied at construction time. This lets the
   * tests above verify how the resource drives the action lifecycle without
   * running real submit/runtime actions.
   */
  private static class TestActionFactory implements ActionFactory {
    // The canned action handed back for every create* call.
    private final Action action;
    TestActionFactory(Action a) {
      action = a;
    }
    @Override
    public Action createSubmitAction(Config config, String topologyPackagePath,
        String topologyBinaryFileName, String topologyDefinitionPath) {
      // Submission requests resolve to the canned action.
      return action;
    }
    @Override
    public Action createRuntimeAction(Config config, ActionType type) {
      // Runtime requests (kill/activate/deactivate/...) resolve to it too.
      return action;
    }
  }
}
| apache-2.0 |
dineshkummarc/Gitak_r982 | server/selenium-remote-control-1.0.3/selenium-server/org/openqa/jetty/jetty/servlet/ServletHttpRequest.java | 27149 | // ========================================================================
// $Id: ServletHttpRequest.java,v 1.65 2005/08/13 00:01:27 gregwilkins Exp $
// Copyright 200-2004 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.openqa.jetty.jetty.servlet;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.security.Principal;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletInputStream;
import javax.servlet.ServletRequest;
import javax.servlet.ServletRequestWrapper;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.commons.logging.Log;
import org.openqa.jetty.log.LogFactory;
import org.openqa.jetty.http.HttpConnection;
import org.openqa.jetty.http.HttpFields;
import org.openqa.jetty.http.HttpInputStream;
import org.openqa.jetty.http.HttpRequest;
import org.openqa.jetty.http.SecurityConstraint;
import org.openqa.jetty.util.LazyList;
import org.openqa.jetty.util.LogSupport;
import org.openqa.jetty.util.Resource;
import org.openqa.jetty.util.StringUtil;
import org.openqa.jetty.util.URI;
/* ------------------------------------------------------------ */
/** Servlet Request Wrapper.
* This class wraps a Jetty HTTP request as a 2.2 Servlet
* request.
* <P>
* Note that this wrapper is not synchronized and if a request is to
* be operated on by multiple threads, then higher level
* synchronizations may be required.
*
* @version $Id: ServletHttpRequest.java,v 1.65 2005/08/13 00:01:27 gregwilkins Exp $
* @author Greg Wilkins (gregw)
*/
public class ServletHttpRequest
implements HttpServletRequest
{
private static Log log = LogFactory.getLog(ServletHttpRequest.class);
/* -------------------------------------------------------------- */
public static final String
__SESSIONID_NOT_CHECKED = "not checked",
__SESSIONID_URL = "url",
__SESSIONID_COOKIE = "cookie",
__SESSIONID_NONE = "none";
private static final Enumeration __emptyEnum =
Collections.enumeration(Collections.EMPTY_LIST);
private static final Collection __defaultLocale =
Collections.singleton(Locale.getDefault());
private ServletHandler _servletHandler;
private HttpRequest _httpRequest;
private ServletHttpResponse _servletHttpResponse;
private String _contextPath=null;
private String _servletPath=null;
private String _pathInfo=null;
private String _query=null;
private String _pathTranslated=null;
private String _requestedSessionId=null;
private HttpSession _session=null;
private String _sessionIdState=__SESSIONID_NOT_CHECKED;
private ServletIn _in =null;
private BufferedReader _reader=null;
private int _inputState=0;
private ServletHolder _servletHolder;
private String _pathInContext;
/* ------------------------------------------------------------ */
/** Constructor.
*/
public ServletHttpRequest(ServletHandler servletHandler,
String pathInContext,
HttpRequest request)
{
_servletHandler=servletHandler;
_pathInContext=pathInContext;
_contextPath=_servletHandler.getHttpContext().getContextPath();
if (_contextPath.length()<=1)
_contextPath="";
_httpRequest=request;
}
/* ------------------------------------------------------------ */
void recycle(ServletHandler servletHandler,String pathInContext)
{
_servletHandler=servletHandler;
_pathInContext=pathInContext;
_servletPath=null;
_pathInfo=null;
_query=null;
_pathTranslated=null;
_requestedSessionId=null;
_session=null;
_sessionIdState=__SESSIONID_NOT_CHECKED;
_in=null;
_reader=null;
_inputState=0;
_servletHolder=null;
if (servletHandler!=null)
_contextPath=_servletHandler.getHttpContext().getContextPath();
if (_contextPath!=null&&_contextPath.length()<=1)
_contextPath="";
}
/* ------------------------------------------------------------ */
ServletHandler getServletHandler()
{
return _servletHandler;
}
/* ------------------------------------------------------------ */
void setServletHandler(ServletHandler servletHandler)
{
_servletHandler=servletHandler;
}
/* ------------------------------------------------------------ */
/** Set servletpath and pathInfo.
* Called by the Handler before passing a request to a particular
* holder to split the context path into a servlet path and path info.
* @param servletPath
* @param pathInfo
*/
void setServletPaths(String servletPath,
String pathInfo,
ServletHolder holder)
{
_servletPath=servletPath;
_pathInfo=pathInfo;
_servletHolder=holder;
}
/* ------------------------------------------------------------ */
ServletHolder getServletHolder()
{
return _servletHolder;
}
/* ------------------------------------------------------------ */
String getPathInContext()
{
return _pathInContext;
}
/* ------------------------------------------------------------ */
HttpRequest getHttpRequest()
{
return _httpRequest;
}
/* ------------------------------------------------------------ */
public ServletHttpResponse getServletHttpResponse()
{
return _servletHttpResponse;
}
/* ------------------------------------------------------------ */
void setServletHttpResponse(ServletHttpResponse response)
{
_servletHttpResponse = response;
}
/* ------------------------------------------------------------ */
public Locale getLocale()
{
Enumeration enm = _httpRequest.getFieldValues(HttpFields.__AcceptLanguage,
HttpFields.__separators);
// handle no locale
if (enm == null || !enm.hasMoreElements())
return Locale.getDefault();
// sort the list in quality order
List acceptLanguage = HttpFields.qualityList(enm);
if (acceptLanguage.size()==0)
return Locale.getDefault();
int size=acceptLanguage.size();
// convert to locals
for (int i=0; i<size; i++)
{
String language = (String)acceptLanguage.get(i);
language=HttpFields.valueParameters(language,null);
String country = "";
int dash = language.indexOf('-');
if (dash > -1)
{
country = language.substring(dash + 1).trim();
language = language.substring(0,dash).trim();
}
return new Locale(language,country);
}
return Locale.getDefault();
}
/* ------------------------------------------------------------ */
public Enumeration getLocales()
{
Enumeration enm = _httpRequest.getFieldValues(HttpFields.__AcceptLanguage,
HttpFields.__separators);
// handle no locale
if (enm == null || !enm.hasMoreElements())
return Collections.enumeration(__defaultLocale);
// sort the list in quality order
List acceptLanguage = HttpFields.qualityList(enm);
if (acceptLanguage.size()==0)
return
Collections.enumeration(__defaultLocale);
Object langs = null;
int size=acceptLanguage.size();
// convert to locals
for (int i=0; i<size; i++)
{
String language = (String)acceptLanguage.get(i);
language=HttpFields.valueParameters(language,null);
String country = "";
int dash = language.indexOf('-');
if (dash > -1)
{
country = language.substring(dash + 1).trim();
language = language.substring(0,dash).trim();
}
langs=LazyList.ensureSize(langs,size);
langs=LazyList.add(langs,new Locale(language,country));
}
if (LazyList.size(langs)==0)
return Collections.enumeration(__defaultLocale);
return Collections.enumeration(LazyList.getList(langs));
}
/* ------------------------------------------------------------ */
public boolean isSecure()
{
return _httpRequest.isConfidential();
}
/* ------------------------------------------------------------ */
public Cookie[] getCookies()
{
Cookie[] cookies = _httpRequest.getCookies();
if (cookies.length==0)
return null;
return cookies;
}
/* ------------------------------------------------------------ */
public long getDateHeader(String name)
{
return _httpRequest.getDateField(name);
}
/* ------------------------------------------------------------ */
public Enumeration getHeaderNames()
{
return _httpRequest.getFieldNames();
}
/* ------------------------------------------------------------ */
public String getHeader(String name)
{
return _httpRequest.getField(name);
}
/* ------------------------------------------------------------ */
public Enumeration getHeaders(String s)
{
Enumeration enm=_httpRequest.getFieldValues(s);
if (enm==null)
return __emptyEnum;
return enm;
}
/* ------------------------------------------------------------ */
public int getIntHeader(String name)
throws NumberFormatException
{
return _httpRequest.getIntField(name);
}
/* ------------------------------------------------------------ */
public String getMethod()
{
return _httpRequest.getMethod();
}
/* ------------------------------------------------------------ */
public String getContextPath()
{
return _contextPath;
}
/* ------------------------------------------------------------ */
public String getPathInfo()
{
if (_servletPath==null)
return null;
return _pathInfo;
}
/* ------------------------------------------------------------ */
public String getPathTranslated()
{
if (_pathInfo==null || _pathInfo.length()==0)
return null;
if (_pathTranslated==null)
{
Resource resource =
_servletHandler.getHttpContext().getBaseResource();
if (resource==null)
return null;
try
{
resource=resource.addPath(_pathInfo);
File file = resource.getFile();
if (file==null)
return null;
_pathTranslated=file.getAbsolutePath();
}
catch(Exception e)
{
log.debug(LogSupport.EXCEPTION,e);
}
}
return _pathTranslated;
}
/* ------------------------------------------------------------ */
public String getQueryString()
{
if (_query==null)
_query =_httpRequest.getQuery();
return _query;
}
/* ------------------------------------------------------------ */
public String getAuthType()
{
String at= _httpRequest.getAuthType();
if (at==SecurityConstraint.__BASIC_AUTH)
return HttpServletRequest.BASIC_AUTH;
if (at==SecurityConstraint.__FORM_AUTH)
return HttpServletRequest.FORM_AUTH;
if (at==SecurityConstraint.__DIGEST_AUTH)
return HttpServletRequest.DIGEST_AUTH;
if (at==SecurityConstraint.__CERT_AUTH)
return HttpServletRequest.CLIENT_CERT_AUTH;
if (at==SecurityConstraint.__CERT_AUTH2)
return HttpServletRequest.CLIENT_CERT_AUTH;
return at;
}
/* ------------------------------------------------------------ */
public String getRemoteUser()
{
return _httpRequest.getAuthUser();
}
/* ------------------------------------------------------------ */
public boolean isUserInRole(String role)
{
if (_servletHolder!=null)
role=_servletHolder.getUserRoleLink(role);
return _httpRequest.isUserInRole(role);
}
/* ------------------------------------------------------------ */
public Principal getUserPrincipal()
{
return _httpRequest.getUserPrincipal();
}
/* ------------------------------------------------------------ */
void setRequestedSessionId(String pathParams)
{
_requestedSessionId=null;
// try cookies first
if (_servletHandler.isUsingCookies())
{
Cookie[] cookies=_httpRequest.getCookies();
if (cookies!=null && cookies.length>0)
{
for (int i=0;i<cookies.length;i++)
{
if (SessionManager.__SessionCookie.equalsIgnoreCase(cookies[i].getName()))
{
if (_requestedSessionId!=null)
{
// Multiple jsessionid cookies. Probably due to
// multiple paths and/or domains. Pick the first
// known session or the last defined cookie.
SessionManager manager = _servletHandler.getSessionManager();
if (manager!=null && manager.getHttpSession(_requestedSessionId)!=null)
break;
log.debug("multiple session cookies");
}
_requestedSessionId=cookies[i].getValue();
_sessionIdState = __SESSIONID_COOKIE;
if(log.isDebugEnabled())log.debug("Got Session "+_requestedSessionId+" from cookie");
}
}
}
}
// check if there is a url encoded session param.
if (pathParams!=null && pathParams.startsWith(SessionManager.__SessionURL))
{
String id =
pathParams.substring(SessionManager.__SessionURL.length()+1);
if(log.isDebugEnabled())log.debug("Got Session "+id+" from URL");
if (_requestedSessionId==null)
{
_requestedSessionId=id;
_sessionIdState = __SESSIONID_URL;
}
else if (!id.equals(_requestedSessionId))
log.debug("Mismatched session IDs");
}
if (_requestedSessionId == null)
_sessionIdState = __SESSIONID_NONE;
}
/* ------------------------------------------------------------ */
public String getRequestedSessionId()
{
return _requestedSessionId;
}
/* ------------------------------------------------------------ */
public String getRequestURI()
{
return _httpRequest.getEncodedPath();
}
/* ------------------------------------------------------------ */
public StringBuffer getRequestURL()
{
StringBuffer buf = _httpRequest.getRootURL();
buf.append(getRequestURI());
return buf;
}
/* ------------------------------------------------------------ */
public String getServletPath()
{
if (_servletPath==null)
return _pathInContext;
return _servletPath;
}
/* ------------------------------------------------------------ */
    /** Get the HttpSession associated with this request.
     * @param create if true and no valid session exists, create a new one.
     * @return the session, or null when none exists and create is false.
     */
    public HttpSession getSession(boolean create)
    {
        // Reuse the cached session only while it is still valid.
        if (_session != null && ((SessionManager.Session)_session).isValid())
            return _session;
        _session=null;
        // Look up the session id supplied via cookie or URL path parameter.
        String id = getRequestedSessionId();
        if (id != null)
        {
            _session=_servletHandler.getHttpSession(id);
            // Requested id is stale/unknown and caller did not ask to create.
            if (_session == null && !create)
                return null;
        }
        // No usable session found: create one on demand.
        if (_session == null && create)
        {
            _session=newSession();
        }
        return _session;
    }
/* ------------------------------------------------------------ */
/* Create a new HttpSession.
* If cookies are being used a set cookie is added to the response.
*/
HttpSession newSession()
{
HttpSession session=_servletHandler.newHttpSession(this);
Cookie cookie=_servletHandler.getSessionManager().getSessionCookie(session,isSecure());
if (cookie!=null)
_servletHttpResponse.getHttpResponse().addSetCookie(cookie);
return session;
}
/* ------------------------------------------------------------ */
public HttpSession getSession()
{
HttpSession session = getSession(true);
return session;
}
/* ------------------------------------------------------------ */
public boolean isRequestedSessionIdValid()
{
return _requestedSessionId != null && getSession(false) != null;
}
/* -------------------------------------------------------------- */
public boolean isRequestedSessionIdFromCookie()
{
return _sessionIdState == __SESSIONID_COOKIE;
}
/* -------------------------------------------------------------- */
public boolean isRequestedSessionIdFromURL()
{
return _sessionIdState == __SESSIONID_URL;
}
/* -------------------------------------------------------------- */
/**
* @deprecated
*/
public boolean isRequestedSessionIdFromUrl()
{
return isRequestedSessionIdFromURL();
}
/* -------------------------------------------------------------- */
public Enumeration getAttributeNames()
{
return _httpRequest.getAttributeNames();
}
/* -------------------------------------------------------------- */
public Object getAttribute(String name)
{
return _httpRequest.getAttribute(name);
}
/* -------------------------------------------------------------- */
public void setAttribute(String name, Object value)
{
_httpRequest.setAttribute(name,value);
}
/* -------------------------------------------------------------- */
public void removeAttribute(String name)
{
_httpRequest.removeAttribute(name);
}
/* -------------------------------------------------------------- */
    /** Set the character encoding used to decode the request body.
     * Must be called before the body is accessed with getReader() or
     * getInputStream().
     * @throws UnsupportedEncodingException if the JVM does not support the
     *         named encoding.
     */
    public void setCharacterEncoding(String encoding)
        throws UnsupportedEncodingException
    {
        if (_inputState!=0)
            throw new IllegalStateException("getReader() or getInputStream() called");
        // Validate the encoding name eagerly: getBytes throws
        // UnsupportedEncodingException for an unknown encoding.
        "".getBytes(encoding);
        _httpRequest.setCharacterEncoding(encoding,false);
    }
/* -------------------------------------------------------------- */
public String getCharacterEncoding()
{
return _httpRequest.getCharacterEncoding();
}
/* -------------------------------------------------------------- */
public int getContentLength()
{
return _httpRequest.getContentLength();
}
/* -------------------------------------------------------------- */
public String getContentType()
{
return _httpRequest.getContentType();
}
/* -------------------------------------------------------------- */
public ServletInputStream getInputStream()
{
if (_inputState!=0 && _inputState!=1)
throw new IllegalStateException();
if (_in==null)
_in = new ServletIn((HttpInputStream)_httpRequest.getInputStream());
_inputState=1;
_reader=null;
return _in;
}
/* -------------------------------------------------------------- */
/**
* This method is not recommended as it forces the generation of a
* non-optimal data structure.
*/
public Map getParameterMap()
{
return Collections.unmodifiableMap(_httpRequest.getParameterStringArrayMap());
}
/* -------------------------------------------------------------- */
public String getParameter(String name)
{
return _httpRequest.getParameter(name);
}
/* -------------------------------------------------------------- */
public Enumeration getParameterNames()
{
return Collections.enumeration(_httpRequest.getParameterNames());
}
/* -------------------------------------------------------------- */
public String[] getParameterValues(String name)
{
List v=_httpRequest.getParameterValues(name);
if (v==null)
return null;
String[]a=new String[v.size()];
return (String[])v.toArray(a);
}
/* -------------------------------------------------------------- */
public String getProtocol()
{
return _httpRequest.getVersion();
}
/* -------------------------------------------------------------- */
public String getScheme()
{
return _httpRequest.getScheme();
}
/* -------------------------------------------------------------- */
public String getServerName()
{
return _httpRequest.getHost();
}
/* -------------------------------------------------------------- */
public int getServerPort()
{
int port = _httpRequest.getPort();
if (port==0)
{
if (getScheme().equalsIgnoreCase("https"))
return 443;
return 80;
}
return port;
}
/* -------------------------------------------------------------- */
public int getRemotePort()
{
HttpConnection connection= _httpRequest.getHttpConnection();
if (connection!=null)
return connection.getRemotePort();
return 0;
}
/* -------------------------------------------------------------- */
public String getLocalName()
{
HttpConnection connection= _httpRequest.getHttpConnection();
if (connection!=null)
return connection.getServerName();
return null;
}
/* -------------------------------------------------------------- */
public String getLocalAddr()
{
HttpConnection connection= _httpRequest.getHttpConnection();
if (connection!=null)
return connection.getServerAddr();
return null;
}
/* -------------------------------------------------------------- */
public int getLocalPort()
{
HttpConnection connection= _httpRequest.getHttpConnection();
if (connection!=null)
return connection.getServerPort();
return 0;
}
/* -------------------------------------------------------------- */
public BufferedReader getReader()
throws UnsupportedEncodingException
{
if (_inputState!=0 && _inputState!=2)
throw new IllegalStateException();
if (_reader==null)
{
String encoding=getCharacterEncoding();
if (encoding==null)
encoding=StringUtil.__ISO_8859_1;
_reader=new BufferedReader(new InputStreamReader(getInputStream(),encoding));
}
_inputState=2;
return _reader;
}
/* -------------------------------------------------------------- */
public String getRemoteAddr()
{
return _httpRequest.getRemoteAddr();
}
/* -------------------------------------------------------------- */
public String getRemoteHost()
{
if (_httpRequest.getHttpConnection()==null)
return null;
return _httpRequest.getRemoteHost();
}
/* -------------------------------------------------------------- */
/**
* @deprecated As of Version 2.1 of the Java Servlet API,
* use {@link javax.servlet.ServletContext#getRealPath} instead.
*/
public String getRealPath(String path)
{
return _servletHandler.getServletContext().getRealPath(path);
}
/* ------------------------------------------------------------ */
public RequestDispatcher getRequestDispatcher(String url)
{
if (url == null)
return null;
if (!url.startsWith("/"))
{
String relTo=URI.addPaths(_servletPath,_pathInfo);
int slash=relTo.lastIndexOf("/");
if (slash>1)
relTo=relTo.substring(0,slash+1);
else
relTo="/";
url=URI.addPaths(relTo,url);
}
return _servletHandler.getServletContext().getRequestDispatcher(url);
}
/* ------------------------------------------------------------ */
public String toString()
{
return
getContextPath()+"+"+getServletPath()+"+"+getPathInfo()+"\n"+
_httpRequest.toString();
}
/* ------------------------------------------------------------ */
/** Unwrap a ServletRequest.
*
* @see javax.servlet.ServletRequestWrapper
* @see javax.servlet.http.HttpServletRequestWrapper
* @param request
* @return The core ServletHttpRequest which must be the
* underlying request object
*/
public static ServletHttpRequest unwrap(ServletRequest request)
{
while (!(request instanceof ServletHttpRequest))
{
if (request instanceof ServletRequestWrapper)
{
ServletRequestWrapper wrapper =
(ServletRequestWrapper)request;
request=wrapper.getRequest();
}
else
throw new IllegalArgumentException("Does not wrap ServletHttpRequest");
}
return (ServletHttpRequest)request;
}
}
| apache-2.0 |
thiliniish/developer-studio | esb/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/URLRewriteMediatorInputConnectorItemProvider.java | 3071 | /**
* <copyright>
* </copyright>
*
* $Id$
*/
package org.wso2.developerstudio.eclipse.gmf.esb.provider;
import java.util.Collection;
import java.util.List;
import org.eclipse.emf.common.notify.AdapterFactory;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertyDescriptor;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
/**
* This is the item provider adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.URLRewriteMediatorInputConnector} object.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
// EMF-generated item provider adapter; methods are marked @generated and
// will be overwritten by the EMF code generator unless the tag is removed.
public class URLRewriteMediatorInputConnectorItemProvider
	extends InputConnectorItemProvider
	implements
		IEditingDomainItemProvider,
		IStructuredItemContentProvider,
		ITreeItemContentProvider,
		IItemLabelProvider,
		IItemPropertySource {
	/**
	 * This constructs an instance from a factory and a notifier.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public URLRewriteMediatorInputConnectorItemProvider(AdapterFactory adapterFactory) {
		super(adapterFactory);
	}

	/**
	 * This returns the property descriptors for the adapted class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public List<IItemPropertyDescriptor> getPropertyDescriptors(Object object) {
		if (itemPropertyDescriptors == null) {
			// Lazily populate the inherited descriptor cache; this class
			// adds no descriptors of its own beyond the superclass's.
			super.getPropertyDescriptors(object);
		}
		return itemPropertyDescriptors;
	}

	/**
	 * This returns URLRewriteMediatorInputConnector.gif.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object getImage(Object object) {
		return overlayImage(object, getResourceLocator().getImage("full/obj16/URLRewriteMediatorInputConnector"));
	}

	/**
	 * This returns the label text for the adapted class.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String getText(Object object) {
		return getString("_UI_URLRewriteMediatorInputConnector_type");
	}

	/**
	 * This handles model notifications by calling {@link #updateChildren} to update any cached
	 * children and by creating a viewer notification, which it passes to {@link #fireNotifyChanged}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void notifyChanged(Notification notification) {
		updateChildren(notification);
		super.notifyChanged(notification);
	}

	/**
	 * This adds {@link org.eclipse.emf.edit.command.CommandParameter}s describing the children
	 * that can be created under this object.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected void collectNewChildDescriptors(Collection<Object> newChildDescriptors, Object object) {
		super.collectNewChildDescriptors(newChildDescriptors, object);
	}
}
| apache-2.0 |
selkhateeb/closure-compiler | test/com/google/javascript/jscomp/PolymerPassFindExternsTest.java | 4193 | /*
* Copyright 2016 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.collect.ImmutableList;
import com.google.javascript.rhino.Node;
public final class PolymerPassFindExternsTest extends Es6CompilerTestCase {
private static final String EXTERNS =
LINE_JOINER.join(
"/** @constructor */",
"var HTMLElement = function() {};",
"/** @constructor @extends {HTMLElement} */",
"var HTMLInputElement = function() {};",
"/** @constructor @extends {HTMLElement} */",
"var PolymerElement = function() {};",
"/** @type {!Object} */",
"PolymerElement.prototype.$;",
"PolymerElement.prototype.created = function() {};",
"PolymerElement.prototype.ready = function() {};",
"PolymerElement.prototype.attached = function() {};",
"PolymerElement.prototype.domReady = function() {};",
"PolymerElement.prototype.detached = function() {};",
"/**",
" * Call the callback after a timeout. Calling job again with the same name",
" * resets the timer but will not result in additional calls to callback.",
" *",
" * @param {string} name",
" * @param {Function} callback",
" * @param {number} timeoutMillis The minimum delay in milliseconds before",
" * calling the callback.",
" */",
"PolymerElement.prototype.job = function(name, callback, timeoutMillis) {};",
"/**",
" * @param a {!Object}",
" * @return {!function()}",
" */",
"var Polymer = function(a) {};",
"var alert = function(msg) {};");
private PolymerPassFindExterns findExternsCallback;
  // Install the Polymer externs above as the default externs for every case.
  public PolymerPassFindExternsTest() {
    super(EXTERNS);
  }
  @Override
  protected CompilerPass getProcessor(final Compiler compiler) {
    // Fresh callback per compilation; the pass only traverses the externs
    // tree, so the test pass ignores the main source root.
    findExternsCallback = new PolymerPassFindExterns();
    return new CompilerPass() {
      @Override
      public void process(Node externs, Node root) {
        NodeTraversal.traverseEs6(compiler, externs, findExternsCallback);
      }
    };
  }
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // The pass reads (and may be seen as touching) the externs tree.
    allowExternsChanges(true);
    enableTypeCheck();
    runTypeCheckAfterProcessing = true;
    parseTypeInfo = true;
  }
  @Override
  protected int getNumRepetitions() {
    // Run the pass exactly once per test; a single traversal suffices to
    // populate the callback's extern references.
    return 1;
  }
public void testFindsPolymerElementRoot() {
testSame("");
Node polymerElementNode = findExternsCallback.getPolymerElementExterns();
assertNotNull(polymerElementNode);
assertTrue(polymerElementNode.isVar());
assertTrue(polymerElementNode.getFirstChild().matchesQualifiedName("PolymerElement"));
}
public void testFindsPolymerElementProps() {
testSame("");
final ImmutableList<String> expectedProps = ImmutableList.of(
"$", "created", "ready", "attached", "domReady", "detached", "job");
ImmutableList<Node> polymerElementProps = findExternsCallback.getPolymerElementProps();
assertNotNull(polymerElementProps);
assertThat(polymerElementProps).hasSize(expectedProps.size());
for (int i = 0; i < polymerElementProps.size(); ++i) {
assertThat(getPropertyName(polymerElementProps.get(i))).isEqualTo(expectedProps.get(i));
}
}
  // Extracts the property name string from a property-extern node.
  // When the right-hand side is a function (e.g. `...prototype.job =
  // function(...) {}`), the name sits one level deeper in the AST.
  // NOTE(review): relies on fixed child indices of the externs AST shape
  // produced for PolymerElement.prototype declarations — confirm if the
  // externs format changes.
  private String getPropertyName(Node node) {
    Node rightName = node.getFirstChild().getChildAtIndex(1);
    return rightName.isFunction()
        ? node.getFirstFirstChild().getChildAtIndex(1).getString() : rightName.getString();
  }
}
| apache-2.0 |
Ethanlm/hadoop | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/Crc32PerformanceTest.java | 13854 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
import java.io.PrintStream;
import java.lang.reflect.Constructor;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.Random;
import java.util.zip.CRC32;
import java.util.zip.Checksum;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.test.GenericTestUtils;
import org.slf4j.event.Level;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Performance tests to compare performance of Crc32|Crc32C implementations
* This can be run from the command line with:
*
* java -cp path/to/test/classes:path/to/common/classes \
* 'org.apache.hadoop.util.Crc32PerformanceTest'
*
* or
*
* hadoop org.apache.hadoop.util.Crc32PerformanceTest
*
* If any argument is provided, this test will run with non-directly buffer.
*
* The output is in JIRA table format.
*/
public class Crc32PerformanceTest {
  static final int MB = 1024 * 1024;

  /**
   * Common facade over the CRC implementations under test so that every
   * flavour can be driven through the same chunked-verification call.
   */
  interface Crc32 {

    /** Verifies the chunked checksums in {@code crcs} against {@code data}. */
    void verifyChunked(ByteBuffer data, int bytesPerCrc, ByteBuffer crcs,
        String filename, long basePos) throws ChecksumException;

    /** @return the checksum flavour (CRC32 or CRC32C) this implementation computes. */
    DataChecksum.Type crcType();

    /** Native (JNI) CRC32 implementation; only used with direct buffers. */
    final class Native implements Crc32 {
      @Override
      public void verifyChunked(ByteBuffer data, int bytesPerSum,
          ByteBuffer sums, String fileName, long basePos)
          throws ChecksumException {
        NativeCrc32.verifyChunkedSums(bytesPerSum, DataChecksum.Type.CRC32.id,
            sums, data, fileName, basePos);
      }

      @Override
      public DataChecksum.Type crcType() {
        return DataChecksum.Type.CRC32;
      }
    }

    /** Native (JNI) CRC32C implementation; handles both direct and heap buffers. */
    final class NativeC implements Crc32 {
      @Override
      public void verifyChunked(ByteBuffer data, int bytesPerSum,
          ByteBuffer sums, String fileName, long basePos)
          throws ChecksumException {
        if (data.isDirect()) {
          NativeCrc32.verifyChunkedSums(bytesPerSum,
              DataChecksum.Type.CRC32C.id, sums, data, fileName, basePos);
        } else {
          // Heap buffers: hand the backing arrays (plus offsets) to native code.
          final int dataOffset = data.arrayOffset() + data.position();
          final int crcsOffset = sums.arrayOffset() + sums.position();
          NativeCrc32.verifyChunkedSumsByteArray(bytesPerSum,
              DataChecksum.Type.CRC32C.id, sums.array(), crcsOffset,
              data.array(), dataOffset, data.remaining(), fileName, basePos);
        }
      }

      @Override
      public DataChecksum.Type crcType() {
        return DataChecksum.Type.CRC32C;
      }
    }

    /** Base class for pure-Java implementations backed by a {@link Checksum}. */
    abstract class AbstractCrc32<T extends Checksum> implements Crc32 {

      /** @return a fresh instance of the underlying checksum algorithm. */
      abstract T newAlgorithm();

      @Override
      public void verifyChunked(ByteBuffer data, int bytesPerCrc,
          ByteBuffer sums, String filename, long basePos)
          throws ChecksumException {
        final Checksum algorithm = newAlgorithm();
        final DataChecksum.Type type = crcType();
        if (data.hasArray() && sums.hasArray()) {
          // Array-backed path avoids per-byte ByteBuffer accessor overhead.
          DataChecksum.verifyChunked(type, algorithm, data.array(),
              data.position(), data.remaining(), bytesPerCrc, sums.array(),
              sums.position(), filename, basePos);
        } else {
          DataChecksum.verifyChunked(type, algorithm, data, bytesPerCrc,
              sums, filename, basePos);
        }
      }
    }

    /** {@link java.util.zip.CRC32}-based implementation. */
    final class Zip extends AbstractCrc32<CRC32> {
      @Override
      public CRC32 newAlgorithm() {
        return new CRC32();
      }

      @Override
      public DataChecksum.Type crcType() {
        return DataChecksum.Type.CRC32;
      }
    }

    /** Pure-Java CRC32 implementation. */
    final class PureJava extends AbstractCrc32<PureJavaCrc32> {
      @Override
      public PureJavaCrc32 newAlgorithm() {
        return new PureJavaCrc32();
      }

      @Override
      public DataChecksum.Type crcType() {
        return DataChecksum.Type.CRC32;
      }
    }

    /** Pure-Java CRC32C implementation. */
    final class PureJavaC extends AbstractCrc32<PureJavaCrc32C> {
      @Override
      public PureJavaCrc32C newAlgorithm() {
        return new PureJavaCrc32C();
      }

      @Override
      public DataChecksum.Type crcType() {
        return DataChecksum.Type.CRC32C;
      }
    }
  }

  /** Size of each test data buffer, in MB. */
  final int dataLengthMB;
  /** Number of verification passes per measurement. */
  final int trials;
  /** Whether to benchmark with direct (off-heap) byte buffers. */
  final boolean direct;

  final PrintStream out = System.out;

  /** Implementations to benchmark, in presentation order. */
  final List<Class<? extends Crc32>> crcs = new ArrayList<>();

  Crc32PerformanceTest(final int dataLengthMB, final int trials,
      final boolean direct) {
    this.dataLengthMB = dataLengthMB;
    this.trials = trials;
    this.direct = direct;

    crcs.add(Crc32.Zip.class);
    crcs.add(Crc32.PureJava.class);
    crcs.add(Crc32.PureJavaC.class);

    if (NativeCrc32.isAvailable()) {
      if (direct) {
        // The plain native CRC32 path only supports direct buffers.
        crcs.add(Crc32.Native.class);
      }
      crcs.add(Crc32.NativeC.class);
      GenericTestUtils.setLogLevel(getLogger(NativeCodeLoader.class),
          Level.TRACE);
    }
  }

  /** Runs the whole benchmark suite and prints the results as a JIRA table. */
  void run() throws Exception {
    final long startTime = System.nanoTime();
    printSystemProperties(out);
    out.println("Data Length = " + dataLengthMB + " MB");
    out.println("Trials = " + trials);
    doBench(crcs);
    out.printf("Elapsed %.1fs\n", secondsElapsed(startTime));
  }

  public static void main(String[] args) throws Exception {
    // Any command-line argument switches the benchmark to non-direct buffers.
    boolean isdirect = true;

    if (args.length > 0) {
      isdirect = false;
    }
    new Crc32PerformanceTest(64, 5, isdirect).run();
  }

  /** Prints one table cell, right-aligned to at least {@code width} characters. */
  private static void printCell(String s, int width, PrintStream outCrc) {
    final int w = s.length() > width? s.length(): width;
    outCrc.printf(" %" + w + "s |", s);
  }

  private ByteBuffer allocateByteBuffer(int length) {
    return direct? ByteBuffer.allocateDirect(length)
      : ByteBuffer.allocate(length);
  }

  /** @return a buffer of {@link #dataLengthMB} MB of random data, positioned at 0. */
  private ByteBuffer newData() {
    final byte[] bytes = new byte[dataLengthMB << 20];
    new Random().nextBytes(bytes);
    final ByteBuffer dataBufs = allocateByteBuffer(bytes.length);
    dataBufs.mark();
    dataBufs.put(bytes);
    dataBufs.reset();
    return dataBufs;
  }

  /** Computes the chunked checksums of {@code dataBufs} for the given CRC type. */
  private ByteBuffer computeCrc(ByteBuffer dataBufs, int bytePerCrc,
      DataChecksum.Type type) {
    // 4 bytes of checksum per (partial) chunk of bytePerCrc data bytes.
    final int size = 4 * (dataBufs.remaining() - 1) / bytePerCrc + 1;
    final ByteBuffer crcBufs = allocateByteBuffer(size);
    final DataChecksum checksum = DataChecksum.newDataChecksum(
        type, bytePerCrc);
    checksum.calculateChunkedSums(dataBufs, crcBufs);
    return crcBufs;
  }

  /** Computes the chunked checksums using the CRC type of the given implementation. */
  private ByteBuffer computeCrc(Class<? extends Crc32> clazz,
      ByteBuffer dataBufs, int bytePerCrc) throws Exception {
    final Constructor<? extends Crc32> ctor = clazz.getConstructor();
    final Crc32 crc = ctor.newInstance();
    final int size = 4 * (dataBufs.remaining() - 1) / bytePerCrc + 1;
    final ByteBuffer crcBufs = allocateByteBuffer(size);
    final DataChecksum checksum = DataChecksum.newDataChecksum(
        crc.crcType(), bytePerCrc);
    checksum.calculateChunkedSums(dataBufs, crcBufs);
    return crcBufs;
  }

  /** Warms up the JIT, then benchmarks all implementations over a range of chunk sizes. */
  private void doBench(final List<Class<? extends Crc32>> crcTargets)
      throws Exception {
    final ByteBuffer[] dataBufs = new ByteBuffer[16];
    for(int i = 0; i < dataBufs.length; i++) {
      dataBufs[i] = newData();
    }

    // Print header
    out.printf("\n%s Buffer Performance Table", direct? "Direct": "Non-direct");
    out.printf(" (bpc: byte-per-crc in MB/sec; #T: #Theads)\n");

    // Warm up implementations to get jit going.
    for (Class<? extends Crc32> c : crcTargets) {
      final ByteBuffer[] crc32 = {computeCrc(c, dataBufs[0], 32)};
      final ByteBuffer[] crc512 = {computeCrc(c, dataBufs[0], 512)};
      doBench(c, 1, dataBufs, crc32, 32);
      doBench(c, 1, dataBufs, crc512, 512);
    }

    // Test on a variety of sizes with different number of threads
    for (int i = 5; i <= 16; i++) {
      doBench(crcTargets, dataBufs, 1 << i, out);
    }
  }

  /** Benchmarks all implementations for one chunk size, varying the thread count. */
  private void doBench(final List<Class<? extends Crc32>> crcTargets,
      final ByteBuffer[] dataBufs, final int bytePerCrc,
      final PrintStream outCrc)
      throws Exception {
    // Pre-compute reference checksums for both CRC flavours.
    final ByteBuffer[] crcBufs = new ByteBuffer[dataBufs.length];
    final ByteBuffer[] crcBufsC = new ByteBuffer[dataBufs.length];
    for(int i = 0; i < dataBufs.length; i++) {
      crcBufs[i] = computeCrc(dataBufs[i], bytePerCrc,
          DataChecksum.Type.CRC32);
      crcBufsC[i] = computeCrc(dataBufs[i], bytePerCrc,
          DataChecksum.Type.CRC32C);
    }

    final String numBytesStr = " bpc ";
    final String numThreadsStr = "#T";
    final String diffStr = "% diff";

    outCrc.print('|');
    printCell(numBytesStr, 0, outCrc);
    printCell(numThreadsStr, 0, outCrc);
    for (int i = 0; i < crcTargets.size(); i++) {
      final Class<? extends Crc32> c = crcTargets.get(i);
      outCrc.print('|');
      printCell(c.getSimpleName(), 8, outCrc);
      if (i > 0) {
        printCell(diffStr, diffStr.length(), outCrc);
      }
    }
    outCrc.printf("\n");

    for(int numThreads = 1; numThreads <= dataBufs.length; numThreads <<= 1) {
      outCrc.printf("|");
      printCell(String.valueOf(bytePerCrc), numBytesStr.length(), outCrc);
      printCell(String.valueOf(numThreads), numThreadsStr.length(), outCrc);

      final List<BenchResult> previous = new ArrayList<BenchResult>();
      for(Class<? extends Crc32> c : crcTargets) {
        System.gc();

        final BenchResult result;
        final Constructor<? extends Crc32> ctor = c.getConstructor();
        final Crc32 crc = ctor.newInstance();
        if (crc.crcType() == DataChecksum.Type.CRC32) {
          result = doBench(c, numThreads, dataBufs, crcBufs, bytePerCrc);
        } else {
          result = doBench(c, numThreads, dataBufs, crcBufsC, bytePerCrc);
        }
        printCell(String.format("%9.1f", result.mbps),
            c.getSimpleName().length() + 1, outCrc);

        //compare result with the last previous.
        final int size = previous.size();
        if (size > 0) {
          BenchResult p = previous.get(size - 1);
          final double diff = (result.mbps - p.mbps) / p.mbps * 100;
          printCell(String.format("%5.1f%%", diff), diffStr.length(), outCrc);
        }
        previous.add(result);
      }
      outCrc.printf("\n");
    }
  }

  /**
   * Runs one measurement: {@code numThreads} threads each verify their own
   * buffer {@link #trials} times; returns the mean throughput across threads.
   */
  private BenchResult doBench(Class<? extends Crc32> clazz,
      final int numThreads, final ByteBuffer[] dataBufs,
      final ByteBuffer[] crcBufs, final int bytePerCrc)
      throws Exception {
    final Thread[] threads = new Thread[numThreads];
    final BenchResult[] results = new BenchResult[threads.length];

    final Constructor<? extends Crc32> ctor = clazz.getConstructor();

    for(int i = 0; i < threads.length; i++) {
      final Crc32 crc = ctor.newInstance();
      // Widen to long BEFORE multiplying: remaining() * trials is an int
      // product and silently overflows for large data sizes/trial counts.
      final long byteProcessed = (long) dataBufs[i].remaining() * trials;
      final int index = i;
      threads[i] = new Thread() {
        @Override
        public void run() {
          final long startTime = System.nanoTime();
          for (int i = 0; i < trials; i++) {
            dataBufs[index].mark();
            crcBufs[index].mark();
            try {
              crc.verifyChunked(dataBufs[index], bytePerCrc, crcBufs[index],
                  crc.getClass().getSimpleName(), dataBufs[index].position());
            } catch (Throwable t) {
              // Record the failure; getMbps() rethrows it on the main thread.
              results[index] = new BenchResult(t);
              return;
            } finally {
              dataBufs[index].reset();
              crcBufs[index].reset();
            }
          }
          final double secsElapsed = secondsElapsed(startTime);
          results[index] = new BenchResult(byteProcessed/secsElapsed/MB);
        }
      };
    }

    for(Thread t : threads) {
      t.start();
    }
    for(Thread t : threads) {
      t.join();
    }

    double sum = 0;
    for(int i = 0; i < results.length; i++) {
      sum += results[i].getMbps();
    }
    return new BenchResult(sum/results.length);
  }

  /** Outcome of one thread's measurement: either a throughput or a failure. */
  private static class BenchResult {
    /** Speed (MB per second). */
    final double mbps;
    final Throwable thrown;

    BenchResult(double mbps) {
      this.mbps = mbps;
      this.thrown = null;
    }

    BenchResult(Throwable e) {
      this.mbps = 0;
      this.thrown = e;
    }

    /** @return the throughput, or rethrows (as AssertionError) the recorded failure. */
    double getMbps() {
      if (thrown != null) {
        throw new AssertionError(thrown);
      }
      return mbps;
    }
  }

  /** @return seconds elapsed since {@code startTime} (a {@link System#nanoTime} value). */
  static double secondsElapsed(final long startTime) {
    return (System.nanoTime() - startTime) / 1000000000.0d;
  }

  /** Prints the JVM/OS properties relevant to interpreting the benchmark numbers. */
  static void printSystemProperties(PrintStream outCrc) {
    final String[] names = {
        "java.version",
        "java.runtime.name",
        "java.runtime.version",
        "java.vm.version",
        "java.vm.vendor",
        "java.vm.name",
        "java.vm.specification.version",
        "java.specification.version",
        "os.arch",
        "os.name",
        "os.version"
    };
    int max = 0;
    for(String n : names) {
      if (n.length() > max) {
        max = n.length();
      }
    }

    final Properties p = System.getProperties();
    for(String n : names) {
      outCrc.printf("%" + max + "s = %s\n", n, p.getProperty(n));
    }
  }
} | apache-2.0 |
OpenCollabZA/sakai | profile2/tool/src/java/org/sakaiproject/profile2/tool/pages/panels/MyContactEdit.java | 16339 | /**
* Copyright (c) 2008-2012 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.osedu.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.profile2.tool.pages.panels;
import org.apache.commons.lang3.StringUtils;
import org.apache.wicket.AttributeModifier;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.markup.html.form.AjaxFallbackButton;
import org.apache.wicket.behavior.AttributeAppender;
import org.apache.wicket.feedback.FeedbackMessage;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.panel.FeedbackPanel;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import org.apache.wicket.model.ResourceModel;
import org.apache.wicket.model.StringResourceModel;
import org.apache.wicket.spring.injection.annot.SpringBean;
import org.apache.wicket.validation.validator.EmailAddressValidator;
import org.apache.wicket.validation.validator.UrlValidator;
import org.sakaiproject.api.common.edu.person.SakaiPerson;
import org.sakaiproject.profile2.logic.ProfileLogic;
import org.sakaiproject.profile2.logic.ProfileWallLogic;
import org.sakaiproject.profile2.logic.SakaiProxy;
import org.sakaiproject.profile2.model.UserProfile;
import org.sakaiproject.profile2.tool.components.ComponentVisualErrorBehaviour;
import org.sakaiproject.profile2.tool.components.ErrorLevelsFeedbackMessageFilter;
import org.sakaiproject.profile2.tool.components.FeedbackLabel;
import org.sakaiproject.profile2.tool.components.PhoneNumberValidator;
import org.sakaiproject.profile2.util.ProfileConstants;
import lombok.extern.slf4j.Slf4j;
@Slf4j
/**
 * Wicket panel for editing the contact-details section of a user's profile
 * (email, homepage and phone numbers). On save the values are written back to
 * the user's SakaiPerson; on success the panel swaps itself for the read-only
 * {@link MyContactDisplay}.
 */
public class MyContactEdit extends Panel {

    private static final long serialVersionUID = 1L;

    @SpringBean(name="org.sakaiproject.profile2.logic.SakaiProxy")
    private SakaiProxy sakaiProxy;

    @SpringBean(name="org.sakaiproject.profile2.logic.ProfileWallLogic")
    private ProfileWallLogic wallLogic;

    @SpringBean(name="org.sakaiproject.profile2.logic.ProfileLogic")
    private ProfileLogic profileLogic;

    /**
     * @param id wicket markup id for this panel
     * @param userProfile profile being edited (may belong to another user when
     *        a superuser is proxied to them)
     */
    public MyContactEdit(final String id, final UserProfile userProfile) {
        super(id);

        log.debug("MyContactEdit()");

        //this panel - captured so the button callbacks can replace it in-place
        final Component thisPanel = this;

        //get userId
        final String userId = userProfile.getUserUuid();

        //heading
        add(new Label("heading", new ResourceModel("heading.contact.edit")));

        //setup form
        Form form = new Form("form", new Model(userProfile));
        form.setOutputMarkupId(true);

        //form submit feedback
        final Label formFeedback = new Label("formFeedback");
        formFeedback.setOutputMarkupPlaceholderTag(true);
        form.add(formFeedback);

        //add warning message if superUser and not editing own profile
        Label editWarning = new Label("editWarning");
        editWarning.setVisible(false);
        if(sakaiProxy.isSuperUserAndProxiedToUser(userId)) {
            editWarning.setDefaultModel(new StringResourceModel("text.edit.other.warning", null, new Object[]{ userProfile.getDisplayName() } ));
            editWarning.setEscapeModelStrings(false);
            editWarning.setVisible(true);
        }
        form.add(editWarning);

        //We don't need to get the info from userProfile, we load it into the form with a property model
        //just make sure that the form element id's match those in the model

        // FeedbackPanel
        final FeedbackPanel feedback = new FeedbackPanel("feedback");
        feedback.setOutputMarkupId(true);
        form.add(feedback);

        // filteredErrorLevels will not be shown in the FeedbackPanel
        // (field-level errors are rendered next to each field instead)
        int[] filteredErrorLevels = new int[]{FeedbackMessage.ERROR};
        feedback.setFilter(new ErrorLevelsFeedbackMessageFilter(filteredErrorLevels));

        //email
        WebMarkupContainer emailContainer = new WebMarkupContainer("emailContainer");
        emailContainer.add(new Label("emailLabel", new ResourceModel("profile.email")));
        final TextField email = new TextField("email", new PropertyModel(userProfile, "email"));
        email.setOutputMarkupId(true);
        email.setMarkupId("emailinput");
        email.add(EmailAddressValidator.getInstance());

        //readonly view - shown instead of the input when account updates are not allowed
        Label emailReadOnly = new Label("emailReadOnly", new PropertyModel(userProfile, "email"));

        if(sakaiProxy.isAccountUpdateAllowed(userId)) {
            emailReadOnly.setVisible(false);
        } else {
            email.setVisible(false);
        }
        emailContainer.add(email);
        emailContainer.add(emailReadOnly);

        //email feedback
        final FeedbackLabel emailFeedback = new FeedbackLabel("emailFeedback", email);
        emailFeedback.setMarkupId("emailFeedback");
        emailFeedback.setOutputMarkupId(true);
        emailContainer.add(emailFeedback);
        email.add(new ComponentVisualErrorBehaviour("onblur", emailFeedback));
        form.add(emailContainer);

        //homepage
        WebMarkupContainer homepageContainer = new WebMarkupContainer("homepageContainer");
        homepageContainer.add(new Label("homepageLabel", new ResourceModel("profile.homepage")));
        final TextField homepage = new TextField("homepage", new PropertyModel(userProfile, "homepage")) {

            private static final long serialVersionUID = 1L;

            // add http:// if missing so the UrlValidator accepts bare host names
            @Override
            protected void convertInput() {
                String input = getInput();

                if (StringUtils.isNotBlank(input) && !(input.startsWith("http://") || input.startsWith("https://"))) {
                    setConvertedInput("http://" + input);
                } else {
                    setConvertedInput(StringUtils.isBlank(input) ? null : input);
                }
            }
        };
        homepage.setMarkupId("homepageinput");
        homepage.setOutputMarkupId(true);
        homepage.add(new UrlValidator());
        homepageContainer.add(homepage);

        //homepage feedback
        final FeedbackLabel homepageFeedback = new FeedbackLabel("homepageFeedback", homepage);
        homepageFeedback.setMarkupId("homepageFeedback");
        homepageFeedback.setOutputMarkupId(true);
        homepageContainer.add(homepageFeedback);
        homepage.add(new ComponentVisualErrorBehaviour("onblur", homepageFeedback));
        form.add(homepageContainer);

        //workphone
        WebMarkupContainer workphoneContainer = new WebMarkupContainer("workphoneContainer");
        workphoneContainer.add(new Label("workphoneLabel", new ResourceModel("profile.phone.work")));
        final TextField workphone = new TextField("workphone", new PropertyModel(userProfile, "workphone"));
        workphone.setMarkupId("workphoneinput");
        workphone.setOutputMarkupId(true);
        workphone.add(new PhoneNumberValidator());
        workphoneContainer.add(workphone);

        //workphone feedback
        final FeedbackLabel workphoneFeedback = new FeedbackLabel("workphoneFeedback", workphone);
        workphoneFeedback.setMarkupId("workphoneFeedback");
        workphoneFeedback.setOutputMarkupId(true);
        workphoneContainer.add(workphoneFeedback);
        workphone.add(new ComponentVisualErrorBehaviour("onblur", workphoneFeedback));
        form.add(workphoneContainer);

        //homephone
        WebMarkupContainer homephoneContainer = new WebMarkupContainer("homephoneContainer");
        homephoneContainer.add(new Label("homephoneLabel", new ResourceModel("profile.phone.home")));
        final TextField homephone = new TextField("homephone", new PropertyModel(userProfile, "homephone"));
        homephone.setMarkupId("homephoneinput");
        homephone.setOutputMarkupId(true);
        homephone.add(new PhoneNumberValidator());
        homephoneContainer.add(homephone);

        //homephone feedback
        final FeedbackLabel homephoneFeedback = new FeedbackLabel("homephoneFeedback", homephone);
        homephoneFeedback.setMarkupId("homephoneFeedback");
        homephoneFeedback.setOutputMarkupId(true);
        homephoneContainer.add(homephoneFeedback);
        homephone.add(new ComponentVisualErrorBehaviour("onblur", homephoneFeedback));
        form.add(homephoneContainer);

        //mobilephone
        WebMarkupContainer mobilephoneContainer = new WebMarkupContainer("mobilephoneContainer");
        mobilephoneContainer.add(new Label("mobilephoneLabel", new ResourceModel("profile.phone.mobile")));
        final TextField mobilephone = new TextField("mobilephone", new PropertyModel(userProfile, "mobilephone"));
        mobilephone.setMarkupId("mobilephoneinput");
        mobilephone.setOutputMarkupId(true);
        mobilephone.add(new PhoneNumberValidator());
        mobilephoneContainer.add(mobilephone);

        //mobilephone feedback
        final FeedbackLabel mobilephoneFeedback = new FeedbackLabel("mobilephoneFeedback", mobilephone);
        mobilephoneFeedback.setMarkupId("mobilephoneFeedback");
        mobilephoneFeedback.setOutputMarkupId(true);
        mobilephoneContainer.add(mobilephoneFeedback);
        mobilephone.add(new ComponentVisualErrorBehaviour("onblur", mobilephoneFeedback));
        form.add(mobilephoneContainer);

        //facsimile
        WebMarkupContainer facsimileContainer = new WebMarkupContainer("facsimileContainer");
        facsimileContainer.add(new Label("facsimileLabel", new ResourceModel("profile.phone.facsimile")));
        final TextField facsimile = new TextField("facsimile", new PropertyModel(userProfile, "facsimile"));
        facsimile.setMarkupId("facsimileinput");
        facsimile.setOutputMarkupId(true);
        facsimile.add(new PhoneNumberValidator());
        facsimileContainer.add(facsimile);

        //facsimile feedback
        final FeedbackLabel facsimileFeedback = new FeedbackLabel("facsimileFeedback", facsimile);
        facsimileFeedback.setMarkupId("facsimileFeedback");
        facsimileFeedback.setOutputMarkupId(true);
        facsimileContainer.add(facsimileFeedback);
        facsimile.add(new ComponentVisualErrorBehaviour("onblur", facsimileFeedback));
        form.add(facsimileContainer);

        //submit button
        AjaxFallbackButton submitButton = new AjaxFallbackButton("submit", new ResourceModel("button.save.changes"), form) {

            private static final long serialVersionUID = 1L;

            protected void onSubmit(AjaxRequestTarget target, Form form) {
                //save() form, show message, then load display panel
                if(save(form)) {

                    //post update event
                    sakaiProxy.postEvent(ProfileConstants.EVENT_PROFILE_CONTACT_UPDATE, "/profile/"+userId, true);

                    //post to wall if enabled, but not when an admin is editing someone else's profile
                    if (sakaiProxy.isWallEnabledGlobally() && !sakaiProxy.isSuperUserAndProxiedToUser(userId)) {
                        wallLogic.addNewEventToWall(ProfileConstants.EVENT_PROFILE_CONTACT_UPDATE, sakaiProxy.getCurrentUserId());
                    }

                    //repaint panel
                    Component newPanel = new MyContactDisplay(id, userProfile);
                    newPanel.setOutputMarkupId(true);
                    thisPanel.replaceWith(newPanel);
                    if(target != null) {
                        target.add(newPanel);
                        //resize iframe
                        target.appendJavaScript("setMainFrameHeight(window.name);");
                    }

                } else {
                    formFeedback.setDefaultModel(new ResourceModel("error.profile.save.contact.failed"));
                    formFeedback.add(new AttributeModifier("class", true, new Model<String>("save-failed-error")));
                    target.add(formFeedback);
                }
            }

            // This is called if the form validation fails, ie Javascript turned off,
            //or we had preexisting invalid data before this fix was introduced
            protected void onError(AjaxRequestTarget target, Form form) {

                //check which item didn't validate and update the class and feedback model for that component
                if(!email.isValid()) {
                    email.add(new AttributeAppender("class", new Model<String>("invalid"), " "));
                    emailFeedback.setDefaultModel(new ResourceModel("EmailAddressValidator"));
                    target.add(email);
                    target.add(emailFeedback);
                }
                if(!homepage.isValid()) {
                    homepage.add(new AttributeAppender("class", new Model<String>("invalid"), " "));
                    homepageFeedback.setDefaultModel(new ResourceModel("UrlValidator"));
                    target.add(homepage);
                    target.add(homepageFeedback);
                }
                if(!workphone.isValid()) {
                    workphone.add(new AttributeAppender("class", new Model<String>("invalid"), " "));
                    workphoneFeedback.setDefaultModel(new ResourceModel("PhoneNumberValidator"));
                    target.add(workphone);
                    target.add(workphoneFeedback);
                }
                if(!homephone.isValid()) {
                    homephone.add(new AttributeAppender("class", new Model<String>("invalid"), " "));
                    homephoneFeedback.setDefaultModel(new ResourceModel("PhoneNumberValidator"));
                    target.add(homephone);
                    target.add(homephoneFeedback);
                }
                if(!mobilephone.isValid()) {
                    mobilephone.add(new AttributeAppender("class", new Model<String>("invalid"), " "));
                    mobilephoneFeedback.setDefaultModel(new ResourceModel("PhoneNumberValidator"));
                    target.add(mobilephone);
                    target.add(mobilephoneFeedback);
                }
                // note: this block appeared twice in the original, which attached
                // the "invalid" AttributeAppender to the facsimile field twice
                if(!facsimile.isValid()) {
                    facsimile.add(new AttributeAppender("class", new Model<String>("invalid"), " "));
                    facsimileFeedback.setDefaultModel(new ResourceModel("PhoneNumberValidator"));
                    target.add(facsimile);
                    target.add(facsimileFeedback);
                }
            }
        };
        form.add(submitButton);

        //cancel button - skips form processing and just swaps back to the display panel
        AjaxFallbackButton cancelButton = new AjaxFallbackButton("cancel", new ResourceModel("button.cancel"), form) {
            private static final long serialVersionUID = 1L;

            protected void onSubmit(AjaxRequestTarget target, Form form) {
                Component newPanel = new MyContactDisplay(id, userProfile);
                newPanel.setOutputMarkupId(true);
                thisPanel.replaceWith(newPanel);
                if(target != null) {
                    target.add(newPanel);
                    //resize iframe
                    target.appendJavaScript("setMainFrameHeight(window.name);");
                }
            }
        };
        cancelButton.setDefaultFormProcessing(false);
        form.add(cancelButton);

        //add form to page
        add(form);
    }

    /**
     * Persists the form's contact fields into the user's SakaiPerson.
     *
     * @param form the form whose model object is the {@link UserProfile} being saved
     * @return true if the profile was saved successfully
     */
    private boolean save(Form form) {

        //get the backing model
        UserProfile userProfile = (UserProfile) form.getModelObject();

        //get userId from the UserProfile (because admin could be editing), then get existing SakaiPerson for that userId
        String userId = userProfile.getUserUuid();
        SakaiPerson sakaiPerson = sakaiProxy.getSakaiPerson(userId);

        //set the attributes from userProfile that this form dealt with, into sakaiPerson
        //this WILL fail if there is no sakaiPerson for the user however this should have been caught already
        //as a new Sakaiperson for a user is created in MyProfile if they don't have one.

        //email is saved separately below (via updateEmailForUser) when allowed
        sakaiPerson.setLabeledURI(userProfile.getHomepage()); //homepage
        sakaiPerson.setTelephoneNumber(userProfile.getWorkphone()); //workphone
        sakaiPerson.setHomePhone(userProfile.getHomephone()); //homephone
        sakaiPerson.setMobile(userProfile.getMobilephone()); //mobilephone
        sakaiPerson.setFacsimileTelephoneNumber(userProfile.getFacsimile()); //facsimile

        if(profileLogic.saveUserProfile(sakaiPerson)) {
            log.info("Saved SakaiPerson for: " + userId );

            //update their email address in their account if allowed
            if(sakaiProxy.isAccountUpdateAllowed(userId)) {
                sakaiProxy.updateEmailForUser(userId, userProfile.getEmail());
            }

            return true;
        } else {
            // warn rather than info: this is the failure path surfaced to the user
            log.warn("Couldn't save SakaiPerson for: " + userId);
            return false;
        }
    }
}
| apache-2.0 |
apache/flink | flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/runtime/stream/jsonplan/CalcJsonPlanITCase.java | 3680 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.runtime.stream.jsonplan;
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.JavaFunc0;
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.JavaFunc2;
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.UdfWithOpen;
import org.apache.flink.table.planner.runtime.utils.TestData;
import org.apache.flink.table.planner.utils.JavaScalaConversionUtil;
import org.apache.flink.table.planner.utils.JsonPlanTestBase;
import org.junit.Test;
import java.io.File;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/** Test for calc json plan. */
/** Integration tests that compile calc SQL into JSON plans, execute them and check the sink. */
public class CalcJsonPlanITCase extends JsonPlanTestBase {

    @Test
    public void testSimpleCalc() throws Exception {
        // CSV source with three rows; the filter b > 1 keeps only the last one.
        final List<String> sourceRows = Arrays.asList("1,1,hi", "2,1,hello", "3,2,hello world");
        createTestCsvSourceTable("MyTable", sourceRows, "a bigint", "b int not null", "c varchar");
        final File sinkFile =
                createTestCsvSinkTable("MySink", "a bigint", "a1 varchar", "b int", "c1 varchar");

        final String statement =
                "insert into MySink select "
                        + "a, "
                        + "cast(a as varchar) as a1, "
                        + "b, "
                        + "substring(c, 1, 8) as c1 "
                        + "from MyTable where b > 1";
        compileSqlAndExecutePlan(statement).await();

        assertResult(Collections.singletonList("3,3,2,hello wo"), sinkFile);
    }

    @Test
    public void testCalcWithUdf() throws Exception {
        // Register one function per registration flavour: temporary, temporary-system, catalog.
        tableEnv.createTemporaryFunction("udf1", new JavaFunc0());
        tableEnv.createTemporarySystemFunction("udf2", new JavaFunc2());
        tableEnv.createFunction("udf3", UdfWithOpen.class);

        createTestValuesSourceTable(
                "MyTable",
                JavaScalaConversionUtil.toJava(TestData.smallData3()),
                "a int",
                "b bigint",
                "c varchar");
        final File sinkFile =
                createTestCsvSinkTable(
                        "MySink", "a int", "a1 varchar", "b bigint", "c1 varchar", "c2 varchar");

        final String statement =
                "insert into MySink select "
                        + "a, "
                        + "cast(a as varchar) as a1, "
                        + "b, "
                        + "udf2(c, a) as c1, "
                        + "udf3(substring(c, 1, 8)) as c2 "
                        + "from MyTable where "
                        + "(udf1(a) > 2 or (a * b) > 1) and b > 0";
        compileSqlAndExecutePlan(statement).await();

        assertResult(
                Arrays.asList("2,2,2,Hello2,$Hello", "3,3,2,Hello world3,$Hello wo"), sinkFile);
    }
}
| apache-2.0 |
bazaarvoice/astyanax | astyanax-cassandra/src/main/java/com/netflix/astyanax/model/AbstractColumnImpl.java | 2878 | package com.netflix.astyanax.model;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.UUID;
import com.netflix.astyanax.Serializer;
import com.netflix.astyanax.serializers.BooleanSerializer;
import com.netflix.astyanax.serializers.ByteBufferSerializer;
import com.netflix.astyanax.serializers.ByteSerializer;
import com.netflix.astyanax.serializers.BytesArraySerializer;
import com.netflix.astyanax.serializers.DateSerializer;
import com.netflix.astyanax.serializers.DoubleSerializer;
import com.netflix.astyanax.serializers.FloatSerializer;
import com.netflix.astyanax.serializers.IntegerSerializer;
import com.netflix.astyanax.serializers.LongSerializer;
import com.netflix.astyanax.serializers.ShortSerializer;
import com.netflix.astyanax.serializers.StringSerializer;
import com.netflix.astyanax.serializers.UUIDSerializer;
/**
 * Skeletal {@link Column} implementation that stores the column name and
 * derives every typed value accessor from the subclass's
 * {@code getValue(Serializer)} implementation.
 *
 * @param <C> type of the column name
 */
public abstract class AbstractColumnImpl<C> implements Column<C> {

    /** Immutable column name. */
    private final C name;

    public AbstractColumnImpl(C name) {
        this.name = name;
    }

    @Override
    public final C getName() {
        return this.name;
    }

    // ----- primitive value accessors -----

    @Override
    public final boolean getBooleanValue() {
        return getValue(BooleanSerializer.get());
    }

    @Override
    public final byte getByteValue() {
        return getValue(ByteSerializer.get());
    }

    @Override
    public final short getShortValue() {
        return getValue(ShortSerializer.get());
    }

    @Override
    public final int getIntegerValue() {
        return getValue(IntegerSerializer.get());
    }

    // Note: unlike the other accessors this one is intentionally non-final,
    // so subclasses may override it.
    @Override
    public long getLongValue() {
        return getValue(LongSerializer.get());
    }

    @Override
    public final float getFloatValue() {
        return getValue(FloatSerializer.get());
    }

    @Override
    public final double getDoubleValue() {
        return getValue(DoubleSerializer.get());
    }

    // ----- object value accessors -----

    @Override
    public final String getStringValue() {
        return getValue(StringSerializer.get());
    }

    @Override
    public final byte[] getByteArrayValue() {
        return getValue(BytesArraySerializer.get());
    }

    @Override
    public final ByteBuffer getByteBufferValue() {
        return getValue(ByteBufferSerializer.get());
    }

    @Override
    public final Date getDateValue() {
        return getValue(DateSerializer.get());
    }

    @Override
    public final UUID getUUIDValue() {
        return getValue(UUIDSerializer.get());
    }

    @Override
    public final String getCompressedStringValue() {
        throw new UnsupportedOperationException("getCompressedString not yet implemented");
    }

    // ----- sub-column support: simple columns have no children -----

    @Override
    public <C2> ColumnList<C2> getSubColumns(Serializer<C2> ser) {
        throw new UnsupportedOperationException("SimpleColumn '" + name + "' has no children");
    }

    @Override
    public boolean isParentColumn() {
        return false;
    }
} | apache-2.0 |
McLeodMoores/starling | projects/analytics/src/test/java/com/opengamma/analytics/financial/covariance/CovarianceCalculatorTest.java | 2394 | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.covariance;
import org.testng.annotations.Test;
import com.opengamma.timeseries.DoubleTimeSeries;
import com.opengamma.timeseries.precise.instant.ImmutableInstantDoubleTimeSeries;
import com.opengamma.util.test.TestGroup;
/**
* Test.
*/
@Test(groups = TestGroup.UNIT)
public class CovarianceCalculatorTest {

  /** No-op calculator: only the inherited testTimeSeries() validation is exercised here. */
  private static final CovarianceCalculator CALCULATOR = new CovarianceCalculator() {

    @Override
    public Double evaluate(final DoubleTimeSeries<?>... x1) {
      return null;
    }
  };

  /** A series containing a single point (too short to be valid input). */
  private static final DoubleTimeSeries<?> SINGLE_POINT = ImmutableInstantDoubleTimeSeries.of(new long[] {2}, new double[] {1});
  /** Two points at instants 2 and 3. */
  private static final DoubleTimeSeries<?> PAIR_A = ImmutableInstantDoubleTimeSeries.of(new long[] {2, 3}, new double[] {1, 2});
  /** Two points at instants 4 and 5 — same length as PAIR_A but different dates. */
  private static final DoubleTimeSeries<?> PAIR_B = ImmutableInstantDoubleTimeSeries.of(new long[] {4, 5}, new double[] {1, 2});
  /** Three points at instants 4, 5 and 6 — different length from PAIR_A. */
  private static final DoubleTimeSeries<?> TRIPLE = ImmutableInstantDoubleTimeSeries.of(new long[] {4, 5, 6}, new double[] {1, 2, 3});

  /** A null first series must be rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNullTS1() {
    CALCULATOR.testTimeSeries(null, SINGLE_POINT);
  }

  /** A null second series must be rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testNullTS2() {
    CALCULATOR.testTimeSeries(PAIR_A, null);
  }

  /** An empty first series must be rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testEmptyTS1() {
    CALCULATOR.testTimeSeries(ImmutableInstantDoubleTimeSeries.EMPTY_SERIES, SINGLE_POINT);
  }

  /** An empty second series must be rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testEmptyTS2() {
    CALCULATOR.testTimeSeries(PAIR_A, ImmutableInstantDoubleTimeSeries.EMPTY_SERIES);
  }

  /** A first series with fewer than two points must be rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testSmallTS1() {
    CALCULATOR.testTimeSeries(SINGLE_POINT, PAIR_A);
  }

  /** A second series with fewer than two points must be rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testSmallTS2() {
    CALCULATOR.testTimeSeries(PAIR_A, SINGLE_POINT);
  }

  /** Series of different lengths must be rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testDifferentLength() {
    CALCULATOR.testTimeSeries(PAIR_A, TRIPLE);
  }

  /** Equal-length series with mismatched dates must be rejected. */
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testDifferentDates() {
    CALCULATOR.testTimeSeries(PAIR_A, PAIR_B);
  }
}
| apache-2.0 |
wjw465150/jodd | jodd-joy/src/test/java/jodd/joy/core/DefaultAppCoreTest.java | 2879 | // Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.joy.core;
import jodd.petite.PetiteContainer;
import jodd.props.Props;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
public class DefaultAppCoreTest {
	@Test
	public void testAppPropsNameAndPattern() {
		// Defaults established by initCore().
		AppCore core = new AppCore();
		core.initCore();
		assertEquals("app.props", core.appPropsName);
		assertEquals("/app*.prop*", core.appPropsNamePattern);
		assertEquals("core", AppCore.PETITE_CORE);
		// Boot the core far enough to register the scanner bean in Petite.
		core.initLogger();
		core.initProps();
		core.initScanner();
		core.startPetite();
		PetiteContainer container = core.petite;
		AppScanner scanner = (AppScanner) container.getBean(AppCore.PETITE_SCAN);
		// The bean registered in the container must be the scanner held by the core.
		assertSame(core.appScanner, scanner);
		// The values below mirror the properties injected by AppCore#initProps().
		assertTrue(scanner.ignoreExceptions);
		assertEquals(3, scanner.includedEntries.length);
		assertEquals("jodd.*", scanner.includedEntries[0]);
		assertEquals("foo.*", scanner.includedEntries[1]);
		assertEquals("bar.*", scanner.includedEntries[2]);
		assertEquals(1, scanner.includedJars.length);
		assertEquals("xxx", scanner.includedJars[0]);
	}
	/**
	 * Minimal app core for the test: runs from the current directory and feeds
	 * the scanner configuration from in-memory properties instead of a file.
	 */
	public static class AppCore extends DefaultAppCore {
		public AppCore() {
			appDir = "";
		}
		@Override
		protected void initProps() {
			appProps = new Props();
			appProps.setValue("scan.ignoreExceptions", "true");
			appProps.setValue("scan.includedEntries", "jodd.*,foo.*,bar.*");
			appProps.setValue("scan.includedJars", "xxx");
		}
	}
} | bsd-2-clause |
fatihboy/smarthome | bundles/automation/org.eclipse.smarthome.automation.api/src/main/java/org/eclipse/smarthome/automation/type/ModuleType.java | 7758 | /**
* Copyright (c) 1997, 2015 by ProSyst Software GmbH and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.smarthome.automation.type;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import org.eclipse.smarthome.automation.Module;
import org.eclipse.smarthome.automation.Rule;
import org.eclipse.smarthome.automation.Visibility;
import org.eclipse.smarthome.automation.handler.ModuleHandler;
import org.eclipse.smarthome.config.core.ConfigDescriptionParameter;
/**
* Each {@link ModuleType} instance defines the meta-information needed for creation of {@link Module} instance which is
* a building block for the {@link Rule}. The meta-information describes the {@link ConfigDescriptionParameter}s,
* {@link Input}s and {@link Output}s of the {@link Module}s. Each {@link ModuleType} instance defines an unique id
* which is used as reference from the {@link Module}s, to find their meta-information.
* <p>
* Whether the {@link ModuleType}s can be used by anyone, depends from their visibility, but they can be modified only
* by their creator.
*
* @author Yordan Mihaylov - Initial Contribution
* @author Ana Dimova - Initial Contribution
*
*/
public abstract class ModuleType {
    /**
     * The unique identifier referenced by {@link Module}s in order to locate their
     * meta-information.
     */
    private String uid;
    /**
     * Determines who may use this {@link ModuleType}: anyone when it is
     * {@link Visibility#VISIBLE}, only its creator when it is {@link Visibility#HIDDEN}.
     */
    private Visibility visibility = Visibility.VISIBLE;
    /**
     * Non-hierarchical keywords categorizing this {@link ModuleType}; they serve as
     * criteria when searching or filtering {@link ModuleType}s.
     */
    private Set<String> tags;
    /**
     * A short, accurate, user-friendly name of this {@link ModuleType}.
     */
    private String label;
    /**
     * A short, understandable description of what this {@link ModuleType} can be used for.
     */
    private String description;
    protected List<ConfigDescriptionParameter> configDescriptions;
    /**
     * Default constructor for deserialization e.g. by Gson.
     */
    protected ModuleType() {
    }
    /**
     * Initializes the base properties common to every {@link ModuleType}.
     *
     * @param UID the unique identifier of this {@link ModuleType}, referenced by
     *            {@link Module}s to locate their meta-information.
     * @param configDescriptions a {@link List} of configuration parameter descriptions.
     */
    public ModuleType(String UID, List<ConfigDescriptionParameter> configDescriptions) {
        this.uid = UID;
        this.configDescriptions = configDescriptions;
    }
    /**
     * Initializes all common properties of a {@link ModuleType}.
     *
     * @param UID the unique identifier of this {@link ModuleType}.
     * @param configDescriptions a {@link List} of configuration parameter descriptions.
     * @param label a short, accurate name for the type.
     * @param description a short description of what the type can be used for.
     * @param tags categories that fit the type, used for searching or filtering it.
     * @param visibility {@link Visibility#VISIBLE} when the type is usable by anyone,
     *            {@link Visibility#HIDDEN} when it is restricted to its creator.
     */
    public ModuleType(String UID, List<ConfigDescriptionParameter> configDescriptions, String label, String description,
            Set<String> tags, Visibility visibility) {
        this(UID, configDescriptions);
        this.label = label;
        this.description = description;
        this.tags = tags;
        this.visibility = visibility;
    }
    /**
     * Returns the unique id (UID) of this {@link ModuleType}. It is unique within the
     * scope of the RuleEngine and may consist of colon-separated segments: the first
     * segment names the system {@link ModuleType} (which corresponds to the
     * {@link ModuleHandler} of the same type); optional further segments name custom
     * {@link ModuleType}s. {@link Module}s use this id as a reference to their
     * meta-information.
     *
     * @return the unique id of this {@link ModuleType}.
     */
    public String getUID() {
        return uid;
    }
    /**
     * Returns the {@link ConfigDescriptionParameter}s defined by this {@link ModuleType}.
     *
     * @return the configuration descriptions; never {@code null}.
     */
    public List<ConfigDescriptionParameter> getConfigurationDescription() {
        if (configDescriptions == null) {
            return Collections.emptyList();
        }
        return configDescriptions;
    }
    /**
     * Returns the non-hierarchical keywords assigned to this {@link ModuleType}. Tags
     * describe the type and are used to filter {@link ModuleType}s.
     *
     * @return the tags assigned to this {@link ModuleType}; never {@code null}.
     */
    public Set<String> getTags() {
        if (tags == null) {
            return Collections.emptySet();
        }
        return tags;
    }
    /**
     * Returns the label - a short, user-friendly name - of this {@link ModuleType}.
     *
     * @return the label of this {@link ModuleType}.
     */
    public String getLabel() {
        return label;
    }
    /**
     * Returns a short, understandable description of what this {@link ModuleType} can
     * be used for.
     *
     * @return the description of this {@link ModuleType}.
     */
    public String getDescription() {
        return description;
    }
    /**
     * Returns the visibility of this {@link ModuleType}: {@link Visibility#VISIBLE}
     * when it is usable by anyone, {@link Visibility#HIDDEN} when it is restricted to
     * its creator. The default is {@link Visibility#VISIBLE}.
     *
     * @return the visibility of this {@link ModuleType}.
     */
    public Visibility getVisibility() {
        return visibility == null ? Visibility.VISIBLE : visibility;
    }
    @Override
    public int hashCode() {
        // Identity is based on the UID only; equivalent to 31 * 1 + hash(uid).
        return 31 + (uid == null ? 0 : uid.hashCode());
    }
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ModuleType other = (ModuleType) obj;
        return uid == null ? other.uid == null : uid.equals(other.uid);
    }
}
| epl-1.0 |
galang-pradhana/vogella | de.vogella.rcp.intro.filteredtree/src/de/vogella/rcp/intro/filteredtree/ApplicationWorkbenchAdvisor.java | 631 | package de.vogella.rcp.intro.filteredtree;
import org.eclipse.ui.application.IWorkbenchWindowConfigurer;
import org.eclipse.ui.application.WorkbenchAdvisor;
import org.eclipse.ui.application.WorkbenchWindowAdvisor;
public class ApplicationWorkbenchAdvisor extends WorkbenchAdvisor {
private static final String PERSPECTIVE_ID = "de.vogella.intro.rcp.filteredtree.perspective";
public WorkbenchWindowAdvisor createWorkbenchWindowAdvisor(
IWorkbenchWindowConfigurer configurer) {
return new ApplicationWorkbenchWindowAdvisor(configurer);
}
public String getInitialWindowPerspectiveId() {
return PERSPECTIVE_ID;
}
}
| epl-1.0 |
logicmoo/jrelisp-abcl-ws | src/org/armedbear/lisp/FuncallableStandardObject.java | 8119 | /*
* FuncallableStandardObject.java
*
* Copyright (C) 2003-2006 Peter Graves, 2012 Rudolf Schlatte
* $Id$
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* As a special exception, the copyright holders of this library give you
* permission to link this library with independent modules to produce an
* executable, regardless of the license terms of these independent
* modules, and to copy and distribute the resulting executable under
* terms of your choice, provided that you also meet, for each linked
* independent module, the terms and conditions of the license of that
* module. An independent module is a module which is not derived from
* or based on this library. If you modify this library, you may extend
* this exception to your version of the library, but you are not
* obligated to do so. If you do not wish to do so, delete this
* exception statement from your version.
*/
// TODO: swap-slots is currently handled by StandardObject, so doesn't
// exchange the functions.
package org.armedbear.lisp;
import static org.armedbear.lisp.Lisp.*;
// A funcallable standard object (AMOP's FUNCALLABLE-STANDARD-OBJECT): a CLOS
// instance that can also be invoked as a function.  Every call arity simply
// delegates to the 'function' slot, which is installed through the primitive
// MOP:SET-FUNCALLABLE-INSTANCE-FUNCTION registered below.
public class FuncallableStandardObject extends StandardObject
{
  // The wrapped callable; all execute(...) overloads forward to it.
  // Null until set-funcallable-instance-function has been called.
  LispObject function;
  // KLUDGE: this is only needed for generic functions, but doesn't hurt
  // to have it here.
  EMFCache cache = new EMFCache();
  protected FuncallableStandardObject()
  {
    super();
  }
  // Allocates an instance using the given layout and that layout's slot count.
  protected FuncallableStandardObject(Layout layout)
  {
    this(layout, layout.getLength());
  }
  protected FuncallableStandardObject(Layout layout, int length)
  {
    super(layout, length);
  }
  protected FuncallableStandardObject(LispClass cls, int length)
  {
    super(cls, length);
  }
  protected FuncallableStandardObject(LispClass cls)
  {
    super(cls);
  }
  // Type predicate: COMPILED-FUNCTION is answered by the wrapped function
  // (NIL while none is installed); FUNCALLABLE-STANDARD-OBJECT is always T;
  // everything else is decided by StandardObject.
  @Override
  public LispObject typep(LispObject type)
  {
    if (type == Symbol.COMPILED_FUNCTION)
      {
        if (function != null)
          return function.typep(type);
        else
          return NIL;
      }
    if (type == Symbol.FUNCALLABLE_STANDARD_OBJECT)
      return T;
    if (type == StandardClass.FUNCALLABLE_STANDARD_OBJECT)
      return T;
    return super.typep(type);
  }
  // The execute overloads below forward each call arity to 'function'.
  @Override
  public LispObject execute()
  {
    return function.execute();
  }
  @Override
  public LispObject execute(LispObject arg)
  {
    return function.execute(arg);
  }
  @Override
  public LispObject execute(LispObject first, LispObject second)
  {
    return function.execute(first, second);
  }
  @Override
  public LispObject execute(LispObject first, LispObject second,
                            LispObject third)
  {
    return function.execute(first, second, third);
  }
  @Override
  public LispObject execute(LispObject first, LispObject second,
                            LispObject third, LispObject fourth)
  {
    return function.execute(first, second, third, fourth);
  }
  @Override
  public LispObject execute(LispObject first, LispObject second,
                            LispObject third, LispObject fourth,
                            LispObject fifth)
  {
    return function.execute(first, second, third, fourth,
                            fifth);
  }
  @Override
  public LispObject execute(LispObject first, LispObject second,
                            LispObject third, LispObject fourth,
                            LispObject fifth, LispObject sixth)
  {
    return function.execute(first, second, third, fourth,
                            fifth, sixth);
  }
  @Override
  public LispObject execute(LispObject first, LispObject second,
                            LispObject third, LispObject fourth,
                            LispObject fifth, LispObject sixth,
                            LispObject seventh)
  {
    return function.execute(first, second, third, fourth,
                            fifth, sixth, seventh);
  }
  @Override
  public LispObject execute(LispObject first, LispObject second,
                            LispObject third, LispObject fourth,
                            LispObject fifth, LispObject sixth,
                            LispObject seventh, LispObject eighth)
  {
    return function.execute(first, second, third, fourth,
                            fifth, sixth, seventh, eighth);
  }
  @Override
  public LispObject execute(LispObject[] args)
  {
    return function.execute(args);
  }
  // SYS:%ALLOCATE-FUNCALLABLE-INSTANCE -- allocates a fresh instance from a
  // funcallable-standard-class's layout; signals an error for any other class
  // or for an invalid layout.
  private static final Primitive _ALLOCATE_FUNCALLABLE_INSTANCE
    = new pf__allocate_funcallable_instance();
  @DocString(name="%allocate-funcallable-instance",
             args="class",
             returns="instance")
  private static final class pf__allocate_funcallable_instance extends Primitive
  {
    pf__allocate_funcallable_instance()
    {
      super("%allocate-funcallable-instance", PACKAGE_SYS, true, "class");
    }
    @Override
    public LispObject execute(LispObject arg)
    {
      if (arg.typep(StandardClass.FUNCALLABLE_STANDARD_CLASS) != NIL) {
        LispObject l = Symbol.CLASS_LAYOUT.execute(arg);
        if (! (l instanceof Layout)) {
          return program_error("Invalid standard class layout for: "
                               + arg.princToString() + ".");
        }
        return new FuncallableStandardObject((Layout)l);
      }
      return type_error(arg, Symbol.FUNCALLABLE_STANDARD_CLASS);
    }
  };
  // AMOP p. 230
  // MOP:SET-FUNCALLABLE-INSTANCE-FUNCTION -- installs the callable that all
  // subsequent invocations of the instance will delegate to.
  private static final Primitive SET_FUNCALLABLE_INSTANCE_FUNCTION
    = new pf_set_funcallable_instance_function();
  @DocString(name="set-funcallable-instance-function",
             args="funcallable-instance function",
             returns="unspecified")
  private static final class pf_set_funcallable_instance_function extends Primitive
  {
    pf_set_funcallable_instance_function()
    {
      super("set-funcallable-instance-function", PACKAGE_MOP, true,
            "funcallable-instance function");
    }
    @Override
    public LispObject execute(LispObject first, LispObject second)
    {
      checkFuncallableStandardObject(first).function = second;
      return second;
    }
  };
  // MOP:FUNCALLABLE-INSTANCE-FUNCTION -- reads back the installed callable.
  private static final Primitive FUNCALLABLE_INSTANCE_FUNCTION
    = new pf_funcallable_instance_function();
  @DocString(name="funcallable-instance-function",
             args="funcallable-instance",
             returns="function")
  private static final class pf_funcallable_instance_function extends Primitive
  {
    pf_funcallable_instance_function()
    {
      super("funcallable-instance-function", PACKAGE_MOP, false,
            "funcallable-instance");
    }
    @Override
    public LispObject execute(LispObject arg)
    {
      return checkFuncallableStandardObject(arg).function;
    }
  };
  // Profiling.
  // Call/hot counters queried by the profiler machinery in LispObject.
  private int callCount;
  private int hotCount;
  @Override
  public final int getCallCount()
  {
    return callCount;
  }
  @Override
  public void setCallCount(int n)
  {
    callCount = n;
  }
  @Override
  public final void incrementCallCount()
  {
    ++callCount;
  }
  @Override
  public final int getHotCount()
  {
    return hotCount;
  }
  @Override
  public void setHotCount(int n)
  {
    hotCount = n;
  }
  @Override
  public final void incrementHotCount()
  {
    ++hotCount;
  }
  // Downcast helper: returns the argument as a FuncallableStandardObject or
  // signals a Lisp TYPE-ERROR (the cast after type_error is never reached).
  public static final FuncallableStandardObject checkFuncallableStandardObject(LispObject obj)
  {
    if (obj instanceof FuncallableStandardObject)
      return (FuncallableStandardObject) obj;
    return (FuncallableStandardObject) // Not reached.
      type_error(obj, Symbol.FUNCALLABLE_STANDARD_OBJECT);
  }
}
| gpl-2.0 |
rex-xxx/mt6572_x201 | libcore/luni/src/main/java/java/util/TreeSet.java | 15634 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.util;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
/**
* TreeSet is an implementation of SortedSet. All optional operations (adding
* and removing) are supported. The elements can be any objects which are
* comparable to each other either using their natural order or a specified
* Comparator.
*
* @since 1.2
*/
public class TreeSet<E> extends AbstractSet<E> implements NavigableSet<E>,
        Cloneable, Serializable {
    private static final long serialVersionUID = -2479143000061671589L;
    /** Keys are this set's elements. Values are always Boolean.TRUE */
    private transient NavigableMap<E, Object> backingMap;
    // Lazily created, cached view returned by descendingSet().
    private transient NavigableSet<E> descendingSet;
    // Package-private: adopts an existing map (used to wrap descending/sub views).
    TreeSet(NavigableMap<E, Object> map) {
        backingMap = map;
    }
    /**
     * Constructs a new empty instance of {@code TreeSet} which uses natural
     * ordering.
     */
    public TreeSet() {
        backingMap = new TreeMap<E, Object>();
    }
    /**
     * Constructs a new instance of {@code TreeSet} which uses natural ordering
     * and containing the unique elements in the specified collection.
     *
     * @param collection
     *            the collection of elements to add.
     * @throws ClassCastException
     *                when an element in the collection does not implement the
     *                Comparable interface, or the elements in the collection
     *                cannot be compared.
     */
    public TreeSet(Collection<? extends E> collection) {
        this();
        addAll(collection);
    }
    /**
     * Constructs a new empty instance of {@code TreeSet} which uses the
     * specified comparator.
     *
     * @param comparator
     *            the comparator to use.
     */
    public TreeSet(Comparator<? super E> comparator) {
        backingMap = new TreeMap<E, Object>(comparator);
    }
    /**
     * Constructs a new instance of {@code TreeSet} containing the elements of
     * the specified SortedSet and using the same Comparator.
     *
     * @param set
     *            the SortedSet of elements to add.
     */
    public TreeSet(SortedSet<E> set) {
        this(set.comparator());
        Iterator<E> it = set.iterator();
        while (it.hasNext()) {
            add(it.next());
        }
    }
    /**
     * Adds the specified object to this {@code TreeSet}.
     *
     * @param object
     *            the object to add.
     * @return {@code true} when this {@code TreeSet} did not already contain
     *         the object, {@code false} otherwise.
     * @throws ClassCastException
     *             when the object cannot be compared with the elements in this
     *             {@code TreeSet}.
     * @throws NullPointerException
     *             when the object is null and the comparator cannot handle
     *             null.
     */
    @Override
    public boolean add(E object) {
        // Element becomes a map key; a previous non-null mapping means it was present.
        return backingMap.put(object, Boolean.TRUE) == null;
    }
    /**
     * Adds the objects in the specified collection to this {@code TreeSet}.
     *
     * @param collection
     *            the collection of objects to add.
     * @return {@code true} if this {@code TreeSet} was modified, {@code false}
     *         otherwise.
     * @throws ClassCastException
     *             when an object in the collection cannot be compared with the
     *             elements in this {@code TreeSet}.
     * @throws NullPointerException
     *             when an object in the collection is null and the comparator
     *             cannot handle null.
     */
    @Override
    public boolean addAll(Collection<? extends E> collection) {
        return super.addAll(collection);
    }
    /**
     * Removes all elements from this {@code TreeSet}, leaving it empty.
     *
     * @see #isEmpty
     * @see #size
     */
    @Override
    public void clear() {
        backingMap.clear();
    }
    /**
     * Returns a new {@code TreeSet} with the same elements, size and comparator
     * as this {@code TreeSet}.
     *
     * @return a shallow copy of this {@code TreeSet}.
     * @see java.lang.Cloneable
     */
    @SuppressWarnings("unchecked")
    @Override
    public Object clone() {
        try {
            TreeSet<E> clone = (TreeSet<E>) super.clone();
            // Fast path: clone the TreeMap directly; otherwise (e.g. a sub-map
            // view) rebuild a fresh TreeMap from the view's entries.
            if (backingMap instanceof TreeMap) {
                clone.backingMap = (NavigableMap<E, Object>) ((TreeMap<E, Object>) backingMap)
                        .clone();
            } else {
                clone.backingMap = new TreeMap<E, Object>(backingMap);
            }
            return clone;
        } catch (CloneNotSupportedException e) {
            throw new AssertionError(e);
        }
    }
    /**
     * Returns the comparator used to compare elements in this {@code TreeSet}.
     *
     * @return a Comparator or null if the natural ordering is used
     */
    public Comparator<? super E> comparator() {
        return backingMap.comparator();
    }
    /**
     * Searches this {@code TreeSet} for the specified object.
     *
     * @param object
     *            the object to search for.
     * @return {@code true} if {@code object} is an element of this
     *         {@code TreeSet}, {@code false} otherwise.
     * @throws ClassCastException
     *             when the object cannot be compared with the elements in this
     *             {@code TreeSet}.
     * @throws NullPointerException
     *             when the object is null and the comparator cannot handle
     *             null.
     */
    @Override
    public boolean contains(Object object) {
        return backingMap.containsKey(object);
    }
    /**
     * Returns true if this {@code TreeSet} has no element, otherwise false.
     *
     * @return true if this {@code TreeSet} has no element.
     * @see #size
     */
    @Override
    public boolean isEmpty() {
        return backingMap.isEmpty();
    }
    /**
     * Returns an Iterator on the elements of this {@code TreeSet}.
     *
     * @return an Iterator on the elements of this {@code TreeSet}.
     * @see Iterator
     */
    @Override
    public Iterator<E> iterator() {
        return backingMap.keySet().iterator();
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#descendingIterator()
     * @since 1.6
     */
    public Iterator<E> descendingIterator() {
        return descendingSet().iterator();
    }
    /**
     * Removes an occurrence of the specified object from this {@code TreeSet}.
     *
     * @param object
     *            the object to remove.
     * @return {@code true} if this {@code TreeSet} was modified, {@code false}
     *         otherwise.
     * @throws ClassCastException
     *             when the object cannot be compared with the elements in this
     *             {@code TreeSet}.
     * @throws NullPointerException
     *             when the object is null and the comparator cannot handle
     *             null.
     */
    @Override
    public boolean remove(Object object) {
        return backingMap.remove(object) != null;
    }
    /**
     * Returns the number of elements in this {@code TreeSet}.
     *
     * @return the number of elements in this {@code TreeSet}.
     */
    @Override
    public int size() {
        return backingMap.size();
    }
    /**
     * Returns the first element in this set.
     * @exception NoSuchElementException when this TreeSet is empty
     */
    public E first() {
        return backingMap.firstKey();
    }
    /**
     * Returns the last element in this set.
     * @exception NoSuchElementException when this TreeSet is empty
     */
    public E last() {
        return backingMap.lastKey();
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#pollFirst()
     * @since 1.6
     */
    public E pollFirst() {
        // null entry (empty set) maps to a null return, per the NavigableSet contract.
        Map.Entry<E, Object> entry = backingMap.pollFirstEntry();
        return (entry == null) ? null : entry.getKey();
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#pollLast()
     * @since 1.6
     */
    public E pollLast() {
        Map.Entry<E, Object> entry = backingMap.pollLastEntry();
        return (entry == null) ? null : entry.getKey();
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#higher(java.lang.Object)
     * @since 1.6
     */
    public E higher(E e) {
        return backingMap.higherKey(e);
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#lower(java.lang.Object)
     * @since 1.6
     */
    public E lower(E e) {
        return backingMap.lowerKey(e);
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#ceiling(java.lang.Object)
     * @since 1.6
     */
    public E ceiling(E e) {
        return backingMap.ceilingKey(e);
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#floor(java.lang.Object)
     * @since 1.6
     */
    public E floor(E e) {
        return backingMap.floorKey(e);
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#descendingSet()
     * @since 1.6
     */
    public NavigableSet<E> descendingSet() {
        // Lazily create and cache the reverse-order view.
        return (descendingSet != null) ? descendingSet
                : (descendingSet = new TreeSet<E>(backingMap.descendingMap()));
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#subSet(Object, boolean, Object, boolean)
     * @since 1.6
     */
    @SuppressWarnings("unchecked")
    public NavigableSet<E> subSet(E start, boolean startInclusive, E end,
            boolean endInclusive) {
        // Validate start <= end (by comparator, or natural order) before
        // handing out the backing map's range view.
        Comparator<? super E> c = backingMap.comparator();
        int compare = (c == null) ? ((Comparable<E>) start).compareTo(end) : c
                .compare(start, end);
        if (compare <= 0) {
            return new TreeSet<E>(backingMap.subMap(start, startInclusive, end,
                    endInclusive));
        }
        throw new IllegalArgumentException();
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#headSet(Object, boolean)
     * @since 1.6
     */
    @SuppressWarnings("unchecked")
    public NavigableSet<E> headSet(E end, boolean endInclusive) {
        // Check for errors: compare the bound against itself to trigger an
        // eager ClassCastException/NullPointerException for an unusable bound.
        Comparator<? super E> c = backingMap.comparator();
        if (c == null) {
            ((Comparable<E>) end).compareTo(end);
        } else {
            c.compare(end, end);
        }
        return new TreeSet<E>(backingMap.headMap(end, endInclusive));
    }
    /**
     * {@inheritDoc}
     *
     * @see java.util.NavigableSet#tailSet(Object, boolean)
     * @since 1.6
     */
    @SuppressWarnings("unchecked")
    public NavigableSet<E> tailSet(E start, boolean startInclusive) {
        // Check for errors: same eager bound validation as headSet.
        Comparator<? super E> c = backingMap.comparator();
        if (c == null) {
            ((Comparable<E>) start).compareTo(start);
        } else {
            c.compare(start, start);
        }
        return new TreeSet<E>(backingMap.tailMap(start, startInclusive));
    }
    /**
     * Returns a {@code SortedSet} of the specified portion of this {@code TreeSet} which
     * contains elements greater or equal to the start element but less than the
     * end element. The returned SortedSet is backed by this TreeSet so changes
     * to one are reflected by the other.
     *
     * @param start
     *            the start element
     * @param end
     *            the end element
     * @return a subset where the elements are greater or equal to
     *         <code>start</code> and less than <code>end</code>
     *
     * @exception ClassCastException
     *                when the start or end object cannot be compared with the
     *                elements in this TreeSet
     * @exception NullPointerException
     *                when the start or end object is null and the comparator
     *                cannot handle null
     */
    @SuppressWarnings("unchecked")
    public SortedSet<E> subSet(E start, E end) {
        return subSet(start, true, end, false);
    }
    /**
     * Returns a {@code SortedSet} of the specified portion of this {@code TreeSet} which
     * contains elements less than the end element. The returned SortedSet is
     * backed by this TreeSet so changes to one are reflected by the other.
     *
     * @param end
     *            the end element
     * @return a subset where the elements are less than <code>end</code>
     *
     * @exception ClassCastException
     *                when the end object cannot be compared with the elements
     *                in this TreeSet
     * @exception NullPointerException
     *                when the end object is null and the comparator cannot
     *                handle null
     */
    @SuppressWarnings("unchecked")
    public SortedSet<E> headSet(E end) {
        return headSet(end, false);
    }
    /**
     * Returns a {@code SortedSet} of the specified portion of this {@code TreeSet} which
     * contains elements greater or equal to the start element. The returned
     * SortedSet is backed by this TreeSet so changes to one are reflected by
     * the other.
     *
     * @param start
     *            the start element
     * @return a subset where the elements are greater or equal to
     *         <code>start</code>
     *
     * @exception ClassCastException
     *                when the start object cannot be compared with the elements
     *                in this TreeSet
     * @exception NullPointerException
     *                when the start object is null and the comparator cannot
     *                handle null
     */
    @SuppressWarnings("unchecked")
    public SortedSet<E> tailSet(E start) {
        return tailSet(start, true);
    }
    // Serialized form: default fields, then the comparator, the element count,
    // and each element in iteration (ascending) order.
    private void writeObject(ObjectOutputStream stream) throws IOException {
        stream.defaultWriteObject();
        stream.writeObject(backingMap.comparator());
        int size = backingMap.size();
        stream.writeInt(size);
        if (size > 0) {
            Iterator<E> it = backingMap.keySet().iterator();
            while (it.hasNext()) {
                stream.writeObject(it.next());
            }
        }
    }
    // Rebuilds the transient backing map from the serialized comparator,
    // count and elements written by writeObject.
    @SuppressWarnings("unchecked")
    private void readObject(ObjectInputStream stream) throws IOException,
            ClassNotFoundException {
        stream.defaultReadObject();
        TreeMap<E, Object> map = new TreeMap<E, Object>(
                (Comparator<? super E>) stream.readObject());
        int size = stream.readInt();
        if (size > 0) {
            for (int i=0; i<size; i++) {
                E elem = (E)stream.readObject();
                map.put(elem, Boolean.TRUE);
            }
        }
        backingMap = map;
    }
}
| gpl-2.0 |
ua-eas/kfs-devops-automation-fork | kfs-bc/src/main/java/org/kuali/kfs/module/bc/document/dataaccess/impl/PayrateExportDaoOjb.java | 5691 | /*
* The Kuali Financial System, a comprehensive financial management system for higher education.
*
* Copyright 2005-2014 The Kuali Foundation
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.kuali.kfs.module.bc.document.dataaccess.impl;
import java.math.BigDecimal;
import java.util.Iterator;
import org.apache.ojb.broker.query.Criteria;
import org.apache.ojb.broker.query.QueryFactory;
import org.apache.ojb.broker.query.ReportQueryByCriteria;
import org.kuali.kfs.module.bc.BCConstants;
import org.kuali.kfs.module.bc.BCPropertyConstants;
import org.kuali.kfs.module.bc.businessobject.BudgetConstructionPayRateHolding;
import org.kuali.kfs.module.bc.businessobject.BudgetConstructionPosition;
import org.kuali.kfs.module.bc.businessobject.PendingBudgetConstructionAppointmentFunding;
import org.kuali.kfs.module.bc.document.dataaccess.PayrateExportDao;
import org.kuali.kfs.sys.KFSPropertyConstants;
import org.kuali.rice.core.framework.persistence.ojb.dao.PlatformAwareDaoBaseOjb;
/**
 * OJB implementation of {@link PayrateExportDao}: validates position union
 * codes and copies appointment funding rows into the pay rate holding table
 * for export.
 */
public class PayrateExportDaoOjb extends PlatformAwareDaoBaseOjb implements PayrateExportDao {

    /**
     * A union code is valid when at least one budget construction position
     * carries it.
     *
     * @see org.kuali.kfs.module.bc.document.dataaccess.PayrateExportDao#isValidPositionUnionCode(java.lang.String)
     */
    public boolean isValidPositionUnionCode(String positionUnionCode) {
        Criteria criteria = new Criteria();
        criteria.addEqualTo(BCPropertyConstants.POSITION_UNION_CODE, positionUnionCode);
        // Equivalent to the verbose size()==0 -> false / else -> true form.
        return !getPersistenceBrokerTemplate().getCollectionByQuery(QueryFactory.newQuery(BudgetConstructionPosition.class, criteria)).isEmpty();
    }

    /**
     * Copies each unique appointment funding row for the given budget year and
     * union code into the pay rate holding table, stamped with the requesting
     * principal.
     *
     * @param budgetYear budget fiscal year to export
     * @param positionUnionCode union code used to select positions
     * @param principalId principal requesting the export
     * @return number of holding rows stored
     * @see org.kuali.kfs.module.bc.document.dataaccess.PayrateExportDao#buildPayRateHoldingRows(java.lang.Integer, java.lang.String, java.lang.String)
     */
    public Integer buildPayRateHoldingRows(Integer budgetYear, String positionUnionCode, String principalId) {
        // plain int counter avoids repeated Integer boxing; autoboxed on return
        int rowsSaved = 0;
        Iterator<Object[]> payRateRows = getPersistenceBrokerTemplate().getReportQueryIteratorByQuery(queryForPayrateHoldingRecords(budgetYear, positionUnionCode));

        while (payRateRows.hasNext()) {
            Object[] payRateRow = payRateRows.next();
            BudgetConstructionPayRateHolding payRateHolder = new BudgetConstructionPayRateHolding();
            // New rows start with a zero requested pay rate; use the shared
            // constant instead of allocating new BigDecimal(0) per row.
            payRateHolder.setAppointmentRequestedPayRate(BigDecimal.ZERO);
            // Index order must match the select list built in
            // queryForPayrateHoldingRecords.
            payRateHolder.setEmplid((String) payRateRow[0]);
            payRateHolder.setPositionNumber((String) payRateRow[1]);
            payRateHolder.setName((String) payRateRow[2]);
            payRateHolder.setSetidSalary((String) payRateRow[3]);
            payRateHolder.setSalaryAdministrationPlan((String) payRateRow[4]);
            payRateHolder.setGrade((String) payRateRow[5]);
            payRateHolder.setUnionCode((String) payRateRow[6]);
            payRateHolder.setPrincipalId(principalId);
            getPersistenceBrokerTemplate().store(payRateHolder);
            rowsSaved++;
        }
        return rowsSaved;
    }

    /**
     * Builds the report query selecting the unique
     * PendingBudgetConstructionAppointmentFunding attributes used to populate
     * the payrate holding table: active, filled (non-vacant), non-confidential
     * positions for the given year and union code.
     *
     * @param budgetYear budget fiscal year
     * @param positionUnionCode union code filter
     * @return distinct report query with a seven-column select list
     */
    protected ReportQueryByCriteria queryForPayrateHoldingRecords(Integer budgetYear, String positionUnionCode) {
        Criteria criteria = new Criteria();
        criteria.addEqualTo(KFSPropertyConstants.UNIVERSITY_FISCAL_YEAR, budgetYear);
        criteria.addNotEqualTo(KFSPropertyConstants.EMPLID, BCConstants.VACANT_EMPLID);
        criteria.addEqualTo(KFSPropertyConstants.ACTIVE, "Y");
        criteria.addEqualTo(BCPropertyConstants.BUDGET_CONSTRUCTION_POSITION + "." + BCPropertyConstants.POSITION_UNION_CODE, positionUnionCode);
        criteria.addEqualTo(BCPropertyConstants.BUDGET_CONSTRUCTION_POSITION + "." + BCPropertyConstants.CONFIDENTIAL_POSITION, "N");
        // third constructor argument requests DISTINCT results (OJB API)
        ReportQueryByCriteria queryId = new ReportQueryByCriteria(PendingBudgetConstructionAppointmentFunding.class, criteria, true);

        String[] selectList = new String[7];
        selectList[0] = KFSPropertyConstants.EMPLID;
        selectList[1] = KFSPropertyConstants.POSITION_NUMBER;
        selectList[2] = BCPropertyConstants.BUDGET_CONSTRUCTION_INTENDED_INCUMBENT + "." + KFSPropertyConstants.PERSON_NAME;
        selectList[3] = BCPropertyConstants.BUDGET_CONSTRUCTION_POSITION + "." + BCPropertyConstants.SET_SALARY_ID;
        selectList[4] = BCPropertyConstants.BUDGET_CONSTRUCTION_POSITION + "." + BCPropertyConstants.POSITION_SALARY_PLAN_DEFAULT;
        selectList[5] = BCPropertyConstants.BUDGET_CONSTRUCTION_POSITION + "." + BCPropertyConstants.POSITION_GRADE_DEFAULT;
        selectList[6] = BCPropertyConstants.BUDGET_CONSTRUCTION_POSITION + "." + BCPropertyConstants.POSITION_UNION_CODE;
        queryId.setAttributes(selectList);
        return queryId;
    }
}
| agpl-3.0 |
danieljue/beast-mcmc | src/dr/evomodel/tree/RateCovarianceStatistic.java | 2815 | /*
* RateCovarianceStatistic.java
*
* Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.evomodel.tree;
import dr.evolution.tree.NodeRef;
import dr.evolution.tree.Tree;
import dr.evomodel.branchratemodel.BranchRateModel;
import dr.inference.model.Statistic;
import dr.stats.DiscreteStatistics;
/**
 * A statistic that tracks the covariance of rates on branches.
 *
 * Reports the covariance between the rate on each branch and the rate on its
 * parent branch, over all branches whose parent is not the root.
 *
 * @author Alexei Drummond
 * @version $Id: RateCovarianceStatistic.java,v 1.5 2005/07/11 14:06:25 rambaut Exp $
 */
public class RateCovarianceStatistic extends Statistic.Abstract implements TreeStatistic {

    public RateCovarianceStatistic(String name, Tree tree, BranchRateModel branchRateModel) {
        super(name);
        this.tree = tree;
        this.branchRateModel = branchRateModel;

        // One slot per branch whose parent is also a non-root branch.
        // NOTE(review): the 2n - 4 sizing assumes a fully resolved binary
        // tree with n >= 2 external nodes — confirm for this model.
        int n = tree.getExternalNodeCount();
        childRate = new double[2 * n - 4];
        parentRate = new double[childRate.length];
    }

    public void setTree(Tree tree) {
        this.tree = tree;
    }

    public Tree getTree() {
        return tree;
    }

    public int getDimension() {
        return 1;
    }

    /**
     * @return the covariance between each branch's rate and its parent
     *         branch's rate. (The previous Javadoc claiming "the height of
     *         the MRCA node" was a copy-paste error.)
     */
    public double getStatisticValue(int dim) {
        int n = tree.getNodeCount();
        int index = 0;
        for (int i = 0; i < n; i++) {
            NodeRef child = tree.getNode(i);
            NodeRef parent = tree.getParent(child);
            // Fixed: the original used non-short-circuit '&', which evaluated
            // tree.isRoot(parent) even when parent == null (the root node),
            // risking a NullPointerException. '&&' skips the isRoot call.
            if (parent != null && !tree.isRoot(parent)) {
                childRate[index] = branchRateModel.getBranchRate(tree, child);
                parentRate[index] = branchRateModel.getBranchRate(tree, parent);
                index++;
            }
        }
        return DiscreteStatistics.covariance(childRate, parentRate);
    }

    private Tree tree = null;
    private BranchRateModel branchRateModel = null;
    private double[] childRate = null;
    private double[] parentRate = null;
}
| lgpl-2.1 |
gytis/narayana | qa/tests/src/org/jboss/jbossts/qa/OTSServerClients/Client07.java | 3340 | /*
* JBoss, Home of Professional Open Source
* Copyright 2007, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License, v. 2.1.
* This program is distributed in the hope that it will be useful, but WITHOUT A
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public License,
* v.2.1 along with this distribution; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
* (C) 2005-2006,
* @author JBoss Inc.
*/
//
// Copyright (C) 1998, 1999, 2000, 2001, 2002, 2003
//
// Arjuna Technologies Ltd.,
// Newcastle upon Tyne,
// Tyne and Wear,
// UK.
//
package org.jboss.jbossts.qa.OTSServerClients;
/*
* Copyright (C) 1999-2001 by HP Bluestone Software, Inc. All rights Reserved.
*
* HP Arjuna Labs,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id: Client07.java,v 1.2 2003/06/26 11:44:17 rbegg Exp $
*/
/*
* Try to get around the differences between Ansi CPP and
* K&R cpp with concatenation.
*/
/*
* Copyright (C) 1999-2001 by HP Bluestone Software, Inc. All rights Reserved.
*
* HP Arjuna Labs,
* Newcastle upon Tyne,
* Tyne and Wear,
* UK.
*
* $Id: Client07.java,v 1.2 2003/06/26 11:44:17 rbegg Exp $
*/
import org.jboss.jbossts.qa.Utils.OAInterface;
import org.jboss.jbossts.qa.Utils.ORBInterface;
import org.jboss.jbossts.qa.Utils.ORBServices;
import org.omg.CosTransactions.Control;
import org.omg.CosTransactions.Status;
import org.omg.CosTransactions.TransactionFactory;
import org.omg.CosTransactions.TransactionFactoryHelper;
public class Client07
{
    /**
     * QA client: creates a series of OTS transactions via the transaction
     * factory, checks each one reports StatusActive before committing it,
     * and prints "Passed" only if every transaction was active. The number
     * of transactions is taken from the last command-line argument.
     */
    public static void main(String[] args)
    {
        try
        {
            ORBInterface.initORB(args, null);
            OAInterface.initOA();

            String[] factoryParams = new String[1];
            factoryParams[0] = ORBServices.otsKind;

            TransactionFactory factory = TransactionFactoryHelper.narrow(
                    ORBServices.getService(ORBServices.transactionService, factoryParams));

            int controlCount = Integer.parseInt(args[args.length - 1]);

            boolean allActive = true;

            // Stop at the first transaction that is not active.
            for (int i = 0; allActive && (i < controlCount); i++)
            {
                Control control = factory.create(0);

                allActive = (control.get_coordinator().get_status() == Status.StatusActive);

                control.get_terminator().commit(true);
            }

            System.out.println(allActive ? "Passed" : "Failed");
        }
        catch (Exception exception)
        {
            System.out.println("Failed");
            System.err.println("Client07.main: " + exception);
            exception.printStackTrace(System.err);
        }

        // Shut down regardless of test outcome; report but ignore failures.
        try
        {
            OAInterface.shutdownOA();
            ORBInterface.shutdownORB();
        }
        catch (Exception exception)
        {
            System.err.println("Client07.main: " + exception);
            exception.printStackTrace(System.err);
        }
    }
}
| lgpl-2.1 |
apurtell/hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/JavaSandboxLinuxContainerRuntime.java | 22557 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.
containermanager.linux.runtime;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationExecutor;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerExecutionException;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.FilePermission;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.security.AllPermission;
import java.util.Formatter;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.hadoop.fs.Path.SEPARATOR;
import static org.apache.hadoop.util.Shell.SYSPROP_HADOOP_HOME_DIR;
import static org.apache.hadoop.yarn.api.ApplicationConstants.Environment.JAVA_HOME;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.YARN_CONTAINER_SANDBOX;
import static org.apache.hadoop.yarn.conf.YarnConfiguration.YARN_CONTAINER_SANDBOX_POLICY_GROUP_PREFIX;
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.CONTAINER_ID_STR;
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.CONTAINER_LOCAL_DIRS;
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.CONTAINER_RUN_CMDS;
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.LOCALIZED_RESOURCES;
import static org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.USER;
/**
* <p>This class extends the {@link DefaultLinuxContainerRuntime} specifically
* for containers which run Java commands. It generates a new java security
* policy file per container and modifies the java command to enable the
* Java Security Manager with the generated policy.</p>
*
* The behavior of the {@link JavaSandboxLinuxContainerRuntime} can be modified
* using the following settings:
*
* <ul>
* <li>
* {@value
* org.apache.hadoop.yarn.conf.YarnConfiguration#YARN_CONTAINER_SANDBOX} :
* This yarn-site.xml setting has three options:
* <ul>
* <li>disabled - Default behavior. {@link LinuxContainerRuntime}
* is disabled</li>
* <li>permissive - JVM containers will run with Java Security Manager
* enabled. Non-JVM containers will run normally</li>
* <li>enforcing - JVM containers will run with Java Security Manager
* enabled. Non-JVM containers will be prevented from executing and an
* {@link ContainerExecutionException} will be thrown.</li>
* </ul>
* </li>
* <li>
* {@value
* org.apache.hadoop.yarn.conf.YarnConfiguration#YARN_CONTAINER_SANDBOX_FILE_PERMISSIONS}
* :
* Determines the file permissions for the application directories. The
* permissions come in the form of comma separated values
* (e.g. read,write,execute,delete). Defaults to {@code read} for read-only.
* </li>
* <li>
* {@value
* org.apache.hadoop.yarn.conf.YarnConfiguration#YARN_CONTAINER_SANDBOX_POLICY}
* :
* Accepts canonical path to a java policy file on the local filesystem.
* This file will be loaded as the base policy, any additional container
* grants will be appended to this base file. If not specified, the default
* java.policy file provided with hadoop resources will be used.
* </li>
* <li>
* {@value
* org.apache.hadoop.yarn.conf.YarnConfiguration#YARN_CONTAINER_SANDBOX_WHITELIST_GROUP}
* :
* Optional setting to specify a YARN queue which will be exempt from the
* sand-boxing process.
* </li>
* <li>
* {@value
* org.apache.hadoop.yarn.conf.YarnConfiguration#YARN_CONTAINER_SANDBOX_POLICY_GROUP_PREFIX}$groupName
* :
* Optional setting to map groups to java policy files. The value is a path
* to the java policy file for $groupName. A user which is a member of
* multiple groups with different policies will receive the superset of all
* the permissions across their groups.
* </li>
* </ul>
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class JavaSandboxLinuxContainerRuntime
extends DefaultLinuxContainerRuntime {
// Fixed: the logger was obtained for DefaultLinuxContainerRuntime.class
// (copy-paste), so sandbox log lines were attributed to the wrong class.
private static final Logger LOG =
    LoggerFactory.getLogger(JavaSandboxLinuxContainerRuntime.class);
private Configuration configuration;
private SandboxMode sandboxMode;

// Directory name (under hadoop.tmp.dir) holding generated policy files.
public static final String POLICY_FILE_DIR = "nm-sandbox-policies";
// NOTE(review): static but reassigned on every initializePolicyDir() call —
// presumably one runtime instance per JVM is assumed; confirm.
private static Path policyFileDir;
// rwxr-xr-x so the container user can read the NM-generated policy file.
private static final FileAttribute<Set<PosixFilePermission>> POLICY_ATTR =
    PosixFilePermissions.asFileAttribute(
        PosixFilePermissions.fromString("rwxr-xr-x"));
// containerId -> generated policy file; entries removed after (re)launch.
private Map<String, Path> containerPolicies = new HashMap<>();
/**
 * Create an instance using the given {@link PrivilegedOperationExecutor}
 * instance for performing operations. Construction only delegates to the
 * superclass; sandbox-specific state is set up later in {@code initialize}.
 *
 * @param privilegedOperationExecutor the {@link PrivilegedOperationExecutor}
 * instance
 */
public JavaSandboxLinuxContainerRuntime(
    PrivilegedOperationExecutor privilegedOperationExecutor) {
  super(privilegedOperationExecutor);
}
/**
 * Caches the configuration, resolves the configured sandbox mode
 * (enforcing / permissive / disabled), then completes the default
 * runtime initialization.
 */
@Override
public void initialize(Configuration conf, Context nmContext)
    throws ContainerExecutionException {
  this.configuration = conf;
  String configuredMode = this.configuration.get(YARN_CONTAINER_SANDBOX,
      YarnConfiguration.DEFAULT_YARN_CONTAINER_SANDBOX);
  this.sandboxMode = SandboxMode.get(configuredMode);

  super.initialize(conf, nmContext);
}
/**
 * Initialize the Java Security Policy directory under hadoop.tmp.dir:
 * creates it with the required permissions on first use, or clears any
 * policy files left over from earlier runs if it already exists.
 *
 * @throws ContainerExecutionException if hadoop.tmp.dir is unset or the
 * directory cannot be created/cleared
 */
private void initializePolicyDir() throws ContainerExecutionException {
  String hadoopTempDir = configuration.get("hadoop.tmp.dir");
  if (hadoopTempDir == null) {
    throw new ContainerExecutionException("hadoop.tmp.dir not set!");
  }
  policyFileDir = Paths.get(hadoopTempDir, POLICY_FILE_DIR);

  if (!Files.exists(policyFileDir)) {
    // First use: create the directory tree with rwxr-xr-x.
    try {
      policyFileDir = Files.createDirectories(
          Paths.get(hadoopTempDir, POLICY_FILE_DIR), POLICY_ATTR);
    } catch (IOException e) {
      throw new ContainerExecutionException("Unable to create policy file " +
          "directory: " + e);
    }
  } else {
    // Already present: remove every stale policy file inside it.
    try (DirectoryStream<Path> stream =
        Files.newDirectoryStream(policyFileDir)) {
      for (Path stalePolicyFile : stream) {
        Files.delete(stalePolicyFile);
      }
    } catch (IOException e) {
      throw new ContainerExecutionException("Unable to initialize policy "
          + "directory: " + e);
    }
  }
}
/**
 * Prior to the environment being written locally we need to generate a
 * policy file which limits container access to a small set of directories.
 * Additionally the container run command needs to be modified to include
 * flags to enable the java security manager with the generated policy.
 * <br>
 * The Java Sandbox will be circumvented if the user is a member of the
 * group specified in:
 * {@value
 * org.apache.hadoop.yarn.conf.YarnConfiguration#YARN_CONTAINER_SANDBOX_WHITELIST_GROUP}
 * and if they do not include the JVM flag
 * <code>-Djava.security.manager</code>.
 *
 * @param ctx The {@link ContainerRuntimeContext} containing container
 * setup properties.
 * @throws ContainerExecutionException Exception thrown if temporary policy
 * file directory can't be created, or if any exceptions occur during policy
 * file parsing and generation.
 */
@Override
public void prepareContainer(ContainerRuntimeContext ctx)
    throws ContainerExecutionException {
  @SuppressWarnings("unchecked")
  List<String> localDirs =
      ctx.getExecutionAttribute(CONTAINER_LOCAL_DIRS);
  @SuppressWarnings("unchecked")
  Map<org.apache.hadoop.fs.Path, List<String>> resources =
      ctx.getExecutionAttribute(LOCALIZED_RESOURCES);
  @SuppressWarnings("unchecked")
  List<String> commands =
      ctx.getExecutionAttribute(CONTAINER_RUN_CMDS);
  Map<String, String> env =
      ctx.getContainer().getLaunchContext().getEnvironment();
  String username =
      ctx.getExecutionAttribute(USER);

  // Whitelisted containers run unmodified: no policy file, no command rewrite.
  if(!isSandboxContainerWhitelisted(username, commands)) {
    String tmpDirBase = configuration.get("hadoop.tmp.dir");
    if (tmpDirBase == null) {
      throw new ContainerExecutionException("hadoop.tmp.dir not set!");
    }

    try {
      String containerID = ctx.getExecutionAttribute(CONTAINER_ID_STR);
      initializePolicyDir();

      // Per-container policy file: "<containerID>-java.policy" in the
      // shared policy directory, readable by the container user.
      List<String> groupPolicyFiles =
          getGroupPolicyFiles(configuration, ctx.getExecutionAttribute(USER));
      Path policyFilePath = Files.createFile(
          Paths.get(policyFileDir.toString(),
          containerID + "-" + NMContainerPolicyUtils.POLICY_FILE),
          POLICY_ATTR);

      try(OutputStream policyOutputStream =
          Files.newOutputStream(policyFilePath)) {

        // Register before generation so the finally-path cleanup in
        // launch/relaunch can find the file even if generation fails.
        containerPolicies.put(containerID, policyFilePath);
        NMContainerPolicyUtils.generatePolicyFile(policyOutputStream,
            localDirs, groupPolicyFiles, resources, configuration);

        // Rewrites the java command in-place to enable the Security Manager
        // pointing at the freshly generated policy.
        NMContainerPolicyUtils.appendSecurityFlags(
            commands, env, policyFilePath, sandboxMode);

      }
    } catch (IOException e) {
      throw new ContainerExecutionException(e);
    }
  }
}
/**
 * Launches the container via the default runtime; the generated policy file
 * for this container is removed afterwards whether or not the launch
 * succeeded (policy files are single-use).
 */
@Override
public void launchContainer(ContainerRuntimeContext ctx)
    throws ContainerExecutionException {
  try {
    super.launchContainer(ctx);
  } finally {
    deletePolicyFiles(ctx);
  }
}
/**
 * Relaunches the container via the default runtime; as with launch, the
 * container's generated policy file is cleaned up in all cases.
 */
@Override
public void relaunchContainer(ContainerRuntimeContext ctx)
    throws ContainerExecutionException {
  try {
    super.relaunchContainer(ctx);
  } finally {
    deletePolicyFiles(ctx);
  }
}
/**
 * Determine if JVMSandboxLinuxContainerRuntime should be used. This is
 * decided based on the value of
 * {@value
 * org.apache.hadoop.yarn.conf.YarnConfiguration#YARN_CONTAINER_SANDBOX}
 * @param env the environment variable settings for the operation
 * @return true if Sandbox is requested, false otherwise
 */
@Override
public boolean isRuntimeRequested(Map<String, String> env) {
  // Any configured mode other than 'disabled' activates this runtime;
  // the env argument is not consulted.
  return sandboxMode != SandboxMode.disabled;
}
/**
 * Resolves the java policy file paths configured (via the
 * yarn.container-sandbox.policy.group. prefix) for the groups the given
 * user belongs to; groups without a configured policy are dropped.
 */
private static List<String> getGroupPolicyFiles(Configuration conf,
    String user) throws ContainerExecutionException {
  Set<String> memberGroups;
  try {
    memberGroups =
        Groups.getUserToGroupsMappingService(conf).getGroupsSet(user);
  } catch (IOException e) {
    throw new ContainerExecutionException("Container user does not exist");
  }

  return memberGroups.stream()
      .map(groupName ->
          conf.get(YARN_CONTAINER_SANDBOX_POLICY_GROUP_PREFIX + groupName))
      .filter(policyPath -> policyPath != null)
      .collect(Collectors.toList());
}
/**
 * Determine if the container should be whitelisted (i.e. exempt from the
 * Java Security Manager). A container is whitelisted when its user belongs
 * to the configured whitelist group, it has at least one run command, and
 * no command explicitly enables the security manager itself.
 *
 * @param username The name of the user running the container
 * @param commands The list of run commands for the container
 * @return true if the container should bypass the sandbox
 * @throws ContainerExecutionException If container user can not be resolved
 */
private boolean isSandboxContainerWhitelisted(String username,
    List<String> commands) throws ContainerExecutionException {
  String whitelistGroup = configuration.get(
      YarnConfiguration.YARN_CONTAINER_SANDBOX_WHITELIST_GROUP);
  Groups groups = Groups.getUserToGroupsMappingService(configuration);
  Set<String> userGroups;
  try {
    userGroups = groups.getGroupsSet(username);
  } catch (IOException e) {
    throw new ContainerExecutionException("Container user does not exist");
  }

  // Not a member of the whitelist group (or none configured): never exempt.
  if (whitelistGroup == null || !userGroups.contains(whitelistGroup)) {
    return false;
  }

  // Membership exempts the container unless any command opts back in to
  // the security manager; an empty command list is not exempt.
  boolean whitelisted = false;
  for (String command : commands) {
    if (command.contains(NMContainerPolicyUtils.SECURITY_FLAG)) {
      return false;
    }
    whitelisted = true;
  }
  return whitelisted;
}
/**
 * Deletes the generated policy file (if any) for the container in the given
 * context and drops its bookkeeping entry. (The earlier Javadoc claim that
 * this method ages off stale policy files was incorrect — that happens in
 * initializePolicyDir.)
 *
 * @param ctx Container context identifying the container to clean up
 * @throws ContainerExecutionException if the policy file cannot be deleted
 */
private void deletePolicyFiles(ContainerRuntimeContext ctx)
    throws ContainerExecutionException {
  Path policyFile = containerPolicies.remove(
      ctx.getExecutionAttribute(CONTAINER_ID_STR));
  // Fixed: whitelisted containers never register a policy file, so remove()
  // returns null here; the original passed that null to Files.delete(),
  // throwing a NullPointerException from the launch/relaunch finally block.
  if (policyFile == null) {
    return;
  }
  try {
    Files.delete(policyFile);
  } catch (IOException e) {
    throw new ContainerExecutionException("Unable to delete policy file: "
        + e);
  }
}
/**
 * Enumeration of the modes the JavaSandboxLinuxContainerRuntime can use.
 * See {@link JavaSandboxLinuxContainerRuntime} for details on the
 * behavior of each setting.
 */
public enum SandboxMode {
  enforcing("enforcing"),
  permissive("permissive"),
  disabled("disabled");

  private final String mode;

  SandboxMode(String mode) {
    this.mode = mode;
  }

  /**
   * Parses a configuration value into a mode. Anything other than
   * "enforcing" or "permissive" (including null) maps to {@code disabled}.
   */
  public static SandboxMode get(String mode) {
    if (enforcing.mode.equals(mode)) {
      return enforcing;
    }
    if (permissive.mode.equals(mode)) {
      return permissive;
    }
    return disabled;
  }

  @Override
  public String toString() {
    return mode;
  }
}
/**
 * Static utility class defining String constants and static methods for the
 * use of the {@link JavaSandboxLinuxContainerRuntime}.
 */
static final class NMContainerPolicyUtils{

  // Base name of each generated per-container policy file.
  static final String POLICY_FILE = "java.policy";
  // Appended to the java command only when debug logging is enabled.
  static final String SECURITY_DEBUG = " -Djava.security.debug=all";
  // JVM flag that enables the Java Security Manager.
  static final String SECURITY_FLAG = "-Djava.security.manager";
  static final String POLICY_APPEND_FLAG = "-Djava.security.policy=";
  // Doubled '=': "-Djava.security.policy==<file>" makes the given file the
  // sole policy in effect instead of augmenting the defaults (standard
  // java.security policy-flag semantics).
  static final String POLICY_FLAG = POLICY_APPEND_FLAG + "=";
  static final String JAVA_CMD = "/bin/java ";
  // "/bin/java -Djava.security.manager -Djava.security.policy==" — the
  // policy file path is appended later by appendSecurityFlags().
  static final String JVM_SECURITY_CMD =
      JAVA_CMD + SECURITY_FLAG + " " + POLICY_FLAG;
  // Matches an existing policy flag together with its file argument.
  static final String STRIP_POLICY_FLAG = POLICY_APPEND_FLAG + "[^ ]+";
  // Matches commands invoking java via a literal "$JAVA_HOME/bin/java ...".
  static final String CONTAINS_JAVA_CMD = "\\$" + JAVA_HOME + JAVA_CMD + ".*";
  // Detects compound shell constructs that cannot be sandboxed as a single
  // java invocation.
  static final String MULTI_COMMAND_REGEX =
      "(?s).*(" + //command read as single line
      "(&[^>]|&&)|(\\|{1,2})|(\\|&)|" + //Matches '&','&&','|','||' and '|&'
      "(`[^`]+`)|(\\$\\([^)]+\\))|" + //Matches occurrences of $() or ``
      "(;)" + //Matches end of statement ';'
      ").*";
  // Strips any user-supplied security-manager/policy flags before the
  // NodeManager-controlled ones are inserted.
  static final String CLEAN_CMD_REGEX =
      "(" + SECURITY_FLAG + ")|" +
      "(" + STRIP_POLICY_FLAG + ")";
  // Policy-file grant line template; %1$s = directory, %2$s = actions
  // (e.g. "read"). The trailing "/-" grants access recursively.
  static final String FILE_PERMISSION_FORMAT = " permission "
      + FilePermission.class.getCanonicalName()
      + " \"%1$s" + SEPARATOR + "-\", \"%2$s\";%n";
  // Grants AllPermission to code loaded from anywhere under $HADOOP_HOME.
  static final String HADOOP_HOME_PERMISSION = "%ngrant codeBase \"file:"
      + Paths.get(System.getProperty(SYSPROP_HADOOP_HOME_DIR))
      + SEPARATOR + "-\" {%n" +
      " permission " + AllPermission.class.getCanonicalName() + ";%n};%n";
  static final Logger LOG =
      LoggerFactory.getLogger(NMContainerPolicyUtils.class);
/**
 * Write new policy file to policyOutStream which will include read access
 * to localize resources. Optionally a default policyFilePath can be
 * specified to append a custom policy implementation to the new policy file.
 *
 * Base-policy precedence as implemented below: the user's group policy
 * files (when the list is non-null), else the file configured via
 * yarn.container-sandbox.policy, else the default java.policy bundled with
 * the hadoop resources. Grant blocks for the container's local and cache
 * directories are then appended.
 *
 * @param policyOutStream OutputStream pointing to java.policy file
 * @param localDirs Container local directories
 * @param groupPolicyPaths policy files resolved for the user's groups
 * @param resources List of local container resources
 * @param conf YARN configuration
 * @throws IOException - If policy file generation is unable to read the
 * base policy file or if it is unable to create a new policy file.
 */
static void generatePolicyFile(OutputStream policyOutStream,
    List<String> localDirs, List<String> groupPolicyPaths,
    Map<org.apache.hadoop.fs.Path, List<String>> resources,
    Configuration conf)
    throws IOException {
  String policyFilePath =
      conf.get(YarnConfiguration.YARN_CONTAINER_SANDBOX_POLICY);
  String filePermissions =
      conf.get(YarnConfiguration.YARN_CONTAINER_SANDBOX_FILE_PERMISSIONS,
          YarnConfiguration.DEFAULT_YARN_CONTAINER_SANDBOX_FILE_PERMISSIONS);

  // Parent directories of all localized resources (the local cache dirs).
  Set<String> cacheDirs = new HashSet<>();
  for(org.apache.hadoop.fs.Path path : resources.keySet()) {
    cacheDirs.add(path.getParent().toString());
  }

  // NOTE(review): a non-null but *empty* groupPolicyPaths list takes the
  // first branch and copies no base policy at all — confirm that is the
  // intended behavior for users with no group policies configured.
  if (groupPolicyPaths != null) {
    for(String policyPath : groupPolicyPaths) {
      Files.copy(Paths.get(policyPath), policyOutStream);
    }
  } else if (policyFilePath == null) {
    // Default java.policy shipped on the classpath.
    IOUtils.copyBytes(
        NMContainerPolicyUtils.class.getResourceAsStream("/" + POLICY_FILE),
        policyOutStream, conf, false);
  } else {
    Files.copy(Paths.get(policyFilePath), policyOutStream);
  }

  // The Formatter wraps the caller's stream: flushed below, deliberately
  // not closed (closing it would close policyOutStream).
  Formatter filePermissionFormat = new Formatter(policyOutStream,
      StandardCharsets.UTF_8.name());
  filePermissionFormat.format(HADOOP_HOME_PERMISSION);
  filePermissionFormat.format("grant {%n");

  for(String localDir : localDirs) {
    filePermissionFormat.format(
        FILE_PERMISSION_FORMAT, localDir, filePermissions);
  }
  for(String cacheDir : cacheDirs) {
    filePermissionFormat.format(
        FILE_PERMISSION_FORMAT, cacheDir, filePermissions);
  }

  filePermissionFormat.format("};%n");
  filePermissionFormat.flush();
}
/**
 * Modify command to enable the Java Security Manager and specify
 * java.policy file. Will modify the passed commands to strip any
 * existing java security configurations. Expects a java command to be the
 * first and only executable provided in enforcing mode. In passive mode
 * any commands with '||' or '&&' will not be modified.
 *
 * @param commands List of container commands (modified in place)
 * @param env Container environment variables
 * @param policyPath Path to the container specific policy file
 * @param sandboxMode (enforcing, permissive, disabled) Determines
 * whether non-java containers will be launched
 * @throws ContainerExecutionException - Exception thrown if
 * JVM Sandbox enabled in 'enforcing' mode and a non-java command is
 * provided in the list of commands
 */
static void appendSecurityFlags(List<String> commands,
    Map<String, String> env, Path policyPath, SandboxMode sandboxMode)
    throws ContainerExecutionException {

  for(int i = 0; i < commands.size(); i++){
    String command = commands.get(i);
    // A command is rewritten only when (a) the container's JAVA_HOME equals
    // the NodeManager's, (b) it invokes $JAVA_HOME/bin/java, and (c) it is
    // a single command (no chaining, pipes, or command substitution).
    if(validateJavaHome(env.get(JAVA_HOME.name()))
        && command.matches(CONTAINS_JAVA_CMD)
        && !command.matches(MULTI_COMMAND_REGEX)){
      // Strip user-supplied security flags, then insert the NM-controlled
      // security-manager + policy flags right after the java executable.
      command = command.replaceAll(CLEAN_CMD_REGEX, "");
      String securityString = JVM_SECURITY_CMD + policyPath + " ";
      if(LOG.isDebugEnabled()) {
        securityString += SECURITY_DEBUG;
      }
      commands.set(i, command.replaceFirst(JAVA_CMD, securityString));
    } else if (sandboxMode == SandboxMode.enforcing){
      // Non-sandboxable command in enforcing mode: refuse to launch.
      throw new ContainerExecutionException(
          "Only JVM containers permitted in YARN sandbox mode (enforcing). "
          + "The following command can not be executed securely: " + command);
    } else if (sandboxMode == SandboxMode.permissive){
      // Permissive mode: let the unmodified command run, but warn.
      LOG.warn("The container will run without the java security manager"
          + " due to an unsupported container command. The command"
          + " will be permitted to run in Sandbox permissive mode: "
          + command);
    }
  }
}
/**
 * Checks that both the NodeManager and the container define JAVA_HOME,
 * and reports whether the two values match (a mismatch means the rewritten
 * java path could point at a different JVM).
 */
private static boolean validateJavaHome(String containerJavaHome)
    throws ContainerExecutionException{
  String nodeManagerJavaHome = System.getenv(JAVA_HOME.name());
  if (nodeManagerJavaHome == null) {
    throw new ContainerExecutionException(
        "JAVA_HOME is not set for NodeManager");
  }
  if (containerJavaHome == null) {
    throw new ContainerExecutionException(
        "JAVA_HOME is not set for container");
  }
  return nodeManagerJavaHome.equals(containerJavaHome);
}
}
}
| apache-2.0 |
siosio/intellij-community | plugins/InspectionGadgets/test/com/siyeh/igfixes/style/methodRefs2lambda/LocalClassReference.java | 148 | public class Test {
void test(){
class Local(){}
Supplier<Local> supplier = Local:<caret>:new;
}
}
interface Supplier<T> {
T get();
} | apache-2.0 |
howepeng/isis | core/metamodel/src/main/java/org/apache/isis/core/commons/lang/MapUtil.java | 1560 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.commons.lang;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * Utility for building small unmodifiable maps from alternating
 * key/value arguments. Not instantiable.
 */
public final class MapUtil {

    private MapUtil() {
    }

    /**
     * Converts a flat list of alternating keys and values, e.g.
     * {@code [a, 1, b, 2]}, into an unmodifiable map {@code {a -> 1; b -> 2}}.
     *
     * @param keyValPair alternating key/value arguments; length must be even
     * @return an unmodifiable map built from the pairs
     * @throws IllegalArgumentException if an odd number of arguments is given
     */
    @SuppressWarnings("unchecked")
    public static <K,V> Map<K,V> asMap(Object... keyValPair){
        // Validate before allocating anything.
        if(keyValPair.length % 2 != 0){
            throw new IllegalArgumentException("Keys and values must be pairs.");
        }
        // Presize generously (pair count would suffice) to avoid rehashing.
        Map<K,V> map = new HashMap<K,V>(keyValPair.length);
        for(int i = 0; i < keyValPair.length; i += 2){
            map.put((K) keyValPair[i], (V) keyValPair[i+1]);
        }
        return Collections.unmodifiableMap(map);
    }
}
| apache-2.0 |
siosio/intellij-community | python/src/com/jetbrains/python/PyEditorHighlighterProvider.java | 1095 | // Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.jetbrains.python;
import com.intellij.openapi.editor.colors.EditorColorsScheme;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.fileTypes.EditorHighlighterProvider;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.jetbrains.python.highlighting.PythonEditorHighlighter;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Supplies the editor syntax highlighter used for Python files.
 */
public class PyEditorHighlighterProvider implements EditorHighlighterProvider {
  @Override
  public EditorHighlighter getEditorHighlighter(@Nullable Project project,
                                                @NotNull FileType fileType, @Nullable VirtualFile virtualFile,
                                                @NotNull EditorColorsScheme colors) {
    // Delegate to the Python-specific highlighter implementation.
    final PythonEditorHighlighter pythonHighlighter = new PythonEditorHighlighter(colors, project, virtualFile);
    return pythonHighlighter;
  }
}
| apache-2.0 |
nomakaFr/ofbiz_ynh | sources/framework/entity/src/org/ofbiz/entity/util/Converters.java | 3482 | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.ofbiz.entity.util;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import org.ofbiz.base.conversion.AbstractConverter;
import org.ofbiz.base.conversion.ConversionException;
import org.ofbiz.base.conversion.ConverterLoader;
import org.ofbiz.entity.GenericEntity;
import org.ofbiz.entity.GenericValue;
/** Entity Engine <code>Converter</code> classes. */
public class Converters implements ConverterLoader {

    /** Wraps a single <code>GenericValue</code> in a one-element <code>List</code>. */
    public static class GenericValueToList extends AbstractConverter<GenericValue, List<GenericValue>> {
        public GenericValueToList() {
            super(GenericValue.class, List.class);
        }

        public List<GenericValue> convert(GenericValue obj) throws ConversionException {
            List<GenericValue> singleValueList = new LinkedList<GenericValue>();
            singleValueList.add(obj);
            return singleValueList;
        }
    }

    /** Wraps a single <code>GenericValue</code> in a one-element <code>Set</code>. */
    public static class GenericValueToSet extends AbstractConverter<GenericValue, Set<GenericValue>> {
        public GenericValueToSet() {
            super(GenericValue.class, Set.class);
        }

        public Set<GenericValue> convert(GenericValue obj) throws ConversionException {
            Set<GenericValue> singleValueSet = new HashSet<GenericValue>();
            singleValueSet.add(obj);
            return singleValueSet;
        }
    }

    /** Converts a <code>GenericValue</code> to its <code>String</code> representation. */
    public static class GenericValueToString extends AbstractConverter<GenericValue, String> {
        public GenericValueToString() {
            super(GenericValue.class, String.class);
        }

        public String convert(GenericValue obj) throws ConversionException {
            return obj.toString();
        }
    }

    /** Converts the entity null-field marker to a plain <code>null</code> reference. */
    public static class NullFieldToObject extends AbstractConverter<GenericEntity.NullField, Object> {
        public NullFieldToObject() {
            super(GenericEntity.NullField.class, Object.class);
        }

        public Object convert(GenericEntity.NullField obj) throws ConversionException {
            return null;
        }
    }

    /** Converts any object to the entity null-field marker. */
    public static class ObjectToNullField extends AbstractConverter<Object, GenericEntity.NullField> {
        public ObjectToNullField() {
            super(Object.class, GenericEntity.NullField.class);
        }

        public GenericEntity.NullField convert(Object obj) throws ConversionException {
            return GenericEntity.NULL_FIELD;
        }
    }

    /** Registers every converter class contained in this class. */
    public void loadConverters() {
        org.ofbiz.base.conversion.Converters.loadContainedConverters(Converters.class);
    }
}
| apache-2.0 |
goodwinnk/intellij-community | platform/diff-impl/src/com/intellij/diff/actions/CompareFileWithEditorAction.java | 3308 | /*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.diff.actions;
import com.intellij.diff.DiffRequestFactory;
import com.intellij.diff.contents.DiffContent;
import com.intellij.diff.contents.DocumentContent;
import com.intellij.diff.requests.ContentDiffRequest;
import com.intellij.diff.requests.DiffRequest;
import com.intellij.diff.util.DiffUserDataKeys;
import com.intellij.diff.util.Side;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.CommonDataKeys;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.fileEditor.ex.FileEditorManagerEx;
import com.intellij.openapi.fileEditor.impl.EditorWindow;
import com.intellij.openapi.fileEditor.impl.EditorWithProviderComposite;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
public class CompareFileWithEditorAction extends BaseShowDiffAction {
@Override
protected boolean isAvailable(@NotNull AnActionEvent e) {
VirtualFile selectedFile = getSelectedFile(e);
if (selectedFile == null) {
return false;
}
VirtualFile currentFile = getEditingFile(e);
if (currentFile == null) {
return false;
}
if (!canCompare(selectedFile, currentFile)) {
return false;
}
return true;
}
@Nullable
private static VirtualFile getSelectedFile(@NotNull AnActionEvent e) {
VirtualFile[] array = e.getData(CommonDataKeys.VIRTUAL_FILE_ARRAY);
if (array == null || array.length != 1 || array[0].isDirectory()) {
return null;
}
return array[0];
}
@Nullable
private static VirtualFile getEditingFile(@NotNull AnActionEvent e) {
Project project = e.getProject();
if (project == null) return null;
EditorWindow window = FileEditorManagerEx.getInstanceEx(project).getCurrentWindow();
if (window == null) return null;
EditorWithProviderComposite editor = window.getSelectedEditor(true);
return editor == null ? null : editor.getFile();
}
private static boolean canCompare(@NotNull VirtualFile file1, @NotNull VirtualFile file2) {
return !file1.equals(file2) && hasContent(file1) && hasContent(file2);
}
@Nullable
@Override
protected DiffRequest getDiffRequest(@NotNull AnActionEvent e) {
Project project = e.getProject();
VirtualFile selectedFile = getSelectedFile(e);
VirtualFile currentFile = getEditingFile(e);
assert selectedFile != null && currentFile != null;
ContentDiffRequest request = DiffRequestFactory.getInstance().createFromFiles(project, selectedFile, currentFile);
DiffContent editorContent = request.getContents().get(1);
if (editorContent instanceof DocumentContent) {
Editor[] editors = EditorFactory.getInstance().getEditors(((DocumentContent)editorContent).getDocument());
if (editors.length != 0) {
request.putUserData(DiffUserDataKeys.SCROLL_TO_LINE, Pair.create(Side.RIGHT, editors[0].getCaretModel().getLogicalPosition().line));
}
}
return request;
}
}
| apache-2.0 |
dsyang/buck | test/com/facebook/buck/model/ImmediateDirectoryBuildTargetPatternTest.java | 1476 | /*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.model;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.nio.file.Path;
import java.nio.file.Paths;
import org.junit.Test;
/** Tests for {@link ImmediateDirectoryBuildTargetPattern#matches}. */
public class ImmediateDirectoryBuildTargetPatternTest {

  private static final Path ROOT = Paths.get("/opt/src/buck");

  @Test
  public void testApply() {
    ImmediateDirectoryBuildTargetPattern pattern =
        ImmediateDirectoryBuildTargetPattern.of(ROOT, Paths.get("src/com/facebook/buck/"));

    // A target directly inside the pattern's directory matches.
    boolean matchesSameDirectory =
        pattern.matches(BuildTargetFactory.newInstance(ROOT, "//src/com/facebook/buck", "buck"));
    assertTrue(matchesSameDirectory);

    // A target in a sibling directory does not match.
    boolean matchesSiblingDirectory =
        pattern.matches(BuildTargetFactory.newInstance(ROOT, "//src/com/facebook/foo/", "foo"));
    assertFalse(matchesSiblingDirectory);

    // Nor does a target one level deeper than the pattern's directory.
    boolean matchesNestedDirectory =
        pattern.matches(
            BuildTargetFactory.newInstance(ROOT, "//src/com/facebook/buck/bar", "bar"));
    assertFalse(matchesNestedDirectory);
  }
}
| apache-2.0 |
BUPTAnderson/apache-hive-2.1.1-src | orc/src/java/org/apache/orc/impl/PositionRecorder.java | 969 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.orc.impl;
/**
 * An interface for recording positions in a stream.
 */
public interface PositionRecorder {
  /**
   * Record a position in the stream.
   *
   * @param offset the position to record
   */
  void addPosition(long offset);
}
| apache-2.0 |
macs524/mybatis_learn | src/test/java/org/apache/ibatis/submitted/sqlprovider/OurSqlBuilder.java | 4469 | /**
* Copyright 2009-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.submitted.sqlprovider;
import org.apache.ibatis.annotations.Param;
import org.apache.ibatis.jdbc.SQL;
import java.util.List;
import java.util.Map;
/**
 * SQL provider used by the sqlprovider tests: each public method builds and
 * returns a SQL statement string. These methods are presumably referenced from
 * mapper methods via provider annotations (e.g. @SelectProvider) — verify
 * against the mapper interface in this package.
 */
public class OurSqlBuilder {

  // Builds "select ... where id in (#{list[0]},#{list[1]},...)" for a list of ids.
  public String buildGetUsersQuery(Map<String, Object> parameter) {
    // MyBatis wraps a single List parameter in a Map with the key="list",
    // so need to pull it out
    @SuppressWarnings("unchecked")
    List<Integer> ids = (List<Integer>) parameter.get("list");
    StringBuilder sb = new StringBuilder();
    sb.append("select * from users where id in (");
    for (int i = 0; i < ids.size(); i++) {
      if (i > 0) {
        sb.append(",");
      }
      // Each element is referenced by index into the wrapped "list" parameter.
      sb.append("#{list[");
      sb.append(i);
      sb.append("]}");
    }
    sb.append(") order by id");
    return sb.toString();
  }

  public String buildGetUserQuery(Number parameter) {
    // parameter is not a single List or Array,
    // so it is passed as is from the mapper
    return "select * from users where id = #{value}";
  }

  public String buildGetAllUsersQuery() {
    return "select * from users order by id";
  }

  // Dynamic WHERE: conditions are appended only for non-null criteria fields.
  public String buildGetUsersByCriteriaQuery(final User criteria) {
    return new SQL(){{
      SELECT("*");
      FROM("users");
      if (criteria.getId() != null) {
        WHERE("id = #{id}");
      }
      if (criteria.getName() != null) {
        WHERE("name like #{name} || '%'");
      }
    }}.toString();
  }

  // Same dynamic WHERE as above, but the criteria arrive as a Map.
  public String buildGetUsersByCriteriaMapQuery(final Map<String, Object> criteria) {
    return new SQL(){{
      SELECT("*");
      FROM("users");
      if (criteria.get("id") != null) {
        WHERE("id = #{id}");
      }
      if (criteria.get("name") != null) {
        WHERE("name like #{name} || '%'");
      }
    }}.toString();
  }

  // Unannotated multi-arg method: parameters are referenced positionally
  // (#{param1}, #{param2}, ...).
  public String buildGetUsersByNameQuery(final String name, final String orderByColumn) {
    return new SQL(){{
      SELECT("*");
      FROM("users");
      if (name != null) {
        WHERE("name like #{param1} || '%'");
      }
      ORDER_BY(orderByColumn);
    }}.toString();
  }

  // Without @Param, multiple arguments arrive as a Map keyed "param1"/"param2".
  public String buildGetUsersByNameUsingMap(Map<String, Object> params) {
    final String name = String.class.cast(params.get("param1"));
    final String orderByColumn = String.class.cast(params.get("param2"));
    return new SQL(){{
      SELECT("*");
      FROM("users");
      if (name != null) {
        WHERE("name like #{param1} || '%'");
      }
      ORDER_BY(orderByColumn);
    }}.toString();
  }

  // With @Param, arguments can be referenced by their annotated names,
  // regardless of declaration order.
  public String buildGetUsersByNameWithParamNameAndOrderByQuery(@Param("orderByColumn") final String orderByColumn, @Param("name") final String name) {
    return new SQL(){{
      SELECT("*");
      FROM("users");
      if (name != null) {
        WHERE("name like #{name} || '%'");
      }
      ORDER_BY(orderByColumn);
    }}.toString();
  }

  public String buildGetUsersByNameWithParamNameQuery(@Param("name") final String name) {
    return new SQL(){{
      SELECT("*");
      FROM("users");
      if (name != null) {
        WHERE("name like #{name} || '%'");
      }
      ORDER_BY("id DESC");
    }}.toString();
  }

  // @Param-annotated arguments are also available in the params Map under their
  // annotated names; note the SQL still references the positional alias
  // #{param1} — presumably both aliases resolve to the same value (verify
  // against the MyBatis param-name resolution rules).
  public String buildGetUsersByNameWithParamNameQueryUsingMap(Map<String, Object> params) {
    final String name = String.class.cast(params.get("name"));
    final String orderByColumn = String.class.cast(params.get("orderByColumn"));
    return new SQL(){{
      SELECT("*");
      FROM("users");
      if (name != null) {
        WHERE("name like #{param1} || '%'");
      }
      ORDER_BY(orderByColumn);
    }}.toString();
  }

  // Static statements for the insert/update/delete providers.
  public String buildInsert() {
    return "insert into users (id, name) values (#{id}, #{name})";
  }

  public String buildUpdate() {
    return "update users set name = #{name} where id = #{id}";
  }

  public String buildDelete() {
    return "delete from users where id = #{id}";
  }
}
| apache-2.0 |
tsdl2013/Bitocle | src/Release/2.2/Bitocle/src/io/github/mthli/Bitocle/Content/ContentItem.java | 712 | package io.github.mthli.Bitocle.Content;
import org.eclipse.egit.github.core.TreeEntry;
/**
 * Wraps a git {@link TreeEntry} for display in the content browser and orders
 * items by entry type in descending, case-insensitive order (the compareTo
 * operands are reversed on purpose — e.g. "tree" entries sort before "blob"
 * entries).
 */
public class ContentItem implements Comparable<ContentItem> {
    private TreeEntry entry;

    public ContentItem(TreeEntry entry) {
        super();
        this.entry = entry;
    }

    public TreeEntry getEntry() {
        return entry;
    }

    public void setEntry(TreeEntry entry) {
        this.entry = entry;
    }

    /**
     * Compares by entry type, descending and case-insensitive.
     *
     * @throws IllegalArgumentException if either entry has no type
     */
    @Override
    public int compareTo(ContentItem item) {
        String thisType = this.entry.getType();
        String otherType = item.getEntry().getType();
        // Fix: the original only guarded this entry's type and threw a raw
        // NullPointerException when the other item's type was null.
        if (thisType == null || otherType == null) {
            throw new IllegalArgumentException("TreeEntry type must not be null");
        }
        return otherType.toLowerCase().compareTo(thisType.toLowerCase());
    }
}
| apache-2.0 |
vrozov/apex-malhar | library/src/main/java/org/apache/apex/malhar/lib/io/block/BlockMetadata.java | 6411 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.apex.malhar.lib.io.block;
import javax.validation.constraints.NotNull;
import com.google.common.base.Preconditions;
/**
* Represents the metadata of a block.
*
* @since 2.1.0
*/
public interface BlockMetadata
{
  /**
   * @return unique block id.
   */
  long getBlockId();

  /**
   * @return the start offset associated with the block.
   */
  long getOffset();

  /**
   * @return the length of the source in the block.
   */
  long getLength();

  /**
   * @return if this is the last block in file.
   */
  boolean isLastBlock();

  /**
   * @return the previous block id.
   */
  long getPreviousBlockId();

  /**
   * Base implementation of {@link BlockMetadata}. Block identity — both
   * {@link #equals(Object)} and {@link #hashCode()} — is derived solely from
   * the block id.
   */
  abstract class AbstractBlockMetadata implements BlockMetadata
  {
    private long offset;
    private long length;
    private boolean isLastBlock;
    private long previousBlockId;
    private long blockId;

    // No-arg constructor for serialization; ids and offsets start "unset" (-1).
    @SuppressWarnings("unused")
    protected AbstractBlockMetadata()
    {
      offset = -1;
      length = -1;
      isLastBlock = false;
      previousBlockId = -1;
      blockId = -1;
    }

    /**
     * Constructs Block metadata without a block id (the id is left as -1).
     *
     * @param offset offset of the file in the block
     * @param length length of the file in the block
     * @param isLastBlock true if this is the last block of file
     * @param previousBlockId id of the previous block
     */
    @Deprecated
    public AbstractBlockMetadata(long offset, long length, boolean isLastBlock, long previousBlockId)
    {
      this.offset = offset;
      this.length = length;
      this.isLastBlock = isLastBlock;
      this.previousBlockId = previousBlockId;
      this.blockId = -1;
    }

    /**
     * Constructs Block metadata
     *
     * @param blockId block id
     * @param offset offset of the file in the block
     * @param length length of the file in the block
     * @param isLastBlock true if this is the last block of file
     * @param previousBlockId id of the previous block
     */
    public AbstractBlockMetadata(long blockId, long offset, long length, boolean isLastBlock, long previousBlockId)
    {
      this.blockId = blockId;
      this.offset = offset;
      this.length = length;
      this.isLastBlock = isLastBlock;
      this.previousBlockId = previousBlockId;
    }

    // Two instances are equal iff their block ids are equal.
    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }
      if (!(o instanceof AbstractBlockMetadata)) {
        return false;
      }

      AbstractBlockMetadata that = (AbstractBlockMetadata)o;

      return getBlockId() == that.getBlockId();
    }

    // Narrowing cast is consistent with equals(): equal block ids always yield
    // equal hash codes.
    @Override
    public int hashCode()
    {
      return (int)getBlockId();
    }

    @Override
    public long getOffset()
    {
      return offset;
    }

    /**
     * Sets the offset of the file in the block.
     *
     * @param offset start offset of the file data within this block
     */
    public void setOffset(long offset)
    {
      this.offset = offset;
    }

    @Override
    public long getLength()
    {
      return length;
    }

    /**
     * Sets the length of the file in the block.
     *
     * @param length length of the file data within this block
     */
    public void setLength(long length)
    {
      this.length = length;
    }

    @Override
    public boolean isLastBlock()
    {
      return isLastBlock;
    }

    /**
     * Sets whether this is the last block of the file.
     *
     * @param lastBlock true if this is the file's last block
     */
    public void setLastBlock(boolean lastBlock)
    {
      this.isLastBlock = lastBlock;
    }

    @Override
    public long getPreviousBlockId()
    {
      return previousBlockId;
    }

    /**
     * Sets the previous block id.
     *
     * @param previousBlockId previous block id.
     */
    public void setPreviousBlockId(long previousBlockId)
    {
      this.previousBlockId = previousBlockId;
    }

    @Override
    public long getBlockId()
    {
      return blockId;
    }

    /**
     * Sets the block id.
     *
     * @param blockId unique id of this block
     */
    public void setBlockId(long blockId)
    {
      this.blockId = blockId;
    }
  }

  /**
   * A block of file which contains file path and other block properties.
   * It also controls if blocks should be read in sequence
   */
  class FileBlockMetadata extends AbstractBlockMetadata
  {
    private final String filePath;
    private long fileLength;

    // For serialization only; leaves filePath null.
    protected FileBlockMetadata()
    {
      super();
      filePath = null;
    }

    /**
     * Constructs file block metadata without recording the file's length.
     */
    public FileBlockMetadata(String filePath, long blockId, long offset, long length, boolean isLastBlock,
        long previousBlockId)
    {
      super(blockId, offset, length, isLastBlock, previousBlockId);
      this.filePath = filePath;
    }

    /**
     * Constructs file block metadata including the overall file length.
     */
    public FileBlockMetadata(String filePath, long blockId, long offset, long length, boolean isLastBlock,
        long previousBlockId, long fileLength)
    {
      super(blockId, offset, length, isLastBlock, previousBlockId);
      this.filePath = filePath;
      this.fileLength = fileLength;
    }

    /**
     * Constructs file block metadata from just a file path; block id, offset
     * and length stay at their defaults.
     */
    public FileBlockMetadata(String filePath)
    {
      this.filePath = filePath;
    }

    /**
     * Constructs file block metadata from a file path and the file's length.
     */
    public FileBlockMetadata(String filePath, long fileLength)
    {
      this.filePath = filePath;
      this.fileLength = fileLength;
    }

    public String getFilePath()
    {
      return filePath;
    }

    /**
     * Returns the length of the file to which this block belongs
     *
     * @return length of the file to which this block belongs
     */
    public long getFileLength()
    {
      return fileLength;
    }

    /**
     * Set the length of the file to which this block belongs
     *
     * @param fileLength length of the file to which this block belongs
     */
    public void setFileLength(long fileLength)
    {
      this.fileLength = fileLength;
    }

    /**
     * Creates a fresh metadata instance for the given path.
     *
     * @param filePath path of the file; must not be null
     */
    public FileBlockMetadata newInstance(@NotNull String filePath)
    {
      Preconditions.checkNotNull(filePath);
      return new FileBlockMetadata(filePath);
    }
  }
}
| apache-2.0 |
pujav65/incubator-rya | extras/rya.geoindexing/src/test/java/org/apache/rya/indexing/accumulo/geo/GeoWaveGTQueryTest.java | 10838 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rya.indexing.accumulo.geo;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.minicluster.impl.MiniAccumuloClusterImpl;
import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
import org.apache.commons.io.FileUtils;
import org.geotools.feature.AttributeTypeBuilder;
import org.geotools.feature.simple.SimpleFeatureBuilder;
import org.geotools.feature.simple.SimpleFeatureTypeBuilder;
import org.geotools.filter.text.cql2.CQLException;
import org.geotools.filter.text.ecql.ECQL;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opengis.feature.simple.SimpleFeature;
import org.opengis.feature.simple.SimpleFeatureType;
import org.opengis.filter.Filter;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Files;
import com.vividsolutions.jts.geom.Coordinate;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.Polygon;
import mil.nga.giat.geowave.adapter.vector.FeatureDataAdapter;
import mil.nga.giat.geowave.adapter.vector.query.cql.CQLQuery;
import mil.nga.giat.geowave.core.geotime.GeometryUtils;
import mil.nga.giat.geowave.core.geotime.ingest.SpatialDimensionalityTypeProvider;
import mil.nga.giat.geowave.core.geotime.store.query.SpatialQuery;
import mil.nga.giat.geowave.core.store.CloseableIterator;
import mil.nga.giat.geowave.core.store.DataStore;
import mil.nga.giat.geowave.core.store.IndexWriter;
import mil.nga.giat.geowave.core.store.index.PrimaryIndex;
import mil.nga.giat.geowave.core.store.query.QueryOptions;
import mil.nga.giat.geowave.datastore.accumulo.AccumuloDataStore;
import mil.nga.giat.geowave.datastore.accumulo.BasicAccumuloOperations;
import mil.nga.giat.geowave.datastore.accumulo.minicluster.MiniAccumuloClusterFactory;
/**
* This class is intended to provide a self-contained, easy-to-follow example of
* a few GeoTools queries against GeoWave. For simplicity, a MiniAccumuloCluster
* is spun up and a few points from the DC area are ingested (Washington
* Monument, White House, FedEx Field). Two queries are executed against this
* data set.
*/
public class GeoWaveGTQueryTest {
  private static File tempAccumuloDir;
  private static MiniAccumuloClusterImpl accumulo;
  private static DataStore dataStore;

  // Spatial primary index shared by the ingest and all queries.
  private static final PrimaryIndex INDEX = new SpatialDimensionalityTypeProvider().createPrimaryIndex();

  // Points (to be ingested into GeoWave Data Store)
  private static final Coordinate WASHINGTON_MONUMENT = new Coordinate(-77.0352, 38.8895);
  private static final Coordinate WHITE_HOUSE = new Coordinate(-77.0366, 38.8977);
  private static final Coordinate FEDEX_FIELD = new Coordinate(-76.8644, 38.9078);

  // cities used to construct Geometries for queries
  private static final Coordinate BALTIMORE = new Coordinate(-76.6167, 39.2833);
  private static final Coordinate RICHMOND = new Coordinate(-77.4667, 37.5333);
  private static final Coordinate HARRISONBURG = new Coordinate(-78.8689, 38.4496);

  // Location name -> coordinate for the three canned features.
  private static final Map<String, Coordinate> CANNED_DATA = ImmutableMap.of(
      "Washington Monument", WASHINGTON_MONUMENT,
      "White House", WHITE_HOUSE,
      "FedEx Field", FEDEX_FIELD
  );

  private static final FeatureDataAdapter ADAPTER = new FeatureDataAdapter(getPointSimpleFeatureType());

  private static final String ACCUMULO_USER = "root";
  private static final String ACCUMULO_PASSWORD = "password";
  private static final String TABLE_NAMESPACE = "";

  /**
   * Spins up a MiniAccumuloCluster in a temp directory, opens a GeoWave data
   * store against it, and ingests the canned points.
   */
  @BeforeClass
  public static void setup() throws AccumuloException, AccumuloSecurityException, IOException, InterruptedException {
    tempAccumuloDir = Files.createTempDir();

    accumulo = MiniAccumuloClusterFactory.newAccumuloCluster(
        new MiniAccumuloConfigImpl(tempAccumuloDir, ACCUMULO_PASSWORD),
        GeoWaveGTQueryTest.class);

    accumulo.start();

    dataStore = new AccumuloDataStore(
        new BasicAccumuloOperations(
            accumulo.getZooKeepers(),
            accumulo.getInstanceName(),
            ACCUMULO_USER,
            ACCUMULO_PASSWORD,
            TABLE_NAMESPACE));

    ingestCannedData();
  }

  /** Writes one SimpleFeature per CANNED_DATA entry into the spatial index. */
  private static void ingestCannedData() throws IOException {
    final List<SimpleFeature> points = new ArrayList<>();

    System.out.println("Building SimpleFeatures from canned data set...");

    for (final Entry<String, Coordinate> entry : CANNED_DATA.entrySet()) {
      System.out.println("Added point: " + entry.getKey());
      points.add(buildSimpleFeature(entry.getKey(), entry.getValue()));
    }

    System.out.println("Ingesting canned data...");

    try (final IndexWriter<SimpleFeature> indexWriter = dataStore.createWriter(ADAPTER, INDEX)) {
      for (final SimpleFeature sf : points) {
        indexWriter.write(sf);
      }
    }

    System.out.println("Ingest complete.");
  }

  /**
   * CQL query combining a bounding box with a locationName wildcard; only the
   * two "W..." locations inside the box should match.
   */
  @Test
  public void executeCQLQueryTest() throws IOException, CQLException {
    System.out.println("Executing query, expecting to match two points...");

    final Filter cqlFilter = ECQL.toFilter("BBOX(geometry,-77.6167,38.6833,-76.6,38.9200) and locationName like 'W%'");
    final QueryOptions queryOptions = new QueryOptions(ADAPTER, INDEX);
    final CQLQuery cqlQuery = new CQLQuery(null, cqlFilter, ADAPTER);

    try (final CloseableIterator<SimpleFeature> iterator = dataStore.query(queryOptions, cqlQuery)) {
      int count = 0;
      while (iterator.hasNext()) {
        System.out.println("Query match: " + iterator.next().getID());
        count++;
      }
      System.out.println("executeCQLQueryTest count: " + count);
      // Should match "Washington Monument" and "White House"
      assertEquals(2, count);
    }
  }

  /** Spatial query with a Baltimore–Richmond envelope; all points match. */
  @Test
  public void executeBoundingBoxQueryTest() throws IOException {
    System.out.println("Constructing bounding box for the area contained by [Baltimore, MD and Richmond, VA.");

    final Geometry boundingBox = GeometryUtils.GEOMETRY_FACTORY.toGeometry(new Envelope(
        BALTIMORE,
        RICHMOND));

    System.out.println("Executing query, expecting to match ALL points...");

    final QueryOptions queryOptions = new QueryOptions(ADAPTER, INDEX);
    final SpatialQuery spatialQuery = new SpatialQuery(boundingBox);

    try (final CloseableIterator<SimpleFeature> iterator = dataStore.query(queryOptions, spatialQuery)) {
      int count = 0;
      while (iterator.hasNext()) {
        System.out.println("Query match: " + iterator.next().getID());
        count++;
      }
      System.out.println("executeBoundingBoxQueryTest count: " + count);
      // Should match "FedEx Field", "Washington Monument", and "White House"
      assertEquals(3, count);
    }
  }

  /** Spatial query with a triangular polygon; all points match. */
  @Test
  public void executePolygonQueryTest() throws IOException {
    System.out.println("Constructing polygon for the area contained by [Baltimore, MD; Richmond, VA; Harrisonburg, VA].");

    final Polygon polygon = GeometryUtils.GEOMETRY_FACTORY.createPolygon(new Coordinate[] {
        BALTIMORE,
        RICHMOND,
        HARRISONBURG,
        BALTIMORE
    });

    System.out.println("Executing query, expecting to match ALL points...");

    final QueryOptions queryOptions = new QueryOptions(ADAPTER, INDEX);
    final SpatialQuery spatialQuery = new SpatialQuery(polygon);

    /*
     * NOTICE: In this query, the adapter is added to the query options. If
     * an index has data from more than one adapter, the data associated
     * with a specific adapter can be selected.
     */
    try (final CloseableIterator<SimpleFeature> closableIterator = dataStore.query(queryOptions, spatialQuery)) {
      int count = 0;
      while (closableIterator.hasNext()) {
        System.out.println("Query match: " + closableIterator.next().getID());
        count++;
      }
      System.out.println("executePolygonQueryTest count: " + count);
      // Should match "FedEx Field", "Washington Monument", and "White House"
      assertEquals(3, count);
    }
  }

  /** Stops the mini cluster and always removes its temp directory. */
  @AfterClass
  public static void cleanup() throws IOException, InterruptedException {
    try {
      accumulo.stop();
    } finally {
      FileUtils.deleteDirectory(tempAccumuloDir);
    }
  }

  /** Feature type with a non-null locationName attribute and a point geometry. */
  private static SimpleFeatureType getPointSimpleFeatureType() {
    final String name = "PointSimpleFeatureType";
    final SimpleFeatureTypeBuilder sftBuilder = new SimpleFeatureTypeBuilder();
    final AttributeTypeBuilder atBuilder = new AttributeTypeBuilder();
    sftBuilder.setName(name);
    sftBuilder.add(atBuilder.binding(String.class).nillable(false)
        .buildDescriptor("locationName"));
    sftBuilder.add(atBuilder.binding(Geometry.class).nillable(false)
        .buildDescriptor("geometry"));

    return sftBuilder.buildFeatureType();
  }

  /** Builds a point feature whose id and locationName are both the location name. */
  private static SimpleFeature buildSimpleFeature(final String locationName, final Coordinate coordinate) {
    final SimpleFeatureBuilder builder = new SimpleFeatureBuilder(getPointSimpleFeatureType());
    builder.set("locationName", locationName);
    builder.set("geometry", GeometryUtils.GEOMETRY_FACTORY.createPoint(coordinate));

    return builder.buildFeature(locationName);
  }
}
coding0011/elasticsearch | x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/oidc/TransportOpenIdConnectLogoutAction.java | 6853 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.security.action.oidc;
import com.nimbusds.jwt.JWT;
import com.nimbusds.jwt.JWTParser;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchSecurityException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectLogoutAction;
import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectLogoutRequest;
import org.elasticsearch.xpack.core.security.action.oidc.OpenIdConnectLogoutResponse;
import org.elasticsearch.xpack.core.security.authc.Authentication;
import org.elasticsearch.xpack.core.security.authc.Realm;
import org.elasticsearch.xpack.core.security.authc.support.TokensInvalidationResult;
import org.elasticsearch.xpack.core.security.user.User;
import org.elasticsearch.xpack.security.authc.Realms;
import org.elasticsearch.xpack.security.authc.TokenService;
import org.elasticsearch.xpack.security.authc.oidc.OpenIdConnectRealm;
import java.text.ParseException;
import java.util.Map;
/**
 * Transport action responsible for generating an OpenID connect logout request to be sent to an OpenID Connect Provider.
 * It invalidates the refresh and access tokens carried in the request, then asks the authenticating OIDC realm to
 * build the logout response from the ID token that was stored in the access token's metadata.
 */
public class TransportOpenIdConnectLogoutAction extends HandledTransportAction<OpenIdConnectLogoutRequest, OpenIdConnectLogoutResponse> {
    // Registry of configured realms; used to look up the OIDC realm that authenticated the token's user.
    private final Realms realms;
    // Service that decodes and invalidates access/refresh tokens.
    private final TokenService tokenService;
    private static final Logger logger = LogManager.getLogger(TransportOpenIdConnectLogoutAction.class);
    @Inject
    public TransportOpenIdConnectLogoutAction(TransportService transportService, ActionFilters actionFilters, Realms realms,
                                              TokenService tokenService) {
        super(OpenIdConnectLogoutAction.NAME, transportService, actionFilters,
            (Writeable.Reader<OpenIdConnectLogoutRequest>) OpenIdConnectLogoutRequest::new);
        this.realms = realms;
        this.tokenService = tokenService;
    }
    @Override
    protected void doExecute(Task task, OpenIdConnectLogoutRequest request, ActionListener<OpenIdConnectLogoutResponse> listener) {
        // Pipeline: (1) invalidate the refresh token (no-op when absent), (2) decode the access token to recover the
        // authentication and the metadata stored at token creation, (3) validate that the token was issued by an OIDC
        // realm, (4) invalidate the access token, (5) respond with the logout response built from the id_token_hint.
        invalidateRefreshToken(request.getRefreshToken(), ActionListener.wrap(ignore -> {
            final String token = request.getToken();
            tokenService.getAuthenticationAndMetaData(token, ActionListener.wrap(
                tuple -> {
                    final Authentication authentication = tuple.v1();
                    final Map<String, Object> tokenMetadata = tuple.v2();
                    validateAuthenticationAndMetadata(authentication, tokenMetadata);
                    tokenService.invalidateAccessToken(token, ActionListener.wrap(
                        result -> {
                            if (logger.isTraceEnabled()) {
                                // Only the first and last 8 characters are logged so the full token never hits the logs.
                                logger.trace("OpenID Connect Logout for user [{}] and token [{}...{}]",
                                    authentication.getUser().principal(),
                                    token.substring(0, 8),
                                    token.substring(token.length() - 8));
                            }
                            OpenIdConnectLogoutResponse response = buildResponse(authentication, tokenMetadata);
                            listener.onResponse(response);
                        }, listener::onFailure)
                    );
                }, listener::onFailure));
        }, listener::onFailure));
    }
    /**
     * Builds the logout response by parsing the ID token stored as {@code id_token_hint} in the token metadata and
     * delegating to the authenticating {@link OpenIdConnectRealm}.
     *
     * @throws ElasticsearchSecurityException if the stored hint is not a parseable JWT
     */
    private OpenIdConnectLogoutResponse buildResponse(Authentication authentication, Map<String, Object> tokenMetadata) {
        final String idTokenHint = (String) getFromMetadata(tokenMetadata, "id_token_hint");
        final Realm realm = this.realms.realm(authentication.getAuthenticatedBy().getName());
        final JWT idToken;
        try {
            idToken = JWTParser.parse(idTokenHint);
        } catch (ParseException e) {
            throw new ElasticsearchSecurityException("Token Metadata did not contain a valid IdToken", e);
        }
        return ((OpenIdConnectRealm) realm).buildLogoutResponse(idToken);
    }
    /**
     * Ensures the decoded token carries metadata and an active user, and that the authenticating realm
     * exists and is an OpenID Connect realm.
     *
     * @throws ElasticsearchSecurityException if metadata, authentication, user or realm is missing
     * @throws IllegalArgumentException if the authenticating realm is not an OIDC realm
     */
    private void validateAuthenticationAndMetadata(Authentication authentication, Map<String, Object> tokenMetadata) {
        if (tokenMetadata == null) {
            throw new ElasticsearchSecurityException("Authentication did not contain metadata");
        }
        if (authentication == null) {
            throw new ElasticsearchSecurityException("No active authentication");
        }
        final User user = authentication.getUser();
        if (user == null) {
            throw new ElasticsearchSecurityException("No active user");
        }
        final Authentication.RealmRef ref = authentication.getAuthenticatedBy();
        if (ref == null || Strings.isNullOrEmpty(ref.getName())) {
            throw new ElasticsearchSecurityException("Authentication {} has no authenticating realm",
                authentication);
        }
        final Realm realm = this.realms.realm(authentication.getAuthenticatedBy().getName());
        if (realm == null) {
            throw new ElasticsearchSecurityException("Authenticating realm {} does not exist", ref.getName());
        }
        if (realm instanceof OpenIdConnectRealm == false) {
            throw new IllegalArgumentException("Access token is not valid for an OpenID Connect realm");
        }
    }
    /**
     * Returns the value stored under {@code key} in the token metadata.
     *
     * @throws ElasticsearchSecurityException if the key is absent or its value is not a String
     */
    private Object getFromMetadata(Map<String, Object> metadata, String key) {
        if (metadata.containsKey(key) == false) {
            throw new ElasticsearchSecurityException("Authentication token does not have OpenID Connect metadata [{}]", key);
        }
        Object value = metadata.get(key);
        if (null != value && value instanceof String == false) {
            throw new ElasticsearchSecurityException("In authentication token, OpenID Connect metadata [{}] is [{}] rather than " +
                "String", key, value.getClass());
        }
        return value;
    }
    // A null refresh token is legal (nothing to invalidate): complete immediately with a null result.
    private void invalidateRefreshToken(String refreshToken, ActionListener<TokensInvalidationResult> listener) {
        if (refreshToken == null) {
            listener.onResponse(null);
        } else {
            tokenService.invalidateRefreshToken(refreshToken, listener);
        }
    }
}
| apache-2.0 |
nmcl/scratch | graalvm/transactions/fork/narayana/XTS/localjunit/unit/src/test/java/com/arjuna/wst/tests/arq/BusinessAgreementWithParticipantCompletionCoordinatorTest.java | 14194 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2012, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package com.arjuna.wst.tests.arq;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import javax.xml.namespace.QName;
import javax.xml.ws.wsaddressing.W3CEndpointReference;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jboss.ws.api.addressing.MAP;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.arjuna.webservices.SoapFaultType;
import com.arjuna.webservices.wsarjtx.ArjunaTXConstants;
import com.arjuna.webservices11.ServiceRegistry;
import com.arjuna.webservices11.SoapFault11;
import com.arjuna.webservices11.wsaddr.AddressingHelper;
import com.arjuna.webservices11.wsarj.ArjunaContext;
import com.arjuna.webservices11.wsarj.InstanceIdentifier;
import com.arjuna.webservices11.wsba.State;
import com.arjuna.webservices11.wsba.client.ParticipantCompletionParticipantClient;
import com.arjuna.webservices11.wsba.processors.ParticipantCompletionParticipantProcessor;
import com.arjuna.wst.tests.TestUtil;
import com.arjuna.wst.tests.WarDeployment;
import com.arjuna.wst.tests.arq.TestParticipantCompletionParticipantProcessor.ParticipantCompletionParticipantDetails;
@RunWith(Arquillian.class)
public class BusinessAgreementWithParticipantCompletionCoordinatorTest extends BaseWSTTest {

    @Deployment
    public static WebArchive createDeployment() {
        return WarDeployment.getDeployment(
                ParticipantCompletionParticipantDetails.class,
                TestParticipantCompletionParticipantProcessor.class);
    }

    /** Processor registered before this test ran; restored in {@link #tearDown()}. */
    private ParticipantCompletionParticipantProcessor origParticipantCompletionParticipantProcessor;
    /** Recording processor that captures every message delivered to the participant endpoint. */
    private TestParticipantCompletionParticipantProcessor testParticipantCompletionParticipantProcessor =
            new TestParticipantCompletionParticipantProcessor();

    @Before
    public void setUp() throws Exception {
        // Install the recording processor so each test can inspect the message the client sent.
        // (The original also fetched the ServiceRegistry into an unused local; removed.)
        origParticipantCompletionParticipantProcessor =
                ParticipantCompletionParticipantProcessor.setProcessor(testParticipantCompletionParticipantProcessor);
    }

    /** Sends a Close message and verifies it arrives with the expected addressing and context. */
    @Test
    public void testSendClose() throws Exception {
        final String messageId = "testSendClose";
        final InstanceIdentifier instanceIdentifier = new InstanceIdentifier("1");
        W3CEndpointReference endpoint =
                TestUtil.getParticipantCompletionParticipantEndpoint(instanceIdentifier.getInstanceIdentifier());
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        ParticipantCompletionParticipantClient.getClient().sendClose(endpoint, map, new InstanceIdentifier("sender"));
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertTrue(details.hasClose());
        checkDetails(details, true, true, messageId, instanceIdentifier);
    }

    /** Sends a Cancel message and verifies it arrives with the expected addressing and context. */
    @Test
    public void testSendCancel() throws Exception {
        final String messageId = "testSendCancel";
        final InstanceIdentifier instanceIdentifier = new InstanceIdentifier("2");
        W3CEndpointReference endpoint =
                TestUtil.getParticipantCompletionParticipantEndpoint(instanceIdentifier.getInstanceIdentifier());
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        ParticipantCompletionParticipantClient.getClient().sendCancel(endpoint, map, new InstanceIdentifier("sender"));
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertTrue(details.hasCancel());
        checkDetails(details, true, true, messageId, instanceIdentifier);
    }

    /** Sends a Compensate message and verifies it arrives with the expected addressing and context. */
    @Test
    public void testSendCompensate() throws Exception {
        final String messageId = "testSendCompensate";
        final InstanceIdentifier instanceIdentifier = new InstanceIdentifier("3");
        W3CEndpointReference endpoint =
                TestUtil.getParticipantCompletionParticipantEndpoint(instanceIdentifier.getInstanceIdentifier());
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        ParticipantCompletionParticipantClient.getClient().sendCompensate(endpoint, map, new InstanceIdentifier("sender"));
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertTrue(details.hasCompensate());
        checkDetails(details, true, true, messageId, instanceIdentifier);
    }

    /** Sends a Failed message; notification messages carry no From header, hence hasFrom == false. */
    @Test
    public void testSendFaulted() throws Exception {
        final String messageId = "testSendFaulted";
        final InstanceIdentifier instanceIdentifier = new InstanceIdentifier("4");
        W3CEndpointReference endpoint =
                TestUtil.getParticipantCompletionParticipantEndpoint(instanceIdentifier.getInstanceIdentifier());
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        ParticipantCompletionParticipantClient.getClient().sendFailed(endpoint, map, new InstanceIdentifier("sender"));
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertTrue(details.hasFaulted());
        checkDetails(details, false, true, messageId, instanceIdentifier);
    }

    /** Sends an Exited message; notification messages carry no From header, hence hasFrom == false. */
    @Test
    public void testSendExited() throws Exception {
        final String messageId = "testSendExited";
        final InstanceIdentifier instanceIdentifier = new InstanceIdentifier("5");
        W3CEndpointReference endpoint =
                TestUtil.getParticipantCompletionParticipantEndpoint(instanceIdentifier.getInstanceIdentifier());
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        ParticipantCompletionParticipantClient.getClient().sendExited(endpoint, map, new InstanceIdentifier("sender"));
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertTrue(details.hasExited());
        checkDetails(details, false, true, messageId, instanceIdentifier);
    }

    /** Sends a Status message carrying the ACTIVE state and verifies the state round-trips. */
    @Test
    public void testSendStatus() throws Exception {
        final String messageId = "testSendStatus";
        final InstanceIdentifier instanceIdentifier = new InstanceIdentifier("6");
        W3CEndpointReference endpoint =
                TestUtil.getParticipantCompletionParticipantEndpoint(instanceIdentifier.getInstanceIdentifier());
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        final State state = State.STATE_ACTIVE;
        ParticipantCompletionParticipantClient.getClient().sendStatus(endpoint, map, new InstanceIdentifier("sender"), state.getValue());
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertNotNull(details.hasStatus());
        assertEquals(details.hasStatus().getState(), state.getValue());
        checkDetails(details, true, true, messageId, instanceIdentifier);
    }

    /** Sends a GetStatus message and verifies it arrives with the expected addressing and context. */
    @Test
    public void testSendGetStatus() throws Exception {
        final String messageId = "testSendGetStatus";
        final InstanceIdentifier instanceIdentifier = new InstanceIdentifier("7");
        W3CEndpointReference endpoint =
                TestUtil.getParticipantCompletionParticipantEndpoint(instanceIdentifier.getInstanceIdentifier());
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        ParticipantCompletionParticipantClient.getClient().sendGetStatus(endpoint, map, new InstanceIdentifier("sender"));
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertTrue(details.hasGetStatus());
        checkDetails(details, true, true, messageId, instanceIdentifier);
    }

    /** Sends a SOAP fault and verifies its type, reason and subcode are captured intact. */
    @Test
    public void testSendError() throws Exception {
        // Fixed: the original reused the message id "testSendGetStatus", which could collide with
        // the message recorded by testSendGetStatus(); also dropped an unused InstanceIdentifier local.
        final String messageId = "testSendError";
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        final String reason = "testSendErrorReason";
        final SoapFaultType soapFaultType = SoapFaultType.FAULT_SENDER;
        final QName subcode = ArjunaTXConstants.UNKNOWNERROR_ERROR_CODE_QNAME;
        final SoapFault11 soapFault = new SoapFault11(soapFaultType, subcode, reason);
        ParticipantCompletionParticipantClient.getClient().sendSoapFault(soapFault, null, map, TestUtil.getBusinessActivityFaultAction());
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertNotNull(details.getSoapFault());
        assertEquals(details.getSoapFault().getSoapFaultType(), soapFault.getSoapFaultType());
        assertEquals(details.getSoapFault().getReason(), soapFault.getReason());
        assertEquals(details.getSoapFault().getSubcode(), soapFault.getSubcode());
        checkDetails(details, false, false, messageId, null);
    }

    /** Sends a NotCompleted message; notification messages carry no From header, hence hasFrom == false. */
    @Test
    public void testSendNotCompleted() throws Exception {
        final String messageId = "testSendNotCompleted";
        final InstanceIdentifier instanceIdentifier = new InstanceIdentifier("9");
        W3CEndpointReference endpoint =
                TestUtil.getParticipantCompletionParticipantEndpoint(instanceIdentifier.getInstanceIdentifier());
        final MAP map = AddressingHelper.createRequestContext(TestUtil.participantCompletionParticipantServiceURI, messageId);
        ParticipantCompletionParticipantClient.getClient().sendNotCompleted(endpoint, map, new InstanceIdentifier("sender"));
        final ParticipantCompletionParticipantDetails details =
                testParticipantCompletionParticipantProcessor.getParticipantCompletionParticipantDetails(messageId, 10000);
        assertTrue(details.hasNotCompleted());
        checkDetails(details, false, true, messageId, instanceIdentifier);
    }

    @After
    public void tearDown() throws Exception {
        ParticipantCompletionParticipantProcessor.setProcessor(origParticipantCompletionParticipantProcessor);
    }

    /**
     * Checks that a captured message was addressed to the participant service with the expected
     * message id and a "none" replyTo, that the optional From/FaultTo headers point back at the
     * coordinator service, and that the Arjuna context carries the expected instance identifier.
     *
     * @param details            captured message details
     * @param hasFrom            whether a From header pointing at the coordinator is expected
     * @param hasFaultTo         whether a FaultTo header pointing at the coordinator is expected
     * @param messageId          expected WS-Addressing message id
     * @param instanceIdentifier expected instance identifier, or null when no Arjuna context is expected
     */
    private void checkDetails(ParticipantCompletionParticipantDetails details, boolean hasFrom, boolean hasFaultTo,
                              String messageId, InstanceIdentifier instanceIdentifier) {
        MAP inMAP = details.getMAP();
        ArjunaContext inArjunaContext = details.getArjunaContext();
        assertEquals(inMAP.getTo(), TestUtil.participantCompletionParticipantServiceURI);
        assertNotNull(inMAP.getReplyTo());
        assertTrue(AddressingHelper.isNoneReplyTo(inMAP));
        if (hasFrom) {
            assertNotNull(inMAP.getFrom());
            assertEquals(inMAP.getFrom().getAddress(), TestUtil.participantCompletionCoordinatorServiceURI);
        } else {
            assertNull(inMAP.getFrom());
        }
        if (hasFaultTo) {
            assertNotNull(inMAP.getFaultTo());
            assertEquals(inMAP.getFaultTo().getAddress(), TestUtil.participantCompletionCoordinatorServiceURI);
        } else {
            // Fixed: the original asserted getFrom() again here, so the absence of a FaultTo
            // header was never actually verified.
            assertNull(inMAP.getFaultTo());
        }
        assertNotNull(inMAP.getMessageID());
        assertEquals(inMAP.getMessageID(), messageId);
        if (instanceIdentifier == null) {
            assertNull(inArjunaContext);
        } else {
            assertNotNull(inArjunaContext);
            assertEquals(instanceIdentifier.getInstanceIdentifier(), inArjunaContext.getInstanceIdentifier().getInstanceIdentifier());
        }
    }
}
| apache-2.0 |
blackberry/WebWorks-Community-APIs | Smartphone/Backlight/src/webworks/device/backlight/BacklightNamespace.java | 7622 | /*
* Copyright 2010-2012 Research In Motion Limited.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package webworks.device.backlight;
import net.rim.device.api.script.Scriptable;
import net.rim.device.api.script.ScriptableFunction;
import net.rim.device.api.system.Backlight;
public class BacklightNamespace extends Scriptable {

    /** Name of the scriptable function exposed to WebWorks JavaScript. */
    public static final String FUNCTION_BACKLIGHT = "manageBacklight";
    // Command/property names accepted as arguments to manageBacklight().
    public static final String BACKLIGHT_ENABLE = "backlightEnable";
    public static final String GET_BRIGHTNESS = "getBrightness";
    public static final String GET_BRIGHTNESS_DEFAULT = "getBrightnessDefault";
    public static final String GET_BRIGHTNESS_INCREMENT = "getBrightnessIncrement";
    public static final String GET_TIMEOUT_DEFAULT = "getTimeoutDefault";
    public static final String IS_BRIGHTNESS_CONFIGURABLE = "isBrightnessConfigurable";
    public static final String IS_ENABLED = "isEnabled";
    public static final String SET_BRIGHTNESS = "setBrightness";
    public static final String SET_TIMEOUT = "setTimeout";

    /**
     * Resolves fields on this namespace; only {@link #FUNCTION_BACKLIGHT} is exposed.
     */
    public Object getField(String name) throws Exception
    {
        if (name.equals(FUNCTION_BACKLIGHT))
        {
            return new BacklightInfoProperties();
        }
        return super.getField(name);
    }

    /**
     * Scriptable function behind manageBacklight(). The first argument selects a command
     * ("all", backlightEnable, setBrightness, setTimeout) or one or more property names;
     * the optional second argument carries the value for the set/enable commands.
     */
    public class BacklightInfoProperties extends ScriptableFunction
    {
        public Object invoke(Object obj, Object[] args) throws Exception
        {
            if (args.length == 0) {
                return UNDEFINED;
            }
            try {
                String command = args[0].toString();
                // Fixed: the original evaluated "args[1] != null || args[1].equals(\"\")", which
                // NPEs when args[1] is null and throws ArrayIndexOutOfBoundsException when the
                // second argument is omitted entirely. Normalise it to a safe empty string.
                String value = (args.length > 1 && args[1] != null) ? args[1].toString() : "";
                if (command.equalsIgnoreCase("all") || command.equals("")) {
                    return getBacklightProperties("all");
                } else if (command.equalsIgnoreCase(BACKLIGHT_ENABLE)) {
                    if (value.equalsIgnoreCase("on")) {
                        Backlight.enable(true);
                        return "Turned on the backlight";
                    } else if (value.equalsIgnoreCase("off")) {
                        Backlight.enable(false);
                        return "Turned off the backlight";
                    }
                    return "Unable to turn on or off the backlight";
                } else if (command.equalsIgnoreCase(SET_BRIGHTNESS)) {
                    if (value.length() > 0) {
                        Backlight.setBrightness(Integer.parseInt(value));
                        // Fixed: the original echoed args[0] (the command) instead of the level.
                        return "Set the brightness level to " + value;
                    }
                    return "Unable to set the brightness level. No brightness level provided.";
                } else if (command.equalsIgnoreCase(SET_TIMEOUT)) {
                    if (value.length() > 0) {
                        Backlight.setTimeout(Integer.parseInt(value));
                        // Fixed: the original echoed args[0] (the command) instead of the timeout.
                        return "Set the timeout value to " + value;
                    }
                    return "Unable to set the timeout value. No timeout value provided.";
                } else {
                    // Treat every argument as a property name and assemble a JSON object from them.
                    String jsonString = "{ ";
                    for (int i = 0; i < args.length; i++) {
                        String functionParameter = args[i].toString();
                        if (functionParameter.equalsIgnoreCase(GET_BRIGHTNESS)) {
                            jsonString += getBacklightProperties(GET_BRIGHTNESS);
                        } else if (functionParameter.equals(GET_BRIGHTNESS_DEFAULT)) {
                            jsonString += getBacklightProperties(GET_BRIGHTNESS_DEFAULT);
                        } else if (functionParameter.equals(GET_BRIGHTNESS_INCREMENT)) {
                            jsonString += getBacklightProperties(GET_BRIGHTNESS_INCREMENT);
                        } else if (functionParameter.equals(GET_TIMEOUT_DEFAULT)) {
                            jsonString += getBacklightProperties(GET_TIMEOUT_DEFAULT);
                        } else if (functionParameter.equals(IS_BRIGHTNESS_CONFIGURABLE)) {
                            jsonString += getBacklightProperties(IS_BRIGHTNESS_CONFIGURABLE);
                        } else if (functionParameter.equals(IS_ENABLED)) {
                            jsonString += getBacklightProperties(IS_ENABLED);
                        } else {
                            // Unknown property name: record it in the JSON so the caller can see it.
                            jsonString += "\"Unknown backlight parameter received\" : \"" + functionParameter + "\", ";
                        }
                    }
                    // Strip the trailing ", " appended by each entry before closing the object.
                    if (jsonString.endsWith(", ")) {
                        jsonString = jsonString.substring(0, jsonString.length() - 2);
                    }
                    jsonString += " }";
                    // Fixed: the original built the JSON and then fell through to a generic status
                    // string, discarding it; return the assembled JSON (matching the "all" branch).
                    return jsonString;
                }
            } catch (Exception ex) {
                return "{\"Exception\":\"" + ex.getMessage() + "\"}";
            }
        }

        /**
         * Returns a JSON fragment (key/value pair with trailing ", ") for the requested
         * backlight property, or a complete JSON string for "all".
         */
        public String getBacklightProperties(String props) {
            BacklightProperties backlightProperties = new BacklightProperties();
            String jsonString = "";
            if(props.equalsIgnoreCase(GET_BRIGHTNESS)) {
                jsonString = "\"getBrightness\" : \"" + backlightProperties.brightness + "\", ";
                return jsonString;
            } else if(props.equalsIgnoreCase(GET_BRIGHTNESS_DEFAULT)) {
                jsonString = "\"getBrightnessDefault\" : \"" + backlightProperties.brightnessDefault + "\", ";
                return jsonString;
            } else if(props.equalsIgnoreCase(GET_BRIGHTNESS_INCREMENT)) {
                jsonString = "\"getBrightnessIncrement\" : \"" + backlightProperties.brightnessIncrement + "\", ";
                return jsonString;
            } else if(props.equalsIgnoreCase(GET_TIMEOUT_DEFAULT)) {
                jsonString = "\"getTimeoutDefault\" : \"" + backlightProperties.timeoutDefault + "\", ";
                return jsonString;
            } else if(props.equalsIgnoreCase(IS_BRIGHTNESS_CONFIGURABLE)) {
                jsonString = "\"isBrightnessConfigurable\" : \"" + backlightProperties.isBrightnessConfigurable + "\", ";
                return jsonString;
            } else if(props.equalsIgnoreCase(IS_ENABLED)) {
                jsonString = "\"isEnabled\" : \"" + backlightProperties.isEnabled + "\", ";
                return jsonString;
            } else if(props.equals("all")) {
                return backlightProperties.objToJsonString();
            }
            return "\"PropertyUndefined\":\"" + props + "\"";
        }
    }
}
| apache-2.0 |
nknize/elasticsearch | x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/ccr/action/StatsResponsesTests.java | 2876 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ccr.action;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.test.AbstractWireSerializingTestCase;
import org.elasticsearch.xpack.core.ccr.ShardFollowNodeTaskStatus;
import org.elasticsearch.xpack.core.ccr.action.FollowStatsAction;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * Wire-serialization round-trip tests for {@link FollowStatsAction.StatsResponses}.
 */
public class StatsResponsesTests extends AbstractWireSerializingTestCase<FollowStatsAction.StatsResponses> {
    @Override
    protected Writeable.Reader<FollowStatsAction.StatsResponses> instanceReader() {
        // Reader the base class uses to deserialize instances during the round trip.
        return FollowStatsAction.StatsResponses::new;
    }
    @Override
    protected FollowStatsAction.StatsResponses createTestInstance() {
        return createStatsResponse();
    }
    // Static factory building a StatsResponses that holds 0-8 randomly populated shard-follow
    // task statuses; the values only need to survive serialization, not be semantically valid.
    static FollowStatsAction.StatsResponses createStatsResponse() {
        int numResponses = randomIntBetween(0, 8);
        List<FollowStatsAction.StatsResponse> responses = new ArrayList<>(numResponses);
        for (int i = 0; i < numResponses; i++) {
            // ShardFollowNodeTaskStatus takes a long positional constructor; arguments are
            // filled with random values of the matching types, ending with an optional
            // fatal-error exception that is present roughly half the time.
            ShardFollowNodeTaskStatus status = new ShardFollowNodeTaskStatus(
                randomAlphaOfLength(4),
                randomAlphaOfLength(4),
                randomAlphaOfLength(4),
                randomInt(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomIntBetween(0, Integer.MAX_VALUE),
                randomIntBetween(0, Integer.MAX_VALUE),
                randomIntBetween(0, Integer.MAX_VALUE),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                randomNonNegativeLong(),
                Collections.emptyNavigableMap(),
                randomNonNegativeLong(),
                randomBoolean() ? new ElasticsearchException("fatal error") : null);
            responses.add(new FollowStatsAction.StatsResponse(status));
        }
        return new FollowStatsAction.StatsResponses(Collections.emptyList(), Collections.emptyList(), responses);
    }
}
| apache-2.0 |
pspaude/uPortal | uportal-war/src/main/java/org/jasig/portal/permission/dao/jpa/JpaPermissionOwnerDao.java | 5965 | /**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portal.permission.dao.jpa;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Root;
import org.jasig.portal.jpa.BasePortalJpaDao;
import org.jasig.portal.jpa.OpenEntityManager;
import org.jasig.portal.permission.IPermissionActivity;
import org.jasig.portal.permission.IPermissionOwner;
import org.jasig.portal.permission.dao.IPermissionOwnerDao;
import org.springframework.stereotype.Repository;
import com.google.common.base.Function;
/**
 * JpaPermissionOwnerDao provides a default JPA/Hibernate implementation of
 * the IPermissionOwnerDao interface. Owners are looked up by surrogate id or
 * by fname (a natural id); activities are resolved by scanning the owner's
 * activity set.
 *
 * @author Jen Bourey, jbourey@unicon.net
 * @version $Revision$
 * @since 3.3
 */
@Repository("permissionOwnerDao")
public class JpaPermissionOwnerDao extends BasePortalJpaDao implements IPermissionOwnerDao {
    // Criteria query, built once in afterPropertiesSet(), that loads every owner with its
    // activities eagerly fetched.
    private CriteriaQuery<PermissionOwnerImpl> findAllPermissionOwners;
    @Override
    public void afterPropertiesSet() throws Exception {
        this.findAllPermissionOwners = this.createCriteriaQuery(new Function<CriteriaBuilder, CriteriaQuery<PermissionOwnerImpl>>() {
            @Override
            public CriteriaQuery<PermissionOwnerImpl> apply(CriteriaBuilder cb) {
                final CriteriaQuery<PermissionOwnerImpl> criteriaQuery = cb.createQuery(PermissionOwnerImpl.class);
                final Root<PermissionOwnerImpl> ownerRoot = criteriaQuery.from(PermissionOwnerImpl.class);
                criteriaQuery.select(ownerRoot);
                // LEFT join fetch so owners with no activities are still returned.
                ownerRoot.fetch(PermissionOwnerImpl_.activities, JoinType.LEFT);
                return criteriaQuery;
            }
        });
    }
    @Override
    public List<IPermissionOwner> getAllPermissionOwners() {
        final TypedQuery<PermissionOwnerImpl> query = this.createCachedQuery(this.findAllPermissionOwners);
        final List<PermissionOwnerImpl> resultList = query.getResultList();
        // The join fetch can duplicate owners in the result list; de-duplicate via a
        // LinkedHashSet while preserving the result order.
        return new ArrayList<IPermissionOwner>(new LinkedHashSet<IPermissionOwner>(resultList));
    }
    @Override
    @PortalTransactional
    public IPermissionOwner getOrCreatePermissionOwner(String name, String fname) {
        // Owners are keyed by fname: create and persist a new one only when absent.
        IPermissionOwner owner = getPermissionOwner(fname);
        if (owner == null) {
            owner = new PermissionOwnerImpl(name, fname);
            this.getEntityManager().persist(owner);
        }
        return owner;
    }
    @Override
    public IPermissionOwner getPermissionOwner(long id){
        return getEntityManager().find(PermissionOwnerImpl.class, id);
    }
    @OpenEntityManager(unitName = PERSISTENCE_UNIT_NAME)
    @Override
    public IPermissionOwner getPermissionOwner(String fname){
        // fname is mapped as a natural id, so this lookup can be served from
        // Hibernate's natural-id resolution.
        final NaturalIdQuery<PermissionOwnerImpl> query = this.createNaturalIdQuery(PermissionOwnerImpl.class);
        query.using(PermissionOwnerImpl_.fname, fname);
        return query.load();
    }
    @Override
    @PortalTransactional
    public IPermissionOwner saveOwner(IPermissionOwner owner) {
        this.getEntityManager().persist(owner);
        return owner;
    }
    @Override
    @PortalTransactional
    public IPermissionActivity getOrCreatePermissionActivity(
            IPermissionOwner owner, String name, String fname, String targetProviderKey) {
        // Activities are keyed by (owner, fname); a newly created activity is attached to the
        // owner's activity set and persisted via cascade rather than an explicit persist call.
        IPermissionActivity activity = getPermissionActivity(owner.getId(), fname);
        if (activity == null) {
            activity = new PermissionActivityImpl(name, fname, targetProviderKey);
            owner.getActivities().add(activity);
        }
        return activity;
    }
    @Override
    public IPermissionActivity getPermissionActivity(long id) {
        return getEntityManager().find(PermissionActivityImpl.class, id);
    }
    @Override
    public IPermissionActivity getPermissionActivity(long ownerId, String activityFname) {
        final IPermissionOwner permissionOwner = this.getPermissionOwner(ownerId);
        return findActivity(permissionOwner, activityFname);
    }
    @Override
    public IPermissionActivity getPermissionActivity(String ownerFname, String activityFname) {
        final IPermissionOwner permissionOwner = this.getPermissionOwner(ownerFname);
        return findActivity(permissionOwner, activityFname);
    }
    @Override
    @PortalTransactional
    public IPermissionActivity savePermissionActivity(IPermissionActivity activity) {
        this.getEntityManager().persist(activity);
        return activity;
    }
    // Linear scan of the owner's activity set; returns null when the owner is null or no
    // activity with the given fname exists.
    protected IPermissionActivity findActivity(final IPermissionOwner permissionOwner, String activityFname) {
        if (permissionOwner == null) {
            return null;
        }
        final Set<IPermissionActivity> activities = permissionOwner.getActivities();
        for (final IPermissionActivity permissionActivity : activities) {
            if (activityFname.equals(permissionActivity.getFname())) {
                return permissionActivity;
            }
        }
        return null;
    }
}
| apache-2.0 |
fnussber/ocs | bundle/edu.gemini.shared.util/src/main/java/edu/gemini/shared/util/StringUtil.java | 2195 | package edu.gemini.shared.util;
import java.text.CharacterIterator;
import java.text.StringCharacterIterator;
import java.util.Collection;
import java.util.Iterator;
/**
 * Static string helpers: camelCase-to-display-name conversion and collection joining.
 * Not instantiable.
 */
public final class StringUtil {

    private StringUtil() {
        // utility class
    }

    /**
     * Converts a camelCase property name to a display name: the first character is
     * upper-cased and a space is inserted before each lower-to-upper transition
     * (e.g. "propertyName" becomes "Property Name"). A leading '0' in the result
     * is stripped. Returns the empty string for empty input.
     */
    public static String toDisplayName(String propertyName) {
        if (propertyName.isEmpty()) return "";

        final StringBuilder out = new StringBuilder();
        final char head = propertyName.charAt(0);
        boolean prevUpper = Character.isUpperCase(head);
        out.append(Character.toUpperCase(head));

        for (int i = 1; i < propertyName.length(); i++) {
            final char ch = propertyName.charAt(i);
            final boolean curUpper = Character.isUpperCase(ch);
            if (curUpper && !prevUpper) {
                // Word boundary: lower-case run followed by an upper-case letter.
                out.append(' ');
            }
            out.append(ch);
            prevUpper = curUpper;
        }

        return (out.charAt(0) == '0') ? out.substring(1) : out.toString();
    }

    /** Maps an element to its string representation when joining. */
    @FunctionalInterface
    public interface MapToString<T> {
        String apply(T t);
    }

    /** Joins with "[", ",", "]" and {@code toString}. */
    public static <T> String mkString(Collection<T> c) {
        return mkString(c, ",");
    }

    /** Joins with "[", the given separator, "]" and {@code toString}. */
    public static <T> String mkString(Collection<T> c, String sep) {
        return mkString(c, "[", sep, "]");
    }

    /** Joins with the given prefix, separator, suffix and {@code toString}. */
    public static <T> String mkString(Collection<T> c, String prefix, String sep, String suffix) {
        return mkString(c, prefix, sep, suffix, Object::toString);
    }

    /**
     * Joins the mapped string forms of the elements of {@code c}, separated by
     * {@code sep} and wrapped in {@code prefix}/{@code suffix}.
     */
    public static <T> String mkString(Collection<T> c, String prefix, String sep, String suffix, MapToString<T> m) {
        final StringBuilder sb = new StringBuilder(prefix);
        boolean first = true;
        for (final T element : c) {
            if (!first) sb.append(sep);
            sb.append(m.apply(element));
            first = false;
        }
        return sb.append(suffix).toString();
    }
}
| bsd-3-clause |
plumer/codana | tomcat_files/8.0.22/JMXAccessorInvokeTask.java | 5748 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.ant.jmx;
import java.util.ArrayList;
import java.util.List;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import org.apache.tools.ant.BuildException;
/**
* Access <em>JMX</em> JSR 160 MBeans Server.
* <ul>
* <li>open more then one JSR 160 rmi connection</li>
* <li>Get/Set Mbeans attributes</li>
* <li>Call Mbean Operation with arguments</li>
* <li>Argument values can be converted from string to int,long,float,double,boolean,ObjectName or InetAddress </li>
* <li>Query Mbeans</li>
* <li>Show Get, Call, Query result at Ant console log</li>
* <li>Bind Get, Call, Query result at Ant properties</li>
* </ul>
*
* Examples:
* <ul>
* <li>
* Get a session attribute hello from session with ref <em>${sessionid.0}</em> form
* app <em>Catalina:type=Manager,context=/ClusterTest,host=localhost</em>
* <pre>
* <jmx:invoke
* name="Catalina:type=Manager,context=/ClusterTest,host=localhost"
* operation="getSessionAttribute"
* resultproperty="hello">
* <arg value="${sessionid.0}"/>
* <arg value="Hello"/>
* </jmx:invoke>
* </pre>
* </li>
* <li>
* Create new AccessLogger at localhost
* <code>
* <jmx:invoke
* name="Catalina:type=MBeanFactory"
* operation="createAccessLoggerValve"
* resultproperty="accessLoggerObjectName"
* >
* <arg value="Catalina:type=Host,host=localhost"/>
* </jmx:invoke>
*
* </code>
* </li>
* <li>
* Remove existing AccessLogger at localhost
* <code>
* <jmx:invoke
* name="Catalina:type=MBeanFactory"
* operation="removeValve"
* >
* <arg value="Catalina:type=Valve,name=AccessLogValve,host=localhost"/>
* </jmx:invoke>
*
* </code>
* </li>
* </ul>
* <p>
* First call to a remote MBeanserver save the JMXConnection a referenz <em>jmx.server</em>
* </p>
* These tasks require Ant 1.6 or later interface.
*
* @author Peter Rossbach
* @since 5.5.10
*/
public class JMXAccessorInvokeTask extends JMXAccessorTask {

    // ----------------------------------------------------- Instance Variables

    // Name of the MBean operation to invoke.
    private String operation;
    // Nested <arg> elements supplied by the Ant build file.
    private List<Arg> args = new ArrayList<>();

    // ------------------------------------------------------------- Properties

    /** @return the MBean operation name that will be invoked. */
    public String getOperation() {
        return operation;
    }

    /** @param operation the MBean operation name to invoke. */
    public void setOperation(String operation) {
        this.operation = operation;
    }

    /** Receives a nested {@code <arg>} element from Ant. */
    public void addArg(Arg arg) {
        args.add(arg);
    }

    /** @return the configured operation arguments. */
    public List<Arg> getArgs() {
        return args;
    }

    /** @param args replaces the configured operation arguments. */
    public void setArgs(List<Arg> args) {
        this.args = args;
    }

    // ------------------------------------------------------ protected Methods

    /**
     * Validates the task configuration and performs the MBean invocation.
     *
     * @exception BuildException if 'name' or 'operation' is missing
     */
    @Override
    public String jmxExecute(MBeanServerConnection jmxServerConnection)
            throws Exception {

        if (getName() == null) {
            throw new BuildException("Must specify a 'name'");
        }
        if ((operation == null)) {
            throw new BuildException(
                    "Must specify a 'operation' for call");
        }
        return jmxInvoke(jmxServerConnection, getName());
    }

    /**
     * Invokes {@link #getOperation()} on the MBean registered under
     * {@code name}, converting each argument to its declared type first.
     * A non-null result is echoed to the Ant log and bound to the configured
     * result property.
     *
     * @param jmxServerConnection the connection to invoke through
     * @param name the ObjectName of the target MBean
     * @return always {@code null} (errors are reported via exceptions)
     * @throws Exception on any JMX or conversion failure
     */
    protected String jmxInvoke(MBeanServerConnection jmxServerConnection, String name) throws Exception {
        final Object result;

        if (args == null) {
            // No argument list at all: invoke the no-arg form.
            result = jmxServerConnection.invoke(new ObjectName(name),
                    operation, null, null);
        } else {
            final int count = args.size();
            final Object[] values = new Object[count];
            final String[] signature = new String[count];

            for (int idx = 0; idx < count; idx++) {
                final Arg arg = args.get(idx);
                if (arg.getType() == null) {
                    // Untyped arguments default to String and are passed verbatim.
                    arg.setType("java.lang.String");
                    signature[idx] = arg.getType();
                    values[idx] = arg.getValue();
                } else {
                    signature[idx] = arg.getType();
                    values[idx] = convertStringToType(arg.getValue(), arg.getType());
                }
            }
            result = jmxServerConnection.invoke(new ObjectName(name), operation, values, signature);
        }

        if (result != null) {
            echoResult(operation, result);
            createProperty(result);
        }
        return null;
    }
}
| mit |
AlphaModder/SpongeAPI | src/main/java/org/spongepowered/api/service/user/UserStorageService.java | 4365 | /*
* This file is part of SpongeAPI, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.api.service.user;
import org.spongepowered.api.profile.GameProfile;
import org.spongepowered.api.entity.living.player.Player;
import org.spongepowered.api.entity.living.player.User;
import org.spongepowered.api.profile.GameProfileManager;
import java.util.Collection;
import java.util.Optional;
import java.util.UUID;
/**
* Stores the persistent {@link User} data of a {@link Player}.
*/
public interface UserStorageService {

    /**
     * Looks up the stored data of a {@link User} by unique id.
     *
     * @param uniqueId The UUID of the user
     * @return The {@link User}, or {@link Optional#empty()} if no data is stored
     */
    Optional<User> get(UUID uniqueId);

    /**
     * Looks up the stored data of a {@link User} by the name the user was last
     * seen with (compared case-insensitively).
     *
     * <p>For a player's current name, query the {@link GameProfileManager}
     * service instead.</p>
     *
     * @param lastKnownName The last known user name
     * @return The {@link User}, or {@link Optional#empty()} if no data is stored
     */
    Optional<User> get(String lastKnownName);

    /**
     * Looks up the stored data of a {@link User} by {@link GameProfile}.
     *
     * @param profile The game profile
     * @return The {@link User}, or {@link Optional#empty()} if no data is stored
     */
    Optional<User> get(GameProfile profile);

    /**
     * Returns the persistent {@link User} associated with the given
     * {@link GameProfile}, creating it first if none exists yet.
     *
     * <p>{@link GameProfile}s are obtained from the {@link GameProfileManager}.
     * </p>
     *
     * @param profile The game profile
     * @return The existing or newly created user
     */
    User getOrCreate(GameProfile profile);

    /**
     * Returns every {@link GameProfile} that has stored {@link User} data.
     *
     * <p>Depending on how much data is stored, this call can be
     * resource-intensive.</p>
     *
     * <p>Resolve individual entries with {@link #get(GameProfile)}.</p>
     *
     * @return A {@link Collection} of {@link GameProfile}s
     */
    Collection<GameProfile> getAll();

    /**
     * Removes the stored data of the user identified by the given profile.
     *
     * <p>This may not work if the user is logged in.</p>
     *
     * @param profile The profile of the user whose data is removed
     * @return true if the deletion was successful
     */
    boolean delete(GameProfile profile);

    /**
     * Removes the stored data of the given user.
     *
     * <p>This may not work if the user is logged in.</p>
     *
     * @param user The user whose data is removed
     * @return true if the deletion was successful
     */
    boolean delete(User user);

    /**
     * Returns all {@link GameProfile}s with stored {@link User} data whose last
     * known user names start with the given string (case-insensitive).
     *
     * <p>Resolve individual entries with {@link #get(GameProfile)}.</p>
     *
     * @param lastKnownName The user name prefix to match
     * @return The matching profiles
     */
    Collection<GameProfile> match(String lastKnownName);
}
| mit |
dominicdesu/openhab | bundles/io/org.openhab.io.caldav/src/main/java/org/openhab/io/caldav/internal/job/EventReloaderJob.java | 20475 | /**
* Copyright (c) 2010-2016, openHAB.org and others.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.io.caldav.internal.job;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDateTime;
import org.openhab.io.caldav.CalDavEvent;
import org.openhab.io.caldav.EventNotifier;
import org.openhab.io.caldav.internal.CalDavConfig;
import org.openhab.io.caldav.internal.CalDavLoaderImpl;
import org.openhab.io.caldav.internal.EventStorage;
import org.openhab.io.caldav.internal.EventStorage.CalendarRuntime;
import org.openhab.io.caldav.internal.EventStorage.EventContainer;
import org.openhab.io.caldav.internal.Util;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;
import org.quartz.JobKey;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.sardine.DavResource;
import com.github.sardine.Sardine;
import com.github.sardine.impl.SardineException;
import net.fortuna.ical4j.data.CalendarBuilder;
import net.fortuna.ical4j.data.ParserException;
import net.fortuna.ical4j.data.UnfoldingReader;
import net.fortuna.ical4j.model.Calendar;
import net.fortuna.ical4j.model.Component;
import net.fortuna.ical4j.model.ComponentList;
import net.fortuna.ical4j.model.DateTime;
import net.fortuna.ical4j.model.Period;
import net.fortuna.ical4j.model.PeriodList;
import net.fortuna.ical4j.model.Property;
import net.fortuna.ical4j.model.component.CalendarComponent;
import net.fortuna.ical4j.model.component.VEvent;
/**
 * Quartz {@link Job} that (re)loads all calendar entries of one CalDAV
 * configuration: on first execution it restores events cached on disk, then on
 * every execution it lists the remote calendar via Sardine (WebDAV), parses
 * changed .ics resources with ical4j, updates the in-memory event cache and
 * removes events whose resources disappeared from the server.
 */
public class EventReloaderJob implements Job {
    // Key under which the calendar configuration id is stored in the job's data map.
    public static final String KEY_CONFIG = "config";
    private static final Logger log = LoggerFactory.getLogger(EventReloaderJob.class);

    // Per calendar key: whether the on-disk cache was already loaded once.
    // The cache load happens lazily on the first execution for that key.
    private static Map<String, Boolean> cachedEventsLoaded = new ConcurrentHashMap<String, Boolean>();

    /**
     * Entry point invoked by the Quartz scheduler.
     *
     * @param context carries the calendar key under {@link #KEY_CONFIG}
     * @throws JobExecutionException when the remote calendar cannot be loaded
     */
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        final String config = context.getJobDetail().getJobDataMap().getString(KEY_CONFIG);
        CalendarRuntime eventRuntime = EventStorage.getInstance().getEventCache().get(config);

        // reload cached events (if necessary)
        if (!cachedEventsLoaded.containsKey(config)) {
            try {
                log.debug("reload cached events for config: {}", eventRuntime.getConfig().getKey());
                // Only the cache directory matching this calendar key is scanned.
                for (File fileCalendarKeys : new File(CalDavLoaderImpl.CACHE_PATH).listFiles()) {
                    if (!eventRuntime.getConfig().getKey().equals(Util.getFilename(fileCalendarKeys.getName()))) {
                        continue;
                    }
                    final Collection<File> icsFiles = FileUtils.listFiles(fileCalendarKeys, new String[] { "ics" },
                            false);
                    for (File icsFile : icsFiles) {
                        try {
                            // The stream is closed inside loadEvents via the
                            // UnfoldingReader chain on the success path.
                            // NOTE(review): if parsing throws, the stream is
                            // never closed — possible descriptor leak; confirm.
                            FileInputStream fis = new FileInputStream(icsFile);
                            loadEvents(Util.getFilename(icsFile.getAbsolutePath()),
                                    new org.joda.time.DateTime(icsFile.lastModified()), fis, eventRuntime.getConfig(),
                                    new ArrayList<String>(), true);
                        } catch (IOException e) {
                            log.error("cannot load events for file: " + icsFile, e);
                        } catch (ParserException e) {
                            log.error("cannot load events for file: " + icsFile, e);
                        }
                    }
                    break;
                }
            } catch (Throwable e) {
                log.error("cannot load events", e);
            } finally {
                // Mark as done even on failure so the cache load is attempted only once.
                cachedEventsLoaded.put(config, true);
            }
        }

        try {
            log.debug("loading events for config: " + config);
            // Collect the filenames of all currently known events; loadEvents
            // removes every entry still present on the server, so whatever
            // remains afterwards was deleted remotely.
            List<String> oldEventIds = new ArrayList<String>();
            for (EventContainer eventContainer : eventRuntime.getEventMap().values()) {
                oldEventIds.add(eventContainer.getFilename());
            }
            loadEvents(eventRuntime, oldEventIds);
            // stop all events in oldMap
            removeDeletedEvents(config, oldEventIds);

            // Notify listeners that the calendar was fully reloaded.
            for (EventNotifier notifier : CalDavLoaderImpl.instance.getEventListenerList()) {
                try {
                    notifier.calendarReloaded(config);
                } catch (Exception e) {
                    log.error("error while invoking listener", e);
                }
            }
            // printAllEvents();
        } catch (SardineException e) {
            log.error(
                    "error while loading calendar entries: " + e.getMessage() + " (" + e.getStatusCode() + " - " + e.getResponsePhrase() + ")",
                    e);
            throw new JobExecutionException(
                    "error while loading calendar entries", e, false);
        } catch (Exception e) {
            log.error(
                    "error while loading calendar entries: " + e.getMessage(),
                    e);
            throw new JobExecutionException(
                    "error while loading calendar entries", e, false);
        }
    }

    /**
     * Removes every event whose filename is still in {@code oldMap} (i.e. it
     * was not found on the server anymore): cancels its scheduled start/end
     * jobs, notifies listeners, deletes the cached .ics file and drops the
     * container from the in-memory map.
     *
     * @param calendarKey the calendar configuration key
     * @param oldMap filenames of events no longer present on the server
     */
    private synchronized void removeDeletedEvents(String calendarKey,
            List<String> oldMap) {
        final CalendarRuntime eventRuntime = EventStorage.getInstance()
                .getEventCache().get(calendarKey);

        for (String filename : oldMap) {
            EventContainer eventContainer = eventRuntime.getEventContainerByFilename(filename);
            if (eventContainer == null) {
                log.error("cannot find event container for filename: {}", filename);
                continue;
            }

            // cancel old jobs
            for (String jobId : eventContainer.getTimerMap()) {
                try {
                    // The job group is derived from the id prefix (start vs. end job).
                    String group;
                    if (jobId.startsWith(CalDavLoaderImpl.JOB_NAME_EVENT_START)) {
                        group = CalDavLoaderImpl.JOB_NAME_EVENT_START;
                    } else if (jobId.startsWith(CalDavLoaderImpl.JOB_NAME_EVENT_END)) {
                        group = CalDavLoaderImpl.JOB_NAME_EVENT_END;
                    } else {
                        throw new SchedulerException("unknown job id: " + jobId);
                    }
                    boolean deleteJob = CalDavLoaderImpl.instance.getScheduler().deleteJob(JobKey.jobKey(jobId, group));
                    log.debug("old job ({}) deleted? {}", jobId, deleteJob);
                } catch (SchedulerException e) {
                    log.error("cannot delete job '{}'", jobId);
                }
            }
            eventContainer.getTimerMap().clear();

            // Tell all listeners that each contained event is gone.
            for (EventNotifier notifier : CalDavLoaderImpl.instance.getEventListenerList()) {
                for (CalDavEvent event : eventContainer.getEventList()) {
                    try {
                        notifier.eventRemoved(event);
                    } catch (Exception e) {
                        log.error("error while invoking listener", e);
                    }
                }
            }

            ConcurrentHashMap<String, EventContainer> eventContainerMap = eventRuntime.getEventMap();
            // eventContainer is always non-null here (null was skipped above).
            if (eventContainer != null) {
                this.removeFromDisk(eventContainer);

                log.debug("remove deleted event: {}", eventContainer.getEventId());
                eventContainerMap.remove(eventContainer.getEventId());
            }
        }
    }

    // Deletes the cached .ics file that backs the given container.
    private void removeFromDisk(EventContainer eventContainer) {
        Util.getCacheFile(eventContainer.getCalendarId(), eventContainer.getFilename()).delete();
    }

    /**
     * all events which are available must be removed from the oldEventIds list
     *
     * @param calendarRuntime runtime state of the calendar being reloaded
     * @param oldEventIds filenames of previously known events; entries found on
     *            the server are removed, the remainder is treated as deleted
     * @throws IOException
     * @throws ParserException
     */
    public synchronized void loadEvents(final CalendarRuntime calendarRuntime, final List<String> oldEventIds)
            throws IOException, ParserException {
        CalDavConfig config = calendarRuntime.getConfig();

        Sardine sardine = Util.getConnection(config);

        // Depth-1 listing of the calendar collection (its direct resources).
        List<DavResource> list = sardine.list(config.getUrl(), 1, false);

        for (DavResource resource : list) {
            final String filename = Util.getFilename(resource.getName());

            try {
                if (resource.isDirectory()) {
                    continue;
                }

                // Still present on the server, so not deleted.
                oldEventIds.remove(filename);

                // must not be loaded
                EventContainer eventContainer = calendarRuntime
                        .getEventContainerByFilename(filename);
                final org.joda.time.DateTime lastResourceChangeFS = new org.joda.time.DateTime(
                        resource.getModified());

                log.trace("eventContainer found: {}", eventContainer != null);
                log.trace("last resource modification: {}", lastResourceChangeFS);
                log.trace("last change of already loaded event: {}",
                        eventContainer != null ? eventContainer.getLastChanged()
                                : null);
                // Skip unchanged resources when the server's file timestamps
                // are trustworthy, unless timers still need to be recalculated.
                if (config.isLastModifiedFileTimeStampValid()) {
                    if (eventContainer != null
                            && !lastResourceChangeFS.isAfter(eventContainer
                                    .getLastChanged())) {
                        // check if some timers or single (from repeating events) have
                        // to be created
                        if (eventContainer.getCalculatedUntil() != null
                                && eventContainer.getCalculatedUntil().isAfter(
                                        org.joda.time.DateTime.now().plusMinutes(
                                                config.getReloadMinutes()))) {
                            // the event is calculated as long as the next reload
                            // interval can handle this
                            log.trace("skipping resource {}, not changed (calculated until: {})",
                                    resource.getName(), eventContainer.getCalculatedUntil());
                            continue;
                        }

                        if (eventContainer.isHistoricEvent()) {
                            // no more upcoming events, do nothing
                            log.trace("skipping resource {}, not changed (historic)",
                                    resource.getName());
                            continue;
                        }

                        // Recalculate from the local cache copy instead of
                        // downloading the unchanged resource again.
                        File icsFile = Util.getCacheFile(config.getKey(), filename);
                        if (icsFile != null && icsFile.exists()) {
                            FileInputStream fis = new FileInputStream(icsFile);
                            this.loadEvents(filename, lastResourceChangeFS, fis, config,
                                    oldEventIds, false);
                            fis.close();
                            continue;
                        }
                    }
                }

                log.debug("loading resource: {}", resource);

                // prepare resource url
                URL url = new URL(config.getUrl());
                String resourcePath = resource.getPath();
                // Escape slashes inside the resource name so the path stays valid.
                String escapedResource = resource.getName().replaceAll("/", "%2F");
                resourcePath = resourcePath.replace(resource.getName(), escapedResource);
                url = new URL(url.getProtocol(), url.getHost(), url.getPort(),
                        resourcePath);

                InputStream inputStream = sardine.get(url.toString().replaceAll(
                        " ", "%20"));

                this.loadEvents(filename, lastResourceChangeFS, inputStream, config,
                        oldEventIds, false);
            } catch (ParserException e) {
                log.error("error parsing ics file: " + filename, e);
            } catch (SardineException e) {
                log.error("error reading ics file: " + filename, e);
            }
        }
    }

    /**
     * Parses one .ics stream, builds an {@link EventContainer} with all event
     * occurrences inside the configured load window, registers it with the
     * loader and (when freshly downloaded) stores the calendar to disk.
     *
     * @param filename cache filename of the resource (without extension)
     * @param lastResourceChangeFS last-modified timestamp of the resource
     * @param inputStream raw .ics data; closed via the reader chain on success
     * @param config calendar configuration (charset, load window, key)
     * @param oldEventIds list of previously known filenames (kept for callers)
     * @param readFromFile true when loading from the disk cache; suppresses re-writing
     * @throws IOException on read failures
     * @throws ParserException on invalid iCalendar data
     */
    public void loadEvents(String filename,
            org.joda.time.DateTime lastResourceChangeFS, final InputStream inputStream,
            final CalDavConfig config, final List<String> oldEventIds,
            boolean readFromFile) throws IOException, ParserException {
        CalendarBuilder builder = new CalendarBuilder();
        InputStreamReader is = new InputStreamReader(inputStream, config.getCharset());
        BufferedReader in = new BufferedReader(
                is, 50);
        // UnfoldingReader rejoins iCalendar lines folded per RFC 5545.
        final UnfoldingReader uin = new UnfoldingReader(in, 50, true);
        Calendar calendar = builder.build(uin);
        uin.close();
        // log.trace("calendar: {}", calendar);

        EventContainer eventContainer = new EventContainer(config.getKey());
        eventContainer.setFilename(filename);
        eventContainer.setLastChanged(lastResourceChangeFS);

        // Window of occurrences to materialize: historic + preload minutes around now.
        org.joda.time.DateTime loadFrom = org.joda.time.DateTime.now().minusMinutes(config.getHistoricLoadMinutes());
        org.joda.time.DateTime loadTo = org.joda.time.DateTime.now().plusMinutes(config.getPreloadMinutes());

        final ComponentList<CalendarComponent> vEventComponents = calendar.getComponents(Component.VEVENT);
        if (vEventComponents.size() == 0) {
            // no events inside
            if (!readFromFile) {
                Util.storeToDisk(config.getKey(), filename, calendar);
            }
            return;
        }

        org.joda.time.DateTime lastModifedVEventOverAll = null;
        for (CalendarComponent comp : vEventComponents) {
            VEvent vEvent = (VEvent) comp;
            log.trace("loading event: " + vEvent.getUid().getValue() + ":" + vEvent.getSummary().getValue());
            // fallback, because 'LastModified' in VEvent is optional
            org.joda.time.DateTime lastModifedVEvent = lastResourceChangeFS;
            if (vEvent.getLastModified() != null) {
                lastModifedVEvent = new org.joda.time.DateTime(vEvent.getLastModified().getDateTime());
            }

            // When file timestamps are unreliable, fall back to per-VEvent
            // LAST-MODIFIED values to decide whether recalculation is needed.
            if (!config.isLastModifiedFileTimeStampValid()) {
                if (lastModifedVEventOverAll == null || lastModifedVEvent.isAfter(lastModifedVEventOverAll)) {
                    lastModifedVEventOverAll = lastModifedVEvent;
                }
                if (eventContainer != null && !lastModifedVEvent.isBefore(eventContainer.getLastChanged())) {
                    // check if some timers or single (from repeating events) have
                    // to be created
                    if (eventContainer.getCalculatedUntil() != null && vEventComponents.size() == 1
                            && eventContainer.getCalculatedUntil()
                                    .isAfter(org.joda.time.DateTime.now().plusMinutes(config.getReloadMinutes()))) {
                        // the event is calculated as long as the next reload
                        // interval can handle this
                        log.trace("skipping resource processing {}, not changed", filename);
                        continue;
                    }

                    if (eventContainer.isHistoricEvent()) {
                        // no more upcoming events, do nothing
                        log.trace("skipping resource processing {}, not changed", filename);
                        continue;
                    }
                }
            }

            // Expand recurrence rules into concrete periods inside the window.
            Period period = new Period(new DateTime(loadFrom.toDate()), new DateTime(loadTo.toDate()));
            PeriodList periods = vEvent.calculateRecurrenceSet(period);
            periods = periods.normalise();

            String eventId = vEvent.getUid().getValue();
            final String eventName = vEvent.getSummary().getValue();

            // no more upcoming events
            if (periods.size() > 0) {
                // Probe 10 years ahead; no consumed time at all means the
                // event lies entirely in the past.
                if (vEvent.getConsumedTime(new net.fortuna.ical4j.model.Date(),
                        new net.fortuna.ical4j.model.Date(org.joda.time.DateTime.now().plusYears(10).getMillis()))
                        .size() == 0) {
                    log.trace("event will never be occur (historic): {}", eventName);
                    eventContainer.setHistoricEvent(true);
                }
            }

            // expecting this is for every vEvent inside a calendar equals
            eventContainer.setEventId(eventId);

            eventContainer.setCalculatedUntil(loadTo);

            // Create one CalDavEvent per concrete occurrence in the window.
            for (Period p : periods) {
                org.joda.time.DateTime start = getDateTime("start", p.getStart(), p.getRangeStart());
                org.joda.time.DateTime end = getDateTime("end", p.getEnd(), p.getRangeEnd());

                CalDavEvent event = new CalDavEvent(eventName, vEvent.getUid().getValue(), config.getKey(), start, end);
                event.setLastChanged(lastModifedVEvent);
                if (vEvent.getLocation() != null) {
                    event.setLocation(vEvent.getLocation().getValue());
                }
                if (vEvent.getDescription() != null) {
                    event.setContent(vEvent.getDescription().getValue());
                }
                event.getCategoryList().addAll(readCategory(vEvent));
                event.setFilename(filename);
                log.trace("adding event: " + event.getShortName());
                eventContainer.getEventList().add(event);

            }
        }
        if (lastModifedVEventOverAll != null && !config.isLastModifiedFileTimeStampValid()) {
            eventContainer.setLastChanged(lastModifedVEventOverAll);
        }
        // if (!eventContainer.getEventList().isEmpty()) {
        CalDavLoaderImpl.instance.addEventToMap(eventContainer, true);
        if (!readFromFile) {
            Util.storeToDisk(config.getKey(), filename, calendar);
        }
        // }
    }

    /**
     * Returns a list of categories or an empty list if none found.
     * @param vEvent the event whose CATEGORIES property is read
     * @return categories split on ',', never null
     */
    private List<String> readCategory(VEvent vEvent) {
        Property propertyCategory = vEvent.getProperty(Property.CATEGORIES);
        if (propertyCategory != null) {
            String categories = propertyCategory.getValue();
            if (categories != null) {
                String[] categoriesSplit = StringUtils.split(categories, ",");
                return Arrays.asList(categoriesSplit);
            }
        }
        return new ArrayList<String>();
    }

    /**
     * Converts an ical4j date into a joda DateTime, resolving the timezone in
     * three steps: no timezone (UTC flag decides), a timezone known to joda,
     * or the loader's configured default timezone as last resort.
     *
     * @param dateType "start" or "end", used only for trace logging
     * @param date the ical4j date carrying timezone information
     * @param rangeDate the concrete instant of the occurrence boundary
     * @return the timezone-resolved joda DateTime
     */
    private org.joda.time.DateTime getDateTime(String dateType, DateTime date, Date rangeDate) {
        org.joda.time.DateTime start;
        if (date.getTimeZone() == null) {
            if (date.isUtc()) {
                log.trace("{} is without timezone, but UTC", dateType);
                start = new org.joda.time.DateTime(rangeDate, DateTimeZone.UTC).toLocalDateTime()
                        .toDateTime(CalDavLoaderImpl.defaultTimeZone);
            } else {
                log.trace("{} is without timezone, not UTC", dateType);
                start = new LocalDateTime(rangeDate).toDateTime();
            }
        } else if (DateTimeZone.getAvailableIDs().contains(date.getTimeZone().getID())) {
            log.trace("{} is with known timezone: {}", dateType, date.getTimeZone().getID());
            start = new org.joda.time.DateTime(rangeDate, DateTimeZone.forID(date.getTimeZone().getID()));
        } else {
            // unknown timezone
            log.trace("{} is with unknown timezone: {}", dateType, date.getTimeZone().getID());
            start = new org.joda.time.DateTime(rangeDate, CalDavLoaderImpl.defaultTimeZone);
        }
        return start;
    }
}
| epl-1.0 |
md-5/jdk10 | test/jdk/java/util/ServiceLoader/basic/ServiceLoaderBasicTest.java | 6954 | /*
* Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 4640520 6354623 7198496
* @summary Unit test for java.util.ServiceLoader
* @library /test/lib
* @build jdk.test.lib.process.*
* jdk.test.lib.util.JarUtils
* Basic Load FooService FooProvider1 FooProvider2 FooProvider3 BarProvider
* @run testng ServiceLoaderBasicTest
*/
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import jdk.test.lib.JDKToolFinder;
import jdk.test.lib.Utils;
import jdk.test.lib.process.ProcessTools;
import jdk.test.lib.util.JarUtils;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import static java.nio.file.StandardOpenOption.CREATE;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import static java.util.Arrays.asList;
public class ServiceLoaderBasicTest {

    // Relative location of the provider-configuration file for FooService.
    private static final String METAINFO = "META-INF/services/FooService";
    private static final Path XTEST_CONFIG = Path.of("x.test").resolve(METAINFO);
    private static final Path XMETA_CONFIG = Path.of("x.meta").resolve(METAINFO);
    private static final Path P2JAR = Path.of("p2.jar");
    private static final Path P2DUPJAR = Path.of("p2dup.jar");
    private static final Path P3JAR = Path.of("x.ext", "p3.jar");

    // Class-path fragments (each prefixed with the platform path separator).
    private static final String XTEST = File.pathSeparator + "x.test";
    private static final String XMETA = File.pathSeparator + "x.meta";
    private static final String P2 = File.pathSeparator + P2JAR.toString();
    private static final String P2DUP = File.pathSeparator + P2DUPJAR.toString();
    private static final String P3 = File.pathSeparator + P3JAR.toString();

    // Pre-assembled class paths used by the test cases below.
    private static final String XTEST_CP = Utils.TEST_CLASS_PATH + XTEST;
    private static final String P2_CP = Utils.TEST_CLASS_PATH + P2;
    private static final String P2DUP_CP = P2_CP + P2DUP;
    private static final String P3P2_CP = Utils.TEST_CLASS_PATH + P3 + P2;
    private static final String XTESTP2_CP = XTEST_CP + P2;
    private static final String P3XTEST_CP = Utils.TEST_CLASS_PATH + P3 + XTEST;
    private static final String P3XTESTP2_CP = P3XTEST_CP + P2;
    private static final String XMETA_CP = Utils.TEST_CLASS_PATH + XMETA;
    private static final String XMETAXTEST_CP = XMETA_CP + XTEST;
    private static final String XTESTXMETA_CP = XTEST_CP + XMETA;
    private static final String XTESTXMETAP2_CP = XTESTXMETA_CP + P2;

    /** Stages the provider directories and jars every test case relies on. */
    @BeforeClass
    public void initialize() throws Exception {
        // Directory-based providers on the class path.
        createProviderConfig(XTEST_CONFIG, "FooProvider1");
        // x.meta names a provider class that is rewritten per negative test.
        createProviderConfig(XMETA_CONFIG, "FooProvider42");
        // Jar-based providers; p2 is duplicated to exercise de-duplication.
        createJar(P2JAR, "FooProvider2", List.of("FooProvider2"));
        createJar(P3JAR, "FooProvider3", List.of("FooProvider3", "FooService"));
        Files.copy(P2JAR, P2DUPJAR, REPLACE_EXISTING);
    }

    @DataProvider
    public Object[][] testCases() {
        return new Object[][]{
            // CLI options, Test, Runtime arguments
            // Success cases
            {List.of("-cp", XTESTP2_CP, "Basic")},
            {List.of("-cp", XTEST_CP, "Load", "FooProvider1")},
            {List.of("-cp", P2_CP, "Load", "FooProvider2")},
            {List.of("-cp", P2DUP_CP, "Load", "FooProvider2")},
            {List.of("-cp", P3P2_CP, "Load", "FooProvider3", "FooProvider2")},
            {List.of("-cp", XTESTP2_CP, "Load", "FooProvider1", "FooProvider2")},
            {List.of("-cp", P3XTEST_CP, "Load", "FooProvider3", "FooProvider1")},
            {List.of("-cp", P3XTESTP2_CP, "Load", "FooProvider3",
                                                  "FooProvider1",
                                                  "FooProvider2")},
            // Failures followed by successes
            {List.of("-cp", XTESTXMETA_CP, "Load", "FooProvider1", "fail")},
            {List.of("-cp", XMETAXTEST_CP, "Load", "fail", "FooProvider1")},
            {List.of("-cp", XTESTXMETAP2_CP, "Load", "FooProvider1", "fail", "FooProvider2")}
        };
    }

    @DataProvider
    public Object[][] negativeTestCases() {
        return new Object[][]{
            {"blah blah"},
            {"9234"},
            {"X!"},
            {"BarProvider"},
            {"FooProvider42"}
        };
    }

    /** Launches a child VM for each positive case and expects exit code 0. */
    @Test(dataProvider = "testCases")
    public void testProvider(List<String> args) throws Throwable {
        runJava(args);
    }

    /** Writes a bogus provider name into x.meta and expects Load to report failure. */
    @Test(dataProvider = "negativeTestCases")
    public void testBadProvider(String providerName) throws Throwable {
        Files.write(XMETA_CONFIG, providerName.getBytes());
        runJava(List.of("-cp", XMETA_CP, "Load", "fail"));
    }

    // Runs "java <test VM opts> <opts>" in a child process; empty tokens are dropped.
    private void runJava(List<String> opts) throws Throwable {
        final List<String> command = new ArrayList<>();
        command.add(JDKToolFinder.getJDKTool("java"));
        command.addAll(asList(Utils.getTestJavaOpts()));
        command.addAll(opts);

        final String[] argv = command.stream()
                .filter(token -> !token.isEmpty())
                .toArray(String[]::new);
        ProcessTools.executeCommand(argv).shouldHaveExitValue(0);
    }

    // Writes the given provider name into a META-INF/services configuration file.
    private void createProviderConfig(Path config, String providerName) throws Exception {
        Files.createDirectories(config.getParent());
        Files.write(config, providerName.getBytes(), CREATE);
    }

    // Stages a provider directory (config + compiled classes) and jars it up.
    private void createJar(Path jar, String provider, List<String> files) throws Exception {
        final Path stagingDir = Path.of(provider);
        createProviderConfig(stagingDir.resolve(METAINFO), provider);

        for (String className : files) {
            final Path compiled = Path.of(Utils.TEST_CLASSES, className + ".class");
            Files.copy(compiled, stagingDir.resolve(compiled.getFileName()), REPLACE_EXISTING);
        }
        JarUtils.createJarFile(jar, stagingDir);
    }
}
| gpl-2.0 |
jvanz/core | wizards/com/sun/star/wizards/db/DatabaseObjectWizard.java | 6295 | /*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* This file incorporates work covered by the following license notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 .
*/
package com.sun.star.wizards.db;
import com.sun.star.beans.PropertyValue;
import com.sun.star.container.NoSuchElementException;
import com.sun.star.frame.XController;
import com.sun.star.frame.XFrame;
import com.sun.star.lang.IllegalArgumentException;
import com.sun.star.lang.XMultiServiceFactory;
import com.sun.star.sdb.application.XDatabaseDocumentUI;
import com.sun.star.sdbc.SQLException;
import com.sun.star.uno.UnoRuntime;
import com.sun.star.wizards.common.Desktop;
import com.sun.star.wizards.common.NamedValueCollection;
import com.sun.star.wizards.common.Properties;
import com.sun.star.wizards.ui.WizardDialog;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
 * is a base class for a wizard creating a database object
 */
public abstract class DatabaseObjectWizard extends WizardDialog
{
    /** the named values the wizard was created with */
    protected final PropertyValue[] m_wizardContext;
    /** the database document UI found in the wizard context, or {@code null} */
    protected final XDatabaseDocumentUI m_docUI;
    /** the frame used as parent for the wizard */
    protected final XFrame m_frame;

    /**
     * Creates the wizard.
     *
     * @param i_orb the service factory, passed to the base class
     * @param i_helpIDBase the base help ID, passed to the base class
     * @param i_wizardContext named values; the entries "DocumentUI" and
     *        "ParentFrame" are evaluated here to determine the parent frame
     */
    protected DatabaseObjectWizard( final XMultiServiceFactory i_orb, final int i_helpIDBase, final PropertyValue[] i_wizardContext )
    {
        super( i_orb, i_helpIDBase );
        m_wizardContext = i_wizardContext;

        final NamedValueCollection wizardContext = new NamedValueCollection( m_wizardContext );
        m_docUI = wizardContext.queryOrDefault( "DocumentUI", (XDatabaseDocumentUI)null, XDatabaseDocumentUI.class );
        if ( m_docUI != null )
        {
            // derive the parent frame from the document UI's controller
            XController docController = UnoRuntime.queryInterface( XController.class, m_docUI );
            m_frame = docController.getFrame();
        }
        else
        {
            // no document UI given: use an explicitly provided parent frame,
            // falling back to the currently active frame
            XFrame parentFrame = wizardContext.queryOrDefault( "ParentFrame", (XFrame)null, XFrame.class );
            if ( parentFrame != null )
                m_frame = parentFrame;
            else
                m_frame = Desktop.getActiveFrame( xMSF );
        }
    }

    /**
     * Loads the sub component with the given type and name, for viewing or editing.
     * Does nothing when no document UI is available. Errors are logged (with the
     * component name for context), not propagated to the caller.
     */
    protected final void loadSubComponent( final int i_type, final String i_name, final boolean i_forEditing )
    {
        try
        {
            if ( m_docUI != null )
                m_docUI.loadComponent( i_type, i_name, i_forEditing );
        }
        catch ( IllegalArgumentException ex )
        {
            // fix: log a meaningful message instead of null, so failures are diagnosable
            Logger.getLogger( this.getClass().getName() ).log( Level.SEVERE, "unable to load sub component: " + i_name, ex );
        }
        catch ( NoSuchElementException ex )
        {
            Logger.getLogger( this.getClass().getName() ).log( Level.SEVERE, "unable to load sub component: " + i_name, ex );
        }
        catch ( SQLException ex )
        {
            Logger.getLogger( this.getClass().getName() ).log( Level.SEVERE, "unable to load sub component: " + i_name, ex );
        }
    }

    /** callback used by {@link #executeWizardFromCommandLine} to start the concrete wizard */
    public interface WizardFromCommandLineStarter
    {
        void start(XMultiServiceFactory factory, PropertyValue[] curproperties);
    }

    /**
     * Parses the command line (--pipe-name, --database-location, --data-source-name),
     * connects to the running office instance via the named pipe, and invokes the
     * given starter with a property describing the database to work with. Prints
     * a usage message and returns when the arguments are invalid.
     */
    protected static void executeWizardFromCommandLine( final String i_args[], WizardFromCommandLineStarter starter )
    {
        final String settings[] = new String[] { null, null, null };
        final int IDX_PIPE_NAME = 0;
        final int IDX_LOCATION = 1;
        final int IDX_DSN = 2;

        // some simple parsing
        boolean failure = false;
        int settingsIndex = -1;
        for ( int i=0; i<i_args.length; ++i )
        {
            if ( settingsIndex >= 0 )
            {
                // previous argument was an option name; this one is its value
                settings[ settingsIndex ] = i_args[i];
                settingsIndex = -1;
                continue;
            }
            if ( i_args[i].equals( "--pipe-name" ) )
            {
                settingsIndex = IDX_PIPE_NAME;
                continue;
            }
            if ( i_args[i].equals( "--database-location" ) )
            {
                settingsIndex = IDX_LOCATION;
                continue;
            }
            if ( i_args[i].equals( "--data-source-name" ) )
            {
                settingsIndex = IDX_DSN;
                continue;
            }
            failure = true;
        }
        // fix: a trailing option with no value was previously ignored silently
        // whenever another location/data-source had already been supplied
        if ( settingsIndex >= 0 )
            failure = true;
        if ( settings[ IDX_PIPE_NAME ] == null )
            failure = true;
        if ( ( settings[ IDX_DSN ] == null ) && ( settings[ IDX_LOCATION ] == null ) )
            failure = true;
        if ( failure )
        {
            System.err.println( "supported arguments: " );
            System.err.println( "  --pipe-name <name>         : specifies the name of the pipe to connect to the running OOo instance" );
            System.err.println( "  --database-location <url>  : specifies the URL of the database document to work with" );
            System.err.println( "  --data-source-name <name>  : specifies the name of the data source to work with" );
            return;
        }

        final String ConnectStr = "uno:pipe,name=" + settings[IDX_PIPE_NAME] + ";urp;StarOffice.ServiceManager";
        try
        {
            final XMultiServiceFactory serviceFactory = Desktop.connect(ConnectStr);
            if (serviceFactory != null)
            {
                PropertyValue[] curproperties = new PropertyValue[1];
                // prefer an explicit document location over a data-source name
                if ( settings[ IDX_LOCATION ] != null )
                    curproperties[0] = Properties.createProperty( "DatabaseLocation", settings[ IDX_LOCATION ] );
                else
                    curproperties[0] = Properties.createProperty( "DataSourceName", settings[ IDX_DSN ] );
                starter.start(serviceFactory, curproperties);
            }
        }
        catch (java.lang.Exception jexception)
        {
            jexception.printStackTrace(System.err);
        }
    }
}
| gpl-3.0 |
kugelr/inspectIT | Agent/test/info/novatec/inspectit/agent/sensor/platform/ClassLoadingInformationTest.java | 10132 | package info.novatec.inspectit.agent.sensor.platform;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import info.novatec.inspectit.agent.AbstractLogSupport;
import info.novatec.inspectit.agent.core.ICoreService;
import info.novatec.inspectit.agent.core.IIdManager;
import info.novatec.inspectit.agent.core.IdNotAvailableException;
import info.novatec.inspectit.agent.sensor.platform.provider.RuntimeInfoProvider;
import info.novatec.inspectit.communication.SystemSensorData;
import info.novatec.inspectit.communication.data.ClassLoadingInformationData;
import java.lang.reflect.Field;
import java.util.logging.Level;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.slf4j.LoggerFactory;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
@SuppressWarnings("PMD")
public class ClassLoadingInformationTest extends AbstractLogSupport {

    // class under test; its internal RuntimeInfoProvider is swapped for a mock
    private ClassLoadingInformation classLoadingInfo;

    @Mock
    RuntimeInfoProvider runtimeBean;

    @Mock
    private IIdManager idManager;

    @Mock
    private ICoreService coreService;

    @BeforeMethod(dependsOnMethods = { "initMocks" })
    public void initTestClass() throws SecurityException, NoSuchFieldException, IllegalArgumentException, IllegalAccessException {
        classLoadingInfo = new ClassLoadingInformation(idManager);
        classLoadingInfo.log = LoggerFactory.getLogger(ClassLoadingInformation.class);
        // we have to replace the real runtimeBean by the mocked one, so that we don't retrieve the
        // info from the underlying JVM; reflection is needed because the field is private
        Field field = classLoadingInfo.getClass().getDeclaredField("runtimeBean");
        field.setAccessible(true);
        field.set(classLoadingInfo, runtimeBean);
    }

    // A single update() call with no pre-existing sensor data: a new
    // ClassLoadingInformationData must be created and added to the core service,
    // and with exactly one measurement min == max == total for every metric.
    @Test
    public void oneDataSet() throws IdNotAvailableException {
        int loadedClassCount = 3;
        long totalLoadedClassCount = 10L;
        long unloadedClassCount = 2L;
        long sensorTypeIdent = 13L;
        long platformIdent = 11L;

        when(idManager.getPlatformId()).thenReturn(platformIdent);
        when(idManager.getRegisteredSensorTypeId(sensorTypeIdent)).thenReturn(sensorTypeIdent);
        when(runtimeBean.getLoadedClassCount()).thenReturn(loadedClassCount);
        when(runtimeBean.getTotalLoadedClassCount()).thenReturn(totalLoadedClassCount);
        when(runtimeBean.getUnloadedClassCount()).thenReturn(unloadedClassCount);

        // there is no current data object available
        when(coreService.getPlatformSensorData(sensorTypeIdent)).thenReturn(null);

        classLoadingInfo.update(coreService, sensorTypeIdent);

        // -> The service must create a new one and add it to the storage
        // We use an argument capturer to further inspect the given argument.
        ArgumentCaptor<SystemSensorData> sensorDataCaptor = ArgumentCaptor.forClass(SystemSensorData.class);
        verify(coreService, times(1)).addPlatformSensorData(eq(sensorTypeIdent), sensorDataCaptor.capture());

        SystemSensorData sensorData = sensorDataCaptor.getValue();
        assertThat(sensorData, is(instanceOf(ClassLoadingInformationData.class)));
        assertThat(sensorData.getPlatformIdent(), is(equalTo(platformIdent)));
        assertThat(sensorData.getSensorTypeIdent(), is(equalTo(sensorTypeIdent)));

        ClassLoadingInformationData classLoadingData = (ClassLoadingInformationData) sensorData;
        assertThat(classLoadingData.getCount(), is(equalTo(1)));

        // as there was only one data object min/max/total the values must be the
        // same
        assertThat(classLoadingData.getMinLoadedClassCount(), is(equalTo(loadedClassCount)));
        assertThat(classLoadingData.getMaxLoadedClassCount(), is(equalTo(loadedClassCount)));
        assertThat(classLoadingData.getTotalLoadedClassCount(), is(equalTo(loadedClassCount)));

        assertThat(classLoadingData.getMinTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount)));
        assertThat(classLoadingData.getMaxTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount)));
        assertThat(classLoadingData.getTotalTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount)));

        assertThat(classLoadingData.getMinUnloadedClassCount(), is(equalTo(unloadedClassCount)));
        assertThat(classLoadingData.getMaxUnloadedClassCount(), is(equalTo(unloadedClassCount)));
        assertThat(classLoadingData.getTotalUnloadedClassCount(), is(equalTo(unloadedClassCount)));
    }

    // Two update() calls: the second call must reuse (aggregate into) the data
    // object returned by the core service instead of adding a second one, and
    // min/max/total must reflect both measurements.
    @Test
    public void twoDataSets() throws IdNotAvailableException {
        int loadedClassCount = 3;
        int loadedClassCount2 = 5;
        long totalLoadedClassCount = 10L;
        long totalLoadedClassCount2 = 12L;
        long unloadedClassCount = 2L;
        long sensorTypeIdent = 13L;
        long platformIdent = 11L;

        when(idManager.getPlatformId()).thenReturn(platformIdent);
        when(idManager.getRegisteredSensorTypeId(sensorTypeIdent)).thenReturn(sensorTypeIdent);

        // ------------------------
        // FIRST UPDATE CALL
        // ------------------------
        when(runtimeBean.getLoadedClassCount()).thenReturn(loadedClassCount);
        when(runtimeBean.getTotalLoadedClassCount()).thenReturn(totalLoadedClassCount);
        when(runtimeBean.getUnloadedClassCount()).thenReturn(unloadedClassCount);

        // there is no current data object available
        when(coreService.getPlatformSensorData(sensorTypeIdent)).thenReturn(null);

        classLoadingInfo.update(coreService, sensorTypeIdent);

        // -> The service must create a new one and add it to the storage
        // We use an argument capturer to further inspect the given argument.
        ArgumentCaptor<SystemSensorData> sensorDataCaptor = ArgumentCaptor.forClass(SystemSensorData.class);
        verify(coreService, times(1)).addPlatformSensorData(eq(sensorTypeIdent), sensorDataCaptor.capture());

        SystemSensorData parameter = sensorDataCaptor.getValue();
        assertThat(parameter, is(instanceOf(ClassLoadingInformationData.class)));
        assertThat(parameter.getPlatformIdent(), is(equalTo(platformIdent)));
        assertThat(parameter.getSensorTypeIdent(), is(equalTo(sensorTypeIdent)));

        ClassLoadingInformationData classLoadingData = (ClassLoadingInformationData) parameter;
        assertThat(classLoadingData.getCount(), is(equalTo(1)));

        // as there was only one data object min/max/total the values must be the
        // same
        assertThat(classLoadingData.getMinLoadedClassCount(), is(equalTo(loadedClassCount)));
        assertThat(classLoadingData.getMaxLoadedClassCount(), is(equalTo(loadedClassCount)));
        assertThat(classLoadingData.getTotalLoadedClassCount(), is(equalTo(loadedClassCount)));

        assertThat(classLoadingData.getMinTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount)));
        assertThat(classLoadingData.getMaxTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount)));
        assertThat(classLoadingData.getTotalTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount)));

        assertThat(classLoadingData.getMinUnloadedClassCount(), is(equalTo(unloadedClassCount)));
        assertThat(classLoadingData.getMaxUnloadedClassCount(), is(equalTo(unloadedClassCount)));
        assertThat(classLoadingData.getTotalUnloadedClassCount(), is(equalTo(unloadedClassCount)));

        // ------------------------
        // SECOND UPDATE CALL
        // ------------------------
        when(runtimeBean.getLoadedClassCount()).thenReturn(loadedClassCount2);
        when(runtimeBean.getTotalLoadedClassCount()).thenReturn(totalLoadedClassCount2);
        // this time the core service hands back the existing data object
        when(coreService.getPlatformSensorData(sensorTypeIdent)).thenReturn(classLoadingData);

        classLoadingInfo.update(coreService, sensorTypeIdent);

        // -> The service adds the data object only once
        // We use an argument capturer to further inspect the given argument.
        verify(coreService, times(1)).addPlatformSensorData(eq(sensorTypeIdent), sensorDataCaptor.capture());

        parameter = sensorDataCaptor.getValue();
        assertThat(parameter, is(instanceOf(ClassLoadingInformationData.class)));
        assertThat(parameter.getPlatformIdent(), is(equalTo(platformIdent)));
        assertThat(parameter.getSensorTypeIdent(), is(equalTo(sensorTypeIdent)));

        classLoadingData = (ClassLoadingInformationData) parameter;
        assertThat(classLoadingData.getCount(), is(equalTo(2)));

        // aggregated values over both measurements
        assertThat(classLoadingData.getMinLoadedClassCount(), is(equalTo(loadedClassCount)));
        assertThat(classLoadingData.getMaxLoadedClassCount(), is(equalTo(loadedClassCount2)));
        assertThat(classLoadingData.getTotalLoadedClassCount(), is(equalTo(loadedClassCount + loadedClassCount2)));

        assertThat(classLoadingData.getMinTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount)));
        assertThat(classLoadingData.getMaxTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount2)));
        assertThat(classLoadingData.getTotalTotalLoadedClassCount(), is(equalTo(totalLoadedClassCount + totalLoadedClassCount2)));

        // unloaded count was not restubbed, so both calls saw the same value
        assertThat(classLoadingData.getMinUnloadedClassCount(), is(equalTo(unloadedClassCount)));
        assertThat(classLoadingData.getMaxUnloadedClassCount(), is(equalTo(unloadedClassCount)));
        assertThat(classLoadingData.getTotalUnloadedClassCount(), is(equalTo(unloadedClassCount + unloadedClassCount)));
    }

    // When neither the platform ID nor the sensor type ID can be resolved,
    // update() must not add any data to the core service.
    @Test
    public void idNotAvailableTest() throws IdNotAvailableException {
        int loadedClassCount = 3;
        long totalLoadedClassCount = 10L;
        long unloadedClassCount = 2L;
        long sensorTypeIdent = 13L;

        when(runtimeBean.getLoadedClassCount()).thenReturn(loadedClassCount);
        when(runtimeBean.getTotalLoadedClassCount()).thenReturn(totalLoadedClassCount);
        when(runtimeBean.getUnloadedClassCount()).thenReturn(unloadedClassCount);
        when(idManager.getPlatformId()).thenThrow(new IdNotAvailableException("expected"));
        when(idManager.getRegisteredSensorTypeId(sensorTypeIdent)).thenThrow(new IdNotAvailableException("expected"));

        // there is no current data object available
        when(coreService.getPlatformSensorData(sensorTypeIdent)).thenReturn(null);

        classLoadingInfo.update(coreService, sensorTypeIdent);

        ArgumentCaptor<SystemSensorData> sensorDataCaptor = ArgumentCaptor.forClass(SystemSensorData.class);
        verify(coreService, times(0)).addPlatformSensorData(eq(sensorTypeIdent), sensorDataCaptor.capture());
    }

    // NOTE(review): presumably overrides AbstractLogSupport.getLogLevel();
    // consider adding @Override — confirm against the base class.
    protected Level getLogLevel() {
        return Level.FINEST;
    }
}
| agpl-3.0 |
yersan/wildfly-core | controller/src/main/java/org/jboss/as/controller/OperationDescriptor.java | 1267 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2020, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.controller;
import java.util.Collection;
/**
 * Describes the parameters of an operation.
 * @author Paul Ferraro
 */
public interface OperationDescriptor {
    /**
     * Returns the attribute definitions describing this operation's parameters.
     * @return a collection of attribute definitions
     */
    Collection<? extends AttributeDefinition> getAttributes();
}
| lgpl-2.1 |
IMCG/priter | src/test/org/apache/hadoop/mapred/lib/TestKeyFieldBasedComparator.java | 5652 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.lib;
import java.io.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputLogFilter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
public class TestKeyFieldBasedComparator extends HadoopTestCase {
    JobConf conf;
    JobConf localConf;

    // two sample records whose fields exercise numeric/string/fractional ordering
    String line1 = "123 -123 005120 123.9 0.01 0.18 010 10.0 4444.1 011 011 234";
    String line2 = "134 -12 005100 123.10 -1.01 0.19 02 10.1 4444";

    // raw bytes of the sample records, used by the no-MR-job comparator test
    byte[] line1_bytes = line1.getBytes();
    byte[] line2_bytes = line2.getBytes();

    public TestKeyFieldBasedComparator() throws IOException {
        super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
        conf = createJobConf();
        localConf = createJobConf();
        localConf.set("map.output.key.field.separator", " ");
    }

    /**
     * Runs a small local MR job that sorts the two sample lines with the given
     * key-field comparator spec (Unix sort -k syntax) and asserts which line
     * comes out first.
     *
     * @param keySpec the comparator options to test
     * @param expect  1 if line1 is expected first in the output, 2 if line2 is
     */
    public void configure(String keySpec, int expect) throws Exception {
        Path testdir = new Path("build/test/test.mapred.spill");
        Path inDir = new Path(testdir, "in");
        Path outDir = new Path(testdir, "out");
        FileSystem fs = getFileSystem();
        fs.delete(testdir, true);
        conf.setInputFormat(TextInputFormat.class);
        FileInputFormat.setInputPaths(conf, inDir);
        FileOutputFormat.setOutputPath(conf, outDir);
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(LongWritable.class);
        conf.setNumMapTasks(1);
        conf.setNumReduceTasks(2);
        conf.setOutputFormat(TextOutputFormat.class);
        conf.setOutputKeyComparatorClass(KeyFieldBasedComparator.class);
        conf.setKeyFieldComparatorOptions(keySpec);
        // partition on the first character only, so both lines land in the
        // same reducer and their relative order is visible in one output file
        conf.setKeyFieldPartitionerOptions("-k1.1,1.1");
        conf.set("map.output.key.field.separator", " ");
        conf.setMapperClass(InverseMapper.class);
        conf.setReducerClass(IdentityReducer.class);
        if (!fs.mkdirs(testdir)) {
            throw new IOException("Mkdirs failed to create " + testdir.toString());
        }
        if (!fs.mkdirs(inDir)) {
            throw new IOException("Mkdirs failed to create " + inDir.toString());
        }

        // set up the input data in one file; fix: close the stream even when a
        // write fails (previously leaked on exception)
        Path inFile = new Path(inDir, "part0");
        FileOutputStream fos = new FileOutputStream(inFile.toString());
        try {
            fos.write((line1 + "\n").getBytes());
            fos.write((line2 + "\n").getBytes());
        } finally {
            fos.close();
        }

        JobClient jc = new JobClient(conf);
        RunningJob r_job = jc.submitJob(conf);
        while (!r_job.isComplete()) {
            Thread.sleep(1000);
        }

        if (!r_job.isSuccessful()) {
            fail("Oops! The job broke due to an unexpected error");
        }

        Path[] outputFiles = FileUtil.stat2Paths(
            getFileSystem().listStatus(outDir, new OutputLogFilter()));
        if (outputFiles.length > 0) {
            InputStream is = getFileSystem().open(outputFiles[0]);
            BufferedReader reader = new BufferedReader(new InputStreamReader(is));
            try {
                // make sure we get what we expect as the first line, and also
                // that we have two lines (both the lines must end up in the same
                // reducer since the partitioner takes the same key spec for all
                // lines); fix: close the reader even when an assertion fails
                String line = reader.readLine();
                if (expect == 1) {
                    assertTrue(line.startsWith(line1));
                } else if (expect == 2) {
                    assertTrue(line.startsWith(line2));
                }
                line = reader.readLine();
                if (expect == 1) {
                    assertTrue(line.startsWith(line2));
                } else if (expect == 2) {
                    assertTrue(line.startsWith(line1));
                }
            } finally {
                reader.close();
            }
        }
    }

    /** Exercises a set of representative sort specs end-to-end. */
    public void testBasicUnixComparator() throws Exception {
        configure("-k1,1n", 1);
        configure("-k2,2n", 1);
        configure("-k2.2,2n", 2);
        configure("-k3.4,3n", 2);
        configure("-k3.2,3.3n -k4,4n", 2);
        configure("-k3.2,3.3n -k4,4nr", 1);
        configure("-k2.4,2.4n", 2);
        configure("-k7,7", 1);
        configure("-k7,7n", 2);
        configure("-k8,8n", 1);
        configure("-k9,9", 2);
        configure("-k11,11", 2);
        configure("-k10,10", 2);

        localTestWithoutMRJob("-k9,9", 1);
    }

    /**
     * Compares the two raw sample lines directly with the comparator, without
     * running an MR job; fails when the comparison sign disagrees with expect.
     */
    public void localTestWithoutMRJob(String keySpec, int expect) throws Exception {
        KeyFieldBasedComparator<Void, Void> keyFieldCmp = new KeyFieldBasedComparator<Void, Void>();
        localConf.setKeyFieldComparatorOptions(keySpec);
        keyFieldCmp.configure(localConf);
        int result = keyFieldCmp.compare(line1_bytes, 0, line1_bytes.length,
            line2_bytes, 0, line2_bytes.length);
        if ((expect >= 0 && result < 0) || (expect < 0 && result >= 0)) {
            fail();
        }
    }
}
| apache-2.0 |
throughsky/lywebank | hbase-prefix-tree/src/main/java/org/apache/hadoop/hbase/codec/prefixtree/encode/EncoderPool.java | 1145 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.codec.prefixtree.encode;
import java.io.OutputStream;
import org.apache.hadoop.classification.InterfaceAudience;
@InterfaceAudience.Private
public interface EncoderPool {
PrefixTreeEncoder checkOut(OutputStream outputStream, boolean includeMvccVersion);
void checkIn(PrefixTreeEncoder encoder);
} | apache-2.0 |
nomakaFr/ofbiz_ynh | sources/framework/service/src/org/ofbiz/service/jms/JmsSerializer.java | 2458 | /*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
package org.ofbiz.service.jms;
import java.io.FileNotFoundException;
import java.io.IOException;
import javax.xml.parsers.ParserConfigurationException;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.UtilXml;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.serialize.SerializeException;
import org.ofbiz.entity.serialize.XmlSerializer;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.xml.sax.SAXException;
/**
 * A facade class used to connect JMS code to the legacy XML serialization code.
 */
public class JmsSerializer {

    public static final String module = JmsSerializer.class.getName();

    /**
     * Parses the given XML string and turns it back into an object via the
     * legacy XmlSerializer. Returns null (after logging a warning) when the
     * content does not parse into a document.
     */
    public static Object deserialize(String content, Delegator delegator) throws SerializeException, SAXException, ParserConfigurationException, IOException {
        Document parsedDoc = UtilXml.readXmlDocument(content, false);
        if (parsedDoc == null) {
            Debug.logWarning("Serialized document came back null", module);
            return null;
        }
        return XmlSerializer.deserialize(parsedDoc, delegator);
    }

    /**
     * Serializes the given object into an XML string rooted at "ofbiz-ser",
     * using the legacy XmlSerializer.
     */
    public static String serialize(Object object) throws SerializeException, FileNotFoundException, IOException {
        Document serDoc = UtilXml.makeEmptyXmlDocument("ofbiz-ser");
        Element root = serDoc.getDocumentElement();
        root.appendChild(XmlSerializer.serializeSingle(object, serDoc));
        return UtilXml.writeXmlDocument(serDoc);
    }
}
| apache-2.0 |
rhusar/undertow | servlet/src/test/java/io/undertow/servlet/test/session/CrossContextServletSharedSessionTestCase.java | 20317 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.test.session;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import io.undertow.server.handlers.PathHandler;
import io.undertow.server.session.InMemorySessionManager;
import io.undertow.server.session.SessionManager;
import io.undertow.servlet.api.Deployment;
import io.undertow.servlet.api.DeploymentInfo;
import io.undertow.servlet.api.DeploymentManager;
import io.undertow.servlet.api.ServletContainer;
import io.undertow.servlet.api.ServletInfo;
import io.undertow.servlet.api.ServletSessionConfig;
import io.undertow.servlet.api.SessionManagerFactory;
import io.undertow.servlet.test.SimpleServletTestCase;
import io.undertow.servlet.test.util.TestClassIntrospector;
import io.undertow.testutils.DefaultServer;
import io.undertow.testutils.HttpClientUtils;
import io.undertow.testutils.TestHttpClient;
import io.undertow.util.StatusCodes;
/**
*
* Test that separate servlet deployments use seperate session managers, even in the presence of forwards,
* and that sessions created in a forwarded context are accessible to later direct requests
*
* @author Stuart Douglas
*/
@RunWith(DefaultServer.class)
public class CrossContextServletSharedSessionTestCase {
@BeforeClass
public static void setup() throws ServletException {
    // Both web apps are backed by the SAME InMemorySessionManager instance,
    // which is what makes their sessions shared across contexts.
    final InMemorySessionManager sharedManager = new InMemorySessionManager("test");
    final ServletContainer servletContainer = ServletContainer.Factory.newInstance();
    final PathHandler rootHandler = new PathHandler();
    DefaultServer.setRootHandler(rootHandler);
    for (String deploymentName : new String[] { "1", "2" }) {
        createDeployment(deploymentName, servletContainer, rootHandler, sharedManager);
    }
}
// Deploys one web app under "/<name>" with the test servlets mapped; the given
// session manager is used for the deployment (callers pass the same instance
// for every deployment, so sessions end up shared).
private static void createDeployment(final String name, final ServletContainer container, final PathHandler path, InMemorySessionManager sessionManager) throws ServletException {
    ServletInfo s = new ServletInfo("servlet", SessionServlet.class)
            .addMapping("/servlet");
    // NOTE(review): the forward/include servlets presumably dispatch to another
    // context selected by request parameters — confirm against ForwardServlet etc.
    ServletInfo forward = new ServletInfo("forward", ForwardServlet.class)
            .addMapping("/forward");
    ServletInfo include = new ServletInfo("include", IncludeServlet.class)
            .addMapping("/include");
    ServletInfo includeAdd = new ServletInfo("includeadd", IncludeAddServlet.class)
            .addMapping("/includeadd");
    ServletInfo forwardAdd = new ServletInfo("forwardadd", ForwardAddServlet.class)
            .addMapping("/forwardadd");
    ServletInfo accessTimeServlet = new ServletInfo("accesstimeservlet", LastAccessTimeSessionServlet.class)
            .addMapping("/accesstimeservlet");
    DeploymentInfo builder = new DeploymentInfo()
            .setClassLoader(SimpleServletTestCase.class.getClassLoader())
            .setContextPath("/" + name)
            .setClassIntrospecter(TestClassIntrospector.INSTANCE)
            .setDeploymentName( name + ".war")
            // hand every deployment the shared session manager instance
            .setSessionManagerFactory(new SessionManagerFactory() {
                @Override
                public SessionManager createSessionManager(Deployment deployment) {
                    return sessionManager;
                }
            })
            // cookie path "/" so the session cookie is sent to both contexts
            .setServletSessionConfig(new ServletSessionConfig().setPath("/"))
            .addServlets(s, forward, include, forwardAdd, includeAdd, accessTimeServlet);
    DeploymentManager manager = container.addDeployment(builder);
    manager.deploy();
    path.addPrefixPath(builder.getContextPath(), manager.start());
}
/**
 * Hits the counter servlet in both deployments directly; because they share a
 * session manager (and the session cookie path is "/"), the responses must be
 * a single incrementing counter regardless of which deployment is hit.
 */
@Test
public void testSharedSessionCookieMultipleDeployments() throws IOException {
    TestHttpClient client = new TestHttpClient();
    try {
        HttpGet direct1 = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/servlet");
        HttpGet direct2 = new HttpGet(DefaultServer.getDefaultServerURL() + "/2/servlet");
        // same request order as before, now data-driven instead of six
        // copy-pasted request/assert stanzas
        HttpGet[] sequence = { direct1, direct1, direct2, direct2, direct1, direct2 };
        for (int i = 0; i < sequence.length; i++) {
            HttpResponse result = client.execute(sequence[i]);
            Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            String response = HttpClientUtils.readResponse(result);
            Assert.assertEquals(Integer.toString(i + 1), response);
        }
    } finally {
        client.getConnectionManager().shutdown();
    }
}
/**
 * Hits the counter servlet both directly and through cross-context forwards;
 * the shared session counter must keep incrementing across all eight requests,
 * proving sessions created via a forwarded context are visible everywhere.
 */
@Test
public void testCrossContextSessionForwardInvocation() throws IOException {
    TestHttpClient client = new TestHttpClient();
    try {
        HttpGet direct1 = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/servlet");
        HttpGet forward1 = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/forward?context=/2&path=/servlet");
        HttpGet direct2 = new HttpGet(DefaultServer.getDefaultServerURL() + "/2/servlet");
        HttpGet forward2 = new HttpGet(DefaultServer.getDefaultServerURL() + "/2/forward?context=/1&path=/servlet");
        // same request order as before, now data-driven instead of eight
        // copy-pasted request/assert stanzas
        HttpGet[] sequence = { direct1, direct1, forward2, forward2, forward1, forward1, direct2, direct2 };
        for (int i = 0; i < sequence.length; i++) {
            HttpResponse result = client.execute(sequence[i]);
            Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            String response = HttpClientUtils.readResponse(result);
            Assert.assertEquals(Integer.toString(i + 1), response);
        }
    } finally {
        client.getConnectionManager().shutdown();
    }
}
@Test
public void testCrossContextSessionForwardAccessTimeInvocation() throws IOException, InterruptedException {
    TestHttpClient client = new TestHttpClient();
    try {
        HttpGet outer = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/accesstimeservlet");
        HttpGet forwarded = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/forward?context=/2&path=/accesstimeservlet");

        // Responses are "<count> <lastAccessedTime>"; the first two hits
        // establish the session shared across both contexts.
        HttpResponse res = client.execute(outer);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        Assert.assertTrue(HttpClientUtils.readResponse(res).startsWith("1 "));

        res = client.execute(forwarded);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        Assert.assertTrue(HttpClientUtils.readResponse(res).startsWith("2 "));

        Thread.sleep(50);
        res = client.execute(forwarded);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        String body = HttpClientUtils.readResponse(res);
        Assert.assertTrue(body.startsWith("3 "));
        long accessTimeThird = Long.parseLong(body.substring(2));

        Thread.sleep(50);
        res = client.execute(forwarded);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        body = HttpClientUtils.readResponse(res);
        Assert.assertTrue(body.startsWith("4 "));
        long accessTimeFourth = Long.parseLong(body.substring(2));
        Assert.assertTrue(accessTimeFourth > accessTimeThird); // access time updated in forward app

        res = client.execute(outer);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        body = HttpClientUtils.readResponse(res);
        Assert.assertTrue(body.startsWith("5 "));
        long accessTimeFifth = Long.parseLong(body.substring(2));
        Assert.assertTrue(accessTimeFifth > accessTimeFourth); // access time updated in outer app
    } finally {
        client.getConnectionManager().shutdown();
    }
}
@Test
public void testCrossContextSessionForwardInvocationWithBothServletsAdding() throws IOException {
    TestHttpClient client = new TestHttpClient();
    try {
        // NOTE: removed the unused `direct2` request that the original declared
        // but never executed.
        HttpGet direct1 = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/servlet");
        HttpGet forward1 = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/forwardadd?context=/2&path=/servlet");
        HttpGet forward2 = new HttpGet(DefaultServer.getDefaultServerURL() + "/2/forwardadd?context=/1&path=/servlet");
        // A forwardadd request increments the shared session counter in BOTH
        // contexts (once in the forwarding servlet, once in the target), so
        // forwarded responses skip a value relative to direct hits.
        HttpResponse result = client.execute(forward1);
        Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
        String response = HttpClientUtils.readResponse(result);
        Assert.assertEquals("2", response);
        result = client.execute(direct1);
        Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
        response = HttpClientUtils.readResponse(result);
        Assert.assertEquals("3", response);
        result = client.execute(forward2);
        Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
        response = HttpClientUtils.readResponse(result);
        Assert.assertEquals("5", response);
        result = client.execute(forward2);
        Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
        response = HttpClientUtils.readResponse(result);
        Assert.assertEquals("7", response);
        result = client.execute(forward1);
        Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
        response = HttpClientUtils.readResponse(result);
        Assert.assertEquals("9", response);
    } finally {
        client.getConnectionManager().shutdown();
    }
}
@Test
public void testCrossContextSessionIncludeInvocation() throws IOException {
    TestHttpClient client = new TestHttpClient();
    try {
        HttpGet direct1 = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/servlet");
        HttpGet include1 = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/include?context=/2&path=/servlet");
        HttpGet direct2 = new HttpGet(DefaultServer.getDefaultServerURL() + "/2/servlet");
        HttpGet include2 = new HttpGet(DefaultServer.getDefaultServerURL() + "/2/include?context=/1&path=/servlet");
        // The session counter is shared across both contexts, so the responses
        // count up monotonically regardless of which context serves the request.
        HttpGet[] sequence = {include2, direct1, include2, include2, include1, include1, direct2, direct2};
        for (int i = 0; i < sequence.length; i++) {
            HttpResponse result = client.execute(sequence[i]);
            Assert.assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            Assert.assertEquals(Integer.toString(i + 1), HttpClientUtils.readResponse(result));
        }
    } finally {
        client.getConnectionManager().shutdown();
    }
}
@Test
public void testCrossContextSessionIncludeAccessTimeInvocation() throws IOException, InterruptedException {
    TestHttpClient client = new TestHttpClient();
    try {
        HttpGet outer = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/accesstimeservlet");
        HttpGet included = new HttpGet(DefaultServer.getDefaultServerURL() + "/1/include?context=/2&path=/accesstimeservlet");

        // Responses are "<count> <lastAccessedTime>"; first two hits set up the session.
        HttpResponse res = client.execute(outer);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        Assert.assertTrue(HttpClientUtils.readResponse(res).startsWith("1 "));

        res = client.execute(included);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        Assert.assertTrue(HttpClientUtils.readResponse(res).startsWith("2 "));

        Thread.sleep(50);
        res = client.execute(included);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        String body = HttpClientUtils.readResponse(res);
        Assert.assertTrue(body.startsWith("3 "));
        long accessTimeThird = Long.parseLong(body.substring(2));

        Thread.sleep(50);
        res = client.execute(included);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        body = HttpClientUtils.readResponse(res);
        Assert.assertTrue(body.startsWith("4 "));
        long accessTimeFourth = Long.parseLong(body.substring(2));
        Assert.assertTrue(accessTimeFourth > accessTimeThird); // access time updated in include app

        res = client.execute(outer);
        Assert.assertEquals(StatusCodes.OK, res.getStatusLine().getStatusCode());
        body = HttpClientUtils.readResponse(res);
        Assert.assertTrue(body.startsWith("5 "));
        long accessTimeFifth = Long.parseLong(body.substring(2));
        Assert.assertTrue(accessTimeFifth > accessTimeFourth); // access time updated in outer app
    } finally {
        client.getConnectionManager().shutdown();
    }
}
/** Forwards the request to the path given by the {@code path} parameter in the context named by {@code context}. */
public static class ForwardServlet extends HttpServlet {
    @Override
    protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException {
        String targetContext = req.getParameter("context");
        String targetPath = req.getParameter("path");
        req.getServletContext().getContext(targetContext).getRequestDispatcher(targetPath).forward(req, resp);
    }
}
/** Includes the response of the {@code path} servlet from the context named by {@code context}. */
public static class IncludeServlet extends HttpServlet {
    @Override
    protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException {
        String targetContext = req.getParameter("context");
        String targetPath = req.getParameter("path");
        req.getServletContext().getContext(targetContext).getRequestDispatcher(targetPath).include(req, resp);
    }
}
/** Increments the session "key" counter, then forwards cross-context like {@code ForwardServlet}. */
public static class ForwardAddServlet extends HttpServlet {
    @Override
    protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException {
        HttpSession session = req.getSession();
        Integer current = (Integer) session.getAttribute("key");
        // Missing attribute counts as 1, so the first stored value is 2.
        session.setAttribute("key", (current == null ? 1 : current) + 1);
        String targetContext = req.getParameter("context");
        String targetPath = req.getParameter("path");
        req.getServletContext().getContext(targetContext).getRequestDispatcher(targetPath).forward(req, resp);
    }
}
/** Increments the session "key" counter, then includes cross-context like {@code IncludeServlet}. */
public static class IncludeAddServlet extends HttpServlet {
    @Override
    protected void doGet(final HttpServletRequest req, final HttpServletResponse resp) throws ServletException, IOException {
        HttpSession session = req.getSession();
        Integer current = (Integer) session.getAttribute("key");
        // Missing attribute counts as 1, so the first stored value is 2.
        session.setAttribute("key", (current == null ? 1 : current) + 1);
        String targetContext = req.getParameter("context");
        String targetPath = req.getParameter("path");
        req.getServletContext().getContext(targetContext).getRequestDispatcher(targetPath).include(req, resp);
    }
}
}
| apache-2.0 |
jomarko/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-client/kie-wb-common-stunner-widgets/src/main/java/org/kie/workbench/common/stunner/client/widgets/menu/dev/impl/LogMagnetsDevCommand.java | 3406 | /*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.client.widgets.menu.dev.impl;
import java.util.Collection;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.enterprise.context.Dependent;
import javax.inject.Inject;
import com.ait.lienzo.client.core.shape.wires.MagnetManager;
import com.ait.lienzo.client.core.shape.wires.WiresConnection;
import com.ait.lienzo.client.core.shape.wires.WiresConnector;
import com.ait.lienzo.client.core.shape.wires.WiresMagnet;
import com.ait.lienzo.client.core.shape.wires.WiresShape;
import com.ait.tooling.nativetools.client.collection.NFastArrayList;
import org.kie.workbench.common.stunner.core.client.api.SessionManager;
import org.kie.workbench.common.stunner.core.client.shape.Shape;
import org.kie.workbench.common.stunner.core.graph.Element;
import org.kie.workbench.common.stunner.core.graph.content.view.View;
@Dependent
public class LogMagnetsDevCommand extends AbstractSelectionDevCommand {

    // Loggers are effectively constants: static final prevents accidental reassignment.
    private static final Logger LOGGER = Logger.getLogger(LogMagnetsDevCommand.class.getName());

    @Inject
    public LogMagnetsDevCommand(SessionManager sessionManager) {
        super(sessionManager);
    }

    @Override
    public String getText() {
        return "Log magnets";
    }

    /**
     * Logs, for each selected element that is a node, its shape's magnets and
     * the connector attached to each magnet (if any).
     */
    @Override
    protected void execute(final Collection<Element<? extends View<?>>> items) {
        for (final Element<? extends View<?>> item : items) {
            logTask(() -> logMagnets(item));
        }
    }

    private void logMagnets(Element<? extends View<?>> element) {
        if (null != element.asNode()) {
            final Shape shape = getCanvasHandler().getCanvas().getShape(element.getUUID());
            final WiresShape wiresShape = (WiresShape) shape.getShapeView();
            final MagnetManager.Magnets magnets = wiresShape.getMagnets();
            if (null != magnets) {
                log("---- Magnets [" + element.getUUID() + "] ------");
                for (int i = 0; i < magnets.size(); i++) {
                    WiresMagnet magnet = magnets.getMagnet(i);
                    NFastArrayList<WiresConnection> connections = magnet.getConnections();
                    WiresConnector connector = null;
                    // Only the first connection (if any) is reported per magnet.
                    if (null != connections && !connections.isEmpty()) {
                        WiresConnection connection = connections.iterator().next();
                        connector = connection.getConnector();
                    }
                    log("[" + i + "] - " + connector);
                }
                log("-------------------------------------------------------");
            } else {
                log("No magnets are set.");
            }
        }
    }

    private static void log(final String message) {
        LOGGER.log(Level.INFO, message);
    }
}
| apache-2.0 |
johnnywale/drill | exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java | 5640 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.planner.sql.handlers;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.SerializableString;
import com.fasterxml.jackson.core.io.CharacterEscapes;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.drill.exec.store.SchemaFactory;
import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlDescribeSchema;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlNode;
import org.apache.drill.common.exceptions.DrillRuntimeException;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.physical.PhysicalPlan;
import org.apache.drill.exec.planner.sql.DirectPlan;
import org.apache.drill.exec.planner.sql.SchemaUtilites;
import org.apache.drill.exec.store.AbstractSchema;
import org.apache.drill.exec.store.StoragePlugin;
import org.apache.drill.exec.store.StoragePluginRegistry.PluginException;
import org.apache.drill.exec.store.dfs.FileSystemPlugin;
import org.apache.drill.exec.store.dfs.WorkspaceConfig;
import org.apache.drill.exec.work.foreman.ForemanSetupException;
import java.util.List;
import java.util.Map;
import static com.fasterxml.jackson.databind.SerializationFeature.INDENT_OUTPUT;
/**
 * Handles {@code DESCRIBE SCHEMA} statements: resolves the named schema and
 * returns its full name together with the backing storage-plugin configuration
 * rendered as pretty-printed JSON.
 */
public class DescribeSchemaHandler extends DefaultSqlHandler {
  private static final Logger logger = LoggerFactory.getLogger(DescribeSchemaHandler.class);

  // Mapper used to render plugin configs. Backslashes are deliberately NOT
  // escaped so Windows paths survive serialization.
  @SuppressWarnings("serial")
  private static final ObjectMapper mapper = new ObjectMapper(
      new ObjectMapper().getFactory().setCharacterEscapes(new CharacterEscapes() {
        @Override
        public int[] getEscapeCodesForAscii() {
          // add standard set of escaping characters
          int[] esc = CharacterEscapes.standardAsciiEscapesForJSON();
          // don't escape backslash (not to corrupt windows path)
          esc['\\'] = CharacterEscapes.ESCAPE_NONE;
          return esc;
        }

        @Override
        public SerializableString getEscapeSequence(int i) {
          // no further escaping (beyond ASCII chars) needed
          return null;
        }
      })).enable(INDENT_OUTPUT);

  public DescribeSchemaHandler(SqlHandlerConfig config) {
    super(config);
  }

  /**
   * Builds a direct plan producing one row: the schema's full name and its
   * storage-plugin properties as a JSON string.
   *
   * @throws ForemanSetupException on plan setup failure
   */
  @Override
  public PhysicalPlan getPlan(SqlNode sqlNode) throws ForemanSetupException {
    SqlIdentifier schema = unwrap(sqlNode, SqlDescribeSchema.class).getSchema();
    SchemaPlus schemaPlus = SchemaUtilites.findSchema(config.getConverter().getDefaultSchema(), schema.names);
    if (schemaPlus == null) {
      throw UserException.validationError()
        .message("Invalid schema name [%s]", Joiner.on(".").join(schema.names))
        .build(logger);
    }
    AbstractSchema drillSchema = SchemaUtilites.unwrapAsDrillSchemaInstance(schemaPlus);
    // The first path segment of a Drill schema is the storage plugin name.
    StoragePlugin storagePlugin;
    try {
      storagePlugin = context.getStorage().getPlugin(drillSchema.getSchemaPath().get(0));
      if (storagePlugin == null) {
        throw new DrillRuntimeException(String.format("Unable to find storage plugin with the following name [%s].",
            drillSchema.getSchemaPath().get(0)));
      }
    } catch (PluginException e) {
      throw new DrillRuntimeException("Failure while retrieving storage plugin", e);
    }
    try {
      Map configMap = mapper.convertValue(storagePlugin.getConfig(), Map.class);
      // File-system plugins expose per-workspace settings; fold the relevant
      // workspace into the top-level map.
      if (storagePlugin instanceof FileSystemPlugin) {
        transformWorkspaces(drillSchema.getSchemaPath(), configMap);
      }
      String properties = mapper.writeValueAsString(configMap);
      return DirectPlan.createDirectPlan(context, new DescribeSchemaResult(drillSchema.getFullSchemaName(), properties));
    } catch (JsonProcessingException e) {
      throw new DrillRuntimeException("Error while trying to convert storage config to json string", e);
    }
  }

  /**
   * If storage plugin has several workspaces, picks appropriate one and removes the others.
   */
  private void transformWorkspaces(List<String> names, Map configMap) {
    Object workspaces = configMap.remove("workspaces");
    if (workspaces != null) {
      Map map = (Map) workspaces;
      // The second path segment names the workspace; fall back to "default".
      String key = names.size() > 1 ? names.get(1) : SchemaFactory.DEFAULT_WS_NAME;
      Object workspace = map.get(key);
      if (workspace != null) {
        Map workspaceMap = (Map) map.get(key);
        configMap.putAll(workspaceMap);
      } else if (SchemaFactory.DEFAULT_WS_NAME.equals(key)) {
        // No explicit default workspace configured: use the built-in defaults.
        configMap.putAll(mapper.convertValue(WorkspaceConfig.DEFAULT, Map.class));
      }
    }
  }

  /** Simple row type returned to the client: schema name + JSON properties. */
  public static class DescribeSchemaResult {
    public String schema;
    public String properties;

    public DescribeSchemaResult(String schema, String properties) {
      this.schema = schema;
      this.properties = properties;
    }
  }
}
| apache-2.0 |
ricepanda/rice-git3 | rice-middleware/kew/impl/src/main/java/org/kuali/rice/kew/impl/peopleflow/PeopleFlow.java | 722 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.kew.impl.peopleflow;
/**
* TODO
*/
public class PeopleFlow { /* TODO */ }
| apache-2.0 |
kkashi01/appinventor-sources | appinventor/appengine/src/com/google/appinventor/client/explorer/project/ProjectNodeContextMenu.java | 1900 | // -*- mode: java; c-basic-offset: 2; -*-
// Copyright 2009-2011 Google, All Rights reserved
// Copyright 2011-2012 MIT, All rights reserved
// Released under the Apache License, Version 2.0
// http://www.apache.org/licenses/LICENSE-2.0
package com.google.appinventor.client.explorer.project;
import com.google.appinventor.client.Ode;
import com.google.appinventor.client.explorer.commands.CommandRegistry;
import com.google.appinventor.client.explorer.commands.ProjectNodeCommand;
import com.google.appinventor.client.widgets.ContextMenu;
import com.google.appinventor.shared.rpc.project.ProjectNode;
import com.google.gwt.user.client.Command;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Widget;
import java.util.List;
/**
* A context menu for a project node.
*
*/
/**
 * A context menu for a project node.
 */
public final class ProjectNodeContextMenu {

  // Static utility holder; never instantiated.
  private ProjectNodeContextMenu() {
  }

  /**
   * Shows a context menu for a node.
   *
   * @param node node for which to show the context menu
   * @param host widget to anchor context menu to
   */
  public static void show(final ProjectNode node, Widget host, int clientX, int clientY) {
    final List<CommandRegistry.Entry> commandEntries = Ode.getCommandRegistry().get(node);
    if (commandEntries.isEmpty()) {
      return;
    }
    final ContextMenu contextMenu = new ContextMenu();
    // Anchor the popup at the click location, adjusted for page scrolling.
    contextMenu.setPopupPosition(Window.getScrollLeft() + clientX,
        Window.getScrollTop() + clientY);
    for (final CommandRegistry.Entry commandEntry : commandEntries) {
      final ProjectNodeCommand nodeCommand = commandEntry.getCommand();
      if (!nodeCommand.isSupported(node)) {
        continue;
      }
      contextMenu.addItem(nodeCommand.getLabel(), new Command() {
        @Override
        public void execute() {
          contextMenu.hide();
          nodeCommand.execute(node);
        }
      });
    }
    contextMenu.show();
  }
}
| apache-2.0 |
elubow/titan | titan-test/src/main/java/com/thinkaurelius/titan/graphdb/TitanGraphBaseTest.java | 16799 | package com.thinkaurelius.titan.graphdb;
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import com.thinkaurelius.titan.core.*;
import com.thinkaurelius.titan.core.Cardinality;
import com.thinkaurelius.titan.core.schema.TitanGraphIndex;
import com.thinkaurelius.titan.core.schema.TitanManagement;
import com.thinkaurelius.titan.diskstorage.BackendException;
import com.thinkaurelius.titan.diskstorage.Backend;
import com.thinkaurelius.titan.diskstorage.configuration.*;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.KeyColumnValueStoreManager;
import com.thinkaurelius.titan.diskstorage.keycolumnvalue.StoreFeatures;
import com.thinkaurelius.titan.diskstorage.log.Log;
import com.thinkaurelius.titan.diskstorage.log.LogManager;
import com.thinkaurelius.titan.diskstorage.log.kcvs.KCVSLogManager;
import com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration;
import com.thinkaurelius.titan.graphdb.database.StandardTitanGraph;
import com.thinkaurelius.titan.graphdb.internal.Order;
import com.thinkaurelius.titan.graphdb.types.StandardEdgeLabelMaker;
import com.thinkaurelius.titan.testutil.TestGraphConfigs;
import org.apache.tinkerpop.gremlin.structure.T;
import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Element;
import org.apache.tinkerpop.gremlin.structure.Vertex;
import org.junit.After;
import org.junit.Before;
import java.time.Duration;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import static com.thinkaurelius.titan.graphdb.configuration.GraphDatabaseConfiguration.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* @author Matthias Broecheler (me@matthiasb.com)
*/
public abstract class TitanGraphBaseTest {
public static final String LABEL_NAME = T.label.getAccessor();
public static final String ID_NAME = T.id.getAccessor();
public WriteConfiguration config;
public BasicConfiguration readConfig;
public StandardTitanGraph graph;
public StoreFeatures features;
public TitanTransaction tx;
public TitanManagement mgmt;
public Map<String,LogManager> logManagers;
// No-arg constructor; all setup happens in setUp().
public TitanGraphBaseTest() {
}
public abstract WriteConfiguration getConfiguration();
/** Returns an unrestricted read view over a copy of the current configuration. */
public Configuration getConfig() {
    WriteConfiguration snapshot = config.copy();
    return new BasicConfiguration(GraphDatabaseConfiguration.ROOT_NS, snapshot, BasicConfiguration.Restriction.NONE);
}
/** Wipes all storage for the given configuration by spinning up a temporary backend. */
public static void clearGraph(WriteConfiguration config) throws BackendException {
    ModifiableConfiguration cleanupConfig =
            new ModifiableConfiguration(GraphDatabaseConfiguration.ROOT_NS, config.copy(), BasicConfiguration.Restriction.NONE);
    // Fixed identifiers so the throwaway backend can acquire locks and start.
    cleanupConfig.set(GraphDatabaseConfiguration.LOCK_LOCAL_MEDIATOR_GROUP, "tmp");
    cleanupConfig.set(GraphDatabaseConfiguration.UNIQUE_INSTANCE_ID, "inst");
    Backend cleanupBackend = new Backend(cleanupConfig);
    cleanupBackend.initialize(cleanupConfig);
    cleanupBackend.clearStorage();
}
/**
 * Per-test setup: fetch a fresh configuration, wipe the backend, then open
 * graph/tx/mgmt handles. Order matters: storage must be cleared before open().
 */
@Before
public void setUp() throws Exception {
    this.config = getConfiguration();
    TestGraphConfigs.applyOverrides(config);
    Preconditions.checkNotNull(config);
    clearGraph(config);
    readConfig = new BasicConfiguration(GraphDatabaseConfiguration.ROOT_NS, config, BasicConfiguration.Restriction.NONE);
    open(config);
    logManagers = new HashMap<String,LogManager>();
}
/** Opens the graph plus a fresh transaction and management handle for the given config. */
public void open(WriteConfiguration config) {
    graph = (StandardTitanGraph) TitanFactory.open(config);
    features = graph.getConfiguration().getStoreFeatures();
    tx = graph.newTransaction();
    mgmt = graph.openManagement();
}
/** Per-test teardown: close graph handles first, then any log managers opened by the test. */
@After
public void tearDown() throws Exception {
    close();
    closeLogs();
}
/** Commits pending schema changes and reopens mgmt/tx so subsequent reads see them. */
public void finishSchema() {
    if (mgmt!=null && mgmt.isOpen())
        mgmt.commit();
    mgmt=graph.openManagement();
    newTx();
    graph.tx().commit();
}
/** Rolls back open management, commits the open transaction, and closes the graph. */
public void close() {
    if (mgmt!=null && mgmt.isOpen()) mgmt.rollback();
    if (null != tx && tx.isOpen())
        tx.commit();
    if (null != graph && graph.isOpen())
        graph.close();
}
/** Commits the current transaction (if open) and starts a new one. */
public void newTx() {
    if (null != tx && tx.isOpen())
        tx.commit();
    //tx = graph.newThreadBoundTransaction();
    tx = graph.newTransaction();
}
/**
 * Parses an alternating (option, value) varargs array into a map,
 * validating option types and rejecting null values.
 */
public static Map<TestConfigOption,Object> validateConfigOptions(Object... settings) {
    Preconditions.checkArgument(settings.length % 2 == 0, "Expected even number of settings: %s", settings);
    Map<TestConfigOption,Object> options = Maps.newHashMap();
    for (int pos = 0; pos < settings.length; pos += 2) {
        Object key = settings[pos];
        Object value = settings[pos + 1];
        Preconditions.checkArgument(key instanceof TestConfigOption, "Expected configuration option but got: %s", key);
        Preconditions.checkNotNull(value, "Null setting at position [%s]", pos + 1);
        options.put((TestConfigOption) key, value);
    }
    return options;
}
/**
 * Close-and-reopen the graph, optionally applying configuration overrides.
 * Local options are written to the local config; global options go through a
 * management transaction. Overrides must be applied BEFORE the graph is
 * closed (global ones need an open graph) and take effect on reopen.
 */
public void clopen(Object... settings) {
    config = getConfiguration();
    if (mgmt!=null && mgmt.isOpen()) mgmt.rollback();
    if (null != tx && tx.isOpen()) tx.commit();
    if (settings!=null && settings.length>0) {
        Map<TestConfigOption,Object> options = validateConfigOptions(settings);
        TitanManagement gconf = null;
        ModifiableConfiguration lconf = new ModifiableConfiguration(GraphDatabaseConfiguration.ROOT_NS,config, BasicConfiguration.Restriction.LOCAL);
        for (Map.Entry<TestConfigOption,Object> option : options.entrySet()) {
            if (option.getKey().option.isLocal()) {
                lconf.set(option.getKey().option,option.getValue(),option.getKey().umbrella);
            } else {
                // Lazily open a management tx only if a global option is present.
                if (gconf==null) gconf = graph.openManagement();
                gconf.set(ConfigElement.getPath(option.getKey().option,option.getKey().umbrella),option.getValue());
            }
        }
        if (gconf!=null) gconf.commit();
        lconf.close();
    }
    if (null != graph && graph.isOpen())
        graph.close();
    Preconditions.checkNotNull(config);
    open(config);
}
/** Convenience factory for a TestConfigOption with an optional umbrella path. */
public static final TestConfigOption option(ConfigOption option, String... umbrella) {
    return new TestConfigOption(option,umbrella);
}
/** Pairs a ConfigOption with its (possibly empty) umbrella namespace path. */
public static final class TestConfigOption {

    public final ConfigOption option;
    public final String[] umbrella;

    public TestConfigOption(ConfigOption option, String... umbrella) {
        Preconditions.checkNotNull(option);
        this.option = option;
        // Normalize a null varargs array to an empty path.
        this.umbrella = (umbrella == null) ? new String[0] : umbrella;
    }
}
/*
========= Log Helpers ============
*/
private KeyColumnValueStoreManager logStoreManager = null;
/** Closes all open log managers and the shared log store manager, wrapping backend failures. */
private void closeLogs() {
    try {
        for (LogManager lm : logManagers.values()) lm.close();
        logManagers.clear();
        if (logStoreManager!=null) {
            logStoreManager.close();
            logStoreManager=null;
        }
    } catch (BackendException e) {
        throw new TitanException(e);
    }
}
/** Closes and removes the named log manager, if one was opened. */
public void closeLogManager(String logManagerName) {
    if (!logManagers.containsKey(logManagerName)) {
        return;
    }
    try {
        logManagers.remove(logManagerName).close();
    } catch (BackendException e) {
        throw new TitanException("Could not close log manager " + logManagerName, e);
    }
}
/** Opens the user log with the given identifier (prefixed per convention). */
public Log openUserLog(String identifier) {
    return openLog(USER_LOG, GraphDatabaseConfiguration.USER_LOG_PREFIX +identifier);
}
/** Opens the system transaction log. */
public Log openTxLog() {
    return openLog(TRANSACTION_LOG, Backend.SYSTEM_TX_LOG_NAME);
}
/**
 * Opens a named log under the given log manager, lazily creating the shared
 * store manager and the (KCVS-only) log manager on first use.
 */
private Log openLog(String logManagerName, String logName) {
    try {
        ModifiableConfiguration configuration = new ModifiableConfiguration(GraphDatabaseConfiguration.ROOT_NS,config.copy(), BasicConfiguration.Restriction.NONE);
        configuration.set(GraphDatabaseConfiguration.UNIQUE_INSTANCE_ID, "reader");
        configuration.set(GraphDatabaseConfiguration.LOG_READ_INTERVAL, Duration.ofMillis(500L), logManagerName);
        if (logStoreManager==null) {
            logStoreManager = Backend.getStorageManager(configuration);
        }
        StoreFeatures f = logStoreManager.getFeatures();
        // Distributed, key-ordered stores get multiple log partitions.
        boolean part = f.isDistributed() && f.isKeyOrdered();
        if (part) {
            for (String logname : new String[]{USER_LOG,TRANSACTION_LOG,MANAGEMENT_LOG})
                configuration.set(KCVSLogManager.LOG_MAX_PARTITIONS,8,logname);
        }
        assert logStoreManager!=null;
        if (!logManagers.containsKey(logManagerName)) {
            //Open log manager - only supports KCVSLog
            Configuration logConfig = configuration.restrictTo(logManagerName);
            Preconditions.checkArgument(logConfig.get(LOG_BACKEND).equals(LOG_BACKEND.getDefaultValue()));
            logManagers.put(logManagerName,new KCVSLogManager(logStoreManager,logConfig));
        }
        assert logManagers.containsKey(logManagerName);
        return logManagers.get(logManagerName).openLog(logName);
    } catch (BackendException e) {
        throw new TitanException("Could not open log: "+ logName,e);
    }
}
/*
========= Schema Type Definition Helpers ============
*/
/** Creates a single-cardinality key plus a non-unique composite vertex index on it. */
public PropertyKey makeVertexIndexedKey(String name, Class datatype) {
    PropertyKey indexedKey = mgmt.makePropertyKey(name).dataType(datatype).cardinality(Cardinality.SINGLE).make();
    mgmt.buildIndex(name, Vertex.class).addKey(indexedKey).buildCompositeIndex();
    return indexedKey;
}
/** Creates a single-cardinality key plus a unique composite vertex index on it. */
public PropertyKey makeVertexIndexedUniqueKey(String name, Class datatype) {
    PropertyKey indexedKey = mgmt.makePropertyKey(name).dataType(datatype).cardinality(Cardinality.SINGLE).make();
    mgmt.buildIndex(name, Vertex.class).addKey(indexedKey).unique().buildCompositeIndex();
    return indexedKey;
}
/** Adds the key to the mixed vertex index backed by the given external index. */
public void createExternalVertexIndex(PropertyKey key, String backingIndex) {
    createExternalIndex(key,Vertex.class,backingIndex);
}
/** Adds the key to the mixed edge index backed by the given external index. */
public void createExternalEdgeIndex(PropertyKey key, String backingIndex) {
    createExternalIndex(key,Edge.class,backingIndex);
}
/**
 * Returns (creating on first use) the mixed index for the element class on the
 * given backing index; index names are "v"/"e"/"p" + backingIndex.
 */
public TitanGraphIndex getExternalIndex(Class<? extends Element> clazz, String backingIndex) {
    final String prefix;
    if (Vertex.class.isAssignableFrom(clazz)) {
        prefix = "v";
    } else if (Edge.class.isAssignableFrom(clazz)) {
        prefix = "e";
    } else if (TitanVertexProperty.class.isAssignableFrom(clazz)) {
        prefix = "p";
    } else {
        throw new AssertionError(clazz.toString());
    }
    final String indexName = prefix + backingIndex;
    TitanGraphIndex existing = mgmt.getGraphIndex(indexName);
    return (existing != null) ? existing : mgmt.buildIndex(indexName, clazz).buildMixedIndex(backingIndex);
}
// Shared helper: attach the key to the (possibly newly created) mixed index.
private void createExternalIndex(PropertyKey key, Class<? extends Element> clazz, String backingIndex) {
    mgmt.addIndexKey(getExternalIndex(clazz,backingIndex),key);
}
/** Creates a single-cardinality property key with the given name and data type. */
public PropertyKey makeKey(String name, Class datatype) {
    return mgmt.makePropertyKey(name).dataType(datatype).cardinality(Cardinality.SINGLE).make();
}
/** Creates an edge label with default settings. */
public EdgeLabel makeLabel(String name) {
    return mgmt.makeEdgeLabel(name).make();
}
/** Creates a directed edge label with the given sort key and signature (uses the tx-scoped maker). */
public EdgeLabel makeKeyedEdgeLabel(String name, PropertyKey sort, PropertyKey signature) {
    StandardEdgeLabelMaker maker = (StandardEdgeLabelMaker) tx.makeEdgeLabel(name);
    return maker.sortKey(sort).signature(signature).directed().make();
}
/*
========= General Helpers ===========
*/
public static final int DEFAULT_THREAD_COUNT = 4;
/**
 * Number of worker threads for concurrent tests: the "titan.test.threads"
 * system property if set, otherwise DEFAULT_THREAD_COUNT.
 */
public static int getThreadCount() {
    String s = System.getProperty("titan.test.threads");
    // Integer.parseInt avoids the needless Integer boxing of Integer.valueOf.
    return (s != null) ? Integer.parseInt(s) : DEFAULT_THREAD_COUNT;
}
/** Maps value into [0, maxValue) for positive maxValue (a non-negative modulus). */
public static int wrapAround(int value, int maxValue) {
    int remainder = value % maxValue;
    return (remainder < 0) ? remainder + maxValue : remainder;
}
/** Looks up the single vertex with the given property value in the current tx, or null. */
public TitanVertex getVertex(String key, Object value) {
    return getVertex(tx,key,value);
}
/** PropertyKey overload of {@link #getVertex(String, Object)} for the current tx. */
public TitanVertex getVertex(PropertyKey key, Object value) {
    return getVertex(tx,key,value);
}
/** Returns the single vertex matching key=value in the given tx, or null if none. */
public static TitanVertex getVertex(TitanTransaction tx, String key, Object value) {
    return (TitanVertex)getOnlyElement(tx.query().has(key,value).vertices(),null);
}
/** PropertyKey overload delegating to the String-keyed lookup. */
public static TitanVertex getVertex(TitanTransaction tx, PropertyKey key, Object value) {
    return getVertex(tx, key.name(), value);
}
/** Rounds to three decimal places (for float comparisons in tests). */
public static double round(double d) {
    return Math.round(d*1000.0)/1000.0;
}
/** Asserts the query matches exactly one vertex and returns it. */
public static TitanVertex getOnlyVertex(TitanGraphQuery<?> query) {
    return (TitanVertex)getOnlyElement(query.vertices());
}
/** Asserts the query matches exactly one edge and returns it. */
public static TitanEdge getOnlyEdge(TitanVertexQuery<?> query) {
    return (TitanEdge)getOnlyElement(query.edges());
}
public static<E> E getOnlyElement(Iterable<E> traversal) {
return getOnlyElement(traversal.iterator());
}
public static<E> E getOnlyElement(Iterator<E> traversal) {
if (!traversal.hasNext()) throw new NoSuchElementException();
return getOnlyElement(traversal,null);
}
public static<E> E getOnlyElement(Iterable<E> traversal, E defaultElement) {
return getOnlyElement(traversal.iterator(),defaultElement);
}
public static<E> E getOnlyElement(Iterator<E> traversal, E defaultElement) {
if (!traversal.hasNext()) return defaultElement;
E result = traversal.next();
if (traversal.hasNext()) throw new IllegalArgumentException("Traversal contains more than 1 element: " + result + ", " + traversal.next());
return result;
}
// public static<E> E getOnlyElement(GraphTraversal<?,E> traversal) {
// if (!traversal.hasNext()) throw new NoSuchElementException();
// return getOnlyElement(traversal,null);
// }
//
// public static<E> E getOnlyElement(GraphTraversal<?,E> traversal, E defaultElement) {
// if (!traversal.hasNext()) return defaultElement;
// E result = traversal.next();
// if (traversal.hasNext()) throw new IllegalArgumentException("Traversal contains more than 1 element: " + result + ", " + traversal.next());
// return result;
// }
    /** Asserts that no vertex with the given id exists in {@code g}. */
    public static void assertMissing(TitanGraphTransaction g, Object vid) {
        assertFalse(g.vertices(vid).hasNext());
    }
    /** Returns the vertex with the given id from {@code g}, or null if it does not exist. */
    public static TitanVertex getV(TitanGraphTransaction g, Object vid) {
        // NOTE(review): g.vertices(vid) is called twice (existence probe + fetch); if the
        // lookup is not cached this issues the query twice — consider capturing the iterator.
        if (!g.vertices(vid).hasNext()) return null;
        return (TitanVertex)g.vertices(vid).next();
    }

    /** Returns the edge with the given id from {@code g}, or null if it does not exist. */
    public static TitanEdge getE(TitanGraphTransaction g, Object eid) {
        // NOTE(review): same double-call pattern as getV above.
        if (!g.edges(eid).hasNext()) return null;
        return (TitanEdge)g.edges(eid).next();
    }
public static String n(Object obj) {
if (obj instanceof RelationType) return ((RelationType)obj).name();
else return obj.toString();
}
    /** Returns the Titan-internal long id of {@code e}. */
    public static long getId(Element e) {
        return ((TitanElement)e).longId();
    }
public static void verifyElementOrder(Iterable<? extends Element> elements, String key, Order order, int expectedCount) {
verifyElementOrder(elements.iterator(), key, order, expectedCount);
}
public static void verifyElementOrder(Iterator<? extends Element> elements, String key, Order order, int expectedCount) {
Comparable previous = null;
int count = 0;
while (elements.hasNext()) {
Element element = elements.next();
Comparable current = element.value(key);
if (previous != null) {
int cmp = previous.compareTo(current);
assertTrue(previous + " <> " + current + " @ " + count,
order == Order.ASC ? cmp <= 0 : cmp >= 0);
}
previous = current;
count++;
}
assertEquals(expectedCount, count);
}
public static <T> Stream<T> asStream(final Iterator<T> source) {
final Iterable<T> iterable = () -> source;
return StreamSupport.stream(iterable.spliterator(),false);
}
}
| apache-2.0 |
hequn8128/flink | flink-table/flink-table-api-java/src/main/java/org/apache/flink/table/api/TumbleWithSizeOnTime.java | 2228 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api;
import org.apache.flink.annotation.PublicEvolving;
import org.apache.flink.table.expressions.ApiExpressionUtils;
import org.apache.flink.table.expressions.Expression;
import org.apache.flink.table.expressions.ExpressionParser;
/**
 * A tumbling group window defined over a time attribute with a fixed size,
 * waiting for an alias to be assigned via one of the {@code as(...)} methods.
 */
@PublicEvolving
public final class TumbleWithSizeOnTime {

	private final Expression time;

	private final Expression size;

	TumbleWithSizeOnTime(Expression time, Expression size) {
		this.time = ApiExpressionUtils.unwrapFromApi(time);
		this.size = ApiExpressionUtils.unwrapFromApi(size);
	}

	/**
	 * Assigns an alias for this window that the following {@code groupBy()} and {@code select()}
	 * clause can refer to. {@code select()} statement can access window properties such as window
	 * start or end time.
	 *
	 * @param alias alias for this window
	 * @return this window
	 */
	public TumbleWithSizeOnTimeWithAlias as(String alias) {
		// Parse the string form and delegate to the expression-based overload.
		return as(ExpressionParser.parseExpression(alias));
	}

	/**
	 * Assigns an alias for this window that the following {@code groupBy()} and {@code select()}
	 * clause can refer to. {@code select()} statement can access window properties such as window
	 * start or end time.
	 *
	 * @param alias alias for this window
	 * @return this window
	 */
	public TumbleWithSizeOnTimeWithAlias as(Expression alias) {
		return new TumbleWithSizeOnTimeWithAlias(alias, time, size);
	}
}
| apache-2.0 |
jomarko/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-client/kie-wb-common-stunner-lienzo/src/main/java/org/kie/workbench/common/stunner/client/lienzo/canvas/controls/AbstractAcceptorControl.java | 4514 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.client.lienzo.canvas.controls;
import java.util.List;
import java.util.Optional;
import java.util.function.Predicate;
import com.ait.lienzo.client.core.shape.wires.WiresContainer;
import com.ait.lienzo.client.core.shape.wires.WiresShape;
import org.kie.workbench.common.stunner.client.lienzo.canvas.wires.WiresCanvas;
import org.kie.workbench.common.stunner.client.lienzo.canvas.wires.WiresUtils;
import org.kie.workbench.common.stunner.core.client.canvas.AbstractCanvasHandler;
import org.kie.workbench.common.stunner.core.client.canvas.controls.CanvasControl;
import org.kie.workbench.common.stunner.core.client.command.CanvasCommandManager;
import org.kie.workbench.common.stunner.core.client.command.CanvasViolation;
import org.kie.workbench.common.stunner.core.client.command.RequiresCommandManager;
import org.kie.workbench.common.stunner.core.command.CommandResult;
import org.kie.workbench.common.stunner.core.command.util.CommandUtils;
import org.kie.workbench.common.stunner.core.graph.Edge;
import org.kie.workbench.common.stunner.core.graph.Node;
public abstract class AbstractAcceptorControl
implements CanvasControl<AbstractCanvasHandler>,
RequiresCommandManager<AbstractCanvasHandler> {
private AbstractCanvasHandler canvasHandler;
private CommandManagerProvider<AbstractCanvasHandler> commandManagerProvider;
protected abstract void onInit(final WiresCanvas canvas);
protected abstract void onDestroy(final WiresCanvas canvas);
@Override
public void init(final AbstractCanvasHandler canvasHandler) {
this.canvasHandler = canvasHandler;
onInit((WiresCanvas) canvasHandler.getCanvas());
}
@Override
public void destroy() {
if (null != canvasHandler && null != canvasHandler.getCanvas()) {
onDestroy((WiresCanvas) canvasHandler.getCanvas());
}
this.canvasHandler = null;
this.commandManagerProvider = null;
}
@Override
public void setCommandManagerProvider(final CommandManagerProvider<AbstractCanvasHandler> provider) {
this.commandManagerProvider = provider;
}
protected AbstractCanvasHandler getCanvasHandler() {
return canvasHandler;
}
protected CanvasCommandManager<AbstractCanvasHandler> getCommandManager() {
return commandManagerProvider.getCommandManager();
}
@SuppressWarnings("unchecked")
protected Optional<Edge<?, Node>> getFirstIncomingEdge(final Node child,
final Predicate<Edge> predicate) {
return getAnyEdge(child.getInEdges(),
predicate);
}
@SuppressWarnings("unchecked")
protected Optional<Edge<?, Node>> getFirstOutgoingEdge(final Node child,
final Predicate<Edge> predicate) {
return getAnyEdge(child.getOutEdges(),
predicate);
}
protected boolean isCommandSuccess(final CommandResult<CanvasViolation> result) {
return !CommandUtils.isError(result);
}
protected boolean isWiresParentAccept(final WiresContainer wiresContainer) {
return WiresUtils.isWiresShape(wiresContainer);
}
protected boolean isWiresViewAccept(final WiresContainer wiresContainer,
final WiresShape wiresShape) {
return WiresUtils.isWiresShape(wiresContainer) || WiresUtils.isWiresShape(wiresShape);
}
protected Optional<Edge<?, Node>> getAnyEdge(final List<Edge<?, Node>> edges,
final Predicate<Edge> predicate) {
if (null != edges) {
return edges.stream()
.filter(predicate)
.findAny();
}
return Optional.empty();
}
}
| apache-2.0 |
jushanghui/jsh | src/main/parser/com/baidu/hsb/parser/ast/stmt/dml/DMLInsertReplaceStatement.java | 2187 | /**
* Baidu.com,Inc.
* Copyright (c) 2000-2013 All Rights Reserved.
*/
package com.baidu.hsb.parser.ast.stmt.dml;
import java.util.List;
import com.baidu.hsb.parser.ast.expression.misc.QueryExpression;
import com.baidu.hsb.parser.ast.expression.primary.Identifier;
import com.baidu.hsb.parser.ast.expression.primary.RowExpression;
/**
 * Common base for INSERT and REPLACE DML statements: the target table, an
 * optional column list, and either literal value rows or a sub-query as the
 * data source (exactly one of the two is non-null).
 *
 * @author xiongzhao@baidu.com
 */
public abstract class DMLInsertReplaceStatement extends DMLStatement {
    protected final Identifier table;
    protected final List<Identifier> columnNameList;
    protected List<RowExpression> rowList;
    protected final QueryExpression select;

    /** Saved copy of {@link #rowList} while a temporary replacement is active. */
    private List<RowExpression> rowListBak;

    @SuppressWarnings("unchecked")
    public DMLInsertReplaceStatement(Identifier table, List<Identifier> columnNameList, List<RowExpression> rowList) {
        this.table = table;
        this.columnNameList = ensureListType(columnNameList);
        this.rowList = ensureListType(rowList);
        this.select = null;
    }

    @SuppressWarnings("unchecked")
    public DMLInsertReplaceStatement(Identifier table, List<Identifier> columnNameList, QueryExpression select) {
        if (select == null) {
            throw new IllegalArgumentException("argument 'select' is empty");
        }
        this.table = table;
        this.columnNameList = ensureListType(columnNameList);
        this.rowList = null;
        this.select = select;
    }

    public Identifier getTable() {
        return table;
    }

    /**
     * @return {@link java.util.ArrayList ArrayList}
     */
    public List<Identifier> getColumnNameList() {
        return columnNameList;
    }

    /**
     * @return {@link java.util.ArrayList ArrayList} or
     *         {@link java.util.Collections#emptyList() EMPTY_LIST}
     */
    public List<RowExpression> getRowList() {
        return rowList;
    }

    public QueryExpression getSelect() {
        return select;
    }

    /** Temporarily swaps in {@code list} as the row source; restore with {@link #clearReplaceRowList()}. */
    public void setReplaceRowList(List<RowExpression> list) {
        rowListBak = rowList;
        rowList = list;
    }

    /** Restores the row list saved by {@link #setReplaceRowList(List)}, if one is pending. */
    public void clearReplaceRowList() {
        if (rowListBak == null) {
            return;
        }
        rowList = rowListBak;
        rowListBak = null;
    }
}
| apache-2.0 |
mztaylor/rice-git | rice-framework/krad-web-framework/src/main/java/org/kuali/rice/krad/util/documentserializer/PropertyType.java | 835 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.util.documentserializer;
/**
 * Represents various types a property may be when a document serializer walks
 * an object graph: a nested business object, a primitive (leaf) value, a
 * collection, or a map.
 */
public enum PropertyType {
    BUSINESS_OBJECT, PRIMITIVE, COLLECTION, MAP
}
| apache-2.0 |
AlienQueen/wicket | wicket-core/src/main/java/org/apache/wicket/request/handler/resource/ResourceReferenceRequestHandler.java | 3563 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.request.handler.resource;
import java.util.Locale;
import org.apache.wicket.request.ILoggableRequestHandler;
import org.apache.wicket.request.IRequestCycle;
import org.apache.wicket.request.IRequestHandler;
import org.apache.wicket.core.request.handler.logger.ResourceReferenceLogData;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import org.apache.wicket.request.resource.IResource;
import org.apache.wicket.request.resource.ResourceReference;
import org.apache.wicket.util.lang.Args;
/**
 * Request handler for {@link ResourceReference}. This handler is only used to generate URLs;
 * responding is delegated to a {@link ResourceRequestHandler}.
 *
 * @author Matej Knopp
 */
public class ResourceReferenceRequestHandler implements IRequestHandler, ILoggableRequestHandler
{
	private final ResourceReference resourceReference;

	private final PageParameters pageParameters;

	private ResourceReferenceLogData logData;

	/**
	 * Construct without explicit page parameters (an empty set is used).
	 *
	 * @param resourceReference
	 */
	public ResourceReferenceRequestHandler(ResourceReference resourceReference)
	{
		this(resourceReference, null);
	}

	/**
	 * Construct.
	 *
	 * @param resourceReference
	 * @param pageParameters
	 *            optional; an empty {@link PageParameters} is used when {@code null}
	 */
	public ResourceReferenceRequestHandler(ResourceReference resourceReference,
		PageParameters pageParameters)
	{
		Args.notNull(resourceReference, "resourceReference");

		this.resourceReference = resourceReference;
		if (pageParameters == null)
		{
			this.pageParameters = new PageParameters();
		}
		else
		{
			this.pageParameters = pageParameters;
		}
	}

	/**
	 * @return resource reference
	 */
	public ResourceReference getResourceReference()
	{
		return resourceReference;
	}

	/**
	 * @return page parameters
	 */
	public PageParameters getPageParameters()
	{
		return pageParameters;
	}

	@Override
	public void detach(IRequestCycle requestCycle)
	{
		// Capture the log data lazily, once, at detach time.
		if (logData == null)
		{
			logData = new ResourceReferenceLogData(this);
		}
	}

	@Override
	public ResourceReferenceLogData getLogData()
	{
		return logData;
	}

	@Override
	public void respond(IRequestCycle requestCycle)
	{
		ResourceRequestHandler delegate = new ResourceRequestHandler(
			getResourceReference().getResource(), getPageParameters());
		delegate.respond(requestCycle);
	}

	/**
	 * @return reference locale
	 */
	public Locale getLocale()
	{
		return getResourceReference().getLocale();
	}

	/**
	 * @return resource
	 */
	public IResource getResource()
	{
		return getResourceReference().getResource();
	}

	/**
	 * @return style
	 */
	public String getStyle()
	{
		return getResourceReference().getStyle();
	}

	/**
	 * @return variation
	 */
	public String getVariation()
	{
		return getResourceReference().getVariation();
	}

	@Override
	public String toString()
	{
		// Same output as the previous string concatenation.
		return String.format("ResourceReferenceRequestHandler{resourceReference=%s, pageParameters=%s}",
			resourceReference, pageParameters);
	}
}
| apache-2.0 |
grainier/carbon-analytics | components/org.wso2.carbon.event.simulator.core/src/gen/java/org/wso2/carbon/event/simulator/core/model/FileNamesResponse.java | 2238 | package org.wso2.carbon.event.simulator.core.model;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import io.swagger.annotations.ApiModelProperty;
/**
 * FileNamesResponse
 *
 * Response envelope carrying an HTTP-style status code and a human-readable message.
 * NOTE(review): this class is swagger-codegen output (see the {@code @Generated}
 * annotation); prefer regenerating over hand-editing its logic.
 */
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaMSF4JServerCodegen", date = "2017-07-20T09:30:14.336Z")
public class FileNamesResponse {
  @JsonProperty("code")
  private Integer code = null;

  @JsonProperty("message")
  private String message = null;

  // Fluent setter returning this, as generated for builder-style chaining.
  public FileNamesResponse code(Integer code) {
    this.code = code;
    return this;
  }

  /**
   * Get code
   * @return code
   **/
  @ApiModelProperty(example = "200", value = "")
  public Integer getCode() {
    return code;
  }

  public void setCode(Integer code) {
    this.code = code;
  }

  // Fluent setter returning this, as generated for builder-style chaining.
  public FileNamesResponse message(String message) {
    this.message = message;
    return this;
  }

  /**
   * Get message
   * @return message
   **/
  @ApiModelProperty(example = "", value = "")
  public String getMessage() {
    return message;
  }

  public void setMessage(String message) {
    this.message = message;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    FileNamesResponse fileNamesResponse = (FileNamesResponse) o;
    return Objects.equals(this.code, fileNamesResponse.code) &&
        Objects.equals(this.message, fileNamesResponse.message);
  }

  @Override
  public int hashCode() {
    return Objects.hash(code, message);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class FileNamesResponse {\n");
    sb.append("    code: ").append(toIndentedString(code)).append("\n");
    sb.append("    message: ").append(toIndentedString(message)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
| apache-2.0 |
sunpy1106/SpringBeanLifeCycle | src/main/java/org/springframework/format/datetime/package-info.java | 106 | /**
* Formatters for {@code java.util.Date} properties.
*/
package org.springframework.format.datetime;
| apache-2.0 |
miniway/presto | presto-record-decoder/src/main/java/io/prestosql/decoder/csv/CsvRowDecoderFactory.java | 1021 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.decoder.csv;
import io.prestosql.decoder.DecoderColumnHandle;
import io.prestosql.decoder.RowDecoder;
import io.prestosql.decoder.RowDecoderFactory;
import java.util.Map;
import java.util.Set;
/**
 * Factory for {@link CsvRowDecoder} instances.
 */
public class CsvRowDecoderFactory
        implements RowDecoderFactory
{
    @Override
    public RowDecoder create(Map<String, String> decoderParams, Set<DecoderColumnHandle> columns)
    {
        // decoderParams is intentionally unused: CSV decoding is driven entirely by the column handles.
        return new CsvRowDecoder(columns);
    }
}
| apache-2.0 |
alanfgates/hive | itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java | 5600 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hive.minikdc;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.hooks.TestHs2Hooks.PostExecHook;
import org.apache.hadoop.hive.hooks.TestHs2Hooks.PreExecHook;
import org.apache.hadoop.hive.hooks.TestHs2Hooks.SemanticAnalysisHook;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Tests information retrieved from hooks, in Kerberos mode.
*/
public class TestHs2HooksWithMiniKdc {
private static MiniHS2 miniHS2 = null;
private static MiniHiveKdc miniHiveKdc = null;
private static Map<String, String> confOverlay = new HashMap<String, String>();
private Connection hs2Conn;
@BeforeClass
public static void setUpBeforeClass() throws Exception {
Class.forName(MiniHS2.getJdbcDriverName());
confOverlay.put(ConfVars.POSTEXECHOOKS.varname, PostExecHook.class.getName());
confOverlay.put(ConfVars.PREEXECHOOKS.varname, PreExecHook.class.getName());
confOverlay.put(ConfVars.SEMANTIC_ANALYZER_HOOK.varname,
SemanticAnalysisHook.class.getName());
confOverlay.put(ConfVars.HIVE_SUPPORT_CONCURRENCY.varname, "" + Boolean.FALSE);
miniHiveKdc = new MiniHiveKdc();
HiveConf hiveConf = new HiveConf();
miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniHiveKdc, hiveConf);
miniHS2.start(confOverlay);
}
@AfterClass
public static void tearDownAfterClass() throws Exception {
miniHS2.stop();
}
@Before
public void setUpTest() throws Exception {
PreExecHook.userName = null;
PreExecHook.ipAddress = null;
PreExecHook.operation = null;
PreExecHook.error = null;
PostExecHook.userName = null;
PostExecHook.ipAddress = null;
PostExecHook.operation = null;
PostExecHook.error = null;
SemanticAnalysisHook.userName = null;
SemanticAnalysisHook.ipAddress = null;
SemanticAnalysisHook.command = null;
SemanticAnalysisHook.preAnalyzeError = null;
SemanticAnalysisHook.postAnalyzeError = null;
}
@After
public void tearDownTest() throws Exception {
if (hs2Conn != null) {
try {
hs2Conn.close();
} catch (Exception e) {
// Ignore shutdown errors since there are negative tests
}
}
}
/**
* Test that hook context properties are correctly set.
*/
@Test
public void testHookContexts() throws Throwable {
miniHiveKdc.loginUser(MiniHiveKdc.HIVE_TEST_USER_1);
hs2Conn = DriverManager.getConnection(miniHS2.getJdbcURL());
Statement stmt = hs2Conn.createStatement();
stmt.executeQuery("show databases");
stmt.executeQuery("show tables");
Throwable error = PostExecHook.error;
if (error != null) {
throw error;
}
error = PreExecHook.error;
if (error != null) {
throw error;
}
Assert.assertNotNull("ipaddress is null", PostExecHook.ipAddress);
Assert.assertNotNull("userName is null", PostExecHook.userName);
Assert.assertNotNull("operation is null", PostExecHook.operation);
Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PostExecHook.userName);
Assert.assertTrue(PostExecHook.ipAddress, PostExecHook.ipAddress.contains("127.0.0.1"));
Assert.assertEquals("SHOWTABLES", PostExecHook.operation);
Assert.assertNotNull("ipaddress is null", PreExecHook.ipAddress);
Assert.assertNotNull("userName is null", PreExecHook.userName);
Assert.assertNotNull("operation is null", PreExecHook.operation);
Assert.assertEquals(MiniHiveKdc.HIVE_TEST_USER_1, PreExecHook.userName);
Assert.assertTrue(PreExecHook.ipAddress, PreExecHook.ipAddress.contains("127.0.0.1"));
Assert.assertEquals("SHOWTABLES", PreExecHook.operation);
error = SemanticAnalysisHook.preAnalyzeError;
if (error != null) {
throw error;
}
error = SemanticAnalysisHook.postAnalyzeError;
if (error != null) {
throw error;
}
Assert.assertNotNull("semantic hook context ipaddress is null",
SemanticAnalysisHook.ipAddress);
Assert.assertNotNull("semantic hook context userName is null",
SemanticAnalysisHook.userName);
Assert.assertNotNull("semantic hook context command is null",
SemanticAnalysisHook.command);
Assert.assertNotNull("semantic hook context commandType is null",
SemanticAnalysisHook.commandType);
Assert.assertTrue(SemanticAnalysisHook.ipAddress,
SemanticAnalysisHook.ipAddress.contains("127.0.0.1"));
Assert.assertEquals("show tables", SemanticAnalysisHook.command);
}
} | apache-2.0 |
uonafya/jphes-core | dhis-2/dhis-services/dhis-service-dxf2/src/test/java/org/hisp/dhis/dxf2/metadata/MetadataExportServiceTest.java | 6616 | package org.hisp.dhis.dxf2.metadata;
/*
* Copyright (c) 2004-2016, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import org.hisp.dhis.DhisSpringTest;
import org.hisp.dhis.common.IdentifiableObject;
import org.hisp.dhis.common.IdentifiableObjectManager;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementGroup;
import org.hisp.dhis.dxf2.metadata.MetadataExportParams;
import org.hisp.dhis.dxf2.metadata.MetadataExportService;
import org.hisp.dhis.query.Disjunction;
import org.hisp.dhis.query.Query;
import org.hisp.dhis.query.Restrictions;
import org.hisp.dhis.schema.SchemaService;
import org.hisp.dhis.user.User;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.*;
/**
 * Verifies that {@code MetadataExportService} exports persisted metadata, honoring
 * explicit class selections and per-class custom queries.
 *
 * @author Morten Olav Hansen <mortenoh@gmail.com>
 */
public class MetadataExportServiceTest
    extends DhisSpringTest
{
    @Autowired
    private MetadataExportService metadataExportService;

    @Autowired
    private IdentifiableObjectManager manager;

    @Autowired
    private SchemaService schemaService;

    /**
     * Persists data elements A, B and C, grouped into data element group A which is
     * owned by user A. Shared fixture for all export tests below.
     *
     * @return the persisted data elements in creation order (A, B, C)
     */
    private DataElement[] setUpDataElementGroupFixture()
    {
        DataElementGroup deg1 = createDataElementGroup( 'A' );

        DataElement de1 = createDataElement( 'A' );
        DataElement de2 = createDataElement( 'B' );
        DataElement de3 = createDataElement( 'C' );

        manager.save( de1 );
        manager.save( de2 );
        manager.save( de3 );

        User user = createUser( 'A' );
        manager.save( user );

        deg1.addDataElement( de1 );
        deg1.addDataElement( de2 );
        deg1.addDataElement( de3 );

        deg1.setUser( user );
        manager.save( deg1 );

        return new DataElement[] { de1, de2, de3 };
    }

    @Test
    public void testValidate()
    {
        // An empty export params object must pass validation without throwing.
        MetadataExportParams params = new MetadataExportParams();
        metadataExportService.validate( params );
    }

    @Test
    public void testMetadataExport()
    {
        setUpDataElementGroupFixture();

        // Default export includes all persisted metadata classes.
        MetadataExportParams params = new MetadataExportParams();
        Map<Class<? extends IdentifiableObject>, List<? extends IdentifiableObject>> metadata = metadataExportService.getMetadata( params );

        assertEquals( 1, metadata.get( User.class ).size() );
        assertEquals( 1, metadata.get( DataElementGroup.class ).size() );
        assertEquals( 3, metadata.get( DataElement.class ).size() );
    }

    @Test
    public void testMetadataExportWithCustomClasses()
    {
        setUpDataElementGroupFixture();

        // Restricting the export to DataElement must exclude all other classes.
        MetadataExportParams params = new MetadataExportParams();
        params.addClass( DataElement.class );

        Map<Class<? extends IdentifiableObject>, List<? extends IdentifiableObject>> metadata = metadataExportService.getMetadata( params );

        assertFalse( metadata.containsKey( User.class ) );
        assertFalse( metadata.containsKey( DataElementGroup.class ) );
        assertTrue( metadata.containsKey( DataElement.class ) );

        assertEquals( 3, metadata.get( DataElement.class ).size() );
    }

    @Test
    public void testMetadataExportWithCustomQueries()
    {
        DataElement[] dataElements = setUpDataElementGroupFixture();

        // Only data elements A and B match this disjunction.
        Query deQuery = Query.from( schemaService.getDynamicSchema( DataElement.class ) );

        Disjunction disjunction = deQuery.disjunction();
        disjunction.add( Restrictions.eq( "id", dataElements[0].getUid() ) );
        disjunction.add( Restrictions.eq( "id", dataElements[1].getUid() ) );

        deQuery.add( disjunction );

        // This query matches nothing, so the group class must be absent from the export.
        Query degQuery = Query.from( schemaService.getDynamicSchema( DataElementGroup.class ) );
        degQuery.add( Restrictions.eq( "id", "INVALID UID" ) );

        MetadataExportParams params = new MetadataExportParams();
        params.addQuery( deQuery );
        params.addQuery( degQuery );

        Map<Class<? extends IdentifiableObject>, List<? extends IdentifiableObject>> metadata = metadataExportService.getMetadata( params );

        assertFalse( metadata.containsKey( User.class ) );
        assertFalse( metadata.containsKey( DataElementGroup.class ) );
        assertTrue( metadata.containsKey( DataElement.class ) );

        assertEquals( 2, metadata.get( DataElement.class ).size() );
    }
}
| bsd-3-clause |
sguazt/rain-workload-toolkit | src/radlab/rain/workload/scadr/ScadrScenarioTrack.java | 13346 | /*
* Copyright (c) 2010, Regents of the University of California
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of California, Berkeley
* nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written
* permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package radlab.rain.workload.scadr;
import radlab.rain.DefaultScenarioTrack;
import radlab.rain.Scenario;
import radlab.rain.util.AppServerStats;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.data.Stat;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Hashtable;
import java.util.Iterator;
/**
 * Scenario track for the SCADR workload. Maintains the list of application
 * servers published in ZooKeeper and per-server outstanding-request counts so
 * that load generators can direct traffic to the least loaded server.
 *
 * Thread-safety: the traffic statistics table is guarded by {@code _trafficLock};
 * ZooKeeper (re)configuration methods are {@code synchronized} on this track.
 */
public class ScadrScenarioTrack extends DefaultScenarioTrack
{
    /** Default ZooKeeper session timeout (milliseconds). */
    public static final int DEFAULT_ZOOKEEPER_SESSION_TIMEOUT = 30000;
    /** Separator between app server entries in the ZooKeeper node data. */
    public static final String APP_SERVER_LIST_SEPARATOR = "\n";
    /** Default number of attempts when reading data from ZooKeeper. */
    public static int DEFAULT_RETRIES = 3;
    /** Default pause between ZooKeeper read attempts (milliseconds). */
    public static long DEFAULT_RETRY_TIMEOUT = 3000; // 3 sec retry timeout

    private NumberFormat _formatter = new DecimalFormat( "#0.0000" );
    private boolean _isConfigured = false;
    private ZooKeeper _zconn = null;
    private boolean _appServerListChanged = false;
    private String[] _appServers = null;

    // ZooKeeper connection string and node path (set in configureZooKeeper)
    private String _zkConnString = "";
    private String _zkPath = "";

    // Mechanisms associated with keeping traffic stats for request-gating.
    // We want the generators to send requests to the least loaded server.
    // All of the fields below are guarded by _trafficLock.
    private long _totalTrafficLockWaitTime = 0;
    private long _totalTrafficLockRequestCount = 0;
    private long _maxTrafficLockWaitTime = 0;
    private Object _trafficLock = new Object();
    private Hashtable<String, AppServerStats> _appServerTraffic = new Hashtable<String, AppServerStats>();

    public ScadrScenarioTrack(Scenario parent)
    {
        super(parent);
    }

    public ScadrScenarioTrack(String name, Scenario scenario)
    {
        super(name, scenario);
    }

    /**
     * Records how long a caller waited to acquire the traffic lock.
     * Must be invoked while holding {@code _trafficLock} since it updates
     * the shared wait-time counters.
     *
     * @param lockWaitTime time spent waiting for the lock (milliseconds)
     */
    private void recordTrafficLockWait( long lockWaitTime )
    {
        // Keep track of the total time waiting on the traffic lock
        this._totalTrafficLockWaitTime += lockWaitTime;
        // Keep counting the lock requests
        this._totalTrafficLockRequestCount++;
        // Track the worst case lock wait time seen so far
        if( lockWaitTime > this._maxTrafficLockWaitTime )
            this._maxTrafficLockWaitTime = lockWaitTime;
    }

    /**
     * Notes that a request has been issued to the given app server so that
     * its outstanding-request count reflects the new in-flight request.
     * Operations call this based on the base url in the generator that
     * created them.
     *
     * @param appServer the app server the request was sent to
     */
    public void requestIssue( String appServer )
    {
        long start = System.currentTimeMillis();
        synchronized( this._trafficLock )
        {
            // How long did we wait for the lock?
            this.recordTrafficLockWait( System.currentTimeMillis() - start );
            // Get the request counter for this server
            AppServerStats stats = this._appServerTraffic.get( appServer );
            if( stats == null )
            {
                // A request arrived for a server not yet in the table. That can
                // happen if the ZooKeeper list changed before we refreshed our
                // snapshot. Start the counter at 1 so the request being issued
                // right now is counted (it was previously created at 0, which
                // lost the first request to an unknown server).
                this._appServerTraffic.put( appServer, new AppServerStats( appServer, 1L ) );
            }
            else stats._outstandingRequests++;
        }
    }

    /**
     * Notes that a previously issued request to the given app server has
     * completed, decrementing its outstanding-request count.
     *
     * @param appServer the app server the request was sent to
     */
    public void requestRetire( String appServer )
    {
        long start = System.currentTimeMillis();
        synchronized( this._trafficLock )
        {
            this.recordTrafficLockWait( System.currentTimeMillis() - start );
            AppServerStats stats = this._appServerTraffic.get( appServer );
            // stats may legitimately be null here: a request sent to a slow app
            // server may finish after that server was purged from a more recent
            // ZooKeeper list. In that eventuality it's safe to ignore the
            // retire message.
            if( stats != null )
                stats._outstandingRequests--;
        }
    }

    /** Returns whether ZooKeeper configuration has completed successfully. */
    public boolean isConfigured()
    { return this._isConfigured; }

    /** Sets the flag indicating the app server list in ZooKeeper changed. */
    public void setAppServerListChanged( boolean val )
    {
        if( val )
            System.out.println( this + " app server list changed." );
        this._appServerListChanged = val;
    }

    /** Returns whether the app server list has changed since the last refresh. */
    public boolean getAppServerListChanged()
    { return this._appServerListChanged; }

    /** Returns the live per-app-server traffic statistics table. */
    public Hashtable<String, AppServerStats> getAppServers()
    { return this._appServerTraffic; }

    /**
     * Connects to ZooKeeper and initializes the app server list. Safe to call
     * more than once; subsequent calls are no-ops once configured.
     *
     * @param zkConnString ZooKeeper connection string (host:port,...)
     * @param zkPath       path of the node holding the app server list
     * @return true if the server list was read successfully
     */
    public synchronized boolean configureZooKeeper( String zkConnString, String zkPath )
    {
        // Don't double configure the ScenarioTrack
        if( this._isConfigured )
            return true;

        // Save the connection information
        this._zkConnString = zkConnString;
        this._zkPath = zkPath;

        // Update the app server list
        boolean res = this.initializeAppServerList( DEFAULT_ZOOKEEPER_SESSION_TIMEOUT );
        if( !res )
        {
            System.out.println( this + " Error contacting zookeeper. Conn: " + zkConnString + " path: " + zkPath );
        }
        else
        {
            System.out.println( this + " Successfully contacted zookeeper. Conn: " + zkConnString + " path: " + zkPath );
            this._isConfigured = true;
        }
        return res;
    }

    /**
     * Establishes the ZooKeeper connection and populates the traffic table
     * with one empty stats entry per app server found at the configured path.
     *
     * @param timeout ZooKeeper session timeout (milliseconds)
     * @return true if at least one app server was found
     */
    private boolean initializeAppServerList( int timeout )
    {
        try
        {
            // Establish a zookeeper connection; the watcher notifies us of
            // subsequent changes to the app server list node
            this._zconn = new ZooKeeper( this._zkConnString, timeout, new ZKAppServerWatcher( this ) );

            int retries = DEFAULT_RETRIES;
            long retryTimeout = DEFAULT_RETRY_TIMEOUT;
            byte[] data = ScadrScenarioTrack.readZooKeeperData( this._zconn, this._zkPath, retries, retryTimeout );
            if( data == null )
                throw new Exception( "No data returned from ZooKeeper path: " + this._zkPath + " after: " + retries + " retries." );

            String list = new String( data );
            if( list.trim().length() > 0 )
            {
                this._appServers = list.split( APP_SERVER_LIST_SEPARATOR );
                System.out.println( this + " Appserver list initialized, " + this._appServers.length + " app servers found." );
                for( String s : this._appServers )
                {
                    // Set up empty stats for each known server
                    this._appServerTraffic.put( s, new AppServerStats( s, 0L ) );
                }
                return true; // Signal that we've initialized the app server list
            }
            else return false;
        }
        catch( Exception e )
        {
            System.out.println( this + " Error initializing app server list. Reason: " + e.toString() );
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Refreshes the app server list from ZooKeeper. Stats for servers that
     * remain in the new list are carried over; servers no longer listed are
     * purged so they cannot be picked as the least loaded.
     *
     * @return true if the list was refreshed, false otherwise
     */
    public synchronized boolean updateAppServerList()
    {
        try
        {
            int retries = DEFAULT_RETRIES;
            long retryTimeout = DEFAULT_RETRY_TIMEOUT;
            byte[] data = ScadrScenarioTrack.readZooKeeperData( this._zconn, this._zkPath, retries, retryTimeout );
            if( data == null )
                throw new Exception( "No data returned from ZooKeeper path: " + this._zkPath + " after: " + retries + " retries." );

            String list = new String( data );
            if( list.trim().length() > 0 )
            {
                this._appServers = list.split( APP_SERVER_LIST_SEPARATOR );
                System.out.println( this + " Appserver list updated, " + this._appServers.length + " app servers found." );

                // Build a fresh snapshot keyed by the latest server names with
                // empty stats, then copy over stats for servers that survived.
                Hashtable<String,AppServerStats> newTrafficSnapshot = new Hashtable<String,AppServerStats>();
                for( String s : this._appServers )
                {
                    newTrafficSnapshot.put( s, new AppServerStats( s, 0L ) );
                }

                // Now that we have the latest list of server names (but no
                // stats), copy over the latest stats from the traffic table
                synchronized( this._trafficLock )
                {
                    // Go through the current stats; if that server is in the new
                    // snapshot then copy its stats, otherwise drop it
                    Iterator<String> appIt = this._appServerTraffic.keySet().iterator();
                    while( appIt.hasNext() )
                    {
                        AppServerStats currentServerStats = this._appServerTraffic.get( appIt.next() );
                        if( newTrafficSnapshot.containsKey( currentServerStats._appServer ) )
                        {
                            // Copy over the latest stats for this server
                            newTrafficSnapshot.put( currentServerStats._appServer, currentServerStats );
                        }
                    }
                    // Replace the current appServerTraffic table with the new version
                    this._appServerTraffic = newTrafficSnapshot;
                }
                this._appServerListChanged = false; // We now have the new list, squelch the change flag
                return true; // Signal that we've updated the app server list
            }
            else return false;
        }
        catch( Exception e )
        {
            System.out.println( this + " Error updating app server list. Reason: " + e.toString() );
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Reads the data stored at a ZooKeeper path, retrying on connection loss
     * and interruption.
     *
     * @param zkConn       open ZooKeeper connection
     * @param zkPath       node path to read
     * @param retries      max attempts (values &lt;= 0 fall back to the default)
     * @param retryTimeout pause between attempts in ms (negative values fall back
     *                     to the default)
     * @return the node data, or null if nothing could be read
     */
    public static byte[] readZooKeeperData( ZooKeeper zkConn, String zkPath, int retries, long retryTimeout )
    {
        if( retries <= 0 )
            retries = DEFAULT_RETRIES;
        if( retryTimeout < 0 )
            retryTimeout = DEFAULT_RETRY_TIMEOUT;

        byte[] data = null;
        int i = 0;
        while( i < retries )
        {
            try
            {
                i++;
                // Pass true to leave the data watch in place for change events
                data = zkConn.getData( zkPath, true, new Stat() );
                // If we found data at that path then break, otherwise sleep
                // for a while before retrying
                if( data != null)
                    break;
                else Thread.sleep( retryTimeout );
            }
            catch( KeeperException ke )
            {
                // Only connection loss is considered transient; other
                // KeeperExceptions simply consume a retry
                if( ke.code() == Code.CONNECTIONLOSS )
                {
                    try
                    {
                        Thread.sleep( retryTimeout ); // Sleep for a while before retrying
                    }
                    catch( InterruptedException ie ){}
                    continue; // try again if we can
                }
            }
            catch( InterruptedException ie )
            {
                try
                {
                    Thread.sleep( retryTimeout ); // Sleep for a while before retrying
                }
                catch( InterruptedException nie )
                {}
                continue; // try the transaction again if we can
            }
        }
        return data;
    }

    @Override
    public String toString()
    {
        return "[SCADRTRACK: " + this._name + "]";
    }

    /** Dumps traffic-lock statistics, then performs the base-class cleanup. */
    @Override
    public void end()
    {
        // Dump traffic lock stats
        if( this._totalTrafficLockRequestCount > 0 )
            System.out.println( this + " Gating stats - Average traffic lock wait time (ms) : " + this._formatter.format( (double) this._totalTrafficLockWaitTime / (double) this._totalTrafficLockRequestCount ) );
        else System.out.println( this + " Gating stats - Average traffic lock wait time (ms) : " + this._formatter.format( 0.0 ) );
        System.out.println( this + " Gating stats - Total traffic lock requests            : " + this._totalTrafficLockRequestCount );
        System.out.println( this + " Gating stats - Max traffic lock wait time (ms)        : " + this._formatter.format( this._maxTrafficLockWaitTime ) );
        // Let the base class finish its regular cleanup
        super.end();
    }
}
| bsd-3-clause |
arturog8m/ocs | bundle/edu.gemini.catalog/src/main/java/jsky/catalog/ArraySearchCondition.java | 2871 | package jsky.catalog;
import jsky.util.NameValue;
/**
* Represents a search condition with an array of possible values.
* The condition evaluates to true, if any one of the values in the array match.
*/
/**
 * Represents a search condition with an array of possible values.
 * The condition evaluates to true if any one of the values in the array match.
 */
public class ArraySearchCondition extends AbstractSearchCondition {

    /** The condition will match any of these values. */
    private Object[] _values;

    /**
     * Creates a new ArraySearchCondition with the given values.
     *
     * @param fieldDesc describes the field this condition applies to
     * @param values    the set of values any one of which satisfies the condition
     */
    public ArraySearchCondition(FieldDesc fieldDesc, Object[] values) {
        super(fieldDesc);
        _values = values;
    }

    /** Returns the array of values. */
    public Object[] getValues() {
        return _values;
    }

    /**
     * Returns true if the condition is true for the given value.
     * An empty or null value array matches everything.
     *
     * @param val the value to be checked against the condition
     * @return true if the value satisfies the condition
     */
    public boolean isTrueFor(Comparable val) {
        if ((_values == null) || (_values.length == 0)) return true;

        for (Object candidate : _values) {
            if (val == null) {
                if (candidate == null) return true;
            } else if (candidate != null) {
                // SW: Evil, but I don't care. We need to store NameValue
                // pairs, not just strings
                if (candidate instanceof NameValue) {
                    if (((NameValue) candidate).getValue().equals(val)) return true;
                } else {
                    if (((Comparable) candidate).compareTo(val) == 0) return true;
                }
            }
        }
        return false;
    }

    /**
     * Returns true if the condition is true for the given numeric value.
     * If the condition was specified as a String, the return value is false.
     *
     * @param val the value to be checked against the condition
     * @return true if the value satisfies the condition
     */
    public boolean isTrueFor(double val) {
        // Double.valueOf instead of the deprecated boxing constructor
        return isTrueFor(Double.valueOf(val));
    }

    /**
     * Returns the values joined by the given separator, e.g. "val1,val2,val3".
     * Returns an empty string if no values were supplied.
     */
    public String getValueAsString(String sep) {
        if (_values == null) return "";
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < _values.length; i++) {
            sb.append(_values[i].toString());
            if (i < _values.length - 1)
                sb.append(sep);
        }
        return sb.toString();
    }

    /**
     * Test cases (throws RuntimeException on failure).
     */
    public static void main(String[] args) {
        ArraySearchCondition s = new ArraySearchCondition(new FieldDescAdapter("X"), new String[]{"AAA", "BBB", "CCC"});
        if (!s.isTrueFor("BBB"))
            throw new RuntimeException("test failed for BBB: " + s);
        if (s.isTrueFor("CXX"))
            throw new RuntimeException("test failed for CXX: " + s);
        System.out.println("All tests passed");
    }
}
| bsd-3-clause |
BeyondTheBoundary/cspoker | external/plcafe/src/main/java/jp/ac/kobe_u/cs/prolog/lang/Token.java | 9140 | package jp.ac.kobe_u.cs.prolog.lang;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.PushbackReader;
import java.io.Serializable;
/**
* The <code>Token</code> class contains methods for character input/output.<br>
* <font color="red">This document is under construction.</font>
*
* @author Mutsunori Banbara (banbara@kobe-u.ac.jp)
* @author Naoyuki Tamura (tamura@kobe-u.ac.jp)
* @version 1.2.5
*/
/**
 * The <code>Token</code> class contains static helpers for Prolog tokenization:
 * reading tokens from a <code>PushbackReader</code> and writing atoms/strings,
 * quoting them when necessary.
 *
 * @author Mutsunori Banbara (banbara@kobe-u.ac.jp)
 * @author Naoyuki Tamura (tamura@kobe-u.ac.jp)
 * @version 1.2.5
 */
public class Token implements Serializable {

    private static final long serialVersionUID = 6934618121985602394L;

    /**
     * Returns true if {@code c} is a "solo" character, i.e. one that forms
     * an atom on its own ({@code !} or {@code ;}).
     */
    public static boolean isSolo(int c) {
        return c == '!' || c == ';';
    }

    /**
     * Returns true if {@code c} is one of the symbol characters that may be
     * concatenated to form symbolic atoms (e.g. {@code :-}, {@code ==>}).
     */
    public static boolean isSymbol(int c) {
        switch (c) {
        case '+':
        case '-':
        case '*':
        case '/':
        case '\\':
        case '^':
        case '<':
        case '>':
        case '=':
        case '`':
        case '~':
        case ':':
        case '.':
        case '?':
        case '@':
        case '#':
        case '$':
        case '&':
            return true;
        default:
            return false;
        }
    }

    /**
     * Reads one token from the input {@code in}, appends its text to
     * {@code s}, and returns the token type:
     * <pre>
     * Type String
     *  -2  "error message"
     *  -1  "end_of_file"
     *  '.' "."                 full stop
     *  ' ' " "                 space or comment or unknown chars
     *  '(' "(" , ')' ")" , '[' "[" , ']' "]" , '{' "{" , '}' "}" , ',' "," , '|' "|"
     *  'I' "decimal"           positive integer
     *  'D' "decimal"           positive double
     *  'A' "atom name"
     *  'V' "variable name"
     *  'S' "string"
     * </pre>
     *
     * @param s  buffer that receives the token text
     * @param in the reader to consume characters from
     * @return the token type as described above
     * @throws IOException if reading from {@code in} fails
     */
    public static int read_token(StringBuffer s, PushbackReader in)
            throws IOException {
        int c, c1;
        int rc;
        c = in.read(); // get 1st. char
        if (c == -1) {
            s.append("end_of_file");
            return -1;
        }
        if (Character.isDigit((char) c)) {
            rc = read_number(c, s, in);
            if (rc == 1) {
                rc = 'I';
            } else if (rc == 2) {
                rc = 'D';
            }
            return rc;
        }
        if (Character.isLowerCase((char) c)) {
            rc = read_word(c, s, in);
            if (rc > 0) {
                rc = 'A';
            }
            return rc;
        }
        if (Character.isUpperCase((char) c) || c == '_') {
            rc = read_word(c, s, in);
            if (rc > 0) {
                rc = 'V';
            }
            return rc;
        }
        switch (c) {
        case '(':
        case ')':
        case '[':
        case ']':
        case '{':
        case '}':
        case ',':
        case '|':
            s.append((char) c);
            return c;
        case '.': /* full stop or symbol */
            c1 = in.read();
            if (c1 == -1 || c1 <= ' ') {
                s.append(".");
                return '.';
            }
            in.unread(c1);
            break;
        case '%': /* one line comment */
            s.append(" ");
            while ((c1 = in.read()) != '\n') {
                if (c1 == -1) {
                    return ' ';
                }
            }
            return ' ';
        case '/': /* start of comment or symbol */
            if ((c1 = in.read()) == '*') {
                s.append(" ");
                while (true) {
                    while ((c1 = in.read()) != '*') {
                        if (c1 == -1) {
                            s.append("unexpected end_of_file");
                            return -2;
                        }
                    }
                    if ((c1 = in.read()) == '/') {
                        return ' ';
                    }
                    in.unread(c1);
                }
            }
            in.unread(c1);
            break;
        case '\'':
            rc = read_quoted(c, s, in);
            if (rc > 0) {
                rc = 'A';
            }
            return rc;
        case '"':
            rc = read_quoted(c, s, in);
            if (rc > 0) {
                rc = 'S';
            }
            return rc;
        default:
            break;
        }
        if (isSolo(c)) {
            s.append((char) c);
            return 'A';
        }
        if (isSymbol(c)) {
            rc = read_symbol(c, s, in);
            if (rc > 0) {
                rc = 'A';
            }
            return rc;
        }
        s.append(" ");
        return ' ';
    }

    /**
     * Reads an unsigned number whose first character is {@code c}.
     * Appends the digits (and any fraction/exponent) to {@code s}.
     *
     * @return 1 for an integer, 2 for a double
     * @throws IOException if reading from {@code in} fails
     */
    public static int read_number(int c, StringBuffer s, PushbackReader in)
            throws IOException {
        int c1, c2, c3;
        in.unread(c);
        // integer part
        for (;;) {
            c1 = in.read();
            if (!Character.isDigit((char) c1)) {
                break;
            }
            s.append((char) c1);
        }
        if (c1 != '.') {
            in.unread(c1);
            return 1;
        }
        // a '.' only starts a fraction if a digit follows; otherwise it is
        // a full stop and both characters are pushed back
        c2 = in.read();
        if (!Character.isDigit((char) c2)) {
            in.unread(c2);
            in.unread(c1);
            return 1;
        }
        s.append((char) c1);
        in.unread(c2);
        for (;;) {
            c1 = in.read();
            if (!Character.isDigit((char) c1)) {
                break;
            }
            s.append((char) c1);
        }
        if (c1 != 'E' && c1 != 'e') {
            in.unread(c1);
            return 2;
        }
        // optional exponent: E or e, optional sign, then digits
        c2 = in.read();
        if (c2 == '-' || c2 == '+') {
            c3 = in.read();
            if (!Character.isDigit((char) c3)) {
                in.unread(c3);
                in.unread(c2);
                in.unread(c1);
                return 2;
            }
            s.append((char) c1);
            s.append((char) c2);
            in.unread(c3);
        } else if (Character.isDigit((char) c2)) {
            s.append((char) c1);
            in.unread(c2);
        } else {
            in.unread(c2);
            in.unread(c1);
            return 2;
        }
        for (;;) {
            c1 = in.read();
            if (!Character.isDigit((char) c1)) {
                break;
            }
            s.append((char) c1);
        }
        in.unread(c1);
        return 2;
    }

    /**
     * Reads a word (letters, digits and underscores) whose first character
     * is {@code c} and appends it to {@code s}.
     *
     * @return always 1
     * @throws IOException if reading from {@code in} fails
     */
    public static int read_word(int c, StringBuffer s, PushbackReader in)
            throws IOException {
        int c1;
        in.unread(c);
        for (;;) {
            c1 = in.read();
            if (!Character.isLetterOrDigit((char) c1) && c1 != '_') {
                break;
            }
            s.append((char) c1);
        }
        in.unread(c1);
        return 1;
    }

    /**
     * Reads a quoted atom or string terminated by {@code quote}, handling
     * doubled quotes and backslash escape sequences. The quote characters
     * themselves are not appended to {@code s}.
     *
     * @return 1 on success, -2 on unterminated input
     * @throws IOException if reading from {@code in} fails
     */
    public static int read_quoted(int quote, StringBuffer s, PushbackReader in)
            throws IOException {
        int rc;
        int c1;
        for (;;) {
            c1 = in.read();
            if (c1 == -1 || c1 == '\n') {
                // premature end of the quoted token
                in.unread(c1);
                return -2;
            } else if (c1 == quote) {
                // doubled quote stands for a literal quote character
                c1 = in.read();
                if (c1 != quote) {
                    in.unread(c1);
                    return 1;
                }
                c1 = quote;
            } else if (c1 == '\\') {
                rc = escapeSequences(c1, s, in);
                if (rc > 0) {
                    continue;
                } else {
                    return -2;
                }
            }
            s.append((char) c1);
        }
    }

    /**
     * Consumes the character following a backslash and appends the
     * corresponding control character (or the character itself) to {@code s}.
     *
     * @return 1 if a known escape was decoded, 2 otherwise
     * @throws IOException if reading from {@code in} fails
     */
    public static int escapeSequences(int backslash, StringBuffer s,
            PushbackReader in) throws IOException {
        int c;
        c = in.read();
        switch (c) {
        case 'b': // backspace
            s.append((char) 8);
            break;
        case 't': // horizontal tab
            s.append((char) 9);
            break;
        case 'n': // newline
            s.append((char) 10);
            break;
        case 'v': // vertical tab
            s.append((char) 11);
            break;
        case 'f': // form feed
            s.append((char) 12);
            break;
        case 'r': // carriage return
            s.append((char) 13);
            break;
        case 'e': // escape
            s.append((char) 27);
            break;
        case 'd': // delete
            s.append((char) 127);
            break;
        case 'a': // alarm
            s.append((char) 7);
            break;
        default:
            s.append((char) c);
            return 2;
        }
        return 1;
    }

    /**
     * Reads a run of symbol characters starting with {@code c} and appends
     * it to {@code s}.
     *
     * @return always 1
     * @throws IOException if reading from {@code in} fails
     */
    public static int read_symbol(int c, StringBuffer s, PushbackReader in)
            throws IOException {
        int c1;
        s.append((char) c);
        for (;;) {
            c1 = in.read();
            if (!isSymbol(c1)) {
                break;
            }
            s.append((char) c1);
        }
        in.unread(c1);
        return 1;
    }

    /* Write */

    /** Writes the string as-is (unquoted). */
    public static void write_string(String s, PrintWriter out) {
        out.print(s);
    }

    /**
     * Writes the string, quoting and escaping it if it cannot be written as
     * a plain atom. Delegates to {@link #toQuotedString(String)} so the
     * quoting rules live in one place (the two methods previously duplicated
     * the whole escape table).
     */
    public static void writeq_string(String s, PrintWriter out) {
        out.print(toQuotedString(s));
    }

    /**
     * Appends {@code c} to {@code sb}, escaping quote, backslash and the
     * control characters that have named escapes.
     */
    private static void appendEscaped(StringBuilder sb, char c) {
        switch (c) {
        case '\'':
            sb.append("\\'");
            break;
        case '\\':
            sb.append("\\\\");
            break;
        case 8:
            sb.append("\\b");
            break;
        case 9:
            sb.append("\\t");
            break;
        case 10:
            sb.append("\\n");
            break;
        case 11:
            sb.append("\\v");
            break;
        case 12:
            sb.append("\\f");
            break;
        case 13:
            sb.append("\\r");
            break;
        case 27:
            sb.append("\\e");
            break;
        case 127:
            sb.append("\\d");
            break;
        case 7:
            sb.append("\\a");
            break;
        default:
            sb.append(c);
            break;
        }
    }

    /**
     * Returns the string quoted (with escape sequences) if it cannot be
     * written as a plain atom, otherwise returns it unchanged.
     */
    public static String toQuotedString(String s) {
        if (getStringType(s) != 3) {
            return s;
        }
        StringBuilder quoted = new StringBuilder(s.length() * 2 + 2);
        quoted.append('\'');
        for (char c : s.toCharArray()) {
            appendEscaped(quoted, c);
        }
        quoted.append('\'');
        return quoted.toString();
    }

    /**
     * Classifies the string:
     * <pre>
     * 0 : lower-case alphanumeric atom (or the special atoms [] and {})
     * 1 : symbolic atom (all symbol characters)
     * 2 : solo atom (! or ;)
     * 3 : anything else (must be quoted)
     * </pre>
     */
    public static int getStringType(String s) {
        char[] p;
        if (s.equals("[]") || s.equals("{}")) {
            return 0;
        }
        if (s.equals("") || s.equals(".")) {
            return 3;
        }
        if (s.equals("!") || s.equals(";")) {
            return 2;
        }
        p = s.toCharArray(); // string --> chars[]
        if (Character.isLowerCase(p[0])) {
            for (int i = 1; i < p.length; i++) {
                if (!Character.isLetterOrDigit(p[i]) && p[i] != '_') {
                    return 3;
                }
            }
            return 0;
        }
        if (isSymbol(p[0])) {
            for (int i = 1; i < p.length; i++) {
                if (!isSymbol(p[i])) {
                    return 3;
                }
            }
            return 1;
        }
        return 3;
    }
}
| gpl-2.0 |
md-5/jdk10 | test/jdk/java/awt/keyboard/AllKeyCode/AllKeyCode.java | 6758 | /*
* Copyright (c) 2016, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* @test
* @key headful
* @bug 8149456 8147834 8150230 8155740 8163265
* @requires os.family == "mac"
* @summary Tests key codes for all keys supported in Java for Mac OS X.
* @run main AllKeyCode
*/
import java.awt.AWTException;
import java.awt.Robot;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.Frame;
import java.awt.TextArea;
/**
 * Regression test for key codes on Mac OS X: presses every key code that Java
 * supports on that platform via {@link Robot} and verifies that the
 * {@link KeyEvent} delivered to a focused TextArea carries the same code.
 */
public class AllKeyCode extends Frame {

    private static Frame frame;
    private static TextArea textArea;
    private static KeyListener keyListener;
    // All key codes to exercise, in press order
    private static int allKeyArr[];
    // Index into allKeyArr of the key most recently pressed by the Robot
    private static int keyPressedIndex;

    AllKeyCode() {
        AllKeyCode.allKeyArr = new int[] {
            KeyEvent.VK_BACK_SPACE,
            KeyEvent.VK_TAB,
            KeyEvent.VK_ENTER,
            KeyEvent.VK_CLEAR,
            KeyEvent.VK_SHIFT,
            KeyEvent.VK_CONTROL,
            KeyEvent.VK_ALT,
            KeyEvent.VK_CAPS_LOCK,
            KeyEvent.VK_ESCAPE,
            KeyEvent.VK_SPACE,
            KeyEvent.VK_PAGE_UP,
            KeyEvent.VK_PAGE_DOWN,
            KeyEvent.VK_END,
            KeyEvent.VK_HOME,
            KeyEvent.VK_LEFT,
            KeyEvent.VK_UP,
            KeyEvent.VK_RIGHT,
            KeyEvent.VK_DOWN,
            KeyEvent.VK_COMMA,
            KeyEvent.VK_MINUS,
            KeyEvent.VK_PERIOD,
            KeyEvent.VK_SLASH,
            KeyEvent.VK_0,
            KeyEvent.VK_1,
            KeyEvent.VK_2,
            KeyEvent.VK_3,
            KeyEvent.VK_4,
            KeyEvent.VK_5,
            KeyEvent.VK_6,
            KeyEvent.VK_7,
            KeyEvent.VK_8,
            KeyEvent.VK_9,
            KeyEvent.VK_SEMICOLON,
            KeyEvent.VK_EQUALS,
            KeyEvent.VK_A,
            KeyEvent.VK_B,
            KeyEvent.VK_C,
            KeyEvent.VK_D,
            KeyEvent.VK_E,
            KeyEvent.VK_F,
            KeyEvent.VK_G,
            KeyEvent.VK_H,
            KeyEvent.VK_I,
            KeyEvent.VK_J,
            KeyEvent.VK_K,
            KeyEvent.VK_L,
            KeyEvent.VK_M,
            KeyEvent.VK_N,
            KeyEvent.VK_O,
            KeyEvent.VK_P,
            KeyEvent.VK_Q,
            KeyEvent.VK_R,
            KeyEvent.VK_S,
            KeyEvent.VK_T,
            KeyEvent.VK_U,
            KeyEvent.VK_V,
            KeyEvent.VK_W,
            KeyEvent.VK_X,
            KeyEvent.VK_Y,
            KeyEvent.VK_Z,
            KeyEvent.VK_OPEN_BRACKET,
            KeyEvent.VK_BACK_SLASH,
            KeyEvent.VK_CLOSE_BRACKET,
            KeyEvent.VK_NUMPAD0,
            KeyEvent.VK_NUMPAD1,
            KeyEvent.VK_NUMPAD2,
            KeyEvent.VK_NUMPAD3,
            KeyEvent.VK_NUMPAD4,
            KeyEvent.VK_NUMPAD5,
            KeyEvent.VK_NUMPAD6,
            KeyEvent.VK_NUMPAD7,
            KeyEvent.VK_NUMPAD8,
            KeyEvent.VK_NUMPAD9,
            KeyEvent.VK_MULTIPLY,
            KeyEvent.VK_ADD,
            KeyEvent.VK_SUBTRACT,
            KeyEvent.VK_DECIMAL,
            KeyEvent.VK_DIVIDE,
            KeyEvent.VK_F1,
            KeyEvent.VK_F2,
            KeyEvent.VK_F3,
            KeyEvent.VK_F4,
            KeyEvent.VK_F5,
            KeyEvent.VK_F6,
            KeyEvent.VK_F7,
            KeyEvent.VK_F8,
            KeyEvent.VK_F9,
            KeyEvent.VK_F10,
            KeyEvent.VK_F11,
            KeyEvent.VK_F12,
            KeyEvent.VK_DELETE,
            KeyEvent.VK_HELP,
            KeyEvent.VK_META,
            KeyEvent.VK_BACK_QUOTE,
            KeyEvent.VK_QUOTE,
            KeyEvent.VK_F13,
            KeyEvent.VK_F14,
            KeyEvent.VK_F15,
            KeyEvent.VK_F16,
            KeyEvent.VK_F17,
            KeyEvent.VK_F18,
            KeyEvent.VK_F19,
            KeyEvent.VK_F20,
            KeyEvent.VK_ALT_GRAPH
        };
        keyPressedIndex = -1;
    }

    /** Builds the frame with a focusable TextArea and attaches the verifier. */
    private void createAndShowGUI() {
        frame = new Frame("Function Key Keycodes");
        textArea = new TextArea();
        textArea.setFocusable(true);
        frame.add(textArea);
        frame.pack();
        frame.setSize(200, 200);
        textArea.addKeyListener(keyListener = new KeyListener() {
            @Override
            public void keyTyped(KeyEvent ke) {
            }

            @Override
            public void keyPressed(KeyEvent ke) {
                if (allKeyArr[keyPressedIndex] != ke.getKeyCode()) {
                    // Include expected/actual codes so failures are diagnosable
                    throw new RuntimeException("Wrong keycode received: expected "
                            + allKeyArr[keyPressedIndex]
                            + ", got " + ke.getKeyCode());
                }
            }

            @Override
            public void keyReleased(KeyEvent ke) {
            }
        });
        frame.setVisible(true);
    }

    /** Detaches the key listener once all keys have been exercised. */
    private void removeListener() {
        if (keyListener != null) {
            textArea.removeKeyListener(keyListener);
            keyListener = null;
        }
    }

    /**
     * Disposes the test frame. Note: intentionally disposes the static test
     * frame (the one shown on screen), not this never-shown instance.
     */
    @Override
    public void dispose() {
        if (null != frame) {
            frame.dispose();
            frame = null;
        }
    }

    /** Presses and releases every key code in {@code allKeyArr} via Robot. */
    public void generateFunctionKeyPress() {
        try {
            Robot robot = new Robot();
            robot.waitForIdle();
            for (int i = 0; i < allKeyArr.length; i++) {
                keyPressedIndex = i;
                robot.keyPress(allKeyArr[i]);
                robot.keyRelease(allKeyArr[i]);
                robot.waitForIdle();
            }
            removeListener();
        } catch (AWTException e) {
            // Preserve the original cause (it was previously dropped)
            throw new RuntimeException("Robot creation failed", e);
        }
    }

    public static void main(String args[]) {
        AllKeyCode allKeyObj = new AllKeyCode();
        allKeyObj.createAndShowGUI();
        allKeyObj.generateFunctionKeyPress();
        allKeyObj.dispose();
        System.out.println("Test Passed");
    }
}
| gpl-2.0 |
FauxFaux/jdk9-jdk | src/jdk.crypto.ec/share/classes/sun/security/ec/SunEC.java | 11547 | /*
* Copyright (c) 2009, 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.security.ec;
import java.util.*;
import java.security.*;
import java.util.regex.Pattern;
import sun.security.util.CurveDB;
import sun.security.util.NamedCurve;
import sun.security.util.ECParameters;
import static sun.security.util.SecurityConstants.PROVIDER_VER;
/**
* Provider class for the Elliptic Curve provider.
* Supports EC keypair and parameter generation, ECDSA signing and
* ECDH key agreement.
*
* IMPLEMENTATION NOTE:
* The Java classes in this provider access a native ECC implementation
* via JNI to a C++ wrapper class which in turn calls C functions.
* The Java classes are packaged into the jdk.crypto.sunec module and the
* C++ and C functions are packaged into libsunec.so or sunec.dll in the
* JRE native libraries directory. If the native library is not present
* then this provider is registered with support for fewer ECC algorithms
* (KeyPairGenerator, Signature and KeyAgreement are omitted).
*
* @since 1.7
*/
public final class SunEC extends Provider {
    private static final long serialVersionUID = -2279741672933606418L;

    // flag indicating whether the full EC implementation is present
    // (when native library is absent then fewer EC algorithms are available)
    private static boolean useFullImplementation = true;

    // Probe once at class-load time for the native ECC library ("sunec");
    // if it cannot be linked, fall back to the reduced, Java-only service set.
    static {
        try {
            AccessController.doPrivileged(new PrivilegedAction<Void>() {
                public Void run() {
                    System.loadLibrary("sunec"); // check for native library
                    return null;
                }
            });
        } catch (UnsatisfiedLinkError e) {
            // Native library missing: register fewer algorithms (no
            // KeyPairGenerator/Signature/KeyAgreement) in putEntries.
            useFullImplementation = false;
        }
    }
private static class ProviderService extends Provider.Service {
ProviderService(Provider p, String type, String algo, String cn) {
super(p, type, algo, cn, null, null);
}
ProviderService(Provider p, String type, String algo, String cn,
String[] aliases, HashMap<String, String> attrs) {
super(p, type, algo, cn,
(aliases == null? null : Arrays.asList(aliases)), attrs);
}
@Override
public Object newInstance(Object ctrParamObj)
throws NoSuchAlgorithmException {
String type = getType();
if (ctrParamObj != null) {
throw new InvalidParameterException
("constructorParameter not used with " + type + " engines");
}
String algo = getAlgorithm();
try {
if (type.equals("Signature")) {
boolean inP1363 = algo.endsWith("inP1363Format");
if (inP1363) {
algo = algo.substring(0, algo.length() - 13);
}
if (algo.equals("SHA1withECDSA")) {
return (inP1363? new ECDSASignature.SHA1inP1363Format() :
new ECDSASignature.SHA1());
} else if (algo.equals("SHA224withECDSA")) {
return (inP1363? new ECDSASignature.SHA224inP1363Format() :
new ECDSASignature.SHA224());
} else if (algo.equals("SHA256withECDSA")) {
return (inP1363? new ECDSASignature.SHA256inP1363Format() :
new ECDSASignature.SHA256());
} else if (algo.equals("SHA384withECDSA")) {
return (inP1363? new ECDSASignature.SHA384inP1363Format() :
new ECDSASignature.SHA384());
} else if (algo.equals("SHA512withECDSA")) {
return (inP1363? new ECDSASignature.SHA512inP1363Format() :
new ECDSASignature.SHA512());
} else if (algo.equals("NONEwithECDSA")) {
return (inP1363? new ECDSASignature.RawinP1363Format() :
new ECDSASignature.Raw());
}
} else if (type.equals("KeyFactory")) {
if (algo.equals("EC")) {
return new ECKeyFactory();
}
} else if (type.equals("AlgorithmParameters")) {
if (algo.equals("EC")) {
return new sun.security.util.ECParameters();
}
} else if (type.equals("KeyPairGenerator")) {
if (algo.equals("EC")) {
return new ECKeyPairGenerator();
}
} else if (type.equals("KeyAgreement")) {
if (algo.equals("ECDH")) {
return new ECDHKeyAgreement();
}
}
} catch (Exception ex) {
throw new NoSuchAlgorithmException("Error constructing " +
type + " for " + algo + " using SunEC", ex);
}
throw new ProviderException("No impl for " + algo +
" " + type);
}
}
public SunEC() {
super("SunEC", PROVIDER_VER,
"Sun Elliptic Curve provider (EC, ECDSA, ECDH)");
AccessController.doPrivileged(new PrivilegedAction<Void>() {
public Void run() {
putEntries(useFullImplementation);
return null;
}
});
}
/**
 * Registers this provider's services with the JCA framework.
 *
 * KeyFactory and AlgorithmParameters for EC are always registered; the
 * Signature, KeyPairGenerator, and KeyAgreement engines are registered
 * only when the full (native) ECC implementation is available.
 *
 * @param useFullImplementation true if the full ECC implementation is
 *        available and the crypto engines should be registered
 */
void putEntries(boolean useFullImplementation) {
    // Attributes shared by all registered services.
    HashMap<String, String> ATTRS = new HashMap<>(3);
    ATTRS.put("ImplementedIn", "Software");
    String ecKeyClasses = "java.security.interfaces.ECPublicKey" +
        "|java.security.interfaces.ECPrivateKey";
    ATTRS.put("SupportedKeyClasses", ecKeyClasses);
    ATTRS.put("KeySize", "256");
    /*
     * Key Factory engine
     */
    putService(new ProviderService(this, "KeyFactory",
        "EC", "sun.security.ec.ECKeyFactory",
        new String[] { "EllipticCurve" }, ATTRS));
    /*
     * Algorithm Parameter engine
     */
    // "AlgorithmParameters.EC SupportedCurves" prop used by unit test
    // Build a "|"-separated list of "[name1,name2,...,oid]" entries, one
    // per supported named curve.
    boolean firstCurve = true;
    StringBuilder names = new StringBuilder();
    Pattern nameSplitPattern = Pattern.compile(CurveDB.SPLIT_PATTERN);
    Collection<? extends NamedCurve> supportedCurves =
        CurveDB.getSupportedCurves();
    for (NamedCurve namedCurve : supportedCurves) {
        if (!firstCurve) {
            names.append("|");
        } else {
            firstCurve = false;
        }
        names.append("[");
        // A curve may carry several comma/space separated common names.
        String[] commonNames = nameSplitPattern.split(namedCurve.getName());
        for (String commonName : commonNames) {
            names.append(commonName.trim());
            names.append(",");
        }
        names.append(namedCurve.getObjectId());
        names.append("]");
    }
    HashMap<String, String> apAttrs = new HashMap<>(ATTRS);
    apAttrs.put("SupportedCurves", names.toString());
    putService(new ProviderService(this, "AlgorithmParameters",
        "EC", "sun.security.util.ECParameters",
        new String[] { "EllipticCurve", "1.2.840.10045.2.1", "OID.1.2.840.10045.2.1" },
        apAttrs));
    /*
     * Register the algorithms below only when the full ECC implementation
     * is available
     */
    if (!useFullImplementation) {
        return;
    }
    /*
     * Signature engines (ASN.1 encoded signatures)
     */
    putService(new ProviderService(this, "Signature",
        "NONEwithECDSA", "sun.security.ec.ECDSASignature$Raw",
        null, ATTRS));
    putService(new ProviderService(this, "Signature",
        "SHA1withECDSA", "sun.security.ec.ECDSASignature$SHA1",
        new String[] { "1.2.840.10045.4.1", "OID.1.2.840.10045.4.1" },
        ATTRS));
    putService(new ProviderService(this, "Signature",
        "SHA224withECDSA", "sun.security.ec.ECDSASignature$SHA224",
        new String[] { "1.2.840.10045.4.3.1", "OID.1.2.840.10045.4.3.1"},
        ATTRS));
    putService(new ProviderService(this, "Signature",
        "SHA256withECDSA", "sun.security.ec.ECDSASignature$SHA256",
        new String[] { "1.2.840.10045.4.3.2", "OID.1.2.840.10045.4.3.2"},
        ATTRS));
    putService(new ProviderService(this, "Signature",
        "SHA384withECDSA", "sun.security.ec.ECDSASignature$SHA384",
        new String[] { "1.2.840.10045.4.3.3", "OID.1.2.840.10045.4.3.3" },
        ATTRS));
    putService(new ProviderService(this, "Signature",
        "SHA512withECDSA", "sun.security.ec.ECDSASignature$SHA512",
        new String[] { "1.2.840.10045.4.3.4", "OID.1.2.840.10045.4.3.4" },
        ATTRS));
    // P1363 format variants (raw r||s signature encoding); no OID aliases.
    putService(new ProviderService(this, "Signature",
        "NONEwithECDSAinP1363Format",
        "sun.security.ec.ECDSASignature$RawinP1363Format"));
    putService(new ProviderService(this, "Signature",
        "SHA1withECDSAinP1363Format",
        "sun.security.ec.ECDSASignature$SHA1inP1363Format"));
    putService(new ProviderService(this, "Signature",
        "SHA224withECDSAinP1363Format",
        "sun.security.ec.ECDSASignature$SHA224inP1363Format"));
    putService(new ProviderService(this, "Signature",
        "SHA256withECDSAinP1363Format",
        "sun.security.ec.ECDSASignature$SHA256inP1363Format"));
    putService(new ProviderService(this, "Signature",
        "SHA384withECDSAinP1363Format",
        "sun.security.ec.ECDSASignature$SHA384inP1363Format"));
    putService(new ProviderService(this, "Signature",
        "SHA512withECDSAinP1363Format",
        "sun.security.ec.ECDSASignature$SHA512inP1363Format"));
    /*
     * Key Pair Generator engine
     */
    putService(new ProviderService(this, "KeyPairGenerator",
        "EC", "sun.security.ec.ECKeyPairGenerator",
        new String[] { "EllipticCurve" }, ATTRS));
    /*
     * Key Agreement engine
     */
    putService(new ProviderService(this, "KeyAgreement",
        "ECDH", "sun.security.ec.ECDHKeyAgreement", null, ATTRS));
}
}
| gpl-2.0 |
7ShaYaN7/Telegram | TMessagesProj/src/main/java/org/telegram/ui/Cells/TextInfoPrivacyCell.java | 1863 | /*
* This is the source code of Telegram for Android v. 3.x.x.
* It is licensed under GNU GPL v. 2 or later.
* You should have received a copy of the license in this archive (see LICENSE).
*
* Copyright Nikolai Kudashov, 2013-2016.
*/
package org.telegram.ui.Cells;
import android.content.Context;
import android.text.method.LinkMovementMethod;
import android.util.TypedValue;
import android.view.Gravity;
import android.widget.FrameLayout;
import android.widget.TextView;
import org.telegram.messenger.AndroidUtilities;
import org.telegram.messenger.LocaleController;
import org.telegram.ui.Components.LayoutHelper;
/**
 * A simple cell that shows a gray, link-enabled info text, laid out for
 * either LTR or RTL locales.
 */
public class TextInfoPrivacyCell extends FrameLayout {

    private TextView infoTextView;

    public TextInfoPrivacyCell(Context context) {
        super(context);
        // Resolve the horizontal gravity once; it is used both for the text
        // alignment and for the frame layout params.
        int horizontalGravity = LocaleController.isRTL ? Gravity.RIGHT : Gravity.LEFT;
        infoTextView = new TextView(context);
        infoTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14);
        infoTextView.setTextColor(0xff808080);
        infoTextView.setLinkTextColor(0xff316f9f);
        infoTextView.setGravity(horizontalGravity);
        infoTextView.setMovementMethod(LinkMovementMethod.getInstance());
        infoTextView.setPadding(0, AndroidUtilities.dp(10), 0, AndroidUtilities.dp(17));
        addView(infoTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT,
                horizontalGravity | Gravity.TOP, 17, 0, 17, 0));
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Fixed width from the parent, height determined by the content.
        int width = MeasureSpec.getSize(widthMeasureSpec);
        super.onMeasure(MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY),
                MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
    }

    public void setText(CharSequence text) {
        infoTextView.setText(text);
    }

    public void setTextColor(int color) {
        infoTextView.setTextColor(color);
    }
}
| gpl-2.0 |
YouDiSN/OpenJDK-Research | jdk9/hotspot/src/jdk.internal.vm.compiler/share/classes/org.graalvm.compiler.jtt/src/org/graalvm/compiler/jtt/optimize/Reduce_Double01.java | 2217 | /*
* Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.graalvm.compiler.jtt.optimize;
import org.junit.Test;
import org.graalvm.compiler.jtt.JTTTest;
/*
* Tests optimization of double operations.
*/
// Each helper method applies an arithmetic identity (x+0, x-0, x*1, x/1)
// that the compiler under test is expected to reduce away. The exact source
// expressions are the point of this test and must NOT be simplified here.
public class Reduce_Double01 extends JTTTest {

    // Dispatches to one identity helper per arg value (0-3); any other
    // value returns 0.
    public static double test(double arg) {
        if (arg == 0) {
            return add(10);
        }
        if (arg == 1) {
            return sub(11);
        }
        if (arg == 2) {
            return mul(12);
        }
        if (arg == 3) {
            return div(13);
        }
        return 0;
    }

    // Identity: x + 0 == x
    public static double add(double x) {
        return x + 0;
    }

    // Identity: x - 0 == x
    public static double sub(double x) {
        return x - 0;
    }

    // Identity: x * 1 == x
    public static double mul(double x) {
        return x * 1;
    }

    // Identity: x / 1 == x
    public static double div(double x) {
        return x / 1;
    }

    @Test
    public void run0() throws Throwable {
        runTest("test", 0d);
    }

    @Test
    public void run1() throws Throwable {
        runTest("test", 1d);
    }

    @Test
    public void run2() throws Throwable {
        runTest("test", 2d);
    }

    @Test
    public void run3() throws Throwable {
        runTest("test", 3d);
    }
}
| gpl-2.0 |
mydzigear/weka.kmeanspp.silhouette_score | src/weka/core/neighboursearch/TreePerformanceStats.java | 9821 | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* TreePerformanceStats.java
* Copyright (C) 2007-2012 University of Waikato, Hamilton, New Zealand
*/
package weka.core.neighboursearch;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Vector;
import weka.core.RevisionUtils;
/**
* The class that measures the performance of a tree based
* nearest neighbour search algorithm.
*
* @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
* @version $Revision: 10141 $
*/
/**
 * Performance statistics for a tree-based nearest neighbour search
 * algorithm. In addition to the statistics kept by the superclass, this
 * class records, per query, how many leaf nodes and how many internal
 * nodes of the tree were inspected, and exposes min/max/total/mean/stddev
 * aggregates over all queries.
 *
 * @author Ashraf M. Kibriya (amk14[at-the-rate]cs[dot]waikato[dot]ac[dot]nz)
 * @version $Revision: 10141 $
 */
public class TreePerformanceStats
  extends PerformanceStats {

  /** for serialization. */
  private static final long serialVersionUID = -6637636693340810373L;

  // ---- leaf node counters ----

  /** Smallest number of leaf nodes inspected by any single query. */
  protected int m_MinLeaves;

  /** Largest number of leaf nodes inspected by any single query. */
  protected int m_MaxLeaves;

  /** Total leaf nodes inspected, summed over all queries. */
  protected int m_SumLeaves;

  /** Sum of squared per-query leaf counts (for the stddev). */
  protected int m_SumSqLeaves;

  /** Leaf nodes inspected by the current/last query. */
  protected int m_LeafCount;

  // ---- internal node counters ----

  /** Smallest number of internal nodes inspected by any single query. */
  protected int m_MinIntNodes;

  /** Largest number of internal nodes inspected by any single query. */
  protected int m_MaxIntNodes;

  /** Total internal nodes inspected, summed over all queries. */
  protected int m_SumIntNodes;

  /** Sum of squared per-query internal node counts (for the stddev). */
  protected int m_SumSqIntNodes;

  /** Internal nodes inspected by the current/last query. */
  protected int m_IntNodeCount;

  /**
   * Default constructor; starts with all counters cleared.
   */
  public TreePerformanceStats() {
    reset();
  }

  /**
   * Clears all counters (including those of the superclass).
   */
  public void reset() {
    super.reset();
    // leaf counters
    m_SumLeaves = 0;
    m_SumSqLeaves = 0;
    m_LeafCount = 0;
    m_MinLeaves = Integer.MAX_VALUE;
    m_MaxLeaves = Integer.MIN_VALUE;
    // internal node counters
    m_SumIntNodes = 0;
    m_SumSqIntNodes = 0;
    m_IntNodeCount = 0;
    m_MinIntNodes = Integer.MAX_VALUE;
    m_MaxIntNodes = Integer.MIN_VALUE;
  }

  /**
   * Signals the start of a nearest neighbour search; clears the per-query
   * counters.
   */
  public void searchStart() {
    super.searchStart();
    m_LeafCount = 0;
    m_IntNodeCount = 0;
  }

  /**
   * Signals the end of a nearest neighbour search; folds the per-query
   * counters into the aggregates.
   */
  public void searchFinish() {
    super.searchFinish();
    // leaf aggregates
    m_SumLeaves += m_LeafCount;
    m_SumSqLeaves += m_LeafCount * m_LeafCount;
    m_MinLeaves = Math.min(m_MinLeaves, m_LeafCount);
    m_MaxLeaves = Math.max(m_MaxLeaves, m_LeafCount);
    // internal node aggregates
    m_SumIntNodes += m_IntNodeCount;
    m_SumSqIntNodes += m_IntNodeCount * m_IntNodeCount;
    m_MinIntNodes = Math.min(m_MinIntNodes, m_IntNodeCount);
    m_MaxIntNodes = Math.max(m_MaxIntNodes, m_IntNodeCount);
  }

  /**
   * Counts one more leaf node inspected by the current query.
   */
  public void incrLeafCount() {
    ++m_LeafCount;
  }

  /**
   * Counts one more internal node inspected by the current query.
   */
  public void incrIntNodeCount() {
    ++m_IntNodeCount;
  }

  // ---- leaf node getters ----

  /**
   * Returns the total number of leaves visited over all queries.
   *
   * @return The total number.
   */
  public int getTotalLeavesVisited() {
    return m_SumLeaves;
  }

  /**
   * Returns the mean number of leaves visited per query.
   *
   * @return The mean number of leaves visited.
   */
  public double getMeanLeavesVisited() {
    return m_SumLeaves / (double) m_NumQueries;
  }

  /**
   * Returns the (sample) standard deviation of leaves visited per query.
   *
   * @return The standard deviation of leaves visited.
   */
  public double getStdDevLeavesVisited() {
    double corrected =
      m_SumSqLeaves - (m_SumLeaves * m_SumLeaves) / (double) m_NumQueries;
    return Math.sqrt(corrected / (m_NumQueries - 1));
  }

  /**
   * Returns the minimum number of leaves visited by any query.
   *
   * @return The minimum number of leaves visited.
   */
  public int getMinLeavesVisited() {
    return m_MinLeaves;
  }

  /**
   * Returns the maximum number of leaves visited by any query.
   *
   * @return The maximum number of leaves visited.
   */
  public int getMaxLeavesVisited() {
    return m_MaxLeaves;
  }

  // ---- internal node getters ----

  /**
   * Returns the total number of internal nodes visited over all queries.
   *
   * @return The total number of internal nodes visited.
   */
  public int getTotalIntNodesVisited() {
    return m_SumIntNodes;
  }

  /**
   * Returns the mean number of internal nodes visited per query.
   *
   * @return The mean number of internal nodes visited.
   */
  public double getMeanIntNodesVisited() {
    return m_SumIntNodes / (double) m_NumQueries;
  }

  /**
   * Returns the (sample) standard deviation of internal nodes visited
   * per query.
   *
   * @return The standard deviation of internal nodes visited.
   */
  public double getStdDevIntNodesVisited() {
    double corrected =
      m_SumSqIntNodes - (m_SumIntNodes * m_SumIntNodes) / (double) m_NumQueries;
    return Math.sqrt(corrected / (m_NumQueries - 1));
  }

  /**
   * Returns the minimum number of internal nodes visited by any query.
   *
   * @return The minimum of internal nodes visited.
   */
  public int getMinIntNodesVisited() {
    return m_MinIntNodes;
  }

  /**
   * Returns the maximum number of internal nodes visited by any query.
   *
   * @return The maximum of internal nodes visited.
   */
  public int getMaxIntNodesVisited() {
    return m_MaxIntNodes;
  }

  /**
   * Returns an enumeration of the additional measure names, i.e. the
   * superclass measures followed by the node and leaf measures.
   *
   * @return An enumeration of the measure names.
   */
  public Enumeration<String> enumerateMeasures() {
    Vector<String> measures = new Vector<String>();
    measures.addAll(Collections.list(super.enumerateMeasures()));
    String[] extraMeasures = {
      // internal node statistics
      "measureTotal_nodes_visited",
      "measureMean_nodes_visited",
      "measureStdDev_nodes_visited",
      "measureMin_nodes_visited",
      "measureMax_nodes_visited",
      // leaf statistics
      "measureTotal_leaves_visited",
      "measureMean_leaves_visited",
      "measureStdDev_leaves_visited",
      "measureMin_leaves_visited",
      "measureMax_leaves_visited"
    };
    for (String measure : extraMeasures) {
      measures.addElement(measure);
    }
    return measures.elements();
  }

  /**
   * Returns the value of the named measure.
   *
   * @param additionalMeasureName The name of the measure to query for
   * its value.
   * @return The value of the named measure.
   * @throws IllegalArgumentException If the named measure is not
   * supported.
   */
  public double getMeasure(String additionalMeasureName) throws IllegalArgumentException {
    // internal node statistics
    if (additionalMeasureName.equalsIgnoreCase("measureTotal_nodes_visited")) {
      return getTotalIntNodesVisited();
    }
    if (additionalMeasureName.equalsIgnoreCase("measureMean_nodes_visited")) {
      return getMeanIntNodesVisited();
    }
    if (additionalMeasureName.equalsIgnoreCase("measureStdDev_nodes_visited")) {
      return getStdDevIntNodesVisited();
    }
    if (additionalMeasureName.equalsIgnoreCase("measureMin_nodes_visited")) {
      return getMinIntNodesVisited();
    }
    if (additionalMeasureName.equalsIgnoreCase("measureMax_nodes_visited")) {
      return getMaxIntNodesVisited();
    }
    // leaf statistics
    if (additionalMeasureName.equalsIgnoreCase("measureTotal_leaves_visited")) {
      return getTotalLeavesVisited();
    }
    if (additionalMeasureName.equalsIgnoreCase("measureMean_leaves_visited")) {
      return getMeanLeavesVisited();
    }
    if (additionalMeasureName.equalsIgnoreCase("measureStdDev_leaves_visited")) {
      return getStdDevLeavesVisited();
    }
    if (additionalMeasureName.equalsIgnoreCase("measureMin_leaves_visited")) {
      return getMinLeavesVisited();
    }
    if (additionalMeasureName.equalsIgnoreCase("measureMax_leaves_visited")) {
      return getMaxLeavesVisited();
    }
    // unknown here; let the superclass resolve or reject it
    return super.getMeasure(additionalMeasureName);
  }

  /**
   * Returns a string representation of the statistics.
   *
   * @return The statistics as string.
   */
  public String getStats() {
    StringBuilder buffer = new StringBuilder(super.getStats());
    buffer.append("leaves: ").append(getMinLeavesVisited()).append(", ")
      .append(getMaxLeavesVisited()).append(",")
      .append(getTotalLeavesVisited()).append(",")
      .append(getMeanLeavesVisited()).append(", ")
      .append(getStdDevLeavesVisited()).append("\n");
    buffer.append("Int nodes: ").append(getMinIntNodesVisited()).append(", ")
      .append(getMaxIntNodesVisited()).append(",")
      .append(getTotalIntNodesVisited()).append(",")
      .append(getMeanIntNodesVisited()).append(", ")
      .append(getStdDevIntNodesVisited()).append("\n");
    return buffer.toString();
  }

  /**
   * Returns the revision string.
   *
   * @return the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 10141 $");
  }
}
| gpl-3.0 |
xasx/wildfly | testsuite/integration/basic/src/test/java/org/jboss/as/test/integration/jca/beanvalidation/ra/ValidMessageEndpoint.java | 2044 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2011, Red Hat Middleware LLC, and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.test.integration.jca.beanvalidation.ra;
import java.lang.reflect.Method;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.resource.spi.endpoint.MessageEndpoint;
/**
* A simple message endpoint
*
* @author <a href="mailto:vrastsel@redhat.com>Vladimir Rastseluev</a>
*/
/**
 * A trivial {@link MessageEndpoint} that simply remembers the last JMS
 * message delivered to it, so a test can retrieve and inspect it.
 *
 * @author <a href="mailto:vrastsel@redhat.com>Vladimir Rastseluev</a>
 */
public class ValidMessageEndpoint implements MessageEndpoint, MessageListener {

    // The message most recently handed to onMessage(); null until the
    // first delivery.
    private Message lastMessage;

    /**
     * Creates an endpoint with no message recorded yet.
     */
    public ValidMessageEndpoint() {
    }

    /**
     * Records the delivered message for later retrieval.
     */
    public void onMessage(Message message) {
        lastMessage = message;
    }

    /**
     * Returns the most recently delivered message, or {@code null} if
     * nothing has been delivered yet.
     *
     * @return The value
     */
    public Message getMessage() {
        return lastMessage;
    }

    /**
     * No post-delivery work is needed for this endpoint.
     */
    public void afterDelivery() {
    }

    /**
     * No pre-delivery work is needed for this endpoint.
     */
    public void beforeDelivery(Method method) {
    }

    /**
     * Nothing to release.
     */
    public void release() {
    }
}
| lgpl-2.1 |
xasx/wildfly | testsuite/domain/src/test/java/org/jboss/as/test/integration/domain/management/cli/RolloutPlanTestServlet.java | 4454 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2011, Red Hat, Inc., and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.as.test.integration.domain.management.cli;
import java.io.IOException;
import java.io.PrintWriter;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.URL;
import java.util.Date;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.jboss.logging.Logger;
/**
* @author Dominik Pospisil <dpospisi@redhat.com>
*/
/**
 * Test servlet used by the rollout-plan CLI tests. It reports its own
 * initialization timestamp (so a test can detect a redeploy/restart) and,
 * on request, binds or unbinds a server socket on the request host — which
 * lets a test make a deployment step fail on an individual server.
 *
 * @author Dominik Pospisil <dpospisi@redhat.com>
 */
@WebServlet(urlPatterns = {"/RolloutServlet"}, loadOnStartup = 1)
public class RolloutPlanTestServlet extends HttpServlet {

    public static final String BIND_PORT_PARAM = "bindPort";
    public static final String OP_PARAM = "operation";
    public static final String OP_BIND = "bind";
    public static final String OP_UNBIND = "unbind";

    /** Time this servlet instance was initialized; echoed back to clients. */
    private Date initDate;
    /** Port requested by the most recent bind operation. */
    private int bindPort;
    /** Currently bound socket, or null when no socket is held. */
    private ServerSocket socket;
    /** Host taken from the request URL; used as the bind address. */
    private String host;

    private static final Logger log = Logger.getLogger(RolloutPlanTestServlet.class);

    /**
     * Shared GET/POST handler: performs the optional bind/unbind operation
     * named by the "operation" parameter, then writes the initialization
     * timestamp (epoch millis) as plain text.
     *
     * @throws ServletException if a bind/unbind request is invalid or fails
     * @throws IOException on a request/response I/O failure
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        URL requestURL = new URL(request.getRequestURL().toString());
        host = requestURL.getHost();
        String op = request.getParameter(OP_PARAM);
        if (OP_BIND.equals(op)) {
            // parseInt avoids the pointless boxing of Integer.valueOf
            bindPort = Integer.parseInt(request.getParameter(BIND_PORT_PARAM));
            bind();
        } else if (OP_UNBIND.equals(op)) { unbind(); }
        response.setContentType("text/plain");
        PrintWriter out = response.getWriter();
        out.print(String.valueOf(initDate.getTime()));
        out.close();
    }

    @Override
    public void init(ServletConfig config) throws ServletException {
        initDate = new Date();
        super.init(config);
        log.trace("RolloutServlet initialized: " + String.valueOf(initDate.getTime()));
    }

    @Override
    public void destroy() {
        if (socket != null) {
            try {
                unbind();
            } catch (ServletException se) {
                // Best-effort cleanup on undeploy; log instead of failing destroy().
                log.trace("Failed to unbind socket during destroy()", se);
            }
        }
        super.destroy();
    }

    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Binds a new server socket to the request host and the configured port.
     * The {@code socket} field is assigned only after the bind succeeds, so
     * a failed attempt neither leaks the half-constructed socket nor leaves
     * the servlet in a state where every later bind is rejected with
     * "Already bound." (the original code assigned the field before binding).
     *
     * @throws ServletException if a socket is already bound or the bind fails
     */
    private void bind() throws ServletException {
        if (socket != null) { throw new ServletException("Already bound."); }
        ServerSocket newSocket = null;
        try {
            newSocket = new ServerSocket();
            newSocket.bind(new InetSocketAddress(host, bindPort));
        } catch (IOException ioe) {
            if (newSocket != null) {
                try {
                    newSocket.close();
                } catch (IOException ignored) {
                    // already failing the request; nothing more we can do
                }
            }
            throw new ServletException("Bind failed.", ioe);
        }
        socket = newSocket;
        log.trace("Bound to address " + host + " port " + bindPort + ".");
    }

    /**
     * Closes and forgets the bound socket.
     *
     * @throws ServletException if no socket is bound or closing it fails
     */
    private void unbind() throws ServletException {
        if (socket == null) { throw new ServletException("Not bound."); }
        try {
            socket.close();
            socket = null;
            log.trace("Unbound from address " + host + " port " + bindPort + ".");
        } catch (IOException ioe) {
            throw new ServletException("Unbind failed.", ioe);
        }
    }
}
| lgpl-2.1 |
pwrose/biojava | biojava-structure-gui/src/main/java/demo/WebStartClientDemo.java | 1576 | /*
* BioJava development code
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. If you do not have a copy,
* see:
*
* http://www.gnu.org/copyleft/lesser.html
*
* Copyright for this code is held jointly by the individual
* authors. These should be listed in @author doc comments.
*
* For more information on the BioJava project and its aims,
* or to join the biojava-l mailing list, visit the home page
* at:
*
* http://www.biojava.org/
*
* Created on Feb 8, 2010
* Author: Andreas Prlic
*
*/
package demo;
import org.biojava.nbio.structure.align.webstart.WebStartMain;
/**
 * Demo launcher for the BioJava structure-alignment Web Start client.
 * Runs a pairwise FATCAT alignment of two PDB chains via
 * {@link WebStartMain}. The commented-out lines are alternative
 * invocations (other algorithms, structure pairs, and server URLs) kept
 * as ready-made examples.
 */
public class WebStartClientDemo
{
   public static void main(String[] args){
      // Alternative example invocations:
      //client.main(new String[]{"fatcat", "3BMV.A","2GUY.A", "http://pdb114.rcsb.org:8080/jfatcatserver/align/"});
      //client.main(new String[]{"fatcat", "2GUY.A","3BMV.A"});
      //client.main( new String[]{"fatcat", "1EXQ.A","1EX4.B","http://pdb114.rcsb.org:8080/jfatcatserver/align/"} );
      //WebStartMain.main( new String[]{"fatcat_flexible", "1cdg.A", "1tim.B"} );
      //WebStartMain.main( new String[]{"ce", "1tim.B", "1cdg.A"} );
      //WebStartMain.main( new String[]{"ce", "1cdg.A", "1tim.B"} );
      //WebStartMain.main( new String[]{"ce_cp", "1vhr.A","2ihb.A"} );
      //WebStartMain.main( new String[]{"fatcat", "2BC3.B","1SWG.D"} );
      WebStartMain.main(new String[]{"fatcat","1P80.D","2IUF.E"});
      //WebStartMain.main(new String[]{"fatcat","1O08.A","1FEZ.A"});
   }
}
| lgpl-2.1 |
szpak/spock | spock-core/src/main/java/org/spockframework/runtime/JUnitDescriptionGenerator.java | 2752 | /*
* Copyright 2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.spockframework.runtime;
import java.lang.annotation.Annotation;
import org.junit.runner.Description;
import org.spockframework.runtime.model.*;
/**
* Generates and attaches JUnit Description's to a SpecInfo's nodes.
*
* @author Peter Niederwieser
*/
/**
 * Creates JUnit {@link Description}s for the nodes of a {@link SpecInfo}
 * and attaches them to the corresponding model objects.
 *
 * @author Peter Niederwieser
 */
public class JUnitDescriptionGenerator {
  private final SpecInfo spec;

  public JUnitDescriptionGenerator(SpecInfo spec) {
    this.spec = spec;
  }

  /**
   * Describes the spec itself plus every feature and fixture method.
   */
  public void describeSpecMethods() {
    Description specDescription = Description.createSuiteDescription(spec.getReflection());
    spec.setDescription(specDescription);
    for (FeatureInfo feature : spec.getAllFeatures()) {
      describeFeature(feature);
    }
    for (MethodInfo fixtureMethod : spec.getAllFixtureMethods()) {
      describeMethod(fixtureMethod);
    }
  }

  /**
   * Describes the spec and links in its (already described) features as
   * children.
   */
  public void describeSpec() {
    Description specDescription = Description.createSuiteDescription(spec.getReflection());
    spec.setDescription(specDescription);
    // JUnit compatibility: a spec reported to JUnit as "ignored" must not
    // have any child descriptions
    if (spec.isExcluded() || spec.isSkipped()) return;
    for (FeatureInfo feature : spec.getAllFeaturesInExecutionOrder()) {
      // skip excluded features; iteration-reporting features are not added
      // up-front because IDEs don't handle that well
      if (feature.isExcluded() || feature.isReportIterations()) continue;
      specDescription.addChild(feature.getFeatureMethod().getDescription());
    }
  }

  // Describes a feature method and shares that description with the
  // feature's data processor and data provider methods.
  private void describeFeature(FeatureInfo feature) {
    Description featureDescription = describeMethod(feature.getFeatureMethod());
    feature.setDescription(featureDescription);
    if (feature.getDataProcessorMethod() != null) {
      feature.getDataProcessorMethod().setDescription(featureDescription);
    }
    for (DataProviderInfo provider : feature.getDataProviders()) {
      provider.getDataProviderMethod().setDescription(featureDescription);
    }
  }

  // Builds a test description for a single method and attaches it.
  private Description describeMethod(MethodInfo method) {
    Annotation[] annotations;
    if (method.getReflection() == null) {
      annotations = new Annotation[0];
    } else {
      annotations = method.getAnnotations();
    }
    Description description = Description.createTestDescription(spec.getReflection(),
        method.getName(), annotations);
    method.setDescription(description);
    return description;
  }
}
| apache-2.0 |
packet-tracker/onos | incubator/store/src/main/java/org/onosproject/incubator/store/meter/impl/package-info.java | 774 | /*
* Copyright 2015 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A distributed meter store implementation that stores meter data consistently
* across the cluster.
*/
package org.onosproject.incubator.store.meter.impl;
| apache-2.0 |
nikhilvibhav/camel | components/camel-cdi/src/main/java/org/apache/camel/cdi/SyntheticBeanAttributes.java | 2652 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.cdi;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.Set;
import javax.enterprise.context.Dependent;
import javax.enterprise.inject.Vetoed;
import javax.enterprise.inject.spi.BeanManager;
import javax.inject.Named;
import static java.util.Collections.emptySet;
import static java.util.stream.Collectors.toSet;
import static org.apache.camel.cdi.CdiSpiHelper.isAnnotationType;
@Vetoed
class SyntheticBeanAttributes<T> {

    private final BeanManager manager;

    private final SyntheticAnnotated annotated;

    SyntheticBeanAttributes(BeanManager manager, SyntheticAnnotated annotated) {
        this.manager = manager;
        this.annotated = annotated;
    }

    // Adds a qualifier annotation to the synthetic annotated type.
    <A extends Annotation> void addQualifier(A qualifier) {
        annotated.addAnnotation(qualifier);
    }

    // First scope annotation present on the annotated type, or Dependent
    // when none is declared.
    public Class<? extends Annotation> getScope() {
        for (Annotation annotation : annotated.getAnnotations()) {
            Class<? extends Annotation> type = annotation.annotationType();
            if (manager.isScope(type)) {
                return type;
            }
        }
        return Dependent.class;
    }

    // All qualifier annotations declared on the annotated type.
    public Set<Annotation> getQualifiers() {
        return annotated.getAnnotations().stream()
            .filter(annotation -> manager.isQualifier(annotation.annotationType()))
            .collect(toSet());
    }

    // Value of the @Named annotation if present, otherwise null.
    public String getName() {
        return annotated.getAnnotations().stream()
            .filter(isAnnotationType(Named.class))
            .map(annotation -> ((Named) annotation).value())
            .findFirst()
            .orElse(null);
    }

    // Synthetic beans declare no stereotypes.
    public Set<Class<? extends Annotation>> getStereotypes() {
        return emptySet();
    }

    public Set<Type> getTypes() {
        return annotated.getTypeClosure();
    }

    // Synthetic beans are never alternatives.
    public boolean isAlternative() {
        return false;
    }
}
| apache-2.0 |
sdmcraft/jackrabbit | jackrabbit-core/src/main/java/org/apache/jackrabbit/core/query/lucene/DocId.java | 11805 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.core.query.lucene;
import java.io.IOException;
import java.util.BitSet;
import org.apache.jackrabbit.core.id.NodeId;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Implements a document id which can be based on a Node uuid or a lucene
 * document number.
 */
abstract class DocId {

    static final int[] EMPTY = new int[0];

    /**
     * Shared instances for all <code>DocId</code>s with a value smaller than
     * {@link Short#MAX_VALUE}. Caching these avoids one allocation per lookup
     * for the vast majority of document numbers.
     */
    private static final PlainDocId[] LOW_DOC_IDS = new PlainDocId[Short.MAX_VALUE];

    static {
        for (int i = 0; i < LOW_DOC_IDS.length; i++) {
            LOW_DOC_IDS[i] = new PlainDocId(i);
        }
    }

    /**
     * Indicates a null DocId. Will be returned if the root node is asked for
     * its parent.
     */
    static final DocId NULL = new DocId() {

        /**
         * Always returns an empty array.
         *
         * @param reader     the index reader.
         * @param docNumbers an int array for reuse as return value.
         * @return always an empty array.
         */
        final int[] getDocumentNumbers(MultiIndexReader reader,
                                       int[] docNumbers) {
            return EMPTY;
        }

        /**
         * Always returns <code>this</code>.
         *
         * @param offset the offset to apply.
         * @return always <code>this</code>.
         */
        final DocId applyOffset(int offset) {
            return this;
        }

        /**
         * Always returns <code>true</code>.
         *
         * @param deleted the deleted documents.
         * @return always <code>true</code>.
         */
        final boolean isValid(BitSet deleted) {
            return true;
        }
    };

    /**
     * Returns the document numbers of this <code>DocId</code>. An empty array
     * is returned if this id is invalid.
     *
     * @param reader     the IndexReader to resolve this <code>DocId</code>.
     * @param docNumbers an array for reuse. An implementation should use the
     *                   passed array as a container for the return value,
     *                   unless the length of the returned array is different
     *                   from <code>docNumbers</code>. In which case an
     *                   implementation will create a new array with an
     *                   appropriate size.
     * @return the document numbers of this <code>DocId</code> or
     *         empty if it is invalid (e.g. does not exist).
     * @throws IOException if an error occurs while reading from the index.
     */
    abstract int[] getDocumentNumbers(MultiIndexReader reader, int[] docNumbers)
            throws IOException;

    /**
     * Applies an offset to this <code>DocId</code>. The returned <code>DocId</code>
     * may be the same as <code>this</code> if this <code>DocId</code> does
     * not need to know about an offset.
     *
     * @param offset the offset to apply to.
     * @return <code>DocId</code> with <code>offset</code> applied.
     */
    abstract DocId applyOffset(int offset);

    /**
     * Returns <code>true</code> if this <code>DocId</code> is valid against the
     * set of <code>deleted</code> documents; otherwise <code>false</code>.
     *
     * @param deleted the deleted documents.
     * @return <code>true</code> if this <code>DocId</code> is not deleted;
     *         otherwise <code>false</code>.
     */
    abstract boolean isValid(BitSet deleted);

    /**
     * Creates a <code>DocId</code> based on a document number.
     *
     * @param docNumber the document number.
     * @return a <code>DocId</code> based on a document number.
     */
    static DocId create(int docNumber) {
        if (docNumber < Short.MAX_VALUE) {
            // use cached values for docNumbers up to 32k
            return LOW_DOC_IDS[docNumber];
        } else {
            return new PlainDocId(docNumber);
        }
    }

    /**
     * Creates a <code>DocId</code> based on a UUID.
     *
     * @param uuid the UUID
     * @return a <code>DocId</code> based on the UUID.
     * @throws IllegalArgumentException if the <code>uuid</code> is malformed.
     */
    static DocId create(String uuid) {
        return create(new NodeId(uuid));
    }

    /**
     * Creates a <code>DocId</code> based on a node id.
     *
     * @param id the node id
     * @return a <code>DocId</code> based on the node id
     */
    static DocId create(NodeId id) {
        return new UUIDDocId(id);
    }

    /**
     * Creates a <code>DocId</code> that references multiple UUIDs.
     *
     * @param uuids the UUIDs of the referenced nodes.
     * @return a <code>DocId</code> based on multiple node UUIDs.
     */
    static DocId create(String[] uuids) {
        return new MultiUUIDDocId(uuids);
    }

    //--------------------------< internal >------------------------------------

    /**
     * <code>DocId</code> based on a document number.
     */
    private static final class PlainDocId extends DocId {

        /**
         * The lucene document number.
         */
        private final int docNumber;

        /**
         * Creates a <code>DocId</code> based on a document number.
         *
         * @param docNumber the lucene document number.
         */
        PlainDocId(int docNumber) {
            this.docNumber = docNumber;
        }

        /**
         * {@inheritDoc}
         */
        int[] getDocumentNumbers(MultiIndexReader reader, int[] docNumbers) {
            if (docNumbers.length == 1) {
                docNumbers[0] = docNumber;
                return docNumbers;
            } else {
                return new int[]{docNumber};
            }
        }

        /**
         * {@inheritDoc}
         */
        DocId applyOffset(int offset) {
            return new PlainDocId(docNumber + offset);
        }

        /**
         * {@inheritDoc}
         */
        boolean isValid(BitSet deleted) {
            return !deleted.get(docNumber);
        }

        /**
         * Returns a String representation for this <code>DocId</code>.
         *
         * @return a String representation for this <code>DocId</code>.
         */
        public String toString() {
            return "PlainDocId(" + docNumber + ")";
        }
    }

    /**
     * <code>DocId</code> based on a node id.
     */
    private static final class UUIDDocId extends DocId {

        /**
         * The logger instance for this class.
         */
        private static final Logger log = LoggerFactory.getLogger(UUIDDocId.class);

        /**
         * The node identifier.
         */
        private final NodeId id;

        /**
         * The previously calculated foreign segment document id, cached so the
         * (potentially expensive) lookup is only redone when it became stale.
         */
        private ForeignSegmentDocId doc;

        /**
         * Creates a <code>DocId</code> based on a node id.
         *
         * @param id the node id.
         */
        UUIDDocId(NodeId id) {
            this.id = id;
        }

        /**
         * {@inheritDoc}
         */
        int[] getDocumentNumbers(MultiIndexReader reader, int[] docNumbers)
                throws IOException {
            int realDoc = -1;
            ForeignSegmentDocId segDocId = doc;
            if (segDocId != null) {
                realDoc = reader.getDocumentNumber(segDocId);
            }
            if (realDoc == -1) {
                // Cached doc was invalid => create new one
                segDocId = reader.createDocId(id);
                if (segDocId != null) {
                    realDoc = reader.getDocumentNumber(segDocId);
                    doc = segDocId;
                } else {
                    log.warn("Unknown parent node with id {}", id);
                    return EMPTY;
                }
            }
            if (docNumbers.length == 1) {
                docNumbers[0] = realDoc;
                return docNumbers;
            } else {
                return new int[]{realDoc};
            }
        }

        /**
         * This implementation will return <code>this</code>. Document number is
         * not known until resolved in {@link #getDocumentNumbers(MultiIndexReader, int[])}.
         *
         * {@inheritDoc}
         */
        DocId applyOffset(int offset) {
            return this;
        }

        /**
         * Always returns <code>true</code>.
         *
         * @param deleted the deleted documents.
         * @return always <code>true</code>.
         */
        boolean isValid(BitSet deleted) {
            return true;
        }

        /**
         * Returns a String representation for this <code>DocId</code>.
         *
         * @return a String representation for this <code>DocId</code>.
         */
        public String toString() {
            return "UUIDDocId(" + id + ")";
        }
    }

    /**
     * A DocId based on multiple UUIDDocIds.
     */
    private static final class MultiUUIDDocId extends DocId {

        /**
         * The internal uuid based doc ids.
         */
        private final UUIDDocId[] docIds;

        /**
         * @param uuids the uuids of the referenced nodes.
         * @throws IllegalArgumentException if one of the uuids is malformed.
         */
        MultiUUIDDocId(String[] uuids) {
            this.docIds = new UUIDDocId[uuids.length];
            for (int i = 0; i < uuids.length; i++) {
                docIds[i] = new UUIDDocId(new NodeId(uuids[i]));
            }
        }

        /**
         * {@inheritDoc}
         */
        int[] getDocumentNumbers(MultiIndexReader reader, int[] docNumbers)
                throws IOException {
            int[] tmp = new int[1];
            docNumbers = new int[docIds.length];
            for (int i = 0; i < docNumbers.length; i++) {
                docNumbers[i] = docIds[i].getDocumentNumbers(reader, tmp)[0];
            }
            return docNumbers;
        }

        /**
         * This implementation will return <code>this</code>. Document number is
         * not known until resolved in {@link #getDocumentNumbers(MultiIndexReader, int[])}.
         *
         * {@inheritDoc}
         */
        DocId applyOffset(int offset) {
            return this;
        }

        /**
         * Always returns <code>true</code>.
         *
         * @param deleted the deleted documents.
         * @return always <code>true</code>.
         */
        boolean isValid(BitSet deleted) {
            return true;
        }

        /**
         * Returns a String representation for this <code>DocId</code>.
         *
         * @return a String representation for this <code>DocId</code>.
         */
        public String toString() {
            // StringBuilder: this method is purely local, no synchronization needed
            StringBuilder sb = new StringBuilder("MultiUUIDDocId(");
            String separator = "";
            for (UUIDDocId docId : docIds) {
                sb.append(separator);
                separator = ", ";
                sb.append(docId.id);
            }
            sb.append(")");
            return sb.toString();
        }
    }
}
| apache-2.0 |
apache/drill | exec/vector/src/main/java/org/apache/drill/exec/vector/complex/MapVector.java | 12486 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.vector.complex;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.expr.BasicTypeHelper;
import org.apache.drill.exec.expr.holders.ComplexHolder;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.proto.UserBitShared.SerializedField;
import org.apache.drill.exec.record.MaterializedField;
import org.apache.drill.exec.record.TransferPair;
import org.apache.drill.exec.util.CallBack;
import org.apache.drill.exec.util.JsonStringHashMap;
import org.apache.drill.exec.vector.BaseValueVector;
import org.apache.drill.exec.vector.SchemaChangeCallBack;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.complex.RepeatedMapVector.MapSingleCopier;
import org.apache.drill.exec.vector.complex.impl.SingleMapReaderImpl;
import org.apache.drill.exec.vector.complex.reader.FieldReader;
import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
import org.apache.drill.shaded.guava.com.google.common.collect.Ordering;
import org.apache.drill.shaded.guava.com.google.common.primitives.Ints;
import io.netty.buffer.DrillBuf;
/**
 * A required (non-repeated) MAP vector: a container of named child
 * {@link ValueVector}s that together represent one map/struct column. The
 * child vectors are owned and named by {@link AbstractMapVector}; this class
 * adds value-count tracking, (de)serialization, transfer and copy support.
 */
public class MapVector extends AbstractMapVector {

  public final static MajorType TYPE = Types.required(MinorType.MAP);

  private final SingleMapReaderImpl reader = new SingleMapReaderImpl(MapVector.this);
  private final Accessor accessor = new Accessor();
  private final Mutator mutator = new Mutator();

  // Record count of this map. Tracked here because the map itself holds no
  // buffers; the data lives in the child vectors.
  private int valueCount;

  public MapVector(String path, BufferAllocator allocator, CallBack callBack) {
    this(MaterializedField.create(path, TYPE), allocator, callBack);
  }

  public MapVector(MaterializedField field, BufferAllocator allocator, CallBack callBack) {
    super(field, allocator, callBack);
  }

  @Override
  public FieldReader getReader() { return reader; }

  // Cached copier pairs, reused across copyFromSafe() calls while the source
  // vector stays the same. Invalidated when a transfer pair is created
  // (see MapTransferPair constructor).
  transient private MapTransferPair ephPair;
  transient private MapSingleCopier ephPair2;

  /**
   * Copies one value from another MapVector into this vector, reusing a
   * cached transfer pair when the source is unchanged.
   */
  public void copyFromSafe(int fromIndex, int thisIndex, MapVector from) {
    if (ephPair == null || ephPair.from != from) {
      ephPair = (MapTransferPair) from.makeTransferPair(this);
    }
    ephPair.copyValueSafe(fromIndex, thisIndex);
  }

  /**
   * Copies one inner value from a RepeatedMapVector into this vector, reusing
   * a cached singular copier when the source is unchanged.
   */
  public void copyFromSafe(int fromSubIndex, int thisIndex, RepeatedMapVector from) {
    if (ephPair2 == null || ephPair2.from != from) {
      ephPair2 = from.makeSingularCopier(this);
    }
    ephPair2.copySafe(fromSubIndex, thisIndex);
  }

  @Override
  public void copyEntry(int toIndex, ValueVector from, int fromIndex) {
    copyFromSafe(fromIndex, toIndex, (MapVector) from);
  }

  @Override
  protected boolean supportsDirectRead() { return true; }

  public Iterator<String> fieldNameIterator() {
    return getChildFieldNames().iterator();
  }

  @Override
  public void setInitialCapacity(int numRecords) {
    // The map has no buffers of its own; delegate to every child.
    for (ValueVector v : this) {
      v.setInitialCapacity(numRecords);
    }
  }

  @Override
  public int getBufferSize() {
    if (valueCount == 0 || size() == 0) {
      return 0;
    }
    // Accumulate in a long to avoid intermediate int overflow when summing
    // many child buffer sizes.
    long buffer = 0;
    for (ValueVector v : this) {
      buffer += v.getBufferSize();
    }

    return (int) buffer;
  }

  @Override
  public int getAllocatedSize() {
    int size = 0;
    for (ValueVector v : this) {
      size += v.getAllocatedSize();
    }
    return size;
  }

  @Override
  public int getBufferSizeFor(int valueCount) {
    if (valueCount == 0) {
      return 0;
    }

    long bufferSize = 0;
    for (ValueVector v : this) {
      bufferSize += v.getBufferSizeFor(valueCount);
    }

    return (int) bufferSize;
  }

  @Override
  public DrillBuf[] getBuffers(boolean clear) {
    return super.getBuffers(clear);
  }

  @Override
  public TransferPair getTransferPair(BufferAllocator allocator) {
    return new MapTransferPair(this, getField().getName(), allocator);
  }

  @Override
  public TransferPair makeTransferPair(ValueVector to) {
    return new MapTransferPair(this, (MapVector) to);
  }

  @Override
  public TransferPair getTransferPair(String ref, BufferAllocator allocator) {
    return new MapTransferPair(this, ref, allocator);
  }

  /**
   * Transfer pair for map vectors: wraps one transfer pair per child vector
   * and applies every operation to all of them.
   */
  protected static class MapTransferPair implements TransferPair{
    private final TransferPair[] pairs;
    private final MapVector from;
    private final MapVector to;

    public MapTransferPair(MapVector from, String path, BufferAllocator allocator) {
      this(from, new MapVector(MaterializedField.create(path, TYPE), allocator, new SchemaChangeCallBack()), false);
    }

    public MapTransferPair(MapVector from, MapVector to) {
      this(from, to, true);
    }

    protected MapTransferPair(MapVector from, MapVector to, boolean allocate) {
      this.from = from;
      this.to = to;
      this.pairs = new TransferPair[from.size()];
      // Invalidate the target's cached copiers: its child set may change below.
      this.to.ephPair = null;
      this.to.ephPair2 = null;

      int i = 0;
      ValueVector vector;
      for (String child:from.getChildFieldNames()) {
        int preSize = to.size();
        vector = from.getChild(child);
        if (vector == null) {
          continue;
        }
        //DRILL-1872: we add the child fields for the vector, looking up the field by name. For a map vector,
        // the child fields may be nested fields of the top level child. For example if the structure
        // of a child field is oa.oab.oabc then we add oa, then add oab to oa then oabc to oab.
        // But the children member of a Materialized field is a HashSet. If the fields are added in the
        // children HashSet, and the hashCode of the Materialized field includes the hash code of the
        // children, the hashCode value of oa changes *after* the field has been added to the HashSet.
        // (This is similar to what happens in ScanBatch where the children cannot be added till they are
        // read). To take care of this, we ensure that the hashCode of the MaterializedField does not
        // include the hashCode of the children but is based only on MaterializedField$key.
        ValueVector newVector = to.addOrGet(child, vector.getField().getType(), vector.getClass());
        if (allocate && to.size() != preSize) {
          // Only allocate for children that were newly added to the target.
          newVector.allocateNew();
        }
        pairs[i++] = vector.makeTransferPair(newVector);
      }
    }

    @Override
    public void transfer() {
      for (TransferPair p : pairs) {
        p.transfer();
      }
      to.valueCount = from.valueCount;
      from.clear();
    }

    @Override
    public ValueVector getTo() {
      return to;
    }

    @Override
    public void copyValueSafe(int from, int to) {
      for (TransferPair p : pairs) {
        p.copyValueSafe(from, to);
      }
    }

    @Override
    public void splitAndTransfer(int startIndex, int length) {
      for (TransferPair p : pairs) {
        p.splitAndTransfer(startIndex, length);
      }
      to.getMutator().setValueCount(length);
    }
  }

  @Override
  public int getValueCapacity() {
    if (size() == 0) {
      return 0;
    }

    // The capacity of the map is bounded by its smallest child.
    Ordering<ValueVector> natural = new Ordering<ValueVector>() {
      @Override
      public int compare(@Nullable ValueVector left, @Nullable ValueVector right) {
        return Ints.compare(
            Preconditions.checkNotNull(left).getValueCapacity(),
            Preconditions.checkNotNull(right).getValueCapacity()
        );
      }
    };

    return natural.min(getChildren()).getValueCapacity();
  }

  @Override
  public Accessor getAccessor() {
    return accessor;
  }

  /**
   * Loads this map (and its children) from serialized metadata plus a data
   * buffer, creating child vectors that do not exist yet.
   */
  @Override
  public void load(SerializedField metadata, DrillBuf buf) {
    List<SerializedField> fields = metadata.getChildList();
    valueCount = metadata.getValueCount();

    int bufOffset = 0;
    for (SerializedField child : fields) {
      MaterializedField fieldDef = MaterializedField.create(child);

      ValueVector vector = getChild(fieldDef.getName());
      if (vector == null) {
        // if we arrive here, we didn't have a matching vector.
        vector = BasicTypeHelper.getNewVector(fieldDef, allocator);
        putChild(fieldDef.getName(), vector);
      }
      if (child.getValueCount() == 0) {
        vector.clear();
      } else {
        vector.load(child, buf.slice(bufOffset, child.getBufferLength()));
      }
      bufOffset += child.getBufferLength();
    }

    // We should have consumed all bytes written into the buffer
    // during deserialization.
    assert bufOffset == buf.writerIndex();
  }

  @Override
  public SerializedField getMetadata() {
    SerializedField.Builder b = getField()
        .getAsBuilder()
        .setBufferLength(getBufferSize())
        .setValueCount(valueCount);

    for(ValueVector v : getChildren()) {
      b.addChild(v.getMetadata());
    }
    return b.build();
  }

  @Override
  public Mutator getMutator() {
    return mutator;
  }

  /**
   * Read-side access: materializes one record as a name-to-value map.
   */
  public class Accessor extends BaseValueVector.BaseAccessor {

    @Override
    public Object getObject(int index) {
      Map<String, Object> vv = new JsonStringHashMap<>();
      for (String child:getChildFieldNames()) {
        ValueVector v = getChild(child);
        // TODO(DRILL-4001): Resolve this hack:
        // The index/value count check in the following if statement is a hack
        // to work around the current fact that RecordBatchLoader.load and
        // MapVector.load leave child vectors with a length of zero (as opposed
        // to matching the lengths of siblings and the parent map vector)
        // because they don't remove (or set the lengths of) vectors from
        // previous batches that aren't in the current batch.
        if (v != null && index < v.getAccessor().getValueCount()) {
          Object value = v.getAccessor().getObject(index);
          if (value != null) {
            vv.put(child, value);
          }
        }
      }
      return vv;
    }

    public void get(int index, ComplexHolder holder) {
      reader.setPosition(index);
      holder.reader = reader;
    }

    @Override
    public int getValueCount() {
      return valueCount;
    }
  }

  public ValueVector getVectorById(int id) {
    return getChildByOrdinal(id);
  }

  /**
   * Set the value count for the map without setting the counts for the contained
   * vectors. Use this only when the values of the contained vectors are set
   * elsewhere in the code.
   *
   * @param valueCount number of items in the map
   */
  public void setMapValueCount(int valueCount) {
    this.valueCount = valueCount;
  }

  /**
   * Write-side access: propagates the value count to all children.
   */
  public class Mutator extends BaseValueVector.BaseMutator {

    @Override
    public void setValueCount(int valueCount) {
      for (ValueVector v : getChildren()) {
        v.getMutator().setValueCount(valueCount);
      }
      setMapValueCount(valueCount);
    }

    @Override
    public void reset() { }

    @Override
    public void generateTestData(int values) { }
  }

  @Override
  public void clear() {
    for (ValueVector v : getChildren()) {
      v.clear();
    }
    valueCount = 0;
  }

  @Override
  public void close() {
    Collection<ValueVector> vectors = getChildren();
    for (ValueVector v : vectors) {
      v.close();
    }
    vectors.clear();
    valueCount = 0;
    super.close();
  }

  @Override
  public void toNullable(ValueVector nullableVector) {
    // A required MAP cannot be converted to a nullable vector.
    throw new UnsupportedOperationException();
  }

  @Override
  public void exchange(ValueVector other) {
    // Swap children via the superclass, then swap the record counts.
    super.exchange(other);
    MapVector otherMap = (MapVector) other;
    int temp = otherMap.valueCount;
    otherMap.valueCount = valueCount;
    valueCount = temp;
  }
}
| apache-2.0 |
stevem999/gocd | server/src/main/java/com/thoughtworks/studios/shine/XSLTTransformerExecutor.java | 953 | /*
* Copyright 2016 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.studios.shine;
import com.thoughtworks.studios.shine.semweb.grddl.GrddlTransformException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
/**
 * Callback that is handed a configured XSLT {@link Transformer} and produces
 * a result of type {@code T} from it.
 *
 * @param <T> the type of result produced by {@link #execute(Transformer)}
 */
public interface XSLTTransformerExecutor<T> {

    /**
     * Runs this executor against the supplied transformer.
     *
     * @param transformer the XSLT transformer to execute with
     * @return the result of the execution
     * @throws TransformerException if the XSLT transformation itself fails
     * @throws GrddlTransformException if the GRDDL transformation fails
     */
    T execute(Transformer transformer) throws TransformerException, GrddlTransformException;
}
| apache-2.0 |
jsdjayanga/wso2-axis2 | modules/testutils/src/main/java/org/apache/axis2/testutils/UtilServer.java | 4156 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.axis2.testutils;
import java.io.File;
import javax.xml.namespace.QName;
import org.apache.axis2.AxisFault;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.axis2.description.AxisService;
import org.apache.axis2.engine.ListenerManager;
import org.apache.axis2.transport.http.SimpleHTTPServer;
/**
 * Test utility that manages a single embedded {@link SimpleHTTPServer}
 * instance for integration tests. All state is static; at most one server
 * can be running at a time, guarded by the class-level synchronization of
 * the public methods.
 */
public class UtilServer {
    private static SimpleHTTPServer receiver;

    /**
     * Deploys the given service on the running server. The server must have
     * been started via {@link #start(int, String)} first.
     */
    public static synchronized void deployService(AxisService service)
            throws AxisFault {
        receiver.getConfigurationContext().getAxisConfiguration().addService(
                service);
    }

    /**
     * Removes a previously deployed service (identified by the local part of
     * the given QName) from the running server.
     */
    public static synchronized void unDeployService(QName service)
            throws AxisFault {
        receiver.getConfigurationContext().getAxisConfiguration()
                .removeService(service.getLocalPart());
    }

    /**
     * Starts the server on the given port with the default axis2.xml found
     * under {@code <repository>/conf/axis2.xml}.
     */
    public static synchronized void start(int testingPort, String repository) throws Exception {
        start(testingPort, repository, null);
    }

    /**
     * Starts the server on the given port using the supplied repository and
     * axis2.xml configuration file.
     *
     * @param testingPort port to listen on
     * @param repository  path to the Axis2 repository directory (must exist)
     * @param axis2xml    path to axis2.xml, or {@code null} to use the default
     *                    located under the repository
     * @throws IllegalStateException if a server is already running
     */
    public static synchronized void start(int testingPort, String repository, String axis2xml) throws Exception {
        if (receiver != null) {
            System.out.println("Server already started !!");
            throw new IllegalStateException("Server already started");
        }
        ConfigurationContext er = getNewConfigurationContext(repository, axis2xml);

        receiver = new SimpleHTTPServer(er, testingPort);

        try {
            receiver.start();
        } catch (AxisFault e) {
            // A failed start must not leave the static reference behind;
            // otherwise every later start() fails with "already started"
            // even though no server is running.
            receiver = null;
            System.out.println("Error occurred starting the server. " + e.getMessage());
            throw e;
        }
        System.out.print("Server started on port " + testingPort + ".....");
    }

    /**
     * Builds a fresh ConfigurationContext from the given repository directory
     * and axis2.xml (defaulting to {@code <repository>/conf/axis2.xml}).
     *
     * @throws Exception if the repository directory does not exist
     */
    public static ConfigurationContext getNewConfigurationContext(
            String repository, String axis2xml) throws Exception {
        File file = new File(repository);
        if (!file.exists()) {
            throw new Exception("repository directory "
                    + file.getAbsolutePath() + " does not exists");
        }
        if (axis2xml == null) {
            axis2xml = file.getAbsolutePath() + "/conf/axis2.xml";
        }
        return ConfigurationContextFactory.createConfigurationContextFromFileSystem(file.getAbsolutePath(), axis2xml);
    }

    /**
     * Stops the running server, waits until it has shut down, and releases
     * the static reference so the server can be started again.
     *
     * @throws IllegalStateException if no server is running
     */
    public static synchronized void stop() throws AxisFault {
        if (receiver == null) {
            throw new IllegalStateException("Server not started");
        }
        receiver.stop();
        waitUntilStopped();
        // tp.doStop();
        System.out.print("Server stopped .....");
        ListenerManager listenerManager =
                receiver.getConfigurationContext().getListenerManager();
        if (listenerManager != null) {
            listenerManager.stop();
        }
        receiver = null;
    }

    public static ConfigurationContext getConfigurationContext() {
        return receiver.getConfigurationContext();
    }

    /**
     * Polls once a second until the server reports it is no longer running,
     * or until the waiting thread is interrupted.
     *
     * @return always {@code true} (kept for backward compatibility)
     */
    public static boolean waitUntilStopped() {
        System.out.println("Waiting until receiver stopped ..");
        while (receiver != null && receiver.isRunning()) {
            try {
                Thread.sleep(1000);
            } catch (InterruptedException e1) {
                // Restore the interrupt status and stop waiting so callers
                // can observe the interruption; silently clearing the flag
                // would hide the cancellation request.
                Thread.currentThread().interrupt();
                break;
            }
        }
        return true;
    }
}
| apache-2.0 |
Digas29/bazel | src/main/java/com/google/devtools/build/lib/rules/android/AndroidIdlProvider.java | 2588 | // Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.android;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.TransitiveInfoProvider;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
/**
 * Configured targets implementing this provider can contribute Android IDL information to the
 * compilation.
 *
 * <p>All three nested sets are transitive: they cover this target plus its
 * dependencies. Instances are immutable; use {@link #EMPTY} for targets that
 * contribute no IDL information.
 */
@Immutable
public final class AndroidIdlProvider implements TransitiveInfoProvider {

  // Provider with all three sets empty, shared to avoid re-allocating for
  // the common case of targets without IDL sources.
  public static final AndroidIdlProvider EMPTY = new AndroidIdlProvider(
      NestedSetBuilder.<String>emptySet(Order.STABLE_ORDER),
      NestedSetBuilder.<Artifact>emptySet(Order.STABLE_ORDER),
      NestedSetBuilder.<Artifact>emptySet(Order.STABLE_ORDER));

  private final NestedSet<String> transitiveIdlImportRoots;
  private final NestedSet<Artifact> transitiveIdlImports;
  private final NestedSet<Artifact> transitiveIdlJars;

  /**
   * Creates the provider.
   *
   * @param transitiveIdlImportRoots IDL import roots of this target and its deps
   * @param transitiveIdlImports IDL source files of this target and its deps
   * @param transitiveIdlJars IDL class and source jars of this target and its deps
   */
  public AndroidIdlProvider(
      NestedSet<String> transitiveIdlImportRoots,
      NestedSet<Artifact> transitiveIdlImports,
      NestedSet<Artifact> transitiveIdlJars) {
    this.transitiveIdlImportRoots = transitiveIdlImportRoots;
    this.transitiveIdlImports = transitiveIdlImports;
    this.transitiveIdlJars = transitiveIdlJars;
  }

  /**
   * The set of IDL import roots need for compiling the IDL sources in the transitive closure.
   */
  public NestedSet<String> getTransitiveIdlImportRoots() {
    return transitiveIdlImportRoots;
  }

  /**
   * The IDL files in the transitive closure.
   */
  public NestedSet<Artifact> getTransitiveIdlImports() {
    return transitiveIdlImports;
  }

  /**
   * The IDL jars in the transitive closure, both class and source jars.
   */
  public NestedSet<Artifact> getTransitiveIdlJars() {
    return transitiveIdlJars;
  }
}
| apache-2.0 |