repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
jdahlstrom/vaadin.react | shared/src/main/java/com/vaadin/shared/data/DataRequestRpc.java | 1787 | /*
* Copyright 2000-2014 Vaadin Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.vaadin.shared.data;
import com.vaadin.shared.annotations.Delayed;
import com.vaadin.shared.annotations.NoLoadingIndicator;
import com.vaadin.shared.communication.ServerRpc;
import elemental.json.JsonArray;
/**
* RPC interface used by the client to request container data from the server.
*
* @since 7.4
* @author Vaadin Ltd
*/
public interface DataRequestRpc extends ServerRpc {

    /**
     * Requests a range of rows from the server, reporting at the same time
     * which rows the client currently holds in its cache.
     *
     * @param firstRowIndex
     *            the index of the first requested row
     * @param numberOfRows
     *            the number of requested rows
     * @param firstCachedRowIndex
     *            the index of the first cached row
     * @param cacheSize
     *            the number of cached rows
     */
    @NoLoadingIndicator
    void requestRows(int firstRowIndex, int numberOfRows,
            int firstCachedRowIndex, int cacheSize);

    /**
     * Notifies the server that the items identified by the given keys are no
     * longer present in the client-side cache.
     *
     * @since 7.6
     * @param rowKeys
     *            array of dropped keys mapping to items
     */
    @Delayed
    @NoLoadingIndicator
    void dropRows(JsonArray rowKeys);
}
| apache-2.0 |
fdefalco/flyway | flyway-core/src/test/java/org/flywaydb/core/internal/dbsupport/vertica/VerticaDbSupportMediumTest.java | 2855 | /**
* Copyright 2010-2016 Boxfuse GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flywaydb.core.internal.dbsupport.vertica;
import org.flywaydb.core.internal.dbsupport.Schema;
import org.flywaydb.core.internal.util.jdbc.DriverDataSource;
import org.flywaydb.core.internal.util.jdbc.JdbcUtils;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.flywaydb.core.DbCategory;
import javax.sql.DataSource;
import java.io.File;
import java.io.FileInputStream;
import java.sql.Connection;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
/**
* Test for VerticaDbSupport.
*/
@Category(DbCategory.Vertica.class)
public class VerticaDbSupportMediumTest {

    /**
     * Checks that the search_path is extended and not overwritten so that objects in PUBLIC can still be found.
     */
    @Test
    public void setCurrentSchema() throws Exception {
        Connection connection = createDataSource().getConnection();
        try {
            VerticaDbSupport dbSupport = new VerticaDbSupport(connection);
            Schema schema = dbSupport.getSchema("search_path_test");
            schema.create();
            try {
                dbSupport.changeCurrentSchemaTo(dbSupport.getSchema("search_path_test"));
                String searchPath = dbSupport.doGetCurrentSchemaName();
                assertEquals("search_path_test, \"$user\", public, v_catalog, v_monitor, v_internal", searchPath);
            } finally {
                // Drop the test schema even when the assertion above fails, so
                // a failed run does not leave state behind for the next one.
                schema.drop();
            }
        } finally {
            // Release the JDBC connection on every code path (the original
            // leaked it whenever the test body threw).
            JdbcUtils.closeConnection(connection);
        }
    }

    /**
     * Creates a datasource for use in tests. Connection settings are read from
     * ~/flyway-mediumtests.properties when present, with sensible defaults otherwise.
     *
     * @return The new datasource.
     */
    private DataSource createDataSource() throws Exception {
        File customPropertiesFile = new File(System.getProperty("user.home") + "/flyway-mediumtests.properties");
        Properties customProperties = new Properties();
        if (customPropertiesFile.canRead()) {
            // try-with-resources: the original never closed this stream.
            try (FileInputStream in = new FileInputStream(customPropertiesFile)) {
                customProperties.load(in);
            }
        }
        String user = customProperties.getProperty("vertica.user", "dbadmin");
        String password = customProperties.getProperty("vertica.password", "flyway");
        String url = customProperties.getProperty("vertica.url", "jdbc:vertica://localhost/flyway");
        return new DriverDataSource(Thread.currentThread().getContextClassLoader(), null, url, user, password);
    }
}
| apache-2.0 |
rpau/java-symbol-solver | java-symbol-solver-testing/src/test/resources/javasymbolsolver_0_6_0/src/java-symbol-solver-core/com/github/javaparser/symbolsolver/resolution/typesolvers/CombinedTypeSolver.java | 2410 | /*
* Copyright 2016 Federico Tomassetti
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.javaparser.symbolsolver.resolution.typesolvers;
import com.github.javaparser.symbolsolver.javaparsermodel.UnsolvedSymbolException;
import com.github.javaparser.symbolsolver.model.declarations.ReferenceTypeDeclaration;
import com.github.javaparser.symbolsolver.model.resolution.SymbolReference;
import com.github.javaparser.symbolsolver.model.resolution.TypeSolver;
import java.util.ArrayList;
import java.util.List;
/**
* @author Federico Tomassetti
*/
public class CombinedTypeSolver implements TypeSolver {

    private TypeSolver parent;
    private List<TypeSolver> elements = new ArrayList<>();

    /**
     * Builds a combined solver that delegates to the given solvers, in order.
     */
    public CombinedTypeSolver(TypeSolver... typeSolvers) {
        for (TypeSolver solver : typeSolvers) {
            add(solver);
        }
    }

    /** Registers an additional delegate solver and re-parents it to this one. */
    public void add(TypeSolver typeSolver) {
        elements.add(typeSolver);
        typeSolver.setParent(this);
    }

    @Override
    public TypeSolver getParent() {
        return this.parent;
    }

    @Override
    public void setParent(TypeSolver parent) {
        this.parent = parent;
    }

    /**
     * Asks each registered solver in registration order; the first solved
     * reference wins. Yields an unsolved reference when none succeeds.
     */
    @Override
    public SymbolReference<ReferenceTypeDeclaration> tryToSolveType(String name) {
        for (TypeSolver solver : elements) {
            SymbolReference<ReferenceTypeDeclaration> candidate = solver.tryToSolveType(name);
            if (candidate.isSolved()) {
                return candidate;
            }
        }
        return SymbolReference.unsolved(ReferenceTypeDeclaration.class);
    }

    @Override
    public ReferenceTypeDeclaration solveType(String name) throws UnsolvedSymbolException {
        SymbolReference<ReferenceTypeDeclaration> reference = tryToSolveType(name);
        if (!reference.isSolved()) {
            throw new UnsolvedSymbolException(name);
        }
        return reference.getCorrespondingDeclaration();
    }
}
| mit |
jonathan-major/Rapture | Libs/RaptureAPI/src/main/java/rapture/notification/MessageNotificationManager.java | 6517 | /**
* The MIT License (MIT)
*
* Copyright (c) 2011-2016 Incapture Technologies LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rapture.notification;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArraySet;

import org.apache.log4j.Logger;

import rapture.common.CallingContext;
import rapture.common.NotificationInfo;
import rapture.common.NotificationResult;
import rapture.common.api.NotificationApi;
import rapture.common.exception.ExceptionToString;
import rapture.common.exception.RaptureException;
import rapture.common.impl.jackson.JacksonUtil;
/**
* The base notification manager has the logic for notifications
*
* @author amkimian
*
*/
public class MessageNotificationManager {
    private NotificationApi notificationApi;
    private NotificationApiRetriever retriever;
    private Long currentEpoch;
    private String notificationName;
    private CallingContext context;
    // Listeners keyed by message type. A ConcurrentMap is required because
    // RefreshThread reads this map while caller threads register/deregister
    // listeners concurrently; the values are CopyOnWriteArraySets so that
    // handleReference can iterate them safely during modification.
    private ConcurrentMap<String, Set<RaptureMessageListener<NotificationMessage>>> subscriptions = new ConcurrentHashMap<String, Set<RaptureMessageListener<NotificationMessage>>>();
    private RefreshThread thread;
    private static Logger log = Logger.getLogger(MessageNotificationManager.class);

    public MessageNotificationManager(NotificationApiRetriever retriever, String notificationName) {
        this.retriever = retriever;
        this.notificationName = notificationName;
        resetApi();
        currentEpoch = notificationApi.getLatestNotificationEpoch(context, notificationName);
    }

    // Re-acquires the notification API handle and calling context, e.g. after
    // a RaptureException suggests the current connection has gone stale.
    private void resetApi() {
        notificationApi = retriever.getNotification();
        context = retriever.getCallingContext();
    }

    /** Starts the background thread that polls the server for new notifications. */
    public void startNotificationManager() {
        thread = new RefreshThread(notificationName);
        thread.start();
    }

    /** Stops the background polling thread, interrupting it and waiting for it to die. */
    public void stopNotificationManager() {
        if (thread != null) {
            thread.setQuit();
            // kill it right away.
            thread.interrupt();
            try {
                thread.join();
            } catch (InterruptedException e) {
                // Preserve the caller's interrupt status rather than swallowing it.
                Thread.currentThread().interrupt();
            }
            thread = null;
        }
    }

    /**
     * Registers a listener for messages of the given type. Safe to call from
     * any thread; putIfAbsent closes the check-then-act race the original
     * HashMap-based implementation had between two concurrent registrations.
     */
    public void registerSubscription(String msgType, RaptureMessageListener<NotificationMessage> listener) {
        Set<RaptureMessageListener<NotificationMessage>> listeners = subscriptions.get(msgType);
        if (listeners == null) {
            // CopyOnWriteArraySet eliminates a ConcurrentModificationException in handleReference.
            // I would have used ConcurrentSkipListSet, but then the RaptureMessageListener elements
            // would have to support Comparable and it's too big a change
            Set<RaptureMessageListener<NotificationMessage>> created = new CopyOnWriteArraySet<RaptureMessageListener<NotificationMessage>>();
            listeners = subscriptions.putIfAbsent(msgType, created);
            if (listeners == null) {
                listeners = created;
            }
        }
        listeners.add(listener);
    }

    private void deregisterSubscription(String notificationName, RaptureMessageListener<NotificationMessage> listener) {
        Set<RaptureMessageListener<NotificationMessage>> listeners = subscriptions.get(notificationName);
        if (listeners != null) {
            listeners.remove(listener);
        }
    }

    /** Removes the listener from every message type it was registered for. */
    public void deregisterAllSubscriptions(RaptureMessageListener<NotificationMessage> listener) {
        for (String key : subscriptions.keySet()) {
            deregisterSubscription(key, listener);
        }
    }

    /** Serializes the message to JSON and publishes it under its message type. */
    public void publishMessage(NotificationMessage msg) {
        String content = JacksonUtil.jsonFromObject(msg);
        String reference = msg.getMessageType();
        notificationApi.publishNotification(context, notificationName, reference, content, NotificationType.STRING.toString());
    }

    // Background poller: repeatedly fetches notifications newer than
    // currentEpoch and dispatches them to registered listeners.
    class RefreshThread extends Thread {
        // volatile: written by the stopping thread, read by this thread's run loop.
        private volatile boolean shouldQuit = false;

        public RefreshThread(String name) {
            this.setName("MRThread-" + name);
        }

        public void setQuit() {
            shouldQuit = true;
        }

        @Override
        public void run() {
            while (!shouldQuit) {
                try {
                    NotificationResult result = notificationApi.findNotificationsAfterEpoch(context, notificationName, currentEpoch);
                    currentEpoch = result.getCurrentEpoch();
                    for (String change : result.getReferences()) {
                        handleReference(change);
                    }
                } catch (RaptureException e) {
                    // The API connection may have gone stale; rebuild it and retry.
                    resetApi();
                }
                if (!shouldQuit) {
                    try {
                        sleep(1000);
                    } catch (InterruptedException e) {
                        // Interruption is the stop signal; the loop guard re-checks shouldQuit.
                    }
                }
            }
        }
    }

    /**
     * Dispatches a single notification reference to the listeners registered
     * for its message type. Listener failures are logged and do not prevent
     * delivery to the remaining listeners.
     */
    public void handleReference(String change) {
        // This change has occurred, do we need to signal someone?
        if (subscriptions.isEmpty()) {
            return;
        }
        NotificationInfo info = notificationApi.getNotification(context, notificationName, change);
        if (subscriptions.containsKey(info.getReference())) {
            Set<RaptureMessageListener<NotificationMessage>> refSubscriptions = subscriptions.get(info.getReference());
            for (RaptureMessageListener<NotificationMessage> subscription : refSubscriptions) {
                try {
                    NotificationMessage msg = JacksonUtil.objectFromJson(info.getContent(), NotificationMessage.class);
                    subscription.signalMessage(msg);
                } catch (Exception e) {
                    // at least acknowledge it
                    log.debug(ExceptionToString.format(e));
                }
            }
        }
    }
}
| mit |
SpongePowered/SpongeCommon | src/main/java/org/spongepowered/common/world/gen/populators/SnowPopulator.java | 2804 | /*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.common.world.gen.populators;
import com.flowpowered.math.vector.Vector3i;
import net.minecraft.init.Blocks;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import org.spongepowered.api.world.extent.Extent;
import org.spongepowered.api.world.gen.Populator;
import org.spongepowered.api.world.gen.PopulatorType;
import org.spongepowered.common.world.gen.InternalPopulatorTypes;
import java.util.Random;
public class SnowPopulator implements Populator {

    @Override
    public PopulatorType getType() {
        return InternalPopulatorTypes.SNOW;
    }

    /**
     * Walks every X/Z column of the extent, freezing water one block below the
     * precipitation height and laying a snow layer on top where permitted.
     */
    @Override
    public void populate(org.spongepowered.api.world.World world, Extent extent, Random random) {
        Vector3i origin = extent.getBlockMin();
        Vector3i dimensions = extent.getBlockSize();
        World mcWorld = (World) world;
        BlockPos base = new BlockPos(origin.getX(), origin.getY(), origin.getZ());
        for (int dx = 0; dx < dimensions.getX(); ++dx) {
            for (int dz = 0; dz < dimensions.getZ(); ++dz) {
                BlockPos surface = mcWorld.getPrecipitationHeight(base.add(dx, 0, dz));
                BlockPos belowSurface = surface.down();
                // Turn freezable water just below the surface into ice.
                if (mcWorld.canBlockFreezeWater(belowSurface)) {
                    mcWorld.setBlockState(belowSurface, Blocks.ICE.getDefaultState(), 2);
                }
                // Place a snow layer on the surface block where snow can settle.
                if (mcWorld.canSnowAt(surface, true)) {
                    mcWorld.setBlockState(surface, Blocks.SNOW_LAYER.getDefaultState(), 2);
                }
            }
        }
    }
}
| mit |
RallySoftware/eclipselink.runtime | foundation/eclipselink.core.test/src/org/eclipse/persistence/testing/tests/unitofwork/referencesettings/WeakReferenceTest.java | 1559 | package org.eclipse.persistence.testing.tests.unitofwork.referencesettings;
import java.math.BigDecimal;
import org.eclipse.persistence.config.ReferenceMode;
import org.eclipse.persistence.internal.sessions.UnitOfWorkImpl;
import org.eclipse.persistence.sessions.UnitOfWork;
import org.eclipse.persistence.testing.framework.AutoVerifyTestCase;
import org.eclipse.persistence.testing.tests.unitofwork.changeflag.model.ALCTEmployee;
import org.eclipse.persistence.testing.framework.TestErrorException;
public class WeakReferenceTest extends AutoVerifyTestCase {
    // Number of filler Longs allocated purely to create memory pressure and
    // encourage the JVM to clear weak references.
    private static final int FILLER_COUNT = 10000000;

    /**
     * Verifies that a WEAK-reference unit of work releases registered clones
     * under memory pressure: after forcing GC, the clone mapping must have
     * shrunk back from the 200 registered objects.
     */
    public void test(){
        UnitOfWork uow = getSession().acquireUnitOfWork(ReferenceMode.WEAK);
        int size = uow.readAllObjects(ALCTEmployee.class).size();
        for (int i = 0; i < 200; ++i){
            //force cacheKey cleanup
            uow.setShouldNewObjectsBeCached(true);
            ALCTEmployee emp = new ALCTEmployee();
            emp.setId(new BigDecimal(i));
            uow.registerObject(emp);
        }
        try{
            // The array is intentionally unused: it only exists to hold strong
            // references while we exhaust memory.
            Long[] arr = new Long[FILLER_COUNT];
            for (int i = 0; i < FILLER_COUNT; ++i){
                arr[i] = Long.valueOf(i);  // was the deprecated new Long(i)
            }
            System.gc();
            try{
                // Thread.sleep is static; the original called it via
                // Thread.currentThread(), which obscures that fact.
                Thread.sleep(200);
            }catch (InterruptedException ex){
                // Restore the interrupt flag instead of silently dropping it.
                Thread.currentThread().interrupt();
            }
            System.gc();
        }catch (Error er){
            // An OutOfMemoryError here is acceptable: the allocation above is
            // only a means of pressuring the garbage collector.
        }
        if (((UnitOfWorkImpl)uow).getCloneMapping().size() == size){
            throw new TestErrorException("Did not release weak references.");
        }
    }
}
;
| epl-1.0 |
RallySoftware/eclipselink.runtime | utils/eclipselink.utils.workbench/uitools/source/org/eclipse/persistence/tools/workbench/uitools/swing/TriStateCheckBox.java | 12303 | /*
* Copyright (c) 2006, 2015, Oracle. All rights reserved.
*
* This software is the proprietary information of Oracle Corporation.
* Use is subject to license terms.
*/
package org.eclipse.persistence.tools.workbench.uitools.swing;
import java.awt.event.ActionListener;
import java.awt.event.ItemListener;
import javax.swing.ButtonGroup;
import javax.swing.ButtonModel;
import javax.swing.Icon;
import javax.swing.JCheckBox;
import javax.swing.event.ChangeListener;
import org.eclipse.persistence.tools.workbench.utility.TriStateBoolean;
/**
* This extension over the Swing's <code>JCheckBox</code> adds support for a
* partially selected state.
* <p>
* This code was found at: <a
* href="http://forum.java.sun.com/thread.jspa?threadID=593755&messageID=3116647">http://forum.java.sun.com/thread.jspa?threadID=593755&messageID=3116647</a>
* <p>
* The Sun's bug number is 4079882: <a
* href="http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4079882">http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4079882</a>
*
* @version 11.0.0
* @since 11.0.0
* @author Pascal Filion
*/
public class TriStateCheckBox extends JCheckBox
{
/**
* Cache the check icon in order to compose it with a secondary icon.
*/
private Icon checkIcon;
/**
* Constants used to determine to set the selection state as partially
* selected.
*/
public static TriStateBoolean PARTIALLY_SELECTED = TriStateBoolean.UNDEFINED;
/**
* Constants used to determine to set the selection state as selected.
*/
public static TriStateBoolean SELECTED = TriStateBoolean.TRUE;
/**
* Constants used to determine to set the selection state as partially
* selected.
*/
public static TriStateBoolean UNSELECTED = TriStateBoolean.FALSE;
/*
* @see JCheckBox()
*/
public TriStateCheckBox()
{
this(null);
}
/*
* @see JCheckBox(String)
*/
public TriStateCheckBox(String text)
{
this(text, UNSELECTED);
}
/*
* @see JCheckBox(String, Icon, boolean)
*/
public TriStateCheckBox(String text, Icon icon, TriStateBoolean selectedState)
{
super(text, icon);
initialize(selectedState);
}
/*
* @see JCheckBox(String, boolean)
*/
public TriStateCheckBox(String text, TriStateBoolean selectedState)
{
this(text, null, selectedState);
}
/**
* Returns the current state, which is determined by the selection status of
* the model.
*/
public TriStateBoolean getState()
{
return getTriStateModel().getState();
}
/**
* Returns this button's model.
* <p>
* <b>Note:</b> {@link javax.swing.AbstractButton#getModel()} is not
* overriden just in case the UI delegate calls it before we reset the model.
*
* @return This button's model
*/
public TriStateButtonModel getTriStateModel()
{
return (TriStateButtonModel) super.getModel();
}
/**
* Initializes the model and icon to support the partially selected state.
*
* @param selectedState The initial selection state
*/
protected void initialize(TriStateBoolean selectedState)
{
setOpaque(false);
// Install the tri-state button model
setModel(new TriStateButtonModel(getModel()));
setState(selectedState);
}
/**
* Determines whether the selection state is set to be partially selected.
*
* @return <code>true</code> if the selection is set as partially selected or
* <code>false</code> if it is set as unselected or selected
*/
public boolean isPartiallySelected()
{
return getTriStateModel().isPartiallySelected();
}
/*
* (non-Javadoc)
*/
@Override
public void setIcon(Icon icon)
{
setSecondaryIcon(icon);
}
/**
* Sets the secondary icon, which is shown after the check icon.
*
* @param icon The secondary icon or <code>null</code> to clear a previously
* set secondary icon
*/
public void setSecondaryIcon(Icon icon)
{
if (icon == null)
{
super.setIcon(checkIcon);
}
else
{
super.setIcon(new CompositeIcon(checkIcon, icon));
}
}
/*
* (non-Javadoc)
*/
@Override
public void setSelected(boolean selected)
{
setState(selected ? SELECTED : UNSELECTED);
}
/**
* Sets the new state to either {@link #SELECTED}, {@link #UNSELECTED} or
* {@link #PARTIALLY_SELECTED}. If <code>null</code>, then it is treated as
* {@link #PARTIALLY_SELECTED}.
*
* @param state The new selection state
*/
public void setState(TriStateBoolean state)
{
getTriStateModel().setState(state);
}
/**
* Exactly which Design Pattern is this? Is it an Adapter, a Proxy or a
* Decorator? In this case, my vote lies with the Decorator, because we are
* extending functionality and "decorating" the original model with a more
* powerful model.
*/
public static class TriStateButtonModel implements ButtonModel
{
/**
* The wrapped <code>ButtonModel</code> set by the UI delegate.
*/
private final ButtonModel delegate;
/**
* The selection state supporting three states: selected, partially
* selected or unselected.
*/
private TriStateBoolean selectionState;
/**
* Creates a new <code>TriStateButtonModel</code>.
*
* @param delegate The wrapped <code>ButtonModel</code> set by the UI
* delegate
*/
public TriStateButtonModel(ButtonModel delegate)
{
super();
this.delegate = delegate;
this.selectionState = TriStateBoolean.valueOf(delegate.isSelected());
}
/*
* (non-Javadoc)
*/
public void addActionListener(ActionListener listener)
{
delegate.addActionListener(listener);
}
/*
* (non-Javadoc)
*/
public void addChangeListener(ChangeListener listener)
{
delegate.addChangeListener(listener);
}
/*
* (non-Javadoc)
*/
public void addItemListener(ItemListener listener)
{
delegate.addItemListener(listener);
}
/*
* (non-Javadoc)
*/
public String getActionCommand()
{
return delegate.getActionCommand();
}
/**
* Returns the wrapped <code>ButtonModel</code> set by the UI delegate.
*
* @return The model used to store the actual properties
*/
protected final ButtonModel getDelegate()
{
return delegate;
}
/*
* (non-Javadoc)
*/
public int getMnemonic()
{
return delegate.getMnemonic();
}
/*
* (non-Javadoc)
*/
public Object[] getSelectedObjects()
{
return delegate.getSelectedObjects();
}
/**
* Returns the current selection state.
*
* @return One of the three possible selection states
*/
protected TriStateBoolean getState()
{
return selectionState;
}
/*
* (non-Javadoc)
*/
public boolean isArmed()
{
return delegate.isArmed();
}
/*
* (non-Javadoc)
*/
public boolean isEnabled()
{
return delegate.isEnabled();
}
/**
* Determines whether the selection state is set to be partially selected.
*
* @return <code>true</code> if the selection is set as partially selected or
* <code>false</code> if it is set as unselected or selected
*/
public boolean isPartiallySelected()
{
return getState() == PARTIALLY_SELECTED;
}
/*
* (non-Javadoc)
*/
public boolean isPressed()
{
return delegate.isPressed();
}
/*
* (non-Javadoc)
*/
public boolean isRollover()
{
return delegate.isRollover();
}
/*
* (non-Javadoc)
*/
public boolean isSelected()
{
return delegate.isSelected();
}
/**
* Rotates between {@link TriStateCheckBox#PARTIALLY_SELECTED},
* {@link TriStateCheckBox#SELECTED} and {@link TriStateCheckBox#UNSELECTED}.
*/
protected void nextState()
{
TriStateBoolean current = getState();
if (current == UNSELECTED)
{
setState(SELECTED);
}
else if (current == SELECTED)
{
setState(PARTIALLY_SELECTED);
}
else if (current == PARTIALLY_SELECTED)
{
setState(UNSELECTED);
}
}
/*
* (non-Javadoc)
*/
public void removeActionListener(ActionListener listener)
{
delegate.removeActionListener(listener);
}
/*
* (non-Javadoc)
*/
public void removeChangeListener(ChangeListener listener)
{
delegate.removeChangeListener(listener);
}
/*
* (non-Javadoc)
*/
public void removeItemListener(ItemListener listener)
{
delegate.removeItemListener(listener);
}
/*
* (non-Javadoc)
*/
public void setActionCommand(String actionCommand)
{
delegate.setActionCommand(actionCommand);
}
/*
* (non-Javadoc)
*/
public void setArmed(boolean armed)
{
delegate.setArmed(armed);
}
/*
* (non-Javadoc)
*/
public void setEnabled(boolean enabled)
{
delegate.setEnabled(enabled);
}
/*
* (non-Javadoc)
*/
public void setGroup(ButtonGroup group)
{
delegate.setGroup(group);
}
/*
* (non-Javadoc)
*/
public void setMnemonic(int mnemonic)
{
delegate.setMnemonic(mnemonic);
}
/*
* (non-Javadoc)
*/
public void setPressed(boolean pressed)
{
if ((isPressed() != pressed) && isEnabled())
{
if (!pressed && isArmed())
{
nextState();
}
// The temporary selected flag prevents the UI from showing the
// partially selected state as selected
boolean selected = isSelected();
delegate.setPressed(pressed);
delegate.setSelected(selected);
}
}
/*
* (non-Javadoc)
*/
public void setRollover(boolean rollover)
{
delegate.setRollover(rollover);
}
/*
* (non-Javadoc)
*/
public void setSelected(boolean selected)
{
delegate.setSelected(selected);
}
/**
* Sets the new state to either {@link #SELECTED}, {@link #UNSELECTED} or
* {@link #PARTIALLY_SELECTED}. If <code>null</code>, then it is treated as
* {@link #PARTIALLY_SELECTED}.
*
* @param state The new selection state
*/
protected void setState(TriStateBoolean selectionState)
{
if (selectionState == null)
{
selectionState = PARTIALLY_SELECTED;
}
this.selectionState = selectionState;
if (selectionState == PARTIALLY_SELECTED)
{
delegate.setSelected(false);
}
else
{
delegate.setSelected(selectionState.booleanValue());
}
}
}
}
| epl-1.0 |
RallySoftware/eclipselink.runtime | moxy/org.eclipse.persistence.moxy/src/org/eclipse/persistence/internal/jaxb/many/ManyValue.java | 1638 | /*******************************************************************************
* Copyright (c) 1998, 2015 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Denise Smith June 05, 2009 - Initial implementation
******************************************************************************/
package org.eclipse.persistence.internal.jaxb.many;
import java.util.Collection;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlTransient;
/**
*
* This abstract class is used to support JAXBContext creation with an array or Collection class.
*
* Subclasses are CollectionValue, ObjectArrayValue and PrimitiveArrayValue
*/
@XmlTransient
@XmlAccessorType(XmlAccessType.NONE)
public abstract class ManyValue<T, ITEM> {

    // The values being adapted, held as a collection regardless of whether the
    // mapped container is an array or a Collection.
    protected Collection<T> adaptedValue;

    /** @return {@code true} when the mapped container is an array rather than a collection */
    public abstract boolean isArray();

    public abstract ITEM getItem();

    public abstract void setItem(ITEM item);

    /** @return the class of the container being adapted */
    public abstract Class<?> containerClass();

    public Collection<T> getAdaptedValue() {
        return this.adaptedValue;
    }

    public void setAdaptedValue(Collection<T> value) {
        this.adaptedValue = value;
    }
}
| epl-1.0 |
Sushma7785/OpenDayLight-Load-Balancer | opendaylight/md-sal/sal-distributed-datastore/src/main/java/org/opendaylight/controller/cluster/datastore/AbstractDataTreeCandidateNode.java | 2653 | /*
* Copyright (c) 2015 Cisco Systems, Inc. and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.controller.cluster.datastore;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import java.util.Collection;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier.PathArgument;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTreeCandidateNode;
import org.opendaylight.yangtools.yang.data.api.schema.tree.ModificationType;
/**
* Abstract base class for our internal implementation of {@link DataTreeCandidateNode},
* which we instantiate from a serialized stream. We do not retain the before-image and
* do not implement {@link #getModifiedChild(PathArgument)}, as that method is only
* useful for end users. Instances based on this class should never be leaked outside of
* this component.
*/
abstract class AbstractDataTreeCandidateNode implements DataTreeCandidateNode {

    // The kind of modification this node represents; never null.
    private final ModificationType modificationType;

    protected AbstractDataTreeCandidateNode(final ModificationType type) {
        this.modificationType = Preconditions.checkNotNull(type);
    }

    /**
     * Builds the UNMODIFIED root node. Identifier, after-image and children are
     * all unavailable for it, so the corresponding accessors throw.
     */
    static DataTreeCandidateNode createUnmodified() {
        return new AbstractDataTreeCandidateNode(ModificationType.UNMODIFIED) {
            @Override
            public PathArgument getIdentifier() {
                throw new UnsupportedOperationException("Root node does not have an identifier");
            }

            @Override
            public Optional<NormalizedNode<?, ?>> getDataAfter() {
                throw new UnsupportedOperationException("After-image not available after serialization");
            }

            @Override
            public Collection<DataTreeCandidateNode> getChildNodes() {
                throw new UnsupportedOperationException("Children not available after serialization");
            }
        };
    }

    @Override
    public final ModificationType getModificationType() {
        return modificationType;
    }

    @Override
    public final DataTreeCandidateNode getModifiedChild(final PathArgument identifier) {
        throw new UnsupportedOperationException("Not implemented");
    }

    @Override
    public final Optional<NormalizedNode<?, ?>> getDataBefore() {
        throw new UnsupportedOperationException("Before-image not available after serialization");
    }
}
| epl-1.0 |
RallySoftware/eclipselink.runtime | jpa/org.eclipse.persistence.jpa.jpql.test/src/org/eclipse/persistence/jpa/tests/jpql/parser/AsOfClauseTest.java | 7029 | /*******************************************************************************
* Copyright (c) 2012, 2015 Oracle and/or its affiliates. All rights reserved.
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0
* which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Eclipse Distribution License is available at
* http://www.eclipse.org/org/documents/edl-v10.php.
*
* Contributors:
* Oracle - initial API and implementation
*
******************************************************************************/
package org.eclipse.persistence.jpa.tests.jpql.parser;
import org.junit.Test;
import static org.eclipse.persistence.jpa.jpql.parser.Expression.*;
import static org.eclipse.persistence.jpa.tests.jpql.EclipseLinkJPQLQueries2_5.*;
import static org.eclipse.persistence.jpa.tests.jpql.parser.JPQLParserTester.*;
/**
* Unit-tests for {@link org.eclipse.persistence.jpa.jpql.parser.AsOfClause AsOfClause}.
*
* @version 2.5
* @since 2.3
* @author Pascal Filion
*/
@SuppressWarnings("nls")
public final class AsOfClauseTest extends JPQLParserTest {
// Verifies parsing of an AS OF TIMESTAMP clause whose timestamp is built by a
// FUNC('TO_TIMESTAMP', ...) invocation; the expected tree is compared against
// the parse of query_004().
@Test
public void test_JPQLQuery_01() throws Exception {
    // SELECT e
    // FROM Employee e
    // AS OF TIMESTAMP FUNC('TO_TIMESTAMP', '2003-04-04 09:30:00', 'YYYY-MM-DD HH:MI:SS')
    // WHERE e.name = 'JPQL'
    ExpressionTester selectStatement = selectStatement(
        select(variable("e")),
        from(
            "Employee",
            "e",
            asOfTimestamp(
                function(FUNC, "'TO_TIMESTAMP'", string("'2003-04-04 09:30:00'"), string("'YYYY-MM-DD HH:MI:SS'"))
            )
        ),
        where(path("e.name").equal(string("'JPQL'")))
    );
    testQuery(query_004(), selectStatement);
}
@Test
public void test_JPQLQuery_05() throws Exception {
// select e
// from Employee e
// as of scn 7920
// where e.id = 222
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfScn(7920)),
where(path("e.id").equal(numeric(222)))
);
testQuery(query_005(), selectStatement);
}
@Test
public void test_JPQLQuery_06() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF";
AsOfClauseTester asOfClause = asOf(nullExpression());
asOfClause.hasSpaceAfterIdentifier = false;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause)
);
testInvalidQuery(jpqlQuery, selectStatement);
}
@Test
public void test_JPQLQuery_07() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF ";
AsOfClauseTester asOfClause = asOf(nullExpression());
asOfClause.hasSpaceAfterIdentifier = true;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause)
);
testInvalidQuery(jpqlQuery, selectStatement);
}
@Test
public void test_JPQLQuery_08() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF SCN";
AsOfClauseTester asOfClause = asOfScn(nullExpression());
asOfClause.hasSpaceAfterIdentifier = true;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause)
);
testInvalidQuery(jpqlQuery, selectStatement);
}
@Test
public void test_JPQLQuery_09() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF SCN ";
AsOfClauseTester asOfClause = asOfScn(nullExpression());
asOfClause.hasSpaceAfterIdentifier = true;
asOfClause.hasSpaceAfterCategory = true;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause)
);
testInvalidQuery(jpqlQuery, selectStatement);
}
@Test
public void test_JPQLQuery_10() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF TIMESTAMP";
AsOfClauseTester asOfClause = asOfTimestamp(nullExpression());
asOfClause.hasSpaceAfterIdentifier = true;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause)
);
testInvalidQuery(jpqlQuery, selectStatement);
}
@Test
public void test_JPQLQuery_11() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF TIMESTAMP ";
AsOfClauseTester asOfClause = asOfTimestamp(nullExpression());
asOfClause.hasSpaceAfterIdentifier = true;
asOfClause.hasSpaceAfterCategory = true;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause)
);
testInvalidQuery(jpqlQuery, selectStatement);
}
@Test
public void test_JPQLQuery_12() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF WHERE e.name = 'JPQL'";
AsOfClauseTester asOfClause = asOf(nullExpression());
asOfClause.hasSpaceAfterIdentifier = true;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause),
where(path("e.name").equal(string("'JPQL'")))
);
testInvalidQuery(jpqlQuery, selectStatement);
}
@Test
public void test_JPQLQuery_13() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF SCN WHERE e.name = 'JPQL'";
AsOfClauseTester asOfClause = asOfScn(nullExpression());
asOfClause.hasSpaceAfterIdentifier = true;
asOfClause.hasSpaceAfterCategory = true;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause),
where(path("e.name").equal(string("'JPQL'")))
);
testInvalidQuery(jpqlQuery, selectStatement);
}
@Test
public void test_JPQLQuery_14() throws Exception {
String jpqlQuery = "SELECT e FROM Employee e AS OF TIMESTAMP WHERE e.name = 'JPQL'";
AsOfClauseTester asOfClause = asOfTimestamp(nullExpression());
asOfClause.hasSpaceAfterIdentifier = true;
asOfClause.hasSpaceAfterCategory = true;
ExpressionTester selectStatement = selectStatement(
select(variable("e")),
from("Employee", "e", asOfClause),
where(path("e.name").equal(string("'JPQL'")))
);
testInvalidQuery(jpqlQuery, selectStatement);
}
}
| epl-1.0 |
RallySoftware/eclipselink.runtime | sdo/eclipselink.sdo.test/resource/org/eclipse/persistence/testing/sdo/helper/classgen/nestedBaseTypes/org/example/Sub2Sibling.java | 389 | package org.example;
/**
 * Accessor interface for the {@code Sub2Sibling} data type, extending
 * {@link org.example.Sub1} with two string-valued element properties and one
 * int-valued attribute property.
 *
 * <p>NOTE(review): judging by the repository path this appears to be
 * SDO class-generator output — confirm before hand-editing, as regeneration
 * would overwrite manual changes.
 */
public interface Sub2Sibling extends org.example.Sub1 {

   /** Returns the value of the {@code sub2SiblingElem} element property. */
   public java.lang.String getSub2SiblingElem();

   /** Sets the value of the {@code sub2SiblingElem} element property. */
   public void setSub2SiblingElem(java.lang.String value);

   /** Returns the value of the {@code sub2SiblingElem2} element property. */
   public java.lang.String getSub2SiblingElem2();

   /** Sets the value of the {@code sub2SiblingElem2} element property. */
   public void setSub2SiblingElem2(java.lang.String value);

   /** Returns the value of the {@code sub2SiblingAttr} attribute property. */
   public int getSub2SiblingAttr();

   /** Sets the value of the {@code sub2SiblingAttr} attribute property. */
   public void setSub2SiblingAttr(int value);
}
| epl-1.0 |
google/desugar_jdk_libs | jdk11/src/java.base/share/classes/java/lang/StackWalker.java | 25810 | /*
* Copyright (c) 2015, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.lang;
import jdk.internal.reflect.CallerSensitive;
import java.lang.invoke.MethodType;
import java.util.EnumSet;
import java.util.Objects;
import java.util.Set;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.stream.Stream;
/**
* A stack walker.
*
* <p> The {@link StackWalker#walk walk} method opens a sequential stream
* of {@link StackFrame StackFrame}s for the current thread and then applies
* the given function to walk the {@code StackFrame} stream.
* The stream reports stack frame elements in order, from the top most frame
* that represents the execution point at which the stack was generated to
* the bottom most frame.
* The {@code StackFrame} stream is closed when the {@code walk} method returns.
* If an attempt is made to reuse the closed stream,
* {@code IllegalStateException} will be thrown.
*
* <p> The {@linkplain Option <em>stack walking options</em>} of a
* {@code StackWalker} determines the information of
* {@link StackFrame StackFrame} objects to be returned.
* By default, stack frames of the reflection API and implementation
* classes are {@linkplain Option#SHOW_HIDDEN_FRAMES hidden}
* and {@code StackFrame}s have the class name and method name
* available but not the {@link StackFrame#getDeclaringClass() Class reference}.
*
* <p> {@code StackWalker} is thread-safe. Multiple threads can share
* a single {@code StackWalker} object to traverse its own stack.
* A permission check is performed when a {@code StackWalker} is created,
* according to the options it requests.
* No further permission check is done at stack walking time.
*
* @apiNote
* Examples
*
* <p>1. To find the first caller filtering a known list of implementation class:
* <pre>{@code
* StackWalker walker = StackWalker.getInstance(Option.RETAIN_CLASS_REFERENCE);
* Optional<Class<?>> callerClass = walker.walk(s ->
* s.map(StackFrame::getDeclaringClass)
* .filter(interestingClasses::contains)
* .findFirst());
* }</pre>
*
* <p>2. To snapshot the top 10 stack frames of the current thread,
* <pre>{@code
* List<StackFrame> stack = StackWalker.getInstance().walk(s ->
* s.limit(10).collect(Collectors.toList()));
* }</pre>
*
* Unless otherwise noted, passing a {@code null} argument to a
* constructor or method in this {@code StackWalker} class
* will cause a {@link NullPointerException NullPointerException}
* to be thrown.
*
* @since 9
*/
public final class StackWalker {
    /**
     * A {@code StackFrame} object represents a method invocation returned by
     * {@link StackWalker}.
     *
     * <p> The {@link #getDeclaringClass()} method may be unsupported as determined
     * by the {@linkplain Option stack walking options} of a {@linkplain
     * StackWalker stack walker}.
     *
     * @since 9
     * @jvms 2.6
     */
    public interface StackFrame {
        /**
         * Gets the <a href="ClassLoader.html#name">binary name</a>
         * of the declaring class of the method represented by this stack frame.
         *
         * @return the binary name of the declaring class of the method
         *         represented by this stack frame
         *
         * @jls 13.1 The Form of a Binary
         */
        public String getClassName();

        /**
         * Gets the name of the method represented by this stack frame.
         * @return the name of the method represented by this stack frame
         */
        public String getMethodName();

        /**
         * Gets the declaring {@code Class} for the method represented by
         * this stack frame.
         *
         * @return the declaring {@code Class} of the method represented by
         *         this stack frame
         *
         * @throws UnsupportedOperationException if this {@code StackWalker}
         *         is not configured with {@link Option#RETAIN_CLASS_REFERENCE
         *         Option.RETAIN_CLASS_REFERENCE}.
         */
        public Class<?> getDeclaringClass();

        /**
         * Returns the {@link MethodType} representing the parameter types and
         * the return type for the method represented by this stack frame.
         *
         * @implSpec
         * The default implementation throws {@code UnsupportedOperationException}.
         *
         * @return the {@code MethodType} for this stack frame
         *
         * @throws UnsupportedOperationException if this {@code StackWalker}
         *         is not configured with {@link Option#RETAIN_CLASS_REFERENCE
         *         Option.RETAIN_CLASS_REFERENCE}.
         *
         * @since 10
         */
        public default MethodType getMethodType() {
            throw new UnsupportedOperationException();
        }

        /**
         * Returns the <i>descriptor</i> of the method represented by
         * this stack frame as defined by
         * <cite>The Java Virtual Machine Specification</cite>.
         *
         * @implSpec
         * The default implementation throws {@code UnsupportedOperationException}.
         *
         * @return the descriptor of the method represented by
         *         this stack frame
         *
         * @see MethodType#fromMethodDescriptorString(String, ClassLoader)
         * @see MethodType#toMethodDescriptorString()
         * @jvms 4.3.3 Method Descriptor
         *
         * @since 10
         */
        public default String getDescriptor() {
            throw new UnsupportedOperationException();
        }

        /**
         * Returns the index to the code array of the {@code Code} attribute
         * containing the execution point represented by this stack frame.
         * The code array gives the actual bytes of Java Virtual Machine code
         * that implement the method.
         *
         * @return the index to the code array of the {@code Code} attribute
         *         containing the execution point represented by this stack frame,
         *         or a negative number if the method is native.
         *
         * @jvms 4.7.3 The {@code Code} Attribute
         */
        public int getByteCodeIndex();

        /**
         * Returns the name of the source file containing the execution point
         * represented by this stack frame.  Generally, this corresponds
         * to the {@code SourceFile} attribute of the relevant {@code class}
         * file as defined by <cite>The Java Virtual Machine Specification</cite>.
         * In some systems, the name may refer to some source code unit
         * other than a file, such as an entry in a source repository.
         *
         * @return the name of the file containing the execution point
         *         represented by this stack frame, or {@code null} if
         *         this information is unavailable.
         *
         * @jvms 4.7.10 The {@code SourceFile} Attribute
         */
        public String getFileName();

        /**
         * Returns the line number of the source line containing the execution
         * point represented by this stack frame.  Generally, this is
         * derived from the {@code LineNumberTable} attribute of the relevant
         * {@code class} file as defined by <cite>The Java Virtual Machine
         * Specification</cite>.
         *
         * @return the line number of the source line containing the execution
         *         point represented by this stack frame, or a negative number if
         *         this information is unavailable.
         *
         * @jvms 4.7.12 The {@code LineNumberTable} Attribute
         */
        public int getLineNumber();

        /**
         * Returns {@code true} if the method containing the execution point
         * represented by this stack frame is a native method.
         *
         * @return {@code true} if the method containing the execution point
         *         represented by this stack frame is a native method.
         */
        public boolean isNativeMethod();

        /**
         * Gets a {@code StackTraceElement} for this stack frame.
         *
         * @return {@code StackTraceElement} for this stack frame.
         */
        public StackTraceElement toStackTraceElement();
    }

    /**
     * Stack walker option to configure the {@linkplain StackFrame stack frame}
     * information obtained by a {@code StackWalker}.
     *
     * @since 9
     */
    public enum Option {
        /**
         * Retains {@code Class} object in {@code StackFrame}s
         * walked by this {@code StackWalker}.
         *
         * <p> A {@code StackWalker} configured with this option will support
         * {@link StackWalker#getCallerClass()} and
         * {@link StackFrame#getDeclaringClass() StackFrame.getDeclaringClass()}.
         */
        RETAIN_CLASS_REFERENCE,
        /**
         * Shows all reflection frames.
         *
         * <p>By default, reflection frames are hidden.  A {@code StackWalker}
         * configured with this {@code SHOW_REFLECT_FRAMES} option
         * will show all reflection frames that
         * include {@link java.lang.reflect.Method#invoke} and
         * {@link java.lang.reflect.Constructor#newInstance(Object...)}
         * and their reflection implementation classes.
         *
         * <p>The {@link #SHOW_HIDDEN_FRAMES} option can also be used to show all
         * reflection frames and it will also show other hidden frames that
         * are implementation-specific.
         *
         * @apiNote
         * This option includes the stack frames representing the invocation of
         * {@code Method} and {@code Constructor}.  Any utility methods that
         * are equivalent to calling {@code Method.invoke} or
         * {@code Constructor.newInstance} such as {@code Class.newInstance}
         * are not filtered or controlled by any stack walking option.
         */
        SHOW_REFLECT_FRAMES,
        /**
         * Shows all hidden frames.
         *
         * <p>A Java Virtual Machine implementation may hide implementation
         * specific frames in addition to {@linkplain #SHOW_REFLECT_FRAMES
         * reflection frames}. A {@code StackWalker} with this {@code SHOW_HIDDEN_FRAMES}
         * option will show all hidden frames (including reflection frames).
         */
        SHOW_HIDDEN_FRAMES;
    }

    // Package-private extension used by JVMTI/debugging support rather than the
    // public API; selected via newInstance(Set, ExtendedOption) below.
    enum ExtendedOption {
        /**
         * Obtain monitors, locals and operands.
         */
        LOCALS_AND_OPERANDS
    };

    // Shared "no options" set. NOTE(review): this EnumSet is mutable and is
    // handed back by toEnumSet(...) for the empty case — it must never be
    // mutated by callers.
    static final EnumSet<Option> DEFAULT_EMPTY_OPTION = EnumSet.noneOf(Option.class);

    // Singleton walker returned by the no-arg getInstance(); safe to share
    // because a StackWalker carries no per-walk state.
    private final static StackWalker DEFAULT_WALKER =
        new StackWalker(DEFAULT_EMPTY_OPTION);

    private final Set<Option> options;
    private final ExtendedOption extendedOption;
    private final int estimateDepth;
    final boolean retainClassRef; // cached for performance

    /**
     * Returns a {@code StackWalker} instance.
     *
     * <p> This {@code StackWalker} is configured to skip all
     * {@linkplain Option#SHOW_HIDDEN_FRAMES hidden frames} and
     * no {@linkplain Option#RETAIN_CLASS_REFERENCE class reference} is retained.
     *
     * @return a {@code StackWalker} configured to skip all
     * {@linkplain Option#SHOW_HIDDEN_FRAMES hidden frames} and
     * no {@linkplain Option#RETAIN_CLASS_REFERENCE class reference} is retained.
     *
     */
    public static StackWalker getInstance() {
        // no permission check needed
        return DEFAULT_WALKER;
    }

    /**
     * Returns a {@code StackWalker} instance with the given option specifying
     * the stack frame information it can access.
     *
     * <p>
     * If a security manager is present and the given {@code option} is
     * {@link Option#RETAIN_CLASS_REFERENCE Option.RETAIN_CLASS_REFERENCE},
     * it calls its {@link SecurityManager#checkPermission checkPermission}
     * method for {@code RuntimePermission("getStackWalkerWithClassReference")}.
     *
     * @param option {@link Option stack walking option}
     *
     * @return a {@code StackWalker} configured with the given option
     *
     * @throws SecurityException if a security manager exists and its
     *         {@code checkPermission} method denies access.
     */
    public static StackWalker getInstance(Option option) {
        return getInstance(EnumSet.of(Objects.requireNonNull(option)));
    }

    /**
     * Returns a {@code StackWalker} instance with the given {@code options} specifying
     * the stack frame information it can access.  If the given {@code options}
     * is empty, this {@code StackWalker} is configured to skip all
     * {@linkplain Option#SHOW_HIDDEN_FRAMES hidden frames} and no
     * {@linkplain Option#RETAIN_CLASS_REFERENCE class reference} is retained.
     *
     * <p>
     * If a security manager is present and the given {@code options} contains
     * {@link Option#RETAIN_CLASS_REFERENCE Option.RETAIN_CLASS_REFERENCE},
     * it calls its {@link SecurityManager#checkPermission checkPermission}
     * method for {@code RuntimePermission("getStackWalkerWithClassReference")}.
     *
     * @param options {@link Option stack walking option}
     *
     * @return a {@code StackWalker} configured with the given options
     *
     * @throws SecurityException if a security manager exists and its
     *         {@code checkPermission} method denies access.
     */
    public static StackWalker getInstance(Set<Option> options) {
        if (options.isEmpty()) {
            return DEFAULT_WALKER;
        }

        EnumSet<Option> optionSet = toEnumSet(options);
        checkPermission(optionSet);
        return new StackWalker(optionSet);
    }

    /**
     * Returns a {@code StackWalker} instance with the given {@code options} specifying
     * the stack frame information it can access. If the given {@code options}
     * is empty, this {@code StackWalker} is configured to skip all
     * {@linkplain Option#SHOW_HIDDEN_FRAMES hidden frames} and no
     * {@linkplain Option#RETAIN_CLASS_REFERENCE class reference} is retained.
     *
     * <p>
     * If a security manager is present and the given {@code options} contains
     * {@link Option#RETAIN_CLASS_REFERENCE Option.RETAIN_CLASS_REFERENCE},
     * it calls its {@link SecurityManager#checkPermission checkPermission}
     * method for {@code RuntimePermission("getStackWalkerWithClassReference")}.
     *
     * <p>
     * The {@code estimateDepth} specifies the estimate number of stack frames
     * this {@code StackWalker} will traverse that the {@code StackWalker} could
     * use as a hint for the buffer size.
     *
     * @param options {@link Option stack walking options}
     * @param estimateDepth Estimate number of stack frames to be traversed.
     *
     * @return a {@code StackWalker} configured with the given options
     *
     * @throws IllegalArgumentException if {@code estimateDepth <= 0}
     * @throws SecurityException if a security manager exists and its
     *         {@code checkPermission} method denies access.
     */
    public static StackWalker getInstance(Set<Option> options, int estimateDepth) {
        if (estimateDepth <= 0) {
            throw new IllegalArgumentException("estimateDepth must be > 0");
        }
        EnumSet<Option> optionSet = toEnumSet(options);
        checkPermission(optionSet);
        return new StackWalker(optionSet, estimateDepth);
    }

    // ----- private constructors ------
    private StackWalker(EnumSet<Option> options) {
        this(options, 0, null);
    }
    private StackWalker(EnumSet<Option> options, int estimateDepth) {
        this(options, estimateDepth, null);
    }
    private StackWalker(EnumSet<Option> options, int estimateDepth, ExtendedOption extendedOption) {
        this.options = options;
        this.estimateDepth = estimateDepth;
        this.extendedOption = extendedOption;
        this.retainClassRef = hasOption(Option.RETAIN_CLASS_REFERENCE);
    }

    // Enforces RuntimePermission("getStackWalkerWithClassReference") when a
    // security manager is installed and RETAIN_CLASS_REFERENCE was requested.
    // Performed once at construction; no further checks happen at walk time.
    private static void checkPermission(Set<Option> options) {
        Objects.requireNonNull(options);
        SecurityManager sm = System.getSecurityManager();
        if (sm != null) {
            if (options.contains(Option.RETAIN_CLASS_REFERENCE)) {
                sm.checkPermission(new RuntimePermission("getStackWalkerWithClassReference"));
            }
        }
    }

    /*
     * Returns a defensive copy of the given options.  NOTE(review): for an
     * empty set this returns the shared DEFAULT_EMPTY_OPTION instance rather
     * than a copy, so callers must treat the result as read-only.
     */
    private static EnumSet<Option> toEnumSet(Set<Option> options) {
        Objects.requireNonNull(options);
        if (options.isEmpty()) {
            return DEFAULT_EMPTY_OPTION;
        } else {
            return EnumSet.copyOf(options);
        }
    }

    /**
     * Applies the given function to the stream of {@code StackFrame}s
     * for the current thread, traversing from the top frame of the stack,
     * which is the method calling this {@code walk} method.
     *
     * <p>The {@code StackFrame} stream will be closed when
     * this method returns.  When a closed {@code Stream<StackFrame>} object
     * is reused, {@code IllegalStateException} will be thrown.
     *
     * @apiNote
     * For example, to find the first 10 calling frames, first skipping those frames
     * whose declaring class is in package {@code com.foo}:
     * <blockquote>
     * <pre>{@code
     * List<StackFrame> frames = StackWalker.getInstance().walk(s ->
     *     s.dropWhile(f -> f.getClassName().startsWith("com.foo."))
     *      .limit(10)
     *      .collect(Collectors.toList()));
     * }</pre></blockquote>
     *
     * <p>This method takes a {@code Function} accepting a {@code Stream<StackFrame>},
     * rather than returning a {@code Stream<StackFrame>} and allowing the
     * caller to directly manipulate the stream. The Java virtual machine is
     * free to reorganize a thread's control stack, for example, via
     * deoptimization. By taking a {@code Function} parameter, this method
     * allows access to stack frames through a stable view of a thread's control
     * stack.
     *
     * <p>Parallel execution is effectively disabled and stream pipeline
     * execution will only occur on the current thread.
     *
     * @implNote The implementation stabilizes the stack by anchoring a frame
     * specific to the stack walking and ensures that the stack walking is
     * performed above the anchored frame. When the stream object is closed or
     * being reused, {@code IllegalStateException} will be thrown.
     *
     * @param function a function that takes a stream of
     *                 {@linkplain StackFrame stack frames} and returns a result.
     * @param <T> The type of the result of applying the function to the
     *            stream of {@linkplain StackFrame stack frame}.
     *
     * @return the result of applying the function to the stream of
     *         {@linkplain StackFrame stack frame}.
     */
    @CallerSensitive
    public <T> T walk(Function<? super Stream<StackFrame>, ? extends T> function) {
        // Returning a Stream<StackFrame> would be unsafe, as the stream could
        // be used to access the stack frames in an uncontrolled manner.  For
        // example, a caller might pass a Spliterator of stack frames after one
        // or more frames had been traversed. There is no robust way to detect
        // whether the execution point when
        // Spliterator.tryAdvance(java.util.function.Consumer<? super T>) is
        // invoked is the exact same execution point where the stack frame
        // traversal is expected to resume.
        Objects.requireNonNull(function);
        return StackStreamFactory.makeStackTraverser(this, function)
                                 .walk();
    }

    /**
     * Performs the given action on each element of {@code StackFrame} stream
     * of the current thread, traversing from the top frame of the stack,
     * which is the method calling this {@code forEach} method.
     *
     * <p> This method is equivalent to calling
     * <blockquote>
     * {@code walk(s -> { s.forEach(action); return null; });}
     * </blockquote>
     *
     * @param action an action to be performed on each {@code StackFrame}
     *               of the stack of the current thread
     */
    @CallerSensitive
    public void forEach(Consumer<? super StackFrame> action) {
        Objects.requireNonNull(action);
        StackStreamFactory.makeStackTraverser(this, s -> {
            s.forEach(action);
            return null;
        }).walk();
    }

    /**
     * Gets the {@code Class} object of the caller who invoked the method
     * that invoked {@code getCallerClass}.
     *
     * <p> This method filters {@linkplain Option#SHOW_REFLECT_FRAMES reflection
     * frames}, {@link java.lang.invoke.MethodHandle}, and
     * {@linkplain Option#SHOW_HIDDEN_FRAMES hidden frames} regardless of the
     * {@link Option#SHOW_REFLECT_FRAMES SHOW_REFLECT_FRAMES}
     * and {@link Option#SHOW_HIDDEN_FRAMES SHOW_HIDDEN_FRAMES} options
     * this {@code StackWalker} has been configured with.
     *
     * <p> This method should be called when a caller frame is present.  If
     * it is called from the bottom most frame on the stack,
     * {@code IllegalCallerException} will be thrown.
     *
     * <p> This method throws {@code UnsupportedOperationException}
     * if this {@code StackWalker} is not configured with the
     * {@link Option#RETAIN_CLASS_REFERENCE RETAIN_CLASS_REFERENCE} option.
     *
     * @apiNote
     * For example, {@code Util::getResourceBundle} loads a resource bundle
     * on behalf of the caller.  It invokes {@code getCallerClass} to identify
     * the class whose method called {@code Util::getResourceBundle}.
     * Then, it obtains the class loader of that class, and uses
     * the class loader to load the resource bundle. The caller class
     * in this example is {@code MyTool}.
     *
     * <pre>{@code
     * class Util {
     *     private final StackWalker walker = StackWalker.getInstance(Option.RETAIN_CLASS_REFERENCE);
     *     public ResourceBundle getResourceBundle(String bundleName) {
     *         Class<?> caller = walker.getCallerClass();
     *         return ResourceBundle.getBundle(bundleName, Locale.getDefault(), caller.getClassLoader());
     *     }
     * }
     *
     * class MyTool {
     *     private final Util util = new Util();
     *     private void init() {
     *         ResourceBundle rb = util.getResourceBundle("mybundle");
     *     }
     * }
     * }</pre>
     *
     * An equivalent way to find the caller class using the
     * {@link StackWalker#walk walk} method is as follows
     * (filtering the reflection frames, {@code MethodHandle} and hidden frames
     * not shown below):
     * <pre>{@code
     *     Optional<Class<?>> caller = walker.walk(s ->
     *         s.map(StackFrame::getDeclaringClass)
     *          .skip(2)
     *          .findFirst());
     * }</pre>
     *
     * When the {@code getCallerClass} method is called from a method that
     * is the bottom most frame on the stack,
     * for example, {@code static public void main} method launched by the
     * {@code java} launcher, or a method invoked from a JNI attached thread,
     * {@code IllegalCallerException} is thrown.
     *
     * @return {@code Class} object of the caller's caller invoking this method.
     *
     * @throws UnsupportedOperationException if this {@code StackWalker}
     *         is not configured with {@link Option#RETAIN_CLASS_REFERENCE
     *         Option.RETAIN_CLASS_REFERENCE}.
     * @throws IllegalCallerException if there is no caller frame, i.e.
     *         when this {@code getCallerClass} method is called from a method
     *         which is the last frame on the stack.
     */
    @CallerSensitive
    public Class<?> getCallerClass() {
        if (!retainClassRef) {
            throw new UnsupportedOperationException("This stack walker " +
                    "does not have RETAIN_CLASS_REFERENCE access");
        }
        return StackStreamFactory.makeCallerFinder(this).findCaller();
    }

    // ---- package access ----

    // Factory used internally to create a walker with an ExtendedOption
    // (e.g. LOCALS_AND_OPERANDS); performs the same permission check as the
    // public getInstance methods.
    static StackWalker newInstance(Set<Option> options, ExtendedOption extendedOption) {
        EnumSet<Option> optionSet = toEnumSet(options);
        checkPermission(optionSet);
        return new StackWalker(optionSet, 0, extendedOption);
    }

    // Buffer-size hint supplied at construction; 0 means "no hint".
    int estimateDepth() {
        return estimateDepth;
    }

    boolean hasOption(Option option) {
        return options.contains(option);
    }

    boolean hasLocalsOperandsOption() {
        return extendedOption == ExtendedOption.LOCALS_AND_OPERANDS;
    }
}
| gpl-2.0 |
markuskeunecke/stendhal | src/games/stendhal/server/entity/npc/action/ListProducedItemDetailAction.java | 2177 | /* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.server.entity.npc.action;
import games.stendhal.common.parser.Sentence;
import games.stendhal.server.core.config.annotations.Dev;
import games.stendhal.server.core.config.annotations.Dev.Category;
import games.stendhal.server.core.engine.SingletonRepository;
import games.stendhal.server.entity.npc.ChatAction;
import games.stendhal.server.entity.npc.EventRaiser;
import games.stendhal.server.entity.npc.behaviour.journal.ProducerRegister;
import games.stendhal.server.entity.player.Player;
/**
* List details about a produced item
*
* @author kymara
*/
@Dev(category=Category.ITEMS_PRODUCER, label="List")
public class ListProducedItemDetailAction implements ChatAction {

	/** Registry consulted for the production details of an item. */
	private final ProducerRegister producers = SingletonRepository.getProducerRegister();

	/**
	 * Makes the NPC describe the item named by the sentence's trigger
	 * expression.
	 */
	@Override
	public void fire(final Player player, final Sentence sentence, final EventRaiser raiser) {
		final String requestedItem = sentence.getTriggerExpression().toString();
		raiser.say(producers.getProducedItemDetails(requestedItem));
	}

	@Override
	public String toString() {
		return "ListProducedItemDetailAction";
	}

	// All instances are interchangeable, so every one shares the same
	// (arbitrary) hash code and any two instances compare equal.
	@Override
	public int hashCode() {
		return 5297;
	}

	@Override
	public boolean equals(final Object obj) {
		return obj instanceof ListProducedItemDetailAction;
	}
}
| gpl-2.0 |
tomas-pluskal/mzmine2 | src/main/java/net/sf/mzmine/modules/peaklistmethods/identification/onlinedbsearch/databases/PubChemGateway.java | 6242 | /*
* Copyright 2006-2015 The MZmine 2 Development Team
*
* This file is part of MZmine 2.
*
* MZmine 2 is free software; you can redistribute it and/or modify it under the
* terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2 of the License, or (at your option) any later
* version.
*
* MZmine 2 is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
* A PARTICULAR PURPOSE. See the GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along with
* MZmine 2; if not, write to the Free Software Foundation, Inc., 51 Franklin St,
* Fifth Floor, Boston, MA 02110-1301 USA
*/
package net.sf.mzmine.modules.peaklistmethods.identification.onlinedbsearch.databases;
import java.io.IOException;
import java.net.URL;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import net.sf.mzmine.modules.peaklistmethods.identification.onlinedbsearch.DBCompound;
import net.sf.mzmine.modules.peaklistmethods.identification.onlinedbsearch.DBGateway;
import net.sf.mzmine.modules.peaklistmethods.identification.onlinedbsearch.OnlineDatabase;
import net.sf.mzmine.parameters.ParameterSet;
import net.sf.mzmine.parameters.parametertypes.tolerances.MZTolerance;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import com.google.common.collect.Range;
/**
 * DBGateway implementation that queries the PubChem compound database
 * through the NCBI Entrez E-utilities (ESearch/ESummary) web services.
 */
public class PubChemGateway implements DBGateway {

    public static final String pubchemEntryAddress = "http://pubchem.ncbi.nlm.nih.gov/summary/summary.cgi?cid=";
    public static final String pubchem2DStructureAddress = "http://pubchem.ncbi.nlm.nih.gov/summary/summary.cgi?disopt=SaveSDF&cid=";
    public static final String pubchem3DStructureAddress = "http://pubchem.ncbi.nlm.nih.gov/summary/summary.cgi?disopt=3DSaveSDF&cid=";

    /**
     * Searches for CIDs of PubChem compounds based on their exact
     * (monoisotopic) mass. Returns at most numOfResults results, sorted
     * ascending by CID (the query uses sort=cida).
     *
     * @param mass the monoisotopic mass to search for
     * @param mzTolerance tolerance defining the accepted mass window
     * @param numOfResults maximum number of CIDs to return
     * @param parameters module parameters (unused by this gateway)
     * @return array of matching PubChem CIDs, possibly empty
     * @throws IOException if the remote query fails or its response cannot
     *         be parsed
     */
    public String[] findCompounds(double mass, MZTolerance mzTolerance,
            int numOfResults, ParameterSet parameters) throws IOException {

        Range<Double> toleranceRange = mzTolerance.getToleranceRange(mass);

        // Build an NCBI E-utilities ESearch query over the pccompound
        // database, restricted to the [min:max] monoisotopic mass window.
        StringBuilder pubchemUrl = new StringBuilder();
        pubchemUrl
                .append("http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?usehistory=n&db=pccompound&sort=cida&retmax=");
        pubchemUrl.append(numOfResults);
        pubchemUrl.append("&term=");
        pubchemUrl.append(toleranceRange.lowerEndpoint());
        pubchemUrl.append(":");
        pubchemUrl.append(toleranceRange.upperEndpoint());
        pubchemUrl.append("[MonoisotopicMass]");

        NodeList cidElements;
        try {
            Document parsedResult = parseDocument(pubchemUrl.toString());
            cidElements = selectNodes(parsedResult, "//eSearchResult/IdList/Id");
        } catch (Exception e) {
            // Callers only expect IOException; wrap parser/XPath failures.
            throw new IOException(e);
        }

        String[] cidArray = new String[cidElements.getLength()];
        for (int i = 0; i < cidElements.getLength(); i++) {
            Element cidElement = (Element) cidElements.item(i);
            cidArray[i] = cidElement.getTextContent();
        }

        return cidArray;
    }

    /**
     * Retrieves the details (name, molecular formula, and entry/structure
     * URLs) of a PubChem compound via the E-utilities ESummary service.
     *
     * @param CID the PubChem compound identifier
     * @param parameters module parameters (unused by this gateway)
     * @return a DBCompound describing the compound
     * @throws IOException if the query fails or the response contains no
     *         usable compound name or molecular formula
     */
    public DBCompound getCompound(String CID, ParameterSet parameters)
            throws IOException {

        String url = "http://eutils.ncbi.nlm.nih.gov/entrez/eutils/esummary.fcgi?db=pccompound&id="
                + CID;

        Element nameElement, formulaElement;

        try {
            Document parsedResult = parseDocument(url);

            // Prefer a MeSH heading, fall back to a synonym, then to the
            // IUPAC name.
            nameElement = firstElement(parsedResult,
                    "//eSummaryResult/DocSum/Item[@Name='MeSHHeadingList']/Item");
            if (nameElement == null) {
                nameElement = firstElement(parsedResult,
                        "//eSummaryResult/DocSum/Item[@Name='SynonymList']/Item");
            }
            if (nameElement == null) {
                nameElement = firstElement(parsedResult,
                        "//eSummaryResult/DocSum/Item[@Name='IUPACName']");
            }
            if (nameElement == null)
                throw new IOException("Could not parse compound name");

            formulaElement = firstElement(parsedResult,
                    "//eSummaryResult/DocSum/Item[@Name='MolecularFormula']");
            // Guard added: previously a missing formula item caused a raw
            // NullPointerException below instead of the declared IOException.
            if (formulaElement == null)
                throw new IOException("Could not parse compound formula");

        } catch (Exception e) {
            throw new IOException(e);
        }

        String compoundName = nameElement.getTextContent();
        String compoundFormula = formulaElement.getTextContent();
        URL entryURL = new URL(pubchemEntryAddress + CID);
        URL structure2DURL = new URL(pubchem2DStructureAddress + CID);
        URL structure3DURL = new URL(pubchem3DStructureAddress + CID);

        DBCompound newCompound = new DBCompound(OnlineDatabase.PubChem, CID,
                compoundName, compoundFormula, entryURL, structure2DURL,
                structure3DURL);

        return newCompound;
    }

    /** Downloads and parses the document at the given URL into a DOM tree. */
    private static Document parseDocument(String url) throws Exception {
        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        DocumentBuilder builder = dbf.newDocumentBuilder();
        return builder.parse(url);
    }

    /** Evaluates an XPath expression and returns the matching node list. */
    private static NodeList selectNodes(Document document, String expression)
            throws Exception {
        XPath xpath = XPathFactory.newInstance().newXPath();
        XPathExpression expr = xpath.compile(expression);
        return (NodeList) expr.evaluate(document, XPathConstants.NODESET);
    }

    /** Returns the first element matched by the XPath expression, or null. */
    private static Element firstElement(Document document, String expression)
            throws Exception {
        return (Element) selectNodes(document, expression).item(0);
    }
}
| gpl-2.0 |
md-5/jdk10 | test/hotspot/jtreg/vmTestbase/jit/t/t065/t065.java | 2392 | /*
* Copyright (c) 2008, 2018, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
*
* @summary converted from VM Testbase jit/t/t065.
* VM Testbase keywords: [jit, quick]
*
* @library /vmTestbase
* /test/lib
* @run driver jdk.test.lib.FileInstaller . .
* @build jit.t.t065.t065
* @run driver ExecDriver --java jit.t.t065.t065
*/
package jit.t.t065;
import nsk.share.TestFailure;
import nsk.share.GoldChecker;
// Main() does getfields of k.b and putfields of l.b. K.set() does putfields
// of k.b; l.show() does getfields of l.b. The idea is, you jit only
// main. If the jit and the VM agree about the container size of a static
// field of type byte, you get the right answers. If not, the test fails.
// Holder whose set() performs a narrowing byte putfield each call; the test
// uses it to compare how the jit and the VM size static byte containers.
class k {
    byte b;
    int i = -129;

    /** Stores the low byte of i into b, then advances i by one. */
    void set() {
        b = (byte) i;
        i = i + 1;
    }
}
// Mirror of class k: its byte field is written by main() via putfield and
// read back here via getfield when show() prints it.
class l {
    byte b;
    int i = -129;

    /** Prints the current value of b through the gold-output checker. */
    void show() {
        t065.goldChecker.println("lo.b == " + b);
    }
}
class t065 {
    // Gold-file checker: collects println output and compares it against the
    // expected golden output when check() is called.
    public static final GoldChecker goldChecker = new GoldChecker("t065");

    public static void main(String argv[]) {
        k ko = new k();
        l lo = new l();

        // 258 iterations drive both int counters from -129 upward so the
        // narrowing byte casts wrap through the whole byte value range.
        for (int i = 0; i < 258; ++i) {
            ko.set();
            t065.goldChecker.println("ko.b == " + ko.b);
            lo.b = (byte) lo.i;
            ++lo.i;
            lo.show();
        }
        t065.goldChecker.check();
    }
}
| gpl-2.0 |
archienz/universal-media-server | src/main/java/net/pms/dlna/Feed.java | 7867 | /*
* PS3 Media Server, for streaming any medias to your PS3.
* Copyright (C) 2008 A.Brochard
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; version 2
* of the License only.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package net.pms.dlna;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.jdom.Content;
import org.jdom.Element;
import com.sun.syndication.feed.synd.SyndCategory;
import com.sun.syndication.feed.synd.SyndEnclosure;
import com.sun.syndication.feed.synd.SyndEntry;
import com.sun.syndication.feed.synd.SyndFeed;
import com.sun.syndication.io.SyndFeedInput;
import com.sun.syndication.io.XmlReader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* TODO: Change all instance variables to private. For backwards compatibility
* with external plugin code the variables have all been marked as deprecated
* instead of changed to private, but this will surely change in the future.
* When everything has been changed to private, the deprecated note can be
* removed.
*/
public class Feed extends DLNAResource {
	private static final Logger LOGGER = LoggerFactory.getLogger(Feed.class);

	/**
	 * @deprecated Use standard getter and setter to access this variable.
	 */
	@Deprecated
	protected String name;

	/**
	 * @deprecated Use standard getter and setter to access this variable.
	 */
	@Deprecated
	protected String url;

	/**
	 * @deprecated Use standard getter and setter to access this variable.
	 */
	@Deprecated
	protected String tempItemTitle;

	/**
	 * @deprecated Use standard getter and setter to access this variable.
	 */
	@Deprecated
	protected String tempItemLink;

	/**
	 * @deprecated Use standard getter and setter to access this variable.
	 */
	@Deprecated
	protected String tempFeedLink;

	/**
	 * @deprecated Use standard getter and setter to access this variable.
	 */
	@Deprecated
	protected String tempCategory;

	/**
	 * @deprecated Use standard getter and setter to access this variable.
	 */
	@Deprecated
	protected String tempItemThumbURL;

	// Parses the feed as soon as the resource is resolved; parse failures
	// are logged rather than propagated.
	@Override
	public void resolve() {
		super.resolve();
		try {
			parse();
		} catch (Exception e) {
			LOGGER.error("Error in parsing stream: " + url, e);
		}
	}

	/**
	 * Creates a feed resource.
	 *
	 * @param name display name of the feed
	 * @param url address of the RSS/Atom feed
	 * @param type media type constant, passed through to the superclass
	 */
	public Feed(String name, String url, int type) {
		super(type);
		setUrl(url);
		setName(name);
	}

	/**
	 * Downloads the feed and rebuilds this resource's state from it: the
	 * feed title becomes the resource name, and each entry is turned into a
	 * child via {@link #manageItem()} using the temp* holder fields.
	 *
	 * @throws Exception if the feed cannot be downloaded or parsed
	 */
	@SuppressWarnings("unchecked")
	public void parse() throws Exception {
		SyndFeedInput input = new SyndFeedInput();
		// downloadAndSendBinary is inherited; a null result means no data
		// was obtained, in which case parsing is skipped entirely.
		byte b[] = downloadAndSendBinary(url);
		if (b != null) {
			SyndFeed feed = input.build(new XmlReader(new ByteArrayInputStream(b)));
			setName(feed.getTitle());
			if (feed.getCategories() != null && feed.getCategories().size() > 0) {
				SyndCategory category = (SyndCategory) feed.getCategories().get(0);
				setTempCategory(category.getName());
			}
			List<SyndEntry> entries = feed.getEntries();
			for (SyndEntry entry : entries) {
				setTempItemTitle(entry.getTitle());
				setTempItemLink(entry.getLink());
				setTempFeedLink(entry.getUri());
				setTempItemThumbURL(null);
				// Non-standard extensions (e.g. media:group) are exposed by
				// ROME as "foreign markup" JDOM elements.
				ArrayList<Element> elements = (ArrayList<Element>) entry.getForeignMarkup();
				for (Element elt : elements) {
					if ("group".equals(elt.getName()) && "media".equals(elt.getNamespacePrefix())) {
						List<Content> subElts = elt.getContent();
						for (Content subelt : subElts) {
							if (subelt instanceof Element) {
								parseElement((Element) subelt, false);
							}
						}
					}
					parseElement(elt, true);
				}
				// A non-blank enclosure URL overrides the entry link.
				List<SyndEnclosure> enclosures = entry.getEnclosures();
				for (SyndEnclosure enc : enclosures) {
					if (StringUtils.isNotBlank(enc.getUrl())) {
						setTempItemLink(enc.getUrl());
					}
				}
				manageItem();
			}
		}
		// Record the parse time; isRefreshNeeded() compares against it.
		setLastmodified(System.currentTimeMillis());
	}

	// Extracts the item link and thumbnail URL from a media/exInfo extension
	// element; recurses one level into the children of media:content.
	// parseLink guards against overwriting the link when called on nested
	// elements.
	@SuppressWarnings("unchecked")
	private void parseElement(Element elt, boolean parseLink) {
		if ("content".equals(elt.getName()) && "media".equals(elt.getNamespacePrefix())) {
			if (parseLink) {
				setTempItemLink(elt.getAttribute("url").getValue());
			}
			List<Content> subElts = elt.getContent();
			for (Content subelt : subElts) {
				if (subelt instanceof Element) {
					parseElement((Element) subelt, false);
				}
			}
		}
		// Only the first thumbnail found wins (the null checks below).
		if ("thumbnail".equals(elt.getName()) && "media".equals(elt.getNamespacePrefix())
				&& getTempItemThumbURL() == null) {
			setTempItemThumbURL(elt.getAttribute("url").getValue());
		}
		if ("image".equals(elt.getName()) && "exInfo".equals(elt.getNamespacePrefix())
				&& getTempItemThumbURL() == null) {
			setTempItemThumbURL(elt.getValue());
		}
	}

	// A feed itself has no stream; isFolder() below reports it as a folder,
	// and its FeedItem children carry the actual media links.
	public InputStream getInputStream() throws IOException {
		return null;
	}

	public String getName() {
		return name;
	}

	public boolean isFolder() {
		return true;
	}

	public long length() {
		return 0;
	}

	// Note: unrelated to the inherited lastmodified property set in parse(),
	// which is used only for refresh timing in isRefreshNeeded().
	public long lastModified() {
		return 0;
	}

	@Override
	public String getSystemName() {
		return url;
	}

	@Override
	public boolean isValid() {
		return true;
	}

	// Creates a child resource from the entry currently held in the temp*
	// fields. Subclasses may override to build different child types.
	protected void manageItem() {
		FeedItem fi = new FeedItem(getTempItemTitle(), getTempItemLink(), getTempItemThumbURL(), null, getSpecificType());
		addChild(fi);
	}

	// Refresh the feed at most once per hour (3600000 ms).
	@Override
	public boolean isRefreshNeeded() {
		return (System.currentTimeMillis() - getLastmodified() > 3600000);
	}

	// Re-downloads and re-parses the feed, replacing all children.
	@Override
	public void doRefreshChildren() {
		try {
			getChildren().clear();
			parse();
		} catch (Exception e) {
			LOGGER.error("Error in parsing stream: " + url, e);
		}
	}

	/**
	 * @return the url
	 * @since 1.50
	 */
	protected String getUrl() {
		return url;
	}

	/**
	 * @param url the url to set
	 * @since 1.50
	 */
	protected void setUrl(String url) {
		this.url = url;
	}

	/**
	 * @return the tempItemTitle
	 * @since 1.50
	 */
	protected String getTempItemTitle() {
		return tempItemTitle;
	}

	/**
	 * @param tempItemTitle the tempItemTitle to set
	 * @since 1.50
	 */
	protected void setTempItemTitle(String tempItemTitle) {
		this.tempItemTitle = tempItemTitle;
	}

	/**
	 * @return the tempItemLink
	 * @since 1.50
	 */
	protected String getTempItemLink() {
		return tempItemLink;
	}

	/**
	 * @param tempItemLink the tempItemLink to set
	 * @since 1.50
	 */
	protected void setTempItemLink(String tempItemLink) {
		this.tempItemLink = tempItemLink;
	}

	/**
	 * @return the tempFeedLink
	 * @since 1.50
	 */
	protected String getTempFeedLink() {
		return tempFeedLink;
	}

	/**
	 * @param tempFeedLink the tempFeedLink to set
	 * @since 1.50
	 */
	protected void setTempFeedLink(String tempFeedLink) {
		this.tempFeedLink = tempFeedLink;
	}

	/**
	 * @return the tempCategory
	 * @since 1.50
	 */
	protected String getTempCategory() {
		return tempCategory;
	}

	/**
	 * @param tempCategory the tempCategory to set
	 * @since 1.50
	 */
	protected void setTempCategory(String tempCategory) {
		this.tempCategory = tempCategory;
	}

	/**
	 * @return the tempItemThumbURL
	 * @since 1.50
	 */
	protected String getTempItemThumbURL() {
		return tempItemThumbURL;
	}

	/**
	 * @param tempItemThumbURL the tempItemThumbURL to set
	 * @since 1.50
	 */
	protected void setTempItemThumbURL(String tempItemThumbURL) {
		this.tempItemThumbURL = tempItemThumbURL;
	}

	/**
	 * @param name the name to set
	 * @since 1.50
	 */
	protected void setName(String name) {
		this.name = name;
	}
}
| gpl-2.0 |
bseznec/TP4INFO | Reseau/TP5_Client/src/hello/GetArea.java | 1093 |
package hello;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for getArea complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="getArea">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="arg0" type="{http://www.w3.org/2001/XMLSchema}double"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE(review): this looks like a wsimport/JAXB-generated client class —
// regenerating the web-service stubs would overwrite hand edits; confirm
// before modifying.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "getArea", propOrder = {
    "arg0"
})
public class GetArea {

    // Bound to the required <arg0> element (xs:double) of the getArea
    // complex type; see the schema fragment in the class javadoc.
    protected double arg0;

    /**
     * Gets the value of the arg0 property.
     *
     */
    public double getArg0() {
        return arg0;
    }

    /**
     * Sets the value of the arg0 property.
     *
     */
    public void setArg0(double value) {
        this.arg0 = value;
    }

}
| gpl-3.0 |
INSA-Rennes/TP4INFO | IDM/IDM/src/robotG/robot/RobotFactory.java | 2535 | /**
*/
package robotG.robot;
import org.eclipse.emf.ecore.EFactory;
/**
* <!-- begin-user-doc -->
* The <b>Factory</b> for the model.
* It provides a create method for each non-abstract class of the model.
* <!-- end-user-doc -->
* @see robotG.robot.RobotPackage
* @generated
*/
public interface RobotFactory extends EFactory {
	// NOTE: EMF-generated factory for the robot model. The @generated javadoc
	// tags below are significant to the EMF tooling: hand edits to tagged
	// members are discarded when the model code is regenerated.
	/**
	 * The singleton instance of the factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	RobotFactory eINSTANCE = robotG.robot.impl.RobotFactoryImpl.init();

	/**
	 * Returns a new object of class '<em>Move</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return a new object of class '<em>Move</em>'.
	 * @generated
	 */
	Move createMove();

	/**
	 * Returns a new object of class '<em>Bip</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return a new object of class '<em>Bip</em>'.
	 * @generated
	 */
	Bip createBip();

	/**
	 * Returns a new object of class '<em>Turn</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return a new object of class '<em>Turn</em>'.
	 * @generated
	 */
	Turn createTurn();

	/**
	 * Returns a new object of class '<em>Set Turn Angle</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return a new object of class '<em>Set Turn Angle</em>'.
	 * @generated
	 */
	SetTurnAngle createSetTurnAngle();

	/**
	 * Returns a new object of class '<em>Has Turned</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return a new object of class '<em>Has Turned</em>'.
	 * @generated
	 */
	HasTurned createHasTurned();

	/**
	 * Returns a new object of class '<em>Display</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return a new object of class '<em>Display</em>'.
	 * @generated
	 */
	Display createDisplay();

	/**
	 * Returns a new object of class '<em>Obstacle</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return a new object of class '<em>Obstacle</em>'.
	 * @generated
	 */
	Obstacle createObstacle();

	/**
	 * Returns a new object of class '<em>Stop Engine</em>'.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return a new object of class '<em>Stop Engine</em>'.
	 * @generated
	 */
	StopEngine createStopEngine();

	/**
	 * Returns the package supported by this factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @return the package supported by this factory.
	 * @generated
	 */
	RobotPackage getRobotPackage();

} //RobotFactory
| gpl-3.0 |
hltfbk/Excitement-TDMLEDA | biutee/src/main/java/eu/excitementproject/eop/biutee/rteflow/macro/gap/baseline/GapBaselineV1Tools.java | 7208 | package eu.excitementproject.eop.biutee.rteflow.macro.gap.baseline;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import eu.excitementproject.eop.biutee.classifiers.ClassifierException;
import eu.excitementproject.eop.biutee.classifiers.LinearClassifier;
import eu.excitementproject.eop.biutee.rteflow.macro.Feature;
import eu.excitementproject.eop.biutee.rteflow.macro.gap.GapDescription;
import eu.excitementproject.eop.biutee.rteflow.macro.gap.GapDescriptionGenerator;
import eu.excitementproject.eop.biutee.rteflow.macro.gap.GapEnvironment;
import eu.excitementproject.eop.biutee.rteflow.macro.gap.GapException;
import eu.excitementproject.eop.biutee.rteflow.macro.gap.GapFeaturesUpdate;
import eu.excitementproject.eop.biutee.rteflow.macro.gap.GapHeuristicMeasure;
import eu.excitementproject.eop.biutee.utilities.BiuteeConstants;
import eu.excitementproject.eop.common.codeannotations.NotThreadSafe;
import eu.excitementproject.eop.common.datastructures.immutable.ImmutableSet;
import eu.excitementproject.eop.common.representation.parse.representation.basic.InfoGetFields;
import eu.excitementproject.eop.common.representation.parse.tree.TreeAndParentMap;
import eu.excitementproject.eop.transformations.alignment.AlignmentCriteria;
import eu.excitementproject.eop.transformations.representation.ExtendedInfo;
import eu.excitementproject.eop.transformations.representation.ExtendedNode;
import eu.excitementproject.eop.transformations.utilities.UnigramProbabilityEstimation;
/**
*
* @author Asher Stern
* @since Sep 1, 2013
*
* @param <I>
* @param <ExtendedNode>
*/
@NotThreadSafe
public class GapBaselineV1Tools implements GapFeaturesUpdate<ExtendedInfo, ExtendedNode>, GapHeuristicMeasure<ExtendedInfo, ExtendedNode>, GapDescriptionGenerator<ExtendedInfo, ExtendedNode>
{
	/**
	 * @param hypothesisTree the hypothesis parse tree that processed text trees are compared against
	 * @param classifierForSearch linear classifier whose feature-vector product yields the (negated) cost
	 * @param mleEstimation unigram probability estimation for lemmas; only used when
	 *        BiuteeConstants.USE_MLE_FOR_GAP is set
	 * @param stopWords currently unused by this class (kept for construction compatibility)
	 * @param alignmentCriteria criteria deciding when text nodes/edges align with hypothesis nodes/edges
	 */
	public GapBaselineV1Tools(TreeAndParentMap<ExtendedInfo, ExtendedNode> hypothesisTree,
			LinearClassifier classifierForSearch,
			UnigramProbabilityEstimation mleEstimation,
			ImmutableSet<String> stopWords,
			AlignmentCriteria<ExtendedInfo, ExtendedNode> alignmentCriteria)
	{
		super();
		this.hypothesisTree = hypothesisTree;
		this.classifierForSearch = classifierForSearch;
		this.mleEstimation = mleEstimation;
		this.stopWords = stopWords;
		this.alignmentCriteria = alignmentCriteria;
	}

	// Builds a human-readable summary of what the given tree still misses
	// relative to the hypothesis: named entities, regular nodes,
	// non-content words, and edges.
	@Override
	public GapDescription describeGap(TreeAndParentMap<ExtendedInfo, ExtendedNode> tree,
			GapEnvironment<ExtendedInfo, ExtendedNode> environment) throws GapException
	{
		GapBaselineV1Calculator calculator = getCalculator(tree,environment);
		String description =
				strListNodes("missing named entities: ",calculator.getUncoveredNodesNamedEntities(),false)+
				strListNodes("missing nodes: ",calculator.getUncoveredNodesNotNamedEntities(),false)+
				strListNodes("missing non-content words: ",calculator.getUncoveredNodesNonContentWords(),false)+
				strListNodes("missing edges: ",calculator.getUncoveredEdges(),true);
		return new GapDescription(description);
	}

	// The gap measure is the cost increase caused by adding the gap
	// features: cost(vector with gap features) - cost(vector without).
	// A negative difference is treated as an error.
	@Override
	public double measure(TreeAndParentMap<ExtendedInfo, ExtendedNode> tree,
			Map<Integer, Double> featureVector, GapEnvironment<ExtendedInfo, ExtendedNode> environment)
			throws GapException
	{
		try
		{
			// Cost is the negated classifier product by convention here.
			double costWithoutGap = -classifierForSearch.getProduct(featureVector);
			Map<Integer, Double> featureVectorWithGap = updateForGap(tree,featureVector,environment);
			double costWithGap = -classifierForSearch.getProduct(featureVectorWithGap);
			double ret = costWithGap-costWithoutGap;
			if (ret<0) throw new GapException("gap measure is negative: "+String.format("%-4.4f", ret));
			//logger.info("gap measure = "+String.format("%-6.6f", ret));
			return ret;
		}
		catch(ClassifierException e){throw new GapException("Failed to calculate gap measure, due to a problem in the classifier.",e);}
	}

	// Returns a copy of the feature vector with the four baseline gap
	// features filled in from the calculator's uncovered nodes/edges.
	// The input vector is not modified.
	@Override
	public Map<Integer, Double> updateForGap(TreeAndParentMap<ExtendedInfo, ExtendedNode> tree,
			Map<Integer, Double> featureVector, GapEnvironment<ExtendedInfo, ExtendedNode> environment)
			throws GapException
	{
		GapBaselineV1Calculator calculator = getCalculator(tree,environment);
		Map<Integer, Double> newFeatureVector = new LinkedHashMap<>();
		newFeatureVector.putAll(featureVector);
		newFeatureVector.put(Feature.GAP_BASELINE_V1_MISSING_NODE.getFeatureIndex(),
				featureValueMissingNodes(calculator.getUncoveredNodesNotNamedEntities()));
		newFeatureVector.put(Feature.GAP_BASELINE_V1_MISSING_NODE_NON_CONTENT_WORD.getFeatureIndex(),
				featureValueMissingNodes(calculator.getUncoveredNodesNonContentWords()));
		newFeatureVector.put(Feature.GAP_BASELINE_V1_MISSING_NODE_NAMED_ENTITY.getFeatureIndex(),
				featureValueMissingNodes(calculator.getUncoveredNodesNamedEntities()));
		newFeatureVector.put(Feature.GAP_BASELINE_V1_MISSING_EDGE.getFeatureIndex(),
				(double)(-calculator.getUncoveredEdges().size()) );
		return newFeatureVector;
	}

	// Feature value for a list of missing nodes: the sum of log unigram
	// probabilities of their lemmas when USE_MLE_FOR_GAP is set, otherwise
	// simply minus the node count. Either way the result must be <= 0.
	private double featureValueMissingNodes(List<ExtendedNode> nodes) throws GapException
	{
		double ret = 0.0;
		if (BiuteeConstants.USE_MLE_FOR_GAP)
		{
			for (ExtendedNode node : nodes)
			{
				String lemma = InfoGetFields.getLemma(node.getInfo());
				ret += Math.log(mleEstimation.getEstimationFor(lemma));
			}
		}
		else
		{
			ret = (double)(-nodes.size());
		}
		if (ret>0.0) {throw new GapException("Bug or corrupted Unigram-MLE: invalid feature value. Feature value is higher than zero.");}
		return ret;
	}

	// Single-entry cache: reuses the last calculator when called again with
	// the same tree (identity comparison). Synchronized because it mutates
	// the cache fields; the class as a whole is still @NotThreadSafe.
	private synchronized GapBaselineV1Calculator getCalculator(TreeAndParentMap<ExtendedInfo, ExtendedNode> givenTree, GapEnvironment<ExtendedInfo, ExtendedNode> environment)
	{
		ExtendedNode tree = givenTree.getTree();
		if ( (lastTree==tree) && (lastCalculator!=null) )
		{
			return lastCalculator;
		}
		else
		{
			// Clear first so a failure during construction/calculation does
			// not leave a stale calculator associated with the new tree.
			lastTree = null;
			lastCalculator = null;
			lastCalculator = new GapBaselineV1Calculator(givenTree, hypothesisTree, environment, alignmentCriteria);
			lastCalculator.calculate();
			lastTree = tree;
			return lastCalculator;
		}
	}

	// Prepends the given label to the node list string, adding a trailing
	// newline; returns "" when the node list is empty.
	private String strListNodes(String prefix, List<ExtendedNode> nodes, boolean edge)
	{
		String strOfNodes = strListNodes(nodes,edge);
		if (strOfNodes.length()>0)
		{
			return prefix+strOfNodes+"\n";
		}
		else return "";
	}

	// Comma-separated lemmas of the given nodes; when edge is true, each
	// lemma is followed by "<parentLemma" (parent taken from the hypothesis
	// tree's parent map).
	private String strListNodes(List<ExtendedNode> nodes, boolean edge)
	{
		StringBuilder sb = new StringBuilder();
		boolean firstIteration = true;
		for (ExtendedNode node : nodes)
		{
			if (firstIteration){firstIteration=false;}
			else {sb.append(", ");}
			sb.append(InfoGetFields.getLemma(node.getInfo()));
			if (edge)
			{
				ExtendedNode parent = hypothesisTree.getParentMap().get(node);
				if (parent!=null)
				{
					sb.append("<").append(InfoGetFields.getLemma(parent.getInfo()));
				}
			}
		}
		return sb.toString();
	}

	private final TreeAndParentMap<ExtendedInfo, ExtendedNode> hypothesisTree;
	private final LinearClassifier classifierForSearch;
	private final UnigramProbabilityEstimation mleEstimation;
	@SuppressWarnings("unused")
	private final ImmutableSet<String> stopWords;
	private final AlignmentCriteria<ExtendedInfo, ExtendedNode> alignmentCriteria;

	// Single-entry cache used by getCalculator(), keyed by tree identity.
	private ExtendedNode lastTree = null;
	private GapBaselineV1Calculator lastCalculator = null;
}
| gpl-3.0 |
jtux270/translate | ovirt/3.6_source/backend/manager/modules/utils/src/test/java/org/ovirt/engine/core/utils/AbstractPropertiesTestBase.java | 1680 | package org.ovirt.engine.core.utils;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeNotNull;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import org.junit.Before;
import org.junit.Test;
import org.ovirt.engine.core.common.errors.EngineError;
import org.ovirt.engine.core.common.errors.EngineMessage;
public class AbstractPropertiesTestBase {

    // Path of the properties file under test, relative to ${basedir}.
    private String relativePath;
    private File file;

    public AbstractPropertiesTestBase(String relativePath) {
        this.relativePath = relativePath;
    }

    /** Resolves the file under ${basedir}; skips the tests when basedir is unset. */
    @Before
    public void loadFileFromPath() {
        String baseDir = System.getProperty("basedir");
        assumeNotNull(baseDir);
        file = new File(baseDir, relativePath);
    }

    /** The properties file must not define the same key twice. */
    @Test
    public void testDuplicateKeys() throws IOException {
        NoDuplicateProperties properties = new NoDuplicateProperties();
        try (InputStream input = new FileInputStream(file)) {
            properties.load(input);
        } catch (DuplicatePropertyException exception) {
            fail("Check for duplicate keys in " + file.getAbsolutePath() + " failed: " + exception.getMessage());
        }
    }

    /** Every key in the file must map to a known enum constant. */
    @Test
    public void testRedundantMessages() throws IOException {
        EnumTranslationProperties properties = new EnumTranslationProperties(EngineMessage.class, EngineError.class);
        try (InputStream input = new FileInputStream(file)) {
            properties.load(input);
        } catch (MissingEnumTranslationException exception) {
            fail("Check for redundant keys in " + file.getAbsolutePath() + " failed: " + exception.getMessage());
        }
    }
}
| gpl-3.0 |
eethomas/eucalyptus | clc/modules/msgs/src/main/java/edu/ucsb/eucalyptus/msgs/BaseMessages.java | 2224 | /*************************************************************************
* Copyright 2009-2014 Eucalyptus Systems, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
* Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta
* CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need
* additional information or have any questions.
************************************************************************/
package edu.ucsb.eucalyptus.msgs;
import java.io.IOException;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.map.ObjectMapper;
import org.codehaus.jackson.map.SerializationConfig;
/**
*
*/
public class BaseMessages {

  private static final ObjectMapper mapper = new ObjectMapper( );
  static {
    // Hide BaseMessage bookkeeping properties from (de)serialization via the
    // mix-in, and tolerate types with no serializable properties.
    mapper.getSerializationConfig().addMixInAnnotations( BaseMessage.class, BaseMessageMixIn.class);
    mapper.getDeserializationConfig().addMixInAnnotations( BaseMessage.class, BaseMessageMixIn.class);
    mapper.getSerializationConfig().set( SerializationConfig.Feature.FAIL_ON_EMPTY_BEANS, false );
  }

  /**
   * Deep copies a message by round-tripping it through a JSON tree and
   * reading it back as its own runtime type.
   *
   * @param message the message to copy
   * @return a copy of the given message
   * @throws IOException if JSON (de)serialization fails
   */
  @SuppressWarnings( "unchecked" )
  public static <T extends BaseMessage> T deepCopy( final T message ) throws IOException {
    return (T) deepCopy( message, message.getClass( ) );
  }

  /**
   * Deep copies a message to a (possibly different) message type via JSON.
   *
   * @param message the source message
   * @param resultType the target message class
   * @return an instance of resultType populated from the source message
   * @throws IOException if JSON (de)serialization fails
   */
  public static <T extends BaseMessage, R extends BaseMessage> R deepCopy(
      final T message,
      final Class<R> resultType
  ) throws IOException {
    // readValue( JsonNode, Class<R> ) already returns R; the previous
    // explicit (R) cast was redundant.
    return mapper.readValue( mapper.valueToTree( message ), resultType );
  }

  /** Jackson mix-in ignoring BaseMessage framework properties. */
  @JsonIgnoreProperties( { "correlationId", "effectiveUserId", "reply", "statusMessage", "userId" } )
  private static final class BaseMessageMixIn { }
}
| gpl-3.0 |
eethomas/eucalyptus | clc/modules/cluster-manager/src/main/java/com/eucalyptus/compute/vpc/VpcInvalidator.java | 1127 | /*************************************************************************
* Copyright 2009-2014 Eucalyptus Systems, Inc.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*
* Please contact Eucalyptus Systems, Inc., 6755 Hollister Ave., Goleta
* CA 93117, USA or visit http://www.eucalyptus.com/licenses/ if you need
* additional information or have any questions.
************************************************************************/
package com.eucalyptus.compute.vpc;
/**
*
*/
public interface VpcInvalidator {

  /**
   * Requests invalidation for the VPC resource with the given identifier.
   *
   * @param resourceIdentifier identifier of the resource to invalidate
   */
  void invalidate( String resourceIdentifier );
}
| gpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/coe/vo/MicturitionVoCollection.java | 7841 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.coe.vo;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import ims.framework.enumerations.SortOrder;
/**
* Linked to nursing.assessment.Bladder Micturition business object (ID: 1015100002).
*/
public class MicturitionVoCollection extends ims.vo.ValueObjectCollection implements ims.vo.ImsCloneable, Iterable<MicturitionVo>
{
private static final long serialVersionUID = 1L;
private ArrayList<MicturitionVo> col = new ArrayList<MicturitionVo>();
	/** Returns the fully qualified class name of the mapped business object. */
	public String getBoClassName()
	{
		return "ims.nursing.assessment.domain.objects.BladderMicturition";
	}
	/** Appends value to the collection; rejects null and duplicates. Returns true when added. */
	public boolean add(MicturitionVo value)
	{
		if(value == null)
			return false;
		if(this.col.indexOf(value) < 0)
		{
			return this.col.add(value);
		}
		return false;
	}
	/** Inserts value at the given index; rejects null and duplicates. Returns true when inserted. */
	public boolean add(int index, MicturitionVo value)
	{
		if(value == null)
			return false;
		if(this.col.indexOf(value) < 0)
		{
			this.col.add(index, value);
			return true;
		}
		return false;
	}
	/** Removes all elements from the collection. */
	public void clear()
	{
		this.col.clear();
	}
	/** Removes the element at the given index. */
	public void remove(int index)
	{
		this.col.remove(index);
	}
	/** Returns the number of elements in the collection. */
	public int size()
	{
		return this.col.size();
	}
	/** Returns the index of the given instance, or -1 when it is absent. */
	public int indexOf(MicturitionVo instance)
	{
		return col.indexOf(instance);
	}
	/** Returns the element at the given index. */
	public MicturitionVo get(int index)
	{
		return this.col.get(index);
	}
	/** Replaces the element at the given index; rejects null. Returns true when set. */
	public boolean set(int index, MicturitionVo value)
	{
		if(value == null)
			return false;
		this.col.set(index, value);
		return true;
	}
	/** Removes the given instance when present (located via indexOf). */
	public void remove(MicturitionVo instance)
	{
		if(instance != null)
		{
			int index = indexOf(instance);
			if(index >= 0)
				remove(index);
		}
	}
	/** Returns true when the instance is present in the collection. */
	public boolean contains(MicturitionVo instance)
	{
		return indexOf(instance) >= 0;
	}
	/** Deep copy: clones every contained VO; null entries are preserved as null. */
	public Object clone()
	{
		MicturitionVoCollection clone = new MicturitionVoCollection();
		for(int x = 0; x < this.col.size(); x++)
		{
			if(this.col.get(x) != null)
				clone.col.add((MicturitionVo)this.col.get(x).clone());
			else
				clone.col.add(null);
		}
		return clone;
	}
	/** Returns true only when every contained VO reports itself as validated. */
	public boolean isValidated()
	{
		for(int x = 0; x < col.size(); x++)
			if(!this.col.get(x).isValidated())
				return false;
		return true;
	}
	/** Validates all contained VOs with no pre-existing errors. */
	public String[] validate()
	{
		return validate(null);
	}
public String[] validate(String[] existingErrors)
{
if(col.size() == 0)
return null;
java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
if(existingErrors != null)
{
for(int x = 0; x < existingErrors.length; x++)
{
listOfErrors.add(existingErrors[x]);
}
}
for(int x = 0; x < col.size(); x++)
{
String[] listOfOtherErrors = this.col.get(x).validate();
if(listOfOtherErrors != null)
{
for(int y = 0; y < listOfOtherErrors.length; y++)
{
listOfErrors.add(listOfOtherErrors[y]);
}
}
}
int errorCount = listOfErrors.size();
if(errorCount == 0)
return null;
String[] result = new String[errorCount];
for(int x = 0; x < errorCount; x++)
result[x] = (String)listOfErrors.get(x);
return result;
}
public MicturitionVoCollection sort()
{
return sort(SortOrder.ASCENDING);
}
public MicturitionVoCollection sort(boolean caseInsensitive)
{
return sort(SortOrder.ASCENDING, caseInsensitive);
}
public MicturitionVoCollection sort(SortOrder order)
{
return sort(new MicturitionVoComparator(order));
}
public MicturitionVoCollection sort(SortOrder order, boolean caseInsensitive)
{
return sort(new MicturitionVoComparator(order, caseInsensitive));
}
@SuppressWarnings("unchecked")
public MicturitionVoCollection sort(Comparator comparator)
{
Collections.sort(col, comparator);
return this;
}
public ims.nursing.assessment.vo.BladderMicturitionRefVoCollection toRefVoCollection()
{
ims.nursing.assessment.vo.BladderMicturitionRefVoCollection result = new ims.nursing.assessment.vo.BladderMicturitionRefVoCollection();
for(int x = 0; x < this.col.size(); x++)
{
result.add(this.col.get(x));
}
return result;
}
public MicturitionVo[] toArray()
{
MicturitionVo[] arr = new MicturitionVo[col.size()];
col.toArray(arr);
return arr;
}
public Iterator<MicturitionVo> iterator()
{
return col.iterator();
}
@Override
protected ArrayList getTypedCollection()
{
return col;
}
private class MicturitionVoComparator implements Comparator
{
private int direction = 1;
private boolean caseInsensitive = true;
public MicturitionVoComparator()
{
this(SortOrder.ASCENDING);
}
public MicturitionVoComparator(SortOrder order)
{
if (order == SortOrder.DESCENDING)
{
direction = -1;
}
}
public MicturitionVoComparator(SortOrder order, boolean caseInsensitive)
{
if (order == SortOrder.DESCENDING)
{
direction = -1;
}
this.caseInsensitive = caseInsensitive;
}
public int compare(Object obj1, Object obj2)
{
MicturitionVo voObj1 = (MicturitionVo)obj1;
MicturitionVo voObj2 = (MicturitionVo)obj2;
return direction*(voObj1.compareTo(voObj2, this.caseInsensitive));
}
public boolean equals(Object obj)
{
return false;
}
}
public ims.coe.vo.beans.MicturitionVoBean[] getBeanCollection()
{
return getBeanCollectionArray();
}
public ims.coe.vo.beans.MicturitionVoBean[] getBeanCollectionArray()
{
ims.coe.vo.beans.MicturitionVoBean[] result = new ims.coe.vo.beans.MicturitionVoBean[col.size()];
for(int i = 0; i < col.size(); i++)
{
MicturitionVo vo = ((MicturitionVo)col.get(i));
result[i] = (ims.coe.vo.beans.MicturitionVoBean)vo.getBean();
}
return result;
}
public static MicturitionVoCollection buildFromBeanCollection(java.util.Collection beans)
{
MicturitionVoCollection coll = new MicturitionVoCollection();
if(beans == null)
return coll;
java.util.Iterator iter = beans.iterator();
while (iter.hasNext())
{
coll.add(((ims.coe.vo.beans.MicturitionVoBean)iter.next()).buildVo());
}
return coll;
}
public static MicturitionVoCollection buildFromBeanCollection(ims.coe.vo.beans.MicturitionVoBean[] beans)
{
MicturitionVoCollection coll = new MicturitionVoCollection();
if(beans == null)
return coll;
for(int x = 0; x < beans.length; x++)
{
coll.add(beans[x].buildVo());
}
return coll;
}
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/emergency/vo/TriageForRelvantPMHVoCollection.java | 8167 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.emergency.vo;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import ims.framework.enumerations.SortOrder;
/**
 * Linked to emergency.Triage business object (ID: 1086100004).
 *
 * Generated, strongly-typed collection of {@link TriageForRelvantPMHVo} value
 * objects. NOTE(review): machine-generated file ("DO NOT MODIFY") — the
 * "Relvant" spelling is baked into the generated identifiers.
 * Duplicate elements (by equals) are silently rejected by the add operations;
 * null elements are rejected by add/set.
 */
public class TriageForRelvantPMHVoCollection extends ims.vo.ValueObjectCollection implements ims.vo.ImsCloneable, Iterable<TriageForRelvantPMHVo>
{
    private static final long serialVersionUID = 1L;

    // Backing list; never null. All public operations delegate to it.
    private ArrayList<TriageForRelvantPMHVo> col = new ArrayList<TriageForRelvantPMHVo>();

    /** Returns the fully qualified class name of the mapped business object. */
    public String getBoClassName()
    {
        return "ims.emergency.domain.objects.Triage";
    }

    /**
     * Appends the value to the end of the collection.
     * @return true if added; false when the value is null or already present.
     */
    public boolean add(TriageForRelvantPMHVo value)
    {
        if(value == null)
            return false;
        if(this.col.indexOf(value) < 0)
        {
            return this.col.add(value);
        }
        return false;
    }

    /**
     * Inserts the value at the given index.
     * @return true if inserted; false when the value is null or already present.
     */
    public boolean add(int index, TriageForRelvantPMHVo value)
    {
        if(value == null)
            return false;
        if(this.col.indexOf(value) < 0)
        {
            this.col.add(index, value);
            return true;
        }
        return false;
    }

    /** Removes all elements from the collection. */
    public void clear()
    {
        this.col.clear();
    }

    /** Removes the element at the given index. */
    public void remove(int index)
    {
        this.col.remove(index);
    }

    /** @return the number of elements currently held */
    public int size()
    {
        return this.col.size();
    }

    /** @return the index of the given instance, or -1 when not present */
    public int indexOf(TriageForRelvantPMHVo instance)
    {
        return col.indexOf(instance);
    }

    /** @return the element at the given index */
    public TriageForRelvantPMHVo get(int index)
    {
        return this.col.get(index);
    }

    /**
     * Replaces the element at the given index.
     * @return true when replaced; false when the value is null (index untouched).
     */
    public boolean set(int index, TriageForRelvantPMHVo value)
    {
        if(value == null)
            return false;
        this.col.set(index, value);
        return true;
    }

    /** Removes the given instance if present; a null instance is ignored. */
    public void remove(TriageForRelvantPMHVo instance)
    {
        if(instance != null)
        {
            int index = indexOf(instance);
            if(index >= 0)
                remove(index);
        }
    }

    /** @return true when the instance is present in the collection */
    public boolean contains(TriageForRelvantPMHVo instance)
    {
        return indexOf(instance) >= 0;
    }

    /**
     * Deep copy: each non-null element is cloned individually; null elements
     * are preserved at their positions.
     */
    public Object clone()
    {
        TriageForRelvantPMHVoCollection clone = new TriageForRelvantPMHVoCollection();
        for(int x = 0; x < this.col.size(); x++)
        {
            if(this.col.get(x) != null)
                clone.col.add((TriageForRelvantPMHVo)this.col.get(x).clone());
            else
                clone.col.add(null);
        }
        return clone;
    }

    /** @return true only when every element reports itself as validated */
    public boolean isValidated()
    {
        for(int x = 0; x < col.size(); x++)
            if(!this.col.get(x).isValidated())
                return false;
        return true;
    }

    /** Validates all elements with no pre-existing errors. @see #validate(String[]) */
    public String[] validate()
    {
        return validate(null);
    }

    /**
     * Validates every element, appending each element's errors after any
     * supplied pre-existing errors.
     * @param existingErrors errors accumulated by the caller; may be null
     * @return the combined error array, or null when the collection is empty
     *         or no errors were found
     */
    public String[] validate(String[] existingErrors)
    {
        if(col.size() == 0)
            return null;
        java.util.ArrayList<String> listOfErrors = new java.util.ArrayList<String>();
        if(existingErrors != null)
        {
            for(int x = 0; x < existingErrors.length; x++)
            {
                listOfErrors.add(existingErrors[x]);
            }
        }
        for(int x = 0; x < col.size(); x++)
        {
            String[] listOfOtherErrors = this.col.get(x).validate();
            if(listOfOtherErrors != null)
            {
                for(int y = 0; y < listOfOtherErrors.length; y++)
                {
                    listOfErrors.add(listOfOtherErrors[y]);
                }
            }
        }
        int errorCount = listOfErrors.size();
        if(errorCount == 0)
            return null;
        String[] result = new String[errorCount];
        for(int x = 0; x < errorCount; x++)
            result[x] = (String)listOfErrors.get(x);
        return result;
    }

    /** Sorts ascending (in place). @return this collection */
    public TriageForRelvantPMHVoCollection sort()
    {
        return sort(SortOrder.ASCENDING);
    }

    /** Sorts ascending with the given case sensitivity (in place). @return this collection */
    public TriageForRelvantPMHVoCollection sort(boolean caseInsensitive)
    {
        return sort(SortOrder.ASCENDING, caseInsensitive);
    }

    /** Sorts in the given order, case-insensitively (in place). @return this collection */
    public TriageForRelvantPMHVoCollection sort(SortOrder order)
    {
        return sort(new TriageForRelvantPMHVoComparator(order));
    }

    /** Sorts in the given order and case sensitivity (in place). @return this collection */
    public TriageForRelvantPMHVoCollection sort(SortOrder order, boolean caseInsensitive)
    {
        return sort(new TriageForRelvantPMHVoComparator(order, caseInsensitive));
    }

    /**
     * Sorts in place using the supplied comparator.
     * Raw Comparator type is generator-imposed, hence the suppression.
     * @return this collection
     */
    @SuppressWarnings("unchecked")
    public TriageForRelvantPMHVoCollection sort(Comparator comparator)
    {
        Collections.sort(col, comparator);
        return this;
    }

    /** @return a new reference-VO collection holding the same elements */
    public ims.emergency.vo.TriageRefVoCollection toRefVoCollection()
    {
        ims.emergency.vo.TriageRefVoCollection result = new ims.emergency.vo.TriageRefVoCollection();
        for(int x = 0; x < this.col.size(); x++)
        {
            result.add(this.col.get(x));
        }
        return result;
    }

    /** @return a new array containing the elements in collection order */
    public TriageForRelvantPMHVo[] toArray()
    {
        TriageForRelvantPMHVo[] arr = new TriageForRelvantPMHVo[col.size()];
        col.toArray(arr);
        return arr;
    }

    /** @return an iterator over the backing list */
    public Iterator<TriageForRelvantPMHVo> iterator()
    {
        return col.iterator();
    }

    /** Exposes the backing list to the framework base class. */
    @Override
    protected ArrayList getTypedCollection()
    {
        return col;
    }

    /**
     * Comparator delegating to {@code TriageForRelvantPMHVo.compareTo};
     * direction is +1 for ascending, -1 for descending.
     */
    private class TriageForRelvantPMHVoComparator implements Comparator
    {
        private int direction = 1;
        private boolean caseInsensitive = true;

        public TriageForRelvantPMHVoComparator()
        {
            this(SortOrder.ASCENDING);
        }

        public TriageForRelvantPMHVoComparator(SortOrder order)
        {
            if (order == SortOrder.DESCENDING)
            {
                direction = -1;
            }
        }

        public TriageForRelvantPMHVoComparator(SortOrder order, boolean caseInsensitive)
        {
            if (order == SortOrder.DESCENDING)
            {
                direction = -1;
            }
            this.caseInsensitive = caseInsensitive;
        }

        public int compare(Object obj1, Object obj2)
        {
            TriageForRelvantPMHVo voObj1 = (TriageForRelvantPMHVo)obj1;
            TriageForRelvantPMHVo voObj2 = (TriageForRelvantPMHVo)obj2;
            return direction*(voObj1.compareTo(voObj2, this.caseInsensitive));
        }

        // Generator convention: comparator equality is never reported.
        // NOTE(review): always-false equals technically breaks the Object
        // contract (x.equals(x) should be true) — harmless here, but confirm
        // against the generator before relying on comparator identity.
        public boolean equals(Object obj)
        {
            return false;
        }
    }

    /** Alias for {@link #getBeanCollectionArray()}. */
    public ims.emergency.vo.beans.TriageForRelvantPMHVoBean[] getBeanCollection()
    {
        return getBeanCollectionArray();
    }

    /** Converts every element to its serializable bean form, in order. */
    public ims.emergency.vo.beans.TriageForRelvantPMHVoBean[] getBeanCollectionArray()
    {
        ims.emergency.vo.beans.TriageForRelvantPMHVoBean[] result = new ims.emergency.vo.beans.TriageForRelvantPMHVoBean[col.size()];
        for(int i = 0; i < col.size(); i++)
        {
            TriageForRelvantPMHVo vo = ((TriageForRelvantPMHVo)col.get(i));
            result[i] = (ims.emergency.vo.beans.TriageForRelvantPMHVoBean)vo.getBean();
        }
        return result;
    }

    /**
     * Rebuilds a collection from a collection of beans.
     * @param beans bean instances; null yields an empty collection
     */
    public static TriageForRelvantPMHVoCollection buildFromBeanCollection(java.util.Collection beans)
    {
        TriageForRelvantPMHVoCollection coll = new TriageForRelvantPMHVoCollection();
        if(beans == null)
            return coll;
        java.util.Iterator iter = beans.iterator();
        while (iter.hasNext())
        {
            coll.add(((ims.emergency.vo.beans.TriageForRelvantPMHVoBean)iter.next()).buildVo());
        }
        return coll;
    }

    /**
     * Rebuilds a collection from an array of beans.
     * @param beans bean instances; null yields an empty collection
     */
    public static TriageForRelvantPMHVoCollection buildFromBeanCollection(ims.emergency.vo.beans.TriageForRelvantPMHVoBean[] beans)
    {
        TriageForRelvantPMHVoCollection coll = new TriageForRelvantPMHVoCollection();
        if(beans == null)
            return coll;
        for(int x = 0; x < beans.length; x++)
        {
            coll.add(beans[x].buildVo());
        }
        return coll;
    }
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/DomainObjects/src/ims/RefMan/domain/objects/CatsReportNote.java | 13004 | /*
* This code was generated
* Copyright (C) 1995-2004 IMS MAXIMS plc. All rights reserved.
* IMS Development Environment (version 1.80 build 5007.25751)
* WARNING: DO NOT MODIFY the content of this file
* Generated: 16/04/2014, 12:34
*
*/
package ims.RefMan.domain.objects;
/**
 *
 * @author Neil McAnaspie
 * Generated.
 *
 * Generated domain object for the RefMan CatsReportNote business object
 * (class ID 1096100039): a free-text report note attached to a CATS referral,
 * with authoring and recording information. Includes the generated XML
 * import/export machinery. NOTE(review): machine-generated file
 * ("DO NOT MODIFY") — comments document the generated contract only.
 */
public class CatsReportNote extends ims.domain.DomainObject implements ims.domain.SystemInformationRetainer, java.io.Serializable {

    public static final int CLASSID = 1096100039;
    private static final long serialVersionUID = 1096100039L;
    // NOTE(review): looks like an unexpanded template placeholder from the
    // generator — confirm whether "${ClassVersion}" is intentional.
    public static final String CLASSVERSION = "${ClassVersion}";

    @Override
    public boolean shouldCapQuery()
    {
        return true;
    }

    /** Cats Referral */
    private ims.RefMan.domain.objects.CatsReferral catsReferral;
    /** AuthoringInformation */
    private ims.core.clinical.domain.objects.AuthoringInformation authoringInformation;
    /** Final Note */
    private String finalNote;
    /** RecordingInformation */
    private ims.core.clinical.domain.objects.AuthoringInformation recordingInformation;
    /** SystemInformation */
    private ims.domain.SystemInformation systemInformation = new ims.domain.SystemInformation();

    /** Constructs a persistent instance with the given id and version. */
    public CatsReportNote (Integer id, int ver)
    {
        super(id, ver);
    }

    /** Constructs a new transient instance. */
    public CatsReportNote ()
    {
        super();
    }

    /** Constructs a persistent instance, optionally including the record. */
    public CatsReportNote (Integer id, int ver, Boolean includeRecord)
    {
        super(id, ver, includeRecord);
    }

    /** @return the concrete domain class (used by the framework, e.g. for proxies) */
    public Class getRealDomainClass()
    {
        return ims.RefMan.domain.objects.CatsReportNote.class;
    }

    public ims.RefMan.domain.objects.CatsReferral getCatsReferral() {
        return catsReferral;
    }

    public void setCatsReferral(ims.RefMan.domain.objects.CatsReferral catsReferral) {
        this.catsReferral = catsReferral;
    }

    public ims.core.clinical.domain.objects.AuthoringInformation getAuthoringInformation() {
        return authoringInformation;
    }

    public void setAuthoringInformation(ims.core.clinical.domain.objects.AuthoringInformation authoringInformation) {
        this.authoringInformation = authoringInformation;
    }

    public String getFinalNote() {
        return finalNote;
    }

    public void setFinalNote(String finalNote) {
        this.finalNote = finalNote;
    }

    public ims.core.clinical.domain.objects.AuthoringInformation getRecordingInformation() {
        return recordingInformation;
    }

    public void setRecordingInformation(ims.core.clinical.domain.objects.AuthoringInformation recordingInformation) {
        this.recordingInformation = recordingInformation;
    }

    /** Lazily re-creates the system information holder if it was nulled (e.g. by deserialization). */
    public ims.domain.SystemInformation getSystemInformation() {
        if (systemInformation == null) systemInformation = new ims.domain.SystemInformation();
        return systemInformation;
    }

    /**
     * isConfigurationObject
     * Taken from the Usage property of the business object, this method will return
     * a boolean indicating whether this is a configuration object or not
     * Configuration = true, Instantiation = false
     */
    public static boolean isConfigurationObject()
    {
        // Generator emits a constant comparison; for this class Usage is
        // "Instantiation", so this always returns false.
        if ( "Instantiation".equals("Configuration") )
            return true;
        else
            return false;
    }

    public int getClassId() {
        return CLASSID;
    }

    public String getClassVersion()
    {
        return CLASSVERSION;
    }

    /** Builds a human-readable audit string of all mapped fields. */
    public String toAuditString()
    {
        StringBuffer auditStr = new StringBuffer();
        auditStr.append("\r\n*catsReferral* :");
        if (catsReferral != null)
        {
            // Associated objects are identified by short class name + id.
            auditStr.append(toShortClassName(catsReferral));
            auditStr.append(catsReferral.getId());
        }
        auditStr.append("; ");
        auditStr.append("\r\n*authoringInformation* :");
        if (authoringInformation != null)
        {
            auditStr.append(toShortClassName(authoringInformation));
            auditStr.append(authoringInformation.toString());
        }
        auditStr.append("; ");
        auditStr.append("\r\n*finalNote* :");
        auditStr.append(finalNote);
        auditStr.append("; ");
        auditStr.append("\r\n*recordingInformation* :");
        if (recordingInformation != null)
        {
            auditStr.append(toShortClassName(recordingInformation));
            auditStr.append(recordingInformation.toString());
        }
        auditStr.append("; ");
        return auditStr.toString();
    }

    /** Serializes this object to XML with a fresh object map. */
    public String toXMLString()
    {
        return toXMLString(new java.util.HashMap());
    }

    /**
     * Serializes this object to XML.
     * @param domMap tracks already-serialized objects so each instance's
     *        fields are emitted only once (cycles/shared refs become id-only
     *        class elements)
     */
    public String toXMLString(java.util.HashMap domMap)
    {
        StringBuffer sb = new StringBuffer();
        sb.append("<class type=\"" + this.getClass().getName() + "\" ");
        sb.append(" id=\"" + this.getId() + "\"");
        sb.append(" source=\"" + ims.configuration.EnvironmentConfig.getImportExportSourceName() + "\" ");
        sb.append(" classVersion=\"" + this.getClassVersion() + "\" ");
        sb.append(" component=\"" + this.getIsComponentClass() + "\" >");
        if (domMap.get(this) == null)
        {
            domMap.put(this, this);
            sb.append(this.fieldsToXMLString(domMap));
        }
        sb.append("</class>");
        // Register this instance in the import/export bookkeeping map so a
        // later import of the same id/source resolves to one object.
        String keyClassName = "CatsReportNote";
        String externalSource = ims.configuration.EnvironmentConfig.getImportExportSourceName();
        ims.configuration.ImportedObject impObj = (ims.configuration.ImportedObject)domMap.get(keyClassName + "_" + externalSource + "_" + this.getId());
        if (impObj == null)
        {
            impObj = new ims.configuration.ImportedObject();
            impObj.setExternalId(this.getId());
            impObj.setExternalSource(externalSource);
            impObj.setDomainObject(this);
            impObj.setLocalId(this.getId());
            impObj.setClassName(keyClassName);
            domMap.put(keyClassName + "_" + externalSource + "_" + this.getId(), impObj);
        }
        return sb.toString();
    }

    /** Emits the XML elements for each non-null field; text fields are XML-encoded. */
    public String fieldsToXMLString(java.util.HashMap domMap)
    {
        StringBuffer sb = new StringBuffer();
        if (this.getCatsReferral() != null)
        {
            sb.append("<catsReferral>");
            sb.append(this.getCatsReferral().toXMLString(domMap));
            sb.append("</catsReferral>");
        }
        if (this.getAuthoringInformation() != null)
        {
            sb.append("<authoringInformation>");
            sb.append(this.getAuthoringInformation().toXMLString(domMap));
            sb.append("</authoringInformation>");
        }
        if (this.getFinalNote() != null)
        {
            sb.append("<finalNote>");
            sb.append(ims.framework.utils.StringUtils.encodeXML(this.getFinalNote().toString()));
            sb.append("</finalNote>");
        }
        if (this.getRecordingInformation() != null)
        {
            sb.append("<recordingInformation>");
            sb.append(this.getRecordingInformation().toXMLString(domMap));
            sb.append("</recordingInformation>");
        }
        return sb.toString();
    }

    /** Fills (or creates) a list of CatsReportNote objects from an XML element. */
    public static java.util.List fromListXMLString(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.List list, java.util.HashMap domMap) throws Exception
    {
        if (list == null)
        list = new java.util.ArrayList();
        fillListFromXMLString(list, el, factory, domMap);
        return list;
    }

    /** Fills (or creates) a set of CatsReportNote objects from an XML element. */
    public static java.util.Set fromSetXMLString(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.Set set, java.util.HashMap domMap) throws Exception
    {
        if (set == null)
        set = new java.util.HashSet();
        fillSetFromXMLString(set, el, factory, domMap);
        return set;
    }

    /**
     * Synchronizes the given (possibly Hibernate-backed) set with the XML:
     * adds imported objects and removes objects no longer referenced,
     * mutating the set minimally to avoid marking it dirty unnecessarily.
     */
    private static void fillSetFromXMLString(java.util.Set set, org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
    {
        if (el == null)
        return;
        java.util.List cl = el.elements("class");
        int size = cl.size();
        java.util.Set newSet = new java.util.HashSet();
        for(int i=0; i<size; i++)
        {
            org.dom4j.Element itemEl = (org.dom4j.Element)cl.get(i);
            CatsReportNote domainObject = getCatsReportNotefromXML(itemEl, factory, domMap);
            if (domainObject == null)
            {
                continue;
            }
            //Trying to avoid the hibernate collection being marked as dirty via its public interface methods. (like add)
            if (!set.contains(domainObject))
                set.add(domainObject);
            newSet.add(domainObject);
        }
        java.util.Set removedSet = new java.util.HashSet();
        java.util.Iterator iter = set.iterator();
        //Find out which objects need to be removed
        while (iter.hasNext())
        {
            ims.domain.DomainObject o = (ims.domain.DomainObject)iter.next();
            // RIE (record-in-error) objects are kept even when absent from the XML.
            if ((o == null || o.getIsRIE() == null || !o.getIsRIE().booleanValue()) && !newSet.contains(o))
            {
                removedSet.add(o);
            }
        }
        iter = removedSet.iterator();
        //Remove the unwanted objects
        while (iter.hasNext())
        {
            set.remove(iter.next());
        }
    }

    /**
     * Synchronizes the given list with the XML, preserving XML order by
     * swapping elements into position, then truncating any trailing elements
     * no longer referenced.
     */
    private static void fillListFromXMLString(java.util.List list, org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
    {
        if (el == null)
        return;
        java.util.List cl = el.elements("class");
        int size = cl.size();
        for(int i=0; i<size; i++)
        {
            org.dom4j.Element itemEl = (org.dom4j.Element)cl.get(i);
            CatsReportNote domainObject = getCatsReportNotefromXML(itemEl, factory, domMap);
            if (domainObject == null)
            {
                continue;
            }
            int domIdx = list.indexOf(domainObject);
            if (domIdx == -1)
            {
                list.add(i, domainObject);
            }
            else if (i != domIdx && i < list.size())
            {
                // Already present but out of order: swap into the XML position.
                Object tmp = list.get(i);
                list.set(i, list.get(domIdx));
                list.set(domIdx, tmp);
            }
        }
        //Remove all ones in domList where index > voCollection.size() as these should
        //now represent the ones removed from the VO collection. No longer referenced.
        int i1=list.size();
        while (i1 > size)
        {
            list.remove(i1-1);
            i1=list.size();
        }
    }

    /** Parses the XML string and builds/resolves a CatsReportNote from its root element. */
    public static CatsReportNote getCatsReportNotefromXML(String xml, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
    {
        org.dom4j.Document doc = new org.dom4j.io.SAXReader().read(new org.xml.sax.InputSource(xml));
        return getCatsReportNotefromXML(doc.getRootElement(), factory, domMap);
    }

    /**
     * Builds/resolves a CatsReportNote from a dom4j element: dispatches to the
     * subclass importer when the element declares a subtype, enforces the
     * class version, reuses previously imported instances via domMap/factory,
     * and otherwise creates a new object and fills its fields.
     */
    public static CatsReportNote getCatsReportNotefromXML(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
    {
        if (el == null)
        return null;
        String className = el.attributeValue("type");
        if (!CatsReportNote.class.getName().equals(className))
        {
            // Element is a subtype: reflectively delegate to its own importer.
            Class clz = Class.forName(className);
            if (!CatsReportNote.class.isAssignableFrom(clz))
                throw new Exception("Element of type = " + className + " cannot be imported using the CatsReportNote class");
            String shortClassName = className.substring(className.lastIndexOf(".")+1);
            String methodName = "get" + shortClassName + "fromXML";
            java.lang.reflect.Method m = clz.getMethod(methodName, new Class[]{org.dom4j.Element.class, ims.domain.DomainFactory.class, java.util.HashMap.class});
            return (CatsReportNote)m.invoke(null, new Object[]{el, factory, domMap});
        }
        String impVersion = el.attributeValue("classVersion");
        if(!impVersion.equals(CatsReportNote.CLASSVERSION))
        {
            throw new Exception("Incompatible class structure found. Cannot import instance.");
        }
        CatsReportNote ret = null;
        int extId = Integer.parseInt(el.attributeValue("id"));
        String externalSource = el.attributeValue("source");
        ret = (CatsReportNote)factory.getImportedDomainObject(CatsReportNote.class, externalSource, extId);
        if (ret == null)
        {
            ret = new CatsReportNote();
        }
        String keyClassName = "CatsReportNote";
        ims.configuration.ImportedObject impObj = (ims.configuration.ImportedObject)domMap.get(keyClassName + "_" + externalSource + "_" + extId);
        if (impObj != null)
        {
            // Already imported during this pass: return the existing instance.
            return (CatsReportNote)impObj.getDomainObject();
        }
        else
        {
            impObj = new ims.configuration.ImportedObject();
            impObj.setExternalId(extId);
            impObj.setExternalSource(externalSource);
            impObj.setDomainObject(ret);
            domMap.put(keyClassName + "_" + externalSource + "_" + extId, impObj);
        }
        fillFieldsfromXML(el, factory, ret, domMap);
        return ret;
    }

    /** Populates each mapped field of {@code obj} from its child XML element, when present. */
    public static void fillFieldsfromXML(org.dom4j.Element el, ims.domain.DomainFactory factory, CatsReportNote obj, java.util.HashMap domMap) throws Exception
    {
        org.dom4j.Element fldEl;
        fldEl = el.element("catsReferral");
        if(fldEl != null)
        {
            fldEl = fldEl.element("class");
            obj.setCatsReferral(ims.RefMan.domain.objects.CatsReferral.getCatsReferralfromXML(fldEl, factory, domMap));
        }
        fldEl = el.element("authoringInformation");
        if(fldEl != null)
        {
            fldEl = fldEl.element("class");
            obj.setAuthoringInformation(ims.core.clinical.domain.objects.AuthoringInformation.getAuthoringInformationfromXML(fldEl, factory, domMap));
        }
        fldEl = el.element("finalNote");
        if(fldEl != null)
        {
            obj.setFinalNote(new String(fldEl.getTextTrim()));
        }
        fldEl = el.element("recordingInformation");
        if(fldEl != null)
        {
            fldEl = fldEl.element("class");
            obj.setRecordingInformation(ims.core.clinical.domain.objects.AuthoringInformation.getAuthoringInformationfromXML(fldEl, factory, domMap));
        }
    }

    /** @return names of collection-typed fields (none for this class) */
    public static String[] getCollectionFields()
    {
        return new String[]{
        };
    }

    /** Field-name constants for query building. */
    public static class FieldNames
    {
        public static final String ID = "id";
        public static final String CatsReferral = "catsReferral";
        public static final String AuthoringInformation = "authoringInformation";
        public static final String FinalNote = "finalNote";
        public static final String RecordingInformation = "recordingInformation";
    }
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/ValueObjects/src/ims/ocrr/vo/domain/OcsOrderWebServiceListVoAssembler.java | 18654 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
/*
* This code was generated
* Copyright (C) 1995-2004 IMS MAXIMS plc. All rights reserved.
* IMS Development Environment (version 1.80 build 5007.25751)
* WARNING: DO NOT MODIFY the content of this file
* Generated on 16/04/2014, 12:32
*
*/
package ims.ocrr.vo.domain;
import ims.vo.domain.DomainObjectMap;
import java.util.HashMap;
import org.hibernate.proxy.HibernateProxy;
/**
* @author Marius Mihalec
*/
public class OcsOrderWebServiceListVoAssembler
{
/**
 * Copy one ValueObject to another
 * @param valueObjectDest to be updated
 * @param valueObjectSrc to copy values from
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVo copy(ims.ocrr.vo.OcsOrderWebServiceListVo valueObjectDest, ims.ocrr.vo.OcsOrderWebServiceListVo valueObjectSrc)
{
    // NOTE: when the source is null, the (null) source is returned unchanged
    // and the destination is NOT modified — generator convention.
    if (null == valueObjectSrc)
    {
        return valueObjectSrc;
    }
    // Shallow field-by-field copy, including identity/RIE bookkeeping.
    valueObjectDest.setID_OcsOrderSession(valueObjectSrc.getID_OcsOrderSession());
    valueObjectDest.setIsRIE(valueObjectSrc.getIsRIE());
    // HasNewOrUpdatedResults
    valueObjectDest.setHasNewOrUpdatedResults(valueObjectSrc.getHasNewOrUpdatedResults());
    // ResponsibleClinician
    valueObjectDest.setResponsibleClinician(valueObjectSrc.getResponsibleClinician());
    // ResponsibleGp
    valueObjectDest.setResponsibleGp(valueObjectSrc.getResponsibleGp());
    // PatientLocation
    valueObjectDest.setPatientLocation(valueObjectSrc.getPatientLocation());
    // PatientClinic
    valueObjectDest.setPatientClinic(valueObjectSrc.getPatientClinic());
    // OrderedBy
    valueObjectDest.setOrderedBy(valueObjectSrc.getOrderedBy());
    // SysInfo
    valueObjectDest.setSysInfo(valueObjectSrc.getSysInfo());
    return valueObjectDest;
}
/**
 * Create the ValueObject collection to hold the set of DomainObjects.
 * This is a convenience method only.
 * It is intended to be used when one called to an Assembler is made.
 * If more than one call to an Assembler is made then #createOcsOrderWebServiceListVoCollectionFromOcsOrderSession(DomainObjectMap, Set) should be used.
 * @param domainObjectSet - Set of ims.ocrr.orderingresults.domain.objects.OcsOrderSession objects.
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVoCollection createOcsOrderWebServiceListVoCollectionFromOcsOrderSession(java.util.Set domainObjectSet)
{
    return createOcsOrderWebServiceListVoCollectionFromOcsOrderSession(new DomainObjectMap(), domainObjectSet);
}

/**
 * Create the ValueObject collection to hold the set of DomainObjects.
 * A null input yields an empty collection. While assembling, counts RIE
 * (record-in-error) versus active domain objects and stores both counts
 * on the returned collection.
 * @param map - maps DomainObjects to created ValueObjects
 * @param domainObjectSet - Set of ims.ocrr.orderingresults.domain.objects.OcsOrderSession objects.
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVoCollection createOcsOrderWebServiceListVoCollectionFromOcsOrderSession(DomainObjectMap map, java.util.Set domainObjectSet)
{
    ims.ocrr.vo.OcsOrderWebServiceListVoCollection voList = new ims.ocrr.vo.OcsOrderWebServiceListVoCollection();
    if ( null == domainObjectSet )
    {
        return voList;
    }
    int rieCount=0;
    int activeCount=0;
    java.util.Iterator iterator = domainObjectSet.iterator();
    while( iterator.hasNext() )
    {
        ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject = (ims.ocrr.orderingresults.domain.objects.OcsOrderSession) iterator.next();
        ims.ocrr.vo.OcsOrderWebServiceListVo vo = create(map, domainObject);
        if (vo != null)
            voList.add(vo);
        if (domainObject != null)
        {
            if (domainObject.getIsRIE() != null && domainObject.getIsRIE().booleanValue() == true)
                rieCount++;
            else
                activeCount++;
        }
    }
    voList.setRieCount(rieCount);
    voList.setActiveCount(activeCount);
    return voList;
}
/**
 * Create the ValueObject collection to hold the list of DomainObjects.
 * @param domainObjectList - List of ims.ocrr.orderingresults.domain.objects.OcsOrderSession objects.
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVoCollection createOcsOrderWebServiceListVoCollectionFromOcsOrderSession(java.util.List domainObjectList)
{
    return createOcsOrderWebServiceListVoCollectionFromOcsOrderSession(new DomainObjectMap(), domainObjectList);
}

/**
 * Create the ValueObject collection to hold the list of DomainObjects.
 * A null input yields an empty collection. Preserves list order and records
 * RIE (record-in-error) versus active counts on the returned collection.
 * @param map - maps DomainObjects to created ValueObjects
 * @param domainObjectList - List of ims.ocrr.orderingresults.domain.objects.OcsOrderSession objects.
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVoCollection createOcsOrderWebServiceListVoCollectionFromOcsOrderSession(DomainObjectMap map, java.util.List domainObjectList)
{
    ims.ocrr.vo.OcsOrderWebServiceListVoCollection voList = new ims.ocrr.vo.OcsOrderWebServiceListVoCollection();
    if ( null == domainObjectList )
    {
        return voList;
    }
    int rieCount=0;
    int activeCount=0;
    for (int i = 0; i < domainObjectList.size(); i++)
    {
        ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject = (ims.ocrr.orderingresults.domain.objects.OcsOrderSession) domainObjectList.get(i);
        ims.ocrr.vo.OcsOrderWebServiceListVo vo = create(map, domainObject);
        if (vo != null)
            voList.add(vo);
        if (domainObject != null)
        {
            if (domainObject.getIsRIE() != null && domainObject.getIsRIE().booleanValue() == true)
                rieCount++;
            else
                activeCount++;
        }
    }
    voList.setRieCount(rieCount);
    voList.setActiveCount(activeCount);
    return voList;
}
/**
 * Create the ims.ocrr.orderingresults.domain.objects.OcsOrderSession set from the value object collection.
 * @param domainFactory - used to create existing (persistent) domain objects.
 * @param voCollection - the collection of value objects
 */
public static java.util.Set extractOcsOrderSessionSet(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.OcsOrderWebServiceListVoCollection voCollection)
{
    return extractOcsOrderSessionSet(domainFactory, voCollection, null, new HashMap());
}

/**
 * Synchronizes the (possibly Hibernate-backed) domain object set with the VO
 * collection: extracts/creates a domain object per VO, adds any that are
 * missing, and removes non-RIE objects no longer represented in the VOs.
 * Mutates the set minimally to avoid marking a persistent collection dirty.
 * @return the synchronized set (created when the input set was null)
 */
public static java.util.Set extractOcsOrderSessionSet(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.OcsOrderWebServiceListVoCollection voCollection, java.util.Set domainObjectSet, HashMap domMap)
{
    int size = (null == voCollection) ? 0 : voCollection.size();
    if (domainObjectSet == null)
    {
        domainObjectSet = new java.util.HashSet();
    }
    java.util.Set newSet = new java.util.HashSet();
    for(int i=0; i<size; i++)
    {
        ims.ocrr.vo.OcsOrderWebServiceListVo vo = voCollection.get(i);
        ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject = OcsOrderWebServiceListVoAssembler.extractOcsOrderSession(domainFactory, vo, domMap);
        //TODO: This can only occur in the situation of a stale object exception. For now leave it to the Interceptor to handle it.
        if (domainObject == null)
        {
            continue;
        }
        //Trying to avoid the hibernate collection being marked as dirty via its public interface methods. (like add)
        if (!domainObjectSet.contains(domainObject)) domainObjectSet.add(domainObject);
        newSet.add(domainObject);
    }
    java.util.Set removedSet = new java.util.HashSet();
    java.util.Iterator iter = domainObjectSet.iterator();
    //Find out which objects need to be removed
    while (iter.hasNext())
    {
        ims.domain.DomainObject o = (ims.domain.DomainObject)iter.next();
        // RIE (record-in-error) objects are retained even when absent from the VOs.
        if ((o == null || o.getIsRIE() == null || !o.getIsRIE().booleanValue()) && !newSet.contains(o))
        {
            removedSet.add(o);
        }
    }
    iter = removedSet.iterator();
    //Remove the unwanted objects
    while (iter.hasNext())
    {
        domainObjectSet.remove(iter.next());
    }
    return domainObjectSet;
}
/**
* Create the ims.ocrr.orderingresults.domain.objects.OcsOrderSession list from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/
public static java.util.List extractOcsOrderSessionList(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.OcsOrderWebServiceListVoCollection voCollection)
{
return extractOcsOrderSessionList(domainFactory, voCollection, null, new HashMap());
}
/**
 * Merges the OcsOrderSession domain objects extracted from {@code voCollection} into
 * {@code domainObjectList}, updating the list in place so that its order matches the
 * VO collection, and trimming any trailing entries no longer referenced by the VOs.
 * NOTE(review): generated assembler code ("DO NOT MODIFY" file family) - comments only.
 *
 * @param domainFactory used to load existing (persistent) domain objects
 * @param voCollection the value objects to extract from (may be null, treated as empty)
 * @param domainObjectList the list to update in place (a new ArrayList is created if null)
 * @param domMap shared VO-to-domain-object map that prevents duplicate extraction
 * @return the updated (or newly created) list
 */
public static java.util.List extractOcsOrderSessionList(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.OcsOrderWebServiceListVoCollection voCollection, java.util.List domainObjectList, HashMap domMap)
{
	int size = (null == voCollection) ? 0 : voCollection.size();
	if (domainObjectList == null)
	{
		domainObjectList = new java.util.ArrayList();
	}
	for(int i=0; i<size; i++)
	{
		ims.ocrr.vo.OcsOrderWebServiceListVo vo = voCollection.get(i);
		ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject = OcsOrderWebServiceListVoAssembler.extractOcsOrderSession(domainFactory, vo, domMap);
		//TODO: This can only occur in the situation of a stale object exception. For now leave it to the Interceptor to handle it.
		if (domainObject == null)
		{
			continue;
		}
		int domIdx = domainObjectList.indexOf(domainObject);
		if (domIdx == -1)
		{
			// Not present yet: insert at the position it occupies in the VO collection.
			domainObjectList.add(i, domainObject);
		}
		else if (i != domIdx && i < domainObjectList.size())
		{
			// Present at the wrong index: swap so list order tracks VO collection order.
			Object tmp = domainObjectList.get(i);
			domainObjectList.set(i, domainObjectList.get(domIdx));
			domainObjectList.set(domIdx, tmp);
		}
	}
	//Remove all ones in domList where index > voCollection.size() as these should
	//now represent the ones removed from the VO collection. No longer referenced.
	int i1=domainObjectList.size();
	while (i1 > size)
	{
		domainObjectList.remove(i1-1);
		i1=domainObjectList.size();
	}
	return domainObjectList;
}
/**
 * Create the ValueObject from the ims.ocrr.orderingresults.domain.objects.OcsOrderSession object.
 * Convenience overload: starts with a fresh DomainObjectMap so shared references
 * within one conversion resolve to the same value object.
 * @param domainObject ims.ocrr.orderingresults.domain.objects.OcsOrderSession
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVo create(ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject)
{
	if (null == domainObject)
	{
		return null;
	}
	DomainObjectMap map = new DomainObjectMap();
	return create(map, domainObject);
}
/**
 * Create the ValueObject from the ims.ocrr.orderingresults.domain.objects.OcsOrderSession object.
 * Returns the cached VO from {@code map} if this domain object was already converted,
 * otherwise creates, registers and populates a new one (registration happens before
 * population to break potential reference cycles).
 * @param map DomainObjectMap of DomainObjects to already created ValueObjects.
 * @param domainObject
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVo create(DomainObjectMap map, ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject)
{
	if (null == domainObject)
	{
		return null;
	}
	// check if the domainObject already has a valueObject created for it
	ims.ocrr.vo.OcsOrderWebServiceListVo valueObject = (ims.ocrr.vo.OcsOrderWebServiceListVo) map.getValueObject(domainObject, ims.ocrr.vo.OcsOrderWebServiceListVo.class);
	if ( null == valueObject )
	{
		valueObject = new ims.ocrr.vo.OcsOrderWebServiceListVo(domainObject.getId(), domainObject.getVersion());
		map.addValueObject(domainObject, valueObject);
		valueObject = insert(map, valueObject, domainObject);
	}
	return valueObject;
}
/**
 * Update the ValueObject with the Domain Object.
 * Convenience overload: uses a fresh DomainObjectMap for nested conversions.
 * @param valueObject to be updated
 * @param domainObject ims.ocrr.orderingresults.domain.objects.OcsOrderSession
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVo insert(ims.ocrr.vo.OcsOrderWebServiceListVo valueObject, ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject)
{
	if (null == domainObject)
	{
		return valueObject;
	}
	DomainObjectMap map = new DomainObjectMap();
	return insert(map, valueObject, domainObject);
}
/**
 * Update the ValueObject with the Domain Object.
 * Copies the id, RIE flag and all mapped fields from the domain object into the VO.
 * May return null when the domain object's recorded-in-error state does not match the
 * requested include-record mode (see the two guard checks below).
 * @param map DomainObjectMap of DomainObjects to already created ValueObjects.
 * @param valueObject to be updated
 * @param domainObject ims.ocrr.orderingresults.domain.objects.OcsOrderSession
 */
public static ims.ocrr.vo.OcsOrderWebServiceListVo insert(DomainObjectMap map, ims.ocrr.vo.OcsOrderWebServiceListVo valueObject, ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject)
{
	if (null == domainObject)
	{
		return valueObject;
	}
	if (null == map)
	{
		map = new DomainObjectMap();
	}
	valueObject.setID_OcsOrderSession(domainObject.getId());
	valueObject.setIsRIE(domainObject.getIsRIE());
	// If this is a recordedInError record, and the domainObject
	// value isIncludeRecord has not been set, then we return null and
	// not the value object
	if (valueObject.getIsRIE() != null && valueObject.getIsRIE().booleanValue() == true && !domainObject.isIncludeRecord())
		return null;
	// If this is not a recordedInError record, and the domainObject
	// value isIncludeRecord has been set, then we return null and
	// not the value object
	if ((valueObject.getIsRIE() == null || valueObject.getIsRIE().booleanValue() == false) && domainObject.isIncludeRecord())
		return null;
	// HasNewOrUpdatedResults
	valueObject.setHasNewOrUpdatedResults( domainObject.isHasNewOrUpdatedResults() );
	// ResponsibleClinician
	valueObject.setResponsibleClinician(ims.core.vo.domain.HcpLiteVoAssembler.create(map, domainObject.getResponsibleClinician()) );
	// ResponsibleGp
	valueObject.setResponsibleGp(ims.core.vo.domain.GpLiteVoAssembler.create(map, domainObject.getResponsibleGp()) );
	// PatientLocation
	valueObject.setPatientLocation(ims.core.vo.domain.LocationLiteVoAssembler.create(map, domainObject.getPatientLocation()) );
	// PatientClinic
	valueObject.setPatientClinic(ims.core.vo.domain.ClinicLiteVoAssembler.create(map, domainObject.getPatientClinic()) );
	// OrderedBy
	valueObject.setOrderedBy(ims.core.vo.domain.MemberOfStaffLiteVoAssembler.create(map, domainObject.getOrderedBy()) );
	// SysInfo
	// set system information
	valueObject.setSysInfo(ims.vo.domain.SystemInformationAssembler.create(domainObject.getSystemInformation()));
	return valueObject;
}
/**
 * Create the domain object from the value object.
 * Convenience overload: starts with a fresh VO-to-domain extraction map.
 * @param domainFactory - used to create existing (persistent) domain objects.
 * @param valueObject - extract the domain object fields from this.
 */
public static ims.ocrr.orderingresults.domain.objects.OcsOrderSession extractOcsOrderSession(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.OcsOrderWebServiceListVo valueObject)
{
	return extractOcsOrderSession(domainFactory, valueObject, new HashMap());
}
/**
 * Creates (id absent) or loads (id present) the OcsOrderSession domain object for
 * {@code valueObject} and copies the VO fields onto it. {@code domMap} caches results
 * so the same VO or persistent id is only extracted once per conversion.
 * NOTE(review): generated assembler code ("DO NOT MODIFY" file family) - comments only.
 *
 * @param domainFactory used to load existing (persistent) domain objects
 * @param valueObject the value object to extract from (may be null)
 * @param domMap extraction cache keyed by VO instance (new objects) or by
 *        class-name + id string (persistent objects)
 * @return the domain object, or null when the VO is null or the persistent
 *         instance cannot be found (stale-object situation, see TODO below)
 */
public static ims.ocrr.orderingresults.domain.objects.OcsOrderSession extractOcsOrderSession(ims.domain.ILightweightDomainFactory domainFactory, ims.ocrr.vo.OcsOrderWebServiceListVo valueObject, HashMap domMap)
{
	if (null == valueObject)
	{
		return null;
	}
	Integer id = valueObject.getID_OcsOrderSession();
	ims.ocrr.orderingresults.domain.objects.OcsOrderSession domainObject = null;
	if ( null == id)
	{
		if (domMap.get(valueObject) != null)
		{
			return (ims.ocrr.orderingresults.domain.objects.OcsOrderSession)domMap.get(valueObject);
		}
		// ims.ocrr.vo.OcsOrderWebServiceListVo ID_OcsOrderSession field is unknown
		domainObject = new ims.ocrr.orderingresults.domain.objects.OcsOrderSession();
		domMap.put(valueObject, domainObject);
	}
	else
	{
		String key = (valueObject.getClass().getName() + "__" + valueObject.getID_OcsOrderSession());
		if (domMap.get(key) != null)
		{
			return (ims.ocrr.orderingresults.domain.objects.OcsOrderSession)domMap.get(key);
		}
		domainObject = (ims.ocrr.orderingresults.domain.objects.OcsOrderSession) domainFactory.getDomainObject(ims.ocrr.orderingresults.domain.objects.OcsOrderSession.class, id );
		//TODO: Not sure how this should be handled. Effectively it must be a staleobject exception, but maybe should be handled as that further up.
		if (domainObject == null)
			return null;
		domMap.put(key, domainObject);
	}
	domainObject.setVersion(valueObject.getVersion_OcsOrderSession());
	domainObject.setHasNewOrUpdatedResults(valueObject.getHasNewOrUpdatedResults());
	domainObject.setResponsibleClinician(ims.core.vo.domain.HcpLiteVoAssembler.extractHcp(domainFactory, valueObject.getResponsibleClinician(), domMap));
	domainObject.setResponsibleGp(ims.core.vo.domain.GpLiteVoAssembler.extractGp(domainFactory, valueObject.getResponsibleGp(), domMap));
	domainObject.setPatientLocation(ims.core.vo.domain.LocationLiteVoAssembler.extractLocation(domainFactory, valueObject.getPatientLocation(), domMap));
	domainObject.setPatientClinic(ims.core.vo.domain.ClinicLiteVoAssembler.extractClinic(domainFactory, valueObject.getPatientClinic(), domMap));
	// SaveAsRefVO - treated as a refVo in extract methods
	ims.core.resource.people.domain.objects.MemberOfStaff value6 = null;
	if ( null != valueObject.getOrderedBy() )
	{
		if (valueObject.getOrderedBy().getBoId() == null)
		{
			// New (unsaved) staff member: resolve through the extraction cache only.
			if (domMap.get(valueObject.getOrderedBy()) != null)
			{
				value6 = (ims.core.resource.people.domain.objects.MemberOfStaff)domMap.get(valueObject.getOrderedBy());
			}
		}
		else
		{
			value6 = (ims.core.resource.people.domain.objects.MemberOfStaff)domainFactory.getDomainObject(ims.core.resource.people.domain.objects.MemberOfStaff.class, valueObject.getOrderedBy().getBoId());
		}
	}
	domainObject.setOrderedBy(value6);
	return domainObject;
}
}
| agpl-3.0 |
tdefilip/opennms | features/topology-map/plugins/org.opennms.features.topology.plugins.ssh/src/main/java/org/opennms/features/topology/ssh/internal/gwt/client/ui/Code.java | 4726 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2012-2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.features.topology.ssh.internal.gwt.client.ui;
import com.google.gwt.event.dom.client.KeyDownEvent;
import com.google.gwt.event.dom.client.KeyEvent;
import com.google.gwt.event.dom.client.KeyPressEvent;
/**
* The Code class takes generic KeyEvents and extracts all relevant information from them.
* @author Leonardo Bell
* @author Philip Grenon
*/
public class Code {
private int keyCode = 0; //Key code from the passed in event
private int charCode = 0; //Char code from the passed in event
private KeyPressEvent kP_Event = null; //remains null unless event is an instance of KeyPressEvent
private KeyDownEvent kD_Event = null; //remains null unless event is an instance of KeyDownEvent
private boolean isCtrlDown; //Whether the CTRL key is currently held down or not
private boolean isAltDown; //Whether the ALT key is currently held down or not
private boolean isShiftDown; //Whether the SHIFT key is current held down or not
private boolean isFunctionKey; //Whether the event was a function key or not
/*List of special key codes*/
private final int[] keyCodes = new int[] { 9, 8, 13, 27, 33, 34, 35, 36, 37, 38, 39, 40, 45, 46, 112,
113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123 };
/**
* The Code(KeyEvent event) constructor takes a generic KeyEvent and decides whether
* it is a KeyPressEvent or KeyDownEvent. Any relevant information about the event
* is extracted and stored in class variables
* @param event generic KeyEvent
*/
@SuppressWarnings("rawtypes")
public Code(KeyEvent event){
if (event != null){
if (event instanceof KeyPressEvent){
kP_Event = (KeyPressEvent)event;
} else if (event instanceof KeyDownEvent){
kD_Event = (KeyDownEvent)event;
}
isCtrlDown = event.isControlKeyDown();
isAltDown = event.isAltKeyDown();
isShiftDown = event.isShiftKeyDown();
}
if (kP_Event != null){
charCode = kP_Event.getUnicodeCharCode();
} else if (kD_Event != null){
keyCode = kD_Event.getNativeKeyCode();
}
isFunctionKey = false;
for (int k : keyCodes){
if (keyCode == k) {
isFunctionKey = true;
break;
}
}
}
/**
* The getCharCode method returns the Char code extracted from the event
* @return Char code of event
*/
public int getCharCode() {
return charCode;
}
/**
* The getKeyCode method returns the Key code extracted from the event
* @return Key code of event
*/
public int getKeyCode() {
return keyCode;
}
/**
* The isCtrlDown method returns whether the CTRL key was held down
* during the event
* @return Whether CTRL was held down
*/
public boolean isCtrlDown() {
return isCtrlDown;
}
/**
* The isAltDown method returns whether the ALT key was held down
* during the event
* @return Whether ALT was held down
*/
public boolean isAltDown() {
return isAltDown;
}
/**
* The isShitDown method returns whether the SHIFT key was held down
* during the event
* @return Whether SHIFT was held down
*/
public boolean isShiftDown() {
return isShiftDown;
}
/**
* The isFunctionKey method returns whether the event was
* a special, non printable key
* @return Whether the event was a function Key
*/
public boolean isFunctionKey() {
return isFunctionKey;
}
/**
* The isFunctionKey method returns whether the event was
* the CTRL key
* @return Whether the event was a CTLR Key
*/
public boolean isControlKey() {
return (getKeyCode() >= 16 && getKeyCode() <= 18);
}
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/DomainObjects/src/ims/therapies/treatment/domain/objects/PlinthWorkTreatment.java | 11887 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
/*
* This code was generated
* Copyright (C) 1995-2004 IMS MAXIMS plc. All rights reserved.
* IMS Development Environment (version 1.80 build 5007.25751)
* WARNING: DO NOT MODIFY the content of this file
* Generated: 16/04/2014, 12:34
*
*/
package ims.therapies.treatment.domain.objects;
/**
 * Generated domain object for a plinth-work treatment: a lookup-typed treatment plus a
 * free-text details field, with XML import/export and audit support.
 * NOTE(review): generated file ("DO NOT MODIFY" per file header) - comments only.
 *
 * @author Sinead McDermott
 * Generated.
 */
public class PlinthWorkTreatment extends ims.domain.DomainObject implements java.io.Serializable {

	public static final int CLASSID = 1044100003;
	private static final long serialVersionUID = 1044100003L;
	public static final String CLASSVERSION = "${ClassVersion}";

	/** Queries on this class should be capped (framework query-limit hook). */
	@Override
	public boolean shouldCapQuery()
	{
		return true;
	}

	/** Treatment */
	private ims.domain.lookups.LookupInstance treatment;
	/** Details */
	private String details;

	public PlinthWorkTreatment (Integer id, int ver)
	{
		super(id, ver);
	}
	public PlinthWorkTreatment ()
	{
		super();
	}
	public PlinthWorkTreatment (Integer id, int ver, Boolean includeRecord)
	{
		super(id, ver, includeRecord);
	}
	public Class getRealDomainClass()
	{
		return ims.therapies.treatment.domain.objects.PlinthWorkTreatment.class;
	}
	public ims.domain.lookups.LookupInstance getTreatment() {
		return treatment;
	}
	public void setTreatment(ims.domain.lookups.LookupInstance treatment) {
		this.treatment = treatment;
	}
	public String getDetails() {
		return details;
	}
	/**
	 * Sets the free-text details; rejects values longer than 255 characters.
	 * NOTE(review): the "$MaxLength" placeholder in the message is emitted literally
	 * by the generator - the actual limit checked here is 255.
	 */
	public void setDetails(String details) {
		if ( null != details && details.length() > 255 ) {
			throw new ims.domain.exceptions.DomainRuntimeException("MaxLength ($MaxLength) exceeded for details. Tried to set value: "+
details);
		}
		this.details = details;
	}

	/**
	 * isConfigurationObject
	 * Taken from the Usage property of the business object, this method will return
	 * a boolean indicating whether this is a configuration object or not
	 * Configuration = true, Instantiation = false
	 */
	public static boolean isConfigurationObject()
	{
		// Generated from Usage = "Instantiation", so this comparison is always false here.
		if ( "Instantiation".equals("Configuration") )
			return true;
		else
			return false;
	}

	public int getClassId() {
		return CLASSID;
	}
	public String getClassVersion()
	{
		return CLASSVERSION;
	}

	/** Builds the audit-trail text for this record: lookup text of treatment plus details. */
	public String toAuditString()
	{
		StringBuffer auditStr = new StringBuffer();
		auditStr.append("\r\n*treatment* :");
		if (treatment != null)
			auditStr.append(treatment.getText());
		auditStr.append("; ");
		auditStr.append("\r\n*details* :");
		auditStr.append(details);
		auditStr.append("; ");
		return auditStr.toString();
	}

	public String toXMLString()
	{
		return toXMLString(new java.util.HashMap());
	}
	/**
	 * Serializes this object to an XML &lt;class&gt; element; domMap guards against
	 * emitting the same object twice and records an ImportedObject entry for round-trips.
	 */
	public String toXMLString(java.util.HashMap domMap)
	{
		StringBuffer sb = new StringBuffer();
		sb.append("<class type=\"" + this.getClass().getName() + "\" ");
		sb.append(" id=\"" + this.getId() + "\"");
		sb.append(" source=\"" + ims.configuration.EnvironmentConfig.getImportExportSourceName() + "\" ");
		sb.append(" classVersion=\"" + this.getClassVersion() + "\" ");
		sb.append(" component=\"" + this.getIsComponentClass() + "\" >");
		if (domMap.get(this) == null)
		{
			domMap.put(this, this);
			sb.append(this.fieldsToXMLString(domMap));
		}
		sb.append("</class>");
		String keyClassName = "PlinthWorkTreatment";
		String externalSource = ims.configuration.EnvironmentConfig.getImportExportSourceName();
		ims.configuration.ImportedObject impObj = (ims.configuration.ImportedObject)domMap.get(keyClassName + "_" + externalSource + "_" + this.getId());
		if (impObj == null)
		{
			impObj = new ims.configuration.ImportedObject();
			impObj.setExternalId(this.getId());
			impObj.setExternalSource(externalSource);
			impObj.setDomainObject(this);
			impObj.setLocalId(this.getId());
			impObj.setClassName(keyClassName);
			domMap.put(keyClassName + "_" + externalSource + "_" + this.getId(), impObj);
		}
		return sb.toString();
	}

	/** Serializes only the mapped fields (treatment, details) as XML child elements. */
	public String fieldsToXMLString(java.util.HashMap domMap)
	{
		StringBuffer sb = new StringBuffer();
		if (this.getTreatment() != null)
		{
			sb.append("<treatment>");
			sb.append(this.getTreatment().toXMLString());
			sb.append("</treatment>");
		}
		if (this.getDetails() != null)
		{
			sb.append("<details>");
			sb.append(ims.framework.utils.StringUtils.encodeXML(this.getDetails().toString()));
			sb.append("</details>");
		}
		return sb.toString();
	}

	public static java.util.List fromListXMLString(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.List list, java.util.HashMap domMap) throws Exception
	{
		if (list == null)
			list = new java.util.ArrayList();
		fillListFromXMLString(list, el, factory, domMap);
		return list;
	}
	public static java.util.Set fromSetXMLString(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.Set set, java.util.HashMap domMap) throws Exception
	{
		if (set == null)
			set = new java.util.HashSet();
		fillSetFromXMLString(set, el, factory, domMap);
		return set;
	}

	/**
	 * Merges the &lt;class&gt; children of {@code el} into {@code set}, removing members
	 * absent from the XML unless flagged recorded-in-error (RIE).
	 */
	private static void fillSetFromXMLString(java.util.Set set, org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
	{
		if (el == null)
			return;
		java.util.List cl = el.elements("class");
		int size = cl.size();
		java.util.Set newSet = new java.util.HashSet();
		for(int i=0; i<size; i++)
		{
			org.dom4j.Element itemEl = (org.dom4j.Element)cl.get(i);
			PlinthWorkTreatment domainObject = getPlinthWorkTreatmentfromXML(itemEl, factory, domMap);

			if (domainObject == null)
			{
				continue;
			}

			//Trying to avoid the hibernate collection being marked as dirty via its public interface methods. (like add)
			if (!set.contains(domainObject))
				set.add(domainObject);
			newSet.add(domainObject);
		}

		java.util.Set removedSet = new java.util.HashSet();
		java.util.Iterator iter = set.iterator();
		//Find out which objects need to be removed
		while (iter.hasNext())
		{
			ims.domain.DomainObject o = (ims.domain.DomainObject)iter.next();
			if ((o == null || o.getIsRIE() == null || !o.getIsRIE().booleanValue()) && !newSet.contains(o))
			{
				removedSet.add(o);
			}
		}
		iter = removedSet.iterator();
		//Remove the unwanted objects
		while (iter.hasNext())
		{
			set.remove(iter.next());
		}
	}

	/**
	 * Merges the &lt;class&gt; children of {@code el} into {@code list}, keeping list
	 * order aligned with the XML and trimming trailing entries no longer referenced.
	 */
	private static void fillListFromXMLString(java.util.List list, org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
	{
		if (el == null)
			return;
		java.util.List cl = el.elements("class");
		int size = cl.size();
		for(int i=0; i<size; i++)
		{
			org.dom4j.Element itemEl = (org.dom4j.Element)cl.get(i);
			PlinthWorkTreatment domainObject = getPlinthWorkTreatmentfromXML(itemEl, factory, domMap);

			if (domainObject == null)
			{
				continue;
			}

			int domIdx = list.indexOf(domainObject);
			if (domIdx == -1)
			{
				list.add(i, domainObject);
			}
			else if (i != domIdx && i < list.size())
			{
				// Swap into place so list order tracks XML element order.
				Object tmp = list.get(i);
				list.set(i, list.get(domIdx));
				list.set(domIdx, tmp);
			}
		}

		//Remove all ones in domList where index > voCollection.size() as these should
		//now represent the ones removed from the VO collection. No longer referenced.
		int i1=list.size();
		while (i1 > size)
		{
			list.remove(i1-1);
			i1=list.size();
		}
	}

	public static PlinthWorkTreatment getPlinthWorkTreatmentfromXML(String xml, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
	{
		org.dom4j.Document doc = new org.dom4j.io.SAXReader().read(new org.xml.sax.InputSource(xml));
		return getPlinthWorkTreatmentfromXML(doc.getRootElement(), factory, domMap);
	}

	/**
	 * Deserializes a PlinthWorkTreatment (or a generated subclass, via reflection) from
	 * an XML &lt;class&gt; element, reusing previously imported instances from
	 * {@code domMap} / the domain factory where possible.
	 * @throws Exception when the element's type is incompatible or the class version differs
	 */
	public static PlinthWorkTreatment getPlinthWorkTreatmentfromXML(org.dom4j.Element el, ims.domain.DomainFactory factory, java.util.HashMap domMap) throws Exception
	{
		if (el == null)
			return null;

		String className = el.attributeValue("type");
		if (!PlinthWorkTreatment.class.getName().equals(className))
		{
			// Element is a subclass: dispatch to that class's own getXXXfromXML via reflection.
			Class clz = Class.forName(className);
			if (!PlinthWorkTreatment.class.isAssignableFrom(clz))
				throw new Exception("Element of type = " + className + " cannot be imported using the PlinthWorkTreatment class");
			String shortClassName = className.substring(className.lastIndexOf(".")+1);
			String methodName = "get" + shortClassName + "fromXML";
			java.lang.reflect.Method m = clz.getMethod(methodName, new Class[]{org.dom4j.Element.class, ims.domain.DomainFactory.class, java.util.HashMap.class});
			return (PlinthWorkTreatment)m.invoke(null, new Object[]{el, factory, domMap});
		}

		String impVersion = el.attributeValue("classVersion");
		if(!impVersion.equals(PlinthWorkTreatment.CLASSVERSION))
		{
			throw new Exception("Incompatible class structure found. Cannot import instance.");
		}

		PlinthWorkTreatment ret = null;
		int extId = Integer.parseInt(el.attributeValue("id"));
		String externalSource = el.attributeValue("source");
		ret = (PlinthWorkTreatment)factory.getImportedDomainObject(PlinthWorkTreatment.class, externalSource, extId);
		if (ret == null)
		{
			ret = new PlinthWorkTreatment();
		}

		String keyClassName = "PlinthWorkTreatment";
		ims.configuration.ImportedObject impObj = (ims.configuration.ImportedObject)domMap.get(keyClassName + "_" + externalSource + "_" + extId);
		if (impObj != null)
		{
			// Already processed in this import run: return the cached instance.
			return (PlinthWorkTreatment)impObj.getDomainObject();
		}
		else
		{
			impObj = new ims.configuration.ImportedObject();
			impObj.setExternalId(extId);
			impObj.setExternalSource(externalSource);
			impObj.setDomainObject(ret);
			domMap.put(keyClassName + "_" + externalSource + "_" + extId, impObj);
		}
		fillFieldsfromXML(el, factory, ret, domMap);

		return ret;
	}

	/** Copies the mapped field values (treatment, details) from the XML element onto obj. */
	public static void fillFieldsfromXML(org.dom4j.Element el, ims.domain.DomainFactory factory, PlinthWorkTreatment obj, java.util.HashMap domMap) throws Exception
	{
		org.dom4j.Element fldEl;
		fldEl = el.element("treatment");
		if(fldEl != null)
		{
			fldEl = fldEl.element("lki");
			obj.setTreatment(ims.domain.lookups.LookupInstance.fromXMLString(fldEl, factory));
		}
		fldEl = el.element("details");
		if(fldEl != null)
		{
			obj.setDetails(new String(fldEl.getTextTrim()));
		}
	}

	/** Names of collection-typed fields on this class (none). */
	public static String[] getCollectionFields()
	{
		return new String[]{
		};
	}

	/** String constants for the persisted field names, for use in query building. */
	public static class FieldNames
	{
		public static final String ID = "id";
		public static final String Treatment = "treatment";
		public static final String Details = "details";
	}
}
| agpl-3.0 |
open-health-hub/openMAXIMS | openmaxims_workspace/OCRR/src/ims/ocrr/forms/newresultsoutpatienttabcomponent/GenForm.java | 57857 | //#############################################################################
//# #
//# Copyright (C) <2014> <IMS MAXIMS> #
//# #
//# This program is free software: you can redistribute it and/or modify #
//# it under the terms of the GNU Affero General Public License as #
//# published by the Free Software Foundation, either version 3 of the #
//# License, or (at your option) any later version. #
//# #
//# This program is distributed in the hope that it will be useful, #
//# but WITHOUT ANY WARRANTY; without even the implied warranty of #
//# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
//# GNU Affero General Public License for more details. #
//# #
//# You should have received a copy of the GNU Affero General Public License #
//# along with this program. If not, see <http://www.gnu.org/licenses/>. #
//# #
//#############################################################################
//#EOH
// This code was generated by Barbara Worwood using IMS Development Environment (version 1.80 build 5007.25751)
// Copyright (C) 1995-2014 IMS MAXIMS. All rights reserved.
// WARNING: DO NOT MODIFY the content of this file
package ims.ocrr.forms.newresultsoutpatienttabcomponent;
import ims.framework.*;
import ims.framework.controls.*;
import ims.framework.enumerations.*;
import ims.framework.utils.RuntimeAnchoring;
public class GenForm extends FormBridge
{
private static final long serialVersionUID = 1L;
/** Forwards a custom-control value change to the base form's value-changed event. */
protected void fireCustomControlValueChanged()
{
	super.fireValueChanged();
}
/** Returns whether this form's report fields can supply data for the given report seeds. */
public boolean canProvideData(IReportSeed[] reportSeeds)
{
	return new ReportDataProvider(reportSeeds, this.getFormReportFields()).canProvideData();
}
/** Returns whether this form currently has data for the given report seeds. */
public boolean hasData(IReportSeed[] reportSeeds)
{
	return new ReportDataProvider(reportSeeds, this.getFormReportFields()).hasData();
}
/** Collects report data for the given seeds, including null-valued fields. */
public IReportField[] getData(IReportSeed[] reportSeeds)
{
	return getData(reportSeeds, false);
}
/** Collects report data for the given seeds, optionally excluding null-valued fields. */
public IReportField[] getData(IReportSeed[] reportSeeds, boolean excludeNulls)
{
	return new ReportDataProvider(reportSeeds, this.getFormReportFields(), excludeNulls).getData();
}
/**
 * Generated typed bridge for the "Days" combo box: delegates to the underlying
 * framework ComboBox with Integer row values.
 */
public static class cmbDaysComboBox extends ComboBoxBridge
{
	private static final long serialVersionUID = 1L;

	public void newRow(Integer value, String text)
	{
		super.control.newRow(value, text);
	}
	public void newRow(Integer value, String text, ims.framework.utils.Image image)
	{
		super.control.newRow(value, text, image);
	}
	public void newRow(Integer value, String text, ims.framework.utils.Color textColor)
	{
		super.control.newRow(value, text, textColor);
	}
	public void newRow(Integer value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
	{
		super.control.newRow(value, text, image, textColor);
	}
	public boolean removeRow(Integer value)
	{
		return super.control.removeRow(value);
	}
	public Integer getValue()
	{
		return (Integer)super.control.getValue();
	}
	public void setValue(Integer value)
	{
		super.control.setValue(value);
	}
}
/**
 * Generated typed bridge for the "Hospital" combo box: delegates to the underlying
 * framework ComboBox with LocationLiteVo row values.
 */
public static class cmbHospitalComboBox extends ComboBoxBridge
{
	private static final long serialVersionUID = 1L;

	public void newRow(ims.core.vo.LocationLiteVo value, String text)
	{
		super.control.newRow(value, text);
	}
	public void newRow(ims.core.vo.LocationLiteVo value, String text, ims.framework.utils.Image image)
	{
		super.control.newRow(value, text, image);
	}
	public void newRow(ims.core.vo.LocationLiteVo value, String text, ims.framework.utils.Color textColor)
	{
		super.control.newRow(value, text, textColor);
	}
	public void newRow(ims.core.vo.LocationLiteVo value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
	{
		super.control.newRow(value, text, image, textColor);
	}
	public boolean removeRow(ims.core.vo.LocationLiteVo value)
	{
		return super.control.removeRow(value);
	}
	public ims.core.vo.LocationLiteVo getValue()
	{
		return (ims.core.vo.LocationLiteVo)super.control.getValue();
	}
	public void setValue(ims.core.vo.LocationLiteVo value)
	{
		super.control.setValue(value);
	}
}
/**
 * Generated typed bridge for the "Reviewing HCP" query combo box: delegates to the
 * underlying framework ComboBox with HcpLiteVo row values; also exposes the
 * free-typed (edited) text of the query box.
 */
public static class qmbReviewingHCPComboBox extends ComboBoxBridge
{
	private static final long serialVersionUID = 1L;

	public void newRow(ims.core.vo.HcpLiteVo value, String text)
	{
		super.control.newRow(value, text);
	}
	public void newRow(ims.core.vo.HcpLiteVo value, String text, ims.framework.utils.Image image)
	{
		super.control.newRow(value, text, image);
	}
	public void newRow(ims.core.vo.HcpLiteVo value, String text, ims.framework.utils.Color textColor)
	{
		super.control.newRow(value, text, textColor);
	}
	public void newRow(ims.core.vo.HcpLiteVo value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
	{
		super.control.newRow(value, text, image, textColor);
	}
	public boolean removeRow(ims.core.vo.HcpLiteVo value)
	{
		return super.control.removeRow(value);
	}
	public ims.core.vo.HcpLiteVo getValue()
	{
		return (ims.core.vo.HcpLiteVo)super.control.getValue();
	}
	public void setValue(ims.core.vo.HcpLiteVo value)
	{
		super.control.setValue(value);
	}
	public void setEditedText(String text)
	{
		super.control.setEditedText(text);
	}
	public String getEditedText()
	{
		return super.control.getEditedText();
	}
}
/**
 * Generated typed bridge for the "Ordering Location" query combo box: delegates to the
 * underlying framework ComboBox with LocationLiteVo row values; also exposes the
 * free-typed (edited) text of the query box.
 */
public static class qmbOrderingLocationComboBox extends ComboBoxBridge
{
	private static final long serialVersionUID = 1L;

	public void newRow(ims.core.vo.LocationLiteVo value, String text)
	{
		super.control.newRow(value, text);
	}
	public void newRow(ims.core.vo.LocationLiteVo value, String text, ims.framework.utils.Image image)
	{
		super.control.newRow(value, text, image);
	}
	public void newRow(ims.core.vo.LocationLiteVo value, String text, ims.framework.utils.Color textColor)
	{
		super.control.newRow(value, text, textColor);
	}
	public void newRow(ims.core.vo.LocationLiteVo value, String text, ims.framework.utils.Image image, ims.framework.utils.Color textColor)
	{
		super.control.newRow(value, text, image, textColor);
	}
	public boolean removeRow(ims.core.vo.LocationLiteVo value)
	{
		return super.control.removeRow(value);
	}
	public ims.core.vo.LocationLiteVo getValue()
	{
		return (ims.core.vo.LocationLiteVo)super.control.getValue();
	}
	public void setValue(ims.core.vo.LocationLiteVo value)
	{
		super.control.setValue(value);
	}
	public void setEditedText(String text)
	{
		super.control.setEditedText(text);
	}
	public String getEditedText()
	{
		return super.control.getEditedText();
	}
}
/**
 * Generated typed wrapper for one row of the Disciplines grid. Column 0 ("ColDis")
 * holds the discipline display text; the row value is the backing ServiceLiteVo.
 */
public static class grdDisciplinesRow extends GridRowBridge
{
	private static final long serialVersionUID = 1L;

	protected grdDisciplinesRow(GridRow row)
	{
		super(row);
	}
	public void showOpened(int column)
	{
		super.row.showOpened(column);
	}
	public void setColDisReadOnly(boolean value)
	{
		super.row.setReadOnly(0, value);
	}
	public boolean isColDisReadOnly()
	{
		return super.row.isReadOnly(0);
	}
	public void showColDisOpened()
	{
		super.row.showOpened(0);
	}
	public String getColDis()
	{
		return (String)super.row.get(0);
	}
	public void setColDis(String value)
	{
		super.row.set(0, value);
	}
	public void setCellColDisTooltip(String value)
	{
		super.row.setTooltip(0, value);
	}
	public ims.core.vo.ServiceLiteVo getValue()
	{
		return (ims.core.vo.ServiceLiteVo)super.row.getValue();
	}
	public void setValue(ims.core.vo.ServiceLiteVo value)
	{
		super.row.setValue(value);
	}
}
/**
 * Generated typed collection wrapper over the Disciplines grid rows: wraps each
 * framework GridRow in a grdDisciplinesRow on access/creation.
 */
public static class grdDisciplinesRowCollection extends GridRowCollectionBridge
{
	private static final long serialVersionUID = 1L;

	private grdDisciplinesRowCollection(GridRowCollection collection)
	{
		super(collection);
	}
	public grdDisciplinesRow get(int index)
	{
		return new grdDisciplinesRow(super.collection.get(index));
	}
	public grdDisciplinesRow newRow()
	{
		return new grdDisciplinesRow(super.collection.newRow());
	}
	public grdDisciplinesRow newRow(boolean autoSelect)
	{
		return new grdDisciplinesRow(super.collection.newRow(autoSelect));
	}
	public grdDisciplinesRow newRowAt(int index)
	{
		return new grdDisciplinesRow(super.collection.newRowAt(index));
	}
	public grdDisciplinesRow newRowAt(int index, boolean autoSelect)
	{
		return new grdDisciplinesRow(super.collection.newRowAt(index, autoSelect));
	}
}
public static class grdDisciplinesGrid extends GridBridge
{
private static final long serialVersionUID = 1L;
private void addStringColumn(String caption, int captionAlignment, int alignment, int width, boolean readOnly, boolean bold, int sortOrder, int maxLength, boolean canGrow, ims.framework.enumerations.CharacterCasing casing)
{
super.grid.addStringColumn(caption, captionAlignment, alignment, width, readOnly, bold, sortOrder, maxLength, canGrow, casing);
}
public ims.core.vo.ServiceLiteVoCollection getValues()
{
ims.core.vo.ServiceLiteVoCollection listOfValues = new ims.core.vo.ServiceLiteVoCollection();
for(int x = 0; x < this.getRows().size(); x++)
{
listOfValues.add(this.getRows().get(x).getValue());
}
return listOfValues;
}
public ims.core.vo.ServiceLiteVo getValue()
{
return (ims.core.vo.ServiceLiteVo)super.grid.getValue();
}
public void setValue(ims.core.vo.ServiceLiteVo value)
{
super.grid.setValue(value);
}
public grdDisciplinesRow getSelectedRow()
{
return super.grid.getSelectedRow() == null ? null : new grdDisciplinesRow(super.grid.getSelectedRow());
}
public int getSelectedRowIndex()
{
return super.grid.getSelectedRowIndex();
}
public grdDisciplinesRowCollection getRows()
{
return new grdDisciplinesRowCollection(super.grid.getRows());
}
public grdDisciplinesRow getRowByValue(ims.core.vo.ServiceLiteVo value)
{
GridRow row = super.grid.getRowByValue(value);
return row == null?null:new grdDisciplinesRow(row);
}
public void setColDisHeaderTooltip(String value)
{
super.grid.setColumnHeaderTooltip(0, value);
}
public String getColDisHeaderTooltip()
{
return super.grid.getColumnHeaderTooltip(0);
}
}
	// Generated bridge for the "List Results By" radio group: two buttons
	// (0 = "Oldest First", 1 = "Newest First") positioned via RuntimeAnchoring
	// and exposed through the type-safe ListResultsByEnumeration.
	public static class ListResultsByRadioButton extends RadioButtonBridge
	{
		private static final long serialVersionUID = 1L;
		// Creates the two buttons, scaling design coordinates to the runtime
		// size. NOTE(review): button 0 takes tab index +2015 and button 1
		// +2014 — tab order is the reverse of the button ids; presumably
		// intentional designer output, confirm before changing.
		protected void setContext(Integer startTabIndex, ims.framework.utils.SizeInfo designSize, ims.framework.utils.SizeInfo runtimeSize)
		{
			if(startTabIndex == null)
				throw new RuntimeException("Invalid startTabIndex ");
			RuntimeAnchoring anchoringHelper1 = new RuntimeAnchoring(designSize, runtimeSize, 632, 104, 104, 16, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
			control.addButton(0, anchoringHelper1.getX(), anchoringHelper1.getY(), anchoringHelper1.getWidth(), "Oldest First", startTabIndex.intValue() + 2015);
			RuntimeAnchoring anchoringHelper2 = new RuntimeAnchoring(designSize, runtimeSize, 520, 104, 88, 16, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
			control.addButton(1, anchoringHelper2.getX(), anchoringHelper2.getY(), anchoringHelper2.getWidth(), "Newest First", startTabIndex.intValue() + 2014);
		}
		// Relabels one option; silently ignores null/invalid arguments.
		public void setText(ListResultsByEnumeration option, String value)
		{
			if(option != null && option.id >= 0 && value != null)
				control.setText(option.id, value);
		}
		// Maps the control's raw selection id back to the enumeration
		// (-1 = None); returns null for any unexpected id.
		public ListResultsByEnumeration getValue()
		{
			switch (super.control.getValue())
			{
				case -1: return ListResultsByEnumeration.None;
				case 0: return ListResultsByEnumeration.rdoOldestFirst;
				case 1: return ListResultsByEnumeration.rdoNewestFirst;
			}
			return null;
		}
		// null is treated as "no selection" (None).
		public void setValue(ListResultsByEnumeration value)
		{
			if(value != null)
				super.control.setValue(value.id);
			else
				super.control.setValue(ListResultsByEnumeration.None.id);
		}
		public boolean isEnabled(ListResultsByEnumeration option)
		{
			return super.control.isEnabled(option.id);
		}
		public void setEnabled(ListResultsByEnumeration option, boolean value)
		{
			super.control.setEnabled(option.id, value);
		}
		public boolean isVisible(ListResultsByEnumeration option)
		{
			return super.control.isVisible(option.id);
		}
		public void setVisible(ListResultsByEnumeration option, boolean value)
		{
			super.control.setVisible(option.id, value);
		}
		// Visibility/enabled state of the whole group.
		public void setVisible(boolean value)
		{
			super.control.setVisible(value);
		}
		public void setEnabled(boolean value)
		{
			super.control.setEnabled(value);
		}
	}
public static class ListResultsByEnumeration implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
public static ListResultsByEnumeration None = new ListResultsByEnumeration(-1);
public static ListResultsByEnumeration rdoOldestFirst = new ListResultsByEnumeration(0);
public static ListResultsByEnumeration rdoNewestFirst = new ListResultsByEnumeration(1);
private ListResultsByEnumeration(int id)
{
this.id = id;
}
public boolean equals(Object o)
{
return this.id == ((ListResultsByEnumeration)o).id;
}
private int id;
}
	// No mandatory context entries for this form: a null (or any) context is
	// accepted without checks.
	private void validateContext(ims.framework.Context context)
	{
		if(context == null)
			return;
	}
	// This form does not support marking its record as "recorded in error".
	public boolean supportsRecordedInError()
	{
		return false;
	}
	// No backing value object for recorded-in-error handling, so always null.
	public ims.vo.ValueObject getRecordedInErrorVo()
	{
		return null;
	}
	// Convenience overload: full setContext with context validation enabled
	// and default start control id / tab index.
	protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, Context context) throws Exception
	{
		setContext(loader, form, appForm, factory, context, Boolean.FALSE, new Integer(0), null, null, new Integer(0));
	}
	// Convenience overload: as above but lets the caller skip validateContext.
	protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, Context context, Boolean skipContextValidation) throws Exception
	{
		setContext(loader, form, appForm, factory, context, skipContextValidation, new Integer(0), null, null, new Integer(0));
	}
	/**
	 * Generated form wiring: sizes the form (design size 824x232, scaled to
	 * the runtime size via RuntimeAnchoring), then constructs and registers
	 * every control in a fixed order — two custom HCP components, labels,
	 * two date pickers, four combo boxes, six check boxes, the disciplines
	 * grid, two image buttons and the "List Results By" radio group.
	 * Control ids are offset from startControlID and tab order from
	 * startTabIndex. The add order here fixes the integer indices used by
	 * the accessor methods below (getControl(n), getComboBox(n), ...), so
	 * the order must not change.
	 */
	protected void setContext(FormLoader loader, Form form, ims.framework.interfaces.IAppForm appForm, UIFactory factory, ims.framework.Context context, Boolean skipContextValidation, Integer startControlID, ims.framework.utils.SizeInfo runtimeSize, ims.framework.Control control, Integer startTabIndex) throws Exception
	{
		if(loader == null); // this is to avoid eclipse warning only.
		if(factory == null); // this is to avoid eclipse warning only.
		if(runtimeSize == null); // this is to avoid eclipse warning only.
		if(appForm == null)
			throw new RuntimeException("Invalid application form");
		if(startControlID == null)
			throw new RuntimeException("Invalid startControlID");
		if(control == null); // this is to avoid eclipse warning only.
		if(startTabIndex == null)
			throw new RuntimeException("Invalid startTabIndex");
		this.context = context;
		this.componentIdentifier = startControlID.toString();
		this.formInfo = form.getFormInfo();
		this.globalContext = new GlobalContext(context);
		if(skipContextValidation == null || !skipContextValidation.booleanValue())
		{
			validateContext(context);
		}
		super.setContext(form);
		// Fall back to the design size when no runtime size was supplied.
		ims.framework.utils.SizeInfo designSize = new ims.framework.utils.SizeInfo(824, 232);
		if(runtimeSize == null)
			runtimeSize = designSize;
		form.setWidth(runtimeSize.getWidth());
		form.setHeight(runtimeSize.getHeight());
		super.setFormReferences(FormReferencesFlyweightFactory.getInstance().create(Forms.class));
		super.setImageReferences(ImageReferencesFlyweightFactory.getInstance().create(Images.class));
		super.setGlobalContext(ContextBridgeFlyweightFactory.getInstance().create(GlobalContextBridge.class, context, false));
		super.setLocalContext(new LocalContext(context, form.getFormInfo(), componentIdentifier));
		// Custom Controls
		// "Ordering HCP" custom component: loads embedded form 102256 and
		// registers any menus that form declares.
		ims.framework.CustomComponent instance1 = factory.getEmptyCustomComponent();
		RuntimeAnchoring anchoringHelper3 = new RuntimeAnchoring(designSize, runtimeSize, 128, 176, 288, 20, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
		ims.framework.FormUiLogic m_ccOrderingHCPForm = loader.loadComponent(102256, appForm, startControlID * 10 + 1000, anchoringHelper3.getSize(), instance1, startTabIndex.intValue() + 1004, skipContextValidation);
		//ims.framework.Control m_ccOrderingHCPControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1000), new Integer(128), new Integer(176), new Integer(288), new Integer(20), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 1004), m_ccOrderingHCPForm, instance1 } );
		ims.framework.Control m_ccOrderingHCPControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1001), new Integer(anchoringHelper3.getX()), new Integer(anchoringHelper3.getY()), new Integer(anchoringHelper3.getWidth()), new Integer(anchoringHelper3.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 1004), m_ccOrderingHCPForm, instance1, Boolean.FALSE } );
		super.addControl(m_ccOrderingHCPControl);
		Menu[] menus1 = m_ccOrderingHCPForm.getForm().getRegisteredMenus();
		for(int x = 0; x < menus1.length; x++)
		{
			form.registerMenu(menus1[x]);
		}
		// "Responsible HCP" custom component: same embedded form (102256).
		ims.framework.CustomComponent instance2 = factory.getEmptyCustomComponent();
		RuntimeAnchoring anchoringHelper4 = new RuntimeAnchoring(designSize, runtimeSize, 128, 152, 288, 20, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
		ims.framework.FormUiLogic m_ccResponsibleHCPForm = loader.loadComponent(102256, appForm, startControlID * 10 + 2000, anchoringHelper4.getSize(), instance2, startTabIndex.intValue() + 3, skipContextValidation);
		//ims.framework.Control m_ccResponsibleHCPControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1002), new Integer(128), new Integer(152), new Integer(288), new Integer(20), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 3), m_ccResponsibleHCPForm, instance2 } );
		ims.framework.Control m_ccResponsibleHCPControl = factory.getControl(CustomComponent.class, new Object[] { control, new Integer(startControlID.intValue() + 1003), new Integer(anchoringHelper4.getX()), new Integer(anchoringHelper4.getY()), new Integer(anchoringHelper4.getWidth()), new Integer(anchoringHelper4.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT, new Integer(startTabIndex.intValue() + 3), m_ccResponsibleHCPForm, instance2, Boolean.FALSE } );
		super.addControl(m_ccResponsibleHCPControl);
		Menu[] menus2 = m_ccResponsibleHCPForm.getForm().getRegisteredMenus();
		for(int x = 0; x < menus2.length; x++)
		{
			form.registerMenu(menus2[x]);
		}
		// Label Controls
		RuntimeAnchoring anchoringHelper5 = new RuntimeAnchoring(designSize, runtimeSize, 424, 128, 63, 17, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1004), new Integer(anchoringHelper5.getX()), new Integer(anchoringHelper5.getY()), new Integer(anchoringHelper5.getWidth()), new Integer(anchoringHelper5.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, "Discipline:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper6 = new RuntimeAnchoring(designSize, runtimeSize, 424, 8, 232, 17, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1005), new Integer(anchoringHelper6.getX()), new Integer(anchoringHelper6.getY()), new Integer(anchoringHelper6.getWidth()), new Integer(anchoringHelper6.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, "Include Results where current status is:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper7 = new RuntimeAnchoring(designSize, runtimeSize, 640, 58, 52, 17, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1006), new Integer(anchoringHelper7.getX()), new Integer(anchoringHelper7.getY()), new Integer(anchoringHelper7.getWidth()), new Integer(anchoringHelper7.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, "To Date:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper8 = new RuntimeAnchoring(designSize, runtimeSize, 424, 58, 68, 17, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1007), new Integer(anchoringHelper8.getX()), new Integer(anchoringHelper8.getY()), new Integer(anchoringHelper8.getWidth()), new Integer(anchoringHelper8.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, "From Date:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper9 = new RuntimeAnchoring(designSize, runtimeSize, 696, 82, 30, 17, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1008), new Integer(anchoringHelper9.getX()), new Integer(anchoringHelper9.getY()), new Integer(anchoringHelper9.getWidth()), new Integer(anchoringHelper9.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, "days", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper10 = new RuntimeAnchoring(designSize, runtimeSize, 424, 82, 154, 17, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1009), new Integer(anchoringHelper10.getX()), new Integer(anchoringHelper10.getY()), new Integer(anchoringHelper10.getWidth()), new Integer(anchoringHelper10.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, "or reported within the last", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper11 = new RuntimeAnchoring(designSize, runtimeSize, 8, 202, 95, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1010), new Integer(anchoringHelper11.getX()), new Integer(anchoringHelper11.getY()), new Integer(anchoringHelper11.getWidth()), new Integer(anchoringHelper11.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Reviewing HCP:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper12 = new RuntimeAnchoring(designSize, runtimeSize, 8, 178, 85, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1011), new Integer(anchoringHelper12.getX()), new Integer(anchoringHelper12.getY()), new Integer(anchoringHelper12.getWidth()), new Integer(anchoringHelper12.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Ordering HCP:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper13 = new RuntimeAnchoring(designSize, runtimeSize, 8, 154, 104, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1012), new Integer(anchoringHelper13.getX()), new Integer(anchoringHelper13.getY()), new Integer(anchoringHelper13.getWidth()), new Integer(anchoringHelper13.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Responsible HCP:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper14 = new RuntimeAnchoring(designSize, runtimeSize, 8, 58, 104, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1013), new Integer(anchoringHelper14.getX()), new Integer(anchoringHelper14.getY()), new Integer(anchoringHelper14.getWidth()), new Integer(anchoringHelper14.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Outpatient Dept.:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper15 = new RuntimeAnchoring(designSize, runtimeSize, 8, 10, 55, 17, ims.framework.enumerations.ControlAnchoring.TOPLEFT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1014), new Integer(anchoringHelper15.getX()), new Integer(anchoringHelper15.getY()), new Integer(anchoringHelper15.getWidth()), new Integer(anchoringHelper15.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFT, "Hospital:", new Integer(1), null, new Integer(0)}));
		RuntimeAnchoring anchoringHelper16 = new RuntimeAnchoring(designSize, runtimeSize, 424, 104, 92, 17, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(Label.class, new Object[] { control, new Integer(startControlID.intValue() + 1015), new Integer(anchoringHelper16.getX()), new Integer(anchoringHelper16.getY()), new Integer(anchoringHelper16.getWidth()), new Integer(anchoringHelper16.getHeight()), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, "List Results By:", new Integer(1), null, new Integer(0)}));
		// Date Controls ("To Date" then "From Date")
		RuntimeAnchoring anchoringHelper17 = new RuntimeAnchoring(designSize, runtimeSize, 696, 56, 112, 20, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(DateControl.class, new Object[] { control, new Integer(startControlID.intValue() + 1016), new Integer(anchoringHelper17.getX()), new Integer(anchoringHelper17.getY()), new Integer(anchoringHelper17.getWidth()), new Integer(anchoringHelper17.getHeight()), new Integer(startTabIndex.intValue() + 2012), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT,Boolean.TRUE, null, Boolean.TRUE, null, Boolean.TRUE, null}));
		RuntimeAnchoring anchoringHelper18 = new RuntimeAnchoring(designSize, runtimeSize, 496, 56, 112, 20, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(DateControl.class, new Object[] { control, new Integer(startControlID.intValue() + 1017), new Integer(anchoringHelper18.getX()), new Integer(anchoringHelper18.getY()), new Integer(anchoringHelper18.getWidth()), new Integer(anchoringHelper18.getHeight()), new Integer(startTabIndex.intValue() + 2011), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT,Boolean.TRUE, null, Boolean.TRUE, null, Boolean.TRUE, null}));
		// ComboBox Controls (wrapped in typed bridges via the flyweight factory)
		RuntimeAnchoring anchoringHelper19 = new RuntimeAnchoring(designSize, runtimeSize, 616, 80, 72, 21, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		ComboBox m_cmbDaysTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1018), new Integer(anchoringHelper19.getX()), new Integer(anchoringHelper19.getY()), new Integer(anchoringHelper19.getWidth()), new Integer(anchoringHelper19.getHeight()), new Integer(startTabIndex.intValue() + 2013), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPRIGHT ,Boolean.TRUE, Boolean.TRUE, SortOrder.NONE, Boolean.FALSE, new Integer(1), null, Boolean.FALSE, new Integer(-1)});
		addControl(m_cmbDaysTemp);
		cmbDaysComboBox cmbDays = (cmbDaysComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(cmbDaysComboBox.class, m_cmbDaysTemp);
		super.addComboBox(cmbDays);
		RuntimeAnchoring anchoringHelper20 = new RuntimeAnchoring(designSize, runtimeSize, 128, 8, 271, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
		ComboBox m_cmbHospitalTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1019), new Integer(anchoringHelper20.getX()), new Integer(anchoringHelper20.getY()), new Integer(anchoringHelper20.getWidth()), new Integer(anchoringHelper20.getHeight()), new Integer(startTabIndex.intValue() + 1), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT ,Boolean.TRUE, Boolean.TRUE, SortOrder.NONE, Boolean.FALSE, new Integer(1), null, Boolean.FALSE, new Integer(-1)});
		addControl(m_cmbHospitalTemp);
		cmbHospitalComboBox cmbHospital = (cmbHospitalComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(cmbHospitalComboBox.class, m_cmbHospitalTemp);
		super.addComboBox(cmbHospital);
		// Query ComboBox Controls
		RuntimeAnchoring anchoringHelper21 = new RuntimeAnchoring(designSize, runtimeSize, 128, 200, 272, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
		ComboBox m_qmbReviewingHCPTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1020), new Integer(anchoringHelper21.getX()), new Integer(anchoringHelper21.getY()), new Integer(anchoringHelper21.getWidth()), new Integer(anchoringHelper21.getHeight()), new Integer(startTabIndex.intValue() + 2005), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT,Boolean.TRUE, Boolean.TRUE, SortOrder.NONE, Boolean.TRUE, new Integer(3), null, Boolean.FALSE, new Integer(-1), Boolean.FALSE});
		addControl(m_qmbReviewingHCPTemp);
		qmbReviewingHCPComboBox qmbReviewingHCP = (qmbReviewingHCPComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(qmbReviewingHCPComboBox.class, m_qmbReviewingHCPTemp);
		super.addComboBox(qmbReviewingHCP);
		RuntimeAnchoring anchoringHelper22 = new RuntimeAnchoring(designSize, runtimeSize, 128, 56, 271, 21, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT);
		ComboBox m_qmbOrderingLocationTemp = (ComboBox)factory.getControl(ComboBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1021), new Integer(anchoringHelper22.getX()), new Integer(anchoringHelper22.getY()), new Integer(anchoringHelper22.getWidth()), new Integer(anchoringHelper22.getHeight()), new Integer(startTabIndex.intValue() + 2), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPLEFTRIGHT,Boolean.TRUE, Boolean.TRUE, SortOrder.NONE, Boolean.TRUE, new Integer(1), null, Boolean.FALSE, new Integer(-1), Boolean.FALSE});
		addControl(m_qmbOrderingLocationTemp);
		qmbOrderingLocationComboBox qmbOrderingLocation = (qmbOrderingLocationComboBox)ComboBoxFlyweightFactory.getInstance().createComboBoxBridge(qmbOrderingLocationComboBox.class, m_qmbOrderingLocationTemp);
		super.addComboBox(qmbOrderingLocation);
		// CheckBox Controls (result-status filters plus "Abnormal ... Only")
		RuntimeAnchoring anchoringHelper23 = new RuntimeAnchoring(designSize, runtimeSize, 424, 208, 240, 16, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(CheckBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1022), new Integer(anchoringHelper23.getX()), new Integer(anchoringHelper23.getY()), new Integer(anchoringHelper23.getWidth()), new Integer(anchoringHelper23.getHeight()), new Integer(startTabIndex.intValue() + 2019), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPRIGHT ,"Abnormal Pathology Results Only", Boolean.FALSE, null}));
		RuntimeAnchoring anchoringHelper24 = new RuntimeAnchoring(designSize, runtimeSize, 576, 32, 64, 16, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(CheckBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1023), new Integer(anchoringHelper24.getX()), new Integer(anchoringHelper24.getY()), new Integer(anchoringHelper24.getWidth()), new Integer(anchoringHelper24.getHeight()), new Integer(startTabIndex.intValue() + 2008), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPRIGHT ,"Checked", Boolean.FALSE, null}));
		RuntimeAnchoring anchoringHelper25 = new RuntimeAnchoring(designSize, runtimeSize, 736, 32, 88, 16, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(CheckBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1024), new Integer(anchoringHelper25.getX()), new Integer(anchoringHelper25.getY()), new Integer(anchoringHelper25.getWidth()), new Integer(anchoringHelper25.getHeight()), new Integer(startTabIndex.intValue() + 2010), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPRIGHT ,"Completed", Boolean.FALSE, null}));
		RuntimeAnchoring anchoringHelper26 = new RuntimeAnchoring(designSize, runtimeSize, 650, 32, 80, 16, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(CheckBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1025), new Integer(anchoringHelper26.getX()), new Integer(anchoringHelper26.getY()), new Integer(anchoringHelper26.getWidth()), new Integer(anchoringHelper26.getHeight()), new Integer(startTabIndex.intValue() + 2009), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPRIGHT ,"For Review", Boolean.FALSE, null}));
		RuntimeAnchoring anchoringHelper27 = new RuntimeAnchoring(designSize, runtimeSize, 523, 32, 56, 16, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(CheckBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1026), new Integer(anchoringHelper27.getX()), new Integer(anchoringHelper27.getY()), new Integer(anchoringHelper27.getWidth()), new Integer(anchoringHelper27.getHeight()), new Integer(startTabIndex.intValue() + 2007), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPRIGHT ,"Seen", Boolean.FALSE, null}));
		RuntimeAnchoring anchoringHelper28 = new RuntimeAnchoring(designSize, runtimeSize, 424, 32, 88, 16, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(CheckBox.class, new Object[] { control, new Integer(startControlID.intValue() + 1027), new Integer(anchoringHelper28.getX()), new Integer(anchoringHelper28.getY()), new Integer(anchoringHelper28.getWidth()), new Integer(anchoringHelper28.getHeight()), new Integer(startTabIndex.intValue() + 2006), ControlState.UNKNOWN, ControlState.UNKNOWN,ims.framework.enumerations.ControlAnchoring.TOPRIGHT ,"New/Updated", Boolean.FALSE, null}));
		// Grid Controls (single "Name" column, see grdDisciplinesGrid)
		RuntimeAnchoring anchoringHelper29 = new RuntimeAnchoring(designSize, runtimeSize, 496, 128, 208, 72, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		Grid m_grdDisciplinesTemp = (Grid)factory.getControl(Grid.class, new Object[] { control, new Integer(startControlID.intValue() + 1028), new Integer(anchoringHelper29.getX()), new Integer(anchoringHelper29.getY()), new Integer(anchoringHelper29.getWidth()), new Integer(anchoringHelper29.getHeight()), new Integer(startTabIndex.intValue() + 2016), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT,Boolean.TRUE, Boolean.FALSE, new Integer(24), Boolean.TRUE, null, Boolean.FALSE, Boolean.FALSE, new Integer(0), null, Boolean.FALSE, Boolean.TRUE});
		addControl(m_grdDisciplinesTemp);
		grdDisciplinesGrid grdDisciplines = (grdDisciplinesGrid)GridFlyweightFactory.getInstance().createGridBridge(grdDisciplinesGrid.class, m_grdDisciplinesTemp);
		grdDisciplines.addStringColumn("Name", 0, 0, -1, true, false, 0, 0, true, ims.framework.enumerations.CharacterCasing.NORMAL);
		super.addGrid(grdDisciplines);
		// Image Buttons Controls (add/remove discipline, plus/minus icons)
		RuntimeAnchoring anchoringHelper30 = new RuntimeAnchoring(designSize, runtimeSize, 712, 152, 20, 20, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(ImageButton.class, new Object[] { control, new Integer(startControlID.intValue() + 1029), new Integer(anchoringHelper30.getX()), new Integer(anchoringHelper30.getY()), new Integer(anchoringHelper30.getWidth()), new Integer(anchoringHelper30.getHeight()), new Integer(startTabIndex.intValue() + 2018), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, this.getImages().Core.Minus, this.getImages().Core.MinusDisabled, "Remove Discipline", Boolean.FALSE, Boolean.FALSE, Boolean.TRUE, Boolean.FALSE, null}));
		RuntimeAnchoring anchoringHelper31 = new RuntimeAnchoring(designSize, runtimeSize, 712, 128, 20, 20, ims.framework.enumerations.ControlAnchoring.TOPRIGHT);
		super.addControl(factory.getControl(ImageButton.class, new Object[] { control, new Integer(startControlID.intValue() + 1030), new Integer(anchoringHelper31.getX()), new Integer(anchoringHelper31.getY()), new Integer(anchoringHelper31.getWidth()), new Integer(anchoringHelper31.getHeight()), new Integer(startTabIndex.intValue() + 2017), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT, this.getImages().Core.Plus, this.getImages().Core.PlusDisabled, "Add Discipline", Boolean.FALSE, Boolean.FALSE, Boolean.TRUE, Boolean.FALSE, null}));
		// RadioButton Controls (buttons themselves are added in the bridge's setContext)
		RadioButton tmpListResultsBy = (RadioButton)factory.getControl(RadioButton.class, new Object[] { control, new Integer(startControlID.intValue() + 1031), new Integer(0), ControlState.UNKNOWN, ControlState.UNKNOWN, ims.framework.enumerations.ControlAnchoring.TOPRIGHT,Boolean.FALSE});
		super.addControl(tmpListResultsBy);
		ListResultsByRadioButton ListResultsBy = (ListResultsByRadioButton)RadioButtonBridgeFlyweightFactory.getInstance().createRadioButtonBridge(ListResultsByRadioButton.class, tmpListResultsBy);
		ListResultsBy.setContext(startTabIndex, designSize, runtimeSize);
		super.addRadioButton(ListResultsBy);
	}
	// Typed access to the form-reference constants registered in setContext.
	public Forms getForms()
	{
		return (Forms)super.getFormReferences();
	}
	// Typed access to the image-reference constants registered in setContext.
	public Images getImages()
	{
		return (Images)super.getImageReferences();
	}
	// Control index 0: the "Ordering HCP" custom component (embedded MOS
	// query form 102256 loaded in setContext). Returns its UI logic.
	public ims.core.forms.mosquery.IComponent ccOrderingHCP()
	{
		return (ims.core.forms.mosquery.IComponent)((ims.framework.cn.controls.CustomComponent)super.getControl(0)).getLogic();
	}
	public void setccOrderingHCPValueChangedEvent(ims.framework.delegates.ValueChanged delegate)
	{
		((CustomComponent)super.getControl(0)).setValueChangedEvent(delegate);
	}
	public void setccOrderingHCPVisible(boolean value)
	{
		((ims.framework.Control)super.getControl(0)).setVisible(value);
	}
	public boolean isccOrderingHCPVisible()
	{
		return ((ims.framework.Control)super.getControl(0)).isVisible();
	}
	public void setccOrderingHCPEnabled(boolean value)
	{
		((ims.framework.Control)super.getControl(0)).setEnabled(value);
	}
	public boolean isccOrderingHCPEnabled()
	{
		return ((ims.framework.Control)super.getControl(0)).isEnabled();
	}
	// Control index 1: the "Responsible HCP" custom component (same embedded
	// MOS query form 102256).
	public ims.core.forms.mosquery.IComponent ccResponsibleHCP()
	{
		return (ims.core.forms.mosquery.IComponent)((ims.framework.cn.controls.CustomComponent)super.getControl(1)).getLogic();
	}
	public void setccResponsibleHCPValueChangedEvent(ims.framework.delegates.ValueChanged delegate)
	{
		((CustomComponent)super.getControl(1)).setValueChangedEvent(delegate);
	}
	public void setccResponsibleHCPVisible(boolean value)
	{
		((ims.framework.Control)super.getControl(1)).setVisible(value);
	}
	public boolean isccResponsibleHCPVisible()
	{
		return ((ims.framework.Control)super.getControl(1)).isVisible();
	}
	public void setccResponsibleHCPEnabled(boolean value)
	{
		((ims.framework.Control)super.getControl(1)).setEnabled(value);
	}
	public boolean isccResponsibleHCPEnabled()
	{
		return ((ims.framework.Control)super.getControl(1)).isEnabled();
	}
	// Typed accessors for the remaining controls. The integer indices are the
	// order in which setContext added each control/combo/grid/radio group and
	// must stay in sync with it.
	public Label lblReviewingHCP()
	{
		return (Label)super.getControl(8);
	}
	public Label lblOrderingHCP()
	{
		return (Label)super.getControl(9);
	}
	public Label lblResponsibleHCP()
	{
		return (Label)super.getControl(10);
	}
	// "To Date" picker.
	public DateControl dteTo()
	{
		return (DateControl)super.getControl(14);
	}
	// "From Date" picker.
	public DateControl dteFrom()
	{
		return (DateControl)super.getControl(15);
	}
	// "reported within the last N days" combo.
	public cmbDaysComboBox cmbDays()
	{
		return (cmbDaysComboBox)super.getComboBox(0);
	}
	public cmbHospitalComboBox cmbHospital()
	{
		return (cmbHospitalComboBox)super.getComboBox(1);
	}
	public qmbReviewingHCPComboBox qmbReviewingHCP()
	{
		return (qmbReviewingHCPComboBox)super.getComboBox(2);
	}
	// "Outpatient Dept." query combo.
	public qmbOrderingLocationComboBox qmbOrderingLocation()
	{
		return (qmbOrderingLocationComboBox)super.getComboBox(3);
	}
	public CheckBox chkAbnormalPathologyResultsOnly()
	{
		return (CheckBox)super.getControl(20);
	}
	public CheckBox chkChecked()
	{
		return (CheckBox)super.getControl(21);
	}
	public CheckBox chkCompleted()
	{
		return (CheckBox)super.getControl(22);
	}
	// The "For Review" status checkbox.
	public CheckBox chkReview()
	{
		return (CheckBox)super.getControl(23);
	}
	// The "Seen" status checkbox.
	public CheckBox chkSeenChecked()
	{
		return (CheckBox)super.getControl(24);
	}
	// The "New/Updated" status checkbox.
	public CheckBox chkNew()
	{
		return (CheckBox)super.getControl(25);
	}
	public grdDisciplinesGrid grdDisciplines()
	{
		return (grdDisciplinesGrid)super.getGrid(0);
	}
	public ImageButton imbRemoveDiscipline()
	{
		return (ImageButton)super.getControl(27);
	}
	public ImageButton imbAddDiscipline()
	{
		return (ImageButton)super.getControl(28);
	}
	public ListResultsByRadioButton ListResultsBy()
	{
		return (ListResultsByRadioButton)super.getRadioButton(0);
	}
// Generated catalogue of form names reachable from this component. The numeric
// form id (116177) is assigned by the form generator and must match the
// deployed form registry.
public static class Forms implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
// Thin subclass only needed to reach FormName's non-public int constructor.
protected final class LocalFormName extends FormName
{
private static final long serialVersionUID = 1L;
private LocalFormName(int name)
{
super(name);
}
}
private Forms()
{
OCRR = new OCRRForms();
}
// Forms belonging to the OCRR namespace.
public final class OCRRForms implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
private OCRRForms()
{
SearchDisciplines = new LocalFormName(116177);
}
public final FormName SearchDisciplines;
}
public OCRRForms OCRR;
}
// Generated catalogue of images used by this form. Each entry pins a numeric
// image id to a resource path and a fixed 16x16 pixel size.
public static class Images implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
// Thin subclass only needed to reach ImagePath's non-public constructor.
private final class ImageHelper extends ims.framework.utils.ImagePath
{
private static final long serialVersionUID = 1L;
private ImageHelper(int id, String path, Integer width, Integer height)
{
super(id, path, width, height);
}
}
private Images()
{
Core = new CoreImages();
}
// Shared "Core" images: plus/minus glyphs and their disabled variants.
public final class CoreImages implements java.io.Serializable
{
private static final long serialVersionUID = 1L;
private CoreImages()
{
Plus = new ImageHelper(102116, "Images/Core/plus.gif", new Integer(16), new Integer(16));
PlusDisabled = new ImageHelper(102117, "Images/Core/plus_disabled.gif", new Integer(16), new Integer(16));
Minus = new ImageHelper(102118, "Images/Core/minus.gif", new Integer(16), new Integer(16));
MinusDisabled = new ImageHelper(102119, "Images/Core/minus_disabled.gif", new Integer(16), new Integer(16));
}
public final ims.framework.utils.Image Plus;
public final ims.framework.utils.Image PlusDisabled;
public final ims.framework.utils.Image Minus;
public final ims.framework.utils.Image MinusDisabled;
}
public final CoreImages Core;
}
// Accessor for the component-wide global context instance.
public GlobalContext getGlobalContext()
{
return this.globalContext;
}
// Generated bridge type; carries no members of its own.
public static class GlobalContextBridge extends ContextBridge
{
private static final long serialVersionUID = 1L;
}
// Accessor for this component's local (per-form-instance) context.
public LocalContext getLocalContext()
{
return (LocalContext)super.getLocalCtx();
}
// Typed wrapper around a single local context variable ("CurrentHCP"). The
// variable's storage key is derived from the form's local-variable prefix and
// the component identifier, so distinct component instances do not collide.
public class LocalContext extends ContextBridge
{
private static final long serialVersionUID = 1L;
public LocalContext(Context context, ims.framework.FormInfo formInfo, String componentIdentifier)
{
super.setContext(context);
String prefix = formInfo.getLocalVariablesPrefix();
cxl_CurrentHCP = new ims.framework.ContextVariable("CurrentHCP", prefix + "_lv_OCRR.NewResultsOutpatientTabComponent.__internal_x_context__CurrentHCP_" + componentIdentifier + "");
}
// True when CurrentHCP holds a value in the bound context.
public boolean getCurrentHCPIsNotNull()
{
return !cxl_CurrentHCP.getValueIsNull(context);
}
public ims.core.vo.HcpLiteVo getCurrentHCP()
{
return (ims.core.vo.HcpLiteVo)cxl_CurrentHCP.getValue(context);
}
public void setCurrentHCP(ims.core.vo.HcpLiteVo value)
{
cxl_CurrentHCP.setValue(context, value);
}
private ims.framework.ContextVariable cxl_CurrentHCP = null;
}
// Lazily builds (and caches) the report-field bindings for this form.
// Returns null until a context has been attached.
private IReportField[] getFormReportFields()
{
if(this.context == null)
return null;
if(this.reportFields == null)
this.reportFields = new ReportFields(this.context, this.formInfo, this.componentIdentifier).getReportFields();
return this.reportFields;
}
// Generated table mapping context variables to report fields. Each entry ties
// a context-variable name to a business-object column id ("BO-…") and the
// report column name. The table layout is fixed by the generator.
private class ReportFields
{
public ReportFields(Context context, ims.framework.FormInfo formInfo, String componentIdentifier)
{
this.context = context;
this.formInfo = formInfo;
this.componentIdentifier = componentIdentifier;
}
public IReportField[] getReportFields()
{
String prefix = formInfo.getLocalVariablesPrefix();
IReportField[] fields = new IReportField[76];
// Patient demographics (shared _cvp_Core.PatientShort / PatientFilter variables).
fields[0] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-ID", "ID_Patient");
fields[1] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-SEX", "Sex");
fields[2] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-DOB", "Dob");
fields[3] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-DOD", "Dod");
fields[4] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-RELIGION", "Religion");
fields[5] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-ISACTIVE", "IsActive");
fields[6] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-ETHNICORIGIN", "EthnicOrigin");
fields[7] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-MARITALSTATUS", "MaritalStatus");
fields[8] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-SCN", "SCN");
fields[9] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-SOURCEOFINFORMATION", "SourceOfInformation");
fields[10] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-TIMEOFDEATH", "TimeOfDeath");
fields[11] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-ISQUICKREGISTRATIONPATIENT", "IsQuickRegistrationPatient");
fields[12] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientShort", "BO-1001100000-CURRENTRESPONSIBLECONSULTANT", "CurrentResponsibleConsultant");
fields[13] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientFilter", "BO-1001100000-ID", "ID_Patient");
fields[14] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientFilter", "BO-1001100000-SEX", "Sex");
fields[15] = new ims.framework.ReportField(this.context, "_cvp_Core.PatientFilter", "BO-1001100000-DOB", "Dob");
// Current clinical contact / care context / episode-of-care variables.
fields[16] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-ID", "ID_ClinicalContact");
fields[17] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-SPECIALTY", "Specialty");
fields[18] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-CONTACTTYPE", "ContactType");
fields[19] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-STARTDATETIME", "StartDateTime");
fields[20] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-ENDDATETIME", "EndDateTime");
fields[21] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-CARECONTEXT", "CareContext");
fields[22] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentClinicalContact", "BO-1004100003-ISCLINICALNOTECREATED", "IsClinicalNoteCreated");
fields[23] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-ID", "ID_Hcp");
fields[24] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-HCPTYPE", "HcpType");
fields[25] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-ISACTIVE", "IsActive");
fields[26] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-ISHCPARESPONSIBLEHCP", "IsHCPaResponsibleHCP");
fields[27] = new ims.framework.ReportField(this.context, "_cvp_Core.RecordingHCP", "BO-1006100000-ISARESPONSIBLEEDCLINICIAN", "IsAResponsibleEDClinician");
fields[28] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-ID", "ID_CareContext");
fields[29] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-CONTEXT", "Context");
fields[30] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-ORDERINGHOSPITAL", "OrderingHospital");
fields[31] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-ESTIMATEDDISCHARGEDATE", "EstimatedDischargeDate");
fields[32] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-STARTDATETIME", "StartDateTime");
fields[33] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-ENDDATETIME", "EndDateTime");
fields[34] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-LOCATIONTYPE", "LocationType");
fields[35] = new ims.framework.ReportField(this.context, "_cvp_Core.CurrentCareContext", "BO-1004100019-RESPONSIBLEHCP", "ResponsibleHCP");
fields[36] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-ID", "ID_EpisodeOfCare");
fields[37] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-CARESPELL", "CareSpell");
fields[38] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-SPECIALTY", "Specialty");
fields[39] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-RELATIONSHIP", "Relationship");
fields[40] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-STARTDATE", "StartDate");
fields[41] = new ims.framework.ReportField(this.context, "_cvp_Core.EpisodeofCareShort", "BO-1004100018-ENDDATE", "EndDate");
// Clinical-note variables and list filters.
fields[42] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-ID", "ID_ClinicalNotes");
fields[43] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-CLINICALNOTE", "ClinicalNote");
fields[44] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-NOTETYPE", "NoteType");
fields[45] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-DISCIPLINE", "Discipline");
fields[46] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-CLINICALCONTACT", "ClinicalContact");
fields[47] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-ISDERIVEDNOTE", "IsDerivedNote");
fields[48] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-FORREVIEW", "ForReview");
fields[49] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-FORREVIEWDISCIPLINE", "ForReviewDiscipline");
fields[50] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-REVIEWINGDATETIME", "ReviewingDateTime");
fields[51] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-ISCORRECTED", "IsCorrected");
fields[52] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-ISTRANSCRIBED", "IsTranscribed");
fields[53] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-SOURCEOFNOTE", "SourceOfNote");
fields[54] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-RECORDINGDATETIME", "RecordingDateTime");
fields[55] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-INHOSPITALREPORT", "InHospitalReport");
fields[56] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-CARECONTEXT", "CareContext");
fields[57] = new ims.framework.ReportField(this.context, "_cvp_Clinical.CurrentClinicalNote", "BO-1011100000-NOTECLASSIFICATION", "NoteClassification");
fields[58] = new ims.framework.ReportField(this.context, "_cvp_STHK.AvailableBedsListFilter", "BO-1014100009-ID", "ID_BedSpaceState");
fields[59] = new ims.framework.ReportField(this.context, "_cvp_STHK.PendingEmergencyAdmissionsFilter", "BO-1014100011-ID", "ID_PendingEmergencyAdmission");
fields[60] = new ims.framework.ReportField(this.context, "_cvp_STHK.PendingEmergencyAdmissionsFilter", "BO-1014100011-ADMISSIONSTATUS", "AdmissionStatus");
fields[61] = new ims.framework.ReportField(this.context, "_cvp_STHK.PendingDischargesListFilter", "BO-1014100000-ID", "ID_InpatientEpisode");
fields[62] = new ims.framework.ReportField(this.context, "_cvp_STHK.PendingDischargesListFilter", "BO-1014100000-ESTDISCHARGEDATE", "EstDischargeDate");
fields[63] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-ID", "ID_ClinicalNotes");
fields[64] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-FORREVIEW", "ForReview");
fields[65] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-FORREVIEWDISCIPLINE", "ForReviewDiscipline");
fields[66] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-NOTECLASSIFICATION", "NoteClassification");
fields[67] = new ims.framework.ReportField(this.context, "_cvp_Clinical.ExtendedClinicalNotesListFilter", "BO-1011100000-CARECONTEXT", "CareContext");
fields[68] = new ims.framework.ReportField(this.context, "_cvp_Core.PasEvent", "BO-1014100003-ID", "ID_PASEvent");
fields[69] = new ims.framework.ReportField(this.context, "_cvp_Correspondence.CorrespondenceDetails", "BO-1052100001-ID", "ID_CorrespondenceDetails");
fields[70] = new ims.framework.ReportField(this.context, "_cvp_RefMan.CatsReferral", "BO-1004100035-ID", "ID_CatsReferral");
// Per-component local variable (CurrentHCP): same storage key as built in
// LocalContext, so the prefix/identifier concatenation must match exactly.
fields[71] = new ims.framework.ReportField(this.context, prefix + "_lv_OCRR.NewResultsOutpatientTabComponent.__internal_x_context__CurrentHCP_" + componentIdentifier, "BO-1006100000-ID", "ID_Hcp");
fields[72] = new ims.framework.ReportField(this.context, prefix + "_lv_OCRR.NewResultsOutpatientTabComponent.__internal_x_context__CurrentHCP_" + componentIdentifier, "BO-1006100000-HCPTYPE", "HcpType");
fields[73] = new ims.framework.ReportField(this.context, prefix + "_lv_OCRR.NewResultsOutpatientTabComponent.__internal_x_context__CurrentHCP_" + componentIdentifier, "BO-1006100000-ISACTIVE", "IsActive");
fields[74] = new ims.framework.ReportField(this.context, prefix + "_lv_OCRR.NewResultsOutpatientTabComponent.__internal_x_context__CurrentHCP_" + componentIdentifier, "BO-1006100000-ISHCPARESPONSIBLEHCP", "IsHCPaResponsibleHCP");
fields[75] = new ims.framework.ReportField(this.context, prefix + "_lv_OCRR.NewResultsOutpatientTabComponent.__internal_x_context__CurrentHCP_" + componentIdentifier, "BO-1006100000-ISARESPONSIBLEEDCLINICIAN", "IsAResponsibleEDClinician");
return fields;
}
protected Context context = null;
protected ims.framework.FormInfo formInfo;
protected String componentIdentifier;
}
// Builds a unique key for this component instance from the form's prefix,
// namespace, name, id and the component identifier.
public String getUniqueIdentifier()
{
return formInfo.getLocalVariablesPrefix() + formInfo.getNamespaceName() + formInfo.getFormName() + formInfo.getFormId() + "_" + this.componentIdentifier;
}
// Backing state injected by the framework; reportFields is a lazy cache
// populated by getFormReportFields().
private Context context = null;
private ims.framework.FormInfo formInfo = null;
private String componentIdentifier;
private GlobalContext globalContext = null;
private IReportField[] reportFields = null;
}
| agpl-3.0 |
wesley1001/orbeon-forms | src/main/java/org/orbeon/oxf/xforms/function/xxforms/XXFormsFormURLEncode.java | 1897 | /**
* Copyright (C) 2009 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.function.xxforms;
import org.dom4j.Document;
import org.orbeon.oxf.xforms.function.XFormsFunction;
import org.orbeon.oxf.xforms.submission.XFormsSubmissionUtils;
import org.orbeon.oxf.xml.TransformerUtils;
import org.orbeon.saxon.expr.Expression;
import org.orbeon.saxon.expr.XPathContext;
import org.orbeon.saxon.om.Item;
import org.orbeon.saxon.om.NodeInfo;
import org.orbeon.saxon.trans.XPathException;
import org.orbeon.saxon.value.StringValue;
/**
 * Implementation of the xxf:form-urlencode() XPath function, which serializes
 * an XML document using application/x-www-form-urlencoded encoding.
 */
public class XXFormsFormURLEncode extends XFormsFunction {

    /**
     * Evaluates the first argument; when it is a node, converts the tiny tree
     * to a dom4j document and URL-encodes it with "&amp;" as the pair
     * separator. Non-node arguments yield an empty sequence (null).
     */
    public Item evaluateItem(XPathContext xpathContext) throws XPathException {
        final Item evaluated = argument[0].evaluateItem(xpathContext);
        if (!(evaluated instanceof NodeInfo))
            return null;

        final Document dom4jDocument = TransformerUtils.tinyTreeToDom4j((NodeInfo) evaluated);
        return new StringValue(XFormsSubmissionUtils.createWwwFormUrlEncoded(dom4jDocument, "&"));
    }
}
| lgpl-2.1 |
paulklinkenberg/Lucee4 | lucee-java/lucee-core/src/lucee/runtime/functions/dateTime/CreateDate.java | 1814 | /**
*
* Copyright (c) 2014, the Railo Company Ltd. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
**/
/**
* Implements the CFML Function createdate
*/
package lucee.runtime.functions.dateTime;
import java.util.TimeZone;
import lucee.commons.date.DateTimeUtil;
import lucee.commons.date.TimeZoneUtil;
import lucee.runtime.PageContext;
import lucee.runtime.exp.ExpressionException;
import lucee.runtime.ext.function.Function;
import lucee.runtime.type.dt.DateTime;
public final class CreateDate implements Function {
public static DateTime call(PageContext pc , double year, double month, double day) throws ExpressionException {
return _call(pc,year,month,day,pc.getTimeZone());
}
public static DateTime call(PageContext pc , double year, double month, double day,String strTimezone) throws ExpressionException {
return _call(pc,year,month,day,strTimezone==null?pc.getTimeZone():TimeZoneUtil.toTimeZone(strTimezone));
}
private static DateTime _call(PageContext pc , double year, double month, double day,TimeZone tz) throws ExpressionException {
return DateTimeUtil.getInstance().toDateTime(tz,(int)year,(int)month,(int)day, 0, 0, 0,0);
}
} | lgpl-2.1 |
smsunarto/loklak_server | src/org/loklak/server/ClientIdentity.java | 2320 | /**
* Identity
* Copyright 24.05.2016 by Michael Peter Christen, @0rb1t3r
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
*/
package org.loklak.server;
import org.json.JSONObject;
/**
 * A client identity: a structured string carrying enough detail to identify a
 * user and to address data to that user. Identities are either persistent
 * (e-mail based, non-anonymous) or anonymous (host based, for clients that do
 * not authenticate).
 */
public class ClientIdentity extends Client {

    public enum Type {
        /** Non-anonymous identity backed by an e-mail address. */
        email(true),
        /** Anonymous identity keyed by host name. */
        host(false);

        private final boolean persistent;

        Type(final boolean persistent) {
            this.persistent = persistent;
        }

        public boolean isPersistent() {
            return this.persistent;
        }
    }

    private final boolean persistent;

    public ClientIdentity(String rawIdString) {
        super(rawIdString);
        this.persistent = Type.valueOf(getKey()).isPersistent();
    }

    public ClientIdentity(Type type, String untypedId) {
        super(type.name(), untypedId);
        this.persistent = type.isPersistent();
    }

    /** @return whether this identity survives across sessions */
    public boolean isPersistent() {
        return this.persistent;
    }

    /** @return true when the identity is e-mail based */
    public boolean isEmail() {
        return Type.email.name().equals(getKey());
    }

    /** @return true when the identity is an anonymous host identity */
    public boolean isAnonymous() {
        return Type.host.name().equals(getKey());
    }

    /** @return the identity type parsed from the key */
    public Type getType() {
        return Type.valueOf(getKey());
    }

    /** @return the client's JSON representation enriched with an "anonymous" flag */
    public JSONObject toJSON() {
        final JSONObject json = super.toJSON();
        json.put("anonymous", isAnonymous());
        return json;
    }
}
| lgpl-2.1 |
zwobit/exist | src/org/exist/util/XMLFilenameFilter.java | 884 |
package org.exist.util;
import java.io.File;
import java.io.FilenameFilter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.Predicate;
/**
 * Filename filter that accepts files whose MIME type - resolved from the file
 * name through the shared {@code MimeTable} - is an XML type.
 */
public class XMLFilenameFilter implements FilenameFilter {

    public XMLFilenameFilter() {
    }

    @Override
    public boolean accept(File dir, String name) {
        // Directory component is irrelevant: the decision is made purely on
        // the file name's registered MIME type.
        return isXmlType(name);
    }

    /**
     * Returns a {@link Predicate} over {@link Path} with the same semantics as
     * {@link #accept(File, String)}, additionally rejecting directories.
     */
    public static Predicate<Path> asPredicate() {
        return path -> !Files.isDirectory(path) && isXmlType(FileUtils.fileName(path));
    }

    /**
     * Shared check (previously duplicated in accept() and asPredicate()):
     * true when the MIME type registered for {@code name} is an XML type.
     */
    private static boolean isXmlType(String name) {
        final MimeType mime = MimeTable.getInstance().getContentTypeFor(name);
        return mime != null && mime.isXMLType();
    }
}
| lgpl-2.1 |
elsiklab/intermine | intermine/objectstore/main/src/org/intermine/util/CacheHoldingArrayList.java | 1940 | package org.intermine.util;
/*
* Copyright (C) 2002-2017 FlyMine
*
* This code may be freely distributed and modified under the
* terms of the GNU Lesser General Public Licence. This should
* be distributed with the code. See the LICENSE file for more
* information or http://www.gnu.org/copyleft/lesser.html.
*
*/
import java.util.ArrayList;
import java.util.Collection;
/**
 * This is an extension of the ArrayList class, designed to be used by systems
 * that return Lists of data, with extra data that should not be flushed out of
 * a cache until the List is garbage collected.
 *
 * This is used for example in the ObjectStoreItemPathFollowingImpl, when it
 * generates a batch. The objectstore fetches additional useful objects, and
 * stores them in the holder of a CacheHoldingArrayList as well as a cache. The
 * holder then prevents the extra useful objects from being flushed out of the
 * cache until the DataTranslator has finished with the batch.
 *
 * @author Matthew Wakeling
 * @param <E> The element type
 */
public class CacheHoldingArrayList<E> extends ArrayList<E>
{
    // Explicit serialVersionUID: the original relied on the JVM-computed
    // default, which breaks serialization compatibility across recompiles.
    private static final long serialVersionUID = 1L;

    /** Strong references that keep cached objects alive for this list's lifetime. */
    private final ArrayList<Object> holder = new ArrayList<Object>();

    /**
     * Empty constructor
     */
    public CacheHoldingArrayList() {
        super();
    }

    /**
     * Constructs a new instance from another Collection.
     *
     * @param col a Collection
     */
    public CacheHoldingArrayList(Collection<E> col) {
        super(col);
    }

    /**
     * Constructs a new instance with the given initial capacity.
     *
     * @param capacity the initial capacity
     */
    public CacheHoldingArrayList(int capacity) {
        super(capacity);
    }

    /**
     * Adds an object to the holder. This prevents the given object from being
     * garbage collected until this List is garbage-collected.
     *
     * @param o any Object
     */
    public void addToHolder(Object o) {
        holder.add(o);
    }
}
| lgpl-2.1 |
MatthiasMann/EnderIO | src/main/java/crazypants/enderio/item/darksteel/ItemGliderWing.java | 2753 | package crazypants.enderio.item.darksteel;
import java.util.List;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.util.IIcon;
import net.minecraft.util.MathHelper;
import com.enderio.core.api.client.gui.IResourceTooltipProvider;
import cpw.mods.fml.common.registry.GameRegistry;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import crazypants.enderio.EnderIOTab;
import crazypants.enderio.ModObject;
import crazypants.enderio.power.BasicCapacitor;
import crazypants.enderio.power.Capacitors;
// Minecraft item with two sub-types selected by damage value:
// 0 = single glider wing, 1 = glider wings (pair).
public class ItemGliderWing extends Item implements IResourceTooltipProvider {

// NOTE(review): CAP is never read anywhere in this class - looks like dead
// state; confirm no reflective access before removing.
private static final BasicCapacitor CAP = new BasicCapacitor();

// Factory used by the mod bootstrap: constructs the item and registers it.
public static ItemGliderWing create() {
ItemGliderWing result = new ItemGliderWing();
result.init();
return result;
}

// Icon for the damage==1 sub-type; the base class's itemIcon covers damage==0.
private IIcon wingsIcon;

protected ItemGliderWing() {
setCreativeTab(EnderIOTab.tabEnderIO);
setUnlocalizedName(ModObject.itemGliderWing.unlocalisedName);
setHasSubtypes(true);
setMaxDamage(0);
setMaxStackSize(64);
}

// Registers the item with Forge under its unlocalised name.
protected void init() {
GameRegistry.registerItem(this, ModObject.itemGliderWing.unlocalisedName);
}

// Client-side: picks the icon for a damage value, clamped to the two sub-types.
@Override
@SideOnly(Side.CLIENT)
public IIcon getIconFromDamage(int damage) {
damage = MathHelper.clamp_int(damage, 0, 1);
if(damage == 0) {
return itemIcon;
}
return wingsIcon;
}

@Override
@SideOnly(Side.CLIENT)
public void registerIcons(IIconRegister register) {
itemIcon = register.registerIcon("enderio:itemGliderWing");
wingsIcon = register.registerIcon("enderio:itemGliderWings");
}

// Appends "s" to the name for the non-zero sub-type ("...Wing" -> "...Wings").
// NOTE(review): clamping against Capacitors.values().length looks copied from
// a capacitor item; the intended bound is presumably the 2 sub-types - verify.
@Override
public String getUnlocalizedName(ItemStack par1ItemStack) {
int i = MathHelper.clamp_int(par1ItemStack.getItemDamage(), 0, Capacitors.values().length - 1);
if(i == 0) {
return super.getUnlocalizedName();
}
return super.getUnlocalizedName() + "s";
}

// Adds one creative-menu entry per sub-type (damage 0 and 1).
@Override
@SuppressWarnings({ "rawtypes", "unchecked" })
@SideOnly(Side.CLIENT)
public void getSubItems(Item par1, CreativeTabs par2CreativeTabs, List par3List) {
for (int j = 0; j < 2; ++j) {
par3List.add(new ItemStack(par1, 1, j));
}
}

// Tooltip lookups reuse the (possibly pluralised) unlocalised name.
@Override
public String getUnlocalizedNameForTooltip(ItemStack itemStack) {
return getUnlocalizedName(itemStack);
}

// @Override
// @SideOnly(Side.CLIENT)
// public void addInformation(ItemStack par1ItemStack, EntityPlayer par2EntityPlayer, List par3List, boolean par4) {
// if(par1ItemStack != null && par1ItemStack.getItemDamage() > 0) {
// par3List.add(EnderIO.lang.localize("machine.tooltip.upgrade"));
// }
//
// }
}
| unlicense |
steventrigg/AlarmClock | AlarmClock/src/com/trigg/alarmclock/AlarmDBHelper.java | 4626 | package com.trigg.alarmclock;
import java.util.ArrayList;
import java.util.List;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.net.Uri;
import com.trigg.alarmclock.AlarmContract.Alarm;
/**
 * SQLite helper that persists {@code AlarmModel} rows in a single "alarm"
 * table. Provides CRUD operations plus row &lt;-&gt; model conversion.
 */
public class AlarmDBHelper extends SQLiteOpenHelper {

    public static final int DATABASE_VERSION = 1;
    public static final String DATABASE_NAME = "alarmclock.db";

    private static final String SQL_CREATE_ALARM = "CREATE TABLE " + Alarm.TABLE_NAME + " (" +
            Alarm._ID + " INTEGER PRIMARY KEY AUTOINCREMENT," +
            Alarm.COLUMN_NAME_ALARM_NAME + " TEXT," +
            Alarm.COLUMN_NAME_ALARM_TIME_HOUR + " INTEGER," +
            Alarm.COLUMN_NAME_ALARM_TIME_MINUTE + " INTEGER," +
            Alarm.COLUMN_NAME_ALARM_REPEAT_DAYS + " TEXT," +
            Alarm.COLUMN_NAME_ALARM_REPEAT_WEEKLY + " BOOLEAN," +
            Alarm.COLUMN_NAME_ALARM_TONE + " TEXT," +
            Alarm.COLUMN_NAME_ALARM_ENABLED + " BOOLEAN" +
            " )";

    private static final String SQL_DELETE_ALARM =
            "DROP TABLE IF EXISTS " + Alarm.TABLE_NAME;

    public AlarmDBHelper(Context context) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
    }

    @Override
    public void onCreate(SQLiteDatabase db) {
        db.execSQL(SQL_CREATE_ALARM);
    }

    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        // Destructive upgrade: drops all saved alarms and recreates the schema.
        db.execSQL(SQL_DELETE_ALARM);
        onCreate(db);
    }

    /** Builds an AlarmModel from the cursor's current row. */
    private AlarmModel populateModel(Cursor c) {
        AlarmModel model = new AlarmModel();
        model.id = c.getLong(c.getColumnIndex(Alarm._ID));
        model.name = c.getString(c.getColumnIndex(Alarm.COLUMN_NAME_ALARM_NAME));
        model.timeHour = c.getInt(c.getColumnIndex(Alarm.COLUMN_NAME_ALARM_TIME_HOUR));
        model.timeMinute = c.getInt(c.getColumnIndex(Alarm.COLUMN_NAME_ALARM_TIME_MINUTE));
        model.repeatWeekly = c.getInt(c.getColumnIndex(Alarm.COLUMN_NAME_ALARM_REPEAT_WEEKLY)) != 0;
        // BUGFIX: the original compared the tone string with != "" (reference
        // equality), which is almost never true for cursor-provided strings,
        // so alarms saved without a tone got Uri.parse("") instead of null.
        String tone = c.getString(c.getColumnIndex(Alarm.COLUMN_NAME_ALARM_TONE));
        model.alarmTone = (tone != null && !tone.isEmpty()) ? Uri.parse(tone) : null;
        model.isEnabled = c.getInt(c.getColumnIndex(Alarm.COLUMN_NAME_ALARM_ENABLED)) != 0;
        String[] repeatingDays = c.getString(c.getColumnIndex(Alarm.COLUMN_NAME_ALARM_REPEAT_DAYS)).split(",");
        for (int i = 0; i < repeatingDays.length; ++i) {
            // Preserves the original semantics: anything other than exactly
            // "false" counts as a repeating day.
            model.setRepeatingDay(i, !"false".equals(repeatingDays[i]));
        }
        return model;
    }

    /** Serializes an AlarmModel into ContentValues for insert/update. */
    private ContentValues populateContent(AlarmModel model) {
        ContentValues values = new ContentValues();
        values.put(Alarm.COLUMN_NAME_ALARM_NAME, model.name);
        values.put(Alarm.COLUMN_NAME_ALARM_TIME_HOUR, model.timeHour);
        values.put(Alarm.COLUMN_NAME_ALARM_TIME_MINUTE, model.timeMinute);
        values.put(Alarm.COLUMN_NAME_ALARM_REPEAT_WEEKLY, model.repeatWeekly);
        values.put(Alarm.COLUMN_NAME_ALARM_TONE, model.alarmTone != null ? model.alarmTone.toString() : "");
        values.put(Alarm.COLUMN_NAME_ALARM_ENABLED, model.isEnabled);
        // Days are stored as a comma-separated boolean list, e.g. "true,false,...".
        StringBuilder repeatingDays = new StringBuilder();
        for (int i = 0; i < 7; ++i) {
            repeatingDays.append(model.getRepeatingDay(i)).append(',');
        }
        values.put(Alarm.COLUMN_NAME_ALARM_REPEAT_DAYS, repeatingDays.toString());
        return values;
    }

    /** @return the new row id, or -1 on failure */
    public long createAlarm(AlarmModel model) {
        return getWritableDatabase().insert(Alarm.TABLE_NAME, null, populateContent(model));
    }

    /** @return the number of rows updated */
    public long updateAlarm(AlarmModel model) {
        return getWritableDatabase().update(Alarm.TABLE_NAME, populateContent(model),
                Alarm._ID + " = ?", new String[] { String.valueOf(model.id) });
    }

    /** @return the alarm with the given id, or null when it does not exist */
    public AlarmModel getAlarm(long id) {
        SQLiteDatabase db = this.getReadableDatabase();
        // Parameterized query; the cursor is always closed (the original
        // leaked it on every call).
        Cursor c = db.rawQuery(
                "SELECT * FROM " + Alarm.TABLE_NAME + " WHERE " + Alarm._ID + " = ?",
                new String[] { String.valueOf(id) });
        try {
            if (c.moveToNext()) {
                return populateModel(c);
            }
            return null;
        } finally {
            c.close();
        }
    }

    /** @return all alarms, or null when none exist (original contract preserved) */
    public List<AlarmModel> getAlarms() {
        SQLiteDatabase db = this.getReadableDatabase();
        Cursor c = db.rawQuery("SELECT * FROM " + Alarm.TABLE_NAME, null);
        try {
            List<AlarmModel> alarmList = new ArrayList<AlarmModel>();
            while (c.moveToNext()) {
                alarmList.add(populateModel(c));
            }
            return alarmList.isEmpty() ? null : alarmList;
        } finally {
            c.close();
        }
    }

    /** @return the number of rows deleted */
    public int deleteAlarm(long id) {
        return getWritableDatabase().delete(Alarm.TABLE_NAME, Alarm._ID + " = ?", new String[] { String.valueOf(id) });
    }
}
| unlicense |
bulldog2011/nano | sample/webservice/eBayDemoApp/src/com/ebay/trading/api/StoreCustomListingHeaderType.java | 1020 | // Generated by xsd compiler for android/java
// DO NOT CHANGE!
package com.ebay.trading.api;
import java.io.Serializable;
import com.leansoft.nano.annotation.*;
import java.util.List;
/**
*
* Configuration of a Store custom listing header.
*
*/
// NOTE: generated by the nano xsd compiler (see file header: "DO NOT CHANGE!").
// Plain data holder; field order is fixed by the @Order annotations used for
// XML (de)serialization.
public class StoreCustomListingHeaderType implements Serializable {

private static final long serialVersionUID = -1L;

// How (or whether) the custom header is displayed.
@Element(name = "DisplayType")
@Order(value=0)
public StoreCustomListingHeaderDisplayCodeType displayType;

// Whether the Store logo is shown in the header.
@Element(name = "Logo")
@Order(value=1)
public Boolean logo;

// Whether a search box is included in the header.
@Element(name = "SearchBox")
@Order(value=2)
public Boolean searchBox;

// Links to include in the header (repeated element).
@Element(name = "LinkToInclude")
@Order(value=3)
public List<StoreCustomListingHeaderLinkType> linkToInclude;

// Whether an "add to favorite stores" link is shown.
@Element(name = "AddToFavoriteStores")
@Order(value=4)
public Boolean addToFavoriteStores;

// Whether a newsletter sign-up link is shown.
@Element(name = "SignUpForStoreNewsletter")
@Order(value=5)
public Boolean signUpForStoreNewsletter;

// Catch-all for schema elements not modeled above.
@AnyElement
@Order(value=6)
public List<Object> any;
}
shakamunyi/hadoop-20 | src/core/org/apache/hadoop/util/InjectionEventI.java | 967 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.util;
/**
 * Base interface for enumerating injection events. Please see InjectionHandler.
 *
 * <p>This is a marker interface: it declares no methods, so implementations
 * only need to provide distinct identities for the code points at which an
 * injection handler may intercept execution (presumably for fault injection
 * in tests — confirm against InjectionHandler).
 */
public interface InjectionEventI {
}
| apache-2.0 |
mkis-/elasticsearch | src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsTests.java | 12721 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.indices.mapping;
import com.google.common.collect.Maps;
import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import static org.elasticsearch.cluster.metadata.IndexMetaData.*;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.hamcrest.Matchers.*;
/**
 * Integration tests for the get-field-mappings API.
 *
 * <p>NOTE(review): four of the five test methods below were missing the JUnit
 * {@code @Test} annotation (and do not use a {@code test} name prefix), so the
 * runner never executed them. The annotations are restored here; the test
 * bodies themselves are unchanged.
 */
public class SimpleGetFieldMappingsTests extends ElasticsearchIntegrationTest {

    /**
     * Requesting field mappings against an index without any mappings must
     * return an empty (not missing) entry for that index.
     */
    @Test // was missing: without it this method was never run
    public void getMappingsWhereThereAreNone() {
        createIndex("index");
        ensureYellow();
        GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().get();
        assertThat(response.mappings().size(), equalTo(1));
        assertThat(response.mappings().get("index").size(), equalTo(0));
        assertThat(response.fieldMappings("index", "type", "field"), Matchers.nullValue());
    }

    /** Mapping with a top-level string field and a not_analyzed object sub-field. */
    private XContentBuilder getMappingForType(String type) throws IOException {
        return jsonBuilder().startObject().startObject(type).startObject("properties")
                .startObject("field1").field("type", "string").endObject()
                .startObject("obj").startObject("properties").startObject("subfield").field("type", "string").field("index", "not_analyzed").endObject().endObject().endObject()
                .endObject().endObject().endObject();
    }

    @Test // was missing: without it this method was never run
    public void simpleGetFieldMappings() throws Exception {
        assertAcked(prepareCreate("indexa")
                .addMapping("typeA", getMappingForType("typeA"))
                .addMapping("typeB", getMappingForType("typeB")));
        assertAcked(client().admin().indices().prepareCreate("indexb")
                .addMapping("typeA", getMappingForType("typeA"))
                .addMapping("typeB", getMappingForType("typeB")));
        ensureYellow();

        // Get mappings by full name
        GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("indexa").setTypes("typeA").setFields("field1", "obj.subfield").get();
        assertThat(response.fieldMappings("indexa", "typeA", "field1").fullName(), equalTo("field1"));
        assertThat(response.fieldMappings("indexa", "typeA", "field1").sourceAsMap(), hasKey("field1"));
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.mappings().get("indexa"), not(hasKey("typeB")));
        assertThat(response.fieldMappings("indexa", "typeB", "field1"), nullValue());
        assertThat(response.mappings(), not(hasKey("indexb")));
        assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());

        // Get mappings by name (same request shape is deliberately exercised twice)
        response = client().admin().indices().prepareGetFieldMappings("indexa").setTypes("typeA").setFields("field1", "obj.subfield").get();
        assertThat(response.fieldMappings("indexa", "typeA", "field1").fullName(), equalTo("field1"));
        assertThat(response.fieldMappings("indexa", "typeA", "field1").sourceAsMap(), hasKey("field1"));
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexa", "typeB", "field1"), nullValue());
        assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());

        // get mappings by name across multiple indices
        response = client().admin().indices().prepareGetFieldMappings().setTypes("typeA").setFields("obj.subfield").get();
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexa", "typeB", "obj.subfield"), nullValue());
        assertThat(response.fieldMappings("indexb", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexb", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield"), nullValue());

        // get mappings by name across multiple types
        response = client().admin().indices().prepareGetFieldMappings("indexa").setFields("obj.subfield").get();
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexa", "typeA", "field1"), nullValue());
        assertThat(response.fieldMappings("indexa", "typeB", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexa", "typeB", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexa", "typeB", "field1"), nullValue());
        assertThat(response.fieldMappings("indexb", "typeA", "obj.subfield"), nullValue());
        assertThat(response.fieldMappings("indexb", "typeA", "field1"), nullValue());
        assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield"), nullValue());
        assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());

        // get mappings by name across multiple types & indices
        response = client().admin().indices().prepareGetFieldMappings().setFields("obj.subfield").get();
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexa", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexa", "typeA", "field1"), nullValue());
        assertThat(response.fieldMappings("indexa", "typeB", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexa", "typeB", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexa", "typeB", "field1"), nullValue());
        assertThat(response.fieldMappings("indexb", "typeA", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexb", "typeA", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());
        assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield").fullName(), equalTo("obj.subfield"));
        assertThat(response.fieldMappings("indexb", "typeB", "obj.subfield").sourceAsMap(), hasKey("subfield"));
        assertThat(response.fieldMappings("indexb", "typeB", "field1"), nullValue());
    }

    @SuppressWarnings("unchecked")
    @Test // was missing: without it this method was never run
    public void simpleGetFieldMappingsWithDefaults() throws Exception {
        assertAcked(prepareCreate("test").addMapping("type", getMappingForType("type")));
        client().prepareIndex("test", "type", "1").setSource("num", 1).get();
        ensureYellow();
        waitForConcreteMappingsOnAll("test", "type", "num"); // for num, we need to wait...
        GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().setFields("num", "field1", "obj.subfield").includeDefaults(true).get();
        assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("index", (Object) "not_analyzed"));
        assertThat((Map<String, Object>) response.fieldMappings("test", "type", "num").sourceAsMap().get("num"), hasEntry("type", (Object) "long"));
        assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("index", (Object) "analyzed"));
        assertThat((Map<String, Object>) response.fieldMappings("test", "type", "field1").sourceAsMap().get("field1"), hasEntry("type", (Object) "string"));
        assertThat((Map<String, Object>) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("index", (Object) "not_analyzed"));
        assertThat((Map<String, Object>) response.fieldMappings("test", "type", "obj.subfield").sourceAsMap().get("subfield"), hasEntry("type", (Object) "string"));
    }

    //fix #6552
    @Test // was missing: without it this method was never run
    public void simpleGetFieldMappingsWithPretty() throws Exception {
        assertAcked(prepareCreate("index").addMapping("type", getMappingForType("type")));
        Map<String, String> params = Maps.newHashMap();
        params.put("pretty", "true");
        ensureYellow();
        GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get();
        XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint();
        responseBuilder.startObject();
        response.toXContent(responseBuilder, new ToXContent.MapParams(params));
        responseBuilder.endObject();
        String responseStrings = responseBuilder.string();

        // With pretty=true, re-pretty-printing the output must be a no-op.
        XContentBuilder prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint();
        prettyJsonBuilder.copyCurrentStructure(XContentFactory.xContent(responseStrings).createParser(responseStrings));
        assertThat(responseStrings, equalTo(prettyJsonBuilder.string()));

        // With pretty=false, the raw output must differ from a pretty-printed copy.
        params.put("pretty", "false");
        response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get();
        responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd();
        responseBuilder.startObject();
        response.toXContent(responseBuilder, new ToXContent.MapParams(params));
        responseBuilder.endObject();
        responseStrings = responseBuilder.string();
        prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint();
        prettyJsonBuilder.copyCurrentStructure(XContentFactory.xContent(responseStrings).createParser(responseStrings));
        assertThat(responseStrings, not(equalTo(prettyJsonBuilder.string())));
    }

    @Test
    public void testGetFieldMappingsWithBlocks() throws Exception {
        assertAcked(prepareCreate("test")
                .addMapping("typeA", getMappingForType("typeA"))
                .addMapping("typeB", getMappingForType("typeB")));
        ensureYellow();

        // Read/write/read-only blocks must not prevent fetching field mappings.
        for (String block : Arrays.asList(SETTING_BLOCKS_READ, SETTING_BLOCKS_WRITE, SETTING_READ_ONLY)) {
            try {
                enableIndexBlock("test", block);
                GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("test").setTypes("typeA").setFields("field1", "obj.subfield").get();
                assertThat(response.fieldMappings("test", "typeA", "field1").fullName(), equalTo("field1"));
            } finally {
                disableIndexBlock("test", block);
            }
        }

        // A metadata block, however, must reject the request.
        try {
            enableIndexBlock("test", SETTING_BLOCKS_METADATA);
            assertBlocked(client().admin().indices().prepareGetMappings(), INDEX_METADATA_BLOCK);
        } finally {
            disableIndexBlock("test", SETTING_BLOCKS_METADATA);
        }
    }
}
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-appengine/v1/1.31.0/com/google/api/services/appengine/v1/model/UrlDispatchRule.java | 4241 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.appengine.v1.model;
/**
* Rules to match an HTTP request and dispatch that request to a service.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the App Engine Admin API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class UrlDispatchRule extends com.google.api.client.json.GenericJson {

  /**
   * Domain name to match against. The wildcard "*" is supported if specified before a period:
   * "*.".Defaults to matching all domains: "*".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String domain;

  /**
   * Pathname within the host. Must start with a "/". A single "*" can be included at the end of the
   * path.The sum of the lengths of the domain and path may not exceed 100 characters.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String path;

  /**
   * Resource ID of a service in this application that should serve the matched request. The service
   * must already exist. Example: default.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String service;

  /**
   * Domain name to match against. The wildcard "*" is supported if specified before a period:
   * "*.".Defaults to matching all domains: "*".
   * @return value or {@code null} for none
   */
  public java.lang.String getDomain() {
    return domain;
  }

  /**
   * Domain name to match against. The wildcard "*" is supported if specified before a period:
   * "*.".Defaults to matching all domains: "*".
   * @param domain domain or {@code null} for none
   */
  public UrlDispatchRule setDomain(java.lang.String domain) {
    this.domain = domain;
    return this;
  }

  /**
   * Pathname within the host. Must start with a "/". A single "*" can be included at the end of the
   * path.The sum of the lengths of the domain and path may not exceed 100 characters.
   * @return value or {@code null} for none
   */
  public java.lang.String getPath() {
    return path;
  }

  /**
   * Pathname within the host. Must start with a "/". A single "*" can be included at the end of the
   * path.The sum of the lengths of the domain and path may not exceed 100 characters.
   * @param path path or {@code null} for none
   */
  public UrlDispatchRule setPath(java.lang.String path) {
    this.path = path;
    return this;
  }

  /**
   * Resource ID of a service in this application that should serve the matched request. The service
   * must already exist. Example: default.
   * @return value or {@code null} for none
   */
  public java.lang.String getService() {
    return service;
  }

  /**
   * Resource ID of a service in this application that should serve the matched request. The service
   * must already exist. Example: default.
   * @param service service or {@code null} for none
   */
  public UrlDispatchRule setService(java.lang.String service) {
    this.service = service;
    return this;
  }

  // Covariant overrides so chained calls keep the UrlDispatchRule type.
  @Override
  public UrlDispatchRule set(String fieldName, Object value) {
    return (UrlDispatchRule) super.set(fieldName, value);
  }

  @Override
  public UrlDispatchRule clone() {
    return (UrlDispatchRule) super.clone();
  }
}
| apache-2.0 |
romartin/kie-wb-common | kie-wb-common-ala/kie-wb-common-ala-ui/kie-wb-common-ala-ui-client/src/test/java/org/guvnor/ala/ui/client/wizard/NewProviderWizardTest.java | 8313 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.guvnor.ala.ui.client.wizard;
import com.google.gwtmockito.GwtMockitoTestRunner;
import org.guvnor.ala.ui.client.events.ProviderTypeSelectedEvent;
import org.guvnor.ala.ui.client.handler.ClientProviderHandler;
import org.guvnor.ala.ui.client.handler.ClientProviderHandlerRegistry;
import org.guvnor.ala.ui.client.handler.FormResolver;
import org.guvnor.ala.ui.client.handler.ProviderConfigurationForm;
import org.guvnor.ala.ui.client.util.PopupHelper;
import org.guvnor.ala.ui.client.wizard.provider.ProviderConfigurationPagePresenter;
import org.guvnor.ala.ui.model.ProviderConfiguration;
import org.guvnor.ala.ui.model.ProviderType;
import org.guvnor.ala.ui.service.ProviderService;
import org.jboss.errai.bus.client.api.messaging.Message;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.ErrorCallback;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.uberfire.mocks.CallerMock;
import org.uberfire.mocks.EventSourceMock;
import org.uberfire.workbench.events.NotificationEvent;
import static org.guvnor.ala.ui.ProvisioningManagementTestCommons.ERROR_MESSAGE;
import static org.guvnor.ala.ui.ProvisioningManagementTestCommons.SUCCESS_MESSAGE;
import static org.guvnor.ala.ui.ProvisioningManagementTestCommons.mockProviderType;
import static org.guvnor.ala.ui.ProvisioningManagementTestCommons.prepareServiceCallerError;
import static org.guvnor.ala.ui.client.resources.i18n.GuvnorAlaUIConstants.NewProviderWizard_ProviderCreateSuccessMessage;
import static org.guvnor.ala.ui.client.resources.i18n.GuvnorAlaUIConstants.NewProviderWizard_ProviderNotProperlyConfiguredInSystemErrorMessage;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(GwtMockitoTestRunner.class)
public class NewProviderWizardTest
        extends WizardBaseTest {

    // Wizard page under configuration; receives the provider form on start().
    @Mock
    private ProviderConfigurationPagePresenter configurationPage;

    // Registry resolving a provider-type key to its client-side handler.
    @Mock
    private ClientProviderHandlerRegistry handlerRegistry;

    @Mock
    private ErrorCallback<Message> defaultErrorCallback;

    @Mock
    private PopupHelper popupHelper;

    @Mock
    private ProviderService providerService;

    private Caller<ProviderService> providerServiceCaller;

    @Mock
    private EventSourceMock<ProviderTypeSelectedEvent> providerTypeSelectedEvent;

    // System under test; built in setUp() with the mocked collaborators.
    private NewProviderWizard wizard;

    @Mock
    private ClientProviderHandler providerHandler;

    @Mock
    private FormResolver formResolver;

    @Mock
    private ProviderConfigurationForm configurationForm;

    @Mock
    private ProviderConfiguration providerConfiguration;

    private ProviderType providerType;

    @Before
    public void setUp() {
        when(popupHelper.getPopupErrorCallback()).thenReturn(defaultErrorCallback);
        // Spy so tests can swap the underlying service behavior (see prepareServiceCallerError).
        providerServiceCaller = spy(new CallerMock<>(providerService));
        wizard = new NewProviderWizard(configurationPage,
                                       handlerRegistry,
                                       popupHelper,
                                       translationService,
                                       providerServiceCaller,
                                       notification,
                                       providerTypeSelectedEvent) {
            {
                // Inject the mocked wizard view from WizardBaseTest.
                this.view = wizardView;
            }
        };
        wizard.init();
        providerType = mockProviderType("NewProviderWizardTest");
        // Happy-path wiring: provider installed and its handler/form resolvable.
        when(handlerRegistry.isProviderInstalled(providerType.getKey())).thenReturn(true);
        when(handlerRegistry.getProviderHandler(providerType.getKey())).thenReturn(providerHandler);
        when(providerHandler.getFormResolver()).thenReturn(formResolver);
        when(formResolver.newProviderConfigurationForm()).thenReturn(configurationForm);
        when(translationService.format(NewProviderWizard_ProviderNotProperlyConfiguredInSystemErrorMessage,
                                       providerType.getName()))
                .thenReturn(ERROR_MESSAGE);
        when(translationService.getTranslation(NewProviderWizard_ProviderCreateSuccessMessage)).thenReturn(SUCCESS_MESSAGE);
    }

    @Test
    public void testStartProviderConfigured() {
        // When the provider is installed, starting the wizard resolves the
        // handler/form and hands the form to the configuration page.
        wizard.start(providerType);
        verify(handlerRegistry,
               times(2)).getProviderHandler(providerType.getKey());
        verify(providerHandler,
               times(2)).getFormResolver();
        verify(formResolver,
               times(1)).newProviderConfigurationForm();
        verify(configurationPage,
               times(1)).setProviderConfigurationForm(configurationForm);
    }

    @Test
    public void testStartProviderNotConfigured() {
        //the provider is not configured
        when(handlerRegistry.isProviderInstalled(providerType.getKey())).thenReturn(false);
        wizard.start(providerType);
        // No handler/form resolution should happen for an uninstalled provider.
        verify(handlerRegistry,
               never()).getProviderHandler(providerType.getKey());
        verify(providerHandler,
               never()).getFormResolver();
        verify(formResolver,
               never()).newProviderConfigurationForm();
        verify(configurationPage,
               never()).setProviderConfigurationForm(configurationForm);
        // NOTE(review): the no-arg start() (presumably inherited) is expected
        // to surface the misconfiguration error popup — confirm against
        // NewProviderWizard's base class.
        wizard.start();
        verify(popupHelper,
               times(1)).showErrorPopup(ERROR_MESSAGE);
    }

    @Test
    public void testCreateProviderSuccess() {
        //initialize and start the wizard.
        wizard.start(providerType);
        //emulate the user completing the wizard.
        preCompleteWizard();
        //emulate the user pressing the finish button.
        wizard.complete();
        //verify that the provider has been created and the proper notifications were fired.
        verify(providerService,
               times(1)).createProvider(providerType,
                                        providerConfiguration);
        verify(notification,
               times(1)).fire(new NotificationEvent(SUCCESS_MESSAGE,
                                                    NotificationEvent.NotificationType.SUCCESS));
        verify(providerTypeSelectedEvent,
               times(1)).fire(new ProviderTypeSelectedEvent(providerType.getKey(),
                                                            providerConfiguration.getId()));
    }

    @Test
    public void testCreateProviderFailure() {
        //initialize and start the wizard.
        wizard.start(providerType);
        //emulate the user completing the wizard.
        preCompleteWizard();
        // Make the service caller fail so the error path is exercised.
        prepareServiceCallerError(providerService,
                                  providerServiceCaller);
        //emulate the user pressing the finish button.
        wizard.complete();
        verify(providerService,
               times(1)).createProvider(providerType,
                                        providerConfiguration);
        // The failure must be routed to the popup error callback, and no
        // selection event may be fired.
        verify(popupHelper,
               times(1)).getPopupErrorCallback();
        verify(defaultErrorCallback,
               times(1)).error(any(Message.class),
                               any(Throwable.class));
        verify(providerTypeSelectedEvent,
               never()).fire(any(ProviderTypeSelectedEvent.class));
    }

    // Marks the configuration page complete and checks the wizard agrees.
    private void preCompleteWizard() {
        //emulate that the page was completed.
        when(configurationPage.buildProviderConfiguration()).thenReturn(providerConfiguration);
        preparePageCompletion(configurationPage);
        wizard.isComplete(Assert::assertTrue);
    }
}
| apache-2.0 |
haikuowuya/android_system_code | src/org/apache/http/protocol/RequestConnControl.java | 2439 | /*
* $HeadURL: http://svn.apache.org/repos/asf/httpcomponents/httpcore/trunk/module-main/src/main/java/org/apache/http/protocol/RequestConnControl.java $
* $Revision: 496070 $
* $Date: 2007-01-14 04:18:34 -0800 (Sun, 14 Jan 2007) $
*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.http.protocol;
import java.io.IOException;
import org.apache.http.HttpException;
import org.apache.http.HttpRequest;
import org.apache.http.HttpRequestInterceptor;
/**
 * A request interceptor that suggests connection keep-alive to the server
 * by adding a {@code Connection: Keep-Alive} header when the request does
 * not already carry a connection directive.
 *
 * @since 4.0
 */
public class RequestConnControl implements HttpRequestInterceptor {

    public RequestConnControl() {
        super();
    }

    public void process(final HttpRequest request, final HttpContext context)
            throws HttpException, IOException {
        if (request == null) {
            throw new IllegalArgumentException("HTTP request may not be null");
        }
        // Respect an explicit directive set by the caller.
        if (request.containsHeader(HTTP.CONN_DIRECTIVE)) {
            return;
        }
        // Default policy is to keep the connection alive whenever possible.
        request.addHeader(HTTP.CONN_DIRECTIVE, HTTP.CONN_KEEP_ALIVE);
    }
}
| apache-2.0 |
play2-maven-plugin/play2-maven-test-projects | play28/java/websocket-example-using-webjars-assets/app/stocks/StockQuote.java | 334 | package stocks;
import java.util.Objects;
import static java.util.Objects.requireNonNull;
/**
 * Immutable value object pairing a stock ticker symbol with its quoted price.
 */
public class StockQuote {

    public final String symbol;
    public final Double price;

    /**
     * @param symbol the ticker symbol; must not be null
     * @param price  the quoted price; must not be null
     * @throws NullPointerException if either argument is null
     */
    public StockQuote(String symbol, Double price) {
        this.symbol = Objects.requireNonNull(symbol);
        this.price = Objects.requireNonNull(price);
    }
}
| apache-2.0 |
zhaocloud/elasticsearch | src/main/java/org/elasticsearch/rest/action/explain/RestExplainAction.java | 7848 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.rest.action.explain;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.explain.ExplainRequest;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentBuilderString;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;
import org.elasticsearch.rest.*;
import org.elasticsearch.rest.action.support.RestBuilderListener;
import org.elasticsearch.search.fetch.source.FetchSourceContext;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.NOT_FOUND;
import static org.elasticsearch.rest.RestStatus.OK;
/**
* Rest action for computing a score explanation for specific documents.
*/
public class RestExplainAction extends BaseRestHandler {
@Inject
public RestExplainAction(Settings settings, Client client, RestController controller) {
super(settings, client);
controller.registerHandler(GET, "/{index}/{type}/{id}/_explain", this);
controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this);
}
@Override
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) {
final ExplainRequest explainRequest = new ExplainRequest(request.param("index"), request.param("type"), request.param("id"));
explainRequest.parent(request.param("parent"));
explainRequest.routing(request.param("routing"));
explainRequest.preference(request.param("preference"));
String sourceString = request.param("source");
String queryString = request.param("q");
if (request.hasContent()) {
explainRequest.source(request.content(), request.contentUnsafe());
} else if (sourceString != null) {
explainRequest.source(new BytesArray(request.param("source")), false);
} else if (queryString != null) {
QueryStringQueryBuilder queryStringBuilder = QueryBuilders.queryString(queryString);
queryStringBuilder.defaultField(request.param("df"));
queryStringBuilder.analyzer(request.param("analyzer"));
queryStringBuilder.analyzeWildcard(request.paramAsBoolean("analyze_wildcard", false));
queryStringBuilder.lowercaseExpandedTerms(request.paramAsBoolean("lowercase_expanded_terms", true));
queryStringBuilder.lenient(request.paramAsBoolean("lenient", null));
String defaultOperator = request.param("default_operator");
if (defaultOperator != null) {
if ("OR".equals(defaultOperator)) {
queryStringBuilder.defaultOperator(QueryStringQueryBuilder.Operator.OR);
} else if ("AND".equals(defaultOperator)) {
queryStringBuilder.defaultOperator(QueryStringQueryBuilder.Operator.AND);
} else {
throw new ElasticsearchIllegalArgumentException("Unsupported defaultOperator [" + defaultOperator + "], can either be [OR] or [AND]");
}
}
QuerySourceBuilder querySourceBuilder = new QuerySourceBuilder();
querySourceBuilder.setQuery(queryStringBuilder);
explainRequest.source(querySourceBuilder);
}
String sField = request.param("fields");
if (sField != null) {
String[] sFields = Strings.splitStringByCommaToArray(sField);
if (sFields != null) {
explainRequest.fields(sFields);
}
}
explainRequest.fetchSourceContext(FetchSourceContext.parseFromRestRequest(request));
client.explain(explainRequest, new RestBuilderListener<ExplainResponse>(channel) {
@Override
public RestResponse buildResponse(ExplainResponse response, XContentBuilder builder) throws Exception {
builder.startObject();
builder.field(Fields._INDEX, explainRequest.index())
.field(Fields._TYPE, explainRequest.type())
.field(Fields._ID, explainRequest.id())
.field(Fields.MATCHED, response.isMatch());
if (response.hasExplanation()) {
builder.startObject(Fields.EXPLANATION);
buildExplanation(builder, response.getExplanation());
builder.endObject();
}
GetResult getResult = response.getGetResult();
if (getResult != null) {
builder.startObject(Fields.GET);
response.getGetResult().toXContentEmbedded(builder, request);
builder.endObject();
}
builder.endObject();
return new BytesRestResponse(response.isExists() ? OK : NOT_FOUND, builder);
}
private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException {
builder.field(Fields.VALUE, explanation.getValue());
builder.field(Fields.DESCRIPTION, explanation.getDescription());
Explanation[] innerExps = explanation.getDetails();
if (innerExps != null) {
builder.startArray(Fields.DETAILS);
for (Explanation exp : innerExps) {
builder.startObject();
buildExplanation(builder, exp);
builder.endObject();
}
builder.endArray();
}
}
});
}
    /** XContent field names used when serializing the explain response body. */
    static class Fields {
        // document identity echoed back in the response
        static final XContentBuilderString _INDEX = new XContentBuilderString("_index");
        static final XContentBuilderString _TYPE = new XContentBuilderString("_type");
        static final XContentBuilderString _ID = new XContentBuilderString("_id");
        static final XContentBuilderString MATCHED = new XContentBuilderString("matched");
        // nested explanation tree: value/description plus recursive details
        static final XContentBuilderString EXPLANATION = new XContentBuilderString("explanation");
        static final XContentBuilderString VALUE = new XContentBuilderString("value");
        static final XContentBuilderString DESCRIPTION = new XContentBuilderString("description");
        static final XContentBuilderString DETAILS = new XContentBuilderString("details");
        // embedded get result, present only when the document could be fetched
        static final XContentBuilderString GET = new XContentBuilderString("get");
    }
}
| apache-2.0 |
macs524/mybatis_learn | src/main/java/org/apache/ibatis/plugin/Signature.java | 1031 | /**
* Copyright 2009-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ibatis.plugin;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* @author Clinton Begin
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
// empty @Target: this annotation cannot be placed on declarations directly and
// can only appear as a value inside another annotation
@Target({})
public @interface Signature {

  /** The class that declares the method this signature identifies. */
  Class<?> type();

  /** The name of the method this signature identifies. */
  String method();

  /** The parameter types of the method, used to distinguish overloads. */
  Class<?>[] args();
} | apache-2.0 |
DuncanDoyle/jbpm | jbpm-flow-builder/src/main/java/org/jbpm/process/builder/AssignmentBuilder.java | 1155 | /*
* Copyright 2017 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.process.builder;
import org.drools.compiler.rule.builder.PackageBuildContext;
import org.jbpm.process.core.ContextResolver;
import org.jbpm.workflow.core.node.Assignment;
/**
 * Compile-time builder for an {@link Assignment} between a source and a target
 * expression, invoked while a process package is being built.
 */
public interface AssignmentBuilder {

    /**
     * Builds the given assignment.
     *
     * @param context the package build context
     * @param assignment the assignment to build
     * @param sourceExpr the source expression text
     * @param targetExpr the target expression text
     * @param contextResolver resolver for process-level contexts (e.g. variables)
     * @param isInput whether this assignment maps an input (as opposed to an output)
     */
    public void build(final PackageBuildContext context,
            final Assignment assignment,
            final String sourceExpr,
            final String targetExpr,
            final ContextResolver contextResolver,
            boolean isInput);
}
| apache-2.0 |
matsprea/omim | android/src/com/mapswithme/maps/ads/AdDataAdapter.java | 1575 | package com.mapswithme.maps.ads;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import com.mopub.nativeads.BaseNativeAd;
import com.mopub.nativeads.StaticNativeAd;
/**
 * Read-only adapter exposing the textual fields of a wrapped MoPub
 * {@link BaseNativeAd} (title, text, icon URL, call-to-action, privacy URL)
 * together with the originating {@link NetworkType}.
 *
 * @param <T> the concrete native ad type being wrapped
 */
public abstract class AdDataAdapter<T extends BaseNativeAd>
{
  // The wrapped native ad; never null.
  @NonNull
  private final T mAd;

  protected AdDataAdapter(@NonNull T ad)
  {
    mAd = ad;
  }

  /** Returns the wrapped native ad. */
  @NonNull
  protected T getAd()
  {
    return mAd;
  }

  /** Returns the ad title, or {@code null} if absent. */
  @Nullable
  public abstract String getTitle();

  /** Returns the ad body text, or {@code null} if absent. */
  @Nullable
  public abstract String getText();

  /** Returns the URL of the ad icon image, or {@code null} if absent. */
  @Nullable
  public abstract String getIconImageUrl();

  /** Returns the call-to-action label, or {@code null} if absent. */
  @Nullable
  public abstract String getCallToAction();

  /** Returns the privacy-information URL, or {@code null} if absent. */
  @Nullable
  public abstract String getPrivacyInfoUrl();

  /** Returns the ad network this ad came from. */
  @NonNull
  public abstract NetworkType getType();

  /** Adapter over a MoPub {@link StaticNativeAd}; delegates every field to the ad. */
  public static class StaticAd extends AdDataAdapter<StaticNativeAd>
  {
    public StaticAd(@NonNull StaticNativeAd ad)
    {
      super(ad);
    }

    @Nullable
    @Override
    public String getTitle()
    {
      return getAd().getTitle();
    }

    @Nullable
    @Override
    public String getText()
    {
      return getAd().getText();
    }

    @Nullable
    @Override
    public String getIconImageUrl()
    {
      return getAd().getIconImageUrl();
    }

    @Nullable
    @Override
    public String getCallToAction()
    {
      return getAd().getCallToAction();
    }

    @Nullable
    @Override
    public String getPrivacyInfoUrl()
    {
      // maps to MoPub's privacy-information icon click-through URL
      return getAd().getPrivacyInformationIconClickThroughUrl();
    }

    @NonNull
    @Override
    public NetworkType getType()
    {
      return NetworkType.MOPUB;
    }
  }
}
| apache-2.0 |
jimhooker2002/elasticsearch | core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java | 100920 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.engine;
import com.google.common.collect.ImmutableMap;
import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.lucene.codecs.Codec;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.TestUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.bwcompat.OldIndexBackwardsCompatibilityIT;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Base64;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.deletionpolicy.KeepOnlyLastDeletionPolicy;
import org.elasticsearch.index.deletionpolicy.SnapshotDeletionPolicy;
import org.elasticsearch.index.engine.Engine.Searcher;
import org.elasticsearch.index.indexing.ShardIndexingService;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.index.mapper.Mapper.BuilderContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.index.shard.MergeSchedulerConfig;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.index.shard.TranslogRecoveryPerformer;
import org.elasticsearch.index.similarity.SimilarityLookupService;
import org.elasticsearch.index.store.DirectoryService;
import org.elasticsearch.index.store.DirectoryUtils;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.index.translog.TranslogTests;
import org.elasticsearch.test.DummyShardLock;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.MatcherAssert;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.Charset;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA;
import static org.hamcrest.Matchers.*;
public class InternalEngineTests extends ESTestCase {
private static final Pattern PARSE_LEGACY_ID_PATTERN = Pattern.compile("^" + Translog.TRANSLOG_FILE_PREFIX + "(\\d+)((\\.recovering))?$");
protected final ShardId shardId = new ShardId(new Index("index"), 1);
protected ThreadPool threadPool;
private Store store;
private Store storeReplica;
protected InternalEngine engine;
protected InternalEngine replicaEngine;
private Settings defaultSettings;
private int indexConcurrency;
private String codecName;
private Path primaryTranslogDir;
private Path replicaTranslogDir;
    /**
     * Creates a fresh primary and replica engine before each test, each with its
     * own store and translog directory, using a randomized codec and index
     * concurrency so settings propagation is exercised under varied configs.
     */
    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        CodecService codecService = new CodecService(shardId.index());
        indexConcurrency = randomIntBetween(1, 20);
        String name = Codec.getDefault().getName();
        if (Arrays.asList(codecService.availableCodecs()).contains(name)) {
            // some codecs are read only so we only take the ones that we have in the service and randomly
            // selected by lucene test case.
            codecName = name;
        } else {
            codecName = "default";
        }
        defaultSettings = Settings.builder()
                .put(EngineConfig.INDEX_COMPOUND_ON_FLUSH, randomBoolean())
                .put(EngineConfig.INDEX_GC_DELETES_SETTING, "1h") // make sure this doesn't kick in on us
                .put(EngineConfig.INDEX_CODEC_SETTING, codecName)
                .put(EngineConfig.INDEX_CONCURRENCY_SETTING, indexConcurrency)
                .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
                .build(); // TODO randomize more settings
        threadPool = new ThreadPool(getClass().getName());
        store = createStore();
        storeReplica = createStore();
        // start from clean directories so leftovers cannot affect the test
        Lucene.cleanLuceneIndex(store.directory());
        Lucene.cleanLuceneIndex(storeReplica.directory());
        primaryTranslogDir = createTempDir("translog-primary");
        engine = createEngine(store, primaryTranslogDir);
        LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig();
        // the chosen codec must be reflected both in the engine config and in the
        // live IndexWriter configuration
        assertEquals(engine.config().getCodec().getName(), codecService.codec(codecName).getName());
        assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
        if (randomBoolean()) {
            engine.config().setEnableGcDeletes(false);
        }
        replicaTranslogDir = createTempDir("translog-replica");
        replicaEngine = createEngine(storeReplica, replicaTranslogDir);
        currentIndexWriterConfig = replicaEngine.getCurrentIndexWriterConfig();
        assertEquals(replicaEngine.config().getCodec().getName(), codecService.codec(codecName).getName());
        assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
        if (randomBoolean()) {
            engine.config().setEnableGcDeletes(false);
        }
    }
    /**
     * Releases all per-test resources; each engine is closed before its store
     * (close order in the argument list), then the thread pool is terminated.
     */
    @Override
    @After
    public void tearDown() throws Exception {
        super.tearDown();
        IOUtils.close(
                replicaEngine, storeReplica,
                engine, store);
        terminate(threadPool);
    }
private Document testDocumentWithTextField() {
Document document = testDocument();
document.add(new TextField("value", "test", Field.Store.YES));
return document;
}
    /** Creates a fresh, empty Lucene document. */
    private Document testDocument() {
        return new Document();
    }
private ParsedDocument testParsedDocument(String uid, String id, String type, String routing, long timestamp, long ttl, Document document, BytesReference source, Mapping mappingUpdate) {
Field uidField = new Field("_uid", uid, UidFieldMapper.Defaults.FIELD_TYPE);
Field versionField = new NumericDocValuesField("_version", 0);
document.add(uidField);
document.add(versionField);
return new ParsedDocument(uidField, versionField, id, type, routing, timestamp, ttl, Arrays.asList(document), source, mappingUpdate);
}
    /** Creates a store backed by a newly created mock directory. */
    protected Store createStore() throws IOException {
        return createStore(newDirectory());
    }
    /**
     * Creates a store over the given directory, with a directory service that
     * always hands back that same directory and reports zero throttle time.
     */
    protected Store createStore(final Directory directory) throws IOException {
        final DirectoryService directoryService = new DirectoryService(shardId, EMPTY_SETTINGS) {
            @Override
            public Directory newDirectory() throws IOException {
                return directory;
            }

            @Override
            public long throttleTimeInNanos() {
                return 0;
            }
        };
        return new Store(shardId, EMPTY_SETTINGS, directoryService, new DummyShardLock(shardId));
    }
    /** Creates a translog in the primary engine's translog directory. */
    protected Translog createTranslog() throws IOException {
        return createTranslog(primaryTranslogDir);
    }
protected Translog createTranslog(Path translogPath) throws IOException {
TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, EMPTY_SETTINGS, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
return new Translog(translogConfig);
}
    /** Creates a deletion policy that keeps only the most recent commit. */
    protected IndexDeletionPolicy createIndexDeletionPolicy() {
        return new KeepOnlyLastDeletionPolicy(shardId, EMPTY_SETTINGS);
    }
    /** Wraps the keep-only-last deletion policy in a snapshot deletion policy. */
    protected SnapshotDeletionPolicy createSnapshotDeletionPolicy() {
        return new SnapshotDeletionPolicy(createIndexDeletionPolicy());
    }
    /** Creates an engine with the default test settings and merge policy/scheduler. */
    protected InternalEngine createEngine(Store store, Path translogPath, IndexSearcherWrapper... wrappers) {
        return createEngine(defaultSettings, store, translogPath, new MergeSchedulerConfig(defaultSettings), newMergePolicy(), wrappers);
    }
    /** Creates an engine from the given settings; {@code false} = not skipping translog recovery. */
    protected InternalEngine createEngine(Settings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy, IndexSearcherWrapper... wrappers) {
        return new InternalEngine(config(indexSettings, store, translogPath, mergeSchedulerConfig, mergePolicy, wrappers), false);
    }
    /**
     * Assembles an {@link EngineConfig} for tests: translog config, snapshot
     * deletion policy, the given merge policy/scheduler, a no-op failed-engine
     * listener and the supplied searcher wrappers. The config's create flag is
     * set to true only when no Lucene index exists in the store yet.
     */
    public EngineConfig config(Settings indexSettings, Store store, Path translogPath, MergeSchedulerConfig mergeSchedulerConfig, MergePolicy mergePolicy, IndexSearcherWrapper... wrappers) {
        IndexWriterConfig iwc = newIndexWriterConfig();
        TranslogConfig translogConfig = new TranslogConfig(shardId, translogPath, indexSettings, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
        EngineConfig config = new EngineConfig(shardId, threadPool, new ShardIndexingService(shardId, indexSettings), indexSettings
                , null, store, createSnapshotDeletionPolicy(), mergePolicy, mergeSchedulerConfig,
                iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(shardId.index()), new Engine.FailedEngineListener() {
            @Override
            public void onFailedEngine(ShardId shardId, String reason, @Nullable Throwable t) {
                // we don't need to notify anybody in this test
            }
        }, new TranslogHandler(shardId.index().getName()), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), new IndexSearcherWrappingService(new HashSet<>(Arrays.asList(wrappers))), translogConfig);
        try {
            // create a fresh index only if the store does not already contain one
            config.setCreate(Lucene.indexExists(store.directory()) == false);
        } catch (IOException e) {
            throw new ElasticsearchException("can't find index?", e);
        }
        return config;
    }
protected static final BytesReference B_1 = new BytesArray(new byte[]{1});
protected static final BytesReference B_2 = new BytesArray(new byte[]{2});
protected static final BytesReference B_3 = new BytesArray(new byte[]{3});
    /**
     * Verifies that {@link Engine#segments(boolean)} and the segments stats track
     * segment creation, flush, deletes and the compound-file-on-flush setting.
     * Uses NoMergePolicy so segments are never merged away and counts stay exact.
     */
    @Test
    public void testSegments() throws Exception {
        try (Store store = createStore();
            Engine engine = createEngine(defaultSettings, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), NoMergePolicy.INSTANCE)) {
            // empty engine: no segments, zero stats
            List<Segment> segments = engine.segments(false);
            assertThat(segments.isEmpty(), equalTo(true));
            assertThat(engine.segmentsStats().getCount(), equalTo(0l));
            assertThat(engine.segmentsStats().getMemoryInBytes(), equalTo(0l));
            final boolean defaultCompound = defaultSettings.getAsBoolean(EngineConfig.INDEX_COMPOUND_ON_FLUSH, true);

            // create a doc and refresh
            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
            engine.create(new Engine.Create(newUid("1"), doc));
            ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
            engine.create(new Engine.Create(newUid("2"), doc2));
            engine.refresh("test");

            // one searchable but uncommitted segment holding both documents
            segments = engine.segments(false);
            assertThat(segments.size(), equalTo(1));
            SegmentsStats stats = engine.segmentsStats();
            assertThat(stats.getCount(), equalTo(1l));
            assertThat(stats.getTermsMemoryInBytes(), greaterThan(0l));
            assertThat(stats.getStoredFieldsMemoryInBytes(), greaterThan(0l));
            assertThat(stats.getTermVectorsMemoryInBytes(), equalTo(0l));
            assertThat(stats.getNormsMemoryInBytes(), greaterThan(0l));
            assertThat(stats.getDocValuesMemoryInBytes(), greaterThan(0l));
            assertThat(segments.get(0).isCommitted(), equalTo(false));
            assertThat(segments.get(0).isSearch(), equalTo(true));
            assertThat(segments.get(0).getNumDocs(), equalTo(2));
            assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
            assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
            // verbose=false must not populate the ram tree
            assertThat(segments.get(0).ramTree, nullValue());

            // flush marks the segment committed
            engine.flush();
            segments = engine.segments(false);
            assertThat(segments.size(), equalTo(1));
            assertThat(engine.segmentsStats().getCount(), equalTo(1l));
            assertThat(segments.get(0).isCommitted(), equalTo(true));
            assertThat(segments.get(0).isSearch(), equalTo(true));
            assertThat(segments.get(0).getNumDocs(), equalTo(2));
            assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
            assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));

            // disable compound files; the next segment must be non-compound
            engine.config().setCompoundOnFlush(false);
            ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
            engine.create(new Engine.Create(newUid("3"), doc3));
            engine.refresh("test");
            segments = engine.segments(false);
            assertThat(segments.size(), equalTo(2));
            assertThat(engine.segmentsStats().getCount(), equalTo(2l));
            // stats grow with the additional segment
            assertThat(engine.segmentsStats().getTermsMemoryInBytes(), greaterThan(stats.getTermsMemoryInBytes()));
            assertThat(engine.segmentsStats().getStoredFieldsMemoryInBytes(), greaterThan(stats.getStoredFieldsMemoryInBytes()));
            assertThat(engine.segmentsStats().getTermVectorsMemoryInBytes(), equalTo(0l));
            assertThat(engine.segmentsStats().getNormsMemoryInBytes(), greaterThan(stats.getNormsMemoryInBytes()));
            assertThat(engine.segmentsStats().getDocValuesMemoryInBytes(), greaterThan(stats.getDocValuesMemoryInBytes()));
            assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
            assertThat(segments.get(0).isCommitted(), equalTo(true));
            assertThat(segments.get(0).isSearch(), equalTo(true));
            assertThat(segments.get(0).getNumDocs(), equalTo(2));
            assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
            assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
            assertThat(segments.get(1).isCommitted(), equalTo(false));
            assertThat(segments.get(1).isSearch(), equalTo(true));
            assertThat(segments.get(1).getNumDocs(), equalTo(1));
            assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
            assertThat(segments.get(1).isCompound(), equalTo(false));

            // deleting doc 1 is reflected as a deleted doc in the first segment
            engine.delete(new Engine.Delete("test", "1", newUid("1")));
            engine.refresh("test");
            segments = engine.segments(false);
            assertThat(segments.size(), equalTo(2));
            assertThat(engine.segmentsStats().getCount(), equalTo(2l));
            assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
            assertThat(segments.get(0).isCommitted(), equalTo(true));
            assertThat(segments.get(0).isSearch(), equalTo(true));
            assertThat(segments.get(0).getNumDocs(), equalTo(1));
            assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
            assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
            assertThat(segments.get(1).isCommitted(), equalTo(false));
            assertThat(segments.get(1).isSearch(), equalTo(true));
            assertThat(segments.get(1).getNumDocs(), equalTo(1));
            assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
            assertThat(segments.get(1).isCompound(), equalTo(false));

            // re-enable compound files; only the newest segment is compound
            engine.config().setCompoundOnFlush(true);
            ParsedDocument doc4 = testParsedDocument("4", "4", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
            engine.create(new Engine.Create(newUid("4"), doc4));
            engine.refresh("test");
            segments = engine.segments(false);
            assertThat(segments.size(), equalTo(3));
            assertThat(engine.segmentsStats().getCount(), equalTo(3l));
            assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
            assertThat(segments.get(0).isCommitted(), equalTo(true));
            assertThat(segments.get(0).isSearch(), equalTo(true));
            assertThat(segments.get(0).getNumDocs(), equalTo(1));
            assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
            assertThat(segments.get(0).isCompound(), equalTo(defaultCompound));
            assertThat(segments.get(1).isCommitted(), equalTo(false));
            assertThat(segments.get(1).isSearch(), equalTo(true));
            assertThat(segments.get(1).getNumDocs(), equalTo(1));
            assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
            assertThat(segments.get(1).isCompound(), equalTo(false));
            assertThat(segments.get(2).isCommitted(), equalTo(false));
            assertThat(segments.get(2).isSearch(), equalTo(true));
            assertThat(segments.get(2).getNumDocs(), equalTo(1));
            assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
            assertThat(segments.get(2).isCompound(), equalTo(true));
        }
    }
    /**
     * Verifies that {@code segments(true)} (verbose mode) populates the
     * {@code ramTree} breakdown for every segment.
     */
    public void testVerboseSegments() throws Exception {
        try (Store store = createStore();
             Engine engine = createEngine(defaultSettings, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), NoMergePolicy.INSTANCE)) {
            List<Segment> segments = engine.segments(true);
            assertThat(segments.isEmpty(), equalTo(true));

            // each create+refresh produces a new segment (NoMergePolicy keeps them apart)
            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
            engine.create(new Engine.Create(newUid("1"), doc));
            engine.refresh("test");

            segments = engine.segments(true);
            assertThat(segments.size(), equalTo(1));
            assertThat(segments.get(0).ramTree, notNullValue());

            ParsedDocument doc2 = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), B_2, null);
            engine.create(new Engine.Create(newUid("2"), doc2));
            engine.refresh("test");
            ParsedDocument doc3 = testParsedDocument("3", "3", "test", null, -1, -1, testDocumentWithTextField(), B_3, null);
            engine.create(new Engine.Create(newUid("3"), doc3));
            engine.refresh("test");

            // verbose mode must report a ram tree for all three segments
            segments = engine.segments(true);
            assertThat(segments.size(), equalTo(3));
            assertThat(segments.get(0).ramTree, notNullValue());
            assertThat(segments.get(1).ramTree, notNullValue());
            assertThat(segments.get(2).ramTree, notNullValue());
        }
    }
    /**
     * Verifies that after force-merges complete no segment still carries a merge
     * id, and that commit generations advance as expected across merges/flushes.
     * Uses a TieredMergePolicy so merges can actually occur.
     */
    @Test
    public void testSegmentsWithMergeFlag() throws Exception {
        try (Store store = createStore();
             Engine engine = createEngine(defaultSettings, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), new TieredMergePolicy())) {
            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
            Engine.Index index = new Engine.Index(newUid("1"), doc);
            engine.index(index);
            engine.flush();
            assertThat(engine.segments(false).size(), equalTo(1));
            index = new Engine.Index(newUid("2"), doc);
            engine.index(index);
            engine.flush();
            List<Segment> segments = engine.segments(false);
            assertThat(segments.size(), equalTo(2));
            // no merge is in flight, so no segment should carry a merge id
            for (Segment segment : segments) {
                assertThat(segment.getMergeId(), nullValue());
            }
            index = new Engine.Index(newUid("3"), doc);
            engine.index(index);
            engine.flush();
            segments = engine.segments(false);
            assertThat(segments.size(), equalTo(3));
            for (Segment segment : segments) {
                assertThat(segment.getMergeId(), nullValue());
            }

            index = new Engine.Index(newUid("4"), doc);
            engine.index(index);
            engine.flush();
            final long gen1 = store.readLastCommittedSegmentsInfo().getGeneration();
            // now, optimize and wait for merges, see that we have no merge flag
            engine.forceMerge(true);

            for (Segment segment : engine.segments(false)) {
                assertThat(segment.getMergeId(), nullValue());
            }
            // we could have multiple underlying merges, so the generation may increase more than once
            assertTrue(store.readLastCommittedSegmentsInfo().getGeneration() > gen1);

            final boolean flush = randomBoolean();
            final long gen2 = store.readLastCommittedSegmentsInfo().getGeneration();
            engine.forceMerge(flush);
            for (Segment segment : engine.segments(false)) {
                assertThat(segment.getMergeId(), nullValue());
            }

            if (flush) {
                // we should have had just 1 merge, so last generation should be exact
                assertEquals(gen2 + 1, store.readLastCommittedSegmentsInfo().getLastGeneration());
            }
        }
    }
    /**
     * Verifies that commit stats expose a generation, a commit id and translog
     * user data, and that a flush advances the generation and commit id while
     * the translog UUID stays stable.
     */
    public void testCommitStats() {
        Document document = testDocumentWithTextField();
        document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
        engine.create(new Engine.Create(newUid("1"), doc));

        CommitStats stats1 = engine.commitStats();
        assertThat(stats1.getGeneration(), greaterThan(0l));
        assertThat(stats1.getId(), notNullValue());
        assertThat(stats1.getUserData(), hasKey(Translog.TRANSLOG_GENERATION_KEY));

        engine.flush(true, true);
        CommitStats stats2 = engine.commitStats();
        // a flush produces a new commit point: higher generation, different id
        assertThat(stats2.getGeneration(), greaterThan(stats1.getGeneration()));
        assertThat(stats2.getId(), notNullValue());
        assertThat(stats2.getId(), not(equalTo(stats1.getId())));
        assertThat(stats2.getUserData(), hasKey(Translog.TRANSLOG_GENERATION_KEY));
        assertThat(stats2.getUserData(), hasKey(Translog.TRANSLOG_UUID_KEY));
        // translog generation rolls, but the translog UUID is preserved
        assertThat(stats2.getUserData().get(Translog.TRANSLOG_GENERATION_KEY), not(equalTo(stats1.getUserData().get(Translog.TRANSLOG_GENERATION_KEY))));
        assertThat(stats2.getUserData().get(Translog.TRANSLOG_UUID_KEY), equalTo(stats1.getUserData().get(Translog.TRANSLOG_UUID_KEY)));
    }
@Test
public void testIndexSearcherWrapper() throws Exception {
final AtomicInteger counter = new AtomicInteger();
IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
@Override
public DirectoryReader wrap(DirectoryReader reader) {
counter.incrementAndGet();
return reader;
}
@Override
public IndexSearcher wrap(EngineConfig engineConfig, IndexSearcher searcher) throws EngineException {
counter.incrementAndGet();
return searcher;
}
};
Store store = createStore();
Path translog = createTempDir("translog-test");
InternalEngine engine = createEngine(store, translog, wrapper);
Engine.Searcher searcher = engine.acquireSearcher("test");
assertThat(counter.get(), equalTo(2));
searcher.close();
IOUtils.close(store, engine);
}
@Test
public void testSimpleOperations() throws Exception {
Engine.Searcher searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
searchResult.close();
// create a document
Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
engine.create(new Engine.Create(newUid("1"), doc));
// its not there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
searchResult.close();
// but, we can still get it (in realtime)
Engine.GetResult getResult = engine.get(new Engine.Get(true, newUid("1")));
assertThat(getResult.exists(), equalTo(true));
assertThat(getResult.source().source.toBytesArray(), equalTo(B_1.toBytesArray()));
assertThat(getResult.docIdAndVersion(), nullValue());
getResult.release();
// but, not there non realtime
getResult = engine.get(new Engine.Get(false, newUid("1")));
assertThat(getResult.exists(), equalTo(false));
getResult.release();
// refresh and it should be there
engine.refresh("test");
// now its there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
searchResult.close();
// also in non realtime
getResult = engine.get(new Engine.Get(false, newUid("1")));
assertThat(getResult.exists(), equalTo(true));
assertThat(getResult.docIdAndVersion(), notNullValue());
getResult.release();
// now do an update
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
document.add(new Field(SourceFieldMapper.NAME, B_2.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null);
engine.index(new Engine.Index(newUid("1"), doc));
// its not updated yet...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// but, we can still get it (in realtime)
getResult = engine.get(new Engine.Get(true, newUid("1")));
assertThat(getResult.exists(), equalTo(true));
assertThat(getResult.source().source.toBytesArray(), equalTo(B_2.toBytesArray()));
assertThat(getResult.docIdAndVersion(), nullValue());
getResult.release();
// refresh and it should be updated
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
// now delete
engine.delete(new Engine.Delete("test", "1", newUid("1")));
// its not deleted yet
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
// but, get should not see it (in realtime)
getResult = engine.get(new Engine.Get(true, newUid("1")));
assertThat(getResult.exists(), equalTo(false));
getResult.release();
// refresh and it should be deleted
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// add it back
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, B_1.toBytes(), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
engine.create(new Engine.Create(newUid("1"), doc));
// its not there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// refresh and it should be there
engine.refresh("test");
// now its there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// now flush
engine.flush();
// and, verify get (in real time)
getResult = engine.get(new Engine.Get(true, newUid("1")));
assertThat(getResult.exists(), equalTo(true));
assertThat(getResult.source(), nullValue());
assertThat(getResult.docIdAndVersion(), notNullValue());
getResult.release();
// make sure we can still work with the engine
// now do an update
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_1, null);
engine.index(new Engine.Index(newUid("1"), doc));
// its not updated yet...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// refresh and it should be updated
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
}
    /**
     * An acquired searcher is a point-in-time view: a delete that becomes
     * visible via refresh to newly acquired searchers must remain invisible
     * to a searcher acquired before the refresh, until it is closed.
     */
    @Test
    public void testSearchResultRelease() throws Exception {
        // engine starts empty
        Engine.Searcher searchResult = engine.acquireSearcher("test");
        MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
        searchResult.close();
        // create a document
        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
        engine.create(new Engine.Create(newUid("1"), doc));
        // its not there... (no refresh yet, so the create is invisible to search)
        searchResult = engine.acquireSearcher("test");
        MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
        MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
        searchResult.close();
        // refresh and it should be there
        engine.refresh("test");
        // now its there...
        searchResult = engine.acquireSearcher("test");
        MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
        MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
        // don't release the search result yet...
        // delete, refresh and do a new search, it should not be there
        engine.delete(new Engine.Delete("test", "1", newUid("1")));
        engine.refresh("test");
        Engine.Searcher updateSearchResult = engine.acquireSearcher("test");
        MatcherAssert.assertThat(updateSearchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
        updateSearchResult.close();
        // the non release search result should not see the deleted yet...
        // (the older searcher still reads the pre-delete snapshot)
        MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
        MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
        searchResult.close();
    }
    /**
     * Synced flush protocol on a fresh engine: a sync flush with a wrong commit
     * id yields COMMIT_MISMATCH, a correct id with uncommitted operations yields
     * PENDING_OPERATIONS, and a successful sync flush stamps the sync id into
     * the commit user data both on disk and in the engine's segment infos.
     */
    public void testSyncedFlush() throws IOException {
        try (Store store = createStore();
                Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new MergeSchedulerConfig(defaultSettings),
                        new LogByteSizeMergePolicy()), false)) {
            final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
            engine.create(new Engine.Create(newUid("1"), doc));
            Engine.CommitId commitID = engine.flush();
            assertThat(commitID, equalTo(new Engine.CommitId(store.readLastCommittedSegmentsInfo().getId())));
            // corrupt the commit id by flipping the bits of its first byte
            byte[] wrongBytes = Base64.decode(commitID.toString());
            wrongBytes[0] = (byte) ~wrongBytes[0];
            Engine.CommitId wrongId = new Engine.CommitId(wrongBytes);
            assertEquals("should fail to sync flush with wrong id (but no docs)", engine.syncFlush(syncId + "1", wrongId),
                    Engine.SyncedFlushResult.COMMIT_MISMATCH);
            // an uncommitted create makes the sync flush fail even with the right id
            engine.create(new Engine.Create(newUid("2"), doc));
            assertEquals("should fail to sync flush with right id but pending doc", engine.syncFlush(syncId + "2", commitID),
                    Engine.SyncedFlushResult.PENDING_OPERATIONS);
            commitID = engine.flush();
            assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
                    Engine.SyncedFlushResult.SUCCESS);
            // sync id must be recorded both in the on-disk commit and in memory
            assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
            assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
        }
    }
    /**
     * A sync id written by a successful synced flush must survive an engine
     * restart — via either close path — because no further operations arrived.
     * NOTE(review): "Sycned" in the method name is a typo for "Synced"; kept
     * as-is to avoid renaming a test entry point.
     */
    public void testSycnedFlushSurvivesEngineRestart() throws IOException {
        final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
        engine.create(new Engine.Create(newUid("1"), doc));
        final Engine.CommitId commitID = engine.flush();
        assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
                Engine.SyncedFlushResult.SUCCESS);
        assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
        assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
        EngineConfig config = engine.config();
        // both close() and flushAndClose() must preserve the sync id in the last commit
        if (randomBoolean()) {
            engine.close();
        } else {
            engine.flushAndClose();
        }
        engine = new InternalEngine(config, randomBoolean());
        assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
    }
    /**
     * A sync id must NOT survive translog replay: if an operation is still
     * pending when the engine closes, the restarted engine replays it and the
     * commit can no longer carry the sync id.
     */
    public void testSycnedFlushVanishesOnReplay() throws IOException {
        final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
        engine.create(new Engine.Create(newUid("1"), doc));
        final Engine.CommitId commitID = engine.flush();
        assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
                Engine.SyncedFlushResult.SUCCESS);
        assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
        assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
        // this create stays only in the translog — it will be replayed on restart
        doc = testParsedDocument("2", "2", "test", null, -1, -1, testDocumentWithTextField(), new BytesArray("{}"), null);
        engine.create(new Engine.Create(newUid("2"), doc));
        EngineConfig config = engine.config();
        engine.close();
        final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class);
        if (directory != null) {
            // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents
            // this so we have to disable the check explicitly
            directory.setPreventDoubleWrite(false);
        }
        config.setCreate(false);
        engine = new InternalEngine(config, false);
        assertNull("Sync ID must be gone since we have a document to replay", engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID));
    }
@Test
public void testVersioningNewCreate() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Create create = new Engine.Create(newUid("1"), doc);
engine.create(create);
assertThat(create.version(), equalTo(1l));
create = new Engine.Create(newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.create(create);
assertThat(create.version(), equalTo(1l));
}
@Test
public void testExternalVersioningNewCreate() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Create create = new Engine.Create(newUid("1"), doc, 12, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(12l));
create = new Engine.Create(newUid("1"), doc, create.version(), create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.create(create);
assertThat(create.version(), equalTo(12l));
}
@Test
public void testVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
index = new Engine.Index(newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
assertThat(index.version(), equalTo(1l));
}
@Test
public void testExternalVersioningNewIndex() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
index = new Engine.Index(newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0);
replicaEngine.index(index);
assertThat(index.version(), equalTo(12l));
}
@Test
public void testVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
index = new Engine.Index(newUid("1"), doc, 1l, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, 0);
try {
engine.index(index);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
// future versions should not work as well
index = new Engine.Index(newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
}
@Test
public void testExternalVersioningIndexConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
index = new Engine.Index(newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(14l));
index = new Engine.Index(newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
}
@Test
public void testVersioningIndexConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
engine.flush();
index = new Engine.Index(newUid("1"), doc, 1l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
// future versions should not work as well
index = new Engine.Index(newUid("1"), doc, 3l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
}
@Test
public void testExternalVersioningIndexConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc, 12, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(12l));
index = new Engine.Index(newUid("1"), doc, 14, VersionType.EXTERNAL, PRIMARY, 0);
engine.index(index);
assertThat(index.version(), equalTo(14l));
engine.flush();
index = new Engine.Index(newUid("1"), doc, 13, VersionType.EXTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
}
    /**
     * Force-merge behavior: merging down to one segment, then verifying that
     * expunge-deletes (and a later plain force merge) drop deleted documents
     * from the merged segment (numDocs shrinks; maxDoc reflects the merge).
     */
    public void testForceMerge() throws IOException {
        try (Store store = createStore();
                Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new MergeSchedulerConfig(defaultSettings),
                        new LogByteSizeMergePolicy()), false)) { // use log MP here we test some behavior in ESMP
            int numDocs = randomIntBetween(10, 100);
            // index each doc in its own refresh cycle to produce many small segments
            for (int i = 0; i < numDocs; i++) {
                ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null);
                Engine.Index index = new Engine.Index(newUid(Integer.toString(i)), doc);
                engine.index(index);
                engine.refresh("test");
            }
            try (Engine.Searcher test = engine.acquireSearcher("test")) {
                assertEquals(numDocs, test.reader().numDocs());
            }
            // merge everything into a single segment
            engine.forceMerge(true, 1, false, false, false);
            assertEquals(engine.segments(true).size(), 1);
            ParsedDocument doc = testParsedDocument(Integer.toString(0), Integer.toString(0), "test", null, -1, -1, testDocument(), B_1, null);
            Engine.Index index = new Engine.Index(newUid(Integer.toString(0)), doc);
            engine.delete(new Engine.Delete(index.type(), index.id(), index.uid()));
            engine.forceMerge(true, 10, true, false, false); //expunge deletes
            assertEquals(engine.segments(true).size(), 1);
            try (Engine.Searcher test = engine.acquireSearcher("test")) {
                assertEquals(numDocs - 1, test.reader().numDocs());
                // maxDoc == numDocs - 1 proves the deleted doc was physically expunged
                assertEquals(engine.config().getMergePolicy().toString(), numDocs - 1, test.reader().maxDoc());
            }
            doc = testParsedDocument(Integer.toString(1), Integer.toString(1), "test", null, -1, -1, testDocument(), B_1, null);
            index = new Engine.Index(newUid(Integer.toString(1)), doc);
            engine.delete(new Engine.Delete(index.type(), index.id(), index.uid()));
            engine.forceMerge(true, 10, false, false, false); //expunge deletes
            assertEquals(engine.segments(true).size(), 1);
            try (Engine.Searcher test = engine.acquireSearcher("test")) {
                assertEquals(numDocs - 2, test.reader().numDocs());
                // here the delete is still only marked, not expunged: maxDoc stays at numDocs - 1
                assertEquals(numDocs - 1, test.reader().maxDoc());
            }
        }
    }
    /**
     * Closing the engine while a concurrent thread indexes and force-merges
     * must fail that thread gracefully (ForceMergeFailedEngineException /
     * AlreadyClosedException / EngineClosedException) rather than hang or leak.
     */
    public void testForceMergeAndClose() throws IOException, InterruptedException {
        int numIters = randomIntBetween(2, 10);
        for (int j = 0; j < numIters; j++) {
            try (Store store = createStore()) {
                final InternalEngine engine = createEngine(store, createTempDir());
                final CountDownLatch startGun = new CountDownLatch(1);
                final CountDownLatch indexed = new CountDownLatch(1);
                // background thread: index batches and force-merge in a loop until the engine closes
                Thread thread = new Thread() {
                    public void run() {
                        try {
                            try {
                                startGun.await();
                            } catch (InterruptedException e) {
                                throw new RuntimeException(e);
                            }
                            int i = 0;
                            while (true) {
                                int numDocs = randomIntBetween(1, 20);
                                for (int j = 0; j < numDocs; j++) {
                                    i++;
                                    ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), B_1, null);
                                    Engine.Index index = new Engine.Index(newUid(Integer.toString(i)), doc);
                                    engine.index(index);
                                }
                                engine.refresh("test");
                                indexed.countDown();
                                try {
                                    engine.forceMerge(randomBoolean(), 1, false, randomBoolean(), randomBoolean());
                                } catch (ForceMergeFailedEngineException ex) {
                                    // ok — the engine was closed under us
                                    return;
                                }
                            }
                        } catch (AlreadyClosedException | EngineClosedException ex) {
                            // fine — expected race with the close below
                        }
                    }
                };
                thread.start();
                startGun.countDown();
                // race a few force merges on this thread against the background indexer
                int someIters = randomIntBetween(1, 10);
                for (int i = 0; i < someIters; i++) {
                    engine.forceMerge(randomBoolean(), 1, false, randomBoolean(), randomBoolean());
                }
                // wait until at least one batch was indexed, then close concurrently with the merge loop
                indexed.await();
                IOUtils.close(engine);
                thread.join();
            }
        }
    }
@Test
public void testVersioningDeleteConflict() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
Engine.Delete delete = new Engine.Delete("test", "1", newUid("1"), 1l, VersionType.INTERNAL, PRIMARY, 0, false);
try {
engine.delete(delete);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
// future versions should not work as well
delete = new Engine.Delete("test", "1", newUid("1"), 3l, VersionType.INTERNAL, PRIMARY, 0, false);
try {
engine.delete(delete);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
// now actually delete
delete = new Engine.Delete("test", "1", newUid("1"), 2l, VersionType.INTERNAL, PRIMARY, 0, false);
engine.delete(delete);
assertThat(delete.version(), equalTo(3l));
// now check if we can index to a delete doc with version
index = new Engine.Index(newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
// we shouldn't be able to create as well
Engine.Create create = new Engine.Create(newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.create(create);
} catch (VersionConflictEngineException e) {
// all is well
}
}
@Test
public void testVersioningDeleteConflictWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(1l));
index = new Engine.Index(newUid("1"), doc);
engine.index(index);
assertThat(index.version(), equalTo(2l));
engine.flush();
Engine.Delete delete = new Engine.Delete("test", "1", newUid("1"), 1l, VersionType.INTERNAL, PRIMARY, 0, false);
try {
engine.delete(delete);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
// future versions should not work as well
delete = new Engine.Delete("test", "1", newUid("1"), 3l, VersionType.INTERNAL, PRIMARY, 0, false);
try {
engine.delete(delete);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
engine.flush();
// now actually delete
delete = new Engine.Delete("test", "1", newUid("1"), 2l, VersionType.INTERNAL, PRIMARY, 0, false);
engine.delete(delete);
assertThat(delete.version(), equalTo(3l));
engine.flush();
// now check if we can index to a delete doc with version
index = new Engine.Index(newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.index(index);
fail();
} catch (VersionConflictEngineException e) {
// all is well
}
// we shouldn't be able to create as well
Engine.Create create = new Engine.Create(newUid("1"), doc, 2l, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.create(create);
} catch (VersionConflictEngineException e) {
// all is well
}
}
@Test
public void testVersioningCreateExistsException() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Create create = new Engine.Create(newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(1l));
create = new Engine.Create(newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.create(create);
fail();
} catch (DocumentAlreadyExistsException e) {
// all is well
}
}
@Test
public void testVersioningCreateExistsExceptionWithFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Create create = new Engine.Create(newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
engine.create(create);
assertThat(create.version(), equalTo(1l));
engine.flush();
create = new Engine.Create(newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, 0);
try {
engine.create(create);
fail();
} catch (DocumentAlreadyExistsException e) {
// all is well
}
}
    /**
     * Replica-side version handling: after the replica applied version 2,
     * replaying the older version-1 operation must conflict.
     */
    @Test
    public void testVersioningReplicaConflict1() {
        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
        Engine.Index index = new Engine.Index(newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));
        index = new Engine.Index(newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(2l));
        // apply the second index to the replica, should work fine
        index = new Engine.Index(newUid("1"), doc, index.version(), VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
        replicaEngine.index(index);
        assertThat(index.version(), equalTo(2l));
        // now, the old one should not work
        index = new Engine.Index(newUid("1"), doc, 1l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
        try {
            replicaEngine.index(index);
            fail();
        } catch (VersionConflictEngineException e) {
            // all is well
        }
        // second version on replica should fail as well
        // NOTE(review): unlike the block above, there is no fail() here and the try
        // body asserts success — so this test accepts BOTH outcomes (idempotent
        // re-apply of version 2, or a version conflict). Confirm which behavior
        // is actually intended before tightening.
        try {
            index = new Engine.Index(newUid("1"), doc, 2l
                    , VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
            replicaEngine.index(index);
            assertThat(index.version(), equalTo(2l));
        } catch (VersionConflictEngineException e) {
            // all is well
        }
    }
    /**
     * Replica-side out-of-order replay: applying a delete at version 3 while
     * skipping the intermediate version-2 index must succeed; replaying the
     * same delete again, or the skipped version-2 index afterwards, must both
     * conflict.
     */
    @Test
    public void testVersioningReplicaConflict2() {
        ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
        Engine.Index index = new Engine.Index(newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(1l));
        // apply the first index to the replica, should work fine
        index = new Engine.Index(newUid("1"), doc, 1l
                , VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
        replicaEngine.index(index);
        assertThat(index.version(), equalTo(1l));
        // index it again on the primary (bumps to version 2)
        index = new Engine.Index(newUid("1"), doc);
        engine.index(index);
        assertThat(index.version(), equalTo(2l));
        // now delete it on the primary (bumps to version 3)
        Engine.Delete delete = new Engine.Delete("test", "1", newUid("1"));
        engine.delete(delete);
        assertThat(delete.version(), equalTo(3l));
        // apply the delete on the replica (skipping the second index)
        delete = new Engine.Delete("test", "1", newUid("1"), 3l
                , VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, false);
        replicaEngine.delete(delete);
        assertThat(delete.version(), equalTo(3l));
        // second time delete with same version should fail
        try {
            delete = new Engine.Delete("test", "1", newUid("1"), 3l
                    , VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0, false);
            replicaEngine.delete(delete);
            fail("excepted VersionConflictEngineException to be thrown");
        } catch (VersionConflictEngineException e) {
            // all is well
        }
        // now do the second index on the replica, it should fail (delete at v3 already won)
        try {
            index = new Engine.Index(newUid("1"), doc, 2l, VersionType.INTERNAL.versionTypeForReplicationAndRecovery(), REPLICA, 0);
            replicaEngine.index(index);
            fail("excepted VersionConflictEngineException to be thrown");
        } catch (VersionConflictEngineException e) {
            // all is well
        }
    }
@Test
public void testBasicCreatedFlag() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
assertTrue(engine.index(index));
index = new Engine.Index(newUid("1"), doc);
assertFalse(engine.index(index));
engine.delete(new Engine.Delete(null, "1", newUid("1")));
index = new Engine.Index(newUid("1"), doc);
assertTrue(engine.index(index));
}
@Test
public void testCreatedFlagAfterFlush() {
ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
Engine.Index index = new Engine.Index(newUid("1"), doc);
assertTrue(engine.index(index));
engine.delete(new Engine.Delete(null, "1", newUid("1")));
engine.flush();
index = new Engine.Index(newUid("1"), doc);
assertTrue(engine.index(index));
}
    /**
     * Log4j appender that records whether TRACE output for shard "[index][1]"
     * was seen from the "lucene.iw" (IndexWriter flush-deletes message) and
     * "lucene.iw.ifd" (IndexFileDeleter) loggers.
     */
    private static class MockAppender extends AppenderSkeleton {
        // set once a matching IndexWriter ("lucene.iw") message is appended
        public boolean sawIndexWriterMessage;
        // set once any "lucene.iw.ifd" message for the shard is appended
        public boolean sawIndexWriterIFDMessage;
        @Override
        protected void append(LoggingEvent event) {
            // only TRACE events tagged with this shard's "[index][1] " prefix count
            if (event.getLevel() == Level.TRACE && event.getMessage().toString().contains("[index][1] ")) {
                if (event.getLoggerName().endsWith("lucene.iw") &&
                        event.getMessage().toString().contains("IW: apply all deletes during flush")) {
                    sawIndexWriterMessage = true;
                }
                if (event.getLoggerName().endsWith("lucene.iw.ifd")) {
                    sawIndexWriterIFDMessage = true;
                }
            }
        }
        @Override
        public boolean requiresLayout() {
            // raw events are inspected directly; no layout needed
            return false;
        }
        @Override
        public void close() {
            // nothing to release
        }
    }
    // #5891: make sure IndexWriter's infoStream output is
    // sent to lucene.iw with log level TRACE:
    @Test
    public void testIndexWriterInfoStream() {
        assumeFalse("who tests the tester?", VERBOSE);
        MockAppender mockAppender = new MockAppender();
        Logger rootLogger = Logger.getRootLogger();
        Level savedLevel = rootLogger.getLevel();
        rootLogger.addAppender(mockAppender);
        rootLogger.setLevel(Level.DEBUG);
        try {
            // First, with DEBUG, which should NOT log IndexWriter output:
            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
            engine.create(new Engine.Create(newUid("1"), doc));
            engine.flush();
            assertFalse(mockAppender.sawIndexWriterMessage);
            // Again, with TRACE, which should log IndexWriter output:
            rootLogger.setLevel(Level.TRACE);
            engine.create(new Engine.Create(newUid("2"), doc));
            engine.flush();
            assertTrue(mockAppender.sawIndexWriterMessage);
        } finally {
            // restore global logging state so other tests are unaffected
            rootLogger.removeAppender(mockAppender);
            rootLogger.setLevel(savedLevel);
        }
    }
    // #8603: make sure we can separately log IFD's (IndexFileDeleter's) messages
    public void testIndexWriterIFDInfoStream() {
        assumeFalse("who tests the tester?", VERBOSE);
        MockAppender mockAppender = new MockAppender();
        // Works when running this test inside Intellij:
        Logger iwIFDLogger = LogManager.exists("org.elasticsearch.index.engine.lucene.iw.ifd");
        if (iwIFDLogger == null) {
            // Works when running this test from command line:
            iwIFDLogger = LogManager.exists("index.engine.lucene.iw.ifd");
            assertNotNull(iwIFDLogger);
        }
        iwIFDLogger.addAppender(mockAppender);
        iwIFDLogger.setLevel(Level.DEBUG);
        try {
            // First, with DEBUG, which should NOT log IndexWriter output:
            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocumentWithTextField(), B_1, null);
            engine.create(new Engine.Create(newUid("1"), doc));
            engine.flush();
            assertFalse(mockAppender.sawIndexWriterMessage);
            assertFalse(mockAppender.sawIndexWriterIFDMessage);
            // Again, with TRACE, which should only log IndexWriter IFD output
            // (the IW message must stay absent — that's the separation under test):
            iwIFDLogger.setLevel(Level.TRACE);
            engine.create(new Engine.Create(newUid("2"), doc));
            engine.flush();
            assertFalse(mockAppender.sawIndexWriterMessage);
            assertTrue(mockAppender.sawIndexWriterIFDMessage);
        } finally {
            // restore the logger so other tests are unaffected
            iwIFDLogger.removeAppender(mockAppender);
            iwIFDLogger.setLevel(null);
        }
    }
    /**
     * With GC of delete tombstones disabled, version information for deleted
     * docs must be retained: external-versioned writes with too-old versions
     * must keep conflicting even well after the delete, and realtime gets must
     * keep reporting the docs as absent.
     */
    @Test
    public void testEnableGcDeletes() throws Exception {
        try (Store store = createStore();
                Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), newMergePolicy()), false)) {
            engine.config().setEnableGcDeletes(false);
            // Add document
            Document document = testDocument();
            document.add(new TextField("value", "test1", Field.Store.YES));
            ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, document, B_2, null);
            engine.index(new Engine.Index(newUid("1"), doc, 1, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
            // Delete document we just added:
            engine.delete(new Engine.Delete("test", "1", newUid("1"), 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
            // Get should not find the document
            Engine.GetResult getResult = engine.get(new Engine.Get(true, newUid("1")));
            assertThat(getResult.exists(), equalTo(false));
            // Give the gc pruning logic a chance to kick in
            // (deliberate wait: with GC disabled the tombstone must survive this)
            Thread.sleep(1000);
            if (randomBoolean()) {
                engine.refresh("test");
            }
            // Delete non-existent document
            engine.delete(new Engine.Delete("test", "2", newUid("2"), 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
            // Get should not find the document (we never indexed uid=2):
            getResult = engine.get(new Engine.Get(true, newUid("2")));
            assertThat(getResult.exists(), equalTo(false));
            // Try to index uid=1 with a too-old version, should fail:
            try {
                engine.index(new Engine.Index(newUid("1"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
                fail("did not hit expected exception");
            } catch (VersionConflictEngineException vcee) {
                // expected
            }
            // Get should still not find the document
            getResult = engine.get(new Engine.Get(true, newUid("1")));
            assertThat(getResult.exists(), equalTo(false));
            // Try to index uid=2 with a too-old version, should fail:
            try {
                engine.index(new Engine.Index(newUid("2"), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
                fail("did not hit expected exception");
            } catch (VersionConflictEngineException vcee) {
                // expected
            }
            // Get should not find the document
            getResult = engine.get(new Engine.Get(true, newUid("2")));
            assertThat(getResult.exists(), equalTo(false));
        }
    }
    /** Builds the Lucene {@code _uid} term used to address a document in these tests. */
    protected Term newUid(String id) {
        return new Term("_uid", id);
    }
@Test
public void testExtractShardId() {
try (Engine.Searcher test = this.engine.acquireSearcher("test")) {
ShardId shardId = ShardUtils.extractShardId(test.reader());
assertNotNull(shardId);
assertEquals(shardId, engine.config().getShardId());
}
}
    /**
     * Random test that throws random exceptions during engine startup and
     * ensures all references are counted down / released and resources are
     * closed: whether creation succeeds (refCount + 1) or fails (refCount
     * unchanged), the store's reference count must balance out. This test
     * fails if any reader, searcher or directory is left unclosed — MDW FTW.
     */
    @Test
    public void testFailStart() throws IOException {
        // this test fails if any reader, searcher or directory is not closed - MDW FTW
        final int iters = scaledRandomIntBetween(10, 100);
        for (int i = 0; i < iters; i++) {
            // randomized fault injection on the directory
            MockDirectoryWrapper wrapper = newMockDirectory();
            wrapper.setFailOnOpenInput(randomBoolean());
            wrapper.setAllowRandomFileNotFoundException(randomBoolean());
            wrapper.setRandomIOExceptionRate(randomDouble());
            wrapper.setRandomIOExceptionRateOnOpen(randomDouble());
            final Path translogPath = createTempDir("testFailStart");
            try (Store store = createStore(wrapper)) {
                int refCount = store.refCount();
                assertTrue("refCount: " + store.refCount(), store.refCount() > 0);
                InternalEngine holder;
                try {
                    holder = createEngine(store, translogPath);
                } catch (EngineCreationFailureException ex) {
                    // failed creation must not leak a store reference
                    assertEquals(store.refCount(), refCount);
                    continue;
                }
                // a live engine holds exactly one extra store reference
                assertEquals(store.refCount(), refCount + 1);
                final int numStarts = scaledRandomIntBetween(1, 5);
                for (int j = 0; j < numStarts; j++) {
                    try {
                        assertEquals(store.refCount(), refCount + 1);
                        holder.close();
                        holder = createEngine(store, translogPath);
                        assertEquals(store.refCount(), refCount + 1);
                    } catch (EngineCreationFailureException ex) {
                        // all is fine - failed restart releases the reference
                        assertEquals(store.refCount(), refCount);
                        break;
                    }
                }
                holder.close();
                assertEquals(store.refCount(), refCount);
            }
        }
    }
/**
 * The engine config and the live IndexWriter config must both reflect the
 * configured codec and index concurrency.
 */
@Test
public void testSettings() {
    CodecService codecService = new CodecService(shardId.index());
    LiveIndexWriterConfig writerConfig = engine.getCurrentIndexWriterConfig();
    final String expectedCodecName = codecService.codec(codecName).getName();
    assertEquals(engine.config().getCodec().getName(), expectedCodecName);
    assertEquals(writerConfig.getCodec().getName(), expectedCodecName);
    assertEquals(engine.config().getIndexConcurrency(), indexConcurrency);
    assertEquals(writerConfig.getMaxThreadStates(), indexConcurrency);
}
/**
 * A retried create with an auto generated id must not produce duplicate
 * documents: the primary accepts the retry at the same version while the
 * replica surfaces a version conflict that the replication layer ignores.
 *
 * Fix: acquire searchers via try-with-resources (as the other tests in this
 * class do) so they are released even when an assertion fails; previously a
 * failing assertion leaked both searchers.
 */
@Test
public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOException {
    ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
    boolean canHaveDuplicates = false;
    boolean autoGeneratedId = true;
    // initial indexing on the primary, then replication to the replica
    Engine.Create index = new Engine.Create(newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    engine.create(index);
    assertThat(index.version(), equalTo(1l));
    index = new Engine.Create(newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    replicaEngine.create(index);
    assertThat(index.version(), equalTo(1l));
    // retry on the primary: duplicates are now possible, version must stay at 1
    canHaveDuplicates = true;
    index = new Engine.Create(newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    engine.create(index);
    assertThat(index.version(), equalTo(1l));
    engine.refresh("test");
    // try-with-resources releases the searcher even if an assertion fails
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10);
        assertThat(topDocs.totalHits, equalTo(1));
    }
    // replaying the retry on the replica conflicts - and that is expected
    index = new Engine.Create(newUid("1"), doc, index.version(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    try {
        replicaEngine.create(index);
        fail();
    } catch (VersionConflictEngineException e) {
        // we ignore version conflicts on replicas, see TransportReplicationAction.ignoreReplicaException
    }
    replicaEngine.refresh("test");
    try (Engine.Searcher replicaSearcher = replicaEngine.acquireSearcher("test")) {
        TopDocs topDocs = replicaSearcher.searcher().search(new MatchAllDocsQuery(), 10);
        assertThat(topDocs.totalHits, equalTo(1));
    }
}
/**
 * A retried create (canHaveDuplicates=true) that arrives BEFORE the original
 * request must not leave duplicate documents on the primary or the replica;
 * the late original fails with DocumentAlreadyExists / VersionConflict.
 *
 * Fix: acquire searchers via try-with-resources (as the other tests in this
 * class do) so they are released even when an assertion fails; previously a
 * failing assertion leaked both searchers.
 */
@Test
public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() throws IOException {
    ParsedDocument doc = testParsedDocument("1", "1", "test", null, -1, -1, testDocument(), B_1, null);
    boolean canHaveDuplicates = true;
    boolean autoGeneratedId = true;
    // the retry (which may duplicate) is indexed first on primary and replica
    Engine.Create firstIndexRequest = new Engine.Create(newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    engine.create(firstIndexRequest);
    assertThat(firstIndexRequest.version(), equalTo(1l));
    Engine.Create firstIndexRequestReplica = new Engine.Create(newUid("1"), doc, firstIndexRequest.version(), firstIndexRequest.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    replicaEngine.create(firstIndexRequestReplica);
    assertThat(firstIndexRequestReplica.version(), equalTo(1l));
    // now the "original" request (no duplicates allowed) arrives late and must fail
    canHaveDuplicates = false;
    Engine.Create secondIndexRequest = new Engine.Create(newUid("1"), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    try {
        engine.create(secondIndexRequest);
        fail();
    } catch (DocumentAlreadyExistsException e) {
        // we can ignore the exception. In case this happens because the retry request arrived first then this error will not be sent back anyway.
        // in any other case this is an actual error
    }
    engine.refresh("test");
    // try-with-resources releases the searcher even if an assertion fails
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), 10);
        assertThat(topDocs.totalHits, equalTo(1));
    }
    Engine.Create secondIndexRequestReplica = new Engine.Create(newUid("1"), doc, firstIndexRequest.version(), firstIndexRequest.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    try {
        replicaEngine.create(secondIndexRequestReplica);
        fail();
    } catch (VersionConflictEngineException e) {
        // we ignore version conflicts on replicas, see TransportReplicationAction.ignoreReplicaException.
    }
    replicaEngine.refresh("test");
    try (Engine.Searcher replicaSearcher = replicaEngine.acquireSearcher("test")) {
        TopDocs topDocs = replicaSearcher.searcher().search(new MatchAllDocsQuery(), 10);
        assertThat(topDocs.totalHits, equalTo(1));
    }
}
// #10312: buffered deletes alone (no indexing) must be able to trigger a refresh
@Test
public void testDeletesAloneCanTriggerRefresh() throws Exception {
    // Tiny indexing buffer so that buffered deletes alone can exceed it:
    Settings indexSettings = Settings.builder().put(defaultSettings)
            .put(EngineConfig.INDEX_BUFFER_SIZE_SETTING, "1kb").build();
    try (Store store = createStore();
         Engine engine = new InternalEngine(config(indexSettings, store, createTempDir(), new MergeSchedulerConfig(defaultSettings), newMergePolicy()),
                 false)) {
        for (int i = 0; i < 100; i++) {
            String id = Integer.toString(i);
            ParsedDocument doc = testParsedDocument(id, id, "test", null, -1, -1, testDocument(), B_1, null);
            engine.index(new Engine.Index(newUid(id), doc, 2, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime()));
        }
        // Force merge so we know all merges are done before we start deleting:
        engine.forceMerge(true, 1, false, false, false);
        // remember the reader version before the deletes; a refresh bumps it
        Searcher s = engine.acquireSearcher("test");
        final long version1 = ((DirectoryReader) s.reader()).getVersion();
        s.close();
        for (int i = 0; i < 100; i++) {
            String id = Integer.toString(i);
            engine.delete(new Engine.Delete("test", id, newUid(id), 10, VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), false));
        }
        // We must assertBusy because refresh due to version map being full is done in background (REFRESH) thread pool:
        assertBusy(new Runnable() {
            @Override
            public void run() {
                Searcher s2 = engine.acquireSearcher("test");
                long version2 = ((DirectoryReader) s2.reader()).getVersion();
                s2.close();
                // 100 buffered deletes will easily exceed 25% of our 1 KB indexing buffer so it should have forced a refresh:
                assertThat(version2, greaterThan(version1));
            }
        });
    }
}
/**
 * The engine must refuse to start when its translog file is missing, but can
 * be force-started with {@code INDEX_FORCE_NEW_TRANSLOG} set to true.
 */
public void testMissingTranslog() throws IOException {
    // test that we can force start the engine , even if the translog is missing.
    engine.close();
    // fake a new translog, causing the engine to point to a missing one.
    Translog translog = createTranslog();
    long id = translog.currentFileGeneration();
    translog.close();
    IOUtils.rm(translog.location().resolve(Translog.getFilename(id)));
    try {
        engine = createEngine(store, primaryTranslogDir);
        fail("engine shouldn't start without a valid translog id");
    } catch (EngineCreationFailureException ex) {
        // expected
    }
    // now it should be OK: force the engine to create a brand-new translog.
    Settings indexSettings = Settings.builder().put(defaultSettings).put(EngineConfig.INDEX_FORCE_NEW_TRANSLOG, true).build();
    engine = createEngine(indexSettings, store, primaryTranslogDir, new MergeSchedulerConfig(indexSettings), newMergePolicy());
}
/**
 * Replays the translog on engine start while the directory randomly injects
 * I/O failures; after failure injection is disabled, recovery must succeed
 * and no documents may be lost.
 *
 * Fix: removed the local {@code recoveredButFailed}, which was declared but
 * never read.
 */
public void testTranslogReplayWithFailure() throws IOException {
    boolean canHaveDuplicates = true;
    boolean autoGeneratedId = true;
    final int numDocs = randomIntBetween(1, 10);
    for (int i = 0; i < numDocs; i++) {
        ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
        Engine.Create firstIndexRequest = new Engine.Create(newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
        engine.create(firstIndexRequest);
        assertThat(firstIndexRequest.version(), equalTo(1l));
    }
    engine.refresh("test");
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
    engine.close();
    final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class);
    if (directory != null) {
        // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents
        // this so we have to disable the check explicitly
        directory.setPreventDoubleWrite(false);
        boolean started = false;
        final int numIters = randomIntBetween(10, 20);
        for (int i = 0; i < numIters; i++) {
            // inject random failures while the engine tries to recover
            directory.setRandomIOExceptionRateOnOpen(randomDouble());
            directory.setRandomIOExceptionRate(randomDouble());
            directory.setFailOnOpenInput(randomBoolean());
            directory.setAllowRandomFileNotFoundException(randomBoolean());
            try {
                engine = createEngine(store, primaryTranslogDir);
                started = true;
                break;
            } catch (EngineCreationFailureException ex) {
                // expected with failure injection enabled - retry in the next iteration
            }
        }
        // disable all failure injection so the final recovery attempt can succeed
        directory.setRandomIOExceptionRateOnOpen(0.0);
        directory.setRandomIOExceptionRate(0.0);
        directory.setFailOnOpenInput(false);
        directory.setAllowRandomFileNotFoundException(false);
        if (started == false) {
            engine = createEngine(store, primaryTranslogDir);
        }
    } else {
        // no mock directory, no fun.
        engine = createEngine(store, primaryTranslogDir);
    }
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
}
/**
 * When the engine is reopened with translog recovery skipped, pending
 * translog operations must not be replayed and the index appears empty.
 */
@Test
public void testSkipTranslogReplay() throws IOException {
    boolean canHaveDuplicates = true;
    boolean autoGeneratedId = true;
    final int numDocs = randomIntBetween(1, 10);
    for (int i = 0; i < numDocs; i++) {
        ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
        Engine.Create firstIndexRequest = new Engine.Create(newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
        engine.create(firstIndexRequest);
        assertThat(firstIndexRequest.version(), equalTo(1l));
    }
    engine.refresh("test");
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
    final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class);
    if (directory != null) {
        // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents
        // this so we have to disable the check explicitly
        directory.setPreventDoubleWrite(false);
    }
    engine.close();
    // reopen with the flag that skips translog recovery (second ctor argument):
    // nothing may be replayed, so the index must be empty
    engine = new InternalEngine(engine.config(), true);
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(0));
    }
}
/** Builds a minimal dynamic mapping update for the type "some_type". */
private Mapping dynamicUpdate() {
    final BuilderContext builderContext = new BuilderContext(Settings.EMPTY, new ContentPath());
    final RootObjectMapper rootMapper = MapperBuilders.rootObject("some_type").build(builderContext);
    final MetadataFieldMapper[] noMetadataMappers = new MetadataFieldMapper[0];
    final Mapping.SourceTransform[] noTransforms = new Mapping.SourceTransform[0];
    return new Mapping(Version.CURRENT, rootMapper, noMetadataMappers, noTransforms, ImmutableMap.<String, Object>of());
}
/**
 * Upgrades indices created by pre-2.0 versions (packaged as zip fixtures on
 * the classpath): the engine must replay the legacy translog, rewrite the
 * commit user data to the new translog keys, survive repeated restarts, and
 * still accept new documents afterwards.
 *
 * Fix: corrected the assertion message typo "dosn't" -> "doesn't".
 */
public void testUpgradeOldIndex() throws IOException {
    List<Path> indexes = new ArrayList<>();
    Path dir = getDataPath("/" + OldIndexBackwardsCompatibilityIT.class.getPackage().getName().replace('.', '/')); // the files are in the same pkg as the OldIndexBackwardsCompatibilityTests test
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir, "index-*.zip")) {
        for (Path path : stream) {
            indexes.add(path);
        }
    }
    Collections.shuffle(indexes, random());
    for (Path indexFile : indexes.subList(0, scaledRandomIntBetween(1, indexes.size() / 2))) {
        final String indexName = indexFile.getFileName().toString().replace(".zip", "").toLowerCase(Locale.ROOT);
        Version version = Version.fromString(indexName.replace("index-", ""));
        if (version.onOrAfter(Version.V_2_0_0_beta1)) {
            // only legacy (pre-2.0) indices exercise the upgrade path tested here
            continue;
        }
        Path unzipDir = createTempDir();
        Path unzipDataDir = unzipDir.resolve("data");
        // decompress the index
        try (InputStream stream = Files.newInputStream(indexFile)) {
            TestUtil.unzip(stream, unzipDir);
        }
        // check it is unique
        assertTrue(Files.exists(unzipDataDir));
        Path[] list = filterExtraFSFiles(FileSystemUtils.files(unzipDataDir));
        if (list.length != 1) {
            throw new IllegalStateException("Backwards index must contain exactly one cluster but was " + list.length + " " + Arrays.toString(list));
        }
        // the bwc scripts packs the indices under this path
        Path src = list[0].resolve("nodes/0/indices/" + indexName);
        Path translog = list[0].resolve("nodes/0/indices/" + indexName).resolve("0").resolve("translog");
        assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src));
        assertTrue("[" + indexFile + "] missing translog dir: " + translog.toString(), Files.exists(translog));
        Path[] tlogFiles = filterExtraFSFiles(FileSystemUtils.files(translog));
        assertEquals(Arrays.toString(tlogFiles), tlogFiles.length, 1);
        final long size = Files.size(tlogFiles[0]);
        final long generation = TranslogTests.parseLegacyTranslogFile(tlogFiles[0]);
        assertTrue(generation >= 1);
        logger.debug("upgrading index {} file: {} size: {}", indexName, tlogFiles[0].getFileName(), size);
        Directory directory = newFSDirectory(src.resolve("0").resolve("index"));
        Store store = createStore(directory);
        final int iters = randomIntBetween(0, 2);
        int numDocs = -1;
        for (int i = 0; i < iters; i++) { // make sure we can restart on an upgraded index
            try (InternalEngine engine = createEngine(store, translog)) {
                try (Searcher searcher = engine.acquireSearcher("test")) {
                    if (i > 0) {
                        // a restart must not change the number of documents
                        assertEquals(numDocs, searcher.reader().numDocs());
                    }
                    TopDocs search = searcher.searcher().search(new MatchAllDocsQuery(), 1);
                    numDocs = searcher.reader().numDocs();
                    assertTrue(search.totalHits > 1);
                }
                // the upgrade must replace the legacy "translog_id" commit key with the new UUID/generation keys
                CommitStats commitStats = engine.commitStats();
                Map<String, String> userData = commitStats.getUserData();
                assertTrue("userdata doesn't contain uuid", userData.containsKey(Translog.TRANSLOG_UUID_KEY));
                assertTrue("userdata doesn't contain generation key", userData.containsKey(Translog.TRANSLOG_GENERATION_KEY));
                assertFalse("userdata contains legacy marker", userData.containsKey("translog_id"));
            }
        }
        try (InternalEngine engine = createEngine(store, translog)) {
            if (numDocs == -1) {
                try (Searcher searcher = engine.acquireSearcher("test")) {
                    numDocs = searcher.reader().numDocs();
                }
            }
            // the upgraded index must still accept new documents
            final int numExtraDocs = randomIntBetween(1, 10);
            for (int i = 0; i < numExtraDocs; i++) {
                ParsedDocument doc = testParsedDocument("extra" + Integer.toString(i), "extra" + Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
                Engine.Create firstIndexRequest = new Engine.Create(newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), false, false);
                engine.create(firstIndexRequest);
                assertThat(firstIndexRequest.version(), equalTo(1l));
            }
            engine.refresh("test");
            try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
                TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + numExtraDocs));
                assertThat(topDocs.totalHits, equalTo(numDocs + numExtraDocs));
            }
        }
        IOUtils.close(store, directory);
    }
}
/** Drops paths whose file name starts with "extra" (injected by the mock file system). */
private Path[] filterExtraFSFiles(Path[] files) {
    final List<Path> kept = new ArrayList<>();
    for (Path candidate : files) {
        final boolean isExtraFsFile = candidate.getFileName().toString().startsWith("extra");
        if (isExtraFsFile == false) {
            kept.add(candidate);
        }
    }
    return kept.toArray(new Path[kept.size()]);
}
/**
 * Full translog replay round-trip: documents written before a restart must be
 * replayed and visible after recovery, replay must be skipped once the
 * translog has been committed, and later index/delete operations must replay
 * (or not) depending on whether a flush happened in between.
 */
public void testTranslogReplay() throws IOException {
    boolean canHaveDuplicates = true;
    boolean autoGeneratedId = true;
    final int numDocs = randomIntBetween(1, 10);
    for (int i = 0; i < numDocs; i++) {
        ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
        Engine.Create firstIndexRequest = new Engine.Create(newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
        engine.create(firstIndexRequest);
        assertThat(firstIndexRequest.version(), equalTo(1l));
    }
    engine.refresh("test");
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
    final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class);
    if (directory != null) {
        // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents
        // this so we have to disable the check explicitly
        directory.setPreventDoubleWrite(false);
    }
    // inject a mapping update so replay also exercises the recovered-types bookkeeping
    TranslogHandler parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer();
    parser.mappingUpdate = dynamicUpdate();
    engine.close();
    engine.config().setCreate(false);
    engine = new InternalEngine(engine.config(), false); // we need to reuse the engine config unless the parser.mappingModified won't work
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
    // all docs must have been replayed, and the injected mapping update recorded per type
    parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer();
    assertEquals(numDocs, parser.recoveredOps.get());
    if (parser.mappingUpdate != null) {
        assertEquals(1, parser.getRecoveredTypes().size());
        assertTrue(parser.getRecoveredTypes().containsKey("test"));
    } else {
        assertEquals(0, parser.getRecoveredTypes().size());
    }
    // restart once more: nothing must replay this time
    engine.close();
    engine = createEngine(store, primaryTranslogDir);
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
    parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer();
    assertEquals(0, parser.recoveredOps.get());
    // index one extra doc (externally versioned), optionally flushing in between
    final boolean flush = randomBoolean();
    int randomId = randomIntBetween(numDocs + 1, numDocs + 10);
    String uuidValue = "test#" + Integer.toString(randomId);
    ParsedDocument doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
    Engine.Create firstIndexRequest = new Engine.Create(newUid(uuidValue), doc, 1, VersionType.EXTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
    engine.create(firstIndexRequest);
    assertThat(firstIndexRequest.version(), equalTo(1l));
    if (flush) {
        engine.flush();
    }
    doc = testParsedDocument(uuidValue, Integer.toString(randomId), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
    Engine.Index idxRequest = new Engine.Index(newUid(uuidValue), doc, 2, VersionType.EXTERNAL, PRIMARY, System.nanoTime());
    engine.index(idxRequest);
    engine.refresh("test");
    assertThat(idxRequest.version(), equalTo(2l));
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs + 1);
        assertThat(topDocs.totalHits, equalTo(numDocs + 1));
    }
    engine.close();
    engine = createEngine(store, primaryTranslogDir);
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs + 1);
        assertThat(topDocs.totalHits, equalTo(numDocs + 1));
    }
    // a flush commits the create, so only the update replays; otherwise both do
    parser = (TranslogHandler) engine.config().getTranslogRecoveryPerformer();
    assertEquals(flush ? 1 : 2, parser.recoveredOps.get());
    engine.delete(new Engine.Delete("test", Integer.toString(randomId), newUid(uuidValue)));
    if (randomBoolean()) {
        engine.refresh("test");
    } else {
        // alternatively restart the engine so the delete is replayed from the translog
        engine.close();
        engine = createEngine(store, primaryTranslogDir);
    }
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), numDocs);
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
}
/**
 * Test {@link TranslogRecoveryPerformer} that counts replayed operations and
 * can inject a mapping update for every recovered document.
 */
public static class TranslogHandler extends TranslogRecoveryPerformer {

    private final DocumentMapper docMapper;
    // optional mapping update returned for every document; null means "no update"
    public Mapping mappingUpdate = null;
    // number of translog operations replayed through this handler
    public final AtomicInteger recoveredOps = new AtomicInteger(0);

    public TranslogHandler(String indexName) {
        super(new ShardId("test", 0), null, null, null, null);
        // build a minimal MapperService/DocumentMapper for the "test" type
        Settings settings = Settings.settingsBuilder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build();
        RootObjectMapper.Builder rootBuilder = new RootObjectMapper.Builder("test");
        Index index = new Index(indexName);
        AnalysisService analysisService = new AnalysisService(index, settings);
        SimilarityLookupService similarityLookupService = new SimilarityLookupService(index, settings);
        MapperService mapperService = new MapperService(index, settings, analysisService, similarityLookupService, null);
        DocumentMapper.Builder b = new DocumentMapper.Builder(settings, rootBuilder, mapperService);
        DocumentMapperParser parser = new DocumentMapperParser(settings, mapperService, analysisService, similarityLookupService, null);
        this.docMapper = b.build(mapperService, parser);
    }

    @Override
    protected DocumentMapperForType docMapper(String type) {
        // always serve the test mapper, paired with the injected update (if any)
        return new DocumentMapperForType(docMapper, mappingUpdate);
    }

    @Override
    protected void operationProcessed() {
        recoveredOps.incrementAndGet();
    }

    @Override
    public void performRecoveryOperation(Engine engine, Translog.Operation operation, boolean allowMappingUpdates) {
        if (operation.opType() != Translog.Operation.Type.DELETE_BY_QUERY) { // we don't support del by query in this test
            super.performRecoveryOperation(engine, operation, allowMappingUpdates);
        }
    }
}
/**
 * Opening an engine whose config points at a translog created by a different
 * engine must fail with {@link EngineCreationFailureException}, and recovery
 * from the proper translog must still succeed afterwards.
 */
public void testRecoverFromForeignTranslog() throws IOException {
    boolean canHaveDuplicates = true;
    boolean autoGeneratedId = true;
    final int numDocs = randomIntBetween(1, 10);
    for (int i = 0; i < numDocs; i++) {
        ParsedDocument doc = testParsedDocument(Integer.toString(i), Integer.toString(i), "test", null, -1, -1, testDocument(), new BytesArray("{}"), null);
        Engine.Create firstIndexRequest = new Engine.Create(newUid(Integer.toString(i)), doc, Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, System.nanoTime(), canHaveDuplicates, autoGeneratedId);
        engine.create(firstIndexRequest);
        assertThat(firstIndexRequest.version(), equalTo(1l));
    }
    engine.refresh("test");
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
    final MockDirectoryWrapper directory = DirectoryUtils.getLeaf(store.directory(), MockDirectoryWrapper.class);
    if (directory != null) {
        // since we rollback the IW we are writing the same segment files again after starting IW but MDW prevents
        // this so we have to disable the check explicitly
        directory.setPreventDoubleWrite(false);
    }
    Translog.TranslogGeneration generation = engine.getTranslog().getGeneration();
    engine.close();
    // create a fresh translog in a different directory - it belongs to no engine
    Translog translog = new Translog(new TranslogConfig(shardId, createTempDir(), Settings.EMPTY, Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool));
    translog.add(new Translog.Create("test", "SomeBogusId", "{}".getBytes(Charset.forName("UTF-8"))));
    assertEquals(generation.translogFileGeneration, translog.currentFileGeneration());
    translog.close();
    EngineConfig config = engine.config();
    /* create a TranslogConfig that has been created with a different UUID */
    TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(), Translog.Durabilty.REQUEST, BigArrays.NON_RECYCLING_INSTANCE, threadPool);
    EngineConfig brokenConfig = new EngineConfig(shardId, threadPool, config.getIndexingService(), config.getIndexSettings()
            , null, store, createSnapshotDeletionPolicy(), newMergePolicy(), config.getMergeSchedulerConfig(),
            config.getAnalyzer(), config.getSimilarity(), new CodecService(shardId.index()), config.getFailedEngineListener()
            , config.getTranslogRecoveryPerformer(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), new IndexSearcherWrappingService(), translogConfig);
    try {
        new InternalEngine(brokenConfig, false);
        fail("translog belongs to a different engine");
    } catch (EngineCreationFailureException ex) {
        // expected - the foreign translog must be rejected
    }
    engine = createEngine(store, primaryTranslogDir); // and recover again!
    try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
        TopDocs topDocs = searcher.searcher().search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
        assertThat(topDocs.totalHits, equalTo(numDocs));
    }
}
}
| apache-2.0 |
tmcsantos/pentaho-kettle | ui/src/main/java/org/pentaho/di/ui/trans/step/common/CsvInputAwareStepDialog.java | 6884 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2018-2019 by Hitachi Vantara : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.trans.step.common;
import org.apache.commons.lang.StringUtils;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LogChannel;
import org.pentaho.di.core.util.Utils;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.steps.common.CsvInputAwareMeta;
import org.pentaho.di.trans.steps.csvinput.CsvInput;
import org.pentaho.di.trans.steps.fileinput.text.EncodingType;
import org.pentaho.di.trans.steps.fileinput.text.TextFileInputUtils;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.text.DecimalFormat;
/**
* A common interface for all step dialogs aware of the csv input format, such as {@link
* org.pentaho.di.ui.trans.steps.csvinput.CsvInputDialog} and
* {@link org.pentaho.di.ui.trans.steps.fileinput.text.TextFileInputDialog}
*/
public interface CsvInputAwareStepDialog {

  /**
   * Returns the field names parsed from the header line of the file described by {@code meta}, or an empty array if
   * they cannot be determined. The underlying input stream is always closed before returning.
   *
   * @see GetFieldsCapableStepDialog#getFieldNames(org.pentaho.di.trans.step.BaseStepMeta)
   */
  default String[] getFieldNames( final CsvInputAwareMeta meta ) {
    String[] fieldNames = new String[] {};
    final InputStream inputStream = getInputStream( meta );
    final InputStreamReader reader = getReader( meta, inputStream );
    try {
      fieldNames = getFieldNamesImpl( reader, meta );
    } catch ( final KettleException e ) {
      logError( BaseMessages.getString( "Dialog.ErrorGettingFields.Message" ), e );
    } finally {
      try {
        inputStream.close();
      } catch ( Exception e ) {
        // Ignore close errors
      }
    }
    return fieldNames;
  }

  /**
   * Reads the header line using the configured delimiter/enclosure/encoding and guesses the field names from it,
   * then massages each name (trimming, stripping enclosures, numbering when there is no header row).
   *
   * @param reader reader positioned at the start of the file; may be null
   * @param meta   provides delimiter, enclosure, escape character, file format and the header flag
   * @return the massaged field names, or an empty array when nothing can be read
   * @throws KettleException if reading the header line fails
   */
  default String[] getFieldNamesImpl( final InputStreamReader reader, final CsvInputAwareMeta meta )
    throws KettleException {
    String[] fieldNames = new String[] {};
    if ( reader == null || meta == null ) {
      logError( BaseMessages.getString( "Dialog.ErrorGettingFields.Message" ) );
      return fieldNames;
    }
    // delimiter/enclosure may contain variables - resolve them first
    final String delimiter = getTransMeta().environmentSubstitute( meta.getDelimiter() );
    final String enclosure = getTransMeta().environmentSubstitute( meta.getEnclosure() );
    final EncodingType encodingType = EncodingType.guessEncodingType( reader.getEncoding() );
    // Read the first line of data so the field names can be guessed from it
    final String line = TextFileInputUtils.getLine( getLogChannel(), reader, encodingType, meta.getFileFormatTypeNr(),
      new StringBuilder( 1000 ), enclosure );
    if ( !StringUtils.isBlank( line ) ) {
      fieldNames = CsvInput.guessStringsFromLine( getLogChannel(), line, delimiter, enclosure,
        meta.getEscapeCharacter() );
    }
    if ( Utils.isEmpty( fieldNames ) ) {
      logError( BaseMessages.getString( "Dialog.ErrorGettingFields.Message" ) );
      return fieldNames;
    }
    // Massage field names
    for ( int i = 0; i < fieldNames.length; i++ ) {
      fieldNames[ i ] = Const.trim( fieldNames[ i ] );
      if ( !meta.hasHeader() ) {
        // no header row: synthesize names like Field_000, Field_001, ...
        final DecimalFormat df = new DecimalFormat( "000" );
        fieldNames[ i ] = "Field_" + df.format( i );
      } else if ( !Utils.isEmpty( meta.getEnclosure() ) && fieldNames[ i ].startsWith( meta.getEnclosure() )
        && fieldNames[ i ].endsWith( meta.getEnclosure() ) && fieldNames[ i ].length() > 1 ) {
        // strip the surrounding enclosure characters from the header value
        fieldNames[ i ] = fieldNames[ i ].substring( 1, fieldNames[ i ].length() - 1 );
      }
      // trim again, now that the enclosure characters have been removed
      fieldNames[ i ] = Const.trim( fieldNames[ i ] );
      fieldNames[ i ] = massageFieldName( fieldNames[ i ] );
    }
    return fieldNames;
  }

  /**
   * Custom handling of each field can be implemented here. The default implementation returns the name unchanged.
   */
  default String massageFieldName( final String fieldName ) {
    return fieldName;
  }

  /**
   * Returns the {@link InputStream} corresponding to the csv file, or null if the file cannot be read.
   *
   * @return the {@link InputStream} corresponding to the csv file, or null if the file cannot be read
   */
  InputStream getInputStream( final CsvInputAwareMeta meta );

  /**
   * Returns the {@link InputStreamReader} corresponding to the csv file, or null if the file cannot be read.
   * Uses the encoding configured in {@code meta} (after variable substitution) when one is set.
   *
   * @return the {@link InputStreamReader} corresponding to the csv file, or null if the file cannot be read
   */
  default InputStreamReader getReader( final CsvInputAwareMeta meta, final InputStream inputStream ) {
    InputStreamReader reader = null;
    try {
      String realEncoding = getTransMeta().environmentSubstitute( meta.getEncoding() );
      if ( Utils.isEmpty( realEncoding ) ) {
        reader = new InputStreamReader( inputStream );
      } else {
        reader = new InputStreamReader( inputStream, realEncoding );
      }
    } catch ( final Exception e ) {
      logError( BaseMessages.getString( "Dialog.ErrorGettingFileDesc.DialogMessage" ), e );
    }
    return reader;
  }

  /**
   * Runs the sample-based import progress dialog over the file and returns its result message.
   * The input stream is always closed before returning.
   *
   * @see GetFieldsCapableStepDialog#loadFieldsImpl(BaseStepMeta, int)
   */
  default String loadFieldsImpl( final CsvInputAwareMeta meta, final int samples ) {
    InputStream inputStream = getInputStream( meta );
    try {
      final InputStreamReader reader = getReader( meta, inputStream );
      final CsvInputAwareImportProgressDialog pd = getCsvImportProgressDialog( meta, samples, reader );
      String message = pd.open( false );
      return message;
    } finally {
      try {
        inputStream.close();
      } catch ( Exception e ) {
        // Ignore close errors
      }
    }
  }

  /** Creates the progress dialog used by {@link #loadFieldsImpl(CsvInputAwareMeta, int)}. */
  CsvInputAwareImportProgressDialog getCsvImportProgressDialog(
    final CsvInputAwareMeta meta, final int samples, final InputStreamReader reader );

  /** Logs {@code message} together with the exception on this dialog's log channel. */
  default void logError( final String message, final Exception exception ) {
    getLogChannel().logError( message, exception );
  }

  /** Logs {@code message} on this dialog's log channel. */
  default void logError( final String message ) {
    getLogChannel().logError( message );
  }

  /** @return the log channel used for error reporting */
  LogChannel getLogChannel();

  /** @return the transformation metadata used for variable substitution */
  TransMeta getTransMeta();
}
| apache-2.0 |
robzor92/hops | hadoop-common-project/hadoop-kms/src/main/java/org/apache/hadoop/crypto/key/kms/server/KMSWebApp.java | 10514 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.crypto.key.kms.server;
import com.codahale.metrics.JmxReporter;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.CachingKeyProvider;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension;
import org.apache.hadoop.crypto.key.KeyProviderFactory;
import org.apache.hadoop.http.HttpServer2;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.util.VersionInfo;
import org.apache.log4j.PropertyConfigurator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.slf4j.bridge.SLF4JBridgeHandler;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URL;
/**
 * Servlet context listener that bootstraps and tears down the Hadoop KMS web
 * application: logging, JMX metrics, ACLs, auditing, and the backing
 * {@link KeyProvider} decorator chain (caching, eager EEK generation, key-level
 * authorization). State is held in static fields so the KMS REST resources can
 * reach it through the static getters below.
 */
@InterfaceAudience.Private
public class KMSWebApp implements ServletContextListener {
  private static final String LOG4J_PROPERTIES = "kms-log4j.properties";
  // Names under which the call meters are registered in the MetricRegistry
  // (exposed over JMX via the JmxReporter started in contextInitialized).
  private static final String METRICS_PREFIX = "hadoop.kms.";
  private static final String ADMIN_CALLS_METER = METRICS_PREFIX +
      "admin.calls.meter";
  private static final String KEY_CALLS_METER = METRICS_PREFIX +
      "key.calls.meter";
  private static final String INVALID_CALLS_METER = METRICS_PREFIX +
      "invalid.calls.meter";
  private static final String UNAUTHORIZED_CALLS_METER = METRICS_PREFIX +
      "unauthorized.calls.meter";
  private static final String UNAUTHENTICATED_CALLS_METER = METRICS_PREFIX +
      "unauthenticated.calls.meter";
  private static final String GENERATE_EEK_METER = METRICS_PREFIX +
      "generate_eek.calls.meter";
  private static final String DECRYPT_EEK_METER = METRICS_PREFIX +
      "decrypt_eek.calls.meter";
  // Assigned in initLogging() once log4j has been configured.
  private static Logger LOG;
  private static MetricRegistry metricRegistry;
  private JmxReporter jmxReporter;
  private static Configuration kmsConf;
  private static KMSACLs kmsAcls;
  private static Meter adminCallsMeter;
  private static Meter keyCallsMeter;
  private static Meter unauthorizedCallsMeter;
  private static Meter unauthenticatedCallsMeter;
  private static Meter decryptEEKCallsMeter;
  private static Meter generateEEKCallsMeter;
  private static Meter invalidCallsMeter;
  private static KMSAudit kmsAudit;
  private static KeyProviderCryptoExtension keyProviderCryptoExtension;
  static {
    // Route java.util.logging through SLF4J so all logging is handled uniformly.
    SLF4JBridgeHandler.removeHandlersForRootLogger();
    SLF4JBridgeHandler.install();
  }
  /**
   * Configures log4j from {@code confDir}/kms-log4j.properties (watched for
   * changes) unless an explicit {@code log4j.configuration} system property is
   * set. Falls back to the classpath copy of the file when the config dir does
   * not contain one, warning that default INFO/stdout logging is in effect.
   */
  private void initLogging(String confDir) {
    if (System.getProperty("log4j.configuration") == null) {
      System.setProperty("log4j.defaultInitOverride", "true");
      boolean fromClasspath = true;
      File log4jConf = new File(confDir, LOG4J_PROPERTIES).getAbsoluteFile();
      if (log4jConf.exists()) {
        PropertyConfigurator.configureAndWatch(log4jConf.getPath(), 1000);
        fromClasspath = false;
      } else {
        ClassLoader cl = Thread.currentThread().getContextClassLoader();
        URL log4jUrl = cl.getResource(LOG4J_PROPERTIES);
        if (log4jUrl != null) {
          PropertyConfigurator.configure(log4jUrl);
        }
      }
      LOG = LoggerFactory.getLogger(KMSWebApp.class);
      LOG.debug("KMS log starting");
      if (fromClasspath) {
        LOG.warn("Log4j configuration file '{}' not found", LOG4J_PROPERTIES);
        LOG.warn("Logging with INFO level to standard output");
      }
    } else {
      LOG = LoggerFactory.getLogger(KMSWebApp.class);
    }
  }
  /**
   * Initializes the KMS in dependency order: config, logging, ACLs, metrics,
   * audit, servlet-context attributes, then the KeyProvider chain. Any failure
   * is printed to stdout and terminates the JVM, since the KMS cannot run
   * without this setup.
   */
  @Override
  public void contextInitialized(ServletContextEvent sce) {
    try {
      String confDir = System.getProperty(KMSConfiguration.KMS_CONFIG_DIR);
      if (confDir == null) {
        throw new RuntimeException("System property '" +
            KMSConfiguration.KMS_CONFIG_DIR + "' not defined");
      }
      kmsConf = KMSConfiguration.getKMSConf();
      initLogging(confDir);
      UserGroupInformation.setConfiguration(kmsConf);
      LOG.info("-------------------------------------------------------------");
      LOG.info(" Java runtime version : {}", System.getProperty(
          "java.runtime.version"));
      LOG.info(" User: {}", System.getProperty("user.name"));
      LOG.info(" KMS Hadoop Version: " + VersionInfo.getVersion());
      LOG.info("-------------------------------------------------------------");
      kmsAcls = new KMSACLs();
      kmsAcls.startReloader();
      metricRegistry = new MetricRegistry();
      jmxReporter = JmxReporter.forRegistry(metricRegistry).build();
      jmxReporter.start();
      generateEEKCallsMeter = metricRegistry.register(GENERATE_EEK_METER,
          new Meter());
      decryptEEKCallsMeter = metricRegistry.register(DECRYPT_EEK_METER,
          new Meter());
      adminCallsMeter = metricRegistry.register(ADMIN_CALLS_METER, new Meter());
      keyCallsMeter = metricRegistry.register(KEY_CALLS_METER, new Meter());
      invalidCallsMeter = metricRegistry.register(INVALID_CALLS_METER,
          new Meter());
      unauthorizedCallsMeter = metricRegistry.register(UNAUTHORIZED_CALLS_METER,
          new Meter());
      unauthenticatedCallsMeter = metricRegistry.register(
          UNAUTHENTICATED_CALLS_METER, new Meter());
      kmsAudit =
          new KMSAudit(kmsConf.getLong(
              KMSConfiguration.KMS_AUDIT_AGGREGATION_WINDOW,
              KMSConfiguration.KMS_AUDIT_AGGREGATION_WINDOW_DEFAULT));
      // this is required for the the JMXJsonServlet to work properly.
      // the JMXJsonServlet is behind the authentication filter,
      // thus the '*' ACL.
      sce.getServletContext().setAttribute(HttpServer2.CONF_CONTEXT_ATTRIBUTE,
          kmsConf);
      sce.getServletContext().setAttribute(HttpServer2.ADMINS_ACL,
          new AccessControlList(AccessControlList.WILDCARD_ACL_VALUE));
      // initializing the KeyProvider
      String providerString = kmsConf.get(KMSConfiguration.KEY_PROVIDER_URI);
      if (providerString == null) {
        throw new IllegalStateException("No KeyProvider has been defined");
      }
      KeyProvider keyProvider =
          KeyProviderFactory.get(new URI(providerString), kmsConf);
      if (kmsConf.getBoolean(KMSConfiguration.KEY_CACHE_ENABLE,
          KMSConfiguration.KEY_CACHE_ENABLE_DEFAULT)) {
        // Wrap the provider with a cache; current-key entries may use a
        // different (typically shorter) timeout than regular key entries.
        long keyTimeOutMillis =
            kmsConf.getLong(KMSConfiguration.KEY_CACHE_TIMEOUT_KEY,
                KMSConfiguration.KEY_CACHE_TIMEOUT_DEFAULT);
        long currKeyTimeOutMillis =
            kmsConf.getLong(KMSConfiguration.CURR_KEY_CACHE_TIMEOUT_KEY,
                KMSConfiguration.CURR_KEY_CACHE_TIMEOUT_DEFAULT);
        keyProvider = new CachingKeyProvider(keyProvider, keyTimeOutMillis,
            currKeyTimeOutMillis);
      }
      LOG.info("Initialized KeyProvider " + keyProvider);
      keyProviderCryptoExtension = KeyProviderCryptoExtension.
          createKeyProviderCryptoExtension(keyProvider);
      keyProviderCryptoExtension =
          new EagerKeyGeneratorKeyProviderCryptoExtension(kmsConf,
              keyProviderCryptoExtension);
      if (kmsConf.getBoolean(KMSConfiguration.KEY_AUTHORIZATION_ENABLE,
          KMSConfiguration.KEY_AUTHORIZATION_ENABLE_DEFAULT)) {
        keyProviderCryptoExtension =
            new KeyAuthorizationKeyProvider(
                keyProviderCryptoExtension, kmsAcls);
      }
      LOG.info("Initialized KeyProviderCryptoExtension "
          + keyProviderCryptoExtension);
      final int defaultBitlength = kmsConf
          .getInt(KeyProvider.DEFAULT_BITLENGTH_NAME,
              KeyProvider.DEFAULT_BITLENGTH);
      LOG.info("Default key bitlength is {}", defaultBitlength);
      LOG.info("KMS Started");
    } catch (Throwable ex) {
      System.out.println();
      System.out.println("ERROR: Hadoop KMS could not be started");
      System.out.println();
      System.out.println("REASON: " + ex.toString());
      System.out.println();
      System.out.println("Stacktrace:");
      System.out.println("---------------------------------------------------");
      ex.printStackTrace(System.out);
      System.out.println("---------------------------------------------------");
      System.out.println();
      System.exit(1);
    }
  }
  /**
   * Releases resources acquired in {@link #contextInitialized}: closes the
   * KeyProvider chain (logging, not propagating, close failures), then shuts
   * down audit, ACL reloading and the JMX reporter.
   */
  @Override
  public void contextDestroyed(ServletContextEvent sce) {
    try {
      keyProviderCryptoExtension.close();
    } catch (IOException ioe) {
      LOG.error("Error closing KeyProviderCryptoExtension", ioe);
    }
    kmsAudit.shutdown();
    kmsAcls.stopReloader();
    jmxReporter.stop();
    jmxReporter.close();
    metricRegistry = null;
    LOG.info("KMS Stopped");
  }
  /** @return a defensive copy of the KMS configuration. */
  public static Configuration getConfiguration() {
    return new Configuration(kmsConf);
  }
  public static KMSACLs getACLs() {
    return kmsAcls;
  }
  public static Meter getAdminCallsMeter() {
    return adminCallsMeter;
  }
  public static Meter getKeyCallsMeter() {
    return keyCallsMeter;
  }
  public static Meter getInvalidCallsMeter() {
    return invalidCallsMeter;
  }
  public static Meter getGenerateEEKCallsMeter() {
    return generateEEKCallsMeter;
  }
  public static Meter getDecryptEEKCallsMeter() {
    return decryptEEKCallsMeter;
  }
  public static Meter getUnauthorizedCallsMeter() {
    return unauthorizedCallsMeter;
  }
  public static Meter getUnauthenticatedCallsMeter() {
    return unauthenticatedCallsMeter;
  }
  public static KeyProviderCryptoExtension getKeyProvider() {
    return keyProviderCryptoExtension;
  }
  public static KMSAudit getKMSAudit() {
    return kmsAudit;
  }
}
| apache-2.0 |
zwets/flowable-engine | modules/flowable-rest/src/test/java/org/flowable/rest/service/api/repository/DeploymentResourceResourceTest.java | 5829 | package org.flowable.rest.service.api.repository;
import java.io.ByteArrayInputStream;
import java.util.List;
import org.apache.commons.io.IOUtils;
import org.apache.http.HttpHeaders;
import org.apache.http.HttpStatus;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.message.BasicHeader;
import org.flowable.engine.repository.Deployment;
import org.flowable.rest.service.BaseSpringRestTestCase;
import org.flowable.rest.service.api.RestUrls;
import com.fasterxml.jackson.databind.JsonNode;
/**
 * Test for all REST-operations related to a resource that is part of a deployment.
 *
 * @author Frederik Heremans
 */
public class DeploymentResourceResourceTest extends BaseSpringRestTestCase {

    /**
     * Test getting a single resource, deployed in a deployment. GET repository/deployments/{deploymentId}/resources/{resourceId}
     */
    public void testGetDeploymentResource() throws Exception {
        try {
            String rawResourceName = "org/flowable/rest/service/api/repository/oneTaskProcess.bpmn20.xml";
            Deployment deployment = repositoryService.createDeployment().name("Deployment 1").addClasspathResource(rawResourceName)
                    .addInputStream("test.txt", new ByteArrayInputStream("Test content".getBytes())).deploy();

            // Build up the URL manually to make sure resource-id gets encoded
            // correctly as one piece
            HttpGet httpGet = new HttpGet(buildUrl(RestUrls.URL_DEPLOYMENT_RESOURCE, deployment.getId(), encode(rawResourceName)));
            httpGet.addHeader(new BasicHeader(HttpHeaders.ACCEPT, "application/json"));
            CloseableHttpResponse response = executeRequest(httpGet, HttpStatus.SC_OK);
            JsonNode responseNode = objectMapper.readTree(response.getEntity().getContent());
            closeResponse(response);

            // Check URL's for the resource. The expected value goes first so assertion
            // failure messages report expected/actual correctly.
            assertEquals(buildUrl(RestUrls.URL_DEPLOYMENT_RESOURCE, deployment.getId(), rawResourceName), responseNode.get("url").textValue());
            assertEquals(buildUrl(RestUrls.URL_DEPLOYMENT_RESOURCE_CONTENT, deployment.getId(), rawResourceName), responseNode.get("contentUrl").textValue());
            assertEquals("text/xml", responseNode.get("mediaType").textValue());
            assertEquals("processDefinition", responseNode.get("type").textValue());
        } finally {
            // Always cleanup any created deployments, even if the test failed
            deleteAllDeployments();
        }
    }

    /**
     * Test getting a single resource for an unexisting deployment. GET repository/deployments/{deploymentId}/resources/{resourceId}
     */
    public void testGetDeploymentResourceUnexistingDeployment() throws Exception {
        HttpGet httpGet = new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEPLOYMENT_RESOURCE, "unexisting", "resource.png"));
        httpGet.addHeader(new BasicHeader(HttpHeaders.ACCEPT, "image/png,application/json"));
        closeResponse(executeRequest(httpGet, HttpStatus.SC_NOT_FOUND));
    }

    /**
     * Test getting an unexisting resource for an existing deployment. GET repository/deployments/{deploymentId}/resources/{resourceId}
     */
    public void testGetDeploymentResourceUnexistingResource() throws Exception {
        try {
            Deployment deployment = repositoryService.createDeployment().name("Deployment 1").addInputStream("test.txt", new ByteArrayInputStream("Test content".getBytes())).deploy();

            HttpGet httpGet = new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEPLOYMENT_RESOURCE, deployment.getId(), "unexisting-resource.png"));
            httpGet.addHeader(new BasicHeader(HttpHeaders.ACCEPT, "image/png,application/json"));
            closeResponse(executeRequest(httpGet, HttpStatus.SC_NOT_FOUND));
        } finally {
            // Always cleanup any created deployments, even if the test failed
            deleteAllDeployments();
        }
    }

    /**
     * Test getting a deployment resource content. GET repository/deployments/{deploymentId}/resources/{resourceId}
     */
    public void testGetDeploymentResourceContent() throws Exception {
        try {
            Deployment deployment = repositoryService.createDeployment().name("Deployment 1").addInputStream("test.txt", new ByteArrayInputStream("Test content".getBytes())).deploy();

            HttpGet httpGet = new HttpGet(SERVER_URL_PREFIX + RestUrls.createRelativeResourceUrl(RestUrls.URL_DEPLOYMENT_RESOURCE_CONTENT, deployment.getId(), "test.txt"));
            httpGet.addHeader(new BasicHeader(HttpHeaders.ACCEPT, "text/plain"));
            CloseableHttpResponse response = executeRequest(httpGet, HttpStatus.SC_OK);
            String responseAsString = IOUtils.toString(response.getEntity().getContent());
            closeResponse(response);
            assertNotNull(responseAsString);
            assertEquals("Test content", responseAsString);
        } finally {
            // Always cleanup any created deployments, even if the test failed
            deleteAllDeployments();
        }
    }

    /**
     * Deletes every deployment (cascading), so no data leaks between tests. Shared by
     * all test methods that create deployments; previously this loop was duplicated
     * in each finally-block.
     */
    private void deleteAllDeployments() {
        List<Deployment> deployments = repositoryService.createDeploymentQuery().list();
        for (Deployment deployment : deployments) {
            repositoryService.deleteDeployment(deployment.getId(), true);
        }
    }
}
| apache-2.0 |
punkhorn/camel-upstream | core/camel-core/src/test/java/org/apache/camel/processor/CamelContextLogExhaustedMessageBodyTest.java | 1999 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import org.apache.camel.CamelContext;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.RouteBuilder;
import org.junit.Test;
public class CamelContextLogExhaustedMessageBodyTest extends ContextTestSupport {

    @Test
    public void testLogExhaustedMessageHistoryWithMessageBody() throws Exception {
        try {
            template.sendBody("direct:start", "Hello World");
            fail("should fail");
        } catch (Exception expected) {
            // The route always throws; with logExhaustedMessageBody enabled the
            // message body should appear in the logged exhausted-message history.
        }
    }

    @Override
    protected CamelContext createCamelContext() throws Exception {
        // Same context as the base class, but with exhausted-message body logging on.
        final CamelContext answer = super.createCamelContext();
        answer.setLogExhaustedMessageBody(true);
        return answer;
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Log the incoming body, then fail unconditionally.
                from("direct:start").log("Incoming ${body}").throwException(new IllegalArgumentException("Forced"));
            }
        };
    }
}
| apache-2.0 |
electrum/presto | plugin/trino-raptor-legacy/src/main/java/io/trino/plugin/raptor/legacy/backup/HttpBackupConfig.java | 1077 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.raptor.legacy.backup;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
import javax.validation.constraints.NotNull;
import java.net.URI;
/**
 * Configuration for the HTTP backup store. Binds the {@code backup.http.uri}
 * property to the base URI of the backup service.
 */
public class HttpBackupConfig
{
    // Base URI of the remote backup service; bound from "backup.http.uri".
    private URI backupUri;

    @NotNull
    public URI getUri()
    {
        return backupUri;
    }

    @Config("backup.http.uri")
    @ConfigDescription("Backup service base URI")
    public HttpBackupConfig setUri(URI uri)
    {
        backupUri = uri;
        return this;
    }
}
| apache-2.0 |
shyTNT/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201505/CreativeSetErrorReason.java | 4151 | /**
* CreativeSetErrorReason.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.dfp.axis.v201505;
/**
 * Axis 1.4 style typesafe enumeration of the reasons a {@code CreativeSetError}
 * can be reported by the DFP v201505 API. Auto-generated from WSDL (see the file
 * header) — do not edit by hand.
 * <p>
 * Each instance registers itself in the static {@code _table_} from its
 * constructor, so {@code fromValue} always returns the canonical instance and
 * {@code readResolve} preserves instance identity across deserialization.
 */
public class CreativeSetErrorReason implements java.io.Serializable {
    private java.lang.String _value_;
    // Registry of canonical instances, keyed by their string value.
    private static java.util.HashMap _table_ = new java.util.HashMap();

    // Constructor
    protected CreativeSetErrorReason(java.lang.String value) {
        _value_ = value;
        _table_.put(_value_,this);
    }

    public static final java.lang.String _VIDEO_FEATURE_REQUIRED = "VIDEO_FEATURE_REQUIRED";
    public static final java.lang.String _CANNOT_CREATE_OR_UPDATE_VIDEO_CREATIVES = "CANNOT_CREATE_OR_UPDATE_VIDEO_CREATIVES";
    public static final java.lang.String _ROADBLOCK_FEATURE_REQUIRED = "ROADBLOCK_FEATURE_REQUIRED";
    public static final java.lang.String _MASTER_CREATIVE_CANNOT_BE_COMPANION = "MASTER_CREATIVE_CANNOT_BE_COMPANION";
    public static final java.lang.String _INVALID_ADVERTISER = "INVALID_ADVERTISER";
    public static final java.lang.String _UPDATE_MASTER_CREATIVE_NOT_ALLOWED = "UPDATE_MASTER_CREATIVE_NOT_ALLOWED";
    public static final java.lang.String _UNKNOWN = "UNKNOWN";
    public static final CreativeSetErrorReason VIDEO_FEATURE_REQUIRED = new CreativeSetErrorReason(_VIDEO_FEATURE_REQUIRED);
    public static final CreativeSetErrorReason CANNOT_CREATE_OR_UPDATE_VIDEO_CREATIVES = new CreativeSetErrorReason(_CANNOT_CREATE_OR_UPDATE_VIDEO_CREATIVES);
    public static final CreativeSetErrorReason ROADBLOCK_FEATURE_REQUIRED = new CreativeSetErrorReason(_ROADBLOCK_FEATURE_REQUIRED);
    public static final CreativeSetErrorReason MASTER_CREATIVE_CANNOT_BE_COMPANION = new CreativeSetErrorReason(_MASTER_CREATIVE_CANNOT_BE_COMPANION);
    public static final CreativeSetErrorReason INVALID_ADVERTISER = new CreativeSetErrorReason(_INVALID_ADVERTISER);
    public static final CreativeSetErrorReason UPDATE_MASTER_CREATIVE_NOT_ALLOWED = new CreativeSetErrorReason(_UPDATE_MASTER_CREATIVE_NOT_ALLOWED);
    public static final CreativeSetErrorReason UNKNOWN = new CreativeSetErrorReason(_UNKNOWN);

    public java.lang.String getValue() { return _value_;}

    /**
     * Looks up the canonical instance for {@code value}.
     * @throws java.lang.IllegalArgumentException if the value is unknown
     */
    public static CreativeSetErrorReason fromValue(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        CreativeSetErrorReason enumeration = (CreativeSetErrorReason)
            _table_.get(value);
        if (enumeration==null) throw new java.lang.IllegalArgumentException();
        return enumeration;
    }

    public static CreativeSetErrorReason fromString(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        return fromValue(value);
    }

    // Identity comparison is sufficient: all instances are interned in _table_.
    public boolean equals(java.lang.Object obj) {return (obj == this);}
    public int hashCode() { return toString().hashCode();}
    public java.lang.String toString() { return _value_;}

    // Replaces a deserialized copy with the canonical instance from _table_.
    public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}

    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumSerializer(
            _javaType, _xmlType);
    }
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumDeserializer(
            _javaType, _xmlType);
    }

    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(CreativeSetErrorReason.class);
    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201505", "CreativeSetError.Reason"));
    }
    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }
}
| apache-2.0 |
jmluy/elasticsearch | modules/lang-painless/src/test/java/org/elasticsearch/painless/UserFunctionTests.java | 6948 | /*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.painless;
import java.util.List;
import java.util.Map;
/**
 * Tests for user-defined functions in Painless scripts: direct calls, method
 * references ({@code this::fn}), lambdas, lambda captures, and — where asserted —
 * the bytecode generated for each case (via {@code assertBytecodeExists}).
 * <p>
 * NOTE: the Painless sources below are runtime data for the script engine and
 * must not be reformatted.
 */
public class UserFunctionTests extends ScriptTestCase {

    /** A zero-argument user function can be declared and called from the script body. */
    public void testZeroArgumentUserFunction() {
        String source = "def twofive() { return 25; } twofive()";
        assertEquals(25, exec(source));
    }

    /** User functions calling each other, used via method reference on a def receiver. */
    public void testUserFunctionDefCallRef() {
        String source = "String getSource() { 'source'; }\n"
            + "int myCompare(int a, int b) { getMulti() * Integer.compare(a, b) }\n"
            + "int getMulti() { return -1 }\n"
            + "def l = [1, 100, -100];\n"
            + "if (myCompare(10, 50) > 0) { l.add(50 + getMulti()) }\n"
            + "l.sort(this::myCompare);\n"
            + "if (l[0] == 100) { l.remove(l.size() - 1) ; l.sort((a, b) -> -1 * myCompare(a, b)) } \n"
            + "if (getSource().startsWith('sour')) { l.add(255); }\n"
            + "return l;";
        assertEquals(List.of(1, 49, 100, 255), exec(source));
        assertBytecodeExists(source, "public &getSource()Ljava/lang/String");
        assertBytecodeExists(source, "public &getMulti()I");
        assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&getMulti ()I");
        assertBytecodeExists(source, "public &myCompare(II)I");
        assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
    }

    /** Chained user methods used as a comparator via method reference (typed List). */
    public void testChainedUserMethods() {
        String source = "int myCompare(int a, int b) { getMulti() * (a - b) }\n"
            + "int getMulti() { -1 }\n"
            + "List l = [1, 100, -100];\n"
            + "l.sort(this::myCompare);\n"
            + "l;\n";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    }

    /** Same as above, but the comparator is a lambda delegating to the user method. */
    public void testChainedUserMethodsLambda() {
        String source = "int myCompare(int a, int b) { getMulti() * (a - b) }\n"
            + "int getMulti() { -1 }\n"
            + "List l = [1, 100, -100];\n"
            + "l.sort((a, b) -> myCompare(a, b));\n"
            + "l;\n";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    }

    /** Method-reference comparator on a def receiver. */
    public void testChainedUserMethodsDef() {
        String source = "int myCompare(int a, int b) { getMulti() * (a - b) }\n"
            + "int getMulti() { -1 }\n"
            + "def l = [1, 100, -100];\n"
            + "l.sort(this::myCompare);\n"
            + "l;\n";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    }

    /** Lambda comparator delegating to a user method, on a def receiver. */
    public void testChainedUserMethodsLambdaDef() {
        String source = "int myCompare(int a, int b) { getMulti() * (a - b) }\n"
            + "int getMulti() { -1 }\n"
            + "def l = [1, 100, -100];\n"
            + "l.sort((a, b) -> myCompare(a, b));\n"
            + "l;\n";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    }

    /** Lambda capturing local variables that are forwarded to the user method. */
    public void testChainedUserMethodsLambdaCaptureDef() {
        String source = "int myCompare(int a, int b, int x, int m) { getMulti(m) * (a - b + x) }\n"
            + "int getMulti(int m) { -1 * m }\n"
            + "def l = [1, 100, -100];\n"
            + "int cx = 100;\n"
            + "int cm = 1;\n"
            + "l.sort((a, b) -> myCompare(a, b, cx, cm));\n"
            + "l;\n";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
    }

    /** A method reference (this::getLength) used inside another user function. */
    public void testMethodReferenceInUserFunction() {
        String source = "int myCompare(int a, int b, String s) { "
            + "   Map m = ['f': 5];"
            + "   a - b + m.computeIfAbsent(s, this::getLength) "
            + "}\n"
            + "int getLength(String s) { s.length() }\n"
            + "def l = [1, 0, -2];\n"
            + "String s = 'g';\n"
            + "l.sort((a, b) -> myCompare(a, b, s));\n"
            + "l;\n";
        assertEquals(List.of(-2, 1, 0), exec(source, Map.of("a", 1), false));
    }

    /** Direct call to a user function; verifies an INVOKEVIRTUAL is emitted. */
    public void testUserFunctionVirtual() {
        String source = "int myCompare(int x, int y) { return -1 * (x - y) }\n" + "return myCompare(100, 90);";
        assertEquals(-10, exec(source, Map.of("a", 1), false));
        assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
    }

    /** User function passed as a method reference; verifies the function is generated. */
    public void testUserFunctionRef() {
        String source = "int myCompare(int x, int y) { return -1 * x - y }\n"
            + "List l = [1, 100, -100];\n"
            + "l.sort(this::myCompare);\n"
            + "return l;";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
        assertBytecodeExists(source, "public &myCompare(II)I");
    }

    /** The lambda over an empty list never runs, but the function must still be emitted. */
    public void testUserFunctionRefEmpty() {
        String source = "int myCompare(int x, int y) { return -1 * x - y }\n" + "[].sort((a, b) -> myCompare(a, b));\n";
        assertNull(exec(source, Map.of("a", 1), false));
        assertBytecodeExists(source, "public &myCompare(II)I");
        assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
    }

    /** User function invoked from within a lambda body. */
    public void testUserFunctionCallInLambda() {
        String source = "int myCompare(int x, int y) { -1 * ( x - y ) }\n"
            + "List l = [1, 100, -100];\n"
            + "l.sort((a, b) -> myCompare(a, b));\n"
            + "return l;";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
        assertBytecodeExists(source, "public &myCompare(II)I");
        assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (II)I");
    }

    /** Lambda capturing a local and passing it to a user function. */
    public void testUserFunctionLambdaCapture() {
        String source = "int myCompare(Object o, int x, int y) { return o != null ? -1 * ( x - y ) : ( x - y ) }\n"
            + "List l = [1, 100, -100];\n"
            + "Object q = '';\n"
            + "l.sort((a, b) -> myCompare(q, a, b));\n"
            + "return l;";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
        assertBytecodeExists(source, "public &myCompare(Ljava/lang/Object;II)I");
        assertBytecodeExists(source, "INVOKEVIRTUAL org/elasticsearch/painless/PainlessScript$Script.&myCompare (Ljava/lang/Object;II)I");
    }

    /** Plain lambda capture without user functions; checks the synthetic lambda signature. */
    public void testLambdaCapture() {
        String source = "List l = [1, 100, -100];\n" + "int q = -1;\n" + "l.sort((a, b) -> q * ( a - b ));\n" + "return l;";
        assertEquals(List.of(100, 1, -100), exec(source, Map.of("a", 1), false));
        assertBytecodeExists(source, "public static synthetic lambda$synthetic$0(ILjava/lang/Object;Ljava/lang/Object;)I");
    }
}
| apache-2.0 |
pleacu/jbpm | jbpm-services/jbpm-kie-services/src/main/java/org/jbpm/kie/services/impl/bpmn2/ProcessDescriptor.java | 6922 | /*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jbpm.kie.services.impl.bpmn2;
import java.io.Serializable;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import org.jbpm.kie.services.impl.model.ProcessAssetDesc;
import org.jbpm.process.instance.StartProcessHelper;
import org.jbpm.services.api.model.UserTaskDefinition;
import org.kie.api.definition.process.Process;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Package-level container used by the BPMN2 handlers in this package to accumulate
 * information about a BPMN2 process while it is parsed: user tasks, data mappings,
 * item definitions, referenced classes/rules, signals, globals and called
 * sub-processes.
 * <p>
 * Sub-processes called by name are queued via {@link #addReusableSubProcessName(String)}
 * and resolved to process ids later via {@link #resolveReusableSubProcessNames(Collection)}.
 * Only access to that queue is synchronized; the rest of the state is not thread-safe.
 */
public class ProcessDescriptor implements Serializable {

    private static final long serialVersionUID = -6304675827486128074L;
    private static final Logger logger = LoggerFactory.getLogger(ProcessDescriptor.class);

    private ProcessAssetDesc process;

    private Map<String, UserTaskDefinition> tasks = new HashMap<String, UserTaskDefinition>();
    private Map<String, Map<String, String>> taskInputMappings = new HashMap<String, Map<String, String>>();
    private Map<String, Map<String, String>> taskOutputMappings = new HashMap<String, Map<String, String>>();
    private Map<String, String> inputs = new HashMap<String, String>();
    private Map<String, Collection<String>> taskAssignments = new HashMap<String, Collection<String>>();
    private Map<String, String> itemDefinitions = new HashMap<String, String>();
    private Map<String, String> serviceTasks = new HashMap<String, String>();
    private Map<String, String> globalItemDefinitions = new HashMap<String, String>();
    private Collection<String> reusableSubProcesses = new HashSet<String>(1);
    private Set<String> referencedClasses = new HashSet<String>(1);
    private Set<String> unqualifiedClasses = new HashSet<String>(1);
    private Set<String> referencedRules = new HashSet<String>(1);
    // signals/globals default to the immutable empty set until explicitly set.
    private Collection<String> signals = Collections.emptySet();
    private Collection<String> globals = Collections.emptySet();
    // Sub-process *names* still waiting to be mapped to process ids; guarded by
    // synchronized(unresolvedReusableSubProcessNames).
    private Queue<String> unresolvedReusableSubProcessNames = new ArrayDeque<String>();

    public ProcessDescriptor() {
    }

    public void setProcess(ProcessAssetDesc process) {
        this.process = process;
    }

    /** @return true when some called sub-process names have not yet been resolved to ids */
    public boolean hasUnresolvedReusableSubProcessNames() {
        return ! unresolvedReusableSubProcessNames.isEmpty();
    }

    /**
     * Maps the queued sub-process names to process ids using the processes available in
     * the deployment; when several processes share a name, the "latest" one according to
     * {@link StartProcessHelper#getComparator(String)} wins. Unresolvable names are
     * logged and kept in the queue.
     *
     * @param deploymentProcesses all processes known in the deployment
     */
    public void resolveReusableSubProcessNames( Collection<Process> deploymentProcesses ) {
        // build map of process name -> process id
        Map<String, Process> processNameProcessIdMap = new HashMap<String, Process>(deploymentProcesses.size());
        for( Process process : deploymentProcesses ) {
            String processName = process.getName();
            Process previousProcess = processNameProcessIdMap.put(processName, process);
            if( previousProcess != null ) {
                Comparator<Process> processComparator = StartProcessHelper.getComparator(processName);
                if( processComparator.compare(previousProcess, process) > 0 ) {
                    processNameProcessIdMap.put(processName, previousProcess);
                }
            }
        }
        // resolve process names called in process
        synchronized(unresolvedReusableSubProcessNames) {
            Iterator<String> iter = unresolvedReusableSubProcessNames.iterator();
            while( iter.hasNext() ) {
                String processName = iter.next();
                Process deploymentProcess = processNameProcessIdMap.get(processName);
                if( deploymentProcess == null ) {
                    logger.error("Unable to resolve process name '{}' called in process '{}'", processName, getProcess().getId());
                } else {
                    String processIdForProcessName = deploymentProcess.getId();
                    reusableSubProcesses.add(processIdForProcessName);
                    iter.remove();
                }
            }
        }
    }

    public ProcessAssetDesc getProcess() {
        return process;
    }

    public Map<String, UserTaskDefinition> getTasks() {
        return tasks;
    }

    public Map<String, Map<String, String>> getTaskInputMappings() {
        return taskInputMappings;
    }

    public Map<String, Map<String, String>> getTaskOutputMappings() {
        return taskOutputMappings;
    }

    public Map<String, String> getInputs() {
        return inputs;
    }

    public Map<String, Collection<String>> getTaskAssignments() {
        return taskAssignments;
    }

    public Map<String, String> getItemDefinitions() {
        return itemDefinitions;
    }

    public Map<String, String> getServiceTasks() {
        return serviceTasks;
    }

    public Map<String, String> getGlobalItemDefinitions() {
        return globalItemDefinitions;
    }

    public Collection<String> getReusableSubProcesses() {
        return reusableSubProcesses;
    }

    /** Queues a sub-process name for later resolution to a process id. */
    public void addReusableSubProcessName(String processName) {
        synchronized(unresolvedReusableSubProcessNames) {
            unresolvedReusableSubProcessNames.add(processName);
        }
    }

    public Set<String> getReferencedClasses() {
        return referencedClasses;
    }

    public Set<String> getUnqualifiedClasses() {
        return unqualifiedClasses;
    }

    public Set<String> getReferencedRules() {
        return referencedRules;
    }

    public Collection<String> getSignals() {
        return signals;
    }

    public void setSignals( Collection<String> signals ) {
        this.signals = signals;
    }

    public Collection<String> getGlobals() {
        return globals;
    }

    public void setGlobals( Collection<String> globals ) {
        this.globals = globals;
    }

    /**
     * Resets ALL accumulated state so the descriptor can be reused.
     * <p>
     * Fix: previously {@code unqualifiedClasses}, {@code signals}, {@code globals} and the
     * unresolved sub-process name queue were not reset, leaking stale state across uses.
     * {@code signals}/{@code globals} are reassigned (not cleared) because their default
     * {@link Collections#emptySet()} is immutable and may also have been replaced by an
     * immutable collection via the setters.
     */
    public void clear(){
        process = null;
        tasks.clear();
        taskInputMappings.clear();
        taskOutputMappings.clear();
        inputs.clear();
        taskAssignments.clear();
        reusableSubProcesses.clear();
        itemDefinitions.clear();
        serviceTasks.clear();
        globalItemDefinitions.clear();
        referencedClasses.clear();
        unqualifiedClasses.clear();
        referencedRules.clear();
        signals = Collections.emptySet();
        globals = Collections.emptySet();
        synchronized(unresolvedReusableSubProcessNames) {
            unresolvedReusableSubProcessNames.clear();
        }
    }
}
| apache-2.0 |
danteinforno/flex-sdk | modules/asc/src/java/macromedia/abc/OpcodeVisitor.java | 9096 | /*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package macromedia.abc;
/**
 * No-op implementation of {@link Visitor}: every callback in this class has an
 * empty body, so it can serve as an adapter base class. Subclasses override
 * only the {@code OP_*} opcode callbacks they care about; the structural
 * callbacks (method info, classes, scripts, traits, exceptions) are declared
 * {@code final} here and cannot be overridden.
 */
public class OpcodeVisitor implements Visitor
{
    // ---- ABC structural events: final no-ops, not overridable ----------------
    public final void methodInfo(int returnType, int[] paramTypes, int nativeName, int flags, int[] values, int[] value_kinds, int[] param_names) {}
    public final void metadataInfo(int index, int name, int[] keys, int[] values) {}
    public final void startInstance(int name, int superName, boolean isDynamic, boolean isFinal, boolean isInterface, int[] interfaces, int iinit, int protectedNamespace) {}
    public final void endInstance() {}
    public final void startClass(int name, int cinit) {}
    public final void endClass() {}
    public final void startScript(int initID) {}
    public final void endScript() {}
    public final void startMethodBody(int methodInfo, int maxStack, int maxRegs, int scopeDepth, int maxScope, int codeStart, long codeLength) {}
    public final void endMethodBody() {}
    public final void startOpcodes(int methodInfo) {}
    public final void endOpcodes() {}
    public final void exception(long start, long end, long target, int type, int name) {}
    public final void startExceptions(int exceptionCount) {}
    public final void endExceptions() {}
    public final void traitCount(int traitCount) {}
    public final void slotTrait(int kind, int name, int slotId, int type, int value, int value_kind, int[] metadata) {}
    public final void methodTrait(int kind, int name, int dispId, int methodInfo, int[] metadata) {}
    public final void classTrait(int kind, int name, int slotId, int classIndex, int[] metadata) {}
    public final void functionTrait(int kind, int name, int slotId, int methodInfo, int[] metadata) {}
    // ---- Instruction-level callbacks: overridable no-ops ----------------------
    public void target(int pos) {}
    public void OP_returnvoid() {}
    public void OP_returnvalue() {}
    public void OP_nop() {}
    public void OP_bkpt() {}
    public void OP_timestamp() {}
    public void OP_debugline(int linenum) {}
    public void OP_bkptline() {}
    public void OP_debug(int di_local, int index, int slot, int linenum) {}
    public void OP_debugfile(int index) {}
    public void OP_jump(int jump, int pos) {}
    public void OP_pushnull() {}
    public void OP_pushundefined() {}
    public void OP_pushstring(int index) {}
    public void OP_pushnamespace(int index) {}
    public void OP_pushint(int index) {}
    public void OP_pushuint(int index) {}
    public void OP_pushdouble(int index) {}
    public void OP_pushdecimal(int index) {}
    public void OP_getlocal(int index) {}
    public void OP_pushtrue() {}
    public void OP_pushfalse() {}
    public void OP_pushnan() {}
    public void OP_pushdnan() {}
    public void OP_pop() {}
    public void OP_dup() {}
    public void OP_swap() {}
    public void OP_convert_s() {}
    public void OP_esc_xelem() {}
    public void OP_esc_xattr() {}
    public void OP_checkfilter() {}
    public void OP_convert_d() {}
    public void OP_convert_m() {}
    public void OP_convert_m_p(int params) {}
    public void OP_convert_b() {}
    public void OP_convert_o() {}
    public void OP_negate() {}
    public void OP_negate_p(int params) {}
    public void OP_negate_i() {}
    public void OP_increment() {}
    public void OP_increment_p(int params) {}
    public void OP_increment_i() {}
    public void OP_inclocal(int index) {}
    public void OP_inclocal_p(int params, int index) {}
    public void OP_kill(int index) {}
    public void OP_inclocal_i(int index) {}
    public void OP_decrement() {}
    public void OP_decrement_p(int params) {}
    public void OP_decrement_i() {}
    public void OP_declocal(int index) {}
    public void OP_declocal_p(int params, int index) {}
    public void OP_declocal_i(int index) {}
    public void OP_typeof() {}
    public void OP_not() {}
    public void OP_bitnot() {}
    public void OP_setlocal(int index) {}
    public void OP_add() {}
    public void OP_add_i() {}
    public void OP_subtract() {}
    public void OP_subtract_i() {}
    public void OP_multiply() {}
    public void OP_multiply_i() {}
    public void OP_divide() {}
    public void OP_divide_i() {}
    public void OP_modulo() {}
    public void OP_add_p(int params) {}
    public void OP_subtract_p(int params) {}
    public void OP_multiply_p(int params) {}
    public void OP_divide_p(int params) {}
    public void OP_modulo_p(int params) {}
    public void OP_lshift() {}
    public void OP_rshift() {}
    public void OP_urshift() {}
    public void OP_bitand() {}
    public void OP_bitor() {}
    public void OP_bitxor() {}
    public void OP_equals() {}
    public void OP_strictequals() {}
    public void OP_lookupswitch(int defaultPos, int[] casePos, int p1, int p2) {}
    public void OP_iftrue(int offset, int pos) {}
    public void OP_iffalse(int offset, int pos) {}
    public void OP_ifeq(int offset, int pos) {}
    public void OP_ifne(int offset, int pos) {}
    public void OP_ifstricteq(int offset, int pos) {}
    public void OP_ifstrictne(int offset, int pos) {}
    public void OP_iflt(int offset, int pos) {}
    public void OP_ifle(int offset, int pos) {}
    public void OP_ifgt(int offset, int pos) {}
    public void OP_ifge(int offset, int pos) {}
    public void OP_lessthan() {}
    public void OP_lessequals() {}
    public void OP_greaterthan() {}
    public void OP_greaterequals() {}
    public void OP_newobject(int size) {}
    public void OP_newarray(int size) {}
    public void OP_getproperty(int index) {}
    public void OP_setproperty(int index) {}
    public void OP_initproperty(int index) {}
    public void OP_getdescendants(int index) {}
    public void OP_findpropstrict(int index) {}
    public void OP_findproperty(int index) {}
    public void OP_finddef(int index) {}
    public void OP_getlex(int index) {}
    public void OP_nextname() {}
    public void OP_nextvalue() {}
    public void OP_hasnext() {}
    public void OP_hasnext2(int objectRegister, int indexRegister) {}
    public void OP_deleteproperty(int index) {}
    public void OP_setslot(int index) {}
    public void OP_getslot(int index) {}
    public void OP_setglobalslot(int index) {}
    public void OP_getglobalslot(int index) {}
    public void OP_call(int size) {}
    public void OP_construct(int size) {}
    public void OP_applytype(int size) {}
    public void OP_newfunction(int id) {}
    public void OP_newclass(int id) {}
    public void OP_callstatic(int id, int argc) {}
    public void OP_callmethod(int id, int argc) {}
    public void OP_callproperty(int index, int argc) {}
    public void OP_callproplex(int index, int argc) {}
    public void OP_constructprop(int index, int argc) {}
    public void OP_callsuper(int index, int argc) {}
    public void OP_getsuper(int index) {}
    public void OP_setsuper(int index) {}
    public void OP_constructsuper(int argc) {}
    public void OP_pushshort(int n) {}
    public void OP_astype(int index) {}
    public void OP_astypelate() {}
    public void OP_coerce(int index) {}
    public void OP_coerce_b() {}
    public void OP_coerce_o() {}
    public void OP_coerce_a() {}
    public void OP_coerce_i() {}
    public void OP_coerce_u() {}
    public void OP_coerce_d() {}
    public void OP_coerce_s() {}
    public void OP_istype(int index) {}
    public void OP_istypelate() {}
    public void OP_pushbyte(int n) {}
    public void OP_getscopeobject(int index) {}
    public void OP_pushscope() {}
    public void OP_popscope() {}
    public void OP_convert_i() {}
    public void OP_convert_u() {}
    public void OP_throw() {}
    public void OP_instanceof() {}
    public void OP_in() {}
    public void OP_dxns(int index) {}
    public void OP_dxnslate() {}
    public void OP_ifnlt(int offset, int pos) {}
    public void OP_ifnle(int offset, int pos) {}
    public void OP_ifngt(int offset, int pos) {}
    public void OP_ifnge(int offset, int pos) {}
    public void OP_pushwith() {}
    public void OP_newactivation() {}
    public void OP_newcatch(int index) {}
    public void OP_deldescendants() {}
    public void OP_getglobalscope() {}
    public void OP_getlocal0() {}
    public void OP_getlocal1() {}
    public void OP_getlocal2() {}
    public void OP_getlocal3() {}
    public void OP_setlocal0() {}
    public void OP_setlocal1() {}
    public void OP_setlocal2() {}
    public void OP_setlocal3() {}
    public void OP_label() {}
    public void OP_pushconstant(int id) {}
    public void OP_callsupervoid(int index, int argc) {}
    public void OP_callpropvoid(int index, int argc) {}
    public void OP_li8(){}
    public void OP_li16(){}
    public void OP_li32(){}
    public void OP_lf32(){}
    public void OP_lf64(){}
    public void OP_si8(){}
    public void OP_si16(){}
    public void OP_si32(){}
    public void OP_sf32(){}
    public void OP_sf64(){}
    public void OP_sxi1(){}
    public void OP_sxi8(){}
    public void OP_sxi16(){}
}
| apache-2.0 |
milleruntime/accumulo | core/src/main/thrift-gen-java/org/apache/accumulo/core/manager/thrift/RecoveryException.java | 12706 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.15.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.accumulo.core.manager.thrift;
// Thrift-generated exception carrying a single human-readable reason ("why").
// Do not hand-edit the logic below: it is produced by the Thrift compiler and
// will be overwritten on regeneration (see the generated-code header above).
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
public class RecoveryException extends org.apache.thrift.TException implements org.apache.thrift.TBase<RecoveryException, RecoveryException._Fields>, java.io.Serializable, Cloneable, Comparable<RecoveryException> {
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RecoveryException");
  private static final org.apache.thrift.protocol.TField WHY_FIELD_DESC = new org.apache.thrift.protocol.TField("why", org.apache.thrift.protocol.TType.STRING, (short)1);
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new RecoveryExceptionStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new RecoveryExceptionTupleSchemeFactory();
  public @org.apache.thrift.annotation.Nullable java.lang.String why; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    WHY((short)1, "why");
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();
    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // WHY
          return WHY;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    @org.apache.thrift.annotation.Nullable
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final java.lang.String _fieldName;
    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.WHY, new org.apache.thrift.meta_data.FieldMetaData("why", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RecoveryException.class, metaDataMap);
  }
  public RecoveryException() {
  }
  public RecoveryException(
    java.lang.String why)
  {
    this();
    this.why = why;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public RecoveryException(RecoveryException other) {
    if (other.isSetWhy()) {
      this.why = other.why;
    }
  }
  public RecoveryException deepCopy() {
    return new RecoveryException(this);
  }
  @Override
  public void clear() {
    this.why = null;
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.String getWhy() {
    return this.why;
  }
  public RecoveryException setWhy(@org.apache.thrift.annotation.Nullable java.lang.String why) {
    this.why = why;
    return this;
  }
  public void unsetWhy() {
    this.why = null;
  }
  /** Returns true if field why is set (has been assigned a value) and false otherwise */
  public boolean isSetWhy() {
    return this.why != null;
  }
  public void setWhyIsSet(boolean value) {
    if (!value) {
      this.why = null;
    }
  }
  public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) {
    switch (field) {
    case WHY:
      if (value == null) {
        unsetWhy();
      } else {
        setWhy((java.lang.String)value);
      }
      break;
    }
  }
  @org.apache.thrift.annotation.Nullable
  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case WHY:
      return getWhy();
    }
    throw new java.lang.IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }
    switch (field) {
    case WHY:
      return isSetWhy();
    }
    throw new java.lang.IllegalStateException();
  }
  @Override
  public boolean equals(java.lang.Object that) {
    if (that instanceof RecoveryException)
      return this.equals((RecoveryException)that);
    return false;
  }
  public boolean equals(RecoveryException that) {
    if (that == null)
      return false;
    if (this == that)
      return true;
    boolean this_present_why = true && this.isSetWhy();
    boolean that_present_why = true && that.isSetWhy();
    if (this_present_why || that_present_why) {
      if (!(this_present_why && that_present_why))
        return false;
      if (!this.why.equals(that.why))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    int hashCode = 1;
    hashCode = hashCode * 8191 + ((isSetWhy()) ? 131071 : 524287);
    if (isSetWhy())
      hashCode = hashCode * 8191 + why.hashCode();
    return hashCode;
  }
  @Override
  public int compareTo(RecoveryException other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = java.lang.Boolean.compare(isSetWhy(), other.isSetWhy());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetWhy()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.why, other.why);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  @org.apache.thrift.annotation.Nullable
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("RecoveryException(");
    boolean first = true;
    sb.append("why:");
    if (this.why == null) {
      sb.append("null");
    } else {
      sb.append(this.why);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class RecoveryExceptionStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public RecoveryExceptionStandardScheme getScheme() {
      return new RecoveryExceptionStandardScheme();
    }
  }
  private static class RecoveryExceptionStandardScheme extends org.apache.thrift.scheme.StandardScheme<RecoveryException> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, RecoveryException struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // WHY
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.why = iprot.readString();
              struct.setWhyIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, RecoveryException struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      if (struct.why != null) {
        oprot.writeFieldBegin(WHY_FIELD_DESC);
        oprot.writeString(struct.why);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class RecoveryExceptionTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public RecoveryExceptionTupleScheme getScheme() {
      return new RecoveryExceptionTupleScheme();
    }
  }
  private static class RecoveryExceptionTupleScheme extends org.apache.thrift.scheme.TupleScheme<RecoveryException> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, RecoveryException struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetWhy()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetWhy()) {
        oprot.writeString(struct.why);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, RecoveryException struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(1);
      if (incoming.get(0)) {
        struct.why = iprot.readString();
        struct.setWhyIsSet(true);
      }
    }
  }
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
  // Generator-emitted placeholder; not called anywhere in this class.
  private static void unusedMethod() {}
}
| apache-2.0 |
shakamunyi/hadoop-20 | src/contrib/raid/src/java/org/apache/hadoop/raid/ChecksumStore.java | 2607 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.raid;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.protocol.Block;
public abstract class ChecksumStore {
/**
* Initialize the checksum store with config
*/
abstract public void initialize(Configuration conf,
boolean createStore) throws IOException;
/**
* Fetch the checksum for a lost block
* lookup the checksum from the store with blockId and generationStamp
* @param blk
* @return checksum
*/
abstract public Long getChecksum(Block blk) throws IOException;
/**
* Save the checksum for a raided block into store and compare the old value
* with new value, if different throw an exception
* @param blk
* @param newChecksum
* @param oldChecksum
* @throws IOException
*/
public Long putIfAbsentChecksum(Block blk, Long newChecksum)
throws IOException {
Long oldChecksum = putIfAbsent(blk, newChecksum);
if (oldChecksum!= null && !oldChecksum.equals(newChecksum)) {
throw new IOException("Block " + blk.toString()
+ " has different checksums " + oldChecksum + "(old) and " +
newChecksum+ "(new)");
}
return oldChecksum;
}
/**
* Save the checksum for a block into store without comparing the values
* @param blk
* @param newChecksum
* @throws IOException
*/
abstract public void putChecksum(Block blk, Long newChecksum)
throws IOException;
abstract public Long putIfAbsent(Block blk, Long newChecksum)
throws IOException;
abstract public int size() throws IOException;
abstract public boolean isEmpty() throws IOException;
abstract public boolean hasChecksum(Block blk) throws IOException;
abstract public void clear() throws IOException;
}
| apache-2.0 |
facebook/buck | src/com/facebook/buck/cxx/toolchain/nativelink/NativeLinkStrategy.java | 1673 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.cxx.toolchain.nativelink;
/**
 * The strategies by which binaries of other languages (e.g. Python, Lua) pull
 * in their transitive native (C/C++) linkable dependencies.
 */
public enum NativeLinkStrategy {
  /**
   * Pull transitive native deps in as fully linked standalone shared libraries. This is typically
   * the fastest build-time link strategy, as it requires no top-level context and therefore can
   * shared build artifacts with all other binaries using this strategy.
   */
  SEPARATE,
  /**
   * Statically link all transitive native deps, which don't have an explicit dep from non-C/C++
   * code (e.g. Python), into a monolithic shared library. Native dep roots, which have an explicit
   * dep from non-C/C++ code, remain as fully linked standalone shared libraries so that, typically,
   * application code doesn't need to change to work with this strategy. This strategy incurs a
   * relatively big build-time cost, but can significantly reduce the size of native code and number
   * of shared libraries pulled into the binary.
   */
  MERGED,
}
| apache-2.0 |
hawkular/hawkular-btm | tests/instrumentation/src/test/java/org/hawkular/apm/tests/client/http/NettyNoResponseHttpITest.java | 7730 | /*
* Copyright 2015-2017 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.apm.tests.client.http;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.nio.charset.Charset;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.hawkular.apm.api.model.Constants;
import org.hawkular.apm.api.model.trace.Producer;
import org.hawkular.apm.api.utils.NodeUtil;
import org.hawkular.apm.tests.common.ClientTestBase;
import org.hawkular.apm.tests.common.Wait;
import org.junit.Test;
import io.netty.buffer.ByteBuf;
import io.netty.handler.codec.http.HttpMethod;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.logging.LogLevel;
import io.reactivex.netty.protocol.http.client.HttpClient;
import io.reactivex.netty.protocol.http.client.HttpClientRequest;
import io.reactivex.netty.protocol.http.client.HttpClientResponse;
import io.reactivex.netty.protocol.http.server.HttpServer;
import rx.Observable;
/**
* @author gbrown
*/
/**
 * Verifies that the APM instrumentation records a single Producer node for
 * GET/POST/PUT requests whose server deliberately returns no response body.
 */
public class NettyNoResponseHttpITest extends ClientTestBase {
    private static final String HELLO_THERE = "Hello there";
    private static final String QUERY_1 = "name=value";
    private static final String PATH_1 = "/hello";
    private static final String PATH_2 = "/world";
    private static final String PATH_3 = "/space";
    private HttpServer<ByteBuf, ByteBuf> server;
    @Override
    public void init() {
        // Test server: rejects requests that lack the propagated APM trace id
        // header, drains any POST/PUT body, and never writes response content.
        server = HttpServer.newServer()
                .enableWireLogging(LogLevel.DEBUG)
                .start((req, resp) -> {
                    if (req.getHeader(Constants.HAWKULAR_APM_TRACEID) == null) {
                        return resp.setStatus(HttpResponseStatus.BAD_REQUEST);
                    }
                    if (req.getHttpMethod() == HttpMethod.POST
                            || req.getHttpMethod() == HttpMethod.PUT) {
                        req.getContent().subscribe(bb -> System.out.println("DATA = " + bb.toString()));
                    }
                    resp.setStatus(HttpResponseStatus.OK);
                    return resp;
                }
        );
        super.init();
    }
    @Override
    public void close() {
        server.shutdown();
        server.awaitShutdown();
        super.close();
    }
    @Test
    public void testGET() throws InterruptedException, ExecutionException, TimeoutException {
        HttpClientRequest<ByteBuf, ByteBuf> request = newClient().createGet(PATH_1 + "?" + QUERY_1);
        // The server writes no content, so the response body must be null.
        assertNull(execute(request));
        Producer producer = waitForSingleProducer();
        assertEquals(PATH_1, producer.getUri());
        assertEquals(QUERY_1, producer.getProperties(Constants.PROP_HTTP_QUERY).iterator().next().getValue());
        assertMethod(producer, "GET");
    }
    @Test
    public void testPOST() throws InterruptedException, ExecutionException, TimeoutException {
        HttpClientRequest<ByteBuf, ByteBuf> request = newClient().createPost(PATH_2);
        request.writeStringContent(Observable.just(HELLO_THERE));
        assertNull(execute(request));
        Producer producer = waitForSingleProducer();
        assertEquals(PATH_2, producer.getUri());
        assertTrue(producer.getProperties(Constants.PROP_HTTP_QUERY).isEmpty());
        assertMethod(producer, "POST");
    }
    @Test
    public void testPUT() throws InterruptedException, ExecutionException, TimeoutException {
        HttpClientRequest<ByteBuf, ByteBuf> request = newClient().createPut(PATH_3);
        request.writeStringContent(Observable.just(HELLO_THERE));
        assertNull(execute(request));
        Producer producer = waitForSingleProducer();
        assertEquals(PATH_3, producer.getUri());
        assertTrue(producer.getProperties(Constants.PROP_HTTP_QUERY).isEmpty());
        assertMethod(producer, "PUT");
    }
    /** Creates a new client bound to the in-process test server. */
    private HttpClient<ByteBuf, ByteBuf> newClient() {
        SocketAddress serverAddress = new InetSocketAddress("127.0.0.1", server.getServerPort());
        return HttpClient.newClient(serverAddress);
    }
    /**
     * Executes the request and returns its response body as a String, or null
     * when the server sent no content (waits at most 5 seconds).
     */
    private Object execute(HttpClientRequest<ByteBuf, ByteBuf> request)
            throws InterruptedException, ExecutionException, TimeoutException {
        return request
                .flatMap((HttpClientResponse<ByteBuf> resp) -> resp.getContent()
                        .map(bb -> bb.toString(Charset.defaultCharset())))
                .singleOrDefault(null).toBlocking().toFuture().get(5, TimeUnit.SECONDS);
    }
    /** Waits until exactly one trace is stored and returns its single Producer node. */
    private Producer waitForSingleProducer() {
        Wait.until(() -> getApmMockServer().getTraces().size() == 1);
        // Check stored traces (including 1 for the test client)
        assertEquals(1, getApmMockServer().getTraces().size());
        List<Producer> producers = NodeUtil.findNodes(getApmMockServer().getTraces().get(0).getNodes(), Producer.class);
        assertEquals("Expecting 1 producers", 1, producers.size());
        return producers.get(0);
    }
    /** Asserts both the operation and the http_method property of the producer. */
    private void assertMethod(Producer producer, String method) {
        assertEquals(method, producer.getOperation());
        assertEquals(method, producer.getProperties("http_method").iterator().next().getValue());
    }
}
| apache-2.0 |
jonmcewen/camel | platforms/spring-boot/components-starter/camel-mllp-starter/src/main/java/org/apache/camel/component/mllp/springboot/MllpComponentConfiguration.java | 18592 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.mllp.springboot;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.component.mllp.MllpComponent;
import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.DeprecatedConfigurationProperty;
/**
* Provides functionality required by Healthcare providers to communicate with
* other systems using the MLLP protocol.
*
* Generated by camel-package-maven-plugin - do not edit this file!
*/
// NOTE: generated by camel-package-maven-plugin; prefer regenerating over hand-editing.
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.mllp")
public class MllpComponentConfiguration
        extends
            ComponentConfigurationPropertiesCommon {

    /**
     * Set the component to log PHI data.
     */
    private Boolean logPhi = true;
    /**
     * Set the maximum number of bytes of PHI that will be logged in a log
     * entry.
     */
    private Integer logPhiMaxBytes = 5120;
    /**
     * Set the default character set to use for byte to/from String conversions.
     */
    private String defaultCharset = "ISO-8859-1";
    /**
     * Sets the default configuration to use when creating MLLP endpoints.
     */
    private MllpConfigurationNestedConfiguration configuration;
    /**
     * Whether the component should resolve property placeholders on itself when
     * starting. Only properties which are of String type can use property
     * placeholders.
     */
    private Boolean resolvePropertyPlaceholders = true;

    public Boolean getLogPhi() {
        return logPhi;
    }

    public void setLogPhi(Boolean logPhi) {
        this.logPhi = logPhi;
    }

    public Integer getLogPhiMaxBytes() {
        return logPhiMaxBytes;
    }

    public void setLogPhiMaxBytes(Integer logPhiMaxBytes) {
        this.logPhiMaxBytes = logPhiMaxBytes;
    }

    public String getDefaultCharset() {
        return defaultCharset;
    }

    public void setDefaultCharset(String defaultCharset) {
        this.defaultCharset = defaultCharset;
    }

    public MllpConfigurationNestedConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(
            MllpConfigurationNestedConfiguration configuration) {
        this.configuration = configuration;
    }

    public Boolean getResolvePropertyPlaceholders() {
        return resolvePropertyPlaceholders;
    }

    public void setResolvePropertyPlaceholders(
            Boolean resolvePropertyPlaceholders) {
        this.resolvePropertyPlaceholders = resolvePropertyPlaceholders;
    }

    // Nested properties bound from the "camel.component.mllp.configuration.*" prefix;
    // mirrors the fields of the class referenced by CAMEL_NESTED_CLASS below.
    public static class MllpConfigurationNestedConfiguration {
        public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.mllp.MllpConfiguration.class;
        /**
         * Allows for bridging the consumer to the Camel routing Error Handler,
         * which mean any exceptions occurred while the consumer is trying to
         * receive incoming messages, or the likes, will now be processed as a
         * message and handled by the routing Error Handler. If disabled, the
         * consumer will use the org.apache.camel.spi.ExceptionHandler to deal
         * with exceptions by logging them at WARN or ERROR level and ignored.
         *
         * @param bridgeErrorHandler
         */
        private Boolean bridgeErrorHandler = true;
        /**
         * Sets the exchange pattern when the consumer creates an exchange.
         *
         * @param exchangePattern
         */
        private ExchangePattern exchangePattern = ExchangePattern.InOut;
        /**
         * Sets whether synchronous processing should be strictly used (this
         * component only supports synchronous operations).
         *
         * @param synchronous
         */
        private Boolean synchronous = true;
        /**
         * Set the CamelCharsetName property on the exchange
         *
         * @param charsetName
         *            the charset
         */
        private String charsetName;
        /**
         * The maximum queue length for incoming connection indications (a
         * request to connect) is set to the backlog parameter. If a connection
         * indication arrives when the queue is full, the connection is refused.
         */
        private Integer backlog = 5;
        /**
         * TCP Server Only - The number of milliseconds to retry binding to a
         * server port
         */
        private Integer bindTimeout = 30000;
        /**
         * TCP Server Only - The number of milliseconds to wait between bind
         * attempts
         */
        private Integer bindRetryInterval = 5000;
        /**
         * Timeout (in milliseconds) while waiting for a TCP connection
         * <p/>
         * TCP Server Only
         *
         * @param acceptTimeout
         *            timeout in milliseconds
         */
        private Integer acceptTimeout = 60000;
        /**
         * TCP Server Only - Allow the endpoint to start before the TCP
         * ServerSocket is bound. In some environments, it may be desirable to
         * allow the endpoint to start before the TCP ServerSocket is bound.
         *
         * @param lenientBind
         *            if true, the ServerSocket will be bound asynchronously;
         *            otherwise the ServerSocket will be bound synchronously.
         */
        private Boolean lenientBind = false;
        /**
         * Timeout (in milliseconds) for establishing for a TCP connection
         * <p/>
         * TCP Client only
         *
         * @param connectTimeout
         *            timeout in milliseconds
         */
        private Integer connectTimeout = 30000;
        /**
         * The SO_TIMEOUT value (in milliseconds) used when waiting for the
         * start of an MLLP frame
         *
         * @param receiveTimeout
         *            timeout in milliseconds
         */
        private Integer receiveTimeout = 15000;
        /**
         * The maximum number of concurrent MLLP Consumer connections that will
         * be allowed. If a new connection is received and the maximum is number
         * are already established, the new connection will be reset
         * immediately.
         *
         * @param maxConcurrentConsumers
         *            the maximum number of concurrent consumer connections
         *            allowed
         */
        private Integer maxConcurrentConsumers = 5;
        /**
         * The maximum number of timeouts (specified by receiveTimeout) allowed
         * before the TCP Connection will be reset.
         *
         * @param maxReceiveTimeouts
         *            maximum number of receiveTimeouts
         * @deprecated Use the idleTimeout URI parameter. For backward
         *             compibility, setting this parameter will result in an
         *             idle timeout of maxReceiveTimeouts * receiveTimeout. If
         *             idleTimeout is also specified, this parameter will be
         *             ignored.
         */
        @Deprecated
        private Integer maxReceiveTimeouts;
        /**
         * The approximate idle time allowed before the Client TCP Connection
         * will be reset. A null value or a value less than or equal to zero
         * will disable the idle timeout.
         *
         * @param idleTimeout
         *            timeout in milliseconds
         */
        private Integer idleTimeout;
        /**
         * The SO_TIMEOUT value (in milliseconds) used after the start of an
         * MLLP frame has been received
         *
         * @param readTimeout
         *            timeout in milliseconds
         */
        private Integer readTimeout = 5000;
        /**
         * Enable/disable the SO_KEEPALIVE socket option.
         *
         * @param keepAlive
         *            enable SO_KEEPALIVE when true; disable SO_KEEPALIVE when
         *            false; use system default when null
         */
        private Boolean keepAlive = true;
        /**
         * Enable/disable the TCP_NODELAY socket option.
         *
         * @param tcpNoDelay
         *            enable TCP_NODELAY when true; disable TCP_NODELAY when
         *            false; use system default when null
         */
        private Boolean tcpNoDelay = true;
        /**
         * Enable/disable the SO_REUSEADDR socket option.
         *
         * @param reuseAddress
         *            enable SO_REUSEADDR when true; disable SO_REUSEADDR when
         *            false; use system default when null
         */
        private Boolean reuseAddress = false;
        /**
         * Sets the SO_RCVBUF option to the specified value (in bytes)
         *
         * @param receiveBufferSize
         *            the SO_RCVBUF option value. If null, the system default is
         *            used
         */
        private Integer receiveBufferSize = 8192;
        /**
         * Sets the SO_SNDBUF option to the specified value (in bytes)
         *
         * @param sendBufferSize
         *            the SO_SNDBUF option value. If null, the system default is
         *            used
         */
        private Integer sendBufferSize = 8192;
        /**
         * Enable/Disable the automatic generation of a MLLP Acknowledgement
         * MLLP Consumers only
         *
         * @param autoAck
         *            enabled if true, otherwise disabled
         */
        private Boolean autoAck = true;
        /**
         * Enable/Disable the automatic generation of message headers from the
         * HL7 Message MLLP Consumers only
         *
         * @param hl7Headers
         *            enabled if true, otherwise disabled
         */
        private Boolean hl7Headers = true;
        /**
         * Enable/Disable strict compliance to the MLLP standard. The MLLP
         * standard specifies [START_OF_BLOCK]hl7
         * payload[END_OF_BLOCK][END_OF_DATA], however, some systems do not send
         * the final END_OF_DATA byte. This setting controls whether or not the
         * final END_OF_DATA byte is required or optional.
         *
         * @param requireEndOfData
         *            the trailing END_OF_DATA byte is required if true;
         *            optional otherwise
         */
        private Boolean requireEndOfData = true;
        /**
         * Enable/Disable converting the payload to a String. If enabled, HL7
         * Payloads received from external systems will be validated converted
         * to a String. If the charsetName property is set, that character set
         * will be used for the conversion. If the charsetName property is not
         * set, the value of MSH-18 will be used to determine th appropriate
         * character set. If MSH-18 is not set, then the default ISO-8859-1
         * character set will be use.
         *
         * @param stringPayload
         *            enabled if true, otherwise disabled
         */
        private Boolean stringPayload = true;
        /**
         * Enable/Disable the validation of HL7 Payloads If enabled, HL7
         * Payloads received from external systems will be validated (see
         * Hl7Util.generateInvalidPayloadExceptionMessage for details on the
         * validation). If and invalid payload is detected, a
         * MllpInvalidMessageException (for consumers) or a
         * MllpInvalidAcknowledgementException will be thrown.
         *
         * @param validatePayload
         *            enabled if true, otherwise disabled
         */
        private Boolean validatePayload = false;
        /**
         * Enable/Disable the buffering of HL7 payloads before writing to the
         * socket.
         *
         * @deprecated the parameter will be ignored
         * @param bufferWrites
         *            enabled if true, otherwise disabled
         */
        @Deprecated
        private Boolean bufferWrites = false;

        public Boolean getBridgeErrorHandler() {
            return bridgeErrorHandler;
        }

        public void setBridgeErrorHandler(Boolean bridgeErrorHandler) {
            this.bridgeErrorHandler = bridgeErrorHandler;
        }

        public ExchangePattern getExchangePattern() {
            return exchangePattern;
        }

        public void setExchangePattern(ExchangePattern exchangePattern) {
            this.exchangePattern = exchangePattern;
        }

        public Boolean getSynchronous() {
            return synchronous;
        }

        public void setSynchronous(Boolean synchronous) {
            this.synchronous = synchronous;
        }

        public String getCharsetName() {
            return charsetName;
        }

        public void setCharsetName(String charsetName) {
            this.charsetName = charsetName;
        }

        public Integer getBacklog() {
            return backlog;
        }

        public void setBacklog(Integer backlog) {
            this.backlog = backlog;
        }

        public Integer getBindTimeout() {
            return bindTimeout;
        }

        public void setBindTimeout(Integer bindTimeout) {
            this.bindTimeout = bindTimeout;
        }

        public Integer getBindRetryInterval() {
            return bindRetryInterval;
        }

        public void setBindRetryInterval(Integer bindRetryInterval) {
            this.bindRetryInterval = bindRetryInterval;
        }

        public Integer getAcceptTimeout() {
            return acceptTimeout;
        }

        public void setAcceptTimeout(Integer acceptTimeout) {
            this.acceptTimeout = acceptTimeout;
        }

        public Boolean getLenientBind() {
            return lenientBind;
        }

        public void setLenientBind(Boolean lenientBind) {
            this.lenientBind = lenientBind;
        }

        public Integer getConnectTimeout() {
            return connectTimeout;
        }

        public void setConnectTimeout(Integer connectTimeout) {
            this.connectTimeout = connectTimeout;
        }

        public Integer getReceiveTimeout() {
            return receiveTimeout;
        }

        public void setReceiveTimeout(Integer receiveTimeout) {
            this.receiveTimeout = receiveTimeout;
        }

        public Integer getMaxConcurrentConsumers() {
            return maxConcurrentConsumers;
        }

        public void setMaxConcurrentConsumers(Integer maxConcurrentConsumers) {
            this.maxConcurrentConsumers = maxConcurrentConsumers;
        }

        @Deprecated
        @DeprecatedConfigurationProperty
        public Integer getMaxReceiveTimeouts() {
            return maxReceiveTimeouts;
        }

        @Deprecated
        public void setMaxReceiveTimeouts(Integer maxReceiveTimeouts) {
            this.maxReceiveTimeouts = maxReceiveTimeouts;
        }

        public Integer getIdleTimeout() {
            return idleTimeout;
        }

        public void setIdleTimeout(Integer idleTimeout) {
            this.idleTimeout = idleTimeout;
        }

        public Integer getReadTimeout() {
            return readTimeout;
        }

        public void setReadTimeout(Integer readTimeout) {
            this.readTimeout = readTimeout;
        }

        public Boolean getKeepAlive() {
            return keepAlive;
        }

        public void setKeepAlive(Boolean keepAlive) {
            this.keepAlive = keepAlive;
        }

        public Boolean getTcpNoDelay() {
            return tcpNoDelay;
        }

        public void setTcpNoDelay(Boolean tcpNoDelay) {
            this.tcpNoDelay = tcpNoDelay;
        }

        public Boolean getReuseAddress() {
            return reuseAddress;
        }

        public void setReuseAddress(Boolean reuseAddress) {
            this.reuseAddress = reuseAddress;
        }

        public Integer getReceiveBufferSize() {
            return receiveBufferSize;
        }

        public void setReceiveBufferSize(Integer receiveBufferSize) {
            this.receiveBufferSize = receiveBufferSize;
        }

        public Integer getSendBufferSize() {
            return sendBufferSize;
        }

        public void setSendBufferSize(Integer sendBufferSize) {
            this.sendBufferSize = sendBufferSize;
        }

        public Boolean getAutoAck() {
            return autoAck;
        }

        public void setAutoAck(Boolean autoAck) {
            this.autoAck = autoAck;
        }

        public Boolean getHl7Headers() {
            return hl7Headers;
        }

        public void setHl7Headers(Boolean hl7Headers) {
            this.hl7Headers = hl7Headers;
        }

        public Boolean getRequireEndOfData() {
            return requireEndOfData;
        }

        public void setRequireEndOfData(Boolean requireEndOfData) {
            this.requireEndOfData = requireEndOfData;
        }

        public Boolean getStringPayload() {
            return stringPayload;
        }

        public void setStringPayload(Boolean stringPayload) {
            this.stringPayload = stringPayload;
        }

        public Boolean getValidatePayload() {
            return validatePayload;
        }

        public void setValidatePayload(Boolean validatePayload) {
            this.validatePayload = validatePayload;
        }

        @Deprecated
        @DeprecatedConfigurationProperty
        public Boolean getBufferWrites() {
            return bufferWrites;
        }

        @Deprecated
        public void setBufferWrites(Boolean bufferWrites) {
            this.bufferWrites = bufferWrites;
        }
    }
}
swarwick/moquette | parser_commons/src/main/java/org/eclipse/moquette/proto/messages/PubAckMessage.java | 853 | /*
* Copyright (c) 2012-2014 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package org.eclipse.moquette.proto.messages;
/**
 * Placeholder for PUBACK message.
 *
 * Carries no payload of its own; the message ID is inherited from
 * {@link MessageIDMessage}.
 *
 * @author andrea
 */
public class PubAckMessage extends MessageIDMessage {

    public PubAckMessage() {
        // Tag this instance with the fixed PUBACK message type.
        m_messageType = AbstractMessage.PUBACK;
    }
}
| apache-2.0 |
facebook/buck | src/com/facebook/buck/util/environment/MacNetworkConfiguration.java | 5995 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.util.environment;
import com.facebook.buck.core.util.log.Logger;
import com.facebook.buck.util.ForwardingProcessListener;
import com.facebook.buck.util.ListeningProcessExecutor;
import com.facebook.buck.util.ProcessExecutorParams;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/** Mac OS X implementation for finding likely network states for diagnostic purposes. */
public class MacNetworkConfiguration {
  private static final Logger LOG = Logger.get(MacNetworkConfiguration.class);
  private static final long COMMAND_TIMEOUT_MS = 1000L;

  // Utility class, do not instantiate.
  private MacNetworkConfiguration() {}

  /**
   * Returns the likely active network; eg 'Wired', 'WiFi:<ssid>'.
   *
   * <p>Probes devices in their service-priority order and reports the first physically
   * connected one; falls back to {@code UNKNOWN} on interruption or when nothing is active.
   */
  public static Network getLikelyActiveNetwork() {
    try {
      for (String device : getDevicesByServiceOrder()) {
        if (isDeviceActive(device)) {
          Optional<String> ssid = getDeviceSSID(device);
          if (ssid.isPresent()) {
            return new Network(NetworkMedium.WIRELESS, ssid);
          }
          return new Network(NetworkMedium.WIRED);
        }
      }
      return new Network(NetworkMedium.UNKNOWN);
    } catch (InterruptedException e) {
      // Restore the interrupt flag for the caller; report the network as unknown.
      Thread.currentThread().interrupt();
      return new Network(NetworkMedium.UNKNOWN);
    }
  }

  // Patterns are compiled once. MULTILINE makes '$' match at every line terminator, so the
  // anchored patterns below do not depend on the interesting line being last in the output.
  static final Pattern devicePattern = Pattern.compile("Device: ([^)]*)\\)");

  /** Returns a list of the network devices in order of their service priorities. */
  private static List<String> getDevicesByServiceOrder() throws InterruptedException {
    /*
    $ networksetup -listnetworkserviceorder
    An asterisk (*) denotes that a network service is disabled.
    (1) Display Ethernet (en6)
    (Hardware Port: Display Ethernet, Device: en6)
    (2) Wi-Fi
    (Hardware Port: Wi-Fi, Device: en0)
    */
    LOG.debug("Determine network service order and extract device names");
    String serviceOrderOutput = runNetworkSetupCommand("listnetworkserviceorder");
    Matcher matcher = devicePattern.matcher(serviceOrderOutput);
    List<String> devices = new ArrayList<>();
    while (matcher.find()) {
      devices.add(matcher.group(1));
    }
    return devices;
  }

  static final Pattern activePattern = Pattern.compile("Active: (.*)$", Pattern.MULTILINE);

  /** Indicates whether device is active (i.e. physically connected). */
  private static boolean isDeviceActive(String device) throws InterruptedException {
    /*
    $ networksetup -getMedia "en0"
    Current: autoselect
    Active: autoselect
    $ networksetup -getMedia "en6"
    Current: autoselect
    Active: none
    */
    LOG.debug("Determine active state of media for device");
    String mediaOutput = runNetworkSetupCommand("getMedia", device);
    Matcher matcher = activePattern.matcher(mediaOutput);
    return (matcher.find() && !matcher.group(1).equals("none"));
  }

  static final Pattern ssidPattern = Pattern.compile("Current Wi-Fi Network: (.*)$", Pattern.MULTILINE);

  /** Gets the SSID of a device (sadly the most definitive way to determine wired vs wireless). */
  private static Optional<String> getDeviceSSID(String device) throws InterruptedException {
    /*
    $ networksetup -getairportnetwork "en0"
    Current Wi-Fi Network: lighthouse
    -- or
    $ networksetup -getairportnetwork "en0"
    You are not associated with an AirPort network.
    Wi-Fi power is currently off.
    $ networksetup -getairportnetwork "en6"
    en6 is not a Wi-Fi interface.
    ** Error: Error obtaining wireless information.
    */
    LOG.debug("Determine WiFi SSID of device");
    String mediaOutput = runNetworkSetupCommand("getairportnetwork", device);
    Matcher matcher = ssidPattern.matcher(mediaOutput);
    if (matcher.find()) {
      return Optional.of(matcher.group(1));
    }
    return Optional.empty();
  }

  /** Convenience overload for sub-commands that take no argument. */
  private static String runNetworkSetupCommand(String subCommand) throws InterruptedException {
    return runNetworkSetupCommand(subCommand, "");
  }

  /**
   * Naive `networksetup` invocation; returns non-empty string of stdout if all went well.
   *
   * @param subCommand networksetup sub-command, invoked as {@code -subCommand}
   * @param argument optional trailing argument (typically a device name); may be empty
   * @return captured stdout, or the empty string on timeout, non-zero exit, or I/O failure
   */
  private static String runNetworkSetupCommand(String subCommand, String argument)
      throws InterruptedException {
    ListeningProcessExecutor executor = new ListeningProcessExecutor();
    ByteArrayOutputStream stdout = new ByteArrayOutputStream();
    ByteArrayOutputStream stderr = new ByteArrayOutputStream();
    ForwardingProcessListener listener = new ForwardingProcessListener(stdout, stderr);
    ProcessExecutorParams params =
        ProcessExecutorParams.builder()
            .addCommand("networksetup")
            .addCommand(String.format("-%s", subCommand))
            .addCommand(argument)
            .build();
    ListeningProcessExecutor.LaunchedProcess process = null;
    try {
      process = executor.launchProcess(params, listener);
      if (executor.waitForProcess(process, COMMAND_TIMEOUT_MS, TimeUnit.MILLISECONDS) != 0) {
        // Surface the failure details at debug level instead of dropping stderr silently.
        LOG.debug("networksetup -%s failed or timed out; stderr: %s", subCommand, stderr);
        return "";
      }
      return stdout.toString();
    } catch (IOException e) {
      LOG.debug(e, "Exception while running networksetup command");
      return "";
    } finally {
      if (process != null) {
        executor.destroyProcess(process, /* force */ true);
      }
    }
  }
}
| apache-2.0 |
ascherbakoff/ignite | modules/indexing/src/main/java/org/apache/ignite/internal/processors/query/h2/sql/GridSqlInsert.java | 4122 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.h2.sql;
import java.util.List;
import org.h2.util.StatementBuilder;
import static org.apache.ignite.internal.processors.query.QueryUtils.delimeter;
/** AST node for a SQL {@code INSERT} statement; source is either a VALUES list or a subquery. */
public class GridSqlInsert extends GridSqlStatement {
    /** Target table expression the rows are inserted into. */
    private GridSqlElement into;

    /** Columns listed in the INSERT statement, in declaration order. */
    private GridSqlColumn[] cols;

    /** Rows of the VALUES clause; {@code null} or empty when the values come from {@link #qry}. */
    private List<GridSqlElement[]> rows;

    /** Insert subquery. */
    private GridSqlQuery qry;

    /**
     * Not supported, introduced for clarity and correct SQL generation.
     * @see org.h2.command.dml.Insert#insertFromSelect
     */
    private boolean direct;

    /**
     * Not supported, introduced for clarity and correct SQL generation.
     * @see org.h2.command.dml.Insert#sortedInsertMode
     */
    private boolean sorted;

    /** {@inheritDoc} */
    @Override public String getSQL() {
        char delim = delimeter();
        StatementBuilder buff = new StatementBuilder(explain() ? "EXPLAIN " : "");
        buff.append("INSERT")
            .append(delim).append("INTO ")
            .append(into.getSQL())
            .append('(');

        for (GridSqlColumn col : cols) {
            buff.appendExceptFirst(",");
            buff.append(delim)
                .append(col.getSQL());
        }

        buff.append(delim).append(')').append(delim);

        if (direct)
            buff.append("DIRECT ");

        if (sorted)
            buff.append("SORTED ");

        // 'rows' is never assigned for insert-from-subquery statements, so it must be
        // null-checked here to avoid an NPE before falling through to the subquery branch.
        if (rows != null && !rows.isEmpty()) {
            buff.append("VALUES").append(delim);

            StatementBuilder valuesBuff = new StatementBuilder();

            for (GridSqlElement[] row : rows()) {
                valuesBuff.appendExceptFirst("," + delim);

                StatementBuilder rowBuff = new StatementBuilder("(");

                for (GridSqlElement e : row) {
                    rowBuff.appendExceptFirst(", ");
                    // A null element means the column's declared DEFAULT should be used.
                    rowBuff.append(e != null ? e.getSQL() : "DEFAULT");
                }

                rowBuff.append(')');
                valuesBuff.append(rowBuff.toString());
            }

            buff.append(valuesBuff.toString());
        }
        else
            buff.append(delim)
                .append(qry.getSQL());

        return buff.toString();
    }

    /** @return Target table expression. */
    public GridSqlElement into() {
        return into;
    }

    /**
     * @param from Target table expression.
     * @return {@code this} for chaining.
     */
    public GridSqlInsert into(GridSqlElement from) {
        this.into = from;

        return this;
    }

    /** @return Rows of the VALUES clause. */
    public List<GridSqlElement[]> rows() {
        return rows;
    }

    /**
     * @param rows Rows of the VALUES clause (must not be {@code null}).
     * @return {@code this} for chaining.
     */
    public GridSqlInsert rows(List<GridSqlElement[]> rows) {
        assert rows != null;

        this.rows = rows;

        return this;
    }

    /** @return Insert subquery, if the statement is INSERT ... SELECT. */
    public GridSqlQuery query() {
        return qry;
    }

    /**
     * @param qry Insert subquery.
     * @return {@code this} for chaining.
     */
    public GridSqlInsert query(GridSqlQuery qry) {
        this.qry = qry;

        return this;
    }

    /** @return Declared column list. */
    public GridSqlColumn[] columns() {
        return cols;
    }

    /**
     * @param cols Declared column list.
     * @return {@code this} for chaining.
     */
    public GridSqlInsert columns(GridSqlColumn[] cols) {
        this.cols = cols;

        return this;
    }

    /**
     * @param direct DIRECT flag (not supported; kept for correct SQL generation).
     * @return {@code this} for chaining.
     */
    public GridSqlInsert direct(boolean direct) {
        this.direct = direct;

        return this;
    }

    /**
     * @param sorted SORTED flag (not supported; kept for correct SQL generation).
     * @return {@code this} for chaining.
     */
    public GridSqlInsert sorted(boolean sorted) {
        this.sorted = sorted;

        return this;
    }
}
| apache-2.0 |
LegNeato/buck | src-gen/com/facebook/buck/distributed/thrift/ObtainAllAvailableCapacityResponse.java | 13422 | /**
* Autogenerated by Thrift Compiler (0.11.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package com.facebook.buck.distributed.thrift;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.11.0)")
public class ObtainAllAvailableCapacityResponse implements org.apache.thrift.TBase<ObtainAllAvailableCapacityResponse, ObtainAllAvailableCapacityResponse._Fields>, java.io.Serializable, Cloneable, Comparable<ObtainAllAvailableCapacityResponse> {
  // Thrift wire metadata: struct descriptor plus the single i32 field at id 1.
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ObtainAllAvailableCapacityResponse");
  private static final org.apache.thrift.protocol.TField OBTAINED_CAPACITY_FIELD_DESC = new org.apache.thrift.protocol.TField("obtainedCapacity", org.apache.thrift.protocol.TType.I32, (short)1);

  // Factories for the two serialization encodings Thrift supports for this struct.
  private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new ObtainAllAvailableCapacityResponseStandardSchemeFactory();
  private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new ObtainAllAvailableCapacityResponseTupleSchemeFactory();

  public int obtainedCapacity; // optional
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    OBTAINED_CAPACITY((short)1, "obtainedCapacity");

    // Name-to-constant lookup table, populated once when the enum class loads.
    private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>();

    static {
      for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // OBTAINED_CAPACITY
          return OBTAINED_CAPACITY;
        default:
          return null;
      }
    }

    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }

    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(java.lang.String name) {
      return byName.get(name);
    }

    private final short _thriftId;
    private final java.lang.String _fieldName;

    _Fields(short thriftId, java.lang.String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }

    public short getThriftFieldId() {
      return _thriftId;
    }

    public java.lang.String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  private static final int __OBTAINEDCAPACITY_ISSET_ID = 0;
  // Bitfield tracking which primitive fields have been explicitly assigned.
  private byte __isset_bitfield = 0;
  private static final _Fields optionals[] = {_Fields.OBTAINED_CAPACITY};
  public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    // Build the immutable field-metadata map and register it with the Thrift runtime.
    java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.OBTAINED_CAPACITY, new org.apache.thrift.meta_data.FieldMetaData("obtainedCapacity", org.apache.thrift.TFieldRequirementType.OPTIONAL,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
    metaDataMap = java.util.Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ObtainAllAvailableCapacityResponse.class, metaDataMap);
  }
  public ObtainAllAvailableCapacityResponse() {
  }

  /**
   * Performs a deep copy on <i>other</i>.
   */
  public ObtainAllAvailableCapacityResponse(ObtainAllAvailableCapacityResponse other) {
    __isset_bitfield = other.__isset_bitfield;
    this.obtainedCapacity = other.obtainedCapacity;
  }

  public ObtainAllAvailableCapacityResponse deepCopy() {
    return new ObtainAllAvailableCapacityResponse(this);
  }

  // Resets the struct to its freshly-constructed state (field unset, value zeroed).
  @Override
  public void clear() {
    setObtainedCapacityIsSet(false);
    this.obtainedCapacity = 0;
  }
  public int getObtainedCapacity() {
    return this.obtainedCapacity;
  }

  // Fluent setter; also marks the field as set in the isset bitfield.
  public ObtainAllAvailableCapacityResponse setObtainedCapacity(int obtainedCapacity) {
    this.obtainedCapacity = obtainedCapacity;
    setObtainedCapacityIsSet(true);
    return this;
  }

  public void unsetObtainedCapacity() {
    __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __OBTAINEDCAPACITY_ISSET_ID);
  }

  /** Returns true if field obtainedCapacity is set (has been assigned a value) and false otherwise */
  public boolean isSetObtainedCapacity() {
    return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __OBTAINEDCAPACITY_ISSET_ID);
  }

  public void setObtainedCapacityIsSet(boolean value) {
    __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __OBTAINEDCAPACITY_ISSET_ID, value);
  }
  // Generic field mutator used by the Thrift runtime; null value unsets the field.
  public void setFieldValue(_Fields field, java.lang.Object value) {
    switch (field) {
    case OBTAINED_CAPACITY:
      if (value == null) {
        unsetObtainedCapacity();
      } else {
        setObtainedCapacity((java.lang.Integer)value);
      }
      break;

    }
  }

  public java.lang.Object getFieldValue(_Fields field) {
    switch (field) {
    case OBTAINED_CAPACITY:
      return getObtainedCapacity();

    }
    throw new java.lang.IllegalStateException();
  }

  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new java.lang.IllegalArgumentException();
    }

    switch (field) {
    case OBTAINED_CAPACITY:
      return isSetObtainedCapacity();
    }
    throw new java.lang.IllegalStateException();
  }
  @Override
  public boolean equals(java.lang.Object that) {
    if (that == null)
      return false;
    if (that instanceof ObtainAllAvailableCapacityResponse)
      return this.equals((ObtainAllAvailableCapacityResponse)that);
    return false;
  }

  // Two instances are equal when the optional field's presence and value both agree.
  public boolean equals(ObtainAllAvailableCapacityResponse that) {
    if (that == null)
      return false;
    if (this == that)
      return true;

    boolean this_present_obtainedCapacity = true && this.isSetObtainedCapacity();
    boolean that_present_obtainedCapacity = true && that.isSetObtainedCapacity();
    if (this_present_obtainedCapacity || that_present_obtainedCapacity) {
      if (!(this_present_obtainedCapacity && that_present_obtainedCapacity))
        return false;
      if (this.obtainedCapacity != that.obtainedCapacity)
        return false;
    }

    return true;
  }

  // Hash incorporates set-ness as well as the value, mirroring equals().
  @Override
  public int hashCode() {
    int hashCode = 1;

    hashCode = hashCode * 8191 + ((isSetObtainedCapacity()) ? 131071 : 524287);
    if (isSetObtainedCapacity())
      hashCode = hashCode * 8191 + obtainedCapacity;

    return hashCode;
  }
  /** Orders by class name first, then by the isset flag, then by the field value. */
  @Override
  public int compareTo(ObtainAllAvailableCapacityResponse other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = java.lang.Boolean.valueOf(isSetObtainedCapacity()).compareTo(other.isSetObtainedCapacity());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetObtainedCapacity()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.obtainedCapacity, other.obtainedCapacity);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  /** Maps a wire-level Thrift field id to its {@code _Fields} constant. */
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  /** Deserializes this struct, using the scheme matching the protocol's type. */
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    scheme(iprot).read(iprot, this);
  }
  /** Serializes this struct, using the scheme matching the protocol's type. */
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    scheme(oprot).write(oprot, this);
  }
  /** Renders as {@code ObtainAllAvailableCapacityResponse(obtainedCapacity:N)}; the field is omitted when unset. */
  @Override
  public java.lang.String toString() {
    java.lang.StringBuilder sb = new java.lang.StringBuilder("ObtainAllAvailableCapacityResponse(");
    boolean first = true;
    if (isSetObtainedCapacity()) {
      sb.append("obtainedCapacity:");
      sb.append(this.obtainedCapacity);
      first = false;
    }
    sb.append(")");
    return sb.toString();
  }
  /** Validates required fields and sub-structs; this struct has none, so this is a no-op. */
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  /** Java serialization hook: delegates to Thrift compact-protocol serialization. */
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  /** Java deserialization hook: resets the isset bitfield, then reads via the Thrift compact protocol. */
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  /** Factory producing the field-tagged (standard) serialization scheme. */
  private static class ObtainAllAvailableCapacityResponseStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public ObtainAllAvailableCapacityResponseStandardScheme getScheme() {
      return new ObtainAllAvailableCapacityResponseStandardScheme();
    }
  }
  /**
   * Field-tagged wire format: every field is written with its id and type,
   * so readers can skip fields they do not know.
   */
  private static class ObtainAllAvailableCapacityResponseStandardScheme extends org.apache.thrift.scheme.StandardScheme<ObtainAllAvailableCapacityResponse> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, ObtainAllAvailableCapacityResponse struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      // Read field headers until the STOP marker.
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // OBTAINED_CAPACITY
            if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
              struct.obtainedCapacity = iprot.readI32();
              struct.setObtainedCapacityIsSet(true);
            } else {
              // Wrong type for a known id: skip the value.
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, ObtainAllAvailableCapacityResponse struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      // Optional field: only written when set.
      if (struct.isSetObtainedCapacity()) {
        oprot.writeFieldBegin(OBTAINED_CAPACITY_FIELD_DESC);
        oprot.writeI32(struct.obtainedCapacity);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  /** Factory producing the compact (tuple) serialization scheme. */
  private static class ObtainAllAvailableCapacityResponseTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory {
    public ObtainAllAvailableCapacityResponseTupleScheme getScheme() {
      return new ObtainAllAvailableCapacityResponseTupleScheme();
    }
  }
  /**
   * Compact wire format: a leading bitset records which optional fields
   * follow; set fields are then written in order without field tags.
   */
  private static class ObtainAllAvailableCapacityResponseTupleScheme extends org.apache.thrift.scheme.TupleScheme<ObtainAllAvailableCapacityResponse> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, ObtainAllAvailableCapacityResponse struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet optionals = new java.util.BitSet();
      if (struct.isSetObtainedCapacity()) {
        optionals.set(0);
      }
      oprot.writeBitSet(optionals, 1);
      if (struct.isSetObtainedCapacity()) {
        oprot.writeI32(struct.obtainedCapacity);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, ObtainAllAvailableCapacityResponse struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot;
      java.util.BitSet incoming = iprot.readBitSet(1);
      // Bit 0 corresponds to obtainedCapacity.
      if (incoming.get(0)) {
        struct.obtainedCapacity = iprot.readI32();
        struct.setObtainedCapacityIsSet(true);
      }
    }
  }
  /** Picks the standard or tuple scheme factory based on the protocol's declared scheme. */
  private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) {
    return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme();
  }
}
| apache-2.0 |
vega113/incubator-wave | wave/src/test/java/com/google/wave/api/BlipIteratorRobotTest.java | 5331 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.google.wave.api;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.google.wave.api.BlipIterator.ElementIterator;
import com.google.wave.api.BlipIterator.SingleshotIterator;
import com.google.wave.api.BlipIterator.TextIterator;
import junit.framework.TestCase;
import org.waveprotocol.wave.model.id.WaveId;
import org.waveprotocol.wave.model.id.WaveletId;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
/**
* Test cases for {@link BlipIterator}.
*/
public class BlipIteratorRobotTest extends TestCase {
  // Wavelet mock shared by every test; the iterators only need its operation
  // queue and the wave/wavelet ids.
  private Wavelet wavelet;
  @Override
  protected void setUp() throws Exception {
    wavelet = mock(Wavelet.class);
    when(wavelet.getOperationQueue()).thenReturn(new OperationQueue());
    when(wavelet.getWaveId()).thenReturn(WaveId.of("example.com", "wave1"));
    when(wavelet.getWaveletId()).thenReturn(WaveletId.of("example.com", "wavelet1"));
  }
  // A singleshot iterator yields exactly the one range it was constructed with.
  public void testSingleshotIterator() throws Exception {
    Blip blip = new Blip("blip1", "\n1 1 1", null, null, wavelet);
    SingleshotIterator iterator = new BlipIterator.SingleshotIterator(blip, 0, 1);
    assertTrue(iterator.hasNext());
    Range range = iterator.next();
    assertEquals(0, range.getStart());
    assertEquals(1, range.getEnd());
    assertFalse(iterator.hasNext());
  }
  // In "\n1 1 1" the text iterator finds "1" at offsets 1, 3 and 5.
  public void testTextIteratorWithoutShift() {
    Blip blip = new Blip("blip1", "\n1 1 1", null, null, wavelet);
    TextIterator iterator = new BlipIterator.TextIterator(blip, "1", -1);
    for (int i = 0; i < 3; ++i) {
      assertTrue(iterator.hasNext());
      Range range = iterator.next();
      assertNotNull(range);
      assertEquals(i * 2 + 1, range.getStart());
      assertEquals(i * 2 + 2, range.getEnd());
    }
    assertFalse(iterator.hasNext());
  }
  // Deleting each match and shifting by -1 keeps the iterator consistent with
  // the shrinking content; only the separators remain afterwards.
  public void testTextIteratorDeletingMatches() {
    Blip blip = new Blip("blip1", "\n1 1 1", null, null, wavelet);
    TextIterator iterator = new BlipIterator.TextIterator(blip, "1", -1);
    for (int i = 0; i < 3; ++i) {
      assertTrue(iterator.hasNext());
      Range range = iterator.next();
      // Each deletion moves the next match one position closer.
      assertEquals(i + 1, range.getStart());
      blip.setContent(blip.getContent().substring(0, range.getStart()) +
          blip.getContent().substring(range.getEnd()));
      iterator.shift(-1);
    }
    assertFalse(iterator.hasNext());
    assertEquals("\n  ", blip.getContent());
  }
  // Inserting text right after each match and shifting past the insertion must
  // not make the iterator re-match the inserted copies.
  public void testTextIteratorShiftInsertAfter() {
    Blip blip = new Blip("blip1", "\nfoofoofoo", null, null, wavelet);
    TextIterator iterator = new BlipIterator.TextIterator(blip, "foo", -1);
    for (int i = 0; i < 3; ++i) {
      assertTrue(iterator.hasNext());
      Range range = iterator.next();
      assertEquals(i * 6 + 1, range.getStart());
      blip.setContent(blip.getContent().substring(0, range.getEnd()) + "foo" +
          blip.getContent().substring(range.getEnd()));
      iterator.shift(range.getEnd() - range.getStart() + 2);
    }
    assertFalse(iterator.hasNext());
    assertEquals("\nfoofoofoofoofoofoo", blip.getContent());
  }
  // The element iterator honours both the element type and the property
  // restrictions: only the two gadgets with the matching url are hit.
  public void testElementIterator() {
    Element element1 = new Gadget("http://www.google.com/gadget.xml");
    Element element2 = new Image("attachment1", "the coolest photo");
    Element element3 = new Gadget("http://www.google.com/foo.xml");
    Element element4 = new Gadget("http://www.google.com/gadget.xml");
    SortedMap<Integer, Element> elements = new TreeMap<Integer, Element>();
    elements.put(1, element1);
    elements.put(2, element2);
    elements.put(4, element3);
    elements.put(5, element4);
    Blip blip = new Blip("blip1", Collections.<String>emptyList(), "\n a ",
        Collections.<String>emptyList(), null, -1, -1, null, null, new ArrayList<Annotation>(), elements,
        new ArrayList<String>(), wavelet);
    Map<String, String> restrictions = new HashMap<String, String>();
    restrictions.put("url", "http://www.google.com/gadget.xml");
    ElementIterator iterator = new BlipIterator.ElementIterator(blip, ElementType.GADGET,
        restrictions, -1);
    List<Range> hits = new ArrayList<Range>();
    while (iterator.hasNext()) {
      hits.add(iterator.next());
    }
    assertEquals(2, hits.size());
    assertEquals(1, hits.get(0).getStart());
    assertEquals(2, hits.get(0).getEnd());
    assertEquals(5, hits.get(1).getStart());
    assertEquals(6, hits.get(1).getEnd());
  }
}
| apache-2.0 |
besom/bbossgroups-mvn | bboss_security/src/main/java/org/frameworkset/security/session/domain/CrossDomain.java | 4590 | package org.frameworkset.security.session.domain;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.frameworkset.util.SimpleStringUtil;
import com.frameworkset.util.StringUtil;
public class CrossDomain {
	/** Root domain shared by all applications that participate in cross-domain session sharing. */
	private String rootDomain;
	/**
	 * Use the {@link #rootDomain} property instead.
	 */
	@Deprecated
	private String domain;
	/** Applications registered under this domain. */
	private List<App> domainApps;
	/**
	 * Cookie path name(s) used for session sharing; multiple application
	 * paths are separated by commas.
	 */
	private String path;
	private transient List<String> _paths;
	/**
	 * Attributes that live in the shared session area, separated by commas.
	 * Any attribute not contained in this list is application specific.
	 */
	private String shareSessionAttrs;
	private transient List<String> _shareSessionAttrs;

	public CrossDomain() {
	}

	public List<String> get_shareSessionAttrs() {
		return _shareSessionAttrs;
	}

	/**
	 * Parses the comma-separated configuration properties and assigns an
	 * attribute namespace and UUID to every registered application. Must be
	 * called once after the properties have been injected.
	 */
	public void init()
	{
		if (SimpleStringUtil.isNotEmpty(path))
		{
			this._paths = Arrays.asList(path.split(","));
		}
		// Fall back to the deprecated 'domain' property.
		if (SimpleStringUtil.isEmpty(rootDomain))
			this.rootDomain = this.domain;
		if (SimpleStringUtil.isNotEmpty(this.shareSessionAttrs))
		{
			this._shareSessionAttrs = Arrays.asList(shareSessionAttrs.split(","));
			if (this.domainApps != null)
			{
				appsIdxs = new HashMap<String, App>();
				for (App app : domainApps)
				{
					if (SimpleStringUtil.isEmpty(app.getAttributeNamespace()))
					{
						// Derive the namespace from the app's own domain, or
						// the root domain when the app declares none.
						String appDomain = StringUtil.isEmpty(app.getDomain()) ? this.rootDomain : app.getDomain();
						app.setAttributeNamespace(buildAttributeNamespace(appDomain, app.getPath()));
					}
					else if (!app.getAttributeNamespace().endsWith("#"))
					{
						// Normalize explicitly configured namespaces.
						app.setAttributeNamespace(app.getAttributeNamespace() + "#");
					}
					app.initUUID();
					appsIdxs.put(app.getUuid(), app);
					if (app.isCurrentApp())
						this.currentApp = app;
				}
			}
		}
	}

	/**
	 * Builds an attribute namespace: the domain with dots replaced by
	 * underscores, suffixed with '#', and prefixed by the application path
	 * ("ROOT" stands for the root path "/").
	 */
	private static String buildAttributeNamespace(String appDomain, String appPath)
	{
		String ns = appDomain.replace('.', '_') + "#";
		if (appPath.equals("/"))
		{
			return "ROOT_" + ns;
		}
		else if (appPath.startsWith("/"))
		{
			return appPath.substring(1) + "_" + ns;
		}
		return appPath + "_" + ns;
	}

	/**
	 * Returns the application registered under the given key, caching the
	 * first hit.
	 * NOTE(review): once {@link #currentApp} is set it is returned regardless
	 * of the given contextPath — presumably each deployment hosts a single
	 * current application; verify against the callers. Keys in
	 * {@link #appsIdxs} are the application UUIDs assigned in {@link #init()}.
	 */
	public App getApp(String contextPath)
	{
		if (appsIdxs == null)
			return null;
		if (currentApp != null)
		{
			return currentApp;
		}
		return currentApp = this.appsIdxs.get(contextPath);
	}

	/** Application resolved for this deployment, cached by {@link #getApp(String)}. */
	private App currentApp;
	/** Applications indexed by UUID; built in {@link #init()}. */
	private Map<String, App> appsIdxs;

	public List<String> get_paths() {
		return _paths;
	}

	/**
	 * Tells whether the given attribute belongs to the shared session area.
	 * When no share list is configured, every attribute is shared.
	 * Fixed: the previous implementation used a substring match on the raw
	 * comma-separated string, so e.g. "user" wrongly matched "username";
	 * membership is now checked against the parsed attribute list.
	 */
	public boolean isShareAttribute(String attribute)
	{
		if (SimpleStringUtil.isEmpty(this.shareSessionAttrs))
			return true;
		if (_shareSessionAttrs != null)
			return _shareSessionAttrs.contains(attribute);
		// init() not called yet: parse on the fly for an exact match.
		return Arrays.asList(this.shareSessionAttrs.split(",")).contains(attribute);
	}

	/**
	 * Prefixes an application-specific attribute name with the application's
	 * attribute namespace; shared attributes are returned unchanged.
	 *
	 * @param appkey unused, kept for backward compatibility
	 */
	public String wraperAttributeName(String appkey, String contextpath, String attribute)
	{
		App app = this.getApp(contextpath);
		if (app == null)
			return attribute;
		if (isShareAttribute(attribute))
		{
			return attribute;
		}
		return app.getAttributeNamespace() + attribute;
	}

	/**
	 * Reverse of {@link #wraperAttributeName}: returns the attribute as-is if
	 * it is shared, strips this application's namespace prefix if present,
	 * and returns null when the attribute belongs to a different application.
	 *
	 * @param appkey unused, kept for backward compatibility
	 */
	public String dewraperAttributeName(String appkey, String contextpath, String attribute)
	{
		App app = this.getApp(contextpath);
		if (app == null)
			return attribute;
		if (isShareAttribute(attribute))
		{
			return attribute;
		}
		if (attribute.startsWith(app.getAttributeNamespace()))
			return attribute.substring(app.getAttributeNamespace().length());
		else
			return null;
	}

	public List<App> getDomainApps() {
		return domainApps;
	}

	public void setDomainApps(List<App> domainApps) {
		this.domainApps = domainApps;
	}

	public String getRootDomain() {
		return rootDomain;
	}

	public void setRootDomain(String rootDomain) {
		this.rootDomain = rootDomain;
	}

	public String getDomain() {
		return domain;
	}

	public void setDomain(String domain) {
		this.domain = domain;
	}
}
| apache-2.0 |
alina-ipatina/pentaho-kettle | engine/test-src/org/pentaho/di/trans/steps/csvinput/UnnamedFieldsMappingTest.java | 1719 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2017 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.csvinput;
import static org.junit.Assert.*;
import org.junit.Before;
import org.junit.Test;
public class UnnamedFieldsMappingTest {

  /** Mapping under test; rebuilt before every case with a width of two fields. */
  private UnnamedFieldsMapping underTest;

  @Before
  public void before() {
    underTest = new UnnamedFieldsMapping( 2 );
  }

  @Test
  public void fieldMetaIndex() {
    // An in-range column index maps straight to itself.
    assertEquals( 1, underTest.fieldMetaIndex( 1 ) );
  }

  @Test
  public void fieldMetaIndexWithUnexistingField() {
    // An index beyond the configured size reports a missing field.
    assertEquals( FieldsMapping.FIELD_DOES_NOT_EXIST, underTest.fieldMetaIndex( 2 ) );
  }

  @Test
  public void size() {
    assertEquals( 2, underTest.size() );
  }

  @Test
  public void mapping() {
    // The static factory behaves like the constructor.
    UnnamedFieldsMapping created = UnnamedFieldsMapping.mapping( 2 );
    assertEquals( 1, created.fieldMetaIndex( 1 ) );
  }
}
| apache-2.0 |
twitter-forks/bazel | third_party/java/proguard/proguard6.2.2/src/proguard/classfile/editor/VariableSizeUpdater.java | 3626 | /*
* ProGuard -- shrinking, optimization, obfuscation, and preverification
* of Java bytecode.
*
* Copyright (c) 2002-2019 Guardsquare NV
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the Free
* Software Foundation; either version 2 of the License, or (at your option)
* any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package proguard.classfile.editor;
import proguard.classfile.*;
import proguard.classfile.attribute.*;
import proguard.classfile.attribute.visitor.AttributeVisitor;
import proguard.classfile.instruction.*;
import proguard.classfile.instruction.visitor.InstructionVisitor;
import proguard.classfile.util.*;
/**
* This AttributeVisitor computes and updates the maximum local variable frame
* size of the code attributes that it visits. It also cleans up the local
* variable tables.
*
* @author Eric Lafortune
*/
public class VariableSizeUpdater
extends      SimplifiedVisitor
implements   AttributeVisitor,
             InstructionVisitor
{
    private static final boolean DEBUG = false;

    // Strips unused entries from the local variable (type) tables afterwards.
    private final VariableCleaner variableCleaner = new VariableCleaner();


    // Implementations for AttributeVisitor.

    public void visitAnyAttribute(Clazz clazz, Attribute attribute) {}


    public void visitCodeAttribute(Clazz clazz, Method method, CodeAttribute codeAttribute)
    {
        // Start from the frame size required by the method's parameters.
        codeAttribute.u2maxLocals =
            ClassUtil.internalMethodParameterSize(method.getDescriptor(clazz),
                                                  method.getAccessFlags());

        if (DEBUG)
        {
            System.out.println("VariableSizeUpdater: "+clazz.getName()+"."+method.getName(clazz)+method.getDescriptor(clazz));
            System.out.println("  Max locals: "+codeAttribute.u2maxLocals+" <- parameters");
        }

        // Let each variable instruction raise the maximum as needed.
        codeAttribute.instructionsAccept(clazz, method, this);

        // Clean up the local variable tables.
        variableCleaner.visitCodeAttribute(clazz, method, codeAttribute);
    }


    // Implementations for InstructionVisitor.

    public void visitAnyInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, Instruction instruction) {}


    public void visitVariableInstruction(Clazz clazz, Method method, CodeAttribute codeAttribute, int offset, VariableInstruction variableInstruction)
    {
        // Category-2 values (long/double) occupy two slots.
        int requiredSize = variableInstruction.variableIndex +
                           (variableInstruction.isCategory2() ? 2 : 1);

        if (codeAttribute.u2maxLocals < requiredSize)
        {
            codeAttribute.u2maxLocals = requiredSize;

            if (DEBUG)
            {
                System.out.println("  Max locals: "+codeAttribute.u2maxLocals+" <- "+variableInstruction.toString(offset));
            }
        }
    }
}
| apache-2.0 |
FingolfinTEK/camel | components/camel-cxf/src/main/java/org/apache/camel/component/cxf/converter/CxfPayloadConverter.java | 10603 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.cxf.converter;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.List;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stax.StAXSource;
import javax.xml.transform.stream.StreamSource;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.apache.camel.Converter;
import org.apache.camel.Exchange;
import org.apache.camel.FallbackConverter;
import org.apache.camel.StreamCache;
import org.apache.camel.TypeConverter;
import org.apache.camel.component.cxf.CxfPayload;
import org.apache.camel.converter.jaxp.XmlConverter;
import org.apache.camel.spi.TypeConverterRegistry;
import org.apache.cxf.staxutils.StaxSource;
import org.apache.cxf.staxutils.StaxUtils;
/**
 * Camel type converters between {@link CxfPayload} and common XML
 * representations (DOM, NodeList, Source, StreamCache), plus a fallback
 * converter that bridges everything else that can be turned into XML.
 */
@Converter
public final class CxfPayloadConverter {
    private static XmlConverter xml = new XmlConverter();

    private CxfPayloadConverter() {
        // Helper class
    }

    /** Wraps a DOM document's root element into a header-less CxfPayload. */
    @Converter
    public static <T> CxfPayload<T> documentToCxfPayload(Document doc, Exchange exchange) {
        return elementToCxfPayload(doc.getDocumentElement(), exchange);
    }

    /** Wraps a single DOM element into a CxfPayload with an empty header list. */
    @Converter
    public static <T> CxfPayload<T> elementToCxfPayload(Element element, Exchange exchange) {
        List<T> headers = new ArrayList<T>();
        List<Element> body = new ArrayList<Element>();
        body.add(element);
        return new CxfPayload<T>(headers, body);
    }

    /** Wraps a NodeList into a CxfPayload, keeping only the Element nodes. */
    @Converter
    public static <T> CxfPayload<T> nodeListToCxfPayload(NodeList nodeList, Exchange exchange) {
        List<T> headers = new ArrayList<T>();
        List<Element> body = new ArrayList<Element>();
        for (int i = 0; i < nodeList.getLength(); i++) {
            Node node = nodeList.item(i);
            // add all nodes to the body that are elements
            if (Element.class.isAssignableFrom(node.getClass())) {
                body.add((Element) node);
            }
        }
        return new CxfPayload<T>(headers, body);
    }

    /** Wraps a single Source into a CxfPayload (no headers, no namespace map). */
    @Converter
    public static <T> CxfPayload<T> sourceToCxfPayload(Source src, Exchange exchange) {
        List<T> headers = new ArrayList<T>();
        List<Source> body = new ArrayList<Source>();
        body.add(src);
        return new CxfPayload<T>(headers, body, null);
    }

    /** Exposes the payload's body elements as a NodeList view. */
    @Converter
    public static <T> NodeList cxfPayloadToNodeList(CxfPayload<T> payload, Exchange exchange) {
        return new NodeListWrapper(payload.getBody());
    }

    /** Returns the payload's first body element, or null when the body is empty. */
    @Converter
    public static <T> Node cxfPayLoadToNode(CxfPayload<T> payload, Exchange exchange) {
        List<Element> payloadBodyElements = payload.getBody();
        if (payloadBodyElements.size() > 0) {
            return payloadBodyElements.get(0);
        }
        return null;
    }

    /** Returns the payload's first body Source, or null when the body is empty. */
    @Converter
    public static <T> Source cxfPayLoadToSource(CxfPayload<T> payload, Exchange exchange) {
        List<Source> payloadBody = payload.getBodySources();
        if (payloadBody.size() > 0) {
            return payloadBody.get(0);
        }
        return null;
    }

    /** Wraps the payload in a cached, re-readable copy for Camel stream caching. */
    @Converter
    public static <T> StreamCache cxfPayLoadToStreamCache(CxfPayload<T> payload, Exchange exchange) {
        return new CachedCxfPayload<T>(payload, exchange, xml);
    }

    /**
     * Fallback converter invoked by Camel when no direct converter matches.
     * Converts in both directions: arbitrary values into a CxfPayload (via
     * Source, NodeList, Document, InputStream or String), and a CxfPayload
     * into the requested target type. Returning {@code Void.TYPE} tells
     * Camel conversion is impossible; returning {@code null} lets other
     * fallback converters try.
     */
    @SuppressWarnings("unchecked")
    @FallbackConverter
    public static <T> T convertTo(Class<T> type, Exchange exchange, Object value, TypeConverterRegistry registry) {
        // use fallback type converter, so we can probably convert into
        // CxfPayloads from other types
        if (type.isAssignableFrom(CxfPayload.class)) {
            if (!value.getClass().isArray()) {
                Source src = null;
                // many of the common format that can have a Source created directly
                if (value instanceof InputStream) {
                    src = new StreamSource((InputStream) value);
                } else if (value instanceof Reader) {
                    src = new StreamSource((Reader) value);
                } else if (value instanceof String) {
                    src = new StreamSource(new StringReader((String) value));
                } else if (value instanceof Node) {
                    src = new DOMSource((Node) value);
                } else if (value instanceof Source) {
                    src = (Source) value;
                }
                if (src == null) {
                    // assuming staxsource is preferred, otherwise use the one preferred
                    TypeConverter tc = registry.lookup(javax.xml.transform.stax.StAXSource.class, value.getClass());
                    if (tc == null) {
                        tc = registry.lookup(Source.class, value.getClass());
                    }
                    if (tc != null) {
                        src = tc.convertTo(Source.class, exchange, value);
                    }
                }
                if (src != null) {
                    return (T) sourceToCxfPayload(src, exchange);
                }
            }
            // No direct Source route: try DOM-based conversions next.
            TypeConverter tc = registry.lookup(NodeList.class, value.getClass());
            if (tc != null) {
                NodeList nodeList = tc.convertTo(NodeList.class, exchange, value);
                return (T) nodeListToCxfPayload(nodeList, exchange);
            }
            tc = registry.lookup(Document.class, value.getClass());
            if (tc != null) {
                Document document = tc.convertTo(Document.class, exchange, value);
                return (T) documentToCxfPayload(document, exchange);
            }
            // maybe we can convert via an InputStream
            CxfPayload<?> p;
            p = convertVia(InputStream.class, exchange, value, registry);
            if (p != null) {
                return (T) p;
            }
            // String is the converter of last resort
            p = convertVia(String.class, exchange, value, registry);
            if (p != null) {
                return (T) p;
            }
            // no we could not do it currently
            return (T) Void.TYPE;
        }
        // Convert a CxfPayload into something else
        if (CxfPayload.class.isAssignableFrom(value.getClass())) {
            CxfPayload<?> payload = (CxfPayload<?>) value;
            int size = payload.getBodySources().size();
            if (size == 1) {
                if (type.isAssignableFrom(Document.class)) {
                    Source s = payload.getBodySources().get(0);
                    Document d;
                    try {
                        d = StaxUtils.read(s);
                    } catch (XMLStreamException e) {
                        throw new RuntimeException(e);
                    }
                    return type.cast(d);
                }
                // CAMEL-8410 Just make sure we get the Source object directly from the payload body source
                Source s = payload.getBodySources().get(0);
                if (type.isInstance(s)) {
                    return type.cast(s);
                }
                TypeConverter tc = registry.lookup(type, Source.class);
                if (tc != null) {
                    XMLStreamReader r = null;
                    // Re-apply the payload's namespace map to the stream, if any.
                    if (payload.getNsMap() != null) {
                        if (s instanceof StaxSource) {
                            r = ((StaxSource) s).getXMLStreamReader();
                        } else if (s instanceof StAXSource) {
                            r = ((StAXSource) s).getXMLStreamReader();
                        }
                        if (r != null) {
                            s = new StAXSource(new DelegatingXMLStreamReader(r, payload.getNsMap()));
                        }
                    }
                    T t = tc.convertTo(type, s);
                    return t;
                }
            }
            TypeConverter tc = registry.lookup(type, NodeList.class);
            if (tc != null) {
                Object result = tc.convertTo(type, cxfPayloadToNodeList((CxfPayload<?>) value, exchange));
                if (result == null) {
                    // no we could not do it currently, and we just abort the convert here
                    return (T) Void.TYPE;
                } else {
                    return (T) result;
                }
            }
            // we cannot convert a node list, so we try the first item from the
            // node list
            tc = registry.lookup(type, Node.class);
            if (tc != null) {
                NodeList nodeList = cxfPayloadToNodeList((CxfPayload<?>) value, exchange);
                if (nodeList.getLength() > 0) {
                    return tc.convertTo(type, nodeList.item(0));
                } else {
                    // no we could not do it currently
                    return (T) Void.TYPE;
                }
            } else {
                if (size == 0) {
                    // empty size so we cannot convert
                    return (T) Void.TYPE;
                }
            }
        }
        return null;
    }

    /**
     * Tries converting {@code value} to a CxfPayload through an intermediate
     * type: value -> via -> Document -> payload. Returns null when either
     * conversion step is unavailable.
     */
    private static <T, V> CxfPayload<T> convertVia(Class<V> via, Exchange exchange, Object value, TypeConverterRegistry registry) {
        TypeConverter tc = registry.lookup(via, value.getClass());
        if (tc != null) {
            TypeConverter tc1 = registry.lookup(Document.class, via);
            if (tc1 != null) {
                V is = tc.convertTo(via, exchange, value);
                Document document = tc1.convertTo(Document.class, exchange, is);
                return documentToCxfPayload(document, exchange);
            }
        }
        return null;
    }
}
| apache-2.0 |
UniTime/unitime | JavaSource/org/unitime/timetable/tags/MarkBack.java | 1895 | /*
* Licensed to The Apereo Foundation under one or more contributor license
* agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*
* The Apereo Foundation licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.unitime.timetable.tags;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.TagSupport;
import org.unitime.timetable.webutil.BackTracker;
/**
* @author Tomas Muller
*/
public class MarkBack extends TagSupport {
	private static final long serialVersionUID = 6247051046382946227L;

	/** Whether the page should be marked as navigable-back (default true). */
	boolean iBack = true;
	/** Whether to clear the history when marking (default false). */
	boolean iClear = false;
	/** URI to record; passed straight to {@code BackTracker.markForBack}. */
	String iUri = null;
	/** Title to record for the page. */
	String iTitle = null;

	public boolean getBack() {
		return iBack;
	}

	public void setBack(boolean back) {
		iBack = back;
	}

	public boolean getClear() {
		return iClear;
	}

	public void setClear(boolean clear) {
		iClear = clear;
	}

	public String getUri() {
		return iUri;
	}

	public void setUri(String uri) {
		iUri = uri;
	}

	public String getTitle() {
		return iTitle;
	}

	public void setTitle(String title) {
		iTitle = title;
	}

	/** Records this page with the BackTracker and skips the tag body. */
	public int doStartTag() throws JspException {
		HttpServletRequest request = (HttpServletRequest) pageContext.getRequest();
		BackTracker.markForBack(request, getUri(), getTitle(), getBack(), getClear());
		return SKIP_BODY;
	}

	public int doEndTag() {
		return EVAL_PAGE;
	}
}
| apache-2.0 |
treejames/GeoprocessingAppstore | src/com/esri/gpt/control/livedata/WMSRendererFactory.java | 7160 | /* See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Esri Inc. licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.esri.gpt.control.livedata;
import com.esri.gpt.control.livedata.selector.HttpRequestDefinition;
import com.esri.gpt.control.livedata.selector.IHttpResponseListener;
import com.esri.gpt.control.livedata.selector.IRegistry;
import com.esri.gpt.control.livedata.selector.ISetter;
import com.esri.gpt.framework.geometry.Envelope;
import com.esri.gpt.framework.http.ResponseInfo;
import com.esri.gpt.framework.util.Val;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
* WMS renderer factory.
*/
/*packge*/ class WMSRendererFactory extends MapBasedRendererFactory {
/** get capabilities request */
private static String GET_CAPABILITIES_REQUEST = "service=WMS&request=GetCapabilities";
/** context path */
private String contextPath = "";
/** proxy URL */
private String proxyUrl = "";
  @Override
  public boolean isDefinitive() {
    // NOTE(review): appears to tell the selector that a match from this
    // factory is final — confirm against the IRegistry/selector contract.
    return true;
  }
  /**
   * Registers a GetCapabilities probe for the given URL; when the response
   * parses as a WMS capabilities document, a {@link WMSRenderer} configured
   * with the service extent is handed to the setter.
   */
  @Override
  public void register(IRegistry reg, final ISetter setter, final String url) {
    // Split the URL into the service endpoint and its query string.
    int qmark = url.lastIndexOf("?");
    final String serviceUrl = qmark >= 0 ? url.substring(0, qmark) : url;
    final String queryString = qmark >= 0 ? url.substring(qmark + 1) : "";
    Query query = new Query(queryString);
    // Bail out if the URL explicitly names a different OGC service.
    if (query.containsKey("service") && !query.get("service").equalsIgnoreCase("WMS"))
      return;
    // Overlay the original parameters onto a GetCapabilities request.
    query = new Query(GET_CAPABILITIES_REQUEST).mixin(query);
    final String getCapabilitiesUrl = serviceUrl + "?" + query;
    reg.register(new HttpRequestDefinition(getCapabilitiesUrl), new IHttpResponseListener() {
      public void onResponse(ResponseInfo info, String strContent, Document docContent) {
        if (docContent != null) {
          try {
            XPathFactory xPathFactory = XPathFactory.newInstance();
            XPath xPath = xPathFactory.newXPath();
            // Accept both root elements: WMS_Capabilities (WMS 1.3.0) and
            // WMT_MS_Capabilities (WMS 1.1.x).
            Node ndWmsCapabilities = (Node) xPath.evaluate("/WMS_Capabilities", docContent, XPathConstants.NODE);
            if (ndWmsCapabilities == null) {
              ndWmsCapabilities = (Node) xPath.evaluate("/WMT_MS_Capabilities", docContent, XPathConstants.NODE);
            }
            if (ndWmsCapabilities != null) {
              final Envelope extent = readExtent(xPath, ndWmsCapabilities);
              setter.set(new WMSRenderer() {
                @Override
                protected Envelope getExtent() {
                  return extent;
                }
                @Override
                protected String getUrl() {
                  return getCapabilitiesUrl;
                }
                @Override
                protected String getProxyUrl() {
                  return contextPath + proxyUrl;
                }
                @Override
                protected int getMapHeightAdjustment() {
                  return getProperties().getMapHeightAdjustment();
                }
              });
            }
          } catch (Exception ex) {
            // Deliberate best-effort: a malformed capabilities document
            // simply means no WMS renderer is registered for this URL.
          }
        }
      }
    });
  }
/**
* Creates instance of the factory.
* @param properties properties
* @param contextPath context path
* @param proxyUrl proxy URL
*/
public WMSRendererFactory(ILiveDataProperties properties, String contextPath, String proxyUrl) {
super(properties);
this.contextPath = Val.chkStr(contextPath);
this.proxyUrl = Val.chkStr(proxyUrl);
}
/**
* Reads extent from the capabilities node.
* @param xPath xpath
* @param ndWmsCapabilities capabilities node
* @return envelope or <code>null</code> if envelope can not be created
* @throws javax.xml.xpath.XPathExpressionException if using XPath fails
*/
private Envelope readExtent(XPath xPath, Node ndWmsCapabilities) throws XPathExpressionException {
String wkid = "4326";
Node EX_GeographicBoundingBox = (Node) xPath.evaluate("Capability/Layer/EX_GeographicBoundingBox", ndWmsCapabilities, XPathConstants.NODE);
if (EX_GeographicBoundingBox != null) {
return extractExtent(xPath, EX_GeographicBoundingBox, new String[]{"westBoundLongitude", "southBoundLatitude", "eastBoundLongitude", "northBoundLatitude"}, wkid);
} else {
NodeList nodes = (NodeList) xPath.evaluate("//EX_GeographicBoundingBox", ndWmsCapabilities, XPathConstants.NODESET);
if (nodes.getLength() > 0) {
Envelope envelope = new Envelope();
envelope.setWkid(wkid);
for (int i = 0; i < nodes.getLength(); i++) {
Node node = nodes.item(i);
Envelope e = extractExtent(xPath, node, new String[]{"westBoundLongitude", "southBoundLatitude", "eastBoundLongitude", "northBoundLatitude"}, wkid);
if (e != null) {
envelope.merge(e);
}
}
if (envelope.hasSize())
return envelope;
}
}
Node LatLonBoundingBox = (Node) xPath.evaluate("Capability/Layer/LatLonBoundingBox", ndWmsCapabilities, XPathConstants.NODE);
if (LatLonBoundingBox != null) {
return extractExtent(xPath, LatLonBoundingBox, new String[]{"@minx", "@miny", "@maxx", "@maxy"}, wkid);
}
return null;
}
private Envelope extractExtent(XPath xPath, Node node, String[] names, String wkid) throws XPathExpressionException {
if (node != null && names != null && names.length == 4) {
String[] values = new String[4];
for (int i = 0; i < 4; i++) {
values[i] = Val.chkStr((String) xPath.evaluate(names[i], node, XPathConstants.STRING));
}
return makeExtent(values[0], values[1], values[2], values[3], wkid);
}
return null;
}
/**
* Creates envelope from string reprezentations of coordinates.
* @param sMinX minx
* @param sMinY miny
* @param sMaxX maxx
* @param sMaxY maxy
* @param wkid wkid
* @return envelope or <code>null</code> if envelope can not be created
*/
private Envelope makeExtent(String sMinX, String sMinY, String sMaxX, String sMaxY, String wkid) {
if (sMinX.length() > 0 && sMaxX.length() > 0 && sMinY.length() > 0 && sMaxY.length() > 0) {
double minx = Val.chkDbl(sMinX, -180.0);
double maxx = Val.chkDbl(sMaxX, 180.0);
double miny = Val.chkDbl(sMinY, -90.0);
double maxy = Val.chkDbl(sMaxY, 90.0);
Envelope envelope = new Envelope(minx, miny, maxx, maxy);
envelope.setWkid(wkid);
return envelope;
}
return null;
}
}
| apache-2.0 |
android-ia/platform_tools_idea | java/java-analysis-impl/src/com/intellij/codeInspection/PossibleHeapPollutionVarargsInspection.java | 6249 | /*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection;
import com.intellij.codeInsight.AnnotationUtil;
import com.intellij.codeInsight.daemon.GroupNames;
import com.intellij.codeInsight.daemon.impl.analysis.JavaGenericsUtil;
import com.intellij.codeInsight.intention.AddAnnotationPsiFix;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.*;
import com.intellij.psi.search.searches.OverridingMethodsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiUtil;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
/**
 * Inspection that flags varargs methods whose vararg parameter has a
 * non-reifiable (parameterized) element type, since such methods can cause
 * heap pollution. Offers quick fixes that annotate the method with
 * {@code @SafeVarargs}, making it {@code final} first when the annotation
 * would otherwise be rejected (non-final, non-static instance methods).
 * <p>
 * User: anna
 * Date: 1/28/11
 */
public class PossibleHeapPollutionVarargsInspection extends BaseJavaBatchLocalInspectionTool {
  public static final Logger LOG = Logger.getInstance("#" + PossibleHeapPollutionVarargsInspection.class.getName());

  @Nls
  @NotNull
  @Override
  public String getGroupDisplayName() {
    return GroupNames.LANGUAGE_LEVEL_SPECIFIC_GROUP_NAME;
  }

  @Nls
  @NotNull
  @Override
  public String getDisplayName() {
    return "Possible heap pollution from parameterized vararg type";
  }

  @Override
  public boolean isEnabledByDefault() {
    return true;
  }

  @NotNull
  @Override
  public String getShortName() {
    return "SafeVarargsDetector";
  }

  @NotNull
  @Override
  public String getID() {
    return "unchecked";
  }

  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull final ProblemsHolder holder, boolean isOnTheFly) {
    return new HeapPollutionVisitor() {
      @Override
      protected void registerProblem(PsiMethod method, PsiIdentifier nameIdentifier) {
        final LocalQuickFix quickFix;
        // @SafeVarargs is only legal on methods that cannot be overridden:
        // final, static, or constructors can be annotated directly
        if (method.hasModifierProperty(PsiModifier.FINAL) ||
            method.hasModifierProperty(PsiModifier.STATIC) ||
            method.isConstructor()) {
          quickFix = new AnnotateAsSafeVarargsQuickFix();
        }
        else {
          final PsiClass containingClass = method.getContainingClass();
          LOG.assertTrue(containingClass != null);
          // a non-final instance method can be made final only if it is not
          // abstract, not an interface member, and has no overriders
          boolean canBeFinal = !method.hasModifierProperty(PsiModifier.ABSTRACT) &&
                               !containingClass.isInterface() &&
                               OverridingMethodsSearch.search(method).findFirst() == null;
          quickFix = canBeFinal ? new MakeFinalAndAnnotateQuickFix() : null;
        }
        holder.registerProblem(nameIdentifier, "Possible heap pollution from parameterized vararg type #loc", quickFix);
      }
    };
  }

  /**
   * Quick fix that adds {@code @SafeVarargs} to a method already eligible for
   * the annotation (final, static, or a constructor).
   */
  private static class AnnotateAsSafeVarargsQuickFix implements LocalQuickFix {
    @NotNull
    @Override
    public String getName() {
      return "Annotate as @SafeVarargs";
    }

    @NotNull
    @Override
    public String getFamilyName() {
      return getName();
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      final PsiElement psiElement = descriptor.getPsiElement();
      if (psiElement instanceof PsiIdentifier) {
        final PsiMethod psiMethod = (PsiMethod)psiElement.getParent();
        if (psiMethod != null) {
          new AddAnnotationPsiFix("java.lang.SafeVarargs", psiMethod, PsiNameValuePair.EMPTY_ARRAY).applyFix(project, descriptor);
        }
      }
    }
  }

  /**
   * Quick fix that first makes the method {@code final} (so the annotation is
   * legal) and then adds {@code @SafeVarargs}.
   */
  private static class MakeFinalAndAnnotateQuickFix implements LocalQuickFix {
    @NotNull
    @Override
    public String getName() {
      return "Make final and annotate as @SafeVarargs";
    }

    @NotNull
    @Override
    public String getFamilyName() {
      return getName();
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      final PsiElement psiElement = descriptor.getPsiElement();
      if (psiElement instanceof PsiIdentifier) {
        final PsiMethod psiMethod = (PsiMethod)psiElement.getParent();
        // null guard kept consistent with AnnotateAsSafeVarargsQuickFix:
        // the identifier's parent may not be a valid method in a broken tree
        if (psiMethod != null) {
          psiMethod.getModifierList().setModifierProperty(PsiModifier.FINAL, true);
          new AddAnnotationPsiFix("java.lang.SafeVarargs", psiMethod, PsiNameValuePair.EMPTY_ARRAY).applyFix(project, descriptor);
        }
      }
    }
  }

  /**
   * Visitor that detects varargs methods with a non-reifiable vararg element
   * type and delegates reporting to {@link #registerProblem}.
   */
  public abstract static class HeapPollutionVisitor extends JavaElementVisitor {

    @Override
    public void visitMethod(PsiMethod method) {
      super.visitMethod(method);
      // @SafeVarargs (and this warning) only exist from Java 7 onwards
      if (!PsiUtil.getLanguageLevel(method).isAtLeast(LanguageLevel.JDK_1_7)) return;
      if (AnnotationUtil.isAnnotated(method, "java.lang.SafeVarargs", false)) return;
      if (!method.isVarArgs()) return;
      // the vararg parameter is by definition the last one
      final PsiParameter psiParameter = method.getParameterList().getParameters()[method.getParameterList().getParametersCount() - 1];
      final PsiType componentType = ((PsiEllipsisType)psiParameter.getType()).getComponentType();
      if (JavaGenericsUtil.isReifiableType(componentType)) {
        return;
      }
      // a parameter that is only ever read cannot pollute the heap; any write
      // access to it keeps the warning suppressed... actually: bail out if the
      // parameter is written to, since the @SafeVarargs contract would not hold
      for (PsiReference reference : ReferencesSearch.search(psiParameter)) {
        final PsiElement element = reference.getElement();
        if (element instanceof PsiExpression && !PsiUtil.isAccessedForReading((PsiExpression)element)) {
          return;
        }
      }
      final PsiIdentifier nameIdentifier = method.getNameIdentifier();
      if (nameIdentifier != null) {
        //if (method.hasModifierProperty(PsiModifier.ABSTRACT)) return;
        //final PsiClass containingClass = method.getContainingClass();
        //if (containingClass == null || containingClass.isInterface()) return; do not add
        registerProblem(method, nameIdentifier);
      }
    }

    protected abstract void registerProblem(PsiMethod method, PsiIdentifier nameIdentifier);
  }
}
| apache-2.0 |
chanakaudaya/developer-studio | esb/org.wso2.developerstudio.eclipse.gmf.esb.diagram/src/org/wso2/developerstudio/eclipse/gmf/esb/diagram/edit/policies/IterateMediatorTargetOutputConnectorItemSemanticEditPolicy.java | 3876 | package org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.policies;
import java.util.Iterator;
import org.eclipse.emf.ecore.EAnnotation;
import org.eclipse.gef.commands.Command;
import org.eclipse.gmf.runtime.diagram.core.commands.DeleteCommand;
import org.eclipse.gmf.runtime.emf.commands.core.command.CompositeTransactionalCommand;
import org.eclipse.gmf.runtime.emf.type.core.commands.DestroyElementCommand;
import org.eclipse.gmf.runtime.emf.type.core.requests.CreateRelationshipRequest;
import org.eclipse.gmf.runtime.emf.type.core.requests.DestroyElementRequest;
import org.eclipse.gmf.runtime.emf.type.core.requests.ReorientRelationshipRequest;
import org.eclipse.gmf.runtime.notation.Edge;
import org.eclipse.gmf.runtime.notation.View;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EsbLinkCreateCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.commands.EsbLinkReorientCommand;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.edit.parts.EsbLinkEditPart;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.part.EsbVisualIDRegistry;
import org.wso2.developerstudio.eclipse.gmf.esb.diagram.providers.EsbElementTypes;
/**
 * Semantic edit policy for the output connector of an Iterate mediator target.
 * Handles destruction of the connector (cascading to its outgoing EsbLink
 * edges) and creation/reorientation of EsbLink connections starting at it.
 * GMF-generated code: do not hand-edit logic, regeneration will overwrite it.
 *
 * @generated
 */
public class IterateMediatorTargetOutputConnectorItemSemanticEditPolicy extends
    EsbBaseItemSemanticEditPolicy {

  /**
   * Creates the policy bound to the IterateMediatorTargetOutputConnector
   * element type (visual id 3606).
   *
   * @generated
   */
  public IterateMediatorTargetOutputConnectorItemSemanticEditPolicy() {
    super(EsbElementTypes.IterateMediatorTargetOutputConnector_3606);
  }

  /**
   * Builds a composite command that first destroys every outgoing EsbLink
   * (both its semantic element and its notation edge), then the host element
   * itself. A view carrying the "Shortcut" annotation is treated as a
   * shortcut: only the view is deleted, not the underlying element.
   *
   * @generated
   */
  protected Command getDestroyElementCommand(DestroyElementRequest req) {
    View view = (View) getHost().getModel();
    CompositeTransactionalCommand cmd = new CompositeTransactionalCommand(getEditingDomain(),
        null);
    cmd.setTransactionNestingEnabled(false);
    for (Iterator<?> it = view.getSourceEdges().iterator(); it.hasNext();) {
      Edge outgoingLink = (Edge) it.next();
      if (EsbVisualIDRegistry.getVisualID(outgoingLink) == EsbLinkEditPart.VISUAL_ID) {
        DestroyElementRequest r = new DestroyElementRequest(outgoingLink.getElement(),
            false);
        cmd.add(new DestroyElementCommand(r));
        cmd.add(new DeleteCommand(getEditingDomain(), outgoingLink));
        continue;
      }
    }
    EAnnotation annotation = view.getEAnnotation("Shortcut"); //$NON-NLS-1$
    if (annotation == null) {
      // there are indirectly referenced children, need extra commands: false
      addDestroyShortcutsCommand(cmd, view);
      // delete host element
      cmd.add(new DestroyElementCommand(req));
    } else {
      cmd.add(new DeleteCommand(getEditingDomain(), view));
    }
    return getGEFWrapper(cmd.reduce());
  }

  /**
   * Dispatches to the start- or complete-creation command depending on whether
   * the request already has a target; falls back to the superclass otherwise.
   *
   * @generated
   */
  protected Command getCreateRelationshipCommand(CreateRelationshipRequest req) {
    Command command = req.getTarget() == null ? getStartCreateRelationshipCommand(req)
        : getCompleteCreateRelationshipCommand(req);
    return command != null ? command : super.getCreateRelationshipCommand(req);
  }

  /**
   * Returns the command that starts an EsbLink from this connector.
   *
   * @generated
   */
  protected Command getStartCreateRelationshipCommand(CreateRelationshipRequest req) {
    if (EsbElementTypes.EsbLink_4001 == req.getElementType()) {
      return getGEFWrapper(new EsbLinkCreateCommand(req, req.getSource(), req.getTarget()));
    }
    return null;
  }

  /**
   * NOTE(review): generated stub — both branches return null, so link
   * completion at this connector is delegated to the superclass behavior.
   *
   * @generated
   */
  protected Command getCompleteCreateRelationshipCommand(CreateRelationshipRequest req) {
    if (EsbElementTypes.EsbLink_4001 == req.getElementType()) {
      return null;
    }
    return null;
  }

  /**
   * Returns command to reorient EClass based link. New link target or source
   * should be the domain model element associated with this node.
   *
   * @generated
   */
  protected Command getReorientRelationshipCommand(ReorientRelationshipRequest req) {
    switch (getVisualID(req)) {
    case EsbLinkEditPart.VISUAL_ID:
      return getGEFWrapper(new EsbLinkReorientCommand(req));
    }
    return super.getReorientRelationshipCommand(req);
  }
}
| apache-2.0 |
jeorme/OG-Platform | projects/OG-Analytics/src/main/java/com/opengamma/analytics/financial/credit/isdastandardmodel/fastcalibration/CreditCurveCalibrator.java | 20257 | /**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.financial.credit.isdastandardmodel.fastcalibration;
import static com.opengamma.analytics.financial.credit.isdastandardmodel.DoublesScheduleGenerator.getIntegrationsPoints;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.opengamma.analytics.financial.credit.isdastandardmodel.AccrualOnDefaultFormulae;
import com.opengamma.analytics.financial.credit.isdastandardmodel.CDSAnalytic;
import com.opengamma.analytics.financial.credit.isdastandardmodel.CDSCoupon;
import com.opengamma.analytics.financial.credit.isdastandardmodel.ISDACompliantCreditCurve;
import com.opengamma.analytics.financial.credit.isdastandardmodel.ISDACompliantCreditCurveBuilder.ArbitrageHandling;
import com.opengamma.analytics.financial.credit.isdastandardmodel.ISDACompliantYieldCurve;
import com.opengamma.analytics.financial.credit.isdastandardmodel.MultiCDSAnalytic;
import com.opengamma.analytics.math.function.Function1D;
import com.opengamma.analytics.math.rootfinding.NewtonRaphsonSingleRootFinder;
import com.opengamma.util.ArgumentChecker;
/**
 * Fast ISDA-standard-model credit-curve calibrator.
 * <p>
 * At construction time the calibrator pre-computes, per calibration
 * instrument, the protection-leg and premium-leg "elements" (pricing pieces
 * associated with a single curve node) together with index maps between CDSs,
 * coupons and curve knots. Bootstrapping the hazard-rate curve then only
 * re-prices the pieces that depend on the node currently being solved,
 * instead of re-pricing whole instruments.
 */
public class CreditCurveCalibrator {

  // Newton-Raphson solver used for each curve node in turn
  private static final NewtonRaphsonSingleRootFinder ROOTFINDER = new NewtonRaphsonSingleRootFinder();

  // number of calibration CDSs (one curve knot per CDS protection end)
  private final int _nCDS;
  // number of unique premium coupons across all calibration CDSs
  private final int _nCoupons;
  // curve knot times = CDS protection-end times, strictly increasing
  private final double[] _t;
  // discount factor to the common cash-settlement date
  private final double _valuationDF;
  // loss-given-default per CDS
  private final double[] _lgd;
  // accrued premium per unit spread, per CDS (field name keeps the original "Accured" spelling)
  private final double[] _unitAccured;
  // for each CDS, indices (into the unique-coupon list) of its coupons
  private final int[][] _cds2CouponsMap;
  // for each CDS, the coupons that must be re-priced while solving its knot
  // (intersection of _cds2CouponsMap and _knot2CouponsMap for that index)
  private final int[][] _cdsCouponsUpdateMap;
  // for each curve knot, the coupons whose value depends on that knot
  private final int[][] _knot2CouponsMap;
  private final ProtectionLegElement[] _protElems;
  private final CouponOnlyElement[] _premElems;
  private final ArbitrageHandling _arbHandle;

  /**
   * Sets up the calibrator for a multi-maturity CDS using the original ISDA
   * accrual-on-default formula and ignoring arbitrage.
   *
   * @param multiCDS the family of CDSs sharing coupons, one per maturity
   * @param yieldCurve the discounting curve
   */
  public CreditCurveCalibrator(final MultiCDSAnalytic multiCDS, final ISDACompliantYieldCurve yieldCurve) {
    this(multiCDS, yieldCurve, AccrualOnDefaultFormulae.OrignalISDA, ArbitrageHandling.Ignore);
  }

  /**
   * Sets up the calibrator for a multi-maturity CDS.
   *
   * @param multiCDS the family of CDSs sharing coupons, one per maturity
   * @param yieldCurve the discounting curve
   * @param formula the accrual-on-default formula to use for premium elements
   * @param arbHandle how to treat spreads that imply a negative forward hazard rate
   */
  public CreditCurveCalibrator(final MultiCDSAnalytic multiCDS, final ISDACompliantYieldCurve yieldCurve, final AccrualOnDefaultFormulae formula, final ArbitrageHandling arbHandle) {
    ArgumentChecker.notNull(multiCDS, "multiCDS");
    ArgumentChecker.notNull(yieldCurve, "yieldCurve");
    _arbHandle = arbHandle;

    _nCDS = multiCDS.getNumMaturities();
    _t = new double[_nCDS];
    _lgd = new double[_nCDS];
    _unitAccured = new double[_nCDS];
    for (int i = 0; i < _nCDS; i++) {
      _t[i] = multiCDS.getProtectionEnd(i);
      _lgd[i] = multiCDS.getLGD();
      _unitAccured[i] = multiCDS.getAccruedPremiumPerUnitSpread(i);
    }
    _valuationDF = yieldCurve.getDiscountFactor(multiCDS.getCashSettleTime());

    //This is the global set of knots - it will be truncated down for the various leg elements
    //TODO this will not match ISDA C for forward starting (i.e. accStart > tradeDate) CDS, and will give different answers
    //if the Markit 'fix' is used in that case
    final double[] knots = getIntegrationsPoints(multiCDS.getEffectiveProtectionStart(), _t[_nCDS - 1], yieldCurve.getKnotTimes(), _t);

    //The protection leg: element i covers protection between knots i-1 and i
    _protElems = new ProtectionLegElement[_nCDS];
    for (int i = 0; i < _nCDS; i++) {
      _protElems[i] = new ProtectionLegElement(i == 0 ? multiCDS.getEffectiveProtectionStart() : _t[i - 1], _t[i], yieldCurve, i, knots);
    }

    _cds2CouponsMap = new int[_nCDS][];
    _cdsCouponsUpdateMap = new int[_nCDS][];
    _knot2CouponsMap = new int[_nCDS][];

    // build the list of unique coupons: all standard coupons plus the final
    // CDS's terminal coupon; the longest CDS uses all of them in order
    final List<CDSCoupon> allCoupons = new ArrayList<>(_nCDS + multiCDS.getTotalPayments() - 1);
    allCoupons.addAll(Arrays.asList(multiCDS.getStandardCoupons()));
    allCoupons.add(multiCDS.getTerminalCoupon(_nCDS - 1));
    final int[] temp = new int[multiCDS.getTotalPayments()];
    for (int i = 0; i < multiCDS.getTotalPayments(); i++) {
      temp[i] = i;
    }
    _cds2CouponsMap[_nCDS - 1] = temp;

    //complete the list of unique coupons and fill out the cds2CouponsMap
    for (int i = 0; i < _nCDS - 1; i++) {
      final CDSCoupon c = multiCDS.getTerminalCoupon(i);
      final int nPayments = Math.max(0, multiCDS.getPaymentIndexForMaturity(i)) + 1;
      _cds2CouponsMap[i] = new int[nPayments];
      for (int jj = 0; jj < nPayments - 1; jj++) {
        _cds2CouponsMap[i][jj] = jj;
      }

      //because of business-day adjustment, a terminal coupon can be identical to a standard coupon,
      //in which case it is not added again
      int index = allCoupons.indexOf(c);
      if (index == -1) {
        index = allCoupons.size();
        allCoupons.add(c);
      }
      _cds2CouponsMap[i][nPayments - 1] = index;
    }

    //loop over the coupons to populate the couponUpdateMap
    _nCoupons = allCoupons.size();
    final int[] sizes = new int[_nCDS];
    final int[] map = new int[_nCoupons];
    for (int i = 0; i < _nCoupons; i++) {
      final CDSCoupon c = allCoupons.get(i);
      // find the curve knot the coupon's effective end falls on (or just after)
      int index = Arrays.binarySearch(_t, c.getEffEnd());
      if (index < 0) {
        index = -(index + 1);
      }
      sizes[index]++;
      map[i] = index;
    }

    //make the premium leg elements (one per unique coupon)
    _premElems = new CouponOnlyElement[_nCoupons];
    if (multiCDS.isPayAccOnDefault()) {
      for (int i = 0; i < _nCoupons; i++) {
        _premElems[i] = new PremiumLegElement(multiCDS.getEffectiveProtectionStart(), allCoupons.get(i), yieldCurve, map[i], knots, formula);
      }
    } else {
      for (int i = 0; i < _nCoupons; i++) {
        _premElems[i] = new CouponOnlyElement(allCoupons.get(i), yieldCurve, map[i]);
      }
    }

    //sort a map from coupon to curve node, to a map from curve node to coupons
    for (int i = 0; i < _nCDS; i++) {
      _knot2CouponsMap[i] = new int[sizes[i]];
    }
    final int[] indexes = new int[_nCDS];
    for (int i = 0; i < _nCoupons; i++) {
      final int index = map[i];
      _knot2CouponsMap[index][indexes[index]++] = i;
    }

    //the cdsCouponsUpdateMap is the intersection of the cds2CouponsMap and knot2CouponsMap
    for (int i = 0; i < _nCDS; i++) {
      _cdsCouponsUpdateMap[i] = intersection(_knot2CouponsMap[i], _cds2CouponsMap[i]);
    }
  }

  /**
   * Sets up the calibrator for a set of individual CDSs using the original
   * ISDA accrual-on-default formula and ignoring arbitrage.
   *
   * @param cds the calibration CDSs, maturities strictly increasing
   * @param yieldCurve the discounting curve
   */
  public CreditCurveCalibrator(final CDSAnalytic[] cds, final ISDACompliantYieldCurve yieldCurve) {
    this(cds, yieldCurve, AccrualOnDefaultFormulae.OrignalISDA, ArbitrageHandling.Ignore);
  }

  /**
   * Sets up the calibrator for a set of individual CDSs. All CDSs must share
   * the same accrual start, effective protection start, cash-settle time and
   * pay-accrual-on-default flag; maturities must be strictly increasing.
   *
   * @param cds the calibration CDSs
   * @param yieldCurve the discounting curve
   * @param formula the accrual-on-default formula to use for premium elements
   * @param arbHandle how to treat spreads that imply a negative forward hazard rate
   */
  public CreditCurveCalibrator(final CDSAnalytic[] cds, final ISDACompliantYieldCurve yieldCurve, final AccrualOnDefaultFormulae formula, final ArbitrageHandling arbHandle) {
    ArgumentChecker.noNulls(cds, "cds");
    ArgumentChecker.notNull(yieldCurve, "yieldCurve");
    _arbHandle = arbHandle;

    _nCDS = cds.length;
    final boolean payAccOnDefault = cds[0].isPayAccOnDefault();
    final double accStart = cds[0].getAccStart();
    final double effectProtStart = cds[0].getEffectiveProtectionStart();
    final double cashSettleTime = cds[0].getCashSettleTime();
    _t = new double[_nCDS];
    _t[0] = cds[0].getProtectionEnd();
    //Check all the CDSs match
    for (int i = 1; i < _nCDS; i++) {
      ArgumentChecker.isTrue(payAccOnDefault == cds[i].isPayAccOnDefault(), "All CDSs must have same pay-accrual on default status");
      ArgumentChecker.isTrue(accStart == cds[i].getAccStart(), "All CDSs must has same accrual start");
      ArgumentChecker.isTrue(effectProtStart == cds[i].getEffectiveProtectionStart(), "All CDSs must has same effective protection start");
      ArgumentChecker.isTrue(cashSettleTime == cds[i].getCashSettleTime(), "All CDSs must has same cash-settle time");
      _t[i] = cds[i].getProtectionEnd();
      ArgumentChecker.isTrue(_t[i] > _t[i - 1], "CDS maturities must be increasing");
    }

    _valuationDF = yieldCurve.getDiscountFactor(cashSettleTime);
    _lgd = new double[_nCDS];
    _unitAccured = new double[_nCDS];
    for (int i = 0; i < _nCDS; i++) {
      _lgd[i] = cds[i].getLGD();
      _unitAccured[i] = cds[i].getAccruedYearFraction();
    }

    //This is the global set of knots - it will be truncated down for the various leg elements
    //TODO this will not match ISDA C for forward starting (i.e. accStart > tradeDate) CDS, and will give different answers
    //if the Markit 'fix' is used in that case
    final double[] knots = getIntegrationsPoints(effectProtStart, _t[_nCDS - 1], yieldCurve.getKnotTimes(), _t);

    //The protection leg: element i covers protection between knots i-1 and i
    _protElems = new ProtectionLegElement[_nCDS];
    for (int i = 0; i < _nCDS; i++) {
      _protElems[i] = new ProtectionLegElement(i == 0 ? effectProtStart : _t[i - 1], _t[i], yieldCurve, i, knots);
    }

    _cds2CouponsMap = new int[_nCDS][];
    _cdsCouponsUpdateMap = new int[_nCDS][];
    _knot2CouponsMap = new int[_nCDS][];

    // seed the unique-coupon list with the coupons of the longest CDS,
    // which uses all of them in order
    final int nPaymentsFinalCDS = cds[_nCDS - 1].getNumPayments();
    final List<CDSCoupon> allCoupons = new ArrayList<>(_nCDS + nPaymentsFinalCDS - 1);
    allCoupons.addAll(Arrays.asList(cds[_nCDS - 1].getCoupons()));
    final int[] temp = new int[nPaymentsFinalCDS];
    for (int i = 0; i < nPaymentsFinalCDS; i++) {
      temp[i] = i;
    }
    _cds2CouponsMap[_nCDS - 1] = temp;

    //complete the list of unique coupons and fill out the cds2CouponsMap
    for (int i = 0; i < _nCDS - 1; i++) {
      final CDSCoupon[] c = cds[i].getCoupons();
      final int nPayments = c.length;
      _cds2CouponsMap[i] = new int[nPayments];
      for (int k = 0; k < nPayments; k++) {
        // coupons shared between CDSs are only stored once
        int index = allCoupons.indexOf(c[k]);
        if (index == -1) {
          index = allCoupons.size();
          allCoupons.add(c[k]);
        }
        _cds2CouponsMap[i][k] = index;
      }
    }

    //loop over the coupons to populate the couponUpdateMap
    _nCoupons = allCoupons.size();
    final int[] sizes = new int[_nCDS];
    final int[] map = new int[_nCoupons];
    for (int i = 0; i < _nCoupons; i++) {
      final CDSCoupon c = allCoupons.get(i);
      // find the curve knot the coupon's effective end falls on (or just after)
      int index = Arrays.binarySearch(_t, c.getEffEnd());
      if (index < 0) {
        index = -(index + 1);
      }
      sizes[index]++;
      map[i] = index;
    }

    //make the premium leg elements (one per unique coupon)
    _premElems = new CouponOnlyElement[_nCoupons];
    if (payAccOnDefault) {
      for (int i = 0; i < _nCoupons; i++) {
        _premElems[i] = new PremiumLegElement(effectProtStart, allCoupons.get(i), yieldCurve, map[i], knots, formula);
      }
    } else {
      for (int i = 0; i < _nCoupons; i++) {
        _premElems[i] = new CouponOnlyElement(allCoupons.get(i), yieldCurve, map[i]);
      }
    }

    //sort a map from coupon to curve node, to a map from curve node to coupons
    for (int i = 0; i < _nCDS; i++) {
      _knot2CouponsMap[i] = new int[sizes[i]];
    }
    final int[] indexes = new int[_nCDS];
    for (int i = 0; i < _nCoupons; i++) {
      final int index = map[i];
      _knot2CouponsMap[index][indexes[index]++] = i;
    }

    //the cdsCouponsUpdateMap is the intersection of the cds2CouponsMap and knot2CouponsMap
    for (int i = 0; i < _nCDS; i++) {
      _cdsCouponsUpdateMap[i] = intersection(_knot2CouponsMap[i], _cds2CouponsMap[i]);
    }
  }

  /**
   * Bootstraps the credit curve from par spreads (zero points up-front).
   *
   * @param premiums the par spreads, one per calibration CDS
   * @return the calibrated credit curve
   */
  public ISDACompliantCreditCurve calibrate(final double[] premiums) {
    ArgumentChecker.notEmpty(premiums, "premiums");
    ArgumentChecker.isTrue(_nCDS == premiums.length, "premiums wrong length");
    final double[] puf = new double[_nCDS];
    final CalibrationImpl imp = new CalibrationImpl();
    return imp.calibrate(premiums, puf);
  }

  /**
   * Bootstraps the credit curve from premiums and points up-front.
   *
   * @param premiums the coupon/premium rates, one per calibration CDS
   * @param puf the points up-front, one per calibration CDS
   * @return the calibrated credit curve
   */
  public ISDACompliantCreditCurve calibrate(final double[] premiums, final double[] puf) {
    ArgumentChecker.notEmpty(premiums, "premiums");
    ArgumentChecker.notEmpty(puf, "puf");
    ArgumentChecker.isTrue(_nCDS == premiums.length, "premiums wrong length");
    ArgumentChecker.isTrue(_nCDS == puf.length, "puf wrong length");

    final CalibrationImpl imp = new CalibrationImpl();
    return imp.calibrate(premiums, puf);
  }

  /**
   * Single-use bootstrap state: caches the PV and node-sensitivity of every
   * leg element and updates them incrementally as each curve node is solved
   * (only elements depending on the active node are re-priced).
   */
  private class CalibrationImpl {

    // per protection-leg element: {pv, dPV/dh at its knot}
    private double[][] _protLegElmtPV;
    // per premium-leg element: {pv, dPV/dh at its knot}
    private double[][] _premLegElmtPV;
    private ISDACompliantCreditCurve _creditCurve;

    public ISDACompliantCreditCurve calibrate(final double[] premiums, final double[] puf) {
      _protLegElmtPV = new double[_nCDS][2];
      _premLegElmtPV = new double[_nCoupons][2];

      // use continuous premiums as initial guess
      final double[] guess = new double[_nCDS];
      for (int i = 0; i < _nCDS; i++) {
        guess[i] = (premiums[i] + puf[i] / _t[i]) / _lgd[i];
      }

      _creditCurve = new ISDACompliantCreditCurve(_t, guess);
      // bootstrap: solve each node in maturity order, holding earlier nodes fixed
      for (int i = 0; i < _nCDS; i++) {
        final Function1D<Double, Double> func = getPointFunction(i, premiums[i], puf[i]);
        final Function1D<Double, Double> grad = getPointDerivative(i, premiums[i]);

        switch (_arbHandle) {
          case Ignore: {
            final double zeroRate = ROOTFINDER.getRoot(func, grad, guess[i]);
            updateAll(zeroRate, i);
            break;
          }
          case Fail: {
            // minValue is the zero rate giving a zero forward hazard rate over this period
            final double minValue = i == 0 ? 0.0 : _creditCurve.getRTAtIndex(i - 1) / _creditCurve.getTimeAtIndex(i);
            if (i > 0 && func.evaluate(minValue) > 0.0) { //can never fail on the first spread
              final StringBuilder msg = new StringBuilder();
              if (puf[i] == 0.0) {
                msg.append("The par spread of " + premiums[i] + " at index " + i);
              } else {
                msg.append("The premium of " + premiums[i] + "and points up-front of " + puf[i] + " at index " + i);
              }
              msg.append(" is an arbitrage; cannot fit a curve with positive forward hazard rate. ");
              throw new IllegalArgumentException(msg.toString());
            }
            guess[i] = Math.max(minValue, guess[i]);
            final double zeroRate = ROOTFINDER.getRoot(func, grad, guess[i]);
            updateAll(zeroRate, i);
            break;
          }
          case ZeroHazardRate: {
            // minValue is the zero rate giving a zero forward hazard rate over this period
            final double minValue = i == 0 ? 0.0 : _creditCurve.getRTAtIndex(i - 1) / _creditCurve.getTimeAtIndex(i);
            if (i > 0 && func.evaluate(minValue) > 0.0) { //can never fail on the first spread
              updateAll(minValue, i); //this is setting the forward hazard rate for this period to zero, rather than letting it go negative
            } else {
              guess[i] = Math.max(minValue, guess[i]);
              final double zeroRate = ROOTFINDER.getRoot(func, grad, guess[i]);
              updateAll(zeroRate, i);
            }
            break;
          }
        }
      }

      return _creditCurve;
    }

    /**
     * Returns the clean-price residual of CDS {@code index} as a function of
     * the zero hazard rate at node {@code index}; the root of this function
     * is the calibrated node value. Evaluation refreshes the cached element
     * PVs for the active node as a side effect (see {@link #update}).
     */
    private Function1D<Double, Double> getPointFunction(final int index, final double premium, final double puf) {
      final int[] iCoupons = _cds2CouponsMap[index];
      final int nCoupons = iCoupons.length;
      // clean price target: puf less the accrued premium
      final double dirtyPV = puf - premium * _unitAccured[index];
      final double lgd = _lgd[index];
      return new Function1D<Double, Double>() {
        @Override
        public Double evaluate(final Double h) {
          update(h, index);
          // protection leg is the sum of elements up to and including this node
          double protLegPV = 0.0;
          for (int i = 0; i <= index; i++) {
            protLegPV += _protLegElmtPV[i][0];
          }
          // premium leg is the sum over this CDS's coupons
          double premLegPV = 0.0;
          for (int i = 0; i < nCoupons; i++) {
            final int jj = iCoupons[i];
            premLegPV += _premLegElmtPV[jj][0];
          }
          final double pv = (lgd * protLegPV - premium * premLegPV) / _valuationDF - dirtyPV;
          return pv;
        }
      };
    }

    /**
     * Returns the derivative of the point function with respect to the node's
     * zero hazard rate. Relies on {@link #update} having been called by the
     * matching point-function evaluation, so the cached sensitivities are
     * current.
     */
    private Function1D<Double, Double> getPointDerivative(final int index, final double premium) {
      final int[] iCoupons = _cdsCouponsUpdateMap[index];
      final int nCoupons = iCoupons.length;
      final double lgd = _lgd[index];
      return new Function1D<Double, Double>() {
        @Override
        public Double evaluate(final Double x) {
          //do not call update - all ready called for getting the value
          //only the protection leg element at this node has sensitivity to it
          final double protLegPVSense = _protLegElmtPV[index][1];

          double premLegPVSense = 0.0;
          for (int i = 0; i < nCoupons; i++) {
            final int jj = iCoupons[i];
            premLegPVSense += _premLegElmtPV[jj][1];
          }
          final double pvSense = (lgd * protLegPVSense - premium * premLegPVSense) / _valuationDF;
          return pvSense;
        }
      };
    }

    // set node `index` to zero rate h and re-price the elements this CDS
    // needs while solving (used inside the root finder)
    private void update(final double h, final int index) {
      _creditCurve.setRate(h, index);
      _protLegElmtPV[index] = _protElems[index].pvAndSense(_creditCurve);
      final int[] iCoupons = _cdsCouponsUpdateMap[index];
      final int n = iCoupons.length;
      for (int i = 0; i < n; i++) {
        final int jj = iCoupons[i];
        _premLegElmtPV[jj] = _premElems[jj].pvAndSense(_creditCurve);
      }
    }

    // commit the solved rate at node `index` and re-price every element that
    // depends on that node (used once per node, after the root is found)
    private void updateAll(final double h, final int index) {
      _creditCurve.setRate(h, index);
      _protLegElmtPV[index] = _protElems[index].pvAndSense(_creditCurve);
      final int[] iCoupons = _knot2CouponsMap[index];
      final int n = iCoupons.length;
      for (int i = 0; i < n; i++) {
        final int jj = iCoupons[i];
        _premLegElmtPV[jj] = _premElems[jj].pvAndSense(_creditCurve);
      }
    }

  }

  // intersection of two sorted int arrays (both maps are built in ascending
  // order, so binary search over the longer array is valid)
  private static int[] intersection(final int[] first, final int[] second) {
    final int n1 = first.length;
    final int n2 = second.length;
    int[] a;
    int[] b;
    int n;
    if (n1 > n2) {
      a = second;
      b = first;
      n = n2;
    } else {
      a = first;
      b = second;
      n = n1;
    }
    final int[] temp = new int[n];
    int count = 0;
    for (int i = 0; i < n; i++) {
      final int index = Arrays.binarySearch(b, a[i]);
      if (index >= 0) {
        temp[count++] = a[i];
      }
    }
    final int[] res = new int[count];
    System.arraycopy(temp, 0, res, 0, count);
    return res;
  }

  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((_arbHandle == null) ? 0 : _arbHandle.hashCode());
    // Correction made PLAT-6314
    //    result = prime * result + Arrays.hashCode(_cds2CouponsMap);
    //    result = prime * result + Arrays.hashCode(_cdsCouponsUpdateMap);
    //    result = prime * result + Arrays.hashCode(_knot2CouponsMap);
    result = prime * result + Arrays.deepHashCode(_cds2CouponsMap);
    result = prime * result + Arrays.deepHashCode(_cdsCouponsUpdateMap);
    result = prime * result + Arrays.deepHashCode(_knot2CouponsMap);
    result = prime * result + Arrays.hashCode(_lgd);
    result = prime * result + _nCDS;
    result = prime * result + _nCoupons;
    result = prime * result + Arrays.hashCode(_premElems);
    result = prime * result + Arrays.hashCode(_protElems);
    result = prime * result + Arrays.hashCode(_t);
    result = prime * result + Arrays.hashCode(_unitAccured);
    long temp;
    temp = Double.doubleToLongBits(_valuationDF);
    result = prime * result + (int) (temp ^ (temp >>> 32));
    return result;
  }

  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null) {
      return false;
    }
    if (getClass() != obj.getClass()) {
      return false;
    }
    final CreditCurveCalibrator other = (CreditCurveCalibrator) obj;
    if (_arbHandle != other._arbHandle) {
      return false;
    }
    if (!Arrays.deepEquals(_cds2CouponsMap, other._cds2CouponsMap)) {
      return false;
    }
    if (!Arrays.deepEquals(_cdsCouponsUpdateMap, other._cdsCouponsUpdateMap)) {
      return false;
    }
    if (!Arrays.deepEquals(_knot2CouponsMap, other._knot2CouponsMap)) {
      return false;
    }
    if (!Arrays.equals(_lgd, other._lgd)) {
      return false;
    }
    if (_nCDS != other._nCDS) {
      return false;
    }
    if (_nCoupons != other._nCoupons) {
      return false;
    }
    // NOTE(review): relies on element-wise equals of the leg-element types —
    // if they use identity equality this compares by reference; verify
    if (!Arrays.equals(_premElems, other._premElems)) {
      return false;
    }
    if (!Arrays.equals(_protElems, other._protElems)) {
      return false;
    }
    if (!Arrays.equals(_t, other._t)) {
      return false;
    }
    if (!Arrays.equals(_unitAccured, other._unitAccured)) {
      return false;
    }
    if (Double.doubleToLongBits(_valuationDF) != Double.doubleToLongBits(other._valuationDF)) {
      return false;
    }
    return true;
  }

}
| apache-2.0 |
robertdale/tinkerpop | gremlin-core/src/main/java/org/apache/tinkerpop/gremlin/process/remote/EmbeddedRemoteConnection.java | 3151 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tinkerpop.gremlin.process.remote;
import org.apache.tinkerpop.gremlin.jsr223.JavaTranslator;
import org.apache.tinkerpop.gremlin.process.remote.traversal.EmbeddedRemoteTraversal;
import org.apache.tinkerpop.gremlin.process.remote.traversal.RemoteTraversal;
import org.apache.tinkerpop.gremlin.process.traversal.Bytecode;
import org.apache.tinkerpop.gremlin.process.traversal.Traversal;
import org.apache.tinkerpop.gremlin.process.traversal.Traverser;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.structure.Graph;
import java.util.Iterator;
import java.util.concurrent.CompletableFuture;
/**
* Allows a {@link RemoteConnection} to be submitted to a "local" {@link Graph} instance thus simulating a connection
* to a remote source. Basic usage is as follows:
*
* <pre>
* {@code
* // Using TinkerGraph here but any embedded Graph instance would suffice
* Graph graph = TinkerFactory.createModern();
* GraphTraversalSource g = graph.traversal();
*
* // setup the remote as normal but give it the embedded "g" so that it executes against that
* GraphTraversalSource simulatedRemoteG = TraversalSourceFactory.traversal(new EmbeddedRemoteConnection(g));
* assertEquals(6, simulatedRemoteG.V().count().next().intValue());
* }
* </pre>
*
* @author Stephen Mallette (http://stephen.genoprime.com)
*/
public class EmbeddedRemoteConnection implements RemoteConnection {

    /** The embedded traversal source that all submitted bytecode is executed against. */
    private final GraphTraversalSource g;

    /**
     * Creates a connection whose submissions run locally against the supplied
     * embedded traversal source.
     */
    public EmbeddedRemoteConnection(final GraphTraversalSource g) {
        this.g = g;
    }

    /**
     * Translates the given bytecode via {@link JavaTranslator} against the embedded
     * traversal source and reports the resulting traversal (or any failure) through
     * the returned future.
     */
    @Override
    public <E> CompletableFuture<RemoteTraversal<?, E>> submitAsync(final Bytecode bytecode) throws RemoteConnectionException {
        // default implementation for backward compatibility to 3.2.4 - this method will probably just become
        // the new submit() in 3.3.x when the deprecation is removed
        final CompletableFuture<RemoteTraversal<?, E>> result = new CompletableFuture<>();
        try {
            result.complete(new EmbeddedRemoteTraversal(JavaTranslator.of(g).translate(bytecode)));
        } catch (Exception ex) {
            result.completeExceptionally(ex);
        }
        return result;
    }

    /** Closes the underlying embedded traversal source. */
    @Override
    public void close() throws Exception {
        g.close();
    }
}
| apache-2.0 |
leasual/Mizuu | app/src/main/java/com/miz/mizuu/fragments/MovieLibraryFragment.java | 24510 | /*
* Copyright (C) 2014 Michell Bak
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.miz.mizuu.fragments;
import android.app.SearchManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.SharedPreferences;
import android.graphics.Bitmap.Config;
import android.graphics.Typeface;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.app.ActivityOptionsCompat;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.support.v4.util.Pair;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.view.MenuItemCompat.OnActionExpandListener;
import android.support.v7.widget.CardView;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.SearchView.OnQueryTextListener;
import android.view.ActionMode;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemClickListener;
import android.widget.BaseAdapter;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import com.github.ksoichiro.android.observablescrollview.ObservableGridView;
import com.miz.functions.CoverItem;
import com.miz.functions.MediumMovie;
import com.miz.functions.MizLib;
import com.miz.loader.MovieFilter;
import com.miz.loader.MovieLoader;
import com.miz.loader.MovieLibraryType;
import com.miz.loader.MovieSortType;
import com.miz.loader.OnLoadCompletedCallback;
import com.miz.mizuu.MizuuApplication;
import com.miz.mizuu.MovieCollection;
import com.miz.mizuu.MovieDetails;
import com.miz.mizuu.R;
import com.miz.mizuu.UnidentifiedMovies;
import com.miz.mizuu.Update;
import com.miz.utils.LocalBroadcastUtils;
import com.miz.utils.MovieDatabaseUtils;
import com.miz.utils.TypefaceUtils;
import com.miz.utils.ViewUtils;
import com.squareup.picasso.Picasso;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import static com.miz.functions.PreferenceKeys.GRID_ITEM_SIZE;
import static com.miz.functions.PreferenceKeys.IGNORED_TITLE_PREFIXES;
import static com.miz.functions.PreferenceKeys.SHOW_TITLES_IN_GRID;
/**
 * Fragment that renders the movie library as a grid of cover art. The "type" argument
 * (a {@link MovieLoader} constant) selects which view of the library is shown: all
 * movies, favorites, new releases, watchlist, watched/unwatched movies, or collections.
 * Handles search, sorting, filtering, a multi-select contextual action mode, and
 * reacts to library-update broadcasts and preference changes.
 */
public class MovieLibraryFragment extends Fragment implements SharedPreferences.OnSharedPreferenceChangeListener {

    private Context mContext;
    private SharedPreferences mSharedPreferences;
    // Grid cell size and spacing in pixels, derived from preferences/resources.
    private int mImageThumbSize, mImageThumbSpacing;
    private LoaderAdapter mAdapter;
    private ObservableGridView mGridView;
    private ProgressBar mProgressBar;
    // mLoading starts true so the adapter does not report "empty" before the first load finishes.
    private boolean mShowTitles, mIgnorePrefixes, mLoading = true;
    private Picasso mPicasso;
    private Config mConfig;
    // Loads the movie data asynchronously and notifies mCallback when done.
    private MovieLoader mMovieLoader;
    private SearchView mSearchView;
    // Views shown when the current library view has no content.
    private View mEmptyLibraryLayout;
    private TextView mEmptyLibraryTitle, mEmptyLibraryDescription;

    /**
     * Empty constructor as per the Fragment documentation
     */
    public MovieLibraryFragment() {}

    /**
     * Factory method; packs the library type into the fragment's arguments.
     *
     * @param type one of the {@link MovieLoader} library-type constants
     */
    public static MovieLibraryFragment newInstance(int type) {
        MovieLibraryFragment frag = new MovieLibraryFragment();
        Bundle b = new Bundle();
        b.putInt("type", type);
        frag.setArguments(b);
        return frag;
    }

    /**
     * Reads display preferences, prepares Picasso/adapter, and registers for
     * library-update and actor-search local broadcasts.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        setHasOptionsMenu(true);

        mContext = getActivity().getApplicationContext();

        // Set OnSharedPreferenceChange listener
        PreferenceManager.getDefaultSharedPreferences(mContext).registerOnSharedPreferenceChangeListener(this);

        // Initialize the PreferenceManager variable and preference variable(s)
        mSharedPreferences = PreferenceManager.getDefaultSharedPreferences(getActivity());
        mIgnorePrefixes = mSharedPreferences.getBoolean(IGNORED_TITLE_PREFIXES, false);
        mShowTitles = mSharedPreferences.getBoolean(SHOW_TITLES_IN_GRID, true);

        mImageThumbSize = ViewUtils.getGridViewThumbSize(mContext);
        mImageThumbSpacing = getResources().getDimensionPixelSize(R.dimen.image_thumbnail_spacing);

        mPicasso = MizuuApplication.getPicasso(mContext);
        mConfig = MizuuApplication.getBitmapConfig();

        mAdapter = new LoaderAdapter(mContext);

        // Same receiver registered for two different intents; onReceive() distinguishes them.
        LocalBroadcastManager.getInstance(mContext).registerReceiver(mMessageReceiver, new IntentFilter(LocalBroadcastUtils.UPDATE_MOVIE_LIBRARY));
        LocalBroadcastManager.getInstance(getActivity()).registerReceiver(mMessageReceiver, new IntentFilter("mizuu-movie-actor-search"));
    }

    /** Unregisters the broadcast receiver and the preference-change listener. */
    @Override
    public void onDestroy() {
        super.onDestroy();

        // Unregister since the activity is about to be closed.
        LocalBroadcastManager.getInstance(mContext).unregisterReceiver(mMessageReceiver);

        PreferenceManager.getDefaultSharedPreferences(mContext).unregisterOnSharedPreferenceChangeListener(this);
    }

    // Handles both "library updated" broadcasts (plain reload) and actor-search
    // broadcasts (reload filtered to "actor: <name>").
    private BroadcastReceiver mMessageReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            if (mMovieLoader != null) {
                if (intent.filterEquals(new Intent("mizuu-movie-actor-search"))) {
                    mMovieLoader.search("actor: " + intent.getStringExtra("intent_extra_data_key"));
                } else {
                    mMovieLoader.load();
                }
                showProgressBar();
            }
        }
    };

    // Invoked by MovieLoader when a load completes; refreshing the adapter also
    // hides the progress bar (see LoaderAdapter.notifyDataSetChanged()).
    private OnLoadCompletedCallback mCallback = new OnLoadCompletedCallback() {
        @Override
        public void onLoadCompleted() {
            mAdapter.notifyDataSetChanged();
        }
    };

    /**
     * Inflates the grid layout, wires up the adapter, click handling and (for
     * non-collection views) the multi-select contextual action mode, then kicks
     * off the initial load.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        View v = inflater.inflate(R.layout.image_grid_fragment, container, false);

        mProgressBar = (ProgressBar) v.findViewById(R.id.progress);

        mEmptyLibraryLayout = v.findViewById(R.id.empty_library_layout);
        mEmptyLibraryTitle = (TextView) v.findViewById(R.id.empty_library_title);
        mEmptyLibraryTitle.setTypeface(TypefaceUtils.getRobotoCondensedRegular(mContext));
        mEmptyLibraryDescription = (TextView) v.findViewById(R.id.empty_library_description);
        mEmptyLibraryDescription.setTypeface(TypefaceUtils.getRobotoLight(mContext));

        // NOTE(review): a fresh adapter is created here even though onCreate() already
        // created one — the earlier instance is discarded.
        mAdapter = new LoaderAdapter(mContext);

        mGridView = (ObservableGridView) v.findViewById(R.id.gridView);
        mGridView.setAdapter(mAdapter);
        mGridView.setColumnWidth(mImageThumbSize);
        mGridView.setOnItemClickListener(new OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {
                viewMovieDetails(arg2, arg1);
            }
        });

        // We only want to display the contextual menu if we're showing movies, not collections
        if (getArguments().getInt("type") != MovieLoader.COLLECTIONS) {
            mGridView.setChoiceMode(GridView.CHOICE_MODE_MULTIPLE_MODAL);
            mGridView.setMultiChoiceModeListener(new AbsListView.MultiChoiceModeListener() {
                @Override
                public void onItemCheckedStateChanged(ActionMode mode, int position, long id, boolean checked) {
                    mAdapter.setItemChecked(position, checked);

                    mode.setTitle(String.format(getString(R.string.selected),
                            mAdapter.getCheckedItemCount()));
                }

                @Override
                public boolean onCreateActionMode(ActionMode mode, Menu menu) {
                    getActivity().getMenuInflater().inflate(R.menu.movie_library_cab, menu);
                    return true;
                }

                @Override
                public boolean onPrepareActionMode(ActionMode mode, Menu menu) {
                    return false;
                }

                @Override
                public boolean onActionItemClicked(ActionMode mode, MenuItem item) {
                    int id = item.getItemId();

                    switch (id) {
                        case R.id.movie_add_fav:
                            MovieDatabaseUtils.setMoviesFavourite(mContext, mAdapter.getCheckedMovies(), true);
                            break;
                        case R.id.movie_remove_fav:
                            MovieDatabaseUtils.setMoviesFavourite(mContext, mAdapter.getCheckedMovies(), false);
                            break;
                        case R.id.movie_watched:
                            MovieDatabaseUtils.setMoviesWatched(mContext, mAdapter.getCheckedMovies(), true);
                            break;
                        case R.id.movie_unwatched:
                            MovieDatabaseUtils.setMoviesWatched(mContext, mAdapter.getCheckedMovies(), false);
                            break;
                        case R.id.add_to_watchlist:
                            MovieDatabaseUtils.setMoviesWatchlist(mContext, mAdapter.getCheckedMovies(), true);
                            break;
                        case R.id.remove_from_watchlist:
                            MovieDatabaseUtils.setMoviesWatchlist(mContext, mAdapter.getCheckedMovies(), false);
                            break;
                    }

                    // Sub-menu headers keep the action mode open; any real action closes
                    // it and triggers a library refresh.
                    if (!(id == R.id.watched_menu ||
                            id == R.id.watchlist_menu ||
                            id == R.id.favorite_menu)) {
                        mode.finish();

                        LocalBroadcastUtils.updateMovieLibrary(mContext);
                    }

                    return true;
                }

                @Override
                public void onDestroyActionMode(ActionMode mode) {
                    mAdapter.clearCheckedItems();
                }
            });
        }

        mMovieLoader = new MovieLoader(mContext, MovieLibraryType.fromInt(getArguments().getInt("type")), mCallback);
        mMovieLoader.setIgnorePrefixes(mIgnorePrefixes);
        mMovieLoader.load();
        showProgressBar();

        return v;
    }

    /**
     * Opens either the collection screen (for the COLLECTIONS view) or the movie
     * details screen for the item at the given adapter position.
     *
     * @param position adapter position of the clicked item
     * @param view     the clicked grid cell, used for the shared-element cover
     *                 transition; may be null (e.g. "random movie"), in which case
     *                 no transition animation is used
     */
    private void viewMovieDetails(int position, View view) {
        Intent intent = new Intent();
        if (mMovieLoader.getType() == MovieLibraryType.COLLECTIONS) { // Collection
            intent.putExtra("collectionId", mAdapter.getItem(position).getCollectionId());
            intent.putExtra("collectionTitle", mAdapter.getItem(position).getCollection());
            intent.setClass(mContext, MovieCollection.class);
            startActivity(intent);
        } else {
            intent.putExtra("tmdbId", mAdapter.getItem(position).getTmdbId());
            intent.setClass(mContext, MovieDetails.class);

            if (view != null) {
                Pair<View, String> pair = new Pair<>(view.findViewById(R.id.cover), "cover");
                ActivityOptionsCompat options = ActivityOptionsCompat.makeSceneTransitionAnimation(getActivity(), pair);
                ActivityCompat.startActivityForResult(getActivity(), intent, 0, options.toBundle());
            } else {
                startActivityForResult(intent, 0);
            }
        }
    }

    /**
     * Grid adapter backed directly by {@link MovieLoader#getResults()}. Also tracks
     * which positions are checked while the contextual action mode is active.
     */
    private class LoaderAdapter extends BaseAdapter {

        // Adapter positions currently selected in the contextual action mode.
        private Set<Integer> mChecked = new HashSet<>();
        private LayoutInflater mInflater;
        private final Context mContext;
        private Typeface mTypeface;

        public LoaderAdapter(Context context) {
            mContext = context;
            mInflater = (LayoutInflater) mContext.getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            mTypeface = TypefaceUtils.getRobotoMedium(mContext);
        }

        /** Marks/unmarks a position as checked and redraws the grid. */
        public void setItemChecked(int index, boolean checked) {
            if (checked)
                mChecked.add(index);
            else
                mChecked.remove(index);

            notifyDataSetChanged();
        }

        /** Clears the selection (called when the action mode is dismissed). */
        public void clearCheckedItems() {
            mChecked.clear();
            notifyDataSetChanged();
        }

        public int getCheckedItemCount() {
            return mChecked.size();
        }

        /** @return the movies corresponding to the currently checked positions */
        public List<MediumMovie> getCheckedMovies() {
            List<MediumMovie> movies = new ArrayList<>(mChecked.size());
            for (Integer i : mChecked)
                movies.add(getItem(i));
            return movies;
        }

        @Override
        public boolean isEmpty() {
            // Not "empty" while a load is still in flight, so the empty view
            // isn't flashed before the first results arrive.
            return getCount() == 0 && !mLoading;
        }

        @Override
        public int getCount() {
            if (mMovieLoader != null)
                return mMovieLoader.getResults().size();
            return 0;
        }

        @Override
        public MediumMovie getItem(int position) {
            return mMovieLoader.getResults().get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup container) {
            final MediumMovie movie = getItem(position);

            // Standard view-holder recycling pattern.
            CoverItem holder;
            if (convertView == null) {
                convertView = mInflater.inflate(R.layout.grid_cover, container, false);
                holder = new CoverItem();

                holder.cardview = (CardView) convertView.findViewById(R.id.card);
                holder.cover = (ImageView) convertView.findViewById(R.id.cover);
                holder.text = (TextView) convertView.findViewById(R.id.text);
                holder.text.setTypeface(mTypeface);

                convertView.setTag(holder);
            } else {
                holder = (CoverItem) convertView.getTag();
            }

            if (!mShowTitles) {
                holder.text.setVisibility(View.GONE);
            } else {
                holder.text.setVisibility(View.VISIBLE);
                // Collections are labeled by collection name, movies by title.
                holder.text.setText(mMovieLoader.getType() == MovieLibraryType.COLLECTIONS ?
                        movie.getCollection() : movie.getTitle());
            }

            // Placeholder color shown while Picasso loads the cover asynchronously.
            holder.cover.setImageResource(R.color.card_background_dark);
            mPicasso.load(mMovieLoader.getType() == MovieLibraryType.COLLECTIONS ?
                    movie.getCollectionPoster() : movie.getThumbnail()).placeholder(R.drawable.bg).config(mConfig).into(holder);

            // Checked cells get a highlight overlay.
            if (mChecked.contains(position)) {
                holder.cardview.setForeground(getResources().getDrawable(R.drawable.checked_foreground_drawable));
            } else {
                holder.cardview.setForeground(null);
            }

            return convertView;
        }

        @Override
        public void notifyDataSetChanged() {
            super.notifyDataSetChanged();

            // Hide the progress bar once the data set has been changed
            hideProgressBar();

            if (isEmpty()) {
                showEmptyView();
            } else {
                hideEmptyView();
            }
        }
    }

    /** Restores the unfiltered view after the search box is collapsed. */
    private void onSearchViewCollapsed() {
        mMovieLoader.load();
        showProgressBar();
    }

    /**
     * Builds the options menu, hiding sort/filter/random items that don't apply to
     * the collections view, and wires the SearchView to live-filter the loader.
     */
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.menu, menu);

        menu.findItem(R.id.random).setVisible(mMovieLoader.getType() != MovieLibraryType.COLLECTIONS);
        if (mMovieLoader.getType() == MovieLibraryType.COLLECTIONS) {
            menu.findItem(R.id.sort).setVisible(false);
            menu.findItem(R.id.filters).setVisible(false);
        }

        MenuItemCompat.setOnActionExpandListener(menu.findItem(R.id.search_textbox), new OnActionExpandListener() {
            @Override
            public boolean onMenuItemActionExpand(MenuItem item) {
                return true;
            }

            @Override
            public boolean onMenuItemActionCollapse(MenuItem item) {
                onSearchViewCollapsed();
                return true;
            }
        });

        mSearchView = (SearchView) menu.findItem(R.id.search_textbox).getActionView();
        mSearchView.setOnQueryTextListener(new OnQueryTextListener() {
            @Override
            public boolean onQueryTextChange(String newText) {
                // Search as the user types; an emptied query restores the full list.
                if (newText.length() > 0) {
                    mMovieLoader.search(newText);
                    showProgressBar();
                } else {
                    onSearchViewCollapsed();
                }
                return true;
            }
            @Override
            public boolean onQueryTextSubmit(String query) { return false; }
        });
        SearchManager searchManager = (SearchManager) getActivity().getSystemService(Context.SEARCH_SERVICE);
        mSearchView.setSearchableInfo(searchManager.getSearchableInfo(getActivity().getComponentName()));

        super.onCreateOptionsMenu(menu, inflater);
    }

    /**
     * Dispatches menu actions: library update, sort-order changes, filter dialogs,
     * quick filters, "random movie", and the unidentified-files screen.
     * NOTE(review): the superclass result is ignored and this always returns true.
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        super.onOptionsItemSelected(item);

        switch (item.getItemId()) {
            case R.id.update:
                Intent intent = new Intent();
                intent.setClass(mContext, Update.class);
                intent.putExtra("isMovie", true);
                startActivityForResult(intent, 0);
                break;
            case R.id.menuSortAdded:
                mMovieLoader.setSortType(MovieSortType.DATE_ADDED);
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.menuSortRating:
                mMovieLoader.setSortType(MovieSortType.RATING);
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.menuSortWeightedRating:
                mMovieLoader.setSortType(MovieSortType.WEIGHTED_RATING);
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.menuSortRelease:
                mMovieLoader.setSortType(MovieSortType.RELEASE);
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.menuSortTitle:
                mMovieLoader.setSortType(MovieSortType.TITLE);
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.menuSortDuration:
                mMovieLoader.setSortType(MovieSortType.DURATION);
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.genres:
                mMovieLoader.showGenresFilterDialog(getActivity());
                break;
            case R.id.certifications:
                mMovieLoader.showCertificationsFilterDialog(getActivity());
                break;
            case R.id.folders:
                mMovieLoader.showFoldersFilterDialog(getActivity());
                break;
            case R.id.fileSources:
                mMovieLoader.showFileSourcesFilterDialog(getActivity());
                break;
            case R.id.release_year:
                mMovieLoader.showReleaseYearFilterDialog(getActivity());
                break;
            case R.id.offline_files:
                mMovieLoader.addFilter(new MovieFilter(MovieFilter.OFFLINE_FILES));
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.available_files:
                mMovieLoader.addFilter(new MovieFilter(MovieFilter.AVAILABLE_FILES));
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.clear_filters:
                mMovieLoader.clearFilters();
                mMovieLoader.load();
                showProgressBar();
                break;
            case R.id.random:
                if (mAdapter.getCount() > 0) {
                    int random = new Random().nextInt(mAdapter.getCount());
                    viewMovieDetails(random, null);
                }
                break;
            case R.id.unidentifiedFiles:
                startActivity(new Intent(mContext, UnidentifiedMovies.class));
                break;
        }

        return true;
    }

    /** Shows the grid and hides the spinner; marks loading as finished. */
    private void hideProgressBar() {
        mGridView.setVisibility(View.VISIBLE);
        mProgressBar.setVisibility(View.GONE);
        mLoading = false;
    }

    /** Hides the grid and shows the spinner; marks loading as in progress. */
    private void showProgressBar() {
        mGridView.setVisibility(View.GONE);
        mProgressBar.setVisibility(View.VISIBLE);
        mLoading = true;
    }

    /**
     * Shows the empty-library placeholder with a title/description matching the
     * current context (empty search result vs. empty library type).
     */
    private void showEmptyView() {
        mGridView.setVisibility(View.GONE);
        mProgressBar.setVisibility(View.GONE);
        mEmptyLibraryLayout.setVisibility(View.VISIBLE);

        if (mMovieLoader.isShowingSearchResults()) {
            mEmptyLibraryTitle.setText(R.string.no_search_results);
            mEmptyLibraryDescription.setText(R.string.no_search_results_description);
        } else {
            switch (mMovieLoader.getType()) {
                case ALL_MOVIES:
                    mEmptyLibraryTitle.setText(R.string.no_movies);
                    mEmptyLibraryDescription.setText(MizLib.isTablet(mContext) ?
                            R.string.no_movies_description_tablet : R.string.no_movies_description);
                    break;
                case FAVORITES:
                    mEmptyLibraryTitle.setText(R.string.no_favorites);
                    mEmptyLibraryDescription.setText(R.string.no_favorites_description);
                    break;
                case NEW_RELEASES:
                    mEmptyLibraryTitle.setText(R.string.no_new_releases);
                    mEmptyLibraryDescription.setText(R.string.no_new_releases_description);
                    break;
                case WATCHLIST:
                    mEmptyLibraryTitle.setText(R.string.empty_watchlist);
                    mEmptyLibraryDescription.setText(R.string.empty_watchlist_description);
                    break;
                case WATCHED:
                    mEmptyLibraryTitle.setText(R.string.no_watched_movies);
                    mEmptyLibraryDescription.setText(R.string.no_watched_movies_description);
                    break;
                case UNWATCHED:
                    mEmptyLibraryTitle.setText(R.string.no_unwatched_movies);
                    mEmptyLibraryDescription.setText(R.string.no_unwatched_movies_description);
                    break;
                case COLLECTIONS:
                    mEmptyLibraryTitle.setText(R.string.no_movie_collections);
                    mEmptyLibraryDescription.setText(R.string.no_movie_collections_description);
                    break;
            }
        }
    }

    private void hideEmptyView() {
        mEmptyLibraryLayout.setVisibility(View.GONE);
    }

    /**
     * Reacts to preference changes: title-prefix handling triggers a reload, grid
     * item size resizes the columns, and the show-titles toggle redraws the cells.
     */
    @Override
    public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) {
        if (key.equals(IGNORED_TITLE_PREFIXES)) {
            mIgnorePrefixes = mSharedPreferences.getBoolean(IGNORED_TITLE_PREFIXES, false);
            if (mMovieLoader != null) {
                mMovieLoader.setIgnorePrefixes(mIgnorePrefixes);
                mMovieLoader.load();
            }
        } else if (key.equals(GRID_ITEM_SIZE)) {
            mImageThumbSize = ViewUtils.getGridViewThumbSize(mContext);

            if (mGridView != null)
                mGridView.setColumnWidth(mImageThumbSize);

            mAdapter.notifyDataSetChanged();
        } else if (key.equals(SHOW_TITLES_IN_GRID)) {
            mShowTitles = sharedPreferences.getBoolean(SHOW_TITLES_IN_GRID, true);
            mAdapter.notifyDataSetChanged();
        }
    }
}
anti-social/elasticsearch | src/main/java/org/elasticsearch/common/lucene/docset/DocIdSets.java | 7061 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.lucene.docset;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.DocValuesDocIdSet;
import org.apache.lucene.search.FilteredDocIdSetIterator;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.RamUsageEstimator;
import org.apache.lucene.util.RoaringDocIdSet;
import org.apache.lucene.util.SparseFixedBitSet;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.search.XDocIdSetIterator;
import java.io.IOException;
/**
*/
/**
 * Static helpers for working with Lucene {@link DocIdSet}s: emptiness/"broken
 * iterator" checks, conversion to cacheable sets, and adapting sets to
 * {@link Bits} views.
 */
public class DocIdSets {

    /**
     * Return the size of the doc id set, plus a reference to it.
     */
    public static long sizeInBytes(DocIdSet docIdSet) {
        return RamUsageEstimator.NUM_BYTES_OBJECT_REF + docIdSet.ramBytesUsed();
    }

    /**
     * Is it an empty {@link DocIdSet}?
     */
    public static boolean isEmpty(@Nullable DocIdSet set) {
        return set == null || set == DocIdSet.EMPTY;
    }

    /**
     * Check if the given iterator can nextDoc() or advance() in sub-linear time
     * of the number of documents. For instance, an iterator that would need to
     * iterate one document at a time to check for its value would be considered
     * broken.
     */
    public static boolean isBroken(DocIdSetIterator iterator) {
        while (iterator instanceof FilteredDocIdSetIterator) {
            // this iterator is filtered (likely by some bits)
            // unwrap in order to check if the underlying iterator is fast
            iterator = ((FilteredDocIdSetIterator) iterator).getDelegate();
        }
        if (iterator instanceof XDocIdSetIterator) {
            // XDocIdSetIterator self-reports whether it is slow.
            return ((XDocIdSetIterator) iterator).isBroken();
        }
        if (iterator instanceof MatchDocIdSetIterator) {
            return true;
        }
        // DocValuesDocIdSet produces anonymous slow iterators
        if (iterator != null && DocValuesDocIdSet.class.equals(iterator.getClass().getEnclosingClass())) {
            return true;
        }
        return false;
    }

    /**
     * Converts to a cacheable {@link DocIdSet}
     * <p/>
     * This never returns <code>null</code>.
     */
    public static DocIdSet toCacheable(LeafReader reader, @Nullable DocIdSet set) throws IOException {
        if (set == null || set == DocIdSet.EMPTY) {
            return DocIdSet.EMPTY;
        }
        final DocIdSetIterator it = set.iterator();
        if (it == null) {
            return DocIdSet.EMPTY;
        }
        final int firstDoc = it.nextDoc();
        if (firstDoc == DocIdSetIterator.NO_MORE_DOCS) {
            // The iterator produced no docs at all: normalize to the shared empty set.
            return DocIdSet.EMPTY;
        }
        if (set instanceof BitDocIdSet) {
            // Already a materialized bitset; presumably safe to cache as-is.
            return set;
        }
        // Copy the remaining docs into a RoaringDocIdSet (firstDoc was already consumed
        // above, so it must be added explicitly before draining the iterator).
        final RoaringDocIdSet.Builder builder = new RoaringDocIdSet.Builder(reader.maxDoc());
        builder.add(firstDoc);
        for (int doc = it.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = it.nextDoc()) {
            builder.add(doc);
        }
        return builder.build();
    }

    /**
     * Get a build a {@link Bits} instance that will match all documents
     * contained in {@code set}. Note that this is a potentially heavy
     * operation as this might require to consume an iterator of this set
     * entirely and to load it into a {@link BitSet}. Prefer using
     * {@link #asSequentialAccessBits} if you only need to consume the
     * {@link Bits} once and in order.
     */
    public static Bits toSafeBits(int maxDoc, @Nullable DocIdSet set) throws IOException {
        if (set == null) {
            return new Bits.MatchNoBits(maxDoc);
        }
        // Fast path: the set may expose a random-access Bits view directly.
        Bits bits = set.bits();
        if (bits != null) {
            return bits;
        }
        DocIdSetIterator iterator = set.iterator();
        if (iterator == null) {
            return new Bits.MatchNoBits(maxDoc);
        }
        // Slow path: drain the iterator into an in-memory BitSet.
        return toBitSet(iterator, maxDoc);
    }

    /**
     * Given a {@link DocIdSet}, return a {@link Bits} instance that will match
     * all documents contained in the set. Note that the returned {@link Bits}
     * instance should only be consumed once and in order.
     */
    public static Bits asSequentialAccessBits(final int maxDoc, @Nullable DocIdSet set) throws IOException {
        if (set == null) {
            return new Bits.MatchNoBits(maxDoc);
        }
        Bits bits = set.bits();
        if (bits != null) {
            return bits;
        }
        final DocIdSetIterator iterator = set.iterator();
        if (iterator == null) {
            return new Bits.MatchNoBits(maxDoc);
        }
        // Adapter that advances the single-use iterator lazily; get() may only be
        // called with non-decreasing indices (enforced below).
        return new Bits() {

            int previous = 0;

            @Override
            public boolean get(int index) {
                if (index < previous) {
                    throw new ElasticsearchIllegalArgumentException("This Bits instance can only be consumed in order. "
                            + "Got called on [" + index + "] while previously called on [" + previous + "]");
                }
                previous = index;

                int doc = iterator.docID();
                if (doc < index) {
                    try {
                        doc = iterator.advance(index);
                    } catch (IOException e) {
                        throw new ElasticsearchIllegalStateException("Cannot advance iterator", e);
                    }
                }
                return index == doc;
            }

            @Override
            public int length() {
                return maxDoc;
            }

        };
    }

    /**
     * Creates a {@link BitSet} from an iterator.
     */
    public static BitSet toBitSet(DocIdSetIterator iterator, int numBits) throws IOException {
        BitDocIdSet.Builder builder = new BitDocIdSet.Builder(numBits);
        builder.or(iterator);
        BitDocIdSet result = builder.build();
        if (result != null) {
            return result.bits();
        } else {
            // The builder yields null for an empty iterator; return an empty sparse bitset
            // so callers always get a non-null BitSet.
            return new SparseFixedBitSet(numBits);
        }
    }
}
| apache-2.0 |
kensipe/logstash | logstash-executor/src/main/java/org/apache/mesos/logstash/executor/docker/DockerLogStreamManager.java | 4414 | package org.apache.mesos.logstash.executor.docker;
import org.apache.mesos.logstash.executor.ConfigManager;
import org.apache.mesos.logstash.executor.frameworks.DockerFramework;
import org.apache.mesos.logstash.executor.logging.LogStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
/**
 * Tracks which log files inside Docker containers are currently being streamed and
 * reconciles that state against the log files a {@link DockerFramework} declares:
 * newly declared files get a stream started, files no longer declared get their
 * stream stopped.
 */
public class DockerLogStreamManager {

    // Bug fix: this logger was previously created with ConfigManager.class, which
    // attributed this class' output to the wrong logger category.
    private static final Logger LOGGER = LoggerFactory.getLogger(DockerLogStreamManager.class);

    /** Maps a container id to the set of log paths (and their live streams) being tailed. */
    final Map<String, Set<ProcessedDockerLogPath>> processedContainers;

    /** Starts and stops the actual log streams. */
    private final DockerStreamer streamer;

    public DockerLogStreamManager(DockerStreamer streamer) {
        this.streamer = streamer;
        this.processedContainers = new HashMap<>();
    }

    /**
     * Brings the streamed log files for the framework's container in sync with the
     * files the framework currently declares.
     */
    public synchronized void setupContainerLogfileStreaming(DockerFramework framework) {
        if (!isAlreadyStreaming(framework)) {
            processedContainers.put(framework.getContainerId(), new HashSet<>());
        }
        LOGGER.info("Setting up log streaming for " + framework.getName());
        streamUnprocessedLogFiles(framework);
        stopStreamingOfOrphanLogFiles(framework);
        LOGGER.info("Done processing: " + framework.getName());
    }

    /**
     * Stops every stream associated with the given container and forgets about it.
     * A container id that was never set up is a no-op (previously this threw a
     * NullPointerException).
     *
     * @param containerId id of the container whose streams should be torn down
     */
    public void stopStreamingForWholeFramework(String containerId) {
        Set<ProcessedDockerLogPath> paths = processedContainers.get(containerId);
        if (paths == null) {
            return;
        }
        for (ProcessedDockerLogPath processedDockerLogPath : paths) {
            LOGGER.info("Stop streaming of " + processedDockerLogPath.dockerLogPath);
            streamer.stopStreaming(containerId, processedDockerLogPath.logStream);
        }
        processedContainers.remove(containerId);
    }

    /** @return the ids of all containers this manager currently knows about */
    public Set<String> getProcessedContainers() {
        return processedContainers.keySet();
    }

    /**
     * @return the log paths currently streamed for the container, or an empty set
     *         if the container is unknown
     */
    public Set<DockerLogPath> getProcessedFiles(String containerId) {
        Set<ProcessedDockerLogPath> paths = processedContainers.get(containerId);
        if (paths == null) {
            return new HashSet<>();
        }
        return paths.stream()
                .map(ProcessedDockerLogPath::getDockerLogPath)
                .collect(Collectors.toSet());
    }

    /** Starts a stream for every declared log file that is not already being streamed. */
    private void streamUnprocessedLogFiles(DockerFramework framework) {
        Set<ProcessedDockerLogPath> processedPaths = processedContainers.get(framework.getContainerId());
        // The set of paths already streaming, hoisted out of the loop (it was rebuilt on
        // every iteration). It is kept up to date via add() below so duplicate entries
        // in the framework's file list are still only streamed once.
        Set<DockerLogPath> alreadyStreaming = processedPaths.stream()
                .map(ProcessedDockerLogPath::getDockerLogPath)
                .collect(Collectors.toCollection(HashSet::new));
        for (DockerLogPath dockerLogPath : framework.getLogFiles()) {
            if (alreadyStreaming.add(dockerLogPath)) {
                LOGGER.info("Start streaming: " + dockerLogPath);
                LogStream logStream = streamer.startStreaming(dockerLogPath);
                processedPaths.add(new ProcessedDockerLogPath(logStream, dockerLogPath));
            } else {
                LOGGER.info("Ignoring already streaming: " + dockerLogPath);
            }
        }
    }

    /** @return true when this container already has an entry in the manager */
    private boolean isAlreadyStreaming(DockerFramework framework) {
        return processedContainers.containsKey(framework.getContainerId());
    }

    /** Stops and removes streams for files the framework no longer declares. */
    private void stopStreamingOfOrphanLogFiles(DockerFramework framework) {
        List<DockerLogPath> frameWorkLogFiles = framework.getLogFiles();
        Iterator<ProcessedDockerLogPath> iterator =
                processedContainers.get(framework.getContainerId()).iterator();
        while (iterator.hasNext()) {
            ProcessedDockerLogPath processedDockerLogPath = iterator.next();
            if (!frameWorkLogFiles.contains(processedDockerLogPath.dockerLogPath)) {
                LOGGER.info("Stop streaming of " + processedDockerLogPath.dockerLogPath);
                streamer.stopStreaming(framework.getContainerId(), processedDockerLogPath.logStream);
                // Remove via the iterator to avoid ConcurrentModificationException.
                iterator.remove();
            }
        }
    }

    /** Pairs a log path inside a container with the live stream tailing it. */
    static class ProcessedDockerLogPath {
        final LogStream logStream;
        final DockerLogPath dockerLogPath;

        private ProcessedDockerLogPath(LogStream logStream, DockerLogPath dockerLogPath) {
            this.logStream = logStream;
            this.dockerLogPath = dockerLogPath;
        }

        public DockerLogPath getDockerLogPath() {
            return dockerLogPath;
        }
    }
}
| apache-2.0 |
saicheems/discovery-artifact-manager | toolkit/src/main/java/com/google/api/codegen/viewmodel/PagedListResponseFactoryClassView.java | 1635 | /* Copyright 2016 Google Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.api.codegen.viewmodel;
import com.google.auto.value.AutoValue;
@AutoValue
/**
 * View model holding the names needed to render a paged-list-response factory class
 * in generated client code: the factory's own name plus the request, response,
 * resource and paged-response type names and the page-streaming descriptor it uses.
 * Instances are immutable; build them via {@link #newBuilder()} (AutoValue-generated).
 */
@AutoValue
public abstract class PagedListResponseFactoryClassView {
    /** Name of the generated factory class. */
    public abstract String name();

    /** Fully resolved request type name. */
    public abstract String requestTypeName();

    /** Fully resolved response type name. */
    public abstract String responseTypeName();

    /** Type name of a single resource element in a page. */
    public abstract String resourceTypeName();

    /** Type name of the paged list response the factory produces. */
    public abstract String pagedListResponseTypeName();

    /** Name of the page-streaming descriptor referenced by the factory. */
    public abstract String pageStreamingDescriptorName();

    /** @return a builder backed by the AutoValue-generated implementation */
    public static Builder newBuilder() {
        return new AutoValue_PagedListResponseFactoryClassView.Builder();
    }

    /** Builder for {@link PagedListResponseFactoryClassView}; all fields are required. */
    @AutoValue.Builder
    public abstract static class Builder {
        public abstract Builder name(String val);

        public abstract Builder requestTypeName(String val);

        public abstract Builder responseTypeName(String val);

        public abstract Builder resourceTypeName(String val);

        public abstract Builder pagedListResponseTypeName(String val);

        public abstract Builder pageStreamingDescriptorName(String val);

        public abstract PagedListResponseFactoryClassView build();
    }
}
| apache-2.0 |
cooldoger/cassandra | test/unit/org/apache/cassandra/db/compaction/LeveledCompactionStrategyTest.java | 21177 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.compaction;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import org.junit.Assert;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.OrderedJUnit4ClassRunner;
import org.apache.cassandra.SchemaLoader;
import org.apache.cassandra.Util;
import org.apache.cassandra.UpdateBuilder;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.dht.Range;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.ISSTableScanner;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.notifications.SSTableAddedNotification;
import org.apache.cassandra.notifications.SSTableRepairStatusChanged;
import org.apache.cassandra.repair.ValidationManager;
import org.apache.cassandra.streaming.PreviewKind;
import org.apache.cassandra.repair.RepairJobDesc;
import org.apache.cassandra.repair.Validator;
import org.apache.cassandra.schema.CompactionParams;
import org.apache.cassandra.schema.KeyspaceParams;
import org.apache.cassandra.service.ActiveRepairService;
import org.apache.cassandra.utils.FBUtilities;
import static java.util.Collections.singleton;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Integration-style tests for {@link LeveledCompactionStrategy} (LCS).
 *
 * Each test writes enough 100 KB values into the {@code StandardLeveled} table
 * (configured with a 1 MB sstable size) to force sstables into L1/L2, then
 * exercises a specific LCS behavior: anticompaction grouping, validation,
 * scanner progress, manual level mutation, repaired/unrepaired manifest
 * handling, token-range compaction, and candidate ordering.
 */
@RunWith(OrderedJUnit4ClassRunner.class)
public class LeveledCompactionStrategyTest
{
    private static final Logger logger = LoggerFactory.getLogger(LeveledCompactionStrategyTest.class);
    private static final String KEYSPACE1 = "LeveledCompactionStrategyTest";
    private static final String CF_STANDARDDLEVELED = "StandardLeveled";
    // Keyspace/table under test; (re)bound before every test in enableCompaction().
    private Keyspace keyspace;
    private ColumnFamilyStore cfs;

    @BeforeClass
    public static void defineSchema() throws ConfigurationException
    {
        // Disable tombstone histogram rounding for tests
        System.setProperty("cassandra.streaminghistogram.roundseconds", "1");
        SchemaLoader.prepareServer();
        // 1 MB sstables make it cheap to overflow L0 and populate higher levels.
        SchemaLoader.createKeyspace(KEYSPACE1,
                                    KeyspaceParams.simple(1),
                                    SchemaLoader.standardCFMD(KEYSPACE1, CF_STANDARDDLEVELED)
                                                .compaction(CompactionParams.lcs(Collections.singletonMap("sstable_size_in_mb", "1"))));
    }

    /** Re-opens the test table and re-enables auto compaction before each test. */
    @Before
    public void enableCompaction()
    {
        keyspace = Keyspace.open(KEYSPACE1);
        cfs = keyspace.getColumnFamilyStore(CF_STANDARDDLEVELED);
        cfs.enableAutoCompaction();
    }

    /**
     * Since we use StandardLeveled CF for every test, we want to clean up after the test.
     */
    @After
    public void truncateSTandardLeveled()
    {
        cfs.truncateBlocking();
    }

    /**
     * Ensure that the grouping operation preserves the levels of grouped tables
     */
    @Test
    public void testGrouperLevels() throws Exception{
        ByteBuffer value = ByteBuffer.wrap(new byte[100 * 1024]); // 100 KB value, make it easy to have multiple files

        //Need entropy to prevent compression so size is predictable with compression enabled/disabled
        new Random().nextBytes(value.array());

        // Enough data to have a level 1 and 2
        int rows = 40;
        int columns = 20;

        // Adds enough data to trigger multiple sstable per level
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata(), String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }

        waitForLeveling(cfs);
        CompactionStrategyManager strategyManager = cfs.getCompactionStrategyManager();
        // Checking we're not completely bad at math
        int l1Count = strategyManager.getSSTableCountPerLevel()[1];
        int l2Count = strategyManager.getSSTableCountPerLevel()[2];
        if (l1Count == 0 || l2Count == 0)
        {
            logger.error("L1 or L2 has 0 sstables. Expected > 0 on both.");
            logger.error("L1: " + l1Count);
            logger.error("L2: " + l2Count);
            Assert.fail();
        }

        // Every anticompaction group must be homogeneous in sstable level.
        Collection<Collection<SSTableReader>> groupedSSTables = cfs.getCompactionStrategyManager().groupSSTablesForAntiCompaction(cfs.getLiveSSTables());
        for (Collection<SSTableReader> sstableGroup : groupedSSTables)
        {
            int groupLevel = -1; // level of the first sstable seen in this group
            Iterator<SSTableReader> it = sstableGroup.iterator();
            while (it.hasNext())
            {
                SSTableReader sstable = it.next();
                int tableLevel = sstable.getSSTableLevel();
                if (groupLevel == -1)
                    groupLevel = tableLevel;
                assert groupLevel == tableLevel;
            }
        }
    }

    /*
     * This exercises in particular the code of #4142
     */
    @Test
    public void testValidationMultipleSSTablePerLevel() throws Exception
    {
        byte [] b = new byte[100 * 1024];
        new Random().nextBytes(b);
        ByteBuffer value = ByteBuffer.wrap(b); // 100 KB value, make it easy to have multiple files

        // Enough data to have a level 1 and 2
        int rows = 40;
        int columns = 20;

        // Adds enough data to trigger multiple sstable per level
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata(), String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }

        waitForLeveling(cfs);
        CompactionStrategyManager strategyManager = cfs.getCompactionStrategyManager();
        // Checking we're not completely bad at math
        assertTrue(strategyManager.getSSTableCountPerLevel()[1] > 0);
        assertTrue(strategyManager.getSSTableCountPerLevel()[2] > 0);

        // Validate the full token ring; requires a registered parent repair session.
        Range<Token> range = new Range<>(Util.token(""), Util.token(""));
        int gcBefore = keyspace.getColumnFamilyStore(CF_STANDARDDLEVELED).gcBefore(FBUtilities.nowInSeconds());
        UUID parentRepSession = UUID.randomUUID();
        ActiveRepairService.instance.registerParentRepairSession(parentRepSession,
                                                                 FBUtilities.getBroadcastAddressAndPort(),
                                                                 Arrays.asList(cfs),
                                                                 Arrays.asList(range),
                                                                 false,
                                                                 ActiveRepairService.UNREPAIRED_SSTABLE,
                                                                 true,
                                                                 PreviewKind.NONE);
        RepairJobDesc desc = new RepairJobDesc(parentRepSession, UUID.randomUUID(), KEYSPACE1, CF_STANDARDDLEVELED, Arrays.asList(range));
        Validator validator = new Validator(desc, FBUtilities.getBroadcastAddressAndPort(), gcBefore, PreviewKind.NONE);
        // Blocks until validation finishes; throws if validation failed.
        ValidationManager.instance.submitValidation(cfs, validator).get();
    }

    /**
     * wait for leveled compaction to quiesce on the given columnfamily
     */
    public static void waitForLeveling(ColumnFamilyStore cfs) throws InterruptedException
    {
        CompactionStrategyManager strategyManager = cfs.getCompactionStrategyManager();
        while (true)
        {
            // since we run several compaction strategies we wait until L0 in all strategies is empty and
            // atleast one L1+ is non-empty. In these tests we always run a single data directory with only unrepaired data
            // so it should be good enough
            boolean allL0Empty = true;
            boolean anyL1NonEmpty = false;
            for (List<AbstractCompactionStrategy> strategies : strategyManager.getStrategies())
            {
                for (AbstractCompactionStrategy strategy : strategies)
                {
                    // Not an LCS table at all -> nothing to wait for.
                    if (!(strategy instanceof LeveledCompactionStrategy))
                        return;
                    // note that we check > 1 here, if there is too little data in L0, we don't compact it up to L1
                    if (((LeveledCompactionStrategy)strategy).getLevelSize(0) > 1)
                        allL0Empty = false;
                    for (int i = 1; i < 5; i++)
                        if (((LeveledCompactionStrategy)strategy).getLevelSize(i) > 0)
                            anyL1NonEmpty = true;
                }
            }
            if (allL0Empty && anyL1NonEmpty)
                return;
            // Poll; compaction runs on background threads.
            Thread.sleep(100);
        }
    }

    /**
     * Verifies that a leveled scanner over L1 reports its final position as the
     * total uncompressed size of the L1 sstables.
     */
    @Test
    public void testCompactionProgress() throws Exception
    {
        // make sure we have SSTables in L1
        byte [] b = new byte[100 * 1024];
        new Random().nextBytes(b);
        ByteBuffer value = ByteBuffer.wrap(b);
        int rows = 2;
        int columns = 10;
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata(), String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }

        waitForLeveling(cfs);
        LeveledCompactionStrategy strategy = (LeveledCompactionStrategy) cfs.getCompactionStrategyManager().getStrategies().get(1).get(0);
        assert strategy.getLevelSize(1) > 0;

        // get LeveledScanner for level 1 sstables
        Collection<SSTableReader> sstables = strategy.manifest.getLevel(1);
        List<ISSTableScanner> scanners = strategy.getScanners(sstables).scanners;
        assertEquals(1, scanners.size()); // should be one per level
        ISSTableScanner scanner = scanners.get(0);
        // scan through to the end
        while (scanner.hasNext())
            scanner.next();

        // scanner.getCurrentPosition should be equal to total bytes of L1 sstables
        assertEquals(scanner.getCurrentPosition(), SSTableReader.getTotalUncompressedBytes(sstables));
    }

    /**
     * Verifies that mutating sstable level metadata on disk is reflected both in
     * the readers (after reload) and in the LCS manifest.
     */
    @Test
    public void testMutateLevel() throws Exception
    {
        cfs.disableAutoCompaction();
        ByteBuffer value = ByteBuffer.wrap(new byte[100 * 1024]); // 100 KB value, make it easy to have multiple files

        // Enough data to have a level 1 and 2
        int rows = 40;
        int columns = 20;

        // Adds enough data to trigger multiple sstable per level
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata(), String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }
        cfs.forceBlockingFlush();
        LeveledCompactionStrategy strategy = (LeveledCompactionStrategy) cfs.getCompactionStrategyManager().getStrategies().get(1).get(0);
        cfs.forceMajorCompaction();

        // Push every live sstable to level 6 by rewriting its metadata directly.
        for (SSTableReader s : cfs.getLiveSSTables())
        {
            assertTrue(s.getSSTableLevel() != 6 && s.getSSTableLevel() > 0);
            strategy.manifest.remove(s);
            s.descriptor.getMetadataSerializer().mutateLevel(s.descriptor, 6);
            s.reloadSSTableMetadata();
            strategy.manifest.add(s);
        }
        // verify that all sstables in the changed set is level 6
        for (SSTableReader s : cfs.getLiveSSTables())
            assertEquals(6, s.getSSTableLevel());

        int[] levels = strategy.manifest.getAllLevelSize();
        // verify that the manifest has correct amount of sstables
        assertEquals(cfs.getLiveSSTables().size(), levels[6]);
    }

    /**
     * Verifies that flipping an sstable's repaired status moves it between the
     * unrepaired and repaired LCS manifests while keeping its level.
     */
    @Test
    public void testNewRepairedSSTable() throws Exception
    {
        byte [] b = new byte[100 * 1024];
        new Random().nextBytes(b);
        ByteBuffer value = ByteBuffer.wrap(b); // 100 KB value, make it easy to have multiple files

        // Enough data to have a level 1 and 2
        int rows = 40;
        int columns = 20;

        // Adds enough data to trigger multiple sstable per level
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata(), String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }
        waitForLeveling(cfs);
        cfs.disableAutoCompaction();

        // Let any in-flight compactions drain before inspecting the manifests.
        while(CompactionManager.instance.isCompacting(Arrays.asList(cfs), (sstable) -> true))
            Thread.sleep(100);

        CompactionStrategyManager manager = cfs.getCompactionStrategyManager();
        List<List<AbstractCompactionStrategy>> strategies = manager.getStrategies();
        // Index 0 holds the repaired strategy, index 1 the unrepaired one.
        LeveledCompactionStrategy repaired = (LeveledCompactionStrategy) strategies.get(0).get(0);
        LeveledCompactionStrategy unrepaired = (LeveledCompactionStrategy) strategies.get(1).get(0);
        assertEquals(0, repaired.manifest.getLevelCount());
        assertEquals(2, unrepaired.manifest.getLevelCount());
        assertTrue(manager.getSSTableCountPerLevel()[1] > 0);
        assertTrue(manager.getSSTableCountPerLevel()[2] > 0);

        for (SSTableReader sstable : cfs.getLiveSSTables())
            assertFalse(sstable.isRepaired());

        int sstableCount = 0;
        for (List<SSTableReader> level : unrepaired.manifest.generations)
            sstableCount += level.size();
        // we only have unrepaired sstables:
        assertEquals(sstableCount, cfs.getLiveSSTables().size());

        SSTableReader sstable1 = unrepaired.manifest.generations[2].get(0);
        SSTableReader sstable2 = unrepaired.manifest.generations[1].get(0);

        // Mark sstable1 repaired on disk and notify the strategy manager.
        sstable1.descriptor.getMetadataSerializer().mutateRepairMetadata(sstable1.descriptor, System.currentTimeMillis(), null, false);
        sstable1.reloadSSTableMetadata();
        assertTrue(sstable1.isRepaired());

        manager.handleNotification(new SSTableRepairStatusChanged(Arrays.asList(sstable1)), this);

        int repairedSSTableCount = 0;
        for (List<SSTableReader> level : repaired.manifest.generations)
            repairedSSTableCount += level.size();
        assertEquals(1, repairedSSTableCount);
        // make sure the repaired sstable ends up in the same level in the repaired manifest:
        assertTrue(repaired.manifest.generations[2].contains(sstable1));
        // and that it is gone from unrepaired
        assertFalse(unrepaired.manifest.generations[2].contains(sstable1));

        // An "added" notification for a still-unrepaired sstable must route it
        // back into the unrepaired manifest only.
        unrepaired.removeSSTable(sstable2);
        manager.handleNotification(new SSTableAddedNotification(singleton(sstable2), null), this);
        assertTrue(unrepaired.manifest.getLevel(1).contains(sstable2));
        assertFalse(repaired.manifest.getLevel(1).contains(sstable2));
    }

    /**
     * Verifies that forceCompactionForTokenRange only compacts sstables whose
     * data intersects the requested token range.
     */
    @Test
    public void testTokenRangeCompaction() throws Exception
    {
        // Remove any existing data so we can start out clean with predictable number of sstables
        cfs.truncateBlocking();

        // Disable auto compaction so cassandra does not compact
        CompactionManager.instance.disableAutoCompaction();

        ByteBuffer value = ByteBuffer.wrap(new byte[100 * 1024]); // 100 KB value, make it easy to have multiple files
        DecoratedKey key1 = Util.dk(String.valueOf(1));
        DecoratedKey key2 = Util.dk(String.valueOf(2));
        List<DecoratedKey> keys = new ArrayList<>(Arrays.asList(key1, key2));
        int numIterations = 10;
        int columns = 2;

        // Add enough data to trigger multiple sstables.

        // create 10 sstables that contain data for both key1 and key2
        for (int i = 0; i < numIterations; i++) {
            for (DecoratedKey key : keys) {
                UpdateBuilder update = UpdateBuilder.create(cfs.metadata(), key);
                for (int c = 0; c < columns; c++)
                    update.newRow("column" + c).add("val", value);
                update.applyUnsafe();
            }
            cfs.forceBlockingFlush();
        }

        // create 20 more sstables with 10 containing data for key1 and other 10 containing data for key2
        for (int i = 0; i < numIterations; i++) {
            for (DecoratedKey key : keys) {
                UpdateBuilder update = UpdateBuilder.create(cfs.metadata(), key);
                for (int c = 0; c < columns; c++)
                    update.newRow("column" + c).add("val", value);
                update.applyUnsafe();
                cfs.forceBlockingFlush();
            }
        }

        // We should have a total of 30 sstables by now
        assertEquals(30, cfs.getLiveSSTables().size());

        // Compact just the tables with key2
        // Bit hackish to use the key1.token as the prior key but works in BytesToken
        Range<Token> tokenRange = new Range<>(key2.getToken(), key2.getToken());
        Collection<Range<Token>> tokenRanges = new ArrayList<>(Arrays.asList(tokenRange));
        cfs.forceCompactionForTokenRange(tokenRanges);

        while(CompactionManager.instance.isCompacting(Arrays.asList(cfs), (sstable) -> true)) {
            Thread.sleep(100);
        }

        // 20 tables that have key2 should have been compacted in to 1 table resulting in 11 (30-20+1)
        assertEquals(11, cfs.getLiveSSTables().size());

        // Compact just the tables with key1. At this point all 11 tables should have key1
        Range<Token> tokenRange2 = new Range<>(key1.getToken(), key1.getToken());
        Collection<Range<Token>> tokenRanges2 = new ArrayList<>(Arrays.asList(tokenRange2));
        cfs.forceCompactionForTokenRange(tokenRanges2);

        while(CompactionManager.instance.isCompacting(Arrays.asList(cfs), (sstable) -> true)) {
            Thread.sleep(100);
        }

        // the 11 tables containing key1 should all compact to 1 table
        assertEquals(1, cfs.getLiveSSTables().size());
    }

    /**
     * Verifies that L0 compaction candidates are ordered oldest-first by max
     * timestamp.
     */
    @Test
    public void testCompactionCandidateOrdering() throws Exception
    {
        // add some data
        byte [] b = new byte[100 * 1024];
        new Random().nextBytes(b);
        ByteBuffer value = ByteBuffer.wrap(b);
        int rows = 4;
        int columns = 10;
        // Just keep sstables in L0 for this test
        cfs.disableAutoCompaction();
        for (int r = 0; r < rows; r++)
        {
            UpdateBuilder update = UpdateBuilder.create(cfs.metadata(), String.valueOf(r));
            for (int c = 0; c < columns; c++)
                update.newRow("column" + c).add("val", value);
            update.applyUnsafe();
            cfs.forceBlockingFlush();
        }
        LeveledCompactionStrategy strategy = (LeveledCompactionStrategy) (cfs.getCompactionStrategyManager()).getStrategies().get(1).get(0);
        // get readers for level 0 sstables
        Collection<SSTableReader> sstables = strategy.manifest.getLevel(0);
        Collection<SSTableReader> sortedCandidates = strategy.manifest.ageSortedSSTables(sstables);
        assertTrue(String.format("More than 1 sstable required for test, found: %d .", sortedCandidates.size()), sortedCandidates.size() > 1);
        long lastMaxTimeStamp = Long.MIN_VALUE;
        for (SSTableReader sstable : sortedCandidates)
        {
            assertTrue(String.format("SStables not sorted into oldest to newest by maxTimestamp. Current sstable: %d , last sstable: %d", sstable.getMaxTimestamp(), lastMaxTimeStamp),
                       sstable.getMaxTimestamp() > lastMaxTimeStamp);
            lastMaxTimeStamp = sstable.getMaxTimestamp();
        }
    }
}
| apache-2.0 |
PeterLauris/aifh | vol1/java-examples/src/main/java/com/heatonresearch/aifh/error/ErrorCalculationMSE.java | 1601 | /*
* Artificial Intelligence for Humans
* Volume 1: Fundamental Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
* Copyright 2013 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh.error;
/**
* Calculates the error as the average of the sum of the squared differences between the actual and ideal vectors.
* This is the most commonly used error calculation technique in this book.
* <p/>
* http://www.heatonresearch.com/wiki/Mean_Square_Error
*/
/**
 * Mean Square Error (MSE) calculation: the accumulated sum of squared
 * differences between actual and ideal vectors, divided by the number of
 * elements seen. This is the most commonly used error measure in this book.
 * <p/>
 * http://www.heatonresearch.com/wiki/Mean_Square_Error
 */
public class ErrorCalculationMSE extends AbstractErrorCalculation {

    /**
     * Compute the MSE over everything accumulated so far.
     *
     * @return The current error, or positive infinity when no elements have
     *         been accumulated (avoids a division by zero).
     */
    @Override
    public final double calculate() {
        return this.setSize == 0
                ? Double.POSITIVE_INFINITY
                : this.globalError / this.setSize;
    }
}
| apache-2.0 |
TheTypoMaster/chromium-crosswalk | ui/android/java/src/org/chromium/ui/base/ActivityWindowAndroid.java | 13014 | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.ui.base;
import android.app.Activity;
import android.app.PendingIntent;
import android.content.ActivityNotFoundException;
import android.content.Intent;
import android.content.IntentSender.SendIntentException;
import android.content.SharedPreferences;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.content.pm.PermissionInfo;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import android.util.SparseArray;
import android.view.View;
import org.chromium.base.ActivityState;
import org.chromium.base.ApplicationStatus;
import org.chromium.base.BuildInfo;
import org.chromium.ui.UiUtils;
import java.lang.ref.WeakReference;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
 * The class provides the WindowAndroid's implementation which requires
 * Activity Instance.
 * Only instantiate this class when you need the implemented features.
 */
public class ActivityWindowAndroid
        extends WindowAndroid
        implements ApplicationStatus.ActivityStateListener, View.OnLayoutChangeListener {
    // Constants used for intent request code bounding.
    private static final int REQUEST_CODE_PREFIX = 1000;
    private static final int REQUEST_CODE_RANGE_SIZE = 100;
    private static final String TAG = "ActivityWindowAndroid";

    // Prefix of SharedPreferences keys recording that a permission (group) has
    // been requested at least once before.
    private static final String PERMISSION_QUERIED_KEY_PREFIX = "HasRequestedAndroidPermission::";

    private final WeakReference<Activity> mActivityRef;
    private final Handler mHandler;
    // Maps pending permission request codes to the callbacks awaiting results.
    private final SparseArray<PermissionCallback> mOutstandingPermissionRequests;

    // Reflective handle to Activity#requestPermissions (M+ API), resolved lazily.
    private Method mRequestPermissionsMethod;

    private int mNextRequestCode = 0;

    /**
     * Creates an Activity-specific WindowAndroid with associated intent functionality.
     * TODO(jdduke): Remove this overload when all callsites have been updated to
     * indicate their activity state listening preference.
     * @param activity The activity associated with the WindowAndroid.
     */
    public ActivityWindowAndroid(Activity activity) {
        this(activity, true);
    }

    /**
     * Creates an Activity-specific WindowAndroid with associated intent functionality.
     * @param activity The activity associated with the WindowAndroid.
     * @param listenToActivityState Whether to listen to activity state changes.
     */
    public ActivityWindowAndroid(Activity activity, boolean listenToActivityState) {
        super(activity.getApplicationContext());
        mActivityRef = new WeakReference<Activity>(activity);
        mHandler = new Handler();
        mOutstandingPermissionRequests = new SparseArray<PermissionCallback>();
        if (listenToActivityState) {
            ApplicationStatus.registerStateListenerForActivity(this, activity);
        }
    }

    @Override
    protected void registerKeyboardVisibilityCallbacks() {
        Activity activity = mActivityRef.get();
        if (activity == null) return;
        activity.findViewById(android.R.id.content).addOnLayoutChangeListener(this);
    }

    @Override
    protected void unregisterKeyboardVisibilityCallbacks() {
        Activity activity = mActivityRef.get();
        if (activity == null) return;
        activity.findViewById(android.R.id.content).removeOnLayoutChangeListener(this);
    }

    @Override
    public int showCancelableIntent(
            PendingIntent intent, IntentCallback callback, Integer errorId) {
        Activity activity = mActivityRef.get();
        if (activity == null) return START_INTENT_FAILURE;

        int requestCode = generateNextRequestCode();

        try {
            activity.startIntentSenderForResult(
                    intent.getIntentSender(), requestCode, new Intent(), 0, 0, 0);
        } catch (SendIntentException e) {
            return START_INTENT_FAILURE;
        }

        storeCallbackData(requestCode, callback, errorId);
        return requestCode;
    }

    @Override
    public int showCancelableIntent(Intent intent, IntentCallback callback, Integer errorId) {
        Activity activity = mActivityRef.get();
        if (activity == null) return START_INTENT_FAILURE;

        int requestCode = generateNextRequestCode();

        try {
            activity.startActivityForResult(intent, requestCode);
        } catch (ActivityNotFoundException e) {
            return START_INTENT_FAILURE;
        }

        storeCallbackData(requestCode, callback, errorId);
        return requestCode;
    }

    @Override
    public void cancelIntent(int requestCode) {
        Activity activity = mActivityRef.get();
        if (activity == null) return;
        activity.finishActivity(requestCode);
    }

    /**
     * Responds to the intent result if the intent was created by the native window.
     * @param requestCode Request code of the requested intent.
     * @param resultCode Result code of the requested intent.
     * @param data The data returned by the intent.
     * @return Boolean value of whether the intent was started by the native window.
     */
    public boolean onActivityResult(int requestCode, int resultCode, Intent data) {
        IntentCallback callback = mOutstandingIntents.get(requestCode);
        mOutstandingIntents.delete(requestCode);
        String errorMessage = mIntentErrors.remove(requestCode);

        if (callback != null) {
            callback.onIntentCompleted(this, resultCode,
                    mApplicationContext.getContentResolver(), data);
            return true;
        } else {
            if (errorMessage != null) {
                showCallbackNonExistentError(errorMessage);
                return true;
            }
        }
        return false;
    }

    /**
     * Returns the SharedPreferences key under which we record that the given
     * permission (or its permission group, when it has one) has been requested.
     */
    private String getHasRequestedPermissionKey(String permission) {
        String permissionQueriedKey = permission;
        try {
            // Runtime permissions are controlled at the group level.  So when determining whether
            // we have requested a particular permission before, we should check whether we
            // have requested any permission in that group as that mimics the logic in the Android
            // framework.
            //
            // e.g. Requesting first the permission ACCESS_FINE_LOCATION will result in Chrome
            // treating ACCESS_COARSE_LOCATION as if it had already been requested as well.
            PermissionInfo permissionInfo = getApplicationContext().getPackageManager()
                    .getPermissionInfo(permission, PackageManager.GET_META_DATA);

            if (!TextUtils.isEmpty(permissionInfo.group)) {
                permissionQueriedKey = permissionInfo.group;
            }
        } catch (NameNotFoundException e) {
            // Unknown permission.  Default back to the permission name instead of the group.
        }

        return PERMISSION_QUERIED_KEY_PREFIX + permissionQueriedKey;
    }

    @Override
    public boolean canRequestPermission(String permission) {
        if (!BuildInfo.isMncOrLater()) return false;

        Activity activity = mActivityRef.get();
        if (activity == null) return false;

        // TODO(tedchoc): Child classes are currently required to determine whether we have
        //                previously requested the permission before but the user did not
        //                select "Never ask again".  Merge with this class when possible.

        // Check whether we have ever asked for this permission by checking whether we saved
        // a preference associated with it before.
        String permissionQueriedKey = getHasRequestedPermissionKey(permission);
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(activity);
        if (!prefs.getBoolean(permissionQueriedKey, false)) return true;

        return false;
    }

    @Override
    public void requestPermissions(
            final String[] permissions, final PermissionCallback callback) {
        // If the permission request was not sent successfully, just post a response to the
        // callback with whatever the current permission state is for all the requested
        // permissions.  The response is posted to keep the async behavior of this method
        // consistent.
        if (!requestPermissionsInternal(permissions, callback)) {
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    int[] results = new int[permissions.length];
                    for (int i = 0; i < permissions.length; i++) {
                        results[i] = hasPermission(permissions[i])
                                ? PackageManager.PERMISSION_GRANTED
                                : PackageManager.PERMISSION_DENIED;
                    }
                    callback.onRequestPermissionsResult(permissions, results);
                }
            });
        } else {
            // Record that each permission has now been requested so
            // canRequestPermission() can later distinguish "never asked" from
            // "denied with never-ask-again".
            Activity activity = mActivityRef.get();
            SharedPreferences.Editor editor =
                    PreferenceManager.getDefaultSharedPreferences(activity).edit();
            for (int i = 0; i < permissions.length; i++) {
                editor.putBoolean(getHasRequestedPermissionKey(permissions[i]), true);
            }
            editor.apply();
        }
    }

    /**
     * Issues the permission request and returns whether it was sent successfully.
     */
    private boolean requestPermissionsInternal(String[] permissions, PermissionCallback callback) {
        // TODO(tedchoc): Remove the MNC check once the SDK version is bumped.
        if (!BuildInfo.isMncOrLater()) return false;

        // TODO(tedchoc): Remove the reflection aspect of this once a public M SDK is available.
        Activity activity = mActivityRef.get();
        if (activity == null) return false;

        if (mRequestPermissionsMethod == null) {
            try {
                mRequestPermissionsMethod = Activity.class.getMethod(
                        "requestPermissions", String[].class, int.class);
            } catch (NoSuchMethodException e) {
                return false;
            }
        }

        int requestCode = generateNextRequestCode();
        mOutstandingPermissionRequests.put(requestCode, callback);

        try {
            mRequestPermissionsMethod.invoke(activity, permissions, requestCode);
            return true;
        } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
            // Reflection failed; drop the pending request so the caller falls
            // back to the synthesized response path in requestPermissions().
            mOutstandingPermissionRequests.delete(requestCode);
        }
        return false;
    }

    /**
     * Responds to a pending permission result.
     * @param requestCode The unique code for the permission request.
     * @param permissions The list of permissions in the result.
     * @param grantResults Whether the permissions were granted.
     * @return Whether the permission request corresponding to a pending permission request.
     */
    public boolean onRequestPermissionsResult(int requestCode, String[] permissions,
            int[] grantResults) {
        PermissionCallback callback = mOutstandingPermissionRequests.get(requestCode);
        mOutstandingPermissionRequests.delete(requestCode);
        if (callback == null) return false;
        callback.onRequestPermissionsResult(permissions, grantResults);
        return true;
    }

    @Override
    public WeakReference<Activity> getActivity() {
        // Return a new WeakReference to prevent clients from releasing our internal WeakReference.
        return new WeakReference<Activity>(mActivityRef.get());
    }

    @Override
    public void onActivityStateChange(Activity activity, int newState) {
        if (newState == ActivityState.STOPPED) {
            onActivityStopped();
        } else if (newState == ActivityState.STARTED) {
            onActivityStarted();
        }
    }

    @Override
    public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft,
            int oldTop, int oldRight, int oldBottom) {
        keyboardVisibilityPossiblyChanged(UiUtils.isKeyboardShowing(mActivityRef.get(), v));
    }

    /**
     * Returns the next request code, cycling within
     * [REQUEST_CODE_PREFIX, REQUEST_CODE_PREFIX + REQUEST_CODE_RANGE_SIZE).
     */
    private int generateNextRequestCode() {
        int requestCode = REQUEST_CODE_PREFIX + mNextRequestCode;
        mNextRequestCode = (mNextRequestCode + 1) % REQUEST_CODE_RANGE_SIZE;
        return requestCode;
    }

    /** Records the callback and optional error message for a pending intent request. */
    private void storeCallbackData(int requestCode, IntentCallback callback, Integer errorId) {
        mOutstandingIntents.put(requestCode, callback);
        mIntentErrors.put(
                requestCode, errorId == null ? null : mApplicationContext.getString(errorId));
    }
}
| bsd-3-clause |
koshalt/modules | openmrs/src/test/java/org/motechproject/openmrs/it/MRSPatientServiceIT.java | 11544 | package org.motechproject.openmrs.it;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.motechproject.event.MotechEvent;
import org.motechproject.event.listener.EventListener;
import org.motechproject.event.listener.EventListenerRegistryService;
import org.motechproject.openmrs.domain.Concept;
import org.motechproject.openmrs.domain.ConceptName;
import org.motechproject.openmrs.domain.Identifier;
import org.motechproject.openmrs.domain.IdentifierType;
import org.motechproject.openmrs.domain.Location;
import org.motechproject.openmrs.domain.Patient;
import org.motechproject.openmrs.domain.Person;
import org.motechproject.openmrs.exception.ConceptNameAlreadyInUseException;
import org.motechproject.openmrs.exception.HttpException;
import org.motechproject.openmrs.exception.PatientNotFoundException;
import org.motechproject.openmrs.service.EventKeys;
import org.motechproject.openmrs.service.OpenMRSConceptService;
import org.motechproject.openmrs.service.OpenMRSLocationService;
import org.motechproject.openmrs.service.OpenMRSPatientService;
import org.motechproject.openmrs.service.OpenMRSPersonService;
import org.motechproject.testing.osgi.BasePaxIT;
import org.motechproject.testing.osgi.container.MotechNativeTestContainerFactory;
import org.ops4j.pax.exam.ExamFactory;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy;
import org.ops4j.pax.exam.spi.reactors.PerSuite;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.motechproject.openmrs.util.TestConstants.DEFAULT_CONFIG_NAME;
/**
 * OSGi integration tests for {@link OpenMRSPatientService}: create, update,
 * search, decease and delete a patient against a live OpenMRS server, and
 * verify that the corresponding MOTECH events are published.
 *
 * Synchronization model: service calls that publish events are wrapped in
 * {@code synchronized (lock) { ...; lock.wait(60000); }}; the registered
 * {@link MrsListener} calls {@code lock.notify()} from its event handler, so
 * each test waits (up to 60s) for its event before asserting.
 */
@RunWith(PaxExam.class)
@ExamReactorStrategy(PerSuite.class)
@ExamFactory(MotechNativeTestContainerFactory.class)
public class MRSPatientServiceIT extends BasePaxIT {

    // Monitor shared between the test thread and the event listener thread.
    final Object lock = new Object();

    @Inject
    private OpenMRSLocationService locationAdapter;

    @Inject
    private OpenMRSPatientService patientAdapter;

    @Inject
    private OpenMRSPersonService personAdapter;

    @Inject
    private OpenMRSConceptService conceptAdapter;

    @Inject
    private EventListenerRegistryService eventListenerRegistry;

    // Listener capturing which patient lifecycle events fired and their parameters.
    MrsListener mrsListener;
    // Patient created in setUp(); used by every test, removed in tearDown().
    Patient patient;
    // Concept used as cause of death in the decease test; created/deleted per test.
    Concept causeOfDeath;
    // Location the patient (and its identifier) is registered at.
    Location location;

    /**
     * Registers the event listener, creates a fresh patient (waiting for the
     * creation event) and prepares the cause-of-death concept.
     */
    @Before
    public void setUp() throws InterruptedException, ConceptNameAlreadyInUseException {
        mrsListener = new MrsListener();
        eventListenerRegistry.registerListener(mrsListener, Arrays.asList(EventKeys.CREATED_NEW_PATIENT_SUBJECT,
                EventKeys.UPDATED_PATIENT_SUBJECT, EventKeys.PATIENT_DECEASED_SUBJECT, EventKeys.DELETED_PATIENT_SUBJECT));

        // Re-fetch by UUID so `patient` reflects the server-side state, not the local template.
        String uuid = savePatient(preparePatient()).getUuid();
        patient = patientAdapter.getPatientByUuid(DEFAULT_CONFIG_NAME, uuid);

        prepareConceptOfDeath();
    }

    /**
     * The patient created in setUp() should exist and only the "created" event
     * should have fired, carrying the patient and person ids.
     */
    @Test
    public void shouldCreatePatient() {
        assertNotNull(patient.getMotechId());
        assertEquals(patient.getUuid(), mrsListener.eventParameters.get(EventKeys.PATIENT_ID));
        assertEquals(patient.getPerson().getUuid(), mrsListener.eventParameters.get(EventKeys.PERSON_ID));
        assertTrue(mrsListener.created);
        assertFalse(mrsListener.deceased);
        assertFalse(mrsListener.updated);
        assertFalse(mrsListener.deleted);
    }

    /**
     * Updating name, address and MOTECH id should persist and fire the
     * "updated" event (in addition to the "created" event from setUp()).
     */
    @Test
    public void shouldUpdatePatient() throws InterruptedException {
        final String newFirstName = "Changed Name";
        final String newAddress = "Changed Address";
        final String newMotechId = "604";

        Person.Name name = patient.getPerson().getPreferredName();
        name.setGivenName(newFirstName);

        Person.Address address = patient.getPerson().getPreferredAddress();
        address.setAddress1(newAddress);
        patient.setMotechId(newMotechId);

        synchronized (lock) {
            patientAdapter.updatePatient(DEFAULT_CONFIG_NAME, patient);
            lock.wait(60000);
        }

        Patient updated = patientAdapter.getPatientByUuid(DEFAULT_CONFIG_NAME, patient.getUuid());

        assertEquals(newFirstName, updated.getPerson().getPreferredName().getGivenName());
        assertEquals(newAddress, updated.getPerson().getPreferredAddress().getAddress1());
        assertEquals(newMotechId, updated.getMotechId());

        assertTrue(mrsListener.created);
        assertFalse(mrsListener.deceased);
        assertTrue(mrsListener.updated);
        assertFalse(mrsListener.deleted);
    }

    /**
     * Updating only the identifier list should persist the new identifier
     * value while keeping its identifier type.
     */
    @Test
    public void shouldUpdatePatientIdentifiers() throws InterruptedException {
        Patient testPatient = patientAdapter.getPatientByMotechId(DEFAULT_CONFIG_NAME, patient.getMotechId());

        String newIdentifiactionNumber = "612";

        testPatient.getIdentifiers().get(0).setIdentifier(newIdentifiactionNumber);

        synchronized (lock) {
            patientAdapter.updatePatientIdentifiers(DEFAULT_CONFIG_NAME, testPatient);
            lock.wait(60000);
        }

        Patient updated = patientAdapter.getPatientByUuid(DEFAULT_CONFIG_NAME, testPatient.getUuid());

        assertEquals("Old Identification Number", updated.getIdentifiers().get(0).getIdentifierType().getName());
        assertEquals(newIdentifiactionNumber, updated.getIdentifiers().get(0).getIdentifier());
    }

    // Lookup by MOTECH id should return the patient created in setUp().
    @Test
    public void shouldGetPatientByMotechId() {
        Patient fetched = patientAdapter.getPatientByMotechId(DEFAULT_CONFIG_NAME, patient.getMotechId());

        assertNotNull(fetched);
        assertEquals(fetched, patient);
    }

    // Lookup by OpenMRS UUID should return the patient created in setUp().
    @Test
    public void shouldGetByUuid() {
        Patient fetched = patientAdapter.getPatientByUuid(DEFAULT_CONFIG_NAME, patient.getUuid());

        assertNotNull(fetched);
        assertEquals(fetched, patient);
    }

    // Searching by given name + MOTECH id should find the patient.
    @Test
    public void shouldSearchForPatient() throws InterruptedException, PatientNotFoundException {
        List<Patient> found = patientAdapter.search(DEFAULT_CONFIG_NAME, patient.getPerson().getPreferredName().getGivenName(), patient.getMotechId());

        assertEquals(patient.getPerson().getPreferredName().getGivenName(), found.get(0).getPerson().getPreferredName().getGivenName());
    }

    // Deceasing the patient should mark the underlying person as dead.
    @Test
    public void shouldDeceasePerson() throws HttpException, PatientNotFoundException, InterruptedException {
        patientAdapter.deceasePatient(DEFAULT_CONFIG_NAME, patient.getMotechId(), causeOfDeath, new Date(), null);

        Patient deceased = patientAdapter.getPatientByMotechId(DEFAULT_CONFIG_NAME, patient.getMotechId());
        assertTrue(deceased.getPerson().getDead());
    }

    /**
     * Deleting should remove the patient and fire the "deleted" event with the
     * patient id as a parameter.
     */
    @Test
    public void shouldDeletePatient() throws PatientNotFoundException, InterruptedException {
        synchronized (lock) {
            patientAdapter.deletePatient(DEFAULT_CONFIG_NAME, patient.getUuid());
            assertNull(patientAdapter.getPatientByUuid(DEFAULT_CONFIG_NAME, patient.getUuid()));
            lock.wait(60000);
        }

        assertTrue(mrsListener.created);
        assertFalse(mrsListener.updated);
        assertTrue(mrsListener.deleted);
        assertFalse(mrsListener.deceased);

        assertEquals(patient.getUuid(), mrsListener.eventParameters.get(EventKeys.PATIENT_ID));
    }

    /**
     * Removes the patient, its location and the cause-of-death concept, and
     * unregisters the listener. Safe if the delete test already removed the
     * patient? NOTE(review): deletePatient on an already-deleted patient may
     * throw — confirm server behaviour.
     */
    @After
    public void tearDown() throws InterruptedException, PatientNotFoundException {
        String uuid = patient.getLocationForMotechId().getUuid();
        deletePatient(patient);

        if (uuid != null) {
            locationAdapter.deleteLocation(DEFAULT_CONFIG_NAME, uuid);
        }

        conceptAdapter.deleteConcept(DEFAULT_CONFIG_NAME, causeOfDeath.getUuid());
        eventListenerRegistry.clearListenersForBean("mrsTestListener");
    }

    /**
     * Builds an unsaved patient ("John Smith", male, MOTECH id 602) registered
     * at a freshly created location.
     */
    private Patient preparePatient() {
        Person person = new Person();

        Person.Name name = new Person.Name();
        name.setGivenName("John");
        name.setFamilyName("Smith");
        person.setNames(Collections.singletonList(name));

        Person.Address address = new Person.Address();
        address.setAddress1("10 Fifth Avenue");
        person.setAddresses(Collections.singletonList(address));

        person.setBirthdateEstimated(false);
        person.setGender("M");

        location = locationAdapter.createLocation(DEFAULT_CONFIG_NAME, new Location("FooName", "FooCountry", "FooRegion", "FooCountryDistrict", "FooStateProvince"));
        assertNotNull(location);

        return new Patient(prepareIdentifier(), person, "602", location);
    }

    /** Persists the patient and blocks (up to 60s) until the creation event arrives. */
    private Patient savePatient(Patient patient) throws InterruptedException {
        Patient created;

        synchronized (lock) {
            created = patientAdapter.createPatient(DEFAULT_CONFIG_NAME, patient);
            assertNotNull(created);
            lock.wait(60000);
        }

        return created;
    }

    /** Deletes the patient and its location, asserting the patient is gone. */
    private void deletePatient(Patient patient) throws PatientNotFoundException, InterruptedException {
        String locationId = patient.getLocationForMotechId().getUuid();
        patientAdapter.deletePatient(DEFAULT_CONFIG_NAME, patient.getUuid());
        assertNull(patientAdapter.getPatientByUuid(DEFAULT_CONFIG_NAME, patient.getUuid()));
        locationAdapter.deleteLocation(DEFAULT_CONFIG_NAME, locationId);
    }

    /** Creates a TEXT concept "FooConceptOne" used as the cause of death. */
    private void prepareConceptOfDeath() throws ConceptNameAlreadyInUseException {
        Concept concept = new Concept();

        ConceptName conceptName = new ConceptName("FooConceptOne");
        concept.setNames(Arrays.asList(conceptName));
        concept.setDatatype(new Concept.DataType("TEXT"));
        concept.setConceptClass(new Concept.ConceptClass("Test"));

        String uuid = conceptAdapter.createConcept(DEFAULT_CONFIG_NAME, concept).getUuid();
        causeOfDeath = conceptAdapter.getConceptByUuid(DEFAULT_CONFIG_NAME, uuid);
    }

    /** Single "Old Identification Number" identifier (value 603) at {@link #location}. */
    private List<Identifier> prepareIdentifier() {
        List<Identifier> testList = new ArrayList<>();

        Identifier testIdentifier1 = new Identifier("603", new IdentifierType("Old Identification Number"));
        testIdentifier1.setUuid("2222");
        testIdentifier1.setLocation(location);

        testList.add(testIdentifier1);

        return testList;
    }

    /**
     * Event listener that records which patient lifecycle events fired and
     * notifies the test thread waiting on {@link #lock}.
     */
    public class MrsListener implements EventListener {

        private boolean created = false;
        private boolean updated = false;
        private boolean deceased = false;
        private boolean deleted = false;
        private Map<String, Object> eventParameters;

        public void handle(MotechEvent event) {
            if (event.getSubject().equals(EventKeys.CREATED_NEW_PATIENT_SUBJECT)) {
                created = true;
            } else if (event.getSubject().equals(EventKeys.UPDATED_PATIENT_SUBJECT)) {
                updated = true;
            } else if (event.getSubject().equals(EventKeys.PATIENT_DECEASED_SUBJECT)) {
                deceased = true;
            } else if (event.getSubject().equals(EventKeys.DELETED_PATIENT_SUBJECT)) {
                deleted = true;
            }
            // Keep only the parameters of the most recent event.
            eventParameters = event.getParameters();

            // Wake up the test thread blocked in lock.wait(60000).
            synchronized (lock) {
                lock.notify();
            }
        }

        @Override
        public String getIdentifier() {
            return "mrsTestListener";
        }
    }
}
| bsd-3-clause |
kaituo/sedge | trunk/contrib/penny/java/src/main/java/org/apache/pig/penny/PennyServer.java | 1790 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.penny;
import java.io.IOException;
import java.util.Properties;
import org.apache.pig.ExecType;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.impl.PigContext;
/**
 * Entry point for running Penny-instrumented Pig scripts. The execution type
 * and Pig properties are configured statically (before construction); each
 * server instance then owns a {@link PigContext} built from that configuration.
 */
public class PennyServer {

    /** Execution mode applied to servers created after the change; MAPREDUCE by default. */
    private static ExecType execType = ExecType.MAPREDUCE;

    /** Pig configuration properties applied to servers created after the change. */
    private static Properties properties = new Properties();

    /** Pig execution context backing this server instance. */
    private final PigContext pigContext;

    /** Overrides the execution type used by subsequently constructed servers. */
    public static void setExecType(ExecType newExecType) {
        execType = newExecType;
    }

    /** Overrides the Pig properties used by subsequently constructed servers. */
    public static void setProperties(Properties newProperties) {
        properties = newProperties;
    }

    /**
     * Creates a server with a Pig context built from the current static
     * configuration.
     *
     * @throws ExecException if the Pig context cannot be created
     */
    public PennyServer() throws ExecException {
        this.pigContext = new PigContext(execType, properties);
    }

    /**
     * Parses the given Pig script file.
     *
     * @param pigScriptFilename path of the Pig Latin script to parse
     * @return the parsed representation of the script
     * @throws IOException if the script cannot be read or parsed
     */
    public ParsedPigScript parse(String pigScriptFilename) throws IOException {
        return new ParsedPigScript(pigContext, pigScriptFilename);
    }

    /** Returns the Pig execution context owned by this server. */
    public PigContext getPigContext() {
        return pigContext;
    }
}
| mit |
oleg-nenashev/jenkins | core/src/test/java/hudson/model/QueueTest.java | 2306 | package hudson.model;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.jvnet.hudson.test.Issue;
import org.kohsuke.stapler.HttpResponse;
import org.kohsuke.stapler.StaplerResponse;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@link Queue#doCancelItem}: the HTTP status code must reflect
 * whether the targeted queue item exists and whether it may be aborted.
 */
@RunWith(MockitoJUnitRunner.class)
public class QueueTest {

    @Mock
    StaplerResponse resp;
    @Mock
    Queue.Task task;
    @Mock
    PrintWriter writer;

    @Before
    public void setup() throws IOException {
        when(resp.getWriter()).thenReturn(writer);
    }

    @Issue("JENKINS-21311")
    @Test
    public void cancelItemOnaValidItemShouldReturnA204() throws IOException, ServletException {
        when(task.hasAbortPermission()).thenReturn(true);
        scheduleThenCancelAndExpect(0, HttpServletResponse.SC_NO_CONTENT);
    }

    @Issue("JENKINS-21311")
    @Test
    public void cancelItemOnANonExistingItemShouldReturnA404() throws IOException, ServletException {
        // Offset of +1 targets an item id that was never scheduled.
        scheduleThenCancelAndExpect(1, HttpServletResponse.SC_NOT_FOUND);
    }

    @Issue("JENKINS-21311")
    @Test
    public void cancelItemOnANonCancellableItemShouldReturnA422() throws IOException, ServletException {
        when(task.hasAbortPermission()).thenReturn(false);
        scheduleThenCancelAndExpect(0, 422);
    }

    /**
     * Schedules the mock task on a fresh queue, cancels the item whose id is
     * the current waiting-item counter value plus {@code idOffset} (the
     * counter is read only after scheduling, so offset 0 targets the item just
     * added), and verifies the generated response carries
     * {@code expectedStatus}.
     */
    private void scheduleThenCancelAndExpect(long idOffset, int expectedStatus) throws IOException, ServletException {
        Queue queue = new Queue(LoadBalancer.CONSISTENT_HASH);
        queue.schedule(task, 6000);
        HttpResponse httpResponse = queue.doCancelItem(Queue.WaitingItem.getCurrentCounterValue() + idOffset);
        httpResponse.generateResponse(null, resp, null);
        verify(resp).setStatus(expectedStatus);
    }
}
| mit |
open-keychain/spongycastle | core/src/main/java/org/spongycastle/asn1/x509/sigi/NameOrPseudonym.java | 4959 | package org.spongycastle.asn1.x509.sigi;
import java.util.Enumeration;
import org.spongycastle.asn1.ASN1Choice;
import org.spongycastle.asn1.ASN1EncodableVector;
import org.spongycastle.asn1.ASN1Object;
import org.spongycastle.asn1.ASN1Primitive;
import org.spongycastle.asn1.ASN1Sequence;
import org.spongycastle.asn1.ASN1String;
import org.spongycastle.asn1.DERSequence;
import org.spongycastle.asn1.x500.DirectoryString;
/**
* Structure for a name or pseudonym.
*
* <pre>
* NameOrPseudonym ::= CHOICE {
* surAndGivenName SEQUENCE {
* surName DirectoryString,
* givenName SEQUENCE OF DirectoryString
* },
* pseudonym DirectoryString
* }
* </pre>
*
* @see org.spongycastle.asn1.x509.sigi.PersonalData
*
*/
public class NameOrPseudonym
    extends ASN1Object
    implements ASN1Choice
{
    // Exactly one of the two CHOICE alternatives is populated:
    // either `pseudonym`, or the (`surname`, `givenName`) pair.
    private DirectoryString pseudonym;

    private DirectoryString surname;

    private ASN1Sequence givenName;

    /**
     * Returns a NameOrPseudonym from the given object, which may be an
     * existing instance, an {@link ASN1String} (taken as a pseudonym) or an
     * {@link ASN1Sequence} (taken as surname + given names).
     *
     * @param obj the object to convert, or null
     * @return the NameOrPseudonym instance, or null if {@code obj} was null
     * @throws IllegalArgumentException if the object cannot be converted
     */
    public static NameOrPseudonym getInstance(Object obj)
    {
        if (obj == null || obj instanceof NameOrPseudonym)
        {
            return (NameOrPseudonym)obj;
        }

        if (obj instanceof ASN1String)
        {
            return new NameOrPseudonym(DirectoryString.getInstance(obj));
        }

        if (obj instanceof ASN1Sequence)
        {
            return new NameOrPseudonym((ASN1Sequence)obj);
        }

        throw new IllegalArgumentException("illegal object in getInstance: "
            + obj.getClass().getName());
    }

    /**
     * Constructor from DirectoryString.
     * <p>
     * The sequence is of type NameOrPseudonym:
     * <pre>
     *       NameOrPseudonym ::= CHOICE {
     *     	   surAndGivenName SEQUENCE {
     *     	     surName DirectoryString,
     *     	     givenName SEQUENCE OF DirectoryString
     *         },
     *     	   pseudonym DirectoryString
     *       }
     * </pre>
     * @param pseudonym pseudonym value to use.
     */
    public NameOrPseudonym(DirectoryString pseudonym)
    {
        this.pseudonym = pseudonym;
    }

    /**
     * Constructor from ASN1Sequence, selecting the surAndGivenName branch.
     * <p>
     * The sequence is of type NameOrPseudonym:
     * <pre>
     *       NameOrPseudonym ::= CHOICE {
     *     	   surAndGivenName SEQUENCE {
     *     	     surName DirectoryString,
     *     	     givenName SEQUENCE OF DirectoryString
     *         },
     *     	   pseudonym DirectoryString
     *       }
     * </pre>
     * </p>
     * @param seq The ASN.1 sequence.
     * @throws IllegalArgumentException if the sequence does not have exactly
     *         two elements or its first element is not a string type.
     */
    private NameOrPseudonym(ASN1Sequence seq)
    {
        if (seq.size() != 2)
        {
            throw new IllegalArgumentException("Bad sequence size: "
                + seq.size());
        }

        if (!(seq.getObjectAt(0) instanceof ASN1String))
        {
            throw new IllegalArgumentException("Bad object encountered: "
                + seq.getObjectAt(0).getClass());
        }

        surname = DirectoryString.getInstance(seq.getObjectAt(0));
        givenName = ASN1Sequence.getInstance(seq.getObjectAt(1));
    }

    /**
     * Constructor from a given details.
     *
     * @param pseudonym The pseudonym.
     */
    public NameOrPseudonym(String pseudonym)
    {
        this(new DirectoryString(pseudonym));
    }

    /**
     * Constructor from a given details.
     *
     * @param surname The surname.
     * @param givenName A sequence of directory strings making up the givenName
     */
    public NameOrPseudonym(DirectoryString surname, ASN1Sequence givenName)
    {
        this.surname = surname;
        this.givenName = givenName;
    }

    /**
     * @return the pseudonym, or null if the surAndGivenName branch was chosen.
     */
    public DirectoryString getPseudonym()
    {
        return pseudonym;
    }

    /**
     * @return the surname, or null if the pseudonym branch was chosen.
     */
    public DirectoryString getSurname()
    {
        return surname;
    }

    /**
     * @return the given names as an array; only meaningful for the
     *         surAndGivenName branch.
     */
    public DirectoryString[] getGivenName()
    {
        DirectoryString[] items = new DirectoryString[givenName.size()];
        int count = 0;
        for (Enumeration e = givenName.getObjects(); e.hasMoreElements();)
        {
            items[count++] = DirectoryString.getInstance(e.nextElement());
        }
        return items;
    }

    /**
     * Produce an object suitable for an ASN1OutputStream.
     * <p>
     * Returns:
     * <pre>
     *       NameOrPseudonym ::= CHOICE {
     *     	   surAndGivenName SEQUENCE {
     *     	     surName DirectoryString,
     *     	     givenName SEQUENCE OF DirectoryString
     *         },
     *     	   pseudonym DirectoryString
     *       }
     * </pre>
     *
     * @return a DERObject
     */
    public ASN1Primitive toASN1Primitive()
    {
        // A populated pseudonym wins; otherwise encode the two-element
        // surname + givenName sequence.
        if (pseudonym != null)
        {
            return pseudonym.toASN1Primitive();
        }
        else
        {
            ASN1EncodableVector vec1 = new ASN1EncodableVector();
            vec1.add(surname);
            vec1.add(givenName);
            return new DERSequence(vec1);
        }
    }
}
| mit |
watusi/rhodes | platform/android/Rhodes/src/com/rhomobile/rhodes/api/MethodResult.java | 16583 | package com.rhomobile.rhodes.api;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.json.JSONException;
import com.rhomobile.rhodes.Logger;
import com.rhomobile.rhodes.util.JSONGenerator;
/**
 * Carrier for the result of a native-extension API method call, bridging Java
 * results back to JavaScript or Ruby callers via JNI.
 * <p>
 * A result holds exactly one typed value (boolean/int/double/String/List/Map)
 * or an error, tracked by {@link ResultType}. When a callback is attached
 * (set from native code), every {@code set*} call forwards the value through
 * {@link #nativeCallBack}.
 */
public class MethodResult implements IMethodResult {
    private static final String TAG = MethodResult.class.getSimpleName();

    /**
     * JSON generator that serializes plain strings as Rho object references
     * ({@code {"__rhoID": ..., "__rhoClass": ...}}) rather than bare strings,
     * so results representing native object ids reach JS/Ruby as proxies.
     */
    private class JSONObjectResultGenerator extends JSONGenerator {
        private String mObjectClassPath;

        public JSONObjectResultGenerator(Object obj, String objectClassPath) {
            super(obj);
            mObjectClassPath = objectClassPath;
        }

        @Override
        protected void parse(Object obj) throws JSONException {
            if(String.class.isInstance(obj)) {
                getStringer().object();
                getStringer().key("__rhoID").value(obj);
                getStringer().key("__rhoClass").value(mObjectClassPath);
                getStringer().endObject();
            } else {
                super.parse(obj);
            }
        }
    }

    // NOTE: ordinal values are consumed by native code via getResultType() --
    // do not reorder or insert constants.
    public enum ResultType { typeNone, typeBoolean, typeInteger, typeDouble, typeString, typeList, typeMap, typeError, typeArgError }

    // True when the caller is Ruby rather than JavaScript.
    private boolean mIsRuby;
    // When true (default) the callback is released after the first delivery.
    private boolean mSingleShot = true;
    // Callback fields -- presumably populated from native code (JNI); confirm
    // before renaming, as JNI field lookups are by exact name.
    private String mStrCallback = "";
    private String mStrCallbackData = "";
    private long mRubyProcCallback;
    private int mTabId;
    // Name under which the result value is reported (default "result").
    private String mResultParamName = "result";
    // Class path / Ruby class used to wrap string results as object proxies.
    private String mObjectClassPath;
    private long mRubyObjectClass;

    private ResultType mResultType = ResultType.typeNone;
    // When not typeNone, string results are coerced to this type in set(String).
    private ResultType mForceType = ResultType.typeNone;

    private boolean mBooleanResult = false;
    private int mIntegerResult = 0;
    private double mDoubleResult = 0.0;
    private String mStrResult;
    private Collection<Object> mListResult;
    private Map<String, Object> mMapResult;

    private native void nativeCallBack(int tabId, boolean isRuby, boolean releaseCallback);
    private native static void nativeReleaseRubyProcCallback(long rubyProc);

    // Unused from Java; kept because it may be invoked from native code.
    private void reset() {
        mResultType = ResultType.typeNone;
    }

    public MethodResult(boolean isRuby) {
        mIsRuby = isRuby;
        mTabId = -1;
    }

    /**
     * Delivers the current result through the native callback bridge if a JS
     * or Ruby callback is attached. Centralizes the check that was previously
     * duplicated in every set*() method.
     */
    private void fireCallbackIfSet() {
        if (hasCallback()) {
            Logger.T(TAG, "Calling native callback handler");
            nativeCallBack(mTabId, mIsRuby, mSingleShot);
        }
    }

    /** Marks the callback as persistent: it survives multiple deliveries. */
    @Override
    public void keepAlive() { mSingleShot = false; }

    @Override
    public void forceBooleanType() {
        mForceType = ResultType.typeBoolean;
    }

    @Override
    public void forceIntegerType() {
        mForceType = ResultType.typeInteger;
    }

    @Override
    public void forceDoubleType() {
        mForceType = ResultType.typeDouble;
    }

    /** Releases the native Ruby proc callback reference, if any. */
    @Override
    public void release() {
        if (mRubyProcCallback != 0) {
            nativeReleaseRubyProcCallback(mRubyProcCallback);
        }
    }

    /** @return the ordinal of the current {@link ResultType} (JNI contract). */
    public int getResultType() {
        Logger.T(TAG, "getResultType: " + mResultType.name() + " - " + mResultType.ordinal());
        return mResultType.ordinal();
    }

    /** Debug description of callback target, object class and current value. */
    public String toString() {
        StringBuilder res = new StringBuilder();
        if (mRubyProcCallback != 0) {
            res.append("RubyProc: ").append(mRubyProcCallback);
        } else if (mStrCallback != null){
            res.append("Callback: ").append(mStrCallback);
        }
        if (mStrCallbackData != null) {
            res.append(", data: ").append(mStrCallbackData);
        }
        if (mRubyObjectClass != 0) {
            res.append("; RubyClass: ").append(mRubyObjectClass);
        } else if (mObjectClassPath != null){
            res.append("; Class path: ").append(mObjectClassPath);
        }
        res.append("; Tab id: ").append(mTabId);
        res.append("; resultType: ").append(mResultType.name());
        res.append("; ").append(mResultParamName).append(": ");
        switch (mResultType) {
        case typeBoolean:
            res.append(mBooleanResult);
            break;
        case typeInteger:
            res.append(mIntegerResult);
            break;
        case typeDouble:
            res.append(mDoubleResult);
            break;
        case typeString:
            res.append('"').append(mStrResult).append('"');
            break;
        case typeList:
            res.append(mListResult.toString());
            break;
        case typeMap:
            res.append(mMapResult.toString());
            break;
        case typeError:
        case typeArgError:
            res.append('"').append(mStrResult).append('"');
            break;
        }
        return res.toString();
    }

    // Typed accessors -- each only meaningful when mResultType matches.
    public boolean getBoolean() { return mBooleanResult; }
    public int getInteger() { return mIntegerResult; }
    public double getDouble() { return mDoubleResult; }
    public String getString() { return mStrResult; }
    public Collection<Object> getList() { return mListResult; }
    public Map<String, Object> getMap() { return mMapResult; }

    /**
     * Serializes the current result to JSON. When an object class path is set,
     * string values are encoded as Rho object references.
     *
     * @throws JSONException if serialization fails
     */
    public String getJson() throws JSONException {
        JSONGenerator json;
        switch(mResultType) {
        case typeList:
            if(getObjectClassPath() != null)
                json = new JSONObjectResultGenerator(getList(), getObjectClassPath());
            else
                json = new JSONGenerator(getList());
            break;
        case typeMap:
            if(getObjectClassPath() != null)
                json = new JSONObjectResultGenerator(getMap(), getObjectClassPath());
            else
                json = new JSONGenerator(getMap());
            break;
        case typeBoolean:
            json = new JSONGenerator(Boolean.valueOf(getBoolean()));
            break;
        case typeInteger:
            json = new JSONGenerator(Integer.valueOf(getInteger()));
            break;
        case typeDouble:
            json = new JSONGenerator(Double.valueOf(getDouble()));
            break;
        case typeString:
            if(getObjectClassPath() != null)
                json = new JSONObjectResultGenerator(getString(), getObjectClassPath());
            else
                json = new JSONGenerator(getString());
            break;
        default:
            json = new JSONGenerator(null);
        }
        String res = json.toString();
        Logger.D(TAG, res);
        return res;
    }

    public String getResultParamName() {
        return mResultParamName;
    }

    public void setResultParamName(String resultParamName) {
        mResultParamName = resultParamName;
    }

    public String getObjectClassPath() {
        return mObjectClassPath;
    }

    public void setObjectClassPath(String objectClassPath) {
        mObjectClassPath = objectClassPath;
    }

    public long getRubyObjectClass() {
        return mRubyObjectClass;
    }

    public void setRubyObjectClass(long rubyObjectClass) {
        mRubyObjectClass = rubyObjectClass;
    }

    @Override
    public void set(boolean res) {
        Logger.T(TAG, "set("+res+")");
        mBooleanResult = res;
        mResultType = ResultType.typeBoolean;
        Logger.T(TAG, toString());
        fireCallbackIfSet();
    }

    @Override
    public void set(int res) {
        Logger.T(TAG, "set("+res+")");
        mIntegerResult = res;
        mResultType = ResultType.typeInteger;
        Logger.T(TAG, toString());
        fireCallbackIfSet();
    }

    @Override
    public void set(double res) {
        Logger.T(TAG, "set("+res+")");
        mDoubleResult = res;
        mResultType = ResultType.typeDouble;
        Logger.T(TAG, toString());
        fireCallbackIfSet();
    }

    /**
     * Sets a string result, coercing to boolean/int/double first when a force
     * type was requested. On a number-format failure the raw string is kept.
     */
    @Override
    public void set(String res) {
        Logger.T(TAG, "set(\""+res+"\")");
        try {
            switch(mForceType) {
            case typeNone:
            case typeString:
                break;
            case typeBoolean:
                set(Boolean.valueOf(res).booleanValue());
                return;
            case typeInteger:
                if (res != null) {
                    set(Integer.valueOf(res).intValue());
                } else {
                    set(0);
                }
                return;
            case typeDouble:
                if (res != null) {
                    set(Double.valueOf(res).doubleValue());
                } else {
                    set(0.0);
                }
                return;
            default:
                Logger.W(TAG, "Cannot force string result to type: " + mForceType.name() + ". Returning string result: " + res);
                break;
            }
        } catch (NumberFormatException ex) {
            Logger.E(TAG, ex);
            Logger.W(TAG, "Returning string result: " + res);
        }
        mStrResult = res;
        mResultType = ResultType.typeString;
        Logger.T(TAG, toString());
        fireCallbackIfSet();
    }

    @Override
    public void set(Collection<Object> res) {
        Logger.T(TAG, "set("+res+")");
        mListResult = res;
        mResultType = ResultType.typeList;
        Logger.T(TAG, toString());
        fireCallbackIfSet();
    }

    @Override
    public void set(Map<String, Object> res) {
        Logger.T(TAG, "set("+res+")");
        mMapResult = res;
        mResultType = ResultType.typeMap;
        Logger.T(TAG, toString());
        fireCallbackIfSet();
    }

    /**
     * Accumulates a value into a list result without firing the callback.
     * Initializes the list on first use; collecting into a non-list result
     * is reported as an error.
     */
    @Override
    public void collect(Object value) {
        if (mResultType == ResultType.typeNone) {
            mListResult = new ArrayList<Object>();
            mResultType = ResultType.typeList;
        }
        if (mResultType != ResultType.typeList) {
            if (mResultType != ResultType.typeError && mResultType != ResultType.typeArgError) {
                setError("Wrong result type when collecting list result: " + mResultType.toString());
            }
            return;
        }
        mListResult.add(value);
    }

    /**
     * Accumulates a key/value pair into a map result without firing the
     * callback. Initializes the map on first use; collecting into a non-map
     * result is reported as an error.
     */
    @Override
    public void collect(String key, Object value) {
        if (mResultType == ResultType.typeNone) {
            mMapResult = new HashMap<String, Object>();
            mResultType = ResultType.typeMap;
        }
        if (mResultType != ResultType.typeMap) {
            if (mResultType != ResultType.typeError && mResultType != ResultType.typeArgError) {
                setError("Wrong result type when collecting map result: " + mResultType.toString());
            }
            return;
        }
        mMapResult.put(key, value);
    }

    /** Replays this result's typed value into {@code result} via set()/setError(). */
    @Override
    public void setSelf(IMethodResult result) {
        switch(mResultType) {
        case typeNone:
            break;
        case typeBoolean:
            result.set(getBoolean());
            break;
        case typeInteger:
            result.set(getInteger());
            break;
        case typeDouble:
            result.set(getDouble());
            break;
        case typeString:
            result.set(getString());
            break;
        case typeList:
            result.set(getList());
            break;
        case typeMap:
            result.set(getMap());
            break;
        case typeArgError:
            result.setArgError(getString());
            break;
        case typeError:
            result.setError(getString());
            break;
        }
    }

    /** Appends this result's value as one element of {@code result}'s list. */
    @Override
    public void collectSelf(IMethodResult result) {
        switch(mResultType) {
        case typeNone:
            break;
        case typeBoolean:
            result.collect(Boolean.valueOf(getBoolean()));
            break;
        case typeInteger:
            result.collect(Integer.valueOf(getInteger()));
            break;
        case typeDouble:
            result.collect(Double.valueOf(getDouble()));
            break;
        case typeString:
            result.collect(getString());
            break;
        case typeList:
            result.collect(getList());
            break;
        case typeMap:
            result.collect(getMap());
            break;
        case typeArgError:
            result.setArgError(getString());
            break;
        case typeError:
            result.setError(getString());
            break;
        }
    }

    /** Stores this result's value under {@code key} in {@code result}'s map. */
    @Override
    public void collectSelf(String key, IMethodResult result) {
        switch(mResultType) {
        case typeNone:
            break;
        case typeBoolean:
            result.collect(key, Boolean.valueOf(getBoolean()));
            break;
        case typeInteger:
            result.collect(key, Integer.valueOf(getInteger()));
            break;
        case typeDouble:
            result.collect(key, Double.valueOf(getDouble()));
            break;
        case typeString:
            result.collect(key, getString());
            break;
        case typeList:
            result.collect(key, getList());
            break;
        case typeMap:
            result.collect(key, getMap());
            break;
        case typeArgError:
            result.setArgError(getString());
            break;
        case typeError:
            result.setError(getString());
            break;
        }
    }

    /**
     * Merges a container result element-by-element into {@code result}.
     * Only list/map (and error) results can be merged; simple types throw.
     *
     * @throws RuntimeException if this result holds a simple (scalar) type
     */
    @Override
    public void mergeSelf(IMethodResult result) {
        switch(mResultType) {
        case typeNone:
            break;
        case typeList:
            for (Object value: getList()) {
                result.collect(value);
            }
            break;
        case typeMap:
            for (Map.Entry<String, Object> entry: getMap().entrySet()) {
                result.collect(entry.getKey(), entry.getValue());
            }
            break;
        case typeArgError:
            result.setArgError(getString());
            break;
        case typeError:
            result.setError(getString());
            break;
        case typeBoolean:
        case typeInteger:
        case typeDouble:
        case typeString:
            throw new RuntimeException("Cannot merge result of simple type: " + mResultType);
        }
    }

    /** Fires the callback with whatever value was collected so far (or none). */
    @Override
    public void set() {
        Logger.T(TAG, toString());
        fireCallbackIfSet();
    }

    /** Reports an argument error and fires the callback. */
    @Override
    public void setArgError(String message) {
        mStrResult = message;
        mResultType = ResultType.typeArgError;
        Logger.E(TAG, toString());
        fireCallbackIfSet();
    }

    /**
     * Reports an error from a throwable, falling back to the exception class
     * name when it has no message, and fires the callback.
     */
    @Override
    public void set(Throwable ex) {
        mStrResult = ex.getMessage();
        if(mStrResult == null || mStrResult.length() == 0) {
            mStrResult = ex.getClass().getSimpleName();
        }
        mResultType = ResultType.typeError;
        Logger.E(TAG, ex);
        Logger.E(TAG, toString());
        fireCallbackIfSet();
    }

    /** Reports a general error and fires the callback. */
    @Override
    public void setError(String message) {
        mStrResult = message;
        mResultType = ResultType.typeError;
        Logger.E(TAG, toString());
        fireCallbackIfSet();
    }

    /** @return true when either a JS string callback or a Ruby proc is attached. */
    @Override
    public boolean hasCallback() {
        return (mStrCallback.length() > 0 || mRubyProcCallback != 0);
    }
}
| mit |
phxql/smarthome | bundles/core/org.eclipse.smarthome.core.voice.test/src/test/java/org/eclipse/smarthome/core/voice/TTSExceptionTest.java | 1673 | /**
* Copyright (c) 2014-2016 by the respective copyright holders.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.eclipse.smarthome.core.voice;
import org.junit.Assert;
import org.junit.Test;
/**
* Test general purpose TTS exception
*
* @author Kelly Davis - Initial contribution and API
*/
/**
 * Verifies that each of the four {@link TTSException} constructors produces a
 * non-null exception instance.
 *
 * @author Kelly Davis - Initial contribution and API
 */
public class TTSExceptionTest {

    /** The no-argument constructor must succeed. */
    @Test
    public void testConstructor0() {
        Assert.assertNotNull("TTSException() constructor failed", new TTSException());
    }

    /** The (String, Throwable) constructor must succeed. */
    @Test
    public void testConstructor1() {
        Assert.assertNotNull("TTSException(String, Throwable) constructor failed",
                new TTSException("Message", new Throwable()));
    }

    /** The (String) constructor must succeed. */
    @Test
    public void testConstructor2() {
        Assert.assertNotNull("TTSException(String) constructor failed", new TTSException("Message"));
    }

    /** The (Throwable) constructor must succeed. */
    @Test
    public void testConstructor3() {
        Assert.assertNotNull("TTSException(Throwable) constructor failed", new TTSException(new Throwable()));
    }
}
| epl-1.0 |
pplatek/adempiere | base/src/org/eevolution/model/X_PP_WF_Node_Product.java | 8018 | /******************************************************************************
* Product: Adempiere ERP & CRM Smart Business Solution *
* Copyright (C) 1999-2007 ComPiere, Inc. All Rights Reserved. *
* This program is free software, you can redistribute it and/or modify it *
* under the terms version 2 of the GNU General Public License as published *
* by the Free Software Foundation. This program is distributed in the hope *
* that it will be useful, but WITHOUT ANY WARRANTY, without even the implied *
* warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *
* See the GNU General Public License for more details. *
* You should have received a copy of the GNU General Public License along *
* with this program, if not, write to the Free Software Foundation, Inc., *
* 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. *
* For the text or an alternative of this public license, you may reach us *
* ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA *
* or via info@compiere.org or http://www.compiere.org/license.html *
*****************************************************************************/
/** Generated Model - DO NOT CHANGE */
package org.eevolution.model;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.util.Properties;
import org.compiere.model.*;
import org.compiere.util.Env;
/** Generated Model for PP_WF_Node_Product
 *  Persistence object for the PP_WF_Node_Product table (products attached to a
 *  manufacturing workflow node). Generated code — do not hand-edit logic.
 *  @author Adempiere (generated)
 *  @version Release 3.8.0 - $Id$ */
public class X_PP_WF_Node_Product extends PO implements I_PP_WF_Node_Product, I_Persistent
{
    /** Serial version UID — encodes the generation date. */
    private static final long serialVersionUID = 20150223L;

    /** Standard Constructor */
    public X_PP_WF_Node_Product (Properties ctx, int PP_WF_Node_Product_ID, String trxName)
    {
        super (ctx, PP_WF_Node_Product_ID, trxName);
        /** if (PP_WF_Node_Product_ID == 0)
        {
            setAD_WF_Node_ID (0);
            setEntityType (null);
// U
            setM_Product_ID (0);
            setPP_WF_Node_Product_ID (0);
        } */
    }

    /** Load Constructor */
    public X_PP_WF_Node_Product (Properties ctx, ResultSet rs, String trxName)
    {
        super (ctx, rs, trxName);
    }

    /** AccessLevel
     *  @return 3 - Client - Org
     */
    protected int get_AccessLevel()
    {
        return accessLevel.intValue();
    }

    /** Load Meta Data */
    protected POInfo initPO (Properties ctx)
    {
        POInfo poi = POInfo.getPOInfo (ctx, Table_ID, get_TrxName());
        return poi;
    }

    /** @return short textual identification of this record */
    public String toString()
    {
        StringBuffer sb = new StringBuffer ("X_PP_WF_Node_Product[")
            .append(get_ID()).append("]");
        return sb.toString();
    }

    /** @return the referenced workflow node record */
    public org.compiere.model.I_AD_WF_Node getAD_WF_Node() throws RuntimeException
    {
        return (org.compiere.model.I_AD_WF_Node)MTable.get(getCtx(), org.compiere.model.I_AD_WF_Node.Table_Name)
            .getPO(getAD_WF_Node_ID(), get_TrxName());
    }

    /** Set Node.
        @param AD_WF_Node_ID
        Workflow Node (activity), step or process
      */
    public void setAD_WF_Node_ID (int AD_WF_Node_ID)
    {
        // IDs below 1 are stored as SQL NULL
        if (AD_WF_Node_ID < 1)
            set_ValueNoCheck (COLUMNNAME_AD_WF_Node_ID, null);
        else
            set_ValueNoCheck (COLUMNNAME_AD_WF_Node_ID, Integer.valueOf(AD_WF_Node_ID));
    }

    /** Get Node.
        @return Workflow Node (activity), step or process; 0 when unset
      */
    public int getAD_WF_Node_ID ()
    {
        Integer ii = (Integer)get_Value(COLUMNNAME_AD_WF_Node_ID);
        if (ii == null)
            return 0;
        return ii.intValue();
    }

    /** ConfigurationLevel AD_Reference_ID=53222 */
    public static final int CONFIGURATIONLEVEL_AD_Reference_ID=53222;
    /** System = S */
    public static final String CONFIGURATIONLEVEL_System = "S";
    /** Client = C */
    public static final String CONFIGURATIONLEVEL_Client = "C";
    /** Organization = O */
    public static final String CONFIGURATIONLEVEL_Organization = "O";
    /** Set Configuration Level.
        @param ConfigurationLevel
        Configuration Level for this parameter (one of the CONFIGURATIONLEVEL_* constants)
      */
    public void setConfigurationLevel (String ConfigurationLevel)
    {
        set_Value (COLUMNNAME_ConfigurationLevel, ConfigurationLevel);
    }

    /** Get Configuration Level.
        @return Configuration Level for this parameter
      */
    public String getConfigurationLevel ()
    {
        return (String)get_Value(COLUMNNAME_ConfigurationLevel);
    }

    /** EntityType AD_Reference_ID=389 */
    public static final int ENTITYTYPE_AD_Reference_ID=389;
    /** Set Entity Type.
        @param EntityType
        Dictionary Entity Type; determines ownership and synchronization
      */
    public void setEntityType (String EntityType)
    {
        set_Value (COLUMNNAME_EntityType, EntityType);
    }

    /** Get Entity Type.
        @return Dictionary Entity Type; determines ownership and synchronization
      */
    public String getEntityType ()
    {
        return (String)get_Value(COLUMNNAME_EntityType);
    }

    /** Set Is Subcontracting.
        @param IsSubcontracting Is Subcontracting */
    public void setIsSubcontracting (boolean IsSubcontracting)
    {
        set_Value (COLUMNNAME_IsSubcontracting, Boolean.valueOf(IsSubcontracting));
    }

    /** Get Is Subcontracting.
        @return Is Subcontracting; legacy rows may store the flag as "Y"/"N" strings
      */
    public boolean isSubcontracting ()
    {
        Object oo = get_Value(COLUMNNAME_IsSubcontracting);
        if (oo != null)
        {
            if (oo instanceof Boolean)
                return ((Boolean)oo).booleanValue();
            return "Y".equals(oo);
        }
        return false;
    }

    /** @return the referenced product record */
    public org.compiere.model.I_M_Product getM_Product() throws RuntimeException
    {
        return (org.compiere.model.I_M_Product)MTable.get(getCtx(), org.compiere.model.I_M_Product.Table_Name)
            .getPO(getM_Product_ID(), get_TrxName());
    }

    /** Set Product.
        @param M_Product_ID
        Product, Service, Item
      */
    public void setM_Product_ID (int M_Product_ID)
    {
        // IDs below 1 are stored as SQL NULL
        if (M_Product_ID < 1)
            set_Value (COLUMNNAME_M_Product_ID, null);
        else
            set_Value (COLUMNNAME_M_Product_ID, Integer.valueOf(M_Product_ID));
    }

    /** Get Product.
        @return Product, Service, Item; 0 when unset
      */
    public int getM_Product_ID ()
    {
        Integer ii = (Integer)get_Value(COLUMNNAME_M_Product_ID);
        if (ii == null)
            return 0;
        return ii.intValue();
    }

    /** Set Workflow Node Product.
        @param PP_WF_Node_Product_ID Workflow Node Product (primary key) */
    public void setPP_WF_Node_Product_ID (int PP_WF_Node_Product_ID)
    {
        if (PP_WF_Node_Product_ID < 1)
            set_ValueNoCheck (COLUMNNAME_PP_WF_Node_Product_ID, null);
        else
            set_ValueNoCheck (COLUMNNAME_PP_WF_Node_Product_ID, Integer.valueOf(PP_WF_Node_Product_ID));
    }

    /** Get Workflow Node Product.
        @return Workflow Node Product (primary key); 0 when unset */
    public int getPP_WF_Node_Product_ID ()
    {
        Integer ii = (Integer)get_Value(COLUMNNAME_PP_WF_Node_Product_ID);
        if (ii == null)
            return 0;
        return ii.intValue();
    }

    /** Set Quantity.
        @param Qty
        Quantity
      */
    public void setQty (BigDecimal Qty)
    {
        set_Value (COLUMNNAME_Qty, Qty);
    }

    /** Get Quantity.
        @return Quantity; never null (defaults to zero)
      */
    public BigDecimal getQty ()
    {
        BigDecimal bd = (BigDecimal)get_Value(COLUMNNAME_Qty);
        if (bd == null)
            return Env.ZERO;
        return bd;
    }

    /** Set Sequence.
        @param SeqNo
        Method of ordering records; lowest number comes first
      */
    public void setSeqNo (int SeqNo)
    {
        set_Value (COLUMNNAME_SeqNo, Integer.valueOf(SeqNo));
    }

    /** Get Sequence.
        @return Method of ordering records; lowest number comes first
      */
    public int getSeqNo ()
    {
        Integer ii = (Integer)get_Value(COLUMNNAME_SeqNo);
        if (ii == null)
            return 0;
        return ii.intValue();
    }

    /** Set Yield %.
        @param Yield
        The Yield is the percentage of a lot that is expected to be of acceptable quality; it may fall below 100 percent
      */
    public void setYield (int Yield)
    {
        set_Value (COLUMNNAME_Yield, Integer.valueOf(Yield));
    }

    /** Get Yield %.
        @return The Yield is the percentage of a lot that is expected to be of acceptable quality; it may fall below 100 percent
      */
    public int getYield ()
    {
        Integer ii = (Integer)get_Value(COLUMNNAME_Yield);
        if (ii == null)
            return 0;
        return ii.intValue();
    }
}
YouDiSN/OpenJDK-Research | jdk9/jaxp/src/java.xml/share/classes/javax/xml/transform/package-info.java | 10752 | /*
* Copyright (c) 2015, 2017, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/**
* Defines the generic APIs for processing transformation instructions,
* and performing a transformation from source to result. These interfaces have no
* dependencies on SAX or the DOM standard, and try to make as few assumptions as
* possible about the details of the source and result of a transformation. It
* achieves this by defining {@link javax.xml.transform.Source} and
* {@link javax.xml.transform.Result} interfaces.
*
* <p>
* To provide concrete classes for the user, the API defines specializations
* of the interfaces found at the root level. These interfaces are found in
* {@link javax.xml.transform.sax}, {@link javax.xml.transform.dom},
* {@link javax.xml.transform.stax}, and {@link javax.xml.transform.stream}.
*
*
* <h3>Creating Objects</h3>
*
* <p>
* The API allows a concrete {@link javax.xml.transform.TransformerFactory}
* object to be created from the static function
* {@link javax.xml.transform.TransformerFactory#newInstance}.
*
*
* <h3>Specification of Inputs and Outputs</h3>
*
* <p>
* This API defines two interface objects called {@link javax.xml.transform.Source}
* and {@link javax.xml.transform.Result}. In order to pass Source and Result
* objects to the interfaces, concrete classes must be used. The following concrete
* representations are defined for each of these objects:
* {@link javax.xml.transform.stream.StreamSource} and
* {@link javax.xml.transform.stream.StreamResult},
* {@link javax.xml.transform.stax.StAXSource} and
* {@link javax.xml.transform.stax.StAXResult}, and
* {@link javax.xml.transform.sax.SAXSource} and
* {@link javax.xml.transform.sax.SAXResult}, and
* {@link javax.xml.transform.dom.DOMSource} and
* {@link javax.xml.transform.dom.DOMResult}. Each of these objects defines a
* FEATURE string (which is in the form of a URL), which can be passed into
* {@link javax.xml.transform.TransformerFactory#getFeature} to see if the given
* type of Source or Result object is supported. For instance, to test if a
* DOMSource and a StreamResult is supported, you can apply the following test.
*
* <pre>
* <code>
* TransformerFactory tfactory = TransformerFactory.newInstance();
* if (tfactory.getFeature(DOMSource.FEATURE) &&
* tfactory.getFeature(StreamResult.FEATURE)) {
* ...
* }
* </code>
* </pre>
*
*
* <h3>
* <a id="qname-delimiter">Qualified Name Representation</a>
* </h3>
*
* <p>
* <a href="http://www.w3.org/TR/REC-xml-names">Namespaces</a> present something
* of a problem area when dealing with XML objects. Qualified Names appear in XML
* markup as prefixed names. But the prefixes themselves do not hold identity.
* Rather, it is the URIs that they contextually map to that hold the identity.
* Therefore, when passing a Qualified Name like "xyz:foo" among Java programs,
* one must provide a means to map "xyz" to a namespace.
*
* <p>
* One solution has been to create a "QName" object that holds the namespace URI,
* as well as the prefix and local name, but this is not always an optimal solution,
* as when, for example, you want to use unique strings as keys in a dictionary
* object. Not having a string representation also makes it difficult to specify
* a namespaced identity outside the context of an XML document.
*
* <p>
* In order to pass namespaced values to transformations, for instance when setting
* a property or a parameter on a {@link javax.xml.transform.Transformer} object,
* this specification defines that a String "qname" object parameter be passed as
* two-part string, the namespace URI enclosed in curly braces ({}), followed by
* the local name. If the qname has a null URI, then the String object only
* contains the local name. An application can safely check for a non-null URI by
* testing to see if the first character of the name is a '{' character.
*
* <p>
* For example, if a URI and local name were obtained from an element defined with
 * {@code <xyz:foo xmlns:xyz="http://xyz.foo.com/yada/baz.html"/>}, then the
* Qualified Name would be "{http://xyz.foo.com/yada/baz.html}foo". Note that the
* prefix is lost.
*
*
* <h3>Result Tree Serialization</h3>
*
* <p>
* Serialization of the result tree to a stream can be controlled with the
* {@link javax.xml.transform.Transformer#setOutputProperties} and the
* {@link javax.xml.transform.Transformer#setOutputProperty} methods.
* These properties only apply to stream results, they have no effect when
* the result is a DOM tree or SAX event stream.
*
* <p>
* Strings that match the <a href="http://www.w3.org/TR/xslt#output">XSLT
* specification for xsl:output attributes</a> can be referenced from the
* {@link javax.xml.transform.OutputKeys} class. Other strings can be
* specified as well.
* If the transformer does not recognize an output key, a
* {@link java.lang.IllegalArgumentException} is thrown, unless the key name
* is <a href="#qname-delimiter">namespace qualified</a>. Output key names
* that are namespace qualified are always allowed, although they may be
* ignored by some implementations.
*
* <p>
* If all that is desired is the simple identity transformation of a
* source to a result, then {@link javax.xml.transform.TransformerFactory}
* provides a
* {@link javax.xml.transform.TransformerFactory#newTransformer()} method
* with no arguments. This method creates a Transformer that effectively copies
* the source to the result. This method may be used to create a DOM from SAX
* events or to create an XML or HTML stream from a DOM or SAX events.
*
* <h3>Exceptions and Error Reporting</h3>
*
* <p>
* The transformation API throw three types of specialized exceptions. A
* {@link javax.xml.transform.TransformerFactoryConfigurationError} is parallel to
* the {@link javax.xml.parsers.FactoryConfigurationError}, and is thrown
* when a configuration problem with the TransformerFactory exists. This error
* will typically be thrown when the transformation factory class specified with
* the "javax.xml.transform.TransformerFactory" system property cannot be found or
* instantiated.
*
* <p>
* A {@link javax.xml.transform.TransformerConfigurationException}
* may be thrown if for any reason a Transformer can not be created. A
* TransformerConfigurationException may be thrown if there is a syntax error in
* the transformation instructions, for example when
* {@link javax.xml.transform.TransformerFactory#newTransformer} is
* called.
*
* <p>
* {@link javax.xml.transform.TransformerException} is a general
* exception that occurs during the course of a transformation. A transformer
* exception may wrap another exception, and if any of the
* {@link javax.xml.transform.TransformerException#printStackTrace()}
* methods are called on it, it will produce a list of stack dumps, starting from
* the most recent. The transformer exception also provides a
* {@link javax.xml.transform.SourceLocator} object which indicates where
* in the source tree or transformation instructions the error occurred.
* {@link javax.xml.transform.TransformerException#getMessageAndLocation()}
* may be called to get an error message with location info, and
* {@link javax.xml.transform.TransformerException#getLocationAsString()}
* may be called to get just the location string.
*
* <p>
* Transformation warnings and errors are sent to an
* {@link javax.xml.transform.ErrorListener}, at which point the application may
* decide to report the error or warning, and may decide to throw an
* <code>Exception</code> for a non-fatal error. The <code>ErrorListener</code>
* may be set via {@link javax.xml.transform.TransformerFactory#setErrorListener}
* for reporting errors that have to do with syntax errors in the transformation
* instructions, or via {@link javax.xml.transform.Transformer#setErrorListener}
* to report errors that occur during the transformation. The <code>ErrorListener</code>
* on both objects will always be valid and non-<code>null</code>, whether set by
* the application or a default implementation provided by the processor.
* The default implementation provided by the processor will report all warnings
* and errors to <code>System.err</code> and does not throw any <code>Exception</code>s.
* Applications are <em>strongly</em> encouraged to register and use
 * <code>ErrorListener</code>s that ensure proper behavior for warnings and
* errors.
*
*
* <h3>Resolution of URIs within a transformation</h3>
*
* <p>
* The API provides a way for URIs referenced from within the stylesheet
* instructions or within the transformation to be resolved by the calling
* application. This can be done by creating a class that implements the
* {@link javax.xml.transform.URIResolver} interface, with its one method,
* {@link javax.xml.transform.URIResolver#resolve}, and use this class to
* set the URI resolution for the transformation instructions or transformation
* with {@link javax.xml.transform.TransformerFactory#setURIResolver} or
* {@link javax.xml.transform.Transformer#setURIResolver}. The
* <code>URIResolver.resolve</code> method takes two String arguments, the URI
* found in the stylesheet instructions or built as part of the transformation
* process, and the base URI against which the first argument will be made absolute
* if the absolute URI is required.
* The returned {@link javax.xml.transform.Source} object must be usable by
* the transformer, as specified in its implemented features.
*
* @since 1.5
*/
package javax.xml.transform;
| gpl-2.0 |
axDev-JDK/jaxws | src/share/jaxws_classes/com/sun/xml/internal/fastinfoset/util/ContiguousCharArrayArray.java | 7660 | /*
* Copyright (c) 2004, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
* THIS FILE WAS MODIFIED BY SUN MICROSYSTEMS, INC.
*/
package com.sun.xml.internal.fastinfoset.util;
import com.sun.xml.internal.fastinfoset.CommonResourceBundle;
/**
 * Stores many character sequences in one contiguous {@code char[]} buffer,
 * addressing each entry by an (offset, length) pair. An instance can be
 * layered on top of a read-only parent array, in which case the parent's
 * contents occupy the front of the buffer and local entries are appended
 * after them. String views of entries are created lazily and cached.
 */
public class ContiguousCharArrayArray extends ValueArray {
    public static final int INITIAL_CHARACTER_SIZE = 512;
    public static final int MAXIMUM_CHARACTER_SIZE = Integer.MAX_VALUE;

    // Hard upper bound on the total character capacity of _array.
    protected int _maximumCharacterSize;

    // Start position of each entry within _array.
    public int[] _offset;
    // Length of each entry.
    public int[] _length;

    // Shared contiguous character storage for all entries.
    public char[] _array;
    // Next free character position in _array.
    public int _arrayIndex;
    // Number of leading characters in _array owned by the read-only parent.
    public int _readOnlyArrayIndex;

    // Lazily populated String views, parallel to _offset/_length.
    private String[] _cachedStrings;
    public int _cachedIndex;

    private ContiguousCharArrayArray _readOnlyArray;

    public ContiguousCharArrayArray(int initialCapacity, int maximumCapacity,
            int initialCharacterSize, int maximumCharacterSize) {
        _offset = new int[initialCapacity];
        _length = new int[initialCapacity];
        _array = new char[initialCharacterSize];
        _maximumCapacity = maximumCapacity;
        _maximumCharacterSize = maximumCharacterSize;
    }

    public ContiguousCharArrayArray() {
        this(DEFAULT_CAPACITY, MAXIMUM_CAPACITY,
                INITIAL_CHARACTER_SIZE, MAXIMUM_CHARACTER_SIZE);
    }

    /**
     * Discards all local entries, rolling the write position and entry count
     * back to the read-only boundary, and drops cached Strings for them.
     */
    public final void clear() {
        _arrayIndex = _readOnlyArrayIndex;
        _size = _readOnlyArraySize;
        if (_cachedStrings != null) {
            for (int i = _readOnlyArraySize; i < _cachedStrings.length; i++) {
                _cachedStrings[i] = null;
            }
        }
    }

    /** @return the next free character position in the backing array */
    public final int getArrayIndex() {
        return _arrayIndex;
    }

    /**
     * Type-checked adapter for the {@link ValueArray} contract; delegates to
     * the strongly typed overload or fails for foreign ValueArray subclasses.
     */
    public final void setReadOnlyArray(ValueArray readOnlyArray, boolean clear) {
        if (!(readOnlyArray instanceof ContiguousCharArrayArray)) {
            throw new IllegalArgumentException(CommonResourceBundle.getInstance().getString("message.illegalClass", new Object[]{readOnlyArray}));
        }

        setReadOnlyArray((ContiguousCharArrayArray)readOnlyArray, clear);
    }

    /**
     * Adopts the given array as the read-only parent: merges its character,
     * offset and length data into fresh local arrays so that local appends
     * continue after the parent's content.
     */
    public final void setReadOnlyArray(ContiguousCharArrayArray readOnlyArray, boolean clear) {
        if (readOnlyArray != null) {
            _readOnlyArray = readOnlyArray;
            _readOnlyArraySize = readOnlyArray.getSize();
            _readOnlyArrayIndex = readOnlyArray.getArrayIndex();

            if (clear) {
                clear();
            }

            _array = getCompleteCharArray();
            _offset = getCompleteOffsetArray();
            _length = getCompleteLengthArray();
            _size = _readOnlyArraySize;
            _arrayIndex = _readOnlyArrayIndex;
        }
    }

    /**
     * Returns a character array holding the parent's content at the front.
     * NOTE(review): only the read-only prefix is copied; the trailing
     * _array.length characters are intentionally left as reserved free space
     * for subsequent local appends (local content is discarded by clear()
     * during setReadOnlyArray) — confirm against upstream FastInfoset usage.
     */
    public final char[] getCompleteCharArray() {
        if (_readOnlyArray == null) {
            return _array;
        } else {
            final char[] ra = _readOnlyArray.getCompleteCharArray();
            final char[] a = new char[_readOnlyArrayIndex + _array.length];
            System.arraycopy(ra, 0, a, 0, _readOnlyArrayIndex);
            return a;
        }
    }

    /** Returns an offset array prefixed with the parent's offsets. */
    public final int[] getCompleteOffsetArray() {
        if (_readOnlyArray == null) {
            return _offset;
        } else {
            final int[] ra = _readOnlyArray.getCompleteOffsetArray();
            final int[] a = new int[_readOnlyArraySize + _offset.length];
            System.arraycopy(ra, 0, a, 0, _readOnlyArraySize);
            return a;
        }
    }

    /** Returns a length array prefixed with the parent's lengths. */
    public final int[] getCompleteLengthArray() {
        if (_readOnlyArray == null) {
            return _length;
        } else {
            final int[] ra = _readOnlyArray.getCompleteLengthArray();
            final int[] a = new int[_readOnlyArraySize + _length.length];
            System.arraycopy(ra, 0, a, 0, _readOnlyArraySize);
            return a;
        }
    }

    /**
     * Returns the String view of entry i, creating and caching it on first
     * access. Grows the cache array (preserving existing entries) when i is
     * beyond its current bounds.
     */
    public final String getString(int i) {
        if (_cachedStrings != null && i < _cachedStrings.length) {
            final String s = _cachedStrings[i];
            return (s != null) ? s : (_cachedStrings[i] = new String(_array, _offset[i], _length[i]));
        }

        final String[] newCachedStrings = new String[_offset.length];
        if (_cachedStrings != null && i >= _cachedStrings.length) {
            System.arraycopy(_cachedStrings, 0, newCachedStrings, 0, _cachedStrings.length);
        }
        _cachedStrings = newCachedStrings;

        return _cachedStrings[i] = new String(_array, _offset[i], _length[i]);
    }

    /** Guarantees room for l more characters in the backing array. */
    public final void ensureSize(int l) {
        if (_arrayIndex + l >= _array.length) {
            resizeArray(_arrayIndex + l);
        }
    }

    /**
     * Records an entry of length l whose characters the caller has already
     * written (or will write) at the current array index; advances the index.
     */
    public final void add(int l) {
        if (_size == _offset.length) {
            resize();
        }

        _cachedIndex = _size;
        _offset[_size] = _arrayIndex;
        _length[_size++] = l;

        _arrayIndex += l;
    }

    /**
     * Copies the first l characters of c into the buffer as a new entry.
     * @return the offset at which the entry was stored
     */
    public final int add(char[] c, int l) {
        if (_size == _offset.length) {
            resize();
        }

        final int oldArrayIndex = _arrayIndex;
        final int arrayIndex = oldArrayIndex + l;

        _cachedIndex = _size;
        _offset[_size] = oldArrayIndex;
        _length[_size++] = l;

        if (arrayIndex >= _array.length) {
            resizeArray(arrayIndex);
        }

        System.arraycopy(c, 0, _array, oldArrayIndex, l);

        _arrayIndex = arrayIndex;

        return oldArrayIndex;
    }

    /** Grows the offset/length arrays by ~1.5x, capped at _maximumCapacity. */
    protected final void resize() {
        if (_size == _maximumCapacity) {
            throw new ValueArrayResourceException(CommonResourceBundle.getInstance().getString("message.arrayMaxCapacity"));
        }

        int newSize = _size * 3 / 2 + 1;
        if (newSize > _maximumCapacity) {
            newSize = _maximumCapacity;
        }

        final int[] offset = new int[newSize];
        System.arraycopy(_offset, 0, offset, 0, _size);
        _offset = offset;

        final int[] length = new int[newSize];
        System.arraycopy(_length, 0, length, 0, _size);
        _length = length;
    }

    /** Grows the character buffer by ~1.5x of the requested size, capped. */
    protected final void resizeArray(int requestedSize) {
        if (_arrayIndex == _maximumCharacterSize) {
            throw new ValueArrayResourceException(CommonResourceBundle.getInstance().getString("message.maxNumberOfCharacters"));
        }

        int newSize = requestedSize * 3 / 2 + 1;
        if (newSize > _maximumCharacterSize) {
            newSize = _maximumCharacterSize;
        }

        final char[] array = new char[newSize];
        System.arraycopy(_array, 0, array, 0, _arrayIndex);
        _array = array;
    }
}
| gpl-2.0 |
ogajduse/spacewalk | java/code/src/com/redhat/rhn/domain/server/SnapshotTagName.java | 1860 | /**
* Copyright (c) 2009--2017 Red Hat, Inc.
*
* This software is licensed to you under the GNU General Public License,
* version 2 (GPLv2). There is NO WARRANTY for this software, express or
* implied, including the implied warranties of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
* along with this software; if not, see
* http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
*
* Red Hat trademarks are not licensed under GPLv2. No permission is
* granted to use or replicate Red Hat trademarks that are incorporated
* in this software or its documentation.
*/
package com.redhat.rhn.domain.server;
import com.redhat.rhn.domain.BaseDomainHelper;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
/**
* CPU
* @version $Rev: 118113 $
*/
public class SnapshotTagName extends BaseDomainHelper {
private String name;
private Long id;
/**
* @return Returns the id.
*/
public Long getId() {
return id;
}
/**
* @param idIn The id to set.
*/
public void setId(Long idIn) {
this.id = idIn;
}
/**
* @return Returns the name.
*/
public String getName() {
return name;
}
/**
* @param nameIn The name to set.
*/
public void setName(String nameIn) {
this.name = nameIn;
}
/**
*
* {@inheritDoc}
*/
public int hashCode() {
return new HashCodeBuilder().append(name)
.toHashCode();
}
/**
*
* {@inheritDoc}
*/
public boolean equals(Object obj) {
SnapshotTagName other = (SnapshotTagName) obj;
return new EqualsBuilder().append(name, other.name)
.isEquals();
}
}
| gpl-2.0 |
gulliverrr/hestia-engine-dev | src/opt/boilercontrol/libs/org.eclipse.paho.client.mqttv3/src/main/java/org/eclipse/paho/client/mqttv3/IMqttDeliveryToken.java | 1927 | package org.eclipse.paho.client.mqttv3;
/**
 * Provides a mechanism for tracking the delivery of a message.
 *
 * <p>A subclass of IMqttToken that allows the delivery of a message to be tracked.
 * Unlike instances of IMqttToken delivery tokens can be used across connection
 * and client restarts. This enables the delivery of a message to be tracked
 * after failures. There are two approaches:
 * <ul>
 * <li>A list of delivery tokens for in-flight messages can be obtained using
 * {@link IMqttAsyncClient#getPendingDeliveryTokens()}. The waitForCompletion
 * method can then be used to block until the delivery is complete.
 * <li>A {@link MqttCallback} can be set on the client. Once a message has been
 * delivered the {@link MqttCallback#deliveryComplete(IMqttDeliveryToken)} method will
 * be called with the delivery token being passed as a parameter.
 * </ul>
 * <p>
 * An action is in progress until either:
 * <ul>
 * <li>isComplete() returns true, or
 * <li>getException() is not null. If a client shuts down before delivery is complete
 * an exception is returned. As long as the Java Runtime is not stopped a delivery token
 * is valid across a connection disconnect and reconnect. In the event the client
 * is shut down, the getPendingDeliveryTokens method can be used once the client is
 * restarted to obtain a list of delivery tokens for in-flight messages.
 * </ul>
 */
public interface IMqttDeliveryToken extends IMqttToken {
    /**
     * Returns the message associated with this token.
     * <p>Until the message has been delivered, the message being delivered will
     * be returned. Once the message has been delivered <code>null</code> will be
     * returned.
     * @return the message associated with this token or null if already delivered.
     * @throws MqttException if there was a problem completing retrieving the message
     */
    public MqttMessage getMessage() throws MqttException;
}
| gpl-3.0 |
BTCTaras/Essentials | EssentialsSpawn/src/com/earth2me/essentials/spawn/IEssentialsSpawn.java | 831 | package com.earth2me.essentials.spawn;
import org.bukkit.Location;
import org.bukkit.plugin.Plugin;
/**
 * Public API of the EssentialsSpawn plugin: read and write per-group
 * spawn locations.
 */
public interface IEssentialsSpawn extends Plugin {
    /**
     * Sets the spawn for a given group to a given location.
     *
     * @param loc The location to set the spawn to
     * @param group The group to set the spawn of, or 'default' for the default spawn
     *
     * @throws IllegalArgumentException If group is null
     */
    public void setSpawn(Location loc, String group);

    /**
     * Gets the spawn location for a given group.
     *
     * @param group The group to get the spawn of, or 'default' for the default spawn
     *
     * @return The spawn location set for the given group
     *
     * @throws IllegalArgumentException If group is null
     */
    public Location getSpawn(String group);
}
| gpl-3.0 |
sebastianpacheco/jPOS | jpos/src/test/java/org/jpos/util/SimpleMsgTest.java | 11537 | /*
* jPOS Project [http://jpos.org]
* Copyright (C) 2000-2015 Alejandro P. Revilla
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jpos.util;
import static org.junit.Assert.assertEquals;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import org.jpos.iso.ISOUtil;
import org.junit.Before;
import org.junit.Test;
public class SimpleMsgTest {
    // Stream handed to SimpleMsg.dump(); writes into os for later inspection.
    PrintStream p;
    // Captures everything dumped so tests can compare against expected XML.
    ByteArrayOutputStream os;
    // Platform line separator; dump() output is asserted line by line with it.
    private static final String NL = System.getProperty("line.separator");
    /** Creates a fresh capture buffer and PrintStream before each test. */
    @Before
    public void setUp() {
        os = new ByteArrayOutputStream();
        p = new PrintStream(os);
    }
@Test
public void testConstructor() throws Throwable {
SimpleMsg simpleMsg = new SimpleMsg("tag", "Some Name", true);
assertEquals("simpleMsg.msgContent", Boolean.TRUE, simpleMsg.msgContent);
assertEquals("simpleMsg.tagName", "tag", simpleMsg.tagName);
assertEquals("simpleMsg.msgName", "Some Name", simpleMsg.msgName);
}
@Test
public void testConstructor1() throws Throwable {
SimpleMsg simpleMsg = new SimpleMsg("tag", "Some Name", "testString");
assertEquals("simpleMsg.msgContent", "testString", simpleMsg.msgContent);
assertEquals("simpleMsg.tagName", "tag", simpleMsg.tagName);
assertEquals("simpleMsg.msgName", "Some Name", simpleMsg.msgName);
}
@Test
public void testConstructor2() throws Throwable {
SimpleMsg simpleMsg = new SimpleMsg("tag", "Some Name", 100L);
assertEquals("simpleMsg.msgContent", 100L, simpleMsg.msgContent);
assertEquals("simpleMsg.tagName", "tag", simpleMsg.tagName);
assertEquals("simpleMsg.msgName", "Some Name", simpleMsg.msgName);
}
@Test
public void testConstructor3() throws Throwable {
SimpleMsg simpleMsg = new SimpleMsg("tag", "Some Name", 100);
assertEquals("simpleMsg.msgContent", 100, simpleMsg.msgContent);
assertEquals("simpleMsg.tagName", "tag", simpleMsg.tagName);
assertEquals("simpleMsg.msgName", "Some Name", simpleMsg.msgName);
}
@Test
public void testConstructor4() throws Throwable {
SimpleMsg simpleMsg = new SimpleMsg("tag", "Some Name", (short) 100);
assertEquals("simpleMsg.msgContent", (short) 100, simpleMsg.msgContent);
assertEquals("simpleMsg.tagName", "tag", simpleMsg.tagName);
assertEquals("simpleMsg.msgName", "Some Name", simpleMsg.msgName);
}
@Test
public void testConstructor5() throws Throwable {
byte[] msgContent = new byte[0];
SimpleMsg simpleMsg = new SimpleMsg("tag", "Some Name", msgContent);
assertEquals("simpleMsg.msgContent", "", simpleMsg.msgContent);
assertEquals("simpleMsg.tagName", "tag", simpleMsg.tagName);
assertEquals("simpleMsg.msgName", "Some Name", simpleMsg.msgName);
}
    /** A nested SimpleMsg content is rendered as an indented child element. */
    @Test
    public void testDump() throws Throwable {
        new SimpleMsg("tag", "Some Name", new SimpleMsg("inner-tag", "Inner Name",
                100L)).dump(p, "--||--");
        assertEquals( "--||--<tag name=\"Some Name\">" + NL +
                "--||-- <inner-tag name=\"Inner Name\">" + NL +
                "--||-- 100" + NL +
                "--||-- </inner-tag>" + NL +
                "--||--</tag>" + NL
                ,os.toString());
    }
    /** A SimpleMsg[] content renders one child element per array entry. */
    @Test
    public void testDump1() throws Throwable {
        SimpleMsg[] msgContent = new SimpleMsg[0];
        SimpleMsg simpleMsg = new SimpleMsg("inner-tag", "Inner Name", msgContent);
        SimpleMsg[] msgContent2 = new SimpleMsg[2];
        // Same child instance appears twice; both occurrences are rendered.
        msgContent2[0] = simpleMsg;
        msgContent2[1] = simpleMsg;
        new SimpleMsg("tag", "Some Name", msgContent2).dump(p, "--||--");
        assertEquals( "--||--<tag name=\"Some Name\">" + NL +
                "--||-- <inner-tag name=\"Inner Name\">" + NL +
                "--||-- </inner-tag>" + NL +
                "--||-- <inner-tag name=\"Inner Name\">" + NL +
                "--||-- </inner-tag>" + NL +
                "--||--</tag>" + NL
                ,os.toString());
    }
    /** Mixed child content types (String, short, SimpleMsg[]) dump correctly. */
    @Test
    public void testDump2() throws Throwable {
        SimpleMsg[] msgContent = new SimpleMsg[0];
        SimpleMsg[] msgContent2 = new SimpleMsg[3];
        // String content with characters that must survive dumping unescaped.
        msgContent2[0] = new SimpleMsg("inner-tag1", "Inner Name1", "~@%&|K}Id]+l\\");
        msgContent2[1] = new SimpleMsg("inner-tag2", "Inner Name2", (short) 100);
        msgContent2[2] = new SimpleMsg("inner-tag3", "Inner Name3", msgContent);
        new SimpleMsg("tag", "Some Name", msgContent2).dump(p, "--||--");
        assertEquals( "--||--<tag name=\"Some Name\">" + NL +
                "--||-- <inner-tag1 name=\"Inner Name1\">" + NL +
                "--||-- ~@%&|K}Id]+l\\" + NL +
                "--||-- </inner-tag1>" + NL +
                "--||-- <inner-tag2 name=\"Inner Name2\">" + NL +
                "--||-- 100" + NL +
                "--||-- </inner-tag2>" + NL +
                "--||-- <inner-tag3 name=\"Inner Name3\">" + NL +
                "--||-- </inner-tag3>" + NL +
                "--||--</tag>" + NL
                ,os.toString());
    }
@Test
// dump() with a String content containing characters that look like markup/escapes:
// the content is emitted verbatim on its own indented line.
public void testDump3() throws Throwable {
new SimpleMsg("tag", "Some Name", "~@%&|K}Id]+l\\").dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  ~@%&|K}Id]+l\\" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
// dump() with an empty SimpleMsg[] content: renders only the open and close tags.
public void testDump4() throws Throwable {
SimpleMsg[] msgContent = new SimpleMsg[0];
new SimpleMsg("tag", "Some Name", msgContent).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
// dump() with a null content: renders a single self-closing tag.
public void testDumpContentNull() throws Throwable {
new SimpleMsg("tag", "Some Name", null).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\"/>" + NL
,os.toString());
}
@Test
// NOTE(review): this test is byte-for-byte identical to testDumpContentNull and so
// never exercises the byte[] code path its name promises; presumably the argument
// was meant to be '(byte[]) null' — confirm against SimpleMsg's constructor
// overloads before changing.
public void testDumpContentNullByteArr() throws Throwable {
new SimpleMsg("tag", "Some Name", null).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\"/>" + NL
,os.toString());
}
@Test
// dump() with a null content and a null name: self-closing tag without a name attribute.
public void testDumpContentNullWithoutName() throws Throwable {
new SimpleMsg("tag", null).dump(p, "--||--");
assertEquals( "--||--<tag/>" + NL
,os.toString());
}
@Test
public void testDumpContentWithoutName() throws Throwable {
    // An unnamed tag with a simple (int) content is rendered on a single line.
    SimpleMsg msg = new SimpleMsg("tag", 100);
    msg.dump(p, "--||--");
    String expected = "--||--<tag>100</tag>" + NL;
    assertEquals(expected, os.toString());
}
@Test
// dump() with a byte[] content: the bytes are rendered as an upper-case hex string
// on an indented line of their own.
public void testDumpContentByteArr() throws Throwable {
byte[] b = ISOUtil.hex2byte("3AF1");
new SimpleMsg("tag", "Some Name", b).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  3AF1" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
public void testDumpContentByteArrWithoutName() throws Throwable {
    // Binary content of an unnamed tag is rendered inline as upper-case hex,
    // even when the hex literal used to build it was lower-case.
    byte[] content = ISOUtil.hex2byte("f13a");
    SimpleMsg msg = new SimpleMsg("tag", content);
    msg.dump(p, "--||--");
    assertEquals("--||--<tag>F13A</tag>" + NL, os.toString());
}
@Test
// dump() with a boolean content: rendered as "true"/"false" on an indented line.
public void testDumpContentBoolean() throws Throwable {
new SimpleMsg("tag", "Some Name", true).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  true" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
// dump() with a short content: rendered as its decimal representation.
public void testDumpContentShort() throws Throwable {
new SimpleMsg("tag", "Some Name", (short)123).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  123" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
// dump() with a negative long content: sign is preserved in the rendered text.
public void testDumpContentLong() throws Throwable {
new SimpleMsg("tag", "Some Name", -123L).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  -123" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
// dump() with a negative double content: rendered with sign and decimal point.
public void testDumpContentDouble() throws Throwable {
new SimpleMsg("tag", "Some Name", -12.3).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  -12.3" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
public void testDumpContentDoubleWithoutName() throws Throwable {
    // A double-valued, unnamed tag renders on one line.
    SimpleMsg msg = new SimpleMsg("tag", -12.3);
    msg.dump(p, "--||--");
    String expected = "--||--<tag>-12.3</tag>" + NL;
    assertEquals(expected, os.toString());
}
@Test
// dump() with a nested SimpleMsg whose own content is null: the inner message
// renders as an indented self-closing tag.
public void testDumpInnerNull() throws Throwable {
new SimpleMsg("tag", "Some Name", new SimpleMsg("inner-tag",
"Inner Name", null)).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  <inner-tag name=\"Inner Name\"/>" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
// dump() with a SimpleMsg[] whose elements were never assigned: each null slot is
// rendered literally as the text "null" on its own indented line.
public void testDumpInnerNulls() throws Throwable {
SimpleMsg[] msgContent = new SimpleMsg[4];
new SimpleMsg("tag", "Some Name", msgContent).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  null" + NL +
"--||--  null" + NL +
"--||--  null" + NL +
"--||--  null" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test
// dump() with a partially populated SimpleMsg[]: the assigned element renders as a
// full nested tag, the remaining null slots render as the text "null".
public void testDumpInnerCompositeAndNulls() throws Throwable {
SimpleMsg[] msgContent = new SimpleMsg[4];
msgContent[0] = new SimpleMsg("inner-tag", "Inner Name", 100);
new SimpleMsg("tag", "Some Name", msgContent).dump(p, "--||--");
assertEquals( "--||--<tag name=\"Some Name\">" + NL +
"--||--  <inner-tag name=\"Inner Name\">" + NL +
"--||--    100" + NL +
"--||--  </inner-tag>" + NL +
"--||--  null" + NL +
"--||--  null" + NL +
"--||--  null" + NL +
"--||--</tag>" + NL
,os.toString());
}
@Test(expected = NullPointerException.class)
// dump() must reject a null PrintStream with a NullPointerException.
public void testDumpPrintStreamNull() throws Throwable {
new SimpleMsg("tag", "Some Name", 100L).dump(null, "--||--");
}
}
| agpl-3.0 |
abbeyj/sonarqube | server/sonar-server/src/test/java/org/sonar/server/computation/issue/commonrule/DuplicatedBlockRuleTest.java | 3891 | /*
* SonarQube, open source software quality management tool.
* Copyright (C) 2008-2014 SonarSource
* mailto:contact AT sonarsource DOT com
*
* SonarQube is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* SonarQube is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
package org.sonar.server.computation.issue.commonrule;
import java.util.Collections;
import org.junit.Rule;
import org.junit.Test;
import org.sonar.api.measures.CoreMetrics;
import org.sonar.api.rule.RuleKey;
import org.sonar.api.rule.Severity;
import org.sonar.core.issue.DefaultIssue;
import org.sonar.server.computation.batch.TreeRootHolderRule;
import org.sonar.server.computation.component.Component;
import org.sonar.server.computation.component.ReportComponent;
import org.sonar.server.computation.component.FileAttributes;
import org.sonar.server.computation.measure.Measure;
import org.sonar.server.computation.measure.MeasureRepositoryRule;
import org.sonar.server.computation.metric.MetricRepositoryRule;
import org.sonar.server.computation.qualityprofile.ActiveRule;
import org.sonar.server.computation.qualityprofile.ActiveRulesHolderRule;
import org.sonar.server.rule.CommonRuleKeys;
import static org.assertj.core.api.Assertions.assertThat;
import static org.sonar.server.computation.component.ReportComponent.DUMB_PROJECT;
public class DuplicatedBlockRuleTest {

  // Key of the built-in "duplicated blocks" common rule in the common repository
  // for the "java" language.
  static RuleKey RULE_KEY = RuleKey.of(CommonRuleKeys.commonRepositoryForLang("java"), CommonRuleKeys.DUPLICATED_BLOCKS);

  // File component under analysis: a non-unit-test file with language "java".
  static ReportComponent FILE = ReportComponent.builder(Component.Type.FILE, 1)
    .setFileAttributes(new FileAttributes(false, "java"))
    .build();

  @Rule
  public ActiveRulesHolderRule activeRuleHolder = new ActiveRulesHolderRule();

  @Rule
  public MetricRepositoryRule metricRepository = new MetricRepositoryRule()
    .add(CoreMetrics.DUPLICATED_BLOCKS);

  @Rule
  public TreeRootHolderRule treeRootHolder = new TreeRootHolderRule().setRoot(DUMB_PROJECT);

  @Rule
  public MeasureRepositoryRule measureRepository = MeasureRepositoryRule.create(treeRootHolder, metricRepository);

  DuplicatedBlockRule underTest = new DuplicatedBlockRule(activeRuleHolder, measureRepository, metricRepository);

  @Test
  // A duplicated-blocks measure of 0 must not raise an issue.
  public void no_issue_if_no_duplicated_blocks() throws Exception {
    activeRuleHolder.put(new ActiveRule(RULE_KEY, Severity.CRITICAL, Collections.<String, String>emptyMap()));
    measureRepository.addRawMeasure(FILE.getReportAttributes().getRef(), CoreMetrics.DUPLICATED_BLOCKS_KEY, Measure.newMeasureBuilder().create(0));
    DefaultIssue issue = underTest.processFile(FILE, "java");
    assertThat(issue).isNull();
  }

  @Test
  // A positive duplicated-blocks measure raises an issue carrying the rule key,
  // the active rule's severity, the block count as effort-to-fix, and a message
  // that includes the count.
  public void issue_if_duplicated_blocks() throws Exception {
    activeRuleHolder.put(new ActiveRule(RULE_KEY, Severity.CRITICAL, Collections.<String, String>emptyMap()));
    measureRepository.addRawMeasure(FILE.getReportAttributes().getRef(), CoreMetrics.DUPLICATED_BLOCKS_KEY, Measure.newMeasureBuilder().create(3));
    DefaultIssue issue = underTest.processFile(FILE, "java");
    assertThat(issue.ruleKey()).isEqualTo(RULE_KEY);
    assertThat(issue.severity()).isEqualTo(Severity.CRITICAL);
    assertThat(issue.effortToFix()).isEqualTo(3.0);
    assertThat(issue.message()).isEqualTo("3 duplicated blocks of code must be removed.");
  }
}
| lgpl-3.0 |
nguyentienlong/community-edition | projects/repository/source/java/org/alfresco/repo/security/person/HomeFolderManager.java | 1311 | /*
* Copyright (C) 2005-2011 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.repo.security.person;
import org.alfresco.service.cmr.repository.ChildAssociationRef;
/**
 * An object responsible for creating / validating the home folder for a given person node child association reference.
 * <p>
 * Implementations are invoked when a person node is created or moved, via the
 * primary child association that points to the person node.
 */
public interface HomeFolderManager
{
    /**
     * Create / move the home folder if required.
     *
     * @param childAssocRef
     *            the primary child association pointing to the person node.
     */
    public void makeHomeFolder(ChildAssociationRef childAssocRef);
} | lgpl-3.0 |
sdmcraft/jackrabbit | jackrabbit-jcr-tests/src/main/java/org/apache/jackrabbit/test/api/nodetype/PropertyDefTest.java | 17486 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.test.api.nodetype;
import org.apache.jackrabbit.test.AbstractJCRTest;
import org.apache.jackrabbit.test.NotExecutableException;
import org.apache.jackrabbit.test.api.PropertyUtil;
import javax.jcr.Session;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.jcr.PropertyType;
import javax.jcr.Value;
import javax.jcr.NodeIterator;
import javax.jcr.NamespaceException;
import javax.jcr.nodetype.NodeTypeManager;
import javax.jcr.nodetype.NodeTypeIterator;
import javax.jcr.nodetype.NodeType;
import javax.jcr.nodetype.PropertyDefinition;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
/**
* Tests if property definitions are properly defined.
*
* @test
* @sources PropertyDefTest.java
* @executeClass org.apache.jackrabbit.test.api.nodetype.PropertyDefTest
* @keywords level1
*/
public class PropertyDefTest extends AbstractJCRTest {

    // Value-constraint syntax patterns, one per property type that uses range
    // notation. format: '(<min>, <max>)', '[<min>, <max>]', '(, <max>)' etc.
    private static final Pattern CONSTRAINTSPATTERN_BINARY =
            Pattern.compile("([\\(\\[]) *\\d* *, *\\d* *([\\)\\]])");

    // format: '(<min>, <max>)', '[<min>, <max>]', '(, <max>)' etc.
    private static final Pattern CONSTRAINTSPATTERN_LONG =
            Pattern.compile("([\\(\\[]) *(\\-?\\d*)? *, *(\\-?\\d*)? *([\\)\\]])");

    // format: '(<min>, <max>)', '[<min>, <max>]', '(, <max>)' etc.
    private static final Pattern CONSTRAINTSPATTERN_DOUBLE =
            Pattern.compile("([\\(\\[]) *(\\-?\\d+\\.?\\d*)? *, *(\\-?\\d+\\.?\\d*)? *([\\)\\]])");

    // format: '(<min>, <max>)', '[<min>, <max>]', '(, <max>)' etc.
    private static final Pattern CONSTRAINTSPATTERN_DATE =
            Pattern.compile("([\\(\\[]) *(" + PropertyUtil.PATTERNSTRING_DATE + ")? *, *" +
                    "(" + PropertyUtil.PATTERNSTRING_DATE + ")? *([\\)\\]])");

    // PATH constraints: an absolute/relative path, optionally ending in '/' or '/*'.
    private static final Pattern CONSTRAINTSPATTERN_PATH =
            Pattern.compile(PropertyUtil.PATTERNSTRING_PATH_WITHOUT_LAST_SLASH +
                    "(/|/\\*)?");

    /**
     * The session we use for the tests
     */
    private Session session;

    /**
     * The node type manager of the session
     */
    private NodeTypeManager manager;

    /**
     * If <code>true</code> indicates that the test found a mandatory property
     */
    private boolean foundMandatoryProperty = false;

    /**
     * Sets up the fixture for the test cases.
     */
    protected void setUp() throws Exception {
        isReadOnly = true;
        super.setUp();
        session = getHelper().getReadOnlySession();
        manager = session.getWorkspace().getNodeTypeManager();
        // re-fetch testRootNode with read-only session
        testRootNode = (Node) session.getItem(testRoot);
    }

    /**
     * Releases the session aquired in {@link #setUp()}.
     */
    protected void tearDown() throws Exception {
        if (session != null) {
            session.logout();
            session = null;
        }
        manager = null;
        super.tearDown();
    }

    /**
     * Test getDeclaringNodeType() returns the node type which is defining the
     * requested property def. Test runs for all existing node types.
     */
    public void testGetDeclaringNodeType() throws RepositoryException {
        NodeTypeIterator types = manager.getAllNodeTypes();
        // loop all node types
        while (types.hasNext()) {
            NodeType currentType = types.nextNodeType();
            PropertyDefinition defsOfCurrentType[] =
                    currentType.getPropertyDefinitions();
            // loop all property defs of each node type
            for (int i = 0; i < defsOfCurrentType.length; i++) {
                PropertyDefinition def = defsOfCurrentType[i];
                NodeType type = def.getDeclaringNodeType();
                // check if def is part of the property defs of the
                // declaring node type (comparison is by name)
                PropertyDefinition defs[] = type.getPropertyDefinitions();
                boolean hasType = false;
                for (int j = 0; j < defs.length; j++) {
                    if (defs[j].getName().equals(def.getName())) {
                        hasType = true;
                        break;
                    }
                }
                assertTrue("getDeclaringNodeType() must return the node " +
                        "which defines the corresponding property def.",
                        hasType);
            }
        }
    }

    /**
     * Tests if auto create properties are not a residual set definition
     * (getName() does not return "*")
     */
    public void testIsAutoCreate() throws RepositoryException {
        NodeTypeIterator types = manager.getAllNodeTypes();
        // loop all node types
        while (types.hasNext()) {
            NodeType type = types.nextNodeType();
            PropertyDefinition defs[] = type.getPropertyDefinitions();
            for (int i = 0; i < defs.length; i++) {
                if (defs[i].isAutoCreated()) {
                    assertFalse("An auto create property must not be a " +
                            "residual set definition.",
                            defs[i].getName().equals("*"));
                }
            }
        }
    }

    /**
     * This test checks if item definitions with mandatory constraints are
     * respected.
     * <p>
     * If the default workspace does not contain a node with a node type
     * definition that specifies a mandatory property a {@link
     * org.apache.jackrabbit.test.NotExecutableException} is thrown.
     */
    public void testIsMandatory() throws RepositoryException, NotExecutableException {
        traverse(testRootNode);
        if (!foundMandatoryProperty) {
            throw new NotExecutableException("Workspace does not contain any node with a mandatory property definition");
        }
    }

    /**
     * Tests if isRequiredType() returns a valid PropertyType. </p> The test
     * runs for all available node types.
     */
    public void testIsRequiredType()
            throws RepositoryException {
        NodeTypeIterator types = manager.getAllNodeTypes();
        // loop all node types
        while (types.hasNext()) {
            NodeType type = types.nextNodeType();
            PropertyDefinition defs[] = type.getPropertyDefinitions();
            for (int i = 0; i < defs.length; i++) {
                switch (defs[i].getRequiredType()) {
                    case PropertyType.STRING:
                    case PropertyType.BINARY:
                    case PropertyType.DATE:
                    case PropertyType.LONG:
                    case PropertyType.DOUBLE:
                    case PropertyType.NAME:
                    case PropertyType.PATH:
                    case PropertyType.REFERENCE:
                    case PropertyType.BOOLEAN:
                    case PropertyType.UNDEFINED:
                    case PropertyType.WEAKREFERENCE:
                    case PropertyType.DECIMAL:
                    case PropertyType.URI:
                        // success
                        break;
                    default:
                        fail("getRequiredType() returns an " +
                                "invalid PropertyType.");
                }
            }
        }
    }

    /**
     * Tests if value constraints match the pattern specified by the required
     * property type. </p> The test runs for all value constraints of all
     * properties of all available node types.
     */
    public void testGetValueConstraints() throws RepositoryException {
        NodeTypeIterator types = manager.getAllNodeTypes();
        // loop all node types
        while (types.hasNext()) {
            NodeType type = types.nextNodeType();
            PropertyDefinition defs[] = type.getPropertyDefinitions();
            for (int i = 0; i < defs.length; i++) {
                PropertyDefinition def = defs[i];
                String constraints[] = def.getValueConstraints();
                if (constraints != null) {
                    for (int j = 0; j < constraints.length; j++) {
                        Matcher matcher;
                        switch (defs[i].getRequiredType()) {
                            case PropertyType.STRING:
                            case PropertyType.UNDEFINED:
                                // any value matches
                                break;
                            case PropertyType.BINARY:
                                matcher =
                                        CONSTRAINTSPATTERN_BINARY.matcher(constraints[j]);
                                assertTrue("Value constraint does not match " +
                                        "the pattern of PropertyType.BINARY ",
                                        matcher.matches());
                                break;
                            case PropertyType.DATE:
                                matcher =
                                        CONSTRAINTSPATTERN_DATE.matcher(constraints[j]);
                                assertTrue("Value constraint does not match " +
                                        "the pattern of PropertyType.DATE ",
                                        matcher.matches());
                                break;
                            case PropertyType.LONG:
                                matcher =
                                        CONSTRAINTSPATTERN_LONG.matcher(constraints[j]);
                                assertTrue("Value constraint does not match " +
                                        "the pattern of PropertyType.LONG",
                                        matcher.matches());
                                break;
                            case PropertyType.DOUBLE:
                                matcher =
                                        CONSTRAINTSPATTERN_DOUBLE.matcher(constraints[j]);
                                assertTrue("Value constraint does not match " +
                                        "the pattern of PropertyType.DOUBLE",
                                        matcher.matches());
                                break;
                            case PropertyType.NAME:
                                matcher =
                                        PropertyUtil.PATTERN_NAME.matcher(constraints[j]);
                                assertTrue("Value constraint does not match " +
                                        "the pattern of PropertyType.NAME",
                                        matcher.matches());
                                checkPrefix(constraints[j]);
                                break;
                            case PropertyType.PATH:
                                matcher = CONSTRAINTSPATTERN_PATH.matcher(constraints[j]);
                                assertTrue("Value constraint does not match " +
                                        "the pattern of PropertyType.PATH",
                                        matcher.matches());
                                // every path element must use a registered prefix
                                String elems[] = constraints[j].split("/");
                                for (int k = 0; k < elems.length; k++) {
                                    checkPrefix(elems[k]);
                                }
                                break;
                            case PropertyType.REFERENCE:
                                matcher =
                                        PropertyUtil.PATTERN_NAME.matcher(constraints[j]);
                                assertTrue("Value constraint does not match " +
                                        "the pattern of PropertyType.REFERENCE",
                                        matcher.matches());
                                checkPrefix(constraints[j]);
                                break;
                            case PropertyType.BOOLEAN:
                                assertTrue("Value constraint does not match " +
                                        "the pattern of PropertyType.BOOLEAN",
                                        constraints[j].equals("true") ||
                                        constraints[j].equals("false"));
                                break;
                        }
                    }
                }
            }
        }
    }

    /**
     * Tests if single-valued properties do have not more than one default value
     * </p> The test runs for all default values of all properties of all
     * available node types.
     */
    public void testGetDefaultValues()
            throws RepositoryException {
        NodeTypeIterator types = manager.getAllNodeTypes();
        // loop all node types
        while (types.hasNext()) {
            NodeType type = types.nextNodeType();
            PropertyDefinition defs[] = type.getPropertyDefinitions();
            for (int i = 0; i < defs.length; i++) {
                PropertyDefinition def = defs[i];
                Value values[] = def.getDefaultValues();
                // The assertion below is loop-invariant, so it used to be
                // repeated once per default value inside a redundant inner
                // loop; check it once instead. As before, an empty
                // default-value array is not reported.
                if (values != null && values.length > 0 && !def.isMultiple()) {
                    assertEquals(
                            "Single-valued property "
                                    + type.getName() + "/" + def.getName()
                                    + " must not have more than one default value.",
                            1, values.length);
                }
            }
        }
    }

    // ---------------------------------< internal >----------------------------

    /**
     * Traverses the node hierarchy and applies
     * {@link #checkMandatoryConstraint(javax.jcr.Node, javax.jcr.nodetype.NodeType)}
     * to all descendant nodes of <code>parentNode</code> (checking the primary
     * node type and every mixin of each node).
     */
    private void traverse(Node parentNode)
            throws RepositoryException {
        NodeIterator nodes = parentNode.getNodes();
        while (nodes.hasNext()) {
            Node node = nodes.nextNode();
            NodeType primeType = node.getPrimaryNodeType();
            checkMandatoryConstraint(node, primeType);
            NodeType mixins[] = node.getMixinNodeTypes();
            for (int i = 0; i < mixins.length; i++) {
                checkMandatoryConstraint(node, mixins[i]);
            }
            traverse(node);
        }
    }

    /**
     * Checks if mandatory property definitions are respected: every mandatory
     * property declared by <code>type</code> must exist on <code>node</code>.
     * Also records (in {@link #foundMandatoryProperty}) that at least one
     * mandatory definition was encountered.
     */
    private void checkMandatoryConstraint(Node node, NodeType type)
            throws RepositoryException {
        // test if node contains all mandatory properties of current type
        PropertyDefinition propDefs[] = type.getPropertyDefinitions();
        for (int i = 0; i < propDefs.length; i++) {
            PropertyDefinition propDef = propDefs[i];
            if (propDef.isMandatory()) {
                foundMandatoryProperty = true;
                String name = propDef.getName();
                assertTrue("Node " + node.getPath() + " does not contain " +
                        "value for mandatory property: " + name, node.hasProperty(name));
                // todo check back with latest spec!
                /*
                try {
                    Property p = node.getProperty(name);
                    if (propDef.isMultiple()) {
                        // empty array fails
                        assertFalse("The mandatory and multiple property " + p.getName() +
                                " must not be empty.",
                                p.getValues().length == 0);
                    } else {
                        // empty value fails
                        assertNotNull("A mandatory property must have a value",
                                p.getValue());
                    }
                } catch (PathNotFoundException e) {
                    fail("Mandatory property " + name + " does not exist.");
                }
                */
            }
        }
    }

    /**
     * Checks for NAME, PATH and REFERENCE constraint values if the constraint
     * is reflecting the namespace mapping in the current <code>Session</code>.
     * Unprefixed names pass unchecked.
     *
     * @throws NamespaceException if the prefix of name is not a registered
     *                            namespace prefix
     */
    private void checkPrefix(String name)
            throws NamespaceException, RepositoryException {
        if (name.indexOf(":") != -1) {
            String prefix = name.split(":")[0];
            // NamespaceException is thrown if fails
            session.getNamespaceURI(prefix);
        }
    }
}
| apache-2.0 |