repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
godotgildor/igv | src/org/broad/igv/track/LoadedDataInterval.java | 1873 | /*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.broad.igv.track;
import org.broad.igv.feature.Locus;
import org.broad.igv.feature.LocusScore;
import java.util.List;
/**
* @author jrobinso
* @date Sep 28, 2010
*/
/**
 * Caches the {@link LocusScore}s loaded for a particular genomic interval and
 * zoom level, so previously loaded data can be reused instead of re-fetched.
 */
public class LoadedDataInterval {
    Locus range;
    private List<LocusScore> scores;
    int zoom;

    /**
     * @param chr    chromosome name of the loaded interval
     * @param start  interval start position
     * @param end    interval end position
     * @param zoom   zoom level the scores were loaded for
     * @param scores scores covering the interval (stored by reference, not copied)
     */
    public LoadedDataInterval(String chr, int start, int end, int zoom, List<LocusScore> scores) {
        this.range = new Locus(chr, start, end);
        this.scores = scores;
        this.zoom = zoom;
    }

    /**
     * @return true if this cached interval covers the requested region at the
     *         same zoom level, so the cached scores can be reused
     */
    public boolean contains(String chr, int start, int end, int zoom) {
        boolean sameZoom = (zoom == this.zoom);
        return sameZoom && range.contains(chr, start, end);
    }

    /** @return the cached scores (live reference, not a defensive copy) */
    public List<LocusScore> getScores() {
        return scores;
    }
}
| mit |
clulab/banner | src/main/java/banner/tagging/Mention.java | 5101 | /*
Copyright (c) 2007 Arizona State University, Dept. of Computer Science and Dept. of Biomedical Informatics.
This file is part of the BANNER Named Entity Recognition System, http://banner.sourceforge.net
This software is provided under the terms of the Common Public License, version 1.0, as published by http://www.opensource.org. For further information, see the file 'LICENSE.txt' included with this distribution.
*/
package banner.tagging;
import java.util.List;
import banner.Sentence;
import banner.tokenization.Token;
/**
* Instances of this class represent the mention of an entity within a {@link Sentence}. Mentions are defined in terms of full tokens, and therefore
* finding mentions (the job of a {@link Tagger}) requires tokenization first.
*
* @author Bob
*/
public class Mention
{
    // Sentence this mention occurs in; never null (validated in constructor)
    private Sentence sentence;
    // Entity type of this mention; never null (validated in constructor)
    private MentionType type;
    // Index of the first token of the mention (inclusive)
    private int start;
    // Index one past the last token of the mention (exclusive)
    private int end;

    /**
     * Creates a mention spanning tokens [start, end) of the given sentence.
     * Note that {@code end} is exclusive: the mention covers tokens
     * {@code start} through {@code end - 1}.
     *
     * @param sentence the sentence containing the mention; must not be null
     * @param type the type of entity mentioned; must not be null
     * @param start index of the first token (inclusive); must be &gt;= 0
     * @param end index one past the last token (exclusive); must not exceed the sentence's token count
     * @throws IllegalArgumentException if any argument is invalid or the span is empty
     */
    public Mention(Sentence sentence, MentionType type, int start, int end)
    {
        if (sentence == null)
            throw new IllegalArgumentException();
        this.sentence = sentence;
        if (type == null)
            throw new IllegalArgumentException();
        this.type = type;
        if (start < 0)
            throw new IllegalArgumentException();
        this.start = start;
        if (end > sentence.getTokens().size())
            throw new IllegalArgumentException();
        this.end = end;
        // A mention must contain at least one token; this also rejects start > end
        if (length() <= 0)
            throw new IllegalArgumentException("Illegal length - start: " + start + " end: " + end);
    }

    /**
     * @return A {@link MentionType} indicating the type of entity being mentioned
     */
    public MentionType getType()
    {
        return type;
    }

    /**
     * @return The {@link Sentence} containing this {@link Mention}
     */
    public Sentence getSentence()
    {
        return sentence;
    }

    /**
     * @return The {@link Token}s which comprise this {@link Mention}
     */
    public List<Token> getTokens()
    {
        // subList is a view over the sentence's token list, not a copy
        return sentence.getTokens().subList(start, end);
    }

    /**
     * @return A representation of this {@link Mention}, as a list of {@link TaggedToken}s
     */
    public List<TaggedToken> getTaggedTokens()
    {
        return sentence.getTaggedTokens().subList(start, end);
    }

    /**
     * @return The number of tokens this {@link Mention} contains
     */
    public int length()
    {
        return end - start;
    }

    /**
     * @return The original text of this {@link Mention}
     */
    public String getText()
    {
        // Recover the raw text via the character offsets of the first and last tokens
        return sentence.getText().substring(getStartChar(), getEndChar());
    }

    /**
     * Determines whether this {@link Mention} contains the specified {@link Mention}
     *
     * @param mention
     * @return <code>true</code> if this {@link Mention} contains the specified {@link Mention}, <code>false</code> otherwise
     */
    public boolean contains(Mention mention)
    {
        // Containment requires the same sentence and a token span that encloses the other's
        return sentence.equals(mention.sentence) && start <= mention.start && end >= mention.end;
    }

    /**
     * @param tokenIndex a token index within the sentence
     * @return <code>true</code> if the token at the given index falls inside this mention's span
     */
    public boolean contains(int tokenIndex)
    {
        return tokenIndex >= start && tokenIndex < end;
    }

    /**
     * @return The index one past the last token in this {@link Mention} (exclusive end)
     */
    public int getEnd()
    {
        return end;
    }

    /**
     * @return The index of the first token in this {@link Mention}
     */
    public int getStart()
    {
        return start;
    }

    /**
     * @return The character offset just past the last token of this mention, within the sentence text
     */
    public int getEndChar()
    {
        // end is exclusive, so the last token is at index end - 1
        return sentence.getTokens().get(end - 1).getEnd();
    }

    /**
     * @return The character offset of the first token of this mention, within the sentence text
     */
    public int getStartChar()
    {
        return sentence.getTokens().get(start).getStart();
    }

    /**
     * Determines whether this {@link Mention} overlaps the specified {@link Mention}
     *
     * @param mention2
     * @return <code>true</code> if this Mention overlaps with the specified {@link Mention}, <code>false</code> otherwise
     */
    public boolean overlaps(Mention mention2)
    {
        // NOTE(review): does not check that both mentions belong to the same
        // sentence — token indices from different sentences will still "overlap"
        return end > mention2.start && start < mention2.end;
    }

    // ----- Object overrides -----

    @Override
    public String toString()
    {
        return (type == null ? "null" : type.getText()) + ": " + getText();
    }

    @Override
    public int hashCode()
    {
        // Combines sentence, type, start and end — consistent with equals() below
        final int PRIME = 31;
        int result = 1;
        result = PRIME * result + end;
        result = PRIME * result + sentence.hashCode();
        result = PRIME * result + start;
        result = PRIME * result + type.hashCode();
        return result;
    }

    @Override
    public boolean equals(Object obj)
    {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        final Mention other = (Mention)obj;
        if (!sentence.equals(other.sentence))
            return false;
        if (!type.equals(other.type))
            return false;
        if (start != other.start)
            return false;
        if (end != other.end)
            return false;
        return true;
    }
}
| epl-1.0 |
jon-bell/junit | src/main/java/org/junit/runners/model/TestTimedOutException.java | 1131 | package org.junit.runners.model;
import java.util.Locale;
import java.util.concurrent.TimeUnit;
/**
* Exception thrown when a test fails on timeout.
*
* @since 4.12
*
*/
public class TestTimedOutException extends Exception {
    private static final long serialVersionUID = 31935685163547539L;

    private final TimeUnit fTimeUnit;
    private final long fTimeout;

    /**
     * Creates exception with a standard message "test timed out after [timeout] [timeUnit]"
     *
     * @param timeout the amount of time passed before the test was interrupted
     * @param timeUnit the time unit for the timeout value
     */
    public TestTimedOutException(long timeout, TimeUnit timeUnit) {
        // Locale.ROOT keeps the unit name stable regardless of the default
        // locale: with the plain toLowerCase(), a Turkish default locale would
        // turn "MILLISECONDS" into a string containing a dotless i.
        super(String.format("test timed out after %d %s",
                timeout, timeUnit.name().toLowerCase(Locale.ROOT)));
        fTimeUnit = timeUnit;
        fTimeout = timeout;
    }

    /**
     * Gets the time passed before the test was interrupted
     *
     * @return the timeout value, expressed in {@link #getTimeUnit()}
     */
    public long getTimeout() {
        return fTimeout;
    }

    /**
     * Gets the time unit for the timeout value
     *
     * @return the unit of {@link #getTimeout()}
     */
    public TimeUnit getTimeUnit() {
        return fTimeUnit;
    }
}
| epl-1.0 |
openhab/openhab2 | bundles/org.openhab.binding.valloxmv/src/main/java/org/openhab/binding/valloxmv/internal/ValloxMVBindingConstants.java | 8916 | /**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.valloxmv.internal;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.core.thing.ThingTypeUID;
/**
* The {@link ValloxMVBindingConstants} class defines common constants, which are
* used across the whole binding.
*
* @author Björn Brings - Initial contribution
*/
@NonNullByDefault
public class ValloxMVBindingConstants {

    private static final String BINDING_ID = "valloxmv";

    /** Constants-only class; prevent instantiation. */
    private ValloxMVBindingConstants() {
    }

    // List of all Thing Type UIDs
    public static final ThingTypeUID THING_TYPE_VALLOXMV = new ThingTypeUID(BINDING_ID, "valloxmv");

    // 4 states of ventilation unit (Fireplace = 1, Away = 2, At home = 3, Boost = 4)
    public static final int STATE_FIREPLACE = 1;
    public static final int STATE_AWAY = 2;
    public static final int STATE_ATHOME = 3;
    public static final int STATE_BOOST = 4;

    // List of all Channel ids

    /**
     * Ventilation unit powered on
     */
    public static final String CHANNEL_ONOFF = "onoff";

    /**
     * Current state ventilation unit (Fireplace = 1, Away = 2, At home = 3, Boost = 4)
     */
    public static final String CHANNEL_STATE = "state";

    /**
     * Current fan speed (0 - 100)
     */
    public static final String CHANNEL_FAN_SPEED = "fanspeed";

    /**
     * Current fan speed of extracting fan (1/min)
     */
    public static final String CHANNEL_FAN_SPEED_EXTRACT = "fanspeedextract";

    /**
     * Current fan speed of supplying fan (1/min)
     */
    public static final String CHANNEL_FAN_SPEED_SUPPLY = "fanspeedsupply";

    /**
     * Current temperature inside the building
     */
    public static final String CHANNEL_TEMPERATURE_INSIDE = "tempinside";

    /**
     * Current temperature outside the building
     */
    public static final String CHANNEL_TEMPERATURE_OUTSIDE = "tempoutside";

    /**
     * Current temperature of the air flow exhausting the building.
     */
    public static final String CHANNEL_TEMPERATURE_EXHAUST = "tempexhaust";

    /**
     * Current temperature of the air flow incoming to the building before heating (if optional heating module included
     * in ventilation unit).
     */
    public static final String CHANNEL_TEMPERATURE_INCOMING_BEFORE_HEATING = "tempincomingbeforeheating";

    /**
     * Current temperature of the air flow incoming to the building.
     */
    public static final String CHANNEL_TEMPERATURE_INCOMING = "tempincoming";

    /**
     * Current humidity of the air flow exhausting the building.
     */
    public static final String CHANNEL_HUMIDITY = "humidity";

    /**
     * Current cell state (0=heat recovery, 1=cool recovery, 2=bypass, 3=defrosting).
     */
    public static final String CHANNEL_CELLSTATE = "cellstate";

    /**
     * Total uptime in years (+ uptime in hours = total uptime).
     */
    public static final String CHANNEL_UPTIME_YEARS = "uptimeyears";

    /**
     * Total uptime in hours (+ uptime in years = total uptime).
     */
    public static final String CHANNEL_UPTIME_HOURS = "uptimehours";

    /**
     * Current uptime in hours.
     */
    public static final String CHANNEL_UPTIME_HOURS_CURRENT = "uptimehourscurrent";

    /**
     * Date filter was changed last time.
     */
    public static final String CHANNEL_FILTER_CHANGED_DATE = "filterchangeddate";

    /**
     * Days until filter has to be changed.
     */
    public static final String CHANNEL_REMAINING_FILTER_DAYS = "remainingfilterdays";

    /**
     * Extract fan base speed in % (0-100).
     */
    public static final String CHANNEL_EXTR_FAN_BALANCE_BASE = "extrfanbalancebase";

    /**
     * Supply fan base speed in % (0-100).
     */
    public static final String CHANNEL_SUPP_FAN_BALANCE_BASE = "suppfanbalancebase";

    /**
     * Home fan speed in % (0-100).
     */
    public static final String CHANNEL_HOME_SPEED_SETTING = "homespeedsetting";

    /**
     * Away fan speed in % (0-100).
     */
    public static final String CHANNEL_AWAY_SPEED_SETTING = "awayspeedsetting";

    /**
     * Boost fan speed in % (0-100).
     */
    public static final String CHANNEL_BOOST_SPEED_SETTING = "boostspeedsetting";

    /**
     * Target temperature in home state.
     */
    public static final String CHANNEL_HOME_AIR_TEMP_TARGET = "homeairtemptarget";

    /**
     * Target temperature in away state.
     */
    public static final String CHANNEL_AWAY_AIR_TEMP_TARGET = "awayairtemptarget";

    /**
     * Target temperature in boost state.
     */
    public static final String CHANNEL_BOOST_AIR_TEMP_TARGET = "boostairtemptarget";

    /**
     * Timer value setting in minutes of boost profile (1-65535).
     */
    public static final String CHANNEL_BOOST_TIME = "boosttime";

    /**
     * Timer enabled setting in boost profile (Enabled = 1, Disabled = 0).
     */
    public static final String CHANNEL_BOOST_TIMER_ENABLED = "boosttimerenabled";

    /**
     * Fireplace profile extract fan speed setting in % (0-100).
     */
    public static final String CHANNEL_FIREPLACE_EXTR_FAN = "fireplaceextrfan";

    /**
     * Fireplace profile supply fan speed setting in % (0-100).
     */
    public static final String CHANNEL_FIREPLACE_SUPP_FAN = "fireplacesuppfan";

    /**
     * Timer value setting in minutes of fireplace profile (1-65535).
     */
    public static final String CHANNEL_FIREPLACE_TIME = "fireplacetime";

    /**
     * Timer enabled setting in fireplace profile (Enabled = 1, Disabled = 0).
     */
    public static final String CHANNEL_FIREPLACE_TIMER_ENABLED = "fireplacetimerenabled";

    /**
     * Programmable profile enabled
     * Not sure if this is needed at all, Vallox modbus document does not list this.
     */
    // public static final String CHANNEL_EXTRA_ENABLED = "extraenabled";

    /**
     * Target temperature in programmable profile.
     */
    public static final String CHANNEL_EXTRA_AIR_TEMP_TARGET = "extraairtemptarget";

    /**
     * Programmable profile extract fan speed setting in % (0-100).
     */
    public static final String CHANNEL_EXTRA_EXTR_FAN = "extraextrfan";

    /**
     * Programmable profile supply fan speed setting in % (0-100).
     */
    public static final String CHANNEL_EXTRA_SUPP_FAN = "extrasuppfan";

    /**
     * Timer value setting in minutes of programmable profile (1-65535).
     */
    public static final String CHANNEL_EXTRA_TIME = "extratime";

    /**
     * Timer enabled setting in programmable profile (Enabled = 1, Disabled = 0).
     */
    public static final String CHANNEL_EXTRA_TIMER_ENABLED = "extratimerenabled";

    /**
     * Weekly Timer enabled setting (Enabled = 1, Disabled = 0).
     */
    public static final String CHANNEL_WEEKLY_TIMER_ENABLED = "weeklytimerenabled";

    /**
     * Set of writable channels that are Switches
     */
    public static final Set<String> WRITABLE_CHANNELS_SWITCHES = Collections
            .unmodifiableSet(new HashSet<>(Arrays.asList(CHANNEL_ONOFF, CHANNEL_BOOST_TIMER_ENABLED,
                    CHANNEL_FIREPLACE_TIMER_ENABLED, CHANNEL_EXTRA_TIMER_ENABLED, CHANNEL_WEEKLY_TIMER_ENABLED)));

    /**
     * Set of writable channels that are dimensionless
     */
    public static final Set<String> WRITABLE_CHANNELS_DIMENSIONLESS = Collections
            .unmodifiableSet(new HashSet<>(Arrays.asList(CHANNEL_EXTR_FAN_BALANCE_BASE, CHANNEL_SUPP_FAN_BALANCE_BASE,
                    CHANNEL_HOME_SPEED_SETTING, CHANNEL_AWAY_SPEED_SETTING, CHANNEL_BOOST_SPEED_SETTING,
                    CHANNEL_BOOST_TIME, CHANNEL_BOOST_TIMER_ENABLED, CHANNEL_FIREPLACE_EXTR_FAN,
                    CHANNEL_FIREPLACE_SUPP_FAN, CHANNEL_FIREPLACE_TIME, CHANNEL_FIREPLACE_TIMER_ENABLED,
                    CHANNEL_EXTRA_EXTR_FAN, CHANNEL_EXTRA_SUPP_FAN, CHANNEL_EXTRA_TIME, CHANNEL_EXTRA_TIMER_ENABLED,
                    CHANNEL_WEEKLY_TIMER_ENABLED)));

    /**
     * Set of writable channels that are temperatures
     */
    public static final Set<String> WRITABLE_CHANNELS_TEMPERATURE = Collections
            .unmodifiableSet(new HashSet<>(Arrays.asList(CHANNEL_HOME_AIR_TEMP_TARGET, CHANNEL_AWAY_AIR_TEMP_TARGET,
                    CHANNEL_BOOST_AIR_TEMP_TARGET, CHANNEL_EXTRA_AIR_TEMP_TARGET)));

    // Thing configuration

    /**
     * Name of the configuration parameters
     */
    public static final String CONFIG_UPDATE_INTERVAL = "updateinterval";
    public static final String CONFIG_IP = "ip";
}
| epl-1.0 |
akervern/che | ide/commons-gwt/src/main/java/org/eclipse/che/ide/websocket/impl/MessagesReSender.java | 2346 | /*
* Copyright (c) 2012-2018 Red Hat, Inc.
* This program and the accompanying materials are made
* available under the terms of the Eclipse Public License 2.0
* which is available at https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
package org.eclipse.che.ide.websocket.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.eclipse.che.ide.util.loging.Log;
/**
* Caches messages that was transmitted when a web socket connection was not opened and resends them
* when the connection is opened again.
*
* @author Dmitry Kuleshov
*/
@Singleton
public class MessagesReSender {
    /** Upper bound on the number of messages cached per endpoint. */
    private static final int MAX_MESSAGES = 100;

    /** Pending messages keyed by endpoint id, kept in submission order. */
    private final Map<String, List<String>> messageRegistry = new HashMap<>();

    private final WebSocketConnectionManager connectionManager;
    private final UrlResolver urlResolver;

    @Inject
    public MessagesReSender(WebSocketConnectionManager connectionManager, UrlResolver urlResolver) {
        this.connectionManager = connectionManager;
        this.urlResolver = urlResolver;
    }

    /**
     * Add message that is to be sent when a connection defined be the URL is opened again.
     * Messages beyond the per-endpoint cap are silently dropped.
     *
     * @param endpointId endpointId of websocket connection
     * @param message plain text message
     */
    public void add(String endpointId, String message) {
        List<String> messages = messageRegistry.computeIfAbsent(endpointId, k -> new LinkedList<>());
        // Enforce the cap exactly: the previous '<=' comparison allowed the
        // list to grow to MAX_MESSAGES + 1 entries before dropping messages.
        if (messages.size() < MAX_MESSAGES) {
            messages.add(message);
        }
    }

    /**
     * Re-sends all cached messages for the endpoint resolved from the given URL.
     * Messages that still cannot be delivered (connection closed again mid-loop)
     * are put back into the cache for a later attempt.
     *
     * @param url websocket URL whose connection has just (re)opened
     */
    public void reSend(String url) {
        String endpointId = urlResolver.resolve(url);
        if (!messageRegistry.containsKey(endpointId)) {
            return;
        }
        List<String> messages = messageRegistry.get(endpointId);
        if (messages.isEmpty()) {
            return;
        }
        Log.debug(getClass(), "Going to resend websocket messaged: " + messages);
        // Snapshot and clear, so failed deliveries can be re-queued in order
        List<String> backing = new ArrayList<>(messages);
        messages.clear();
        for (String message : backing) {
            if (connectionManager.isConnectionOpen(url)) {
                connectionManager.sendMessage(url, message);
            } else {
                messages.add(message);
            }
        }
    }
}
| epl-1.0 |
openhab/openhab2 | bundles/org.openhab.binding.lutron/src/main/java/org/openhab/binding/lutron/internal/StringUtils.java | 984 | /**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.lutron.internal;
import java.util.Objects;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.eclipse.jdt.annotation.Nullable;
/**
* Supply some string utility methods formerly provided by org.apache.commons.lang.StringUtils.
*
* @author Bob Adair - Initial contribution
*
*/
@NonNullByDefault
public class StringUtils {

    /** Static utility class; prevent instantiation. */
    private StringUtils() {
    }

    /**
     * Null-safe string equality check.
     *
     * @param s1 first string, may be null
     * @param s2 second string, may be null
     * @return true if both are null or both are equal
     */
    public static boolean equals(@Nullable String s1, @Nullable String s2) {
        return Objects.equals(s1, s2);
    }

    /**
     * @param s1 string to test, may be null
     * @return true if the string is null or has zero length
     */
    public static boolean isEmpty(@Nullable String s1) {
        return (s1 == null || s1.isEmpty());
    }
}
| epl-1.0 |
crapo/sadlos2 | com.ge.research.sadl.mapping.ui/src-gen/com/ge/research/sadl/ui/contentassist/antlr/MappingParser.java | 5617 | /*
* generated by Xtext
*/
package com.ge.research.sadl.ui.contentassist.antlr;
import java.util.Collection;
import java.util.Map;
import java.util.HashMap;
import org.antlr.runtime.RecognitionException;
import org.eclipse.xtext.AbstractElement;
import org.eclipse.xtext.ui.editor.contentassist.antlr.AbstractContentAssistParser;
import org.eclipse.xtext.ui.editor.contentassist.antlr.FollowElement;
import org.eclipse.xtext.ui.editor.contentassist.antlr.internal.AbstractInternalContentAssistParser;
import com.google.inject.Inject;
import com.ge.research.sadl.services.MappingGrammarAccess;
/**
 * Xtext-generated content-assist parser for the SADL Mapping grammar.
 * Maps grammar elements to internal parser rule names and delegates parsing
 * to {@code InternalMappingParser}. Generated code — do not hand-edit logic.
 */
public class MappingParser extends AbstractContentAssistParser {

    @Inject
    private MappingGrammarAccess grammarAccess;

    // Lazily built lookup from grammar elements to ANTLR rule names; see getRuleName()
    private Map<AbstractElement, String> nameMappings;

    @Override
    protected com.ge.research.sadl.ui.contentassist.antlr.internal.InternalMappingParser createParser() {
        com.ge.research.sadl.ui.contentassist.antlr.internal.InternalMappingParser result = new com.ge.research.sadl.ui.contentassist.antlr.internal.InternalMappingParser(null);
        result.setGrammarAccess(grammarAccess);
        return result;
    }

    @Override
    protected String getRuleName(AbstractElement element) {
        // Build the mapping table once on first use (not thread-safe; UI thread only)
        if (nameMappings == null) {
            nameMappings = new HashMap<AbstractElement, String>() {
                private static final long serialVersionUID = 1L;
                {
                    put(grammarAccess.getModelAccess().getTriplesAlternatives_2_0(), "rule__Model__TriplesAlternatives_2_0");
                    put(grammarAccess.getGroupAccess().getGroupLinesAlternatives_1_0(), "rule__Group__GroupLinesAlternatives_1_0");
                    put(grammarAccess.getTripleAccess().getSubjAlternatives_0_0(), "rule__Triple__SubjAlternatives_0_0");
                    put(grammarAccess.getTripleAccess().getObjvalAlternatives_3_0(), "rule__Triple__ObjvalAlternatives_3_0");
                    put(grammarAccess.getLiteralValueAccess().getAlternatives(), "rule__LiteralValue__Alternatives");
                    put(grammarAccess.getLiteralValueAccess().getLiteralBooleanAlternatives_2_0(), "rule__LiteralValue__LiteralBooleanAlternatives_2_0");
                    put(grammarAccess.getColumnIDAccess().getAlternatives(), "rule__ColumnID__Alternatives");
                    put(grammarAccess.getModelAccess().getGroup(), "rule__Model__Group__0");
                    put(grammarAccess.getNewModelNSAccess().getGroup(), "rule__NewModelNS__Group__0");
                    put(grammarAccess.getNewModelNSAccess().getGroup_2(), "rule__NewModelNS__Group_2__0");
                    put(grammarAccess.getImportAccess().getGroup(), "rule__Import__Group__0");
                    put(grammarAccess.getImportAccess().getGroup_2(), "rule__Import__Group_2__0");
                    put(grammarAccess.getGroupAccess().getGroup(), "rule__Group__Group__0");
                    put(grammarAccess.getTripleAccess().getGroup(), "rule__Triple__Group__0");
                    put(grammarAccess.getNUMBERAccess().getGroup(), "rule__NUMBER__Group__0");
                    put(grammarAccess.getRefAccess().getGroup(), "rule__Ref__Group__0");
                    put(grammarAccess.getColumnNameAccess().getGroup(), "rule__ColumnName__Group__0");
                    put(grammarAccess.getModelAccess().getUriAssignment_0(), "rule__Model__UriAssignment_0");
                    put(grammarAccess.getModelAccess().getImportsAssignment_1(), "rule__Model__ImportsAssignment_1");
                    put(grammarAccess.getModelAccess().getTriplesAssignment_2(), "rule__Model__TriplesAssignment_2");
                    put(grammarAccess.getNewModelNSAccess().getBaseUriAssignment_1(), "rule__NewModelNS__BaseUriAssignment_1");
                    put(grammarAccess.getNewModelNSAccess().getPrefixAssignment_2_1(), "rule__NewModelNS__PrefixAssignment_2_1");
                    put(grammarAccess.getImportAccess().getImportURIAssignment_1(), "rule__Import__ImportURIAssignment_1");
                    put(grammarAccess.getImportAccess().getAliasAssignment_2_1(), "rule__Import__AliasAssignment_2_1");
                    put(grammarAccess.getGroupAccess().getGroupLinesAssignment_1(), "rule__Group__GroupLinesAssignment_1");
                    put(grammarAccess.getTripleAccess().getSubjAssignment_0(), "rule__Triple__SubjAssignment_0");
                    put(grammarAccess.getTripleAccess().getPredAssignment_2(), "rule__Triple__PredAssignment_2");
                    put(grammarAccess.getTripleAccess().getObjvalAssignment_3(), "rule__Triple__ObjvalAssignment_3");
                    put(grammarAccess.getLiteralValueAccess().getLiteralNumberAssignment_0(), "rule__LiteralValue__LiteralNumberAssignment_0");
                    put(grammarAccess.getLiteralValueAccess().getLiteralStringAssignment_1(), "rule__LiteralValue__LiteralStringAssignment_1");
                    put(grammarAccess.getLiteralValueAccess().getLiteralBooleanAssignment_2(), "rule__LiteralValue__LiteralBooleanAssignment_2");
                    put(grammarAccess.getRefAccess().getRefAssignment_0(), "rule__Ref__RefAssignment_0");
                    put(grammarAccess.getRefAccess().getAddlcolsAssignment_1(), "rule__Ref__AddlcolsAssignment_1");
                    put(grammarAccess.getRefAccess().getRowAssignment_3(), "rule__Ref__RowAssignment_3");
                }
            };
        }
        return nameMappings.get(element);
    }

    @Override
    protected Collection<FollowElement> getFollowElements(AbstractInternalContentAssistParser parser) {
        try {
            com.ge.research.sadl.ui.contentassist.antlr.internal.InternalMappingParser typedParser = (com.ge.research.sadl.ui.contentassist.antlr.internal.InternalMappingParser) parser;
            typedParser.entryRuleModel();
            return typedParser.getFollowElements();
        } catch(RecognitionException ex) {
            // Generated pattern: surface parser errors as unchecked exceptions
            throw new RuntimeException(ex);
        }
    }

    @Override
    protected String[] getInitialHiddenTokens() {
        // Tokens skipped by default: whitespace and both comment styles
        return new String[] { "RULE_WS", "RULE_ML_COMMENT", "RULE_SL_COMMENT" };
    }

    public MappingGrammarAccess getGrammarAccess() {
        return this.grammarAccess;
    }

    public void setGrammarAccess(MappingGrammarAccess grammarAccess) {
        this.grammarAccess = grammarAccess;
    }
}
| epl-1.0 |
dhuebner/che | plugins/plugin-java/che-plugin-java-ext-maven/src/main/java/org/eclipse/che/ide/extension/maven/client/wizard/MavenPageView.java | 1930 | /*******************************************************************************
* Copyright (c) 2012-2016 Codenvy, S.A.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Codenvy, S.A. - initial API and implementation
*******************************************************************************/
package org.eclipse.che.ide.extension.maven.client.wizard;
import org.eclipse.che.ide.api.mvp.View;
import org.eclipse.che.ide.extension.maven.client.MavenArchetype;
import com.google.inject.ImplementedBy;
import java.util.List;
/**
* @author Evgen Vidolob
*/
@ImplementedBy(MavenPageViewImpl.class)
public interface MavenPageView extends View<MavenPageView.ActionDelegate> {

    /** @return the currently entered Maven packaging (e.g. jar, war) */
    String getPackaging();

    /** Sets the packaging value shown in the view. */
    void setPackaging(String packaging);

    /** @return the currently selected archetype */
    MavenArchetype getArchetype();

    /** Populates the archetype selection list. */
    void setArchetypes(List<MavenArchetype> archetypes);

    /** @return the currently entered Maven groupId */
    String getGroupId();

    /** Sets the groupId shown in the view. */
    void setGroupId(String group);

    /** @return the currently entered Maven artifactId */
    String getArtifactId();

    /** Sets the artifactId shown in the view. */
    void setArtifactId(String artifact);

    /** @return the currently entered Maven version */
    String getVersion();

    /** Sets the version shown in the view. */
    void setVersion(String value);

    /** Shows or hides the packaging controls. */
    void setPackagingVisibility(boolean visible);

    /** Shows or hides the archetype section. */
    void setArchetypeSectionVisibility(boolean visible);

    /** Enables or disables the archetype controls. */
    void enableArchetypes(boolean enabled);

    /** @return true if the "generate from archetype" option is selected */
    boolean isGenerateFromArchetypeSelected();

    /** Shows or hides the missing-artifactId validation indicator. */
    void showArtifactIdMissingIndicator(boolean doShow);

    /** Shows or hides the missing-groupId validation indicator. */
    void showGroupIdMissingIndicator(boolean doShow);

    /** Shows or hides the missing-version validation indicator. */
    void showVersionMissingIndicator(boolean doShow);

    /** Removes all archetypes from the selection list. */
    void clearArchetypes();

    /** Callbacks the view fires in response to user input. */
    public interface ActionDelegate {
        /** Called when groupId/artifactId/version input changes. */
        void onCoordinatesChanged();

        /** Called when the packaging value changes. */
        void packagingChanged(String packaging);

        /** Called when the "generate from archetype" option is toggled. */
        void generateFromArchetypeChanged(boolean isGenerateFromArchetype);

        /** Called when a different archetype is selected. */
        void archetypeChanged(MavenArchetype archetype);
    }
}
| epl-1.0 |
mickey4u/new_mart | addons/binding/org.openhab.binding.zwave/src/main/java/org/openhab/binding/zwave/internal/protocol/event/ZWaveAssociationEvent.java | 1502 | /**
* Copyright (c) 2014-2015 openHAB UG (haftungsbeschraenkt) and others.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*/
package org.openhab.binding.zwave.internal.protocol.event;
import java.util.List;
import org.openhab.binding.zwave.internal.protocol.ZWaveAssociation;
import org.openhab.binding.zwave.internal.protocol.ZWaveAssociationGroup;
import org.openhab.binding.zwave.internal.protocol.commandclass.ZWaveCommandClass.CommandClass;
/**
* ZWave association group received event.
* Send from the association members to the binding
* Note that multiple events can be required to build up the full list.
*
* @author Chris Jackson
*/
public class ZWaveAssociationEvent extends ZWaveCommandClassValueEvent {
    // Assigned only in the constructor, so declared final (immutable event payload)
    private final ZWaveAssociationGroup group;

    /**
     * Constructor. Creates a new instance of the ZWaveAssociationEvent
     * class.
     *
     * @param nodeId the nodeId of the event. Must be set to the controller node.
     * @param group the association group reported by the node
     */
    public ZWaveAssociationEvent(int nodeId, ZWaveAssociationGroup group) {
        super(nodeId, 0, CommandClass.ASSOCIATION, 0);
        this.group = group;
    }

    /**
     * @return the index (id) of the reported association group
     */
    public int getGroupId() {
        return group.getIndex();
    }

    /**
     * @return the members of the reported association group
     */
    public List<ZWaveAssociation> getGroupMembers() {
        return group.getAssociations();
    }
}
| epl-1.0 |
boniatillo-com/PhaserEditor | source/thirdparty/jsdt/org.eclipse.wst.jsdt.ui/src/org/eclipse/wst/jsdt/internal/ui/wizards/dialogfields/StringButtonDialogField.java | 3983 | /*******************************************************************************
* Copyright (c) 2000, 2007 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.wst.jsdt.internal.ui.wizards.dialogfields;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.events.SelectionListener;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Text;
import org.eclipse.wst.jsdt.internal.ui.util.SWTUtil;
/**
* Dialog field containing a label, text control and a button control.
*/
/**
 * Dialog field containing a label, text control and a button control.
 */
public class StringButtonDialogField extends StringDialogField {

    // The browse button widget; created lazily in getChangeControl()
    private Button fBrowseButton;
    // Label text for the browse button; placeholder until setButtonLabel() is called
    private String fBrowseButtonLabel;
    // Callback invoked when the button is pressed
    private IStringButtonAdapter fStringButtonAdapter;

    // Requested enable state of the button, tracked separately from the
    // field's own enable state so both must be true for the widget to enable
    private boolean fButtonEnabled;

    /**
     * @param adapter callback to invoke when the browse button is pressed
     */
    public StringButtonDialogField(IStringButtonAdapter adapter) {
        super();
        fStringButtonAdapter= adapter;
        fBrowseButtonLabel= "!Browse...!"; //$NON-NLS-1$
        fButtonEnabled= true;
    }

    /**
     * Sets the label of the button.
     */
    public void setButtonLabel(String label) {
        fBrowseButtonLabel= label;
    }

    // ------ adapter communication

    /**
     * Programmatical pressing of the button
     */
    public void changeControlPressed() {
        fStringButtonAdapter.changeControlPressed(this);
    }

    // ------- layout helpers

    /*
     * @see DialogField#doFillIntoGrid
     */
    public Control[] doFillIntoGrid(Composite parent, int nColumns) {
        assertEnoughColumns(nColumns);

        // Layout: [label | text (spans remaining columns) | button]
        Label label= getLabelControl(parent);
        label.setLayoutData(gridDataForLabel(1));
        Text text= getTextControl(parent);
        text.setLayoutData(gridDataForText(nColumns - 2));
        Button button= getChangeControl(parent);
        button.setLayoutData(gridDataForButton(button, 1));

        return new Control[] { label, text, button };
    }

    /*
     * @see DialogField#getNumberOfControls
     */
    public int getNumberOfControls() {
        return 3;
    }

    /**
     * @param button the button to size
     * @param span number of grid columns the button should span
     * @return grid data sized to the platform's standard button width
     */
    protected static GridData gridDataForButton(Button button, int span) {
        GridData gd= new GridData();
        gd.horizontalAlignment= GridData.FILL;
        gd.grabExcessHorizontalSpace= false;
        gd.horizontalSpan= span;
        gd.widthHint = SWTUtil.getButtonWidthHint(button);
        return gd;
    }

    // ------- ui creation

    /**
     * Creates or returns the created buttom widget.
     * @param parent The parent composite or <code>null</code> if the widget has
     * already been created.
     */
    public Button getChangeControl(Composite parent) {
        if (fBrowseButton == null) {
            assertCompositeNotNull(parent);

            fBrowseButton= new Button(parent, SWT.PUSH);
            fBrowseButton.setFont(parent.getFont());
            fBrowseButton.setText(fBrowseButtonLabel);
            // Widget is enabled only if both the field and the button are enabled
            fBrowseButton.setEnabled(isEnabled() && fButtonEnabled);
            fBrowseButton.addSelectionListener(new SelectionListener() {
                public void widgetDefaultSelected(SelectionEvent e) {
                    changeControlPressed();
                }
                public void widgetSelected(SelectionEvent e) {
                    changeControlPressed();
                }
            });
        }
        return fBrowseButton;
    }

    // ------ enable / disable management

    /**
     * Sets the enable state of the button.
     */
    public void enableButton(boolean enable) {
        // Only touch the widget if it still exists (not disposed)
        if (isOkToUse(fBrowseButton)) {
            fBrowseButton.setEnabled(isEnabled() && enable);
        }
        // Remember the request so it can be applied when the widget is (re)created
        fButtonEnabled= enable;
    }

    /*
     * @see DialogField#updateEnableState
     */
    protected void updateEnableState() {
        super.updateEnableState();
        if (isOkToUse(fBrowseButton)) {
            fBrowseButton.setEnabled(isEnabled() && fButtonEnabled);
        }
    }
}
| epl-1.0 |
OpenLiberty/open-liberty | dev/com.ibm.websphere.javaee.jsf.2.2/src/javax/faces/component/_DeltaStateHelper.java | 37482 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package javax.faces.component;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import javax.el.ValueExpression;
import javax.faces.context.FacesContext;
/**
* A delta enabled state holder implementing the StateHolder Interface.
* <p>
* Components implementing the PartalStateHolder interface have an initial state
* and delta states, the initial state is the one holding all root values
* and deltas store differences to the initial states
* </p>
* <p>
* For components not implementing partial state saving only the initial states are
* of importance, everything is stored and restored continously there
* </p>
* <p>
* The state helper seems to have three internal storage mechanisms:
* one being a list which stores plain values,
* one being a key value pair which stores key values in maps
* add serves the plain list type while put serves the
* key value type,
* the third is the value which has to be stored plainly as is!
* </p>
* In other words, this map can be seen as a composite map. It has two maps:
* initial state map and delta map.
* <p>
* If delta map is used (method component.initialStateMarked() ),
* base or initial state map cannot be changed, since all changes
* should be tracked on delta map.
* </p>
* <p>
* The intention of this class is just hold property values
* and do a clean separation between initial state and delta.
* </p>
* <p>
* The code from this class comes from a refactor of
* org.apache.myfaces.trinidad.bean.util.PropertyHashMap
* </p>
* <p>
* The context from this class comes and that should be taken into account
* is this:
* </p>
* <p>
* First request:
* </p>
* <ul>
* <li> A new template is created (using
* javax.faces.view.ViewDeclarationLanguage.buildView method)
* and component.markInitialState is called from its related TagHandler classes
* (see javax.faces.view.facelets.ComponentHandler ).
* When this method is executed, the component tree was populated from the values
* set in the facelet abstract syntax tree (or in other words composition of
* facelets templates). </li>
* <li> From this point all updates on the variables are considered "delta". </li>
* <li> SaveState, if initialStateMarked is true, only delta is saved. </li>
* </ul>
* <p>
* Second request (and next ones)
* </p>
* <ul>
* <li> A new template is created and component.markInitialState is called from
* its related TagHandler classes again. In this way, components like c:forEach
* or c:if, that add or remove components could notify about this and handle
* them properly (see javax.faces.view.StateManagementStrategy). Note that a
* component restored using this method is no different as the same component
* at the first request at the same time. </li>
* <li> A call for restoreState is done, passing the delta as object value. If no
* delta, the state is complete and no call is triggered. </li>
* <li> Lifecycle occur, changing the necessary stuff. </li>
* <li> SaveState, if initialStateMarked is true, only delta is saved. </li>
* </ul>
* <p>
* From the previous analysis, the following conclusions arise:
* <ul>
* <li>This class only needs to keep track of delta changes, so when
* restoreState/saveState is called, the right objects are passed.</li>
* <li>UIComponent.clearInitialState is used to reset the partial
* state holder to a non delta state, so the state to be saved by
* saveState is no longer a delta instead is a full state. If a call
* to clearInitialState occur it is not expected a call for
* UIComponent.markInitialState occur on the current request.</li>
* <li>The state is handled in the same way on UIData, so components
* inside UIData share its state on all rows. There is no way to save
* delta per row.</li>
* <li>The map backed by method put(Serializable,String,Object) is
* a replacement of UIComponentBase.attributesMap and UIComponent.bindings map.
* Note that on jsf 1.2, instances saved on attributesMap should not be
* StateHolder, but on jsf 2.0 it is possible to have it. PartialStateHolder
* instances are not handled in this map, or in other words delta state is not
* handled in this classes (markInitialState and clearInitialState is not propagated).</li>
* <li>The list backed by method add(Serializable,Object) should be (is not) a
* replacement of UIComponentBase.facesListeners, but note that StateHelper
* does not implement PartialStateHolder, and facesListener could have instances
* of that class that needs to be notified when UIComponent.markInitialState or
* UIComponent.clearInitialState is called, or in other words facesListeners
* should deal with PartialStateHolder instances.</li>
* <li>The list backed by method add(Serializable,Object) is
* a replacement of UIViewRoot.phaseListeners list. Note that instances of
* PhaseListener are not expected to implement StateHolder or PartialStateHolder.</li>
* </ul>
* </p>
* <p>
* NOTE: The current implementation of StateHelper on RI does not handle
* stateHolder values internally. To prevent problems when developers create
* custom components we should do this too. But anyway, the code that
* handle this case should be let here as comment, if some day this feature
* is provided. Note than stateHolder aware properties like converter,
* validator or listeners should deal with StateHolder or PartialStateHolder
* on component classes.
*
* </p>
*/
class _DeltaStateHelper implements StateHelper, TransientStateHelper, TransientStateHolder
{
/**
* We need to hold a component instance because:
*
* - The component is the one who knows if we are on initial or delta mode
* - eval assume calls to component.ValueExpression
*/
private UIComponent _component;
/**
* This map holds the full current state
*/
private Map<Serializable, Object> _fullState;
/**
* This map only keep track of delta changes to be saved
*/
private Map<Serializable, Object> _deltas;
private Map<Object, Object> _transientState;
//private Map<Serializable, Object> _initialState;
private Object[] _initialState;
/**
* This map keep track of StateHolder keys, to be saved when
* saveState is called.
*/
//private Set<Serializable> _stateHolderKeys;
private boolean _transient = false;
/**
* This is a copy-on-write map of the full state after markInitialState()
* was called, but before any delta is written that is not part of
* the initial state (value, localValueSet, submittedValue, valid).
* The intention is allow to reset the StateHelper when copyFullInitialState
* is set to true.
*/
private Map<Serializable, Object> _initialFullState;
/**
* Indicates if a copy-on-write map is created to allow reset the state
* of this StateHelper.
*/
private boolean _copyFullInitialState;
public _DeltaStateHelper(UIComponent component)
{
super();
this._component = component;
_fullState = new HashMap<Serializable, Object>();
_deltas = null;
_transientState = null;
_initialFullState = null;
_copyFullInitialState = false;
//_stateHolderKeys = new HashSet<Serializable>();
}
    /**
     * Lazily creates the delta map when the component is in "initial state
     * marked" mode, and — when {@code _copyFullInitialState} is set — takes a
     * one-time copy-on-write snapshot of the full state so it can later be
     * restored by {@link #clearFullStateMap}.
     *
     * @param key the property about to be written; used to decide whether a
     *            snapshot is needed (keys that are part of the declared
     *            initial state never trigger one)
     * @return {@code true} when delta tracking is active and writes must be
     *         recorded in {@code _deltas} as well as {@code _fullState}
     */
    private boolean _createDeltas(Serializable key)
    {
        if (isInitialStateMarked())
        {
            // Snapshot the full state at most once, before the first
            // "non-initial-state" delta is written.
            if (_copyFullInitialState && _initialFullState == null)
            {
                if (_initialState == null)
                {
                    // Copy it directly
                    _initialFullState = new HashMap<Serializable, Object>();
                    copyMap(_component.getFacesContext(), _fullState, _initialFullState);
                }
                else
                {
                    // Create only if the passed key is not part of the defined initial state
                    boolean keyInInitialState = false;
                    for (int i = 0; i < _initialState.length; i+=2)
                    {
                        Serializable key2 = (Serializable) _initialState[i];
                        if (key.equals(key2))
                        {
                            keyInInitialState = true;
                            break;
                        }
                    }
                    if (!keyInInitialState)
                    {
                        // Copy it directly, but note in this case if the initialFullState map
                        // contains some key already defined in initialState, this key must be
                        // overriden. It is better to do in that way, because it is possible
                        // to skip resetState() if the view cannot be recycled.
                        _initialFullState = new HashMap<Serializable, Object>();
                        copyMap(_component.getFacesContext(), _fullState, _initialFullState);
                    }
                }
            }
            if (_deltas == null)
            {
                _deltas = new HashMap<Serializable, Object>(2);
            }
            return true;
        }
        return false;
    }
    /**
     * Enables/disables the copy-on-write snapshot of the full state
     * (see {@code _copyFullInitialState}); must be set before deltas
     * are written to have any effect.
     */
    void setCopyFullInitialState(boolean value)
    {
        _copyFullInitialState = value;
    }
private static void copyMap(FacesContext context,
Map<Serializable, Object> sourceMap,
Map<Serializable, Object> targetMap)
{
Map serializableMap = sourceMap;
Map.Entry<Serializable, Object> entry;
Iterator<Map.Entry<Serializable, Object>> it = serializableMap
.entrySet().iterator();
while (it.hasNext())
{
entry = it.next();
Serializable key = entry.getKey();
Object value = entry.getValue();
// The condition in which the call to saveAttachedState
// is to handle List, StateHolder or non Serializable instances.
// we check it here, to prevent unnecessary calls.
if (value instanceof StateHolder ||
value instanceof List ||
!(value instanceof Serializable))
{
Object savedValue = UIComponentBase.saveAttachedState(context,
value);
targetMap.put(key, UIComponentBase.restoreAttachedState(context,
savedValue));
}
else if (!(value instanceof Serializable))
{
Object newInstance;
try
{
newInstance = entry.getValue().getClass().newInstance();
}
catch (InstantiationException e)
{
throw new RuntimeException("Could not restore StateHolder of type " +
entry.getValue().getClass().getName()
+ " (missing no-args constructor?)", e);
}
catch (IllegalAccessException e)
{
throw new RuntimeException(e);
}
targetMap.put(key, newInstance);
}
else
{
targetMap.put(key, value);
}
}
}
    /**
     * Delegates to the owning component: when it reports its initial state
     * as marked, this helper records changes as deltas.
     */
    protected boolean isInitialStateMarked()
    {
        return _component.initialStateMarked();
    }
    /**
     * Appends {@code value} to the list stored under {@code key}.
     * In delta mode the addition is additionally recorded in an
     * {@link InternalDeltaListMap} with a {@code TRUE} marker so it can be
     * replayed on restore.
     */
    public void add(Serializable key, Object value)
    {
        if (_createDeltas(key))
        {
            //Track delta case
            Map<Object, Boolean> deltaListMapValues = (Map<Object, Boolean>) _deltas
                    .get(key);
            if (deltaListMapValues == null)
            {
                deltaListMapValues = new InternalDeltaListMap<Object, Boolean>(
                        3);
                _deltas.put(key, deltaListMapValues);
            }
            deltaListMapValues.put(value, Boolean.TRUE);
        }
        //Handle change on full map
        List<Object> fullListValues = (List<Object>) _fullState.get(key);
        if (fullListValues == null)
        {
            fullListValues = new InternalList<Object>(3);
            _fullState.put(key, fullListValues);
        }
        fullListValues.add(value);
    }
public Object eval(Serializable key)
{
Object returnValue = _fullState.get(key);
if (returnValue != null)
{
return returnValue;
}
ValueExpression expression = _component.getValueExpression(key
.toString());
if (expression != null)
{
return expression.getValue(_component.getFacesContext()
.getELContext());
}
return null;
}
public Object eval(Serializable key, Object defaultValue)
{
Object returnValue = _fullState.get(key);
if (returnValue != null)
{
return returnValue;
}
ValueExpression expression = _component.getValueExpression(key
.toString());
if (expression != null)
{
return expression.getValue(_component.getFacesContext()
.getELContext());
}
return defaultValue;
}
    /**
     * Returns the current (full-state) value for {@code key};
     * never consults value expressions — see {@link #eval(Serializable)}.
     */
    public Object get(Serializable key)
    {
        return _fullState.get(key);
    }
    /**
     * Stores {@code value} under {@code key}, mirroring the write into the
     * delta map when delta tracking is active.
     *
     * @return the previous value: from the delta map when the key was
     *         already tracked there, otherwise from the full state
     */
    public Object put(Serializable key, Object value)
    {
        Object returnValue = null;
        if (_createDeltas(key))
        {
            if (_deltas.containsKey(key))
            {
                // key already tracked: the delta holds the caller-visible
                // previous value
                returnValue = _deltas.put(key, value);
                _fullState.put(key, value);
            }
            else if (value == null && !_fullState.containsKey(key))
            {
                // putting null over an absent key is a no-op; do not create
                // a spurious delta entry
                returnValue = null;
            }
            else
            {
                _deltas.put(key, value);
                returnValue = _fullState.put(key, value);
            }
        }
        else
        {
            returnValue = _fullState.put(key, value);
        }
        return returnValue;
    }
    /**
     * Stores {@code value} under the nested map kept at {@code key}
     * (backing for attribute/bindings-style storage). In delta mode the
     * write is mirrored into an {@link InternalMap} inside the delta map.
     *
     * @return the previous value for {@code mapKey}: taken from the delta
     *         map when it already tracked that entry, otherwise from the
     *         full-state map
     */
    public Object put(Serializable key, String mapKey, Object value)
    {
        boolean returnSet = false;
        Object returnValue = null;
        if (_createDeltas(key))
        {
            //Track delta case
            Map<String, Object> mapValues = (Map<String, Object>) _deltas
                    .get(key);
            if (mapValues == null)
            {
                mapValues = new InternalMap<String, Object>();
                _deltas.put(key, mapValues);
            }
            if (mapValues.containsKey(mapKey))
            {
                // delta already had an entry: its old value wins as return
                returnValue = mapValues.put(mapKey, value);
                returnSet = true;
            }
            else
            {
                mapValues.put(mapKey, value);
            }
        }
        //Handle change on full map
        Map<String, Object> mapValues = (Map<String, Object>) _fullState
                .get(key);
        if (mapValues == null)
        {
            mapValues = new InternalMap<String, Object>();
            _fullState.put(key, mapValues);
        }
        if (returnSet)
        {
            mapValues.put(mapKey, value);
        }
        else
        {
            returnValue = mapValues.put(mapKey, value);
        }
        return returnValue;
    }
    /**
     * Removes {@code key} from the full state. In delta mode the removal is
     * recorded as a key/null pair in the delta map so it can be replayed
     * on restore.
     *
     * @return the removed value (delta value when the key was tracked there)
     */
    public Object remove(Serializable key)
    {
        Object returnValue = null;
        if (_createDeltas(key))
        {
            if (_deltas.containsKey(key))
            {
                // Keep track of the removed values using key/null pair on the delta map
                returnValue = _deltas.put(key, null);
                _fullState.remove(key);
            }
            else
            {
                // Keep track of the removed values using key/null pair on the delta map
                _deltas.put(key, null);
                returnValue = _fullState.remove(key);
            }
        }
        else
        {
            returnValue = _fullState.remove(key);
        }
        return returnValue;
    }
    /**
     * Removes a single element ({@code valueOrKey}) from the collection or
     * map stored under {@code key}. Dispatches on the concrete internal
     * container type to keep list- and map-backed storage separate.
     *
     * @return {@code valueOrKey} (lists) or the removed mapping's value
     *         (maps) when something was removed, otherwise {@code null}
     */
    public Object remove(Serializable key, Object valueOrKey)
    {
        // Comment by lu4242 : The spec javadoc says if it is a Collection
        // or Map deal with it. But the intention of this method is work
        // with add(?,?) and put(?,?,?), this ones return instances of
        // InternalMap and InternalList to prevent mixing, so to be
        // consistent we'll cast to those classes here.
        Object collectionOrMap = _fullState.get(key);
        Object returnValue = null;
        if (collectionOrMap instanceof InternalMap)
        {
            if (_createDeltas(key))
            {
                returnValue = _removeValueOrKeyFromMap(_deltas, key,
                        valueOrKey, true);
                _removeValueOrKeyFromMap(_fullState, key, valueOrKey, false);
            }
            else
            {
                returnValue = _removeValueOrKeyFromMap(_fullState, key,
                        valueOrKey, false);
            }
        }
        else if (collectionOrMap instanceof InternalList)
        {
            if (_createDeltas(key))
            {
                returnValue = _removeValueOrKeyFromCollectionDelta(_deltas,
                        key, valueOrKey);
                _removeValueOrKeyFromCollection(_fullState, key, valueOrKey);
            }
            else
            {
                returnValue = _removeValueOrKeyFromCollection(_fullState, key,
                        valueOrKey);
            }
        }
        return returnValue;
    }
private static Object _removeValueOrKeyFromCollectionDelta(
Map<Serializable, Object> stateMap, Serializable key,
Object valueOrKey)
{
Object returnValue = null;
Map<Object, Boolean> c = (Map<Object, Boolean>) stateMap.get(key);
if (c != null)
{
if (c.containsKey(valueOrKey))
{
returnValue = valueOrKey;
}
c.put(valueOrKey, Boolean.FALSE);
}
return returnValue;
}
private static Object _removeValueOrKeyFromCollection(
Map<Serializable, Object> stateMap, Serializable key,
Object valueOrKey)
{
Object returnValue = null;
Collection c = (Collection) stateMap.get(key);
if (c != null)
{
if (c.remove(valueOrKey))
{
returnValue = valueOrKey;
}
if (c.isEmpty())
{
stateMap.remove(key);
}
}
return returnValue;
}
    /**
     * Removes {@code valueOrKey} from the nested map stored at {@code key}.
     * When {@code delta} is true the removal is recorded as a null marker
     * instead of an actual remove, so it survives save/restore; an emptied
     * map is likewise marked with a null entry rather than removed.
     */
    private static Object _removeValueOrKeyFromMap(
            Map<Serializable, Object> stateMap, Serializable key,
            Object valueOrKey, boolean delta)
    {
        if (valueOrKey == null)
        {
            return null;
        }
        Object returnValue = null;
        Map<String, Object> map = (Map<String, Object>) stateMap.get(key);
        if (map != null)
        {
            if (delta)
            {
                // Keep track of the removed values using key/null pair on the delta map
                returnValue = map.put((String) valueOrKey, null);
            }
            else
            {
                returnValue = map.remove(valueOrKey);
            }
            if (map.isEmpty())
            {
                //stateMap.remove(key);
                stateMap.put(key, null);
            }
        }
        return returnValue;
    }
    /** @return whether this helper's state is excluded from saving. */
    public boolean isTransient()
    {
        return _transient;
    }
    /**
     * Serializing code.
     * <p>
     * The serialized data structure is a flat Object[] of key/value pairs.
     * Values written via add(...) are saved as an InternalList, values
     * written via put(key, mapKey, value) as an InternalMap; everything else
     * is stored as-is or through the attached-state mechanism.
     * </p>
     * <p>
     * In delta mode only the delta map is saved; additionally, delta entries
     * whose value equals the declared initial-state default are dropped,
     * since rebuilding the view restores them anyway.
     * </p>
     *
     * @return the key/value array, or {@code null} when there is nothing to save
     */
    public Object saveState(FacesContext context)
    {
        Map serializableMap = (isInitialStateMarked()) ? _deltas : _fullState;
        if (_initialState != null && _deltas != null && !_deltas.isEmpty()
                && isInitialStateMarked())
        {
            // Before save the state, check if the property was changed from the
            // initial state value. If the property was changed but it has the
            // same value from the one in the initial state, we can remove it
            // from delta, because when the view is built again, it will be
            // restored to the same state. This check suppose some additional
            // map.get() calls when saving the state, but using it only in properties
            // that are expected to change over lifecycle (value, localValueSet,
            // submittedValue, valid), is worth to do it, because those ones
            // always generated delta changes.
            for (int i = 0; i < _initialState.length; i+=2)
            {
                Serializable key = (Serializable) _initialState[i];
                Object defaultValue = _initialState[i+1];
                // Check only if there is delta state for that property, in other
                // case it is not necessary. Remember it is possible to have
                // null values inside the Map.
                if (_deltas.containsKey(key))
                {
                    Object deltaValue = _deltas.get(key);
                    if (deltaValue == null && defaultValue == null)
                    {
                        _deltas.remove(key);
                        if (_deltas.isEmpty())
                        {
                            break;
                        }
                    }
                    if (deltaValue != null && deltaValue.equals(defaultValue))
                    {
                        _deltas.remove(key);
                        if (_deltas.isEmpty())
                        {
                            break;
                        }
                    }
                }
            }
        }
        if (serializableMap == null || serializableMap.size() == 0)
        {
            return null;
        }
        // (historical commented-out StateHolder-key bookkeeping removed for clarity)
        Map.Entry<Serializable, Object> entry;
        //entry == key, value, key, value
        Object[] retArr = new Object[serializableMap.entrySet().size() * 2];
        Iterator<Map.Entry<Serializable, Object>> it = serializableMap
                .entrySet().iterator();
        int cnt = 0;
        while (it.hasNext())
        {
            entry = it.next();
            retArr[cnt] = entry.getKey();
            Object value = entry.getValue();
            // The condition in which the call to saveAttachedState
            // is to handle List, StateHolder or non Serializable instances.
            // we check it here, to prevent unnecessary calls.
            if (value instanceof StateHolder ||
                value instanceof List ||
                !(value instanceof Serializable))
            {
                Object savedValue = UIComponentBase.saveAttachedState(context,
                        value);
                retArr[cnt + 1] = savedValue;
            }
            else
            {
                retArr[cnt + 1] = value;
            }
            cnt += 2;
        }
        // (historical commented-out _AttachedDeltaWrapper save path removed for clarity)
        return retArr;
    }
    /**
     * Restores state previously produced by {@link #saveState}.
     * <p>
     * In delta mode the saved markers are replayed: InternalDeltaListMap
     * entries become add/remove calls (TRUE = added, FALSE = removed),
     * InternalMap entries become put(key, mapKey, value) calls, and plain
     * values become put(key, value). Outside delta mode the full state is
     * cleared first and repopulated wholesale.
     * </p>
     */
    public void restoreState(FacesContext context, Object state)
    {
        if (state == null)
        {
            return;
        }
        Object[] serializedState = (Object[]) state;
        // full restore replaces everything previously held
        if (!isInitialStateMarked() && !_fullState.isEmpty())
        {
            _fullState.clear();
            if(_deltas != null)
            {
                _deltas.clear();
            }
        }
        for (int cnt = 0; cnt < serializedState.length; cnt += 2)
        {
            Serializable key = (Serializable) serializedState[cnt];
            Object savedValue = UIComponentBase.restoreAttachedState(context,
                    serializedState[cnt + 1]);
            if (isInitialStateMarked())
            {
                if (savedValue instanceof InternalDeltaListMap)
                {
                    for (Map.Entry<Object, Boolean> mapEntry : ((Map<Object, Boolean>) savedValue)
                            .entrySet())
                    {
                        boolean addOrRemove = mapEntry.getValue();
                        if (addOrRemove)
                        {
                            //add
                            this.add(key, mapEntry.getKey());
                        }
                        else
                        {
                            //remove
                            this.remove(key, mapEntry.getKey());
                        }
                    }
                }
                else if (savedValue instanceof InternalMap)
                {
                    for (Map.Entry<String, Object> mapEntry : ((Map<String, Object>) savedValue)
                            .entrySet())
                    {
                        this.put(key, mapEntry.getKey(), mapEntry.getValue());
                    }
                }
                // (historical commented-out _AttachedDeltaWrapper restore path
                // removed for clarity)
                else
                {
                    put(key, savedValue);
                }
            }
            else
            {
                put(key, savedValue);
            }
        }
    }
    /**
     * Try to reset the state and then check if the reset was successful or not,
     * calling saveState().
     */
    public Object resetHardState(FacesContext context)
    {
        if (_transientState != null)
        {
            _transientState.clear();
        }
        // only worth resetting when there are deltas to roll back
        if (_deltas != null && !_deltas.isEmpty() && isInitialStateMarked())
        {
            clearFullStateMap(context);
        }
        return saveState(context);
    }
/**
* Execute a "soft reset", which means only remove all transient state.
*/
public Object resetSoftState(FacesContext context)
{
if (_transientState != null)
{
_transientState.clear();
}
return null;
}
    /**
     * Rolls the full state back to the copy-on-write snapshot taken by
     * {@link #_createDeltas}, then re-applies the declared initial-state
     * defaults on top; also discards all pending deltas.
     */
    protected void clearFullStateMap(FacesContext context)
    {
        if (_deltas != null)
        {
            _deltas.clear();
        }
        if (_initialFullState != null)
        {
            // If there is no delta, fullState is not required to be cleared.
            _fullState.clear();
            copyMap(context, _initialFullState, _fullState);
        }
        if (_initialState != null)
        {
            // If initial state is defined, override properties in _initialFullState.
            for (int i = 0; i < _initialState.length; i+=2)
            {
                Serializable key2 = (Serializable) _initialState[i];
                Object defaultValue = _initialState[i+1];
                if (_fullState.containsKey(key2))
                {
                    _fullState.put(key2, defaultValue);
                }
            }
        }
    }
    /** Marks this helper's state as transient (excluded from saving). */
    public void setTransient(boolean transientValue)
    {
        _transient = transientValue;
    }
    //We use our own data structures just to make sure
    //nothing gets mixed up internally
    /**
     * HashMap used for values written via put(key, mapKey, value).
     * Implements StateHolder so it round-trips through the attached-state
     * mechanism as a flat key/value Object[]; never transient.
     */
    static class InternalMap<K, V> extends HashMap<K, V> implements StateHolder
    {
        public InternalMap()
        {
            super();
        }
        public InternalMap(int initialCapacity, float loadFactor)
        {
            super(initialCapacity, loadFactor);
        }
        public InternalMap(Map<? extends K, ? extends V> m)
        {
            super(m);
        }
        public InternalMap(int initialSize)
        {
            super(initialSize);
        }
        public boolean isTransient()
        {
            return false;
        }
        public void setTransient(boolean newTransientValue)
        {
            // No op
        }
        public void restoreState(FacesContext context, Object state)
        {
            // state is the flat key/value array produced by saveState
            Object[] listAsMap = (Object[]) state;
            for (int cnt = 0; cnt < listAsMap.length; cnt += 2)
            {
                this.put((K) listAsMap[cnt], (V) UIComponentBase
                        .restoreAttachedState(context, listAsMap[cnt + 1]));
            }
        }
        public Object saveState(FacesContext context)
        {
            int cnt = 0;
            Object[] mapArr = new Object[this.size() * 2];
            for (Map.Entry<K, V> entry : this.entrySet())
            {
                mapArr[cnt] = entry.getKey();
                Object value = entry.getValue();
                // attached-state round trip only for List/StateHolder/non-Serializable
                if (value instanceof StateHolder ||
                    value instanceof List ||
                    !(value instanceof Serializable))
                {
                    mapArr[cnt + 1] = UIComponentBase.saveAttachedState(context, value);
                }
                else
                {
                    mapArr[cnt + 1] = value;
                }
                cnt += 2;
            }
            return mapArr;
        }
    }
    /**
     * Map used to keep track of list changes in delta mode: keys are the
     * list elements, Boolean.TRUE marks an addition and Boolean.FALSE a
     * removal (see restoreState replay logic).
     */
    static class InternalDeltaListMap<K, V> extends InternalMap<K, V>
    {
        public InternalDeltaListMap()
        {
            super();
        }
        public InternalDeltaListMap(int initialCapacity, float loadFactor)
        {
            super(initialCapacity, loadFactor);
        }
        public InternalDeltaListMap(int initialSize)
        {
            super(initialSize);
        }
        public InternalDeltaListMap(Map<? extends K, ? extends V> m)
        {
            super(m);
        }
    }
    /**
     * ArrayList used for values written via add(key, value).
     * Implements StateHolder so it round-trips through the attached-state
     * mechanism as an Object[]; never transient.
     */
    static class InternalList<T> extends ArrayList<T> implements StateHolder
    {
        public InternalList()
        {
            super();
        }
        public InternalList(Collection<? extends T> c)
        {
            super(c);
        }
        public InternalList(int initialSize)
        {
            super(initialSize);
        }
        public boolean isTransient()
        {
            return false;
        }
        public void setTransient(boolean newTransientValue)
        {
            // No op
        }
        public void restoreState(FacesContext context, Object state)
        {
            Object[] listAsArr = (Object[]) state;
            //since all other options would mean dual iteration
            //we have to do it the hard way
            for (Object elem : listAsArr)
            {
                add((T) UIComponentBase.restoreAttachedState(context, elem));
            }
        }
        public Object saveState(FacesContext context)
        {
            Object[] values = new Object[size()];
            for (int i = 0; i < size(); i++)
            {
                Object value = get(i);
                // attached-state round trip only for List/StateHolder/non-Serializable
                if (value instanceof StateHolder ||
                    value instanceof List ||
                    !(value instanceof Serializable))
                {
                    values[i] = UIComponentBase.saveAttachedState(context, value);
                }
                else
                {
                    values[i] = value;
                }
            }
            return values;
        }
    }
public Object getTransient(Object key)
{
return (_transientState == null) ? null : _transientState.get(key);
}
public Object getTransient(Object key, Object defaultValue)
{
Object returnValue = (_transientState == null) ? null : _transientState.get(key);
if (returnValue != null)
{
return returnValue;
}
return defaultValue;
}
public Object putTransient(Object key, Object value)
{
if (_transientState == null)
{
_transientState = new HashMap<Object, Object>();
}
return _transientState.put(key, value);
}
    /** Adopts the given map wholesale as the transient state (may be null). */
    @SuppressWarnings("unchecked")
    public void restoreTransientState(FacesContext context, Object state)
    {
        _transientState = (Map<Object, Object>) state;
    }
    /** Hands out the transient-state map itself (no copy); may be null. */
    public Object saveTransientState(FacesContext context)
    {
        return _transientState;
    }
    /**
     * Records the initial-state defaults (flat key/value array) used by
     * {@link #saveState} to prune redundant deltas. When the full state
     * already overrides one of the defaults, a per-instance copy is built
     * that carries the current values instead.
     */
    public void markPropertyInInitialState(Object[] defaultInitialState)
    {
        // Check if in the fullState, one of the default properties were changed
        boolean canApplyDefaultInitialState = true;
        for (int i = 0; i < defaultInitialState.length; i+=2)
        {
            Serializable key = (Serializable) defaultInitialState[i];
            if (_fullState.containsKey(key))
            {
                canApplyDefaultInitialState = false;
                break;
            }
        }
        if (canApplyDefaultInitialState)
        {
            // Most of the times the defaultInitialState is used.
            _initialState = defaultInitialState;
        }
        else
        {
            // recalculate it
            Object[] initialState = new Object[defaultInitialState.length];
            for (int i = 0; i < defaultInitialState.length; i+=2)
            {
                Serializable key = (Serializable) defaultInitialState[i];
                initialState[i] = key;
                if (_fullState.containsKey(key))
                {
                    initialState[i+1] = _fullState.get(key);
                }
                else
                {
                    initialState[i+1] = defaultInitialState[i+1];
                }
            }
            _initialState = initialState;
        }
    }
}
| epl-1.0 |
theoweiss/openhab2 | bundles/org.openhab.binding.daikin/src/main/java/org/openhab/binding/daikin/internal/api/airbase/AirbaseBasicInfo.java | 1767 | /**
* Copyright (c) 2010-2019 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.daikin.internal.api.airbase;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Holds information from the basic_info call.
*
* @author Paul Smedley - Initial contribution
*
*/
public class AirbaseBasicInfo {
    private static final Logger LOGGER = LoggerFactory.getLogger(AirbaseBasicInfo.class);

    // Access point SSID reported by the unit (value of the "ssid" key);
    // null when the key is absent from the response.
    public String ssid;

    private AirbaseBasicInfo() {
    }

    /**
     * Parses the comma separated key=value response of the basic_info call.
     *
     * @param response raw response body, e.g. "ret=OK,ssid=MyWifi"
     * @return parsed info; {@link #ssid} is null when the key is missing
     */
    public static AirbaseBasicInfo parse(String response) {
        LOGGER.debug("Parsing string: \"{}\"", response);
        Map<String, String> responseMap = Arrays.asList(response.split(",")).stream().filter(kv -> kv.contains("="))
                .map(kv -> {
                    // Split on the FIRST '=' only, so values that themselves
                    // contain '=' (e.g. encoded data) are not truncated.
                    String[] keyValue = kv.split("=", 2);
                    String key = keyValue[0];
                    String value = keyValue.length > 1 ? keyValue[1] : "";
                    return new String[] { key, value };
                // last-wins merge: a repeated key no longer throws
                // IllegalStateException from Collectors.toMap
                }).collect(Collectors.toMap(x -> x[0], x -> x[1], (v1, v2) -> v2));
        AirbaseBasicInfo info = new AirbaseBasicInfo();
        info.ssid = responseMap.get("ssid");
        return info;
    }

    /** @return the request parameters mirroring this object's fields. */
    public Map<String, String> getParamString() {
        Map<String, String> params = new HashMap<>();
        params.put("ssid", ssid);
        return params;
    }
}
| epl-1.0 |
elucash/eclipse-oxygen | org.eclipse.jdt.core/src/org/eclipse/jdt/internal/compiler/ast/UnionTypeReference.java | 6218 | /*******************************************************************************
* Copyright (c) 2011, 2014 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
* Stephan Herrmann - Contribution for
* Bug 429958 - [1.8][null] evaluate new DefaultLocation attribute of @NonNullByDefault
*******************************************************************************/
package org.eclipse.jdt.internal.compiler.ast;
import org.eclipse.jdt.internal.compiler.ASTVisitor;
import org.eclipse.jdt.internal.compiler.lookup.Binding;
import org.eclipse.jdt.internal.compiler.lookup.BlockScope;
import org.eclipse.jdt.internal.compiler.lookup.ClassScope;
import org.eclipse.jdt.internal.compiler.lookup.Scope;
import org.eclipse.jdt.internal.compiler.lookup.TypeBinding;
import org.eclipse.jdt.internal.compiler.lookup.TypeIds;
public class UnionTypeReference extends TypeReference {
public TypeReference[] typeReferences;
public UnionTypeReference(TypeReference[] typeReferences) {
this.bits |= ASTNode.IsUnionType;
this.typeReferences = typeReferences;
this.sourceStart = typeReferences[0].sourceStart;
int length = typeReferences.length;
this.sourceEnd = typeReferences[length - 1].sourceEnd;
}
	/* (non-Javadoc)
	 * @see org.eclipse.jdt.internal.compiler.ast.TypeReference#getLastToken()
	 */
	public char[] getLastToken() {
		// a union has no single meaningful last token; callers must tolerate null
		return null;
	}
	/**
	 * Not used for union types; resolution happens entirely in
	 * {@code resolveType(BlockScope, boolean, int)}.
	 * @see org.eclipse.jdt.internal.compiler.ast.ArrayQualifiedTypeReference#getTypeBinding(org.eclipse.jdt.internal.compiler.lookup.Scope)
	 */
	protected TypeBinding getTypeBinding(Scope scope) {
		return null; // not supported here - combined with resolveType(...)
	}
	/*
	 * Resolves every disjunct, reports diagnostics (parameterized/type-variable
	 * exceptions, non-Throwable types, subsumed disjuncts per bug 340486) and
	 * returns the least upper bound of all disjunct types, or null on error.
	 *
	 * @see org.eclipse.jdt.internal.compiler.ast.TypeReference#getTypeBinding(org.eclipse.jdt.internal.compiler.lookup.Scope)
	 */
	public TypeBinding resolveType(BlockScope scope, boolean checkBounds, int location) {
		// return the lub (least upper bound of all type binding)
		int length = this.typeReferences.length;
		TypeBinding[] allExceptionTypes = new TypeBinding[length];
		boolean hasError = false;
		for (int i = 0; i < length; i++) {
			TypeBinding exceptionType = this.typeReferences[i].resolveType(scope, checkBounds, location);
			if (exceptionType == null) {
				return null;
			}
			switch(exceptionType.kind()) {
				case Binding.PARAMETERIZED_TYPE :
					if (exceptionType.isBoundParameterizedType()) {
						hasError = true;
						scope.problemReporter().invalidParameterizedExceptionType(exceptionType, this.typeReferences[i]);
						// fall thru to create the variable - avoids additional errors because the variable is missing
					}
					break;
				case Binding.TYPE_PARAMETER :
					scope.problemReporter().invalidTypeVariableAsException(exceptionType, this.typeReferences[i]);
					hasError = true;
					// fall thru to create the variable - avoids additional errors because the variable is missing
					break;
			}
			// every disjunct of a multi-catch must be a Throwable subtype
			if (exceptionType.findSuperTypeOriginatingFrom(TypeIds.T_JavaLangThrowable, true) == null
					&& exceptionType.isValidBinding()) {
				scope.problemReporter().cannotThrowType(this.typeReferences[i], exceptionType);
				hasError = true;
			}
			allExceptionTypes[i] = exceptionType;
			// https://bugs.eclipse.org/bugs/show_bug.cgi?id=340486, ensure types are of union type.
			for (int j = 0; j < i; j++) {
				if (allExceptionTypes[j].isCompatibleWith(exceptionType)) {
					scope.problemReporter().wrongSequenceOfExceptionTypes(
							this.typeReferences[j],
							allExceptionTypes[j],
							exceptionType);
					hasError = true;
				} else if (exceptionType.isCompatibleWith(allExceptionTypes[j])) {
					scope.problemReporter().wrongSequenceOfExceptionTypes(
							this.typeReferences[i],
							exceptionType,
							allExceptionTypes[j]);
					hasError = true;
				}
			}
		}
		if (hasError) {
			return null;
		}
		// compute lub
		return (this.resolvedType = scope.lowerUpperBound(allExceptionTypes));
	}
/* (non-Javadoc)
* @see org.eclipse.jdt.internal.compiler.ast.TypeReference#getTypeName()
*/
public char[][] getTypeName() {
    // we need to keep a return value that is a char[][]
    // Best effort: the API cannot represent a union, so only the first
    // alternative's name is exposed.
    return this.typeReferences[0].getTypeName();
}
/* (non-Javadoc)
* @see org.eclipse.jdt.internal.compiler.ast.TypeReference#traverse(org.eclipse.jdt.internal.compiler.ASTVisitor, org.eclipse.jdt.internal.compiler.lookup.BlockScope)
*/
/**
 * Visits this union reference and, if the visitor accepts it, each of its
 * alternatives in declaration order. endVisit is always invoked afterwards.
 */
public void traverse(ASTVisitor visitor, BlockScope scope) {
    if (visitor.visit(this, scope) && this.typeReferences != null) {
        for (int index = 0, count = this.typeReferences.length; index < count; index++) {
            this.typeReferences[index].traverse(visitor, scope);
        }
    }
    visitor.endVisit(this, scope);
}
/* (non-Javadoc)
* @see org.eclipse.jdt.internal.compiler.ast.TypeReference#traverse(org.eclipse.jdt.internal.compiler.ASTVisitor, org.eclipse.jdt.internal.compiler.lookup.ClassScope)
*/
/**
 * Class-scope variant of traversal: visits this union reference and, when the
 * visitor accepts it, every alternative in order, then always ends the visit.
 */
public void traverse(ASTVisitor visitor, ClassScope scope) {
    if (visitor.visit(this, scope) && this.typeReferences != null) {
        for (int index = 0, count = this.typeReferences.length; index < count; index++) {
            this.typeReferences[index].traverse(visitor, scope);
        }
    }
    visitor.endVisit(this, scope);
}
/* (non-Javadoc)
* @see org.eclipse.jdt.internal.compiler.ast.Expression#printExpression(int, java.lang.StringBuffer)
*/
/**
 * Prints this union reference as "A | B | ..." after the requested indent.
 * Alternatives are printed without their own indentation; the separator is
 * appended between alternatives only, never after the last one.
 */
public StringBuffer printExpression(int indent, StringBuffer output) {
    printIndent(indent, output);
    if (this.typeReferences != null) {
        int last = this.typeReferences.length - 1;
        for (int i = 0; i <= last; i++) {
            this.typeReferences[i].printExpression(0, output);
            if (i < last) {
                output.append(" | "); //$NON-NLS-1$
            }
        }
    }
    return output;
}
public boolean isUnionType() {
    // Identifies this reference as a multi-catch union (A | B | ...).
    return true;
}
public TypeReference augmentTypeWithAdditionalDimensions(int additionalDimensions, Annotation[][] additionalAnnotations, boolean isVarargs) {
    // The requested dimensions/annotations are deliberately ignored.
    return this; // arrays are not legal as union types.
}
}
| epl-1.0 |
ctron/kura | kura/org.eclipse.kura.api/src/main/java/org/eclipse/kura/cloud/CloudletTopic.java | 2065 | /*******************************************************************************
* Copyright (c) 2011, 2020 Eurotech and/or its affiliates and others
*
* This program and the accompanying materials are made
* available under the terms of the Eclipse Public License 2.0
* which is available at https://www.eclipse.org/legal/epl-2.0/
*
* SPDX-License-Identifier: EPL-2.0
*
* Contributors:
* Eurotech
******************************************************************************/
package org.eclipse.kura.cloud;
import org.osgi.annotation.versioning.ProviderType;
/**
* @noextend This class is not intended to be subclassed by clients.
* @deprecated Please consider using {@link org.eclipse.kura.cloudconnection.message.KuraMessage} properties
*/
@ProviderType
@Deprecated
public class CloudletTopic {

    /** Request verbs recognized as the first segment of an application topic. */
    public enum Method {
        GET,
        PUT,
        POST,
        DEL,
        EXEC;
    }

    private Method method;
    private String[] resources;

    /**
     * Parses an application topic of the form {@code METHOD[/resource/...]} into a
     * {@link CloudletTopic}.
     *
     * @param appTopic
     *            the application topic; the first {@code /}-separated segment must match a
     *            {@link Method} constant name
     * @return the parsed topic; {@link #getResources()} is {@code null} when the topic has no
     *         resource segments
     * @throws IllegalArgumentException
     *             if the first segment is not a valid {@link Method} name
     * @throws NullPointerException
     *             if {@code appTopic} is {@code null}
     */
    public static CloudletTopic parseAppTopic(String appTopic) {
        CloudletTopic edcApplicationTopic = new CloudletTopic();
        String[] parts = appTopic.split("/");
        edcApplicationTopic.method = Method.valueOf(parts[0]);
        if (parts.length > 1) {
            // Copy everything after the method segment in one call instead of a manual loop.
            edcApplicationTopic.resources = new String[parts.length - 1];
            System.arraycopy(parts, 1, edcApplicationTopic.resources, 0, parts.length - 1);
        }
        return edcApplicationTopic;
    }

    // Instances are only created through parseAppTopic.
    private CloudletTopic() {
        super();
    }

    /** @return the request verb parsed from the first topic segment */
    public Method getMethod() {
        return this.method;
    }

    /**
     * @return the resource segments following the method, or {@code null} if there were none.
     *         NOTE(review): this returns the internal array without copying — kept as-is for
     *         backward compatibility.
     */
    public String[] getResources() {
        return this.resources;
    }

    /** Rebuilds the original topic string: the method name followed by "/" + each resource. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder(this.method.name());
        if (this.resources != null) {
            for (String resource : this.resources) {
                sb.append("/");
                sb.append(resource);
            }
        }
        return sb.toString();
    }
}
| epl-1.0 |
openhab/openhab2 | bundles/org.openhab.binding.onewire/src/main/java/org/openhab/binding/onewire/internal/device/DS2408.java | 1681 | /**
* Copyright (c) 2010-2020 Contributors to the openHAB project
*
* See the NOTICE file(s) distributed with this work for additional
* information.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License 2.0 which is available at
* http://www.eclipse.org/legal/epl-2.0
*
* SPDX-License-Identifier: EPL-2.0
*/
package org.openhab.binding.onewire.internal.device;
import org.eclipse.jdt.annotation.NonNullByDefault;
import org.openhab.binding.onewire.internal.DigitalIoConfig;
import org.openhab.binding.onewire.internal.OwException;
import org.openhab.binding.onewire.internal.SensorId;
import org.openhab.binding.onewire.internal.handler.OwBaseThingHandler;
import org.openhab.binding.onewire.internal.owserver.OwserverDeviceParameter;
/**
* The {@link DS2408} class defines an DS2408 device
*
* @author Jan N. Klug - Initial contribution
*/
@NonNullByDefault
public class DS2408 extends AbstractDigitalOwDevice {

    // The DS2408 exposes eight digital I/O channels.
    private static final int CHANNEL_COUNT = 8;

    public DS2408(SensorId sensorId, OwBaseThingHandler callback) {
        super(sensorId, callback);
    }

    @Override
    public void configureChannels() throws OwException {
        // Rebuild the per-channel configuration from scratch: one sensed-input /
        // PIO-output parameter pair per channel, then the whole-byte parameters.
        ioConfig.clear();
        for (int channel = 0; channel < CHANNEL_COUNT; channel++) {
            OwserverDeviceParameter input = new OwserverDeviceParameter("uncached/",
                    String.format("/sensed.%d", channel));
            OwserverDeviceParameter output = new OwserverDeviceParameter(String.format("/PIO.%d", channel));
            ioConfig.add(new DigitalIoConfig(callback.getThing(), channel, input, output));
        }
        fullInParam = new OwserverDeviceParameter("uncached/", "/sensed.BYTE");
        fullOutParam = new OwserverDeviceParameter("/PIO.BYTE");
        super.configureChannels();
    }
}
| epl-1.0 |
sguan-actuate/birt | testsuites/org.eclipse.birt.report.tests.model/src/org/eclipse/birt/report/tests/model/regression/Regression_78837.java | 2110 | /*******************************************************************************
* Copyright (c) 2004 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.birt.report.tests.model.regression;
import org.eclipse.birt.report.model.api.DesignFileException;
import org.eclipse.birt.report.model.api.ElementFactory;
import org.eclipse.birt.report.model.api.ParameterGroupHandle;
import org.eclipse.birt.report.model.api.ScalarParameterHandle;
import org.eclipse.birt.report.model.api.command.ContentException;
import org.eclipse.birt.report.model.api.command.NameException;
import org.eclipse.birt.report.tests.model.BaseTestCase;
/**
* Regression description:
* </p>
* Add a parameter group, error "the element ParameterGroup is not supported
* yet" design
* </p>
* Test description:
* <p>
* Support paramter group
* </p>
*/
public class Regression_78837 extends BaseTestCase
{

    /** Design file this regression test operates on. */
    private static final String INPUT = "Regression_78837.xml"; //$NON-NLS-1$

    /**
     * Prepares a clean resource directory and copies the input design in.
     *
     * @throws Exception
     */
    public void setUp( ) throws Exception
    {
        super.setUp( );
        removeResource( );
        copyResource_INPUT( INPUT, INPUT );
    }

    public void tearDown( )
    {
        removeResource( );
    }

    /**
     * Adding a parameter group (with a scalar parameter inside) to the design
     * must succeed without "element ParameterGroup is not supported" errors.
     *
     * @throws DesignFileException
     * @throws ContentException
     * @throws NameException
     */
    public void test_regression_78837( ) throws DesignFileException, ContentException, NameException
    {
        openDesign( INPUT );

        ElementFactory factory = designHandle.getElementFactory( );
        ParameterGroupHandle paramGroup = factory.newParameterGroup( "group" ); //$NON-NLS-1$
        ScalarParameterHandle scalarParam = factory.newScalarParameter( "p1" ); //$NON-NLS-1$

        designHandle.getParameters( ).add( paramGroup );
        paramGroup.getParameters( ).add( scalarParam );
    }
}
| epl-1.0 |
loveyoupeng/rt | modules/controls/src/main/java/javafx/scene/control/TableColumnBase.java | 35777 | /*
* Copyright (c) 2012, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javafx.scene.control;
import java.lang.ref.WeakReference;
import java.text.Collator;
import java.util.Comparator;
import com.sun.javafx.beans.IDProperty;
import com.sun.javafx.scene.control.ControlAcceleratorSupport;
import javafx.beans.property.BooleanProperty;
import javafx.beans.property.DoubleProperty;
import javafx.beans.property.ObjectProperty;
import javafx.beans.property.SimpleBooleanProperty;
import javafx.beans.property.SimpleObjectProperty;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.collections.ObservableSet;
import javafx.css.PseudoClass;
import javafx.css.Styleable;
import javafx.event.Event;
import javafx.event.EventDispatchChain;
import javafx.event.EventHandler;
import javafx.event.EventTarget;
import javafx.event.EventType;
import javafx.scene.Node;
import com.sun.javafx.scene.control.skin.Utils;
import com.sun.javafx.event.EventHandlerManager;
import java.util.HashMap;
import javafx.beans.property.ReadOnlyDoubleProperty;
import javafx.beans.property.ReadOnlyDoubleWrapper;
import javafx.beans.property.ReadOnlyObjectProperty;
import javafx.beans.property.ReadOnlyObjectWrapper;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.value.ObservableValue;
import javafx.collections.ObservableMap;
/**
* Table-like controls (such as {@link TableView} and {@link TreeTableView}) are
* made up of zero or more instances of a concrete TableColumnBase subclass
* ({@link TableColumn} and {@link TreeTableColumn}, respectively). Each
* table column in a table is responsible for displaying (and editing) the contents
* of that column. As well as being responsible for displaying and editing data
* for a single column, a table column also contains the necessary properties to:
* <ul>
* <li>Be resized (using {@link #minWidthProperty() minWidth}/{@link #prefWidthProperty() prefWidth}/{@link #maxWidthProperty() maxWidth}
* and {@link #widthProperty() width} properties)
* <li>Have its {@link #visibleProperty() visibility} toggled
* <li>Display {@link #textProperty() header text}
* <li>Display any {@link #getColumns() nested columns} it may contain
* <li>Have a {@link #contextMenuProperty() context menu} when the user
* right-clicks the column header area
* <li>Have the contents of the table be sorted (using
* {@link #comparatorProperty() comparator}, {@link #sortable sortable} and
* sortType).
* </ul>
 *
*
* When instantiating a concrete subclass of TableColumnBase, perhaps the two
* most important properties to set are the column {@link #textProperty() text}
* (what to show in the column header area), and the column
* {@code cell value factory} (which is used to populate individual cells in the
* column). Refer to the class documentation for {@link TableColumn} and
* {@link TreeTableColumn} for more information.
*
* @param <S> The type of the UI control (e.g. the type of the 'row').
* @param <T> The type of the content in all cells in this table column.
* @see TableColumn
* @see TreeTableColumn
* @see TablePositionBase
* @since JavaFX 8.0
*/
@IDProperty("id")
public abstract class TableColumnBase<S,T> implements EventTarget, Styleable {
/***************************************************************************
* *
* Static properties and methods *
* *
**************************************************************************/
// NOTE: If these numbers change, update the copy of this value in TableColumnHeader
    // Default width bounds applied to every new column instance.
    static final double DEFAULT_WIDTH = 80.0F;
    static final double DEFAULT_MIN_WIDTH = 10.0F;
    static final double DEFAULT_MAX_WIDTH = 5000.0F;
/**
* By default all columns will use this comparator to perform sorting. This
* comparator simply performs null checks, and checks if the object is
* {@link Comparable}. If it is, the {@link Comparable#compareTo(java.lang.Object)}
* method is called, otherwise this method will defer to
* {@link Collator#compare(java.lang.String, java.lang.String)}.
*/
    public static final Comparator DEFAULT_COMPARATOR = (obj1, obj2) -> {
        // Nulls order first: two nulls compare equal, a lone null is smaller.
        if (obj1 == null && obj2 == null) return 0;
        if (obj1 == null) return -1;
        if (obj2 == null) return 1;

        // Natural ordering when both operands are Comparable and type-compatible —
        // except Strings, which go through the default-locale Collator.
        if (obj1 instanceof Comparable && (obj1.getClass() == obj2.getClass() || obj1.getClass().isAssignableFrom(obj2.getClass()))) {
            return (obj1 instanceof String) ? Collator.getInstance().compare(obj1, obj2) : ((Comparable)obj1).compareTo(obj2);
        }

        // Fallback: collate the string representations of both operands.
        return Collator.getInstance().compare(obj1.toString(), obj2.toString());
    };
/***************************************************************************
* *
* Constructors *
* *
**************************************************************************/
    /**
     * Creates a default TableColumn with default cell factory, comparator, and
     * onEditCommit implementation.
     */
    protected TableColumnBase() {
        this(""); // delegate to the String constructor with an empty header text
    }

    /**
     * Creates a TableColumn with the text set to the provided string, with
     * default cell factory, comparator, and onEditCommit implementation.
     * @param text The string to show when the TableColumn is placed within the TableView.
     */
    protected TableColumnBase(String text) {
        setText(text);
    }
/***************************************************************************
* *
* Listeners *
* *
**************************************************************************/
/***************************************************************************
* *
* Instance Variables *
* *
**************************************************************************/
final EventHandlerManager eventHandlerManager = new EventHandlerManager(this);
/***************************************************************************
* *
* Properties *
* *
**************************************************************************/
    // --- Text
    /**
     * This is the text to show in the header for this column.
     * @defaultValue empty string
     */
    private StringProperty text = new SimpleStringProperty(this, "text", "");
    public final StringProperty textProperty() { return text; }
    public final void setText(String value) { text.set(value); }
    public final String getText() { return text.get(); }

    // --- Visible
    /**
     * Toggling this will immediately toggle the visibility of this column,
     * and all children columns.
     * @defaultValue true
     */
    private BooleanProperty visible = new SimpleBooleanProperty(this, "visible", true) {
        @Override protected void invalidated() {
            // Propagate the new visibility to every child column. This isn't ideal,
            // for example if a child column is hidden, then the parent hidden and
            // shown, all columns will be visible again.
            //
            // TODO It may make sense for us to cache the visibility so that we may
            // return to exactly the same state.
            for (TableColumnBase<S,?> col : getColumns()) {
                col.setVisible(isVisible());
            }
        }
    };
    public final void setVisible(boolean value) { visibleProperty().set(value); }
    public final boolean isVisible() { return visible.get(); }
    public final BooleanProperty visibleProperty() { return visible; }
    // --- Parent Column
    /**
     * This read-only property will always refer to the parent of this column,
     * in the situation where nested columns are being used.
     *
     * <p>In the currently existing subclasses, to create a nested
     * column is simply a matter of placing the relevant TableColumnBase instances
     * inside the columns ObservableList (for example, see
     * {@link javafx.scene.control.TableColumn#getColumns()} and
     * {@link javafx.scene.control.TreeTableColumn#getColumns()}.
     */
    private ReadOnlyObjectWrapper<TableColumnBase<S,?>> parentColumn;
    // Package-private: only code in this package can set the parent; external
    // callers see the read-only property below.
    void setParentColumn(TableColumnBase<S,?> value) { parentColumnPropertyImpl().set(value); }
    public final TableColumnBase<S,?> getParentColumn() {
        return parentColumn == null ? null : parentColumn.get();
    }
    public final ReadOnlyObjectProperty<TableColumnBase<S,?>> parentColumnProperty() {
        return parentColumnPropertyImpl().getReadOnlyProperty();
    }
    // Lazily creates the backing wrapper so columns that are never nested pay no cost.
    private ReadOnlyObjectWrapper<TableColumnBase<S,?>> parentColumnPropertyImpl() {
        if (parentColumn == null) {
            parentColumn = new ReadOnlyObjectWrapper<TableColumnBase<S,?>>(this, "parentColumn");
        }
        return parentColumn;
    }
    // --- Menu
    /**
     * This menu will be shown whenever the user right clicks within the header
     * area of this TableColumnBase.
     */
    private ObjectProperty<ContextMenu> contextMenu;
    public final void setContextMenu(ContextMenu value) { contextMenuProperty().set(value); }
    public final ContextMenu getContextMenu() { return contextMenu == null ? null : contextMenu.get(); }
    public final ObjectProperty<ContextMenu> contextMenuProperty() {
        if (contextMenu == null) {
            contextMenu = new SimpleObjectProperty<ContextMenu>(this, "contextMenu") {
                // Weakly remembers the previously set menu so its accelerators can be
                // unhooked on replacement without preventing its garbage collection.
                private WeakReference<ContextMenu> contextMenuRef;

                @Override protected void invalidated() {
                    // First remove the accelerators installed for the old menu, if it
                    // is still reachable.
                    ContextMenu oldMenu = contextMenuRef == null ? null : contextMenuRef.get();
                    if (oldMenu != null) {
                        ControlAcceleratorSupport.removeAcceleratorsFromScene(oldMenu.getItems(), TableColumnBase.this);
                    }

                    ContextMenu ctx = get();
                    contextMenuRef = new WeakReference<>(ctx);
                    if (ctx != null) {
                        // if a context menu is set, we need to install any accelerators
                        // belonging to its menu items ASAP into the scene that this
                        // Control is in (if the control is not in a Scene, we will need
                        // to wait until it is and then do it).
                        ControlAcceleratorSupport.addAcceleratorsIntoScene(ctx.getItems(), TableColumnBase.this);
                    }
                }
            };
        }
        return contextMenu;
    }
    // --- Id
    /**
     * The id of this TableColumnBase. This simple string identifier is useful
     * for finding a specific TableColumnBase within a UI control that uses
     * TableColumnBase instances. The default value is {@code null}.
     *
     * @defaultValue null
     */
    private StringProperty id;
    public final void setId(String value) { idProperty().set(value); }
    @Override public final String getId() { return id == null ? null : id.get(); }
    // Lazily created: most columns never have an id assigned.
    public final StringProperty idProperty() {
        if (id == null) {
            id = new SimpleStringProperty(this, "id");
        }
        return id;
    }

    // --- style
    /**
     * A string representation of the CSS style associated with this
     * TableColumnBase instance. This is analogous to the "style" attribute of an
     * HTML element. Note that, like the HTML style attribute, this
     * variable contains style properties and values and not the
     * selector portion of a style rule.
     * <p>
     * Parsing this style might not be supported on some limited
     * platforms. It is recommended to use a standalone CSS file instead.
     *
     * @defaultValue empty string
     */
    private StringProperty style;
    public final void setStyle(String value) { styleProperty().set(value); }
    @Override public final String getStyle() { return style == null ? "" : style.get(); }
    // Lazily created; getStyle() substitutes the empty string while unset.
    public final StringProperty styleProperty() {
        if (style == null) {
            style = new SimpleStringProperty(this, "style");
        }
        return style;
    }
    // --- Style class
    // Eagerly created (unlike most other lazy properties here) since the CSS
    // engine consults it for every column.
    private final ObservableList<String> styleClass = FXCollections.observableArrayList();
    /**
     * A list of String identifiers which can be used to logically group
     * Nodes, specifically for an external style engine. This variable is
     * analogous to the "class" attribute on an HTML element and, as such,
     * each element of the list is a style class to which this Node belongs.
     *
     * @see <a href="http://www.w3.org/TR/css3-selectors/#class-html">CSS3 class selectors</a>
     */
    @Override public ObservableList<String> getStyleClass() {
        return styleClass;
    }

    // --- Graphic
    /**
     * <p>The graphic to show in the table column to allow the user to
     * indicate graphically what is in the column. </p>
     * @defaultValue null
     */
    private ObjectProperty<Node> graphic;
    public final void setGraphic(Node value) {
        graphicProperty().set(value);
    }
    public final Node getGraphic() {
        return graphic == null ? null : graphic.get();
    }
    // Lazily created backing property.
    public final ObjectProperty<Node> graphicProperty() {
        if (graphic == null) {
            graphic = new SimpleObjectProperty<Node>(this, "graphic");
        }
        return graphic;
    }

    // --- Sort node
    /**
     * <p>The node to use as the "sort arrow", shown to the user in situations where
     * the table column is part of the sort order. It may be the only item in
     * the sort order, or it may be a secondary, tertiary, or latter sort item,
     * and the node should reflect this visually. This is only used in the case of
     * the table column being in the sort order (refer to, for example,
     * {@link TableView#getSortOrder()} and {@link TreeTableView#getSortOrder()}).
     * If not specified, the table column skin implementation is responsible for
     * providing a default sort node.
     *
     * <p>The sort node is commonly seen represented as a triangle that rotates
     * on screen to indicate whether the table column is part of the sort order,
     * and if so, whether the sort is ascending or descending, and what position in
     * the sort order it is in.
     */
    private ObjectProperty<Node> sortNode = new SimpleObjectProperty<Node>(this, "sortNode");
    public final void setSortNode(Node value) { sortNodeProperty().set(value); }
    public final Node getSortNode() { return sortNode.get(); }
    public final ObjectProperty<Node> sortNodeProperty() { return sortNode; }
    // --- Width
    /**
     * The width of this column. Modifying this will result in the column width
     * adjusting visually. It is recommended to not bind this property to an
     * external property, as that will result in the column width not being
     * adjustable by the user through dragging the left and right borders of
     * column headers.
     */
    public final ReadOnlyDoubleProperty widthProperty() { return width.getReadOnlyProperty(); }
    public final double getWidth() { return width.get(); }
    // Package-private: width is adjusted internally (resize policies, user drags),
    // never directly by client code.
    void setWidth(double value) { width.set(value); }
    private ReadOnlyDoubleWrapper width = new ReadOnlyDoubleWrapper(this, "width", DEFAULT_WIDTH);

    // --- Minimum Width
    /**
     * The minimum width the table column is permitted to be resized to.
     * @defaultValue 10
     */
    private DoubleProperty minWidth;
    public final void setMinWidth(double value) { minWidthProperty().set(value); }
    public final double getMinWidth() { return minWidth == null ? DEFAULT_MIN_WIDTH : minWidth.get(); }
    public final DoubleProperty minWidthProperty() {
        if (minWidth == null) {
            minWidth = new SimpleDoubleProperty(this, "minWidth", DEFAULT_MIN_WIDTH) {
                @Override protected void invalidated() {
                    // Negative minimums are clamped to zero, then the current width is
                    // pushed through impl_setWidth (defined elsewhere in this class) so
                    // it can be re-validated against the new bound.
                    if (getMinWidth() < 0) {
                        setMinWidth(0.0F);
                    }
                    impl_setWidth(getWidth());
                }
            };
        }
        return minWidth;
    }

    // --- Preferred Width
    /**
     * The preferred width of the TableColumn.
     * @defaultValue 80
     */
    public final DoubleProperty prefWidthProperty() { return prefWidth; }
    public final void setPrefWidth(double value) { prefWidthProperty().set(value); }
    public final double getPrefWidth() { return prefWidth.get(); }
    private final DoubleProperty prefWidth = new SimpleDoubleProperty(this, "prefWidth", DEFAULT_WIDTH) {
        @Override protected void invalidated() {
            // A new preference is applied to the actual width immediately.
            impl_setWidth(getPrefWidth());
        }
    };

    // --- Maximum Width
    // The table does not resize properly if this is set to Number.MAX_VALUE,
    // so I've arbitrarily chosen a better, smaller number.
    /**
     * The maximum width the table column is permitted to be resized to.
     * @defaultValue 5000
     */
    public final DoubleProperty maxWidthProperty() { return maxWidth; }
    public final void setMaxWidth(double value) { maxWidthProperty().set(value); }
    public final double getMaxWidth() { return maxWidth.get(); }
    private DoubleProperty maxWidth = new SimpleDoubleProperty(this, "maxWidth", DEFAULT_MAX_WIDTH) {
        @Override protected void invalidated() {
            // Re-validate the current width against the new maximum.
            impl_setWidth(getWidth());
        }
    };
    // --- Resizable
    /**
     * Used to indicate whether the width of this column can change. It is up
     * to the resizing policy to enforce this however.
     * @defaultValue true
     */
    private BooleanProperty resizable;
    // Lazily created; isResizable() reports true while unset.
    public final BooleanProperty resizableProperty() {
        if (resizable == null) {
            resizable = new SimpleBooleanProperty(this, "resizable", true);
        }
        return resizable;
    }
    public final void setResizable(boolean value) {
        resizableProperty().set(value);
    }
    public final boolean isResizable() {
        return resizable == null ? true : resizable.get();
    }

    // --- Sortable
    /**
     * <p>A boolean property to toggle on and off the 'sortability' of this column.
     * When this property is true, this column can be included in sort
     * operations. If this property is false, it will not be included in sort
     * operations, even if it is contained within the sort order list of the
     * underlying UI control (e.g. {@link TableView#getSortOrder()} or
     * {@link TreeTableView#getSortOrder()}).</p>
     *
     * <p>For example, if a TableColumn instance is contained within the TableView sortOrder
     * ObservableList, and its sortable property toggles state, it will force the
     * TableView to perform a sort, as it is likely the view will need updating.</p>
     * @defaultValue true
     */
    private BooleanProperty sortable;
    // Lazily created; isSortable() reports true while unset.
    public final BooleanProperty sortableProperty() {
        if (sortable == null) {
            sortable = new SimpleBooleanProperty(this, "sortable", true);
        }
        return sortable;
    }
    public final void setSortable(boolean value) {
        sortableProperty().set(value);
    }
    public final boolean isSortable() {
        return sortable == null ? true : sortable.get();
    }
    // --- Reorderable
    // Internal flag (impl_ prefix): whether the user may drag this column to a
    // new position. Lazily created; defaults to true while unset.
    private BooleanProperty reorderable;
    /**
     * @treatAsPrivate implementation detail
     * @deprecated This is an internal API that is not intended for use and will be removed in the next version
     */
    @Deprecated
    public final BooleanProperty impl_reorderableProperty() {
        if (reorderable == null) {
            reorderable = new SimpleBooleanProperty(this, "reorderable", true);
        }
        return reorderable;
    }
    /**
     * @treatAsPrivate implementation detail
     * @deprecated This is an internal API that is not intended for use and will be removed in the next version
     */
    @Deprecated
    public final void impl_setReorderable(boolean value) {
        impl_reorderableProperty().set(value);
    }
    /**
     * @treatAsPrivate implementation detail
     * @deprecated This is an internal API that is not intended for use and will be removed in the next version
     */
    @Deprecated
    public final boolean impl_isReorderable() {
        return reorderable == null ? true : reorderable.get();
    }

    // --- fixed
    // (not used in JavaFX 8.0, but added for easier exploration of the domain
    // for releases post-8.0, as well as open source projects)
    // Lazily created; defaults to false while unset.
    private BooleanProperty fixed;
    /**
     * @treatAsPrivate implementation detail
     * @deprecated This is an internal API that is not intended for use and will be removed in the next version
     */
    @Deprecated
    public final BooleanProperty impl_fixedProperty() {
        if (fixed == null) {
            fixed = new SimpleBooleanProperty(this, "fixed", false);
        }
        return fixed;
    }
    /**
     * @treatAsPrivate implementation detail
     * @deprecated This is an internal API that is not intended for use and will be removed in the next version
     */
    @Deprecated
    public final void impl_setFixed(boolean value) {
        impl_fixedProperty().set(value);
    }
    /**
     * @treatAsPrivate implementation detail
     * @deprecated This is an internal API that is not intended for use and will be removed in the next version
     */
    @Deprecated
    public final boolean impl_isFixed() {
        return fixed == null ? false : fixed.get();
    }
    // --- Comparator
    /**
     * Comparator function used when sorting this table column. The two Objects
     * given as arguments are the cell data for two individual cells in this
     * column.
     * @defaultValue {@link #DEFAULT_COMPARATOR}
     */
    private ObjectProperty<Comparator<T>> comparator;
    // Lazily created; getComparator() substitutes DEFAULT_COMPARATOR while unset.
    public final ObjectProperty<Comparator<T>> comparatorProperty() {
        if (comparator == null) {
            comparator = new SimpleObjectProperty<Comparator<T>>(this, "comparator", DEFAULT_COMPARATOR);
        }
        return comparator;
    }
    public final void setComparator(Comparator<T> value) {
        comparatorProperty().set(value);
    }
    public final Comparator<T> getComparator() {
        return comparator == null ? DEFAULT_COMPARATOR : comparator.get();
    }

    // --- Editable
    /**
     * Specifies whether this table column allows editing. This, unlike
     * {@link TableView#editableProperty()} and
     * {@link TreeTableView#editableProperty()}, is true by default.
     * @defaultValue true
     */
    private BooleanProperty editable;
    public final void setEditable(boolean value) {
        editableProperty().set(value);
    }
    public final boolean isEditable() {
        return editable == null ? true : editable.get();
    }
    // Lazily created; isEditable() reports true while unset.
    public final BooleanProperty editableProperty() {
        if (editable == null) {
            editable = new SimpleBooleanProperty(this, "editable", true);
        }
        return editable;
    }
    // --- Properties
    // Sentinel key under which setUserData/getUserData store their value in the
    // properties map, keeping it from colliding with client-chosen keys.
    private static final Object USER_DATA_KEY = new Object();

    // A map containing a set of properties for this TableColumn
    private ObservableMap<Object, Object> properties;

    /**
     * Returns an observable map of properties on this table column for use
     * primarily by application developers.
     *
     * @return an observable map of properties on this table column for use
     * primarily by application developers
     */
    public final ObservableMap<Object, Object> getProperties() {
        // Lazily created on first access.
        if (properties == null) {
            properties = FXCollections.observableMap(new HashMap<Object, Object>());
        }
        return properties;
    }

    /**
     * Tests if this table column has properties.
     * @return true if node has properties.
     */
    public boolean hasProperties() {
        // True only when the map was created AND something was stored in it.
        return properties != null && ! properties.isEmpty();
    }

    // --- UserData
    /**
     * Convenience method for setting a single Object property that can be
     * retrieved at a later date. This is functionally equivalent to calling
     * the getProperties().put(Object key, Object value) method. This can later
     * be retrieved by calling {@link TableColumnBase#getUserData()}.
     *
     * @param value The value to be stored - this can later be retrieved by calling
     * {@link TableColumnBase#getUserData()}.
     */
    public void setUserData(Object value) {
        getProperties().put(USER_DATA_KEY, value);
    }

    /**
     * Returns a previously set Object property, or null if no such property
     * has been set using the {@link TableColumnBase#setUserData(java.lang.Object)} method.
     *
     * @return The Object that was previously set, or null if no property
     * has been set or if null was set.
     */
    public Object getUserData() {
        return getProperties().get(USER_DATA_KEY);
    }
    /***************************************************************************
     *                                                                         *
     * Public API                                                              *
     *                                                                         *
     **************************************************************************/

    /**
     * This enables support for nested columns, which can be useful to group
     * together related data. For example, we may have a 'Name' column with
     * two nested columns for 'First' and 'Last' names.
     *
     * <p>This has no impact on the table as such - all column indices point to the
     * leaf columns only, and it isn't possible to sort using the parent column,
     * just the leaf columns. In other words, this is purely a visual feature.</p>
     *
     * @return An ObservableList containing TableColumnBase instances (or subclasses)
     *      that are the children of this TableColumnBase. If these children
     *      TableColumnBase instances are set as visible, they will appear
     *      beneath this table column.
     */
    public abstract ObservableList<? extends TableColumnBase<S,?>> getColumns();
/**
* Returns the actual value for a cell at a given row index (and which
* belongs to this table column).
*
* @param index The row index for which the data is required.
* @return The data that belongs to the cell at the intersection of the given
* row index and the table column that this method is called on.
*/
public final T getCellData(final int index) {
ObservableValue<T> result = getCellObservableValue(index);
return result == null ? null : result.getValue();
}
/**
* Returns the actual value for a cell from the given item.
*
* @param item The item from which a value of type T should be extracted.
* @return The data that should be used in a specific cell in this
* column, based on the item passed in as an argument.
*/
public final T getCellData(final S item) {
ObservableValue<T> result = getCellObservableValue(item);
return result == null ? null : result.getValue();
}
    /**
     * Attempts to return an {@code ObservableValue<T>} for the item in the given
     * index (which is of type S). In other words, this method expects to receive
     * an integer value that is greater than or equal to zero, and less than the
     * size of the underlying data model. If the index is
     * valid, this method will return an {@code ObservableValue<T>} for this
     * specific column.
     *
     * <p>This is achieved by calling the {@code cell value factory}, and
     * returning whatever it returns when passed a {@code CellDataFeatures} (see,
     * for example, the CellDataFeatures classes belonging to
     * {@link TableColumn.CellDataFeatures TableColumn} and
     * {@link TreeTableColumn.CellDataFeatures TreeTableColumn} for more
     * information).
     *
     * @param index The index of the item (of type S) for which an
     *      {@code ObservableValue<T>} is sought.
     * @return An {@code ObservableValue<T>} for this specific table column.
     */
    public abstract ObservableValue<T> getCellObservableValue(int index);
    /**
     * Attempts to return an {@code ObservableValue<T>} for the given item (which
     * is of type S). In other words, this method expects to receive an object from
     * the underlying data model for the entire 'row' in the table, and it must
     * return an {@code ObservableValue<T>} for the value in this specific column.
     *
     * <p>This is achieved by calling the {@code cell value factory}, and
     * returning whatever it returns when passed a {@code CellDataFeatures} (see,
     * for example, the CellDataFeatures classes belonging to
     * {@link TableColumn.CellDataFeatures TableColumn} and
     * {@link TreeTableColumn.CellDataFeatures TreeTableColumn} for more
     * information).
     *
     * @param item The item (of type S) for which an {@code ObservableValue<T>} is
     *      sought.
     * @return An {@code ObservableValue<T>} for this specific table column.
     */
    public abstract ObservableValue<T> getCellObservableValue(S item);
    /** {@inheritDoc} */
    @Override public EventDispatchChain buildEventDispatchChain(EventDispatchChain tail) {
        // Route events through this column's handler manager before the
        // rest of the chain.
        return tail.prepend(eventHandlerManager);
    }
    /**
     * Registers an event handler to this table column. The TableColumnBase class allows
     * registration of listeners which will be notified when editing occurs.
     * Note however that TableColumnBase is <b>not</b> a Node, and therefore no visual
     * events will be fired on it.
     *
     * @param eventType the type of the events to receive by the handler
     * @param eventHandler the handler to register
     * @throws NullPointerException if the event type or handler is null
     */
    public <E extends Event> void addEventHandler(EventType<E> eventType, EventHandler<E> eventHandler) {
        // Delegates to the shared per-column event handler manager.
        eventHandlerManager.addEventHandler(eventType, eventHandler);
    }
    /**
     * Unregisters a previously registered event handler from this table column. One
     * handler might have been registered for different event types, so the
     * caller needs to specify the particular event type from which to
     * unregister the handler.
     *
     * @param eventType the event type from which to unregister
     * @param eventHandler the handler to unregister
     * @throws NullPointerException if the event type or handler is null
     */
    public <E extends Event> void removeEventHandler(EventType<E> eventType, EventHandler<E> eventHandler) {
        eventHandlerManager.removeEventHandler(eventType, eventHandler);
    }
    /***************************************************************************
     *                                                                         *
     * Private Implementation                                                  *
     *                                                                         *
     **************************************************************************/

    /**
     * @treatAsPrivate implementation detail
     * @deprecated This is an internal API that is not intended for use and will be removed in the next version
     */
    @Deprecated
    public void impl_setWidth(double width) {
        // Clamp the requested width into [minWidth, maxWidth] before applying.
        setWidth(Utils.boundedSize(width, getMinWidth(), getMaxWidth()));
    }
void updateColumnWidths() {
if (! getColumns().isEmpty()) {
// zero out the width and min width values, and iterate to
// ensure the new value is equal to the sum of all children
// columns
double _minWidth = 0.0f;
double _prefWidth = 0.0f;
double _maxWidth = 0.0f;
for (TableColumnBase<S, ?> col : getColumns()) {
col.setParentColumn(this);
_minWidth += col.getMinWidth();
_prefWidth += col.getPrefWidth();
_maxWidth += col.getMaxWidth();
}
setMinWidth(_minWidth);
setPrefWidth(_prefWidth);
setMaxWidth(_maxWidth);
}
}
    /***************************************************************************
     *                                                                         *
     * Stylesheet Handling                                                     *
     *                                                                         *
     **************************************************************************/

    /**
     * {@inheritDoc}
     */
    public final ObservableSet<PseudoClass> getPseudoClassStates() {
        // Table columns expose no pseudo-class state of their own.
        return FXCollections.emptyObservableSet();
    }
/***************************************************************************
* *
* Support Interfaces *
* *
**************************************************************************/
}
| gpl-2.0 |
vnu-dse/rtl | src/test/org/tzi/use/graph/GraphTest.java | 8082 | /*
* USE - UML based specification environment
* Copyright (C) 1999-2004 Mark Richters, University of Bremen
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License as
* published by the Free Software Foundation; either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*/
// $Id: GraphTest.java 1296 2010-02-25 15:54:57Z lhamann $
package org.tzi.use.graph;
import java.util.Arrays;
import java.util.HashSet;
import junit.framework.TestCase;
/**
 * Tests the directed graph implementation: construction, connectivity
 * counts, node-set views and closures, path existence, cycle detection,
 * and node/edge removal.
 *
 * @version $ProjectVersion: 0.393 $
 * @author Mark Richters
 * @see DirectedGraph
 */
public class GraphTest extends TestCase {

    /** Cyclic graph with a self-loop and parallel edges between 2 and 4. */
    public void test1() {
        DirectedGraph<Integer, DirectedEdgeBase<Integer>> g;
        Integer n0, n1, n2, n3, n4, n5;
        DirectedEdgeBase<Integer> e23, e24a, e24b, e42;

        /*  +--------+
            v        |
            0 -----> 1 <===> 2 <===> 4 ----> 5
            ^       (self)   |       ^
            |                v       |
            +--------------- 3 ------+       */

        // graph creation -- Integer.valueOf avoids the deprecated
        // Integer(int) constructor; values 0..5 are distinct so the nodes
        // remain distinct objects.
        g = new DirectedGraphBase<Integer, DirectedEdgeBase<Integer>>();
        g.add(n0 = Integer.valueOf(0));
        g.add(n1 = Integer.valueOf(1));
        g.add(n2 = Integer.valueOf(2));
        g.add(n3 = Integer.valueOf(3));
        g.add(n4 = Integer.valueOf(4));
        g.add(n5 = Integer.valueOf(5));
        assertEquals(6, g.size());

        // connect nodes
        g.addEdge(new DirectedEdgeBase<Integer>(n0, n1));
        g.addEdge(new DirectedEdgeBase<Integer>(n1, n1));
        g.addEdge(new DirectedEdgeBase<Integer>(n1, n2));
        g.addEdge(new DirectedEdgeBase<Integer>(n2, n1));
        g.addEdge(e23 = new DirectedEdgeBase<Integer>(n2, n3));
        g.addEdge(e24a = new DirectedEdgeBase<Integer>(n2, n4));
        g.addEdge(e24b = new DirectedEdgeBase<Integer>(n2, n4));
        g.addEdge(new DirectedEdgeBase<Integer>(n3, n0));
        g.addEdge(e42 = new DirectedEdgeBase<Integer>(n4, n2));
        g.addEdge(new DirectedEdgeBase<Integer>(n4, n5));
        assertEquals(10, g.numEdges());

        // graph connectivity
        assertEquals(1, g.numIncomingEdges(n0));
        assertEquals(3, g.numIncomingEdges(n1));
        assertEquals(2, g.numIncomingEdges(n2));
        assertEquals(1, g.numIncomingEdges(n3));
        assertEquals(2, g.numIncomingEdges(n4));
        assertEquals(1, g.numIncomingEdges(n5));
        assertEquals(1, g.numOutgoingEdges(n0));
        assertEquals(2, g.numOutgoingEdges(n1));
        assertEquals(4, g.numOutgoingEdges(n2));
        assertEquals(1, g.numOutgoingEdges(n3));
        assertEquals(2, g.numOutgoingEdges(n4));
        assertEquals(0, g.numOutgoingEdges(n5));

        // node set views
        assertEquals(3, g.targetNodeSet(n2).size());
        assertEquals(2, g.targetNodeSet(n4).size());
        assertEquals(0, g.targetNodeSet(n5).size());
        assertEquals(3, g.sourceNodeSet(n1).size());
        assertEquals(2, g.sourceNodeSet(n2).size());
        assertEquals(1, g.sourceNodeSet(n4).size());
        assertEquals(1, g.sourceNodeSet(n5).size());

        // every node except 5 lies on the central cycle, so the closures of
        // nodes 0..4 reach all six nodes
        HashSet<Integer> s = new HashSet<Integer>();
        Integer[] ia;
        ia = new Integer[] { n0, n1, n2, n3, n4, n5 };
        s.addAll(Arrays.asList(ia));
        assertEquals(s, g.targetNodeClosureSet(n0));
        assertEquals(s, g.targetNodeClosureSet(n1));
        assertEquals(s, g.targetNodeClosureSet(n2));
        assertEquals(s, g.targetNodeClosureSet(n3));
        assertEquals(s, g.targetNodeClosureSet(n4));

        // node 5 is a sink: nothing is reachable from it
        s = new HashSet<Integer>();
        assertEquals(s, g.targetNodeClosureSet(n5));

        ia = new Integer[] { n0, n1, n2, n3, n4 };
        s.addAll(Arrays.asList(ia));
        assertEquals(s, g.sourceNodeClosureSet(n0));
        assertEquals(s, g.sourceNodeClosureSet(n1));
        assertEquals(s, g.sourceNodeClosureSet(n2));
        assertEquals(s, g.sourceNodeClosureSet(n3));
        assertEquals(s, g.sourceNodeClosureSet(n4));

        // path existence (assertTrue/assertFalse instead of
        // assertEquals(true/false, ...))
        assertTrue(g.existsPath(n0, n0));
        assertTrue(g.existsPath(n0, n1));
        assertTrue(g.existsPath(n0, n2));
        assertTrue(g.existsPath(n1, n1));
        assertTrue(g.existsPath(n3, n4));
        assertTrue(g.existsPath(n2, n1));
        assertTrue(g.existsPath(n3, n2));
        assertTrue(g.existsPath(n4, n3));
        assertTrue(g.existsPath(n0, n5));
        assertFalse(g.existsPath(n5, n3));

        // edgesBetween returns all edges in either direction, including the
        // parallel pair e24a/e24b and the reverse edge e42
        HashSet<DirectedEdge<Integer>> s2 = new HashSet<DirectedEdge<Integer>>();
        s2.add(e23);
        assertEquals(s2, g.edgesBetween(n2, n3));
        s2.clear();
        s2.add(e24a);
        s2.add(e24b);
        s2.add(e42);
        assertEquals(s2, g.edgesBetween(n2, n4));

        // cycles
        assertTrue(g.hasCycle());

        // node deletion also removes the node's incident edges
        assertTrue(g.remove(n0));
        assertEquals(5, g.size());
        assertEquals(8, g.numEdges());

        // edge deletion leaves the node count untouched
        assertTrue(g.removeEdge(e23));
        assertEquals(5, g.size());
        assertEquals(7, g.numEdges());
    }

    /** Acyclic graph: same connectivity checks, no cycle expected. */
    public void test2() {
        DirectedGraph<Integer, DirectedEdgeBase<Integer>> g;
        Integer n0, n1, n2, n3, n4, n5;

        /*  0 -----> 1 ----> 2 -----> 4 ----> 5
                             |        ^
                             v        |
                             3 -------+       */

        // graph creation
        g = new DirectedGraphBase<Integer, DirectedEdgeBase<Integer>>();
        g.add(n0 = Integer.valueOf(0));
        g.add(n1 = Integer.valueOf(1));
        g.add(n2 = Integer.valueOf(2));
        g.add(n3 = Integer.valueOf(3));
        g.add(n4 = Integer.valueOf(4));
        g.add(n5 = Integer.valueOf(5));
        assertEquals(6, g.size());

        // connect nodes
        g.addEdge(new DirectedEdgeBase<Integer>(n0, n1));
        g.addEdge(new DirectedEdgeBase<Integer>(n1, n2));
        g.addEdge(new DirectedEdgeBase<Integer>(n2, n3));
        g.addEdge(new DirectedEdgeBase<Integer>(n2, n4));
        g.addEdge(new DirectedEdgeBase<Integer>(n4, n5));
        g.addEdge(new DirectedEdgeBase<Integer>(n3, n4));
        assertEquals(6, g.numEdges());

        // graph connectivity
        assertEquals(0, g.numIncomingEdges(n0));
        assertEquals(1, g.numIncomingEdges(n1));
        assertEquals(1, g.numIncomingEdges(n2));
        assertEquals(1, g.numIncomingEdges(n3));
        assertEquals(2, g.numIncomingEdges(n4));
        assertEquals(1, g.numIncomingEdges(n5));
        assertEquals(1, g.numOutgoingEdges(n0));
        assertEquals(1, g.numOutgoingEdges(n1));
        assertEquals(2, g.numOutgoingEdges(n2));
        assertEquals(1, g.numOutgoingEdges(n3));
        assertEquals(1, g.numOutgoingEdges(n4));
        assertEquals(0, g.numOutgoingEdges(n5));

        // cycles
        assertFalse(g.hasCycle());
    }
}
| gpl-2.0 |
ianopolous/JPC | src/org/jpc/emulator/execution/opcodes/vm/fdivp_ST0_ST2.java | 2070 | /*
JPC: An x86 PC Hardware Emulator for a pure Java Virtual Machine
Copyright (C) 2012-2013 Ian Preston
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as published by
the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Details (including contact information) can be found at:
jpc.sourceforge.net
or the developer website
sourceforge.net/projects/jpc/
End of licence header
*/
package org.jpc.emulator.execution.opcodes.vm;
import org.jpc.emulator.execution.*;
import org.jpc.emulator.execution.decoder.*;
import org.jpc.emulator.processor.*;
import org.jpc.emulator.processor.fpu64.*;
import static org.jpc.emulator.processor.Processor.*;
public class fdivp_ST0_ST2 extends Executable
{
public fdivp_ST0_ST2(int blockStart, int eip, int prefices, PeekableInputStream input)
{
super(blockStart, eip);
int modrm = input.readU8();
}
public Branch execute(Processor cpu)
{
double freg0 = cpu.fpu.ST(0);
double freg1 = cpu.fpu.ST(2);
if (((freg0 == 0.0) && (freg1 == 0.0)) || (Double.isInfinite(freg0) && Double.isInfinite(freg1)))
cpu.fpu.setInvalidOperation();
if ((freg1 == 0.0) && !Double.isNaN(freg0) && !Double.isInfinite(freg0))
cpu.fpu.setZeroDivide();
cpu.fpu.setST(0, freg0/freg1);
cpu.fpu.pop();
return Branch.None;
}
public boolean isBranch()
{
return false;
}
public String toString()
{
return this.getClass().getName();
}
} | gpl-2.0 |
rfdrake/opennms | tests/mock-elements/src/main/java/org/opennms/netmgt/mock/MockMonitor.java | 3082 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2012 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2012 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.netmgt.mock;
import java.util.Map;
import org.opennms.netmgt.model.PollStatus;
import org.opennms.netmgt.poller.MonitoredService;
import org.opennms.netmgt.poller.ServiceMonitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A {@link ServiceMonitor} that polls services of a {@link MockNetwork}
 * fixture instead of a real network, for use in poller tests.
 */
public class MockMonitor implements ServiceMonitor {
    private static final Logger LOG = LoggerFactory.getLogger(MockMonitor.class);

    // The mock network fixture that this monitor polls against.
    private MockNetwork m_network;

    // The name of the service this monitor is responsible for.
    private String m_svcName;

    /**
     * Simple constructor so that the MockMonitor can be used as a placeholder {@link ServiceMonitor}
     * inside config files. Note: an instance created this way must not be
     * polled (m_network is null).
     */
    public MockMonitor() {}

    /**
     * @param network the mock network containing the services to poll
     * @param svcName the service name this monitor polls
     */
    public MockMonitor(MockNetwork network, String svcName) {
        m_network = network;
        m_svcName = svcName;
    }

    @Override
    public void initialize(MonitoredService svc) {
    }

    @Override
    public void initialize(Map<String, Object> parameters) {
    }

    /**
     * Looks up the monitored service in the mock network and delegates the
     * poll to it. Polls of unknown node/interface/service combinations are
     * recorded on the network as invalid and reported as
     * {@link PollStatus#unknown()}.
     *
     * <p>Synchronizes on the network so lookup and poll are atomic with
     * respect to concurrent network mutations.</p>
     */
    @Override
    public PollStatus poll(MonitoredService monSvc, Map<String, Object> parameters) {
        synchronized(m_network) {
            int nodeId = monSvc.getNodeId();
            String ipAddr = monSvc.getIpAddr();
            MockService svc = m_network.getService(nodeId, ipAddr, m_svcName);
            if (svc == null) {
                LOG.info("Invalid Poll: {}{}", ipAddr, m_svcName);
                m_network.receivedInvalidPoll(ipAddr, m_svcName);
                return PollStatus.unknown();
            } else {
                LOG.info("Poll: [{}{}{}]", svc.getInterface().getNode().getLabel(), ipAddr, m_svcName);
                PollStatus pollStatus = svc.poll();
                // Copy status code and reason into a fresh PollStatus rather
                // than returning the service's own instance.
                return PollStatus.get(pollStatus.getStatusCode(), pollStatus.getReason());
            }
        }
    }

    @Override
    public void release() {
    }

    @Override
    public void release(MonitoredService svc) {
    }
}
| gpl-2.0 |
AcademicTorrents/AcademicTorrents-Downloader | vuze/com/aelitis/azureus/core/networkmanager/impl/TransportHelper.java | 3035 | /*
* Created on 21 Jun 2006
* Created by Paul Gardner
* Copyright (C) 2006 Aelitis, All Rights Reserved.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* AELITIS, SAS au capital de 46,603.30 euros
* 8 Allee Lenotre, La Grille Royale, 78600 Le Mesnil le Roi, France.
*
*/
package com.aelitis.azureus.core.networkmanager.impl;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
/**
 * Abstraction over a network transport (e.g. a TCP or UDP connection)
 * providing non-blocking read/write operations and selector registration.
 */
public interface
TransportHelper
{
	/** Returns the remote address of this transport. */
	public InetSocketAddress
	getAddress();

	public String
	getName(boolean verbose);

	public boolean
	minimiseOverheads();

	public int
	getConnectTimeout();

	public int
	getReadTimeout();

	// Delayed-write support: a buffer can be queued for a later write.

	public boolean
	delayWrite(
		ByteBuffer	buffer );

	public boolean
	hasDelayedWrite();

	// Non-blocking I/O; all calls may transfer fewer bytes than requested.

	public int
	write(
		ByteBuffer	buffer,
		boolean		partial_write )

		throws IOException;

	public long
	write(
		ByteBuffer[]	buffers,
		int				array_offset,
		int				length )

		throws IOException;

	public int
	read(
		ByteBuffer	buffer )

		throws IOException;

	public long
	read(
		ByteBuffer[] 	buffers,
		int				array_offset,
		int				length )

		throws IOException;

	// Selector control: pause/resume/cancel interest in read/write readiness.

	public void
	pauseReadSelects();

	public void
	pauseWriteSelects();

	public void
	resumeReadSelects();

	public void
	resumeWriteSelects();

	public void
	registerForReadSelects(
		selectListener	listener,
		Object			attachment );

	public void
	registerForWriteSelects(
		selectListener	listener,
		Object			attachment );

	public void
	cancelReadSelects();

	public void
	cancelWriteSelects();

	public boolean
	isClosed();

	public void
	close(
		String	reason );

	public void
	failed(
		Throwable	reason );

	/**
	 * Callback interface notified when a registered select fires.
	 */
	public interface
	selectListener
	{
		public boolean
		selectSuccess(
			TransportHelper	helper,
			Object 			attachment );

		public void
		selectFailure(
			TransportHelper	helper,
			Object 			attachment,
			Throwable 		msg);
	}

	// Arbitrary key/value storage attached to this transport.

	public void
	setUserData(
		Object	key,
		Object	data );

	public Object
	getUserData(
		Object	key );

	public void
	setTrace(
		boolean	on );

	public void
	setScatteringMode(long forBytes);
}
| gpl-2.0 |
JanaWengenroth/GKA1 | libraries/jgraphx/src/com/mxgraph/util/svg/PathParser.java | 17786 | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.mxgraph.util.svg;
import java.io.IOException;
/**
* This class implements an event-based parser for the SVG path's d
* attribute values.
*
* @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
* @version $Id: PathParser.java,v 1.1 2012/11/15 13:26:45 gaudenz Exp $
*/
public class PathParser extends NumberParser
{
	/**
	 * The path handler used to report parse events.
	 */
	protected PathHandler pathHandler;

	/**
	 * Creates a new PathParser.
	 *
	 * @param handler the handler notified of every parsed path command
	 */
	public PathParser(PathHandler handler)
	{
		pathHandler = handler;
	}
	/**
	 * Allows an application to register a path handler.
	 *
	 * <p>If the application does not register a handler, all
	 * events reported by the parser will be silently ignored.
	 *
	 * <p>Applications may register a new or different handler in the
	 * middle of a parse, and the parser must begin using the new
	 * handler immediately.</p>
	 * @param handler The transform list handler.
	 */
	public void setPathHandler(PathHandler handler)
	{
		pathHandler = handler;
	}
	/**
	 * Returns the path handler in use.
	 *
	 * @return the currently registered handler
	 */
	public PathHandler getPathHandler()
	{
		return pathHandler;
	}
protected void doParse() throws ParseException, IOException
{
pathHandler.startPath();
current = reader.read();
loop: for (;;)
{
try
{
switch (current)
{
case 0xD:
case 0xA:
case 0x20:
case 0x9:
current = reader.read();
break;
case 'z':
case 'Z':
current = reader.read();
pathHandler.closePath();
break;
case 'm':
parsem();
break;
case 'M':
parseM();
break;
case 'l':
parsel();
break;
case 'L':
parseL();
break;
case 'h':
parseh();
break;
case 'H':
parseH();
break;
case 'v':
parsev();
break;
case 'V':
parseV();
break;
case 'c':
parsec();
break;
case 'C':
parseC();
break;
case 'q':
parseq();
break;
case 'Q':
parseQ();
break;
case 's':
parses();
break;
case 'S':
parseS();
break;
case 't':
parset();
break;
case 'T':
parseT();
break;
case 'a':
parsea();
break;
case 'A':
parseA();
break;
case -1:
break loop;
default:
reportUnexpected(current);
break;
}
}
catch (ParseException e)
{
errorHandler.error(e);
skipSubPath();
}
}
skipSpaces();
if (current != -1)
{
reportError("end.of.stream.expected", new Object[] { new Integer(
current) });
}
pathHandler.endPath();
}
	/**
	 * Parses a 'm' (relative moveto) command. Per the SVG specification,
	 * any coordinate pairs following the first are treated as relative
	 * lineto commands.
	 */
	protected void parsem() throws ParseException, IOException
	{
		current = reader.read();
		skipSpaces();

		float x = parseFloat();
		skipCommaSpaces();
		float y = parseFloat();

		pathHandler.movetoRel(x, y);

		boolean expectNumber = skipCommaSpaces2();
		_parsel(expectNumber);
	}
	/**
	 * Parses a 'M' (absolute moveto) command. Per the SVG specification,
	 * any coordinate pairs following the first are treated as absolute
	 * lineto commands.
	 */
	protected void parseM() throws ParseException, IOException
	{
		current = reader.read();
		skipSpaces();

		float x = parseFloat();
		skipCommaSpaces();
		float y = parseFloat();

		pathHandler.movetoAbs(x, y);

		boolean expectNumber = skipCommaSpaces2();
		_parseL(expectNumber);
	}
	/**
	 * Parses a 'l' (relative lineto) command; at least one coordinate
	 * pair is required.
	 */
	protected void parsel() throws ParseException, IOException
	{
		current = reader.read();
		skipSpaces();
		_parsel(true);
	}
	/**
	 * Parses the coordinate pairs of a relative lineto command, emitting
	 * one linetoRel event per pair, until a non-number character is found.
	 *
	 * @param expectNumber whether at least one coordinate pair must follow
	 *                     (a missing pair is then reported as an error)
	 */
	protected void _parsel(boolean expectNumber) throws ParseException,
			IOException
	{
		for (;;)
		{
			switch (current)
			{
				default:
					// not the start of a number: done (or an error if one
					// was required)
					if (expectNumber)
						reportUnexpected(current);
					return;

				case '+':
				case '-':
				case '.':
				case '0':
				case '1':
				case '2':
				case '3':
				case '4':
				case '5':
				case '6':
				case '7':
				case '8':
				case '9':
					break;
			}

			float x = parseFloat();
			skipCommaSpaces();
			float y = parseFloat();

			pathHandler.linetoRel(x, y);
			expectNumber = skipCommaSpaces2();
		}
	}
	/**
	 * Parses a 'L' (absolute lineto) command; at least one coordinate
	 * pair is required.
	 */
	protected void parseL() throws ParseException, IOException
	{
		current = reader.read();
		skipSpaces();
		_parseL(true);
	}
	/**
	 * Parses the coordinate pairs of an absolute lineto command, emitting
	 * one linetoAbs event per pair, until a non-number character is found.
	 *
	 * @param expectNumber whether at least one coordinate pair must follow
	 *                     (a missing pair is then reported as an error)
	 */
	protected void _parseL(boolean expectNumber) throws ParseException,
			IOException
	{
		for (;;)
		{
			switch (current)
			{
				default:
					// not the start of a number: done (or an error if one
					// was required)
					if (expectNumber)
						reportUnexpected(current);
					return;

				case '+':
				case '-':
				case '.':
				case '0':
				case '1':
				case '2':
				case '3':
				case '4':
				case '5':
				case '6':
				case '7':
				case '8':
				case '9':
					break;
			}

			float x = parseFloat();
			skipCommaSpaces();
			float y = parseFloat();

			pathHandler.linetoAbs(x, y);
			expectNumber = skipCommaSpaces2();
		}
	}
/**
* Parses a 'h' command.
*/
protected void parseh() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x = parseFloat();
pathHandler.linetoHorizontalRel(x);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'H' command.
*/
protected void parseH() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x = parseFloat();
pathHandler.linetoHorizontalAbs(x);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'v' command.
*/
protected void parsev() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x = parseFloat();
pathHandler.linetoVerticalRel(x);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'V' command.
*/
protected void parseV() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x = parseFloat();
pathHandler.linetoVerticalAbs(x);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'c' command.
*/
protected void parsec() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x1 = parseFloat();
skipCommaSpaces();
float y1 = parseFloat();
skipCommaSpaces();
float x2 = parseFloat();
skipCommaSpaces();
float y2 = parseFloat();
skipCommaSpaces();
float x = parseFloat();
skipCommaSpaces();
float y = parseFloat();
pathHandler.curvetoCubicRel(x1, y1, x2, y2, x, y);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'C' command.
*/
protected void parseC() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x1 = parseFloat();
skipCommaSpaces();
float y1 = parseFloat();
skipCommaSpaces();
float x2 = parseFloat();
skipCommaSpaces();
float y2 = parseFloat();
skipCommaSpaces();
float x = parseFloat();
skipCommaSpaces();
float y = parseFloat();
pathHandler.curvetoCubicAbs(x1, y1, x2, y2, x, y);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'q' command.
*/
protected void parseq() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x1 = parseFloat();
skipCommaSpaces();
float y1 = parseFloat();
skipCommaSpaces();
float x = parseFloat();
skipCommaSpaces();
float y = parseFloat();
pathHandler.curvetoQuadraticRel(x1, y1, x, y);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'Q' command.
*/
protected void parseQ() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x1 = parseFloat();
skipCommaSpaces();
float y1 = parseFloat();
skipCommaSpaces();
float x = parseFloat();
skipCommaSpaces();
float y = parseFloat();
pathHandler.curvetoQuadraticAbs(x1, y1, x, y);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 's' command.
*/
protected void parses() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x2 = parseFloat();
skipCommaSpaces();
float y2 = parseFloat();
skipCommaSpaces();
float x = parseFloat();
skipCommaSpaces();
float y = parseFloat();
pathHandler.curvetoCubicSmoothRel(x2, y2, x, y);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'S' command.
*/
protected void parseS() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x2 = parseFloat();
skipCommaSpaces();
float y2 = parseFloat();
skipCommaSpaces();
float x = parseFloat();
skipCommaSpaces();
float y = parseFloat();
pathHandler.curvetoCubicSmoothAbs(x2, y2, x, y);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 't' command.
*/
protected void parset() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x = parseFloat();
skipCommaSpaces();
float y = parseFloat();
pathHandler.curvetoQuadraticSmoothRel(x, y);
expectNumber = skipCommaSpaces2();
}
}
/**
* Parses a 'T' command.
*/
protected void parseT() throws ParseException, IOException
{
current = reader.read();
skipSpaces();
boolean expectNumber = true;
for (;;)
{
switch (current)
{
default:
if (expectNumber)
reportUnexpected(current);
return;
case '+':
case '-':
case '.':
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
break;
}
float x = parseFloat();
skipCommaSpaces();
float y = parseFloat();
pathHandler.curvetoQuadraticSmoothAbs(x, y);
expectNumber = skipCommaSpaces2();
}
}
/**
 * Parses an 'a' command (relative elliptical arc segments).
 */
protected void parsea() throws ParseException, IOException
{
    current = reader.read();
    skipSpaces();
    boolean expectNumber = true;
    for (;;)
    {
        // An arc argument list must begin with a number; any other
        // character ends the command (or is an error when more arguments
        // were still expected after a comma).
        boolean startsNumber = current == '+' || current == '-'
            || current == '.' || (current >= '0' && current <= '9');
        if (!startsNumber)
        {
            if (expectNumber)
                reportUnexpected(current);
            return;
        }
        float rx = parseFloat();
        skipCommaSpaces();
        float ry = parseFloat();
        skipCommaSpaces();
        float ax = parseFloat();
        skipCommaSpaces();
        // large-arc-flag: must be exactly '0' or '1'.
        boolean laf;
        if (current == '0')
            laf = false;
        else if (current == '1')
            laf = true;
        else
        {
            reportUnexpected(current);
            return;
        }
        current = reader.read();
        skipCommaSpaces();
        // sweep-flag: must be exactly '0' or '1'.
        boolean sf;
        if (current == '0')
            sf = false;
        else if (current == '1')
            sf = true;
        else
        {
            reportUnexpected(current);
            return;
        }
        current = reader.read();
        skipCommaSpaces();
        float x = parseFloat();
        skipCommaSpaces();
        float y = parseFloat();
        pathHandler.arcRel(rx, ry, ax, laf, sf, x, y);
        expectNumber = skipCommaSpaces2();
    }
}
/**
 * Parses an 'A' command (absolute elliptical arc segments).
 */
protected void parseA() throws ParseException, IOException
{
    current = reader.read();
    skipSpaces();
    boolean expectNumber = true;
    for (;;)
    {
        // An arc argument list must begin with a number; any other
        // character ends the command (or is an error when more arguments
        // were still expected after a comma).
        boolean startsNumber = current == '+' || current == '-'
            || current == '.' || (current >= '0' && current <= '9');
        if (!startsNumber)
        {
            if (expectNumber)
                reportUnexpected(current);
            return;
        }
        float rx = parseFloat();
        skipCommaSpaces();
        float ry = parseFloat();
        skipCommaSpaces();
        float ax = parseFloat();
        skipCommaSpaces();
        // large-arc-flag: must be exactly '0' or '1'.
        boolean laf;
        if (current == '0')
            laf = false;
        else if (current == '1')
            laf = true;
        else
        {
            reportUnexpected(current);
            return;
        }
        current = reader.read();
        skipCommaSpaces();
        // sweep-flag: must be exactly '0' or '1'.
        boolean sf;
        if (current == '0')
            sf = false;
        else if (current == '1')
            sf = true;
        else
        {
            reportUnexpected(current);
            return;
        }
        current = reader.read();
        skipCommaSpaces();
        float x = parseFloat();
        skipCommaSpaces();
        float y = parseFloat();
        pathHandler.arcAbs(rx, ry, ax, laf, sf, x, y);
        expectNumber = skipCommaSpaces2();
    }
}
/**
 * Skips the remainder of the current sub-path: consumes characters until
 * end of input (-1) or the start of the next moveto command ('m'/'M').
 */
protected void skipSubPath() throws ParseException, IOException
{
    while (current != -1 && current != 'm' && current != 'M')
        current = reader.read();
}
/**
 * Reports an unexpected-character error for {@code ch} and then skips the
 * rest of the current sub-path so parsing can resume at the next moveto.
 *
 * @param ch the unexpected character (callers in this file pass {@code current})
 */
protected void reportUnexpected(int ch) throws ParseException, IOException
{
    // Bug fix: report the character that was passed in rather than silently
    // ignoring the parameter and re-reading the 'current' field. Behaviour
    // is unchanged for the visible call sites, which all pass 'current'.
    reportUnexpectedCharacterError(ch);
    skipSubPath();
}
/**
 * Skips the whitespaces and an optional comma.
 * @return true if a comma was skipped.
 */
protected boolean skipCommaSpaces2() throws IOException
{
    // Skip leading whitespace (space, tab, CR, LF).
    while (current == 0x20 || current == 0x9 || current == 0xD || current == 0xA)
        current = reader.read();
    if (current != ',')
        return false; // no comma.
    // Consume the comma and any whitespace that follows it.
    do
    {
        current = reader.read();
    } while (current == 0x20 || current == 0x9 || current == 0xD || current == 0xA);
    return true; // had comma
}
}
| gpl-2.0 |
mlvdv/truffle | truffle/com.oracle.truffle.sl.test/src/com/oracle/truffle/sl/test/ToStringOfEvalTest.java | 3513 | /*
* Copyright (c) 2012, 2015, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* The Universal Permissive License (UPL), Version 1.0
*
* Subject to the condition set forth below, permission is hereby granted to any
* person obtaining a copy of this software, associated documentation and/or
* data (collectively the "Software"), free of charge and under any and all
* copyright rights in the Software, and any and all patent rights owned or
* freely licensable by each licensor hereunder covering either (i) the
* unmodified Software as contributed to or provided by such licensor, or (ii)
* the Larger Works (as defined below), to deal in both
*
* (a) the Software, and
*
* (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
* one is included with the Software each a "Larger Work" to which the Software
* is contributed by such licensors),
*
* without restriction, including without limitation the rights to copy, create
* derivative works of, display, perform, and distribute the Software and make,
* use, sell, offer for sale, import, export, have made, and have sold the
* Software and the Larger Work(s), and to sublicense the foregoing rights on
* either these or other terms.
*
* This license is subject to the following condition:
*
* The above copyright notice and either this complete permission notice or at a
* minimum a reference to the UPL must be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.oracle.truffle.sl.test;
import com.oracle.truffle.api.source.Source;
import com.oracle.truffle.api.vm.PolyglotEngine;
import java.io.IOException;
import org.junit.After;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.junit.Before;
import org.junit.Test;
/**
 * Checks that a globally registered SL function can be looked up twice from a
 * {@link PolyglotEngine}, that both lookups yield the same underlying object,
 * and that its string form mentions the function name.
 */
public class ToStringOfEvalTest {
    PolyglotEngine engine;

    @Before
    public void initialize() {
        engine = PolyglotEngine.newBuilder().build();
    }

    @After
    public void dispose() {
        engine.dispose();
    }

    @Test
    public void checkToStringOnAFunction() throws IOException {
        PolyglotEngine.Language sl = engine.getLanguages().get("application/x-sl");
        sl.eval(Source.fromText("function checkName() {}", "defineFn"));

        // Two independent lookups of the same global symbol must succeed...
        PolyglotEngine.Value value1 = engine.findGlobalSymbol("checkName");
        PolyglotEngine.Value value2 = engine.findGlobalSymbol("checkName");
        assertNotNull("Symbol is not null", value1);
        assertNotNull("Symbol is not null either", value2);

        // ...and must resolve to equal underlying objects.
        Object global1 = value1.get();
        Object global2 = value2.get();
        assertNotNull("Symbol is not null", global1);
        assertNotNull("Symbol is not null either", global2);
        assertEquals("Symbols are the same", global1, global2);

        // Fixed typo in the failure message ("Contans" -> "Contains").
        assertTrue("Contains checkName text: " + global2, global2.toString().contains("checkName"));
    }
}
| gpl-2.0 |
ctrueden/bioformats | components/forks/poi/src/loci/poi/hssf/record/formula/MemFuncPtg.java | 3355 | /*
* #%L
* Fork of Apache Jakarta POI.
* %%
* Copyright (C) 2008 - 2013 Open Microscopy Environment:
* - Board of Regents of the University of Wisconsin-Madison
* - Glencoe Software, Inc.
* - University of Dundee
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
/*
* Ptg.java
*
* Created on October 28, 2001, 6:30 PM
*/
package loci.poi.hssf.record.formula;
import loci.poi.util.LittleEndian;
import loci.poi.hssf.model.Workbook;
import loci.poi.hssf.record.RecordInputStream;
/**
 * Formula token (PTG, sid 0x29) that records the byte length of the
 * reference subexpression which follows it in the token stream.
 *
 * @author Glen Stampoultzis (glens at apache.org)
 */
public class MemFuncPtg extends ControlPtg
{
    public static final byte sid = 0x29;

    // Length in bytes of the reference subexpression; also reported as the
    // operand count. (Short fields default to 0 — no explicit initializer
    // needed.)
    private short field_1_len_ref_subexpression;

    /** Required for clone methods. */
    public MemFuncPtg()
    {
    }

    /**
     * Creates a new function pointer from a byte array,
     * usually called while reading an excel file.
     */
    public MemFuncPtg(RecordInputStream in)
    {
        field_1_len_ref_subexpression = in.readShort();
    }

    /** @return serialized size: 1 byte sid + 2 byte length field */
    public int getSize()
    {
        return 3;
    }

    public void writeBytes(byte[] array, int offset)
    {
        array[offset] = sid;
        // Field is already a short — the previous (short) cast was redundant.
        LittleEndian.putShort(array, offset + 1, field_1_len_ref_subexpression);
    }

    /** This token has no textual representation in a formula string. */
    public String toFormulaString(Workbook book)
    {
        return "";
    }

    public byte getDefaultOperandClass()
    {
        return 0;
    }

    public int getNumberOfOperands()
    {
        return field_1_len_ref_subexpression;
    }

    public Object clone()
    {
        MemFuncPtg ptg = new MemFuncPtg();
        ptg.field_1_len_ref_subexpression = this.field_1_len_ref_subexpression;
        return ptg;
    }

    public int getLenRefSubexpression()
    {
        return field_1_len_ref_subexpression;
    }

    public void setLenRefSubexpression(int len)
    {
        field_1_len_ref_subexpression = (short) len;
    }
}
| gpl-2.0 |
elijah513/ice | java/test/src/main/java/test/IceDiscovery/simple/Client.java | 940 | // **********************************************************************
//
// Copyright (c) 2003-2015 ZeroC, Inc. All rights reserved.
//
// This copy of Ice is licensed to you under the terms described in the
// ICE_LICENSE file included in this distribution.
//
// **********************************************************************
package test.IceDiscovery.simple;
/**
 * IceDiscovery test client. An optional single command-line argument gives
 * the replica count passed to the test suite; anything unparsable (or a
 * different argument count) falls back to 0.
 */
public class Client extends test.Util.Application
{
    @Override
    public int run(String[] args)
    {
        int num = 0;
        if (args.length == 1)
        {
            try
            {
                num = Integer.parseInt(args[0]);
            }
            catch (NumberFormatException ex)
            {
                num = 0;
            }
        }
        AllTests.allTests(communicator(), num);
        return 0;
    }

    public static void main(String[] args)
    {
        Client client = new Client();
        int status = client.main("Client", args);
        System.gc();
        System.exit(status);
    }
}
| gpl-2.0 |
wesen/nmedit | libs/jnmprotocol2/src/net/sf/nmedit/jnmprotocol2/utils/QueueBuffer.java | 22539 | /*
Nord Modular Midi Protocol 3.03 Library
Copyright (C) 2003-2006 Marcus Andersson
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package net.sf.nmedit.jnmprotocol2.utils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Queue;
import net.sf.nmedit.jnmprotocol2.utils.QueueBuffer;
/**
* A queue that is optimized for buffering and fast
* insert/remove operations of elements.
*
* The queue uses two different linked lists internally.
* The first list contains the elements that have an object
* associated with them. The second list to which we refer
* with the term cache, contains the elements
* that have no object associated with them anymore.
*
* If elements that contain data are removed from the queue
* (for example using poll()) the will be stored in the cache
* (the reference to the data is set to null).
 * Later, if data is offered to the queue, it is not necessary
* to create a new list element but instead an element from the
* cache can be used.
*
* The advantages of the cache are:
* <ul>
* <li>it is faster to reuse an element instead of allocating a new one</li>
* <li>less in best case no garbage is created. The garbage collector
* has less work to do</li>
* <li>memory consumption is constant if the queue is used correctly.
* A correct use means that the maximum number of elements is below
* or equal a constant boundary. For example the queue contains always
* <=1000 elements. A queue that uses no such cache can produce
* up to several MB of garbage.
* </li>
* </ul>
*
* The queue uses a linked list as internal representation
* and recycles unused list items to avoid unecessary garbage.
*
* @author Christian Schneider
*/
public class QueueBuffer<E> implements Queue<E>
{
    // NOTE(review): nothing in this class is synchronized or volatile;
    // instances must not be shared between threads without external locking.

    /**
     * Element of a linked list. Contains the stored element data and
     * the next element in the list.
     */
    private static class Element<E>
    {
        public E data;
        public Element<E> next;
        public Element(E data)
        {
            this.data = data;
            this.next = null;
        }
    }
    // head is the first element in the list (the front element in this queue).
    // always one of the following conditions is true:
    // - head == null
    //   if and only if the queue is empty
    // - head == tail && head!=null
    //   if and only if the number of elements in the queue is 1
    // - head != tail
    //   if and only if the queue contains more than one element
    private Element<E> head;
    // tail is the last element in the list (the last element in this queue).
    // always one of the following conditions is true:
    // - tail == null
    //   if and only if the queue is empty
    // - head == tail && head!=null
    //   if and only if the number of elements in the queue is 1
    // - head != tail
    //   if and only if the queue contains more than one element
    private Element<E> tail;
    // the head of the list of cached elements. This list contains the elements
    // that were previously used and removed from the queue. We recycle
    // them if new data is offered to the queue.
    // elements in the cache contain no data, thus the Element.data field is always null
    // always one of the following conditions is true:
    // - cacheHead == null
    //   if and only if the cache is empty
    // - cacheHead == cacheTail && cacheHead!=null
    //   if and only if the number of elements in the cache is 1
    // - cacheHead != cacheTail
    //   if and only if the cache contains more than one element
    private Element<E> cacheHead;
    // the tail of the list of cached elements
    // elements in the cache contain no data, thus the Element.data field is always null
    // always one of the following conditions is true:
    // - cacheTail == null
    //   if and only if the cache is empty
    // - cacheHead == cacheTail && cacheHead!=null
    //   if and only if the number of elements in the cache is 1
    // - cacheHead != cacheTail
    //   if and only if the cache contains more than one element
    private Element<E> cacheTail;
    // variable is changed if the collection is modified
    private transient int modcount;

    /**
     * Removes all elements from this queue and adds them to a new queue.
     * Note: the cache of recycled element containers stays with this queue;
     * only the live elements move to the returned queue.
     * @return a new queue
     */
    public QueueBuffer<E> release()
    {
        QueueBuffer<E> q = new QueueBuffer<E>();
        q.head = head;
        q.tail = tail;
        head = null;
        tail = null;
        modcount++;
        return q;
    }

    /**
     * Inserts the specified element into this queue.
     *
     * If possible the queue will not create a new element
     * container but recycle one that is not used anymore.
     *
     * @param o the element to insert.
     * @return <tt>true</tt> if o is not null, otherwise false
     */
    public final boolean offer( E o )
    {
        if (o == null)
            return false;
        // the new element
        Element<E> e ;
        // we check if we can recycle the new element
        // or if we have to create a new instance
        if (cacheHead != null)
        {
            // recycle the head of the cache
            e = cacheHead;
            // remove head element from cache
            cacheHead = cacheHead.next;
            if (cacheHead==null)
                cacheTail = null;
            // initialize element fields
            e.next = null;
            e.data = o;
        }
        else
        {
            // create a new instance
            e = new Element<E>(o);
        }
        // now we add the element to the queue
        // we first have to check if the queue is empty
        // or if it already contains some elements
        if (tail != null)
        {
            // the queue is not empty
            // the last element will be e
            tail.next = e;
            // and e will become the new tail
            tail = e;
        }
        else
        {
            // the queue is empty, thus e is both
            // head and tail
            head = tail = e;
        }
        // we have changed the queue
        modcount++;
        // always return true
        return true;
    }

    /**
     * Retrieves and removes the head of this queue, or <tt>null</tt>
     * if this queue is empty.
     *
     * The element container of the removed element will be
     * added to a cache and recycled if further elements are added
     * to the queue.
     *
     * @return the head of this queue, or <tt>null</tt> if this
     *         queue is empty.
     */
    public final E poll()
    {
        // first see if the queue is empty and return null if so
        if (head == null)
            return null;
        // the queue is not empty
        // the result element / data
        Element<E> e = head;
        E result = e.data;
        // if the head is removed then
        // - the next element in the queue will become the new head
        head = head.next;
        // - if the next element is null then tail will become null, too
        if (head == null)
            tail = null;
        // now we can cache the unused element container
        // first disconnect it
        e.next = null;
        // then add it to the cache (e.data will be automatically set to null)
        cache(e);
        // we have changed the queue
        modcount++;
        // we are done
        return result;
    }

    /**
     * Retrieves and removes the head of this queue. This method
     * differs from the <tt>poll</tt> method in that it throws an
     * exception if this queue is empty.
     *
     * @return the head of this queue.
     * @throws NoSuchElementException if this queue is empty.
     */
    public E remove()
    {
        E e = poll();
        if (e == null)
            throw new NoSuchElementException();
        return e;
    }

    /**
     * Retrieves, but does not remove, the head of this queue,
     * returning <tt>null</tt> if this queue is empty.
     *
     * @return the head of this queue, or <tt>null</tt> if this queue
     *         is empty.
     */
    public final E peek()
    {
        return head != null ? head.data : null;
    }

    /**
     * Retrieves, but does not remove, the head of this queue. This method
     * differs from the <tt>peek</tt> method only in that it throws an
     * exception if this queue is empty.
     *
     * @return the head of this queue.
     * @throws NoSuchElementException if this queue is empty.
     */
    public final E element()
    {
        E e = peek();
        if (e == null)
            throw new NoSuchElementException();
        return e;
    }

    /**
     * Returns the number of elements in this collection. If this collection
     * contains more than <tt>Integer.MAX_VALUE</tt> elements, returns
     * <tt>Integer.MAX_VALUE</tt>.
     *
     * Do not use this method if not absolutely necessary because it
     * has complexity O(n).
     *
     * @return the number of elements in this collection
     */
    public int size()
    {
        int size = 0;
        Element<E> pos = head;
        while (pos!=null && size!=Integer.MAX_VALUE)
        {
            size ++;
            pos = pos.next;
        }
        return size;
    }

    /**
     * Returns <tt>true</tt> if this collection contains no elements.
     *
     * @return <tt>true</tt> if this collection contains no elements
     */
    public final boolean isEmpty()
    {
        return head == null;
    }

    /**
     * Returns <tt>true</tt> if this collection contains the specified
     * element. More formally, returns <tt>true</tt> if and only if this
     * collection contains at least one element <tt>e</tt> such that
     * <tt>(o==null ? e==null : o.equals(e))</tt>.
     *
     * @param o element whose presence in this collection is to be tested.
     * @return <tt>true</tt> if this collection contains the specified
     *         element
     * @throws NullPointerException if the specified element is null and this
     *         collection does not support null elements (optional).
     */
    public boolean contains( Object o )
    {
        if (o==null)
            throw new NullPointerException();
        Element<E> pos = head;
        while (pos!=null)
        {
            if (pos.data==o || pos.data.equals(o))
                return true;
            pos = pos.next;
        }
        return false;
    }

    /**
     * Returns an iterator over the elements in this collection.
     * The elements are returned in the same order like they are
     * stored in this queue.
     *
     * @return an <tt>Iterator</tt> over the elements in this collection
     */
    public Iterator<E> iterator()
    {
        return new Iterator<E> ()
        {
            // the data of the element returned by next()
            // or null if next() was not called or remove() was called
            E data = null;
            // current position, starting at the head of the queue
            // if pos == null then the iteration is completed
            Element<E> pos = head;
            // remember modification counter so we can see if
            // concurrent modifications were performed
            int knownMod = modcount;
            public boolean hasNext()
            {
                return pos != null;
            }
            /**
             * Checks if the modification counter has changed and
             * throws a ConcurrentModificationException if this is
             * the case.
             */
            private void checkMod()
            {
                if (knownMod != modcount)
                    throw new ConcurrentModificationException();
            }
            public E next()
            {
                // check for modifications
                checkMod();
                // see if element exists
                if (!hasNext())
                    throw new NoSuchElementException();
                // remember data
                data = pos.data;
                // remember position
                pos = pos.next;
                return data;
            }
            public void remove()
            {
                // check for modifications
                checkMod();
                // check if data!=null what means that next() has been called
                if (data == null)
                    throw new IllegalStateException();
                // remove data
                // NOTE(review): this removes by value via remove(Object), which
                // deletes the FIRST equal element in the queue — with duplicate
                // elements this may not be the element just returned by next().
                QueueBuffer.this.remove(data);
                // set data to null since we also use it as state variable
                data = null;
                // update the known modification value
                knownMod = modcount;
            }
        };
    }

    // Copies the elements into a freshly allocated array (never exposes the
    // internal linked list).
    public Object[] toArray()
    {
        List<Object> list = new ArrayList<Object>(size());
        list.addAll(this);
        return list.toArray();
    }

    // Typed variant of toArray(); delegates to List.toArray(T[]).
    public <T> T[] toArray( T[] a )
    {
        List<E> list = new ArrayList<E>(size());
        list.addAll(this);
        return list.toArray(a);
    }

    /**
     * Adds the specified element to the queue.
     *
     * @param o element whose presence in this collection is to be ensured.
     * @return <tt>true</tt>
     * @throws NullPointerException if the specified element is null.
     */
    public boolean add( E o )
    {
        if (o == null)
            throw new NullPointerException();
        return offer(o);
    }

    /**
     * Removes a single instance of the specified element from this
     * collection, if it is present. More formally,
     * removes an element <tt>e</tt> such that <tt>(o==null ? e==null :
     * o.equals(e))</tt>, if this collection contains one or more such
     * elements. Returns true if this collection contained the specified
     * element.
     *
     * @param o element to be removed from this collection, if present.
     * @return <tt>true</tt> if this collection contained the specified element
     * @throws NullPointerException if the specified element is null.
     */
    public boolean remove( Object o )
    {
        // null-elements are not allowed
        if (o==null)
            throw new NullPointerException();
        // the element is not in the queue
        if (isEmpty())
            return false;
        // prev is the previous element of pos
        Element<E> prev = null;
        // the current position in the queue
        Element<E> pos = head;
        while (pos!=null)
        {
            // see if we have found the specified element
            if (pos.data == o || pos.data.equals(o))
            {
                // remove element
                // if we remove the head then we have to set head:=head.next
                if (head == pos)
                    head = head.next;
                // if we remove tail then we have to set tail:=previous of tail
                if (tail == pos)
                    tail = prev;
                // if either head or tail is null then the queue is empty and we have to
                // set both to null
                if (head == null ^ tail == null)
                    head = tail = null;
                // if the removed element (pos) has a previous element
                // than we have to link the previous element with the next element
                // to be sure that pos is not inside the list anymore
                if (prev != null)
                    prev.next = pos.next;
                // now we can cache the unused element container
                // first disconnect it
                pos.next = null;
                // then add it to the cache (pos.data will be automatically set to null)
                cache(pos);
                // update modification counter
                modcount++;
                // the element has been removed
                return true;
            }
            // we have not found it yet
            // ensure that prev is previous element of pos
            prev = pos;
            pos = prev.next;
        }
        // the element could not be found, thus it was not removed
        return false;
    }

    /**
     * Returns <tt>true</tt> if this collection contains all of the elements
     * in the specified collection.
     *
     * @param c collection to be checked for containment in this collection.
     * @return <tt>true</tt> if this collection contains all of the elements
     *         in the specified collection
     * @throws NullPointerException if the specified collection is <tt>null</tt>.
     * @see #contains(Object)
     */
    public boolean containsAll( Collection<?> c )
    {
        if (c == this || c.isEmpty())
            return true;
        for (Object o : c)
        {
            // we do not support null-elements
            if (o == null)
                throw new NullPointerException();
            if (!contains(o))
                return false;
        }
        return true;
    }

    /**
     * Offers each element of the specified queue to this queue. The specified
     * queue does not contain any elements after this operation.
     *
     * To balance the transaction of elements, this queue will provide
     * all cached (unused) elements to the specified queue.
     *
     * Note: The operation has complexity O(1) and not O(n).
     *
     * @param queue the queue to offer to this queue
     * @return true if this collection changed as result of the
     *         operation.
     * @throws NullPointerException if the specified queue is null
     */
    public boolean offerAll( QueueBuffer<E> queue )
    {
        // first see if the collection will be changed
        if (queue.isEmpty())
            return false;
        // first see if this collection is empty
        if (tail == null)
        {
            // this collection is empty and the head of
            // the specified queue will become the head of this queue
            head = queue.head;
        }
        else
        {
            // this collection is not empty, we add
            // the specified queue to the tail
            tail.next = queue.head;
        }
        // the tail of the specified queue will become the tail of this queue
        tail = queue.tail;
        // the queue has no element after this operation
        queue.head = queue.tail = null;
        // now we give the specified queue our cached elements as exchange
        if (cacheHead != null)
        {
            // see if the specified queue has no cached elements
            if (queue.cacheTail == null)
            {
                // no it hasn't, thus we can copy them 1:1
                queue.cacheHead = cacheHead;
                queue.cacheTail = cacheTail;
            }
            else
            {
                // the specified queue has already elements in it's cache
                queue.cacheTail.next = cacheHead;
                queue.cacheTail = cacheTail;
            }
            // this cache is empty as result of this operation
            cacheHead = cacheTail = null;
        }
        // update the modification counters of both queues
        queue.modcount ++;
        modcount ++;
        // the collection has changed as result of this operation
        return true;
    }

    /**
     * The operation is not supported.
     * @throws UnsupportedOperationException
     */
    public boolean addAll( Collection<? extends E> c )
    {
        throw new UnsupportedOperationException();
    }

    /**
     * The operation is not supported.
     * @throws UnsupportedOperationException
     */
    public boolean removeAll( Collection<?> c )
    {
        throw new UnsupportedOperationException();
    }

    /**
     * The operation is not supported.
     * @throws UnsupportedOperationException
     */
    public boolean retainAll( Collection<?> c )
    {
        throw new UnsupportedOperationException();
    }

    /**
     * Clears the cache of unused elements.
     */
    public void clearCache()
    {
        cacheHead = cacheTail = null;
    }

    /**
     * Removes all of the elements from this collection.
     * This collection will be empty after this method.
     *
     * If the collection was not empty then the element
     * containers will be recycled.
     */
    public void clear()
    {
        if (isEmpty())
            return;
        // we do not set head.next to null here
        // so that each element (head,...,tail)
        // will be added to the cache
        cache(head);
        head = tail = null;
        modcount++;
    }

    /**
     * Adds the specified element and each of it's
     * successors to the cache. Set e.next=null if
     * only e should be added to the cache.
     *
     * The operation will set the Element.data field
     * of e and it's successors to null.
     *
     * @param e the element(s) that should be recycled
     */
    private final void cache(Element<E> e)
    {
        // set Element.data fields to null
        Element<E> newTail = e;
        while (newTail.next!=null)
        {
            newTail.data = null;
            newTail = newTail.next;
        }
        newTail.data = null;
        // see if the cache is empty or not
        if (cacheTail != null)
            cacheTail.next = e;
        else
            cacheHead = e;
        cacheTail = newTail;
    }
}
| gpl-2.0 |
Halcom/zanata-server | zanata-war/src/main/java/org/zanata/email/EmailValidationEmailStrategy.java | 2151 | /*
* Copyright 2014, Red Hat, Inc. and individual contributors
* as indicated by the @author tags. See the copyright.txt file in the
* distribution for a full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.zanata.email;
import com.googlecode.totallylazy.collections.PersistentMap;
import lombok.RequiredArgsConstructor;
import org.zanata.i18n.Messages;
import javax.mail.internet.InternetAddress;
/**
* @author Sean Flanigan <a href="mailto:sflaniga@redhat.com">sflaniga@redhat.com</a>
*/
@RequiredArgsConstructor
public class EmailValidationEmailStrategy extends EmailStrategy {

    // Activation key embedded into the validation email (set via the
    // Lombok-generated constructor).
    private final String key;

    @Override
    public String getSubject(Messages msgs) {
        return msgs.get("jsf.email.accountchange.Subject");
    }

    @Override
    public String getBodyResourceName() {
        return "org/zanata/email/templates/email_validation.vm";
    }

    @Override
    public PersistentMap<String, Object> makeContext(
            PersistentMap<String, Object> genericContext,
            InternetAddress[] toAddresses) {
        // The first recipient supplies the address/name placeholders used by
        // the Velocity template.
        InternetAddress recipient = toAddresses[0];
        return super.makeContext(genericContext, toAddresses)
                .insert("activationKey", key)
                .insert("newEmail", recipient.getAddress())
                .insert("toName", recipient.getPersonal());
    }
}
| gpl-2.0 |
AcademicTorrents/AcademicTorrents-Downloader | vuze/com/aelitis/net/udp/mc/MCGroupException.java | 1235 | /*
* Created on 14-Jun-2004
* Created by Paul Gardner
* Copyright (C) 2004, 2005, 2006 Aelitis, All Rights Reserved.
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* AELITIS, SAS au capital de 46,603.30 euros
* 8 Allee Lenotre, La Grille Royale, 78600 Le Mesnil le Roi, France.
*
*/
package com.aelitis.net.udp.mc;
/**
 * Exception raised by the multicast group (MCGroup) UDP layer.
 *
 * @author parg
 */
public class MCGroupException extends Exception
{
    // Exception is Serializable; pin the stream version explicitly so the
    // serialized form does not depend on compiler-generated defaults.
    private static final long serialVersionUID = 1L;

    /**
     * @param str detail message describing the failure
     */
    public MCGroupException(String str)
    {
        super(str);
    }

    /**
     * @param str   detail message describing the failure
     * @param cause underlying cause of the failure
     */
    public MCGroupException(String str, Throwable cause)
    {
        super(str, cause);
    }
}
| gpl-2.0 |
GiGatR00n/Aion-Core-v4.7.5 | AC-Game/src/com/aionemu/gameserver/model/templates/event/EventTemplate.java | 9069 | /**
* This file is part of Aion-Lightning <aion-lightning.org>.
*
* Aion-Lightning is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Aion-Lightning is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details. *
*
* You should have received a copy of the GNU General Public License
* along with Aion-Lightning.
* If not, see <http://www.gnu.org/licenses/>.
*
*
* Credits goes to all Open Source Core Developer Groups listed below
* Please do not change here something, ragarding the developer credits, except the "developed by XXXX".
* Even if you edit a lot of files in this source, you still have no rights to call it as "your Core".
* Everybody knows that this Emulator Core was developed by Aion Lightning
* @-Aion-Unique-
* @-Aion-Lightning
* @Aion-Engine
* @Aion-Extreme
* @Aion-NextGen
* @Aion-Core Dev.
*/
package com.aionemu.gameserver.model.templates.event;
import com.aionemu.gameserver.dataholders.DataManager;
import com.aionemu.gameserver.dataholders.SpawnsData2;
import com.aionemu.gameserver.model.gameobjects.VisibleObject;
import com.aionemu.gameserver.model.gameobjects.player.Player;
import com.aionemu.gameserver.model.templates.Guides.GuideTemplate;
import com.aionemu.gameserver.model.templates.spawns.Spawn;
import com.aionemu.gameserver.model.templates.spawns.SpawnMap;
import com.aionemu.gameserver.model.templates.spawns.SpawnSpotTemplate;
import com.aionemu.gameserver.model.templates.spawns.SpawnTemplate;
import com.aionemu.gameserver.services.item.ItemService;
import com.aionemu.gameserver.spawnengine.SpawnEngine;
import com.aionemu.gameserver.utils.ThreadPoolManager;
import com.aionemu.gameserver.utils.gametime.DateTimeUtil;
import com.aionemu.gameserver.world.World;
import com.aionemu.gameserver.world.knownlist.Visitor;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.xml.bind.annotation.*;
import javax.xml.datatype.XMLGregorianCalendar;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.Future;
/**
* @author Rolandas
*/
// JAXB-mapped definition of a timed in-game event: optional drops, quests,
// world spawns, periodic inventory drops and survey pages that are turned on
// between startDate and endDate.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "EventTemplate")
public class EventTemplate {
private static Logger log = LoggerFactory.getLogger(EventTemplate.class);
// Drop rules that only apply while this event is running.
@XmlElement(name = "event_drops", required = false)
protected EventDrops eventDrops;
// Quests that may be started and/or maintained during the event.
@XmlElement(name = "quests", required = false)
protected EventQuestList quests;
// NPC/object spawns added to the world for the duration of the event.
@XmlElement(name = "spawns", required = false)
protected SpawnsData2 spawns;
// Periodic item drop delivered directly into player inventories.
@XmlElement(name = "inventory_drop", required = false)
protected InventoryDrop inventoryDrop;
// Titles of survey guide pages activated while the event runs.
@XmlList
@XmlElement(name = "surveys", required = false)
protected List<String> surveys;
@XmlAttribute(name = "name", required = true)
protected String name;
@XmlAttribute(name = "start", required = true)
@XmlSchemaType(name = "dateTime")
protected XMLGregorianCalendar startDate;
@XmlAttribute(name = "end", required = true)
@XmlSchemaType(name = "dateTime")
protected XMLGregorianCalendar endDate;
@XmlAttribute(name = "theme", required = false)
private String theme;
// Objects spawned by this event; tracked so Stop() can despawn them again.
@XmlTransient
protected List<VisibleObject> spawnedObjects;
// Scheduled task driving the periodic inventory drop; null when not running.
@XmlTransient
private Future<?> invDropTask = null;
/** @return the unique event name (XML attribute {@code name}). */
public String getName() {
return name;
}
/** @return the event-specific drop rules, or null when none are configured. */
public EventDrops EventDrop() {
return eventDrops;
}
/** @return the configured start instant converted to Joda time. */
public DateTime getStartDate() {
return DateTimeUtil.getDateTime(startDate.toGregorianCalendar());
}
/** @return the configured end instant converted to Joda time. */
public DateTime getEndDate() {
return DateTimeUtil.getDateTime(endDate.toGregorianCalendar());
}
/**
 * @return ids of quests players may start during this event; an empty
 *         (mutable) list when no quests are configured.
 */
public List<Integer> getStartableQuests() {
if (quests == null) {
return new ArrayList<Integer>();
}
return quests.getStartableQuests();
}
/**
 * @return ids of quests kept alive during this event; an empty (mutable)
 *         list when no quests are configured.
 */
public List<Integer> getMaintainableQuests() {
if (quests == null) {
return new ArrayList<Integer>();
}
return quests.getMaintainQuests();
}
/** @return true when the current time lies strictly inside [start, end]. */
public boolean isActive() {
return getStartDate().isBeforeNow() && getEndDate().isAfterNow();
}
/** @return true when the event is not currently active (before start or after end). */
public boolean isExpired() {
return !isActive();
}
// Runtime flag: whether Start() has been applied. Volatile because the
// scheduler thread and game threads may both observe it.
@XmlTransient
volatile boolean isStarted = false;
/** Marks the event as started without performing any of Start()'s side effects. */
public void setStarted() {
isStarted = true;
}
/** @return whether the event's world-side effects are currently applied. */
public boolean isStarted() {
return isStarted;
}
/**
 * Applies the event to the world: registers and spawns all configured
 * spawn maps in every available map instance, schedules the periodic
 * inventory drop, and activates the survey guide pages.
 * Idempotent: does nothing if the event is already started.
 */
public void Start() {
if (isStarted) {
return;
}
if (spawns != null && spawns.size() > 0) {
if (spawnedObjects == null) {
spawnedObjects = new ArrayList<VisibleObject>();
}
for (SpawnMap map : spawns.getTemplates()) {
DataManager.SPAWNS_DATA2.addNewSpawnMap(map);
Collection<Integer> instanceIds = World.getInstance().getWorldMap(map.getMapId()).getAvailableInstanceIds();
for (Integer instanceId : instanceIds) {
int spawnCount = 0;
for (Spawn spawn : map.getSpawns()) {
spawn.setEventTemplate(this);
for (SpawnSpotTemplate spot : spawn.getSpawnSpotTemplates()) {
SpawnTemplate t = SpawnEngine.addNewSpawn(map.getMapId(), spawn.getNpcId(), spot.getX(), spot.getY(),
spot.getZ(), spot.getHeading(), spawn.getRespawnTime());
t.setEventTemplate(this);
SpawnEngine.spawnObject(t, instanceId);
spawnCount++;
}
}
log.info("Spawned event objects in " + map.getMapId() + " [" + instanceId + "] : " + spawnCount + " (" + this.getName() + ")");
}
}
// Re-run the post-unmarshal hook so the freshly added maps are indexed,
// then drop the temporary template list.
DataManager.SPAWNS_DATA2.afterUnmarshal(null, null);
DataManager.SPAWNS_DATA2.clearTemplates();
}
if (inventoryDrop != null) {
// getInterval() is in minutes; scheduleAtFixedRate expects milliseconds.
invDropTask = ThreadPoolManager.getInstance().scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
World.getInstance().doOnAllPlayers(new Visitor<Player>() {
@Override
public void visit(Player player) {
if (player.getCommonData().getLevel() >= inventoryDrop.getStartLevel()) {
ItemService.dropItemToInventory(player, inventoryDrop.getDropItem());
}
}
});
}
}, inventoryDrop.getInterval() * 60000, inventoryDrop.getInterval() * 60000);
}
if (surveys != null) {
for (String survey : surveys) {
GuideTemplate template = DataManager.GUIDE_HTML_DATA.getTemplateByTitle(survey);
if (template != null) {
template.setActivated(true);
}
}
}
isStarted = true;
}
/**
 * Reverts the event's world-side effects: despawns and unregisters all
 * tracked event objects, cancels the inventory-drop task, and deactivates
 * the survey guide pages. Idempotent: does nothing if not started.
 */
public void Stop() {
if (!isStarted) {
return;
}
if (spawnedObjects != null) {
for (VisibleObject o : spawnedObjects) {
if (o.isSpawned()) {
o.getController().delete();
}
}
DataManager.SPAWNS_DATA2.removeEventSpawnObjects(spawnedObjects);
log.info("Despawned " + spawnedObjects.size() + " event objects (" + this.getName() + ")");
spawnedObjects.clear();
spawnedObjects = null;
}
if (invDropTask != null) {
// false: let an in-flight drop iteration finish instead of interrupting it.
invDropTask.cancel(false);
invDropTask = null;
}
if (surveys != null) {
for (String survey : surveys) {
GuideTemplate template = DataManager.GUIDE_HTML_DATA.getTemplateByTitle(survey);
if (template != null) {
template.setActivated(false);
}
}
}
isStarted = false;
}
/**
 * Registers an object spawned on behalf of this event so it can be
 * despawned again in {@link #Stop()}.
 */
public void addSpawnedObject(VisibleObject object) {
if (spawnedObjects == null) {
spawnedObjects = new ArrayList<VisibleObject>();
}
spawnedObjects.add(object);
}
/**
 * @return the theme name, lower-cased; null when no theme is configured
 */
public String getTheme() {
if (theme != null) {
return theme.toLowerCase();
}
return theme;
}
}
| gpl-2.0 |
md-5/jdk10 | src/java.xml/share/classes/org/xml/sax/helpers/ParserFactory.java | 5284 | /*
* Copyright (c) 2000, 2019, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.xml.sax.helpers;
import jdk.xml.internal.SecuritySupport;
/**
* Java-specific class for dynamically loading SAX parsers.
*
* <p><strong>Note:</strong> This class is designed to work with the now-deprecated
* SAX1 {@link org.xml.sax.Parser Parser} class. SAX2 applications should use
* {@link org.xml.sax.helpers.XMLReaderFactory XMLReaderFactory} instead.</p>
*
* <p>ParserFactory is not part of the platform-independent definition
* of SAX; it is an additional convenience class designed
* specifically for Java XML application writers. SAX applications
* can use the static methods in this class to allocate a SAX parser
* dynamically at run-time based either on the value of the
* `org.xml.sax.parser' system property or on a string containing the class
* name.</p>
*
* <p>Note that the application still requires an XML parser that
* implements SAX1.</p>
*
* @deprecated This class works with the deprecated
* {@link org.xml.sax.Parser Parser}
* interface.
* @since 1.4, SAX 1.0
* @author David Megginson
* @version 2.0.1 (sax2r2)
*/
@SuppressWarnings( "deprecation" )
@Deprecated(since="1.5")
public class ParserFactory {

    /** Not instantiable: this class exposes static factory methods only. */
    private ParserFactory() {
    }

    /**
     * Creates a new SAX1 parser whose implementation class is named by the
     * {@code org.xml.sax.parser} system property. The named class must exist
     * and implement {@link org.xml.sax.Parser Parser}.
     *
     * @return a newly allocated SAX1 parser
     * @exception java.lang.NullPointerException There is no value
     *            for the `org.xml.sax.parser' system property.
     * @exception java.lang.ClassNotFoundException The SAX parser
     *            class was not found (check your CLASSPATH).
     * @exception IllegalAccessException The SAX parser class was
     *            found, but you do not have permission to load it.
     * @exception InstantiationException The SAX parser class was
     *            found but could not be instantiated.
     * @exception java.lang.ClassCastException The SAX parser class
     *            was found and instantiated, but does not implement
     *            org.xml.sax.Parser.
     * @see #makeParser(java.lang.String)
     * @see org.xml.sax.Parser
     */
    public static org.xml.sax.Parser makeParser()
        throws ClassNotFoundException,
               IllegalAccessException,
               InstantiationException,
               NullPointerException,
               ClassCastException
    {
        String className = SecuritySupport.getSystemProperty("org.xml.sax.parser");
        if (className == null) {
            // Historical behavior: signal the missing property with an NPE.
            throw new NullPointerException("No value for sax.parser property");
        }
        return makeParser(className);
    }

    /**
     * Creates a new SAX1 parser from the class name provided. The named class
     * must exist and implement {@link org.xml.sax.Parser Parser}.
     *
     * @param className A string containing the name of the SAX parser class.
     * @return a newly allocated SAX1 parser
     * @exception java.lang.ClassNotFoundException The SAX parser
     *            class was not found (check your CLASSPATH).
     * @exception IllegalAccessException The SAX parser class was
     *            found, but you do not have permission to load it.
     * @exception InstantiationException The SAX parser class was
     *            found but could not be instantiated.
     * @exception java.lang.ClassCastException The SAX parser class
     *            was found and instantiated, but does not implement
     *            org.xml.sax.Parser.
     * @see #makeParser()
     * @see org.xml.sax.Parser
     */
    public static org.xml.sax.Parser makeParser(String className)
        throws ClassNotFoundException,
               IllegalAccessException,
               InstantiationException,
               ClassCastException
    {
        ClassLoader loader = SecuritySupport.getClassLoader();
        return NewInstance.newInstance(org.xml.sax.Parser.class, loader, className);
    }
}
| gpl-2.0 |
axDev-JDK/jdk | test/sun/security/krb5/auto/BadKdc2.java | 1830 | /*
* Copyright (c) 2009, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 6843127
* @run main/othervm/timeout=300 BadKdc2
* @summary krb5 should not try to access unavailable kdc too often
*/
import java.io.*;
import java.security.Security;
public class BadKdc2 {
/**
 * Configures the krb5 bad-KDC policy to "tryLess" (at most 2 attempts with a
 * 1000 ms timeout per unavailable KDC) and drives the shared BadKdc harness.
 * Each argument is a regular expression describing the expected sequence of
 * KDC accesses (digits identify which KDC was contacted) for one phase of
 * the scenario; the trailing comments note which KDCs are considered bad.
 *
 * @throws Exception if the observed KDC traffic does not match a pattern
 */
public static void main(String[] args)
throws Exception {
Security.setProperty("krb5.kdc.bad.policy", "tryLess:2,1000");
BadKdc.go(
"121212222222(32){1,2}11112121(32){1,2}", // 1 2
"11112121(32){1,2}11112121(32){1,2}", // 1 2
// refresh
"121212222222(32){1,2}11112121(32){1,2}", // 1 2
// k3 off k2 on
"1111(21){1,2}1111(22){1,2}", // 1
// k1 on
"(11){1,2}(12){1,2}" // empty
);
}
}
| gpl-2.0 |
bestmazzo/freedomotic | plugins/devices/arduinousb/src/main/java/com/freedomotic/plugins/devices/arduinousb/ArduinoUSB.java | 4868 | /**
*
* Copyright (c) 2009-2015 Freedomotic team http://freedomotic.com
*
* This file is part of Freedomotic
*
* This Program is free software; you can redistribute it and/or modify it under
* the terms of the GNU General Public License as published by the Free Software
* Foundation; either version 2, or (at your option) any later version.
*
* This Program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
* details.
*
* You should have received a copy of the GNU General Public License along with
* Freedomotic; see the file COPYING. If not, see
* <http://www.gnu.org/licenses/>.
*/
package com.freedomotic.plugins.devices.arduinousb;
import com.freedomotic.api.EventTemplate;
import com.freedomotic.api.Protocol;
import com.freedomotic.events.ProtocolRead;
import com.freedomotic.exceptions.PluginStartupException;
import com.freedomotic.exceptions.UnableToExecuteException;
import com.freedomotic.helpers.SerialHelper;
import com.freedomotic.helpers.SerialPortListener;
import com.freedomotic.reactions.Command;
import java.util.logging.Level;
import java.util.logging.Logger;
import jssc.SerialPortException;
/**
 * Freedomotic protocol plugin that talks to an Arduino board over a USB
 * serial port. Incoming serial chunks are parsed into ProtocolRead events;
 * outgoing Freedomotic commands are written back to the board.
 */
public class ArduinoUSB extends Protocol {
private static final Logger LOG = Logger.getLogger(ArduinoUSB.class.getName());
// Serial-port settings, all overridable from the plugin configuration.
private String portName = configuration.getStringProperty("serial.port", "/dev/usb0");
private Integer baudRate = configuration.getIntProperty("serial.baudrate", 9600);
private Integer dataBits = configuration.getIntProperty("serial.databits", 8);
private Integer parity = configuration.getIntProperty("serial.parity", 0);
private Integer stopBits = configuration.getIntProperty("serial.stopbits", 1);
// Incoming data is split into chunks at this terminator (default: newline).
private String chunkTerminator = configuration.getStringProperty("chunk.terminator", "\n");
// Alternative to the chunk terminator: fixed-size chunks.
//private Integer chunkSize = configuration.getIntProperty("chunk.size", 5);
// Field separator inside one chunk, e.g. "address;status".
private String delimiter = configuration.getStringProperty("delimiter", ";");
private SerialHelper serial;
public ArduinoUSB() {
super("Arduino USB", "/arduinousb/arduinousb-manifest.xml");
//This disables loop execution od onRun() method
setPollingWait(-1); // onRun() executes once.
}
/**
 * Opens the serial connection and installs a listener that forwards every
 * received chunk to {@link #sendChanges(String)}.
 *
 * @throws PluginStartupException if the serial port cannot be opened
 */
@Override
public void onStart() throws PluginStartupException {
try {
serial = new SerialHelper(portName, baudRate, dataBits, stopBits, parity, new SerialPortListener() {
@Override
public void onDataAvailable(String data) {
LOG.log(Level.CONFIG, "Arduino USB received: {0}", data);
sendChanges(data);
}
});
// in this example it reads until a string terminator (default: new line char)
serial.setChunkTerminator(chunkTerminator);
} catch (SerialPortException ex) {
throw new PluginStartupException("Error while creating Arduino serial connection. " + ex.getMessage(), ex);
}
}
/** Closes the serial connection, if one was opened. */
@Override
public void onStop() {
if (serial != null) {
serial.disconnect();
}
}
@Override
protected void onRun() {
//nothing to do, Arduino messages are read by SerialHelper
}
/**
 * Writes the command's "arduinousb.message" payload to the board.
 *
 * @throws UnableToExecuteException if the serial write fails
 */
@Override
protected void onCommand(Command c) throws UnableToExecuteException {
//this method receives freedomotic commands sent on channel app.actuators.protocol.arduinousb.in
String message = c.getProperty("arduinousb.message");
try {
serial.write(message);
} catch (SerialPortException ex) {
throw new UnableToExecuteException("Error writing message '" + message + "' to arduino serial board: " + ex.getMessage(), ex);
}
}
/**
 * Parses one serial chunk ("address;status") and publishes it as a
 * ProtocolRead event with an "isOn" boolean property.
 * NOTE(review): assumes the Arduino used Serial.println(), so the chunk ends
 * with "\r\n" (hence length() - 2); a chunk shorter than 2 chars or without
 * the delimiter would throw — confirm SerialHelper guarantees well-formed
 * chunks.
 */
private void sendChanges(String data) {
// in this example we are using Arduino Serial.println() so
// remove '\r' and '\n' at the end of the string and split data read
String[] receivedMessage = data.substring(0, data.length() - 2).split(delimiter);
String receivedAddress = receivedMessage[0];
String receivedStatus = receivedMessage[1];
ProtocolRead event = new ProtocolRead(this, "arduinousb", receivedAddress);
if (receivedStatus.equalsIgnoreCase("on")) {
event.addProperty("isOn", "true");
} else {
event.addProperty("isOn", "false");
}
this.notifyEvent(event);
}
@Override
protected boolean canExecute(Command c) {
throw new UnsupportedOperationException("Not supported yet.");
}
@Override
protected void onEvent(EventTemplate event) {
//not nothing. This plugins doesn't listen to freedomotic events
}
}
| gpl-2.0 |
smarr/graal | graal/com.oracle.graal.compiler.hsail.test/src/com/oracle/graal/compiler/hsail/test/Vec3ObjStreamIntCaptureTest.java | 1985 | /*
* Copyright (c) 2009, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.oracle.graal.compiler.hsail.test;
import org.junit.*;
import com.oracle.graal.compiler.hsail.test.infra.*;
/**
* Tests codegen for a java 7 style object array stream kernel, one int capture.
*/
public class Vec3ObjStreamIntCaptureTest extends GraalKernelTester {
// Number of Vec3 work items dispatched to the kernel.
static final int NUM = 20;
// @Result: the harness compares this array between HSAIL and host execution.
@Result public Vec3[] inArray = new Vec3[NUM];
// Fills inArray with Vec3(i, i + 1, -1); z = -1 marks "not yet computed".
void setupArrays() {
for (int i = 0; i < NUM; i++) {
inArray[i] = new Vec3(i, i + 1, -1);
}
}
/**
 * The "kernel" method we will be testing. For Array Stream, an object from the array will be
 * the last parameter; 'adjustment' is the captured int (7 in runTest).
 */
public void run(int adjustment, Vec3 vec3) {
vec3.z = vec3.x + vec3.y - adjustment;
}
@Override
public void runTest() {
setupArrays();
// Dispatch run(7, inArray[i]) for every element of inArray.
dispatchMethodKernel(inArray, 7);
}
@Test
public void test() {
testGeneratedHsail();
}
}
| gpl-2.0 |
kahowell/esb-message-admin | api/src/test/java/org/esbtools/message/admin/model/SearchFieldTest.java | 3276 |
package org.esbtools.message.admin.model;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
public class SearchFieldTest {

    /** Asserts that isPreDefined accepts every name in the given list. */
    private static void assertAllPreDefined(List<String> fieldNames) {
        for (String fieldName : fieldNames) {
            assertTrue(SearchField.isPreDefined(fieldName));
        }
    }

    @Test
    public void testGetValueTypeTrue() {
        assertEquals(SearchField.valueOf("messageType").getValueType(), String.class);
    }

    @Test
    public void testNotPreDefined() {
        assertFalse(SearchField.isPreDefined("customfield"));
    }

    @Test
    public void testIsPreDefined() {
        assertAllPreDefined(Arrays.asList(
                "id", "errorQueue", "messageId", "messageGuid", "messageType",
                "sourceQueue", "sourceSystem", "originalSystem", "queueName",
                "queueLocation", "errorComponent", "serviceName", "customHeader"));
    }

    @Test
    public void testIsPreDefinedUpperCase() {
        assertAllPreDefined(Arrays.asList(
                "ID", "ERRORQUEUE", "MESSAGEID", "MESSAGEGUID", "MESSAGETYPE",
                "SOURCEQUEUE", "SOURCESYSTEM", "ORIGINALSYSTEM", "QUEUENAME",
                "QUEUELOCATION", "ERRORCOMPONENT", "SERVICENAME", "CUSTOMHEADER"));
    }

    @Test
    public void testIsPreDefinedLowerCase() {
        assertAllPreDefined(Arrays.asList(
                "id", "errorqueue", "messageid", "messageguid", "messagetype",
                "sourcequeue", "sourcesystem", "originalsystem", "queuename",
                "queuelocation", "errorcomponent", "servicename", "customheader"));
    }

    @Test
    public void testMatch() {
        assertEquals(SearchField.messageId, SearchField.match("messageid"));
    }
}
| gpl-3.0 |
MarcelStructr/structr | structr-ui/src/main/java/org/structr/cloud/message/Crypt.java | 1877 | /**
* Copyright (C) 2010-2016 Structr GmbH
*
* This file is part of Structr <http://structr.org>.
*
* Structr is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* Structr is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Structr. If not, see <http://www.gnu.org/licenses/>.
*/
package org.structr.cloud.message;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.structr.cloud.CloudConnection;
import org.structr.common.error.FrameworkException;
/**
*
*
*/
/**
 * Handshake message used to verify that the encrypted channel works: the
 * server echoes the message back, and receiving the echo marks the client
 * connection as authenticated. Carries no payload beyond the base Message.
 */
public class Crypt extends Message {
// No-arg constructor required for message deserialization.
public Crypt() {}
public Crypt(final long id) {
super(id, 0);
}
@Override
public String toString() {
return "Crypt(" + getId() + ")";
}
// Server side: echo the message so the client knows encryption is working.
@Override
public void onRequest(CloudConnection serverConnection) throws IOException, FrameworkException {
// just reply with this message so the client knows that
// encryption is working
serverConnection.send(this);
}
// Client side: the echo arrived intact, so the encrypted link is good.
@Override
public void onResponse(CloudConnection clientConnection) throws IOException, FrameworkException {
clientConnection.setAuthenticated();
}
@Override
public void afterSend(CloudConnection connection) {
}
@Override
protected void deserializeFrom(DataInputStream inputStream) throws IOException {
// no additional data
}
@Override
protected void serializeTo(DataOutputStream outputStream) throws IOException {
// no additional data
}
}
| gpl-3.0 |
VegasGoat/TFCraft | src/Common/com/bioxx/tfc/Items/Tools/ItemCustomHoe.java | 5661 | package com.bioxx.tfc.Items.Tools;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemHoe;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.IIcon;
import net.minecraft.world.World;
import net.minecraftforge.common.ForgeHooks;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.event.entity.player.UseHoeEvent;
import cpw.mods.fml.common.eventhandler.Event.Result;
import com.bioxx.tfc.Reference;
import com.bioxx.tfc.Core.TFCTabs;
import com.bioxx.tfc.Core.TFC_Core;
import com.bioxx.tfc.Core.TFC_Textures;
import com.bioxx.tfc.Items.ItemTerra;
import com.bioxx.tfc.TileEntities.TEFarmland;
import com.bioxx.tfc.api.TFCBlocks;
import com.bioxx.tfc.api.Crafting.AnvilManager;
import com.bioxx.tfc.api.Enums.EnumItemReach;
import com.bioxx.tfc.api.Enums.EnumSize;
import com.bioxx.tfc.api.Enums.EnumWeight;
import com.bioxx.tfc.api.Interfaces.ISize;
public class ItemCustomHoe extends ItemHoe implements ISize
{
public ItemCustomHoe(ToolMaterial e)
{
super(e);
setCreativeTab(TFCTabs.TFC_TOOLS);
setNoRepair();
}
@Override
public void registerIcons(IIconRegister registerer)
{
String name = this.getUnlocalizedName().replace("item.", "");
name = name.replace("IgIn ", "");
name = name.replace("IgEx ", "");
name = name.replace("Sed ", "");
name = name.replace("MM ", "");
this.itemIcon = registerer.registerIcon(Reference.MOD_ID + ":" + "tools/" + name);
}
@Override
public IIcon getIcon(ItemStack stack, int pass)
{
NBTTagCompound nbt = stack.getTagCompound();
if(pass == 1 && nbt != null && nbt.hasKey("broken"))
return TFC_Textures.brokenItem;
else
return getIconFromDamageForRenderPass(stack.getItemDamage(), pass);
}
@Override
public boolean onItemUseFirst(ItemStack stack, EntityPlayer player, World world, int x, int y, int z, int side, float hitX, float hitY, float hitZ)
{
if (world.isRemote || world.getBlock(x, y, z) == TFCBlocks.toolRack)
return false;
else
{
UseHoeEvent event = new UseHoeEvent(player, stack, world, x, y, z);
if (MinecraftForge.EVENT_BUS.post(event))
return false;
if (event.getResult() == Result.ALLOW)
{
stack.damageItem(1, player);
return true;
}
Block var8 = world.getBlock(x, y, z);
Block var9 = world.getBlock(x, y + 1, z);
boolean isDirt = TFC_Core.isDirt(var8);
if (side != 1 || !var9.isAir(world, x, y + 1, z) || !TFC_Core.isGrass(var8) && !isDirt)
return false;
else
{
Block var10 = var8 == TFCBlocks.dirt || var8 == TFCBlocks.grass || var8 == TFCBlocks.dryGrass ? TFCBlocks.dirt :
var8 == TFCBlocks.dirt2 || var8 == TFCBlocks.grass2 || var8 == TFCBlocks.dryGrass2 ? TFCBlocks.dirt2 : null;
if(var10 != null)
{
int meta = world.getBlockMetadata(x, y, z);
if(var10 == TFCBlocks.dirt)
{
world.playSoundEffect(x + 0.5F, y + 0.5F, z + 0.5F, var10.stepSound.getStepResourcePath(), (var10.stepSound.getVolume() + 1.0F) / 2.0F, var10.stepSound.getPitch() * 0.8F);
if (world.isRemote)
return true;
else
{
world.setBlock(x, y, z, TFCBlocks.tilledSoil, meta, 0x2);
world.markBlockForUpdate(x, y, z);
stack.damageItem(1, player);
if(isDirt)
{
TEFarmland te = (TEFarmland) world.getTileEntity(x, y, z);
te.nutrients[0] = 100;
te.nutrients[1] = 100;
te.nutrients[2] = 100;
}
return true;
}
}
else if(var10 == TFCBlocks.dirt2)
{
world.playSoundEffect(x + 0.5F, y + 0.5F, z + 0.5F, var10.stepSound.getStepResourcePath(), (var10.stepSound.getVolume() + 1.0F) / 2.0F, var10.stepSound.getPitch() * 0.8F);
if (world.isRemote)
return true;
else
{
world.setBlock(x, y, z, TFCBlocks.tilledSoil2, meta, 0x2);
world.markBlockForUpdate(x, y, z);
stack.damageItem(1, player);
if(isDirt)
{
TEFarmland te = (TEFarmland) world.getTileEntity(x, y, z);
te.nutrients[0] = 100;
te.nutrients[1] = 100;
te.nutrients[2] = 100;
}
return true;
}
}
}
}
return false;
}
}
@Override
public void addInformation(ItemStack is, EntityPlayer player, List arraylist, boolean flag)
{
ItemTerra.addSizeInformation(is, arraylist);
ItemTerraTool.addSmithingBonusInformation(is, arraylist);
}
@Override
public int getItemStackLimit()
{
if(canStack())
return this.getSize(null).stackSize * getWeight(null).multiplier;
else
return 1;
}
@Override
public EnumSize getSize(ItemStack is)
{
return EnumSize.LARGE;
}
@Override
public boolean canStack()
{
return false;
}
@Override
public EnumWeight getWeight(ItemStack is)
{
return EnumWeight.LIGHT;
}
@Override
public int getMaxDamage(ItemStack stack)
{
return (int) (getMaxDamage()+(getMaxDamage() * AnvilManager.getDurabilityBuff(stack)));
}
@Override
public float getDigSpeed(ItemStack stack, Block block, int meta)
{
float digSpeed = super.getDigSpeed(stack, block, meta);
if (ForgeHooks.isToolEffective(stack, block, meta))
{
return digSpeed + (digSpeed * AnvilManager.getDurabilityBuff(stack));
}
return digSpeed;
}
@Override
public EnumItemReach getReach(ItemStack is)
{
return EnumItemReach.FAR;
}
} | gpl-3.0 |
wormzjl/PneumaticCraft | src/pneumaticCraft/client/gui/programmer/GuiProgWidgetImportExport.java | 3068 | package pneumaticCraft.client.gui.programmer;
import net.minecraft.client.Minecraft;
import net.minecraft.client.resources.I18n;
import net.minecraftforge.common.util.ForgeDirection;
import pneumaticCraft.client.gui.GuiProgrammer;
import pneumaticCraft.client.gui.widget.GuiCheckBox;
import pneumaticCraft.client.gui.widget.IGuiWidget;
import pneumaticCraft.client.gui.widget.WidgetTextFieldNumber;
import pneumaticCraft.common.progwidgets.ICountWidget;
import pneumaticCraft.common.progwidgets.IProgWidget;
import pneumaticCraft.common.progwidgets.ProgWidgetInventoryBase;
import pneumaticCraft.common.util.PneumaticCraftUtils;
/**
 * Programmer GUI for import/export puzzle pieces. Shows one checkbox per
 * inventory side (widget IDs 0-5, mapped to ForgeDirection ordinals), plus a
 * "use item count" checkbox (ID 6) with an attached number field.
 */
public class GuiProgWidgetImportExport<Widget extends IProgWidget> extends GuiProgWidgetAreaShow<Widget>{
// Checkbox toggling whether a fixed item count limit is applied.
private GuiCheckBox useItemCount;
// Number field for the item count; only enabled while useItemCount is checked.
private WidgetTextFieldNumber textField;
public GuiProgWidgetImportExport(Widget widget, GuiProgrammer guiProgrammer){
super(widget, guiProgrammer);
}
@Override
public void initGui(){
super.initGui();
if(showSides()) {
// One checkbox per accessible side; widget ID == ForgeDirection ordinal.
for(int i = 0; i < 6; i++) {
String sideName = PneumaticCraftUtils.getOrientationName(ForgeDirection.getOrientation(i));
GuiCheckBox checkBox = new GuiCheckBox(i, guiLeft + 4, guiTop + 30 + i * 12, 0xFF000000, sideName);
checkBox.checked = ((ProgWidgetInventoryBase)widget).getSides()[i];
addWidget(checkBox);
}
}
// The count controls sit below the side checkboxes when those are shown.
useItemCount = new GuiCheckBox(6, guiLeft + 4, guiTop + (showSides() ? 115 : 30), 0xFF000000, I18n.format("gui.progWidget.itemFilter.useItemCount"));
useItemCount.setTooltip("gui.progWidget.itemFilter.useItemCount.tooltip");
useItemCount.checked = ((ICountWidget)widget).useCount();
addWidget(useItemCount);
textField = new WidgetTextFieldNumber(Minecraft.getMinecraft().fontRenderer, guiLeft + 7, guiTop + (showSides() ? 128 : 43), 50, 11);
textField.setValue(((ICountWidget)widget).getCount());
textField.setEnabled(useItemCount.checked);
addWidget(textField);
}
// Subclasses without side selection override this to hide the checkboxes.
protected boolean showSides(){
return true;
}
@Override
public void actionPerformed(IGuiWidget checkBox){
// IDs 0-5: side toggles; ID 6: the item-count toggle.
if(checkBox.getID() < 6 && checkBox.getID() >= 0) {
((ProgWidgetInventoryBase)widget).getSides()[checkBox.getID()] = ((GuiCheckBox)checkBox).checked;
} else if(checkBox.getID() == 6) {
((ICountWidget)widget).setUseCount(((GuiCheckBox)checkBox).checked);
textField.setEnabled(((GuiCheckBox)checkBox).checked);
}
super.actionPerformed(checkBox);
}
@Override
public void onKeyTyped(IGuiWidget widget){
// Push every keystroke's value straight into the program widget.
((ICountWidget)this.widget).setCount(textField.getValue());
super.onKeyTyped(widget);
}
@Override
public void drawScreen(int mouseX, int mouseY, float partialTicks){
super.drawScreen(mouseX, mouseY, partialTicks);
if(showSides()) fontRendererObj.drawString("Accessing sides:", guiLeft + 4, guiTop + 20, 0xFF000000);
}
}
| gpl-3.0 |
jtux270/translate | ovirt/frontend/webadmin/modules/uicommonweb/src/main/java/org/ovirt/engine/ui/uicommonweb/models/vms/CustomInstanceType.java | 3884 | package org.ovirt.engine.ui.uicommonweb.models.vms;
import org.ovirt.engine.core.common.businessentities.BootSequence;
import org.ovirt.engine.core.common.businessentities.DisplayType;
import org.ovirt.engine.core.common.businessentities.InstanceType;
import org.ovirt.engine.core.common.businessentities.MigrationSupport;
import org.ovirt.engine.core.common.businessentities.UsbPolicy;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.ui.uicompat.ConstantsManager;
import java.util.List;
/**
* Null object for instance types
*/
public class CustomInstanceType implements InstanceType {
public static final CustomInstanceType INSTANCE = new CustomInstanceType();
@Override
public String getDescription() {
return ConstantsManager.getInstance().getConstants().customInstanceTypeDescription();
}
@Override
public String getName() {
return ConstantsManager.getInstance().getConstants().customInstanceTypeName();
}
@Override
public Guid getId() {
return null;
}
@Override
public void setName(String value) {
}
@Override
public void setDescription(String value) {
}
@Override
public int getMemSizeMb() {
return 0;
}
@Override
public void setMemSizeMb(int value) {
}
@Override
public int getNumOfSockets() {
return 0;
}
@Override
public void setNumOfSockets(int value) {
}
@Override
public int getCpuPerSocket() {
return 0;
}
@Override
public void setCpuPerSocket(int value) {
}
@Override
public List<VmNetworkInterface> getInterfaces() {
return null;
}
@Override
public void setInterfaces(List<VmNetworkInterface> value) {
}
@Override
public int getNumOfMonitors() {
return 0;
}
@Override
public void setNumOfMonitors(int value) {
}
@Override
public UsbPolicy getUsbPolicy() {
return null;
}
@Override
public void setUsbPolicy(UsbPolicy value) {
}
@Override
public boolean isAutoStartup() {
return false;
}
@Override
public void setAutoStartup(boolean value) {
}
@Override
public BootSequence getDefaultBootSequence() {
// default boot sequence
return BootSequence.C;
}
@Override
public void setDefaultBootSequence(BootSequence value) {
}
@Override
public DisplayType getDefaultDisplayType() {
return null;
}
@Override
public void setDefaultDisplayType(DisplayType value) {
}
@Override
public int getPriority() {
return 0;
}
@Override
public void setPriority(int value) {
}
@Override
public int getMinAllocatedMem() {
return 0;
}
@Override
public void setMinAllocatedMem(int value) {
}
@Override
public Boolean getTunnelMigration() {
return Boolean.FALSE;
}
@Override
public void setTunnelMigration(Boolean value) {
}
@Override
public void setSingleQxlPci(boolean value) {
}
@Override
public boolean getSingleQxlPci() {
return false;
}
@Override
public boolean isSmartcardEnabled() {
return false;
}
@Override
public void setSmartcardEnabled(boolean smartcardEnabled) {
}
@Override
public MigrationSupport getMigrationSupport() {
return null;
}
@Override
public void setMigrationSupport(MigrationSupport migrationSupport) {
}
@Override
public void setMigrationDowntime(Integer migrationDowntime) {
}
@Override
public Integer getMigrationDowntime() {
return null;
}
@Override
public void setId(Guid id) {
}
}
| gpl-3.0 |
kriztan/Pix-Art-Messenger | libs/android-transcoder/src/main/java/net/ypresto/androidtranscoder/format/OutputFormatUnavailableException.java | 825 | /*
* Copyright (C) 2014 Yuya Tanaka
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.ypresto.androidtranscoder.format;
public class OutputFormatUnavailableException extends RuntimeException {
public OutputFormatUnavailableException(String detailMessage) {
super(detailMessage);
}
}
| gpl-3.0 |
s20121035/rk3288_android5.1_repo | packages/apps/Launcher3/src/com/android/launcher3/LauncherRootView.java | 445 | package com.android.launcher3;
import android.content.Context;
import android.graphics.Rect;
import android.util.AttributeSet;
public class LauncherRootView extends InsettableFrameLayout {
public LauncherRootView(Context context, AttributeSet attrs) {
super(context, attrs);
}
@Override
protected boolean fitSystemWindows(Rect insets) {
setInsets(insets);
return true; // I'll take it from here
}
} | gpl-3.0 |
itachi1706/Equivalent-Exchange-3 | src/main/java/com/pahimar/ee3/network/message/MessageTransmutationKnowledgeUpdate.java | 4503 | package com.pahimar.ee3.network.message;
import com.pahimar.ee3.inventory.ContainerTransmutationTablet;
import com.pahimar.ee3.knowledge.TransmutationKnowledge;
import com.pahimar.ee3.tileentity.TileEntityTransmutationTablet;
import com.pahimar.ee3.util.CompressionHelper;
import cpw.mods.fml.client.FMLClientHandler;
import cpw.mods.fml.common.network.simpleimpl.IMessage;
import cpw.mods.fml.common.network.simpleimpl.IMessageHandler;
import cpw.mods.fml.common.network.simpleimpl.MessageContext;
import io.netty.buffer.ByteBuf;
import net.minecraft.client.gui.inventory.GuiContainer;
import net.minecraft.item.ItemStack;
import java.util.Collection;
public class MessageTransmutationKnowledgeUpdate implements IMessage, IMessageHandler<MessageTransmutationKnowledgeUpdate, IMessage>
{
public int xCoord, yCoord, zCoord;
public TransmutationKnowledge transmutationKnowledge;
public MessageTransmutationKnowledgeUpdate()
{
}
public MessageTransmutationKnowledgeUpdate(TileEntityTransmutationTablet tileEntityTransmutationTablet, Collection<ItemStack> knownTransmutationsCollection)
{
if (tileEntityTransmutationTablet != null)
{
this.xCoord = tileEntityTransmutationTablet.xCoord;
this.yCoord = tileEntityTransmutationTablet.yCoord;
this.zCoord = tileEntityTransmutationTablet.zCoord;
}
else
{
this.xCoord = 0;
this.yCoord = Integer.MIN_VALUE;
this.zCoord = 0;
}
if (knownTransmutationsCollection != null)
{
this.transmutationKnowledge = new TransmutationKnowledge(knownTransmutationsCollection);
}
else
{
this.transmutationKnowledge = new TransmutationKnowledge();
}
}
public MessageTransmutationKnowledgeUpdate(int xCoord, int yCoord, int zCoord, Collection<ItemStack> knownTransmutationsCollection)
{
this.xCoord = xCoord;
this.yCoord = yCoord;
this.zCoord = zCoord;
if (knownTransmutationsCollection != null)
{
this.transmutationKnowledge = new TransmutationKnowledge(knownTransmutationsCollection);
}
else
{
this.transmutationKnowledge = new TransmutationKnowledge();
}
}
@Override
public void fromBytes(ByteBuf buf)
{
this.xCoord = buf.readInt();
this.yCoord = buf.readInt();
this.zCoord = buf.readInt();
byte[] compressedString = null;
int readableBytes = buf.readInt();
if (readableBytes > 0)
{
compressedString = buf.readBytes(readableBytes).array();
}
if (compressedString != null)
{
String uncompressedString = CompressionHelper.decompressStringFromByteArray(compressedString);
this.transmutationKnowledge = TransmutationKnowledge.createFromJson(uncompressedString);
}
}
@Override
public void toBytes(ByteBuf buf)
{
buf.writeInt(xCoord);
buf.writeInt(yCoord);
buf.writeInt(zCoord);
byte[] compressedString = null;
if (transmutationKnowledge != null)
{
compressedString = CompressionHelper.compressStringToByteArray(transmutationKnowledge.toJson());
}
if (compressedString != null)
{
buf.writeInt(compressedString.length);
buf.writeBytes(compressedString);
}
else
{
buf.writeInt(0);
}
}
@Override
public IMessage onMessage(MessageTransmutationKnowledgeUpdate message, MessageContext ctx)
{
if (message.yCoord != Integer.MIN_VALUE)
{
if (FMLClientHandler.instance().getClient().currentScreen instanceof GuiContainer)
{
GuiContainer guiContainer = (GuiContainer) FMLClientHandler.instance().getClient().currentScreen;
if (guiContainer.inventorySlots instanceof ContainerTransmutationTablet)
{
if (FMLClientHandler.instance().getWorldClient().getTileEntity(message.xCoord, message.yCoord, message.zCoord) instanceof TileEntityTransmutationTablet)
{
((ContainerTransmutationTablet) guiContainer.inventorySlots).handleTransmutationKnowledgeUpdate(message.transmutationKnowledge);
}
}
}
}
return null;
}
}
| gpl-3.0 |
dsibournemouth/autoweka | weka-3.7.7/src/main/java/weka/gui/sql/ResultSetTableModel.java | 6448 | /*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* ResultSetTableModel.java
* Copyright (C) 2005-2012 University of Waikato, Hamilton, New Zealand
*
*/
package weka.gui.sql;
import java.sql.ResultSet;
import java.util.HashSet;
import javax.swing.event.TableModelListener;
import javax.swing.table.TableModel;
/**
* The model for an SQL ResultSet.
*
* @author FracPete (fracpete at waikato dot ac dot nz)
* @version $Revision: 8034 $
*/
public class ResultSetTableModel implements TableModel {
/** the listeners. */
protected HashSet m_Listeners;
/** the data. */
protected Object[][] m_Data;
/** for retrieving the data etc. */
protected ResultSetHelper m_Helper;
/**
* initializes the model, retrieves all rows.
*
* @param rs the ResultSet to get the data from
*/
public ResultSetTableModel(ResultSet rs) {
this(rs, 0);
}
/**
* initializes the model, retrieves only the given amount of rows (0 means
* all).
*
* @param rs the ResultSet to get the data from
* @param rows the maximum number of rows to retrieve, 0 retrieves all
*/
public ResultSetTableModel(ResultSet rs, int rows) {
super();
m_Listeners = new HashSet();
m_Helper = new ResultSetHelper(rs, rows);
m_Data = m_Helper.getCells();
}
/**
* adds a listener to the list that is notified each time a change to data
* model occurs.
*
* @param l the listener to add
*/
public void addTableModelListener(TableModelListener l) {
m_Listeners.add(l);
}
/**
* returns the most specific superclass for all the cell values in the
* column (always String).
*
* @param columnIndex the index of the column
* @return the class
*/
public Class getColumnClass(int columnIndex) {
Class result;
result = null;
if ( (m_Helper.getColumnClasses() != null)
&& (columnIndex >= 0)
&& (columnIndex < getColumnCount()) ) {
if (columnIndex == 0)
result = Integer.class;
else
result = m_Helper.getColumnClasses()[columnIndex - 1];
}
return result;
}
/**
* returns the number of columns in the model.
*
* @return the number of columns
*/
public int getColumnCount() {
return m_Helper.getColumnCount() + 1;
}
/**
* returns the name of the column at columnIndex.
*
* @param columnIndex the index of the column
* @return the name
*/
public String getColumnName(int columnIndex) {
String result;
result = "";
if ( (m_Helper.getColumnNames() != null)
&& (columnIndex >= 0)
&& (columnIndex < getColumnCount()) ) {
if (columnIndex == 0)
result = "Row";
else
result = m_Helper.getColumnNames()[columnIndex - 1];
}
return result;
}
/**
* returns the number of rows in the model.
*
* @return the number of data rows
*/
public int getRowCount() {
return m_Data.length;
}
/**
* returns the value for the cell at columnindex and rowIndex.
*
* @param rowIndex the row of the cell
* @param columnIndex the column of the cell
* @return the data value
*/
public Object getValueAt(int rowIndex, int columnIndex) {
Object result;
result = null;
if ( (rowIndex >= 0) && (rowIndex < getRowCount())
&& (columnIndex >= 0) && (columnIndex < getColumnCount()) ) {
if (columnIndex == 0)
result = new Integer(rowIndex + 1);
else
result = m_Data[rowIndex][columnIndex - 1];
}
return result;
}
/**
* checks whether the value of the cell is NULL.
*
* @param rowIndex the row of the cell
* @param columnIndex the column of the cell
* @return true if the cell value is NULL
*/
public boolean isNullAt(int rowIndex, int columnIndex) {
return (getValueAt(rowIndex, columnIndex) == null);
}
/**
* returns whether the column at the given index is numeric.
*
* @param columnIndex the column to check
* @return whether the column is numeric
*/
public boolean isNumericAt(int columnIndex) {
boolean result;
result = false;
if ( (columnIndex >= 0) && (columnIndex < getColumnCount()) ) {
if (columnIndex == 0) {
result = true;
}
else {
if (m_Helper.getNumericColumns() == null)
result = false;
else
result = m_Helper.getNumericColumns()[columnIndex - 1];
}
}
return result;
}
/**
* returns true if the cell at rowindex and columnindexis editable.
*
* @param rowIndex the row of the cell
* @param columnIndex the column of the cell
* @return always false
*/
public boolean isCellEditable(int rowIndex, int columnIndex) {
return false;
}
/**
* removes a listener from the list that is notified each time a change to
* the data model occurs.
*
* @param l the listener to remove
*/
public void removeTableModelListener(TableModelListener l) {
m_Listeners.remove(l);
}
/**
* sets the value in the cell at columnIndex and rowIndex to aValue.
* Ignored.
*
* @param aValue the value to set - ignored
* @param rowIndex the row of the cell
* @param columnIndex the column of the cell
*/
public void setValueAt(Object aValue, int rowIndex, int columnIndex) {
// ignore
}
/**
* frees up the memory.
*
* @throws Throwable if something goes wrong
*/
public void finalize() throws Throwable {
try {
m_Helper.getResultSet().close();
m_Helper.getResultSet().getStatement().close();
m_Helper = null;
}
catch (Exception e) {
// ignored
}
m_Data = null;
super.finalize();
}
}
| gpl-3.0 |
acrutiapps/BookReader | src/com/hlidskialf/android/hardware/ShakeListener.java | 3362 |
/* The following code was written by Matthew Wiggins
* and is released under the APACHE 2.0 license
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package com.hlidskialf.android.hardware;
import android.content.Context;
import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;
public class ShakeListener implements SensorEventListener
{
private static final int FORCE_THRESHOLD = 350;
private static final int TIME_THRESHOLD = 100;
private static final int SHAKE_TIMEOUT = 500;
private static final int SHAKE_DURATION = 1000;
private static final int SHAKE_COUNT = 3;
private SensorManager mSensorMgr;
private Sensor mAccelerometer;
private float mLastX=-1.0f, mLastY=-1.0f, mLastZ=-1.0f;
private long mLastTime;
private OnShakeListener mShakeListener;
private Context mContext;
private int mShakeCount = 0;
private long mLastShake;
private long mLastForce;
public interface OnShakeListener
{
public void onShake();
}
public ShakeListener(Context context)
{
mContext = context;
resume();
}
public void setOnShakeListener(OnShakeListener listener)
{
mShakeListener = listener;
}
public void resume() {
mSensorMgr = (SensorManager)mContext.getSystemService(Context.SENSOR_SERVICE);
mAccelerometer = mSensorMgr.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
if (mSensorMgr == null) {
throw new UnsupportedOperationException("Sensors not supported");
}
boolean supported = mSensorMgr.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_GAME);
if (!supported) {
mSensorMgr.unregisterListener(this, mAccelerometer);
throw new UnsupportedOperationException("Accelerometer not supported");
}
}
public void pause() {
if (mSensorMgr != null) {
mSensorMgr.unregisterListener(this, mAccelerometer);
mSensorMgr = null;
}
}
@Override
public void onAccuracyChanged(Sensor sensor, int i) { }
@Override
public void onSensorChanged(SensorEvent sensorEvent) {
if (sensorEvent.sensor != mAccelerometer) return;
long now = System.currentTimeMillis();
if ((now - mLastForce) > SHAKE_TIMEOUT) {
mShakeCount = 0;
}
float[] values = sensorEvent.values;
if ((now - mLastTime) > TIME_THRESHOLD) {
long diff = now - mLastTime;
float speed = Math.abs(values[SensorManager.DATA_X] + values[SensorManager.DATA_Y] + values[SensorManager.DATA_Z] - mLastX - mLastY - mLastZ) / diff * 10000;
if (speed > FORCE_THRESHOLD) {
if ((++mShakeCount >= SHAKE_COUNT) && (now - mLastShake > SHAKE_DURATION)) {
mLastShake = now;
mShakeCount = 0;
if (mShakeListener != null) {
mShakeListener.onShake();
}
}
mLastForce = now;
}
mLastTime = now;
mLastX = values[SensorManager.DATA_X];
mLastY = values[SensorManager.DATA_Y];
mLastZ = values[SensorManager.DATA_Z];
}
}
}
| gpl-3.0 |
MartyParty21/AwakenDreamsClient | mcp/src/minecraft/net/minecraft/network/handshake/client/package-info.java | 207 | @ParametersAreNonnullByDefault
@MethodsReturnNonnullByDefault
package net.minecraft.network.handshake.client;
import mcp.MethodsReturnNonnullByDefault;
import javax.annotation.ParametersAreNonnullByDefault; | gpl-3.0 |
xubinux/xbin-store | xbin-store-web-portal/src/main/java/cn/binux/portal/exception/GlobalException.java | 1180 | package cn.binux.portal.exception;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.servlet.HandlerExceptionResolver;
import org.springframework.web.servlet.ModelAndView;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* 全局异常处理
*
* @author xubin.
* @create 2017-02-05 下午3:49
*/
public class GlobalException implements HandlerExceptionResolver {
private static final Logger logger = LoggerFactory.getLogger(GlobalException.class);
@Override
public ModelAndView resolveException(HttpServletRequest httpServletRequest, HttpServletResponse httpServletResponse, Object o, Exception e) {
logger.info("进入portal 全局异常处理器!");
//控制台打印异常
e.printStackTrace();
logger.error("发生异常!",e);
// 发短信 发邮件 ...
// 跳转错误页面
ModelAndView modelAndView = new ModelAndView();
modelAndView.addObject("message", "服务器开小差,请稍后重试!");
modelAndView.setViewName("error/exception");
return modelAndView;
}
}
| gpl-3.0 |
ceskaexpedice/kramerius | rest/src/main/java/cz/incad/kramerius/rest/api/k5/client/item/decorators/details/AbstractDetailDecorator.java | 1287 | /*
* Copyright (C) 2013 Pavel Stastny
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package cz.incad.kramerius.rest.api.k5.client.item.decorators.details;
import org.json.JSONException;
import org.json.JSONObject;
import cz.incad.kramerius.rest.api.k5.client.item.decorators.AbstractItemDecorator;
public abstract class AbstractDetailDecorator extends AbstractItemDecorator {
public static final String DETAILS_KEY = "details";
public String[] details(String details) {
return details.split("##");
}
public String getModel(JSONObject jsonObj) throws JSONException {
return jsonObj.has("model") ? jsonObj.getString("model") : null;
}
}
| gpl-3.0 |
WurstContributor/Wurst-Client | Wurst Client/src/tk/wurst_client/mods/XRayMod.java | 877 | /*
* Copyright © 2014 - 2015 | Alexander01998 | All rights reserved.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package tk.wurst_client.mods;
import java.util.ArrayList;
import net.minecraft.block.Block;
import net.minecraft.client.Minecraft;
import tk.wurst_client.mods.Mod.Category;
import tk.wurst_client.mods.Mod.Info;
@Info(category = Category.RENDER,
description = "Allows you to see ores through walls.",
name = "X-Ray")
public class XRayMod extends Mod
{
public static ArrayList<Block> xrayBlocks = new ArrayList<Block>();
@Override
public String getRenderName()
{
return "X-Wurst";
}
@Override
public void onToggle()
{
Minecraft.getMinecraft().renderGlobal.loadRenderers();
}
}
| mpl-2.0 |
aihua/opennms | core/upgrade/src/main/java/org/opennms/upgrade/implementations/monitoringLocations16/LocationDef.java | 10305 | /*******************************************************************************
* This file is part of OpenNMS(R).
*
* Copyright (C) 2014 The OpenNMS Group, Inc.
* OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc.
*
* OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc.
*
* OpenNMS(R) is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License,
* or (at your option) any later version.
*
* OpenNMS(R) is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with OpenNMS(R). If not, see:
* http://www.gnu.org/licenses/
*
* For more information contact:
* OpenNMS(R) Licensing <license@opennms.org>
* http://www.opennms.org/
* http://www.opennms.com/
*******************************************************************************/
package org.opennms.upgrade.implementations.monitoringLocations16;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlRootElement;
/**
* <p>
* This element contains the name of the location, the name of the
* monitoring area (used to aggregate locations, example: Area San Francisco,
* location name "SFO" which becomes SFO-1 or SFO-BuildingA, etc.)
* Additionally, a geolocation can be provided (an address or other
* identifying location that can be looked up with a geolocation
* API), as well as coordinates (latitude,longitude). Finally, a
* priority can be assigned to the location, for purposes of sorting
* (1 = highest, 100 = lowest).
* </p>
* <p>
* The polling package name is used to associate with a polling
* configuration found in the polling-configuration.xml file.
* </p>
* <p>
* The collection package name is used to associate with a collection
* configuration found in the collectd-configuration.xml file.
*/
@XmlRootElement(name="location-def")
@XmlAccessorType(XmlAccessType.NONE)
public class LocationDef implements Serializable {
private static final long serialVersionUID = -7651610012389148818L;
/**
* The name of the location. This must be a unique identifier.
*/
@XmlAttribute(name="location-name")
private String m_locationName;
/**
* The name of the monitoring area. This field is used to group
* multiple locations together, ie, a region, or abstract category.
*/
@XmlAttribute(name="monitoring-area")
private String m_monitoringArea;
/**
* The polling package associated with this monitoring location.
*/
@XmlAttribute(name="polling-package-name")
private String m_pollingPackageName;
@XmlAttribute(name="collection-package-name")
private String m_collectionPackageName;
/**
* The geolocation (address) of this monitoring location.
*/
@XmlAttribute(name="geolocation")
private String m_geolocation;
/**
* The coordinates (latitude,longitude) of this monitoring location.
*/
@XmlAttribute(name="coordinates")
private String m_coordinates;
/**
* The priority of the location. (1=highest)
*/
@XmlAttribute(name="priority")
private Long m_priority;
@XmlElementWrapper(name="tags")
@XmlElement(name="tag")
private List<Tag> m_tags;
public LocationDef() {
super();
}
public LocationDef(final String locationName, final String monitoringArea, final String pollingPackageName, final String collectionPackageName, final String geolocation, final String coordinates, final Long priority, final String... tags) {
this();
m_locationName = locationName;
m_monitoringArea = monitoringArea;
m_pollingPackageName = pollingPackageName;
m_collectionPackageName = collectionPackageName;
m_geolocation = geolocation;
m_coordinates = coordinates;
m_priority = priority;
for (final String tag : tags) {
if (m_tags == null) {
m_tags = new ArrayList<Tag>(tags.length);
}
m_tags.add(new Tag(tag));
}
}
public String getLocationName() {
return m_locationName;
}
public void setLocationName(final String locationName) {
m_locationName = locationName;
}
public String getMonitoringArea() {
return m_monitoringArea;
}
public void setMonitoringArea(final String monitoringArea) {
m_monitoringArea = monitoringArea;
}
public String getPollingPackageName() {
return m_pollingPackageName;
}
public void setPollingPackageName(final String pollingPackageName) {
m_pollingPackageName = pollingPackageName;
}
public String getCollectionPackageName() {
return m_collectionPackageName;
}
public void setCollectionPackageName(final String collectionPackageName) {
m_collectionPackageName = collectionPackageName;
}
public String getGeolocation() {
return m_geolocation;
}
public void setGeolocation(final String geolocation) {
m_geolocation = geolocation;
}
public String getCoordinates() {
return m_coordinates;
}
public void setCoordinates(final String coordinates) {
m_coordinates = coordinates;
}
public Long getPriority() {
return m_priority == null? 100L : m_priority;
}
public void setPriority(final Long priority) {
m_priority = priority;
}
public List<Tag> getTags() {
if (m_tags == null) {
return Collections.emptyList();
} else {
return Collections.unmodifiableList(m_tags);
}
}
public void setTags(final List<Tag> tags) {
if (tags == null || tags.size() == 0) {
m_tags = null;
} else {
m_tags = new ArrayList<Tag>(tags);
}
}
@Override
public int hashCode() {
final int prime = 353;
int result = 1;
result = prime * result + ((m_coordinates == null) ? 0 : m_coordinates.hashCode());
result = prime * result + ((m_geolocation == null) ? 0 : m_geolocation.hashCode());
result = prime * result + ((m_locationName == null) ? 0 : m_locationName.hashCode());
result = prime * result + ((m_monitoringArea == null) ? 0 : m_monitoringArea.hashCode());
result = prime * result + ((m_pollingPackageName == null) ? 0 : m_pollingPackageName.hashCode());
result = prime * result + ((m_collectionPackageName == null) ? 0 : m_collectionPackageName.hashCode());
result = prime * result + ((m_priority == null) ? 0 : m_priority.hashCode());
result = prime * result + ((m_tags == null || m_tags.size() == 0) ? 0 : m_tags.hashCode());
return result;
}
@Override
public boolean equals(final Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof LocationDef)) {
return false;
}
final LocationDef other = (LocationDef) obj;
if (m_coordinates == null) {
if (other.m_coordinates != null) {
return false;
}
} else if (!m_coordinates.equals(other.m_coordinates)) {
return false;
}
if (m_geolocation == null) {
if (other.m_geolocation != null) {
return false;
}
} else if (!m_geolocation.equals(other.m_geolocation)) {
return false;
}
if (m_locationName == null) {
if (other.m_locationName != null) {
return false;
}
} else if (!m_locationName.equals(other.m_locationName)) {
return false;
}
if (m_monitoringArea == null) {
if (other.m_monitoringArea != null) {
return false;
}
} else if (!m_monitoringArea.equals(other.m_monitoringArea)) {
return false;
}
if (m_pollingPackageName == null) {
if (other.m_pollingPackageName != null) {
return false;
}
} else if (!m_pollingPackageName.equals(other.m_pollingPackageName)) {
return false;
}
if (m_collectionPackageName == null) {
if (other.m_collectionPackageName != null) {
return false;
}
} else if (!m_collectionPackageName.equals(other.m_collectionPackageName)) {
return false;
}
if (m_priority == null) {
if (other.m_priority != null) {
return false;
}
} else if (!m_priority.equals(other.m_priority)) {
return false;
}
if (m_tags == null || m_tags.size() == 0) {
if (other.m_tags != null && other.m_tags.size() > 0) {
return false;
}
} else {
if (other.m_tags == null || other.m_tags.size() == 0) {
return false;
} else if (!m_tags.equals(other.m_tags)) {
return false;
}
}
return true;
}
@Override
public String toString() {
return "OnmsMonitoringLocation [location-name=" + m_locationName +
", monitoring-area=" + m_monitoringArea +
", polling-package-name=" + m_pollingPackageName +
", collection-package-name=" + m_collectionPackageName +
", geolocation=" + m_geolocation +
", coordinates=" + m_coordinates +
", priority=" + m_priority +
", tags=" + m_tags + "]";
}
}
| agpl-3.0 |
uniteddiversity/mycollab | mycollab-mobile/src/main/java/com/esofthead/mycollab/mobile/module/project/view/task/TaskContainer.java | 1096 | /**
* This file is part of mycollab-mobile.
*
* mycollab-mobile is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* mycollab-mobile is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with mycollab-mobile. If not, see <http://www.gnu.org/licenses/>.
*/
package com.esofthead.mycollab.mobile.module.project.view.task;
import com.esofthead.mycollab.mobile.ui.AbstractMobileMainView;
import com.esofthead.mycollab.vaadin.mvp.ViewComponent;
/**
* @author MyCollab Ltd.
*
* @since 4.5.0
*
*/
@ViewComponent
public class TaskContainer extends AbstractMobileMainView {
private static final long serialVersionUID = -9011057045375634646L;
}
| agpl-3.0 |
ging/vcc | extras/chat/ijabbar/src/anzsoft/iJabBar/client/gui/NotifyButton.java | 3538 | package anzsoft.iJabBar.client.gui;
import com.extjs.gxt.ui.client.core.El;
import com.extjs.gxt.ui.client.event.ComponentEvent;
import com.extjs.gxt.ui.client.event.Events;
import com.extjs.gxt.ui.client.event.Listener;
import com.google.gwt.dom.client.Style;
import com.google.gwt.user.client.DOM;
import com.google.gwt.user.client.Element;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.ui.Widget;
/**
 * Bar button with a numeric notification badge, an optional tooltip and an
 * attached drop-down {@link BarMenu}. While the menu is visible, the button
 * carries extra "focus" style names.
 */
public class NotifyButton extends MenuButton {

	/** Tooltip text; null (or empty) means "no tooltip". */
	private String toolTip = null;

	/** Notification count shown inside the badge. */
	private int count = 0;

	// DOM structure built in onRender():
	// mainElement (<a>) > tipElement (<div>) + buttonElement (<div>) > countElement (<span>)
	private Element mainElement;
	private Element buttonElement;
	private Element countElement;
	private Element tipElement;

	public NotifyButton() {
		setMenuAlign("bl-tl");
		addStyleName("ijab_abutton");
		// Menu without close/minimize behavior: these callbacks are intentionally empty.
		menu = new BarMenu(true, false, new BarMenuListener() {

			public void onClose() {
			}

			public void onMin() {
			}
		});
		// Keep the focus styling in sync with the menu's visibility.
		menu.addListener(Events.Hide, new Listener<BarMenuEvent>() {

			public void handleEvent(BarMenuEvent be) {
				removeStyleName("ijab_menubutton-focus");
				removeStyleName("ijab_notificationbutton-focus");
			}
		});
		menu.addListener(Events.Show, new Listener<BarMenuEvent>() {

			public void handleEvent(BarMenuEvent be) {
				addStyleName("ijab_menubutton-focus");
				addStyleName("ijab_notificationbutton-focus");
			}
		});
	}

	public void setMenuStyle(String style) {
		menu.addStyleName(style);
	}

	public void setMenuHeadeing(String heading) {
		menu.setHeading(heading);
	}

	public void attatchMenuWidget(Widget widget, int height) {
		menu.attachWidget(widget, height);
	}

	@Override
	protected El getFocusEl() {
		return el();
	}

	/**
	 * Builds the button's DOM (anchor root, tooltip div, badge span, rendered menu)
	 * and sinks focus, click and mouse events.
	 */
	@Override
	protected void onRender(Element target, int index) {
		super.onRender(target, index);
		mainElement = DOM.createAnchor();
		setElement(mainElement, target, index);
		buttonElement = DOM.createDiv();
		buttonElement.setClassName("inner_button notify_button");
		countElement = DOM.createSpan();
		countElement.setClassName("emobig");
		countElement.setInnerText("" + count);
		buttonElement.appendChild(countElement);
		tipElement = DOM.createDiv();
		tipElement.setClassName("ijab_abutton_tooltip");
		if (toolTip != null)
			// BUGFIX: the closing tag was written as "</strong" (missing '>'),
			// producing malformed tooltip HTML. setTooltip() already used the correct tag.
			tipElement.setInnerHTML("<strong>" + toolTip + "</strong>");
		if (menu != null) {
			menu.render(buttonElement);
			buttonElement.appendChild(menu.getElement());
		}
		mainElement.appendChild(tipElement);
		mainElement.appendChild(buttonElement);
		if (getFocusEl() != null) {
			getFocusEl().addEventsSunk(Event.FOCUSEVENTS);
		}
		listener = new Listener<ComponentEvent>() {

			public void handleEvent(ComponentEvent be) {
				//monitorMouseOver(be.getEvent());
			}
		};
		el().addEventsSunk(Event.ONCLICK | Event.MOUSEEVENTS);
	}

	public void hideToolTip() {
		Style style = tipElement.getStyle();
		style.setProperty("display", "none");
	}

	public void showToolTip() {
		// Nothing to show when no tooltip text has been set.
		if (toolTip == null || toolTip.length() == 0)
			return;
		Style style = tipElement.getStyle();
		style.setProperty("display", "block");
	}

	public void setTooltip(String tip) {
		toolTip = tip;
		if (isRendered()) {
			tipElement.setInnerHTML("<strong>" + tip + "</strong>");
		}
	}

	protected void onClick(ComponentEvent ce) {
		super.onClick(ce);
		hideToolTip();
	}

	protected void onMouseOver(ComponentEvent ce) {
		// BUGFIX: previously delegated to super.onMouseOut(ce) (copy-paste error),
		// so the superclass never received the mouse-over notification.
		super.onMouseOver(ce);
		showToolTip();
	}

	protected void onMouseOut(ComponentEvent ce) {
		super.onMouseOut(ce);
		hideToolTip();
	}

	/** Updates the badge number, refreshing the DOM if already rendered. */
	public void setCount(int count) {
		this.count = count;
		if (this.isRendered()) {
			countElement.setInnerText("" + count);
		}
	}
}
| agpl-3.0 |
brtonnies/rapidminer-studio | src/main/java/com/rapidminer/operator/meta/EvolutionaryParameterOptimizationOperator.java | 10971 | /**
* Copyright (C) 2001-2015 by RapidMiner and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.operator.meta;
import java.util.Iterator;
import java.util.List;
import com.rapidminer.operator.Operator;
import com.rapidminer.operator.OperatorDescription;
import com.rapidminer.operator.OperatorException;
import com.rapidminer.operator.UserError;
import com.rapidminer.operator.ValueDouble;
import com.rapidminer.operator.performance.PerformanceVector;
import com.rapidminer.parameter.ParameterType;
import com.rapidminer.parameter.ParameterTypeDouble;
import com.rapidminer.parameter.ParameterTypeInt;
import com.rapidminer.parameter.UndefinedParameterError;
import com.rapidminer.parameter.value.ParameterValueRange;
import com.rapidminer.parameter.value.ParameterValues;
import com.rapidminer.tools.RandomGenerator;
import com.rapidminer.tools.math.optimization.ec.es.ESOptimization;
import com.rapidminer.tools.math.optimization.ec.es.Individual;
import com.rapidminer.tools.math.optimization.ec.es.OptimizationValueType;
/**
* This operator finds the optimal values for a set of parameters using an evolutionary strategies
* approach which is often more appropriate than a grid search or a greedy search like the quadratic
* programming approach and leads to better results. The parameter <var>parameters</var> is a list
* of key value pairs where the keys are of the form <code>operator_name.parameter_name</code> and
* the value for each parameter must be a semicolon separated pair of a minimum and a maximum value
* in squared parantheses, e.g. [10;100] for a range of 10 until 100. <br/>
* The operator returns an optimal {@link ParameterSet} which can as well be written to a file with
* a {@link com.rapidminer.operator.io.ParameterSetWriter}. This parameter set can be read in
* another process using a {@link com.rapidminer.operator.io.ParameterSetLoader}. <br/>
* The file format of the parameter set file is straightforward and can easily be generated by
* external applications. Each line is of the form <center>
* <code>operator_name.parameter_name = value</code></center> <br/>
* Please refer to section {@rapidminer.ref sec:parameter_optimization|Advanced Processes/Parameter
* and performance analysis} for an example application.
*
* @author Ingo Mierswa, Tobias Malbrecht
*/
public class EvolutionaryParameterOptimizationOperator extends ParameterOptimizationOperator {

	// Parameter keys are defined by ESOptimization so operator and optimizer stay in sync.
	public static final String PARAMETER_MAX_GENERATIONS = ESOptimization.PARAMETER_MAX_GENERATIONS;

	public static final String PARAMETER_GENERATIONS_WITHOUT_IMPROVAL = ESOptimization.PARAMETER_GENERATIONS_WITHOUT_IMPROVAL;

	public static final String PARAMETER_POPULATION_SIZE = ESOptimization.PARAMETER_POPULATION_SIZE;

	public static final String PARAMETER_TOURNAMENT_FRACTION = ESOptimization.PARAMETER_TOURNAMENT_FRACTION;

	public static final String PARAMETER_KEEP_BEST = ESOptimization.PARAMETER_KEEP_BEST;

	public static final String PARAMETER_MUTATION_TYPE = ESOptimization.PARAMETER_MUTATION_TYPE;

	public static final String PARAMETER_SELECTION_TYPE = ESOptimization.PARAMETER_SELECTION_TYPE;

	public static final String PARAMETER_CROSSOVER_PROB = ESOptimization.PARAMETER_CROSSOVER_PROB;

	public static final String PARAMETER_SHOW_CONVERGENCE_PLOT = ESOptimization.PARAMETER_SHOW_CONVERGENCE_PLOT;

	public static final String PARAMETER_SPECIFIY_POPULATION_SIZE = ESOptimization.PARAMETER_SPECIFIY_POPULATION_SIZE;

	// private IOContainer input;

	/** The actual optimizer; reset to null after execution to free memory. */
	private ESOptimization optimizer;

	/** Best fitness ever seen; preserved after the optimizer has been discarded. */
	private double bestFitnessEver = Double.NaN;

	/** Best fitness of the last generation; preserved after the optimizer has been discarded. */
	private double lastGenerationsPerformance = Double.NaN;

	/** The operators for which parameters should be optimized. */
	private Operator[] operators;

	/** The names of the parameters which should be optimized. */
	private String[] parameters;

	/** The parameter types. */
	private OptimizationValueType[] types;

	public EvolutionaryParameterOptimizationOperator(OperatorDescription description) {
		super(description);
		// Expose the best fitness as a loggable operator value.
		addValue(new ValueDouble("best", "best performance ever") {

			@Override
			public double getDoubleValue() {
				return bestFitnessEver;
			}
		});
	}

	public Operator[] getOptimizationOperators() {
		return this.operators;
	}

	public String[] getOptimizationParameters() {
		return this.parameters;
	}

	public OptimizationValueType[] getOptimizationValueTypes() {
		return this.types;
	}

	@Override
	public int getParameterValueMode() {
		return VALUE_MODE_CONTINUOUS;
	}

	@Override
	public double getCurrentBestPerformance() {
		// must make this check, because optimizer will be set null to tidy up after execution
		if (optimizer != null) {
			return optimizer.getBestFitnessInGeneration();
		} else {
			return lastGenerationsPerformance;
		}
	}

	@Override
	public void doWork() throws OperatorException {
		// check parameter values list
		List<ParameterValues> parameterValuesList = parseParameterValues(getParameterList("parameters"));
		if (parameterValuesList == null) {
			throw new UserError(this, 922);
		}
		// Only ranges (e.g. [2;5.7]) are supported by the ES optimizer: remove everything else.
		for (Iterator<ParameterValues> iterator = parameterValuesList.iterator(); iterator.hasNext();) {
			ParameterValues parameterValues = iterator.next();
			if (!(parameterValues instanceof ParameterValueRange)) {
				getLogger()
						.warning(
								"Found (and deleted) unsupported parameter value definition. Parameters have to be given as range (e.g. as [2;5.7]).");
				iterator.remove();
			}
		}
		if (parameterValuesList.isEmpty()) {
			throw new UserError(this, 922);
		}

		// get parameters to optimize
		this.operators = new Operator[parameterValuesList.size()];
		this.parameters = new String[parameterValuesList.size()];
		double[] min = new double[parameterValuesList.size()];
		double[] max = new double[parameterValuesList.size()];
		this.types = new OptimizationValueType[parameterValuesList.size()];

		int index = 0;
		for (Iterator<ParameterValues> iterator = parameterValuesList.iterator(); iterator.hasNext();) {
			ParameterValueRange parameterValueRange = (ParameterValueRange) iterator.next();
			ParameterType targetType = parameterValueRange.getParameterType();
			// BUGFIX: validate the parameter type BEFORE dereferencing it. The previous code
			// called getParameterType().getKey() (and parsed min/max) first, which raised a
			// NullPointerException instead of the intended UserError 906 for unknown parameters.
			if (targetType == null) {
				throw new UserError(this, 906, parameterValueRange.getOperator() + "." + parameterValueRange.getKey());
			}
			operators[index] = parameterValueRange.getOperator();
			parameters[index] = targetType.getKey();
			min[index] = Double.valueOf(parameterValueRange.getMin());
			max[index] = Double.valueOf(parameterValueRange.getMax());
			if (targetType instanceof ParameterTypeDouble) {
				types[index] = OptimizationValueType.VALUE_TYPE_DOUBLE;
				getLogger().fine("Parameter type of parameter " + targetType.getKey() + ": double");
			} else if (targetType instanceof ParameterTypeInt) {
				types[index] = OptimizationValueType.VALUE_TYPE_INT;
				getLogger().fine("Parameter type of parameter " + targetType.getKey() + ": int");
			} else {
				// Only numeric (int/double) parameters can be optimized by this strategy.
				throw new UserError(this, 909, targetType.getKey());
			}
			index++;
		}

		// create and start optimizer
		RandomGenerator random = RandomGenerator.getRandomGenerator(this);
		this.optimizer = createOptimizer(random);
		for (int i = 0; i < min.length; i++) {
			this.optimizer.setMin(i, min[i]);
			this.optimizer.setMax(i, max[i]);
			this.optimizer.setValueType(i, types[i]);
		}
		optimizer.optimize();

		// create result and return it
		double[] bestParameters = optimizer.getBestValuesEver();
		String[] bestValues = null;
		if (bestParameters != null) {
			bestValues = new String[bestParameters.length];
			for (int i = 0; i < bestParameters.length; i++) {
				if (types[i].equals(OptimizationValueType.VALUE_TYPE_DOUBLE)) {
					bestValues[i] = bestParameters[i] + "";
				} else {
					// Integer parameters are rounded to the nearest int.
					bestValues[i] = (int) Math.round(bestParameters[i]) + "";
				}
			}
		} else {
			bestValues = new String[operators.length];
			for (int i = 0; i < bestValues.length; i++) {
				bestValues[i] = "unknown";
			}
		}
		ParameterSet bestSet = new ParameterSet(operators, parameters, bestValues, optimizer.getBestPerformanceEver());

		// freeing memory, but saving best value before
		this.bestFitnessEver = optimizer.getBestFitnessEver();
		this.lastGenerationsPerformance = optimizer.getBestFitnessInGeneration();
		this.optimizer = null;

		deliver(bestSet);
	}

	/** Creates the evolutionary-strategies optimizer configured from this operator's parameters. */
	protected ESOptimization createOptimizer(RandomGenerator random) throws UndefinedParameterError {
		return new ESParameterOptimization(this, operators.length, ESOptimization.INIT_TYPE_RANDOM,
				getParameterAsInt(PARAMETER_MAX_GENERATIONS), getParameterAsInt(PARAMETER_GENERATIONS_WITHOUT_IMPROVAL),
				getParameterAsInt(PARAMETER_POPULATION_SIZE), getParameterAsInt(PARAMETER_SELECTION_TYPE),
				getParameterAsDouble(PARAMETER_TOURNAMENT_FRACTION), getParameterAsBoolean(PARAMETER_KEEP_BEST),
				getParameterAsInt(PARAMETER_MUTATION_TYPE), getParameterAsDouble(PARAMETER_CROSSOVER_PROB),
				getParameterAsBoolean(PARAMETER_SHOW_CONVERGENCE_PLOT), random, this);
	}

	public ESOptimization getOptimization() {
		return optimizer;
	}

	/**
	 * Applies an individual's values to the target operators' parameters and then
	 * delegates evaluation to getPerformanceVector().
	 */
	public PerformanceVector setParametersAndEvaluate(Individual individual) throws OperatorException {
		double[] currentValues = individual.getValues();
		for (int j = 0; j < currentValues.length; j++) {
			String value;
			if (types[j].equals(OptimizationValueType.VALUE_TYPE_DOUBLE)) {
				value = currentValues[j] + "";
			} else {
				value = (int) Math.round(currentValues[j]) + "";
			}
			operators[j].getParameters().setParameter(parameters[j], value);
			getLogger().fine(operators[j] + "." + parameters[j] + " = " + value);
		}
		return getPerformanceVector();
	}

	@Override
	public List<ParameterType> getParameterTypes() {
		// Local renamed from 'types' so it no longer shadows the field of the same name.
		List<ParameterType> parameterTypes = super.getParameterTypes();
		parameterTypes.addAll(ESOptimization.getParameterTypes(this));
		return parameterTypes;
	}

	public int getNumberOfOptimizationParameters() {
		// NOTE(review): only valid after doWork() has populated 'parameters'.
		return this.parameters.length;
	}
}
| agpl-3.0 |
sbliven/biojava | biojava3-core/src/main/java/org/biojava3/core/sequence/compound/RNACompoundSet.java | 885 | package org.biojava3.core.sequence.compound;
import org.biojava3.core.sequence.template.AbstractNucleotideCompoundSet;
/**
*
* @author Andy Yates
*/
public class RNACompoundSet extends AbstractNucleotideCompoundSet<NucleotideCompound> {

	// Initialization-on-demand holder idiom: the singleton is created lazily and
	// thread-safely by JVM class initialization on first access of INSTANCE.
	private static class InitaliseOnDemand {
		public static final RNACompoundSet INSTANCE = new RNACompoundSet();
	}

	/** @return the shared singleton RNA compound set */
	public static RNACompoundSet getRNACompoundSet() {
		return InitaliseOnDemand.INSTANCE;
	}

	/**
	 * Registers the standard RNA bases with their Watson-Crick complements,
	 * plus the ambiguous base N (self-complementary) and the gap character "-".
	 */
	public RNACompoundSet() {
		addNucleotideCompound("A", "U");
		addNucleotideCompound("U", "A");
		addNucleotideCompound("G", "C");
		addNucleotideCompound("C", "G");
		addNucleotideCompound("N", "N");
		addNucleotideCompound("-", "-");
	}

	public NucleotideCompound newNucleotideCompound(String base, String complement, String... equivalents) {
		// NOTE(review): 'equivalents' is ignored here — presumably intentional for plain RNA,
		// but confirm against the superclass contract.
		return new NucleotideCompound(base, this, complement);
	}
}
JordanReiter/railo | railo-java/railo-core/src/railo/runtime/interpreter/ref/op/Or.java | 829 | package railo.runtime.interpreter.ref.op;
import railo.runtime.PageContext;
import railo.runtime.exp.PageException;
import railo.runtime.interpreter.ref.Ref;
import railo.runtime.interpreter.ref.RefSupport;
import railo.runtime.op.Caster;
/**
 * Logical OR of two expression references. Evaluation short-circuits:
 * the right operand is only evaluated when the left one is false.
 * (The original header said "Plus operation" — a copy-paste leftover.)
 */
public final class Or extends RefSupport implements Ref {

	private Ref right;
	private Ref left;

	/**
	 * Creates the OR operation over the two operands.
	 *
	 * @param left left operand
	 * @param right right operand
	 */
	public Or(Ref left, Ref right) {
		this.left = left;
		this.right = right;
	}

	@Override
	public Object getValue(PageContext pc) throws PageException {
		// Short-circuit: skip the right operand when the left is already true.
		if (Caster.toBooleanValue(left.getValue(pc))) {
			return Boolean.TRUE;
		}
		return Caster.toBooleanValue(right.getValue(pc)) ? Boolean.TRUE : Boolean.FALSE;
	}

	@Override
	public String getTypeName() {
		return "operation";
	}
}
| lgpl-2.1 |
CustomizedTools/CustomizedTools | core/src/main/java/com/customized/tools/commands/StringGroup.java | 1944 | /*
* JBoss, Home of Professional Open Source
* Copyright 2014 Red Hat Inc. and/or its affiliates and other contributors
* as indicated by the @authors tag. All rights reserved.
* See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.customized.tools.commands;
import org.jboss.aesh.parser.Parser;
/**
 * Fixed-size collection of strings that tracks the length of the longest
 * entry and can left/right pad entries to that common width.
 *
 * @author <a href="mailto:stale.pedersen@jboss.org">Ståle W. Pedersen</a>
 */
public class StringGroup {

	private String[] strings;
	private int maxLength = 0;

	public StringGroup(int size) {
		strings = new String[size];
	}

	/** Length of the longest string added so far. */
	public int largestString() {
		return maxLength;
	}

	public String[] getStrings() {
		return strings;
	}

	/** Stores {@code s} at {@code place} and updates the running maximum length. */
	public void addString(String s, int place) {
		strings[place] = s;
		maxLength = Math.max(maxLength, s.length());
	}

	public String getString(int place) {
		return strings[place];
	}

	/** Entry left-padded to one column wider than the longest entry. */
	public String getFormattedString(int place) {
		return Parser.padLeft(maxLength + 1, strings[place]);
	}

	/** Entry right-padded to the longest width, preceded by a single space. */
	public String getFormattedStringPadRight(int place) {
		return " " + Parser.padRight(maxLength, strings[place]);
	}

	/** Left-pads every stored entry in place to the common maximum width. */
	public void formatStringsBasedOnMaxLength() {
		for (int idx = 0; idx < strings.length; idx++) {
			strings[idx] = Parser.padLeft(maxLength, strings[idx]);
		}
	}
}
| lgpl-2.1 |
lucee/unoffical-Lucee-no-jre | source/java/core/src/lucee/commons/db/DBUtil.java | 2386 | /**
* Copyright (c) 2014, the Railo Company Ltd.
* Copyright (c) 2015, Lucee Assosication Switzerland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
*/
package lucee.commons.db;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.Statement;
/**
 * Small JDBC helpers following the project's "EL" (exception-less) convention:
 * every call is best-effort, silently swallowing any Throwable, and null
 * arguments are simply skipped.
 */
public final class DBUtil {

	/** Best-effort {@link Connection#setAutoCommit(boolean)}; no-op on null/failure. */
	public static void setAutoCommitEL(Connection conn, boolean b) {
		if (conn == null) {
			return;
		}
		try {
			conn.setAutoCommit(b);
		}
		catch (Throwable t) {
			// intentionally ignored (best effort)
		}
	}

	/** Best-effort {@link Connection#setReadOnly(boolean)}; no-op on null/failure. */
	public static void setReadOnlyEL(Connection conn, boolean b) {
		if (conn == null) {
			return;
		}
		try {
			conn.setReadOnly(b);
		}
		catch (Throwable t) {
			// intentionally ignored (best effort)
		}
	}

	/** Best-effort {@link Connection#commit()}; no-op on null/failure. */
	public static void commitEL(Connection conn) {
		if (conn == null) {
			return;
		}
		try {
			conn.commit();
		}
		catch (Throwable t) {
			// intentionally ignored (best effort)
		}
	}

	/** Best-effort {@link Connection#setTransactionIsolation(int)}; no-op on null/failure. */
	public static void setTransactionIsolationEL(Connection conn, int level) {
		if (conn == null) {
			return;
		}
		try {
			conn.setTransactionIsolation(level);
		}
		catch (Throwable t) {
			// intentionally ignored (best effort)
		}
	}

	/** Best-effort close of a Statement; no-op on null/failure. */
	public static void closeEL(Statement stat) {
		if (stat == null) {
			return;
		}
		try {
			stat.close();
		}
		catch (Throwable t) {
			// intentionally ignored (best effort)
		}
	}

	/** Best-effort close of a ResultSet; no-op on null/failure. */
	public static void closeEL(ResultSet rs) {
		if (rs == null) {
			return;
		}
		try {
			rs.close();
		}
		catch (Throwable t) {
			// intentionally ignored (best effort)
		}
	}
}
Alfresco/community-edition | projects/remote-api/source/test-java/org/alfresco/rest/api/tests/client/data/Site.java | 1445 | /*
* #%L
* Alfresco Remote API
* %%
* Copyright (C) 2005 - 2016 Alfresco Software Limited
* %%
* This file is part of the Alfresco software.
* If the software was purchased under a paid Alfresco license, the terms of
* the paid license agreement will prevail. Otherwise, the software is
* provided under the following open source license terms:
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
package org.alfresco.rest.api.tests.client.data;
import org.json.simple.JSONObject;
/**
 * Client-side view of a site used by the remote-API tests; implementations can
 * render themselves as JSON via {@link JSONAble}.
 */
public interface Site extends JSONAble
{
    /** @return the created flag (nullable). */
    Boolean getCreated();

    /** @return the site GUID. */
    String getGuid();

    /** @return the identifier of the network the site belongs to. */
    String getNetworkId();

    /**
     * @return whether the site has been created.
     * NOTE(review): coexists with getCreated() — presumably equivalent accessors; confirm
     * against implementations.
     */
    Boolean isCreated();

    /** @return the short site identifier. */
    String getSiteId();

    /** @return the site title. */
    String getTitle();

    /** @return the site description. */
    String getDescription();

    /** @return the site visibility as a string — TODO confirm the exact value domain. */
    String getVisibility();

    /** @return the site type. */
    String getType();

    /** @return the caller's role in the site. */
    SiteRole getRole();

    /**
     * Compares this site against the given expected object.
     * NOTE(review): semantics inferred from the name (assertion-style check used in tests);
     * confirm against implementations.
     */
    void expected(Object o);

    /** @return a JSON representation of this site. */
    JSONObject toJSON();
}
| lgpl-3.0 |
PiRSquared17/zildo | zildo/src/zildo/monde/dialog/DialogTopic.java | 1923 | /**
* The Land of Alembrum
* Copyright (C) 2006-2013 Evariste Boussaton
*
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package zildo.monde.dialog;
// DialogTopic.cpp: implementation of the DialogTopic class.
//
//////////////////////////////////////////////////////////////////////
/**
 * A dialog topic: a numeric identifier, a display name, and a flag telling
 * whether the player can currently access it.
 */
public class DialogTopic {

	private int topicId;
	private String topicName;
	private boolean accessible;

	/** Creates an empty topic (id 0, empty name) that is not accessible. */
	public DialogTopic() {
		this(0, "");
	}

	/**
	 * Creates a named topic. By default the topic is not accessible to the player.
	 */
	public DialogTopic(int topicId, String topicName) {
		this.topicId = topicId;
		this.topicName = topicName;
		// Default : not accessible for player
		this.accessible = false;
	}

	public int getTopicId() {
		return topicId;
	}

	public void setTopicId(int topicId) {
		this.topicId = topicId;
	}

	public String getTopicName() {
		return topicName;
	}

	public void setTopicName(String topicName) {
		this.topicName = topicName;
	}

	public boolean isAccessible() {
		return accessible;
	}

	public void setAccessible(boolean accessible) {
		this.accessible = accessible;
	}
}
syzer/incubator-tamaya | modules/events/src/main/java/org/apache/tamaya/events/delta/ChangeType.java | 956 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.tamaya.events.delta;
/**
 * The kind of change recorded for an entry in a delta/change event.
 *
 * Created by Anatole on 20.02.2015.
 */
public enum ChangeType {
    /** The entry did not exist before and was added. */
    NEW,
    /** The entry existed before and was removed. */
    DELETED,
    /** The entry existed before and its value changed. */
    UPDATED,
}
| apache-2.0 |
djechelon/spring-security | config/src/test/java/org/springframework/security/config/debug/SecurityDebugBeanFactoryPostProcessorTests.java | 1895 | /*
* Copyright 2002-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.config.debug;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.security.config.BeanIds;
import org.springframework.security.config.test.SpringTestContext;
import org.springframework.security.config.test.SpringTestContextExtension;
import org.springframework.security.web.FilterChainProxy;
import org.springframework.security.web.debug.DebugFilter;
import static org.assertj.core.api.Assertions.assertThat;
/**
* @author Rob Winch
* @author Josh Cummings
*/
@ExtendWith(SpringTestContextExtension.class)
public class SecurityDebugBeanFactoryPostProcessorTests {

	// Loads and tears down the XML application context around each test
	// (managed by SpringTestContextExtension).
	public final SpringTestContext spring = new SpringTestContext(this);

	/**
	 * SEC-1885 regression test: with debug mode enabled, the
	 * springSecurityFilterChain bean must be the DebugFilter wrapper while the
	 * underlying bean remains a FilterChainProxy, even when a dependency has an
	 * autowired constructor.
	 */
	@Test
	public void contextRefreshWhenInDebugModeAndDependencyHasAutowiredConstructorThenDebugModeStillWorks() {
		// SEC-1885
		this.spring.configLocations(
				"classpath:org/springframework/security/config/debug/SecurityDebugBeanFactoryPostProcessorTests-context.xml")
				.autowire();
		assertThat(this.spring.getContext().getBean(BeanIds.SPRING_SECURITY_FILTER_CHAIN))
				.isInstanceOf(DebugFilter.class);
		assertThat(this.spring.getContext().getBean(BeanIds.FILTER_CHAIN_PROXY)).isInstanceOf(FilterChainProxy.class);
	}
}
| apache-2.0 |
ankitsinghal/phoenix | phoenix-core/src/main/java/org/apache/phoenix/monitoring/MetricsStopWatch.java | 2055 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.monitoring;
import org.apache.phoenix.util.PhoenixStopWatch;
/**
 * Stop watch that is aware of whether metrics collection is enabled.
 * When metrics are disabled every operation is a no-op (elapsed time reports 0);
 * otherwise calls are delegated to a {@link PhoenixStopWatch}.
 */
final class MetricsStopWatch {

	private final boolean isMetricsEnabled;
	private final PhoenixStopWatch stopwatch;

	MetricsStopWatch(boolean isMetricsEnabled) {
		this.isMetricsEnabled = isMetricsEnabled;
		this.stopwatch = new PhoenixStopWatch();
	}

	void start() {
		if (!isMetricsEnabled) {
			return;
		}
		stopwatch.start();
	}

	void stop() {
		// Only stop a watch that was actually started; stopping twice is a no-op.
		if (isMetricsEnabled && stopwatch.isRunning()) {
			stopwatch.stop();
		}
	}

	boolean isRunning() {
		return isMetricsEnabled && stopwatch.isRunning();
	}

	long getElapsedTimeInMs() {
		return isMetricsEnabled ? stopwatch.elapsedMillis() : 0;
	}

	@org.apache.phoenix.thirdparty.com.google.common.annotations.VisibleForTesting
	final boolean getMetricsEnabled() {
		return isMetricsEnabled;
	}
}
| apache-2.0 |
vineetgarg02/hive | ql/src/java/org/apache/hadoop/hive/ql/exec/mr/ExecDriver.java | 32195 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.mr;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.Serializable;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.security.AccessController;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hive.ql.exec.AddToClassPathAction;
import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
import org.apache.hadoop.hive.ql.log.LogDivertAppenderForTest;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.common.CompressionUtils;
import org.apache.hadoop.hive.common.JavaUtils;
import org.apache.hadoop.hive.common.LogUtils;
import org.apache.hadoop.hive.common.LogUtils.LogInitializationException;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.conf.HiveConfUtil;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.TaskQueue;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.QueryPlan;
import org.apache.hadoop.hive.ql.QueryState;
import org.apache.hadoop.hive.ql.exec.FetchOperator;
import org.apache.hadoop.hive.ql.exec.HiveTotalOrderPartitioner;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.OperatorUtils;
import org.apache.hadoop.hive.ql.exec.PartitionKeySampler;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
import org.apache.hadoop.hive.ql.io.BucketizedHiveInputFormat;
import org.apache.hadoop.hive.ql.io.HiveFileFormatUtils;
import org.apache.hadoop.hive.ql.io.HiveKey;
import org.apache.hadoop.hive.ql.io.HiveOutputFormatImpl;
import org.apache.hadoop.hive.ql.io.IOPrepareCache;
import org.apache.hadoop.hive.ql.log.LogDivertAppender;
import org.apache.hadoop.hive.ql.log.NullAppender;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.api.StageType;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.apache.hadoop.hive.ql.stats.StatsCollectionContext;
import org.apache.hadoop.hive.ql.stats.StatsFactory;
import org.apache.hadoop.hive.ql.stats.StatsPublisher;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.common.util.HiveStringUtils;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.Appender;
import org.apache.logging.log4j.core.appender.FileAppender;
import org.apache.logging.log4j.core.appender.RollingFileAppender;
/**
* ExecDriver is the central class in co-ordinating execution of any map-reduce task.
* It's main responsibilities are:
*
* - Converting the plan (MapredWork) into a MR Job (JobConf)
* - Submitting a MR job to the cluster via JobClient and ExecHelper
* - Executing MR job in local execution mode (where applicable)
*
*/
public class ExecDriver extends Task<MapredWork> implements Serializable, HadoopJobExecHook {

  private static final long serialVersionUID = 1L;

  // Name of the serialized job configuration file written by generateCmdLine().
  private static final String JOBCONF_FILENAME = "jobconf.xml";

  // Hadoop job configuration submitted by this task; rebuilt in initialize().
  protected transient JobConf job;

  // Assigned in main() (local-task branch) to observe child JVM memory usage.
  public static MemoryMXBean memoryMXBean;

  // Polls the running MR job and relays progress/counters to the console.
  protected HadoopJobExecHelper jobExecHelper;

  // True once shutdown() has been invoked on this task.
  private transient boolean isShutdown = false;

  // Guards against killing the same MR job twice (see killJob()).
  private transient boolean jobKilled = false;

  protected static transient final Logger LOG = LoggerFactory.getLogger(ExecDriver.class);

  // Handle of the submitted MR job; null until jc.submitJob() succeeds in execute().
  private RunningJob rj;

  /**
   * Constructor when invoked from QL. Builds a default JobConf; initialize()
   * later replaces it with one derived from the session configuration.
   */
  public ExecDriver() {
    super();
    console = new LogHelper(LOG);
    job = new JobConf(ExecDriver.class);
    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
  }
  /** This task type always participates in query locking. */
  @Override
  public boolean requireLock() {
    return true;
  }
private void initializeFiles(String prop, String files) {
if (files != null && files.length() > 0) {
job.set(prop, files);
}
}
/**
* Retrieve the resources from the current session and configuration for the given type.
* @return Comma-separated list of resources
*/
protected static String getResource(HiveConf conf, SessionState.ResourceType resType) {
switch(resType) {
case JAR:
String addedJars = Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR);
String auxJars = conf.getAuxJars();
String reloadableAuxJars = SessionState.get() == null ? null : SessionState.get().getReloadableAuxJars();
return HiveStringUtils.joinIgnoringEmpty(new String[]{addedJars, auxJars, reloadableAuxJars}, ',');
case FILE:
return Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);
case ARCHIVE:
return Utilities.getResourceFiles(conf, SessionState.ResourceType.ARCHIVE);
}
return null;
}
/**
* Initialization when invoked from QL.
*/
  @Override
  public void initialize(QueryState queryState, QueryPlan queryPlan, TaskQueue taskQueue, Context context) {
    super.initialize(queryState, queryPlan, taskQueue, context);
    // Rebuild the JobConf from the session configuration instead of reusing the
    // default one created in the constructor.
    job = new JobConf(conf, ExecDriver.class);
    // Ship the session's jars/files/archives with the MR job.
    initializeFiles("tmpjars", getResource(conf, SessionState.ResourceType.JAR));
    initializeFiles("tmpfiles", getResource(conf, SessionState.ResourceType.FILE));
    initializeFiles("tmparchives", getResource(conf, SessionState.ResourceType.ARCHIVE));
    // Keep hidden/sensitive configuration values out of the submitted job conf.
    conf.stripHiddenConfigurations(job);
    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
  }
/**
* Constructor/Initialization for invocation as independent utility.
*/
  // Used by main() when this class runs as a standalone child-JVM utility;
  // the caller supplies the deserialized plan and a fully built JobConf.
  public ExecDriver(MapredWork plan, JobConf job, boolean isSilent) throws HiveException {
    setWork(plan);
    this.job = job;
    console = new LogHelper(LOG, isSilent);
    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
  }
/**
* Fatal errors are those errors that cannot be recovered by retries. These are application
* dependent. Examples of fatal errors include: - the small table in the map-side joins is too
* large to be feasible to be handled by one mapper. The job should fail and the user should be
* warned to use regular joins rather than map-side joins. Fatal errors are indicated by counters
* that are set at execution time. If the counter is non-zero, a fatal error occurred. The value
* of the counter indicates the error type.
*
* @return true if fatal errors happened during job execution, false otherwise.
*/
  @Override
  public boolean checkFatalErrors(Counters ctrs, StringBuilder errMsg) {
    // Operators report fatal conditions by incrementing HIVE_COUNTER_FATAL in
    // the configured Hive counter group; any non-zero value means a fatal error.
    // NOTE(review): errMsg is never populated here — callers inspecting it get
    // nothing. Confirm whether an error description should be appended.
    Counters.Counter cntr = ctrs.findCounter(
        HiveConf.getVar(job, HiveConf.ConfVars.HIVECOUNTERGROUP),
        Operator.HIVE_COUNTER_FATAL);
    return cntr != null && cntr.getValue() > 0;
  }
/**
* Execute a query plan using Hadoop.
*/
  @SuppressWarnings({"deprecation", "unchecked"})
  @Override
  public int execute() {
    IOPrepareCache ioPrepareCache = IOPrepareCache.get();
    ioPrepareCache.clear();

    boolean success = true;
    boolean ctxCreated = false;
    Path emptyScratchDir;
    JobClient jc = null;

    // Bail out early if the query was cancelled before any work was done.
    if (taskQueue.isShutdown()) {
      LOG.warn("Task was cancelled");
      return 5;
    }

    MapWork mWork = work.getMapWork();
    ReduceWork rWork = work.getReduceWork();

    // Create the MR scratch dir used as a stand-in path for empty partitions.
    Context ctx = context;
    try {
      if (ctx == null) {
        ctx = new Context(job);
        ctxCreated = true;
      }
      emptyScratchDir = ctx.getMRTmpPath();
      FileSystem fs = emptyScratchDir.getFileSystem(job);
      fs.mkdirs(emptyScratchDir);
    } catch (IOException e) {
      console.printError("Error launching map-reduce job", "\n"
          + org.apache.hadoop.util.StringUtils.stringifyException(e));
      return 5;
    }

    // --- Phase 1: translate the plan into MR job settings. ---
    HiveFileFormatUtils.prepareJobOutput(job);
    //See the javadoc on HiveOutputFormatImpl and HadoopShims.prepareJobOutput()
    job.setOutputFormat(HiveOutputFormatImpl.class);
    job.setMapRunnerClass(ExecMapRunner.class);
    job.setMapperClass(ExecMapper.class);
    job.setMapOutputKeyClass(HiveKey.class);
    job.setMapOutputValueClass(BytesWritable.class);
    try {
      String partitioner = HiveConf.getVar(job, ConfVars.HIVEPARTITIONER);
      job.setPartitionerClass(JavaUtils.loadClass(partitioner));
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e.getMessage(), e);
    }
    propagateSplitSettings(job, mWork);
    job.setNumReduceTasks(rWork != null ? rWork.getNumReduceTasks().intValue() : 0);
    job.setReducerClass(ExecReducer.class);
    // set input format information if necessary
    setInputAttributes(job);
    // HIVE-23354 enforces that MR speculative execution is disabled
    job.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, false);
    job.setBoolean(MRJobConfig.MAP_SPECULATIVE, false);
    String inpFormat = HiveConf.getVar(job, HiveConf.ConfVars.HIVEINPUTFORMAT);
    if (mWork.isUseBucketizedHiveInputFormat()) {
      inpFormat = BucketizedHiveInputFormat.class.getName();
    }
    LOG.info("Using " + inpFormat);
    try {
      job.setInputFormat(JavaUtils.loadClass(inpFormat));
    } catch (ClassNotFoundException e) {
      throw new RuntimeException(e.getMessage(), e);
    }
    // No-Op - we don't really write anything here ..
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    int returnVal = 0;
    boolean noName = StringUtils.isEmpty(job.get(MRJobConfig.JOB_NAME));
    if (noName) {
      // This is for a special case to ensure unit tests pass
      job.set(MRJobConfig.JOB_NAME,
          "JOB" + ThreadLocalRandom.current().nextInt());
    }

    try {
      // --- Phase 2: stage map-join hash tables (skipped in local mode): tar
      // the locally staged files, copy to HDFS, add to the distributed cache.
      MapredLocalWork localwork = mWork.getMapRedLocalWork();
      if (localwork != null && localwork.hasStagedAlias()) {
        if (!ShimLoader.getHadoopShims().isLocalMode(job)) {
          Path localPath = localwork.getTmpPath();
          Path hdfsPath = mWork.getTmpHDFSPath();
          FileSystem hdfs = hdfsPath.getFileSystem(job);
          FileSystem localFS = localPath.getFileSystem(job);
          FileStatus[] hashtableFiles = localFS.listStatus(localPath);
          int fileNumber = hashtableFiles.length;
          String[] fileNames = new String[fileNumber];
          for (int i = 0; i < fileNumber; i++) {
            fileNames[i] = hashtableFiles[i].getPath().getName();
          }
          //package and compress all the hashtable files to an archive file
          String stageId = this.getId();
          String archiveFileName = Utilities.generateTarFileName(stageId);
          localwork.setStageID(stageId);
          CompressionUtils.tar(localPath.toUri().getPath(), fileNames, archiveFileName);
          Path archivePath = Utilities.generateTarPath(localPath, stageId);
          LOG.info("Archive " + hashtableFiles.length + " hash table files to " + archivePath);
          //upload archive file to hdfs
          Path hdfsFilePath = Utilities.generateTarPath(hdfsPath, stageId);
          short replication = (short) job.getInt("mapred.submit.replication", 10);
          hdfs.copyFromLocalFile(archivePath, hdfsFilePath);
          hdfs.setReplication(hdfsFilePath, replication);
          LOG.info("Upload 1 archive file from" + archivePath + " to: " + hdfsFilePath);
          //add the archive file to distributed cache
          DistributedCache.createSymlink(job);
          DistributedCache.addCacheArchive(hdfsFilePath.toUri(), job);
          LOG.info("Add 1 archive file to distributed cache. Archive file: " + hdfsFilePath.toUri());
        }
      }
      work.configureJobConf(job);
      List<Path> inputPaths = Utilities.getInputPaths(job, mWork, emptyScratchDir, ctx, false);
      Utilities.setInputPaths(job, inputPaths);
      Utilities.setMapRedWork(job, work, ctx.getMRTmpPath());

      // --- Phase 3: optional sampling for total-order sort; falls back to a
      // single reducer when sampling fails or there is not enough data.
      if (mWork.getSamplingType() > 0 && rWork != null && job.getNumReduceTasks() > 1) {
        try {
          handleSampling(ctx, mWork, job);
          job.setPartitionerClass(HiveTotalOrderPartitioner.class);
        } catch (IllegalStateException e) {
          console.printInfo("Not enough sampling data.. Rolling back to single reducer task");
          rWork.setNumReduceTasks(1);
          job.setNumReduceTasks(1);
        } catch (Exception e) {
          LOG.error("Sampling error", e);
          console.printError(e.toString(),
              "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
          rWork.setNumReduceTasks(1);
          job.setNumReduceTasks(1);
        }
      }

      jc = new JobClient(job);
      // make this client wait if job tracker is not behaving well.
      Throttle.checkJobTracker(job, LOG);

      if (mWork.isGatheringStats() || (rWork != null && rWork.isGatheringStats())) {
        // initialize stats publishing table
        StatsPublisher statsPublisher;
        StatsFactory factory = StatsFactory.newFactory(job);
        if (factory != null) {
          statsPublisher = factory.getStatsPublisher();
          List<String> statsTmpDir = Utilities.getStatsTmpDirs(mWork, job);
          if (rWork != null) {
            statsTmpDir.addAll(Utilities.getStatsTmpDirs(rWork, job));
          }
          StatsCollectionContext sc = new StatsCollectionContext(job);
          sc.setStatsTmpDirs(statsTmpDir);
          if (!statsPublisher.init(sc)) { // creating stats table if not exists
            // A failed publisher only fails the query when reliable stats are required.
            if (HiveConf.getBoolVar(job, HiveConf.ConfVars.HIVE_STATS_RELIABLE)) {
              throw
                new HiveException(ErrorMsg.STATSPUBLISHER_INITIALIZATION_ERROR.getErrorCodedMsg());
            }
          }
        }
      }

      Utilities.createTmpDirs(job, mWork);
      Utilities.createTmpDirs(job, rWork);

      SessionState ss = SessionState.get();
      // TODO: why is there a TezSession in MR ExecDriver?
      if (ss != null && HiveConf.getVar(job, ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
        // TODO: this is the only place that uses keepTmpDir. Why?
        TezSessionPoolManager.closeIfNotDefault(ss.getTezSession(), true);
      }

      HiveConfUtil.updateJobCredentialProviders(job);

      // --- Phase 4: submit and monitor. Re-check cancellation right before and
      // right after submission; a post-submit cancel must also kill the job.
      // Finally SUBMIT the JOB!
      if (taskQueue.isShutdown()) {
        LOG.warn("Task was cancelled");
        return 5;
      }
      rj = jc.submitJob(job);

      if (taskQueue.isShutdown()) {
        LOG.warn("Task was cancelled");
        killJob();
        return 5;
      }

      this.jobID = rj.getJobID();
      updateStatusInQueryDisplay();
      // Blocks until the job completes, relaying progress to the console.
      returnVal = jobExecHelper.progress(rj, jc, ctx);
      success = (returnVal == 0);
    } catch (Exception e) {
      setException(e);
      String mesg = " with exception '" + Utilities.getNameMessage(e) + "'";
      if (rj != null) {
        mesg = "Ended Job = " + rj.getJobID() + mesg;
      } else {
        mesg = "Job Submission failed" + mesg;
      }
      // Has to use full name to make sure it does not conflict with
      // org.apache.commons.lang3.StringUtils
      console.printError(mesg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
      success = false;
      returnVal = 1;
    } finally {
      // Cleanup runs on every exit path after configuration, including the
      // early "cancelled" returns inside the try block above.
      Utilities.clearWork(job);
      try {
        if (ctxCreated) {
          ctx.clear();
        }
        if (rj != null) {
          // A non-zero return value means the job did not finish cleanly; make
          // sure it is not left running on the cluster.
          if (returnVal != 0) {
            killJob();
          }
          jobID = rj.getID().toString();
        }
        if (jc != null) {
          jc.close();
        }
      } catch (Exception e) {
        LOG.warn("Failed while cleaning up ", e);
      } finally {
        HadoopJobExecHelper.runningJobs.remove(rj);
      }
    }

    // --- Phase 5: commit. get the list of Dynamic partition paths
    try {
      if (rj != null) {
        if (mWork.getAliasToWork() != null) {
          for (Operator<? extends OperatorDesc> op : mWork.getAliasToWork().values()) {
            op.jobClose(job, success);
          }
        }
        if (rWork != null) {
          rWork.getReducer().jobClose(job, success);
        }
      }
    } catch (Exception e) {
      // jobClose needs to execute successfully otherwise fail task
      if (success) {
        setException(e);
        success = false;
        returnVal = 3;
        String mesg = "Job Commit failed with exception '" + Utilities.getNameMessage(e) + "'";
        console.printError(mesg, "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
      }
    }
    return (returnVal);
  }
public static void propagateSplitSettings(JobConf job, MapWork work) {
if (work.getNumMapTasks() != null) {
job.setNumMapTasks(work.getNumMapTasks().intValue());
}
if (work.getMaxSplitSize() != null) {
HiveConf.setLongVar(job, HiveConf.ConfVars.MAPREDMAXSPLITSIZE, work.getMaxSplitSize().longValue());
}
if (work.getMinSplitSize() != null) {
HiveConf.setLongVar(job, HiveConf.ConfVars.MAPREDMINSPLITSIZE, work.getMinSplitSize().longValue());
}
if (work.getMinSplitSizePerNode() != null) {
HiveConf.setLongVar(job, HiveConf.ConfVars.MAPREDMINSPLITSIZEPERNODE, work.getMinSplitSizePerNode().longValue());
}
if (work.getMinSplitSizePerRack() != null) {
HiveConf.setLongVar(job, HiveConf.ConfVars.MAPREDMINSPLITSIZEPERRACK, work.getMinSplitSizePerRack().longValue());
}
}
  /**
   * Prepares the partition-key file consumed by the total-order partitioner,
   * either by merging sampling output left by a previous MR job
   * (SAMPLING_ON_PREV_MR) or by sampling rows from the input right now
   * (SAMPLING_ON_START).
   */
  private void handleSampling(Context context, MapWork mWork, JobConf job)
      throws Exception {
    // Total-order sampling only supports a single source alias.
    assert mWork.getAliasToWork().keySet().size() == 1;
    String alias = mWork.getAliases().get(0);
    Operator<?> topOp = mWork.getAliasToWork().get(alias);
    PartitionDesc partDesc = mWork.getAliasToPartnInfo().get(alias);
    ArrayList<PartitionDesc> parts = mWork.getPartitionDescs();
    List<Path> inputPaths = mWork.getPaths();
    // Location of the partition-key file handed to the partitioner via the shim.
    Path tmpPath = context.getExternalTmpPath(inputPaths.get(0));
    Path partitionFile = new Path(tmpPath, ".partitions");
    ShimLoader.getHadoopShims().setTotalOrderPartitionFile(job, partitionFile);
    PartitionKeySampler sampler = new PartitionKeySampler();
    if (mWork.getSamplingType() == MapWork.SAMPLING_ON_PREV_MR) {
      console.printInfo("Use sampling data created in previous MR");
      // merges sampling data from previous MR and make partition keys for total sort
      for (Path path : inputPaths) {
        FileSystem fs = path.getFileSystem(job);
        for (FileStatus status : fs.globStatus(new Path(path, ".sampling*"))) {
          sampler.addSampleFile(status.getPath(), job);
        }
      }
    } else if (mWork.getSamplingType() == MapWork.SAMPLING_ON_START) {
      console.printInfo("Creating sampling data..");
      assert topOp instanceof TableScanOperator;
      TableScanOperator ts = (TableScanOperator) topOp;
      FetchWork fetchWork;
      if (!partDesc.isPartitioned()) {
        assert inputPaths.size() == 1;
        fetchWork = new FetchWork(inputPaths.get(0), partDesc.getTableDesc());
      } else {
        fetchWork = new FetchWork(inputPaths, parts, partDesc.getTableDesc());
      }
      fetchWork.setSource(ts);
      // random sampling
      FetchOperator fetcher = PartitionKeySampler.createSampler(fetchWork, job, ts);
      try {
        ts.initialize(job, new ObjectInspector[]{fetcher.getOutputObjectInspector()});
        OperatorUtils.setChildrenCollector(ts.getChildOperators(), sampler);
        // Drain all rows through the table scan; the sampler collects the keys.
        while (fetcher.pushRow()) { }
      } finally {
        fetcher.clearFetchContext();
      }
    } else {
      throw new IllegalArgumentException("Invalid sampling type " + mWork.getSamplingType());
    }
    sampler.writePartitionKeys(partitionFile, job);
  }
/**
* Set hive input format, and input format file if necessary.
*/
  protected void setInputAttributes(Configuration conf) {
    MapWork mWork = work.getMapWork();
    // Honor a plan-specified input format override, if any.
    if (mWork.getInputformat() != null) {
      HiveConf.setVar(conf, ConfVars.HIVEINPUTFORMAT, mWork.getInputformat());
    }
    // Intentionally overwrites anything the user may have put here
    conf.setBoolean("hive.input.format.sorted", mWork.isInputFormatSorted());
    // Propagate the session's current database to the tasks if not already set.
    if (HiveConf.getVar(conf, ConfVars.HIVE_CURRENT_DATABASE, (String) null) == null) {
      HiveConf.setVar(conf, ConfVars.HIVE_CURRENT_DATABASE, getCurrentDB());
    }
  }
public static String getCurrentDB() {
String currentDB = null;
if (SessionState.get() != null) {
currentDB = SessionState.get().getCurrentDatabase();
}
return currentDB == null ? "default" : currentDB;
}
  /** @return true if the map phase of the submitted job has started. */
  public boolean mapStarted() {
    return this.jobExecHelper.mapStarted();
  }

  /** @return true if the reduce phase of the submitted job has started. */
  public boolean reduceStarted() {
    return this.jobExecHelper.reduceStarted();
  }

  /** @return true if the map phase of the submitted job has completed. */
  public boolean mapDone() {
    return this.jobExecHelper.mapDone();
  }

  /** @return true if the reduce phase of the submitted job has completed. */
  public boolean reduceDone() {
    return this.jobExecHelper.reduceDone();
  }
private static void printUsage() {
System.err.println("ExecDriver -plan <plan-file> [-jobconffile <job conf file>]"
+ "[-files <file1>[,<file2>] ...]");
System.exit(1);
}
/**
* we are running the hadoop job via a sub-command. this typically happens when we are running
* jobs in local mode. the log4j in this mode is controlled as follows: 1. if the admin provides a
* log4j properties file especially for execution mode - then we pick that up 2. otherwise - we
* default to the regular hive log4j properties if one is supplied 3. if none of the above two
* apply - we don't do anything - the log4j properties would likely be determined by hadoop.
*
* The intention behind providing a separate option #1 is to be able to collect hive run time logs
* generated in local mode in a separate (centralized) location if desired. This mimics the
* behavior of hive run time logs when running against a hadoop cluster where they are available
* on the tasktracker nodes.
*/
  private static void setupChildLog4j(Configuration conf) {
    try {
      // Use the hive-exec log4j configuration for this child process, and wire
      // up the routing appenders used for per-query log capture.
      LogUtils.initHiveExecLog4j();
      LogDivertAppender.registerRoutingAppender(conf);
      LogDivertAppenderForTest.registerRoutingAppenderIfInTest(conf);
    } catch (LogInitializationException e) {
      // Logging setup is best-effort: report the failure and continue without it.
      System.err.println(e.getMessage());
    }
  }
  /**
   * Entry point used when the job runs in a child JVM (typically local-mode
   * execution): parses the command line, rebuilds the configuration, restores
   * the serialized plan, then runs either a MapredLocalTask (-localtask) or a
   * full ExecDriver.
   */
  @SuppressWarnings("unchecked")
  public static void main(String[] args) throws IOException, HiveException {
    String planFileName = null;
    String jobConfFileName = null;
    boolean noLog = false;
    String files = null;
    String libjars = null;
    boolean localtask = false;
    // Parse arguments; ++i past the end raises IndexOutOfBoundsException, which
    // is treated as a missing option argument.
    try {
      for (int i = 0; i < args.length; i++) {
        if (args[i].equals("-plan")) {
          planFileName = args[++i];
        } else if (args[i].equals("-jobconffile")) {
          jobConfFileName = args[++i];
        } else if (args[i].equals("-nolog")) {
          noLog = true;
        } else if (args[i].equals("-files")) {
          files = args[++i];
        } else if (args[i].equals("-libjars")) {
          libjars = args[++i];
        } else if (args[i].equals("-localtask")) {
          localtask = true;
        }
      }
    } catch (IndexOutOfBoundsException e) {
      System.err.println("Missing argument to option");
      printUsage();
    }

    JobConf conf;
    if (localtask) {
      conf = new JobConf(MapredLocalTask.class);
    } else {
      conf = new JobConf(ExecDriver.class);
    }
    if (jobConfFileName != null) {
      conf.addResource(new Path(jobConfFileName));
    }

    // Initialize the resources from command line
    if (files != null) {
      conf.set("tmpfiles", files);
    }
    if (libjars != null) {
      conf.set("tmpjars", libjars);
    }

    // In secure clusters, pass the delegation-token file location on to the job.
    if (UserGroupInformation.isSecurityEnabled()) {
      String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
      if (hadoopAuthToken != null) {
        conf.set("mapreduce.job.credentials.binary", hadoopAuthToken);
      }
    }

    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);

    // Make sure a query id exists for logging context registration below.
    String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID, "").trim();
    if (queryId.isEmpty()) {
      queryId = "unknown-" + System.currentTimeMillis();
      HiveConf.setVar(conf, HiveConf.ConfVars.HIVEQUERYID, queryId);
    }
    System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);

    LogUtils.registerLoggingContext(conf);

    if (noLog) {
      // If started from main(), and noLog is on, we should not output
      // any logs. To turn the log on, please set -Dtest.silent=false
      org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
      NullAppender appender = NullAppender.createNullAppender();
      appender.addToLogger(logger.getName(), Level.ERROR);
      appender.start();
    } else {
      setupChildLog4j(conf);
    }

    Logger LOG = LoggerFactory.getLogger(ExecDriver.class.getName());
    LogHelper console = new LogHelper(LOG, isSilent);

    if (planFileName == null) {
      console.printError("Must specify Plan File Name");
      printUsage();
    }

    // print out the location of the log file for the user so
    // that it's easy to find reason for local mode execution failures
    for (Appender appender : ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger())
        .getAppenders().values()) {
      if (appender instanceof FileAppender) {
        console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
      } else if (appender instanceof RollingFileAppender) {
        console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
      }
    }

    // the plan file should always be in local directory
    // NOTE(review): pathData is never explicitly closed; presumably acceptable
    // because this JVM exits right after — confirm deserializePlan's contract.
    Path p = new Path(planFileName);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream pathData = fs.open(p);

    // this is workaround for hadoop-17 - libjars are not added to classpath of the
    // child process. so we add it here explicitly
    try {
      // see also - code in CliDriver.java
      ClassLoader loader = conf.getClassLoader();
      if (StringUtils.isNotBlank(libjars)) {
        AddToClassPathAction addAction = new AddToClassPathAction(
            loader, Arrays.asList(StringUtils.split(libjars, ",")));
        loader = AccessController.doPrivileged(addAction);
      }
      conf.setClassLoader(loader);
      // Also set this to the Thread ContextClassLoader, so new threads will
      // inherit this class loader, and propagate into newly created
      // Configurations by those new threads.
      Thread.currentThread().setContextClassLoader(loader);
    } catch (Exception e) {
      throw new HiveException(e.getMessage(), e);
    }

    int ret;
    if (localtask) {
      memoryMXBean = ManagementFactory.getMemoryMXBean();
      MapredLocalWork plan = SerializationUtilities.deserializePlan(pathData, MapredLocalWork.class);
      MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
      ed.initialize(null, null, new TaskQueue(), null);
      ret = ed.executeInProcess();
    } else {
      MapredWork plan = SerializationUtilities.deserializePlan(pathData, MapredWork.class);
      ExecDriver ed = new ExecDriver(plan, conf, isSilent);
      ed.setTaskQueue(new TaskQueue());
      ret = ed.execute();
    }

    // Propagate the task's failure code as the process exit status.
    if (ret != 0) {
      System.exit(ret);
    }
  }
/**
* Given a Hive Configuration object - generate a command line fragment for passing such
* configuration information to ExecDriver.
*/
public static String generateCmdLine(HiveConf hconf, Context ctx)
throws IOException {
HiveConf tempConf = new HiveConf();
Path hConfFilePath = new Path(ctx.getLocalTmpPath(), JOBCONF_FILENAME);
OutputStream out = null;
Properties deltaP = hconf.getChangedProperties();
boolean hadoopLocalMode = ShimLoader.getHadoopShims().isLocalMode(hconf);
String hadoopSysDir = "mapred.system.dir";
String hadoopWorkDir = "mapred.local.dir";
for (Object one : deltaP.keySet()) {
String oneProp = (String) one;
if (hadoopLocalMode && (oneProp.equals(hadoopSysDir) || oneProp.equals(hadoopWorkDir))) {
continue;
}
tempConf.set(oneProp, hconf.get(oneProp));
}
// Multiple concurrent local mode job submissions can cause collisions in
// working dirs and system dirs
// Workaround is to rename map red working dir to a temp dir in such cases
if (hadoopLocalMode) {
tempConf.set(hadoopSysDir, hconf.get(hadoopSysDir) + "/"
+ ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE));
tempConf.set(hadoopWorkDir, hconf.get(hadoopWorkDir) + "/"
+ ThreadLocalRandom.current().nextInt(Integer.MAX_VALUE));
}
try {
out = FileSystem.getLocal(hconf).create(hConfFilePath);
tempConf.writeXml(out);
} finally {
if (out != null) {
out.close();
}
}
return " -jobconffile " + hConfFilePath.toString();
}
  /** @return the single map-side work unit of this MR task. */
  @Override
  public Collection<MapWork> getMapWork() {
    return Collections.<MapWork>singleton(getWork().getMapWork());
  }

  @Override
  public boolean isMapRedTask() {
    return true;
  }

  /** @return the root operators of the map-side plan, one per source alias. */
  @Override
  public Collection<Operator<? extends OperatorDesc>> getTopOperators() {
    return getWork().getMapWork().getAliasToWork().values();
  }

  /** @return true if the plan includes a reduce phase. */
  @Override
  public boolean hasReduce() {
    MapredWork w = getWork();
    return w.getReduceWork() != null;
  }

  @Override
  public StageType getType() {
    return StageType.MAPRED;
  }

  @Override
  public String getName() {
    return "MAPRED";
  }

  @Override
  public void logPlanProgress(SessionState ss) throws IOException {
    ss.getHiveHistory().logPlanProgress(queryPlan);
  }
  /** @return true once shutdown() has been called on this task. */
  public boolean isTaskShutdown() {
    return isShutdown;
  }

  @Override
  public void shutdown() {
    super.shutdown();
    // Best-effort kill of the underlying MR job before marking the task down.
    killJob();
    isShutdown = true;
  }

  @Override
  public String getExternalHandle() {
    // The Hadoop job id serves as the externally visible handle for this task.
    return this.jobID;
  }
  private void killJob() {
    boolean needToKillJob = false;
    // Decide under the lock whether this caller is the one to kill the job, so
    // the RunningJob is killed at most once even if shutdown() races with the
    // cleanup path in execute().
    synchronized (this) {
      if (rj != null && !jobKilled) {
        jobKilled = true;
        needToKillJob = true;
      }
    }
    // Perform the (potentially blocking) kill RPC outside the lock.
    if (needToKillJob) {
      try {
        rj.killJob();
      } catch (Exception e) {
        // Best-effort: the job may already be done or the cluster unreachable.
        LOG.warn("failed to kill job " + rj.getID(), e);
      }
    }
  }
}
| apache-2.0 |
NASA-Tournament-Lab/CoECI-CMS-Healthcare-Fraud-Prevention | node/src/java/tests/com/hfpp/network/node/services/DataRequestExpiredExceptionUnitTests.java | 2527 | /*
* Copyright (C) 2013 TopCoder Inc., All Rights Reserved.
*/
package com.hfpp.network.node.services;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import junit.framework.JUnit4TestAdapter;
import org.junit.Test;
/**
* <p>
* Unit tests for <code>{@link DataRequestExpiredException}</code> class.
* </p>
*
* @author sparemax
* @version 1.0
*/
public class DataRequestExpiredExceptionUnitTests {
  /**
   * <p>
   * Represents a detail message.
   * </p>
   */
  private static final String DETAIL_MESSAGE = "detail";

  /**
   * <p>
   * Represents an error cause.
   * </p>
   */
  private static final Throwable CAUSE = new Exception("UnitTests");

  /**
   * <p>
   * Adapter for earlier versions of JUnit.
   * </p>
   *
   * @return a test suite.
   */
  public static junit.framework.Test suite() {
    return new JUnit4TestAdapter(DataRequestExpiredExceptionUnitTests.class);
  }

  /**
   * <p>
   * <code>DataRequestExpiredException</code> should be subclass of <code>NetworkNodeServiceException</code>.
   * </p>
   */
  @Test
  public void testInheritance() {
    // Comparing the direct superclass (rather than instanceof) pins the exact
    // position of the exception in the hierarchy.
    assertTrue("DataRequestExpiredException should be subclass of NetworkNodeServiceException.",
        DataRequestExpiredException.class.getSuperclass() == NetworkNodeServiceException.class);
  }

  /**
   * <p>
   * Tests accuracy of <code>DataRequestExpiredException(String)</code> constructor.<br>
   * Instance should be correctly created.
   * </p>
   */
  @Test
  public void testCtor1() {
    DataRequestExpiredException exception =
        new DataRequestExpiredException(DETAIL_MESSAGE);

    // Verify the error message
    assertEquals("NetworkNodeServiceException message should be correct.", DETAIL_MESSAGE, exception.getMessage());
  }

  /**
   * <p>
   * Tests accuracy of <code>DataRequestExpiredException(String, Throwable)</code> constructor.<br>
   * Instance should be correctly created.
   * </p>
   */
  @Test
  public void testCtor2() {
    DataRequestExpiredException exception =
        new DataRequestExpiredException(DETAIL_MESSAGE, CAUSE);

    // Verify the error message
    assertEquals("Error message should be correct.", DETAIL_MESSAGE, exception.getMessage());

    // Verify the error cause
    assertSame("Error cause should be correct.", CAUSE, exception.getCause());
  }
}
| apache-2.0 |
HonzaKral/elasticsearch | server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java | 5260 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster.metadata;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.SnapshotsInProgress;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.routing.RoutingTable;
import org.elasticsearch.cluster.routing.allocation.AllocationService;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.repositories.IndexId;
import org.elasticsearch.snapshots.Snapshot;
import org.elasticsearch.snapshots.SnapshotId;
import org.elasticsearch.snapshots.SnapshotInProgressException;
import org.elasticsearch.snapshots.SnapshotInfoTests;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import static java.util.Collections.singleton;
import static java.util.Collections.singletonList;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
public class MetadataDeleteIndexServiceTests extends ESTestCase {
  // Allocation service is mocked; reroute behavior is stubbed per test.
  private final AllocationService allocationService = mock(AllocationService.class);
  private final MetadataDeleteIndexService service = new MetadataDeleteIndexService(Settings.EMPTY, null, allocationService);

  // Deleting an index absent from the cluster state must fail with
  // IndexNotFoundException carrying the requested index.
  public void testDeleteMissing() {
    Index index = new Index("missing", "doesn't matter");
    ClusterState state = ClusterState.builder(ClusterName.DEFAULT).build();
    IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> service.deleteIndices(state, singleton(index)));
    assertEquals(index, e.getIndex());
  }

  // An index that is part of an in-progress snapshot must not be deletable.
  public void testDeleteSnapshotting() {
    String index = randomAlphaOfLength(5);
    Snapshot snapshot = new Snapshot("doesn't matter", new SnapshotId("snapshot name", "snapshot uuid"));
    SnapshotsInProgress snaps = new SnapshotsInProgress(new SnapshotsInProgress.Entry(snapshot, true, false,
        SnapshotsInProgress.State.INIT, singletonList(new IndexId(index, "doesn't matter")),
        System.currentTimeMillis(), (long) randomIntBetween(0, 1000), ImmutableOpenMap.of(),
        SnapshotInfoTests.randomUserMetadata(), VersionUtils.randomVersion(random())));
    ClusterState state = ClusterState.builder(clusterState(index))
        .putCustom(SnapshotsInProgress.TYPE, snaps)
        .build();
    Exception e = expectThrows(SnapshotInProgressException.class,
        () -> service.deleteIndices(state, singleton(state.metadata().getIndices().get(index).getIndex())));
    assertEquals("Cannot delete indices that are being snapshotted: [[" + index + "]]. Try again after snapshot finishes "
        + "or cancel the currently running snapshot.", e.getMessage());
  }

  // Deleting an unassigned index removes its metadata, routing table entry and
  // blocks, and triggers a reroute.
  public void testDeleteUnassigned() {
    // Create an unassigned index
    String index = randomAlphaOfLength(5);
    ClusterState before = clusterState(index);

    // Mock the built reroute
    when(allocationService.reroute(any(ClusterState.class), any(String.class))).then(i -> i.getArguments()[0]);

    // Remove it
    ClusterState after = service.deleteIndices(before, singleton(before.metadata().getIndices().get(index).getIndex()));

    // It is gone
    assertNull(after.metadata().getIndices().get(index));
    assertNull(after.routingTable().index(index));
    assertNull(after.blocks().indices().get(index));

    // Make sure we actually attempted to reroute
    verify(allocationService).reroute(any(ClusterState.class), any(String.class));
  }

  // Builds a one-shard, one-replica cluster state containing the given index.
  private ClusterState clusterState(String index) {
    IndexMetadata indexMetadata = IndexMetadata.builder(index)
        .settings(Settings.builder().put("index.version.created", VersionUtils.randomVersion(random())))
        .numberOfShards(1)
        .numberOfReplicas(1)
        .build();
    return ClusterState.builder(ClusterName.DEFAULT)
        .metadata(Metadata.builder().put(indexMetadata, false))
        .routingTable(RoutingTable.builder().addAsNew(indexMetadata).build())
        .blocks(ClusterBlocks.builder().addBlocks(indexMetadata))
        .build();
  }
}
| apache-2.0 |
swlsw/incubator-reef | lang/java/reef-tests/src/test/java/org/apache/reef/tests/close_eval/package-info.java | 877 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Tests covering evaluator close behavior in REEF.
 */
package org.apache.reef.tests.close_eval;
| apache-2.0 |
minahlee/incubator-zeppelin | zeppelin-interpreter/src/main/java/org/apache/zeppelin/interpreter/thrift/RemoteApplicationResult.java | 16293 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.2)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.zeppelin.interpreter.thrift;
import org.apache.thrift.scheme.IScheme;
import org.apache.thrift.scheme.SchemeFactory;
import org.apache.thrift.scheme.StandardScheme;
import org.apache.thrift.scheme.TupleScheme;
import org.apache.thrift.protocol.TTupleProtocol;
import org.apache.thrift.protocol.TProtocolException;
import org.apache.thrift.EncodingUtils;
import org.apache.thrift.TException;
import org.apache.thrift.async.AsyncMethodCallback;
import org.apache.thrift.server.AbstractNonblockingServer.*;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.EnumMap;
import java.util.Set;
import java.util.HashSet;
import java.util.EnumSet;
import java.util.Collections;
import java.util.BitSet;
import java.nio.ByteBuffer;
import java.util.Arrays;
import javax.annotation.Generated;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2017-1-25")
public class RemoteApplicationResult implements org.apache.thrift.TBase<RemoteApplicationResult, RemoteApplicationResult._Fields>, java.io.Serializable, Cloneable, Comparable<RemoteApplicationResult> {
  // Thrift-generated struct: a required 'success' flag plus a status message.
  // This file is generated by the Thrift compiler (see @Generated above); do not
  // hand-edit the logic — regenerate from the .thrift IDL instead.
  private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("RemoteApplicationResult");
  private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.BOOL, (short)1);
  private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)2);
  // Maps each protocol scheme class to the factory producing this struct's reader/writer.
  private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
  static {
    schemes.put(StandardScheme.class, new RemoteApplicationResultStandardSchemeFactory());
    schemes.put(TupleScheme.class, new RemoteApplicationResultTupleSchemeFactory());
  }
  public boolean success; // required
  public String msg; // required
  /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
  public enum _Fields implements org.apache.thrift.TFieldIdEnum {
    SUCCESS((short)1, "success"),
    MSG((short)2, "msg");
    private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
    static {
      for (_Fields field : EnumSet.allOf(_Fields.class)) {
        byName.put(field.getFieldName(), field);
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, or null if its not found.
     */
    public static _Fields findByThriftId(int fieldId) {
      switch(fieldId) {
        case 1: // SUCCESS
          return SUCCESS;
        case 2: // MSG
          return MSG;
        default:
          return null;
      }
    }
    /**
     * Find the _Fields constant that matches fieldId, throwing an exception
     * if it is not found.
     */
    public static _Fields findByThriftIdOrThrow(int fieldId) {
      _Fields fields = findByThriftId(fieldId);
      if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
      return fields;
    }
    /**
     * Find the _Fields constant that matches name, or null if its not found.
     */
    public static _Fields findByName(String name) {
      return byName.get(name);
    }
    private final short _thriftId;
    private final String _fieldName;
    _Fields(short thriftId, String fieldName) {
      _thriftId = thriftId;
      _fieldName = fieldName;
    }
    public short getThriftFieldId() {
      return _thriftId;
    }
    public String getFieldName() {
      return _fieldName;
    }
  }
  // isset id assignments
  // Primitive fields cannot be null, so presence is tracked in a bitfield;
  // bit 0 records whether 'success' has been explicitly assigned.
  private static final int __SUCCESS_ISSET_ID = 0;
  private byte __isset_bitfield = 0;
  public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
  static {
    Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
    tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
    tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.DEFAULT,
        new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
    metaDataMap = Collections.unmodifiableMap(tmpMap);
    org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(RemoteApplicationResult.class, metaDataMap);
  }
  public RemoteApplicationResult() {
  }
  public RemoteApplicationResult(
    boolean success,
    String msg)
  {
    this();
    this.success = success;
    setSuccessIsSet(true);
    this.msg = msg;
  }
  /**
   * Performs a deep copy on <i>other</i>.
   */
  public RemoteApplicationResult(RemoteApplicationResult other) {
    __isset_bitfield = other.__isset_bitfield;
    this.success = other.success;
    if (other.isSetMsg()) {
      this.msg = other.msg;
    }
  }
  public RemoteApplicationResult deepCopy() {
    return new RemoteApplicationResult(this);
  }
  @Override
  public void clear() {
    setSuccessIsSet(false);
    this.success = false;
    this.msg = null;
  }
  public boolean isSuccess() {
    return this.success;
  }
  public RemoteApplicationResult setSuccess(boolean success) {
    this.success = success;
    setSuccessIsSet(true);
    return this;
  }
  public void unsetSuccess() {
    __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __SUCCESS_ISSET_ID);
  }
  /** Returns true if field success is set (has been assigned a value) and false otherwise */
  public boolean isSetSuccess() {
    return EncodingUtils.testBit(__isset_bitfield, __SUCCESS_ISSET_ID);
  }
  public void setSuccessIsSet(boolean value) {
    __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __SUCCESS_ISSET_ID, value);
  }
  public String getMsg() {
    return this.msg;
  }
  public RemoteApplicationResult setMsg(String msg) {
    this.msg = msg;
    return this;
  }
  public void unsetMsg() {
    this.msg = null;
  }
  /** Returns true if field msg is set (has been assigned a value) and false otherwise */
  public boolean isSetMsg() {
    return this.msg != null;
  }
  public void setMsgIsSet(boolean value) {
    if (!value) {
      this.msg = null;
    }
  }
  public void setFieldValue(_Fields field, Object value) {
    switch (field) {
    case SUCCESS:
      if (value == null) {
        unsetSuccess();
      } else {
        setSuccess((Boolean)value);
      }
      break;
    case MSG:
      if (value == null) {
        unsetMsg();
      } else {
        setMsg((String)value);
      }
      break;
    }
  }
  public Object getFieldValue(_Fields field) {
    switch (field) {
    case SUCCESS:
      return Boolean.valueOf(isSuccess());
    case MSG:
      return getMsg();
    }
    throw new IllegalStateException();
  }
  /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
  public boolean isSet(_Fields field) {
    if (field == null) {
      throw new IllegalArgumentException();
    }
    switch (field) {
    case SUCCESS:
      return isSetSuccess();
    case MSG:
      return isSetMsg();
    }
    throw new IllegalStateException();
  }
  @Override
  public boolean equals(Object that) {
    if (that == null)
      return false;
    if (that instanceof RemoteApplicationResult)
      return this.equals((RemoteApplicationResult)that);
    return false;
  }
  public boolean equals(RemoteApplicationResult that) {
    if (that == null)
      return false;
    // 'success' is a primitive DEFAULT field, so both sides always "present" it.
    boolean this_present_success = true;
    boolean that_present_success = true;
    if (this_present_success || that_present_success) {
      if (!(this_present_success && that_present_success))
        return false;
      if (this.success != that.success)
        return false;
    }
    boolean this_present_msg = true && this.isSetMsg();
    boolean that_present_msg = true && that.isSetMsg();
    if (this_present_msg || that_present_msg) {
      if (!(this_present_msg && that_present_msg))
        return false;
      if (!this.msg.equals(that.msg))
        return false;
    }
    return true;
  }
  @Override
  public int hashCode() {
    List<Object> list = new ArrayList<Object>();
    boolean present_success = true;
    list.add(present_success);
    if (present_success)
      list.add(success);
    boolean present_msg = true && (isSetMsg());
    list.add(present_msg);
    if (present_msg)
      list.add(msg);
    return list.hashCode();
  }
  @Override
  public int compareTo(RemoteApplicationResult other) {
    if (!getClass().equals(other.getClass())) {
      return getClass().getName().compareTo(other.getClass().getName());
    }
    int lastComparison = 0;
    lastComparison = Boolean.valueOf(isSetSuccess()).compareTo(other.isSetSuccess());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetSuccess()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    lastComparison = Boolean.valueOf(isSetMsg()).compareTo(other.isSetMsg());
    if (lastComparison != 0) {
      return lastComparison;
    }
    if (isSetMsg()) {
      lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
      if (lastComparison != 0) {
        return lastComparison;
      }
    }
    return 0;
  }
  public _Fields fieldForId(int fieldId) {
    return _Fields.findByThriftId(fieldId);
  }
  public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
    schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
  }
  public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
    schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder("RemoteApplicationResult(");
    boolean first = true;
    sb.append("success:");
    sb.append(this.success);
    first = false;
    if (!first) sb.append(", ");
    sb.append("msg:");
    if (this.msg == null) {
      sb.append("null");
    } else {
      sb.append(this.msg);
    }
    first = false;
    sb.append(")");
    return sb.toString();
  }
  public void validate() throws org.apache.thrift.TException {
    // check for required fields
    // check for sub-struct validity
  }
  // Java serialization is routed through Thrift's compact protocol so the
  // wire format matches regular Thrift serialization.
  private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
    try {
      write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
    try {
      // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
      __isset_bitfield = 0;
      read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
    } catch (org.apache.thrift.TException te) {
      throw new java.io.IOException(te);
    }
  }
  private static class RemoteApplicationResultStandardSchemeFactory implements SchemeFactory {
    public RemoteApplicationResultStandardScheme getScheme() {
      return new RemoteApplicationResultStandardScheme();
    }
  }
  private static class RemoteApplicationResultStandardScheme extends StandardScheme<RemoteApplicationResult> {
    public void read(org.apache.thrift.protocol.TProtocol iprot, RemoteApplicationResult struct) throws org.apache.thrift.TException {
      org.apache.thrift.protocol.TField schemeField;
      iprot.readStructBegin();
      while (true)
      {
        schemeField = iprot.readFieldBegin();
        if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
          break;
        }
        switch (schemeField.id) {
          case 1: // SUCCESS
            if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) {
              struct.success = iprot.readBool();
              struct.setSuccessIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          case 2: // MSG
            if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
              struct.msg = iprot.readString();
              struct.setMsgIsSet(true);
            } else {
              org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
            }
            break;
          default:
            org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
        }
        iprot.readFieldEnd();
      }
      iprot.readStructEnd();
      // check for required fields of primitive type, which can't be checked in the validate method
      struct.validate();
    }
    public void write(org.apache.thrift.protocol.TProtocol oprot, RemoteApplicationResult struct) throws org.apache.thrift.TException {
      struct.validate();
      oprot.writeStructBegin(STRUCT_DESC);
      oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
      oprot.writeBool(struct.success);
      oprot.writeFieldEnd();
      if (struct.msg != null) {
        oprot.writeFieldBegin(MSG_FIELD_DESC);
        oprot.writeString(struct.msg);
        oprot.writeFieldEnd();
      }
      oprot.writeFieldStop();
      oprot.writeStructEnd();
    }
  }
  private static class RemoteApplicationResultTupleSchemeFactory implements SchemeFactory {
    public RemoteApplicationResultTupleScheme getScheme() {
      return new RemoteApplicationResultTupleScheme();
    }
  }
  private static class RemoteApplicationResultTupleScheme extends TupleScheme<RemoteApplicationResult> {
    @Override
    public void write(org.apache.thrift.protocol.TProtocol prot, RemoteApplicationResult struct) throws org.apache.thrift.TException {
      TTupleProtocol oprot = (TTupleProtocol) prot;
      // The tuple scheme writes a presence bitset first, then only the set fields.
      BitSet optionals = new BitSet();
      if (struct.isSetSuccess()) {
        optionals.set(0);
      }
      if (struct.isSetMsg()) {
        optionals.set(1);
      }
      oprot.writeBitSet(optionals, 2);
      if (struct.isSetSuccess()) {
        oprot.writeBool(struct.success);
      }
      if (struct.isSetMsg()) {
        oprot.writeString(struct.msg);
      }
    }
    @Override
    public void read(org.apache.thrift.protocol.TProtocol prot, RemoteApplicationResult struct) throws org.apache.thrift.TException {
      TTupleProtocol iprot = (TTupleProtocol) prot;
      BitSet incoming = iprot.readBitSet(2);
      if (incoming.get(0)) {
        struct.success = iprot.readBool();
        struct.setSuccessIsSet(true);
      }
      if (incoming.get(1)) {
        struct.msg = iprot.readString();
        struct.setMsgIsSet(true);
      }
    }
  }
}
| apache-2.0 |
minji-kim/calcite | core/src/main/java/org/apache/calcite/plan/RelOptPredicateList.java | 7496 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.plan;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexUtil;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
/**
* Predicates that are known to hold in the output of a particular relational
* expression.
*
* <p><b>Pulled up predicates</b> (field {@link #pulledUpPredicates} are
* predicates that apply to every row output by the relational expression. They
* are inferred from the input relational expression(s) and the relational
* operator.
*
* <p>For example, if you apply {@code Filter(x > 1)} to a relational
* expression that has a predicate {@code y < 10} then the pulled up predicates
 * for the Filter are {@code [y < 10, x > 1]}.
*
 * <p><b>Inferred predicates</b> only apply to joins. If there is a
* predicate on the left input to a join, and that predicate is over columns
* used in the join condition, then a predicate can be inferred on the right
* input to the join. (And vice versa.)
*
* <p>For example, in the query
* <blockquote>SELECT *<br>
* FROM emp<br>
* JOIN dept ON emp.deptno = dept.deptno
* WHERE emp.gender = 'F' AND emp.deptno < 10</blockquote>
* we have
* <ul>
* <li>left: {@code Filter(Scan(EMP), deptno < 10},
* predicates: {@code [deptno < 10]}
* <li>right: {@code Scan(DEPT)}, predicates: {@code []}
* <li>join: {@code Join(left, right, emp.deptno = dept.deptno},
* leftInferredPredicates: [],
* rightInferredPredicates: [deptno < 10],
* pulledUpPredicates: [emp.gender = 'F', emp.deptno < 10,
* emp.deptno = dept.deptno, dept.deptno < 10]
* </ul>
*
* <p>Note that the predicate from the left input appears in
* {@code rightInferredPredicates}. Predicates from several sources appear in
* {@code pulledUpPredicates}.
*/
public class RelOptPredicateList {
  private static final ImmutableList<RexNode> EMPTY_LIST = ImmutableList.of();
  // Shared singleton for "no known predicates"; factory methods return it so
  // callers can use reference comparison (see union).
  public static final RelOptPredicateList EMPTY =
      new RelOptPredicateList(EMPTY_LIST, EMPTY_LIST, EMPTY_LIST,
          ImmutableMap.<RexNode, RexNode>of());
  /** Predicates that can be pulled up from the relational expression and its
   * inputs. */
  public final ImmutableList<RexNode> pulledUpPredicates;
  /** Predicates that were inferred from the right input.
   * Empty if the relational expression is not a join. */
  public final ImmutableList<RexNode> leftInferredPredicates;
  /** Predicates that were inferred from the left input.
   * Empty if the relational expression is not a join. */
  public final ImmutableList<RexNode> rightInferredPredicates;
  /** A map of each (e, constant) pair that occurs within
   * {@link #pulledUpPredicates}. */
  public final ImmutableMap<RexNode, RexNode> constantMap;
  // Private: instances are created via the of(...) factory methods, which
  // normalize the empty case to EMPTY and derive constantMap.
  private RelOptPredicateList(ImmutableList<RexNode> pulledUpPredicates,
      ImmutableList<RexNode> leftInferredPredicates,
      ImmutableList<RexNode> rightInferredPredicates,
      ImmutableMap<RexNode, RexNode> constantMap) {
    this.pulledUpPredicates = Preconditions.checkNotNull(pulledUpPredicates);
    this.leftInferredPredicates =
        Preconditions.checkNotNull(leftInferredPredicates);
    this.rightInferredPredicates =
        Preconditions.checkNotNull(rightInferredPredicates);
    this.constantMap = Preconditions.checkNotNull(constantMap);
  }
  /** Creates a RelOptPredicateList with only pulled-up predicates, no inferred
   * predicates.
   *
   * <p>Use this for relational expressions other than joins.
   *
   * @param rexBuilder Rex builder
   * @param pulledUpPredicates Predicates that apply to the rows returned by the
   * relational expression
   */
  public static RelOptPredicateList of(RexBuilder rexBuilder,
      Iterable<RexNode> pulledUpPredicates) {
    ImmutableList<RexNode> pulledUpPredicatesList =
        ImmutableList.copyOf(pulledUpPredicates);
    if (pulledUpPredicatesList.isEmpty()) {
      return EMPTY;
    }
    return of(rexBuilder, pulledUpPredicatesList, EMPTY_LIST, EMPTY_LIST);
  }
  /** Creates a RelOptPredicateList for a join.
   *
   * @param rexBuilder Rex builder
   * @param pulledUpPredicates Predicates that apply to the rows returned by the
   * relational expression
   * @param leftInferredPredicates Predicates that were inferred from the right
   * input
   * @param rightInferredPredicates Predicates that were inferred from the left
   * input
   */
  public static RelOptPredicateList of(RexBuilder rexBuilder,
      Iterable<RexNode> pulledUpPredicates,
      Iterable<RexNode> leftInferredPredicates,
      Iterable<RexNode> rightInferredPredicates) {
    final ImmutableList<RexNode> pulledUpPredicatesList =
        ImmutableList.copyOf(pulledUpPredicates);
    final ImmutableList<RexNode> leftInferredPredicateList =
        ImmutableList.copyOf(leftInferredPredicates);
    final ImmutableList<RexNode> rightInferredPredicatesList =
        ImmutableList.copyOf(rightInferredPredicates);
    if (pulledUpPredicatesList.isEmpty()
        && leftInferredPredicateList.isEmpty()
        && rightInferredPredicatesList.isEmpty()) {
      return EMPTY;
    }
    // Derive the (expression -> constant) map from the pulled-up predicates.
    final ImmutableMap<RexNode, RexNode> constantMap =
        RexUtil.predicateConstants(RexNode.class, rexBuilder,
            pulledUpPredicatesList);
    return new RelOptPredicateList(pulledUpPredicatesList,
        leftInferredPredicateList, rightInferredPredicatesList, constantMap);
  }
  /** Returns a predicate list containing the predicates of both this list and
   * {@code list}. If either side is {@link #EMPTY}, returns the other side
   * unchanged. */
  public RelOptPredicateList union(RexBuilder rexBuilder,
      RelOptPredicateList list) {
    if (this == EMPTY) {
      return list;
    } else if (list == EMPTY) {
      return this;
    } else {
      return RelOptPredicateList.of(rexBuilder,
          concat(pulledUpPredicates, list.pulledUpPredicates),
          concat(leftInferredPredicates, list.leftInferredPredicates),
          concat(rightInferredPredicates, list.rightInferredPredicates));
    }
  }
  /** Concatenates two immutable lists, avoiding a copy if possible. */
  private static <E> ImmutableList<E> concat(ImmutableList<E> list1,
      ImmutableList<E> list2) {
    if (list1.isEmpty()) {
      return list2;
    } else if (list2.isEmpty()) {
      return list1;
    } else {
      return ImmutableList.<E>builder().addAll(list1).addAll(list2).build();
    }
  }
  /** Returns a copy of this list with every predicate's input references
   * shifted by {@code offset} (delegates to {@link RexUtil#shift}). */
  public RelOptPredicateList shift(RexBuilder rexBuilder, int offset) {
    return RelOptPredicateList.of(rexBuilder,
        RexUtil.shift(pulledUpPredicates, offset),
        RexUtil.shift(leftInferredPredicates, offset),
        RexUtil.shift(rightInferredPredicates, offset));
  }
}
// End RelOptPredicateList.java
| apache-2.0 |
salyh/geronimo-specs | geronimo-jaxb_2.2_spec/src/main/java/javax/xml/bind/JAXBPermission.java | 1061 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package javax.xml.bind;
import java.security.BasicPermission;
/**
 * A {@link BasicPermission} subclass used to guard JAXB operations.
 * Carries only a name; like all basic permissions it has no actions.
 */
public final class JAXBPermission extends BasicPermission {
    private static final long serialVersionUID = 1L;

    /**
     * Creates a permission with the given target name.
     *
     * @param name the permission's target name
     */
    public JAXBPermission(String name) {
        super(name);
    }
}
sflyphotobooks/crp-batik | sources/org/apache/batik/gvt/flow/LineInfo.java | 14224 | /*
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.batik.gvt.flow;
import java.awt.geom.Point2D;
import org.apache.batik.gvt.font.GVTGlyphVector;
/**
 * Holds the layout state for a single line of flowed text: the line's
 * height/ascent/descent, the horizontal ranges available in the current
 * flow region, and the glyph groups assigned to the line, plus the logic
 * to position (justify/align) those glyphs.
 *
 * @author <a href="mailto:deweese@apache.org">deweese</a>
 * @version $Id: LineInfo.java 478188 2006-11-22 15:19:17Z dvholten $
 */
public class LineInfo {
    FlowRegions fr;                    // the flow regions this line is laid out into
    double lineHeight = -1;            // current line height (-1 until known)
    double ascent = -1;                // max ascent of the words on the line
    double descent = -1;               // max descent of the words on the line
    double hLeading = -1;              // half-leading: (lineHeight-(ascent+descent))/2
    double baseline;                   // y coordinate of the text baseline
    int numGlyphs;
    int words = 0;
    int size=0;                        // number of glyph groups currently on the line
    GlyphGroupInfo [] ggis=null;       // glyph groups committed to the line
    int newSize=0;                     // candidate count built by mergeGlyphGroups
    GlyphGroupInfo [] newGGIS=null;    // candidate glyph groups (swapped in on success)
    int numRanges;                     // number of horizontal ranges on this line
    double [] ranges;                  // range bounds, packed as [x0,x1, x0,x1, ...]
    double [] rangeAdv;                // total advance used in each range
    BlockInfo bi = null;               // paragraph-level info (margins, indent, alignment)
    boolean paraStart;                 // true if this line starts a paragraph (indent applies)
    boolean paraEnd;                   // true if this line ends a paragraph (no full justify)
    protected static final int FULL_WORD = 0;
    protected static final int FULL_ADV  = 1;
    public LineInfo(FlowRegions fr, BlockInfo bi, boolean paraStart) {
        this.fr = fr;
        this.bi = bi;
        this.lineHeight = bi.getLineHeight();
        this.ascent = bi.getAscent();
        this.descent = bi.getDescent();
        this.hLeading = (lineHeight-(ascent+descent))/2;
        this.baseline = (float)(fr.getCurrentY()+hLeading+ascent);
        this.paraStart = paraStart;
        this.paraEnd   = false;
        if (lineHeight > 0) {
            fr.newLineHeight(lineHeight);
            updateRangeInfo();
        }
    }
    public void setParaEnd(boolean paraEnd) {
        this.paraEnd = paraEnd;
    }
    /**
     * Tries to add a word to this line, growing the line height if the
     * word needs more room. Returns false if the word does not fit.
     */
    public boolean addWord(WordInfo wi) {
        double nlh = wi.getLineHeight();
        if (nlh <= lineHeight)
            return insertWord(wi);
        fr.newLineHeight(nlh);
        if (!updateRangeInfo()) {
            if (lineHeight > 0) // restore old LH
                fr.newLineHeight(lineHeight);
            return false;
        }
        if (!insertWord(wi)) {
            if (lineHeight > 0) // Failure, restore old line Height.
                setLineHeight(lineHeight);
            return false;
        }
        // Success, word fits on line.
        lineHeight = nlh;
        if (wi.getAscent() > ascent)
            ascent = wi.getAscent();
        if (wi.getDescent() > descent)
            descent = wi.getDescent();
        hLeading = (nlh-(ascent+descent))/2;
        baseline = (float)(fr.getCurrentY()+hLeading+ascent);
        return true;
    }
    /**
     * Merges the word's glyph groups into the line and checks that the
     * result still fits in the available ranges. Only commits (swaps in)
     * the merged groups on success.
     */
    public boolean insertWord(WordInfo wi) {
        // Merge wi's glyph groups into the current GGI's.
        // This puts them into newGGIS, so if it fails we can
        // retain to the old ggis array.
        mergeGlyphGroups(wi);
        if (!assignGlyphGroupRanges(newSize, newGGIS))
            return false;
        // Swap in to GG info.
        swapGlyphGroupInfo();
        return true;
    }
    static final float MAX_COMPRESS=0.1f;
    static final float COMRESS_SCALE=3;
    /**
     * Assigns each glyph group to a horizontal range, accumulating advances,
     * and records the used advance per range in rangeAdv. Returns true if all
     * ggSz groups fit within the available ranges.
     */
    public boolean assignGlyphGroupRanges(int ggSz, GlyphGroupInfo []ggis) {
        int i=0, r=0;
        while (r<numRanges) {
            double range = ranges[2*r+1]-ranges[2*r];
            float adv=0;
            float rangeAdvance = 0;
            while (i<ggSz) {
                GlyphGroupInfo ggi = ggis[i];
                ggi.setRange(r);
                adv = ggi.getAdvance();
                double delta = range-(rangeAdvance + adv);
                if (delta < 0) break;
                i++;
                rangeAdvance += adv;
            }
            // Check last glyphGroup anyways...
            if (i == ggSz) {
                i--;
                rangeAdvance -= adv;
            }
            GlyphGroupInfo ggi = ggis[i];
            float ladv = ggi.getLastAdvance();
            while (rangeAdvance + ladv > range) {
                // "i" can't fit in this region see if "i-1" can.
                i--;
                ladv = 0;
                if (i < 0) break;
                ggi = ggis[i];
                if (r != ggi.getRange()) // Not from this range nothing fits.
                    break;
                rangeAdvance -= ggi.getAdvance();
                ladv = ggi.getLastAdvance();
            }
            i++;
            rangeAdv[r] = rangeAdvance + ladv;
            r++;
            if (i == ggSz) return true;
        }
        return false;
    }
    /**
     * This method updates the line height and recalculates
     * the available flow ranges for the line.
     */
    public boolean setLineHeight(double lh) {
        fr.newLineHeight(lh);
        if (updateRangeInfo()) {
            lineHeight = lh;
            return true;
        }
        // restore line height.
        if (lineHeight > 0)
            fr.newLineHeight(lineHeight);
        return false;
    }
    public double getCurrentY() {
        return fr.getCurrentY();
    }
    /**
     * Moves the flow cursor to y; on failure refreshes the range info and
     * baseline for the new position. Returns the result of fr.gotoY(y).
     */
    public boolean gotoY(double y) {
        if (fr.gotoY(y))
            return true;
        if (lineHeight > 0)
            updateRangeInfo();
        this.baseline = (float)(fr.getCurrentY()+hLeading+ascent);
        return false;
    }
    /**
     * Recomputes the horizontal ranges available on the current line,
     * applying margins (and the paragraph indent on the first range of a
     * paragraph-start line). Returns false if no range is available.
     */
    protected boolean updateRangeInfo() {
        fr.resetRange();
        int nr = fr.getNumRangeOnLine();
        if (nr == 0)
            return false;
        numRanges = nr;
        if (ranges == null) {
            rangeAdv = new double[numRanges];
            ranges   = new double[2*numRanges];
        } else if (numRanges > rangeAdv.length) {
            int sz = 2*rangeAdv.length;
            if (sz < numRanges) sz = numRanges;
            rangeAdv = new double[sz];
            ranges   = new double[2*sz];
        }
        for (int r=0; r<numRanges; r++) {
            double [] rangeBounds = fr.nextRange();
            // System.err.println("RG["+r+"]: [" +
            //                    rangeBounds[0] + "," + rangeBounds[1] +"]");
            double r0 = rangeBounds[0];
            if (r == 0) {
                double delta = bi.getLeftMargin();
                if (paraStart) {
                    double indent = bi.getIndent();
                    // Limit indent to the amount of margin we have.
                    if (delta < -indent) delta = 0;
                    else                 delta += indent;
                }
                r0 += delta;
            }
            double r1 = rangeBounds[1];
            if (r == numRanges-1)
                r1 -= bi.getRightMargin();
            ranges[2*r]   = r0;
            ranges[2*r+1] = r1;
        }
        return true;
    }
    // Commits the candidate glyph-group array built by mergeGlyphGroups,
    // recycling the old array as the next candidate buffer.
    protected void swapGlyphGroupInfo() {
        GlyphGroupInfo [] tmp = ggis;
        ggis = newGGIS;
        newGGIS = tmp;
        size = newSize;
        newSize = 0;
    }
    /**
     * This function merges the glyph groups from <tt>wi</tt>
     * into the glyph groups that are already on this line.
     * It does no fit checking, just adds them in the
     * proper place in the <tt>newGGIS</tt> data member.
     */
    protected void mergeGlyphGroups(WordInfo wi) {
        int numGG = wi.getNumGlyphGroups();
        newSize = 0;
        if (ggis == null) {
            // first glyph group on line just add them.
            newSize = numGG;
            newGGIS = new GlyphGroupInfo[numGG];
            for (int i=0; i< numGG; i++)
                newGGIS[i] = wi.getGlyphGroup(i);
        } else {
            // We need to merge the new glyph groups with the
            // existing glyph Groups.  (Merge sorted by start index.)
            int s = 0;
            int i = 0;
            GlyphGroupInfo nggi = wi.getGlyphGroup(i);
            int nStart = nggi.getStart();
            GlyphGroupInfo oggi = ggis[size-1];
            int oStart = oggi.getStart();
            newGGIS = assureSize(newGGIS, size+numGG);
            if (nStart < oStart) {
                oggi = ggis[s];
                oStart = oggi.getStart();
                while((s<size)&&(i<numGG)) {
                    if (nStart < oStart) {
                        newGGIS[newSize++] = nggi;
                        i++;
                        if (i<numGG) {
                            nggi = wi.getGlyphGroup(i);
                            nStart = nggi.getStart();
                        }
                    } else {
                        newGGIS[newSize++] = oggi;
                        s++;
                        if (s<size) {
                            oggi = ggis[s];
                            oStart = oggi.getStart();
                        }
                    }
                }
            }
            while(s<size) {
                newGGIS[newSize++] = ggis[s++];
            }
            while(i<numGG) {
                newGGIS[newSize++] = wi.getGlyphGroup(i++);
            }
        }
        // for (int i=0; i<newSize; i++) {
        //     System.err.println("GGIS["+i+"]: " + newGGIS[i].start + " -> " +
        //                        newGGIS[i].end);
        // }
    }
    /**
     * Positions every glyph on the line: assigns glyph groups to ranges,
     * applies the block's text alignment (including full justification by
     * word or by glyph), hides soft-hyphen-style trailing glyphs, and sets
     * each glyph's final (x, baseline) position on the glyph vector.
     */
    public void layout() {
        if (size == 0) return;
        // This is needed because we know that in most cases
        // the addition of the last word failed.  In the case of
        // BIDI this will mess up region assignments.
        // If one wanted to you could check on BIDI, and/or
        // lastPara.
        assignGlyphGroupRanges(size, ggis);
        GVTGlyphVector gv = ggis[0].getGlyphVector();
        int justType = FULL_WORD;
        double ggAdv = 0;
        double gAdv = 0;
        // Calculate the number of Glyph Groups and the number
        // of glpyhs in each range for use with full justification.
        int []rangeGG = new int[numRanges];
        int []rangeG  = new int[numRanges];
        GlyphGroupInfo []rangeLastGGI = new GlyphGroupInfo[numRanges];
        GlyphGroupInfo ggi = ggis[0];
        int r = ggi.getRange();
        rangeGG[r]++;
        rangeG [r] += ggi.getGlyphCount();
        for (int i=1; i<size; i++) {
            ggi = ggis[i];
            r = ggi.getRange();
            if ((rangeLastGGI[r]==null) || !rangeLastGGI[r].getHideLast())
                rangeGG[r]++;
            rangeLastGGI[r] = ggi;
            rangeG [r] += ggi.getGlyphCount();
            GlyphGroupInfo pggi = ggis[i-1];
            int pr = pggi.getRange();
            if (r != pr)
                rangeG[pr]+= pggi.getLastGlyphCount()-pggi.getGlyphCount();
        }
        rangeG[r]+= ggi.getLastGlyphCount()-ggi.getGlyphCount();
        int currRange = -1;
        double locX=0, range=0, rAdv=0;
        r=-1;
        ggi = null;
        for (int i=0; i<size; i++) {
            GlyphGroupInfo pggi = ggi;
            int prevRange = currRange;
            ggi = ggis[i];
            currRange = ggi.getRange();
            if (currRange != prevRange) {
                // Entering a new range: compute the starting x and the extra
                // per-word (ggAdv) or per-glyph (gAdv) spacing for alignment.
                locX  = ranges[2*currRange];
                range = ranges[2*currRange+1]-locX;
                rAdv = rangeAdv[currRange];
                int textAlign = bi.getTextAlignment();
                if ((paraEnd) && (textAlign == BlockInfo.ALIGN_FULL))
                    textAlign = BlockInfo.ALIGN_START;
                switch (textAlign) {
                default:
                case BlockInfo.ALIGN_FULL: {
                    double delta = range-rAdv;
                    if (justType == FULL_WORD) {
                        int numSp = rangeGG[currRange]-1;
                        if (numSp >= 1)
                            ggAdv = delta/numSp;
                    } else {
                        int numSp = rangeG[currRange]-1;
                        if (numSp >= 1) gAdv = delta/numSp;
                    }
                } break;
                case BlockInfo.ALIGN_START:  break;
                case BlockInfo.ALIGN_MIDDLE: locX += (range-rAdv)/2; break;
                case BlockInfo.ALIGN_END:    locX += (range-rAdv);   break;
                }
            } else if ((pggi!= null) && pggi.getHideLast()) {
                // Hide last glyph from prev glyph group (soft hyphen etc).
                gv.setGlyphVisible(pggi.getEnd(), false);
            }
            int start = ggi.getStart();
            int end   = ggi.getEnd();
            boolean [] hide = ggi.getHide();
            Point2D p2d = gv.getGlyphPosition(start);
            double deltaX = p2d.getX();
            double advAdj = 0;
            for (int g=start; g<=end; g++) {
                Point2D np2d = gv.getGlyphPosition(g+1);
                if (hide[g-start]) {
                    gv.setGlyphVisible(g, false);
                    advAdj += np2d.getX()-p2d.getX();
                } else {
                    gv.setGlyphVisible(g, true);
                }
                p2d.setLocation(p2d.getX()-deltaX-advAdj+locX,
                                p2d.getY()+baseline);
                gv.setGlyphPosition(g, p2d);
                p2d = np2d;
                advAdj -= gAdv;
            }
            if (ggi.getHideLast())
                locX += ggi.getAdvance()-advAdj;
            else
                locX += ggi.getAdvance()-advAdj+ggAdv;
        }
    }
    /**
     * Returns an array of at least sz elements, reusing ggis when it is
     * already large enough (contents are NOT copied on growth).
     */
    public static GlyphGroupInfo [] assureSize
        (GlyphGroupInfo [] ggis, int sz) {
        if (ggis == null) {
            if (sz < 10) sz = 10;
            return new GlyphGroupInfo[sz];
        }
        if (sz <= ggis.length)
            return ggis;
        int nsz = ggis.length*2;
        if (nsz < sz) nsz = sz;
        return new GlyphGroupInfo[nsz];
    }
}
| apache-2.0 |
otoniel-isidoro/assertj-db | src/main/java/org/assertj/db/error/ShouldHaveRowsSize.java | 1664 | /**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2015 the original author or authors.
*/
package org.assertj.db.error;
import org.assertj.core.error.BasicErrorMessageFactory;
import org.assertj.core.error.ErrorMessageFactory;
/**
 * Factory for the error message produced when an assertion verifying the
 * number of rows (the size) of a value fails.
 *
 * @author Régis Pouiller
 *
 */
public class ShouldHaveRowsSize extends BasicErrorMessageFactory {

  // Failure message template; note the argument order: expected size first,
  // then the actual size.
  private static final String EXPECTED_MESSAGE =
          "%nExpecting size (number of rows) to be equal to :%n <%s>%nbut was:%n <%s>";

  /**
   * Creates a new <code>{@link ShouldHaveRowsSize}</code>.
   *
   * @param actualSize the size of {@code actual}.
   * @param expectedSize the expected size.
   * @return the created {@code ErrorMessageFactory}.
   */
  public static ErrorMessageFactory shouldHaveRowsSize(int actualSize, int expectedSize) {
    return new ShouldHaveRowsSize(actualSize, expectedSize);
  }

  /**
   * Constructor.
   *
   * @param actualSize the size of {@code actual}.
   * @param expectedSize the expected size.
   */
  private ShouldHaveRowsSize(int actualSize, int expectedSize) {
    super(EXPECTED_MESSAGE, expectedSize, actualSize);
  }
}
| apache-2.0 |
gradle/gradle | subprojects/execution/src/main/java/org/gradle/internal/execution/steps/Result.java | 1920 | /*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.execution.steps;
import org.gradle.internal.Try;
import org.gradle.internal.execution.ExecutionResult;
import org.gradle.internal.execution.UnitOfWork;
import java.time.Duration;
/**
 * The outcome of executing a unit of work: the wall-clock duration of the
 * execution and the (possibly failed) execution result.
 */
public interface Result {
    /**
     * The elapsed wall clock time of executing the actual work, i.e. the time it took to execute the
     * {@link UnitOfWork#execute(UnitOfWork.ExecutionRequest)} method.
     *
     * The execution time refers to when and where the work was executed: if a previous result was reused,
     * then this method will return the time it took to produce the previous result.
     *
     * Note that reused work times might be different to what it would actually take to execute the work
     * in the current build for a number of reasons:
     *
     * <ul>
     *     <li>reused work could have happened on a remote machine with different hardware capabilities,</li>
     *     <li>there might have been more or less load on the machine producing the reused work,</li>
     *     <li>the work reused might have been executed incrementally,</li>
     *     <li>had there been no work to reuse, the local execution might have happened incrementally.</li>
     * </ul>
     */
    Duration getDuration();

    /**
     * The result of executing the work: on success carries the
     * {@link ExecutionResult}, on failure carries the exception that
     * prevented the work from completing.
     */
    Try<ExecutionResult> getExecutionResult();
}
| apache-2.0 |
kierarad/gocd | common/src/test/java/com/thoughtworks/go/domain/materials/DirectoryCleanerTest.java | 6430 | /*
* Copyright 2019 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.domain.materials;
import com.thoughtworks.go.util.command.InMemoryStreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import java.io.File;
import java.io.IOException;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
/**
 * Tests for {@code DirectoryCleaner}: given a base folder and a set of
 * "allowed" relative paths, cleaning must delete everything else under the
 * base folder while keeping the allowed folders (and their parents) intact.
 */
public class DirectoryCleanerTest {
    private File baseFolder;                  // root folder the cleaner operates on
    private DirectoryCleaner cleaner;         // unit under test
    private InMemoryStreamConsumer consumer;  // captures the cleaner's log output

    @Rule
    public final TemporaryFolder temporaryFolder = new TemporaryFolder();

    @Before
    public void createBaseDirectory() throws IOException {
        consumer = ProcessOutputStreamConsumer.inMemoryConsumer();
        baseFolder = temporaryFolder.newFolder("directoryCleaner");
        cleaner = new DirectoryCleaner(baseFolder, consumer);
    }

    @Test
    public void shouldDoNothingIfDirectoryIsEmpty() {
        cleaner.allowed("non-existent");
        cleaner.clean();

        assertThat(baseFolder.exists(), is(true));
    }

    @Test
    public void shouldNotCleanSvnDestIfExternalIsEnabled() {
        File svnDest = new File(baseFolder, "test1");
        File shouldExist = new File(svnDest, "shouldExist");
        shouldExist.mkdirs();

        File svnExternal = new File(baseFolder, "test1/external");
        svnExternal.mkdirs();
        cleaner.allowed("test1", "test1/subdir");
        cleaner.clean();

        assertThat(svnDest.exists(), is(true));
        assertThat(shouldExist.exists(), is(true));
        // Bug fix: svnExternal was created but never verified, although the
        // whole point of this test is that the external folder inside an
        // allowed material folder survives cleaning.
        assertThat(svnExternal.exists(), is(true));
    }

    @Test
    public void shouldKeepMaterialFolderIfItContainsOtherMaterials() {
        File material1 = mkdirDir(baseFolder, "material1");
        File dirOfMaterial1 = mkdirDir(material1, "dirOfMaterial1");
        File material2 = mkdirDir(material1, "material2");
        File oldMaterial3 = mkdirDir(baseFolder, "oldMaterial3");

        cleaner.allowed("material1", "material1/material2");
        cleaner.clean();

        // A nested material keeps its enclosing material folder alive;
        // an unlisted sibling material is removed.
        assertThat(material1.exists(), is(true));
        assertThat(dirOfMaterial1.exists(), is(true));
        assertThat(material2.exists(), is(true));
        assertThat(oldMaterial3.exists(), is(false));
    }

    // Creates (and returns) a single subdirectory of root.
    private File mkdirDir(File root, String dir) {
        File directory = new File(root, dir);
        directory.mkdir();
        return directory;
    }

    @Test
    public void shouldRemoveExtraDirectoriesInRootFolder() {
        File notAllowed = new File(baseFolder, "notAllowed");
        notAllowed.mkdirs();

        cleaner.allowed("allowed");
        cleaner.clean();

        assertThat(baseFolder.exists(), is(true));
        assertThat(notAllowed.exists(), is(false));
    }

    @Test
    public void shouldNotRemoveAllowedDirectoriesInRootFolder() {
        File allowedFolder = new File(baseFolder, "allowed");
        allowedFolder.mkdir();

        cleaner.allowed("allowed");
        cleaner.clean();

        assertThat(baseFolder.exists(), is(true));
        assertThat(allowedFolder.exists(), is(true));
    }

    @Test
    public void shouldNotRemoveAllowedDirectoriesInSubfolder() {
        File allowedFolder = new File(baseFolder, "subfolder/allowed");
        allowedFolder.mkdirs();

        cleaner.allowed("subfolder/allowed");
        cleaner.clean();

        assertThat(baseFolder.exists(), is(true));
        assertThat(allowedFolder.getParentFile().exists(), is(true));
        assertThat(allowedFolder.exists(), is(true));
    }

    @Test
    public void shouldRemoveNotAllowedDirectoriesInSubfolder() {
        File allowedFolder = new File(baseFolder, "subfolder/allowed");
        allowedFolder.mkdirs();
        File notAllowedFolder = new File(baseFolder, "subfolder/notAllowed");
        notAllowedFolder.mkdirs();

        cleaner.allowed("subfolder/allowed");
        cleaner.clean();

        assertThat(baseFolder.exists(), is(true));
        assertThat(allowedFolder.getParentFile().exists(), is(true));
        assertThat(notAllowedFolder.exists(), is(false));
    }

    @Test
    public void shouldDoNothingIfSubdirectoryDoesNotExist() {
        File allowedFolder = new File(baseFolder, "subfolder/allowed");

        cleaner.allowed("subfolder/allowed");
        cleaner.clean();

        assertThat(baseFolder.exists(), is(true));
        assertThat(allowedFolder.exists(), is(false));
    }

    @Test
    public void shouldNotRemoveAnythingIfNoAllowedWasSet() {
        File allowedFolder = new File(baseFolder, "subfolder/allowed");
        allowedFolder.mkdirs();

        cleaner.clean();

        assertThat(baseFolder.exists(), is(true));
        assertThat(allowedFolder.exists(), is(true));
    }

    @Test
    public void shouldNotProcessFilesOutsideTheBaseFolder() {
        try {
            cleaner.allowed("/../..");
            Assert.fail("Should not allow file outside the baseDirectory");
        } catch (Exception e) {
            assertThat(
                    e.getMessage(),
                    containsString("Folder " + new File(baseFolder, "/../..").getAbsolutePath() + " is outside the base folder"));
        }
    }

    @Test
    public void shouldReportDeletingFiles() {
        File allowedFolder = new File(baseFolder, "subfolder/allowed");
        allowedFolder.mkdirs();
        File notAllowedFolder = new File(baseFolder, "subfolder/notallowed");
        notAllowedFolder.mkdirs();

        cleaner.allowed("subfolder/allowed");
        cleaner.clean();

        // The cleaner narrates what it keeps and what it deletes.
        assertThat(consumer.getStdOut(), containsString("Deleting folder " + notAllowedFolder.getPath()));
        assertThat(consumer.getStdOut(), containsString("Keeping folder " + allowedFolder.getPath()));
    }
}
| apache-2.0 |
TDesjardins/GWT-OL3-Playground | gwt-ol3-client/src/main/java/ol/source/ImageMapGuideParams.java | 2346 | /*******************************************************************************
* Copyright 2014, 2018 gwt-ol3
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package ol.source;
import jsinterop.annotations.JsPackage;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
import ol.Params;
/**
 * Request parameters for MapGuide image requests (see {@code ol.source.ImageMapGuide}).
 * Each setter writes one upper-case request parameter via JsInterop; the
 * {@code @JsProperty} name is the literal parameter key sent to the server.
 *
 * @author tlochmann
 */
@JsType(isNative = true, namespace = JsPackage.GLOBAL, name = "Object")
public class ImageMapGuideParams implements Params {
    /**
     * @param format
     *            MapGuide image format (JPG/PNG/PNG8)
     *
     */
    @JsProperty(name = "FORMAT")
    public native void setFormat(String format);

    /**
     * @param hideLayers List of layers to hide.
     */
    @JsProperty(name = "HIDELAYERS")
    public native void setHideLayers(String hideLayers);

    /**
     * @param mapDefinition
     *            MapGuide mapDefinition e.g. "Library://Samples/Sheboygan/Maps/Sheboygan.MapDefinition"
     */
    @JsProperty(name = "MAPDEFINITION")
    public native void setMapDefinition(String mapDefinition);

    /**
     *
     * @param mapName Name of the map to display.
     */
    @JsProperty(name = "MAPNAME")
    public native void setMapName(String mapName);

    /**
     * @param password Password used to authenticate against the MapGuide server.
     */
    @JsProperty(name = "PASSWORD")
    public native void setPassword(String password);

    /**
     * @param showLayers List of layers to display.
     */
    @JsProperty(name = "SHOWLAYERS")
    public native void setShowLayers(String showLayers);

    /**
     * @param userName User name used to authenticate against the MapGuide server.
     */
    @JsProperty(name = "USERNAME")
    public native void setUserName(String userName);

    /**
     * @param version
     *            MapGuide image version
     *
     */
    @JsProperty(name = "VERSION")
    public native void setVersion(String version);
}
| apache-2.0 |
gradle/gradle | subprojects/files/src/main/java/org/gradle/internal/file/pattern/package-info.java | 705 | /*
* Copyright 2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package org.gradle.internal.file.pattern;
import org.gradle.api.NonNullApi;
| apache-2.0 |
andreagenso/java2scala | test/J2s/java/openjdk-6-src-b27/jdk/src/share/classes/javax/swing/plaf/synth/SynthComboBoxUI.java | 28031 | /*
* Copyright (c) 2002, 2006, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.swing.plaf.synth;
import java.awt.*;
import java.awt.event.*;
import java.lang.reflect.*;
import javax.swing.*;
import javax.swing.plaf.*;
import javax.swing.event.*;
import javax.swing.plaf.basic.*;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeEvent;
import sun.swing.plaf.synth.SynthUI;
/**
 * Synth's ComboBoxUI.
 *
 * Extends {@link BasicComboBoxUI}, layering on Synth style lookup, the
 * "act like a button when not editable" behavior, and focus-driven
 * repainting of the editor component.
 *
 * @author Scott Violet
 */
class SynthComboBoxUI extends BasicComboBoxUI implements
        PropertyChangeListener, SynthUI {
    // Current Synth style for this combo; refreshed in updateStyle().
    private SynthStyle style;
    // Whether the renderer should use the list's selection colors
    // (ComboBox.rendererUseListColors).
    private boolean useListColors;

    /**
     * Used to adjust the location and size of the popup. Very useful for
     * situations such as we find in Nimbus where part of the border is used
     * to paint the focus. In such cases, the border is empty space, and not
     * part of the "visual" border, and in these cases, you'd like the popup
     * to be adjusted such that it looks as if it were next to the visual border.
     * You may want to use negative insets to get the right look.
     */
    Insets popupInsets;

    /**
     * This flag may be set via UIDefaults. By default, it is false, to
     * preserve backwards compatibility. If true, then the combo will
     * "act as a button" when it is not editable.
     */
    private boolean buttonWhenNotEditable;

    /**
     * A flag to indicate that the combo box and combo box button should
     * remain in the PRESSED state while the combo popup is visible.
     */
    private boolean pressedWhenPopupVisible;

    /**
     * When buttonWhenNotEditable is true, this field is used to help make
     * the combo box appear and function as a button when the combo box is
     * not editable. In such a state, you can click anywhere on the button
     * to get it to open the popup. Also, anywhere you hover over the combo
     * will cause the entire combo to go into "rollover" state, and anywhere
     * you press will go into "pressed" state. This also keeps in sync the
     * state of the combo and the arrowButton.
     */
    private ButtonHandler buttonHandler;

    /**
     * Handler for repainting combo when editor component gains/looses focus
     */
    private EditorFocusHandler editorFocusHandler;

    /**
     * If true, then the cell renderer will be forced to be non-opaque when
     * used for rendering the selected item in the combo box (not in the list),
     * and forced to opaque after rendering the selected value.
     */
    private boolean forceOpaque = false;

    /**
     * NOTE: This serves the same purpose as the same field in BasicComboBoxUI.
     * It is here because I could not give the padding field in
     * BasicComboBoxUI protected access in an update release.
     */
    private Insets padding;

    // Standard plaf entry point: one UI delegate per combo box component.
    public static ComponentUI createUI(JComponent c) {
        return new SynthComboBoxUI();
    }

    /**
     * {@inheritDoc}
     *
     * Overridden to ensure that ButtonHandler is created prior to any of
     * the other installXXX methods, since several of them reference
     * buttonHandler.
     */
    @Override
    public void installUI(JComponent c) {
        buttonHandler = new ButtonHandler();
        super.installUI(c);
    }

    @Override
    protected void installDefaults() {
        //NOTE: This next line of code was added because, since squareButton in
        //BasicComboBoxUI is private, I need to have some way of reading it from UIManager.
        //This is an incomplete solution (since it implies that squareButons,
        //once set, cannot be reset per state. Probably ok, but not always ok).
        //This line of code should be removed at the same time that squareButton
        //is made protected in the super class.
        super.installDefaults();

        //This is here instead of in updateStyle because the value for padding
        //needs to remain consistent with the value for padding in
        //BasicComboBoxUI. I wouldn't have this value here at all if not
        //for the fact that I cannot make "padding" protected in any way
        //for an update release. This *should* be fixed in Java 7
        padding = UIManager.getInsets("ComboBox.padding");

        updateStyle(comboBox);
    }

    // Re-reads the Synth style and the ComboBox.* style properties; called
    // from installDefaults() and whenever a style-affecting property changes.
    private void updateStyle(JComboBox comboBox) {
        SynthStyle oldStyle = style;
        SynthContext context = getContext(comboBox, ENABLED);
        style = SynthLookAndFeel.updateStyle(context, this);
        if (style != oldStyle) {
            popupInsets = (Insets)style.get(context, "ComboBox.popupInsets");
            useListColors = style.getBoolean(context,
                    "ComboBox.rendererUseListColors", true);
            buttonWhenNotEditable = style.getBoolean(context,
                    "ComboBox.buttonWhenNotEditable", false);
            pressedWhenPopupVisible = style.getBoolean(context,
                    "ComboBox.pressedWhenPopupVisible", false);

            if (oldStyle != null) {
                // Key bindings may differ under the new style.
                uninstallKeyboardActions();
                installKeyboardActions();
            }
            forceOpaque = style.getBoolean(context,
                    "ComboBox.forceOpaque", false);
        }
        context.dispose();

        if(listBox != null) {
            SynthLookAndFeel.updateStyles(listBox);
        }
    }

    @Override
    protected void installListeners() {
        comboBox.addPropertyChangeListener(this);
        comboBox.addMouseListener(buttonHandler);
        editorFocusHandler = new EditorFocusHandler(comboBox);
        super.installListeners();
    }

    @Override
    public void uninstallUI(JComponent c) {
        if (popup instanceof SynthComboPopup) {
            ((SynthComboPopup)popup).removePopupMenuListener(buttonHandler);
        }
        super.uninstallUI(c);
        buttonHandler = null;
    }

    @Override
    protected void uninstallDefaults() {
        SynthContext context = getContext(comboBox, ENABLED);

        style.uninstallDefaults(context);
        context.dispose();
        style = null;
    }

    @Override
    protected void uninstallListeners() {
        editorFocusHandler.unregister();
        comboBox.removePropertyChangeListener(this);
        comboBox.removeMouseListener(buttonHandler);
        buttonHandler.pressed = false;
        buttonHandler.over = false;
        super.uninstallListeners();
    }

    @Override
    public SynthContext getContext(JComponent c) {
        return getContext(c, getComponentState(c));
    }

    // Builds a SynthContext for the given component and explicit state mask.
    private SynthContext getContext(JComponent c, int state) {
        return SynthContext.getContext(SynthContext.class, c,
                SynthLookAndFeel.getRegion(c), style, state);
    }

    // Convenience lookup of the Synth region for a component.
    private Region getRegion(JComponent c) {
        return SynthLookAndFeel.getRegion(c);
    }

    // Computes the Synth state bits for painting. When acting like a button,
    // the state is derived from the shared ButtonHandler instead of the
    // default component state.
    private int getComponentState(JComponent c) {
        // currently we have a broken situation where if a developer
        // takes the border from a JComboBox and sets it on a JTextField
        // then the codepath will eventually lead back to this method
        // but pass in a JTextField instead of JComboBox! In case this
        // happens, we just return the normal synth state for the component
        // instead of doing anything special
        if (!(c instanceof JComboBox)) return SynthLookAndFeel.getComponentState(c);

        JComboBox box = (JComboBox)c;
        if (shouldActLikeButton()) {
            int state = ENABLED;
            if ((!c.isEnabled())) {
                state = DISABLED;
            }
            if (buttonHandler.isPressed()) {
                state |= PRESSED;
            }
            if (buttonHandler.isRollover()) {
                state |= MOUSE_OVER;
            }
            if (box.isFocusOwner()) {
                state |= FOCUSED;
            }
            return state;
        } else {
            // for editable combos the editor component has the focus not the
            // combo box its self, so we should make the combo paint focused
            // when its editor has focus
            int basicState = SynthLookAndFeel.getComponentState(c);
            if (box.isEditable() &&
                    box.getEditor().getEditorComponent().isFocusOwner()) {
                basicState |= FOCUSED;
            }
            return basicState;
        }
    }

    @Override
    protected ComboPopup createPopup() {
        SynthComboPopup p = new SynthComboPopup(comboBox);
        // ButtonHandler listens for popup visibility to keep the pressed
        // state in sync (see popupMenuCanceled).
        p.addPopupMenuListener(buttonHandler);
        return p;
    }

    @Override
    protected ListCellRenderer createRenderer() {
        return new SynthComboBoxRenderer();
    }

    @Override
    protected ComboBoxEditor createEditor() {
        return new SynthComboBoxEditor();
    }

    //
    // end UI Initialization
    //======================

    @Override
    public void propertyChange(PropertyChangeEvent e) {
        if (SynthLookAndFeel.shouldUpdateStyle(e)) {
            updateStyle(comboBox);
        }
    }

    @Override
    protected JButton createArrowButton() {
        SynthArrowButton button = new SynthArrowButton(SwingConstants.SOUTH);
        button.setName("ComboBox.arrowButton");
        // Sharing the ButtonHandler as the button's model keeps the arrow
        // button's pressed/rollover state in sync with the combo's.
        button.setModel(buttonHandler);
        return button;
    }

    //=================================
    // begin ComponentUI Implementation

    @Override
    public void update(Graphics g, JComponent c) {
        SynthContext context = getContext(c);

        SynthLookAndFeel.update(context, g);
        context.getPainter().paintComboBoxBackground(context, g, 0, 0,
                c.getWidth(), c.getHeight());
        paint(context, g);
        context.dispose();
    }

    @Override
    public void paint(Graphics g, JComponent c) {
        SynthContext context = getContext(c);

        paint(context, g);
        context.dispose();
    }

    // Paints the selected value for non-editable combos (the editor paints
    // itself when the combo is editable).
    protected void paint(SynthContext context, Graphics g) {
        hasFocus = comboBox.hasFocus();
        if ( !comboBox.isEditable() ) {
            Rectangle r = rectangleForCurrentValue();
            paintCurrentValue(g,r,hasFocus);
        }
    }

    @Override
    public void paintBorder(SynthContext context, Graphics g, int x,
                            int y, int w, int h) {
        context.getPainter().paintComboBoxBorder(context, g, x, y, w, h);
    }

    /**
     * Paints the currently selected item.
     */
    @Override
    public void paintCurrentValue(Graphics g,Rectangle bounds,boolean hasFocus) {
        ListCellRenderer renderer = comboBox.getRenderer();
        Component c;

        c = renderer.getListCellRendererComponent(
                listBox, comboBox.getSelectedItem(), -1, false, false );

        // Fix for 4238829: should lay out the JPanel.
        boolean shouldValidate = false;
        if (c instanceof JPanel) {
            shouldValidate = true;
        }

        if (c instanceof UIResource) {
            c.setName("ComboBox.renderer");
        }

        // Temporarily force the renderer non-opaque while painting the
        // selected value in the combo itself (see forceOpaque field doc).
        boolean force = forceOpaque && c instanceof JComponent;
        if (force) {
            ((JComponent)c).setOpaque(false);
        }

        int x = bounds.x, y = bounds.y, w = bounds.width, h = bounds.height;
        if (padding != null) {
            x = bounds.x + padding.left;
            y = bounds.y + padding.top;
            w = bounds.width - (padding.left + padding.right);
            h = bounds.height - (padding.top + padding.bottom);
        }

        currentValuePane.paintComponent(g, c, comboBox, x, y, w, h, shouldValidate);

        if (force) {
            ((JComponent)c).setOpaque(true);
        }
    }

    /**
     * @return true if this combo box should act as one big button. Typically
     * only happens when buttonWhenNotEditable is true, and comboBox.isEditable
     * is false.
     */
    private boolean shouldActLikeButton() {
        return buttonWhenNotEditable && !comboBox.isEditable();
    }

    /**
     * Return the default size of an empty display area of the combo box using
     * the current renderer and font.
     *
     * This method was overridden to use SynthComboBoxRenderer instead of
     * DefaultListCellRenderer as the default renderer when calculating the
     * size of the combo box. This is used in the case of the combo not having
     * any data.
     *
     * @return the size of an empty display area
     * @see #getDisplaySize
     */
    @Override
    protected Dimension getDefaultSize() {
        SynthComboBoxRenderer r = new SynthComboBoxRenderer();
        Dimension d = getSizeForComponent(r.getListCellRendererComponent(listBox, " ", -1, false, false));
        return new Dimension(d.width, d.height);
    }

    /**
     * This has been refactored out in hopes that it may be investigated and
     * simplified for the next major release. adding/removing
     * the component to the currentValuePane and changing the font may be
     * redundant operations.
     *
     * NOTE: This method was copied in its entirety from BasicComboBoxUI. Might
     * want to make it protected in BasicComboBoxUI in Java 7
     */
    private Dimension getSizeForComponent(Component comp) {
        currentValuePane.add(comp);
        comp.setFont(comboBox.getFont());
        Dimension d = comp.getPreferredSize();
        currentValuePane.remove(comp);
        return d;
    }

    /**
     * From BasicComboBoxRenderer v 1.18.
     *
     * Be aware that SynthFileChooserUIImpl relies on the fact that the default
     * renderer installed on a Synth combo box is a JLabel. If this is changed,
     * then an assert will fail in SynthFileChooserUIImpl
     */
    private class SynthComboBoxRenderer extends JLabel implements ListCellRenderer, UIResource {
        public SynthComboBoxRenderer() {
            super();
            setName("ComboBox.renderer");
            setText(" ");
        }

        @Override
        public Component getListCellRendererComponent(JList list, Object value,
                         int index, boolean isSelected, boolean cellHasFocus) {
            setName("ComboBox.listRenderer");
            SynthLookAndFeel.resetSelectedUI();
            if (isSelected) {
                setBackground(list.getSelectionBackground());
                setForeground(list.getSelectionForeground());
                if (!useListColors) {
                    SynthLookAndFeel.setSelectedUI(
                            (SynthLabelUI)SynthLookAndFeel.getUIOfType(getUI(),
                            SynthLabelUI.class), isSelected, cellHasFocus,
                            list.isEnabled(), false);
                }
            } else {
                setBackground(list.getBackground());
                setForeground(list.getForeground());
            }

            setFont(list.getFont());

            if (value instanceof Icon) {
                setIcon((Icon)value);
                setText("");
            } else {
                String text = (value == null) ? " " : value.toString();

                if ("".equals(text)) {
                    text = " ";
                }
                setText(text);
            }

            // The renderer component should inherit the enabled and
            // orientation state of its parent combobox. This is
            // especially needed for GTK comboboxes, where the
            // ListCellRenderer's state determines the visual state
            // of the combobox.
            if (comboBox != null){
                setEnabled(comboBox.isEnabled());
                setComponentOrientation(comboBox.getComponentOrientation());
            }

            return this;
        }

        @Override
        public void paint(Graphics g) {
            super.paint(g);
            SynthLookAndFeel.resetSelectedUI();
        }
    }

    /**
     * From BasicCombBoxEditor v 1.24.
     */
    private static class SynthComboBoxEditor implements
                              ComboBoxEditor, UIResource {
        protected JTextField editor;
        private Object oldValue;

        public SynthComboBoxEditor() {
            editor = new JTextField("",9);
            editor.setName("ComboBox.textField");
        }

        @Override
        public Component getEditorComponent() {
            return editor;
        }

        /**
         * Sets the item that should be edited.
         *
         * @param anObject the displayed value of the editor
         */
        @Override
        public void setItem(Object anObject) {
            String text;

            if ( anObject != null )  {
                text = anObject.toString();
                oldValue = anObject;
            } else {
                text = "";
            }
            // workaround for 4530952
            if (!text.equals(editor.getText())) {
                editor.setText(text);
            }
        }

        @Override
        public Object getItem() {
            Object newValue = editor.getText();

            if (oldValue != null && !(oldValue instanceof String))  {
                // The original value is not a string. Should return the value in it's
                // original type.
                if (newValue.equals(oldValue.toString()))  {
                    return oldValue;
                } else {
                    // Must take the value from the editor and get the value and cast it to the new type.
                    Class cls = oldValue.getClass();
                    try {
                        Method method = cls.getMethod("valueOf", new Class[]{String.class});
                        newValue = method.invoke(oldValue, new Object[] { editor.getText()});
                    } catch (Exception ex) {
                        // Fail silently and return the newValue (a String object)
                    }
                }
            }
            return newValue;
        }

        @Override
        public void selectAll() {
            editor.selectAll();
            editor.requestFocus();
        }

        @Override
        public void addActionListener(ActionListener l) {
            editor.addActionListener(l);
        }

        @Override
        public void removeActionListener(ActionListener l) {
            editor.removeActionListener(l);
        }
    }

    /**
     * Handles all the logic for treating the combo as a button when it is
     * not editable, and when shouldActLikeButton() is true. This class is a
     * special ButtonModel, and installed on the arrowButton when appropriate.
     * It also is installed as a mouse listener and mouse motion listener on
     * the combo box. In this way, the state between the button and combo
     * are in sync. Whenever one is "over" both are. Whenever one is pressed,
     * both are.
     */
    private final class ButtonHandler extends DefaultButtonModel
            implements MouseListener, PopupMenuListener {
        /**
         * Indicates that the mouse is over the combo or the arrow button.
         * This field only has meaning if buttonWhenNotEnabled is true.
         */
        private boolean over;
        /**
         * Indicates that the combo or arrow button has been pressed. This
         * field only has meaning if buttonWhenNotEnabled is true.
         */
        private boolean pressed;

        //------------------------------------------------------------------
        // State Methods
        //------------------------------------------------------------------

        /**
         * <p>Updates the internal "pressed" state. If shouldActLikeButton()
         * is true, and if this method call will change the internal state,
         * then the combo and button will be repainted.</p>
         *
         * <p>Note that this method is called either when a press event
         * occurs on the combo box, or on the arrow button.</p>
         */
        private void updatePressed(boolean p) {
            this.pressed = p && isEnabled();
            if (shouldActLikeButton()) {
                comboBox.repaint();
            }
        }

        /**
         * <p>Updates the internal "over" state. If shouldActLikeButton()
         * is true, and if this method call will change the internal state,
         * then the combo and button will be repainted.</p>
         *
         * <p>Note that this method is called either when a mouseover/mouseoff event
         * occurs on the combo box, or on the arrow button.</p>
         */
        private void updateOver(boolean o) {
            boolean old = isRollover();
            this.over = o && isEnabled();
            boolean newo = isRollover();
            if (shouldActLikeButton() && old != newo) {
                comboBox.repaint();
            }
        }

        //------------------------------------------------------------------
        // DefaultButtonModel Methods
        //------------------------------------------------------------------

        /**
         * {@inheritDoc}
         *
         * Ensures that isPressed() will return true if the combo is pressed,
         * or the arrowButton is pressed, <em>or</em> if the combo popup is
         * visible. This is the case because a combo box looks pressed when
         * the popup is visible, and so should the arrow button.
         */
        @Override
        public boolean isPressed() {
            boolean b = shouldActLikeButton() ? pressed : super.isPressed();
            return b || (pressedWhenPopupVisible && comboBox.isPopupVisible());
        }

        /**
         * {@inheritDoc}
         *
         * Ensures that the armed state is in sync with the pressed state
         * if shouldActLikeButton is true. Without this method, the arrow
         * button will not look pressed when the popup is open, regardless
         * of the result of isPressed() alone.
         */
        @Override
        public boolean isArmed() {
            boolean b = shouldActLikeButton() ||
                        (pressedWhenPopupVisible && comboBox.isPopupVisible());
            return b ? isPressed() : super.isArmed();
        }

        /**
         * {@inheritDoc}
         *
         * Ensures that isRollover() will return true if the combo is
         * rolled over, or the arrowButton is rolled over.
         */
        @Override
        public boolean isRollover() {
            return shouldActLikeButton() ? over : super.isRollover();
        }

        /**
         * {@inheritDoc}
         *
         * Forwards pressed states to the internal "pressed" field
         */
        @Override
        public void setPressed(boolean b) {
            super.setPressed(b);
            updatePressed(b);
        }

        /**
         * {@inheritDoc}
         *
         * Forwards rollover states to the internal "over" field
         */
        @Override
        public void setRollover(boolean b) {
            super.setRollover(b);
            updateOver(b);
        }

        //------------------------------------------------------------------
        // MouseListener/MouseMotionListener Methods
        //------------------------------------------------------------------

        @Override
        public void mouseEntered(MouseEvent mouseEvent) {
            updateOver(true);
        }

        @Override
        public void mouseExited(MouseEvent mouseEvent) {
            updateOver(false);
        }

        @Override
        public void mousePressed(MouseEvent mouseEvent) {
            updatePressed(true);
        }

        @Override
        public void mouseReleased(MouseEvent mouseEvent) {
            updatePressed(false);
        }

        @Override
        public void mouseClicked(MouseEvent e) {}

        //------------------------------------------------------------------
        // PopupMenuListener Methods
        //------------------------------------------------------------------

        /**
         * {@inheritDoc}
         *
         * Ensures that the combo box is repainted when the popup is closed.
         * This avoids a bug where clicking off the combo wasn't causing a repaint,
         * and thus the combo box still looked pressed even when it was not.
         *
         * This bug was only noticed when acting as a button, but may be generally
         * present. If so, remove the if() block
         */
        @Override
        public void popupMenuCanceled(PopupMenuEvent e) {
            if (shouldActLikeButton() || pressedWhenPopupVisible) {
                comboBox.repaint();
            }
        }

        @Override
        public void popupMenuWillBecomeVisible(PopupMenuEvent e) {}

        @Override
        public void popupMenuWillBecomeInvisible(PopupMenuEvent e) {}
    }

    /**
     * Handler for repainting combo when editor component gains/looses focus
     */
    private static class EditorFocusHandler implements FocusListener,
            PropertyChangeListener {
        private JComboBox comboBox;
        private ComboBoxEditor editor = null;
        private Component editorComponent = null;

        private EditorFocusHandler(JComboBox comboBox) {
            this.comboBox = comboBox;
            editor = comboBox.getEditor();
            if (editor != null){
                editorComponent = editor.getEditorComponent();
                if (editorComponent != null){
                    editorComponent.addFocusListener(this);
                }
            }
            // Track editor replacement so the focus listener follows the
            // current editor component.
            comboBox.addPropertyChangeListener("editor",this);
        }

        // Detaches all listeners; called from uninstallListeners().
        public void unregister(){
            comboBox.removePropertyChangeListener(this);
            if (editorComponent!=null){
                editorComponent.removeFocusListener(this);
            }
        }

        /** Invoked when a component gains the keyboard focus. */
        public void focusGained(FocusEvent e) {
            // repaint whole combo on focus gain
            comboBox.repaint();
        }

        /** Invoked when a component loses the keyboard focus. */
        public void focusLost(FocusEvent e) {
            // repaint whole combo on focus loss
            comboBox.repaint();
        }

        /**
         * Called when the combos editor changes
         *
         * @param evt A PropertyChangeEvent object describing the event source and
         *            the property that has changed.
         */
        public void propertyChange(PropertyChangeEvent evt) {
            ComboBoxEditor newEditor = comboBox.getEditor();
            if (editor != newEditor){
                if (editorComponent!=null){
                    editorComponent.removeFocusListener(this);
                }
                editor = newEditor;
                if (editor != null){
                    editorComponent = editor.getEditorComponent();
                    if (editorComponent != null){
                        editorComponent.addFocusListener(this);
                    }
                }
            }
        }
    }
}
| apache-2.0 |
SnappyDataInc/snappy-store | tests/sql/src/main/java/sql/ecotests/tpce/implementation/app/server/persistence/src/main/java/com/pivotal/gemfirexd/app/tpce/jpa/entity/Address.java | 2891 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.app.tpce.jpa.entity;
import java.io.Serializable;
import javax.persistence.*;
import java.util.Set;
/**
 * The persistent class for the ADDRESS database table.
 *
 * Maps one ADDRESS row plus its associations: the owning zip code and the
 * companies, customers and exchanges located at this address.
 */
@Entity
@Table(name="ADDRESS")
public class Address implements Serializable {
	private static final long serialVersionUID = 1L;
	// Surrogate primary key (AD_ID column).
	@Id
	@Column(name="AD_ID", unique=true, nullable=false)
	private long adId;
	// Country name, up to 80 characters.
	@Column(name="AD_CTRY", length=80)
	private String adCtry;
	// First street-address line.
	@Column(name="AD_LINE1", length=80)
	private String adLine1;
	// Second street-address line.
	@Column(name="AD_LINE2", length=80)
	private String adLine2;
	//bi-directional many-to-one association to ZipCode
	// (lazily fetched; AD_ZC_CODE is a required foreign key)
	@ManyToOne(fetch=FetchType.LAZY)
	@JoinColumn(name="AD_ZC_CODE", nullable=false)
	private ZipCode zipCode;
	//bi-directional many-to-one association to Company
	@OneToMany(mappedBy="address")
	private Set<Company> companies;
	//bi-directional many-to-one association to Customer
	@OneToMany(mappedBy="address")
	private Set<Customer> customers;
	//bi-directional many-to-one association to Exchange
	@OneToMany(mappedBy="address")
	private Set<Exchange> exchanges;
	// JPA requires a public no-arg constructor.
	public Address() {
	}
	public long getAdId() {
		return this.adId;
	}
	public void setAdId(long adId) {
		this.adId = adId;
	}
	public String getAdCtry() {
		return this.adCtry;
	}
	public void setAdCtry(String adCtry) {
		this.adCtry = adCtry;
	}
	public String getAdLine1() {
		return this.adLine1;
	}
	public void setAdLine1(String adLine1) {
		this.adLine1 = adLine1;
	}
	public String getAdLine2() {
		return this.adLine2;
	}
	public void setAdLine2(String adLine2) {
		this.adLine2 = adLine2;
	}
	public ZipCode getZipCode() {
		return this.zipCode;
	}
	public void setZipCode(ZipCode zipCode) {
		this.zipCode = zipCode;
	}
	public Set<Company> getCompanies() {
		return this.companies;
	}
	public void setCompanies(Set<Company> companies) {
		this.companies = companies;
	}
	public Set<Customer> getCustomers() {
		return this.customers;
	}
	public void setCustomers(Set<Customer> customers) {
		this.customers = customers;
	}
	public Set<Exchange> getExchanges() {
		return this.exchanges;
	}
	public void setExchanges(Set<Exchange> exchanges) {
		this.exchanges = exchanges;
	}
} | apache-2.0
MaxRau/CoffeeMud | com/planet_ink/coffee_mud/Abilities/Prayers/Prayer_DreamFeast.java | 4127 | package com.planet_ink.coffee_mud.Abilities.Prayers;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2004-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * A prayer that makes the target dream of food while asleep; after the
 * target has slept through enough ticks, waking up fully restores its
 * hunger and thirst, and the effect ends.
 */
@SuppressWarnings("rawtypes")
public class Prayer_DreamFeast extends Prayer
{
	@Override public String ID() { return "Prayer_DreamFeast"; }
	private final static String localizedName = CMLib.lang().L("Dream Feast");
	@Override public String name() { return localizedName; }
	private final static String localizedDisplayText = CMLib.lang().L("(Dream Feast)");
	@Override public String displayText() { return localizedDisplayText; }
	@Override public int classificationCode(){return Ability.ACODE_PRAYER|Ability.DOMAIN_RESTORATION;}
	@Override public int abstractQuality(){ return Ability.QUALITY_OK_OTHERS;}
	@Override public long flags(){return Ability.FLAG_HOLY;}
	@Override protected int canAffectCode(){return Ability.CAN_MOBS;}
	@Override protected int canTargetCode(){return Ability.CAN_MOBS;}

	// Consecutive effect ticks the target has spent asleep.
	protected int ticksSleeping=0;

	@Override
	public void setMiscText(String newMiscText)
	{
		super.setMiscText(newMiscText);
		// (re)starting the effect resets the sleep counter
		ticksSleeping=0;
	}

	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(!super.tick(ticking, tickID))
			return false;
		if(CMLib.flags().isSleeping(affected))
		{
			ticksSleeping++;
			if(ticksSleeping > 8)
			{
				// NOTE(review): this message repeats on every tick past the
				// 8th while the target stays asleep -- presumably intentional
				// flavor; confirm before changing.
				if(affected instanceof MOB)
					((MOB)affected).tell(L("You have wonderful dreams of abundant feasts and overflowing wines."));
			}
		}
		else
		if(ticksSleeping > 8)
		{
			// Target slept long enough and has now woken: grant the feast.
			if(affected instanceof MOB)
			{
				((MOB)affected).tell(L("You wake up feeling full and content."));
				((MOB)affected).curState().setHunger(CharState.DEFAULT_HUNGER_FULL);
				((MOB)affected).curState().setThirst(CharState.DEFAULT_THIRST_FULL);
			}
			unInvoke();
		}
		return true;
	}

	@Override
	public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
	{
		final MOB target=this.getTarget(mob,commands,givenTarget);
		if(target==null)
			return false;
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		final boolean success=proficiencyCheck(mob,0,auto);
		if(success)
		{
			final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),L("^S<S-NAME> @x1 for <T-NAMESELF> to have dreams of feasts!^?",prayWord(mob)));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				// attach the dream effect to the target; blank misc-text resets
				// the sleep counter via setMiscText()
				final Ability A=beneficialAffect(mob,target,asLevel,0);
				if(A!=null)
					A.setMiscText("");
			}
		}
		else
			beneficialWordsFizzle(mob,target,auto?"":L("<S-NAME> @x1 for <T-NAMESELF> to have good dreams, but nothing happens.",prayWord(mob)));
		// return whether it worked
		return success;
	}
}
| apache-2.0 |
ty1er/incubator-asterixdb | asterixdb/asterix-runtime/src/main/java/org/apache/asterix/runtime/evaluators/functions/NumericTruncDescriptor.java | 10140 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Numeric function Round half to even
* Author : Xiaoyu Ma@UC Irvine
* 01/30/2012
*/
package org.apache.asterix.runtime.evaluators.functions;
import java.io.DataOutput;
import java.math.BigDecimal;
import java.math.RoundingMode;

import org.apache.asterix.dataflow.data.nontagged.serde.ADoubleSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AFloatSerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt16SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt32SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt64SerializerDeserializer;
import org.apache.asterix.dataflow.data.nontagged.serde.AInt8SerializerDeserializer;
import org.apache.asterix.formats.nontagged.SerializerDeserializerProvider;
import org.apache.asterix.om.base.AMutableDouble;
import org.apache.asterix.om.base.AMutableFloat;
import org.apache.asterix.om.base.AMutableInt16;
import org.apache.asterix.om.base.AMutableInt32;
import org.apache.asterix.om.base.AMutableInt64;
import org.apache.asterix.om.base.AMutableInt8;
import org.apache.asterix.om.functions.BuiltinFunctions;
import org.apache.asterix.om.functions.IFunctionDescriptor;
import org.apache.asterix.om.functions.IFunctionDescriptorFactory;
import org.apache.asterix.om.types.ATypeTag;
import org.apache.asterix.om.types.BuiltinType;
import org.apache.asterix.runtime.evaluators.base.AbstractScalarFunctionDynamicDescriptor;
import org.apache.asterix.runtime.exceptions.TypeMismatchException;
import org.apache.hyracks.algebricks.core.algebra.functions.FunctionIdentifier;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluator;
import org.apache.hyracks.algebricks.runtime.base.IScalarEvaluatorFactory;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.data.std.api.IPointable;
import org.apache.hyracks.data.std.primitive.VoidPointable;
import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
import org.apache.hyracks.dataflow.common.data.accessors.IFrameTupleReference;
/**
 * Descriptor for the numeric-trunc(x, precision) builtin: truncates the
 * numeric argument toward zero to the requested number of decimal places.
 * Integer inputs are returned unchanged; float/double inputs are truncated
 * through BigDecimal with {@link RoundingMode#DOWN}. NaN, infinities and
 * signed zeros pass through untouched (BigDecimal cannot represent them).
 */
public class NumericTruncDescriptor extends AbstractScalarFunctionDynamicDescriptor {
    private static final long serialVersionUID = 1L;
    public static final IFunctionDescriptorFactory FACTORY = new IFunctionDescriptorFactory() {
        @Override
        public IFunctionDescriptor createFunctionDescriptor() {
            return new NumericTruncDescriptor();
        }
    };

    @Override
    public FunctionIdentifier getIdentifier() {
        return BuiltinFunctions.NUMERIC_TRUNC;
    }

    @Override
    public IScalarEvaluatorFactory createEvaluatorFactory(IScalarEvaluatorFactory[] args) {
        return new IScalarEvaluatorFactory() {
            private static final long serialVersionUID = 1L;

            @Override
            public IScalarEvaluator createScalarEvaluator(IHyracksTaskContext ctx) throws HyracksDataException {
                return new NumericTruncEvaluator(args, ctx);
            }
        };
    }

    /** Per-task evaluator; holders and buffers are reused across tuples. */
    class NumericTruncEvaluator implements IScalarEvaluator {
        // Reusable output buffer, reset() at the start of every evaluate() call.
        private final ArrayBackedValueStorage resultStorage = new ArrayBackedValueStorage();
        private final DataOutput out = resultStorage.getDataOutput();
        private final IPointable argValue = new VoidPointable();
        private final IPointable argPrecision = new VoidPointable();
        private final IScalarEvaluator eval;
        private final IScalarEvaluator precision;
        // Mutable value holders reused to avoid per-tuple allocation.
        private final AMutableDouble aDouble = new AMutableDouble(0);
        private final AMutableFloat aFloat = new AMutableFloat(0);
        private final AMutableInt64 aInt64 = new AMutableInt64(0);
        private final AMutableInt32 aInt32 = new AMutableInt32(0);
        private final AMutableInt16 aInt16 = new AMutableInt16((short) 0);
        private final AMutableInt8 aInt8 = new AMutableInt8((byte) 0);
        @SuppressWarnings("rawtypes")
        private ISerializerDeserializer serde;

        NumericTruncEvaluator(IScalarEvaluatorFactory[] args, IHyracksTaskContext ctx) throws HyracksDataException {
            eval = args[0].createScalarEvaluator(ctx);
            precision = args[1].createScalarEvaluator(ctx);
        }

        /**
         * Decodes the second (precision) argument from its tagged serialized
         * form. Any integer type tag is accepted; int64 values are narrowed
         * to int (precision values are expected to be small).
         *
         * @throws TypeMismatchException if the tag is not an integer type
         */
        private int getPrecision() throws HyracksDataException {
            byte[] bytes = argPrecision.getByteArray();
            int offset = argPrecision.getStartOffset();
            if (bytes[offset] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
                return AInt8SerializerDeserializer.getByte(bytes, offset + 1);
            } else if (bytes[offset] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
                return AInt16SerializerDeserializer.getShort(bytes, offset + 1);
            } else if (bytes[offset] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
                return AInt32SerializerDeserializer.getInt(bytes, offset + 1);
            } else if (bytes[offset] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
                return (int) AInt64SerializerDeserializer.getLong(bytes, offset + 1);
            } else {
                throw new TypeMismatchException(getIdentifier(), 1, bytes[offset],
                        ATypeTag.SERIALIZED_INT8_TYPE_TAG, ATypeTag.SERIALIZED_INT16_TYPE_TAG,
                        ATypeTag.SERIALIZED_INT32_TYPE_TAG, ATypeTag.SERIALIZED_INT64_TYPE_TAG);
            }
        }

        @SuppressWarnings("unchecked")
        @Override
        public void evaluate(IFrameTupleReference tuple, IPointable result) throws HyracksDataException {
            resultStorage.reset();
            eval.evaluate(tuple, argValue);
            precision.evaluate(tuple, argPrecision);
            byte[] data = argValue.getByteArray();
            int offset = argValue.getStartOffset();
            // Integer inputs have no fractional part: pass them through unchanged.
            if (data[offset] == ATypeTag.SERIALIZED_INT8_TYPE_TAG) {
                serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT8);
                byte val = AInt8SerializerDeserializer.getByte(data, offset + 1);
                aInt8.setValue(val);
                serde.serialize(aInt8, out);
            } else if (data[offset] == ATypeTag.SERIALIZED_INT16_TYPE_TAG) {
                serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT16);
                short val = AInt16SerializerDeserializer.getShort(data, offset + 1);
                aInt16.setValue(val);
                serde.serialize(aInt16, out);
            } else if (data[offset] == ATypeTag.SERIALIZED_INT32_TYPE_TAG) {
                serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT32);
                int val = AInt32SerializerDeserializer.getInt(data, offset + 1);
                aInt32.setValue(val);
                serde.serialize(aInt32, out);
            } else if (data[offset] == ATypeTag.SERIALIZED_INT64_TYPE_TAG) {
                serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AINT64);
                long val = AInt64SerializerDeserializer.getLong(data, offset + 1);
                aInt64.setValue(val);
                serde.serialize(aInt64, out);
            } else if (data[offset] == ATypeTag.SERIALIZED_FLOAT_TYPE_TAG) {
                serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.AFLOAT);
                float val = AFloatSerializerDeserializer.getFloat(data, offset + 1);
                if (Float.isNaN(val) || Float.isInfinite(val) || Float.compare(val, -0.0F) == 0
                        || Float.compare(val, 0.0F) == 0) {
                    // NaN/infinity/signed zero cannot go through BigDecimal; emit as-is.
                    aFloat.setValue(val);
                    serde.serialize(aFloat, out);
                } else {
                    // Float.toString gives the shortest exact decimal form, avoiding
                    // binary-representation artifacts before truncation.
                    BigDecimal r = new BigDecimal(Float.toString(val));
                    aFloat.setValue(r.setScale(getPrecision(), RoundingMode.DOWN).floatValue());
                    serde.serialize(aFloat, out);
                }
            } else if (data[offset] == ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG) {
                serde = SerializerDeserializerProvider.INSTANCE.getSerializerDeserializer(BuiltinType.ADOUBLE);
                double val = ADoubleSerializerDeserializer.getDouble(data, offset + 1);
                if (Double.isNaN(val) || Double.isInfinite(val) || Double.compare(val, -0.0D) == 0
                        || Double.compare(val, 0.0D) == 0) {
                    // NaN/infinity/signed zero cannot go through BigDecimal; emit as-is.
                    aDouble.setValue(val);
                    serde.serialize(aDouble, out);
                } else {
                    BigDecimal r = new BigDecimal(Double.toString(val));
                    aDouble.setValue(r.setScale(getPrecision(), RoundingMode.DOWN).doubleValue());
                    serde.serialize(aDouble, out);
                }
            } else {
                throw new TypeMismatchException(getIdentifier(), 0, data[offset],
                        ATypeTag.SERIALIZED_INT8_TYPE_TAG, ATypeTag.SERIALIZED_INT16_TYPE_TAG,
                        ATypeTag.SERIALIZED_INT32_TYPE_TAG, ATypeTag.SERIALIZED_INT64_TYPE_TAG,
                        ATypeTag.SERIALIZED_FLOAT_TYPE_TAG, ATypeTag.SERIALIZED_DOUBLE_TYPE_TAG);
            }
            result.set(resultStorage);
        }
    }
}
| apache-2.0 |
dabaitu/presto | presto-main/src/main/java/com/facebook/presto/sql/analyzer/Analysis.java | 17471 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.analyzer;
import com.facebook.presto.metadata.QualifiedObjectName;
import com.facebook.presto.metadata.Signature;
import com.facebook.presto.metadata.TableHandle;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.sql.tree.DefaultTraversalVisitor;
import com.facebook.presto.sql.tree.ExistsPredicate;
import com.facebook.presto.sql.tree.Expression;
import com.facebook.presto.sql.tree.FunctionCall;
import com.facebook.presto.sql.tree.InPredicate;
import com.facebook.presto.sql.tree.Join;
import com.facebook.presto.sql.tree.LambdaArgumentDeclaration;
import com.facebook.presto.sql.tree.Node;
import com.facebook.presto.sql.tree.QualifiedNameReference;
import com.facebook.presto.sql.tree.QuantifiedComparisonExpression;
import com.facebook.presto.sql.tree.Query;
import com.facebook.presto.sql.tree.QuerySpecification;
import com.facebook.presto.sql.tree.Relation;
import com.facebook.presto.sql.tree.SampledRelation;
import com.facebook.presto.sql.tree.Statement;
import com.facebook.presto.sql.tree.SubqueryExpression;
import com.facebook.presto.sql.tree.Table;
import com.google.common.base.Preconditions;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ListMultimap;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.collect.Sets.newIdentityHashSet;
import static java.util.Objects.requireNonNull;
/**
 * Holds the results of semantic analysis for a single SQL statement:
 * per-AST-node metadata (scopes, expression types, coercions, resolved
 * tables/columns, function signatures, discovered subqueries, ...) plus
 * statement-level facts such as the CREATE TABLE destination and INSERT
 * target.
 *
 * All node-keyed collections use identity semantics (IdentityHashMap and
 * identity hash sets), so distinct AST nodes that happen to compare equal
 * structurally are tracked separately.
 */
public class Analysis
{
    private final Statement root;
    private final List<Expression> parameters;
    private String updateType;
    // Per-AST-node analysis results; all keyed by node identity.
    private final IdentityHashMap<Table, Query> namedQueries = new IdentityHashMap<>();
    private final IdentityHashMap<Node, Scope> scopes = new IdentityHashMap<>();
    private final Set<Expression> columnReferences = newIdentityHashSet();
    private final IdentityHashMap<QuerySpecification, List<FunctionCall>> aggregates = new IdentityHashMap<>();
    private final IdentityHashMap<QuerySpecification, List<List<Expression>>> groupByExpressions = new IdentityHashMap<>();
    private final IdentityHashMap<Node, Expression> where = new IdentityHashMap<>();
    private final IdentityHashMap<QuerySpecification, Expression> having = new IdentityHashMap<>();
    private final IdentityHashMap<Node, List<Expression>> orderByExpressions = new IdentityHashMap<>();
    private final IdentityHashMap<Node, List<Expression>> outputExpressions = new IdentityHashMap<>();
    private final IdentityHashMap<QuerySpecification, List<FunctionCall>> windowFunctions = new IdentityHashMap<>();
    private final IdentityHashMap<Join, Expression> joins = new IdentityHashMap<>();
    // Subqueries discovered while analyzing expressions, grouped by owning node.
    private final ListMultimap<Node, InPredicate> inPredicatesSubqueries = ArrayListMultimap.create();
    private final ListMultimap<Node, SubqueryExpression> scalarSubqueries = ArrayListMultimap.create();
    private final ListMultimap<Node, ExistsPredicate> existsSubqueries = ArrayListMultimap.create();
    private final ListMultimap<Node, QuantifiedComparisonExpression> quantifiedComparisonSubqueries = ArrayListMultimap.create();
    private final IdentityHashMap<Table, TableHandle> tables = new IdentityHashMap<>();
    private final IdentityHashMap<Expression, Type> types = new IdentityHashMap<>();
    private final IdentityHashMap<Expression, Type> coercions = new IdentityHashMap<>();
    private final Set<Expression> typeOnlyCoercions = newIdentityHashSet();
    private final IdentityHashMap<Relation, Type[]> relationCoercions = new IdentityHashMap<>();
    private final IdentityHashMap<FunctionCall, Signature> functionSignature = new IdentityHashMap<>();
    private final IdentityHashMap<QualifiedNameReference, LambdaArgumentDeclaration> lambdaArgumentReferences = new IdentityHashMap<>();
    private final IdentityHashMap<Field, ColumnHandle> columns = new IdentityHashMap<>();
    private final IdentityHashMap<SampledRelation, Double> sampleRatios = new IdentityHashMap<>();
    // for create table
    private Optional<QualifiedObjectName> createTableDestination = Optional.empty();
    private Map<String, Expression> createTableProperties = ImmutableMap.of();
    private boolean createTableAsSelectWithData = true;
    private boolean createTableAsSelectNoOp = false;
    private Optional<Insert> insert = Optional.empty();
    // for describe input and describe output
    private final boolean isDescribe;

    public Analysis(Statement root, List<Expression> parameters, boolean isDescribe)
    {
        requireNonNull(parameters);
        this.root = root;
        this.parameters = parameters;
        this.isDescribe = isDescribe;
    }
    public Statement getStatement()
    {
        return root;
    }
    public String getUpdateType()
    {
        return updateType;
    }
    public void setUpdateType(String updateType)
    {
        this.updateType = updateType;
    }
    public boolean isCreateTableAsSelectWithData()
    {
        return createTableAsSelectWithData;
    }
    public void setCreateTableAsSelectWithData(boolean createTableAsSelectWithData)
    {
        this.createTableAsSelectWithData = createTableAsSelectWithData;
    }
    public boolean isCreateTableAsSelectNoOp()
    {
        return createTableAsSelectNoOp;
    }
    public void setCreateTableAsSelectNoOp(boolean createTableAsSelectNoOp)
    {
        this.createTableAsSelectNoOp = createTableAsSelectNoOp;
    }
    public void setAggregates(QuerySpecification node, List<FunctionCall> aggregates)
    {
        this.aggregates.put(node, aggregates);
    }
    public List<FunctionCall> getAggregates(QuerySpecification query)
    {
        return aggregates.get(query);
    }
    // Returns a defensive copy of the expression-type map.
    public IdentityHashMap<Expression, Type> getTypes()
    {
        return new IdentityHashMap<>(types);
    }
    public Type getType(Expression expression)
    {
        checkArgument(types.containsKey(expression), "Expression not analyzed: %s", expression);
        return types.get(expression);
    }
    // Coerced type wins over the inferred type when a coercion was recorded.
    public Type getTypeWithCoercions(Expression expression)
    {
        checkArgument(types.containsKey(expression), "Expression not analyzed: %s", expression);
        if (coercions.containsKey(expression)) {
            return coercions.get(expression);
        }
        return types.get(expression);
    }
    public Type[] getRelationCoercion(Relation relation)
    {
        return relationCoercions.get(relation);
    }
    public void addRelationCoercion(Relation relation, Type[] types)
    {
        relationCoercions.put(relation, types);
    }
    // NOTE(review): returns the internal map directly (unlike getTypes(),
    // which copies), so callers could mutate analysis state -- confirm intended.
    public IdentityHashMap<Expression, Type> getCoercions()
    {
        return coercions;
    }
    public Type getCoercion(Expression expression)
    {
        return coercions.get(expression);
    }
    public void addLambdaArgumentReferences(IdentityHashMap<QualifiedNameReference, LambdaArgumentDeclaration> lambdaArgumentReferences)
    {
        this.lambdaArgumentReferences.putAll(lambdaArgumentReferences);
    }
    public LambdaArgumentDeclaration getLambdaArgumentReference(QualifiedNameReference qualifiedNameReference)
    {
        return lambdaArgumentReferences.get(qualifiedNameReference);
    }
    public Map<QualifiedNameReference, LambdaArgumentDeclaration> getLambdaArgumentReferences()
    {
        return lambdaArgumentReferences;
    }
    public void setGroupingSets(QuerySpecification node, List<List<Expression>> expressions)
    {
        groupByExpressions.put(node, expressions);
    }
    public boolean isTypeOnlyCoercion(Expression expression)
    {
        return typeOnlyCoercions.contains(expression);
    }
    public List<List<Expression>> getGroupingSets(QuerySpecification node)
    {
        return groupByExpressions.get(node);
    }
    public void setWhere(Node node, Expression expression)
    {
        where.put(node, expression);
    }
    public Expression getWhere(QuerySpecification node)
    {
        return where.get(node);
    }
    public void setOrderByExpressions(Node node, List<Expression> items)
    {
        orderByExpressions.put(node, items);
    }
    public List<Expression> getOrderByExpressions(Node node)
    {
        return orderByExpressions.get(node);
    }
    public void setOutputExpressions(Node node, List<Expression> expressions)
    {
        outputExpressions.put(node, expressions);
    }
    public List<Expression> getOutputExpressions(Node node)
    {
        return outputExpressions.get(node);
    }
    public void setHaving(QuerySpecification node, Expression expression)
    {
        having.put(node, expression);
    }
    public void setJoinCriteria(Join node, Expression criteria)
    {
        joins.put(node, criteria);
    }
    public Expression getJoinCriteria(Join join)
    {
        return joins.get(join);
    }
    // Copies every category of subquery found while analyzing an expression
    // into this node's buckets.
    public void recordSubqueries(Node node, ExpressionAnalysis expressionAnalysis)
    {
        this.inPredicatesSubqueries.putAll(node, expressionAnalysis.getSubqueryInPredicates());
        this.scalarSubqueries.putAll(node, expressionAnalysis.getScalarSubqueries());
        this.existsSubqueries.putAll(node, expressionAnalysis.getExistsSubqueries());
        this.quantifiedComparisonSubqueries.putAll(node, expressionAnalysis.getQuantifiedComparisons());
    }
    public List<InPredicate> getInPredicateSubqueries(Node node)
    {
        if (inPredicatesSubqueries.containsKey(node)) {
            return inPredicatesSubqueries.get(node);
        }
        return ImmutableList.of();
    }
    public List<SubqueryExpression> getScalarSubqueries(Node node)
    {
        if (scalarSubqueries.containsKey(node)) {
            return scalarSubqueries.get(node);
        }
        return ImmutableList.of();
    }
    public List<ExistsPredicate> getExistsSubqueries(Node node)
    {
        if (existsSubqueries.containsKey(node)) {
            return existsSubqueries.get(node);
        }
        return ImmutableList.of();
    }
    public List<QuantifiedComparisonExpression> getQuantifiedComparisonSubqueries(Node node)
    {
        if (quantifiedComparisonSubqueries.containsKey(node)) {
            return quantifiedComparisonSubqueries.get(node);
        }
        return ImmutableList.of();
    }
    public void setWindowFunctions(QuerySpecification node, List<FunctionCall> functions)
    {
        windowFunctions.put(node, functions);
    }
    public Map<QuerySpecification, List<FunctionCall>> getWindowFunctions()
    {
        return windowFunctions;
    }
    public List<FunctionCall> getWindowFunctions(QuerySpecification query)
    {
        return windowFunctions.get(query);
    }
    public void addColumnReferences(Set<Expression> columnReferences)
    {
        this.columnReferences.addAll(columnReferences);
    }
    public Scope getScope(Node node)
    {
        return tryGetScope(node).orElseThrow(() -> new IllegalArgumentException(String.format("Analysis does not contain information for node: %s", node)));
    }
    // If the node has no scope recorded directly, walk the AST from the root
    // and inherit the nearest enclosing node's scope (see GetScopeVisitor).
    public Optional<Scope> tryGetScope(Node node)
    {
        if (scopes.containsKey(node)) {
            return Optional.of(scopes.get(node));
        }
        if (root == null) {
            return Optional.empty();
        }
        GetScopeVisitor visitor = new GetScopeVisitor(scopes, node);
        visitor.process(root, null);
        return visitor.getResult();
    }
    public Scope getRootScope()
    {
        return getScope(root);
    }
    /**
     * AST traversal that finds the scope applicable to a target node: the
     * scope recorded for the nearest ancestor (or the node itself) on the
     * path from the root.
     */
    private static class GetScopeVisitor
            extends DefaultTraversalVisitor<Void, Scope>
    {
        private final IdentityHashMap<Node, Scope> scopes;
        private final Node node;
        private Scope result;
        public GetScopeVisitor(IdentityHashMap<Node, Scope> scopes, Node node)
        {
            this.scopes = requireNonNull(scopes, "scopes is null");
            this.node = requireNonNull(node, "node is null");
        }
        @Override
        public Void process(Node current, @Nullable Scope candidate)
        {
            if (result != null) {
                return null;
            }
            if (scopes.containsKey(current)) {
                candidate = scopes.get(current);
            }
            if (node == current) {
                result = candidate;
            }
            else {
                super.process(current, candidate);
            }
            return null;
        }
        public Optional<Scope> getResult()
        {
            return Optional.ofNullable(result);
        }
    }
    public void setScope(Node node, Scope scope)
    {
        scopes.put(node, scope);
    }
    public RelationType getOutputDescriptor()
    {
        return getOutputDescriptor(root);
    }
    public RelationType getOutputDescriptor(Node node)
    {
        return getScope(node).getRelationType();
    }
    public TableHandle getTableHandle(Table table)
    {
        return tables.get(table);
    }
    public void registerTable(Table table, TableHandle handle)
    {
        tables.put(table, handle);
    }
    public Signature getFunctionSignature(FunctionCall function)
    {
        return functionSignature.get(function);
    }
    public void addFunctionSignatures(IdentityHashMap<FunctionCall, Signature> infos)
    {
        functionSignature.putAll(infos);
    }
    public Set<Expression> getColumnReferences()
    {
        return ImmutableSet.copyOf(columnReferences);
    }
    public void addTypes(IdentityHashMap<Expression, Type> types)
    {
        this.types.putAll(types);
    }
    public void addCoercion(Expression expression, Type type, boolean isTypeOnlyCoercion)
    {
        this.coercions.put(expression, type);
        if (isTypeOnlyCoercion) {
            this.typeOnlyCoercions.add(expression);
        }
    }
    public void addCoercions(IdentityHashMap<Expression, Type> coercions, Set<Expression> typeOnlyCoercions)
    {
        this.coercions.putAll(coercions);
        this.typeOnlyCoercions.addAll(typeOnlyCoercions);
    }
    public Expression getHaving(QuerySpecification query)
    {
        return having.get(query);
    }
    public void setColumn(Field field, ColumnHandle handle)
    {
        columns.put(field, handle);
    }
    public ColumnHandle getColumn(Field field)
    {
        return columns.get(field);
    }
    public void setCreateTableDestination(QualifiedObjectName destination)
    {
        this.createTableDestination = Optional.of(destination);
    }
    public Optional<QualifiedObjectName> getCreateTableDestination()
    {
        return createTableDestination;
    }
    public void setCreateTableProperties(Map<String, Expression> createTableProperties)
    {
        this.createTableProperties = createTableProperties;
    }
    public Map<String, Expression> getCreateTableProperties()
    {
        return createTableProperties;
    }
    public void setInsert(Insert insert)
    {
        this.insert = Optional.of(insert);
    }
    public Optional<Insert> getInsert()
    {
        return insert;
    }
    public Query getNamedQuery(Table table)
    {
        return namedQueries.get(table);
    }
    // Associates a WITH-clause (named) query with a table reference to it.
    public void registerNamedQuery(Table tableReference, Query query)
    {
        requireNonNull(tableReference, "tableReference is null");
        requireNonNull(query, "query is null");
        namedQueries.put(tableReference, query);
    }
    public void setSampleRatio(SampledRelation relation, double ratio)
    {
        sampleRatios.put(relation, ratio);
    }
    public double getSampleRatio(SampledRelation relation)
    {
        Preconditions.checkState(sampleRatios.containsKey(relation), "Sample ratio missing for %s. Broken analysis?", relation);
        return sampleRatios.get(relation);
    }
    public List<Expression> getParameters()
    {
        return parameters;
    }
    public boolean isDescribe()
    {
        return isDescribe;
    }
    /**
     * Immutable description of an INSERT target: the destination table handle
     * and the (non-empty) list of columns being written.
     */
    @Immutable
    public static final class Insert
    {
        private final TableHandle target;
        private final List<ColumnHandle> columns;
        public Insert(TableHandle target, List<ColumnHandle> columns)
        {
            this.target = requireNonNull(target, "target is null");
            this.columns = requireNonNull(columns, "columns is null");
            checkArgument(columns.size() > 0, "No columns given to insert");
        }
        public List<ColumnHandle> getColumns()
        {
            return columns;
        }
        public TableHandle getTarget()
        {
            return target;
        }
    }
}
| apache-2.0 |
roberthafner/flowable-engine | modules/flowable-engine/src/main/java/org/activiti/engine/impl/cfg/multitenant/TenantAwareDataSource.java | 3662 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.engine.impl.cfg.multitenant;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;
import javax.sql.DataSource;
import org.activiti.engine.ActivitiException;
/**
 * A {@link DataSource} that routes every call to the {@link DataSource}
 * registered for the tenant currently exposed by the {@link TenantInfoHolder}.
 *
 * When a {@link Connection} is requested, the datasource for the current
 * tenant is resolved and used for that request.
 *
 * Heavily influenced and inspired by Spring's AbstractRoutingDataSource.
 *
 * @author Joram Barrez
 */
public class TenantAwareDataSource implements DataSource {

    protected TenantInfoHolder tenantInfoHolder;
    protected Map<Object, DataSource> dataSources = new HashMap<Object, DataSource>();

    public TenantAwareDataSource(TenantInfoHolder tenantInfoHolder) {
        this.tenantInfoHolder = tenantInfoHolder;
    }

    /** Registers the datasource to use for the given tenant key. */
    public void addDataSource(Object key, DataSource dataSource) {
        dataSources.put(key, dataSource);
    }

    /** Removes the datasource registered under the given tenant key, if any. */
    public void removeDataSource(Object key) {
        dataSources.remove(key);
    }

    // Connection requests delegate to the datasource of the current tenant.

    public Connection getConnection() throws SQLException {
        return getCurrentDataSource().getConnection();
    }

    public Connection getConnection(String username, String password) throws SQLException {
        return getCurrentDataSource().getConnection(username, password);
    }

    /**
     * Resolves the datasource registered for the tenant id currently held by
     * the {@link TenantInfoHolder}.
     *
     * @throws ActivitiException when no datasource is registered for that tenant
     */
    protected DataSource getCurrentDataSource() {
        String currentTenantId = tenantInfoHolder.getCurrentTenantId();
        DataSource tenantDataSource = dataSources.get(currentTenantId);
        if (tenantDataSource != null) {
            return tenantDataSource;
        }
        throw new ActivitiException("Could not find a dataSource for tenant " + currentTenantId);
    }

    public int getLoginTimeout() throws SQLException {
        return 0; // Default
    }

    public Logger getParentLogger() throws SQLFeatureNotSupportedException {
        return Logger.getLogger(Logger.GLOBAL_LOGGER_NAME);
    }

    @SuppressWarnings("unchecked")
    public <T> T unwrap(Class<T> iface) throws SQLException {
        if (!iface.isInstance(this)) {
            throw new SQLException("Cannot unwrap " + getClass().getName() + " as an instance of " + iface.getName());
        }
        return (T) this;
    }

    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        return iface.isInstance(this);
    }

    public Map<Object, DataSource> getDataSources() {
        return dataSources;
    }

    public void setDataSources(Map<Object, DataSource> dataSources) {
        this.dataSources = dataSources;
    }

    // Operations below are intentionally unsupported.

    public PrintWriter getLogWriter() throws SQLException {
        throw new UnsupportedOperationException();
    }

    public void setLogWriter(PrintWriter out) throws SQLException {
        throw new UnsupportedOperationException();
    }

    public void setLoginTimeout(int seconds) throws SQLException {
        throw new UnsupportedOperationException();
    }
}
| apache-2.0 |
caosg/BroadleafCommerce | common/src/main/java/org/broadleafcommerce/common/sandbox/service/SandBoxServiceImpl.java | 6194 | /*
* #%L
* BroadleafCommerce Open Admin Platform
* %%
* Copyright (C) 2009 - 2013 Broadleaf Commerce
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.broadleafcommerce.common.sandbox.service;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.broadleafcommerce.common.sandbox.dao.SandBoxDao;
import org.broadleafcommerce.common.sandbox.domain.SandBox;
import org.broadleafcommerce.common.sandbox.domain.SandBoxType;
import org.broadleafcommerce.common.util.TransactionUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.Resource;
@Service(value = "blSandBoxService")
public class SandBoxServiceImpl implements SandBoxService {

    private static final Log LOG = LogFactory.getLog(SandBoxServiceImpl.class);

    @Resource(name = "blSandBoxDao")
    protected SandBoxDao sandBoxDao;

    @Override
    public SandBox retrieveSandBoxById(Long sandboxId) {
        return sandBoxDao.retrieve(sandboxId);
    }

    @Override
    public List<SandBox> retrieveAllSandBoxes() {
        return sandBoxDao.retrieveAllSandBoxes();
    }

    @Override
    public List<SandBox> retrieveSandBoxesByType(SandBoxType type) {
        return sandBoxDao.retrieveSandBoxesByType(type);
    }

    @Override
    public SandBox retrieveUserSandBoxForParent(Long authorId, Long parentSandBoxId) {
        return sandBoxDao.retrieveUserSandBoxForParent(authorId, parentSandBoxId);
    }

    @Override
    public SandBox retrieveSandBoxManagementById(Long sandBoxId) {
        return sandBoxDao.retrieveSandBoxManagementById(sandBoxId);
    }

    // Builds the list of sandboxes the author may preview: all of the author's
    // own USER sandboxes, plus every APPROVAL/DEFAULT sandbox that is not
    // already represented by one of the author's boxes (either directly by id
    // or as the parent of one of the author's boxes).
    @Override
    public List<SandBox> retrievePreviewSandBoxes(Long authorId) {
        List<SandBox> returnList = new ArrayList<SandBox>();
        List<SandBox> authorBoxes = sandBoxDao.retrieveSandBoxesForAuthor(authorId, SandBoxType.USER);
        List<SandBox> approvalBoxes = sandBoxDao.retrieveSandBoxesByType(SandBoxType.APPROVAL);
        List<SandBox> defaultBoxes = sandBoxDao.retrieveSandBoxesByType(SandBoxType.DEFAULT);
        List<SandBox> candidateBoxes = new ArrayList<SandBox>();
        candidateBoxes.addAll(approvalBoxes);
        candidateBoxes.addAll(defaultBoxes);
        returnList.addAll(authorBoxes);
        for (SandBox cb : candidateBoxes) {
            boolean match = false;
            for (SandBox authorBox : authorBoxes) {
                // A candidate is covered when an author box has the same id or
                // has the candidate as its parent sandbox.
                if (authorBox.getId().equals(cb.getId()) ||
                        (authorBox.getParentSandBox() != null && authorBox.getParentSandBox().getId().equals(cb.getId()))) {
                    match = true;
                }
            }
            if (!match) {
                returnList.add(cb);
            }
        }
        return returnList;
    }

    // Resolves the sandbox for a user session: an explicit override id wins;
    // otherwise the named USER sandbox is looked up and lazily created.
    @Override
    public SandBox retrieveUserSandBox(Long authorId, Long overrideSandBoxId, String sandBoxName) {
        SandBox userSandbox;
        if (overrideSandBoxId != null) {
            userSandbox = retrieveSandBoxById(overrideSandBoxId);
        } else {
            userSandbox = retrieveSandBox(sandBoxName, SandBoxType.USER);
            if (userSandbox == null) {
                userSandbox = createSandBox(sandBoxName, SandBoxType.USER);
            }
        }

        return userSandbox;
    }

    @Override
    public Map<Long, String> retrieveAuthorNamesForSandBoxes(Set<Long> sandBoxIds) {
        return sandBoxDao.retrieveAuthorNamesForSandBoxes(sandBoxIds);
    }

    @Override
    public synchronized SandBox createSandBox(String sandBoxName, SandBoxType sandBoxType) {
        return sandBoxDao.createSandBox(sandBoxName, sandBoxType);
    }

    // Creates the user sandbox only when one does not exist yet (see the
    // inverted semantics of checkForExistingSandbox below); otherwise the
    // already-existing named sandbox is returned.
    @Override
    public synchronized SandBox createUserSandBox(Long authorId, SandBox approvalSandBox) {
        if (checkForExistingSandbox(SandBoxType.USER, approvalSandBox.getName(), authorId)) {
            return sandBoxDao.createUserSandBox(authorId, approvalSandBox);
        }
        return sandBoxDao.retrieveNamedSandBox(SandBoxType.USER, approvalSandBox.getName(), authorId);
    }

    @Override
    public synchronized SandBox createDefaultSandBox() {
        return sandBoxDao.createDefaultSandBox();
    }

    @Override
    public SandBox retrieveSandBox(String sandBoxName, SandBoxType sandBoxType) {
        return sandBoxDao.retrieveNamedSandBox(sandBoxType, sandBoxName);
    }

    @Override
    @Deprecated
    public List<SandBox> retrieveAllUserSandBoxes(Long authorId) {
        return sandBoxDao.retrieveAllUserSandBoxes(authorId);
    }

    // Marks every child of the given sandbox as archived ('Y') and persists
    // each change; the parent itself is not touched here.
    @Override
    @Transactional(TransactionUtils.DEFAULT_TRANSACTION_MANAGER)
    public void archiveChildSandboxes(Long parentSandBoxId) {
        List<SandBox> childSandBoxes = retrieveChildSandBoxesByParentId(parentSandBoxId);
        for (SandBox sandbox : childSandBoxes) {
            sandbox.setArchived('Y');
            sandBoxDao.merge(sandbox);
        }
    }

    public List<SandBox> retrieveChildSandBoxesByParentId(Long parentSandBoxId) {
        return sandBoxDao.retrieveChildSandBoxesByParentId(parentSandBoxId);
    }

    @Override
    public boolean checkForExistingApprovalSandboxWithName(String sandboxName) {
        // Inherits the inverted semantics of checkForExistingSandbox: true
        // means no APPROVAL sandbox with this name exists.
        return checkForExistingSandbox(SandBoxType.APPROVAL, sandboxName, null);
    }

    // NOTE: despite its name, this returns true when NO sandbox with the given
    // type/name/author exists (sb == null), i.e. "the name is available".
    // Callers such as createUserSandBox depend on this inverted meaning.
    @Override
    public boolean checkForExistingSandbox(SandBoxType sandBoxType, String sandboxName, Long authorId) {
        SandBox sb = sandBoxDao.retrieveNamedSandBox(sandBoxType, sandboxName, authorId);
        return sb == null;
    }
}
| apache-2.0 |
djechelon/spring-security | oauth2/oauth2-core/src/main/java/org/springframework/security/oauth2/core/converter/ClaimTypeConverter.java | 2309 | /*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.security.oauth2.core.converter;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.springframework.core.convert.converter.Converter;
import org.springframework.util.Assert;
import org.springframework.util.CollectionUtils;
/**
 * A {@link Converter} that applies per-claim type conversion to a claims map.
 *
 * @author Joe Grandja
 * @since 5.2
 * @see Converter
 */
public final class ClaimTypeConverter implements Converter<Map<String, Object>, Map<String, Object>> {

    private final Map<String, Converter<Object, ?>> claimTypeConverters;

    /**
     * Constructs a {@code ClaimTypeConverter} using the provided parameters.
     * @param claimTypeConverters a {@link Map} of {@link Converter}(s) keyed by claim
     * name
     */
    public ClaimTypeConverter(Map<String, Converter<Object, ?>> claimTypeConverters) {
        Assert.notEmpty(claimTypeConverters, "claimTypeConverters cannot be empty");
        Assert.noNullElements(claimTypeConverters.values().toArray(), "Converter(s) cannot be null");
        this.claimTypeConverters = Collections.unmodifiableMap(new LinkedHashMap<>(claimTypeConverters));
    }

    @Override
    public Map<String, Object> convert(Map<String, Object> claims) {
        if (CollectionUtils.isEmpty(claims)) {
            return claims;
        }
        // Start from a copy so claims without a registered converter pass
        // through unchanged.
        Map<String, Object> converted = new HashMap<>(claims);
        for (Map.Entry<String, Converter<Object, ?>> entry : this.claimTypeConverters.entrySet()) {
            String claimName = entry.getKey();
            if (!claims.containsKey(claimName)) {
                continue;
            }
            Object convertedValue = entry.getValue().convert(claims.get(claimName));
            // A null conversion result leaves the original claim value intact.
            if (convertedValue != null) {
                converted.put(claimName, convertedValue);
            }
        }
        return converted;
    }
}
| apache-2.0 |
streamlio/heron | heron/statemgrs/tests/java/com/twitter/heron/statemgr/zookeeper/curator/CuratorStateManagerTest.java | 12007 | // Copyright 2016 Twitter. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.twitter.heron.statemgr.zookeeper.curator;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.SettableFuture;
import com.google.protobuf.Message;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.api.ACLBackgroundPathAndBytesable;
import org.apache.curator.framework.api.BackgroundCallback;
import org.apache.curator.framework.api.BackgroundPathable;
import org.apache.curator.framework.api.CreateBuilder;
import org.apache.curator.framework.api.CuratorEvent;
import org.apache.curator.framework.api.DeleteBuilder;
import org.apache.curator.framework.api.ExistsBuilder;
import org.apache.curator.framework.api.GetDataBuilder;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.data.Stat;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import com.twitter.heron.common.basics.Pair;
import com.twitter.heron.spi.common.Config;
import com.twitter.heron.spi.common.Key;
import com.twitter.heron.spi.utils.NetworkUtils;
import com.twitter.heron.statemgr.zookeeper.ZkContext;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyInt;
import static org.mockito.Matchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
 * Unit tests for {@code CuratorStateManager}. All ZooKeeper/Curator
 * interactions are mocked with Mockito, so no real ensemble is required.
 */
public class CuratorStateManagerTest {

    private static final String CONNECTION_STRING = "connectionString";
    private static final String PATH = "/heron/n90/path";
    private static final String ROOT_ADDR = "/";
    private static final String TOPOLOGY_NAME = "topology";
    private static final String TUNNEL_STRING = "tunnelConnectionString";

    private Config tunnelingConfig;
    private Config config;

    @Before
    public void before() throws Exception {
        Config.Builder builder = Config.newBuilder(true)
            .put(Key.STATEMGR_ROOT_PATH, ROOT_ADDR)
            .put(Key.TOPOLOGY_NAME, TOPOLOGY_NAME)
            .put(Key.STATEMGR_CONNECTION_STRING, CONNECTION_STRING);

        // config is used for testing all the methods except initialize and close
        config = builder.build();

        // tunneling config is used when testing initialize/close method
        tunnelingConfig = builder
            .put(ZkContext.IS_INITIALIZE_TREE, true)
            .put(ZkContext.IS_TUNNEL_NEEDED, true)
            .put(Key.SCHEDULER_IS_SERVICE, false)
            .build();
    }

    @After
    public void after() throws Exception {
    }

    /**
     * Test initialize method: the tunnel should be set up, the curator client
     * started and awaited, and the state tree created.
     * @throws Exception
     */
    @Test
    public void testInitialize() throws Exception {
        CuratorStateManager spyStateManager = spy(new CuratorStateManager());
        CuratorFramework mockClient = mock(CuratorFramework.class);

        // Stub out the real client/tunnel so no network access happens.
        doReturn(mockClient).when(spyStateManager).getCuratorClient();
        doReturn(new Pair<String, List<Process>>(TUNNEL_STRING, new ArrayList<Process>()))
            .when(spyStateManager).setupZkTunnel(any(NetworkUtils.TunnelConfig.class));
        doReturn(true).when(mockClient).blockUntilConnected(anyInt(), any(TimeUnit.class));

        spyStateManager.initialize(tunnelingConfig);

        // Make sure tunneling is setup correctly
        assertTrue(spyStateManager.getConnectionString().equals(TUNNEL_STRING));

        // Verify curator client is invoked
        verify(mockClient).start();
        verify(mockClient).blockUntilConnected(anyInt(), any(TimeUnit.class));
        // one createContainers call per node of the initial state tree
        verify(mockClient, times(9)).createContainers(anyString());

        // Verify initTree is called
        verify(spyStateManager).initTree();
    }

    /**
     * Test close method: the curator client and all tunnel processes must be
     * shut down.
     * @throws Exception
     */
    @Test
    public void testClose() throws Exception {
        CuratorStateManager spyStateManager = spy(new CuratorStateManager());
        CuratorFramework mockClient = mock(CuratorFramework.class);
        Process mockProcess1 = mock(Process.class);
        Process mockProcess2 = mock(Process.class);

        List<Process> tunnelProcesses = new ArrayList<>();
        tunnelProcesses.add(mockProcess1);
        tunnelProcesses.add(mockProcess2);

        doReturn(mockClient).when(spyStateManager).getCuratorClient();
        doReturn(new Pair<>(TUNNEL_STRING, tunnelProcesses))
            .when(spyStateManager).setupZkTunnel(any(NetworkUtils.TunnelConfig.class));
        doReturn(true).when(mockClient).blockUntilConnected(anyInt(), any(TimeUnit.class));

        spyStateManager.initialize(tunnelingConfig);
        spyStateManager.close();

        // verify curator and processes are closed correctly
        verify(mockClient).close();
        verify(mockProcess1).destroy();
        verify(mockProcess2).destroy();
    }

    /**
     * Test nodeExists method: a non-null Stat from checkExists() means the
     * node exists; null means it does not.
     * @throws Exception
     */
    @Test
    public void testExistNode() throws Exception {
        CuratorStateManager spyStateManager = spy(new CuratorStateManager());
        CuratorFramework mockClient = mock(CuratorFramework.class);
        ExistsBuilder mockExistsBuilder = mock(ExistsBuilder.class);

        final String correctPath = "/correct/path";
        final String wrongPath = "/wrong/path";

        doReturn(mockClient)
            .when(spyStateManager).getCuratorClient();
        doReturn(true)
            .when(mockClient).blockUntilConnected(anyInt(), any(TimeUnit.class));
        doReturn(mockExistsBuilder)
            .when(mockClient).checkExists();
        doReturn(new Stat())
            .when(mockExistsBuilder).forPath(correctPath);
        doReturn(null)
            .when(mockExistsBuilder).forPath(wrongPath);

        spyStateManager.initialize(config);

        // Verify the result is true when path is correct
        ListenableFuture<Boolean> result1 = spyStateManager.nodeExists(correctPath);
        verify(mockExistsBuilder).forPath(correctPath);
        assertTrue(result1.get());

        // Verify the result is false when path is wrong
        ListenableFuture<Boolean> result2 = spyStateManager.nodeExists(wrongPath);
        verify(mockExistsBuilder).forPath(wrongPath);
        assertFalse(result2.get());
    }

    /**
     * test createNode method: the data must be written via
     * create().withMode(...).forPath(path, data).
     * @throws Exception
     */
    @Test
    public void testCreateNode() throws Exception {
        CuratorStateManager spyStateManager = spy(new CuratorStateManager());
        CuratorFramework mockClient = mock(CuratorFramework.class);
        CreateBuilder mockCreateBuilder = mock(CreateBuilder.class);
        // Mockito doesn't support mock type-parametrized class, thus suppress the warning
        @SuppressWarnings("rawtypes")
        ACLBackgroundPathAndBytesable mockPath = spy(ACLBackgroundPathAndBytesable.class);

        final byte[] data = new byte[10];

        doReturn(mockClient)
            .when(spyStateManager).getCuratorClient();
        doReturn(true)
            .when(mockClient).blockUntilConnected(anyInt(), any(TimeUnit.class));
        doReturn(mockCreateBuilder)
            .when(mockClient).create();
        doReturn(mockPath)
            .when(mockCreateBuilder).withMode(any(CreateMode.class));

        spyStateManager.initialize(config);

        // Verify the node is created successfully
        ListenableFuture<Boolean> result = spyStateManager.createNode(PATH, data, false);
        verify(mockCreateBuilder).withMode(any(CreateMode.class));
        verify(mockPath).forPath(PATH, data);
        assertTrue(result.get());
    }

    /**
     * Test deleteNode method (exercised through deleteExecutionState): the
     * delete must be issued with version -1, i.e. any version.
     * @throws Exception
     */
    @Test
    public void testDeleteNode() throws Exception {
        CuratorStateManager spyStateManager = spy(new CuratorStateManager());
        CuratorFramework mockClient = mock(CuratorFramework.class);
        DeleteBuilder mockDeleteBuilder = mock(DeleteBuilder.class);
        // Mockito doesn't support mock type-parametrized class, thus suppress the warning
        @SuppressWarnings("rawtypes")
        BackgroundPathable mockBackPathable = mock(BackgroundPathable.class);

        doReturn(mockClient)
            .when(spyStateManager).getCuratorClient();
        doReturn(true)
            .when(mockClient).blockUntilConnected(anyInt(), any(TimeUnit.class));
        doReturn(mockDeleteBuilder)
            .when(mockClient).delete();
        doReturn(mockBackPathable)
            .when(mockDeleteBuilder).withVersion(-1);

        spyStateManager.initialize(config);

        ListenableFuture<Boolean> result = spyStateManager.deleteExecutionState(PATH);

        // Verify the node is deleted correctly
        verify(mockDeleteBuilder).withVersion(-1);
        assertTrue(result.get());
    }

    /**
     * Test getNodeData method: the async BackgroundCallback is simulated by a
     * doAnswer that immediately invokes it with a canned CuratorEvent.
     * @throws Exception
     */
    @Test
    public void testGetNodeData() throws Exception {
        CuratorStateManager spyStateManager = spy(new CuratorStateManager());
        final CuratorFramework mockClient = mock(CuratorFramework.class);
        GetDataBuilder mockGetBuilder = mock(GetDataBuilder.class);
        // Mockito doesn't support mock type-parametrized class, thus suppress the warning
        @SuppressWarnings("rawtypes")
        BackgroundPathable mockBackPathable = mock(BackgroundPathable.class);
        final CuratorEvent mockEvent = mock(CuratorEvent.class);
        Message.Builder mockBuilder = mock(Message.Builder.class);
        Message mockMessage = mock(Message.class);

        final byte[] data = "wy_1989".getBytes();

        doReturn(mockMessage)
            .when(mockBuilder).build();
        doReturn(data)
            .when(mockEvent).getData();
        doReturn(PATH)
            .when(mockEvent).getPath();
        doReturn(mockClient)
            .when(spyStateManager).getCuratorClient();
        doReturn(true)
            .when(mockClient).blockUntilConnected(anyInt(), any(TimeUnit.class));
        doReturn(mockGetBuilder)
            .when(mockClient).getData();
        doReturn(mockBackPathable)
            .when(mockGetBuilder).usingWatcher(any(Watcher.class));

        // Fire the background callback synchronously so the future completes.
        doAnswer(new Answer<Object>() {
            @Override
            public Object answer(InvocationOnMock invocationOnMock) throws Throwable {
                Object[] objests = invocationOnMock.getArguments();
                // the first object is the BackgroundCallback
                ((BackgroundCallback) objests[0]).processResult(mockClient, mockEvent);
                return null;
            }
        }).when(mockBackPathable).inBackground(any(BackgroundCallback.class));

        spyStateManager.initialize(config);

        // Verify the data on node is fetched correctly
        ListenableFuture<Message> result = spyStateManager.getNodeData(null, PATH, mockBuilder);
        assertTrue(result.get().equals(mockMessage));
    }

    /**
     * Test deleteSchedulerLocation method.
     *
     * NOTE(review): the stubbed deleteNode future resolves to false, yet the
     * test asserts the overall result is true — presumably a missing scheduler
     * location counts as successfully deleted; confirm against
     * CuratorStateManager's implementation.
     * @throws Exception
     */
    @Test
    public void testDeleteSchedulerLocation() throws Exception {
        CuratorStateManager spyStateManager = spy(new CuratorStateManager());
        CuratorFramework mockClient = mock(CuratorFramework.class);
        doReturn(mockClient)
            .when(spyStateManager).getCuratorClient();
        doReturn(true)
            .when(mockClient).blockUntilConnected(anyInt(), any(TimeUnit.class));
        spyStateManager.initialize(config);

        final SettableFuture<Boolean> fakeResult = SettableFuture.create();
        fakeResult.set(false);
        doReturn(fakeResult).when(spyStateManager).deleteNode(anyString(), anyBoolean());

        ListenableFuture<Boolean> result = spyStateManager.deleteSchedulerLocation(TOPOLOGY_NAME);
        assertTrue(result.get());
    }
}
| apache-2.0 |
simbadzina/hadoop-fcfs | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java | 20378 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.crypto.CipherSuite;
import org.apache.hadoop.crypto.JceAesCtrCryptoCodec;
import org.apache.hadoop.crypto.OpensslAesCtrCryptoCodec;
/**
* This class contains constants for configuration keys used
* in the common code.
*
* It includes all publicly documented configuration keys. In general
* this class should not be used directly (use CommonConfigurationKeys
* instead)
*
*/
@InterfaceAudience.Public
public class CommonConfigurationKeysPublic {
// The Keys
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY =
"net.topology.script.number.args";
/** Default value for NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_KEY */
public static final int NET_TOPOLOGY_SCRIPT_NUMBER_ARGS_DEFAULT = 100;
//FS keys
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_DEFAULT_NAME_KEY = "fs.defaultFS";
/** Default value for FS_DEFAULT_NAME_KEY */
public static final String FS_DEFAULT_NAME_DEFAULT = "file:///";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_DF_INTERVAL_KEY = "fs.df.interval";
/** Default value for FS_DF_INTERVAL_KEY */
public static final long FS_DF_INTERVAL_DEFAULT = 60000;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_DU_INTERVAL_KEY = "fs.du.interval";
/** Default value for FS_DU_INTERVAL_KEY */
public static final long FS_DU_INTERVAL_DEFAULT = 600000;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY =
"fs.client.resolve.remote.symlinks";
/** Default value for FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY */
public static final boolean FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_DEFAULT = true;
//Defaults are not specified for following keys
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_SCRIPT_FILE_NAME_KEY =
"net.topology.script.file.name";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY =
"net.topology.node.switch.mapping.impl";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_IMPL_KEY =
"net.topology.impl";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String NET_TOPOLOGY_TABLE_MAPPING_FILE_KEY =
"net.topology.table.file.name";
public static final String NET_DEPENDENCY_SCRIPT_FILE_NAME_KEY =
"net.topology.dependency.script.file.name";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_TRASH_CHECKPOINT_INTERVAL_KEY =
"fs.trash.checkpoint.interval";
/** Default value for FS_TRASH_CHECKPOINT_INTERVAL_KEY */
public static final long FS_TRASH_CHECKPOINT_INTERVAL_DEFAULT = 0;
// TBD: Code is still using hardcoded values (e.g. "fs.automatic.close")
// instead of constant (e.g. FS_AUTOMATIC_CLOSE_KEY)
//
/** Not used anywhere, looks like default value for FS_LOCAL_BLOCK_SIZE */
public static final long FS_LOCAL_BLOCK_SIZE_DEFAULT = 32*1024*1024;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_AUTOMATIC_CLOSE_KEY = "fs.automatic.close";
/** Default value for FS_AUTOMATIC_CLOSE_KEY */
public static final boolean FS_AUTOMATIC_CLOSE_DEFAULT = true;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_FILE_IMPL_KEY = "fs.file.impl";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_FTP_HOST_KEY = "fs.ftp.host";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_FTP_HOST_PORT_KEY = "fs.ftp.host.port";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String FS_TRASH_INTERVAL_KEY = "fs.trash.interval";
/** Default value for FS_TRASH_INTERVAL_KEY */
public static final long FS_TRASH_INTERVAL_DEFAULT = 0;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_MAPFILE_BLOOM_SIZE_KEY =
"io.mapfile.bloom.size";
/** Default value for IO_MAPFILE_BLOOM_SIZE_KEY */
public static final int IO_MAPFILE_BLOOM_SIZE_DEFAULT = 1024*1024;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_MAPFILE_BLOOM_ERROR_RATE_KEY =
"io.mapfile.bloom.error.rate" ;
/** Default value for IO_MAPFILE_BLOOM_ERROR_RATE_KEY */
public static final float IO_MAPFILE_BLOOM_ERROR_RATE_DEFAULT = 0.005f;
/** Codec class that implements Lzo compression algorithm */
public static final String IO_COMPRESSION_CODEC_LZO_CLASS_KEY =
"io.compression.codec.lzo.class";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_MAP_INDEX_INTERVAL_KEY =
"io.map.index.interval";
/** Default value for IO_MAP_INDEX_INTERVAL_DEFAULT */
public static final int IO_MAP_INDEX_INTERVAL_DEFAULT = 128;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_MAP_INDEX_SKIP_KEY = "io.map.index.skip";
/** Default value for IO_MAP_INDEX_SKIP_KEY */
public static final int IO_MAP_INDEX_SKIP_DEFAULT = 0;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_SEQFILE_COMPRESS_BLOCKSIZE_KEY =
"io.seqfile.compress.blocksize";
/** Default value for IO_SEQFILE_COMPRESS_BLOCKSIZE_KEY */
public static final int IO_SEQFILE_COMPRESS_BLOCKSIZE_DEFAULT = 1000000;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_FILE_BUFFER_SIZE_KEY =
"io.file.buffer.size";
/** Default value for IO_FILE_BUFFER_SIZE_KEY */
public static final int IO_FILE_BUFFER_SIZE_DEFAULT = 4096;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_SKIP_CHECKSUM_ERRORS_KEY =
"io.skip.checksum.errors";
/** Default value for IO_SKIP_CHECKSUM_ERRORS_KEY */
public static final boolean IO_SKIP_CHECKSUM_ERRORS_DEFAULT = false;
/**
* @deprecated Moved to mapreduce, see mapreduce.task.io.sort.mb
* in mapred-default.xml
* See https://issues.apache.org/jira/browse/HADOOP-6801
*/
public static final String IO_SORT_MB_KEY = "io.sort.mb";
/** Default value for IO_SORT_MB_DEFAULT */
public static final int IO_SORT_MB_DEFAULT = 100;
/**
* @deprecated Moved to mapreduce, see mapreduce.task.io.sort.factor
* in mapred-default.xml
* See https://issues.apache.org/jira/browse/HADOOP-6801
*/
public static final String IO_SORT_FACTOR_KEY = "io.sort.factor";
/** Default value for IO_SORT_FACTOR_DEFAULT */
public static final int IO_SORT_FACTOR_DEFAULT = 100;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IO_SERIALIZATIONS_KEY = "io.serializations";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String TFILE_IO_CHUNK_SIZE_KEY = "tfile.io.chunk.size";
/** Default value for TFILE_IO_CHUNK_SIZE_DEFAULT */
public static final int TFILE_IO_CHUNK_SIZE_DEFAULT = 1024*1024;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String TFILE_FS_INPUT_BUFFER_SIZE_KEY =
"tfile.fs.input.buffer.size";
/** Default value for TFILE_FS_INPUT_BUFFER_SIZE_KEY */
public static final int TFILE_FS_INPUT_BUFFER_SIZE_DEFAULT = 256*1024;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String TFILE_FS_OUTPUT_BUFFER_SIZE_KEY =
"tfile.fs.output.buffer.size";
/** Default value for TFILE_FS_OUTPUT_BUFFER_SIZE_KEY */
public static final int TFILE_FS_OUTPUT_BUFFER_SIZE_DEFAULT = 256*1024;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY =
"ipc.client.connection.maxidletime";
/** Default value for IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY */
public static final int IPC_CLIENT_CONNECTION_MAXIDLETIME_DEFAULT = 10000; // 10s
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_CONNECT_TIMEOUT_KEY =
"ipc.client.connect.timeout";
/** Default value for IPC_CLIENT_CONNECT_TIMEOUT_KEY */
public static final int IPC_CLIENT_CONNECT_TIMEOUT_DEFAULT = 20000; // 20s
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_CONNECT_MAX_RETRIES_KEY =
"ipc.client.connect.max.retries";
/** Default value for IPC_CLIENT_CONNECT_MAX_RETRIES_KEY */
public static final int IPC_CLIENT_CONNECT_MAX_RETRIES_DEFAULT = 10;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY =
"ipc.client.connect.retry.interval";
/** Default value for IPC_CLIENT_CONNECT_RETRY_INTERVAL_KEY */
public static final int IPC_CLIENT_CONNECT_RETRY_INTERVAL_DEFAULT = 1000;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY =
"ipc.client.connect.max.retries.on.timeouts";
/** Default value for IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_KEY */
public static final int IPC_CLIENT_CONNECT_MAX_RETRIES_ON_SOCKET_TIMEOUTS_DEFAULT = 45;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_TCPNODELAY_KEY =
"ipc.client.tcpnodelay";
/** Default value for IPC_CLIENT_TCPNODELAY_KEY */
public static final boolean IPC_CLIENT_TCPNODELAY_DEFAULT = true;
/** Enable low-latency connections from the client */
public static final String IPC_CLIENT_LOW_LATENCY = "ipc.client.low-latency";
/** Default value of IPC_CLIENT_LOW_LATENCY */
public static final boolean IPC_CLIENT_LOW_LATENCY_DEFAULT = false;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_SERVER_LISTEN_QUEUE_SIZE_KEY =
"ipc.server.listen.queue.size";
/** Default value for IPC_SERVER_LISTEN_QUEUE_SIZE_KEY */
public static final int IPC_SERVER_LISTEN_QUEUE_SIZE_DEFAULT = 128;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_KILL_MAX_KEY = "ipc.client.kill.max";
/** Default value for IPC_CLIENT_KILL_MAX_KEY */
public static final int IPC_CLIENT_KILL_MAX_DEFAULT = 10;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_CLIENT_IDLETHRESHOLD_KEY =
"ipc.client.idlethreshold";
  /** Default value for IPC_CLIENT_IDLETHRESHOLD_KEY */
public static final int IPC_CLIENT_IDLETHRESHOLD_DEFAULT = 4000;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_SERVER_TCPNODELAY_KEY =
"ipc.server.tcpnodelay";
/** Default value for IPC_SERVER_TCPNODELAY_KEY */
public static final boolean IPC_SERVER_TCPNODELAY_DEFAULT = true;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String IPC_SERVER_MAX_CONNECTIONS_KEY =
"ipc.server.max.connections";
/** Default value for IPC_SERVER_MAX_CONNECTIONS_KEY */
public static final int IPC_SERVER_MAX_CONNECTIONS_DEFAULT = 0;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_KEY =
"hadoop.rpc.socket.factory.class.default";
public static final String HADOOP_RPC_SOCKET_FACTORY_CLASS_DEFAULT_DEFAULT =
"org.apache.hadoop.net.StandardSocketFactory";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SOCKS_SERVER_KEY = "hadoop.socks.server";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_UTIL_HASH_TYPE_KEY =
"hadoop.util.hash.type";
/** Default value for HADOOP_UTIL_HASH_TYPE_KEY */
public static final String HADOOP_UTIL_HASH_TYPE_DEFAULT = "murmur";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_GROUP_MAPPING =
"hadoop.security.group.mapping";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_GROUPS_CACHE_SECS =
"hadoop.security.groups.cache.secs";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final long HADOOP_SECURITY_GROUPS_CACHE_SECS_DEFAULT =
300;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS =
"hadoop.security.groups.negative-cache.secs";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final long HADOOP_SECURITY_GROUPS_NEGATIVE_CACHE_SECS_DEFAULT =
30;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_GROUPS_CACHE_WARN_AFTER_MS =
"hadoop.security.groups.cache.warn.after.ms";
public static final long HADOOP_SECURITY_GROUPS_CACHE_WARN_AFTER_MS_DEFAULT =
5000;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_AUTHENTICATION =
"hadoop.security.authentication";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_AUTHORIZATION =
"hadoop.security.authorization";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_INSTRUMENTATION_REQUIRES_ADMIN =
"hadoop.security.instrumentation.requires.admin";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_SERVICE_USER_NAME_KEY =
"hadoop.security.service.user.name.key";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_AUTH_TO_LOCAL =
"hadoop.security.auth_to_local";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN =
"hadoop.kerberos.min.seconds.before.relogin";
/** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */
public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT =
60;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_RPC_PROTECTION =
"hadoop.rpc.protection";
/** Class to override Sasl Properties for a connection */
public static final String HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS =
"hadoop.security.saslproperties.resolver.class";
public static final String HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX =
"hadoop.security.crypto.codec.classes";
public static final String
HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_KEY =
HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_KEY_PREFIX
+ CipherSuite.AES_CTR_NOPADDING.getConfigSuffix();
public static final String
HADOOP_SECURITY_CRYPTO_CODEC_CLASSES_AES_CTR_NOPADDING_DEFAULT =
OpensslAesCtrCryptoCodec.class.getName() + "," +
JceAesCtrCryptoCodec.class.getName();
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_KEY =
"hadoop.security.crypto.cipher.suite";
public static final String HADOOP_SECURITY_CRYPTO_CIPHER_SUITE_DEFAULT =
"AES/CTR/NoPadding";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_CRYPTO_JCE_PROVIDER_KEY =
"hadoop.security.crypto.jce.provider";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY =
"hadoop.security.crypto.buffer.size";
  /** Default value for HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY */
public static final int HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT = 8192;
/** Class to override Impersonation provider */
public static final String HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS =
"hadoop.security.impersonation.provider.class";
// <!-- KMSClientProvider configurations -->
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String KMS_CLIENT_ENC_KEY_CACHE_SIZE =
"hadoop.security.kms.client.encrypted.key.cache.size";
/** Default value for KMS_CLIENT_ENC_KEY_CACHE_SIZE */
public static final int KMS_CLIENT_ENC_KEY_CACHE_SIZE_DEFAULT = 500;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK =
"hadoop.security.kms.client.encrypted.key.cache.low-watermark";
/** Default value for KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK */
public static final float KMS_CLIENT_ENC_KEY_CACHE_LOW_WATERMARK_DEFAULT =
0.3f;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String KMS_CLIENT_ENC_KEY_CACHE_NUM_REFILL_THREADS =
"hadoop.security.kms.client.encrypted.key.cache.num.refill.threads";
  /** Default value for KMS_CLIENT_ENC_KEY_CACHE_NUM_REFILL_THREADS */
public static final int KMS_CLIENT_ENC_KEY_CACHE_NUM_REFILL_THREADS_DEFAULT =
2;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String KMS_CLIENT_ENC_KEY_CACHE_EXPIRY_MS =
"hadoop.security.kms.client.encrypted.key.cache.expiry";
  /** Default value for KMS_CLIENT_ENC_KEY_CACHE_EXPIRY_MS (12 hrs) */
public static final int KMS_CLIENT_ENC_KEY_CACHE_EXPIRY_DEFAULT = 43200000;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY =
"hadoop.security.java.secure.random.algorithm";
  /** Default value for HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_KEY */
public static final String HADOOP_SECURITY_JAVA_SECURE_RANDOM_ALGORITHM_DEFAULT =
"SHA1PRNG";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_SECURE_RANDOM_IMPL_KEY =
"hadoop.security.secure.random.impl";
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_KEY =
"hadoop.security.random.device.file.path";
public static final String HADOOP_SECURITY_SECURE_RANDOM_DEVICE_FILE_PATH_DEFAULT =
"/dev/urandom";
}
| apache-2.0 |
nabilzhang/enunciate | examples/cxf/src/main/java/com/webcohesion/enunciate/examples/cxf/genealogy/services/RelationshipService.java | 1619 | /**
* Copyright © 2006-2016 Web Cohesion (info@webcohesion.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webcohesion.enunciate.examples.cxf.genealogy.services;
import com.webcohesion.enunciate.examples.cxf.genealogy.data.Relationship;
import javax.jws.WebService;
import java.util.List;
import java.util.ArrayList;
/**
* Test of a basic class-based service with no interface.
*
* @author Ryan Heaton
*/
@WebService
public class RelationshipService {

  /**
   * Returns the relationships for the given person.
   *
   * @param personId id of the person; the special value "throw" triggers a test exception
   * @return three stub relationships with ids "0" through "2"
   * @throws RelationshipException if {@code personId} equals "throw"
   */
  public List<Relationship> getRelationships(String personId) throws RelationshipException {
    if ("throw".equals(personId)) {
      throw new RelationshipException("hi");
    }

    // todo: uncomment when CXF handles this
    // if ("outthrow".equals(personId)) {
    //   throw new OutsideException("outside message");
    // }

    final ArrayList<Relationship> relationships = new ArrayList<Relationship>(3);
    int index = 0;
    while (index < 3) {
      final Relationship stub = new Relationship();
      stub.setId(String.valueOf(index));
      relationships.add(stub);
      index++;
    }
    return relationships;
  }

  /** No-op operation, exposed only so the service has a second method. */
  public void touch() {
  }
}
| apache-2.0 |
MikeThomsen/nifi | nifi-nar-bundles/nifi-standard-services/nifi-hbase_2-client-service-bundle/nifi-hbase_2-client-service/src/test/java/org/apache/nifi/hbase/TestHBase_2_ListLookupService.java | 4929 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.hbase;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Table;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.hadoop.KerberosProperties;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import java.io.File;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.when;
public class TestHBase_2_ListLookupService {
static final String TABLE_NAME = "guids";
private TestRunner runner;
private HBase_2_ListLookupService lookupService;
private MockHBaseClientService clientService;
private NoOpProcessor processor;
@BeforeEach
public void before() throws Exception {
processor = new NoOpProcessor();
runner = TestRunners.newTestRunner(processor);
// setup mock HBaseClientService
final Table table = Mockito.mock(Table.class);
when(table.getName()).thenReturn(TableName.valueOf(TABLE_NAME));
final KerberosProperties kerberosProperties = new KerberosProperties(new File("src/test/resources/krb5.conf"));
clientService = new MockHBaseClientService(table, "family", kerberosProperties);
runner.addControllerService("clientService", clientService);
runner.setProperty(clientService, HBase_2_ClientService.HADOOP_CONF_FILES, "src/test/resources/hbase-site.xml");
runner.enableControllerService(clientService);
// setup HBase LookupService
lookupService = new HBase_2_ListLookupService();
runner.addControllerService("lookupService", lookupService);
runner.setProperty(lookupService, HBase_2_ListLookupService.HBASE_CLIENT_SERVICE, "clientService");
runner.setProperty(lookupService, HBase_2_RecordLookupService.TABLE_NAME, TABLE_NAME);
runner.enableControllerService(lookupService);
}
private Optional<List> setupAndRun() throws Exception {
// setup some staged data in the mock client service
final Map<String,String> cells = new HashMap<>();
cells.put("cq1", "v1");
cells.put("cq2", "v2");
clientService.addResult("row1", cells, System.currentTimeMillis());
Map<String, Object> lookup = new HashMap<>();
lookup.put("rowKey", "row1");
return lookupService.lookup(lookup);
}
@Test
public void testLookupKeyList() throws Exception {
Optional<List> results = setupAndRun();
assertTrue(results.isPresent());
List result = results.get();
assertTrue(result.size() == 2);
assertTrue(result.contains("cq1"));
assertTrue(result.contains("cq2"));
}
@Test
public void testLookupValueList() throws Exception {
runner.disableControllerService(lookupService);
runner.setProperty(lookupService, HBase_2_ListLookupService.RETURN_TYPE, HBase_2_ListLookupService.VALUE_LIST);
runner.enableControllerService(lookupService);
Optional<List> results = setupAndRun();
assertTrue(results.isPresent());
List result = results.get();
assertTrue(result.size() == 2);
assertTrue(result.contains("v1"));
assertTrue(result.contains("v2"));
}
// Processor that does nothing just so we can create a TestRunner
private static class NoOpProcessor extends AbstractProcessor {
@Override
protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
return Collections.emptyList();
}
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
}
}
}
| apache-2.0 |
ketan/gocd | server/src/test-integration/java/com/thoughtworks/go/server/service/PipelineScheduleServiceTest.java | 23697 | /*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service;
import com.thoughtworks.go.config.*;
import com.thoughtworks.go.config.materials.MaterialConfigs;
import com.thoughtworks.go.config.materials.SubprocessExecutionContext;
import com.thoughtworks.go.domain.*;
import com.thoughtworks.go.domain.buildcause.BuildCause;
import com.thoughtworks.go.domain.exception.StageAlreadyBuildingException;
import com.thoughtworks.go.domain.materials.Material;
import com.thoughtworks.go.domain.materials.TestingMaterial;
import com.thoughtworks.go.domain.materials.svn.Subversion;
import com.thoughtworks.go.domain.materials.svn.SvnCommand;
import com.thoughtworks.go.fixture.PipelineWithTwoStages;
import com.thoughtworks.go.helper.*;
import com.thoughtworks.go.server.cache.GoCache;
import com.thoughtworks.go.server.dao.DatabaseAccessHelper;
import com.thoughtworks.go.server.dao.PipelineDao;
import com.thoughtworks.go.server.dao.StageSqlMapDao;
import com.thoughtworks.go.server.domain.Username;
import com.thoughtworks.go.server.persistence.MaterialRepository;
import com.thoughtworks.go.server.scheduling.ScheduleHelper;
import com.thoughtworks.go.server.transaction.TransactionTemplate;
import com.thoughtworks.go.util.GoConfigFileHelper;
import com.thoughtworks.go.util.TimeProvider;
import com.thoughtworks.go.utils.Assertions;
import com.thoughtworks.go.utils.Timeout;
import org.apache.commons.io.FileUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import java.io.File;
import static com.thoughtworks.go.helper.MaterialConfigsMother.svnMaterialConfig;
import static com.thoughtworks.go.helper.ModificationsMother.modifyOneFile;
import static com.thoughtworks.go.util.GoConfigFileHelper.env;
import static com.thoughtworks.go.util.GoConstants.DEFAULT_APPROVED_BY;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
/**
 * Integration tests for pipeline/stage scheduling: verifies locking, forced
 * scheduling, run-on-all-agents / run-multiple-instance job creation, and
 * environment-variable propagation. Runs against a real Spring context and
 * an in-memory database via the listed application contexts.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
        "classpath:/applicationContext-global.xml",
        "classpath:/applicationContext-dataLocalAccess.xml",
        "classpath:/testPropertyConfigurer.xml",
        "classpath:/spring-all-servlet.xml",
})
public class PipelineScheduleServiceTest {
    // Production services under test, injected from the Spring context.
    @Autowired private ScheduleService scheduleService;
    @Autowired private GoConfigDao goConfigDao;
    @Autowired private GoConfigService goConfigService;
    @Autowired private PipelineDao pipelineDao;
    @Autowired private StageSqlMapDao stageDao;
    @Autowired PipelineScheduleQueue pipelineScheduleQueue;
    @Autowired PipelineService pipelineService;
    @Autowired private ScheduleHelper scheduleHelper;
    @Autowired private DatabaseAccessHelper dbHelper;
    @Autowired private PipelineLockService pipelineLockService;
    @Autowired private GoCache goCache;
    @Autowired private EnvironmentConfigService environmentConfigService;
    @Autowired private MaterialRepository materialRepository;
    @Autowired private TransactionTemplate transactionTemplate;
    @Autowired private SubprocessExecutionContext subprocessExecutionContext;
    @Autowired private InstanceFactory instanceFactory;
    @Autowired private AgentService agentService;
    @Rule
    public final TemporaryFolder temporaryFolder = new TemporaryFolder();
    // Lazily-created two-stage fixture; torn down only when a test created it.
    private PipelineWithTwoStages pipelineWithTwoStages;
    // Pipeline configs registered in setup(): "mingle", "evolve" and "go".
    private PipelineConfig mingleConfig;
    private PipelineConfig evolveConfig;
    // Dummy config md5 passed when creating pipeline instances.
    private String md5 = "md5-test";
    private static final String STAGE_NAME = "dev";
    private GoConfigFileHelper configHelper;
    // SVN repository backing the pipelines' material.
    public Subversion repository;
    public static TestRepo testRepo;
    private PipelineConfig goConfig;
    @Before
    public void setup() throws Exception {
        // Bind the file-based config helper to the real config DAO so config
        // edits below are seen by the services under test.
        configHelper = new GoConfigFileHelper();
        configHelper.usingCruiseConfigDao(goConfigDao);
        configHelper.onSetUp();
        // Fresh on-disk SVN repository used as the material for every pipeline.
        testRepo = new SvnTestRepo(temporaryFolder);
        dbHelper.onSetUp();
        repository = new SvnCommand(null, testRepo.projectRepositoryUrl());
        // "mingle": one stage with two jobs; "go": gets a second stage below.
        mingleConfig = configHelper.addPipeline("mingle", STAGE_NAME, repository, "unit", "functional");
        goConfig = configHelper.addPipeline("go", STAGE_NAME, repository, "unit");
        // Add an "ft" stage to "go" carrying stage- and job-level variables,
        // plus a pipeline-level variable — exercised by the env-var tests.
        StageConfig ftStageConfig = StageConfigMother.custom("ft", "twist");
        ftStageConfig.jobConfigByConfigName(new CaseInsensitiveString("twist")).addVariable("JOB_LVL", "job value");
        ftStageConfig.setVariables(env("STAGE_LVL", "stage value"));
        configHelper.addStageToPipeline("go", ftStageConfig);
        configHelper.addEnvironmentVariableToPipeline("go", env("PIPELINE_LVL", "pipeline value"));
        // Place "go" into the "uat" environment, which defines its own variable.
        configHelper.addEnvironments("uat");
        EnvironmentConfig uatEnv = configHelper.currentConfig().getEnvironments().named(new CaseInsensitiveString("uat"));
        uatEnv.addPipeline(new CaseInsensitiveString("go"));
        uatEnv.addEnvironmentVariable("ENV_LVL", "env value");
        evolveConfig = configHelper.addPipeline("evolve", STAGE_NAME, repository, "unit");
        // Drop any cached state left over from a previous test.
        goCache.clear();
    }
    @After
    public void teardown() throws Exception {
        // The two-stage fixture is only created by some tests; tear it down when present.
        if (pipelineWithTwoStages != null) {
            pipelineWithTwoStages.onTearDown();
        }
        dbHelper.onTearDown();
        // Drop queued build causes so scheduling state does not leak between tests.
        pipelineScheduleQueue.clear();
        testRepo.tearDown();
        // Best-effort removal of the working directory created by builds.
        FileUtils.deleteQuietly(new File("pipelines"));
        configHelper.onTearDown();
    }
@Test
public void shouldScheduleStageAfterModifications() throws Exception {
scheduleAndCompleteInitialPipelines();
Material stubMaterial = new TestingMaterial();
mingleConfig.setMaterialConfigs(new MaterialConfigs(stubMaterial.config()));
MaterialRevisions revisions = new MaterialRevisions();
revisions.addRevision(stubMaterial, ((TestingMaterial)stubMaterial).modificationsSince(null, null, subprocessExecutionContext));
BuildCause buildCause = BuildCause.createWithModifications(revisions, "");
dbHelper.saveMaterials(buildCause.getMaterialRevisions());
Pipeline pipeline = instanceFactory.createPipelineInstance(mingleConfig, buildCause, new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
pipelineService.save(pipeline);
verifyMingleScheduledWithModifications();
}
    @Test
    public void shouldLockPipelineWhenSchedulingIt() throws Exception {
        scheduleAndCompleteInitialPipelines();
        // Mark "mingle" as a locking pipeline in the config.
        configHelper.lockPipeline("mingle");
        Material stubMaterial = new TestingMaterial();
        mingleConfig.setMaterialConfigs(new MaterialConfigs(stubMaterial.config()));
        // Not locked yet — the lock should only be taken once a new instance is saved.
        assertThat(pipelineLockService.isLocked("mingle"), is(false));
        MaterialRevisions revisions = new MaterialRevisions();
        revisions.addRevision(stubMaterial, ((TestingMaterial) stubMaterial).modificationsSince(null, null, subprocessExecutionContext));
        BuildCause buildCause = BuildCause.createWithModifications(revisions, "");
        dbHelper.saveMaterials(buildCause.getMaterialRevisions());
        Pipeline pipeline = instanceFactory.createPipelineInstance(mingleConfig, buildCause, new DefaultSchedulingContext(DEFAULT_APPROVED_BY), md5, new TimeProvider());
        pipelineService.save(pipeline);
        // Saving a new instance of a lockable pipeline must acquire the pipeline lock.
        assertThat(pipelineLockService.isLocked("mingle"), is(true));
    }
    @Test
    public void shouldScheduleJobForAllAgentsWhenToBeRunOnAllAgents() throws Exception {
        // Three agents: uuid1 is tied to the "dev" environment, uuid2/uuid3 are not.
        // Since "evolve" is not in "dev", only uuid2 and uuid3 are eligible.
        configHelper.addEnvironments("dev");
        Agent agentConfigWithUuid1 = new Agent("uuid1", "localhost", "127.0.0.1", "cookie1");
        agentConfigWithUuid1.setEnvironments("dev");
        agentService.saveOrUpdate(agentConfigWithUuid1);
        agentService.saveOrUpdate(new Agent("uuid2", "localhost", "127.0.0.1", "cookie2"));
        agentService.saveOrUpdate(new Agent("uuid3", "localhost", "127.0.0.1", "cookie3"));
        // Configure the "unit" job to run on every matching agent.
        configHelper.setRunOnAllAgents(CaseInsensitiveString.str(evolveConfig.name()), STAGE_NAME, "unit", true);
        Material stubMaterial = new TestingMaterial();
        evolveConfig.setMaterialConfigs(new MaterialConfigs(stubMaterial.config()));
        MaterialRevisions revisions = new MaterialRevisions();
        revisions.addRevision(stubMaterial, ((TestingMaterial) stubMaterial).modificationsSince(null, null, subprocessExecutionContext));
        BuildCause buildCause = BuildCause.createWithModifications(revisions, "");
        dbHelper.saveMaterials(buildCause.getMaterialRevisions());
        Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, buildCause, new DefaultSchedulingContext(DEFAULT_APPROVED_BY, environmentConfigService.agentsForPipeline(evolveConfig.name())), md5,
                new TimeProvider());
        pipelineService.save(pipeline);
        Stage instance = scheduleService.scheduleStage(pipeline, STAGE_NAME, "anyone", new ScheduleService.NewStageInstanceCreator(goConfigService),
                new ScheduleService.ExceptioningErrorHandler());
        // Expect exactly one "unit" job instance per eligible agent, with marker suffixes.
        JobInstances scheduledJobs = instance.getJobInstances();
        assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("name", is(RunOnAllAgents.CounterBasedJobNameGenerator.appendMarker("unit", 1)))));
        assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("agentUuid", is("uuid2"))));
        assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("name", is(RunOnAllAgents.CounterBasedJobNameGenerator.appendMarker("unit", 2)))));
        assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("agentUuid", is("uuid3"))));
        assertThat(scheduledJobs.size(), is(2));
    }
    @Test
    public void shouldScheduleMultipleJobsWhenToBeRunMultipleInstance() throws Exception {
        // Configure the "unit" job to run as two parallel instances.
        configHelper.setRunMultipleInstance(CaseInsensitiveString.str(evolveConfig.name()), STAGE_NAME, "unit", 2);
        Material stubMaterial = new TestingMaterial();
        evolveConfig.setMaterialConfigs(new MaterialConfigs(stubMaterial.config()));
        MaterialRevisions revisions = new MaterialRevisions();
        revisions.addRevision(stubMaterial, ((TestingMaterial) stubMaterial).modificationsSince(null, null, subprocessExecutionContext));
        BuildCause buildCause = BuildCause.createWithModifications(revisions, "");
        dbHelper.saveMaterials(buildCause.getMaterialRevisions());
        Pipeline pipeline = instanceFactory.createPipelineInstance(evolveConfig, buildCause, new DefaultSchedulingContext(DEFAULT_APPROVED_BY, environmentConfigService.agentsForPipeline(evolveConfig.name())), md5, new TimeProvider());
        pipelineService.save(pipeline);
        Stage instance = scheduleService.scheduleStage(pipeline, STAGE_NAME, "anyone", new ScheduleService.NewStageInstanceCreator(goConfigService), new ScheduleService.ExceptioningErrorHandler());
        // Expect exactly two "unit" job instances, named with run-instance markers.
        JobInstances scheduledJobs = instance.getJobInstances();
        assertThat(scheduledJobs.size(), is(2));
        assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("name", is(RunMultipleInstance.CounterBasedJobNameGenerator.appendMarker("unit", 1)))));
        assertThat(scheduledJobs.toArray(), hasItemInArray(hasProperty("name", is(RunMultipleInstance.CounterBasedJobNameGenerator.appendMarker("unit", 2)))));
    }
    @Test
    public void shouldPassEnvironmentLevelEnvironmentVariablesToJobsForNewlyScheduledStage() throws Exception {
        scheduleAndCompleteInitialPipelines();
        // Schedule the "ft" stage of "go", which carries variables at pipeline,
        // stage and job level (set up in setup()).
        Pipeline pipeline = pipelineDao.mostRecentPipeline("go");
        Stage stage = scheduleService.scheduleStage(pipeline, "ft", "anonymous", new ScheduleService.NewStageInstanceCreator(goConfigService), new ScheduleService.ExceptioningErrorHandler());
        EnvironmentVariables jobVariables = stage.getJobInstances().first().getPlan().getVariables();
        assertThat(jobVariables.size(), is(3)); //pipeline, stage, job, env is applied while creating work
        assertThat(jobVariables, hasItem(new EnvironmentVariable("PIPELINE_LVL", "pipeline value")));
        assertThat(jobVariables, hasItem(new EnvironmentVariable("STAGE_LVL", "stage value")));
        assertThat(jobVariables, hasItem(new EnvironmentVariable("JOB_LVL", "job value")));
    }
@Test
public void shouldLockPipelineWhenSchedulingStage() throws Exception {
scheduleAndCompleteInitialPipelines();
Pipeline pipeline = pipelineDao.mostRecentPipeline("mingle");
configHelper.lockPipeline("mingle");
assertThat(pipelineLockService.isLocked("mingle"), is(false));
scheduleService.scheduleStage(pipeline, STAGE_NAME, "anonymous", new ScheduleService.NewStageInstanceCreator(goConfigService), new ScheduleService.ExceptioningErrorHandler());
assertThat(pipelineLockService.isLocked("mingle"), is(true));
}
@Test
public void shouldForceFirstStagePlan() throws Exception {
pipelineWithTwoStages = new PipelineWithTwoStages(materialRepository, transactionTemplate, temporaryFolder);
pipelineWithTwoStages.usingDbHelper(dbHelper).usingConfigHelper(configHelper).onSetUp();
pipelineWithTwoStages.createPipelineWithFirstStagePassedAndSecondStageRunning();
Pipeline pipeline = manualSchedule(pipelineWithTwoStages.pipelineName);
assertThat(pipeline.getFirstStage().stageState(), is(StageState.Building));
}
    @Test
    public void shouldForceFirstStagePlanWhenOtherStageIsRunning() throws Exception {
        // Fixture: first stage passed, second stage still running.
        pipelineWithTwoStages = new PipelineWithTwoStages(materialRepository, transactionTemplate, temporaryFolder);
        pipelineWithTwoStages.usingDbHelper(dbHelper).usingConfigHelper(configHelper).onSetUp();
        pipelineWithTwoStages.createPipelineWithFirstStagePassedAndSecondStageRunning();
        // Even with another stage active, a manual trigger re-activates the first stage.
        Pipeline pipeline = manualSchedule(pipelineWithTwoStages.pipelineName);
        assertThat(pipeline.getFirstStage().isActive(), is(true));
    }
    @Test
    public void shouldForceStagePlanWithModificationsSinceLast() throws Exception {
        Pipeline completedMingle = scheduleAndCompleteInitialPipelines();
        pipelineDao.loadPipeline(completedMingle.getId());
        // Record a new modification on a testing material for "mingle".
        TestingMaterial testingMaterial = new TestingMaterial();
        mingleConfig.setMaterialConfigs(new MaterialConfigs(testingMaterial.config()));
        MaterialRevisions revisions = new MaterialRevisions();
        revisions.addRevision(testingMaterial, testingMaterial.modificationsSince(null, null, subprocessExecutionContext));
        // Manual (forced) build cause rather than an auto-triggered one.
        BuildCause buildCause = BuildCause.createManualForced(revisions, Username.ANONYMOUS);
        dbHelper.saveMaterials(buildCause.getMaterialRevisions());
        Pipeline forcedPipeline = instanceFactory.createPipelineInstance(mingleConfig, buildCause, new DefaultSchedulingContext(
                DEFAULT_APPROVED_BY), md5, new TimeProvider());
        pipelineService.save(forcedPipeline);
        // The forced instance should be scheduled with the new modifications.
        verifyMingleScheduledWithModifications();
    }
@Test
public void shouldNotScheduleAnyNewPipelineWhenErrorHappens() throws Exception {
String stageName = "invalidStageName";
PipelineConfig invalidPipeline = configHelper.addPipelineWithInvalidMaterial("invalidPipeline", stageName);
int beforeScheduling = pipelineDao.count(CaseInsensitiveString.str(invalidPipeline.name()));
autoSchedulePipelines();
int afterScheduling = pipelineDao.count(CaseInsensitiveString.str(invalidPipeline.name()));
assertThat(beforeScheduling, is(afterScheduling));
}
@Test
public void shouldNotScheduleActivePipeline() throws Exception {
    // Persist a pipeline that is currently building.
    Pipeline activePipeline = dbHelper.savePipelineWithStagesAndMaterials(PipelineMother.building(mingleConfig));

    // A manual schedule while it is active must not create a new instance.
    Pipeline rescheduled = manualSchedule(CaseInsensitiveString.str(mingleConfig.name()));
    assertThat(rescheduled.getId(), is(activePipeline.getId()));
}
@Test
public void shouldNotScheduleBuildIfNoModification() throws Exception {
    autoSchedulePipelines("mingle", "evolve");

    // Complete the stage that the first scheduling round produced.
    Stage firstRun = stageDao.mostRecentWithBuilds(CaseInsensitiveString.str(evolveConfig.name()), evolveConfig.findBy(new CaseInsensitiveString("dev")));
    dbHelper.passStage(firstRun);
    stageDao.stageStatusChanged(firstRun);

    autoSchedulePipelines();

    // With no new modifications, the most recent stage must still be the completed one.
    Stage latest = stageDao.mostRecentWithBuilds(CaseInsensitiveString.str(evolveConfig.name()), evolveConfig.findBy(new CaseInsensitiveString("dev")));
    assertThat(latest.getId(), is(firstRun.getId()));
    assertThat(latest.getJobInstances().first().getState(), is(JobState.Completed));
}
@Test
public void shouldSaveBuildStateCorrectly() throws Exception {
    PipelineConfig cruiseConfig = configHelper.addPipeline("cruise", "dev", repository);
    goConfigService.forceNotifyListeners();

    autoSchedulePipelines("mingle", "evolve", "cruise");

    // The freshly scheduled job must be persisted in the Scheduled state.
    Stage cruiseStage = stageDao.mostRecentWithBuilds(CaseInsensitiveString.str(cruiseConfig.name()), cruiseConfig.findBy(new CaseInsensitiveString("dev")));
    JobInstance firstJob = cruiseStage.getJobInstances().first();
    assertThat(firstJob.getState(), is(JobState.Scheduled));
}
@Test
public void shouldRemoveBuildCauseIfPipelineNotExist() throws Exception {
    configHelper.addPipeline("cruise", "dev", repository);
    goConfigService.forceNotifyListeners();
    scheduleHelper.autoSchedulePipelinesWithRealMaterials("mingle", "evolve", "cruise");
    Assertions.assertWillHappen(2, PipelineScheduleQueueMatcher.numberOfScheduledPipelinesIsAtLeast(pipelineScheduleQueue), Timeout.FIVE_SECONDS);

    int queuedBefore = pipelineScheduleQueue.toBeScheduled().size();
    assertThat(queuedBefore, greaterThan(1));

    // Reset the config so the queued pipelines no longer exist, then drain the buffer:
    // the stale build causes must be discarded rather than scheduled.
    configHelper.initializeConfigFile();
    goConfigService.forceNotifyListeners();
    scheduleService.autoSchedulePipelinesFromRequestBuffer();
    assertThat(pipelineScheduleQueue.toBeScheduled().size(), is(0));
}
@Test
public void shouldRemoveBuildCauseIfAnyExceptionIsThrown() throws Exception {
    configHelper.addPipeline("cruise", "dev", repository);
    goConfigService.forceNotifyListeners();
    // Flip the "unit" job to run-on-all-agents; per the test name this is expected to make
    // scheduling fail internally — TODO confirm the exact failure mode against ScheduleService.
    goConfigService.getCurrentConfig().pipelineConfigByName(new CaseInsensitiveString("cruise")).get(0).jobConfigByConfigName(new CaseInsensitiveString("unit")).setRunOnAllAgents(true);
    scheduleHelper.autoSchedulePipelinesWithRealMaterials("cruise");
    goConfigService.forceNotifyListeners();
    scheduleService.autoSchedulePipelinesFromRequestBuffer();
    // Even if scheduling threw, the buffered build cause must be consumed, not retried forever.
    assertThat(pipelineScheduleQueue.toBeScheduled().size(), is(0));
}
@Test
public void shouldNotThrowErrorWhenMaterialsChange() throws Exception {
    configHelper.addPipeline("cruise", "dev", repository);
    goConfigService.forceNotifyListeners();
    scheduleHelper.autoSchedulePipelinesWithRealMaterials("mingle", "evolve", "cruise");
    // Swap the pipeline's material AFTER build causes were buffered (regression test for #2520).
    configHelper.replaceMaterialForPipeline("cruise", svnMaterialConfig("http://new-material", null));
    goConfigService.forceNotifyListeners();
    try {
        // Draining the buffer with now-mismatched materials must not blow up.
        scheduleService.autoSchedulePipelinesFromRequestBuffer();
    } catch (Exception e) {
        fail("#2520 - should not cause an error if materials have changed");
    }
}
@Test
public void shouldConsumeAllBuildCausesInServerHealth() throws Exception {
    // Queue two manual build causes directly, then drain the request buffer:
    // both causes must be consumed so the queue ends up empty.
    pipelineScheduleQueue.schedule(new CaseInsensitiveString("mingle"), BuildCause.createManualForced(modifyOneFile(mingleConfig), Username.ANONYMOUS));
    pipelineScheduleQueue.schedule(new CaseInsensitiveString("evolve"), BuildCause.createManualForced(modifyOneFile(evolveConfig), Username.ANONYMOUS));
    scheduleService.autoSchedulePipelinesFromRequestBuffer();
    assertThat(pipelineScheduleQueue.toBeScheduled().size(), is(0));
}
// Triggers material-based scheduling for the given pipelines (all, if none named)
// and then drains the request buffer so the scheduling actually takes effect.
private void autoSchedulePipelines(String... pipelineNames) throws Exception {
    scheduleHelper.autoSchedulePipelinesWithRealMaterials(pipelineNames);
    scheduleService.autoSchedulePipelinesFromRequestBuffer();
}
/**
 * Force-schedules {@code pipelineName} on behalf of a dummy user, drains the request
 * buffer so the schedule actually happens, and returns the resulting pipeline instance.
 *
 * Note: the original declared {@code throws Exception, StageAlreadyBuildingException};
 * the second type is redundant because {@code Exception} already covers it, so it was dropped.
 */
private Pipeline manualSchedule(String pipelineName) throws Exception {
    scheduleHelper.manuallySchedulePipelineWithRealMaterials(pipelineName, new Username(new CaseInsensitiveString("some user name")));
    scheduleService.autoSchedulePipelinesFromRequestBuffer();
    return pipelineService.mostRecentFullPipelineByName(pipelineName);
}
// Asserts that all three fixture pipelines were scheduled: mingle is checked in detail
// (stage name, job count, job state), evolve and go via the shared helper.
private void assertPipelinesScheduled() {
    Pipeline latestMingle = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(mingleConfig.name()));
    Stage firstStage = latestMingle.getFirstStage();
    assertThat(firstStage.getName(), is(STAGE_NAME));
    assertThat(firstStage.getJobInstances().size(), is(2));

    JobInstance firstJob = firstStage.getJobInstances().first();
    assertThat(firstJob.getState(), is(JobState.Scheduled));

    assertPipelineScheduled(evolveConfig);
    assertPipelineScheduled(goConfig);
}
// Asserts the "dev" stage of the given pipeline config was scheduled with one job.
// (The local was renamed from "evolveStage": this helper is used for any config.)
private void assertPipelineScheduled(PipelineConfig config) {
    Stage devStage = stageDao.mostRecentWithBuilds(CaseInsensitiveString.str(config.name()), config.findBy(new CaseInsensitiveString("dev")));
    assertThat(devStage.getName(), is("dev"));
    assertThat(devStage.getJobInstances().size(), is(1));
    assertThat(devStage.getJobInstances().first().getState(), is(JobState.Scheduled));
}
// Verifies the most recent mingle run carries exactly three modifications in its
// build cause and that its first job is sitting in the Scheduled state.
private void verifyMingleScheduledWithModifications() {
    Pipeline latest = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(mingleConfig.name()));
    BuildCause cause = latest.getBuildCause();
    assertThat(cause.getMaterialRevisions().totalNumberOfModifications(), is(3));

    JobInstance firstJob = latest.getFirstStage().getJobInstances().first();
    assertThat(firstJob.getState(), is(JobState.Scheduled));
}
// Schedules the three fixture pipelines, verifies they were all scheduled, then
// passes the first stage of "go" and "mingle". Returns the completed mingle pipeline.
private Pipeline scheduleAndCompleteInitialPipelines() throws Exception {
    autoSchedulePipelines("mingle", "evolve", "go");
    assertPipelinesScheduled();
    passFirstStage(goConfig);
    return passFirstStage(mingleConfig);
}
/**
 * Drives the most recent "dev" stage of {@code pipelineConfig} to the passed state,
 * asserts its first job completed, then passes the whole pipeline and returns it.
 *
 * The original called {@code dbHelper.passStage(...)} twice in a row; the duplicate
 * looked like a copy-paste slip and was removed — a single call is what every other
 * call site in this class uses to complete a stage.
 */
private Pipeline passFirstStage(PipelineConfig pipelineConfig) {
    Stage firstStage = stageDao.mostRecentWithBuilds(CaseInsensitiveString.str(pipelineConfig.name()), pipelineConfig.findBy(new CaseInsensitiveString("dev")));
    dbHelper.passStage(firstStage);
    assertThat(firstStage.getJobInstances().first().getState(), is(JobState.Completed));
    Pipeline pipeline = pipelineDao.mostRecentPipeline(CaseInsensitiveString.str(pipelineConfig.name()));
    return dbHelper.passPipeline(pipeline);
}
}
| apache-2.0 |
rajath26/cassandra-trunk | tools/stress/src/org/apache/cassandra/stress/generatedata/RowGenDistributedSize.java | 3711 | package org.apache.cassandra.stress.generatedata;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
 * Generates rows whose column count and column sizes are drawn from the supplied
 * distributions, recycling ByteBuffers between invocations to reduce allocation churn.
 */
public class RowGenDistributedSize extends RowGen
{

    // TODO - make configurable
    static final int MAX_SINGLE_CACHE_SIZE = 16 * 1024;

    final Distribution countDistribution;
    final Distribution sizeDistribution;

    // Previously allocated buffers keyed by capacity, available for reuse.
    final TreeMap<Integer, ByteBuffer> cache = new TreeMap<>();

    // array re-used for returning columns
    final ByteBuffer[] ret;
    final int[] sizes;
    final boolean isDeterministic;

    public RowGenDistributedSize(DataGen dataGenerator, Distribution countDistribution, Distribution sizeDistribution)
    {
        super(dataGenerator);
        this.countDistribution = countDistribution;
        this.sizeDistribution = sizeDistribution;
        ret = new ByteBuffer[(int) countDistribution.maxValue()];
        sizes = new int[ret.length];
        // TODO: should keep it deterministic in event that count distribution is not, but size and dataGen are, so that
        // we simply need to generate the correct selection of columns
        this.isDeterministic = dataGen.isDeterministic() && countDistribution.maxValue() == countDistribution.minValue()
                && sizeDistribution.minValue() == sizeDistribution.maxValue();
    }

    /**
     * Returns a buffer with capacity >= {@code size}, preferring a cached buffer.
     * Requests at or above MAX_SINGLE_CACHE_SIZE bypass the cache entirely; a cache
     * miss evicts the next-smaller entry so the cache tracks recently needed sizes.
     */
    ByteBuffer getBuffer(int size)
    {
        if (size >= MAX_SINGLE_CACHE_SIZE)
            return ByteBuffer.allocate(size);
        Map.Entry<Integer, ByteBuffer> found = cache.ceilingEntry(size);
        if (found == null)
        {
            // remove the next entry down, and replace it with a cache of this size
            Integer del = cache.lowerKey(size);
            if (del != null)
                cache.remove(del);
            return ByteBuffer.allocate(size);
        }
        ByteBuffer r = found.getValue();
        cache.remove(found.getKey());
        return r;
    }

    @Override
    List<ByteBuffer> getColumns(long operationIndex)
    {
        int i = 0;
        int count = (int) countDistribution.next();
        while (i < count)
        {
            int columnSize = (int) sizeDistribution.next();
            sizes[i] = columnSize;
            ret[i] = getBuffer(columnSize);
            i++;
        }
        // Drop references left over from a previous, larger invocation.
        // FIX: the original body was "ret[i] = null;" with no increment, so the loop
        // exited after clearing a single slot and stale buffers stayed referenced.
        while (i < ret.length && ret[i] != null)
            ret[i++] = null;
        i = 0;
        while (i < count)
        {
            ByteBuffer b = ret[i];
            // Return the backing buffer to the cache (keyed by capacity) and hand the
            // caller a slice of exactly sizes[i] bytes taken from the buffer's tail.
            cache.put(b.capacity(), b);
            b.position(b.capacity() - sizes[i]);
            ret[i] = b.slice();
            b.position(0);
            i++;
        }
        return Arrays.asList(ret).subList(0, count);
    }

    /**
     * Returns the next column count. NOTE: this advances {@code countDistribution},
     * so calling it changes the values subsequent calls (and getColumns) observe.
     */
    public int count(long operationIndex)
    {
        return (int) countDistribution.next();
    }

    @Override
    public boolean isDeterministic()
    {
        return isDeterministic;
    }
}
| apache-2.0 |
lanwen/rest-assured | rest-assured/src/main/java/com/jayway/restassured/builder/ResponseBuilder.java | 6291 | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jayway.restassured.builder;
import com.jayway.restassured.internal.RestAssuredResponseImpl;
import com.jayway.restassured.response.Cookies;
import com.jayway.restassured.response.Headers;
import com.jayway.restassured.response.Response;
import java.io.InputStream;
import static com.jayway.restassured.internal.assertion.AssertParameter.notNull;
import static java.lang.String.format;
/**
* A builder to make it easier to create new {@link Response} implementations. This is useful if you're working with {@link com.jayway.restassured.filter.Filter}s and want to
* change the response you get from the sever somehow.
*/
public class ResponseBuilder {

    private RestAssuredResponseImpl restAssuredResponse = new RestAssuredResponseImpl();

    /**
     * Clone an already existing response. Internal state (config, connection manager,
     * expectations, etc.) is copied only when the response is REST Assured's own
     * implementation; otherwise just the body stream is copied.
     *
     * @return Builder.
     */
    public ResponseBuilder clone(Response response) {
        if (isRestAssuredResponse(response)) {
            final RestAssuredResponseImpl raResponse = raResponse(response);
            restAssuredResponse.setContent(raResponse.getContent());
            restAssuredResponse.setHasExpectations(raResponse.getHasExpectations());
            restAssuredResponse.setDefaultContentType(raResponse.getDefaultContentType());
            restAssuredResponse.setDefaultCharset(raResponse.getDefaultCharset());
            restAssuredResponse.setSessionIdName(raResponse.getSessionIdName());
            restAssuredResponse.setConnectionManager(raResponse.getConnectionManager());
            restAssuredResponse.setConfig(raResponse.getConfig());
            restAssuredResponse.setRpr(raResponse.getRpr());
            restAssuredResponse.setLogRepository(raResponse.getLogRepository());
        } else {
            restAssuredResponse.setContent(response.asInputStream());
        }
        restAssuredResponse.setContentType(response.getContentType());
        restAssuredResponse.setCookies(response.getDetailedCookies());
        restAssuredResponse.setResponseHeaders(response.getHeaders());
        restAssuredResponse.setStatusCode(response.getStatusCode());
        restAssuredResponse.setStatusLine(response.getStatusLine());
        return this;
    }

    /**
     * Set the response body to a String
     *
     * @return Builder.
     */
    public ResponseBuilder setBody(String stringBody) {
        notNull(stringBody, "Response body");
        restAssuredResponse.setContent(stringBody);
        return this;
    }

    /**
     * Set the response body to an inputstream
     *
     * @return Builder.
     */
    public ResponseBuilder setBody(InputStream inputStream) {
        notNull(inputStream, "Response body");
        restAssuredResponse.setContent(inputStream);
        return this;
    }

    /**
     * Set the response body to an array of bytes
     *
     * @return Builder.
     */
    public ResponseBuilder setBody(byte[] bytes) {
        notNull(bytes, "Response body");
        restAssuredResponse.setContent(bytes);
        return this;
    }

    /**
     * Set response headers, e.g:
     * <pre>
     * Header first = new Header("headerName1", "headerValue1");
     * Header second = new Header("headerName2", "headerValue2");
     * Headers headers = new Headers(first, second);
     * </pre>
     *
     * @see Headers
     *
     * @return The builder
     */
    public ResponseBuilder setHeaders(Headers headers) {
        notNull(headers, "Headers");
        restAssuredResponse.setResponseHeaders(headers);
        return this;
    }

    /**
     * Set some cookies that will be available in the response. To create cookies you can do:
     * <pre>
     * Cookie cookie1 = Cookie.Builder("username", "John").setComment("comment 1").build();
     * Cookie cookie2 = Cookie.Builder("token", 1234).setComment("comment 2").build();
     * Cookies cookies = new Cookies(cookie1, cookie2);
     * </pre>
     *
     * @return The Builder
     */
    public ResponseBuilder setCookies(Cookies cookies) {
        notNull(cookies, "Cookies");
        restAssuredResponse.setCookies(cookies);
        return this;
    }

    /**
     * Set the content type of the response
     *
     * @return The builder
     */
    public ResponseBuilder setContentType(String contentType) {
        notNull(contentType, "Content type");
        restAssuredResponse.setContentType(contentType);
        return this;
    }

    /**
     * Set the status line of the response.
     *
     * @return The builder
     */
    public ResponseBuilder setStatusLine(String statusLine) {
        notNull(statusLine, "Status line");
        restAssuredResponse.setStatusLine(statusLine);
        return this;
    }

    /**
     * Set the status code of the response.
     *
     * @return The builder
     */
    public ResponseBuilder setStatusCode(int statusCode) {
        restAssuredResponse.setStatusCode(statusCode);
        return this;
    }

    /**
     * Build the actual response.
     *
     * @return The response object
     * @throws IllegalArgumentException if the status code is outside [100, 599] or the status line is missing
     */
    public Response build() {
        final int statusCode = restAssuredResponse.statusCode();
        if (statusCode < 100 || statusCode >= 600) {
            // Message fixed to match the actual check: 100 itself is accepted.
            throw new IllegalArgumentException(format("Status code must be greater than or equal to 100 and less than 600, was %d.", statusCode));
        }
        // Fixed: the arguments were swapped — notNull("Status line", statusLine()) validated
        // the constant label (never null), making the status-line check a no-op.
        notNull(restAssuredResponse.statusLine(), "Status line");
        return restAssuredResponse;
    }

    private boolean isRestAssuredResponse(Response response) {
        return response instanceof RestAssuredResponseImpl;
    }

    private RestAssuredResponseImpl raResponse(Response response) {
        return (RestAssuredResponseImpl) response;
    }
}
| apache-2.0 |
vvv1559/intellij-community | platform/platform-tests/testSrc/com/intellij/util/AlarmTest.java | 6756 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.util;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.application.impl.LaterInvocator;
import com.intellij.testFramework.PlatformTestCase;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
/**
 * Tests for {@link Alarm}: sequential execution across all threading modes, bounded
 * thread creation, modality handling, flush() and waitForAllExecuted() semantics.
 * Several tests are timing-sensitive; statement order is significant throughout.
 */
public class AlarmTest extends PlatformTestCase {
  public void testTwoAddsWithZeroDelayMustExecuteSequentially() throws Exception {
    Alarm alarm = new Alarm(getTestRootDisposable());
    assertRequestsExecuteSequentially(alarm);
  }

  public void testAlarmRequestsShouldExecuteSequentiallyEvenInPooledThread() throws Exception {
    Alarm alarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, getTestRootDisposable());
    assertRequestsExecuteSequentially(alarm);
  }

  public void testAlarmRequestsShouldExecuteSequentiallyEveryWhere() throws Exception {
    Alarm alarm = new Alarm(Alarm.ThreadToUse.OWN_THREAD, getTestRootDisposable());
    assertRequestsExecuteSequentially(alarm);
  }

  public void testAlarmRequestsShouldExecuteSequentiallyAbsolutelyEveryWhere() throws Exception {
    Alarm alarm = new Alarm(Alarm.ThreadToUse.SHARED_THREAD, getTestRootDisposable());
    assertRequestsExecuteSequentially(alarm);
  }

  // Queues 10000 zero-delay requests and asserts they ran in submission order.
  // The log is a StringBuffer (synchronized) because requests may run off the EDT.
  private static void assertRequestsExecuteSequentially(@NotNull Alarm alarm) throws InterruptedException, ExecutionException {
    int N = 10000;
    StringBuffer log = new StringBuffer(N*4);
    StringBuilder expected = new StringBuilder(N * 4);
    for (int i = 0; i < N; i++) {
      final int finalI = i;
      alarm.addRequest(() -> log.append(finalI).append(" "), 0);
    }
    for (int i = 0; i < N; i++) {
      expected.append(i).append(" ");
    }
    // Wait on a pooled thread; the EDT must stay free to pump invocation events below.
    Future<?> future = ApplicationManager.getApplication().executeOnPooledThread(() -> {
      try {
        alarm.waitForAllExecuted(100, TimeUnit.SECONDS);
      }
      catch (Exception e) {
        throw new RuntimeException(e);
      }
    });
    while (!future.isDone()) {
      UIUtil.dispatchAllInvocationEvents();
    }
    future.get();
    assertEquals(0, alarm.getActiveRequestCount());
    assertEquals(expected.toString(), log.toString());
  }

  public void testOneAlarmDoesNotStartTooManyThreads() {
    Alarm alarm = new Alarm(getTestRootDisposable());
    Map<Thread, StackTraceElement[]> before = Thread.getAllStackTraces();
    AtomicInteger executed = new AtomicInteger();
    int N = 100000;
    for (int i = 0; i < N; i++) {
      alarm.addRequest(executed::incrementAndGet, 10);
    }
    while (executed.get() != N) {
      UIUtil.dispatchAllInvocationEvents();
    }
    Map<Thread, StackTraceElement[]> after = Thread.getAllStackTraces();
    // Collect the threads that appeared during the test (with stacks) for the failure message.
    Map<Thread, List<StackTraceElement>> diff = new HashMap<>();
    after.forEach((key, value) -> diff.put(key, Arrays.asList(value)));
    before.keySet().forEach(diff::remove);
    // 100000 requests on one alarm must not grow the thread count by 10 or more.
    if (!(after.size() - before.size() < 10)) {
      fail("before: "+before.size()+"; after: "+after.size()+"Diff:\n"+diff);
    }
  }

  public void testManyAlarmsDoNotStartTooManyThreads() {
    Map<Thread, StackTraceElement[]> before = Thread.getAllStackTraces();
    AtomicInteger executed = new AtomicInteger();
    int N = 100000;
    // 100000 distinct alarms, one request each — thread usage must still stay bounded.
    List<Alarm> alarms = Collections.nCopies(N, "").stream().map(__ -> new Alarm(getTestRootDisposable())).collect(Collectors.toList());
    alarms.forEach(alarm -> alarm.addRequest(executed::incrementAndGet, 10));
    while (executed.get() != N) {
      UIUtil.dispatchAllInvocationEvents();
    }
    Map<Thread, StackTraceElement[]> after = Thread.getAllStackTraces();
    LOG.debug("before: "+before.size()+"; after: "+after.size());
    assertTrue(after.size() - before.size() < 10);
  }

  public void testOrderIsPreservedAfterModalitySwitching() {
    Alarm alarm = new Alarm();
    StringBuilder sb = new StringBuilder();
    Object modal = new Object();
    LaterInvocator.enterModal(modal);
    try {
      // While modal, NON_MODAL requests must not fire even when events are pumped...
      ApplicationManager.getApplication().invokeLater(() -> TimeoutUtil.sleep(10), ModalityState.NON_MODAL);
      alarm.addRequest(() -> sb.append("1"), 0, ModalityState.NON_MODAL);
      alarm.addRequest(() -> sb.append("2"), 5, ModalityState.NON_MODAL);
      UIUtil.dispatchAllInvocationEvents();
      assertEquals("", sb.toString());
    }
    finally {
      LaterInvocator.leaveModal(modal);
    }
    // ...and once modality is left they must fire in their original order.
    while (!alarm.isEmpty()) {
      UIUtil.dispatchAllInvocationEvents();
    }
    assertEquals("12", sb.toString());
  }

  public void testFlushImmediately() {
    Alarm alarm = new Alarm();
    StringBuilder sb = new StringBuilder();
    alarm.addRequest(() -> sb.append("1"), 0, ModalityState.NON_MODAL);
    alarm.addRequest(() -> sb.append("2"), 5, ModalityState.NON_MODAL);
    assertEquals("", sb.toString());
    // flush() must run all pending requests synchronously, preserving order.
    alarm.flush();
    assertEquals("12", sb.toString());
  }

  public void testWaitForAllExecutedMustWaitUntilExecutionFinish() throws Exception {
    Alarm alarm = new Alarm(Alarm.ThreadToUse.POOLED_THREAD, getTestRootDisposable());
    StringBuffer sb = new StringBuffer();
    long start = System.currentTimeMillis();
    int delay = 100;
    alarm.addRequest(() -> {
      TimeoutUtil.sleep(1000);
      sb.append("1");
    }, delay);
    alarm.addRequest(() -> {
      TimeoutUtil.sleep(1000);
      sb.append("2");
    }, delay*2);
    String s = sb.toString();
    long elapsed = System.currentTimeMillis() - start;
    // Bail out on overloaded agents: if just scheduling took too long, the timing
    // assumptions below are meaningless and the test would flake.
    if (elapsed > delay/2) {
      System.err.println("No no no no this agent is so overloaded I quit");
      return;
    }
    assertEquals(2, alarm.getActiveRequestCount());
    assertEquals("", s);
    try {
      // started to execute but not finished yet
      alarm.waitForAllExecuted(1000, TimeUnit.MILLISECONDS);
      fail();
    }
    catch (TimeoutException ignored) {
    }
    // A generous timeout must be enough for both 1-second requests to finish.
    alarm.waitForAllExecuted(3000, TimeUnit.MILLISECONDS);
    assertEquals(2, sb.length());
  }
}
jmvachon/bridgepoint | src/org.xtuml.bp.qa.odometer/src/org/xtuml/qa/odometer/tracker/OdoWindowListener.java | 1665 | package org.xtuml.qa.odometer.tracker;
import org.eclipse.ui.IPartService;
import org.eclipse.ui.ISelectionService;
import org.eclipse.ui.IWindowListener;
import org.eclipse.ui.IWorkbenchWindow;
/**
 * Workbench window listener that (un)registers the odometer part and selection
 * listeners as windows are opened/activated and deactivated/closed.
 */
public class OdoWindowListener implements IWindowListener {

    // Shared instance, created lazily. NOTE(review): not synchronized — presumably
    // only ever touched from the UI thread by the workbench; confirm before reuse.
    static OdoWindowListener self = null;

    /** Returns the shared listener instance, creating it on first use. */
    public static IWindowListener getInstance() {
        if (self == null) {
            self = new OdoWindowListener();
        }
        return self;
    }

    /** Attaches the odometer part/selection listeners to the window's services. */
    public static void setUp(IWorkbenchWindow window) {
        IPartService parts = (IPartService) window.getService(IPartService.class);
        parts.addPartListener(OdoPartListener.getInstance());
        ISelectionService selections = (ISelectionService) window.getService(ISelectionService.class);
        selections.addSelectionListener(OdoSelectionChangedListener.getInstance());
    }

    /** Detaches the odometer part/selection listeners from the window's services. */
    public static void tearDown(IWorkbenchWindow window) {
        IPartService parts = (IPartService) window.getService(IPartService.class);
        parts.removePartListener(OdoPartListener.getInstance());
        ISelectionService selections = (ISelectionService) window.getService(ISelectionService.class);
        selections.removeSelectionListener(OdoSelectionChangedListener.getInstance());
    }

    @Override
    public void windowOpened(IWorkbenchWindow window) {
        setUp(window);
    }

    @Override
    public void windowActivated(IWorkbenchWindow window) {
        setUp(window);
    }

    @Override
    public void windowDeactivated(IWorkbenchWindow window) {
        tearDown(window);
    }

    @Override
    public void windowClosed(IWorkbenchWindow window) {
        tearDown(window);
    }
}
| apache-2.0 |
F0REacH/pac4j-1.5.1 | pac4j-oauth/src/main/java/org/pac4j/oauth/profile/facebook/FacebookConverters.java | 2967 | /*
Copyright 2012 - 2014 Jerome Leleu
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.pac4j.oauth.profile.facebook;
import org.pac4j.core.profile.converter.DateConverter;
import org.pac4j.core.profile.converter.FormattedDateConverter;
import org.pac4j.oauth.profile.converter.JsonListConverter;
import org.pac4j.oauth.profile.converter.JsonObjectConverter;
import org.pac4j.oauth.profile.facebook.converter.FacebookRelationshipStatusConverter;
/**
* This class defines all the converters specific to Facebook.
*
* @author Jerome Leleu
* @since 1.1.0
*/
public final class FacebookConverters {
public final static FormattedDateConverter birthdayConverter = new FormattedDateConverter("MM/dd/yyyy");
public final static FacebookRelationshipStatusConverter relationshipStatusConverter = new FacebookRelationshipStatusConverter();
public final static JsonListConverter listObjectConverter = new JsonListConverter(FacebookObject.class);
public final static JsonListConverter listEducationConverter = new JsonListConverter(FacebookEducation.class);
public final static JsonObjectConverter objectConverter = new JsonObjectConverter(FacebookObject.class);
public final static JsonListConverter listWorkConverter = new JsonListConverter(FacebookWork.class);
public final static DateConverter workDateConverter = new DateConverter("yyyy-MM");
public final static JsonListConverter listInfoConverter = new JsonListConverter(FacebookInfo.class);
public final static JsonListConverter listPhotoConverter = new JsonListConverter(FacebookPhoto.class);
public final static JsonListConverter listEventConverter = new JsonListConverter(FacebookEvent.class);
public final static JsonListConverter listGroupConverter = new JsonListConverter(FacebookGroup.class);
public final static DateConverter eventDateConverter = new DateConverter("yyyy-MM-dd'T'HH:mm:ss");
public final static JsonObjectConverter applicationConverter = new JsonObjectConverter(FacebookApplication.class);
public final static JsonObjectConverter musicDataConverter = new JsonObjectConverter(FacebookMusicData.class);
public final static JsonListConverter listMusicListensConverter = new JsonListConverter(FacebookMusicListen.class);
public final static JsonObjectConverter pictureConverter = new JsonObjectConverter(FacebookPicture.class);
}
| apache-2.0 |
goodwinnk/intellij-community | platform/util/src/com/intellij/openapi/util/io/FileSystemUtil.java | 24786 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.util.io;
import com.intellij.Patches;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.win32.FileInfo;
import com.intellij.openapi.util.io.win32.IdeaWin32;
import com.intellij.util.ArrayUtil;
import com.intellij.util.SystemProperties;
import com.intellij.util.containers.ContainerUtil;
import com.sun.jna.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.Collections;
import java.util.Locale;
import java.util.Map;
import static com.intellij.util.BitUtil.isSet;
/**
* @version 11.1
*/
public class FileSystemUtil {
// System-property keys overriding mediator selection (see getMediator()).
static final String FORCE_USE_NIO2_KEY = "idea.io.use.nio2";
static final String FORCE_USE_FALLBACK_KEY = "idea.io.use.fallback";
// Not referenced in this part of the file — presumably consumed elsewhere; verify before removing.
static final String COARSE_TIMESTAMP_KEY = "idea.io.coarse.ts";
private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.util.io.FileSystemUtil");
/**
 * Abstraction over a concrete filesystem-access backend (Win32 DLL, JNA/libc, NIO2,
 * or the pure-Java fallback). Implementations may throw; callers log and return null.
 */
private abstract static class Mediator {
  /** Attributes of the file at {@code path}; null semantics are implementation-defined. */
  @Nullable
  protected abstract FileAttributes getAttributes(@NotNull String path) throws Exception;

  /** Resolves symlinks in {@code path}; {@code null} when resolution is not possible. */
  @Nullable
  protected abstract String resolveSymLink(@NotNull String path) throws Exception;

  /** Copies permissions from {@code source} to {@code target}; the default is a no-op returning {@code false}. */
  protected boolean clonePermissions(@NotNull String source, @NotNull String target, boolean onlyPermissionsToExecute) throws Exception { return false; }

  /** Short backend name for diagnostics (class name minus the "MediatorImpl" suffix). */
  @NotNull
  private String getName() { return getClass().getSimpleName().replace("MediatorImpl", ""); }
}
// Active backend, chosen once at class initialization; see getMediator() for the order.
@NotNull
private static Mediator ourMediator = getMediator();
/**
 * Picks the best available filesystem backend, honoring the override properties:
 * native (Win32 DLL or JNA/libc) first, then NIO2 on Java 7+, then the pure-Java
 * fallback. Each candidate is probed via check(); a failure falls through to the next.
 */
private static Mediator getMediator() {
  boolean forceNio2 = SystemProperties.getBooleanProperty(FORCE_USE_NIO2_KEY, false);
  boolean forceFallback = SystemProperties.getBooleanProperty(FORCE_USE_FALLBACK_KEY, false);
  Throwable error = null;
  if (!forceNio2 && !forceFallback) {
    if (SystemInfo.isWindows && IdeaWin32.isAvailable()) {
      try {
        return check(new IdeaWin32MediatorImpl());
      }
      catch (Throwable t) {
        error = t;
      }
    }
    else if (SystemInfo.isLinux || SystemInfo.isMac || SystemInfo.isSolaris || SystemInfo.isFreeBSD) {
      try {
        return check(new JnaUnixMediatorImpl());
      }
      catch (Throwable t) {
        error = t;
      }
    }
  }
  // NIO2 is skipped on the 1.7.0-ea build — presumably it was broken there; verify before removing.
  if (!forceFallback && SystemInfo.isJavaVersionAtLeast(7, 0, 0) && !"1.7.0-ea".equals(SystemInfo.JAVA_VERSION)) {
    try {
      return check(new Nio2MediatorImpl());
    }
    catch (Throwable t) {
      error = t;
    }
  }
  // Only the most recent failure is reported; earlier ones are overwritten above.
  if (!forceFallback) {
    LOG.warn("Failed to load filesystem access layer: " + SystemInfo.OS_NAME + ", " + SystemInfo.JAVA_VERSION + ", " + "nio2=" + forceNio2, error);
  }
  return new FallbackMediatorImpl();
}
// Sanity-probes the mediator against the filesystem root; any failure propagates
// to the caller, which then falls back to the next backend candidate.
private static Mediator check(final Mediator mediator) throws Exception {
  mediator.getAttributes(SystemInfo.isWindows ? "C:\\" : "/");
  return mediator;
}
// Static utility class — not instantiable.
private FileSystemUtil() { }
/**
 * Returns attributes of the file at {@code path} via the active mediator, or
 * {@code null} when the lookup throws (the exception is logged as a warning).
 * With trace logging enabled, also logs the call and its duration in microseconds.
 */
@Nullable
public static FileAttributes getAttributes(@NotNull String path) {
  try {
    if (!LOG.isTraceEnabled()) {
      return ourMediator.getAttributes(path);
    }
    LOG.trace("getAttributes(" + path + ")");
    long start = System.nanoTime();
    FileAttributes attributes = ourMediator.getAttributes(path);
    long elapsedMks = (System.nanoTime() - start) / 1000;
    LOG.trace("  " + elapsedMks + " mks");
    return attributes;
  }
  catch (Exception e) {
    LOG.warn(e);
    return null;
  }
}
/** Convenience overload: delegates to {@link #getAttributes(String)} with the file's path. */
@Nullable
public static FileAttributes getAttributes(@NotNull File file) {
  return getAttributes(file.getPath());
}
/**
 * Last-modified timestamp of {@code file}, or {@code 0} when its attributes
 * cannot be read (mirroring {@code java.io.File#lastModified()} for missing files).
 */
public static long lastModified(@NotNull File file) {
  FileAttributes attributes = getAttributes(file);
  if (attributes == null) {
    return 0;
  }
  return attributes.lastModified;
}
/**
 * Checks whether the last element of {@code path} is a symbolic link.
 * Always {@code false} on platforms without symlink support.
 */
public static boolean isSymLink(@NotNull String path) {
  if (!SystemInfo.areSymLinksSupported) {
    return false;
  }
  FileAttributes attributes = getAttributes(path);
  return attributes != null && attributes.isSymLink();
}
/**
 * Checks if the last element in the path is a symlink.
 * Delegates to {@link #isSymLink(String)} using the file's absolute path.
 */
public static boolean isSymLink(@NotNull File file) {
  return isSymLink(file.getAbsolutePath());
}
/**
 * Resolves symlinks in {@code path} via the active mediator. Returns {@code null}
 * when resolution fails, throws (logged as a warning), or the resolved path does
 * not exist. With trace logging enabled, also logs the call and its duration.
 */
@Nullable
public static String resolveSymLink(@NotNull String path) {
  try {
    String realPath;
    if (LOG.isTraceEnabled()) {
      LOG.trace("resolveSymLink(" + path + ")");
      long t = System.nanoTime();
      realPath = ourMediator.resolveSymLink(path);
      t = (System.nanoTime() - t) / 1000;
      LOG.trace("  " + t + " mks");
    }
    else {
      realPath = ourMediator.resolveSymLink(path);
    }
    // Windows UNC paths ("\\server\share") skip the existence check — presumably
    // because probing a remote share is slow/unreliable; confirm before changing.
    if (realPath != null && (SystemInfo.isWindows && realPath.startsWith("\\\\") || new File(realPath).exists())) {
      return realPath;
    }
  }
  catch (Exception e) {
    LOG.warn(e);
  }
  return null;
}
/** Convenience overload: delegates to {@link #resolveSymLink(String)} with the file's absolute path. */
@Nullable
public static String resolveSymLink(@NotNull File file) {
  return resolveSymLink(file.getAbsolutePath());
}
/**
 * Gives {@code target} the full permissions of {@code source} when the active
 * mediator supports it; returns {@code true} on success, {@code false} otherwise
 * (including on Windows, where this is a no-op, and when an exception was logged).
 */
public static boolean clonePermissions(@NotNull String source, @NotNull String target) {
  boolean cloned;
  try {
    cloned = ourMediator.clonePermissions(source, target, false);
  }
  catch (Exception e) {
    LOG.warn(e);
    cloned = false;
  }
  return cloned;
}
/**
 * Gives {@code target} only the execute permissions of {@code source} when the
 * active mediator supports it; returns {@code true} on success, {@code false}
 * otherwise (including on Windows, where this is a no-op).
 */
public static boolean clonePermissionsToExecute(@NotNull String source, @NotNull String target) {
  boolean cloned;
  try {
    cloned = ourMediator.clonePermissions(source, target, true);
  }
  catch (Exception e) {
    LOG.warn(e);
    cloned = false;
  }
  return cloned;
}
/**
 * Mediator backed by the NIO2 file API (JDK 7+), accessed purely through reflection so the class
 * still loads on older runtimes (see {@code Patches.USE_REFLECTION_TO_ACCESS_JDK7}).
 */
private static class Nio2MediatorImpl extends Mediator {
  private final Method myGetPath;              // Paths.get(String, String...)
  private final Object myLinkOptions;          // empty LinkOption[]: follow symlinks
  private final Object myNoFollowLinkOptions;  // {NOFOLLOW_LINKS}: lstat-like read
  private final Method myReadAttributes;       // Files.readAttributes(Path, Class, LinkOption[])
  private final Method mySetAttribute;         // Files.setAttribute(Path, String, Object, LinkOption[])
  private final Method myToRealPath;           // Path.toRealPath(LinkOption[])
  private final Method myToMillis;             // FileTime.toMillis()
  private final Class<?> mySchema;             // DosFileAttributes on Windows, PosixFileAttributes elsewhere
  private final Method myIsSymbolicLink;
  private final Method myIsDirectory;
  private final Method myIsOther;
  private final Method mySize;
  private final Method myLastModifiedTime;
  private final Method myIsHidden;             // Windows only (null elsewhere)
  private final Method myIsReadOnly;           // Windows only (null elsewhere)
  private final Method myPermissions;          // POSIX only (null on Windows)
  private Nio2MediatorImpl() throws Exception {
    assert Patches.USE_REFLECTION_TO_ACCESS_JDK7;
    myGetPath = accessible(Class.forName("java.nio.file.Paths").getMethod("get", String.class, String[].class));
    Class<?> pathClass = Class.forName("java.nio.file.Path");
    Class<?> filesClass = Class.forName("java.nio.file.Files");
    Class<?> linkOptClass = Class.forName("java.nio.file.LinkOption");
    myLinkOptions = Array.newInstance(linkOptClass, 0);
    myNoFollowLinkOptions = Array.newInstance(linkOptClass, 1);
    Array.set(myNoFollowLinkOptions, 0, linkOptClass.getField("NOFOLLOW_LINKS").get(null));
    Class<?> linkOptArrayClass = myLinkOptions.getClass();
    myReadAttributes = accessible(filesClass.getMethod("readAttributes", pathClass, Class.class, linkOptArrayClass));
    mySetAttribute = accessible(filesClass.getMethod("setAttribute", pathClass, String.class, Object.class, linkOptArrayClass));
    myToRealPath = accessible(pathClass.getMethod("toRealPath", linkOptArrayClass));
    myToMillis = accessible(Class.forName("java.nio.file.attribute.FileTime").getMethod("toMillis"));
    // The platform-specific attribute view determines which optional accessors exist below.
    mySchema = Class.forName("java.nio.file.attribute." + (SystemInfo.isWindows ? "DosFileAttributes" : "PosixFileAttributes"));
    myIsSymbolicLink = accessible(mySchema.getMethod("isSymbolicLink"));
    myIsDirectory = accessible(mySchema.getMethod("isDirectory"));
    myIsOther = accessible(mySchema.getMethod("isOther"));
    mySize = accessible(mySchema.getMethod("size"));
    myLastModifiedTime = accessible(mySchema.getMethod("lastModifiedTime"));
    if (SystemInfo.isWindows) {
      myIsHidden = accessible(mySchema.getMethod("isHidden"));
      myIsReadOnly = accessible(mySchema.getMethod("isReadOnly"));
      myPermissions = null;
    }
    else {
      myIsHidden = myIsReadOnly = null;
      myPermissions = accessible(mySchema.getMethod("permissions"));
    }
  }
  // setAccessible(true) avoids repeated access checks on these frequently invoked reflective calls.
  private static Method accessible(Method method) {
    method.setAccessible(true);
    return method;
  }
  /**
   * Reads attributes without following a trailing symlink first; for symlinks, re-reads the
   * target's attributes. Returns {@code null} for invalid/unreadable paths.
   */
  @Override
  protected FileAttributes getAttributes(@NotNull String path) throws Exception {
    try {
      Object pathObj = myGetPath.invoke(null, path, ArrayUtil.EMPTY_STRING_ARRAY);
      Object attributes = myReadAttributes.invoke(null, pathObj, mySchema, myNoFollowLinkOptions);
      // On Windows, a directory reparse point (junction) reports "other" + "directory" -
      // such entries are treated as symlinks here.
      boolean isSymbolicLink = (Boolean)myIsSymbolicLink.invoke(attributes) ||
                               SystemInfo.isWindows && (Boolean)myIsOther.invoke(attributes) && (Boolean)myIsDirectory.invoke(attributes);
      if (isSymbolicLink) {
        try {
          // Re-read, this time following the link, to report the target's attributes.
          attributes = myReadAttributes.invoke(null, pathObj, mySchema, myLinkOptions);
        }
        catch (InvocationTargetException e) {
          Throwable cause = e.getCause();
          if (cause != null && "java.nio.file.NoSuchFileException".equals(cause.getClass().getName())) {
            return FileAttributes.BROKEN_SYMLINK;  // the link target is missing
          }
          // any other failure falls through: the link's own (no-follow) attributes are reported
        }
      }
      boolean isDirectory = (Boolean)myIsDirectory.invoke(attributes);
      boolean isOther = (Boolean)myIsOther.invoke(attributes);
      long size = (Long)mySize.invoke(attributes);
      long lastModified = (Long)myToMillis.invoke(myLastModifiedTime.invoke(attributes));
      if (SystemInfo.isWindows) {
        // Drive roots (getParent() == null) are never reported as hidden.
        boolean isHidden = new File(path).getParent() == null ? false : (Boolean)myIsHidden.invoke(attributes);
        // The read-only flag is ignored for directories: they are always considered writable.
        boolean isWritable = isDirectory || !(Boolean)myIsReadOnly.invoke(attributes);
        return new FileAttributes(isDirectory, isOther, isSymbolicLink, isHidden, size, lastModified, isWritable);
      }
      else {
        // On Unix, writability is checked for the current user via the java.io API.
        boolean isWritable = new File(path).canWrite();
        return new FileAttributes(isDirectory, isOther, isSymbolicLink, false, size, lastModified, isWritable);
      }
    }
    catch (InvocationTargetException e) {
      Throwable cause = e.getCause();
      // Missing or malformed paths are a normal outcome - report "no attributes".
      if (cause instanceof IOException || cause != null && "java.nio.file.InvalidPathException".equals(cause.getClass().getName())) {
        LOG.debug(cause);
        return null;
      }
      throw e;
    }
  }
  /** Canonicalizes via Path.toRealPath(); {@code null} when the file does not exist. */
  @Override
  protected String resolveSymLink(@NotNull String path) throws Exception {
    Object pathObj = myGetPath.invoke(null, path, ArrayUtil.EMPTY_STRING_ARRAY);
    try {
      return myToRealPath.invoke(pathObj, myLinkOptions).toString();
    }
    catch (InvocationTargetException e) {
      Throwable cause = e.getCause();
      if (cause != null && "java.nio.file.NoSuchFileException".equals(cause.getClass().getName())) return null;
      throw e;
    }
  }
  /**
   * Copies POSIX permissions from source to target (no-op unless on Unix).
   * When {@code onlyPermissionsToExecute} is set, only the *_EXECUTE bits come from the source;
   * all other bits keep the target's current values.
   */
  @Override
  protected boolean clonePermissions(@NotNull String source, @NotNull String target, boolean onlyPermissionsToExecute) throws Exception {
    if (SystemInfo.isUnix) {
      Object sourcePath = myGetPath.invoke(null, source, ArrayUtil.EMPTY_STRING_ARRAY);
      Object targetPath = myGetPath.invoke(null, target, ArrayUtil.EMPTY_STRING_ARRAY);
      Collection sourcePermissions = getPermissions(sourcePath);
      Collection targetPermissions = getPermissions(targetPath);
      if (sourcePermissions != null && targetPermissions != null) {
        if (onlyPermissionsToExecute) {
          // Merge: target's non-execute bits + source's execute bits.
          Collection<Object> permissionsToSet = ContainerUtil.newHashSet();
          for (Object permission : targetPermissions) {
            if (!permission.toString().endsWith("_EXECUTE")) {
              permissionsToSet.add(permission);
            }
          }
          for (Object permission : sourcePermissions) {
            if (permission.toString().endsWith("_EXECUTE")) {
              permissionsToSet.add(permission);
            }
          }
          mySetAttribute.invoke(null, targetPath, "posix:permissions", permissionsToSet, myLinkOptions);
        }
        else {
          mySetAttribute.invoke(null, targetPath, "posix:permissions", sourcePermissions, myLinkOptions);
        }
        return true;
      }
    }
    return false;
  }
  // Reads the POSIX permission set of the given Path object (symlinks followed); null if unreadable.
  private Collection getPermissions(Object sourcePath) throws IllegalAccessException, InvocationTargetException {
    Object attributes = myReadAttributes.invoke(null, sourcePath, mySchema, myLinkOptions);
    return attributes != null ? (Collection)myPermissions.invoke(attributes) : null;
  }
}
/** Windows mediator backed by the native IdeaWin32 helper library. */
private static class IdeaWin32MediatorImpl extends Mediator {
  private final IdeaWin32 myInstance = IdeaWin32.getInstance();
  @Override
  protected FileAttributes getAttributes(@NotNull String path) {
    FileInfo fileInfo = myInstance.getInfo(path);
    return fileInfo != null ? fileInfo.toFileAttributes() : null;
  }
  @Override
  protected String resolveSymLink(@NotNull String path) {
    path = new File(path).getAbsolutePath();
    // Only plain local absolute paths ("C:\foo\...") are handled; anything else is returned untouched.
    char drive = Character.toUpperCase(path.charAt(0));
    if (!(path.length() > 3 && drive >= 'A' && drive <= 'Z' && path.charAt(1) == ':' && path.charAt(2) == '\\')) {
      return path; // unknown format
    }
    // Walk the path component by component; only fall back to the native resolver
    // when some prefix actually is a symlink/junction.
    int remainder = 4;
    while (remainder < path.length()) {
      int next = path.indexOf('\\', remainder);
      String subPath = next > 0 ? path.substring(0, next) : path;
      FileAttributes attributes = getAttributes(subPath);
      if (attributes == null) {
        return null;  // some component does not exist
      }
      if (attributes.isSymLink()) {
        return myInstance.resolveSymLink(path);
      }
      remainder = next > 0 ? next + 1 : path.length();
    }
    return path;  // no symlinks along the way - the path is already real
  }
}
// thanks to SVNKit for the idea of platform-specific offsets
/**
 * Unix mediator that calls libc's stat()/lstat() family directly through JNA and decodes the raw
 * "struct stat" buffer using hard-coded per-platform field offsets (see the tables below).
 */
private static class JnaUnixMediatorImpl extends Mediator {
  @SuppressWarnings({"OctalInteger", "SpellCheckingInspection"})
  private static class LibC {
    static final int S_MASK = 0177777;       // all st_mode bits
    static final int S_IFMT = 0170000;       // file-type mask
    static final int S_IFLNK = 0120000; // symbolic link
    static final int S_IFREG = 0100000; // regular file
    static final int S_IFDIR = 0040000; // directory
    static final int PERM_MASK = 0777;       // rwx bits for user/group/other
    static final int EXECUTE_MASK = 0111;    // the three x bits
    static final int WRITE_MASK = 0222;      // the three w bits
    static final int W_OK = 2; // write permission flag for access(2)
    static native int getuid();
    static native int getgid();
    static native int chmod(String path, int mode);
    static native int access(String path, int mode);
  }
  @SuppressWarnings("SpellCheckingInspection")
  private static class UnixLibC {
    static native int lstat(String path, Pointer stat);
    static native int stat(String path, Pointer stat);
  }
  // glibc exports stat/lstat as versioned __xstat64/__lxstat64 symbols.
  @SuppressWarnings("SpellCheckingInspection")
  private static class LinuxLibC {
    static native int __lxstat64(int ver, String path, Pointer stat);
    static native int __xstat64(int ver, String path, Pointer stat);
  }
  // Byte offsets of {st_mode, st_size, st_mtime, st_uid, st_gid} inside "struct stat",
  // indexed by the OFF_* constants below; one table per OS/architecture combination.
  private static final int[] LINUX_32 = {16, 44, 72, 24, 28};
  private static final int[] LINUX_64 = {24, 48, 88, 28, 32};
  private static final int[] LNX_PPC32 = {16, 48, 80, 24, 28};
  private static final int[] LNX_PPC64 = LINUX_64;
  private static final int[] LNX_ARM32 = LNX_PPC32;
  private static final int[] BSD_32 = { 8, 48, 32, 12, 16};
  private static final int[] BSD_64 = { 8, 72, 40, 12, 16};
  private static final int[] BSD_32_12 = {24, 96, 64, 28, 32};  // FreeBSD 12+ changed the struct layout
  private static final int[] BSD_64_12 = {24,112, 64, 28, 32};
  private static final int[] SUN_OS_32 = {20, 48, 64, 28, 32};
  private static final int[] SUN_OS_64 = {16, 40, 64, 24, 28};
  private static final int STAT_VER = 1;  // version argument expected by glibc's __xstat64
  private static final int OFF_MODE = 0;
  private static final int OFF_SIZE = 1;
  private static final int OFF_TIME = 2;
  private static final int OFF_UID = 3;
  private static final int OFF_GID = 4;
  private final int[] myOffsets;  // offset table chosen for the current platform
  private final int myUid;
  private final int myGid;
  // When set, the sub-second part of modification timestamps is ignored.
  private final boolean myCoarseTs = SystemProperties.getBooleanProperty(COARSE_TIMESTAMP_KEY, false);
  private JnaUnixMediatorImpl() {
    if ("linux-x86".equals(Platform.RESOURCE_PREFIX)) myOffsets = LINUX_32;
    else if ("linux-x86-64".equals(Platform.RESOURCE_PREFIX)) myOffsets = LINUX_64;
    else if ("linux-arm".equals(Platform.RESOURCE_PREFIX)) myOffsets = LNX_ARM32;
    else if ("linux-ppc".equals(Platform.RESOURCE_PREFIX)) myOffsets = LNX_PPC32;
    else if ("linux-ppc64le".equals(Platform.RESOURCE_PREFIX)) myOffsets = LNX_PPC64;
    else if ("darwin".equals(Platform.RESOURCE_PREFIX)) myOffsets = BSD_64;
    else if ("freebsd-x86".equals(Platform.RESOURCE_PREFIX)) myOffsets = SystemInfo.isOsVersionAtLeast("12") ? BSD_32_12 : BSD_32;
    else if ("freebsd-x86-64".equals(Platform.RESOURCE_PREFIX)) myOffsets = SystemInfo.isOsVersionAtLeast("12") ? BSD_64_12 : BSD_64;
    else if ("sunos-x86".equals(Platform.RESOURCE_PREFIX)) myOffsets = SUN_OS_32;
    else if ("sunos-x86-64".equals(Platform.RESOURCE_PREFIX)) myOffsets = SUN_OS_64;
    else throw new IllegalStateException("Unsupported OS/arch: " + SystemInfo.OS_NAME + "/" + SystemInfo.OS_ARCH);
    // Make JNA marshal Strings with the same encoding the JVM uses for file names.
    Map<String, String> options = Collections.singletonMap(Library.OPTION_STRING_ENCODING, System.getProperty("sun.jnu.encoding"));
    NativeLibrary lib = NativeLibrary.getInstance("c", options);
    Native.register(LibC.class, lib);
    Native.register(SystemInfo.isLinux ? LinuxLibC.class : UnixLibC.class, lib);
    myUid = LibC.getuid();
    myGid = LibC.getgid();
  }
  /** lstat()s the path; for symlinks additionally stat()s the target. {@code null} when lstat fails. */
  @Override
  protected FileAttributes getAttributes(@NotNull String path) {
    // 256 bytes is assumed to be large enough for "struct stat" on all supported platforms.
    Memory buffer = new Memory(256);
    int res = SystemInfo.isLinux ? LinuxLibC.__lxstat64(STAT_VER, path, buffer) : UnixLibC.lstat(path, buffer);
    if (res != 0) return null;
    int mode = getModeFlags(buffer) & LibC.S_MASK;
    boolean isSymlink = (mode & LibC.S_IFMT) == LibC.S_IFLNK;
    if (isSymlink) {
      // Follow the link; when the target cannot be stat()ed, report a broken symlink.
      if (!loadFileStatus(path, buffer)) {
        return FileAttributes.BROKEN_SYMLINK;
      }
      mode = getModeFlags(buffer) & LibC.S_MASK;
    }
    boolean isDirectory = (mode & LibC.S_IFMT) == LibC.S_IFDIR;
    boolean isSpecial = !isDirectory && (mode & LibC.S_IFMT) != LibC.S_IFREG;
    long size = buffer.getLong(myOffsets[OFF_SIZE]);
    // st_mtime: seconds, followed by a nanoseconds field (4 bytes later on 32-bit, 8 on 64-bit).
    long mTime1 = SystemInfo.is32Bit ? buffer.getInt(myOffsets[OFF_TIME]) : buffer.getLong(myOffsets[OFF_TIME]);
    long mTime2 = myCoarseTs ? 0 : SystemInfo.is32Bit ? buffer.getInt(myOffsets[OFF_TIME] + 4) : buffer.getLong(myOffsets[OFF_TIME] + 8);
    long mTime = mTime1 * 1000 + mTime2 / 1000000;
    // For files owned by the current uid/gid, check the mode bits directly;
    // otherwise ask the kernel via access(2).
    boolean writable = ownFile(buffer) ? (mode & LibC.WRITE_MASK) != 0 : LibC.access(path, LibC.W_OK) == 0;
    return new FileAttributes(isDirectory, isSpecial, isSymlink, false, size, mTime, writable);
  }
  /** Canonicalizes via File.getCanonicalPath(); {@code null} for symlink loops ("too many levels"). */
  @Override
  protected String resolveSymLink(@NotNull final String path) throws Exception {
    try {
      return new File(path).getCanonicalPath();
    }
    catch (IOException e) {
      String message = e.getMessage();
      if (message != null && message.toLowerCase(Locale.US).contains("too many levels of symbolic links")) {
        LOG.debug(e);
        return null;
      }
      throw new IOException("Cannot resolve '" + path + "'", e);
    }
  }
  /** chmod()s the target; optionally copies only the execute bits, keeping the target's others. */
  @Override
  protected boolean clonePermissions(@NotNull String source, @NotNull String target, boolean onlyPermissionsToExecute) {
    Memory buffer = new Memory(256);
    if (!loadFileStatus(source, buffer)) return false;
    int permissions;
    int sourcePermissions = getModeFlags(buffer) & LibC.PERM_MASK;
    if (onlyPermissionsToExecute) {
      if (!loadFileStatus(target, buffer)) return false;
      int targetPermissions = getModeFlags(buffer) & LibC.PERM_MASK;
      permissions = targetPermissions & ~LibC.EXECUTE_MASK | sourcePermissions & LibC.EXECUTE_MASK;
    }
    else {
      permissions = sourcePermissions;
    }
    return LibC.chmod(target, permissions) == 0;
  }
  // stat() (following symlinks); true on success.
  private static boolean loadFileStatus(String path, Memory buffer) {
    return (SystemInfo.isLinux ? LinuxLibC.__xstat64(STAT_VER, path, buffer) : UnixLibC.stat(path, buffer)) == 0;
  }
  // st_mode is read as an int on Linux and as a short on the other platforms.
  private int getModeFlags(Memory buffer) {
    return SystemInfo.isLinux ? buffer.getInt(myOffsets[OFF_MODE]) : buffer.getShort(myOffsets[OFF_MODE]);
  }
  // True when the file's uid and gid both match the current process.
  private boolean ownFile(Memory buffer) {
    return buffer.getInt(myOffsets[OFF_UID]) == myUid && buffer.getInt(myOffsets[OFF_GID]) == myGid;
  }
}
/**
 * Last-resort mediator built on plain java.io.File calls; used when neither NIO2 nor a native
 * helper is available. Symlinks are never reported (this API cannot detect them).
 */
private static class FallbackMediatorImpl extends Mediator {
  // from java.io.FileSystem
  private static final int BA_REGULAR = 0x02;
  private static final int BA_DIRECTORY = 0x04;
  private static final int BA_HIDDEN = 0x08;
  private final Object myFileSystem;
  private final Method myGetBooleanAttributes;
  private FallbackMediatorImpl() {
    // Try to reach the JRE-private java.io.FileSystem#getBooleanAttributes:
    // one call yields exists/directory/hidden at once instead of several separate File calls.
    Object fileSystem;
    Method getBooleanAttributes;
    try {
      Field fs = File.class.getDeclaredField("fs");
      fs.setAccessible(true);
      fileSystem = fs.get(null);
      getBooleanAttributes = fileSystem.getClass().getMethod("getBooleanAttributes", File.class);
      getBooleanAttributes.setAccessible(true);
    }
    catch (Throwable t) {
      // Reflection unavailable - fall back to the public java.io.File API (see getAttributes()).
      fileSystem = null;
      getBooleanAttributes = null;
    }
    myFileSystem = fileSystem;
    myGetBooleanAttributes = getBooleanAttributes;
  }
  @Override
  protected FileAttributes getAttributes(@NotNull final String path) throws Exception {
    final File file = new File(path);
    if (myFileSystem != null) {
      final int flags = (Integer)myGetBooleanAttributes.invoke(myFileSystem, file);
      if (flags != 0) {  // 0 means the file does not exist
        boolean isDirectory = isSet(flags, BA_DIRECTORY);
        boolean isSpecial = !isSet(flags, BA_REGULAR) && !isSet(flags, BA_DIRECTORY);
        // Windows drive roots are reported hidden by the OS; ignore that.
        boolean isHidden = isSet(flags, BA_HIDDEN) && !isWindowsRoot(path);
        // On Windows, directories are always treated as writable.
        boolean isWritable = SystemInfo.isWindows && isDirectory || file.canWrite();
        return new FileAttributes(isDirectory, isSpecial, false, isHidden, file.length(), file.lastModified(), isWritable);
      }
    }
    else if (file.exists()) {
      // Public-API path: same attributes assembled from individual File calls.
      boolean isDirectory = file.isDirectory();
      boolean isSpecial = !isDirectory && !file.isFile();
      boolean isHidden = file.isHidden() && !isWindowsRoot(path);
      boolean isWritable = SystemInfo.isWindows && isDirectory || file.canWrite();
      return new FileAttributes(isDirectory, isSpecial, false, isHidden, file.length(), file.lastModified(), isWritable);
    }
    return null;
  }
  // "C:" or "C:\" - a Windows drive root.
  private static boolean isWindowsRoot(String p) {
    return SystemInfo.isWindows && p.length() >= 2 && p.length() <= 3 && Character.isLetter(p.charAt(0)) && p.charAt(1) == ':';
  }
  @Override
  protected String resolveSymLink(@NotNull final String path) throws Exception {
    return new File(path).getCanonicalPath();
  }
  /** Best-effort permission cloning via the public File API (write + execute bits only). */
  @Override
  protected boolean clonePermissions(@NotNull String source, @NotNull String target, boolean onlyPermissionsToExecute) {
    if (SystemInfo.isUnix) {
      File srcFile = new File(source);
      File dstFile = new File(target);
      if (!onlyPermissionsToExecute) {
        if (!dstFile.setWritable(srcFile.canWrite(), true)) return false;
      }
      return dstFile.setExecutable(srcFile.canExecute(), true);
    }
    return false;
  }
}
@TestOnly
static void resetMediator() {
  // Re-evaluates the platform and reinstalls the default mediator (tests may have replaced it).
  ourMediator = getMediator();
}
@TestOnly
static String getMediatorName() {
  // Exposes which mediator implementation is active (e.g. NIO2 / IdeaWin32 / JNA / fallback).
  return ourMediator.getName();
}
} | apache-2.0 |
jgarman/autopsy | KeywordSearch/src/org/sleuthkit/autopsy/keywordsearch/KeywordSearchListsEncase.java | 7757 | /*
* Autopsy Forensic Browser
*
* Copyright 2011 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.keywordsearch;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.logging.Level;
/**
* @author dfickling
* KeywordSearchListsEncase adds support for Encase tab-delimited
* keyword list exports to Autopsy.
*
* load() does the I/O operation, converting lines from the text file to
* an unsorted list of EncaseFileEntrys
* The next step is to recreate the original folder hierarchy,
* and finally the EncaseFileEntries are converted to KeywordSearchLists
*
*/
public class KeywordSearchListsEncase extends KeywordSearchListsAbstract{
    // Flat list of entries exactly as read from the export file (no parent/child links yet);
    // consumed front-to-back while the folder hierarchy is being rebuilt.
    ArrayList<EncaseFileEntry> entriesUnsorted;
    // Root of the reconstructed Encase folder tree.
    EncaseFileEntry rootEntry;
    public KeywordSearchListsEncase(String encasePath) {
        super(encasePath);
    }
    /**
     * Follow the EncaseFileEntry hierarchy starting with given entry.
     * Create a list for each Folder entry, add a keyword for each Expression.
     * @param entry current node of the Encase entry tree
     * @param parentPath slash-separated path of the parent folders ("" for the root)
     */
    private void doCreateListsFromEntries(EncaseFileEntry entry, String parentPath) {
        String name;
        if(parentPath.isEmpty()) {
            name = entry.name;
        } else {
            name = parentPath + "/" + entry.name;
        }
        List<Keyword> children = new ArrayList<Keyword>();
        for(EncaseFileEntry child : entry.children) {
            switch(child.type) {
                case Folder:
                    // Recurse: each sub-folder becomes its own keyword list.
                    doCreateListsFromEntries(child, name);
                    break;
                case Expression:
                    if(child.flags.contains(EncaseFlag.pg)) { // Skip GREP keywords
                        break;
                    }
                    children.add(new Keyword(child.value, true));
                    break;
            }
        }
        // Give each list a unique name by appending "(2)", "(3)", ...
        if(theLists.containsKey(name)) {
            int i = 2;
            while(theLists.containsKey(name + "(" + i + ")")) {
                i+=1;
            }
            name = name + "(" + i + ")";
        }
        // Don't create lists if there are no keywords
        if (!children.isEmpty()) {
            KeywordSearchList newList = new KeywordSearchList(name, new Date(), new Date(),
                    true, true, children);
            theLists.put(name, newList);
        }
    }
    /**
     * Convert entriesUnsorted (a list of childless and parentless EncaseFileEntries) into an
     * EncaseFileEntry tree. Entries are consumed in file order and attached depth-first until
     * each parent has its declared number of children.
     */
    private void doCreateEntryStructure(EncaseFileEntry parent) {
        if (!parent.isFull()) {
            EncaseFileEntry child = entriesUnsorted.remove(0);
            child.hasParent = true;
            child.parent = parent;
            parent.addChild(child);
            if(!child.isFull()) {
                doCreateEntryStructure(child);
            }
            if (!parent.isFull()) {
                doCreateEntryStructure(parent);
            }
        }
        if (parent.hasParent) {
            // Continue filling the ancestors once this subtree is complete.
            doCreateEntryStructure(parent.parent);
        }
    }
    @Override
    public boolean save() {
        // Encase keyword files are import-only; this format is never written back.
        throw new UnsupportedOperationException("Not supported yet.");
    }
    @Override
    public boolean save(boolean isExport) {
        throw new UnsupportedOperationException("Not supported yet.");
    }
    /**
     * Reads the UTF-16, tab-delimited Encase export, rebuilds the folder hierarchy and
     * converts it into keyword lists.
     * @return true on success, false when the file is missing or unreadable
     */
    @Override
    public boolean load() {
        BufferedReader readBuffer = null;
        try {
            readBuffer = new BufferedReader(new InputStreamReader(new FileInputStream(filePath), "utf-16"));
            String structLine;
            String metaLine;
            entriesUnsorted = new ArrayList<EncaseFileEntry>();
            // Skip the five header lines of the export.
            for(int line = 1; line < 6; line++) {
                readBuffer.readLine();
            }
            // Each entry occupies two lines: a structure line (child count) and a metadata line.
            while ((structLine = readBuffer.readLine()) != null && (metaLine = readBuffer.readLine()) != null) {
                String[] structArr = structLine.split("\t");
                String[] metaArr = metaLine.split("\t");
                EncaseMetaType type = EncaseMetaType.getType(metaArr[0]);
                String childCount = structArr[1];
                String name = metaArr[1];
                String value = metaArr[2];
                // Flag columns start at index 3; a non-empty cell means the flag is set.
                ArrayList<EncaseFlag> flags = new ArrayList<EncaseFlag>();
                for(int i = 0; i < 17; i++) {
                    if(metaArr.length < i+4) {
                        continue;
                    }
                    if(!metaArr[i+3].equals("")) {
                        flags.add(EncaseFlag.getFlag(i));
                    }
                }
                entriesUnsorted.add(new EncaseFileEntry(name, value, Integer.parseInt(childCount), false, null, type, flags));
            }
            // The first entry is the root of the hierarchy.
            this.rootEntry = entriesUnsorted.remove(0);
            doCreateEntryStructure(this.rootEntry);
            doCreateListsFromEntries(this.rootEntry, "");
            return true;
        } catch (FileNotFoundException ex) {
            logger.log(Level.INFO, "File at " + filePath + " does not exist!", ex);
        } catch (IOException ex) {
            logger.log(Level.INFO, "Failed to read file at " + filePath, ex);
        } finally {
            // Always release the file handle; the previous version leaked it.
            if (readBuffer != null) {
                try {
                    readBuffer.close();
                } catch (IOException ex) {
                    logger.log(Level.WARNING, "Failed to close file at " + filePath, ex);
                }
            }
        }
        return false;
    }
    private enum EncaseMetaType {
        Expression, Folder;
        // "5" marks a folder row; an empty type cell marks an expression (keyword) row.
        static EncaseMetaType getType(String type) {
            if(type.equals("5")) {
                return Folder;
            } else if(type.equals("")) {
                return Expression;
            } else {
                throw new IllegalArgumentException("Unsupported EncaseMetaType: " + type);
            }
        }
    }
    /*
     * Flags for EncaseFileEntries.
     * p8 = UTF-8
     * p7 = UTF-7
     * pg = GREP
     */
    private enum EncaseFlag {
        pc, pu, pb, p8, p7, pg, an, ph, or, di, um, st, ww, pr, lo, ta, cp;
        // Maps a 0-based flag-column index to the corresponding flag constant.
        static EncaseFlag getFlag(int i) {
            return EncaseFlag.values()[i];
        }
    }
    /**
     * An entry in the Encase keyword list file.
     */
    private class EncaseFileEntry {
        String name;
        String value;        // the keyword text (for Expression entries)
        int childCount;      // declared number of children (from the structure line)
        List<EncaseFileEntry> children;
        EncaseFileEntry parent;
        EncaseMetaType type;
        boolean hasParent;
        ArrayList<EncaseFlag> flags;
        EncaseFileEntry(String name, String value, int childCount, boolean hasParent, EncaseFileEntry parent, EncaseMetaType type, ArrayList<EncaseFlag> flags) {
            this.name = name;
            this.value = value;
            this.childCount = childCount;
            this.children = new ArrayList<EncaseFileEntry>();
            this.hasParent = hasParent;
            this.parent = parent;
            this.type = type;
            this.flags = flags;
        }
        // True once all declared children have been attached.
        boolean isFull() {
            return children.size() == childCount;
        }
        void addChild(EncaseFileEntry child) {
            children.add(child);
        }
    }
}
| apache-2.0 |
MikeThomsen/nifi | nifi-nar-bundles/nifi-standard-bundle/nifi-standard-processors/src/test/java/org/apache/nifi/processors/standard/ConversionWithSchemaInferenceIT.java | 1747 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.standard;
/**
 * Runs the abstract format-conversion round-trip tests with schema inference (see
 * {@code AbstractConversionIT}); the postfix overrides select fixtures suited for inference.
 */
public class ConversionWithSchemaInferenceIT extends AbstractConversionIT {
  @Override
  protected String csvPostfix() {
    // CSV fixtures with a header row - presumably the source of inferred field names; confirm against AbstractConversionIT.
    return "with_header.csv";
  }
  @Override
  protected String jsonPostfix() {
    return "json";
  }
  @Override
  protected String avroPostfix() {
    // Avro fixtures that embed their schema.
    return "with_schema.avro";
  }
  @Override
  protected String xmlPostfix() {
    return "xml";
  }
  @Override
  public void testJsonToAvro() throws Exception {
    fromJson(jsonPostfix());
    // JSON schema inference doesn't discern FLOAT but uses DOUBLE instead.
    // So the expected avro is a little bit different as the deserialized values also end up in
    // Long and Double objects
    toAvro("with_schema.json.to.avro");
    testConversion(reader, readerConfigurer, writer, writerConfigurer, inputHandler, resultHandler);
  }
}
| apache-2.0 |
titusfortner/selenium | java/src/org/openqa/selenium/firefox/HasExtensions.java | 1654 | // Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.firefox;
import org.openqa.selenium.Beta;
import java.nio.file.Path;
/**
 * Used by classes to indicate that they can install and uninstall browser extensions on the fly.
 */
@Beta
public interface HasExtensions {
  /**
   * Installs an extension.
   *
   * @param path absolute path to the extension file that should be installed.
   * @return the unique identifier of the installed extension.
   */
  String installExtension(Path path);
  /**
   * Installs an extension, optionally only temporarily.
   *
   * @param path absolute path to the extension file that should be installed.
   * @param temporary whether the installation is temporary - presumably removed when the
   *                  browser session ends; confirm against the driver documentation.
   * @return the unique identifier of the installed extension.
   */
  String installExtension(Path path, Boolean temporary);
  /**
   * Uninstall the extension by the given identifier.
   * This value can be found in the extension's manifest, and typically ends with "@mozilla.org".
   *
   * @param extensionId The unique extension identifier returned by {{@link #installExtension(Path)}}
   */
  void uninstallExtension(String extensionId);
}
| apache-2.0 |
RLDevOps/Demo | src/main/java/org/olat/registration/DMZPWChangeContentControllerCreator.java | 2071 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <p>
*/
package org.olat.registration;
import org.olat.core.commons.fullWebApp.BaseFullWebappController;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.gui.control.creator.AutoCreator;
import org.olat.core.gui.control.creator.ControllerCreator;
import org.olat.login.DmzBFWCParts;
/**
 * Description:<br>
 * Builds the DMZ (pre-login) full web-app screen hosting the password-change workflow:
 * a {@link PwChangeController} embedded in a minimal chrome without the top navigation.
 * <P>
 * Initial Date: 29.01.2008 <br>
 *
 * @author patrickb
 */
public class DMZPWChangeContentControllerCreator implements ControllerCreator {
	/**
	 * @see org.olat.core.gui.control.creator.ControllerCreator#createController(org.olat.core.gui.UserRequest, org.olat.core.gui.control.WindowControl)
	 */
	@Override
	public Controller createController(final UserRequest lureq, final WindowControl lwControl) {
		// The password-change controller is instantiated lazily via its class name.
		final AutoCreator pwChangeCreator = new AutoCreator();
		pwChangeCreator.setClassName(PwChangeController.class.getName());
		// Assemble the DMZ page parts: no top navigation, content supplied by the creator above.
		final DmzBFWCParts webappParts = new DmzBFWCParts();
		webappParts.setContentControllerCreator(pwChangeCreator);
		webappParts.showTopNav(false);
		return new BaseFullWebappController(lureq, lwControl, webappParts);
	}
}
| apache-2.0 |
lovepoem/dubbo | dubbo-remoting/dubbo-remoting-http/src/main/java/org/apache/dubbo/remoting/http/tomcat/TomcatHttpServer.java | 3833 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.remoting.http.tomcat;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.logger.Logger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.remoting.http.HttpHandler;
import org.apache.dubbo.remoting.http.servlet.DispatcherServlet;
import org.apache.dubbo.remoting.http.servlet.ServletManager;
import org.apache.dubbo.remoting.http.support.AbstractHttpServer;
import org.apache.catalina.Context;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.connector.Connector;
import org.apache.catalina.startup.Tomcat;
import java.io.File;
import static org.apache.dubbo.common.constants.CommonConstants.DEFAULT_THREADS;
import static org.apache.dubbo.common.constants.CommonConstants.THREADS_KEY;
import static org.apache.dubbo.remoting.Constants.ACCEPTS_KEY;
/**
 * HTTP server implementation backed by an embedded Apache Tomcat instance. Incoming requests
 * are routed to the shared {@link DispatcherServlet}, which dispatches them to the
 * {@link HttpHandler} registered for this server's port.
 */
public class TomcatHttpServer extends AbstractHttpServer {
    private static final Logger logger = LoggerFactory.getLogger(TomcatHttpServer.class);
    // The embedded container: started in the constructor, stopped and destroyed in close().
    private final Tomcat tomcat;
    private final URL url;
    public TomcatHttpServer(URL url, final HttpHandler handler) {
        super(url, handler);
        this.url = url;
        // Route requests arriving on this port to the given handler.
        DispatcherServlet.addHttpHandler(url.getPort(), handler);
        String baseDir = new File(System.getProperty("java.io.tmpdir")).getAbsolutePath();
        tomcat = new Tomcat();
        Connector connector = tomcat.getConnector();
        connector.setPort(url.getPort());
        // Thread/connection limits come from the Dubbo URL; -1 means unlimited connections.
        connector.setProperty("maxThreads", String.valueOf(url.getParameter(THREADS_KEY, DEFAULT_THREADS)));
        connector.setProperty("maxConnections", String.valueOf(url.getParameter(ACCEPTS_KEY, -1)));
        connector.setProperty("URIEncoding", "UTF-8");
        connector.setProperty("connectionTimeout", "60000");
        // Unlimited keep-alive requests per connection.
        connector.setProperty("maxKeepAliveRequests", "-1");
        tomcat.setBaseDir(baseDir);
        tomcat.setPort(url.getPort());
        Context context = tomcat.addContext("/", baseDir);
        Tomcat.addServlet(context, "dispatcher", new DispatcherServlet());
        // Issue : https://github.com/apache/dubbo/issues/6418
        // addServletMapping method will be removed since Tomcat 9
        // context.addServletMapping("/*", "dispatcher");
        context.addServletMappingDecoded("/*", "dispatcher");
        ServletManager.getInstance().addServletContext(url.getPort(), context.getServletContext());
        // tell tomcat to fail on startup failures.
        System.setProperty("org.apache.catalina.startup.EXIT_ON_INIT_FAILURE", "true");
        try {
            tomcat.start();
        } catch (LifecycleException e) {
            throw new IllegalStateException("Failed to start tomcat server at " + url.getAddress(), e);
        }
    }
    /** Unregisters the servlet context and fully shuts the embedded container down. */
    @Override
    public void close() {
        super.close();
        ServletManager.getInstance().removeServletContext(url.getPort());
        // NOTE(review): the handler registered via DispatcherServlet.addHttpHandler() in the
        // constructor is never unregistered - verify whether it should be removed here as well.
        try {
            tomcat.stop();
            // stop() only halts the container; destroy() must follow to release the remaining
            // resources held by this embedded instance (per the Tomcat lifecycle contract).
            tomcat.destroy();
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
        }
    }
}
| apache-2.0 |
jk1/intellij-community | java/java-tests/testSrc/com/intellij/java/codeInsight/AbstractParameterInfoTestCase.java | 3542 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.java.codeInsight;
import com.intellij.codeInsight.AutoPopupController;
import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.codeInsight.completion.LightFixtureCompletionTestCase;
import com.intellij.codeInsight.daemon.impl.ParameterHintsPresentationManager;
import com.intellij.codeInsight.hint.ParameterInfoController;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementPresentation;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.editor.Editor;
import com.intellij.testFramework.fixtures.EditorHintFixture;
import com.intellij.util.ui.UIUtil;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.locks.LockSupport;
import java.util.stream.Stream;
/**
 * Base class for parameter-info (method signature hint) tests. Speeds up the
 * auto-popup delay in {@link #setUp()} and restores the user's original value
 * in {@link #tearDown()}.
 */
public abstract class AbstractParameterInfoTestCase extends LightFixtureCompletionTestCase {
  private EditorHintFixture myHintFixture;
  // Saved global PARAMETER_INFO_DELAY, restored on tearDown so tests don't leak settings.
  private int myStoredAutoPopupDelay;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    myHintFixture = new EditorHintFixture(getTestRootDisposable());
    myStoredAutoPopupDelay = CodeInsightSettings.getInstance().PARAMETER_INFO_DELAY;
    CodeInsightSettings.getInstance().PARAMETER_INFO_DELAY = 100; // speed up tests
  }

  @Override
  protected void tearDown() throws Exception {
    try {
      // Restore the global setting even if super.tearDown() fails.
      CodeInsightSettings.getInstance().PARAMETER_INFO_DELAY = myStoredAutoPopupDelay;
    }
    finally {
      super.tearDown();
    }
  }

  /** Opens an in-memory Java file with the given text in the test editor. */
  protected void configureJava(String text) {
    myFixture.configureByText(JavaFileType.INSTANCE, text);
  }

  /** Invokes the "show parameter info" editor action and flushes pending EDT events. */
  protected void showParameterInfo() {
    myFixture.performEditorAction(IdeActions.ACTION_EDITOR_SHOW_PARAMETER_INFO);
    UIUtil.dispatchAllInvocationEvents();
  }

  /** Asserts the currently shown hint text ({@code null} means no hint is visible). */
  protected void checkHintContents(String hintText) {
    assertEquals(hintText, myHintFixture.getCurrentHintText());
  }

  public void checkResult(String text) {
    myFixture.checkResult(text);
  }

  /**
   * Performs basic completion and selects the first lookup element whose
   * rendered item text + tail text contains {@code partOfItemText}.
   *
   * @throws AssertionError if no matching lookup element is found
   */
  public void complete(String partOfItemText) {
    LookupElement[] elements = myFixture.completeBasic();
    LookupElement element = Stream.of(elements)
      .filter(e -> {
        LookupElementPresentation p = new LookupElementPresentation();
        e.renderElement(p);
        return (p.getItemText() + p.getTailText()).contains(partOfItemText);
      })
      .findAny()
      // Fail with a descriptive message instead of a bare NoSuchElementException
      // from an unchecked Optional.get().
      .orElseThrow(() -> new AssertionError("No lookup element containing '" + partOfItemText + "'"));
    selectItem(element);
  }

  /** Blocks until pending parameter-info updates for the editor have completed. */
  private void waitForParameterInfoUpdate() throws TimeoutException {
    ParameterInfoController.waitForDelayedActions(getEditor(), 1, TimeUnit.MINUTES);
  }

  /**
   * Spins (parking 10 ms at a time, pumping EDT events) until inlay hint
   * animation finishes; fails after a 60-second deadline.
   */
  public static void waitTillAnimationCompletes(Editor editor) {
    long deadline = System.currentTimeMillis() + 60_000;
    while (ParameterHintsPresentationManager.getInstance().isAnimationInProgress(editor)) {
      if (System.currentTimeMillis() > deadline) fail("Too long waiting for animation to finish");
      LockSupport.parkNanos(10_000_000);
      UIUtil.dispatchAllInvocationEvents();
    }
  }

  private void waitForAutoPopup() throws TimeoutException {
    AutoPopupController.getInstance(getProject()).waitForDelayedActions(1, TimeUnit.MINUTES);
  }

  /** Waits for parameter-info updates, highlighting, hint animation and auto-popup in order. */
  protected void waitForAllAsyncStuff() throws TimeoutException {
    waitForParameterInfoUpdate();
    myFixture.doHighlighting();
    waitTillAnimationCompletes(getEditor());
    waitForAutoPopup();
  }
}
| apache-2.0 |
artnaseef/activemq-maven-plugin | src/test/java/org/apache/activemq/maven/XBeanFileResolverTest.java | 2053 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.maven;
import static org.junit.Assert.assertEquals;
import java.io.File;
import java.io.IOException;
import org.junit.Test;
/**
* Test for: Helper to convert relative paths to XBean description files to URL-compliant absolute paths.
*/
/**
 * Test for: Helper to convert relative paths to XBean description files to URL-compliant absolute paths.
 */
public class XBeanFileResolverTest {
  private static final String XBEAN_FILE = "xbean:file:";

  @Test
  public void urlToXBeanFileShouldBeResolvedToAbsolutePath() throws IOException {
    final XBeanFileResolver resolver = new XBeanFileResolver();
    final String relativePath = "src/main/resources/activemq.xml";
    // e.g. xbean:file:C:/dev/src/active-mq/activemq-tooling/activemq-maven-plugin/src/main/resources/activemq.xml
    final String expectedUrl = XBEAN_FILE + getCurrentDirectoryLinuxStyle() + "/" + relativePath;
    final String actualUrl = resolver.toUrlCompliantAbsolutePath(XBEAN_FILE + relativePath);
    assertEquals(expectedUrl, actualUrl);
  }

  /** Canonical path of the working directory with backslashes normalized to forward slashes. */
  private String getCurrentDirectoryLinuxStyle() throws IOException {
    return new File(".").getCanonicalPath().replace("\\", "/");
  }
}
| apache-2.0 |
weiwl/closure-compiler | src/com/google/javascript/jscomp/testing/NodeSubject.java | 1650 | /*
* Copyright 2015 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.testing;
import static com.google.common.truth.Truth.THROW_ASSERTION_ERROR;
import static org.junit.Assert.assertEquals;
import com.google.common.truth.FailureStrategy;
import com.google.common.truth.Subject;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
/**
* A Truth Subject for the Node class.
*/
/**
 * A Truth Subject for the Node class.
 */
public final class NodeSubject extends Subject<NodeSubject, Node> {
  public static NodeSubject assertNode(Node node) {
    return new NodeSubject(THROW_ASSERTION_ERROR, node);
  }

  public NodeSubject(FailureStrategy fs, Node node) {
    super(fs, node);
  }

  public void isEqualTo(Node node) {
    // checkTreeEquals yields a diff description when the trees differ;
    // no failure is reported when it returns null.
    final String diff = node.checkTreeEquals(getSubject());
    if (diff != null) {
      failWithRawMessage("%s", diff);
    }
  }

  public void hasType(int type) {
    final int actualType = getSubject().getType();
    final String message =
        "Node is of type " + Token.name(actualType) + " not of type " + Token.name(type);
    assertEquals(message, type, actualType);
  }
}
| apache-2.0 |