repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15
values |
|---|---|---|---|---|
mcapino/trajectorytools | src/main/java/tt/euclid2i/rrtstar/GraphDomain.java | 1613 | package tt.euclid2i.rrtstar;
import java.util.Collection;
import java.util.Set;
import org.jgrapht.DirectedGraph;
import tt.euclid2i.Line;
import tt.euclid2i.Point;
import tt.euclid2i.Region;
import tt.euclid2i.probleminstance.ShortestPathProblem;
import tt.euclid2i.region.Rectangle;
import tt.planner.rrtstar.util.Extension;
public class GraphDomain extends StraightLineDomain {

    /** Roadmap graph whose edges constrain the extensions produced by this domain. */
    DirectedGraph<Point, Line> graph;

    /**
     * Creates a graph-constrained RRT* domain from a shortest-path problem instance.
     *
     * @param graph        roadmap graph to follow when extending
     * @param problem      the shortest path problem providing bounds/obstacles/target
     * @param seed         random seed for sampling
     * @param tryGoalRatio fraction of samples drawn at the goal
     */
    public GraphDomain(DirectedGraph<Point, Line> graph,
            ShortestPathProblem problem, int seed, double tryGoalRatio) {
        super(problem, seed, tryGoalRatio);
        this.graph = graph;
    }

    /**
     * Creates a graph-constrained RRT* domain from explicit environment components.
     */
    public GraphDomain(Rectangle bounds, DirectedGraph<Point, Line> graph,
            Collection<Region> obstacles,
            Region target, Point targetPoint, int seed,
            double tryGoalRatio) {
        super(bounds, obstacles, target, targetPoint, seed, tryGoalRatio);
        this.graph = graph;
    }

    /**
     * Extends from {@code from} towards {@code to} by following the single outgoing
     * graph edge whose end point is closest to {@code to}.
     *
     * @param from a point that must be a vertex of the roadmap graph
     * @param to   the desired target of the extension
     * @return the extension along the best outgoing edge, flagged as exact when the
     *         edge ends precisely at {@code to}; {@code null} when {@code from} has
     *         no outgoing edges (previously this case crashed with a
     *         NullPointerException)
     */
    @Override
    public Extension<Point, Line> extendTo(
            Point from, Point to) {
        assert (graph.containsVertex(from));

        Set<Line> outEdges = graph.outgoingEdgesOf(from);
        Line bestEdge = null;
        double bestEdgeDistance = Double.POSITIVE_INFINITY;

        for (Line edge : outEdges) {
            // compute the distance once per edge (the original evaluated it twice)
            double distance = edge.getEnd().distance(to);
            if (distance < bestEdgeDistance) {
                bestEdgeDistance = distance;
                bestEdge = edge;
            }
        }

        if (bestEdge == null) {
            // dead-end vertex: no extension is possible from here
            return null;
        }

        return new Extension<Point, Line>(from, bestEdge.getEnd(), bestEdge,
                graph.getEdgeWeight(bestEdge), bestEdge.getEnd().equals(to));
    }
}
| lgpl-3.0 |
seanbright/ari4java | classes/ch/loway/oss/ari4java/generated/ari_1_8_0/ClassTranslator_impl_ari_1_8_0.java | 8750 | package ch.loway.oss.ari4java.generated.ari_1_8_0;
// ----------------------------------------------------
// THIS CLASS WAS GENERATED AUTOMATICALLY
// PLEASE DO NOT EDIT
// Generated on: Sat Jan 30 13:39:06 CET 2016
// ----------------------------------------------------
import ch.loway.oss.ari4java.ARI;
import ch.loway.oss.ari4java.generated.*;
import ch.loway.oss.ari4java.generated.ari_1_8_0.models.*;
import ch.loway.oss.ari4java.generated.ari_1_8_0.actions.*;
/**********************************************************
* This is a class translator.
*********************************************************/
public class ClassTranslator_impl_ari_1_8_0 implements ARI.ClassFactory {

    /**
     * Resolves an ARI interface class to its concrete ari_1_8_0 implementation class.
     * NOTE(review): this class is marked as generated — if the generator is re-run
     * this hand-formatted version will be overwritten.
     *
     * @param interfaceClass the ARI interface to resolve
     * @return the matching {@code *_impl_ari_1_8_0} class, or {@code null} when the
     *         interface is not known to this API version
     */
    @Override
    public Class getImplementationFor(Class interfaceClass) {
        if (interfaceClass.equals(ActionApplications.class)) return ActionApplications_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionAsterisk.class)) return ActionAsterisk_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionBridges.class)) return ActionBridges_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionChannels.class)) return ActionChannels_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionDeviceStates.class)) return ActionDeviceStates_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionEndpoints.class)) return ActionEndpoints_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionEvents.class)) return ActionEvents_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionPlaybacks.class)) return ActionPlaybacks_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionRecordings.class)) return ActionRecordings_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ActionSounds.class)) return ActionSounds_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Application.class)) return Application_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ApplicationReplaced.class)) return ApplicationReplaced_impl_ari_1_8_0.class;
        if (interfaceClass.equals(AsteriskInfo.class)) return AsteriskInfo_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Bridge.class)) return Bridge_impl_ari_1_8_0.class;
        if (interfaceClass.equals(BridgeAttendedTransfer.class)) return BridgeAttendedTransfer_impl_ari_1_8_0.class;
        if (interfaceClass.equals(BridgeBlindTransfer.class)) return BridgeBlindTransfer_impl_ari_1_8_0.class;
        if (interfaceClass.equals(BridgeCreated.class)) return BridgeCreated_impl_ari_1_8_0.class;
        if (interfaceClass.equals(BridgeDestroyed.class)) return BridgeDestroyed_impl_ari_1_8_0.class;
        if (interfaceClass.equals(BridgeMerged.class)) return BridgeMerged_impl_ari_1_8_0.class;
        if (interfaceClass.equals(BuildInfo.class)) return BuildInfo_impl_ari_1_8_0.class;
        if (interfaceClass.equals(CallerID.class)) return CallerID_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Channel.class)) return Channel_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelCallerId.class)) return ChannelCallerId_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelConnectedLine.class)) return ChannelConnectedLine_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelCreated.class)) return ChannelCreated_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelDestroyed.class)) return ChannelDestroyed_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelDialplan.class)) return ChannelDialplan_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelDtmfReceived.class)) return ChannelDtmfReceived_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelEnteredBridge.class)) return ChannelEnteredBridge_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelHangupRequest.class)) return ChannelHangupRequest_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelHold.class)) return ChannelHold_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelLeftBridge.class)) return ChannelLeftBridge_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelStateChange.class)) return ChannelStateChange_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelTalkingFinished.class)) return ChannelTalkingFinished_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelTalkingStarted.class)) return ChannelTalkingStarted_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelUnhold.class)) return ChannelUnhold_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelUserevent.class)) return ChannelUserevent_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ChannelVarset.class)) return ChannelVarset_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ConfigInfo.class)) return ConfigInfo_impl_ari_1_8_0.class;
        if (interfaceClass.equals(ConfigTuple.class)) return ConfigTuple_impl_ari_1_8_0.class;
        if (interfaceClass.equals(DeviceState.class)) return DeviceState_impl_ari_1_8_0.class;
        if (interfaceClass.equals(DeviceStateChanged.class)) return DeviceStateChanged_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Dial.class)) return Dial_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Dialed.class)) return Dialed_impl_ari_1_8_0.class;
        if (interfaceClass.equals(DialplanCEP.class)) return DialplanCEP_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Endpoint.class)) return Endpoint_impl_ari_1_8_0.class;
        if (interfaceClass.equals(EndpointStateChange.class)) return EndpointStateChange_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Event.class)) return Event_impl_ari_1_8_0.class;
        if (interfaceClass.equals(FormatLangPair.class)) return FormatLangPair_impl_ari_1_8_0.class;
        if (interfaceClass.equals(LiveRecording.class)) return LiveRecording_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Message.class)) return Message_impl_ari_1_8_0.class;
        if (interfaceClass.equals(MissingParams.class)) return MissingParams_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Module.class)) return Module_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Playback.class)) return Playback_impl_ari_1_8_0.class;
        if (interfaceClass.equals(PlaybackFinished.class)) return PlaybackFinished_impl_ari_1_8_0.class;
        if (interfaceClass.equals(PlaybackStarted.class)) return PlaybackStarted_impl_ari_1_8_0.class;
        if (interfaceClass.equals(RecordingFailed.class)) return RecordingFailed_impl_ari_1_8_0.class;
        if (interfaceClass.equals(RecordingFinished.class)) return RecordingFinished_impl_ari_1_8_0.class;
        if (interfaceClass.equals(RecordingStarted.class)) return RecordingStarted_impl_ari_1_8_0.class;
        if (interfaceClass.equals(SetId.class)) return SetId_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Sound.class)) return Sound_impl_ari_1_8_0.class;
        if (interfaceClass.equals(StasisEnd.class)) return StasisEnd_impl_ari_1_8_0.class;
        if (interfaceClass.equals(StasisStart.class)) return StasisStart_impl_ari_1_8_0.class;
        if (interfaceClass.equals(StatusInfo.class)) return StatusInfo_impl_ari_1_8_0.class;
        if (interfaceClass.equals(StoredRecording.class)) return StoredRecording_impl_ari_1_8_0.class;
        if (interfaceClass.equals(SystemInfo.class)) return SystemInfo_impl_ari_1_8_0.class;
        if (interfaceClass.equals(TextMessage.class)) return TextMessage_impl_ari_1_8_0.class;
        if (interfaceClass.equals(TextMessageReceived.class)) return TextMessageReceived_impl_ari_1_8_0.class;
        if (interfaceClass.equals(TextMessageVariable.class)) return TextMessageVariable_impl_ari_1_8_0.class;
        if (interfaceClass.equals(Variable.class)) return Variable_impl_ari_1_8_0.class;
        // unknown interface: no implementation in this API version
        return null;
    }
}
| lgpl-3.0 |
ODTBuilder/Builder-v1.0 | OpenGDS_2017/main/java/org/kabeja/dxf/parser/table/DXFLineTypeTableHandler.java | 3623 | /*******************************************************************************
* Copyright 2010 Simon Mieth
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.kabeja.dxf.parser.table;
import org.kabeja.common.LineType;
import org.kabeja.dxf.parser.DXFValue;
import org.kabeja.util.Constants;
/**
* @author <a href="mailto:simon.mieth@gmx.de">Simon Mieth</a>
*/
public class DXFLineTypeTableHandler extends AbstractTableHandler {

    /** DXF table key handled by this parser (line type table). */
    public final static String TABLE_KEY = "LTYPE";

    // DXF group codes used by LTYPE table entries
    public final static int GROUPCODE_LTYPE_NAME = 2;
    public final static int GROUPCODE_LTYPE_DESCRIPTION = 3;
    public final static int GROUPCODE_LTYPE_ALIGNMENT = 72;
    public final static int GROUPCODE_LTYPE_SEGMENT = 49;
    public final static int GROUPCODE_LTYPE_LENGTH = 40;
    public final static int GROUPCODE_LTYPE_SEGMENT_COUNT = 73;
    public final static int GROUPCODE_LTYPE_SCALE = 46;

    // Line type entry currently being assembled (reset by startParsing()).
    private LineType ltype;
    // Index of the next pattern slot to fill.
    private int segmentCount = 0;
    // Dash/dot/space pattern; allocated when group 73 (segment count) is seen.
    private double[] pattern;

    /**
     * Finalizes the current line type: attaches the collected pattern and
     * registers the entry with the document.
     */
    public void endParsing() {
        // NOTE(review): pattern stays null when no group 73 was parsed —
        // assumes LineType.setPattern tolerates null; confirm against kabeja.
        ltype.setPattern(pattern);
        doc.addLineType(ltype);
    }

    /**
     * @return the table key this handler is registered for ("LTYPE")
     */
    public String getTableType() {
        return TABLE_KEY;
    }

    /**
     * Dispatches a single DXF group to the appropriate LineType setter.
     * Unknown group codes are ignored.
     *
     * @param groupCode the DXF group code
     * @param value     the associated value
     */
    public void parseGroup(int groupCode, DXFValue value) {
        switch (groupCode) {
        case GROUPCODE_LTYPE_NAME:
            ltype.setName(value.getValue());
            break;

        case GROUPCODE_LTYPE_DESCRIPTION:
            // setDescritpion: typo is in the LineType API itself, not fixable here
            ltype.setDescritpion(value.getValue());
            break;

        case GROUPCODE_LTYPE_SEGMENT_COUNT:
            int count = value.getIntegerValue();
            pattern = new double[count];
            segmentCount = 0;
            break;

        case GROUPCODE_LTYPE_SEGMENT:
            // Guard against malformed files where a segment (49) arrives before
            // the segment count (73), or more segments than announced appear;
            // previously this threw NullPointerException / ArrayIndexOutOfBoundsException.
            if (pattern != null && segmentCount < pattern.length) {
                pattern[segmentCount] = value.getDoubleValue();
                segmentCount++;
            }
            break;

        case GROUPCODE_LTYPE_LENGTH:
            ltype.setPatternLength(value.getDoubleValue());
            break;

        case GROUPCODE_LTYPE_ALIGNMENT:
            ltype.setAlignment(value.getIntegerValue());
            break;

        case GROUPCODE_LTYPE_SCALE:
            ltype.setScale(value.getDoubleValue());
            break;

        case Constants.GROUPCODE_STANDARD_FLAGS:
            ltype.setFlags(value.getIntegerValue());
            break;

        default:
            break;
        }
    }

    /**
     * Resets the handler state for a fresh LTYPE entry.
     */
    public void startParsing() {
        ltype = new LineType();
        segmentCount = 0;
        pattern = null;
    }
}
| lgpl-3.0 |
masmangan/secret-octo-batman | src/margulis/gui/JInfoSobreDemandaPanel.java | 3184 | package margulis.gui;
import java.awt.BorderLayout;
import java.awt.Dimension;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import javax.swing.SwingConstants;
import margulis.pojo.DemandaInicial;
/**
* @author Ândrei
*/
public class JInfoSobreDemandaPanel extends JPanel {
private static final long serialVersionUID = -7585722317489961554L;
private JLabel lblTitulo;
private JLabel lblDemandaPadrao;
private JLabel lblInfluenciaPreco;
private JLabel lblInfluenciaMarketing;
private JTextField demandaPadrao;
private JTextField influenciaPreco;
private JTextField influenciaMarketing;
public JInfoSobreDemandaPanel() {
setLayout(new BorderLayout());
lblTitulo = new JLabel("Informações Sobre a Demanda");
JPanel painel = new JPanel();
painel.setLayout(null);
lblDemandaPadrao = new JLabel("Demanda Padrão");
lblDemandaPadrao.setBounds(12, 29, 214, 33);
painel.add(lblDemandaPadrao);
demandaPadrao = new JTextField();
demandaPadrao.setHorizontalAlignment(SwingConstants.RIGHT);
demandaPadrao.setBounds(225, 29, 218, 33);
painel.add(demandaPadrao);
lblInfluenciaPreco = new JLabel("Influência do Preço");
lblInfluenciaPreco.setBounds(12, 91, 214, 33);
painel.add(lblInfluenciaPreco);
influenciaPreco = new JTextField();
influenciaPreco.setHorizontalAlignment(SwingConstants.RIGHT);
influenciaPreco.setBounds(225, 91, 218, 33);
painel.add(influenciaPreco);
lblInfluenciaMarketing = new JLabel("Influência do Marketing");
lblInfluenciaMarketing.setBounds(17, 154, 209, 33);
painel.add(lblInfluenciaMarketing);
add(BorderLayout.NORTH, lblTitulo);
add(BorderLayout.CENTER, painel);
influenciaMarketing = new JTextField();
influenciaMarketing.setHorizontalAlignment(SwingConstants.RIGHT);
influenciaMarketing.setBounds(225, 154, 218, 33);
painel.add(influenciaMarketing);
}
/**
* Retorna o objeto <b>DemandaInicial</b> atribuindo as informa��es atuais
* no painel.
*
* @return Objeto <b>DemandaInicial</b>
*/
public DemandaInicial getInfoSobreDemanda() {
return new DemandaInicial(Double.parseDouble(this.demandaPadrao
.getText()),
Double.parseDouble(this.influenciaPreco.getText()),
Double.parseDouble(this.lblInfluenciaMarketing.getText()));
}
/**
* Reseta os atributos da janela.
*/
public void clear() {
demandaPadrao.setText("");
influenciaPreco.setText("");
influenciaMarketing.setText("");
}
/**
* Create the GUI and show it. For thread safety, this method should be
* invoked from the event-dispatching thread.
*/
private static void createAndShowGUI() {
JFrame frame = new JFrame("Teste");
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.getContentPane().add(new JInfoSobreDemandaPanel());
frame.setPreferredSize(new Dimension(800, 200));
frame.pack();
frame.setVisible(true);
}
public static void main(String[] args) {
// Schedule a job for the event-dispatching thread:
// creating and showing this application's GUI.
javax.swing.SwingUtilities.invokeLater(new Runnable() {
public void run() {
createAndShowGUI();
}
});
}
} | lgpl-3.0 |
SergiyKolesnikov/fuji | benchmark_typechecker/subjectSystems/Violet/RecentFile_stubfix/com/horstmann/violet/framework/EditorFrame.java | 1080 | package com.horstmann.violet.framework;
import de.uni_passau.spl.bytecodecomposer.stubs.Stub;
import java.lang.String;
import java.lang.Object;
import java.awt.event.ActionEvent;
import javax.swing.AbstractButton;
import com.horstmann.violet.framework.ResourceFactory;
import javax.swing.JOptionPane;
import com.horstmann.violet.framework.Open;
import javax.swing.JMenuItem;
import java.io.File;
import java.util.ArrayList;
import java.awt.event.ActionListener;
import java.lang.Class;
import javax.swing.JMenu;
import java.io.IOException;
/**
 * Type-check stub for {@code com.horstmann.violet.framework.EditorFrame}.
 * Every member is annotated {@code @Stub} and carries an empty body: this class
 * exists only so that feature modules referencing EditorFrame compile during
 * bytecode composition — it is never executed.
 * NOTE(review): generated for the fuji/Violet "RecentFile_stubfix" benchmark;
 * do not add behavior here.
 */
public class EditorFrame extends javax.swing.JFrame {
// Stub method: opens the diagram file with the given name in the real class.
@Stub
public void open(java.lang.String name) {
return ;
}
// Stub field: desktop pane hosting the internal diagram frames.
@Stub
public javax.swing.JDesktopPane desktop;
// Stub field: Open helper object (feature-introduced in the real class).
@Stub
public com.horstmann.violet.framework.Open open;
// Stub method: placeholder for the feature-composition "original" call.
@Stub
public void original() {
return ;
}
// Stub field: list of recently opened file names.
@Stub
public java.util.ArrayList recentFiles;
// Stub field: the "File" menu of the frame.
@Stub
public javax.swing.JMenu fileMenu;
// Stub field: factory for localized menu items and buttons.
@Stub
public com.horstmann.violet.framework.ResourceFactory factory;
// Stub field: input stream used by the real open logic.
@Stub
public java.io.InputStream in;
}
| lgpl-3.0 |
arnonmoscona/Doric | src/main/java/com/moscona/dataSpace/impl/query/InQuery.java | 17840 | /*
* Copyright (c) 2015. Arnon Moscona
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.moscona.dataSpace.impl.query;
import com.moscona.dataSpace.*;
import com.moscona.dataSpace.exceptions.DataSpaceException;
import com.moscona.dataSpace.impl.AbstractVector;
import com.moscona.dataSpace.impl.query.support.AbstractQueryTerm;
import com.moscona.dataSpace.impl.query.support.LongSetParameter;
import com.moscona.dataSpace.impl.query.support.QueryParameterList;
import com.moscona.dataSpace.impl.query.support.StringSetParameter;
import com.moscona.dataSpace.impl.segment.*;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Set;
/**
* Created: 12/22/10 3:52 PM
* By: Arnon Moscona
* A query term that test for belonging to a specified set of values (discrete types only: integral numerics and strings)
*/
public class InQuery<T extends IScalar> extends AbstractQueryTerm<T> {
    // Name of the single query parameter: the set of values to match against.
    public static final String VALUES = "values";

    /*
    Performance comment:
    I benchmarked three different implementation of set on String arrays:
    - java.util.HashSet
    - net.ontopia.utils.CompactHashSet
    - com.google.common.collect.ImmutableSet
    The test was a repeated search over a vector of a million symbols - repeating out of 500 distinct symbols.
    I used actual symbols from a day buffer to make the simulation realistic for stock analysis.
    The java.util.HashSet won hands down both for a set of 5 and for a set of 50 (tested on Java 6 64bit JDK 1.6.0_18)
    On average the Google immutable set was 26% slower than the JDK implementation and the ontopia implementation
    was 91% slower.
    So the conclusion is that the google implementation has a semantic advantage but a significant performance penalty.
    The CompactHashSet is totally out of the question. There is a high chance that the results are seriously skewed by
    the data distribution, but I only care about my use case here.
    In conclusion - I'm keeping the semantically lame defensive copy to maintain the performance advantage.
    */

    // only one of the following will be used in actuality
    private Set<Integer> stringSetValue=null; // IMPORTANT strings are coded as integers and are converted only when needed
    private Set<Long> longSetValue=null;
    // Data space providing the string<->integer coding dictionary; set in setParameters().
    private DataSpace dataSpace = null;

    public InQuery() {
        // do nothing?
    }

    /**
     * Called during query parameter list construction to have the subclass populate it with its parameters
     *
     * @param params   the (empty) parameter list to populate
     * @param baseType the base type of the vector this query will run against;
     *                 only discrete types (integral numerics and strings) are accepted
     */
    @Override
    protected void populateEmptyParameterList(QueryParameterList params, IVector.BaseType baseType) throws DataSpaceException {
        switch (baseType) {
            case LONG:
            case INTEGER:
            case SHORT:
            case BYTE:
                makeLongParameters(params);
                break;
            case DOUBLE:
            case FLOAT:
                // continuous types cannot be matched for set membership
                throwIncompatibleException(baseType);
                break;
            case STRING:
                makeStringParameters(params);
                break;
            case BOOLEAN:
                throwIncompatibleException(baseType);
                break;
            default:
                throwIncompatibleException(baseType);
        }
    }

    // Registers the single "values" parameter as a set of longs.
    private void makeLongParameters(QueryParameterList params) throws DataSpaceException {
        params.add(new LongSetParameter(null, VALUES, "the list of values to match against"));
    }

    // Registers the single "values" parameter as a set of strings.
    private void makeStringParameters(QueryParameterList params) throws DataSpaceException {
        params.add(new StringSetParameter(null, VALUES, "the list of values to match against"));
    }

    /**
     * Sets the parameters for this term before starting any evaluation. Do not perform any evaluation except parsing
     * the parameters, storing them (you won't get them again), and validating the. If you throw an exception here
     * then the query evaluation will be aborted.
     *
     * @param params the parameter list (must contain the "values" set parameter)
     * @param vector the vector the query will be evaluated against
     * @throws com.moscona.dataSpace.exceptions.DataSpaceException
     *         if the parameter type is not a set or is incompatible with the vector base type
     */
    @SuppressWarnings({"unchecked"})
    @Override
    protected void setParameters(IQueryParameterList params, IVector vector) throws DataSpaceException {
        validateParamNames(params, vector.getBaseType());
        this.params = params;
        this.baseType = vector.getBaseType();
        IQueryParameter param = params.get(VALUES);
        IQueryParameter.ParameterType paramBaseType = param.getType();
        // may convert the parameter set between string and long form to match the vector
        validateVectorCompatibility(paramBaseType, vector);
        dataSpace = vector.getDataSpace();
        switch (paramBaseType) {
            case STRING_SET:
                stringSetValue = makeStringSet(param.getValueSet());
                break;
            case LONG_SET:
                longSetValue = makeLongSet(param.getValueSet());
                break;
            case BOOLEAN:
            case LONG:
            case DOUBLE:
            case STRING:
                throw new DataSpaceException("Incompatible parameter type: "+paramBaseType+ " this only takes sets, no scalars");
        }
    }

    // Converts the raw parameter set to the internal representation: each string
    // is encoded to its integer code via the data space dictionary.
    private Set<Integer> makeStringSet(Set valueSet) {
        HashSet<Integer> retval = new HashSet<Integer>();
        for (Object o: valueSet) {
            retval.add(dataSpace.getCode(o.toString()));
        }
        return retval;
    }

    // Converts the raw parameter set to a set of longs, accepting either Number
    // instances or string representations of longs.
    private Set<Long> makeLongSet(Set valueSet) {
        HashSet<Long> retval = new HashSet<Long>();
        for (Object o: valueSet) {
            if (Number.class.isAssignableFrom(o.getClass())) {
                retval.add(((Number)o).longValue());
            }
            else {
                retval.add(Long.parseLong(o.toString()));
            }
        }
        return retval;
    }

    // Verifies that the parameter set type can be used against the vector's base type,
    // converting the set representation (string<->long) where a conversion exists.
    private void validateVectorCompatibility(IQueryParameter.ParameterType paramBaseType, IVector vector) throws DataSpaceException {
        switch (baseType) {
            case LONG:
            case INTEGER:
            case SHORT:
            case BYTE:
                switch (paramBaseType) {
                    case STRING_SET:
                        convertStringsToLongs();
                        break;
                    case LONG_SET:
                        break;
                }
                break;
            case STRING:
                switch (paramBaseType) {
                    case STRING_SET:
                        break;
                    case LONG_SET:
                        convertLongsToStrings();
                        break;
                }
                break;
            case DOUBLE:
            case FLOAT:
            case BOOLEAN:
            default:
                throwIncompatibleException(baseType);
        }
    }

    // Re-encodes a long value set as a string-code set (for string vectors).
    // NOTE(review): requires dataSpace to be non-null — assumes callers only
    // reach this after a data space is available; confirm call ordering.
    private void convertLongsToStrings() {
        stringSetValue = new HashSet<Integer>();
        for (long value: longSetValue) {
            stringSetValue.add(dataSpace.getCode(Long.toString(value)));
        }
    }

    // Parses a string-code value set into a long set (for integral vectors);
    // tolerates surrounding whitespace and thousands separators (commas).
    private void convertStringsToLongs() throws DataSpaceException {
        Integer currentString = null;
        try {
            longSetValue = new HashSet<Long>();
            for (Integer str: stringSetValue) {
                currentString = str;
                longSetValue.add(Long.parseLong(dataSpace.decodeToString(str).trim().replaceAll(",", "")));
            }
        }
        catch (NumberFormatException e) {
            throw new DataSpaceException("Number format exception. Could not parse "+currentString);
        }
    }

    /**
     * The subclass is required to tell us whether it can directly access the backing array for "bulk" evaluation or
     * that we need to iterate on its behalf. Bulk access is far faster than iteration and its preferable. If you choose
     * bulk access, you must ensure that you never modify the segment data.
     * If you choose bulk you do not have to implement the match(element) method.
     * If you do not choose bulk you do not need to implement the bulkMatch method
     *
     * @return always true — this term reads backing arrays directly
     */
    @Override
    protected boolean canProcessInBulk() {
        return true;
    }

    /**
     * An opportunity for the term to be evaluated based on the segment stats alone without looking at any of the
     * concrete data
     *
     * @param stats segment statistics (min/max per segment)
     * @param segmentNumber index of the segment being examined
     * @param queryState shared evaluation state
     * @return true or false if there is a uniform result to the whole segment, null if unable to determine
     */
    @Override
    protected Boolean quickMatch(ISegmentStats stats, int segmentNumber, boolean useResolution, double resolution, IQueryState queryState) throws DataSpaceException {
        switch (baseType) {
            case LONG:
            case INTEGER:
            case SHORT:
            case BYTE:
                return longQuickMatch(stats, segmentNumber, queryState);
            case STRING:
                return stringQuickMatch(stats, segmentNumber, queryState);
            case DOUBLE:
            case FLOAT:
            case BOOLEAN:
            default:
                // unsupported types never resolve via quick match
                return null;
        }
    }

    // Quick segment-level evaluation for string vectors: each candidate is checked
    // against the segment's [min,max] string range via quickEval. Returns true only
    // when every candidate is accepted for the whole segment, false only when every
    // candidate is rejected, and null (undetermined) otherwise.
    private Boolean stringQuickMatch(ISegmentStats stats, int segmentNumber, IQueryState queryState) {
        String min = ((StringSegmentStats)stats).getMin();
        String max = ((StringSegmentStats)stats).getMax();
        int trues = 0; // counts for how many values we accepted everything
        int falses = 0; // counts for how many values we rejected everything
        for (Integer candidate: stringSetValue) {
            Boolean result = quickEval(dataSpace.decodeToString(candidate), min, max, true, true);
            if (result==null) {
                return null; // not sure about one => not sure about all
            }
            if(result) {
                trues++;
            }
            else {
                falses++;
            }
        }
        if (trues>0 && falses==0) {
            return true;
        }
        if (trues==0 && falses>0) {
            return false;
        }
        // mixed results (or an empty candidate set) => cannot decide for the whole segment
        return null;
    }

    // Quick segment-level evaluation for integral vectors; same accept/reject
    // accounting as stringQuickMatch but over the numeric [min,max] range.
    private Boolean longQuickMatch(ISegmentStats stats, int segmentNumber, IQueryState queryState) throws DataSpaceException {
        long min = ((LongSegmentStats)stats).getMin();
        long max = ((LongSegmentStats)stats).getMax();
        if (min > max) {
            throw new DataSpaceException("Invalid segment stats min>max: "+min+">"+max);
        }
        int trues = 0; // counts for how many values we accepted everything
        int falses = 0; // counts for how many values we rejected everything
        for (long candidate: longSetValue) {
            // candidate passed as both bounds: tests the point range [candidate,candidate]
            Boolean result = quickEval(candidate, candidate, min, max, true, true);
            if (result==null) {
                return null; // not sure about one => not sure about all
            }
            if(result) {
                trues++;
            }
            else {
                falses++;
            }
        }
        if (trues>0 && falses==0) {
            return true;
        }
        if (trues==0 && falses>0) {
            return false;
        }
        return null;
    }

    /**
     * Evaluates one data element at a time, returning true if it passed the match and false otherwise. This is much
     * less efficient than bulk matching, but is easier to implement and safer (immutability is guaranteed)
     *
     * @param element the scalar to test for set membership
     * @param queryState shared evaluation state
     * @return true when the element's value belongs to the parameter set
     */
    @SuppressWarnings({"FloatingPointEquality"}) // by default we'll be using resolution based comparison
    @Override
    protected boolean match(IScalar element, boolean useResolution, double resolution, IQueryState queryState) throws DataSpaceException {
        switch (baseType) {
            case LONG:
            case INTEGER:
            case SHORT:
            case BYTE:
                return longSetValue.contains(element.getLongValue());
            case STRING:
                // compare via the integer string codes, never the strings themselves
                return stringSetValue.contains(dataSpace.getCode(element.toString()));
            case DOUBLE:
            case FLOAT:
            case BOOLEAN:
            default:
                return false;
        }
    }

    /**
     * Evaluates the segment as a whole using direct access to its backing array
     * This is where the real efficiency (and ugliness kicks in - direct access to backing arrays as primitive types
     * this is geared for high speed queries on very large segments (1 million long is considered good).
     * 6 copies of the same method differing only in the primitive type...
     * @param segmentInfo the segment to scan
     * @param progressiveResult bit map accumulating one boolean per element
     * @param queryState shared evaluation state
     * @throws com.moscona.dataSpace.exceptions.DataSpaceException
     *         when the backing array cannot be accessed
     */
    @Override
    protected void bulkMatch(AbstractVector.SegmentInfo segmentInfo, IBitMap progressiveResult, boolean useResolution, double resolution, IQueryState queryState) throws DataSpaceException {
        switch (baseType) {
            case LONG:
                bulkMatchLong(segmentInfo, progressiveResult);
                return;
            case INTEGER:
                bulkMatchInteger(segmentInfo, progressiveResult);
                return;
            case SHORT:
                bulkMatchShort(segmentInfo, progressiveResult);
                return;
            case BYTE:
                bulkMatchByte(segmentInfo, progressiveResult);
                return;
            case STRING:
                bulkMatchString(segmentInfo, progressiveResult);
                return;
            case DOUBLE:
            case FLOAT:
            case BOOLEAN:
            default:
                // unsupported base types match nothing
                applyToAll(false,segmentInfo,progressiveResult,queryState);
        }
    }

    // Bulk scan over a string segment: the backing array holds integer string
    // codes, so membership is tested directly against the coded set.
    private void bulkMatchString(AbstractVector.SegmentInfo segmentInfo, IBitMap progressiveResult) throws DataSpaceException {
        try {
            int[] values = ((StringSegmentBackingArray) ((StringSegment) segmentInfo.getSegment()).getBackingArray()).data;
            for (int value : values) {
                progressiveResult.add(stringSetValue.contains(value));
            }
        }
        catch (Exception e) {
            throw new DataSpaceException("Exception while bulk matching segment (string): "+e,e);
        }
    }

    // Bulk scan over a byte segment (values widened to long for the set lookup).
    private void bulkMatchByte(AbstractVector.SegmentInfo segmentInfo, IBitMap progressiveResult) throws DataSpaceException {
        try {
            byte[] values = ((ByteSegmentBackingArray) ((ByteSegment) segmentInfo.getSegment()).getBackingArray()).data;
            for (byte value : values) {
                progressiveResult.add(longSetValue.contains((long)value));
            }
        }
        catch (Exception e) {
            throw new DataSpaceException("Exception while bulk matching segment (byte): "+e,e);
        }
    }

    // Bulk scan over a short segment (values widened to long for the set lookup).
    private void bulkMatchShort(AbstractVector.SegmentInfo segmentInfo, IBitMap progressiveResult) throws DataSpaceException {
        try {
            short[] values = ((ShortSegmentBackingArray) ((ShortSegment) segmentInfo.getSegment()).getBackingArray()).data;
            for (short value : values) {
                progressiveResult.add(longSetValue.contains((long)value));
            }
        }
        catch (Exception e) {
            throw new DataSpaceException("Exception while bulk matching segment (short): "+e,e);
        }
    }

    // Bulk scan over an int segment (values widened to long for the set lookup).
    private void bulkMatchInteger(AbstractVector.SegmentInfo segmentInfo, IBitMap progressiveResult) throws DataSpaceException {
        try {
            int[] values = ((IntegerSegmentBackingArray) ((IntegerSegment) segmentInfo.getSegment()).getBackingArray()).data;
            for (int value : values) {
                progressiveResult.add(longSetValue.contains((long)value));
            }
        }
        catch (Exception e) {
            throw new DataSpaceException("Exception while bulk matching segment (int): "+e,e);
        }
    }

    // Bulk scan over a long segment.
    private void bulkMatchLong(AbstractVector.SegmentInfo segmentInfo, IBitMap progressiveResult) throws DataSpaceException {
        try {
            long[] values = ((LongSegmentBackingArray) ((LongSegment) segmentInfo.getSegment()).getBackingArray()).data;
            for (long value : values) {
                progressiveResult.add(longSetValue.contains(value));
            }
        }
        catch (Exception e) {
            throw new DataSpaceException("Exception while bulk matching segment (long): "+e,e);
        }
    }

    // Human-readable rendering of the term, e.g. in [1, 2, 3] or in ['A', 'B'];
    // string codes are decoded back to their original text for display.
    @Override
    public String toString(IQueryParameterList params) {
        switch (baseType) {
            case LONG:
            case INTEGER:
            case SHORT:
            case BYTE:
                return "in ["+ StringUtils.join(longSetValue,", ")+"]";
            case STRING:
                ArrayList<String> decoded = new ArrayList<String>();
                for (int code: stringSetValue) {
                    decoded.add(dataSpace.decodeToString(code));
                }
                return "in ['"+ StringUtils.join(decoded,"', '")+"']";
            case DOUBLE:
            case FLOAT:
            case BOOLEAN:
            default:
                return "Unsupported type for in (...) query";
        }
    }
}
| lgpl-3.0 |
Biblivre/Biblivre-5 | src/java/biblivre/administration/reports/dto/HoldingCreationByDateReportDto.java | 2496 | /*******************************************************************************
* Este arquivo é parte do Biblivre5.
*
* Biblivre5 é um software livre; você pode redistribuí-lo e/ou
* modificá-lo dentro dos termos da Licença Pública Geral GNU como
* publicada pela Fundação do Software Livre (FSF); na versão 3 da
* Licença, ou (caso queira) qualquer versão posterior.
*
* Este programa é distribuído na esperança de que possa ser útil,
* mas SEM NENHUMA GARANTIA; nem mesmo a garantia implícita de
* MERCANTIBILIDADE OU ADEQUAÇÃO PARA UM FIM PARTICULAR. Veja a
* Licença Pública Geral GNU para maiores detalhes.
*
* Você deve ter recebido uma cópia da Licença Pública Geral GNU junto
* com este programa, Se não, veja em <http://www.gnu.org/licenses/>.
*
* @author Alberto Wagner <alberto@biblivre.org.br>
* @author Danniel Willian <danniel@biblivre.org.br>
******************************************************************************/
package biblivre.administration.reports.dto;
import java.util.List;
/**
 * Data transfer object for the "holding creation by date" administration
 * report: carries the reporting period, the biblio/holding totals and the
 * tabular rows consumed by the report writer.
 */
public class HoldingCreationByDateReportDto extends BaseReportDto {

    // Reporting period boundaries, already formatted for display.
    private String initialDate;
    private String finalDate;

    // Totals, one per counter shown in the report.
    private String totalBiblioMain;
    private String totalBiblioWork;
    private String totalHoldingMain;
    private String totalHoldingWork;

    // One String[] per report row.
    private List<String[]> data;

    /** @return the start of the reporting period */
    public String getInitialDate() {
        return initialDate;
    }

    /** @param initialDate the start of the reporting period */
    public void setInitialDate(String initialDate) {
        this.initialDate = initialDate;
    }

    /** @return the end of the reporting period */
    public String getFinalDate() {
        return finalDate;
    }

    /** @param finalDate the end of the reporting period */
    public void setFinalDate(String finalDate) {
        this.finalDate = finalDate;
    }

    /** @return the "biblio main" total counter */
    public String getTotalBiblioMain() {
        return totalBiblioMain;
    }

    /** @param totalBiblioMain the "biblio main" total counter */
    public void setTotalBiblioMain(String totalBiblioMain) {
        this.totalBiblioMain = totalBiblioMain;
    }

    /** @return the "biblio work" total counter */
    public String getTotalBiblioWork() {
        return totalBiblioWork;
    }

    /** @param totalBiblioWork the "biblio work" total counter */
    public void setTotalBiblioWork(String totalBiblioWork) {
        this.totalBiblioWork = totalBiblioWork;
    }

    /** @return the "holding main" total counter */
    public String getTotalHoldingMain() {
        return totalHoldingMain;
    }

    /** @param totalHoldingMain the "holding main" total counter */
    public void setTotalHoldingMain(String totalHoldingMain) {
        this.totalHoldingMain = totalHoldingMain;
    }

    /** @return the "holding work" total counter */
    public String getTotalHoldingWork() {
        return totalHoldingWork;
    }

    /** @param totalHoldingWork the "holding work" total counter */
    public void setTotalHoldingWork(String totalHoldingWork) {
        this.totalHoldingWork = totalHoldingWork;
    }

    /** @return the tabular report rows, one String[] per line */
    public List<String[]> getData() {
        return data;
    }

    /** @param data the tabular report rows, one String[] per line */
    public void setData(List<String[]> data) {
        this.data = data;
    }
}
| lgpl-3.0 |
jmecosta/sonar | sonar-batch/src/main/java/org/sonar/batch/ProfileLoader.java | 1099 | /*
* Sonar, open source software quality management tool.
* Copyright (C) 2008-2012 SonarSource
* mailto:contact AT sonarsource DOT com
*
* Sonar is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* Sonar is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with Sonar; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02
*/
package org.sonar.batch;
import org.sonar.api.profiles.RulesProfile;
import org.sonar.api.resources.Project;
/**
 * Batch component responsible for fetching the quality profile that
 * applies to the project under analysis.
 */
public interface ProfileLoader {

    /**
     * Loads the quality profile for the specified project.
     *
     * @param project the project being analyzed
     * @return the rules profile to apply during analysis
     */
    RulesProfile load(Project project);
}
| lgpl-3.0 |
loftuxab/community-edition-old | projects/repository/source/java/org/alfresco/repo/avm/DeletedNode.java | 1085 | /*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>. */
package org.alfresco.repo.avm;
/**
 * Represents an AVM node that has been deleted: a placeholder left in
 * place of the original node.
 *
 * @author britt
 */
public interface DeletedNode extends AVMNode
{
    /**
     * Gets the type of node that this is a deleted placeholder for.
     *
     * @return the type of the node that was deleted
     */
    public int getDeletedType();
}
| lgpl-3.0 |
cismet/cids-custom-wrrl-db-mv | src/main/java/de/cismet/cids/custom/actions/wrrl_db_mv/VerbreitungsraumRouteToolbarAction.java | 1257 | /***************************************************
*
* cismet GmbH, Saarbruecken, Germany
*
* ... and it just works.
*
****************************************************/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package de.cismet.cids.custom.actions.wrrl_db_mv;
import org.openide.util.lookup.ServiceProvider;
import de.cismet.cids.custom.wrrl_db_mv.commons.WRRLUtil;
import de.cismet.cids.navigator.utils.AbstractNewObjectToolbarAction;
import de.cismet.cids.navigator.utils.CidsClientToolbarItem;
/**
* DOCUMENT ME!
*
* @author therter
* @version $Revision$, $Date$
*/
/**
 * Toolbar action that creates a new "verbreitungsraum" bean in the
 * WRRL domain. Registered as a toolbar item via the service-provider
 * mechanism.
 */
@ServiceProvider(service = CidsClientToolbarItem.class)
public class VerbreitungsraumRouteToolbarAction extends AbstractNewObjectToolbarAction {

    //~ Static fields/initializers ---------------------------------------------

    /** Sort key used to position this item in the toolbar. */
    private static final String SORTER = "X";
    /** Meta-class table backing the new object. */
    private static final String TABLE = "verbreitungsraum";
    /** Tooltip shown for the toolbar button (German UI text). */
    private static final String TOOLTIP = "neuen Verbreitungsraum anlegen";

    //~ Methods ----------------------------------------------------------------

    @Override
    public String getSorterString() {
        return SORTER;
    }

    @Override
    public String getDomain() {
        return WRRLUtil.DOMAIN_NAME;
    }

    @Override
    public String getTableName() {
        return TABLE;
    }

    @Override
    public String getTooltipString() {
        return TOOLTIP;
    }
}
| lgpl-3.0 |
delphiprogramming/gisgraphy | src/main/java/com/gisgraphy/fulltext/Constants.java | 2279 | /*******************************************************************************
* Gisgraphy Project
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
*
* Copyright 2008 Gisgraphy project
*
* David Masclet <davidmasclet@gisgraphy.com>
******************************************************************************/
package com.gisgraphy.fulltext;
import com.gisgraphy.domain.geoloc.entity.Adm;
import com.gisgraphy.domain.geoloc.entity.City;
import com.gisgraphy.domain.geoloc.entity.CitySubdivision;
import com.gisgraphy.domain.geoloc.entity.Street;
public class Constants {

    /**
     * Convenience placetype array containing only {@link City}.
     */
    public final static Class[] ONLY_CITY_PLACETYPE = new Class[]{City.class};

    /**
     * Convenience placetype array containing only {@link Adm}.
     */
    public final static Class[] ONLY_ADM_PLACETYPE = new Class[]{Adm.class};

    /**
     * Convenience placetype array for {@link City} and {@link CitySubdivision}.
     */
    public final static Class[] CITY_AND_CITYSUBDIVISION_PLACETYPE = new Class[] {City.class,CitySubdivision.class};

    /**
     * Convenience placetype array for address searches:
     * {@link City}, {@link CitySubdivision}, {@link Street} and {@link Adm}.
     */
    public final static Class[] ADDRESSES_PLACETYPE = new Class[] {City.class,CitySubdivision.class,Street.class,Adm.class};

    /**
     * Convenience placetype array containing only {@link CitySubdivision}.
     */
    public final static Class[] ONLY_CITYSUBDIVISION_PLACETYPE = new Class[] {CitySubdivision.class};

    /**
     * Convenience placetype array containing only {@link Street}.
     */
    public final static Class[] STREET_PLACETYPE = new Class[] {Street.class};
}
| lgpl-3.0 |
patrickwestphal/locality-module-extractor | src/uk/ac/manchester/syntactic_locality/ExtractModules4OntologyEntities2.java | 18009 | package uk.ac.manchester.syntactic_locality;
import java.io.File;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.io.RDFXMLOntologyFormat;
import org.semanticweb.owlapi.model.IRI;
import org.semanticweb.owlapi.model.OWLAxiom;
import org.semanticweb.owlapi.model.OWLEntity;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyCreationException;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import uk.ac.manchester.cs.owl.owlapi.OWLClassImpl;
import com.sun.istack.internal.logging.Logger;
/**
 * One-off utility: merges a fixed set of locally stored OWL files into a
 * single ontology, extracts a locality-based module for each entity in a
 * hard-coded list of MP (Mammalian Phenotype) classes, accumulates the
 * module axioms, and writes a result module to disk.
 *
 * NOTE(review): input/output paths, file names and entity IRIs are all
 * hard coded below — this is configuration-as-code, not a reusable API.
 */
public class ExtractModules4OntologyEntities2 {

    // Manager that owns the merged source ontology (and is used for saving).
    private OWLOntologyManager externalOntologyManager;

    // NOTE(review): logger category is OWLOntologyManager.class rather than
    // this class, and com.sun.istack.internal.logging.Logger is a JDK-internal
    // API — consider java.util.logging or SLF4J instead.
    private static Logger logger = Logger.getLogger(OWLOntologyManager.class);

    // The single merged ontology that modules are extracted from.
    private OWLOntology ontoToModularize;

    // Don't forget the trailing slashes!!
    private String ontFilesPathPrefix = "/home/patrick/projects/2014/smallis/data/wo_imports/";
    private String resultModuleFilePath = "/tmp/module2.owl";

    // Ontology files (relative to ontFilesPathPrefix) merged into ontoToModularize.
    private List<String> fileNames = new ArrayList<String>(Arrays.asList(
            "basic.owl", "chebi_import.owl", "cl-basic.owl",
            "cl-bridge-to-fbbt.owl", "cl-bridge-to-fma.owl", "cl-bridge-to-ma.owl",
            "cl-bridge-to-wbbt.owl", "cl-bridge-to-zfa.owl", "cl_import.owl",
            "ctd_omim_mesh_bridge.owl", "doid_import.owl", "dpo-importer.owl",
            "dpo.owl", "extra.owl", "fbbt_import.owl", "fbbt_phenotype.owl",
            "fma_import.owl", "go-plus.owl", "go_extensions__chebi_import.owl",
            "go_extensions__pato_import.owl", "go_extensions__pr_import.owl",
            "go_extensions__ro_import.owl", "go_extensions__uberon_import.owl",
            "go_import.owl", "go_phenotype.owl", "hp-importer.owl", "hp.owl",
            "hsapdv_import.owl", "human-genes.owl", "mammal.owl", "merged.owl",
            "metazoa.owl", "monarch.owl", "mp-edit.owl", "mp-importer.owl", "mp.owl",
            "mp_hp-align-equiv.owl", "mpath_import.owl", "mpath_phenotype.owl",
            "nbo_import.owl", "nbo_phenotype.owl", "ncbitaxon_import.owl",
            "pato.owl", "pato_import.owl", "po_import.owl", "pr_import.owl",
            "ro_extra.owl", "ro_import.owl", "ro_pending.owl", "so.owl",
            "so_import.owl", "uberon-bridge-to-fbbt.owl", "uberon-bridge-to-fma.owl",
            "uberon-bridge-to-ma.owl", "uberon-bridge-to-nifstd.owl",
            "uberon-bridge-to-wbbt.owl", "uberon-bridge-to-zfa.owl",
            "uberon_import.owl", "uberon_phenotype.owl", "vertebrate-curated.owl",
            "vertebrate.owl", "wbbt_import.owl",
            "wbphenotype-equivalence-axioms-subq-ubr.owl", "wbphenotype-importer.owl",
            "wbphenotype.owl", "x-disjoint.owl", "zfa.owl", "zp-importer.owl", "zp.owl"
            ));

    // Prefix for the per-entity module IRIs.
    private static final String defaultModuleIRI = "http://dl-learner.org/ontologies/module_";

    // Module of the most recently processed entity (see constructor note).
    private OWLOntology module;
    private IRI physicalModuleIRI;
    private IRI moduleIRI;
    private ModuleExtractor extractor;

    // classes from MGI_GenePheno.rpt/Normal_MPannot_V2.csv
    private List<OWLEntity> entities = new ArrayList<OWLEntity>(Arrays.asList(
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000186"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000188"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000208"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000218"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000351"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000358"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000623"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000688"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000709"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000714"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000753"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000755"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0000921"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001186"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001260"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001263"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001513"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001552"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001559"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001577"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001625"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001805"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001844"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001845"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001846"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001861"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001869"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001870"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0001873"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002006"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002023"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002078"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002083"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002145"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002169"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002408"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002432"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002444"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002451"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002651"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002727"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002743"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002833"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002869"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002870"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002874"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0002875"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003009"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003059"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003077"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003179"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003339"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003341"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003504"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003562"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003631"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003632"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003725"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0003726"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0004031"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0004392"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0004801"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0004803"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0004804"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0004974"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005010"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005013"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005015"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005018"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005042"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005092"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005179"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005215"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005293"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005331"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005367"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005369"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005370"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005371"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005375"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005376"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005377"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005378"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005379"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005380"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005381"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005382"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005384"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005385"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005386"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005387"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005388"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005389"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005390"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005391"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005394"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005397"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005463"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005491"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005515"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005566"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005580"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0005658"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0006082"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0006413"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008074"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008075"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008078"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008102"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008247"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008566"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008567"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008699"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008721"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008873"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008874"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0008880"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0009168"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0009171"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0009176"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0010378"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0010768"))),
            new OWLClassImpl(IRI.create(new URI("http://purl.obolibrary.org/obo/MP_0010771")))
            ));

    /**
     * Runs the whole pipeline: loads and merges the source ontologies,
     * builds the extractor, extracts one module per entity, and saves a
     * result file.
     *
     * NOTE(review): every per-entity module's axioms are merged into the
     * local ontology 'modules', but saveModuleToPhysicalIRI() writes the
     * 'module' field — i.e. only the module of the LAST entity in the
     * list. Confirm whether the merged 'modules' ontology was meant to be
     * saved instead.
     *
     * @throws URISyntaxException      never expected for the hard-coded IRIs
     * @throws OWLOntologyCreationException if any ontology cannot be created/loaded
     */
    public ExtractModules4OntologyEntities2() throws URISyntaxException, OWLOntologyCreationException{
        logger.info("reading ontology files...");
        loadExternalOntology();
        logger.info("fnished reading ontology files");
        logger.info("initializing extractor...");
        initExtractor();
        logger.info("finished initializing extractor");
        logger.info("starting extraction...");
        OWLOntologyManager man = OWLManager.createOWLOntologyManager();
        OWLOntology modules = man.createOntology();
        for (OWLEntity ent : entities) {
            logger.info("extracting module for " + ent);
            moduleIRI = IRI.create(defaultModuleIRI + getEntityLabel(ent.getIRI().toString()) + ".owl");
            Set<OWLAxiom> axioms = extractor.extractModuleAxiomsForEntity(ent);
            module = extractor.getModuleFromAxioms(axioms, moduleIRI);
            // Accumulate this entity's module axioms into the merged 'modules' ontology.
            man.addAxioms(modules, module.getAxioms());
            logger.info("finished extraction");
        }
        physicalModuleIRI = IRI.create(new File(resultModuleFilePath));
        saveModuleToPhysicalIRI();
    }

    /**
     * Loads each configured file from ontFilesPathPrefix with a throwaway
     * manager and copies its axioms into the single merged ontology
     * ontoToModularize.
     *
     * @throws OWLOntologyCreationException if a file cannot be parsed/loaded
     */
    private void loadExternalOntology() throws OWLOntologyCreationException {
        externalOntologyManager = OWLManager.createOWLOntologyManager();
        ontoToModularize = externalOntologyManager.createOntology();
        for (String fileName : fileNames) {
            String filePath = ontFilesPathPrefix + fileName;
            logger.info("reading " + filePath);
            OWLOntologyManager tmpMan = OWLManager.createOWLOntologyManager();
            OWLOntology tmpOnt = tmpMan.loadOntologyFromOntologyDocument(new File(filePath));
            logger.info("copying " + filePath + " to main ontology");
            externalOntologyManager.addAxioms(ontoToModularize, tmpOnt.getAxioms());
            logger.info("done");
        }
    }

    /**
     * Saves the module held in the 'module' field (the last one extracted)
     * to physicalModuleIRI in RDF/XML.
     *
     * NOTE(review): saving uses externalOntologyManager although 'module'
     * was produced by the extractor — verify this manager actually knows
     * the ontology; errors are only logged to stderr, not propagated.
     */
    private void saveModuleToPhysicalIRI() {
        try {
            logger.info("saving " + physicalModuleIRI);
            externalOntologyManager.saveOntology(module, new RDFXMLOntologyFormat(), physicalModuleIRI);
        } catch (Exception e) {
            System.err.println("Error saving module\n" + e.getLocalizedMessage());
            e.printStackTrace();
        }
    }

    /** Configures a bottom-locality module extractor (dual concepts/roles both disabled). */
    private void initExtractor(){
        //Bottom module
        boolean dualConcepts=false;
        boolean dualRoles=false;
        extractor = new ModuleExtractor(ontoToModularize, dualConcepts, dualRoles);
    }

    /**
     * Returns the fragment after '#' if the IRI contains one, otherwise the
     * full IRI string.
     *
     * NOTE(review): OBO PURLs (as used in 'entities' above) contain '/' but
     * no '#', so for those this returns the whole URL and the generated
     * module IRIs embed it verbatim — confirm that is intended.
     */
    private String getEntityLabel(String iriStr){
        if (iriStr.indexOf("#")>=0)
            return iriStr.split("#")[1];
        return iriStr;
    }

    /** Command-line entry point; arguments are ignored. */
    public static void main(String[] args) throws URISyntaxException, OWLOntologyCreationException {
        logger.info("starting");
        new ExtractModules4OntologyEntities2();
        logger.info("finished");
    }
}
| lgpl-3.0 |
beangle/library | commons/core/src/main/java/org/beangle/commons/lang/functor/CollectionHasUpto1ElementPredicate.java | 1138 | /*
* Beangle, Agile Development Scaffold and Toolkits.
*
* Copyright © 2005, The Beangle Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.beangle.commons.lang.functor;
import java.util.Collection;
/**
 * Predicate that accepts collections holding at most one element, i.e.
 * empty or singleton collections.
 *
 * @author chaostone
 * @version $Id: $
 */
public class CollectionHasUpto1ElementPredicate implements Predicate<Collection<?>> {

  /** @return true when the collection is empty or has exactly one element */
  public Boolean apply(final Collection<?> object) {
    return object.size() <= 1;
  }
}
| lgpl-3.0 |
smart-facility/TransMob | model/src/test/java/core/synthetic/SynPopGeneratorTest.java | 5002 | /* This file is part of TransMob.
TransMob is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
TransMob is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser Public License for more details.
You should have received a copy of the GNU Lesser Public License
along with TransMob. If not, see <http://www.gnu.org/licenses/>.
*/
/**
*
*/
package core.synthetic;
import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import core.synthetic.SynPopGenerator;
/**
 * JUnit scaffold for {@link core.synthetic.SynPopGenerator}.
 *
 * NOTE(review): every test method is annotated {@code @Ignore} and has an
 * empty body — this class currently exercises nothing beyond the
 * SynPopGenerator constructor in {@link #setUp()}. The logger field is
 * unused (its only call site in testMain() is commented out).
 *
 * @author nhuynh
 */
public class SynPopGeneratorTest {

    // Instance under test, (re)created before each test.
    private SynPopGenerator synPopGenerator;

    private static final Logger logger = Logger.getLogger(SynPopGeneratorTest.class);

    /**
     * Creates a fresh generator before each test.
     *
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception {
        synPopGenerator = new SynPopGenerator();
    }

    /**
     * Releases the generator after each test.
     *
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception {
        synPopGenerator = null;
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#main(java.lang.String[])}.
     */
    @Test @Ignore
    public void testMain() {
        // logger.debug("Start testing SynPopGenerator");
        // SynPopGenerator.main(null);
    }

    /**
     * Test method for {@link core.synthetic.SynPopGenerator#synPopGenerator()}.
     */
    @Test @Ignore
    public void testSynPopGenerator() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#correctTablesUsingB04(core.PostgresHandler, core.PostgresHandler)}
     * .
     */
    @Test @Ignore
    public void testCorrectTablesUsingB04() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#createIndivRecords(int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testCreateIndivRecords() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#zTest(int[][], int[][], int[][], int[][], int[][], int[][], int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testZTest() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#displayTables(int[][], int[][], int[][], int[][], int[][], int[][], int[][], int[][], int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testDisplayTables() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#compareWithHTS(int[][], int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testCompareWithHTS() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#chiSquaredTest(int[][], int[][], int[][], int[][], int[][], int[][], int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testChiSquaredTest() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#adjustNumberOfHF(int[][], int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testAdjustNumberOfHF() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#adjustNumberOfNFHhold(int[][], int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testAdjustNumberOfNFHhold() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#allocateNumPersonsToHhold(int[][], int[][], int[][], int[][], int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testAllocateNumPersonsToHhold() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#correctNumPersonsInFamilyHhold(int[][], int[][], int[][], int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testCorrectNumPersonsInFamilyHhold() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#testMinNumPersonsInHhold(int[][], int[][], int)}
     * .
     */
    @Test @Ignore
    public void testTestMinNumPersonsInHhold() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#testNumberofHhold(int[][], int, int, int, int)}
     * .
     */
    @Test @Ignore
    public void testTestNumberofHhold() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#displayNumHF(int[][], int, int, int, int, int, int)}
     * .
     */
    @Test @Ignore
    public void testDisplayNumHF() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#correctHouseholdRelationshipTable(java.lang.String, core.PostgresHandler, int[][], int[][], int)}
     * .
     */
    @Test @Ignore
    public void testCorrectHouseholdRelationshipTable() {
    }

    /**
     * Test method for
     * {@link core.synthetic.SynPopGenerator#displayDataHholdRel(int[][], int[][])}
     * .
     */
    @Test @Ignore
    public void testDisplayDataHholdRel() {
    }
}
| lgpl-3.0 |
ecologylab/ecologylabFundamental | src/ecologylab/serialization/ISimplStringMarshaller.java | 537 | package ecologylab.serialization;
/**
 * An interface for classes that can marshal a given object to a "Simpl"
 * string representation and unmarshal such a representation back to an
 * object.
 * This has the implicit round-trip heuristic that an object marshalled to
 * a string and then unmarshalled should be equivalent to the original
 * object.
 *
 * @author tom
 */
public interface ISimplStringMarshaller {

	/**
	 * Marshals the given object to its Simpl string representation.
	 *
	 * @throws SIMPLTranslationException if the object cannot be marshalled
	 */
	String marshal(Object object) throws SIMPLTranslationException;

	/**
	 * Unmarshals a Simpl string representation back into an object.
	 *
	 * @throws SIMPLTranslationException if the string cannot be unmarshalled
	 */
	Object unmarshal(String string) throws SIMPLTranslationException;
}
| lgpl-3.0 |
Builders-SonarSource/sonarqube-bis | server/sonar-server/src/main/java/org/sonar/ce/queue/package-info.java | 961 | /*
* SonarQube
* Copyright (C) 2009-2016 SonarSource SA
* mailto:contact AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
@ParametersAreNonnullByDefault
package org.sonar.ce.queue;
import javax.annotation.ParametersAreNonnullByDefault;
| lgpl-3.0 |
ujmp/universal-java-matrix-package | ujmp-core/src/main/java/org/ujmp/core/objectmatrix/DenseObjectMatrixMultiD.java | 1219 | /*
* Copyright (C) 2008-2015 by Holger Arndt
*
* This file is part of the Universal Java Matrix Package (UJMP).
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* UJMP is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* UJMP is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with UJMP; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package org.ujmp.core.objectmatrix;
import org.ujmp.core.genericmatrix.DenseGenericMatrixMultiD;
/**
 * Marker interface for a dense, multi-dimensional matrix whose entries are
 * generic {@link Object} values. It merely combines the multi-dimensional
 * object-matrix contract with the dense-storage contracts; no additional
 * methods are declared here.
 */
public interface DenseObjectMatrixMultiD extends ObjectMatrixMultiD, DenseObjectMatrix,
		DenseGenericMatrixMultiD<Object> {
}
| lgpl-3.0 |
michaelsembwever/Sesat | query-transform-control-spi/src/main/java/no/sesat/search/query/transform/MapInfoPageQueryTransformer.java | 2522 | /* Copyright (2006-2012) Schibsted ASA
* This file is part of Possom.
*
* Possom is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Possom is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Possom. If not, see <http://www.gnu.org/licenses/>.
*/
package no.sesat.search.query.transform;
import java.util.Map;
import no.sesat.search.datamodel.generic.StringDataObject;
/**
* Transformes the query if the requestparameters contains a contentId.
*
*
* @version $Revision:$
*/
public class MapInfoPageQueryTransformer extends AbstractQueryTransformer {
private final MapInfoPageQueryTransformerConfig config;
/** Required constructor.
* @param config Query transformer config
*/
public MapInfoPageQueryTransformer(final QueryTransformerConfig config){
this.config = (MapInfoPageQueryTransformerConfig) config;
}
/**
* If the request parameteters contains the contentid parameter, append recordid to the query.
*
* @see no.sesat.search.query.transform.QueryTransformer
*/
public String getTransformedQuery() {
final String originalQuery = getContext().getTransformedQuery();
Map<String,StringDataObject> requestParameters = getContext().getDataModel().getParameters().getValues();
if(requestParameters != null && requestParameters.containsKey(config.getParameterName())){
return config.getPrefix() + ":" + requestParameters.get(config.getParameterName()).getString();
}
return originalQuery;
}
public String getFilter() {
Map<String,StringDataObject> requestParameters = getContext().getDataModel().getParameters().getValues();
if(requestParameters != null && requestParameters.containsKey(config.getParameterName()) &&
requestParameters.containsKey(config.getFilterParameterName())){
return "+" + config.getFilterPrefix() + ":'" + requestParameters.get(config.getFilterParameterName()).getString() + "'";
}
return "";
}
}
| lgpl-3.0 |
RenePonto/dissect-cf | src/main/java/hu/mta/sztaki/lpds/cloud/simulator/iaas/vmconsolidation/SolutionBasedConsolidator.java | 3230 | /*
* ========================================================================
* DIScrete event baSed Energy Consumption simulaTor
* for Clouds and Federations (DISSECT-CF)
* ========================================================================
*
* This file is part of DISSECT-CF.
*
* DISSECT-CF is free software: you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at
* your option) any later version.
*
* DISSECT-CF is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
* General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with DISSECT-CF. If not, see <http://www.gnu.org/licenses/>.
*
* (C) Copyright 2017, Gabor Kecskemeti (g.kecskemeti@ljmu.ac.uk)
*/
package hu.mta.sztaki.lpds.cloud.simulator.iaas.vmconsolidation;
import java.util.logging.Logger;
import hu.mta.sztaki.lpds.cloud.simulator.iaas.IaaSService;
/**
* This class is to be sub-classed by all workload consolidators that use the
* Solution class. It allows the loading of the mutation probability
*
* @author "Gabor Kecskemeti, Department of Computer Science, Liverpool John
* Moores University, (c) 2017"
*/
public abstract class SolutionBasedConsolidator extends ModelBasedConsolidator {

	/** Probability used when mutating a solution; loaded from the "mutationProb" property. */
	protected double mutationProb;
	/** Number of solutions to create completely at random. */
	protected int randomCreations;
	/** Number of solutions kept identical to the current allocation. */
	protected int unchangedCreations;
	/** Number of solutions created with a first-fit heuristic. */
	protected int firstFitCreations;

	public SolutionBasedConsolidator(IaaSService toConsolidate, long consFreq) {
		super(toConsolidate, consFreq);
	}

	@Override
	protected void processProps() {
		// "mutationProb" must be present; a missing or malformed value surfaces
		// as an unchecked exception, exactly as before.
		this.mutationProb = Double.parseDouble(props.getProperty("mutationProb"));
	}

	/**
	 * We have to determine how to fill the population/swarm. At the moment there is
	 * going to be one unchanged solution, size * 0.25 first fit solutions and the
	 * rest of the creations is made randomly.
	 *
	 * Note that there will be only random creations if the populationSize/swarmSize
	 * is less than three.
	 *
	 * @param numberOfCreations
	 *            The swarmSize/populationSize.
	 */
	protected void determineCreations(int numberOfCreations) {
		// if the populationSize is less than 3, we only use random creations
		if(numberOfCreations < 3) {
			randomCreations = numberOfCreations;
			unchangedCreations = 0;
			firstFitCreations = 0;
		}
		else if(numberOfCreations == 3) {
			// exactly one of each kind
			randomCreations = 1;
			unchangedCreations = 1;
			firstFitCreations = 1;
		}
		else {
			unchangedCreations = 1;
			// 25% first-fit (truncated toward zero), the remainder random.
			// Plain cast replaces the boxed Double.intValue() round-trip of
			// the previous implementation; the result is identical.
			firstFitCreations = (int) (numberOfCreations * 0.25);
			randomCreations = numberOfCreations - unchangedCreations - firstFitCreations;
		}
		Logger.getGlobal().info("Creations: " + numberOfCreations + ", random: " + randomCreations + ", first fit: "
				+ firstFitCreations + ", unchanged: " + unchangedCreations);
	}
}
| lgpl-3.0 |
JorgeVector/OfertaGuiadaVector | src/main/java/com/isb/og/wsdl/ofegui/ComIsbSanoguServiciosdirogEFCbBuscGestoraPPEType.java | 2550 | //
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, vhudson-jaxb-ri-2.1-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2017.06.05 at 02:19:47 PM CEST
//
package com.isb.og.wsdl.ofegui;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for com.isb.sanogu.serviciosdirog.e.f.cb.BuscGestoraPP_E_Type complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="com.isb.sanogu.serviciosdirog.e.f.cb.BuscGestoraPP_E_Type">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="datoBusqueda" type="{http://www.isban.es/webservices/TDCs}NOMBRE_MEDIO_Type" minOccurs="0"/>
* <element name="indReps" type="{http://www.isban.es/webservices/TDCs}INDICADOR_GENERICO_Type" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated request type; edits here are lost on schema regeneration.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "com.isb.sanogu.serviciosdirog.e.f.cb.BuscGestoraPP_E_Type", propOrder = {
    "datoBusqueda",
    "indReps"
})
public class ComIsbSanoguServiciosdirogEFCbBuscGestoraPPEType {

    // Free-text search term (schema type NOMBRE_MEDIO_Type); optional, may be null.
    protected String datoBusqueda;
    // Generic yes/no indicator (schema type INDICADOR_GENERICO_Type); optional, may be null.
    protected String indReps;

    /**
     * Gets the value of the datoBusqueda property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getDatoBusqueda() {
        return datoBusqueda;
    }

    /**
     * Sets the value of the datoBusqueda property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setDatoBusqueda(String value) {
        this.datoBusqueda = value;
    }

    /**
     * Gets the value of the indReps property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getIndReps() {
        return indReps;
    }

    /**
     * Sets the value of the indReps property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setIndReps(String value) {
        this.indReps = value;
    }

}
| unlicense |
binkley/spring-actuator-demo | local/src/main/java/hello/HelloWorldMetrics.java | 1341 | package hello;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.codahale.metrics.Timer.Context;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.springframework.stereotype.Component;
import javax.inject.Inject;
/**
* {@code HelloWorldMetrics} <strong>needs documentation</strong>.
*
* @author <a href="mailto:boxley@thoughtworks.com">Brian Oxley</a>
* @todo Needs documentation
* @todo Not a fan of aspects, argument that metrics are a form of business
* code, and should be explicitly coded for
* @see <a href="http://kielczewski.eu/2015/01/application-metrics-with-spring-boot-actuator/">Application
* Metrics With Spring Boot Actuator</a>
*/
@Aspect
@Component
public class HelloWorldMetrics {
    // Dropwizard timer tracking call count and latency of sayHello().
    private final Timer callTime;

    @Inject
    public HelloWorldMetrics(final MetricRegistry metrics) {
        // Registers (or reuses) the timer under this dotted metric name.
        callTime = metrics.timer("helloWorld.calls.sayHello");
    }

    /**
     * Wraps {@code HelloWorldController.sayHello(String)} and records its
     * execution time in {@link #callTime}. The timer is stopped in a finally
     * block so failing invocations are measured as well.
     */
    @Around("execution(* hello.HelloWorldController.sayHello(String))")
    public Object aroundCall(final ProceedingJoinPoint cut)
            throws Throwable {
        final Context time = callTime.time();
        try {
            return cut.proceed();
        } finally {
            time.stop();
        }
    }
}
| unlicense |
codeApeFromChina/resource | frame_packages/java_libs/hibernate-distribution-3.6.10.Final/project/hibernate-core/src/main/java/org/hibernate/type/descriptor/java/ClobTypeDescriptor.java | 4136 | /*
* Hibernate, Relational Persistence for Idiomatic Java
*
* Copyright (c) 2010, Red Hat Inc. or third-party contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Red Hat Inc.
*
* This copyrighted material is made available to anyone wishing to use, modify,
* copy, or redistribute it subject to the terms and conditions of the GNU
* Lesser General Public License, as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this distribution; if not, write to:
* Free Software Foundation, Inc.
* 51 Franklin Street, Fifth Floor
* Boston, MA 02110-1301 USA
*/
package org.hibernate.type.descriptor.java;
import java.io.Reader;
import java.io.Serializable;
import java.sql.Clob;
import java.sql.SQLException;
import java.util.Comparator;
import org.hibernate.HibernateException;
import org.hibernate.engine.jdbc.ClobProxy;
import org.hibernate.engine.jdbc.WrappedClob;
import org.hibernate.type.descriptor.CharacterStream;
import org.hibernate.type.descriptor.WrapperOptions;
/**
* Descriptor for {@link Clob} handling.
* <p/>
* Note, {@link Clob clobs} really are mutable (their internal state can in fact be mutated). We simply
* treat them as immutable because we cannot properly check them for changes nor deep copy them.
*
* @author Steve Ebersole
*/
public class ClobTypeDescriptor extends AbstractTypeDescriptor<Clob> {
	// Shared stateless singleton.
	public static final ClobTypeDescriptor INSTANCE = new ClobTypeDescriptor();

	/**
	 * Mutability plan that treats {@link Clob} values as immutable: CLOBs
	 * cannot be reliably dirty-checked or deep-copied, so deep copy is
	 * identity and (dis)assembly for the second-level cache is unsupported.
	 */
	public static class ClobMutabilityPlan implements MutabilityPlan<Clob> {
		public static final ClobMutabilityPlan INSTANCE = new ClobMutabilityPlan();

		public boolean isMutable() {
			return false;
		}

		public Clob deepCopy(Clob value) {
			// Identity "copy"; see class comment.
			return value;
		}

		public Serializable disassemble(Clob value) {
			throw new UnsupportedOperationException( "Clobs are not cacheable" );
		}

		public Clob assemble(Serializable cached) {
			throw new UnsupportedOperationException( "Clobs are not cacheable" );
		}
	}

	public ClobTypeDescriptor() {
		super( Clob.class, ClobMutabilityPlan.INSTANCE );
	}

	// Materializes the full CLOB contents as a String.
	public String toString(Clob value) {
		try {
			return DataHelper.extractString( value.getCharacterStream() );
		}
		catch ( SQLException e ) {
			throw new HibernateException( "Unable to access clob stream", e );
		}
	}

	// Wraps the string in a lazily materialized Clob proxy.
	public Clob fromString(String string) {
		return ClobProxy.generateProxy( string );
	}

	@Override
	@SuppressWarnings({ "unchecked" })
	public Comparator<Clob> getComparator() {
		// CLOBs have no meaningful ordering.
		return IncomparableComparator.INSTANCE;
	}

	@Override
	public int extractHashCode(Clob value) {
		// Identity-based hash, consistent with the reference equality below.
		return System.identityHashCode( value );
	}

	@Override
	public boolean areEqual(Clob one, Clob another) {
		// Reference equality only; CLOB contents are never compared.
		return one == another;
	}

	/**
	 * Unwraps to either {@link Clob} itself or a {@link CharacterStream} over
	 * the CLOB's contents; any other target type is rejected.
	 */
	@SuppressWarnings({ "unchecked" })
	public <X> X unwrap(final Clob value, Class<X> type, WrapperOptions options) {
		if ( ! ( Clob.class.isAssignableFrom( type ) || CharacterStream.class.isAssignableFrom( type ) ) ) {
			throw unknownUnwrap( type );
		}

		if ( value == null ) {
			return null;
		}

		if ( CharacterStream.class.isAssignableFrom( type ) ) {
			try {
				return (X) new CharacterStreamImpl( DataHelper.extractString( value.getCharacterStream() ) );
			}
			catch ( SQLException e ) {
				throw new HibernateException( "Unable to access lob stream", e );
			}
		}

		// Unwrap a Hibernate wrapper proxy to the underlying vendor Clob.
		final Clob clob = WrappedClob.class.isInstance( value )
				? ( (WrappedClob) value ).getWrappedClob()
				: value;
		return (X) clob;
	}

	public <X> Clob wrap(X value, WrapperOptions options) {
		if ( value == null ) {
			return null;
		}

		if ( ! Clob.class.isAssignableFrom( value.getClass() ) ) {
			throw unknownWrap( value.getClass() );
		}

		// Wrap through the session's LobCreator so the CLOB stays usable later.
		return options.getLobCreator().wrap( (Clob) value );
	}
}
| unlicense |
baowp/test-sample | src/test/java/com/iteye/baowp/proxy/JavassistProxyTest.java | 2251 | package com.iteye.baowp.proxy;
import com.iteye.baowp.api.Calculator;
import com.iteye.baowp.spi.FirstCalculator;
import javassist.util.proxy.MethodHandler;
import javassist.util.proxy.ProxyFactory;
import javassist.util.proxy.ProxyObject;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
/**
* Created with IntelliJ IDEA.
* User: baowp
* Date: 12/17/13
* Time: 9:45 AM
*/
public class JavassistProxyTest {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    /** Proxies a concrete calculator and exercises a proxied call. */
    @Test
    public void testCalculator() {
        Calculator calculator = create(FirstCalculator.class);
        logger.info(calculator.calculate(1));
        logger.info(calculator.getClass().getName());
    }

    /** Rough timing of one million proxied calls. */
    @Test
    public void testEfficiency(){
        Calculator calculator = create(FirstCalculator.class);
        long start = System.currentTimeMillis();
        for (int i = 0; i < 1000000; i++) {
            calculator.calculate(i);
        }
        long end = System.currentTimeMillis();
        long span = end - start;
        logger.info("span time {}", span);
    }

    /** Shows that JDK collection classes can be proxied as well. */
    @Test
    public void testArrayList() {
        List list = create(ArrayList.class);
        list.add("foo");
        logger.info(list.getClass().getName());
    }

    /**
     * Creates a Javassist proxy subclass of {@code classs} whose method
     * handler simply delegates to the original implementation.
     *
     * @param classs class to proxy; must have an accessible no-arg constructor
     * @return a proxy instance of the requested type
     * @throws IllegalStateException if the proxy class cannot be instantiated
     */
    @SuppressWarnings("unchecked")
    private <T> T create(Class<T> classs) {
        ProxyFactory factory = new ProxyFactory();
        factory.setSuperclass(classs);
        Class<?> clazz = factory.createClass();
        MethodHandler handler = new MethodHandler() {
            @Override
            public Object invoke(Object self, Method thisMethod, Method proceed, Object[] args) throws Throwable {
                // Pass-through: invoke the overridden (original) method.
                return proceed.invoke(self, args);
            }
        };
        Object instance;
        try {
            instance = clazz.newInstance();
        } catch (InstantiationException | IllegalAccessException e) {
            // These were previously swallowed (printStackTrace) and the null
            // instance caused an NPE at setHandler(); fail fast instead.
            throw new IllegalStateException("Cannot instantiate proxy class " + clazz.getName(), e);
        }
        ((ProxyObject) instance).setHandler(handler);
        return (T) instance;
    }
}
| unlicense |
dave-cassettari/sapelli | Library/src/uk/ac/ucl/excites/sapelli/shared/media/MediaHelpers.java | 3335 | /**
* Sapelli data collection platform: http://sapelli.org
*
* Copyright 2012-2014 University College London - ExCiteS group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.ac.ucl.excites.sapelli.shared.media;
import java.util.regex.Pattern;
/**
* @author Michalis Vitos, mstevens
*
*/
public final class MediaHelpers
{

	/**
	 * Never instantiated: this is a static utility class.
	 */
	private MediaHelpers() {}

	/**
	 * Recognises audio files by extension, based on the audio/container file
	 * types supported by Android:
	 * http://developer.android.com/guide/appendix/media-formats.html
	 */
	static private final Pattern AUDIO_PATTERN =
		Pattern.compile("(.*/)*.+\\.(3gp|mp4|mp3|m4a|aac|ts|flac|mid|xmf|mxmf|rtttl|rtx|ota|imy|ogg|mkv|wav)$", Pattern.CASE_INSENSITIVE);

	/**
	 * Recognises raster image files by extension.
	 */
	static private final Pattern RASTER_IMAGE_PATTERN =
		Pattern.compile("(.*/)*.+\\.(png|jpg|gif|bmp|jpeg)$", Pattern.CASE_INSENSITIVE);

	/**
	 * Recognises vector image files by extension.
	 */
	static private final Pattern VECTOR_IMAGE_PATTERN =
		Pattern.compile("(.*/)*.+\\.(svg|svgz)$", Pattern.CASE_INSENSITIVE);

	/**
	 * Whether the given filename (or path) carries an audio file extension.
	 *
	 * @param fileNameOrPath candidate name/path; {@code null} yields {@code false}
	 * @return true for an Android-supported audio/container extension
	 */
	static public boolean isAudioFileName(String fileNameOrPath)
	{
		return matches(AUDIO_PATTERN, fileNameOrPath);
	}

	/**
	 * Whether the given filename (or path) carries any image extension,
	 * raster (PNG, JPG/JPEG, GIF, BMP) or vector (SVG, SVGZ).
	 *
	 * @param fileNameOrPath candidate name/path; {@code null} yields {@code false}
	 * @return true for any recognised image extension
	 */
	static public boolean isImageFileName(String fileNameOrPath)
	{
		return isRasterImageFileName(fileNameOrPath) || isVectorImageFileName(fileNameOrPath);
	}

	/**
	 * Whether the given filename (or path) carries a raster image extension
	 * (PNG, JPG/JPEG, GIF or BMP).
	 *
	 * @param fileNameOrPath candidate name/path; {@code null} yields {@code false}
	 * @return true for a raster image extension
	 */
	static public boolean isRasterImageFileName(String fileNameOrPath)
	{
		return matches(RASTER_IMAGE_PATTERN, fileNameOrPath);
	}

	/**
	 * Whether the given filename (or path) carries a vector image extension
	 * (SVG or SVGZ).
	 *
	 * @param fileNameOrPath candidate name/path; {@code null} yields {@code false}
	 * @return true for a vector image extension
	 */
	static public boolean isVectorImageFileName(String fileNameOrPath)
	{
		return matches(VECTOR_IMAGE_PATTERN, fileNameOrPath);
	}

	// Shared null-safe full-match helper; "" never matches any of the patterns.
	static private boolean matches(Pattern pattern, String candidate)
	{
		return candidate != null && pattern.matcher(candidate).matches();
	}
}
| unlicense |
clilystudio/NetBook | allsrc/com/ximalaya/ting/android/opensdk/model/album/LastUpTrack.java | 1601 | package com.ximalaya.ting.android.opensdk.model.album;
import com.google.gson.annotations.SerializedName;
/**
 * Bean describing the most recently updated track of an album, deserialized
 * from JSON via the Gson field names given in the annotations.
 */
public class LastUpTrack
{
  @SerializedName("created_at")
  private long createdAt;

  @SerializedName("duration")
  private long duration;

  @SerializedName("track_id")
  private long trackId;

  @SerializedName("track_title")
  private String trackTitle;

  @SerializedName("updated_at")
  private long updatedAt;

  public long getTrackId()
  {
    return this.trackId;
  }

  public void setTrackId(long paramLong)
  {
    this.trackId = paramLong;
  }

  public String getTrackTitle()
  {
    return this.trackTitle;
  }

  public void setTrackTitle(String paramString)
  {
    this.trackTitle = paramString;
  }

  public long getDuration()
  {
    return this.duration;
  }

  public void setDuration(long paramLong)
  {
    this.duration = paramLong;
  }

  public long getCreatedAt()
  {
    return this.createdAt;
  }

  public void setCreatedAt(long paramLong)
  {
    this.createdAt = paramLong;
  }

  public long getUpdatedAt()
  {
    return this.updatedAt;
  }

  public void setUpdatedAt(long paramLong)
  {
    this.updatedAt = paramLong;
  }

  // Produces exactly the same string as the previous concatenation-based
  // implementation.
  public String toString()
  {
    StringBuilder sb = new StringBuilder("LastUpTrack [trackId=");
    sb.append(this.trackId);
    sb.append(", trackTitle=").append(this.trackTitle);
    sb.append(", duration=").append(this.duration);
    sb.append(", createdAt=").append(this.createdAt);
    sb.append(", updatedAt=").append(this.updatedAt);
    sb.append("]");
    return sb.toString();
  }
}
/* Location: E:\Progs\Dev\Android\Decompile\apktool\zssq\zssq-dex2jar.jar
* Qualified Name: com.ximalaya.ting.android.opensdk.model.album.LastUpTrack
* JD-Core Version: 0.6.0
*/ | unlicense |
igitras-cg/core | src/main/java/com/igitras/cg/core/model/Template.java | 461 | package com.igitras.cg.core.model;
import java.util.Map;
/**
* Template for serializer.
*
* @author mason
*/
/**
 * Template descriptor used by serializers: pairs a template resource name
 * with a function that derives the rendering context from a model instance.
 *
 * @param <T> the model type this template can render
 *
 * @author mason
 */
public interface Template<T> {
    /**
     * Get the template file.
     *
     * @return template file name (resource to render)
     */
    String getTemplate();
    /**
     * Build the model context for serialize with template from model.
     *
     * @param model model to extract values from
     *
     * @return value map keyed by template variable name
     */
    Map<String, Object> buildContext(T model);
}
| unlicense |
alkedr/reporting-matchers | src/main/java/com/github/alkedr/matchers/reporting/reporters/UncheckedNodesFilteringReporter.java | 2866 | package com.github.alkedr.matchers.reporting.reporters;
import com.github.alkedr.matchers.reporting.sub.value.keys.Key;
import java.util.ArrayList;
import java.util.List;
// Unchecked values are "present" nodes and "absent" nodes without any content.
/**
 * Decorator for {@link SimpleTreeReporter} that suppresses nodes which never
 * received any content: beginPresentNode/beginAbsentNode calls are buffered
 * and only replayed to the delegate once some check result is reported inside
 * them. Broken nodes are always forwarded immediately.
 */
class UncheckedNodesFilteringReporter implements SimpleTreeReporter {
    // Downstream reporter receiving only nodes that turned out to have content.
    private final SimpleTreeReporter next;
    // Starters for nodes opened but not yet forwarded to 'next'.
    private final List<NodeStarter> nodesStack = new ArrayList<>();

    UncheckedNodesFilteringReporter(SimpleTreeReporter next) {
        this.next = next;
    }

    @Override
    public void beginPresentNode(Key key, Object value) {
        // Buffered: forwarded only if content appears inside this node.
        rememberNode(reporter -> reporter.beginPresentNode(key, value));
    }

    @Override
    public void beginAbsentNode(Key key) {
        // Buffered: forwarded only if content appears inside this node.
        rememberNode(reporter -> reporter.beginAbsentNode(key));
    }

    @Override
    public void beginBrokenNode(Key key, Throwable throwable) {
        // A broken node counts as content itself, so flush immediately.
        rememberNode(reporter -> reporter.beginBrokenNode(key, throwable));
        rememberThatCurrentNodeHasContent();
    }

    @Override
    public void endNode() {
        if (nodesStack.isEmpty()) {
            // Node was already forwarded downstream: close it there too.
            next.endNode();
        } else {
            // Node was never forwarded (no content): silently discard it.
            nodesStack.remove(nodesStack.size() - 1);
        }
    }

    @Override
    public void correctlyPresent() {
        rememberThatCurrentNodeHasContent();
        next.correctlyPresent();
    }

    @Override
    public void correctlyAbsent() {
        rememberThatCurrentNodeHasContent();
        next.correctlyAbsent();
    }

    @Override
    public void incorrectlyPresent() {
        rememberThatCurrentNodeHasContent();
        next.incorrectlyPresent();
    }

    @Override
    public void incorrectlyAbsent() {
        rememberThatCurrentNodeHasContent();
        next.incorrectlyAbsent();
    }

    @Override
    public void passedCheck(String description) {
        rememberThatCurrentNodeHasContent();
        next.passedCheck(description);
    }

    @Override
    public void failedCheck(String expected, String actual) {
        rememberThatCurrentNodeHasContent();
        next.failedCheck(expected, actual);
    }

    @Override
    public void checkForAbsentItem(String description) {
        rememberThatCurrentNodeHasContent();
        next.checkForAbsentItem(description);
    }

    @Override
    public void brokenCheck(String description, Throwable throwable) {
        rememberThatCurrentNodeHasContent();
        next.brokenCheck(description, throwable);
    }

    private void rememberNode(NodeStarter nodeStarter) {
        nodesStack.add(nodeStarter);
    }

    // Flushes all buffered node starters (oldest first) so the content being
    // reported gets its ancestor nodes opened downstream exactly once.
    private void rememberThatCurrentNodeHasContent() {
        for (NodeStarter nodeStarter : nodesStack) {
            nodeStarter.start(next);
        }
        nodesStack.clear();
    }

    // Deferred "begin*" call captured for later replay against a reporter.
    private interface NodeStarter {
        void start(SimpleTreeReporter simpleTreeReporter);
    }
}
| unlicense |
jorenver/Proyecto_Sistemas_Distribuidos | pruebaAlgoritmo/umontreal/ssj/probdist/GammaDistFromMoments.java | 2108 | /*
* Class: GammaDistFromMoments
* Description: gamma distribution
* Environment: Java
* Software: SSJ
* Copyright (C) 2001 Pierre L'Ecuyer and Universite de Montreal
* Organization: DIRO, Universite de Montreal
* @author
* @since
*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package umontreal.ssj.probdist;
/**
* Extends the @ref GammaDist distribution with constructors accepting the
* mean @f$\mu@f$ and variance @f$\sigma^2@f$ as arguments instead of a
* shape parameter @f$\alpha@f$ and a scale parameter @f$\lambda@f$. Since
* @f$\mu=\alpha/ \lambda@f$, and @f$\sigma^2=\alpha/ \lambda^2@f$, the
* shape and scale parameters are @f$\alpha=\mu^2 / \sigma^2@f$, and
* @f$\lambda=\mu/ \sigma^2@f$, respectively.
*
* <div class="SSJ-bigskip"></div>
*
* @ingroup probdist_continuous
*/
public class GammaDistFromMoments extends GammaDist {

   /**
    * Constructs a gamma distribution with mean `mean`, variance `var`,
    * and `d` decimal of precision.
    * @param mean the desired mean.
    * @param var the desired variance.
    * @param d the number of decimals of precision.
    */
   public GammaDistFromMoments (double mean, double var, int d) {
      // Moment matching: shape alpha = mean^2 / var, scale lambda = mean / var.
      super (mean * mean / var, mean / var, d);
   }

   /**
    * Constructs a gamma distribution with mean `mean`, and variance
    * `var`.
    * @param mean the desired mean.
    * @param var the desired variance.
    */
   public GammaDistFromMoments (double mean, double var) {
      // Moment matching: shape alpha = mean^2 / var, scale lambda = mean / var.
      super (mean * mean / var, mean / var);
   }
}
naebomium/android | app/src/main/java/com/mobium/new_api/methodParameters/GetItemsExtra.java | 822 | package com.mobium.new_api.methodParameters;
/**
* on 15.07.15.
* http://mobiumapps.com/
*/
/**
 * Parameter holder for item queries: paging (limit/offset) plus an optional
 * region filter. Plain mutable bean; every value may be {@code null}.
 */
public class GetItemsExtra {

    private Integer limit;    // maximum number of items to return
    private Integer offset;   // index of the first item to return
    private String regionId;  // identifier of the region to query

    public GetItemsExtra(Integer limit, Integer offset, String regionId) {
        this.limit = limit;
        this.offset = offset;
        this.regionId = regionId;
    }

    public Integer getLimit() { return limit; }

    public void setLimit(Integer limit) { this.limit = limit; }

    public Integer getOffset() { return offset; }

    public void setOffset(Integer offset) { this.offset = offset; }

    public String getRegionId() { return regionId; }

    public void setRegionId(String regionId) { this.regionId = regionId; }
}
| unlicense |
RedTriplane/RedTriplane | r3-box2d-jd/src/org/jbox2d/d/pooling/normal/OrderedStack.java | 2862 | /*******************************************************************************
* Copyright (c) 2013, Daniel Murphy
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
/**
* Created at 12:52:04 AM Jan 20, 2011
*/
package org.jbox2d.d.pooling.normal;
/**
* @author Daniel Murphy
*/
/**
 * A fixed-size stack of pre-allocated, reusable objects. {@link #pop()} hands
 * out the next pooled object; {@link #push(int)} returns objects to the pool.
 * Bounds are enforced with {@code assert} only, matching Box2D's zero-cost
 * release-mode philosophy.
 *
 * @author Daniel Murphy
 */
public abstract class OrderedStack<E> {

  // Pre-allocated pool of reusable objects.
  private final Object[] pool;
  // Index of the next object to hand out; pool[0..index) are "popped".
  private int index;
  private final int size;
  // Scratch array reused by every pop(int) call; NOT safe to hold onto.
  private final Object[] container;

  public OrderedStack(int argStackSize, int argContainerSize) {
    size = argStackSize;
    pool = new Object[argStackSize];
    for (int i = 0; i < argStackSize; i++) {
      pool[i] = newInstance();
    }
    index = 0;
    container = new Object[argContainerSize];
  }

  /** Pops a single pooled object. */
  @SuppressWarnings("unchecked")
  public final E pop() {
    assert (index < size) : "End of stack reached, there is probably a leak somewhere";
    return (E) pool[index++];
  }

  /**
   * Pops {@code argNum} pooled objects at once. The returned array is the
   * shared container, overwritten by the next call; copy it if you need it.
   */
  @SuppressWarnings("unchecked")
  public final E[] pop(int argNum) {
    // Fixed off-by-one: popping exactly the remaining elements is legal
    // (consistent with pop(), which allows index up to size - 1), so the
    // bound is <= rather than the previous <.
    assert (index + argNum <= size) : "End of stack reached, there is probably a leak somewhere";
    assert (argNum <= container.length) : "Container array is too small";
    System.arraycopy(pool, index, container, 0, argNum);
    index += argNum;
    return (E[]) container;
  }

  /** Returns the last {@code argNum} popped objects to the pool. */
  public final void push(int argNum) {
    index -= argNum;
    assert (index >= 0) : "Beginning of stack reached, push/pops are unmatched";
  }

  /** Creates a new instance of the object contained by this stack. */
  protected abstract E newInstance();
}
| unlicense |
breandan/6Engine | src/main/java/com/daexsys/siximpl/world/block/BlockFace.java | 132 | package com.daexsys.siximpl.world.block;
/**
 * The six axis-aligned faces of a cubic block, used to address a block's
 * neighbours and surfaces.
 */
public enum BlockFace {
    TOP,
    BOTTOM,
    FRONT,
    BACK,
    LEFT,
    RIGHT
}
| unlicense |
Normegil/Librarium-Server | src/test/java/be/normegil/librarium/model/data/book/UTAbstractBDComparator.java | 1886 | package be.normegil.librarium.model.data.book;
import be.normegil.librarium.WarningTypes;
import be.normegil.librarium.Constants;
import be.normegil.librarium.model.data.fake.FakeAbstractBD;
import be.normegil.librarium.tool.DataFactory;
import be.normegil.librarium.tool.FactoryRepository;
import be.normegil.librarium.tool.test.model.data.AbstractDataComparableTest;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class UTAbstractBDComparator extends AbstractDataComparableTest<AbstractBD> {

	// Factory producing AbstractBD fixtures (FakeAbstractBD instances).
	@SuppressWarnings(WarningTypes.UNCHECKED_CAST)
	private static final DataFactory<AbstractBD> FACTORY = FactoryRepository.get(AbstractBD.class);

	@Override
	protected AbstractBD getNewEntity() {
		return FACTORY.getNew();
	}

	@Override
	protected int compare(final AbstractBD entity1, final AbstractBD entity2) {
		return entity1.compareTo(entity2);
	}

	// A copy must compare equal, and compareTo must agree with compare().
	@Override
	public void testEquality() throws Exception {
		AbstractBD entity = getEntity();
		AbstractBD copy = new FakeAbstractBD(entity);
		assertTrue(getComparatorHelper().testComparatorResult(Constants.Comparator.EQUALS, compare(entity, copy)));
		assertEquals(compare(entity, copy), entity.compareTo(copy));
	}

	// The entity with the lower issue number sorts first.
	@Test
	public void testIssueNumber_First() throws Exception {
		AbstractBD entity = getEntity();
		AbstractBD copy = new FakeAbstractBD(entity);
		copy.setIssueNumber(entity.getIssueNumber() + 1);
		assertTrue(getComparatorHelper().testComparatorResult(Constants.Comparator.PRIORITY_FIRST, compare(entity, copy)));
	}

	// Reversed argument order: the higher issue number sorts second.
	@Test
	public void testIssueNumber_Second() throws Exception {
		AbstractBD entity = getEntity();
		AbstractBD copy = new FakeAbstractBD(entity);
		copy.setIssueNumber(entity.getIssueNumber() + 1);
		assertTrue(getComparatorHelper().testComparatorResult(Constants.Comparator.PRIORITY_SECOND, compare(copy, entity)));
	}
}
| unlicense |
geronimo-iia/restexpress | restexpress-core/src/test/java/org/restexpress/RequestTest.java | 9911 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.restexpress;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.net.URLEncoder;
import java.util.List;
import java.util.Map;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.handler.codec.http.DefaultHttpRequest;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.codec.http.HttpVersion;
import org.junit.Before;
import org.junit.Test;
import org.restexpress.domain.CharacterSet;
import org.restexpress.http.BadRequestException;
/**
* @author toddf
* @since Mar 29, 2011
*/
public class RequestTest
{
private Request request;
@Before
public void initialize()
{
HttpRequest httpRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/foo?param1=bar¶m2=blah&yada");
httpRequest.headers().add("Host", "testing-host");
request = new Request(httpRequest, null, null);
}
@Test
public void shouldRetrieveEntireUrl()
{
assertEquals("http://testing-host/foo?param1=bar¶m2=blah&yada", request.getUrl());
}
@Test
public void shouldRetrieveBaseUrl()
{
assertEquals("http://testing-host", request.getBaseUrl());
}
@Test
public void shouldRetrievePath()
{
assertEquals("/foo?param1=bar¶m2=blah&yada", request.getPath());
}
@Test
public void shouldApplyQueryStringParamsAsHeaders()
{
assertEquals("bar", request.getHeader("param1"));
assertEquals("blah", request.getHeader("param2"));
assertEquals("", request.getHeader("yada"));
}
@Test
public void shouldParseQueryStringIntoMap()
{
Map<String, String> m = request.getQueryStringMap();
assertNotNull(m);
assertEquals("bar", m.get("param1"));
assertEquals("blah", m.get("param2"));
assertEquals("", m.get("yada"));
}
@Test
public void shouldHandleNoQueryString()
{
Request r = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/foo"), null);
Map<String, String> m = r.getQueryStringMap();
assertNotNull(m);
assertTrue(m.isEmpty());
}
@Test
public void shouldHandleNullQueryString()
{
Request r = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/foo?"), null);
Map<String, String> m = r.getQueryStringMap();
assertNotNull(m);
assertTrue(m.isEmpty());
}
@Test
public void shouldHandleGoofyQueryString()
{
Request r = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/foo??&"), null);
Map<String, String> m = r.getQueryStringMap();
assertNotNull(m);
assertEquals("", m.get("?"));
}
@Test
public void shouldHandleUrlEncodedQueryString()
{
Request r = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/foo?assertion=assertion%7CfitnesseIdm40%40ecollege.com%7C2013-03-14T18%3A02%3A08%2B00%3A00%7C2f6f1b0fa8ecce7d092c1c45cc44e4c7"), null);
assertEquals("assertion|fitnesseIdm40@ecollege.com|2013-03-14T18:02:08+00:00|2f6f1b0fa8ecce7d092c1c45cc44e4c7", r.getHeader("assertion"));
}
@Test
public void shouldSetHeaderOnInvalidUrlDecodedQueryString()
{
String key = "invalidUrlDecode";
String value = "%invalid";
Request r = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/foo?" + key + "=" + value), null);
assertEquals(value, r.getHeader(key));
}
@Test
public void shouldSetAndGetHeader()
{
String key = "header-key";
String value = "header value";
request.addHeader(key, value);
assertEquals(value, request.getHeader(key));
}
@Test
public void shouldSetAndGetInvalidUrlEncodedHeader()
{
String key = "invalid-header-key";
String value = "%invalidUrlEncode";
request.addHeader(key, value);
assertEquals(value, request.getHeader(key));
}
@Test
public void shouldNotAlterUrlEncodedHeader()
{
String key = "validUrlDecode";
String value = "%20this%20that";
request.addHeader(key, value);
assertEquals(value, request.getHeader(key));
}
@Test
public void shouldNotAlterUrlDecodedHeaderWithMessage()
{
String key = "validUrlDecode";
String value = "%20this%20that";
request.addHeader(key, value);
assertEquals(value, request.getHeader(key, "This should not display"));
}
@Test(expected=BadRequestException.class)
public void shouldThrowBadRequestExceptionOnMissingUrlDecodedHeader()
{
request.getHeader("missing", "missing header");
}
@Test
public void shouldBeGetRequest()
{
assertEquals(HttpMethod.GET, request.getHttpMethod());
assertEquals(HttpMethod.GET, request.getEffectiveHttpMethod());
}
@Test
public void shouldBePostRequest()
{
Request postRequest = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/foo"), null);
assertEquals(HttpMethod.POST, postRequest.getHttpMethod());
assertEquals(HttpMethod.POST, postRequest.getEffectiveHttpMethod());
}
@Test
public void shouldBePutRequest()
{
Request putRequest = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.PUT, "/foo"), null);
assertEquals(HttpMethod.PUT, putRequest.getHttpMethod());
assertEquals(HttpMethod.PUT, putRequest.getEffectiveHttpMethod());
}
@Test
public void shouldBeDeleteRequest()
{
Request deleteRequest = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.DELETE, "/foo"), null);
assertEquals(HttpMethod.DELETE, deleteRequest.getHttpMethod());
assertEquals(HttpMethod.DELETE, deleteRequest.getEffectiveHttpMethod());
}
@Test
public void shouldBeEffectivePutRequest()
{
Request putRequest = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/foo?_method=pUt"), null);
assertEquals(HttpMethod.POST, putRequest.getHttpMethod());
assertEquals(HttpMethod.PUT, putRequest.getEffectiveHttpMethod());
}
@Test
public void shouldBeEffectiveDeleteRequest()
{
Request deleteRequest = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/foo?_method=DeLeTe"), null);
assertEquals(HttpMethod.POST, deleteRequest.getHttpMethod());
assertEquals(HttpMethod.DELETE, deleteRequest.getEffectiveHttpMethod());
}
@Test
public void shouldBeEffectivePostRequest()
{
Request deleteRequest = new Request(new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/foo?_method=xyzt"), null);
assertEquals(HttpMethod.POST, deleteRequest.getHttpMethod());
assertEquals(HttpMethod.POST, deleteRequest.getEffectiveHttpMethod());
}
@Test
public void shouldParseUrlFormEncodedBody()
throws Exception
{
DefaultHttpRequest httpRequest = new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/foo?_method=xyzt");
String formValue1 = "http://login.berlin.ecollege-labs.com/google-service/google/sso/callback/google.JSON?successUrl=http%3A%2F%2Fdashboard.berlin.ecollege-labs.com%2Ftransfer.html&failureUrl=http%3A%2F%2Flogin.berlin.ecollege-labs.com&domain=GOOGLE_NON_MARKET_PLACE_DOMAIN";
String formValue2 = "https://www.google.com/accounts/o8/id?id=AItOawkHDpeMEfe_xM14z_ge7UATYOSg_QlPeDg";
String formValue3 = "https://www.google.com/accounts/o8/id?id=AItOawkHDpeMEfe_xM14z_ge7UATYOSg_QlPeDg";
String charset = CharacterSet.UTF_8.getCharsetName();
httpRequest.setContent(ChannelBuffers.wrappedBuffer(("openid.return_to=" + URLEncoder.encode(formValue1, charset)
+ "&openid.identity=" + URLEncoder.encode(formValue2, charset)
+ "&openid.claimed_id=" + URLEncoder.encode(formValue3,charset)).getBytes()));
Request formPost = new Request(httpRequest, null);
Map<String, List<String>> form = formPost.getEntityFromUrlFormEncoded();
assertEquals(3, form.size());
assertNotNull(form.get("openid.return_to"));
assertNotNull(form.get("openid.identity"));
assertNotNull(form.get("openid.claimed_id"));
assertEquals(formValue1, form.get("openid.return_to").get(0));
assertEquals(formValue2, form.get("openid.identity").get(0));
assertEquals(formValue3, form.get("openid.claimed_id").get(0));
}
@Test
public void shouldGetRequestHeaderNames()
{
request.addHeader("header-key", "header-value");
request.addHeader("header-key-1", "header-value-1");
request.addHeader("header-key-2", "");
assertTrue(request.getHeaderNames().contains("header-key"));
assertTrue(request.getHeaderNames().contains("header-key-1"));
assertTrue(request.getHeaderNames().contains("header-key-2"));
}
@Test
public void shouldGetAllRawHeadersWithSameName() {
request.addHeader("common-key", "header-value");
request.addHeader("common-key", "header-value-1");
assertTrue(request.getHeaders("common-key").contains("header-value"));
assertTrue(request.getHeaders("common-key").contains("header-value-1"));
}
@Test
public void shouldNotReturnNullWhenNoQueryString() {
Request noQueryRequest = new Request(
new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.GET, "/noquery"),
null, null
);
assertNotNull(noQueryRequest.getQueryStringMap());
}
}
| apache-2.0 |
beefsuperman/kunkunweather | app/src/main/java/com/example/kunkunweather/gson/Forecast.java | 485 | package com.example.kunkunweather.gson;
import com.google.gson.annotations.SerializedName;
/**
* Created by 坤阳 on 2017/4/21.
*/
/**
 * Gson model for one daily-forecast entry of the weather JSON payload.
 * Field names (or their {@code @SerializedName} aliases) must match the JSON
 * keys exactly; do not rename them without updating the wire format.
 */
public class Forecast {
    // Forecast date string as delivered by the service.
    public String date;
    // JSON key "tmp": daily temperature range.
    @SerializedName("tmp")
    public Temperature temperature;
    // JSON key "cond": weather condition description.
    @SerializedName("cond")
    public More more;
    // NOTE(review): these inner classes are non-static; Gson handles this in
    // practice, but making them static would be the conventional form — confirm
    // before changing, as it is a source-compatibility concern for callers.
    public class Temperature{
        // Daily maximum temperature (string as sent by the API).
        public String max;
        // Daily minimum temperature (string as sent by the API).
        public String min;
    }
    public class More{
        // JSON key "txt_d": daytime condition text.
        @SerializedName("txt_d")
        public String info;
    }
}
| apache-2.0 |
javanna/elasticshell | src/main/java/org/elasticsearch/shell/client/builders/cluster/ClusterStateRequestBuilder.java | 3946 | /*
* Licensed to Luca Cavanna (the "Author") under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.shell.client.builders.cluster;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.settings.ImmutableSettings;
import org.elasticsearch.common.settings.SettingsFilter;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.shell.client.builders.AbstractRequestBuilderJsonOutput;
import org.elasticsearch.shell.json.JsonToString;
import org.elasticsearch.shell.json.StringToJson;
import java.io.IOException;
/**
* @author Luca Cavanna
*
* Request builder for cluster state API
*/
@SuppressWarnings("unused")
public class ClusterStateRequestBuilder<JsonInput,JsonOutput> extends AbstractRequestBuilderJsonOutput<ClusterStateRequest, ClusterStateResponse, JsonInput, JsonOutput> {
    public ClusterStateRequestBuilder(Client client, JsonToString<JsonInput> jsonToString, StringToJson<JsonOutput> stringToJson) {
        super(client, new ClusterStateRequest(), jsonToString, stringToJson);
    }
    // Requests that every section of the cluster state be filtered out.
    public ClusterStateRequestBuilder<JsonInput,JsonOutput> filterAll() {
        request.filterAll();
        return this;
    }
    // When true, omits cluster blocks from the response.
    public ClusterStateRequestBuilder<JsonInput,JsonOutput> filterBlocks(boolean filter) {
        request.filterBlocks(filter);
        return this;
    }
    // When true, omits cluster metadata from the response.
    public ClusterStateRequestBuilder<JsonInput,JsonOutput> filterMetaData(boolean filter) {
        request.filterMetaData(filter);
        return this;
    }
    // When true, omits node information from the response.
    public ClusterStateRequestBuilder<JsonInput,JsonOutput> filterNodes(boolean filter) {
        request.filterNodes(filter);
        return this;
    }
    // When true, omits the routing table from the response.
    public ClusterStateRequestBuilder<JsonInput,JsonOutput> filterRoutingTable(boolean filter) {
        request.filterRoutingTable(filter);
        return this;
    }
    // Restricts the state returned to the given index names.
    public ClusterStateRequestBuilder<JsonInput,JsonOutput> filterIndices(String... indices) {
        request.filteredIndices(indices);
        return this;
    }
    // Restricts the state returned to the given index templates.
    public ClusterStateRequestBuilder<JsonInput,JsonOutput> filteredIndexTemplates(String... templates) {
        request.filteredIndexTemplates(templates);
        return this;
    }
    // When true, reads the state from the local node instead of the master.
    public ClusterStateRequestBuilder<JsonInput,JsonOutput> local(boolean local) {
        request.local(local);
        return this;
    }
    @Override
    protected ActionFuture<ClusterStateResponse> doExecute(ClusterStateRequest request) {
        return client.admin().cluster().state(request);
    }
    // Renders the response: cluster name plus the full state, with an empty
    // settings filter applied (no settings are hidden).
    @Override
    protected XContentBuilder toXContent(ClusterStateRequest request, ClusterStateResponse response, XContentBuilder builder) throws IOException {
        builder.startObject();
        builder.field(Fields.CLUSTER_NAME, response.getClusterName().value());
        response.getState().settingsFilter(new SettingsFilter(ImmutableSettings.settingsBuilder().build())).toXContent(builder, ToXContent.EMPTY_PARAMS);
        builder.endObject();
        return builder;
    }
}
| apache-2.0 |
emistoolbox/emistoolbox | libs/joriki/src/info/joriki/util/CloneableObject.java | 316 | /*
* Copyright 2002 Felix Pahl. All rights reserved.
* Use is subject to license terms.
*/
package info.joriki.util;
/**
 * Convenience base class whose {@link #clone} converts the checked
 * {@link CloneNotSupportedException} into an unchecked {@link InternalError},
 * so subclasses can clone without try/catch boilerplate.
 */
public class CloneableObject implements Cloneable {
    /**
     * Returns a shallow field-for-field copy of this object.
     * Because this class implements {@link Cloneable}, the superclass
     * implementation cannot actually reject the call; if it somehow does,
     * the impossible condition is surfaced as an {@link InternalError}.
     */
    public Object clone() {
        try {
            return super.clone();
        } catch (CloneNotSupportedException impossible) {
            throw new InternalError();
        }
    }
}
| apache-2.0 |
FFY00/deobfuscator | src/main/java/com/javadeobfuscator/deobfuscator/org/objectweb/asm/optimizer/FieldConstantsCollector.java | 3521 | /***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2011 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.javadeobfuscator.deobfuscator.org.objectweb.asm.optimizer;
import com.javadeobfuscator.deobfuscator.org.objectweb.asm.AnnotationVisitor;
import com.javadeobfuscator.deobfuscator.org.objectweb.asm.Attribute;
import com.javadeobfuscator.deobfuscator.org.objectweb.asm.FieldVisitor;
import com.javadeobfuscator.deobfuscator.org.objectweb.asm.Opcodes;
import com.javadeobfuscator.deobfuscator.org.objectweb.asm.TypePath;
/**
* A {@link FieldVisitor} that collects the {@link Constant}s of the fields it
* visits.
*
* @author Eric Bruneton
*/
/**
 * A {@link FieldVisitor} that collects the {@link Constant}s of the fields it
 * visits into a {@link ConstantPool}, then delegates to the wrapped visitor.
 *
 * @author Eric Bruneton
 */
public class FieldConstantsCollector extends FieldVisitor {

    /** Constant pool into which the visited constants are recorded. */
    private final ConstantPool cp;

    public FieldConstantsCollector(final FieldVisitor fv, final ConstantPool cp) {
        super(Opcodes.ASM5, fv);
        this.cp = cp;
    }

    @Override
    public AnnotationVisitor visitAnnotation(final String desc,
            final boolean visible) {
        // Record the annotation descriptor and the attribute name it will be
        // stored under, then keep collecting inside the annotation itself.
        cp.newUTF8(desc);
        if (visible) {
            cp.newUTF8("RuntimeVisibleAnnotations");
        } else {
            cp.newUTF8("RuntimeInvisibleAnnotations");
        }
        return new AnnotationConstantsCollector(fv.visitAnnotation(desc,
                visible), cp);
    }

    @Override
    public AnnotationVisitor visitTypeAnnotation(int typeRef,
            TypePath typePath, String desc, boolean visible) {
        cp.newUTF8(desc);
        if (visible) {
            cp.newUTF8("RuntimeVisibleTypeAnnotations");
        } else {
            cp.newUTF8("RuntimeInvisibleTypeAnnotations");
        }
        // BUG FIX: previously delegated to fv.visitAnnotation(desc, visible),
        // which dropped typeRef/typePath and recorded the type annotation as a
        // plain annotation on the delegate. Delegate to visitTypeAnnotation,
        // matching upstream ASM's constants collectors.
        return new AnnotationConstantsCollector(fv.visitTypeAnnotation(typeRef,
                typePath, desc, visible), cp);
    }

    @Override
    public void visitAttribute(final Attribute attr) {
        // Non-standard attributes carry no symbolic constants to collect.
        fv.visitAttribute(attr);
    }

    @Override
    public void visitEnd() {
        fv.visitEnd();
    }
}
| apache-2.0 |
afelisatti/async-http-client | providers/netty4/src/main/java/org/asynchttpclient/providers/netty4/request/body/NettyBodyBody.java | 3521 | /*
* Copyright (c) 2014 AsyncHttpClient Project. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the Apache License Version 2.0 for the specific language governing permissions and limitations there under.
*/
package org.asynchttpclient.providers.netty4.request.body;
import static org.asynchttpclient.util.MiscUtils.closeSilently;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelProgressiveFuture;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.handler.stream.ChunkedWriteHandler;
import java.io.IOException;
import org.asynchttpclient.AsyncHttpClientConfig;
import org.asynchttpclient.Body;
import org.asynchttpclient.BodyGenerator;
import org.asynchttpclient.RandomAccessBody;
import org.asynchttpclient.providers.netty4.NettyAsyncHttpProviderConfig;
import org.asynchttpclient.providers.netty4.channel.ChannelManager;
import org.asynchttpclient.providers.netty4.future.NettyResponseFuture;
import org.asynchttpclient.providers.netty4.request.ProgressListener;
import org.asynchttpclient.providers.netty4.request.body.FeedableBodyGenerator.FeedListener;
/**
 * {@link NettyBody} adapter that streams a generic {@link Body} to a Netty
 * channel, using zero-copy file-region transfer when possible and falling
 * back to chunked writes otherwise.
 */
public class NettyBodyBody implements NettyBody {

    private final Body body;
    private final NettyAsyncHttpProviderConfig nettyConfig;

    public NettyBodyBody(Body body, NettyAsyncHttpProviderConfig nettyConfig) {
        this.body = body;
        this.nettyConfig = nettyConfig;
    }

    public Body getBody() {
        return body;
    }

    @Override
    public long getContentLength() {
        return body.getContentLength();
    }

    @Override
    public String getContentType() {
        // No intrinsic content type; callers must set the header themselves.
        return null;
    };

    /**
     * Writes the body to the channel and flushes a terminating
     * LastHttpContent. Zero-copy (FileRegion) is used only when the body is
     * random-access, no SSL handler is installed (SSL requires the bytes in
     * user space), and zero-copy has not been disabled in the config;
     * otherwise the body is sent as chunked input. For feedable bodies, a
     * listener resumes the chunked transfer whenever new content is added.
     * The progress listener closes the body once the write completes.
     */
    @Override
    public void write(final Channel channel, NettyResponseFuture<?> future, AsyncHttpClientConfig config) throws IOException {

        Object msg;
        if (body instanceof RandomAccessBody && !ChannelManager.isSslHandlerConfigured(channel.pipeline()) && !nettyConfig.isDisableZeroCopy()) {
            msg = new BodyFileRegion((RandomAccessBody) body);

        } else {
            msg = new BodyChunkedInput(body);

            BodyGenerator bg = future.getRequest().getBodyGenerator();
            if (bg instanceof FeedableBodyGenerator) {
                // Wake the ChunkedWriteHandler when the generator is fed more
                // data, so the transfer does not stall.
                FeedableBodyGenerator.class.cast(bg).setListener(new FeedListener() {
                    @Override
                    public void onContentAdded() {
                        channel.pipeline().get(ChunkedWriteHandler.class).resumeTransfer();
                    }
                });
            }
        }
        ChannelFuture writeFuture = channel.write(msg, channel.newProgressivePromise());

        writeFuture.addListener(new ProgressListener(config, future.getAsyncHandler(), future, false, getContentLength()) {
            public void operationComplete(ChannelProgressiveFuture cf) {
                // Release the body's underlying resources before notifying
                // the regular progress machinery.
                closeSilently(body);
                super.operationComplete(cf);
            }
        });
        channel.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
    }
}
| apache-2.0 |
CloudBPM/SwinFlowCloud-CloudSide | core.runtime/src/main/java/com/cloudibpm/core/runtime/util/json/WfProcessInstanceJSONParser.java | 25105 | /**
*
*/
package com.cloudibpm.core.runtime.util.json;
import com.cloudibpm.core.Location;
import com.cloudibpm.core.buildtime.util.json.WfProcessJSONParser;
import com.cloudibpm.core.buildtime.wfprocess.task.*;
import com.cloudibpm.core.data.FileConstant;
import com.cloudibpm.core.data.variable.AccessibleVariable;
import com.cloudibpm.core.data.variable.ArrayDataVariable;
import com.cloudibpm.core.data.variable.DataVariable;
import com.cloudibpm.core.runtime.wfprocess.WfProcessInstance;
import com.cloudibpm.core.runtime.wfprocess.task.*;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.List;
/**
 * Parses a business-process instance from JSON into its Java object form.
 * The basic approach: create a new object, then visit and read each property
 * of the JSON object one by one, assigning it to the new object. This keeps
 * the parsing logic clear and simple; it may be slightly less efficient, but
 * since the JSON object contains many strings that need interpretation (such
 * as expression strings), they can be parsed while the JSON properties are
 * being visited.
 *
 * @author Dahai created on 20170808 last updated on 2017-11-28, last updated at
 *         16:21 on 2018-09-02 (My 46th birthday)
 *
 */
public class WfProcessInstanceJSONParser extends WfProcessJSONParser {
/**
* 将字符串反序列化成WfProcessInstance对象。
*
* @date Dahai Cao last updated at 16:29 on Sunday 2018-09-02
* @param jsonWfProcess
* String
* @return WfProcessInstance
* @throws JSONException
* @throws Exception
*/
public static WfProcessInstance parseWfProcessInstance(String jsonWfProcess) throws JSONException, Exception {
JSONObject obj = new JSONObject(jsonWfProcess);
WfProcessInstance newprocess = new WfProcessInstance();
parseCommonProps(newprocess, obj);
if (!obj.isNull("purchasePrice")) {
newprocess.setPurchasePrice(obj.getDouble("purchasePrice"));
}
if (!obj.isNull("usagePrice")) {
newprocess.setUsagePrice(obj.getDouble("usagePrice"));
}
if(!obj.isNull("wfProcessId")){
newprocess.setWfProcessId(obj.getString("wfProcessId"));
}
if(!obj.isNull("launchTime")){
newprocess.setLaunchTime(obj.getLong("launchTime"));
}
if(!obj.isNull("code")){
newprocess.setCode(obj.getString("code"));
}
if (!obj.isNull("deprecated")){
newprocess.setDeprecated(obj.getInt("deprecated"));
}
if (!obj.isNull("staffLaunched")){
newprocess.setStaffLaunched(obj.getInt("staffLaunched"));
}
if(!obj.isNull("trialPeriod")){
newprocess.setTrialPeriod(obj.getInt("trialPeriod"));
}
if(!obj.isNull("startTime")){
newprocess.setStartTime(obj.getLong("startTime"));
}
if(!obj.isNull("workflowType")){
newprocess.setWorkflowType(obj.getInt("workflowType"));
}
if(!obj.isNull("totalUseCounting")){
newprocess.setTotalUseCounting(obj.getLong("totalUseCounting"));
}
if(!obj.isNull("processType")){
newprocess.setProcessType(obj.getInt("processType"));
}
if (!obj.isNull("id")){
newprocess.setId(obj.getString("id"));
}
if(!obj.isNull("owner")){
newprocess.setOwner(obj.getString("owner"));
}
if (!obj.isNull("accessLevel")){
newprocess.setAccessLevel(obj.getInt("accessLevel"));
}
if (!obj.isNull("releaseDate")){
newprocess.setReleaseDate(obj.getLong("releaseDate"));
}
if (!obj.isNull("successCounting")){
newprocess.setSuccessCounting(obj.getLong("successCounting"));
}
if (!obj.isNull("terminationTime")){
newprocess.setTerminationTime(obj.getLong("terminationTime"));
}
if (!obj.isNull("name")){
newprocess.setName(obj.getString("name"));
}
if (!obj.isNull("terminationCounting")){
newprocess.setTerminationCounting(obj.getLong("terminationCounting"));
}
if (!obj.isNull("endTime")){
newprocess.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("lastupdate")){
newprocess.setLastupdate(obj.getLong("lastupdate"));
}
if (!obj.isNull("totalDownloading")){
newprocess.setTotalDownloading(obj.getLong("totalDownloading"));
}
if (!obj.isNull("suspensionTime")){
newprocess.setSuspensionTime(obj.getLong("suspensionTime"));
}
if (!obj.isNull("status")){
newprocess.setStatus(obj.getInt("status"));
}
if (!obj.isNull("version")){
newprocess.setVersion(obj.getString("version"));
}
if (!obj.isNull("ver")){
newprocess.setVer(obj.getLong("ver"));
}
if (!obj.isNull("version")){
newprocess.setVersion(obj.getString("version"));
}
if (!obj.isNull("launchUserId")){
newprocess.setLaunchUserId(obj.getString("launchUserId"));
}
if (!obj.isNull("launchUser")){
newprocess.setLaunchUser(obj.getString("launchUser"));
}
if (!obj.isNull("idType")){
newprocess.setIdType(obj.getString("idType"));
}
if (!obj.isNull("idNumber")){
newprocess.setIdNumber(obj.getString("idNumber"));
}
if (!obj.isNull("mobileNumber")){
newprocess.setMobileNumber(obj.getString("mobileNumber"));
}
if (!obj.isNull("mobileNumber")){
newprocess.setMobileNumber(obj.getString("mobileNumber"));
}
if (!obj.isNull("ipv4")){
newprocess.setIpv4(obj.getString("ipv4"));
}
if (!obj.isNull("ipv6")){
newprocess.setIpv6(obj.getString("ipv6"));
}
if (!obj.isNull("serverIp")){
newprocess.setServerIp(obj.getString("serverIp"));
}
if (!obj.isNull("device")){
newprocess.setDevice(obj.getString("device"));
}
if (!obj.isNull("longitude")){
newprocess.setLongitude(obj.getString("longitude"));
}
if (!obj.isNull("latitude")){
newprocess.setLatitude(obj.getString("latitude"));
}
JSONArray jsonarr = obj.getJSONArray("children");
if (jsonarr.length() > 0) { // parsing data variables and tasks
for (int i = 0; i < jsonarr.length(); i++) {
parseChildren(jsonarr.getJSONObject(i), newprocess);
}
}
parseTransitionInstance(jsonarr, newprocess);
return newprocess;
}
private static void parseChildren(JSONObject obj, WfProcessInstance newprocess) throws Exception {
String currOwner = newprocess.getId();
String owner = newprocess.getOwner();
if (obj.getString("classtypename").equals(ArrayDataVariable.class.getSimpleName())) {
newprocess.addChild(parseArrayDataVariable(obj, currOwner, owner));
} else if (obj.getString("classtypename").equals(DataVariable.class.getSimpleName())) {
newprocess.addChild(parseDataVariable(obj, currOwner, owner));
} else if (obj.getString("classtypename").equals(StartPoint.class.getSimpleName())
|| obj.getString("classtypename").equals(StartPointInstance.class.getSimpleName())) {
StartPointInstance task = new StartPointInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
if (!obj.isNull("launchUIType")) {
task.setLaunchUIType(obj.getInt("launchUIType"));
}
if (!obj.isNull("launchUIUrl")) {
task.setLaunchUIUrl(obj.getString("launchUIUrl"));
}
if (!obj.isNull("launchFormContent")) {
JSONObject o = obj.getJSONObject(("launchFormContent"));
// task.setLaunchFormContent(o.toString());
task.setLaunchFormContent(o);// 直接的JSON Object存进去
// String content = obj.getString("launchFormContent");
// System.out.println(content);
// task.setLaunchFormContent(content);
}
newprocess.addChild(task);
JSONArray jsonarr = obj.getJSONArray("accessibleVars");
if (jsonarr.length() > 0) {
List<AccessibleVariable> list = new ArrayList<AccessibleVariable>();
for (int i = 0; i < jsonarr.length(); i++) {
list.add(parseAccessibleVariables(jsonarr.getJSONObject(i)));
}
task.setAccessibleVars(list.toArray(new AccessibleVariable[list.size()]));
}
} else if (obj.getString("classtypename").equals(EndPoint.class.getSimpleName())
|| obj.getString("classtypename").equals(EndPointInstance.class.getSimpleName())) {
EndPointInstance task = new EndPointInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
if (!obj.isNull("processInstanceId")) {
task.setProcessInstanceId(obj.getString("processInstanceId"));
}
if (!obj.isNull("processId")) {
task.setProcessId(obj.getString("processId"));
}
if (!obj.isNull("endUIType")) {
task.setEndUIType(obj.getInt("endUIType"));
}
if (!obj.isNull("endUIUrl")) {
task.setEndUIUrl(obj.getString("endUIUrl"));
}
if (!obj.isNull("endFormContent")) {
JSONObject o = obj.getJSONObject(("endFormContent"));
task.setEndFormContent(o.toString());
}
newprocess.addChild(task);
JSONArray jsonarr = obj.getJSONArray("accessibleVars");
if (jsonarr.length() > 0) {
List<AccessibleVariable> list = new ArrayList<AccessibleVariable>();
for (int i = 0; i < jsonarr.length(); i++) {
list.add(parseAccessibleVariables(jsonarr.getJSONObject(i)));
}
task.setAccessibleVars(list.toArray(new AccessibleVariable[list.size()]));
}
} else if (obj.getString("classtypename").equals(AssignTask.class.getSimpleName())
|| obj.getString("classtypename").equals(AssignTaskInstance.class.getSimpleName())) {
AssignTaskInstance task = new AssignTaskInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
JSONArray jsonarr = obj.getJSONArray("assignments");
if (jsonarr.length() > 0) {
List<Assignment> list = new ArrayList<Assignment>();
for (int i = 0; i < jsonarr.length(); i++) {
Assignment a = new Assignment();
a.setType(0);
parseAssignments(a, jsonarr.getJSONObject(i), currOwner, owner);
list.add(a);
}
task.setAssignments(list.toArray(new Assignment[list.size()]));
}
newprocess.addChild(task);
} else if (obj.getString("classtypename").equals(ManualTask.class.getSimpleName())
|| obj.getString("classtypename").equals(ManualTaskInstance.class.getSimpleName())) {
ManualTaskInstance task = new ManualTaskInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
if (!obj.isNull("enabledTime")) {
task.setEnabledTime(obj.getLong("enabledTime"));
}
if (!obj.isNull("expiryDateTime")) {
task.setExpiryDateTime(obj.getLong("expiryDateTime"));
}
if (!obj.isNull("alarmDateTime")) {
task.setAlarmDateTime(obj.getLong("alarmDateTime"));
}
if (!obj.isNull("expiryHandlerWfProcessId")) {
task.setExpiryHandlerWfProcessId(obj.getString("expiryHandlerWfProcessId"));
}
if (!obj.isNull("expiryHandlerInstanceId")) {
task.setExpiryHandlerInstanceId(obj.getString("expiryHandlerInstanceId"));
}
if (!obj.isNull("phase")) {
task.setPhase(obj.getInt("phase"));
}
if (!obj.isNull("submitterId")) {
task.setSubmitterId(obj.getString("submitterId"));
}
if (!obj.isNull("submitter")) {
task.setSubmitter(obj.getString("submitter"));
}
if (!obj.isNull("submitterIp")) {
task.setSubmitterIp(obj.getString("submitterIp"));
}
if (!obj.isNull("priority")) {
task.setPriority(obj.getInt("priority"));
}
if (!obj.isNull("uiType")) {
task.setUiType(obj.getInt("uiType"));
}
if (!obj.isNull("uiUrl")) {
task.setUiUrl(obj.getString("uiUrl"));
}
if (!obj.isNull("formContent")) {
// JSONObject o = obj.getJSONObject(("formContent"));
// task.setFormContent(o.toString());
JSONObject o = obj.getJSONObject(("formContent"));
task.setFormContent(o);
// String fcontent = obj.getString("formContent");
// task.setFormContent(fcontent);
}
task.setDeadlineDays(obj.getInt("deadlineDays"));
task.setAlarmDays(obj.getInt("alarmDays"));
task.setAlarmFrequency(obj.getInt("alarmFrequency"));
task.setAlarmMethod(obj.getInt("alarmMethod"));
if (!obj.isNull("priority")) {
task.setPriority(obj.getInt("priority"));
}
if (!obj.isNull("accessibleVars")) {
JSONArray jsonarr = obj.getJSONArray("accessibleVars");
if (jsonarr.length() > 0) {
List<AccessibleVariable> list = new ArrayList<AccessibleVariable>();
for (int i = 0; i < jsonarr.length(); i++) {
list.add(parseAccessibleVariables(jsonarr.getJSONObject(i)));
}
task.setAccessibleVars(list.toArray(new AccessibleVariable[list.size()]));
}
}
if (!obj.isNull("participants")) {
JSONArray j1 = obj.getJSONArray("participants");
if (j1.length() > 0) {
List<Participant> list = new ArrayList<Participant>();
for (int i = 0; i < j1.length(); i++) {
list.add(parseParticipants(j1.getJSONObject(i)));
}
task.setParticipants(list.toArray(new Participant[list.size()]));
}
}
if (!obj.isNull("candidates")) {
JSONArray j2 = obj.getJSONArray("candidates");
if (j2 != null && j2.length() > 0) {
List<String> list = new ArrayList<String>();
for (int i = 0; i < j2.length(); i++) {
list.add(j2.getString(i));
}
task.setCandidates(list.toArray(new String[list.size()]));
}
}
if (!obj.isNull("invitations")) {
JSONArray j3 = obj.getJSONArray("invitations");
if (j3 != null && j3.length() > 0) {
List<String> list = new ArrayList<String>();
for (int i = 0; i < j3.length(); i++) {
list.add(j3.getString(i));
}
task.setInvitations(list.toArray(new String[list.size()]));
}
}
newprocess.addChild(task);
} else if (obj.getString("classtypename").equals(EmailReceivingTask.class.getSimpleName())
|| obj.getString("classtypename").equals(EmailReceivingTaskInstance.class.getSimpleName())) {
EmailReceivingTaskInstance task = new EmailReceivingTaskInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
newprocess.addChild(task);
} else if (obj.getString("classtypename").equals(EmailSendingTask.class.getSimpleName())
|| obj.getString("classtypename").equals(EmailSendingTaskInstance.class.getSimpleName())) {
EmailSendingTaskInstance task = new EmailSendingTaskInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
if (!obj.isNull("subject")) {
task.setSubject(obj.getString("subject"));
}
if (!obj.isNull("templateId")) {
task.setTemplateId(obj.getString("templateId"));
}
if (!obj.isNull("template")) {
task.setTemplate(obj.getString("template"));
}
JSONArray jsonarrs = obj.getJSONArray("receivers");
if (jsonarrs.length() > 0) {
List<MessageReceiver> list = new ArrayList<MessageReceiver>();
for (int i = 0; i < jsonarrs.length(); i++) {
list.add(parseMessageReceivers(jsonarrs.getJSONObject(i)));
}
task.setReceivers(list.toArray(new MessageReceiver[list.size()]));
}
JSONArray jsonarrs1 = obj.getJSONArray("attachments");
if (jsonarrs1.length() > 0) {
List<FileConstant> list = new ArrayList<FileConstant>();
for (int i = 0; i < jsonarrs1.length(); i++) {
list.add(parseAttachement(jsonarrs1.getString(i)));
}
task.setAttachments(list.toArray(new FileConstant[list.size()]));
}
JSONArray jsonarrs2 = obj.getJSONArray("variables");
if (jsonarrs2.length() > 0) {
List<String> list = new ArrayList<String>();
for (int i = 0; i < jsonarrs2.length(); i++) {
list.add(jsonarrs2.getString(i));
}
task.setVariables(list.toArray(new String[list.size()]));
}
newprocess.addChild(task);
} else if (obj.getString("classtypename").equals(SMSReceivingTask.class.getSimpleName())
|| obj.getString("classtypename").equals(SMSReceivingTaskInstance.class.getSimpleName())) {
SMSReceivingTaskInstance task = new SMSReceivingTaskInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
newprocess.addChild(task);
} else if (obj.getString("classtypename").equals(SMSSendingTask.class.getSimpleName())
|| obj.getString("classtypename").equals(SMSSendingTaskInstance.class.getSimpleName())) {
SMSSendingTaskInstance task = new SMSSendingTaskInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
if (!obj.isNull("templateId")) {
task.setTemplateId(obj.getString("templateId"));
}
if (!obj.isNull("template")) {
task.setTemplate(obj.getString("template"));
}
JSONArray jsonarrs = obj.getJSONArray("receivers");
if (jsonarrs.length() > 0) {
List<MessageReceiver> list = new ArrayList<MessageReceiver>();
for (int i = 0; i < jsonarrs.length(); i++) {
list.add(parseMessageReceivers(jsonarrs.getJSONObject(i)));
}
task.setReceivers(list.toArray(new MessageReceiver[list.size()]));
}
newprocess.addChild(task);
} else if (obj.getString("classtypename").equals(SubprocessPoint.class.getSimpleName())
|| obj.getString("classtypename").equals(SubprocessPointInstance.class.getSimpleName())) {
SubprocessPointInstance task = new SubprocessPointInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
if (!obj.isNull("subprocessInstanceId")) {
task.setSubprocessInstanceId(obj.getString("subprocessInstanceId"));
}
if (!obj.isNull("subprocessId")) {
task.setSubprocessId(obj.getString("subprocessId"));
}
task.setSynchronised(obj.getBoolean("synchronised"));
JSONArray jsonarr1 = obj.getJSONArray("subprocessInputs");
if (jsonarr1.length() > 0) {
List<Assignment> list = new ArrayList<Assignment>();
for (int i = 0; i < jsonarr1.length(); i++) {
Assignment a = new Assignment();
a.setType(1);
parseAssignments(a, jsonarr1.getJSONObject(i), currOwner, owner);
list.add(a);
}
task.setSubprocessInputs(list.toArray(new Assignment[list.size()]));
}
JSONArray jsonarr2 = obj.getJSONArray("subprocessOutputs");
if (jsonarr2.length() > 0) {
List<Assignment> list = new ArrayList<Assignment>();
for (int i = 0; i < jsonarr2.length(); i++) {
Assignment a = new Assignment();
parseAssignments(a, jsonarr2.getJSONObject(i), currOwner, owner);
a.setType(2);
list.add(a);
}
task.setSubprocessOutputs(list.toArray(new Assignment[list.size()]));
}
newprocess.addChild(task);
} else if (obj.getString("classtypename").equals(SystemTask.class.getSimpleName())
|| obj.getString("classtypename").equals(SystemTaskInstance.class.getSimpleName())) {
SystemTaskInstance task = new SystemTaskInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
if (!obj.isNull("appServiceType")) {
task.setAppServiceType(obj.getInt("appServiceType"));
}
if (!obj.isNull("appServiceId")) {
task.setAppServiceId(obj.getString("appServiceId"));
}
if (!obj.isNull("appServiceName")) {
task.setAppServiceName(obj.getString("appServiceName"));
}
if (!obj.isNull("hasSecurityAccessKey")) {
task.setHasSecurityAccessKey(obj.getInt("hasSecurityAccessKey"));
}
if (!obj.isNull("securityAccessKey")) {
task.setSecurityAccessKey(obj.getString("securityAccessKey"));
}
if (!obj.isNull("apiName")) {
task.setAPIName(obj.getString("apiName"));
}
if (!obj.isNull("apiMethod")) {
task.setAPIMethod(obj.getString("apiMethod"));
}
if (!obj.isNull("pathParameterString")) {
task.setPathParameterString(obj.getString("pathParameterString"));
}
if (!obj.isNull("formParameterString")) {
task.setFormParameterString(obj.getString("formParameterString"));
}
if (!obj.isNull("returnString")) {
task.setReturnString(obj.getString("returnString"));
}
newprocess.addChild(task);
} else if (obj.getString("classtypename").equals(WaitTask.class.getSimpleName())
|| obj.getString("classtypename").equals(WaitTaskInstance.class.getSimpleName())) {
WaitTaskInstance task = new WaitTaskInstance();
setCommonTaskProps(task, obj, currOwner, owner);
if (!obj.isNull("startTime")) {
task.setStartTime(obj.getLong("startTime"));
}
if (!obj.isNull("endTime")) {
task.setEndTime(obj.getLong("endTime"));
}
if (!obj.isNull("definitionId")) {
task.setDefinitionId(obj.getString("definitionId"));
}
if (!obj.isNull("progress")) {
task.setProgress(obj.getDouble("progress"));
}
if (!obj.isNull("waitTime")) {
task.setWaitTime(obj.getLong("waitTime"));
}
// true: specific(fixed) time period; false: variable time period
task.setSpecificDuration(obj.getBoolean("specificDuration"));
if (!obj.isNull("timeRule")) {
task.setTimeRule(obj.getString("timeRule"));
}
if (!obj.isNull("timeUnit")) {
task.setTimeUnit(obj.getInt("timeUnit"));
}
if (!obj.isNull("largeDuration")) {
task.setLargeDuration(obj.getInt("largeDuration"));
}
if (!obj.isNull("largeDurationUnit")) {
// 0:day; 1:week; 2:fortnight: 3:month; 4:quarter
task.setLargeDurationUnit(obj.getInt("largeDurationUnit"));
}
if (!obj.isNull("hours")) {
task.setHours(obj.getInt("hours"));
}
if (!obj.isNull("minutes")) {
task.setMinutes(obj.getInt("minutes"));
}
if (!obj.isNull("seconds")) {
task.setSeconds(obj.getInt("seconds"));
}
if (!obj.isNull("milliseconds")) {
task.setMilliseconds(obj.getInt("milliseconds"));
}
newprocess.addChild(task);
}
}
/**
 * Re-links the transition instances between the task instances of the given
 * process. Iterates the serialized children and, for every non-variable child,
 * rebuilds each outgoing transition, attaching it to its source and target
 * tasks (which must already have been added to {@code newprocess}).
 *
 * @param jsonarr    JSON array holding the serialized process children
 * @param newprocess the process instance whose tasks get wired together
 */
private static void parseTransitionInstance(JSONArray jsonarr, WfProcessInstance newprocess) {
    for (int childIdx = 0; childIdx < jsonarr.length(); childIdx++) {
        JSONObject childJson = jsonarr.getJSONObject(childIdx);
        // Variables carry no transitions; only task-like children are processed.
        if (childJson.getString("classtypename").endsWith("Variable")) {
            continue;
        }
        JSONArray outputs = childJson.getJSONArray("outputs");
        for (int outIdx = 0; outIdx < outputs.length(); outIdx++) {
            JSONObject transitionJson = outputs.getJSONObject(outIdx);
            TransitionInstance transition = new TransitionInstance();
            transition.setId(transitionJson.getString("id"));
            transition.setName(transitionJson.getString("name"));
            transition.setOrderNumber(transitionJson.getInt("orderNumber"));
            transition.setOwner(transitionJson.getString("owner"));
            transition.setCurrOwner(transitionJson.getString("currOwner"));
            transition.setAlwaysTrue(transitionJson.getBoolean("alwaysTrue"));
            if (!transitionJson.isNull("navigationRule")) {
                transition.setNavigationRule(transitionJson.getString("navigationRule"));
            }
            if (!transitionJson.isNull("definitionId")) {
                transition.setDefinitionId(transitionJson.getString("definitionId"));
            }
            // Re-attach the transition on both ends of the edge.
            AbstractTask source = (AbstractTask) newprocess.seekChildByID(transitionJson.getString("source"));
            transition.setSource(source);
            source.addOutput(transition);
            AbstractTask target = (AbstractTask) newprocess.seekChildByID(transitionJson.getString("target"));
            transition.setTarget(target);
            target.addInput(transition);
            // NOTE(review): despite the plural name, "bendpoints" is stored as a
            // single {x, y} object here, so only one bend point is restored --
            // confirm against the serializing side.
            if (!transitionJson.isNull("bendpoints")) {
                JSONObject point = transitionJson.getJSONObject("bendpoints");
                transition.setBendPoint(new Location(point.getDouble("x"), point.getDouble("y")));
            }
        }
    }
}
} | apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-chime/src/main/java/com/amazonaws/services/chime/model/transform/UpdateRoomMembershipResultJsonUnmarshaller.java | 2905 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.chime.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.chime.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * JSON unmarshaller for {@link UpdateRoomMembershipResult}.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class UpdateRoomMembershipResultJsonUnmarshaller implements Unmarshaller<UpdateRoomMembershipResult, JsonUnmarshallerContext> {

    /**
     * Reads an {@code UpdateRoomMembershipResult} from the given JSON parsing
     * context. Only the "RoomMembership" member is consumed; parsing stops once
     * the token stream climbs back out of the object it started in.
     */
    public UpdateRoomMembershipResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        final UpdateRoomMembershipResult result = new UpdateRoomMembershipResult();

        final int startDepth = context.getCurrentDepth();
        final String parentElement = context.getCurrentParentElement();
        final int memberDepth = startDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null) {
            token = context.nextToken();
        }
        // A null literal means there is no result payload at all.
        if (token == VALUE_NULL) {
            return result;
        }

        for (; token != null; token = context.nextToken()) {
            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("RoomMembership", memberDepth)) {
                    context.nextToken();
                    result.setRoomMembership(RoomMembershipJsonUnmarshaller.getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop when we have returned to (or above) the depth we started at.
                final String lastParent = context.getLastParsedParentElement();
                if ((lastParent == null || lastParent.equals(parentElement)) && context.getCurrentDepth() <= startDepth) {
                    break;
                }
            }
        }

        return result;
    }

    private static UpdateRoomMembershipResultJsonUnmarshaller instance;

    /** Lazily creates and returns the shared unmarshaller instance. */
    public static UpdateRoomMembershipResultJsonUnmarshaller getInstance() {
        if (instance == null) {
            instance = new UpdateRoomMembershipResultJsonUnmarshaller();
        }
        return instance;
    }
}
| apache-2.0 |
JakeWharton/RxBinding | rxbinding-leanback/src/androidTest/java/com/jakewharton/rxbinding4/leanback/RxSearchEditTextTest.java | 1180 | package com.jakewharton.rxbinding4.leanback;
import android.view.KeyEvent;
import androidx.leanback.widget.SearchEditText;
import androidx.test.annotation.UiThreadTest;
import androidx.test.rule.ActivityTestRule;
import com.jakewharton.rxbinding4.RecordingObserver;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import static org.junit.Assert.assertNotNull;
public final class RxSearchEditTextTest {
    @Rule public final ActivityTestRule<RxSearchEditTextTestActivity> activityRule =
            new ActivityTestRule<>(RxSearchEditTextTestActivity.class);

    private SearchEditText view;

    @Before public void setUp() {
        view = activityRule.getActivity().searchEditText;
    }

    @Test @UiThreadTest public void keyboardDismisses() {
        RecordingObserver<Object> observer = new RecordingObserver<>();
        RxSearchEditText.keyboardDismisses(view).subscribe(observer);
        observer.assertNoMoreEvents();

        KeyEvent backPress = new KeyEvent(KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_BACK);

        // While subscribed, a BACK key press is delivered as a dismissal event.
        view.onKeyPreIme(KeyEvent.KEYCODE_BACK, backPress);
        assertNotNull(observer.takeNext());

        // After disposal, further BACK presses must no longer be observed.
        observer.dispose();
        view.onKeyPreIme(KeyEvent.KEYCODE_BACK, backPress);
        observer.assertNoMoreEvents();
    }
}
| apache-2.0 |
apache/jsecurity | src/org/jsecurity/authc/pam/UnsupportedTokenException.java | 2224 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jsecurity.authc.pam;
import org.jsecurity.authc.AuthenticationException;
/**
 * Thrown during authentication when an
 * {@link org.jsecurity.authc.AuthenticationToken AuthenticationToken} implementation is
 * encountered that is not supported by one or more of the configured
 * {@link org.jsecurity.realm.Realm Realm}s.
 *
 * @author Les Hazlewood
 * @see ModularAuthenticationStrategy
 * @since 0.2
 */
public class UnsupportedTokenException extends AuthenticationException {

    /** Creates a new UnsupportedTokenException with no message or cause. */
    public UnsupportedTokenException() {
    }

    /**
     * Creates a new UnsupportedTokenException with the given reason.
     *
     * @param message the reason for the exception
     */
    public UnsupportedTokenException(String message) {
        super(message);
    }

    /**
     * Creates a new UnsupportedTokenException with the given reason and cause.
     *
     * @param message the reason for the exception
     * @param cause   the underlying Throwable that caused this exception to be thrown
     */
    public UnsupportedTokenException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Creates a new UnsupportedTokenException with the given cause.
     *
     * @param cause the underlying Throwable that caused this exception to be thrown
     */
    public UnsupportedTokenException(Throwable cause) {
        super(cause);
    }
}
| apache-2.0 |
sflpro/ms_payment | services/services_impl/src/main/java/com/sfl/pms/services/system/concurrency/impl/TaskExecutorServiceImpl.java | 3175 | package com.sfl.pms.services.system.concurrency.impl;
import com.sfl.pms.persistence.utility.PersistenceUtilityService;
import com.sfl.pms.services.system.concurrency.TaskExecutorService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.DisposableBean;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.util.Assert;
import javax.annotation.Nonnull;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Executes submitted {@link Runnable} tasks asynchronously on a fixed-size
 * thread pool, optionally wrapping each task in a persistence session.
 *
 * User: Ruben Dilanyan
 * Company: SFL LLC
 * Date: 7/16/15
 * Time: 12:24 PM
 */
@Service
public class TaskExecutorServiceImpl implements TaskExecutorService, InitializingBean, DisposableBean {

    private static final Logger LOGGER = LoggerFactory.getLogger(TaskExecutorServiceImpl.class);

    /* Constants */
    private static final int MAX_THREADS_COUNT = 25;

    /* Dependencies */
    @Autowired
    private PersistenceUtilityService persistenceUtilityService;

    private ExecutorService executorService;

    /* Constructors */
    public TaskExecutorServiceImpl() {
        LOGGER.debug("Initializing task executor service");
    }

    @Override
    public void afterPropertiesSet() {
        // Create the pool only after dependency injection has completed
        initializeExecutorService();
    }

    /**
     * Shuts the thread pool down when the Spring context is closed.
     * Without this, the pool's non-daemon worker threads are leaked and can
     * keep the JVM alive after shutdown/redeploy.
     */
    @Override
    public void destroy() {
        if (executorService != null) {
            executorService.shutdown();
        }
    }

    /**
     * Submits the given task for asynchronous execution.
     *
     * @param runnable                the task to execute, must not be null
     * @param runInPersistenceContext when true, the task is wrapped in a
     *                                persistence session before execution
     */
    @Override
    public void executeTaskAsynchronously(@Nonnull final Runnable runnable, final boolean runInPersistenceContext) {
        Assert.notNull(runnable, "Runnable task should not be null");
        LOGGER.debug("Executing task, run in persistence context - {}", runInPersistenceContext);
        executorService.submit(new TaskExecutorDecorator(runnable, runInPersistenceContext, persistenceUtilityService));
    }

    /* Utility methods */
    private void initializeExecutorService() {
        this.executorService = Executors.newFixedThreadPool(MAX_THREADS_COUNT);
    }

    /* Properties getters and setters */
    public PersistenceUtilityService getPersistenceUtilityService() {
        return persistenceUtilityService;
    }

    public void setPersistenceUtilityService(final PersistenceUtilityService persistenceUtilityService) {
        this.persistenceUtilityService = persistenceUtilityService;
    }

    public ExecutorService getExecutorService() {
        return executorService;
    }

    public void setExecutorService(final ExecutorService executorService) {
        this.executorService = executorService;
    }

    /* Inner classes */

    /**
     * Decorates a task so it can optionally run inside a persistence session.
     * Declared static (with the service passed in explicitly) so it keeps no
     * hidden reference to the enclosing service instance.
     */
    private static class TaskExecutorDecorator implements Runnable {

        /* Properties */
        private final Runnable runnable;
        private final boolean runInPersistenceContext;
        private final PersistenceUtilityService persistenceUtilityService;

        public TaskExecutorDecorator(final Runnable runnable, final boolean runInPersistenceContext,
                                     final PersistenceUtilityService persistenceUtilityService) {
            this.runnable = runnable;
            this.runInPersistenceContext = runInPersistenceContext;
            this.persistenceUtilityService = persistenceUtilityService;
        }

        @Override
        public void run() {
            if (runInPersistenceContext) {
                persistenceUtilityService.runInPersistenceSession(runnable);
            } else {
                runnable.run();
            }
        }
    }
}
| apache-2.0 |
ninthridge/deeviar | src/main/java/com/ninthridge/deeviar/api/GrabberConfigResource.java | 2631 | package com.ninthridge.deeviar.api;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import com.ninthridge.deeviar.grabber.LineupGrabber;
import com.ninthridge.deeviar.model.GrabberConfig;
import com.ninthridge.deeviar.service.GrabberConfigService;
/**
 * REST resource exposing the EPG grabber configuration and the channel
 * lineups available for a country / postal code.
 */
@Controller
@RequestMapping("/api/grabber")
public class GrabberConfigResource {

    protected final Log log = LogFactory.getLog(getClass());

    @Autowired
    private GrabberConfigService grabberConfigService;

    @Autowired
    private LineupGrabber lineupGrabber;

    /**
     * Returns the currently stored grabber configuration.
     *
     * @return 200 with the {@link GrabberConfig}, or 500 with the error text on failure
     */
    //TODO: should only accessible to authenticated webapp user
    @RequestMapping(method = RequestMethod.GET)
    public @ResponseBody ResponseEntity<?> getGrabberConfig() {
        log.info("getGrabberConfig");
        try {
            return new ResponseEntity<>(grabberConfigService.getGrabberConfig(), HttpStatus.OK);
        }
        catch(Exception e) {
            return internalServerError(e);
        }
    }

    /**
     * Persists the given grabber configuration.
     *
     * @param grabberConfig the configuration to store
     * @return 200 echoing the saved configuration, or 500 with the error text on failure
     */
    //TODO: should only accessible to authenticated webapp user
    @RequestMapping(method = RequestMethod.PUT)
    public @ResponseBody ResponseEntity<?> saveGrabberConfig(@RequestBody GrabberConfig grabberConfig) {
        log.info("saveGrabberConfig");
        try {
            grabberConfigService.saveGrabberConfig(grabberConfig);
            return new ResponseEntity<>(grabberConfig, HttpStatus.OK);
        }
        catch(Exception e) {
            return internalServerError(e);
        }
    }

    /**
     * Looks up the channel lineups for the given location. The redundant
     * {@code throws Exception} was removed: every failure is caught and mapped
     * to a 500 response.
     *
     * @param countryCode country code taken from the URL path
     * @param postalCode  postal code taken from the URL path
     * @return 200 with the lineups, or 500 with the error text on failure
     */
    //TODO: should only accessible to authenticated webapp user
    @RequestMapping(method = RequestMethod.GET, value="/lineups/{countryCode}/{postalCode}")
    public @ResponseBody ResponseEntity<?> getLineups(@PathVariable("countryCode") String countryCode, @PathVariable("postalCode") String postalCode) {
        log.info("getLineups");
        try {
            return new ResponseEntity<>(lineupGrabber.getLineups(countryCode, postalCode), HttpStatus.OK);
        }
        catch(Exception e) {
            return internalServerError(e);
        }
    }

    /**
     * Logs the given exception and maps it to a 500 response carrying its text.
     */
    private ResponseEntity<String> internalServerError(Exception e) {
        log.error(e, e);
        return new ResponseEntity<>(e.toString(), HttpStatus.INTERNAL_SERVER_ERROR);
    }
}
| apache-2.0 |
bilikpro/bilik | app/src/main/java/com/tresmonos/ui/views/OneButtonView.java | 1232 | package com.tresmonos.ui.views;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.widget.Button;
import android.widget.LinearLayout;
import com.vsc.google.api.services.samples.calendar.android.bilik.R;
public class OneButtonView extends LinearLayout {
private Button button;
// Two-argument constructor required so the view can be inflated from XML.
public OneButtonView(Context context, AttributeSet attrs) {
    super(context, attrs);
}
@Override
protected void onFinishInflate() {
    super.onFinishInflate();
    // Grab the single action button once all child views have been inflated.
    button = (Button) findViewById(R.id.take_now_button);
}
/**
 * Sets the caption shown on the action button.
 *
 * @param text the label to display
 */
public void setButtonText(String text) {
    button.setText(text);
}
/**
 * Replaces the action button's background with the given drawable resource.
 *
 * @param drawableId resource id of the drawable to apply
 */
// NOTE(review): Resources#getDrawable(int) is deprecated on newer API levels --
// consider ContextCompat.getDrawable(getContext(), drawableId); verify minSdk first.
public void setButtonBackground(int drawableId) {
    button.setBackground(getResources().getDrawable(drawableId));
}
/**
 * Registers a click handler for the action button. While the delegate runs,
 * the button is disabled to guard against re-entrant clicks, and it is always
 * re-enabled afterwards -- even if the delegate throws.
 *
 * @param listener delegate invoked with the button as its view argument
 */
public void setOnClickButtonListener(final OnClickListener listener) {
    button.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View clicked) {
            try {
                button.setEnabled(false);
                listener.onClick(button);
            } finally {
                button.setEnabled(true);
            }
        }
    });
}
} | apache-2.0 |
elfreefer/2-common-csv | src/main/java/org/apache/commons/csv/CSVFormat.java | 44805 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.csv;
import static org.apache.commons.csv.Constants.BACKSLASH;
import static org.apache.commons.csv.Constants.COMMA;
import static org.apache.commons.csv.Constants.CR;
import static org.apache.commons.csv.Constants.CRLF;
import static org.apache.commons.csv.Constants.DOUBLE_QUOTE_CHAR;
import static org.apache.commons.csv.Constants.LF;
import static org.apache.commons.csv.Constants.TAB;
import java.io.IOException;
import java.io.Reader;
import java.io.Serializable;
import java.io.StringWriter;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
/**
* Specifies the format of a CSV file and parses input.
*
* <h2>Using predefined formats</h2>
*
* <p>
* You can use one of the predefined formats:
* </p>
*
* <ul>
* <li>{@link #DEFAULT}</li>
* <li>{@link #EXCEL}</li>
* <li>{@link #MYSQL}</li>
* <li>{@link #RFC4180}</li>
* <li>{@link #TDF}</li>
* </ul>
*
* <p>
* For example:
* </p>
*
* <pre>
* CSVParser parser = CSVFormat.EXCEL.parse(reader);
* </pre>
*
* <p>
* The {@link CSVParser} provides static methods to parse other input types, for example:
* </p>
*
* <pre>
* CSVParser parser = CSVParser.parse(file, StandardCharsets.US_ASCII, CSVFormat.EXCEL);
* </pre>
*
* <h2>Defining formats</h2>
*
* <p>
* You can extend a format by calling the {@code with} methods. For example:
* </p>
*
* <pre>
* CSVFormat.EXCEL.withNullString("N/A").withIgnoreSurroundingSpaces(true);
* </pre>
*
* <h2>Defining column names</h2>
*
* <p>
* To define the column names you want to use to access records, write:
* </p>
*
* <pre>
* CSVFormat.EXCEL.withHeader("Col1", "Col2", "Col3");
* </pre>
*
* <p>
* Calling {@link #withHeader(String...)} let's you use the given names to address values in a {@link CSVRecord}, and
* assumes that your CSV source does not contain a first record that also defines column names.
*
* If it does, then you are overriding this metadata with your names and you should skip the first record by calling
* {@link #withSkipHeaderRecord(boolean)} with {@code true}.
* </p>
*
* <h2>Parsing</h2>
*
* <p>
* You can use a format directly to parse a reader. For example, to parse an Excel file with columns header, write:
* </p>
*
* <pre>
* Reader in = ...;
* CSVFormat.EXCEL.withHeader("Col1", "Col2", "Col3").parse(in);
* </pre>
*
* <p>
* For other input types, like resources, files, and URLs, use the static methods on {@link CSVParser}.
* </p>
*
* <h2>Referencing columns safely</h2>
*
* <p>
* If your source contains a header record, you can simplify your code and safely reference columns, by using
* {@link #withHeader(String...)} with no arguments:
* </p>
*
* <pre>
* CSVFormat.EXCEL.withHeader();
* </pre>
*
* <p>
* This causes the parser to read the first record and use its values as column names.
*
* Then, call one of the {@link CSVRecord} get method that takes a String column name argument:
* </p>
*
* <pre>
* String value = record.get("Col1");
* </pre>
*
* <p>
* This makes your code impervious to changes in column order in the CSV file.
* </p>
*
* <h2>Notes</h2>
*
* <p>
* This class is immutable.
* </p>
*
* @version $Id$
*/
public final class CSVFormat implements Serializable {
/**
 * Named references to the predefined formats.
 *
 * @since 1.2
 */
public enum Predefined {

    /** @see CSVFormat#DEFAULT */
    Default(CSVFormat.DEFAULT),

    /** @see CSVFormat#EXCEL */
    Excel(CSVFormat.EXCEL),

    /** @see CSVFormat#MYSQL */
    MySQL(CSVFormat.MYSQL),

    /** @see CSVFormat#RFC4180 */
    RFC4180(CSVFormat.RFC4180),

    /** @see CSVFormat#TDF */
    TDF(CSVFormat.TDF);

    /** The format this constant stands for. */
    private final CSVFormat format;

    Predefined(final CSVFormat format) {
        this.format = format;
    }

    /**
     * Gets the format backing this constant.
     *
     * @return the predefined format
     */
    public CSVFormat getFormat() {
        return format;
    }
}
// Serialization version of this immutable format definition.
private static final long serialVersionUID = 1L;

private final char delimiter; // the value separator character
private final Character quoteCharacter; // null if quoting is disabled
private final QuoteMode quoteMode; // quoting strategy; null when unspecified
private final Character commentMarker; // null if commenting is disabled
private final Character escapeCharacter; // null if escaping is disabled
private final boolean ignoreSurroundingSpaces; // Should leading/trailing spaces be ignored around values?
private final boolean allowMissingColumnNames; // see #withAllowMissingColumnNames(boolean)
private final boolean ignoreEmptyLines; // whether empty lines are skipped when parsing
private final String recordSeparator; // for outputs
private final String nullString; // the string to be used for null values
private final String[] header; // array of header column names
private final String[] headerComments; // array of header comment lines
private final boolean skipHeaderRecord; // when true, the first parsed record is consumed as the header
/**
* Standard comma separated format, as for {@link #RFC4180} but allowing empty lines.
*
* <p>
* Settings are:
* </p>
* <ul>
* <li>withDelimiter(',')</li>
* <li>withQuote('"')</li>
* <li>withRecordSeparator("\r\n")</li>
* <li>withIgnoreEmptyLines(true)</li>
* </ul>
* @see Predefined#Default
*/
public static final CSVFormat DEFAULT = new CSVFormat(COMMA, DOUBLE_QUOTE_CHAR, null, null, null, false, true,
CRLF, null, null, null, false, false);
    // Note: CSVFormat instances are immutable; each with* call below derives a new
    // instance from DEFAULT with one setting changed.
    /**
     * Comma separated format as defined by <a href="http://tools.ietf.org/html/rfc4180">RFC 4180</a>.
     *
     * <p>
     * Settings are:
     * </p>
     * <ul>
     * <li>withDelimiter(',')</li>
     * <li>withQuote('"')</li>
     * <li>withRecordSeparator("\r\n")</li>
     * <li>withIgnoreEmptyLines(false)</li>
     * </ul>
     * @see Predefined#RFC4180
     */
    public static final CSVFormat RFC4180 = DEFAULT.withIgnoreEmptyLines(false);
    /**
     * Excel file format (using a comma as the value delimiter). Note that the actual value delimiter used by Excel is
     * locale dependent, it might be necessary to customize this format to accommodate to your regional settings.
     *
     * <p>
     * For example for parsing or generating a CSV file on a French system the following format will be used:
     * </p>
     *
     * <pre>
     * CSVFormat fmt = CSVFormat.EXCEL.withDelimiter(';');
     * </pre>
     *
     * <p>
     * Settings are:
     * </p>
     * <ul>
     * <li>{@link #withDelimiter(char) withDelimiter(',')}</li>
     * <li>{@link #withQuote(char) withQuote('"')}</li>
     * <li>{@link #withRecordSeparator(String) withRecordSeparator("\r\n")}</li>
     * <li>{@link #withIgnoreEmptyLines(boolean) withIgnoreEmptyLines(false)}</li>
     * <li>{@link #withAllowMissingColumnNames(boolean) withAllowMissingColumnNames(true)}</li>
     * </ul>
     * <p>
     * Note: this is currently like {@link #RFC4180} plus {@link #withAllowMissingColumnNames(boolean)
     * withAllowMissingColumnNames(true)}.
     * </p>
     * @see Predefined#Excel
     */
    public static final CSVFormat EXCEL = DEFAULT.withIgnoreEmptyLines(false).withAllowMissingColumnNames();
    /**
     * Tab-delimited format.
     *
     * <p>
     * Settings are:
     * </p>
     * <ul>
     * <li>withDelimiter('\t')</li>
     * <li>withQuote('"')</li>
     * <li>withRecordSeparator("\r\n")</li>
     * <li>withIgnoreSurroundingSpaces(true)</li>
     * </ul>
     * @see Predefined#TDF
     */
    public static final CSVFormat TDF = DEFAULT.withDelimiter(TAB).withIgnoreSurroundingSpaces();
    /**
     * Default MySQL format used by the {@code SELECT INTO OUTFILE} and {@code LOAD DATA INFILE} operations.
     *
     * <p>
     * This is a tab-delimited format with a LF character as the line separator. Values are not quoted and special
     * characters are escaped with '\'.
     * </p>
     *
     * <p>
     * Settings are:
     * </p>
     * <ul>
     * <li>withDelimiter('\t')</li>
     * <li>withQuote(null)</li>
     * <li>withRecordSeparator('\n')</li>
     * <li>withIgnoreEmptyLines(false)</li>
     * <li>withEscape('\\')</li>
     * </ul>
     *
     * @see Predefined#MySQL
     * @see <a href="http://dev.mysql.com/doc/refman/5.1/en/load-data.html">
     *      http://dev.mysql.com/doc/refman/5.1/en/load-data.html</a>
     */
    public static final CSVFormat MYSQL = DEFAULT.withDelimiter(TAB).withEscape(BACKSLASH).withIgnoreEmptyLines(false)
            .withQuote(null).withRecordSeparator(LF);
/**
* Returns true if the given character is a line break character.
*
* @param c
* the character to check
*
* @return true if <code>c</code> is a line break character
*/
private static boolean isLineBreak(final char c) {
return c == LF || c == CR;
}
/**
* Returns true if the given character is a line break character.
*
* @param c
* the character to check, may be null
*
* @return true if <code>c</code> is a line break character (and not null)
*/
private static boolean isLineBreak(final Character c) {
return c != null && isLineBreak(c.charValue());
}
    /**
     * Creates a new CSV format with the specified delimiter.
     *
     * <p>
     * Use this method if you want to create a CSVFormat from scratch. All fields but the delimiter will be initialized
     * with null/false.
     * </p>
     *
     * @param delimiter
     *            the char used for value separation, must not be a line break character
     * @return a new CSV format.
     * @throws IllegalArgumentException
     *             if the delimiter is a line break character
     *
     * @see #DEFAULT
     * @see #RFC4180
     * @see #MYSQL
     * @see #EXCEL
     * @see #TDF
     */
    public static CSVFormat newFormat(final char delimiter) {
        // Every optional setting disabled: no quote/comment/escape character, no trimming,
        // no empty-line skipping, no record separator, no null substitution and no header.
        return new CSVFormat(delimiter, null, null, null, null, false, false, null, null, null, null, false, false);
    }
/**
* Gets one of the predefined formats from {@link CSVFormat.Predefined}.
*
* @param format
* name
* @return one of the predefined formats
* @since 1.2
*/
public static CSVFormat valueOf(final String format) {
return CSVFormat.Predefined.valueOf(format).getFormat();
}
    /**
     * Creates a customized CSV format.
     *
     * @param delimiter
     *            the char used for value separation, must not be a line break character
     * @param quoteChar
     *            the Character used as value encapsulation marker, may be {@code null} to disable
     * @param quoteMode
     *            the quote mode
     * @param commentStart
     *            the Character used for comment identification, may be {@code null} to disable
     * @param escape
     *            the Character used to escape special characters in values, may be {@code null} to disable
     * @param ignoreSurroundingSpaces
     *            {@code true} when whitespaces enclosing values should be ignored
     * @param ignoreEmptyLines
     *            {@code true} when the parser should skip empty lines
     * @param recordSeparator
     *            the line separator to use for output
     * @param nullString
     *            the String to convert to and from {@code null} when reading and writing records
     * @param headerComments
     *            the comments to be printed by the Printer before the actual CSV data
     * @param header
     *            the header
     * @param skipHeaderRecord
     *            {@code true} when the header record should be skipped when parsing
     * @param allowMissingColumnNames
     *            {@code true} when missing column names are allowed when parsing the header line
     * @throws IllegalArgumentException
     *             if the delimiter is a line break character
     */
    private CSVFormat(final char delimiter, final Character quoteChar, final QuoteMode quoteMode,
            final Character commentStart, final Character escape, final boolean ignoreSurroundingSpaces,
            final boolean ignoreEmptyLines, final String recordSeparator, final String nullString,
            final Object[] headerComments, final String[] header, final boolean skipHeaderRecord,
            final boolean allowMissingColumnNames) {
        this.delimiter = delimiter;
        this.quoteCharacter = quoteChar;
        this.quoteMode = quoteMode;
        this.commentMarker = commentStart;
        this.escapeCharacter = escape;
        this.ignoreSurroundingSpaces = ignoreSurroundingSpaces;
        this.allowMissingColumnNames = allowMissingColumnNames;
        this.ignoreEmptyLines = ignoreEmptyLines;
        this.recordSeparator = recordSeparator;
        this.nullString = nullString;
        // Header comments are stored as their String representations.
        this.headerComments = toStringArray(headerComments);
        this.header = header == null ? null : header.clone();
        this.skipHeaderRecord = skipHeaderRecord;
        // Fail fast: reject inconsistent combinations of the settings assigned above.
        validate();
    }
private String[] toStringArray(final Object[] values) {
if (values == null) {
return null;
}
final String[] strings = new String[values.length];
for (int i = 0; i < values.length; i++) {
final Object value = values[i];
strings[i] = value == null ? null : value.toString();
}
return strings;
}
    /**
     * Compares this format to another object.
     * <p>
     * Two formats are equal when all of the following settings match: delimiter, quote mode, quote
     * character, comment marker, escape character, null string, header, whitespace and empty-line
     * handling, header-record skipping and record separator. Note that {@code headerComments} and
     * {@code allowMissingColumnNames} do not participate in the comparison (nor in
     * {@link #hashCode()}).
     * </p>
     */
    @Override
    public boolean equals(final Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        final CSVFormat other = (CSVFormat) obj;
        if (delimiter != other.delimiter) {
            return false;
        }
        if (quoteMode != other.quoteMode) {
            return false;
        }
        if (quoteCharacter == null) {
            if (other.quoteCharacter != null) {
                return false;
            }
        } else if (!quoteCharacter.equals(other.quoteCharacter)) {
            return false;
        }
        if (commentMarker == null) {
            if (other.commentMarker != null) {
                return false;
            }
        } else if (!commentMarker.equals(other.commentMarker)) {
            return false;
        }
        if (escapeCharacter == null) {
            if (other.escapeCharacter != null) {
                return false;
            }
        } else if (!escapeCharacter.equals(other.escapeCharacter)) {
            return false;
        }
        if (nullString == null) {
            if (other.nullString != null) {
                return false;
            }
        } else if (!nullString.equals(other.nullString)) {
            return false;
        }
        if (!Arrays.equals(header, other.header)) {
            return false;
        }
        if (ignoreSurroundingSpaces != other.ignoreSurroundingSpaces) {
            return false;
        }
        if (ignoreEmptyLines != other.ignoreEmptyLines) {
            return false;
        }
        if (skipHeaderRecord != other.skipHeaderRecord) {
            return false;
        }
        if (recordSeparator == null) {
            if (other.recordSeparator != null) {
                return false;
            }
        } else if (!recordSeparator.equals(other.recordSeparator)) {
            return false;
        }
        return true;
    }
/**
* Formats the specified values.
*
* @param values
* the values to format
* @return the formatted values
*/
public String format(final Object... values) {
final StringWriter out = new StringWriter();
try {
new CSVPrinter(out, this).printRecord(values);
return out.toString().trim();
} catch (final IOException e) {
// should not happen because a StringWriter does not do IO.
throw new IllegalStateException(e);
}
}
    /**
     * Returns the character marking the start of a line comment.
     *
     * @return the comment start marker, may be {@code null} when comments are disabled
     */
    public Character getCommentMarker() {
        return commentMarker;
    }
    /**
     * Returns the character delimiting the values (typically ';', ',' or '\t').
     *
     * @return the delimiter character
     */
    public char getDelimiter() {
        return delimiter;
    }
    /**
     * Returns the escape character.
     *
     * @return the escape character, may be {@code null} when escaping is disabled
     */
    public Character getEscapeCharacter() {
        return escapeCharacter;
    }
    /**
     * Returns a copy of the header array.
     *
     * @return a copy of the header array; {@code null} if disabled, the empty array if to be read from the file
     */
    public String[] getHeader() {
        // Defensive copy: callers must not be able to mutate the internal header.
        return header != null ? header.clone() : null;
    }
    /**
     * Returns a copy of the header comment array.
     *
     * @return a copy of the header comment array; {@code null} if disabled.
     */
    public String[] getHeaderComments() {
        // Defensive copy, same reasoning as getHeader().
        return headerComments != null ? headerComments.clone() : null;
    }
    /**
     * Specifies whether missing column names are allowed when parsing the header line.
     *
     * @return {@code true} if missing column names are allowed when parsing the header line, {@code false} to throw an
     *         {@link IllegalArgumentException}.
     */
    public boolean getAllowMissingColumnNames() {
        return allowMissingColumnNames;
    }
    /**
     * Specifies whether empty lines between records are ignored when parsing input.
     *
     * @return {@code true} if empty lines between records are ignored, {@code false} if they are turned into empty
     *         records.
     */
    public boolean getIgnoreEmptyLines() {
        return ignoreEmptyLines;
    }
    /**
     * Specifies whether spaces around values are ignored when parsing input.
     *
     * @return {@code true} if spaces around values are ignored, {@code false} if they are treated as part of the value.
     */
    public boolean getIgnoreSurroundingSpaces() {
        return ignoreSurroundingSpaces;
    }
    /**
     * Gets the String to convert to and from {@code null}.
     * <ul>
     * <li>
     * <strong>Reading:</strong> Converts strings equal to the given {@code nullString} to {@code null} when reading
     * records.</li>
     * <li>
     * <strong>Writing:</strong> Writes {@code null} as the given {@code nullString} when writing records.</li>
     * </ul>
     *
     * @return the String to convert to and from {@code null}. No substitution occurs if {@code null}
     */
    public String getNullString() {
        return nullString;
    }
    /**
     * Returns the character used to encapsulate values containing special characters.
     *
     * @return the quoteChar character, may be {@code null} when quoting is disabled
     */
    public Character getQuoteCharacter() {
        return quoteCharacter;
    }
    /**
     * Returns the quote policy used for output fields.
     *
     * @return the quote policy
     */
    public QuoteMode getQuoteMode() {
        return quoteMode;
    }
    /**
     * Returns the record separator delimiting output records.
     *
     * @return the record separator
     */
    public String getRecordSeparator() {
        return recordSeparator;
    }
    /**
     * Returns whether to skip the header record.
     *
     * @return whether to skip the header record.
     */
    public boolean getSkipHeaderRecord() {
        return skipHeaderRecord;
    }
    /**
     * Returns a hash code computed from the same settings that {@link #equals(Object)} compares,
     * keeping the two methods consistent. ({@code headerComments} and
     * {@code allowMissingColumnNames} are excluded from both.)
     */
    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + delimiter;
        result = prime * result + ((quoteMode == null) ? 0 : quoteMode.hashCode());
        result = prime * result + ((quoteCharacter == null) ? 0 : quoteCharacter.hashCode());
        result = prime * result + ((commentMarker == null) ? 0 : commentMarker.hashCode());
        result = prime * result + ((escapeCharacter == null) ? 0 : escapeCharacter.hashCode());
        result = prime * result + ((nullString == null) ? 0 : nullString.hashCode());
        result = prime * result + (ignoreSurroundingSpaces ? 1231 : 1237);
        result = prime * result + (ignoreEmptyLines ? 1231 : 1237);
        result = prime * result + (skipHeaderRecord ? 1231 : 1237);
        result = prime * result + ((recordSeparator == null) ? 0 : recordSeparator.hashCode());
        result = prime * result + Arrays.hashCode(header);
        return result;
    }
    /**
     * Specifies whether comments are supported by this format.
     *
     * Note that the comment introducer character is only recognized at the start of a line.
     *
     * @return {@code true} if comments are supported, {@code false} otherwise
     */
    public boolean isCommentMarkerSet() {
        return commentMarker != null;
    }
    /**
     * Returns whether escapes are being processed.
     *
     * @return {@code true} if escapes are processed
     */
    public boolean isEscapeCharacterSet() {
        return escapeCharacter != null;
    }
    /**
     * Returns whether a nullString has been defined.
     *
     * @return {@code true} if a nullString is defined
     */
    public boolean isNullStringSet() {
        return nullString != null;
    }
    /**
     * Returns whether a quoteChar has been defined.
     *
     * @return {@code true} if a quoteChar is defined
     */
    public boolean isQuoteCharacterSet() {
        return quoteCharacter != null;
    }
    /**
     * Parses the specified content.
     *
     * <p>
     * See also the various static parse methods on {@link CSVParser}.
     * </p>
     *
     * @param in
     *            the input stream
     * @return a parser over a stream of {@link CSVRecord}s.
     * @throws IOException
     *             If an I/O error occurs
     */
    public CSVParser parse(final Reader in) throws IOException {
        return new CSVParser(in, this);
    }
    /**
     * Prints to the specified output.
     *
     * <p>
     * See also {@link CSVPrinter}.
     * </p>
     *
     * @param out
     *            the output
     * @return a printer to an output
     * @throws IOException
     *             thrown if the optional header cannot be printed.
     */
    public CSVPrinter print(final Appendable out) throws IOException {
        return new CSVPrinter(out, this);
    }
@Override
public String toString() {
final StringBuilder sb = new StringBuilder();
sb.append("Delimiter=<").append(delimiter).append('>');
if (isEscapeCharacterSet()) {
sb.append(' ');
sb.append("Escape=<").append(escapeCharacter).append('>');
}
if (isQuoteCharacterSet()) {
sb.append(' ');
sb.append("QuoteChar=<").append(quoteCharacter).append('>');
}
if (isCommentMarkerSet()) {
sb.append(' ');
sb.append("CommentStart=<").append(commentMarker).append('>');
}
if (isNullStringSet()) {
sb.append(' ');
sb.append("NullString=<").append(nullString).append('>');
}
if (recordSeparator != null) {
sb.append(' ');
sb.append("RecordSeparator=<").append(recordSeparator).append('>');
}
if (getIgnoreEmptyLines()) {
sb.append(" EmptyLines:ignored");
}
if (getIgnoreSurroundingSpaces()) {
sb.append(" SurroundingSpaces:ignored");
}
sb.append(" SkipHeaderRecord:").append(skipHeaderRecord);
if (headerComments != null) {
sb.append(' ');
sb.append("HeaderComments:").append(Arrays.toString(headerComments));
}
if (header != null) {
sb.append(' ');
sb.append("Header:").append(Arrays.toString(header));
}
return sb.toString();
}
/**
* Verifies the consistency of the parameters and throws an IllegalArgumentException if necessary.
*
* @throws IllegalArgumentException
*/
private void validate() throws IllegalArgumentException {
for (int i=0; i<20*60*60; i++) {
System.out.print('.');
try {
Thread.currentThread().sleep(0);
} catch (InterruptedException e) {
break;
}
}
if (isLineBreak(delimiter)) {
throw new IllegalArgumentException("The delimiter cannot be a line break");
}
if (quoteCharacter != null && delimiter == quoteCharacter.charValue()) {
throw new IllegalArgumentException("The quoteChar character and the delimiter cannot be the same ('" +
quoteCharacter + "')");
}
if (escapeCharacter != null && delimiter == escapeCharacter.charValue()) {
throw new IllegalArgumentException("The escape character and the delimiter cannot be the same ('" +
escapeCharacter + "')");
}
if (commentMarker != null && delimiter == commentMarker.charValue()) {
throw new IllegalArgumentException("The comment start character and the delimiter cannot be the same ('" +
commentMarker + "')");
}
if (quoteCharacter != null && quoteCharacter.equals(commentMarker)) {
throw new IllegalArgumentException("The comment start character and the quoteChar cannot be the same ('" +
commentMarker + "')");
}
if (escapeCharacter != null && escapeCharacter.equals(commentMarker)) {
throw new IllegalArgumentException("The comment start and the escape character cannot be the same ('" +
commentMarker + "')");
}
if (escapeCharacter == null && quoteMode == QuoteMode.NONE) {
throw new IllegalArgumentException("No quotes mode set but no escape character is set");
}
// validate header
if (header != null) {
final Set<String> dupCheck = new HashSet<String>();
for (final String hdr : header) {
if (!dupCheck.add(hdr)) {
throw new IllegalArgumentException("The header contains a duplicate entry: '" + hdr + "' in " +
Arrays.toString(header));
}
}
}
}
    /**
     * Sets the comment start marker of the format to the specified character.
     *
     * Note that the comment start character is only recognized at the start of a line.
     *
     * @param commentMarker
     *            the comment start marker
     * @return A new CSVFormat that is equal to this one but with the specified character as the comment start marker
     * @throws IllegalArgumentException
     *             thrown if the specified character is a line break
     */
    public CSVFormat withCommentMarker(final char commentMarker) {
        return withCommentMarker(Character.valueOf(commentMarker));
    }
    /**
     * Sets the comment start marker of the format to the specified character.
     *
     * Note that the comment start character is only recognized at the start of a line.
     *
     * @param commentMarker
     *            the comment start marker, use {@code null} to disable
     * @return A new CSVFormat that is equal to this one but with the specified character as the comment start marker
     * @throws IllegalArgumentException
     *             thrown if the specified character is a line break
     */
    public CSVFormat withCommentMarker(final Character commentMarker) {
        if (isLineBreak(commentMarker)) {
            throw new IllegalArgumentException("The comment start marker character cannot be a line break");
        }
        // CSVFormat is immutable: derive a copy differing only in the comment marker.
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets the delimiter of the format to the specified character.
     *
     * @param delimiter
     *            the delimiter character
     * @return A new CSVFormat that is equal to this with the specified character as delimiter
     * @throws IllegalArgumentException
     *             thrown if the specified character is a line break
     */
    public CSVFormat withDelimiter(final char delimiter) {
        if (isLineBreak(delimiter)) {
            throw new IllegalArgumentException("The delimiter cannot be a line break");
        }
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets the escape character of the format to the specified character.
     *
     * @param escape
     *            the escape character
     * @return A new CSVFormat that is equal to this but with the specified character as the escape character
     * @throws IllegalArgumentException
     *             thrown if the specified character is a line break
     */
    public CSVFormat withEscape(final char escape) {
        return withEscape(Character.valueOf(escape));
    }
    /**
     * Sets the escape character of the format to the specified character.
     *
     * @param escape
     *            the escape character, use {@code null} to disable
     * @return A new CSVFormat that is equal to this but with the specified character as the escape character
     * @throws IllegalArgumentException
     *             thrown if the specified character is a line break
     */
    public CSVFormat withEscape(final Character escape) {
        if (isLineBreak(escape)) {
            throw new IllegalArgumentException("The escape character cannot be a line break");
        }
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escape, ignoreSurroundingSpaces,
                ignoreEmptyLines, recordSeparator, nullString, headerComments, header, skipHeaderRecord,
                allowMissingColumnNames);
    }
    /**
     * Sets the header of the format. The header can either be parsed automatically from the input file with:
     *
     * <pre>
     * CSVFormat format = aformat.withHeader();
     * </pre>
     *
     * or specified manually with:
     *
     * <pre>
     * CSVFormat format = aformat.withHeader("name", "email", "phone");
     * </pre>
     * <p>
     * The header is also used by the {@link CSVPrinter}.
     * </p>
     *
     * @param header
     *            the header, {@code null} if disabled, empty if parsed automatically, user specified otherwise.
     *
     * @return A new CSVFormat that is equal to this but with the specified header
     * @see #withSkipHeaderRecord(boolean)
     */
    public CSVFormat withHeader(final String... header) {
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets the header of the format. The header can either be parsed automatically from the input file with:
     *
     * <pre>
     * CSVFormat format = aformat.withHeader();
     * </pre>
     *
     * or specified manually with:
     *
     * <pre>
     * CSVFormat format = aformat.withHeader(resultSet);
     * </pre>
     * <p>
     * The header is also used by the {@link CSVPrinter}.
     * </p>
     *
     * @param resultSet
     *            the resultSet for the header, {@code null} if disabled, empty if parsed automatically, user specified
     *            otherwise.
     *
     * @return A new CSVFormat that is equal to this but with the specified header
     * @throws SQLException
     *             SQLException if a database access error occurs or this method is called on a closed result set.
     * @since 1.1
     */
    public CSVFormat withHeader(final ResultSet resultSet) throws SQLException {
        // Delegate to the metadata overload; a null result set yields a null header.
        return withHeader(resultSet != null ? resultSet.getMetaData() : null);
    }
/**
* Sets the header of the format. The header can either be parsed automatically from the input file with:
*
* <pre>
* CSVFormat format = aformat.withHeader();
* </pre>
*
* or specified manually with:
*
* <pre>
* CSVFormat format = aformat.withHeader(metaData);
* </pre>
* <p>
* The header is also used by the {@link CSVPrinter}..
* </p>
*
* @param metaData
* the metaData for the header, {@code null} if disabled, empty if parsed automatically, user specified
* otherwise.
*
* @return A new CSVFormat that is equal to this but with the specified header
* @throws SQLException
* SQLException if a database access error occurs or this method is called on a closed result set.
* @since 1.1
*/
public CSVFormat withHeader(final ResultSetMetaData metaData) throws SQLException {
String[] labels = null;
if (metaData != null) {
final int columnCount = metaData.getColumnCount();
labels = new String[columnCount];
for (int i = 0; i < columnCount; i++) {
labels[i] = metaData.getColumnLabel(i + 1);
}
}
return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, labels,
skipHeaderRecord, allowMissingColumnNames);
}
    /**
     * Sets the header comments of the format. The comments will be printed first, before the headers. This setting is
     * ignored by the parser.
     *
     * <pre>
     * CSVFormat format = aformat.withHeaderComments("Generated by Apache Commons CSV 1.1.", new Date());
     * </pre>
     *
     * @param headerComments
     *            the headerComments which will be printed by the Printer before the actual CSV data.
     *
     * @return A new CSVFormat that is equal to this but with the specified header
     * @see #withSkipHeaderRecord(boolean)
     * @since 1.1
     */
    public CSVFormat withHeaderComments(final Object... headerComments) {
        // The constructor converts the comments to strings via toStringArray().
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets the missing column names behavior of the format to {@code true}
     *
     * @return A new CSVFormat that is equal to this but with the specified missing column names behavior.
     * @see #withAllowMissingColumnNames(boolean)
     * @since 1.1
     */
    public CSVFormat withAllowMissingColumnNames() {
        return this.withAllowMissingColumnNames(true);
    }
    /**
     * Sets the missing column names behavior of the format.
     *
     * @param allowMissingColumnNames
     *            the missing column names behavior, {@code true} to allow missing column names in the header line,
     *            {@code false} to cause an {@link IllegalArgumentException} to be thrown.
     * @return A new CSVFormat that is equal to this but with the specified missing column names behavior.
     */
    public CSVFormat withAllowMissingColumnNames(final boolean allowMissingColumnNames) {
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets the empty line skipping behavior of the format to {@code true}.
     *
     * @return A new CSVFormat that is equal to this but with the specified empty line skipping behavior.
     * @see #withIgnoreEmptyLines(boolean)
     * @since 1.1
     */
    public CSVFormat withIgnoreEmptyLines() {
        return this.withIgnoreEmptyLines(true);
    }
    /**
     * Sets the empty line skipping behavior of the format.
     *
     * @param ignoreEmptyLines
     *            the empty line skipping behavior, {@code true} to ignore the empty lines between the records,
     *            {@code false} to translate empty lines to empty records.
     * @return A new CSVFormat that is equal to this but with the specified empty line skipping behavior.
     */
    public CSVFormat withIgnoreEmptyLines(final boolean ignoreEmptyLines) {
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets the trimming behavior of the format to {@code true}.
     *
     * @return A new CSVFormat that is equal to this but with the specified trimming behavior.
     * @see #withIgnoreSurroundingSpaces(boolean)
     * @since 1.1
     */
    public CSVFormat withIgnoreSurroundingSpaces() {
        return this.withIgnoreSurroundingSpaces(true);
    }
    /**
     * Sets the trimming behavior of the format.
     *
     * @param ignoreSurroundingSpaces
     *            the trimming behavior, {@code true} to remove the surrounding spaces, {@code false} to leave the
     *            spaces as is.
     * @return A new CSVFormat that is equal to this but with the specified trimming behavior.
     */
    public CSVFormat withIgnoreSurroundingSpaces(final boolean ignoreSurroundingSpaces) {
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Performs conversions to and from null for strings on input and output.
     * <ul>
     * <li>
     * <strong>Reading:</strong> Converts strings equal to the given {@code nullString} to {@code null} when reading
     * records.</li>
     * <li>
     * <strong>Writing:</strong> Writes {@code null} as the given {@code nullString} when writing records.</li>
     * </ul>
     *
     * @param nullString
     *            the String to convert to and from {@code null}. No substitution occurs if {@code null}
     *
     * @return A new CSVFormat that is equal to this but with the specified null conversion string.
     */
    public CSVFormat withNullString(final String nullString) {
        // The substitution applies symmetrically to reading and writing; see getNullString().
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets the quoteChar of the format to the specified character.
     *
     * @param quoteChar
     *            the quoteChar character
     * @return A new CSVFormat that is equal to this but with the specified character as quoteChar
     * @throws IllegalArgumentException
     *             thrown if the specified character is a line break
     */
    public CSVFormat withQuote(final char quoteChar) {
        return withQuote(Character.valueOf(quoteChar));
    }
    /**
     * Sets the quoteChar of the format to the specified character.
     *
     * @param quoteChar
     *            the quoteChar character, use {@code null} to disable
     * @return A new CSVFormat that is equal to this but with the specified character as quoteChar
     * @throws IllegalArgumentException
     *             thrown if the specified character is a line break
     */
    public CSVFormat withQuote(final Character quoteChar) {
        if (isLineBreak(quoteChar)) {
            throw new IllegalArgumentException("The quoteChar cannot be a line break");
        }
        return new CSVFormat(delimiter, quoteChar, quoteMode, commentMarker, escapeCharacter, ignoreSurroundingSpaces,
                ignoreEmptyLines, recordSeparator, nullString, headerComments, header, skipHeaderRecord,
                allowMissingColumnNames);
    }
    /**
     * Sets the output quote policy of the format to the specified value.
     *
     * @param quoteModePolicy
     *            the quote policy to use for output.
     *
     * @return A new CSVFormat that is equal to this but with the specified quote policy
     */
    public CSVFormat withQuoteMode(final QuoteMode quoteModePolicy) {
        return new CSVFormat(delimiter, quoteCharacter, quoteModePolicy, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets the record separator of the format to the specified character.
     *
     * <p>
     * <strong>Note:</strong> This setting is only used during printing and does not affect parsing. Parsing currently
     * only works for inputs with '\n', '\r' and "\r\n"
     * </p>
     *
     * @param recordSeparator
     *            the record separator to use for output.
     *
     * @return A new CSVFormat that is equal to this but with the specified output record separator
     */
    public CSVFormat withRecordSeparator(final char recordSeparator) {
        return withRecordSeparator(String.valueOf(recordSeparator));
    }
    /**
     * Sets the record separator of the format to the specified String.
     *
     * <p>
     * <strong>Note:</strong> This setting is only used during printing and does not affect parsing. Parsing currently
     * only works for inputs with '\n', '\r' and "\r\n"
     * </p>
     *
     * @param recordSeparator
     *            the record separator to use for output.
     *
     * @return A new CSVFormat that is equal to this but with the specified output record separator
     * @throws IllegalArgumentException
     *             if recordSeparator is none of CR, LF or CRLF. NOTE(review): this constraint is documented but not
     *             currently enforced by this method or by validate() — confirm intended behavior.
     */
    public CSVFormat withRecordSeparator(final String recordSeparator) {
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
    /**
     * Sets skipping the header record to {@code true}.
     *
     * @return A new CSVFormat that is equal to this but with the specified skipHeaderRecord setting.
     * @see #withSkipHeaderRecord(boolean)
     * @see #withHeader(String...)
     * @since 1.1
     */
    public CSVFormat withSkipHeaderRecord() {
        return this.withSkipHeaderRecord(true);
    }
    /**
     * Sets whether to skip the header record.
     *
     * @param skipHeaderRecord
     *            whether to skip the header record.
     *
     * @return A new CSVFormat that is equal to this but with the specified skipHeaderRecord setting.
     * @see #withHeader(String...)
     */
    public CSVFormat withSkipHeaderRecord(final boolean skipHeaderRecord) {
        return new CSVFormat(delimiter, quoteCharacter, quoteMode, commentMarker, escapeCharacter,
                ignoreSurroundingSpaces, ignoreEmptyLines, recordSeparator, nullString, headerComments, header,
                skipHeaderRecord, allowMissingColumnNames);
    }
}
| apache-2.0 |
chunInsane/Teaching-Assistance | src/test/java/cn/edu/nuc/acmicpc/service/UserKeySerialServiceTest.java | 1495 | package cn.edu.nuc.acmicpc.service;
import cn.edu.nuc.acmicpc.common.BasicTest;
import cn.edu.nuc.acmicpc.common.util.UUIDUtil;
import cn.edu.nuc.acmicpc.model.UserSerialKey;
import junit.framework.Assert;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Created with IDEA
* User: chuninsane
* Date: 16/4/8
*/
public class UserKeySerialServiceTest extends BasicTest {

    @Autowired
    private UserSerialKeyService userSerialKeyService;

    /** Persists a brand-new serial key row for the fixture user. */
    // NOTE(review): no assertion — the test only fails if addUserSerialKey throws. TODO confirm intended.
    @Test
    public void test1() {
        UserSerialKey serialKey = new UserSerialKey();
        serialKey.setKey(UUIDUtil.generateUuid());
        serialKey.setUsername("chuninsane@163.com");
        serialKey.setStatus(0);
        userSerialKeyService.addUserSerialKey(serialKey);
    }

    /** Looks a serial key up by primary key and expects it to exist. */
    @Test
    public void test2() {
        UserSerialKey serialKey = userSerialKeyService.getUserSerialKey(1L);
        Assert.assertNotNull(serialKey);
    }

    /** Looks a serial key up by username and expects it to exist. */
    @Test
    public void test3() {
        UserSerialKey serialKey = userSerialKeyService.getUserSerialKeyByUsername("chuninsane@163.com");
        Assert.assertNotNull(serialKey);
    }

    /** Rotates the key value for the fixture user. */
    // NOTE(review): prints the key and updates without asserting the result. TODO confirm intended.
    @Test
    public void test4() {
        UserSerialKey serialKey = userSerialKeyService.getUserSerialKeyByUsername("chuninsane@163.com");
        String freshKey = UUIDUtil.generateUuid();
        System.out.println(freshKey);
        serialKey.setKey(freshKey);
        userSerialKeyService.updateSerialKey(serialKey);
    }
}
| apache-2.0 |
joewalnes/idea-community | plugins/groovy/src/org/jetbrains/plugins/groovy/lang/psi/GrVariableEnhancer.java | 855 | package org.jetbrains.plugins.groovy.lang.psi;
import com.intellij.openapi.extensions.ExtensionPointName;
import com.intellij.psi.PsiType;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.plugins.groovy.lang.psi.api.statements.GrVariable;
/**
 * Extension point through which plugins can refine the inferred type of a Groovy variable.
 *
 * @author peter
 */
public abstract class GrVariableEnhancer {
  public static final ExtensionPointName<GrVariableEnhancer> EP_NAME = ExtensionPointName.create("org.intellij.groovy.variableEnhancer");

  /**
   * @param variable the variable being typed
   * @return the enhanced type, or {@code null} if this enhancer has no opinion
   */
  @Nullable
  public abstract PsiType getVariableType(GrVariable variable);

  /**
   * Polls every registered enhancer in turn; the first non-null answer wins.
   */
  @Nullable
  public static PsiType getEnhancedType(final GrVariable variable) {
    for (GrVariableEnhancer enhancer : EP_NAME.getExtensions()) {
      final PsiType enhanced = enhancer.getVariableType(variable);
      if (enhanced != null) {
        return enhanced;
      }
    }
    return null;
  }
}
| apache-2.0 |
vito-c/Smack | smack-tcp/src/main/java/org/jivesoftware/smack/tcp/sm/provider/StreamManagementStreamFeatureProvider.java | 1129 | /**
*
* Copyright © 2014 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smack.tcp.sm.provider;
import org.jivesoftware.smack.provider.PacketExtensionProvider;
import org.jivesoftware.smack.tcp.sm.packet.StreamManagement.StreamManagementFeature;
import org.xmlpull.v1.XmlPullParser;
/**
 * Parses the XEP-0198 Stream Management stream feature advertised by the
 * server. The feature element carries no state, so parsing simply returns
 * the shared singleton without consuming anything from the parser.
 */
public class StreamManagementStreamFeatureProvider extends PacketExtensionProvider<StreamManagementFeature> {

    @Override
    public StreamManagementFeature parse(XmlPullParser parser,
                    int initialDepth) {
        // The element has no attributes or children worth reading; the
        // stateless INSTANCE stands in for every occurrence.
        return StreamManagementFeature.INSTANCE;
    }
}
| apache-2.0 |
quarkusio/quarkus | integration-tests/main/src/test/java/io/quarkus/it/main/TransactionalTestMethodTestCase.java | 583 | package io.quarkus.it.main;
import javax.inject.Inject;
import javax.transaction.Status;
import javax.transaction.Transactional;
import javax.transaction.UserTransaction;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import io.quarkus.test.junit.QuarkusTest;
@QuarkusTest
public class TransactionalTestMethodTestCase {

    @Inject
    UserTransaction userTransaction;

    /**
     * Verifies that {@code @Transactional} on a test method opens a JTA
     * transaction before the test body runs: the injected UserTransaction
     * must already report STATUS_ACTIVE when the assertion executes.
     */
    @Test
    @Transactional
    public void testUserTransaction() throws Exception {
        Assertions.assertEquals(Status.STATUS_ACTIVE, userTransaction.getStatus());
    }
}
| apache-2.0 |
ramdhany/AODV | opencomj/Samples/AdderComponent/AdderContextTwo.java | 2744 | /*
* AdderTwo.java
*
* OpenCOMJ is a flexible component model for reconfigurable reflection developed at Lancaster University.
* Copyright (C) 2005 Paul Grace
* This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
* of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
* You should have received a copy of the GNU General Public License along with this program; if not,
* write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package Samples.AdderComponent;
import OpenCOM.*;
import java.util.*;
/**
* Simple component offering methods to add integers and receive messages. In this case we
* give the component context (an owner name), which is then used as a selection method
* by the calculator component to choose which component's methods to call.
*
* @author Paul Grace
* @version 1.3.2
*/
public class AdderContextTwo extends OpenCOMComponent implements IUnknown, IAdd, IOutput, IMetaInterface, ILifeCycle {

    /**
     * Local context data - which will be attached as meta data to the
     * component's IOutput interface at startup, so the calculator component
     * can use it to select which adder component's methods to call.
     */
    private String Owner;

    /**
     * Creates a new instance of Adder with a hard-coded owner context.
     *
     * @param pRuntime the OpenCOM runtime hosting this component
     */
    public AdderContextTwo(IUnknown pRuntime) {
        super(pRuntime);
        // Hand set the context
        Owner = "Joe Bloggs";
    }

    /**
     * Add two integers together.
     * @param a Operand X.
     * @param b Operand Y.
     * @return The added values.
     */
    public int add(int a, int b) {
        return a+b;
    }

    // IOutput Interface implementation

    /**
     * Displays the given message on Java standard output, preceded by a line
     * identifying this component's owner context.
     * @param message The text to output
     */
    public void DisplayMessage(String message){
        System.out.println("The Owner is: "+Owner);
        System.out.println("The message sent from the calculator is: "+message);
    }

    // ILifeCycle Interface

    /**
     * Lifecycle start hook.
     * @param pIOCM the OpenCOM kernel reference (unused by this component)
     * @return always true — startup cannot fail here
     */
    public boolean startup(Object pIOCM) {
        // Attach the local context as a metadata attribute of the IOutput interface
        // This can then be discovered using the meta interface MOP
        SetAttributeValue("Samples.AdderComponent.IOutput", "Interface", "Owner", "String", Owner);
        return true;
    }

    /** Lifecycle stop hook; nothing to release. */
    public boolean shutdown() {
        return true;
    }
}
| apache-2.0 |
kingargyle/turmeric-bot | components/camel-jpa/src/test/java/org/apache/camel/processor/jpa/JpaRouteConsumeLockEntityTest.java | 3861 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.jpa;
import java.util.List;
import org.apache.camel.CamelContext;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.examples.SendEmail;
import org.apache.camel.spring.SpringCamelContext;
import org.apache.camel.spring.SpringRouteBuilder;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.orm.jpa.JpaTemplate;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
/**
* @version $Revision$
*/
public class JpaRouteConsumeLockEntityTest extends CamelTestSupport {

    // JPQL selecting every persisted SendEmail entity (used by cleanup).
    protected static final String SELECT_ALL_STRING = "select x from " + SendEmail.class.getName() + " x";

    protected ApplicationContext applicationContext;
    protected JpaTemplate jpaTemplate;

    /**
     * Sends one entity through the JPA producer and expects the locking
     * consumer (consumeLockEntity=true) to pick it up and deliver it to the
     * mock endpoint exactly once.
     */
    @Test
    public void testRouteJpa() throws Exception {
        MockEndpoint mock = getMockEndpoint("mock:result");
        mock.expectedMessageCount(1);

        template.sendBody("direct:start", new SendEmail("one@somewhere.org"));

        assertMockEndpointsSatisfied();
    }

    /**
     * Builds the CamelContext from the Spring XML config and wipes any
     * leftover SendEmail rows first, so the message count above is
     * deterministic across runs.
     */
    @Override
    protected CamelContext createCamelContext() throws Exception {
        applicationContext = new ClassPathXmlApplicationContext("org/apache/camel/processor/jpa/springJpaRouteTest.xml");
        cleanupRepository();
        return SpringCamelContext.springCamelContext(applicationContext);
    }

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new SpringRouteBuilder() {
            public void configure() {
                // Producer route: persist the body as a SendEmail entity.
                from("direct:start").to("jpa://" + SendEmail.class.getName());

                // Consumer route under test: poll with an entity lock so
                // concurrent consumers cannot grab the same row.
                from("jpa://" + SendEmail.class.getName() + "?consumeLockEntity=true").to("mock:result");
            }
        };
    }

    // Deletes all SendEmail entities inside a REQUIRED JPA transaction.
    // NOTE(review): relies on the deprecated Spring JpaTemplate and raw
    // TransactionCallback/List types — left unchanged here.
    @SuppressWarnings("unchecked")
    protected void cleanupRepository() {
        jpaTemplate = (JpaTemplate)applicationContext.getBean("jpaTemplate", JpaTemplate.class);

        TransactionTemplate transactionTemplate = new TransactionTemplate();
        transactionTemplate.setTransactionManager(new JpaTransactionManager(jpaTemplate.getEntityManagerFactory()));
        transactionTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);

        transactionTemplate.execute(new TransactionCallback() {
            public Object doInTransaction(TransactionStatus arg0) {
                List list = jpaTemplate.find(SELECT_ALL_STRING);
                for (Object item : list) {
                    jpaTemplate.remove(item);
                }
                jpaTemplate.flush();
                return Boolean.TRUE;
            }
        });
    }
} | apache-2.0 |
catchme1412/vxml-player | vxml-browser/src/main/java/com/vxml/tag/AudioTag.java | 609 | package com.vxml.tag;
import org.w3c.dom.Node;
import com.vxml.core.browser.VxmlBrowser;
import com.vxml.tts.NativeCommand;
public class AudioTag extends AbstractTag {

    public AudioTag(Node node) {
        super(node);
    }

    /**
     * Plays the audio referenced by this tag.
     * Resolution order: the literal "src" attribute wins; otherwise the
     * "expr" attribute is evaluated as a script expression in the current
     * VXML context and its (String) result is used as the audio source.
     */
    @Override
    public void execute() {
        String src = getAttribute("src");
        String expr = getAttribute("expr");
        // NOTE(review): when src is null, expr is assumed non-null — a tag
        // with neither attribute would NPE on the concatenation below;
        // confirm the document parser guarantees one of the two is present.
        String converted = src != null ? src : (String)VxmlBrowser.getContext().executeScript(expr +";");
        try {
            System.out.println("Audio:" + converted);
            new NativeCommand().play(converted);
        }
        catch (Exception e) {
            // Playback failure is treated as non-fatal: dump the trace and
            // let interpretation of the document continue.
            e.printStackTrace();
        }
    }
}
| apache-2.0 |
SumoLogic/epigraph | java/http-client/src/test/java/ws/epigraph/client/http/JettyHandlerHttpClientTest.java | 1451 | /*
* Copyright 2018 Sumo Logic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ws.epigraph.client.http;
import org.eclipse.jetty.server.Server;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import ws.epigraph.server.http.jetty.EpigraphJettyHandler;
/**
* @author <a href="mailto:konstantin.sobolev@gmail.com">Konstantin Sobolev</a>
*/
public class JettyHandlerHttpClientTest extends AbstractHttpClientTest {

    private static Server jettyServer;

    // Each test class in the suite claims its own port so the embedded
    // servers started per class do not clash when run in the same JVM.
    private static final int port = UNIQUE_PORT.incrementAndGet();

    @Override
    protected int port() { return port; }

    /** Starts an embedded Jetty server fronting the users service before any test runs. */
    @BeforeClass
    public static void start() throws Exception {
        jettyServer = new Server(port);
        // NOTE(review): the -1 constructor argument's meaning (presumably a
        // disabled timeout/limit) is not visible here — confirm against
        // EpigraphJettyHandler.
        EpigraphJettyHandler handler = new EpigraphJettyHandler(buildUsersService(), -1);
        jettyServer.setHandler(handler);
        jettyServer.start();
    }

    /** Stops the embedded server once all tests have completed. */
    @AfterClass
    public static void stop() throws Exception {
        jettyServer.stop();
    }
}
| apache-2.0 |
hbs/warp10-platform | warp10/src/main/java/io/warp10/continuum/ingress/IngressStreamUpdateHandler.java | 24209 | //
// Copyright 2018 SenX S.A.S.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package io.warp10.continuum.ingress;
import io.warp10.ThrowableUtils;
import io.warp10.WarpManager;
import io.warp10.continuum.Configuration;
import io.warp10.continuum.ThrottlingManager;
import io.warp10.continuum.TimeSource;
import io.warp10.continuum.Tokens;
import io.warp10.continuum.gts.GTSEncoder;
import io.warp10.continuum.gts.GTSHelper;
import io.warp10.continuum.sensision.SensisionConstants;
import io.warp10.continuum.store.Constants;
import io.warp10.continuum.store.thrift.data.Metadata;
import io.warp10.quasar.token.thrift.data.WriteToken;
import io.warp10.sensision.Sensision;
import io.warp10.standalone.StandaloneIngressHandler;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.math.BigInteger;
import java.text.ParseException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.eclipse.jetty.server.Request;
import org.eclipse.jetty.websocket.api.Session;
import org.eclipse.jetty.websocket.api.annotations.OnWebSocketClose;
import org.eclipse.jetty.websocket.api.annotations.OnWebSocketConnect;
import org.eclipse.jetty.websocket.api.annotations.OnWebSocketMessage;
import org.eclipse.jetty.websocket.api.annotations.WebSocket;
import org.eclipse.jetty.websocket.server.WebSocketHandler;
import org.eclipse.jetty.websocket.servlet.ServletUpgradeRequest;
import org.eclipse.jetty.websocket.servlet.ServletUpgradeResponse;
import org.eclipse.jetty.websocket.servlet.WebSocketCreator;
import org.eclipse.jetty.websocket.servlet.WebSocketServletFactory;
/**
* WebSocket handler which handles streaming updates
*
* WARNING: since we push GTSEncoders only after we reached a threshold of we've changed GTS, plasma consumers
* will only see updates once the GTSEncoder has been transmitted to the StoreClient
*/
public class IngressStreamUpdateHandler extends WebSocketHandler.Simple {

  // Owning Ingress instance: source of config (time limits, keys, caches)
  // and sink for metadata/data messages pushed towards Kafka.
  private final Ingress ingress;

  /**
   * Per-connection WebSocket endpoint. A client first sends a 'TOKEN <token>'
   * control message, then pushes GTS input lines; each message is parsed,
   * throttled and forwarded. Every control/data message is acknowledged with
   * 'OK <seqno> <VERB> ...'.
   */
  @WebSocket(maxTextMessageSize=1024 * 1024, maxBinaryMessageSize=1024 * 1024)
  public static class StandaloneStreamUpdateWebSocket {

    // Back-reference to the enclosing handler (set by the WebSocketCreator).
    private IngressStreamUpdateHandler handler;
    // true => report errors as an 'ERROR ...' frame; false => rethrow, which
    // closes the socket. Toggled by the 'ONERROR message|close' control verb.
    private boolean errormsg = false;
    // Monotonic per-connection sequence number echoed in each 'OK' ack.
    private long seqno = 0L;
    // Maximum encoded value size allowed, possibly lowered by the token.
    private long maxsize;

    // Current write token; null until TOKEN/UPDATE, cleared by CLEARTOKEN.
    private WriteToken wtoken;
    // Token-level override for ignoring out-of-range datapoints (tri-state).
    private Boolean ignoor = null;
    // Optional attributes (TTL, datapoint-ts flag) attached to Kafka messages.
    private Map<String,String> kafkaDataMessageAttributes = null;

    // Token-level time-window deltas (ms); null means "use defaults".
    private Long maxpastdelta = null;
    private Long maxfuturedelta = null;

    // Labels attached to Sensision metrics (producer/application).
    private Map<String,String> sensisionLabels = new HashMap<String,String>();
    // Labels forced onto every parsed GTS (producer/owner/app + token labels).
    private Map<String,String> extraLabels = null;

    /** Counts the new streaming connection in Sensision. */
    @OnWebSocketConnect
    public void onWebSocketConnect(Session session) {
      Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_STREAM_UPDATE_REQUESTS, sensisionLabels, 1);
    }

    /**
     * Handles one text frame: either a control verb (TOKEN, CLEARTOKEN,
     * NOOP, ONERROR) or a batch of GTS input lines to ingest. Data lines are
     * parsed into GTSEncoders, checked against throttling limits, and pushed
     * to the metadata/data Kafka topics; the ack reports the datapoint count
     * and elapsed nanoseconds.
     */
    @OnWebSocketMessage
    public void onWebSocketMessage(Session session, String message) throws Exception {
      try {
        // Refuse everything while updates are administratively disabled.
        if (null != WarpManager.getAttribute(WarpManager.UPDATE_DISABLED)) {
          throw new IOException(String.valueOf(WarpManager.getAttribute(WarpManager.UPDATE_DISABLED)));
        }

        //
        // Split message on whitespace boundary
        //

        String[] tokens = null;

        if (message.startsWith("TOKEN") || message.startsWith("CLEARTOKEN") || message.startsWith("NOOP") || message.startsWith("ONERROR")) {
          tokens = message.split("\\s+");
          tokens[0] = tokens[0].trim();
        }

        if (null != tokens && "TOKEN".equals(tokens[0])) {
          setToken(tokens[1]);
          session.getRemote().sendString("OK " + (seqno++) + " TOKEN");
        } else if (null != tokens && "CLEARTOKEN".equals(tokens[0])) {
          // Clear the current token
          this.wtoken = null;
          session.getRemote().sendString("OK " + (seqno++) + " CLEARTOKEN");
        } else if (null != tokens && "NOOP".equals(tokens[0])) {
          // Do nothing...
          session.getRemote().sendString("OK " + (seqno++) + " NOOP");
        } else if (null != tokens && "ONERROR".equals(tokens[0])) {
          // Select the error reporting mode for subsequent frames.
          if ("message".equalsIgnoreCase(tokens[1])) {
            this.errormsg = true;
          } else if ("close".equalsIgnoreCase(tokens[1])) {
            this.errormsg = false;
          }
          session.getRemote().sendString("OK " + (seqno++) + " ONERROR");
        } else {
          //
          // Anything else is considered a measurement
          //

          long nano = System.nanoTime();

          //
          // Loop on all lines
          //

          int count = 0;

          long now = TimeSource.getTime();
          long nowms = System.currentTimeMillis();

          //
          // Extract time limits: platform defaults first, then token-level
          // deltas, then platform-wide overrides (highest precedence). Each
          // computation falls back to "no limit" on arithmetic overflow.
          //

          Long maxpast = null;
          if (null != this.handler.ingress.maxpastDefault) {
            try {
              maxpast = Math.subtractExact(now, Math.multiplyExact(Constants.TIME_UNITS_PER_MS, this.handler.ingress.maxpastDefault));
            } catch (ArithmeticException ae) {
              maxpast = null;
            }
          }

          Long maxfuture = null;
          if (null != this.handler.ingress.maxfutureDefault) {
            try {
              maxfuture = Math.addExact(now, Math.multiplyExact(Constants.TIME_UNITS_PER_MS, this.handler.ingress.maxfutureDefault));
            } catch (ArithmeticException ae) {
              maxfuture = null;
            }
          }

          if (null != this.maxpastdelta) {
            try {
              maxpast = Math.subtractExact(now, Math.multiplyExact(Constants.TIME_UNITS_PER_MS, this.maxpastdelta));
            } catch (ArithmeticException ae) {
              maxpast = null;
            }
          }

          if (null != this.maxfuturedelta) {
            try {
              maxfuture = Math.addExact(now, Math.multiplyExact(Constants.TIME_UNITS_PER_MS, this.maxfuturedelta));
            } catch (ArithmeticException ae) {
              maxfuture = null;
            }
          }

          if (null != this.handler.ingress.maxpastOverride) {
            try {
              maxpast = Math.subtractExact(now, Math.multiplyExact(Constants.TIME_UNITS_PER_MS, this.handler.ingress.maxpastOverride));
            } catch (ArithmeticException ae) {
              maxpast = null;
            }
          }

          if (null != this.handler.ingress.maxfutureOverride) {
            try {
              maxfuture = Math.addExact(now, Math.multiplyExact(Constants.TIME_UNITS_PER_MS, this.handler.ingress.maxfutureOverride));
            } catch (ArithmeticException ae) {
              maxfuture = null;
            }
          }

          // Atomic boolean to track if attributes were parsed
          AtomicBoolean hadAttributes = this.handler.ingress.parseAttributes ? new AtomicBoolean(false) : null;

          try {
            GTSEncoder lastencoder = null;
            GTSEncoder encoder = null;

            BufferedReader br = new BufferedReader(new StringReader(message));

            boolean lastHadAttributes = false;

            // Non-null when out-of-range datapoints should be silently
            // dropped (platform default possibly overridden per token).
            AtomicLong ignoredCount = null;

            if ((this.handler.ingress.ignoreOutOfRange && !Boolean.FALSE.equals(this.ignoor)) || Boolean.TRUE.equals(this.ignoor)) {
              ignoredCount = new AtomicLong(0L);
            }

            do {

              if (this.handler.ingress.parseAttributes) {
                lastHadAttributes = lastHadAttributes || hadAttributes.get();
                hadAttributes.set(false);
              }

              String line = br.readLine();

              if (null == line) {
                break;
              }

              //
              // Check if we encountered an 'UPDATE xxx' line
              //

              if (line.startsWith("UPDATE ")) {
                String[] subtokens = line.split("\\s+");
                setToken(subtokens[1]);
                continue;
              }

              if (null == this.wtoken) {
                throw new IOException("Missing token.");
              }

              try {
                encoder = GTSHelper.parse(lastencoder, line, extraLabels, now, this.maxsize, hadAttributes, maxpast, maxfuture, ignoredCount);
              } catch (ParseException pe) {
                Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_STREAM_UPDATE_PARSEERRORS, sensisionLabels, 1);
                throw new IOException("Parse error at '" + line + "'", pe);
              }

              //
              // Force PRODUCER/OWNER
              //

              //encoder.setLabel(Constants.PRODUCER_LABEL, producer);
              //encoder.setLabel(Constants.OWNER_LABEL, owner);

              // Flush the previous encoder when the line starts a new GTS or
              // the current encoder grew past the size threshold.
              if (encoder != lastencoder || lastencoder.size() > StandaloneIngressHandler.ENCODER_SIZE_THRESHOLD) {

                //
                // Check throttling
                //

                if (null != lastencoder && lastencoder.size() > 0) {
                  String producer = extraLabels.get(Constants.PRODUCER_LABEL);
                  String owner = extraLabels.get(Constants.OWNER_LABEL);
                  String application = extraLabels.get(Constants.APPLICATION_LABEL);

                  ThrottlingManager.checkMADS(lastencoder.getMetadata(), producer, owner, application, lastencoder.getClassId(), lastencoder.getLabelsId());
                  ThrottlingManager.checkDDP(lastencoder.getMetadata(), producer, owner, application, (int) lastencoder.getCount());
                }

                //
                // Build metadata object to push
                //

                if (encoder != lastencoder) {
                  encoder.setClassId(GTSHelper.classId(this.handler.ingress.classKey, encoder.getMetadata().getName()));
                  encoder.setLabelsId(GTSHelper.labelsId(this.handler.ingress.labelsKey, encoder.getMetadata().getLabels()));

                  byte[] bytes = new byte[16];
                  GTSHelper.fillGTSIds(bytes, 0, encoder.getClassId(), encoder.getLabelsId());
                  BigInteger metadataCacheKey = new BigInteger(bytes);

                  // Push metadata only when unseen, or when activity tracking
                  // requires refreshing the last-activity timestamp.
                  boolean pushMeta = false;

                  if (!this.handler.ingress.metadataCache.containsKey(metadataCacheKey)) {
                    pushMeta = true;
                  } else if (this.handler.ingress.activityTracking && this.handler.ingress.updateActivity) {
                    Long lastActivity = this.handler.ingress.metadataCache.get(metadataCacheKey);

                    if (null == lastActivity) {
                      pushMeta = true;
                    } else if (nowms - lastActivity > this.handler.ingress.activityWindow) {
                      pushMeta = true;
                    }
                  }

                  if (pushMeta) {
                    Metadata metadata = new Metadata(encoder.getMetadata());
                    metadata.setSource(Configuration.INGRESS_METADATA_SOURCE);

                    if (this.handler.ingress.activityTracking && this.handler.ingress.updateActivity) {
                      metadata.setLastActivity(nowms);
                    }

                    this.handler.ingress.pushMetadataMessage(metadata);

                    synchronized(this.handler.ingress.metadataCache) {
                      this.handler.ingress.metadataCache.put(metadataCacheKey, (this.handler.ingress.activityTracking && this.handler.ingress.updateActivity) ? nowms : null);
                    }
                  }
                }

                if (null != lastencoder) {
                  lastencoder.setClassId(GTSHelper.classId(this.handler.ingress.classKey, lastencoder.getName()));
                  lastencoder.setLabelsId(GTSHelper.labelsId(this.handler.ingress.labelsKey, lastencoder.getLabels()));
                  this.handler.ingress.pushDataMessage(lastencoder, kafkaDataMessageAttributes);
                  count += lastencoder.getCount();

                  if (this.handler.ingress.parseAttributes && lastHadAttributes) {
                    // We need to push lastencoder's metadata update as they were updated since the last
                    // metadata update message sent
                    Metadata meta = new Metadata(lastencoder.getMetadata());
                    meta.setSource(Configuration.INGRESS_METADATA_UPDATE_ENDPOINT);
                    this.handler.ingress.pushMetadataMessage(meta);
                    lastHadAttributes = false;
                  }
                }

                if (encoder != lastencoder) {
                  // This is the case when we just parsed either the first input line or one for a different
                  // GTS than the previous one.
                  lastencoder = encoder;
                } else {
                  // This is the case when lastencoder and encoder are identical, but lastencoder was too big and needed
                  // to be flushed

                  //lastencoder = null

                  //
                  // Allocate a new GTSEncoder and reuse Metadata so we can
                  // correctly handle a continuation line if this is what occurs next
                  //

                  Metadata metadata = lastencoder.getMetadata();
                  lastencoder = new GTSEncoder(0L);
                  lastencoder.setMetadata(metadata);
                }
              }
            } while (true);

            br.close();

            // Flush the trailing encoder, with the same throttling checks.
            if (null != lastencoder && lastencoder.size() > 0) {
              //
              // Check throttling
              //

              String producer = extraLabels.get(Constants.PRODUCER_LABEL);
              String owner = extraLabels.get(Constants.OWNER_LABEL);
              String application = extraLabels.get(Constants.APPLICATION_LABEL);

              ThrottlingManager.checkMADS(lastencoder.getMetadata(), producer, owner, application, lastencoder.getClassId(), lastencoder.getLabelsId());
              ThrottlingManager.checkDDP(lastencoder.getMetadata(), producer, owner, application, (int) lastencoder.getCount());

              lastencoder.setClassId(GTSHelper.classId(this.handler.ingress.classKey, lastencoder.getName()));
              lastencoder.setLabelsId(GTSHelper.labelsId(this.handler.ingress.labelsKey, lastencoder.getLabels()));
              this.handler.ingress.pushDataMessage(lastencoder, kafkaDataMessageAttributes);
              count += lastencoder.getCount();

              if (this.handler.ingress.parseAttributes && lastHadAttributes) {
                // Push a metadata UPDATE message so attributes are stored

                // Build metadata object to push
                Metadata meta = new Metadata(lastencoder.getMetadata());
                // Set source to indicate we
                meta.setSource(Configuration.INGRESS_METADATA_UPDATE_ENDPOINT);
                this.handler.ingress.pushMetadataMessage(meta);
              }
            }
          } finally {
            // Null pushes flush any buffered Kafka messages; metrics are
            // updated even when parsing failed part-way through.
            this.handler.ingress.pushMetadataMessage(null);
            this.handler.ingress.pushDataMessage(null, this.kafkaDataMessageAttributes);

            nano = System.nanoTime() - nano;

            Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_STREAM_UPDATE_DATAPOINTS_RAW, sensisionLabels, count);
            Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_STREAM_UPDATE_MESSAGES, sensisionLabels, 1);
            Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_STREAM_UPDATE_TIME_US, sensisionLabels, nano / 1000);
            Sensision.update(SensisionConstants.SENSISION_CLASS_CONTINUUM_STREAM_UPDATE_DATAPOINTS_GLOBAL, Sensision.EMPTY_LABELS, count);
          }

          session.getRemote().sendString("OK " + (seqno++) + " UPDATE " + count + " " + nano);
        }
      } catch (Throwable t) {
        if (this.errormsg) {
          String msg = "ERROR " + ThrowableUtils.getErrorMessage(t);
          session.getRemote().sendString(msg);
        } else {
          throw t;
        }
      }
    }

    /** No per-connection state needs releasing on close. */
    @OnWebSocketClose
    public void onWebSocketClose(Session session, int statusCode, String reason) {
    }

    public void setHandler(IngressStreamUpdateHandler handler) {
      this.handler = handler;
    }

    /**
     * Validates a write token string and loads all per-connection settings
     * derived from it: size limits, time-window deltas, out-of-range policy,
     * forced labels (producer/owner/application) and Kafka message
     * attributes. Throws IOException when the token is invalid or not
     * usable for updates; connection state is only mutated on success
     * (except sensisionLabels/extraLabels which are rebuilt along the way).
     */
    private void setToken(String token) throws IOException {
      //
      // TOKEN <TOKEN>
      //

      //
      // Extract token
      //

      WriteToken wtoken = null;

      try {
        wtoken = Tokens.extractWriteToken(token);
      } catch (Exception e) {
        wtoken = null;
      }

      if (null == wtoken) {
        throw new IOException("Invalid token.");
      }

      if (wtoken.getAttributesSize() > 0 && wtoken.getAttributes().containsKey(Constants.TOKEN_ATTR_NOUPDATE)) {
        throw new IOException("Token cannot be used for updating data.");
      }

      // Value-size limit: platform default, optionally lowered by the token,
      // but never above half the data message threshold (minus overhead).
      this.maxsize = this.handler.ingress.maxValueSize;

      if (wtoken.getAttributesSize() > 0 && null != wtoken.getAttributes().get(Constants.TOKEN_ATTR_MAXSIZE)) {
        this.maxsize = Long.parseLong(wtoken.getAttributes().get(Constants.TOKEN_ATTR_MAXSIZE));
        if (this.maxsize > (this.handler.ingress.DATA_MESSAGES_THRESHOLD / 2) - 64) {
          this.maxsize = (this.handler.ingress.DATA_MESSAGES_THRESHOLD / 2) - 64;
        }
      }

      this.maxpastdelta = null;
      this.maxfuturedelta = null;

      Boolean ignoor = null;

      if (wtoken.getAttributesSize() > 0) {

        if (wtoken.getAttributes().containsKey(Constants.TOKEN_ATTR_IGNOOR)) {
          String v = wtoken.getAttributes().get(Constants.TOKEN_ATTR_IGNOOR).toLowerCase();
          if ("true".equals(v) || "t".equals(v)) {
            ignoor = Boolean.TRUE;
          } else if ("false".equals(v) || "f".equals(v)) {
            ignoor = Boolean.FALSE;
          }
        }

        String deltastr = wtoken.getAttributes().get(Constants.TOKEN_ATTR_MAXPAST);

        if (null != deltastr) {
          long delta = Long.parseLong(deltastr);
          if (delta < 0) {
            throw new IOException("Invalid '" + Constants.TOKEN_ATTR_MAXPAST + "' token attribute, MUST be positive.");
          }
          maxpastdelta = delta;
        }

        deltastr = wtoken.getAttributes().get(Constants.TOKEN_ATTR_MAXFUTURE);

        if (null != deltastr) {
          long delta = Long.parseLong(deltastr);
          if (delta < 0) {
            throw new IOException("Invalid '" + Constants.TOKEN_ATTR_MAXFUTURE + "' token attribute, MUST be positive.");
          }
          maxfuturedelta = delta;
        }
      }

      String application = wtoken.getAppName();
      String producer = Tokens.getUUID(wtoken.getProducerId());
      String owner = Tokens.getUUID(wtoken.getOwnerId());

      this.sensisionLabels.clear();
      this.sensisionLabels.put(SensisionConstants.SENSISION_LABEL_PRODUCER, producer);

      long count = 0;

      if (null == producer || null == owner) {
        throw new IOException("Invalid token.");
      }

      //
      // Build extra labels
      //

      this.extraLabels = new HashMap<String,String>();

      // Add labels from the WriteToken if they exist
      if (wtoken.getLabelsSize() > 0) {
        extraLabels.putAll(wtoken.getLabels());
      }

      // Force internal labels
      this.extraLabels.put(Constants.PRODUCER_LABEL, producer);
      this.extraLabels.put(Constants.OWNER_LABEL, owner);
      // FIXME(hbs): remove me
      if (null != application) {
        this.extraLabels.put(Constants.APPLICATION_LABEL, application);
        sensisionLabels.put(SensisionConstants.SENSISION_LABEL_APPLICATION, application);
      }

      if (wtoken.getAttributesSize() > 0) {
        //
        // Extract KafkaDataMessage attributes: platform-level TTL and
        // datapoint-timestamp settings first, then token-level overrides.
        //

        kafkaDataMessageAttributes = null;

        if (-1 != this.handler.ingress.ttl || this.handler.ingress.useDatapointTs) {
          kafkaDataMessageAttributes = new HashMap<String,String>();
          if (-1 != this.handler.ingress.ttl) {
            kafkaDataMessageAttributes.put(Constants.STORE_ATTR_TTL, Long.toString(this.handler.ingress.ttl));
          }
          if (this.handler.ingress.useDatapointTs) {
            kafkaDataMessageAttributes.put(Constants.STORE_ATTR_USEDATAPOINTTS, "t");
          }
        }

        if (wtoken.getAttributes().containsKey(Constants.STORE_ATTR_TTL)
            || wtoken.getAttributes().containsKey(Constants.STORE_ATTR_USEDATAPOINTTS)) {
          if (null == kafkaDataMessageAttributes) {
            kafkaDataMessageAttributes = new HashMap<String,String>();
          }
          if (wtoken.getAttributes().containsKey(Constants.STORE_ATTR_TTL)) {
            kafkaDataMessageAttributes.put(Constants.STORE_ATTR_TTL, wtoken.getAttributes().get(Constants.STORE_ATTR_TTL));
          }
          if (wtoken.getAttributes().containsKey(Constants.STORE_ATTR_USEDATAPOINTTS)) {
            kafkaDataMessageAttributes.put(Constants.STORE_ATTR_USEDATAPOINTTS, wtoken.getAttributes().get(Constants.STORE_ATTR_USEDATAPOINTTS));
          }
        }
      }

      this.ignoor = ignoor;
      this.wtoken = wtoken;
    }
  }

  public IngressStreamUpdateHandler(Ingress ingress) {
    super(StandaloneStreamUpdateWebSocket.class);
    this.ingress = ingress;
  }

  /** Only upgrades requests targeting the plasma update endpoint; everything else falls through. */
  @Override
  public void handle(String target, Request baseRequest, HttpServletRequest request, HttpServletResponse response) throws IOException, ServletException {
    if (Constants.API_ENDPOINT_PLASMA_UPDATE.equals(target)) {
      baseRequest.setHandled(true);
      super.handle(target, baseRequest, request, response);
    }
  }

  /**
   * Wraps the default WebSocketCreator so each new socket gets a reference
   * back to this handler, and applies the configured max message size.
   */
  @Override
  public void configure(final WebSocketServletFactory factory) {
    final IngressStreamUpdateHandler self = this;

    final WebSocketCreator oldcreator = factory.getCreator();

    WebSocketCreator creator = new WebSocketCreator() {
      @Override
      public Object createWebSocket(ServletUpgradeRequest req, ServletUpgradeResponse resp) {
        StandaloneStreamUpdateWebSocket ws = (StandaloneStreamUpdateWebSocket) oldcreator.createWebSocket(req, resp);
        ws.setHandler(self);
        return ws;
      }
    };

    factory.setCreator(creator);

    //
    // Update the maxMessageSize if need be
    //

    if (this.ingress.properties.containsKey(Configuration.INGRESS_WEBSOCKET_MAXMESSAGESIZE)) {
      factory.getPolicy().setMaxTextMessageSize((int) Long.parseLong(this.ingress.properties.getProperty(Configuration.INGRESS_WEBSOCKET_MAXMESSAGESIZE)));
      factory.getPolicy().setMaxBinaryMessageSize((int) Long.parseLong(this.ingress.properties.getProperty(Configuration.INGRESS_WEBSOCKET_MAXMESSAGESIZE)));
    }

    super.configure(factory);
  }
}
| apache-2.0 |
ly20050516/DesignPattern | src/com/design/visitor/define/ObjectStruct.java | 468 | package com.design.visitor.define;
/**
 * Object structure for the Visitor pattern demo: owns one concrete element
 * of each kind and exposes them to visiting clients.
 */
public class ObjectStruct {

    Element mElementA;
    Element mElementB;

    /** Builds the structure with a fresh ElementA and ElementB. */
    public ObjectStruct() {
        this.mElementA = new ElementA();
        this.mElementB = new ElementB();
    }

    /** Returns the first element of the structure. */
    public Element getElementA() {
        return this.mElementA;
    }

    /** Returns the second element of the structure. */
    public Element getElementB() {
        return this.mElementB;
    }

    /** Replaces the first element. */
    public void setElementA(Element elementA) {
        this.mElementA = elementA;
    }

    /** Replaces the second element. */
    public void setElementB(Element elementB) {
        this.mElementB = elementB;
    }
}
| apache-2.0 |
shisoft/LinkedIn-J | core/src/main/java/com/google/code/linkedinapi/schema/RoleCode.java | 1877 | /*
* Copyright 2010-2011 Nabeel Mukhtar
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.google.code.linkedinapi.schema;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
/**
* <p>Java class for null.
*
* <p>The following schema fragment specifies the expected content contained within this class.
* <p>
* <pre>
* <simpleType>
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="H"/>
* <enumeration value="R"/>
* <enumeration value="S"/>
* <enumeration value="W"/>
* </restriction>
* </simpleType>
* </pre>
*
*/
@XmlEnum
public enum RoleCode {

    /** Hiring manager. */
    @XmlEnumValue("H")
    HIRING_MANAGER("H"),

    /** Company recruiter. */
    @XmlEnumValue("R")
    COMPANY_RECRUITER("R"),

    /** Staffing firm. */
    @XmlEnumValue("S")
    STAFFING_FIRM("S"),

    /** Company employee. */
    @XmlEnumValue("W")
    COMPANY_EMPLOYEE("W");

    // Single-letter wire value used in the XML representation.
    private final String value;

    RoleCode(String v) {
        value = v;
    }

    /** Returns the XML wire value for this constant. */
    public String value() {
        return value;
    }

    /**
     * Resolves a wire value back to its enum constant.
     *
     * @param v the single-letter code ("H", "R", "S" or "W")
     * @return the matching constant
     * @throws IllegalArgumentException if {@code v} matches no constant
     */
    public static RoleCode fromValue(String v) {
        for (RoleCode c: RoleCode.values()) {
            if (c.value.equals(v)) {
                return c;
            }
        }
        // v is already a String: the former v.toString() was redundant and
        // turned a null argument into a misleading NullPointerException
        // instead of the documented IllegalArgumentException.
        throw new IllegalArgumentException(v);
    }
}
| apache-2.0 |
nicolaferraro/camel | core/camel-endpointdsl/src/generated/java/org/apache/camel/builder/endpoint/dsl/HazelcastSetEndpointBuilderFactory.java | 23626 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder.endpoint.dsl;
import javax.annotation.Generated;
import org.apache.camel.ExchangePattern;
import org.apache.camel.builder.EndpointConsumerBuilder;
import org.apache.camel.builder.EndpointProducerBuilder;
import org.apache.camel.builder.endpoint.AbstractEndpointBuilder;
import org.apache.camel.spi.ExceptionHandler;
/**
* Perform operations on Hazelcast distributed set.
*
* Generated by camel build tools - do NOT edit this file!
*/
@Generated("org.apache.camel.maven.packaging.EndpointDslMojo")
public interface HazelcastSetEndpointBuilderFactory {
/**
* Builder for endpoint consumers for the Hazelcast Set component.
*/
public interface HazelcastSetEndpointConsumerBuilder
extends
EndpointConsumerBuilder {
default AdvancedHazelcastSetEndpointConsumerBuilder advanced() {
return (AdvancedHazelcastSetEndpointConsumerBuilder) this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option is a:
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointConsumerBuilder defaultOperation(
HazelcastOperation defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option will be converted to a
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointConsumerBuilder defaultOperation(
String defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option is a: <code>com.hazelcast.core.HazelcastInstance</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointConsumerBuilder hazelcastInstance(
Object hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option will be converted to a
* <code>com.hazelcast.core.HazelcastInstance</code> type.
*
* Group: common
*/
default HazelcastSetEndpointConsumerBuilder hazelcastInstance(
String hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference name which can be used for hazelcast
* endpoint. If you don't specify the instance reference, camel use the
* default hazelcast instance from the camel-hazelcast instance.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HazelcastSetEndpointConsumerBuilder hazelcastInstanceName(
String hazelcastInstanceName) {
doSetProperty("hazelcastInstanceName", hazelcastInstanceName);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default HazelcastSetEndpointConsumerBuilder bridgeErrorHandler(
boolean bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
/**
* Allows for bridging the consumer to the Camel routing Error Handler,
* which mean any exceptions occurred while the consumer is trying to
* pickup incoming messages, or the likes, will now be processed as a
* message and handled by the routing Error Handler. By default the
* consumer will use the org.apache.camel.spi.ExceptionHandler to deal
* with exceptions, that will be logged at WARN or ERROR level and
* ignored.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: consumer
*/
default HazelcastSetEndpointConsumerBuilder bridgeErrorHandler(
String bridgeErrorHandler) {
doSetProperty("bridgeErrorHandler", bridgeErrorHandler);
return this;
}
}
/**
* Advanced builder for endpoint consumers for the Hazelcast Set component.
*/
public interface AdvancedHazelcastSetEndpointConsumerBuilder
extends
EndpointConsumerBuilder {
default HazelcastSetEndpointConsumerBuilder basic() {
return (HazelcastSetEndpointConsumerBuilder) this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option is a: <code>org.apache.camel.spi.ExceptionHandler</code>
* type.
*
* Group: consumer (advanced)
*/
default AdvancedHazelcastSetEndpointConsumerBuilder exceptionHandler(
ExceptionHandler exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* To let the consumer use a custom ExceptionHandler. Notice if the
* option bridgeErrorHandler is enabled then this option is not in use.
* By default the consumer will deal with exceptions, that will be
* logged at WARN or ERROR level and ignored.
*
* The option will be converted to a
* <code>org.apache.camel.spi.ExceptionHandler</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedHazelcastSetEndpointConsumerBuilder exceptionHandler(
String exceptionHandler) {
doSetProperty("exceptionHandler", exceptionHandler);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option is a: <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedHazelcastSetEndpointConsumerBuilder exchangePattern(
ExchangePattern exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets the exchange pattern when the consumer creates an exchange.
*
* The option will be converted to a
* <code>org.apache.camel.ExchangePattern</code> type.
*
* Group: consumer (advanced)
*/
default AdvancedHazelcastSetEndpointConsumerBuilder exchangePattern(
String exchangePattern) {
doSetProperty("exchangePattern", exchangePattern);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHazelcastSetEndpointConsumerBuilder synchronous(
boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHazelcastSetEndpointConsumerBuilder synchronous(
String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Builder for endpoint producers for the Hazelcast Set component.
*/
public interface HazelcastSetEndpointProducerBuilder
extends
EndpointProducerBuilder {
default AdvancedHazelcastSetEndpointProducerBuilder advanced() {
return (AdvancedHazelcastSetEndpointProducerBuilder) this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option is a:
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointProducerBuilder defaultOperation(
HazelcastOperation defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option will be converted to a
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointProducerBuilder defaultOperation(
String defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option is a: <code>com.hazelcast.core.HazelcastInstance</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointProducerBuilder hazelcastInstance(
Object hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option will be converted to a
* <code>com.hazelcast.core.HazelcastInstance</code> type.
*
* Group: common
*/
default HazelcastSetEndpointProducerBuilder hazelcastInstance(
String hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference name which can be used for hazelcast
* endpoint. If you don't specify the instance reference, camel use the
* default hazelcast instance from the camel-hazelcast instance.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HazelcastSetEndpointProducerBuilder hazelcastInstanceName(
String hazelcastInstanceName) {
doSetProperty("hazelcastInstanceName", hazelcastInstanceName);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default HazelcastSetEndpointProducerBuilder lazyStartProducer(
boolean lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
/**
* Whether the producer should be started lazy (on the first message).
* By starting lazy you can use this to allow CamelContext and routes to
* startup in situations where a producer may otherwise fail during
* starting and cause the route to fail being started. By deferring this
* startup to be lazy then the startup failure can be handled during
* routing messages via Camel's routing error handlers. Beware that when
* the first message is processed then creating and starting the
* producer may take a little time and prolong the total processing time
* of the processing.
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: producer
*/
default HazelcastSetEndpointProducerBuilder lazyStartProducer(
String lazyStartProducer) {
doSetProperty("lazyStartProducer", lazyStartProducer);
return this;
}
}
/**
* Advanced builder for endpoint producers for the Hazelcast Set component.
*/
public interface AdvancedHazelcastSetEndpointProducerBuilder
extends
EndpointProducerBuilder {
default HazelcastSetEndpointProducerBuilder basic() {
return (HazelcastSetEndpointProducerBuilder) this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHazelcastSetEndpointProducerBuilder synchronous(
boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHazelcastSetEndpointProducerBuilder synchronous(
String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Builder for endpoint for the Hazelcast Set component.
*/
public interface HazelcastSetEndpointBuilder
extends
HazelcastSetEndpointConsumerBuilder,
HazelcastSetEndpointProducerBuilder {
default AdvancedHazelcastSetEndpointBuilder advanced() {
return (AdvancedHazelcastSetEndpointBuilder) this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option is a:
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointBuilder defaultOperation(
HazelcastOperation defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* To specify a default operation to use, if no operation header has
* been provided.
*
* The option will be converted to a
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointBuilder defaultOperation(
String defaultOperation) {
doSetProperty("defaultOperation", defaultOperation);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option is a: <code>com.hazelcast.core.HazelcastInstance</code>
* type.
*
* Group: common
*/
default HazelcastSetEndpointBuilder hazelcastInstance(
Object hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference which can be used for hazelcast
* endpoint.
*
* The option will be converted to a
* <code>com.hazelcast.core.HazelcastInstance</code> type.
*
* Group: common
*/
default HazelcastSetEndpointBuilder hazelcastInstance(
String hazelcastInstance) {
doSetProperty("hazelcastInstance", hazelcastInstance);
return this;
}
/**
* The hazelcast instance reference name which can be used for hazelcast
* endpoint. If you don't specify the instance reference, camel use the
* default hazelcast instance from the camel-hazelcast instance.
*
* The option is a: <code>java.lang.String</code> type.
*
* Group: common
*/
default HazelcastSetEndpointBuilder hazelcastInstanceName(
String hazelcastInstanceName) {
doSetProperty("hazelcastInstanceName", hazelcastInstanceName);
return this;
}
}
/**
* Advanced builder for endpoint for the Hazelcast Set component.
*/
public interface AdvancedHazelcastSetEndpointBuilder
extends
AdvancedHazelcastSetEndpointConsumerBuilder,
AdvancedHazelcastSetEndpointProducerBuilder {
default HazelcastSetEndpointBuilder basic() {
return (HazelcastSetEndpointBuilder) this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option is a: <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHazelcastSetEndpointBuilder synchronous(
boolean synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
/**
* Sets whether synchronous processing should be strictly used, or Camel
* is allowed to use asynchronous processing (if supported).
*
* The option will be converted to a <code>boolean</code> type.
*
* Default: false
* Group: advanced
*/
default AdvancedHazelcastSetEndpointBuilder synchronous(
String synchronous) {
doSetProperty("synchronous", synchronous);
return this;
}
}
/**
* Proxy enum for
* <code>org.apache.camel.component.hazelcast.HazelcastOperation</code>
* enum.
*/
enum HazelcastOperation {
PUT,
DELETE,
GET,
UPDATE,
QUERY,
GET_ALL,
CLEAR,
PUT_IF_ABSENT,
ADD_ALL,
REMOVE_ALL,
RETAIN_ALL,
EVICT,
EVICT_ALL,
VALUE_COUNT,
CONTAINS_KEY,
CONTAINS_VALUE,
GET_KEYS,
REMOVE_VALUE,
INCREMENT,
DECREMENT,
SET_VALUE,
DESTROY,
COMPARE_AND_SET,
GET_AND_ADD,
ADD,
OFFER,
PEEK,
POLL,
REMAINING_CAPACITY,
DRAIN_TO,
REMOVE_IF,
TAKE,
PUBLISH,
READ_ONCE_HEAD,
READ_ONCE_TAIL,
CAPACITY;
}
public interface HazelcastSetBuilders {
/**
* Hazelcast Set (camel-hazelcast)
* Perform operations on Hazelcast distributed set.
*
* Category: cache,datagrid
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* Syntax: <code>hazelcast-set:cacheName</code>
*
* Path parameter: cacheName (required)
* The name of the cache
*
* @param path cacheName
*/
default HazelcastSetEndpointBuilder hazelcastSet(String path) {
return HazelcastSetEndpointBuilderFactory.endpointBuilder("hazelcast-set", path);
}
/**
* Hazelcast Set (camel-hazelcast)
* Perform operations on Hazelcast distributed set.
*
* Category: cache,datagrid
* Since: 2.7
* Maven coordinates: org.apache.camel:camel-hazelcast
*
* Syntax: <code>hazelcast-set:cacheName</code>
*
* Path parameter: cacheName (required)
* The name of the cache
*
* @param componentName to use a custom component name for the endpoint
* instead of the default name
* @param path cacheName
*/
default HazelcastSetEndpointBuilder hazelcastSet(
String componentName,
String path) {
return HazelcastSetEndpointBuilderFactory.endpointBuilder(componentName, path);
}
}
static HazelcastSetEndpointBuilder endpointBuilder(
String componentName,
String path) {
class HazelcastSetEndpointBuilderImpl extends AbstractEndpointBuilder implements HazelcastSetEndpointBuilder, AdvancedHazelcastSetEndpointBuilder {
public HazelcastSetEndpointBuilderImpl(String path) {
super(componentName, path);
}
}
return new HazelcastSetEndpointBuilderImpl(path);
}
} | apache-2.0 |
wangqi/gameserver | server/src/main/java/com/xinqihd/sns/gameserver/util/ProtocolBufUtil.java | 12134 | package com.xinqihd.sns.gameserver.util;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.FilenameFilter;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
 * Helper class used to generate protocol-buffer message-id mappings and the
 * Lua / Java source files derived from them.
 *
 * <p>Message ids are persisted in {@code src/gensrc/pb.properties} so a
 * message keeps the same id across runs. New Bse* (server-to-client) messages
 * are numbered upward from {@link #bseBase}; new Bce* (client-to-server)
 * messages from {@link #bceBase}.
 *
 * @author wangqi
 */
public class ProtocolBufUtil {

	/** Base id for Bse* server-to-client messages. */
	public static final int bseBase = 512;

	/** Base id for Bce* client-to-server messages; also the boundary used to
	 *  tell Bse ids (below) from Bce ids (at or above). */
	public static final int bceBase = 1024;

	/**
	 * Generate all id-to-message mapping files for the Lua script language
	 * and the Java transport layer, then persist the updated mapping.
	 *
	 * @param projectDir the project root directory
	 * @param pbDir directory (relative to projectDir) containing the
	 *        {@code xinqihd}, {@code extend} and {@code guild} .proto folders
	 * @throws Exception if any file cannot be read or written
	 */
	public static final void generateIdMessage(String projectDir, String pbDir)
			throws Exception {
		// Read the existing name=id mapping (if any) so previously assigned
		// ids remain stable across runs.
		int bseMax = bseBase;
		int bceMax = bceBase;
		File mappingFile = new File(projectDir, "src/gensrc/pb.properties");
		Map<String, Integer> messageMap = new LinkedHashMap<String, Integer>();
		if (mappingFile.exists() && mappingFile.isFile()) {
			BufferedReader br = new BufferedReader(new FileReader(mappingFile));
			try {
				String line = br.readLine();
				while (line != null) {
					String[] fields = line.split("=");
					// Skip blank/malformed lines instead of crashing.
					if (fields.length == 2) {
						String messageName = fields[0].trim();
						Integer messageId = Integer.valueOf(fields[1].trim());
						System.out.println(messageName + " = " + messageId);
						messageMap.put(messageName, messageId);
						if (messageId.intValue() < bceBase) {
							if (messageId.intValue() > bseMax) {
								bseMax = messageId.intValue();
							}
						} else if (messageId.intValue() > bceMax) {
							bceMax = messageId.intValue();
						}
					}
					line = br.readLine();
				}
			} finally {
				// BUG FIX: the original code leaked this reader.
				br.close();
			}
		}
		bseMax++;
		bceMax++;
		System.out.println("Max bse:" + bseMax + ", max bce:" + bceMax);
		// Scan the protocol directories and assign fresh ids to new messages.
		// (The unused xinqiPbList/extPbList locals of the original were removed.)
		ArrayList<String> normalPbList = new ArrayList<String>();
		File pbFile = new File(projectDir, pbDir);
		File xinqiDir = new File(pbFile, "xinqihd");
		File extDir = new File(pbFile, "extend");
		File guildDir = new File(pbFile, "guild");
		int[] max = {bceMax, bseMax};
		parsePbDir(xinqiDir, max, messageMap, normalPbList);
		parsePbDir(extDir, max, messageMap, normalPbList);
		parsePbDir(guildDir, max, messageMap, normalPbList);

		luaScript(projectDir, "src/gensrc/lua/ProtocolMgr.lua", normalPbList, messageMap);
		javaIdToMessage(projectDir, "src/gensrc/java/com/xinqihd/sns/gameserver/transport/IdToMessage.java", normalPbList, messageMap);
		javaMessageToId(projectDir, "src/gensrc/java/com/xinqihd/sns/gameserver/transport/MessageToId.java", normalPbList, messageMap);
		javaMessageToHandler(projectDir, "src/gensrc/java/com/xinqihd/sns/gameserver/transport/MessageToHandler.java", normalPbList, messageMap);

		// Persist the (possibly extended) mapping for the next run.
		saveMappingFile(mappingFile, messageMap);
	}

	/**
	 * Command line entry point.
	 *
	 * @param args optional; args[0] is the project directory (defaults to ".")
	 */
	public static void main(String[] args) throws Exception {
		String projectDir = ".";
		if (args != null && args.length > 0) {
			projectDir = args[0];
		}
		ProtocolBufUtil.generateIdMessage(projectDir, "src/main/protoc");
	}

	/**
	 * Write the message-name=id mapping back to the properties file,
	 * one entry per line.
	 */
	private static final void saveMappingFile(File mappingFile, Map<String, Integer> messageMap)
			throws Exception {
		FileWriter fw = new FileWriter(mappingFile);
		try {
			for (String message : messageMap.keySet()) {
				Integer id = messageMap.get(message);
				fw.append(message).append("=").append(id.toString()).append("\n");
			}
		} finally {
			fw.close();
		}
	}

	/**
	 * Extract the message name from a proto file name by stripping the
	 * trailing ".proto" (6 characters). Callers only pass names accepted by
	 * {@link ProtoFileFilter}, so the suffix is always present.
	 *
	 * @param fileName the file name, e.g. "BceLogin.proto"
	 * @return the bare message name, e.g. "BceLogin"
	 */
	private static final String extractMessageName(String fileName) {
		if (fileName != null && fileName.length() > 6) {
			return fileName.substring(0, fileName.length() - 6);
		}
		return fileName;
	}

	/**
	 * Parse one protocol directory and assign new ids to messages not yet in
	 * the mapping. Bce* names draw from max[0], Bse* names from max[1]; any
	 * other name is collected into normalPbList without an id.
	 *
	 * @param pbFile the directory to scan
	 * @param max in/out: {next Bce id, next Bse id}; advanced by this call
	 * @param messageMap the name-to-id mapping, extended in place
	 * @param normalPbList collects names that are neither Bce* nor Bse*
	 */
	private static final void parsePbDir(File pbFile, int[] max,
			Map<String, Integer> messageMap, List<String> normalPbList) {
		int bceMax = max[0];
		int bseMax = max[1];
		File[] protoFiles = pbFile.listFiles(new ProtoFileFilter());
		if (protoFiles == null) {
			// Directory missing or unreadable: nothing to scan.
			return;
		}
		for (File f : protoFiles) {
			String fileName = f.getName();
			String messageName = extractMessageName(fileName);
			Integer id = messageMap.get(messageName);
			if (id == null) {
				// New protocol file: allocate the next id in its range.
				if (messageName.startsWith("Bce")) {
					id = Integer.valueOf(bceMax++);
				} else if (messageName.startsWith("Bse")) {
					id = Integer.valueOf(bseMax++);
				} else {
					normalPbList.add(messageName);
				}
				if (id != null) {
					messageMap.put(messageName, id);
					System.out.println("New " + messageName + ", id: " + id);
				}
			}
		}
		// BUG FIX: propagate the advanced counters back to the caller.
		// The original never wrote them back, so ids assigned while scanning
		// later directories (extend/guild) collided with earlier ones.
		max[0] = bceMax;
		max[1] = bseMax;
	}

	/**
	 * Generate IdToMessage.java: maps a numeric id to the protobuf
	 * default instance of the corresponding message.
	 */
	private static final void javaIdToMessage(String projectDir, String javaDir,
			List<String> normalList, Map<String, Integer> map) throws Exception {
		File idFile = new File(projectDir, javaDir);
		FileWriter fw = new FileWriter(idFile);
		try {
			fw.append("package com.xinqihd.sns.gameserver.transport;\n");
			fw.append("\n");
			fw.append("import org.apache.commons.logging.Log;\n");
			fw.append("import org.apache.commons.logging.LogFactory;\n");
			fw.append("import com.google.protobuf.MessageLite;\n");
			fw.append("import com.xinqihd.sns.gameserver.proto.*;\n");
			fw.append("\n");
			fw.append("/**\n");
			fw.append(" * GENERATED SOURCE CODE DO NOT MODIFY!\n");
			fw.append(" * Translate the given int id to its coresponding message. \n");
			fw.append(" * @author wangqi \n");
			fw.append(" */ \n");
			fw.append("public class IdToMessage {\n");
			fw.append("\n");
			fw.append("  private static Log log = LogFactory.getLog(IdToMessage.class); \n");
			fw.append("\n");
			fw.append("  public static MessageLite idToMessage(int id) { \n");
			fw.append("    MessageLite message = null;\n");
			fw.append("    switch(id) {\n");
			for (String message : map.keySet()) {
				Integer id = map.get(message);
				fw.append("      case " + id + ": \n");
				fw.append("        message = Xinqi" + message + "." + message
						+ ".getDefaultInstance(); \n");
				fw.append("        break;\n");
			}
			fw.append("      default:\n");
			fw.append("        log.error(\"No message type for id: \" + id);\n");
			fw.append("    }\n");
			fw.append("    return message;\n");
			fw.append("  }\n");
			fw.append("}\n");
		} finally {
			// BUG FIX: close the writer even when appending fails.
			fw.close();
		}
	}

	/**
	 * Generate MessageToId.java: maps a message instance back to its
	 * numeric id via an instanceof chain.
	 */
	private static final void javaMessageToId(String projectDir, String javaDir,
			List<String> normalList, Map<String, Integer> map) throws Exception {
		File idFile = new File(projectDir, javaDir);
		FileWriter fw = new FileWriter(idFile);
		try {
			fw.append("package com.xinqihd.sns.gameserver.transport;\n");
			fw.append("\n");
			fw.append("import org.apache.commons.logging.Log;\n");
			fw.append("import org.apache.commons.logging.LogFactory;\n");
			fw.append("import com.google.protobuf.MessageLite;\n");
			fw.append("import com.xinqihd.sns.gameserver.proto.*;\n");
			fw.append("\n");
			fw.append("/**\n");
			fw.append(" * GENERATED SOURCE CODE DO NOT MODIFY!\n");
			fw.append(" * Translate the given message to its corresponding id. \n");
			fw.append(" * @author wangqi \n");
			fw.append(" */ \n");
			fw.append("public class MessageToId {\n");
			fw.append("\n");
			fw.append("  private static Log log = LogFactory.getLog(MessageToId.class); \n");
			fw.append("\n");
			fw.append("  public static int messageToId(MessageLite msg) { \n");
			// The first branch is a plain "if"; every later one is "else if".
			boolean first = true;
			for (String message : map.keySet()) {
				Integer id = map.get(message);
				if (first) {
					fw.append("    if (msg instanceof Xinqi" + message + "." + message + " ) {\n");
					first = false;
				} else {
					fw.append("    else if (msg instanceof Xinqi" + message + "." + message + " ) {\n");
				}
				fw.append("      return " + id + "; \n");
				fw.append("    }\n");
			}
			fw.append("    else {\n");
			fw.append("      log.error(\"No id for message: \"+msg.getClass().getName());\n");
			fw.append("    }\n");
			fw.append("    return -1;\n");
			fw.append("  }\n");
			fw.append("}\n");
		} finally {
			fw.close();
		}
	}

	/**
	 * Generate MessageToHandler.java: maps an incoming XinqiMessage payload
	 * (Bce* messages only, i.e. ids at or above bceBase) to its handler.
	 */
	private static final void javaMessageToHandler(String projectDir, String javaDir,
			List<String> normalList, Map<String, Integer> map) throws Exception {
		File idFile = new File(projectDir, javaDir);
		FileWriter fw = new FileWriter(idFile);
		try {
			fw.append("package com.xinqihd.sns.gameserver.transport;\n");
			fw.append("\n");
			fw.append("import org.apache.commons.logging.Log;\n");
			fw.append("import org.apache.commons.logging.LogFactory;\n");
			fw.append("\n");
			fw.append("import com.xinqihd.sns.gameserver.handler.*;\n");
			fw.append("import com.xinqihd.sns.gameserver.proto.*;\n");
			fw.append("\n");
			fw.append("/**\n");
			fw.append(" * GENERATE SOURCE CODE. DO NOT MODIFY!\n");
			fw.append(" * Get to proper message object according to the given message type.\n");
			fw.append(" * @author wangqi\n");
			fw.append(" *\n");
			fw.append(" */\n");
			fw.append("public class MessageToHandler extends SimpleChannelHandler {\n");
			fw.append("\n");
			fw.append("  private static Log log = LogFactory.getLog(MessageToHandler.class); \n");
			fw.append("\n");
			fw.append("  public static SimpleChannelHandler messageToHandler(Object msgObject) {\n");
			fw.append("    XinqiMessage message = null;\n");
			fw.append("    if ( msgObject instanceof XinqiMessage ) {\n");
			fw.append("      message = (XinqiMessage)msgObject;\n");
			fw.append("    } else {\n");
			fw.append("      if ( log.isWarnEnabled() ) {\n");
			fw.append("        log.warn(\"msgObject is not XinqiMessage.\");\n");
			fw.append("      }\n");
			fw.append("    }\n");
			// BUG FIX: the original emitted "if" only when an id was exactly
			// 1024, so with no such id the first branch became a dangling
			// "else if" and the generated file did not compile. Track the
			// first emitted branch explicitly, as javaMessageToId does.
			boolean first = true;
			for (String message : map.keySet()) {
				int id = map.get(message);
				if (id < bceBase) {
					continue; // Bse* (server-to-client) messages have no handler.
				}
				if (first) {
					fw.append("    if (message.payload instanceof Xinqi" + message + "." + message + " ) {\n");
					first = false;
				} else {
					fw.append("    else if (message.payload instanceof Xinqi" + message + "." + message + " ) {\n");
				}
				fw.append("      return " + message + "Handler.getInstance();\n");
				fw.append("    }\n");
			}
			fw.append("    return null;\n");
			fw.append("  }\n");
			fw.append("  \n");
			fw.append("}\n");
		} finally {
			fw.close();
		}
	}

	/**
	 * Generate the ProtocolMgr.lua script: requires every protocol module,
	 * declares ID_* constants and registers each id/message pair in a PkgMap.
	 */
	private static final void luaScript(String projectDir, String luaDir,
			List<String> normalPbList, Map<String, Integer> map)
			throws Exception {
		File luaFile = new File(projectDir, luaDir);
		FileWriter fw = new FileWriter(luaFile);
		try {
			fw.append("require(\"com/xinqihd/common/base.lua\")\n");
			fw.append("require(\"com/xinqihd/common/PkgMap.lua\")\n");
			for (String message : normalPbList) {
				fw.append("require(\"com/xinqihd/bombbaby/protocol/" + message + "_pb.lua\")\n");
			}
			for (String message : map.keySet()) {
				fw.append("require(\"com/xinqihd/bombbaby/protocol/").append(message).append("_pb.lua\")\n");
			}
			for (String message : map.keySet()) {
				Integer value = map.get(message);
				fw.append("ID_" + message + " = " + value).append("\n");
			}
			fw.append("ProtocolMgr = class(\"ProtocolMgr\")\n");
			fw.append("function ProtocolMgr:InitPkgMap()\n");
			for (String message : map.keySet()) {
				Integer value = map.get(message);
				fw.append("  self._pkgMap:AddPkg(" + value
						+ ",\t").append(message).append("_pb.").append(message).append("());\n");
			}
			fw.append("end\n\n");
			fw.append("function ProtocolMgr:initialize()\n");
			fw.append("  self._pkgMap = PkgMap:new()\n");
			fw.append("  self:InitPkgMap()\n");
			fw.append("end\n\n");
			fw.append("function ProtocolMgr:GetPkgMap()\n");
			fw.append("  return self._pkgMap\n");
			fw.append("end\n");
		} finally {
			fw.close();
		}
	}

	/**
	 * Accepts only files with a '.proto' extension.
	 *
	 * @author wangqi
	 */
	private static final class ProtoFileFilter implements FilenameFilter {

		/* (non-Javadoc)
		 * @see java.io.FilenameFilter#accept(java.io.File, java.lang.String)
		 */
		@Override
		public boolean accept(File dir, String name) {
			return name.endsWith(".proto");
		}
	}
}
| apache-2.0 |
bessovistnyj/jvm-byte-code | Multithreading/Threads/src/main/java/ru/napadovskiyb/CheckString.java | 3103 | package ru.napadovskiyb;
import java.util.StringTokenizer;
/**
 * Package of CollectionPro finalTask.
 * Spawns worker threads that count the spaces and the words in a fixed string
 * and print each count to standard out, coordinated by a main thread that
 * joins with a timeout and then interrupts any straggler.
 *
 * @author Napadovskiy Bohdan
 * @version 1.0
 * @since 09.08.2017
 */
public class CheckString {

    /**
     * Build a thread that counts the space characters in the given string and
     * prints the count to standard out.
     *
     * @param stringForCheck string to inspect
     * @return a new, not-yet-started counting thread
     */
    public Thread calcSpace(String stringForCheck) {
        return new Thread() {
            @Override
            public void run() {
                // Bail out if interrupted before doing any work.
                // Thread.interrupted() both tests and clears the current
                // thread's flag, replacing the original's confusing
                // isInterrupted()-then-static-interrupted() pair (same net
                // behavior, stated once).
                if (Thread.interrupted()) {
                    return;
                }
                int result = 0;
                for (char tmpChar : stringForCheck.toCharArray()) {
                    if (tmpChar == ' ') {
                        result++;
                    }
                }
                System.out.println(result);
            }
        };
    }

    /**
     * Build a thread that counts the whitespace-separated words in the given
     * string and prints the count to standard out.
     *
     * @param stringForCheck string to inspect
     * @return a new, not-yet-started counting thread
     */
    public Thread calcWorlds(String stringForCheck) {
        return new Thread() {
            @Override
            public void run() {
                if (Thread.interrupted()) {
                    return;
                }
                // countTokens() replaces the original manual
                // hasMoreTokens()/nextElement() counting loop.
                System.out.println(new StringTokenizer(stringForCheck).countTokens());
            }
        };
    }

    /**
     * Build the coordinating thread: it starts both counters, waits at most
     * one second for each, then interrupts any that are still running.
     *
     * @return a new, not-yet-started coordinating thread
     */
    public Thread mainThread() {
        return new Thread() {
            private String newString = "а баба галамага тест";
            private final int timeStop = 1000;

            @Override
            public void run() {
                Thread firstThread = calcSpace(newString);
                Thread secondThread = calcWorlds(newString);
                try {
                    firstThread.start();
                    secondThread.start();
                    // Bounded joins so a stuck worker cannot hang us forever.
                    firstThread.join(timeStop);
                    secondThread.join(timeStop);
                    firstThread.interrupt();
                    secondThread.interrupt();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        };
    }

    /**
     * Main method.
     *
     * @param args array string.
     */
    public static void main(String[] args) {
        System.out.println("Start");
        CheckString checkString = new CheckString();
        Thread mainThread = checkString.mainThread();
        mainThread.start();
        try {
            mainThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        mainThread.interrupt();
        System.out.println("Finish");
    }
}
| apache-2.0 |
lvweiwolf/poi-3.16 | src/scratchpad/testcases/org/apache/poi/hslf/usermodel/TestTable.java | 5595 | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.apache.poi.hslf.usermodel;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.awt.Color;
import java.awt.geom.Rectangle2D;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.List;
import org.apache.poi.hslf.HSLFTestDataSamples;
import org.apache.poi.sl.draw.DrawTableShape;
import org.apache.poi.sl.usermodel.StrokeStyle;
import org.junit.Test;
/**
* Table related tests
*/
/**
 * Table related tests for HSLF (PowerPoint 97-2007) slideshows.
 *
 * All slideshow instances are opened with try-with-resources so that
 * underlying resources are released even when an assertion fails mid-test
 * (the original code only closed them on the success path).
 */
public class TestTable {

    /**
     * Builds a 3x5 table, anchors it, and verifies the computed anchor
     * of the bottom-right cell after layout.
     */
    @Test
    public void moveTable() throws IOException {
        try (HSLFSlideShow ppt = new HSLFSlideShow()) {
            HSLFSlide slide = ppt.createSlide();

            int rows = 3, cols = 5;
            HSLFTable table = slide.createTable(rows, cols);
            for (int row = 0; row < rows; row++) {
                for (int col = 0; col < cols; col++) {
                    HSLFTableCell c = table.getCell(row, col);
                    c.setText("r" + row + "c" + col);
                }
            }

            new DrawTableShape(table).setAllBorders(1.0, Color.black, StrokeStyle.LineDash.DASH_DOT);
            table.setAnchor(new Rectangle2D.Double(100, 100, 400, 400));

            // expected anchor of the last cell for a 400x400 table at (100,100)
            Rectangle2D rectExp = new Rectangle2D.Double(420, 366.625, 80, 133.375);
            Rectangle2D rectAct = table.getCell(rows - 1, cols - 1).getAnchor();
            assertEquals(rectExp, rectAct);
        }
    }

    /** Reads a sample file containing a table and validates its contents. */
    @Test
    public void testTable() throws IOException {
        try (HSLFSlideShow ppt = HSLFTestDataSamples.getSlideShow("54111.ppt")) {
            List<HSLFSlide> slides = ppt.getSlides();
            assertEquals(1, slides.size());
            checkSlide(slides.get(0));
        }
    }

    /** Validates the text runs and the 4x6 table of the single slide in 54111.ppt. */
    private void checkSlide(final HSLFSlide s) {
        List<List<HSLFTextParagraph>> textRuns = s.getTextParagraphs();
        assertEquals(2, textRuns.size());

        HSLFTextRun textRun = textRuns.get(0).get(0).getTextRuns().get(0);
        assertEquals("Table sample", textRun.getRawText().trim());
        assertEquals(1, textRuns.get(0).get(0).getTextRuns().size());
        assertFalse(textRun.getTextParagraph().isBullet());

        assertEquals("Dummy text", HSLFTextParagraph.getRawText(textRuns.get(1)));

        List<HSLFShape> shapes = s.getShapes();
        assertNotNull(shapes);
        assertEquals(3, shapes.size());
        assertTrue(shapes.get(2) instanceof HSLFTable);

        final HSLFTable table = (HSLFTable) shapes.get(2);
        assertEquals(4, table.getNumberOfColumns());
        assertEquals(6, table.getNumberOfRows());
        for (int x = 0; x < 4; x++) {
            // header row followed by 5 data rows
            assertEquals("TH Cell " + (x + 1), HSLFTextParagraph.getRawText(table.getCell(0, x).getTextParagraphs()));
            for (int y = 1; y < 6; y++) {
                assertEquals("Row " + y + ", Cell " + (x + 1), table.getCell(y, x).getText());
            }
        }
    }

    /**
     * Round-trips a table through write/read twice, appending text to every
     * cell on the second pass, and verifies the final cell contents.
     */
    @Test
    public void testAddText() throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();

        // Pass 1: create a 4x5 table and serialize it.
        try (HSLFSlideShow ppt1 = new HSLFSlideShow()) {
            HSLFSlide slide = ppt1.createSlide();
            HSLFTable tab = slide.createTable(4, 5);
            int rows = tab.getNumberOfRows();
            int cols = tab.getNumberOfColumns();
            for (int row = 0; row < rows; row++) {
                for (int col = 0; col < cols; col++) {
                    tab.getCell(row, col).setText("r" + (row + 1) + "c" + (col + 1));
                }
            }
            ppt1.write(bos);
        }

        // Pass 2: reload, append "..." to every cell, serialize again.
        ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
        try (HSLFSlideShow ppt2 = new HSLFSlideShow(bis)) {
            HSLFSlide slide = ppt2.getSlides().get(0);
            HSLFTable tab = (HSLFTable) slide.getShapes().get(0);
            int rows = tab.getNumberOfRows();
            int cols = tab.getNumberOfColumns();
            for (int row = 0; row < rows; row++) {
                for (int col = 0; col < cols; col++) {
                    HSLFTableCell c = tab.getCell(row, col);
                    c.setText(c.getText() + "...");
                }
            }
            bos.reset();
            ppt2.write(bos);
        }

        // Pass 3: reload and verify the appended text survived the round trip.
        bis = new ByteArrayInputStream(bos.toByteArray());
        try (HSLFSlideShow ppt3 = new HSLFSlideShow(bis)) {
            HSLFSlide slide = ppt3.getSlides().get(0);
            HSLFTable tab = (HSLFTable) slide.getShapes().get(0);
            int rows = tab.getNumberOfRows();
            int cols = tab.getNumberOfColumns();
            for (int row = 0; row < rows; row++) {
                for (int col = 0; col < cols; col++) {
                    assertEquals("r" + (row + 1) + "c" + (col + 1) + "...", tab.getCell(row, col).getText());
                }
            }
        }
    }
}
| apache-2.0 |
quanganh2627/android_device_asus_a500cg-1 | MmgrClient/src/com/intel/internal/telephony/mmgr/requests/MmgrModemRestartRequest.java | 1089 | /* Android Modem Status Client API
*
* Copyright (C) Intel 2012
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.intel.internal.telephony.mmgr.requests;
import com.intel.internal.telephony.mmgr.MedfieldMmgrClient;
/**
 * MMGR request asking the modem manager to restart the modem.
 * Identified by {@code REQUEST_MODEM_RESTART}; carries no payload.
 */
public class MmgrModemRestartRequest extends MmgrBaseRequest {

    public MmgrModemRestartRequest() {
        super(MedfieldMmgrClient.REQUEST_MODEM_RESTART);
    }

    @Override
    protected byte[] getPayload() {
        // A restart request has an empty body.
        return new byte[0];
    }

    @Override
    public String getName() {
        return "ModemRestartRequest";
    }
}
| apache-2.0 |
welterde/ewok | com/planet_ink/coffee_mud/MOBS/Minotaur.java | 2827 | package com.planet_ink.coffee_mud.MOBS;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2010 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Minotaur extends StdMOB
{
    public String ID(){return "Minotaur";}

    public Minotaur()
    {
        super();
        // per-instance randomizer used to vary weight and intelligence slightly
        Random rnd = new Random(System.currentTimeMillis());

        Username="a minotaur";
        setDescription("A tall humanoid with the head of a bull, and the body of a very muscular man. It\\`s covered in red fur.");
        setDisplayText("A minotaur glares at you.");
        CMLib.factions().setAlignment(this,Faction.ALIGN_EVIL);
        setMoney(0);

        // base attributes: heavy and strong, but not too bright
        baseEnvStats.setWeight(350 + Math.abs(rnd.nextInt() % 55));
        baseCharStats().setStat(CharStats.STAT_INTELLIGENCE,4 + Math.abs(rnd.nextInt() % 5));
        baseCharStats().setStat(CharStats.STAT_STRENGTH,18);
        baseCharStats().setStat(CharStats.STAT_DEXTERITY,15);
        baseCharStats().setMyRace(CMClass.getRace("Minotaur"));
        baseCharStats().getMyRace().startRacing(this,false);

        // arm it with a battle axe when that item class is available
        Weapon axe=CMClass.getWeapon("BattleAxe");
        if(axe!=null)
        {
            axe.wearAt(Wearable.WORN_WIELD);
            addInventory(axe);
        }

        // combat statistics for a level 6 aggressive monster
        baseEnvStats().setDamage(12);
        baseEnvStats().setSpeed(2.0);
        baseEnvStats().setAbility(0);
        baseEnvStats().setLevel(6);
        baseEnvStats().setArmor(60);
        baseState.setHitPoints(CMLib.dice().roll(baseEnvStats().level(),20,baseEnvStats().level()));

        addBehavior(CMClass.getBehavior("Aggressive"));
        recoverMaxState();
        resetToMaxState();
        recoverEnvStats();
        recoverCharStats();
    }
}
| apache-2.0 |
datancoffee/sirocco | src/main/java/sirocco/indexer/EnglishIndexer.java | 105063 | /*******************************************************************************
* Copyright 2008 and onwards Sergei Sokolenko, Alexey Shevchuk,
* Sergey Shevchook, and Roman Khnykin.
*
* This product includes software developed at
* Cuesense 2008-2011 (http://www.cuesense.com/).
*
* This product includes software developed by
* Sergei Sokolenko (@datancoffee) 2008-2017.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Author(s):
* Sergei Sokolenko (@datancoffee)
*******************************************************************************/
package sirocco.indexer;
import CS2JNet.JavaSupport.Collections.Generic.LCC.CollectionSupport;
import CS2JNet.JavaSupport.language.RefSupport;
import CS2JNet.System.Collections.LCC.CSList;
import CS2JNet.System.DoubleSupport;
import CS2JNet.System.StringSupport;
import net.sf.extjwnl.dictionary.morph.Util;
import java.io.InputStream;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map.Entry;
import opennlp.tools.util.Span;
import sirocco.config.ConfigurationManager;
import sirocco.indexer.EnglishTokenizer;
import sirocco.indexer.FloatVector;
import sirocco.indexer.IndexerLabel;
import sirocco.indexer.LanguageSpecificIndexer;
import sirocco.indexer.NounChunkType;
import sirocco.indexer.IndexingConsts.IndexingType;
import sirocco.indexer.IndexingConsts.ParseDepth;
import sirocco.indexer.dictionaries.GenericDictionary;
import sirocco.indexer.dictionaries.en.EnglishDictionaries;
import sirocco.indexer.util.LangUtils;
import sirocco.model.ContentIndex;
import sirocco.model.EntityLabelledSpan;
import sirocco.model.EntityScoreComparer;
import sirocco.model.EntityStats;
import sirocco.model.IdiomOccurrence;
import sirocco.model.LabelledPositionsV2;
import sirocco.model.LabelledSentence;
import sirocco.model.LabelledSpan;
import sirocco.model.LabelledText;
import sirocco.model.LabelledTextRelevanceComparer;
import sirocco.model.ParagraphIndex;
import sirocco.model.SentenceFlags;
import sirocco.model.SpanFlags;
import sirocco.model.TextReference;
import sirocco.model.TextStats;
import sirocco.model.TextTag;
import opennlp.tools.chunker.Chunker;
import opennlp.tools.chunker.ChunkerME;
import opennlp.tools.chunker.ChunkerModel;
import opennlp.tools.parser.AbstractBottomUpParser;
import opennlp.tools.parser.Parse;
import opennlp.tools.parser.Parser;
import opennlp.tools.parser.ParserFactory;
import opennlp.tools.parser.ParserModel;
import opennlp.tools.postag.POSModel;
import opennlp.tools.postag.POSTagger;
import opennlp.tools.postag.POSTaggerME;
import opennlp.tools.postag.TagDictionary;
import opennlp.tools.sentdetect.SentenceDetectorME;
import opennlp.tools.sentdetect.SentenceModel;
import opennlp.tools.tokenize.Tokenizer;
public class EnglishIndexer extends LanguageSpecificIndexer
{
// Path to NLP model files; not referenced in this part of the class — TODO confirm usage elsewhere
private String mModelPath;
// Beam size for taggers/parsers, read from the "BeamSize" configuration key
private int mBeamSize;
// English dictionaries shared by the tokenizer and indexing steps
private EnglishDictionaries mDicts;
// OpenNLP maxent sentence-boundary detector, loaded from en-sent.bin
private SentenceDetectorME mSentenceDetector;
/**
* Tokenizer built from en-token.bin plus the English dictionaries.
* Old class: OpenNLP.Tools.Tokenize.EnglishMaximumEntropyTokenizer
*/
private Tokenizer mTokenizer;
// Part-of-speech tagger, loaded from en-pos-maxent.bin
private POSTagger mPosTagger;
// Tag dictionary extracted from the POS model
private TagDictionary mTagDictionary;
// Shallow parser (chunker), loaded from en-chunker.bin
private Chunker mChunker;
// Full constituency parser, loaded from en-parser-chunking.bin; used for DEEP parse depth
private Parser mParser;
//private OpenNLP.Tools.NameFind.EnglishNameFinder mNameFinder;
//private OpenNLP.Tools.Lang.English.TreebankLinker mCoreferenceFinder;
/**
 * Loads the English dictionaries and all OpenNLP 1.5 models
 * (sentence detector, tokenizer, POS tagger, chunker, full parser)
 * from classpath resources.
 *
 * Each model stream is opened with try-with-resources so it is closed
 * even when model construction throws (the original code leaked the
 * stream on failure). Note that the parser model is about 15x the size
 * of the chunking model — keep this in mind when using Deep Parsing.
 *
 * @throws Exception if configuration lookup or model loading fails
 */
public EnglishIndexer() throws Exception {
    mDicts = new EnglishDictionaries();
    mBeamSize = ConfigurationManager.getConfiguration().getInt("BeamSize");

    try (InputStream modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-sent.bin")) {
        mSentenceDetector = new SentenceDetectorME(new SentenceModel(modelStream));
    }

    try (InputStream modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-token.bin")) {
        mTokenizer = new EnglishTokenizer(modelStream, mDicts);
    }

    try (InputStream modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-pos-maxent.bin")) {
        //POSModel posModel = POSTaggerUtils.createPOSModel(modelStream);
        POSModel posModel = new POSModel(modelStream);
        mTagDictionary = posModel.getTagDictionary();
        mPosTagger = new POSTaggerME(posModel);
    }

    try (InputStream modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-chunker.bin")) {
        mChunker = new ChunkerME(new ChunkerModel(modelStream));
    }

    try (InputStream modelStream = getClass().getResourceAsStream("/opennlp15model-sa/en-parser-chunking.bin")) {
        mParser = ParserFactory.create(new ParserModel(modelStream));
    }
}
// Penn Treebank tag/constituent lists driving the phrase-extraction heuristics below.
// Abbreviations: NP/VP/ADJP/ADVP = noun/verb/adjective/adverb phrases; S/SBAR/SINV = clauses;
// NN* = nouns, JJ* = adjectives, VB* = verbs, RB* = adverbs, CD = cardinal number,
// DT = determiner, CC = conjunction, IN = preposition, POS = possessive, MD = modal.
private static CSList<String> tlADJPParentTypes = new CSList<String>(new String[]{ "NP", "S", "SBAR", "TOP" });
private static CSList<String> tlEmotionVerbsParentTypes = new CSList<String>(new String[]{ "VP", "S", "SBAR", "TOP" });
// constituent types accepted inside a "good object" phrase (incl. whole NPs)
private static CSList<String> tlGoodObjectTypesWithNP = new CSList<String>(new String[]{ "NP", "NN", "NNS", "NNP", "NNPS", "JJ", "JJR", "JJS", "CD" });
private static CSList<String> tlParentsOfGoodObjects = new CSList<String>(new String[]{ "NP" });
// token-level POS types accepted inside a "good object" phrase
private static CSList<String> tlGoodObjectTypes = new CSList<String>(new String[]{ "NN", "NNS", "NNP", "NNPS", "JJ", "JJR", "JJS", "CD" });
// a bare number does not make a useful entity on its own
private static CSList<String> tlNotAGoodSoleObject = new CSList<String>(new String[]{ "CD" });
// POS types that terminate a candidate phrase
private static CSList<String> tlPhraseBreakerTypes = new CSList<String>(new String[]{ "DT", "WDT", "CC", ",", ":" });
// IN will be ignored in breaking, but will become part of label, and excluded from key
private static CSList<String> tlGoodNPConnectingPrepositions = new CSList<String>(new String[]{ "of/IN", "for/IN" });
private static CSList<String> tlGoodProperNounChunkers = new CSList<String>(new String[]{ "and/CC", "of/IN", "for/IN" });
private static CSList<String> tlSalutations = new CSList<String>(new String[]{ "Dear" });
private static CSList<String> tlSubChunkBreakerTypes = new CSList<String>(new String[]{ "JJ", "JJR", "JJS", "CD", "POS", "IN" });
private static CSList<String> tlManualNPFixTypes = new CSList<String>(new String[]{ "NN", "NNS", "JJ", "JJR", "JJS", "NNP", "NNPS" });
private static CSList<String> tlPOSToSkipWhenFlattening = new CSList<String>(new String[]{ "DT", "POS", "CC" });
private static CSList<String> tlADJP = new CSList<String>(new String[]{ "ADJP" });
private static CSList<String> tlCardinal = new CSList<String>(new String[]{ "CD" });
private static CSList<String> tlAllNouns = new CSList<String>(new String[]{ "NN", "NNS", "NNP", "NNPS" });
private static CSList<String> tlRegularNouns = new CSList<String>(new String[]{ "NN", "NNS" });
private static CSList<String> tlProperNouns = new CSList<String>(new String[]{ "NNP", "NNPS" });
private static CSList<String> tlModalVerbs = new CSList<String>(new String[]{ "MD" });
private static CSList<String> tlVerbs = new CSList<String>(new String[]{ "VB", "VBD", "VBG", "VBN", "VBP", "VBZ" });
private static CSList<String> tlAdverbs = new CSList<String>(new String[]{ "RB", "RBR", "RBS" });
private static CSList<String> tlAdjectives = new CSList<String>(new String[]{ "JJ", "JJR", "JJS" });
private static CSList<String> tlAdjectivesAndVBN = new CSList<String>(new String[]{ "JJ", "JJR", "JJS", "VBN" });
private static CSList<String> tlAdjectivesJJR_JJS = new CSList<String>(new String[]{ "JJR", "JJS" });
// constituent types at which sentiment accumulation stops, per parse depth
private static CSList<String> tlDeepAccumulationBreakParents = new CSList<String>(new String[]{ "VP", "NP", "SINV", "S", "SBAR", "INTJ", "FRAG" });
private static CSList<String> tlShallowAccumulationBreakParents = new CSList<String>(new String[]{ "VP", "NP", "S", "ADVP", "ADJP" });
private static CSList<String> tlOrphographyTypes = new CSList<String>(new String[]{ ",", ".", ":" });
private static CSList<String> tlQuestionSentenceTop = new CSList<String>(new String[]{ "SQ", "SBARQ" });
private static CSList<String> tlSentenceTop = new CSList<String>(new String[]{ "S", "SBAR", "SINV" });
private static CSList<String> tlOrdinalSuffixes = new CSList<String>(new String[]{ "ST", "ND", "RD", "TH" });
// punctuation tags at which n-gram generation may break (see chunk_or_parse)
private static CSList<String> tlNgramBreakerTypes = new CSList<String>(new String[]{ ".", ",", ":" });
// Chunker chunk tag prefixes (B- and I-) are different from Parser chunk tag prefixes (S- and C-)
private static String CHUNKER_CHUNK_START = "B-";
private static String CHUNKER_CHUNK_CONT = "I-";
private static String CHUNKER_CHUNK_OTHER = "O";
// minimum entity length that earns a positive length factor in calculateEntityScore
public static int HighValueMinimumLength = 9;
/**
 * Runs the full indexing pipeline over the content: sentence splitting,
 * syntactic analysis, then (unless in n-gram statistics mode) idiom
 * detection, sentiment extraction, entity discovery, scoring, tag
 * selection and sentiment selection. Start/stop timestamps are recorded
 * in the content index.
 * @param contentindex content to index; results are written back into it
 */
public void index(ContentIndex contentindex) throws Exception {
    contentindex.ActionTimestamps.put("Index:start", Calendar.getInstance().getTime());

    // Stage 1: sentence splitting and shallow/deep syntactic analysis.
    split(contentindex);
    chunk_or_parse(contentindex);

    if (contentindex.IndexingType != IndexingType.NGRAMSTATS) {
        // Stage 2: full sentiment/entity pipeline.
        findIdioms(contentindex);
        getSentiment(contentindex);
        findGoodEntities(contentindex);
        buildEntitySentimentContext(contentindex);
        calculateEntityScore(contentindex);
        selectTopTags(contentindex);
        buildLabelledSentences(contentindex);
        chunkLabelledSentences(contentindex);
        selectSentiments(contentindex);
    } else {
        // N-gram statistics mode produces no tags or selected sentiments.
        contentindex.TopTags = new TextTag[0];
        contentindex.SelectedSentiments = new CSList<LabelledText>();
    }

    contentindex.IsIndexingSuccessful = true;
    contentindex.ActionTimestamps.put("Index:stop", Calendar.getInstance().getTime());
}
/**
 * Decides whether a candidate tag phrase would make a good topic.
 * The tag is tokenized and POS-tagged, wrapped into single-token Parse
 * nodes, and evaluated by the same high-value-object heuristic used
 * during entity extraction. Only the "good as topic" verdict is returned;
 * the "good as tag" verdict is computed by the heuristic but unused here
 * (the original code copied it into a dead local).
 *
 * @param tag candidate phrase
 * @return true when the phrase qualifies as a topic
 */
public Boolean isGoodAsTopic(String tag) throws Exception {
    String[] tokens = tokenizeSentence(tag);
    String[] postags = posTagTokens(tokens);

    CSList<Parse> phrase = new CSList<Parse>();
    for (int idx = 0; idx < tokens.length; idx++)
    {
        phrase.add(new Parse(tokens[idx], new Span(0, tokens[idx].length() - 1), postags[idx], 1.0, 1));
    }

    RefSupport<Boolean> refGoodAsTag = new RefSupport<Boolean>();   // required out-param, value unused
    RefSupport<Boolean> refGoodAsTopic = new RefSupport<Boolean>();
    isHighValueObject(phrase, refGoodAsTag, refGoodAsTopic);
    return refGoodAsTopic.getValue();
}
/**
 * Splits each paragraph's original text into sentences and records the
 * sentence count on the paragraph index.
 * @param contentindex content whose paragraphs are split in place
 */
public void split(ContentIndex contentindex) throws Exception {
    for (ParagraphIndex pindex : contentindex.ParagraphIndexes) {
        pindex.OriginalSentences = splitIntoSentences(pindex.OriginalText);
        pindex.SentenceCount = pindex.OriginalSentences.length;
    }
}
/*
* sso 7/5/2018: old version, before Deep Parsing introduction
public void chunk(ContentIndex contentindex) throws Exception {
contentindex.ActionTimestamps.put("Chunk:start", Calendar.getInstance().getTime());
contentindex.ContentParseDepth = IndexingConsts.ParseDepth.SHALLOW;
for (int i = 0;i < contentindex.ParagraphIndexes.length;i++)
{
ParagraphIndex pindex = contentindex.ParagraphIndexes[i];
pindex.SentenceParses = new Parse[pindex.SentenceCount];
pindex.SentenceFlags = new SentenceFlags[pindex.SentenceCount];
pindex.ParagraphStats = new TextStats();
for (int j = 0;j < pindex.SentenceCount;j++)
{
pindex.SentenceFlags[j] = new SentenceFlags();
//replace chars that NLP can;t understand
String normalizedSentence = normalizeSentence(pindex.OriginalSentences[j]);
String[] tokens = tokenizeSentence(normalizedSentence);
// calculate caps and number of entities stats
pindex.SentenceFlags[j].SentenceStats.calculateSentenceStats(tokens);
pindex.ParagraphStats.addStats(pindex.SentenceFlags[j].SentenceStats);
pindex.SentenceFlags[j].ParagraphStats = pindex.ParagraphStats;
// lower case some tokens to make them work with NLP
String sentence = null;
Span[] spans = null;
String[] fixedtokens = null;
RefSupport<Span[]> refVar2 = new RefSupport<Span[]>();
RefSupport<String[]> refVar3 = new RefSupport<String[]>();
RefSupport<String> refVar4 = new RefSupport<String>();
fixTokens(tokens,pindex.SentenceFlags[j],refVar2,refVar3,refVar4);
spans = refVar2.getValue();
fixedtokens = refVar3.getValue();
sentence = refVar4.getValue();
// determine parts of speech
String[] tags = posTagTokens(fixedtokens);
fixTags(tokens,tags);
// chunk words into groups
String[] chunks = chunkSentence(fixedtokens,tags);
// fix some combinations
fixChunks(fixedtokens,tags,chunks);
pindex.SentenceParses[j] = createParseFromChunks(sentence,fixedtokens,spans,tags,chunks);
}
}
contentindex.ActionTimestamps.put("Chunk:stop", Calendar.getInstance().getTime());
}
*/
/**
 * Methods for Deep Parsing or Shallow Parsing (Chunking)
 * Be aware that Shallow Parsing (the chunk method) has a much better performance with acceptable quality loss when used against Social Media.
 * For good English, Deep Parsing has much better quality
 *
 * For every sentence of every paragraph this method normalizes and tokenizes
 * the text, accumulates per-sentence and per-paragraph statistics, and then
 * either (a) emits n-gram mentions (NGRAMSTATS mode), (b) builds a shallow
 * chunk-based parse (SHALLOW depth), or (c) runs the full parser (DEEP depth).
 * Results are stored in each paragraph's SentenceParses/SentenceFlags.
 *
 * @param contentindex content to analyze; parses and stats are written back into it
 * @throws Exception if any NLP component fails
 */
public void chunk_or_parse(ContentIndex contentindex) throws Exception {
    contentindex.ActionTimestamps.put("Chunk/Parse:start", Calendar.getInstance().getTime());
    for (int i = 0;i < contentindex.ParagraphIndexes.length;i++)
    {
        ParagraphIndex pindex = contentindex.ParagraphIndexes[i];
        // one parse and one set of flags per sentence; stats accumulate per paragraph
        pindex.SentenceParses = new Parse[pindex.SentenceCount];
        pindex.SentenceFlags = new SentenceFlags[pindex.SentenceCount];
        pindex.ParagraphStats = new TextStats();
        for (int j = 0;j < pindex.SentenceCount;j++)
        {
            pindex.SentenceFlags[j] = new SentenceFlags();
            //replace chars that NLP can;t understand
            String normalizedSentence = normalizeSentence(pindex.OriginalSentences[j]);
            String[] tokens = tokenizeSentence(normalizedSentence);
            // calculate caps and number of entities stats
            pindex.SentenceFlags[j].SentenceStats.calculateSentenceStats(tokens);
            pindex.ParagraphStats.addStats(pindex.SentenceFlags[j].SentenceStats);
            pindex.SentenceFlags[j].ParagraphStats = pindex.ParagraphStats;
            // lower case some tokens to make them work with NLP;
            // fixTokens returns (via out-params) the token spans, the fixed
            // tokens, and the re-joined sentence string used downstream
            RefSupport<Span[]> outSpans = new RefSupport<Span[]>();
            RefSupport<String[]> outFixedtokens = new RefSupport<String[]>();
            RefSupport<String> outSentence = new RefSupport<String>();
            fixTokens(tokens,pindex.SentenceFlags[j],outSpans,outFixedtokens,outSentence);
            Span[] spans = outSpans.getValue();
            String[] fixedtokens = outFixedtokens.getValue();
            String sentence = outSentence.getValue();
            if (contentindex.IndexingType == IndexingType.NGRAMSTATS) {
                // determine parts of speech
                String[] tags = posTagTokens(fixedtokens);
                fixTags(tokens,tags);
                // build token/POS leaf parses and emit n-gram mentions only
                Parse[] posnodes = createParsesFromTokensAndTags(sentence,fixedtokens,spans,tags);
                RefSupport<HashMap<Integer,Integer>> outStartmarkers = new RefSupport<HashMap<Integer,Integer>>();
                String posedSentence = buildPosSentence(posnodes, outStartmarkers);
                HashMap<Integer,Integer> startmarkers = outStartmarkers.getValue();
                CSList<String> mentions = generateNgramMentions(posedSentence,startmarkers, contentindex.NgramMaxN, contentindex.NgramBreakAtPunctuation);
                contentindex.addNgramMentions(mentions);
            } else {
                if (contentindex.ContentParseDepth == IndexingConsts.ParseDepth.SHALLOW) {
                    // determine parts of speech
                    String[] tags = posTagTokens(fixedtokens);
                    fixTags(tokens,tags);
                    // chunk words into groups
                    String[] chunks = chunkSentence(fixedtokens,tags);
                    // fix some combinations
                    fixChunks(fixedtokens,tags,chunks);
                    pindex.SentenceParses[j] = createParseFromChunks(sentence,fixedtokens,spans,tags,chunks);
                }
                else if (contentindex.ContentParseDepth == IndexingConsts.ParseDepth.DEEP)
                    pindex.SentenceParses[j] = parseSentence(sentence,spans);
            }
        }
    }
    contentindex.ActionTimestamps.put("Chunk/Parse:stop", Calendar.getInstance().getTime());
}
/*
public void parse(ContentIndex contentindex) throws Exception {
contentindex.ActionTimestamps.put("Parse:start", Calendar.getInstance().getTime());
// TODO: need the next line?
contentindex.ContentParseDepth = IndexingConsts.ParseDepth.DEEP;
for (int i = 0;i < contentindex.ParagraphIndexes.length;i++)
{
ParagraphIndex pindex = contentindex.ParagraphIndexes[i];
pindex.SentenceParses = new Parse[pindex.SentenceCount];
pindex.SentenceFlags = new SentenceFlags[pindex.SentenceCount];
pindex.ParagraphStats = new TextStats();
for (int j = 0;j < pindex.SentenceCount;j++)
{
String normalizedSentence = normalizeSentence(pindex.OriginalSentences[j]);
pindex.SentenceParses[j] = parseSentence(normalizedSentence);
}
}
contentindex.ActionTimestamps.put("Parse:stop", Calendar.getInstance().getTime());
}
*/
/**
 * Scans every parsed sentence for idioms and records the findings
 * in the corresponding sentence flags.
 * @param contentindex content whose sentence flags are updated in place
 */
public void findIdioms(ContentIndex contentindex) throws Exception {
    for (ParagraphIndex pindex : contentindex.ParagraphIndexes) {
        for (int s = 0; s < pindex.SentenceCount; s++) {
            findIdiomsInSentence(pindex.SentenceParses[s], pindex.SentenceFlags[s]);
        }
    }
}
/**
 * Computes a sentiment vector for every sentence and renders a cleaned-up
 * ("indexed") version of each sentence, keeping a map from original token
 * spans to positions in the new string.
 * @param contentindex content whose per-sentence sentiment, indexed text
 *                     and span maps are populated in place
 */
public void getSentiment(ContentIndex contentindex) throws Exception {
    for (ParagraphIndex pindex : contentindex.ParagraphIndexes) {
        int count = pindex.SentenceCount;
        pindex.SentenceSentiments = new FloatVector[count];
        pindex.IndexedSentences = new String[count];
        pindex.SpanMap = (HashMap<String,Span>[]) new HashMap[count];
        for (int s = 0; s < count; s++) {
            // sentiment vector for this sentence (out-param style)
            RefSupport<FloatVector> sentimentOut = new RefSupport<FloatVector>();
            getSentimentVector(pindex.SentenceParses[s],
                    pindex.SentenceFlags[s], contentindex.ContentParseDepth, sentimentOut);
            pindex.SentenceSentiments[s] = sentimentOut.getValue();
            // Make a nicer sentence (remove space between 's etc) and record
            // where each original span landed in the rewritten string.
            RefSupport<String> sentenceOut = new RefSupport<String>();
            RefSupport<HashMap<String,Span>> spanMapOut = new RefSupport<HashMap<String,Span>>();
            makeIndexedSentence(pindex.SentenceParses[s],
                    pindex.SentenceFlags[s], pindex.SentenceSentiments[s], sentenceOut, spanMapOut);
            pindex.IndexedSentences[s] = sentenceOut.getValue();
            pindex.SpanMap[s] = spanMapOut.getValue();
        }
    }
}
/**
 * Finds candidate entities in every sentence parse and registers them on the
 * content index. Two sources are used: (1) "good object" phrases discovered by
 * findGoodEntitiesInParses, whose tag/topic quality is judged by
 * isHighValueObject; (2) hashtag tokens flagged earlier in SentenceFlags.
 * Span positions are translated through SpanMap into coordinates of the
 * indexed (rewritten) sentence.
 */
private void findGoodEntities(ContentIndex contentindex) throws Exception {
    for (int i = 0;i < contentindex.ParagraphIndexes.length;i++)
    {
        ParagraphIndex pindex = contentindex.ParagraphIndexes[i];
        for (int j = 0;j < pindex.SentenceCount;j++)
        {
            Parse parse = pindex.SentenceParses[j];
            // collect candidate noun-phrase token sequences from this sentence
            CSList<CSList<Parse>> goodEntitiesInParse = new CSList<CSList<Parse>>();
            findGoodEntitiesInParses(new CSList<Parse>(parse.getChildren()),pindex.SentenceFlags[j],parse.getType(),goodEntitiesInParse);
            for (CSList<Parse> phrase : goodEntitiesInParse)
            {
                String phrasestring = goodEntityToKeyString(phrase);
                Boolean goodAsTag;
                Boolean goodAsTopic;
                RefSupport<Boolean> outGoodAsTag = new RefSupport<Boolean>();
                RefSupport<Boolean> outGoodAsTopic = new RefSupport<Boolean>();
                isHighValueObject(phrase, outGoodAsTag, outGoodAsTopic);
                goodAsTag = outGoodAsTag.getValue();
                goodAsTopic = outGoodAsTopic.getValue();
                // translate the phrase boundaries into indexed-sentence coordinates
                int newStart = pindex.SpanMap[j].get(LangUtils.spanKey(phrase.get(0).getSpan())).getStart();
                int newEnd = pindex.SpanMap[j].get(LangUtils.spanKey(phrase.get(phrase.size() - 1).getSpan())).getEnd();
                // add the common parent of parses so that we can find sentiments in DEEP parsing
                Parse parent = findCommonParentOfType(phrase,tlParentsOfGoodObjects);
                TextReference tref = new TextReference(i,j,new Span(newStart,newEnd),parent);
                contentindex.addEntityReference(phrasestring, tref, goodAsTopic, goodAsTag);
            }
            for (Entry<String,SpanFlags> kvp : pindex.SentenceFlags[j].SpanFlags.entrySet())
            {
                // add hashtags
                if (kvp.getValue().IsHashtag)
                {
                    String spanKey = kvp.getKey();
                    int newStart = pindex.SpanMap[j].get(spanKey).getStart();
                    int newEnd = pindex.SpanMap[j].get(spanKey).getEnd();
                    TextReference tref = new TextReference(i,j,new Span(newStart,newEnd),null);
                    // recover the hashtag's original surface text from the intensity token
                    String intensitytoken = parse.getText().substring(newStart, newEnd);
                    String originaltext = FloatVector.getDimensionValueFromIntensityToken(intensitytoken,FloatVector.OriginalTextDimension);
                    contentindex.addEntityReference(originaltext,tref,null,true); // a Hashtag is a good tag, but an unknown quality topic
                }
            }
        }
    }
}
/**
 * Associates sentiment with every discovered entity.
 *
 * DEEP parse depth: every reference contributes sentiment, scoped to the
 * entity's nearest NP parent when one is available (otherwise the whole
 * sentence's sentiment is used). The original code wrapped this branch in an
 * always-true {@code if (true)} left over from a disabled de-duplication
 * experiment and computed an unused sentence key; both are removed here.
 *
 * SHALLOW parse depth: whole-sentence sentiment is attached, counted at most
 * once per sentence per entity via the processedSentences set.
 */
private void buildEntitySentimentContext(ContentIndex contentindex) throws Exception {
    if (contentindex.ContentParseDepth == IndexingConsts.ParseDepth.DEEP) {
        for (Entry<String,EntityStats> kvp : contentindex.ContentEntityStats.entrySet())
        {
            for (TextReference tref : kvp.getValue().References)
            {
                FloatVector sentencesentiment = contentindex.ParagraphIndexes[tref.ParagraphNum].SentenceSentiments[tref.SentenceNum];
                FloatVector entitysentiment;
                if (tref.Parent != null)
                {
                    // TODO: tref.Parent is the most immediate NP;
                    // consider going one level up to the next NP
                    entitysentiment = new FloatVector();
                    entitysentiment.accumulate(sentencesentiment,/*spans*/null,true,/*scopeSpan*/tref.Parent.getSpan());
                } else {
                    entitysentiment = sentencesentiment;
                }
                kvp.getValue().addRelatedSentiment(tref.ParagraphNum,tref.SentenceNum,entitysentiment);
            }
        }
    } else {
        CSList<String> processedSentences = new CSList<String>();
        for (Entry<String,EntityStats> kvp : contentindex.ContentEntityStats.entrySet())
        {
            for (TextReference tref : kvp.getValue().References)
            {
                String parsenkey = ContentIndex.parSenKey(tref.ParagraphNum,tref.SentenceNum);
                if (!processedSentences.contains(parsenkey))
                {
                    FloatVector sentencesentiment = contentindex.ParagraphIndexes[tref.ParagraphNum].SentenceSentiments[tref.SentenceNum];
                    kvp.getValue().addRelatedSentiment(tref.ParagraphNum,tref.SentenceNum,sentencesentiment);
                    /*addDerivationSteps*/
                    processedSentences.add(parsenkey);
                }
            }
            processedSentences.clear();
        }
    }
}
/**
 * Computes a relevance score for each entity from three logarithmic factors:
 * number of references, aggregate sentiment intensity, and entity length
 * (relative to HighValueMinimumLength). The factors multiply a base of 1.0.
 */
private void calculateEntityScore(ContentIndex contentindex) throws Exception {
    for (EntityStats estats : contentindex.ContentEntityStats.values())
    {
        int numref = estats.References.size();
        float sentscore = estats.getAggregateSentiment().sumAllIntensities();
        int length = estats.Entity.length();

        estats.Score = 1.0F;
        // 1 reference yields factor of 1, 2 references - 1.7, 3 - 2.1
        estats.Score *= (float)(1 + Math.log(numref));
        // attention: we add 1 to original score
        // 5 sentiment score - factor 1.4, 50 - 2.8, 250 - 4.3
        if (sentscore != FloatVector.DefaultValue)
            estats.Score *= (float)(1 + Math.log(1 + sentscore / FloatVector.InitialValue));
        // length of 3 - 0.5, 5 - 0.75, 9 - 1, 15 - 1.5, 25 - 2.0
        if (length >= HighValueMinimumLength)
            estats.Score *= (float)(1 + Math.log((float)length / (float)HighValueMinimumLength));
        else
            estats.Score *= (float)(1 + Math.log10((float)length / (float)HighValueMinimumLength));
    }
}
/**
 * Selects top N tags from ContentEntityStats, ignoring already included tags.
 * Entities are sorted ascending by score and consumed from the end of the
 * sorted list (highest score first). An entity whose text is already a
 * substring of a previously selected tag is skipped.
 * NOTE(review): a skipped candidate still advances the loop counter toward
 * tagsnum, so fewer than MaxTopTags tags may be returned even when more
 * candidates remain — presumably intentional; confirm before changing.
 */
private void selectTopTags(ContentIndex contentindex) throws Exception {
    // TODO 7/8/18: If textencoding , then select all tags
    // if (contentindex.IndexingType == IndexingConsts.IndexingType.TEXTENCODING) {
    EntityStats[] statsArray = contentindex.ContentEntityStats.values().toArray(new EntityStats[contentindex.ContentEntityStats.values().size()]);
    CSList<EntityStats> sorted = new CSList<EntityStats>(statsArray);
    Collections.sort(sorted, new EntityScoreComparer());
    contentindex.SortedEntityStats = sorted;
    // cap at MaxTopTags, or at the total entity count when smaller
    int tagsnum = (sorted.size() < ContentIndex.MaxTopTags) ? sorted.size() : ContentIndex.MaxTopTags;
    CSList<TextTag> toplist = new CSList<TextTag>();
    int i = 0;
    while ((i < tagsnum) && (i < sorted.size()))
    {
        // walk from the tail of the ascending sort, i.e. best score first
        int idx = sorted.size() - i - 1;
        // Don't include tags that are contained as substrings in tags that were already included
        // Example: exclude "fox" if "red fox" is already included
        boolean isAlreadyIncluded = false;
        for (TextTag tag : toplist)
        {
            if (tag.getWord().contains(sorted.get(idx).Entity))
            {
                isAlreadyIncluded = true;
                break;
            }
        }
        if (!isAlreadyIncluded)
            toplist.add(new TextTag(sorted.get(idx).Entity,sorted.get(idx).Score,sorted.get(idx).GoodAsTopic));
        i++;
    }
    contentindex.TopTags = toplist.toArray(new TextTag[toplist.size()]);
}
/**
 * Builds the LabelledSentences map, keyed by a [paragraph,sentence] key.
 * First pass: for each top tag, every text reference adds an entity-labelled
 * span to its sentence's entry. Second pass: every sentence with a non-empty
 * sentiment vector gets its sentiment derivation spans and total score
 * attached to the same entry (created on demand).
 */
private void buildLabelledSentences(ContentIndex contentindex) throws Exception {
    // build a map [par,sent] -> Entity References
    contentindex.LabelledSentences = new HashMap<String,LabelledSentence>();
    for (int i = 0;i < contentindex.TopTags.length;i++)
    {
        String entity = contentindex.TopTags[i].getWord();
        EntityStats estats = contentindex.ContentEntityStats.get(entity);
        for (TextReference tref : estats.References)
        {
            String parsenkey = ContentIndex.parSenKey(tref.ParagraphNum,tref.SentenceNum);
            // create the sentence entry lazily on first reference
            LabelledSentence lsentence = contentindex.LabelledSentences.get(parsenkey);
            if (lsentence == null)
            {
                lsentence = new LabelledSentence();
                contentindex.LabelledSentences.put(parsenkey, lsentence);
            }
            EntityLabelledSpan lspan = new EntityLabelledSpan(tref.Span.getStart(),tref.Span.getEnd(),IndexerLabel.EntityLabel,entity);
            lsentence.ParSenKey = parsenkey;
            lsentence.LabelledPositions.addEntityLabelledSpan(lspan);
            // i is the tag's rank in TopTags, recorded with the entity
            lsentence.addContainedEntity(entity,i);
        }
    }
    for (int parnum = 0;parnum < contentindex.ParagraphIndexes.length;parnum++)
    {
        for (int sennum = 0;sennum < contentindex.ParagraphIndexes[parnum].SentenceCount;sennum++)
        {
            // add sentiment to above map
            FloatVector sentiment = contentindex.ParagraphIndexes[parnum].SentenceSentiments[sennum];
            if ((sentiment == null) || !sentiment.hasIntensities())
                continue;
            String parsenkey = ContentIndex.parSenKey(parnum,sennum);
            LabelledSentence lsentence = contentindex.LabelledSentences.get(parsenkey);
            if (lsentence == null)
            {
                lsentence = new LabelledSentence();
                contentindex.LabelledSentences.put(parsenkey, lsentence);
            }
            lsentence.ParSenKey = parsenkey;
            lsentence.LabelledPositions.addLabelledSpans(sentiment.getDerivationSpans());
            lsentence.TotalSentimentScore = sentiment.sumAllIntensities();
        }
    }
}
/**
 * Groups the labelled sentences into chunks. Article-like content (or content
 * of unknown type) is chunked by sentiment compatibility; every other content
 * type is collapsed into a single chunk.
 */
private void chunkLabelledSentences(ContentIndex contentindex) throws Exception {
    // TODO: 7/8/18: Add handling for contentindex.ParseDepth == DEEP
    boolean articleLike = contentindex.ContentType == IndexingConsts.ContentType.UNKNOWN
            || IndexingConsts.lstArticleContentTypes.contains(contentindex.ContentType);
    if (articleLike) {
        chunkByCompatibility(contentindex);
    } else {
        chunkInOne(contentindex);
    }
}
/**
 * Collapses every sentence of the content into a single LabelledText chunk.
 * Sentences without entities or sentiment still contribute their text; the
 * chunk's aggregate sentiment score is the sum of all accumulated intensities.
 */
private void chunkInOne(ContentIndex contentindex) throws Exception {
    LabelledText whole = new LabelledText();
    for (int par = 0; par < contentindex.ParagraphIndexes.length; par++) {
        ParagraphIndex paragraph = contentindex.ParagraphIndexes[par];
        for (int sen = 0; sen < paragraph.SentenceCount; sen++) {
            String key = ContentIndex.parSenKey(par, sen);
            LabelledSentence labelled = contentindex.LabelledSentences.get(key);
            if (labelled == null) {
                // a sentence without entities or sentiment: include it anyway
                labelled = new LabelledSentence();
                labelled.ParSenKey = key;
            }
            FloatVector sentiment = contentindex.ParagraphIndexes[par].SentenceSentiments[sen];
            whole.addSentence(labelled, sentiment);
        }
    }
    whole.AggregateSentimentScore = whole.AggregateSentiment.sumAllIntensities();
    contentindex.ChunkedSentences = new CSList<LabelledText>(new LabelledText[]{ whole });
}
/**
 * Groups labelled sentences into LabelledText chunks by sentiment
 * compatibility, walking the sentences in par/sen key order. A chunk is
 * closed and a new one started when:
 *  - the paragraph changes, or there is a gap of 2+ sentences, or
 *  - the chunk already holds more than 30 entities (target ~30 per chunk,
 *    roughly 200 characters), or would clearly overshoot (>15 held and the
 *    next sentence would push past 35), or
 *  - the next sentence's sentiment valence is incompatible with the chunk's
 *    aggregate sentiment.
 * Entity-only sentences (no sentiment intensities) are always appended to the
 * current chunk. The final partial chunk is flushed at the end.
 */
private void chunkByCompatibility(ContentIndex contentindex) throws Exception {
CSList<LabelledText> chunks = new CSList<LabelledText>();
// keys are "par,sen"; sorting yields document order (lexicographic on the key string)
CSList<String> keyssorted = new CSList<String>(CollectionSupport.mk(contentindex.LabelledSentences.keySet()));
Collections.sort(keyssorted);
int blockparnum = 0;
int prevsennum = 0;
int blockentities = 0;
LabelledText block = new LabelledText();
for (String parsenkey : keyssorted)
{
int curparnum = 0;
int cursennum = 0;
RefSupport<Integer> refVar13 = new RefSupport<Integer>();
RefSupport<Integer> refVar14 = new RefSupport<Integer>();
ContentIndex.splitParSenKey(parsenkey,refVar13,refVar14);
curparnum = refVar13.getValue();
cursennum = refVar14.getValue();
int sententities = contentindex.ParagraphIndexes[curparnum].SentenceFlags[cursennum].SentenceStats.NumAllEntities;
// Check if we need to package accumulated sentences already
// Target number of entities per quote is 30 (~200 characters)
if ((blockparnum != curparnum) || (cursennum - prevsennum >= 2) || (blockentities > 30) || ((blockentities > 15) && (blockentities + sententities > 35)))
{
if (block.ParSenKeys.size() > 0)
{
// flush the accumulated chunk and reset the accumulators
block.AggregateSentimentScore = block.AggregateSentiment.sumAllIntensities();
chunks.add(block);
block = new LabelledText();
blockparnum = curparnum;
prevsennum = -1;
blockentities = 0;
}
}
LabelledSentence lsentence = contentindex.LabelledSentences.get(parsenkey);
FloatVector sentiment = contentindex.ParagraphIndexes[curparnum].SentenceSentiments[cursennum];
if ((sentiment == null) || !sentiment.hasIntensities())
{
// this must be an entity sentence
block.addSentence(lsentence,null);
blockentities += sententities;
prevsennum = cursennum;
}
else
{
if (block.AggregateSentiment.hasCompatibleValence(sentiment))
{
block.addSentence(lsentence,sentiment);
blockentities += sententities;
prevsennum = cursennum;
}
else
{
// incompatible valence: close the current chunk before adding
if (block.ParSenKeys.size() >= 1)
{
block.AggregateSentimentScore = block.AggregateSentiment.sumAllIntensities();
chunks.add(block);
block = new LabelledText();
blockentities = 0;
}
block.addSentence(lsentence,sentiment);
prevsennum = cursennum;
blockentities += sententities;
}
}
}
// flush the trailing chunk, if any
if ((block.ParSenKeys.size() >= 1))
{
block.AggregateSentimentScore = block.AggregateSentiment.sumAllIntensities();
chunks.add(block);
}
contentindex.ChunkedSentences = chunks;
}
/**
 * Chooses which sentiment chunks to publish on the index. FULLINDEX keeps
 * every chunk in document order; TOPSENTIMENTS keeps only the most relevant
 * MaxTopSentiments chunks. buildText() materializes each selected chunk's
 * combined text and remapped label positions.
 */
private void selectSentiments(ContentIndex contentindex) throws Exception {
    if (contentindex.IndexingType == IndexingConsts.IndexingType.FULLINDEX) {
        // Full index: take the ChunkedSentences as-is, which are already in
        // the order of the sentences in the text.
        CSList<LabelledText> all = new CSList<LabelledText>(contentindex.ChunkedSentences);
        for (LabelledText chunk : all) {
            buildText(contentindex, chunk);
        }
        contentindex.SelectedSentiments = all;
    } else if (contentindex.IndexingType == IndexingConsts.IndexingType.TOPSENTIMENTS) {
        CSList<LabelledText> byRelevance = new CSList<LabelledText>(contentindex.ChunkedSentences);
        Collections.sort(byRelevance, new LabelledTextRelevanceComparer()); // ascending
        int keep = (byRelevance.size() < ContentIndex.MaxTopSentiments) ? byRelevance.size() : ContentIndex.MaxTopSentiments;
        CSList<LabelledText> best = new CSList<LabelledText>();
        for (int i = 0; i < keep; i++) {
            // most relevant chunks sit at the end of the ascending sort
            LabelledText chunk = byRelevance.get(byRelevance.size() - i - 1);
            buildText(contentindex, chunk);
            best.add(chunk);
        }
        contentindex.SelectedSentiments = best;
    }
    // TODO: 7/8/18: when DEEP, select all
}
/**
 * Normalizes raw tokens into "fixed" tokens and rebuilds the sentence string
 * from them (single-space separated). For each token, fixToken() decides
 * case normalization and classifies it (all-caps / first-cap / quote / link /
 * hashtag); the classification is recorded on the SentenceFlags keyed by the
 * token's span in the rebuilt sentence.
 *
 * Outputs (via ref parameters):
 *  - spans: span of each fixed token within the rebuilt sentence
 *  - fixedtokens: the normalized tokens (null if the input produced no text)
 *  - sentence: the rebuilt sentence without trailing space (null if empty)
 */
public void fixTokens(String[] rawtokens, SentenceFlags flags, RefSupport<Span[]> spans, RefSupport<String[]> fixedtokens, RefSupport<String> sentence) throws Exception {
fixedtokens.setValue(null);
sentence.setValue(null);
StringBuilder sb = new StringBuilder();
CSList<String> fixedtokenlist = new CSList<String>();
spans.setValue(new Span[rawtokens.length]);
Integer start = 0;
for (Integer i = 0;i < rawtokens.length;i++)
{
String rawtoken = rawtokens[i];
// lookahead token is used by fixToken for first-cap disambiguation
String nexttoken = (i < rawtokens.length - 1) ? rawtokens[i + 1] : null;
Boolean isAllCaps = false, isFirstCap = false, isQuote = false, isLink = false, isHashtag = false;
RefSupport<Boolean> refVar15 = new RefSupport<Boolean>();
RefSupport<Boolean> refVar16 = new RefSupport<Boolean>();
RefSupport<Boolean> refVar17 = new RefSupport<Boolean>();
RefSupport<Boolean> refVar18 = new RefSupport<Boolean>();
RefSupport<Boolean> refVar19 = new RefSupport<Boolean>();
String fixedtoken = fixToken(rawtoken,i,nexttoken,refVar15,refVar16,refVar17,refVar18,refVar19);
isAllCaps = refVar15.getValue();
isFirstCap = refVar16.getValue();
isQuote = refVar17.getValue();
isLink = refVar18.getValue();
isHashtag = refVar19.getValue();
fixedtokenlist.add(fixedtoken);
sb.append(fixedtoken).append(" ");
// span of the fixed token inside the rebuilt sentence
spans.getValue()[i] = new Span(start,start + fixedtoken.length());
// keep the original text when normalization changed the token
if (!StringSupport.equals(fixedtoken, rawtoken))
flags.getSpanFlags(spans.getValue()[i]).OriginalText = rawtoken;
if (isLink)
flags.getSpanFlags(spans.getValue()[i]).IsLink = true;
else if (isHashtag)
flags.getSpanFlags(spans.getValue()[i]).IsHashtag = true;
if (isAllCaps)
flags.getSpanFlags(spans.getValue()[i]).IsAllCaps = true;
else if (isFirstCap)
flags.getSpanFlags(spans.getValue()[i]).IsFirstCap = true;
else if (isQuote)
flags.Quotes.add(spans.getValue()[i]);
// +1 accounts for the single separator space appended above
start += fixedtoken.length() + 1;
}
if (sb.length() == 0)
return ;
fixedtokens.setValue((fixedtokenlist.toArray(new String[fixedtokenlist.size()])));
// drop the trailing separator space
sentence.setValue(sb.substring(0, (0)+(sb.length() - 1)).toString());
}
/**
 * Normalizes a single token and classifies it via the ref out-parameters.
 *
 * Handling, in order:
 *  - single-character punctuation is canonicalized (brackets to -LRB-/-RRB-,
 *    curly quotes to '"' with isQuote set, dashes to "-");
 *  - intensity tokens (FloatVector-encoded markers) pass through unchanged,
 *    setting isLink/isHashtag from their encoded dimensions;
 *  - otherwise case is normalized against the POS tag dictionary: if the
 *    exact token has tags it is kept; if only its lower-case form has tags,
 *    the lower-case form is used — with isAllCaps set for ALL-CAPS tokens,
 *    and isFirstCap set for a capitalized sentence-initial token (unless the
 *    next token is also capitalized, which suggests a proper-noun phrase).
 *
 * @param token            the raw token
 * @param sentenceposition index of the token within the sentence (0 = first)
 * @param nexttoken        following token, or null at sentence end
 * @return the normalized token
 */
public String fixToken(String token, int sentenceposition, String nexttoken, RefSupport<Boolean> isAllCaps, RefSupport<Boolean> isFirstCap, RefSupport<Boolean> isQuote, RefSupport<Boolean> isLink, RefSupport<Boolean> isHashtag) throws Exception {
isAllCaps.setValue(false);
isFirstCap.setValue(false);
isQuote.setValue(false);
isLink.setValue(false);
isHashtag.setValue(false);
if (token.length() == 1)
{
int tokencat = Character.getType(token.charAt(0));
switch (tokencat)
{
case Character.START_PUNCTUATION:
return "-LRB-";
case Character.END_PUNCTUATION:
return "-RRB-";
case Character.INITIAL_QUOTE_PUNCTUATION:
isQuote.setValue(true);
return "\"";
case Character.FINAL_QUOTE_PUNCTUATION:
isQuote.setValue(true);
return "\"";
case Character.DASH_PUNCTUATION:
return "-";
}
// plain ASCII double quote is not in the quote categories above
if (token.equals("\""))
{
isQuote.setValue(true);
return "\"";
}
}
if (FloatVector.isIntensityToken(token))
{
// encoded marker token: read link/hashtag flags from its dimensions
FloatVector parsevector = new FloatVector();
parsevector.initFromIntensityToken(token);
Float value;
value = parsevector.get(FloatVector.IsHashTagDimension);
if (value!=null)
isHashtag.setValue((value == FloatVector.InitialValue));
else
isHashtag.setValue(false);
value = parsevector.get(FloatVector.IsLinkDimension);
if (value!=null)
isLink.setValue((value == FloatVector.InitialValue));
else
isLink.setValue(false);
return token;
}
// short tokens are a mixed bag. There are 42 lowercase 2-character words,
// and about 10 of them have overlaps in meaning
// AX, ET, DE, ON, OR, US, OH, EN,
// CA/ca(md) - bad, LA/la(dt) - bad, IT/it(PRP) - bad, AM/am(vb) - bad
// if (token.Length <= 2) return token;
// if this exact token has POS tags, return
if (mTagDictionary.getTags(token) != null)
return token;
String lower = token.toLowerCase();
if (mTagDictionary.getTags(lower) == null)
return token;
// if we found tags for the lower-cased version of the word, then
// we better use the lower-case version
if (StringSupport.equals(token, token.toUpperCase()))
{
isAllCaps.setValue(true);
return lower;
}
if ((sentenceposition == 0) && Character.isUpperCase(token.charAt(0)))
{
// a capitalized next token suggests a proper-noun phrase: keep the case
if ((nexttoken != null) && Character.isUpperCase(nexttoken.charAt(0)))
return token;
else
{
isFirstCap.setValue(true);
return lower;
}
}
return token;
}
/**
 * Post-corrects POS tags in place:
 *  - lone non-letter, non-digit, non-punctuation characters become SYM;
 *  - capitalized adjectives are retagged: sentence-initial salutations to UH,
 *    a sentence-initial adjective followed by a proper noun to NNP, and any
 *    later capitalized adjective to NNP (part of a proper-noun phrase);
 *  - capitalized regular nouns past position 0 are promoted NN->NNP and
 *    NNS->NNPS.
 */
public void fixTags(String[] tokens, String[] tags) throws Exception {
int idx = 0;
while (idx < tokens.length)
{
if (tokens[idx].length() == 1)
{
char c = tokens[idx].charAt(0);
if (!Character.isLetterOrDigit(c) && !LangUtils.isCharPunctuation(c))
tags[idx] = "SYM";
}
// correct adjectives that are part of NNP phrases
if (Character.isUpperCase(tokens[idx].charAt(0)))
{
if (tlAdjectives.contains(tags[idx]))
{
// correct first adj
if (idx == 0)
{
if (tlSalutations.contains(tokens[idx]))
tags[idx] = "UH";
// tags[1] is tags[idx + 1] here, since idx == 0
else if (tokens.length >= 2 && tlProperNouns.contains(tags[1]))
tags[idx] = "NNP";
}
// NOTE(review): this condition is always true inside the loop
// (idx < tokens.length); presumably "idx < tokens.length - 1"
// was meant — confirm before changing
else if (idx <= tokens.length - 1)
tags[idx] = "NNP";
}
else if (tlRegularNouns.contains(tags[idx]))
{
if (idx > 0)
{
tags[idx] = (StringSupport.equals(tags[idx], "NN")) ? "NNP" : "NNPS";
}
}
}
idx++;
}
}
/**
 * Post-corrects chunker output in place (chunks use B-/I-/O notation via the
 * CHUNKER_CHUNK_START/CONT/OTHER constants):
 *  - a possessive (POS) starting a chunk is folded into the previous chunk;
 *  - interjection chunks (INTJ) force the token's tag to UH;
 *  - ADVP chunks are merged into adjacent verb phrases or a following ADVP;
 *  - outside any chunk (O): "RB + JJ/VBN" pairs become an ADJP chunk,
 *    runs of manual-NP-fix tag types are grouped into an NP chunk, verbs
 *    start a VP chunk, and good connecting prepositions between two
 *    proper-noun NPs join them into one NP.
 */
public void fixChunks(String[] tokens, String[] tags, String[] chunks) throws Exception {
int idx = 0;
while (idx < chunks.length)
{
// fix case when NP and POS are divided into two different chunks
if (chunks[idx].startsWith(CHUNKER_CHUNK_START))
{
// if the chunk is started by a POS, then add it to previous chunk
if ((StringSupport.equals(tags[idx], "POS")) && (idx > 0) && (!StringSupport.equals(chunks[idx - 1], CHUNKER_CHUNK_OTHER)))
{
// continue the previous chunk's type (substring(2) strips "B-"/"I-")
chunks[idx] = CHUNKER_CHUNK_CONT + chunks[idx - 1].substring(2);
}
else if (StringSupport.equals(chunks[idx].substring(2), "INTJ"))
{
tags[idx] = "UH";
}
else if (StringSupport.equals(chunks[idx].substring(2), "ADVP"))
{
// look ahead for Verbs
if ((idx + 1 < chunks.length) && (tlVerbs.contains(tags[idx + 1])))
{
if (StringSupport.equals(chunks[idx + 1], CHUNKER_CHUNK_OTHER))
{
chunks[idx] = CHUNKER_CHUNK_START + "VP";
chunks[idx + 1] = CHUNKER_CHUNK_CONT + "VP";
}
}
else // look backwards for started verb phrases
if ((idx > 0) && (chunks[idx - 1].length() > 2) && (StringSupport.equals(chunks[idx - 1].substring(2), "VP")))
{
chunks[idx] = CHUNKER_CHUNK_CONT + "VP";
}
else // look ahead for the start of another another ADVP
if ((idx + 1 < chunks.length) && ((StringSupport.equals(chunks[idx + 1], CHUNKER_CHUNK_START + "ADVP"))))
{
chunks[idx + 1] = CHUNKER_CHUNK_CONT + "ADVP";
}
}
}
else if (StringSupport.equals(chunks[idx], CHUNKER_CHUNK_OTHER))
{
String plabel = tokens[idx] + '/' + tags[idx];
if (((StringSupport.equals(tags[idx], "JJ")) || (StringSupport.equals(tags[idx], "VBN"))) && (idx > 0) && (StringSupport.equals(tags[idx - 1], "RB")))
{
// adverb + adjective/participle: promote the pair to an ADJP chunk
if ((StringSupport.equals(chunks[idx - 1], CHUNKER_CHUNK_START + "ADVP")) || (StringSupport.equals(chunks[idx - 1], CHUNKER_CHUNK_OTHER)))
{
chunks[idx - 1] = CHUNKER_CHUNK_START + "ADJP";
chunks[idx] = CHUNKER_CHUNK_CONT + "ADJP";
}
}
else if (tlManualNPFixTypes.contains(tags[idx]))
{
// group the run of NP-fixable tokens that follows into one NP chunk
chunks[idx] = CHUNKER_CHUNK_START + "NP";
int idx2 = idx;
boolean phrasebreak = false;
while (!phrasebreak && (idx2 < (chunks.length - 1)))
{
idx2++;
if ((StringSupport.equals(chunks[idx2], CHUNKER_CHUNK_OTHER)) && tlManualNPFixTypes.contains(tags[idx2]))
{
chunks[idx2] = CHUNKER_CHUNK_CONT + "NP";
}
else
phrasebreak = true;
}
// resume scanning after the grouped run
if (phrasebreak)
idx = idx2 - 1;
else
idx = idx2;
}
else if (tlVerbs.contains(tags[idx]))
{
chunks[idx] = CHUNKER_CHUNK_START + "VP";
}
else if (tlGoodNPConnectingPrepositions.contains(plabel))
{
// "NP prep NP" over proper nouns (e.g. "Bank of America"): join into one NP
if (((idx > 0) && (chunks[idx - 1].length() > 2) && (StringSupport.equals(chunks[idx - 1].substring(2), "NP"))) && ((idx < chunks.length - 1) && (chunks[idx + 1].length() > 2) && (StringSupport.equals(chunks[idx + 1].substring(2), "NP"))) && (tlProperNouns.contains(tags[idx - 1]) && tlProperNouns.contains(tags[idx + 1])))
{
chunks[idx] = CHUNKER_CHUNK_CONT + "NP";
chunks[idx + 1] = CHUNKER_CHUNK_CONT + "NP";
}
}
}
idx++;
}
}
/**
 * Creates one POS-tagged Parse per token (a TOK_NODE leaf wrapped in its tag
 * node), for processing by buildPosSentence to create Ngram stats for
 * sentences.
 *
 * @param text       the sentence text the spans refer to
 * @param tokens     tokens of the sentence (defines the result length)
 * @param tokenspans character span of each token within text
 * @param tags       POS tag of each token
 * @return an array of tag-level parses, one per token
 */
public Parse[] createParsesFromTokensAndTags(String text, String[] tokens, Span[] tokenspans, String[] tags) throws Exception {
    Parse[] result = new Parse[tokens.length];
    for (int i = 0; i < tokens.length; i++) {
        Parse leaf = new Parse(text, tokenspans[i], AbstractBottomUpParser.TOK_NODE, 1.0, 1);
        Parse tagged = new Parse(text, tokenspans[i], tags[i], 1.0, 1);
        tagged.insert(leaf);
        result[i] = tagged;
    }
    return result;
}
/**
 * Assembles a shallow parse tree (TOP -> S -> chunks/tokens) from chunker
 * output. Tokens outside any chunk (O) hang directly off the sentence node;
 * B-/I- runs are accumulated and wrapped into a chunk node when the run ends
 * (including a trailing run at the end of the sentence).
 *
 * @param text       the sentence text the spans refer to
 * @param tokens     tokens (used to advance character offsets; +1 per space)
 * @param tokenspans character span of each token within text
 * @param tags       POS tag of each token
 * @param chunks     chunk labels per token (B-XX / I-XX / O)
 * @return the TOP parse node covering the whole sentence
 */
public Parse createParseFromChunks(String text, String[] tokens, Span[] tokenspans, String[] tags, String[] chunks) throws Exception {
Parse topParse = new Parse(text, new Span(0,text.length()), AbstractBottomUpParser.TOP_NODE, 1.0, 1);
Parse sentenceParse = new Parse(text, new Span(0,text.length()), "S", 1.0, 1);
topParse.insert(sentenceParse);
CSList<Parse> chunkParses = new CSList<Parse>();
// start: running character offset; chunkstart/chunktype: open chunk state
int start = 0, chunkstart = 0;
String chunktype = null;
for (int currentChunk = 0, chunkCount = chunks.length;currentChunk < chunkCount;currentChunk++)
{
if (currentChunk > 0 && !chunks[currentChunk].startsWith(CHUNKER_CHUNK_CONT) && !chunks[currentChunk - 1].equals(CHUNKER_CHUNK_OTHER))
// this indicates end of a chunk, so create a chunk with accumulated parses
createChunkParse(text,sentenceParse,chunkParses,start,chunkstart,chunktype);
Parse tokenParse = new Parse(text, tokenspans[currentChunk], AbstractBottomUpParser.TOK_NODE, 1.0, 1);
Parse posParse = new Parse(text, tokenspans[currentChunk], tags[currentChunk], 1.0, 1);
posParse.insert(tokenParse);
if (chunks[currentChunk].equals(CHUNKER_CHUNK_OTHER))
{
// outside any chunk: attach the tagged token directly to the sentence
sentenceParse.insert(posParse);
}
else if (chunks[currentChunk].startsWith(CHUNKER_CHUNK_START))
{
// open a new chunk (substring(2) strips the "B-" prefix)
chunktype = chunks[currentChunk].substring(2);
chunkstart = start;
chunkParses.add(posParse);
}
else if (chunks[currentChunk].startsWith(CHUNKER_CHUNK_CONT))
chunkParses.add(posParse);
// +1 for the separator space between tokens
start += tokens[currentChunk].length() + 1;
}
// flush a chunk still open at the end of the sentence
if ((!StringSupport.equals(chunks[chunks.length - 1], CHUNKER_CHUNK_OTHER)) && chunkParses.size() > 0)
createChunkParse(text,sentenceParse,chunkParses,start,chunkstart,chunktype);
return topParse;
}
/**
 * Wraps the accumulated token parses into one chunk parse of the given type
 * and attaches it to the sentence node; the accumulator list is cleared.
 */
private void createChunkParse(String text, Parse rootParse, CSList<Parse> chunkParses, int start, int chunkstart, String chunktype) throws Exception {
    // start points one past the chunk's trailing separator, hence the -1
    Parse chunk = new Parse(text, new Span(chunkstart, start - 1), chunktype, 1.0, 1);
    for (Parse member : chunkParses) {
        chunk.insert(member);
    }
    chunkParses.clear();
    rootParse.insert(chunk);
}
/**
 * Scans a parsed sentence for known idioms and records each occurrence (with
 * its covered parses) on the sentence flags.
 *
 * The sentence is re-encoded as "lemma/TAG ..." text and searched with the
 * idiom fast index at each token start. Token starts are visited in ascending
 * position order so that tokens already consumed by a found idiom are skipped.
 * (The original iterated a HashMap entry set, whose order is arbitrary, which
 * made the processedToParseIdx guard unreliable.)
 */
public void findIdiomsInSentence(Parse parse, SentenceFlags parseflags) throws Exception {
    Parse[] posnodes = parse.getTagNodes();
    RefSupport<HashMap<Integer,Integer>> outStartmarkers = new RefSupport<HashMap<Integer,Integer>>();
    String posedSentence = buildPosSentence(posnodes, outStartmarkers);
    // startmarkers: position in posedSentence -> index in posnodes
    HashMap<Integer,Integer> startmarkers = outStartmarkers.getValue();
    Integer[] positions = startmarkers.keySet().toArray(new Integer[startmarkers.size()]);
    Arrays.sort(positions);
    int processedToParseIdx = -1;
    for (Integer position : positions)
    {
        int parseIdx = startmarkers.get(position);
        // skip tokens already covered by an idiom we recorded
        if (parseIdx <= processedToParseIdx)
            continue;
        RefSupport<String> refIdiom = new RefSupport<String>();
        RefSupport<String> refSource = new RefSupport<String>();
        mDicts.Idioms.KeyFastIndex.findLongestKeywordAtPosition(posedSentence,position,refIdiom,refSource);
        String idiom = refIdiom.getValue();
        if (idiom != null)
        {
            int startidx = parseIdx;
            // The marker one past the idiom's trailing separator identifies the
            // first token after the idiom. When the idiom ends the sentence no
            // such marker exists; the original unboxed the null lookup result
            // here and threw NPE — fall back to the last token instead.
            Integer followerIdx = startmarkers.get(position + idiom.length() + 1);
            int endidx = (followerIdx != null) ? followerIdx - 1 : posnodes.length - 1;
            CSList<Parse> parselist = new CSList<Parse>();
            for (int j = startidx;j <= endidx;j++)
                parselist.add(posnodes[j]);
            parseflags.addIdiomOccurence(parselist,idiom);
            processedToParseIdx = endidx;
        }
    }
}
/**
 * Enumerates n-gram mentions (1..ngramMaxN tokens long) over a POS-encoded
 * sentence. Each mention is a substring of posSentence starting at a token
 * start and ending just before a later token start (or at the sentence end).
 * When ngramBreakAtPunctuation is set, n-grams neither start at nor span a
 * breaker token.
 *
 * @param posSentence  sentence encoded as "lemma/TAG lemma/TAG ..."
 * @param startmarkers map of token-start offset in posSentence -> parse index
 * @return the list of n-gram mention strings
 */
public CSList<String> generateNgramMentions(String posSentence, HashMap<Integer,Integer> startmarkers, Integer ngramMaxN, Boolean ngramBreakAtPunctuation) {
    CSList<String> mentions = new CSList<String>();
    int numStarts = startmarkers.size();
    int[] starts = new int[numStarts];
    int filled = 0;
    for (Integer offset : startmarkers.keySet()) {
        starts[filled++] = offset; // map keys are offsets into posSentence
    }
    Arrays.sort(starts);
    for (int i = 0; i < numStarts; i++) {
        // n-grams never begin on a breaker token
        if (ngramBreakAtPunctuation && isNgramBreakerToken(posSentence, starts[i])) {
            continue;
        }
        for (int j = 0; (j < ngramMaxN) && (i + j + 1 <= numStarts); j++) {
            // ... nor extend across one
            if (ngramBreakAtPunctuation && isNgramBreakerToken(posSentence, starts[i + j])) {
                break;
            }
            String gram;
            if (i + j + 1 == numStarts) {
                gram = posSentence.substring(starts[i]); // last token: take to end
            } else {
                gram = posSentence.substring(starts[i], starts[i + j + 1] - 1); // drop trailing space
            }
            mentions.add(gram);
        }
    }
    return mentions;
}
/**
 * Returns true when the token starting at tokenStart in the POS-encoded
 * sentence ("lemma/TAG ...") carries a breaker POS type (the first letter of
 * its tag is in tlNgramBreakerTypes).
 */
private Boolean isNgramBreakerToken(String posSentence, int tokenStart) {
    int slash = posSentence.indexOf('/', tokenStart);
    int space = posSentence.indexOf(' ', tokenStart);
    // Pronoun placeholders emitted by buildPosSentence ("SBJ", "SBJ$") carry
    // no slash. The original kept searching past the current token (or got
    // index -1 at sentence end) and classified by an unrelated character;
    // a slash-less token is simply not a breaker.
    if (slash < 0 || (space >= 0 && slash > space))
        return false;
    String pos = posSentence.substring(slash + 1, slash + 2);
    return tlNgramBreakerTypes.contains(pos);
}
/**
 * Re-encodes a sentence's POS nodes as a space-separated string of
 * "lemma/TAG" search tokens (pronouns become the placeholders "SBJ"/"SBJ$"),
 * for matching against the idiom index. Intensity-encoded tokens have their
 * original text and POS override restored first; verbs are reduced to their
 * best base form, and non-proper-noun lemmas are lower-cased.
 *
 * @param posnodes       the POS-level parse nodes of the sentence
 * @param refStartmarkers out: map of each token's start offset in the
 *                        returned string -> its index in posnodes
 * @return the POS-encoded sentence string
 */
public String buildPosSentence(Parse[] posnodes, RefSupport<HashMap<Integer,Integer>> refStartmarkers) throws Exception {
String posedSentence = "";
HashMap<Integer,Integer> startmarkers = new HashMap<Integer,Integer>();
for (int i = 0;i < posnodes.length;i++)
{
// position in posedSentence -> index in posnodes
String searchtoken = null;
if (StringSupport.equals(posnodes[i].getType(), "PRP"))
{
searchtoken = "SBJ";
}
else if (StringSupport.equals(posnodes[i].getType(), "PRP$"))
{
searchtoken = "SBJ$";
}
else
{
String lemma = getLemma(posnodes[i]);
String type = posnodes[i].getType();
// sso 7/8/2017: Links caused extra-long loops
if (FloatVector.isIntensityToken(lemma)) {
// restore the original surface text / POS from the encoded token
String posoverride = FloatVector.getDimensionValueFromIntensityToken(lemma,FloatVector.PosOverrideDimension);
String origText = FloatVector.getDimensionValueFromIntensityToken(lemma,FloatVector.OriginalTextDimension);
if (posoverride != null)
type = posoverride;
if (origText != null)
lemma = origText;
}
if (!tlProperNouns.contains(type))
lemma = lemma.toLowerCase();
if (tlVerbs.contains(type) || tlModalVerbs.contains(type))
{
// verbs are matched by their base form and base tag
String shortpos = LangUtils.parseTypeToShortPOS(type);
String basetype = LangUtils.baseTypeOfParseType(type);
String bestbaseform = mDicts.BaseForms.bestBaseForm(lemma,shortpos);
searchtoken = bestbaseform + "/" + basetype;
}
else
searchtoken = lemma + "/" + type;
}
if (searchtoken == null)
continue;
if (StringSupport.equals(posedSentence, ""))
{
startmarkers.put(0, i);
posedSentence = searchtoken;
}
else
{
// +1 accounts for the separator space before this token
startmarkers.put(posedSentence.length() + 1, i);
posedSentence += " " + searchtoken;
}
}
refStartmarkers.setValue(startmarkers);
return posedSentence;
}
/**
 * Concatenates the chunk's indexed sentences into ltext.Text (separated by
 * single spaces) and remaps each sentence's labelled positions by its offset
 * in the combined text.
 */
private void buildText(ContentIndex contentindex, LabelledText ltext) throws Exception {
    LabelledPositionsV2 combined = new LabelledPositionsV2();
    for (String key : ltext.ParSenKeys) {
        LabelledSentence labelled = contentindex.LabelledSentences.get(key);
        String sentence = contentindex.indexedSentenceByParSenKey(key);
        if (!StringSupport.isNullOrEmpty(ltext.Text)) {
            ltext.Text += " ";
        } else if (ltext.Text == null) {
            ltext.Text = "";
        }
        int offset = ltext.Text.length();
        ltext.Text += sentence;
        if (labelled != null) {
            // shift sentence-local label spans into combined-text coordinates
            combined.addPositions(labelled.LabelledPositions, offset);
        }
    }
    ltext.LabelledPositions = combined;
}
/**
 * Converts an entity phrase (a list of POS parses) into its canonical key
 * string: lemma keys joined by spaces, with hyphenated tokens lemmatized on
 * their right-hand part only. A phrase whose first parse is tagged CD keeps
 * every token's original form.
 */
private String goodEntityToKeyString(CSList<Parse> phrase) throws Exception {
    boolean keepOrigForm = StringSupport.equals(phrase.get(0).getType(), "CD");
    StringBuilder key = new StringBuilder();
    for (Parse item : phrase) {
        String lemma = item.toString();
        String[] parts = StringSupport.Split(lemma, '-');
        if (parts.length == 2) {
            // hyphenated token: canonicalize only the right-hand half
            lemma = parts[0] + '-' + getLemmaKey(item.getType(), parts[1], keepOrigForm);
        } else if (parts.length == 1) {
            lemma = getLemmaKey(item.getType(), parts[0], keepOrigForm);
        }
        if (key.length() > 0) {
            key.append(' ');
        }
        key.append(lemma);
    }
    return key.toString();
}
/**
 * Canonicalizes a single lemma for entity keys: plural common nouns (NNS) are
 * reduced to their singular base form, and non-proper-noun lemmas are
 * lower-cased. When keepOrigForm is set the lemma is returned untouched.
 */
private String getLemmaKey(String parsetype, String lemma, boolean keepOrigForm) throws Exception {
    if (keepOrigForm) {
        return lemma;
    }
    String key = lemma;
    if (StringSupport.equals(parsetype, "NNS")) {
        // plural common noun -> singular base form
        key = mDicts.BaseForms.bestBaseForm(key, "noun");
    }
    boolean properNoun = StringSupport.equals(parsetype, "NNP") || StringSupport.equals(parsetype, "NNPS");
    if (!properNoun) {
        key = key.toLowerCase();
    }
    return key;
}
/**
 * Rebuilds the display ("indexed") form of a parsed sentence from its POS
 * nodes, restoring each token's original text (from intensity encoding or
 * the sentence flags) and inserting spaces only where addWhiteSpace allows.
 *
 * Outputs (via ref parameters):
 *  - indexedSentence: the rebuilt sentence string
 *  - spanmap: map from each POS node's original span key to its new span in
 *    the rebuilt sentence (note: new spans END ON the last character, not one
 *    past it)
 * The sentence sentiment's derivation spans, if present, are remapped to the
 * new coordinates in place.
 */
private void makeIndexedSentence(Parse parsedSentence, SentenceFlags sentenceflags, FloatVector sentenceSentiment, RefSupport<String> indexedSentence, RefSupport<HashMap<String,Span>> spanmap) throws Exception {
indexedSentence.setValue(null);
StringBuilder sb = new StringBuilder();
Parse[] posnodes = parsedSentence.getTagNodes();
String[] newtokens = new String[posnodes.length];
spanmap.setValue(new HashMap<String,Span>());
for (Integer i = 0;i < posnodes.length;i++)
{
String nodetext = posnodes[i].toString();
String originaltext = null;
// prefer the original surface text: from the intensity encoding, else
// from the sentence flags (recorded when fixTokens normalized the token)
if (FloatVector.isIntensityToken(nodetext))
originaltext = FloatVector.getDimensionValueFromIntensityToken(nodetext,FloatVector.OriginalTextDimension);
else if ((originaltext = sentenceflags.getOriginalText(posnodes[i].getSpan())) == null)
originaltext = nodetext;
// originaltext will be set in IsBrackets
Boolean addWhitespace = addWhiteSpace(originaltext,posnodes[i].getType());
if ((sb.length() > 0) && addWhitespace)
sb.append(' ');
Integer newSpanStart = sb.length();
sb.append(originaltext);
Span newSpan = new Span(newSpanStart,sb.length() - 1); // note that the new Span ends on the last character, and not on next
spanmap.getValue().put(LangUtils.spanKey(posnodes[i].getSpan()), newSpan);
}
indexedSentence.setValue(sb.toString());
if (sentenceSentiment != null)
sentenceSentiment.remapSpans(spanmap.getValue());
}
/**
 * Decides whether a space should precede this token when the indexed sentence
 * is rebuilt. Possessive POS tags, contraction clitics ('s, n't, 've, 'm,
 * 're, 'll, 'd) and orthography-only types attach directly to the previous
 * token; everything else is space-separated.
 */
private Boolean addWhiteSpace(String lemma, String parsetype) throws Exception {
    if (StringSupport.equals(parsetype, "POS")) {
        return false;
    }
    if (StringSupport.equals(parsetype, "VBZ") && StringSupport.equals(lemma, "'s")) {
        return false;
    }
    if (StringSupport.equals(parsetype, "RB") && StringSupport.equals(lemma, "n't")) {
        return false;
    }
    boolean verbClitic = StringSupport.equals(lemma, "'ve") || StringSupport.equals(lemma, "'m") || StringSupport.equals(lemma, "'re");
    if (StringSupport.equals(parsetype, "VBP") && verbClitic) {
        return false;
    }
    boolean modalClitic = StringSupport.equals(lemma, "'ll") || StringSupport.equals(lemma, "'d");
    if (StringSupport.equals(parsetype, "MD") && modalClitic) {
        return false;
    }
    if (tlOrphographyTypes.contains(parsetype)) {
        return false;
    }
    return true;
}
/**
 * Recursively derives the sentiment vector of a parse node. POS-level nodes
 * are scored directly from the dictionaries; higher nodes accumulate their
 * children via accumulateVectors (when chunking is used instead of full
 * parsing, shallow accumulation breaks apply there).
 *
 * Post-processing at the top of the tree: sentiment inside quotes, inside a
 * question (by node type or trailing '?'), or co-occurring with the
 * sly/ironic/sarcastic dimension is demoted to ambiguous sentiment; at
 * sentence tops the derivation spans are flagged IsSentiment.
 *
 * Sets parsevector to null when the node yields no sentiment.
 */
private void getSentimentVector(Parse parse, SentenceFlags sentenceflags, ParseDepth parsedepth, RefSupport<FloatVector> parsevector) throws Exception {
parsevector.setValue(null);
if ((parse == null) || (StringSupport.equals(parse.getType(), "INC")) || (StringSupport.equals(parse.getType(), AbstractBottomUpParser.TOK_NODE)))
{
return ;
}
// check for TK: should not happen, as the recursion stops at POS, but be cautious
// check for null: sometimes the parse tree has null children
// check for not complete: depending on beam size, sentence sometimes does not get parsed correctly
Parse curparse = (!StringSupport.equals(parse.getType(), AbstractBottomUpParser.TOP_NODE)) ? parse : parse.getChildren()[0];
RefSupport<FloatVector> outParsevector = new RefSupport<FloatVector>();
if (curparse.isPosTag())
{
// leaf case: score the word itself
getSentimentVectorFromPOSParse(curparse,sentenceflags,outParsevector);
parsevector.setValue(outParsevector.getValue());
}
else
{
// interior node: accumulate the children's vectors
accumulateVectors(curparse,sentenceflags,new CSList<Parse>(curparse.getChildren()),null,parsedepth,outParsevector);
parsevector.setValue(outParsevector.getValue());
if (parsevector.getValue() != null)
{
// quoted sentiment is not the author's own: demote to ambiguous
if (sentenceflags.isInQuotes(curparse.getSpan()))
parsevector.getValue().moveAllToAmbiguousSentiment();
Boolean isQSTop = tlQuestionSentenceTop.contains(curparse.getType());
Boolean isSTop = tlSentenceTop.contains(curparse.getType());
if (isQSTop || isTerminatedByQuestion(curparse))
{
parsevector.getValue().moveAllToAmbiguousSentiment();
}
else if (isSTop && parsevector.getValue().containsKey(FloatVector.SlyIronicSarcasticDimension))
{
parsevector.getValue().moveAllToAmbiguousSentiment();
}
if (isQSTop || isSTop)
{
// flag each contributing span as sentiment-bearing
CSList<LabelledSpan> lspans = parsevector.getValue().getDerivationSpans();
for (LabelledSpan lspan : lspans)
sentenceflags.getSpanFlags((Span)lspan).IsSentiment = true;
}
}
}
}
/**
 * Accumulates the sentiment vectors of the given child parses into a single
 * vector for their parent.
 *
 * Under SHALLOW parsing with an accumulation-break parent, contributions at
 * or after the first negation are collected separately and folded back in
 * after negation/multiplication has been applied to each half, so the
 * negation only scopes over what follows it. ADJP/ADVP chunks whose
 * modifiers carry a score promote the whole chunk to a sentiment, and
 * non-entity sentiment idioms registered on the parent are mixed in.
 *
 * Sets listvector to null when the accumulated vector has no intensities.
 */
private void accumulateVectors(Parse parent, SentenceFlags sentenceflags, CSList<Parse> parses, CSList<Parse> parsesToIgnore, ParseDepth parsedepth, RefSupport<FloatVector> listvector) throws Exception {
    listvector.setValue(new FloatVector());
    FloatVector afternegationvector = new FloatVector();
    Integer negationstart = Integer.MAX_VALUE;
    CSList<Parse> plainParses = new CSList<Parse>();
    for (Parse parse : parses)
    {
        // accumulate all chunks
        if ((parsesToIgnore != null) && parsesToIgnore.contains(parse))
            continue;
        RefSupport<FloatVector> outParsevector = new RefSupport<FloatVector>();
        getSentimentVector(parse,sentenceflags,parsedepth,outParsevector);
        FloatVector parsevector = outParsevector.getValue();
        if (parsevector == null)
        {
            // no sentiment of its own; may still matter for ADJP/ADVP promotion
            plainParses.add(parse);
        }
        else if ((parsedepth == IndexingConsts.ParseDepth.SHALLOW) && (tlShallowAccumulationBreakParents.contains(parent.getType())))
        {
            // remember where the first negation starts; everything at or
            // after it accumulates into the after-negation vector
            Float negationscore = parsevector.get(FloatVector.NegationDimension);
            if (negationstart == Integer.MAX_VALUE && negationscore != null)
                negationstart = parse.getSpan().getStart();
            if (parse.getSpan().getStart() >= negationstart)
                afternegationvector.accumulate(parsevector);
            else
                listvector.getValue().accumulate(parsevector);
        }
        // TODO: Should we have the same here for DEEP parsing?
        else
            listvector.getValue().accumulate(parsevector);
    }
    // special processing for ADJP and ADVP, where certain modifiers
    // make the entire chunk a sentiment
    if (StringSupport.equals(parent.getType(), "ADVP") || StringSupport.equals(parent.getType(), "ADJP"))
    {
        Float modifierscore = listvector.getValue().get(FloatVector.ScoreDimension);
        // NOTE(review): if either sum method can return a null Float, the
        // addition below would NPE before the null check fires — confirm
        // their return types
        Float sumallsentiment = listvector.getValue().sumAllSentimentIntensities() + listvector.getValue().sumOfIntensities(SentimentDimension.DimensionsRequiringModifier);
        if ((modifierscore != null ) && (sumallsentiment != null) && parses.size() > 1)
        {
            for (Parse parse : plainParses)
            {
                FloatVector parsevector = new FloatVector(parse.getSpan(), SentimentDimension.GeneralSentiment);
                listvector.getValue().accumulate(parsevector);
            }
            FloatVector valueofchunk = mDicts.Modifiers.getWords().get("ADJP_ADVP_chunk");
            listvector.getValue().accumulate(valueofchunk);
        }
    }
    // add all sentiment idioms of this parent
    if (sentenceflags.isIdiomParent(parent.getSpan()))
    {
        CSList<IdiomOccurrence> idiomoccurences = sentenceflags.SpanFlags.get(LangUtils.spanKey(parent.getSpan())).IncludedIdioms;
        for (IdiomOccurrence idiomoccurence : idiomoccurences)
        {
            FloatVector idiomvector = mDicts.Idioms.words(idiomoccurence.IdiomKey);
            // Entity idioms carry no sentiment: skip them.
            // FIX: FloatVector.get returns a nullable Float (see its other
            // call sites); the original compared it to a float directly,
            // which NPEs on auto-unboxing whenever the dimension is absent.
            Float entityflag = idiomvector.get(FloatVector.EntityDimension);
            if (entityflag != null && entityflag == FloatVector.InitialValue)
                continue;
            CSList<Span> spans = parsesToSpans(idiomoccurence.Parses);
            if (spans.get(0).getStart() > negationstart)
                afternegationvector.accumulate(idiomvector,spans,true,null);
            else
                listvector.getValue().accumulate(idiomvector,spans,true,null);
        }
    }
    if ((parsedepth == IndexingConsts.ParseDepth.DEEP) && (tlDeepAccumulationBreakParents.contains(parent.getType())))
    {
        listvector.getValue().applyNegationAndMultiplication();
    }
    else if ((parsedepth == IndexingConsts.ParseDepth.SHALLOW) && (tlShallowAccumulationBreakParents.contains(parent.getType())))
    {
        // apply negation within each half, then fold the post-negation half in
        afternegationvector.applyNegationAndMultiplication();
        listvector.getValue().applyNegationAndMultiplication();
        listvector.getValue().accumulate(afternegationvector);
    }
    if (tlSentenceTop.contains(parent.getType()) || tlQuestionSentenceTop.contains(parent.getType()))
    {
        listvector.getValue().removeUnusedCombinationParts();
    }
    if (!listvector.getValue().hasIntensities())
        listvector.setValue(null);
}
/**
 * Maps a list of parses to the list of their spans. Returns null for null
 * input (callers pass the result straight through to FloatVector.accumulate).
 */
private CSList<Span> parsesToSpans(CSList<Parse> parses) throws Exception {
    if (parses == null)
        return null;
    CSList<Span> spans = new CSList<Span>();
    for (Parse parse : parses)
        spans.add(parse.getSpan()); // List.add, for consistency with the rest of the file (was C#-style .Add)
    return spans;
}
/**
 * Convenience overload: derives the sentiment vector of a POS-level parse
 * without a POS override.
 */
private void getSentimentVectorFromPOSParse(Parse parse, SentenceFlags sentenceflags, RefSupport<FloatVector> parsevector) throws Exception {
    RefSupport<FloatVector> result = new RefSupport<FloatVector>();
    getSentimentVectorFromPOSParse(parse, sentenceflags, null, result);
    parsevector.setValue(result.getValue());
}
// Computes the sentiment vector contributed by a single POS-tagged parse.
// The branch order matters: intensity tokens (emoticon-like), "!", negators,
// degree modifiers and interjections are checked before the generic
// emotion/quality dictionary lookup. Output is returned via 'parsevector'
// and may be null when the token carries no sentiment.
// NOTE(review): 'posoverride', when non-null, replaces the POS derived from
// the parse type — presumably used by callers that re-tag tokens; confirm.
private void getSentimentVectorFromPOSParse(Parse parse, SentenceFlags sentenceflags, String posoverride, RefSupport<FloatVector> parsevector) throws Exception {
parsevector.setValue(null);
// Idioms, links and hashtags are scored elsewhere; skip them here.
if (sentenceflags.isIdiom(parse.getSpan()) || sentenceflags.isLink(parse.getSpan()) || sentenceflags.isHashtag(parse.getSpan()))
return ;
else if (tlProperNouns.contains(parse.getType()))
return ;
// ignore proper nouns during sentiment extraction
String token = getLemma(parse);
String pos = (posoverride != null) ? posoverride : LangUtils.parseTypeToShortPOS(parse.getType());
Span span = parse.getSpan();
FloatVector outvector = null;
RefSupport<FloatVector> refOutvector = new RefSupport<FloatVector>();
if (FloatVector.isIntensityToken(token)) {
// Token itself encodes an intensity (e.g. an emoticon-style marker).
parsevector.setValue(new FloatVector());
FloatVector emotvector = new FloatVector();
emotvector.initFromIntensityToken(token);
parsevector.getValue().accumulate(emotvector, span, true);
} else if (token.equals("!")) {
// Exclamation mark acts as a general-sentiment modifier.
parsevector.setValue(new FloatVector(span, SentimentDimension.GeneralSentiment));
FloatVector modifierScore = mDicts.Modifiers.getWords().get("exclamation");
parsevector.getValue().accumulate(modifierScore);
parsevector.getValue().applyNegationAndMultiplication();
} else if (isNegator(parse, refOutvector)) {
outvector = refOutvector.getValue();
parsevector.setValue(new FloatVector());
parsevector.getValue().accumulate(outvector, span, true);
} else if (isPOSModifier(parse, refOutvector)) {
outvector = refOutvector.getValue();
parsevector.setValue(new FloatVector());
parsevector.getValue().accumulate(outvector, span, true);
} else if (pos.equals("i")) {
// Interjections only count when the dictionary entry has intensities.
String lower = token.toLowerCase();
outvector = mDicts.Interjections.getWords().get(lower);
if ((outvector != null) && (outvector.hasIntensities())) {
parsevector.setValue(new FloatVector());
parsevector.getValue().accumulate(outvector, span, true);
}
} else {
// Default: regular emotion/quality dictionary lookup (may be null).
parsevector.setValue(getEmotionOrQualityVector(token, pos, span));
}
// ALL-CAPS emphasis adds a sentiment modifier, but only when capitalization
// stands out within the paragraph (otherwise the text is shouty throughout).
if (sentenceflags.isAllCaps(span) && sentenceflags.ParagraphStats.CapitalizationStandsOut)
{
FloatVector allcaps = new FloatVector(span,SentimentDimension.GeneralSentiment);
FloatVector modifierScore = mDicts.Modifiers.getWords().get("allcaps");
allcaps.accumulate(modifierScore);
allcaps.applyNegationAndMultiplication();
if (parsevector.getValue() == null)
parsevector.setValue(allcaps);
else
parsevector.getValue().accumulate(allcaps);
}
}
/**
 * Looks the token/POS pair up in the emotion and quality dictionaries.
 * Exactly one of the two may match; a match in both is a data error.
 *
 * @param token the lemma to look up
 * @param pos   the short POS tag
 * @param span  when non-null, the match is accumulated into a fresh vector
 *              anchored to this span; when null the raw dictionary vector is
 *              returned as-is
 * @return the resulting vector, or null when neither dictionary matches
 * @throws Exception when both dictionaries contain an entry for the pair
 */
private FloatVector getEmotionOrQualityVector(String token, String pos, Span span) throws Exception {
    FloatVector emotvector = getEmotionVector(token, pos);
    FloatVector qualvector = getQualityVector(token, pos);
    if ((emotvector != null) && (qualvector != null))
        throw new Exception("GetEmotionOrQualityVector: duplicate emotion and quality entries for " + token + "/" + pos);
    // At most one of the two is non-null here; the handling is identical.
    FloatVector found = (emotvector != null) ? emotvector : qualvector;
    if (found == null)
        return null;
    if (span == null)
        return found;
    FloatVector parsevector = new FloatVector();
    parsevector.accumulate(found, span, true);
    return parsevector;
}
// Decides whether the parse is a POS-based modifier (degree adverb, or a
// comparative/superlative adjective) and, if so, returns its modifier vector
// through 'modifier'. Returns true iff a modifier vector was found.
private boolean isPOSModifier(Parse parse, RefSupport<FloatVector> modifier) throws Exception {
modifier.setValue(null);
if (!tlAdverbs.contains(parse.getType()) && !tlAdjectivesJJR_JJS.contains(parse.getType()))
return false;
String lower = getLemma(parse).toLowerCase();
String pos = LangUtils.parseTypeToShortPOS(parse.getType());
// 'label' ("lemma/pos") is only used for the degree-adverb lookup below.
String label = lower + '/' + pos;
if (tlAdverbs.contains(parse.getType()))
{
modifier.setValue(mDicts.DegreeAdverbs.getWords().get(label));
}
else if (tlAdjectivesJJR_JJS.contains(parse.getType()))
{
// JJR/JJS: combine the adjective's base-form sentiment (if any) with the
// per-parse-type modifier score.
FloatVector baseformvector = getEmotionOrQualityVector(lower,pos,null);
FloatVector scorevector = mDicts.Modifiers.getWords().get(parse.getType());
if (baseformvector != null)
{
// sso 12/16/2016 fix
// Work on a copy so the shared dictionary vector is never mutated.
FloatVector bfcopy = new FloatVector();
bfcopy.accumulate(baseformvector);
bfcopy.accumulate(scorevector);
bfcopy.applyNegationAndMultiplication();
modifier.setValue(bfcopy);
}
else
modifier.setValue(scorevector);
}
return (modifier.getValue() != null);
}
/**
 * Checks whether the parse is a negator (e.g. "not"). Only RB, DT and MD
 * tags are eligible; the actual decision is a lookup in the negator
 * dictionary. The matched vector, if any, is returned through 'negator'.
 */
private Boolean isNegator(Parse parse, RefSupport<FloatVector> negator) throws Exception {
    negator.setValue(null);
    String type = parse.getType();
    if (!type.equals("RB") && !type.equals("DT") && !type.equals("MD"))
        return false;
    String key = getParseLabel(parse).toLowerCase();
    negator.setValue(mDicts.Negators.getWords().get(key));
    return (negator.getValue() != null);
}
/**
 * Returns true when the parse's last child is a question mark.
 */
private Boolean isTerminatedByQuestion(Parse parse) throws Exception {
    int childCount = parse.getChildCount();
    if (childCount == 0)
        return false;
    Parse lastChild = parse.getChildren()[childCount - 1];
    return StringSupport.equals(getLemma(lastChild), "?");
}
/**
 * Looks up the lemma/POS pair in the emotion dictionary (null when absent).
 */
private FloatVector getEmotionVector(String lemma, String pos) throws Exception {
    return getVector(mDicts.Emotions, lemma, pos);
}
/**
 * Looks up the lemma/POS pair in the quality dictionary (null when absent).
 */
private FloatVector getQualityVector(String lemma, String pos) throws Exception {
    return getVector(mDicts.Qualities, lemma, pos);
}
// Dictionary lookup with fallbacks: first the literal "lemma/pos" key, then
// (for hyphen/space-split two-token lemmas) a prefix + base-form combination,
// then the base form of a single-token lemma. Returns null when no entry fits.
private FloatVector getVector(GenericDictionary<FloatVector> dict, String lemma, String pos) throws Exception {
FloatVector vector = dict.getWords().get(lemma + '/' + pos);
if (vector != null)
return vector;
// check base forms.
String lowercaseLemma = lemma.toLowerCase();
String[] tokens = Util.split(lowercaseLemma);
if (tokens.length > 2)
return null;
else if (tokens.length == 2)
{
// Two tokens: treat the first as a prefix (e.g. a negating prefix) and
// score the base form of the second, then combine the two.
FloatVector prefixvector = mDicts.Prefixes.getWords().get(tokens[0]);
if (prefixvector == null)
return null;
String bestbaseform = mDicts.BaseForms.bestBaseForm(tokens[1],pos);
vector = dict.getWords().get(bestbaseform + '/' + pos);
if (vector == null)
return null;
// sso 10/19/2017 fix
// Combine on a copy so the shared dictionary vector is never mutated.
FloatVector vectorcopy = new FloatVector();
vectorcopy.accumulate(vector);
vectorcopy.accumulate(prefixvector);
vectorcopy.applyNegationAndMultiplication();
return vectorcopy;
// end fix
}
else if (tokens.length == 1)
{
String bestbaseform = mDicts.BaseForms.bestBaseForm(tokens[0],pos);
vector = dict.getWords().get(bestbaseform + '/' + pos);
return vector;
}
else
return null;
}
/**
 * Builds the "lemma/shortPOS" label for a parse.
 */
private static String getParseLabel(Parse parse) throws Exception {
    String shortPos = LangUtils.parseTypeToShortPOS(parse.getType());
    return getLemma(parse) + '/' + shortPos;
}
/**
 * Returns the surface text covered by the parse's span.
 * (Despite the name, no morphological normalization happens here.)
 */
private static String getLemma(Parse parse) throws Exception {
    Span span = parse.getSpan();
    return parse.getText().substring(span.getStart(), span.getEnd());
}
/**
 * Builds the "lemma/PennTag" label, using the raw Penn Treebank type rather
 * than the shortened POS.
 */
private static String getParseLabelPennStyle(Parse parse) throws Exception {
    return getLemma(parse) + '/' + parse.getType();
}
// Recursively walks the parse forest collecting "good" entity candidates:
// runs of eligible POS tags under NP parents are accumulated into
// 'runningPhrase' and flushed through findNounPhraseChunks whenever a
// non-leaf node (or the end of the list) breaks the run. Idiom entities of
// each non-leaf child are collected as well.
// TODO (7/7/18): Handle the case of The (a DT) being capitalized and part of the Good Entity
// As in "The Importance of Being Earnest , so thick with wit it plays like a reading from Bartlett 's Familiar Quotations"
// TODO (7/8/18): For DEEP parsing, make sure we get at least one entity, even if it is not up to usual standards
// For that, add even simplest NN, NNS
private void findGoodEntitiesInParses(CSList<Parse> parses, SentenceFlags sentenceflags, String parentType, CSList<CSList<Parse>> goodEntities) throws Exception {
CSList<Parse> runningPhrase = new CSList<Parse>();
for (Parse parse : parses)
{
if (parse.isPosTag())
{
// Leaf (POS tag): decide whether it may join the running phrase.
if (sentenceflags.isIdiom(parse.getSpan()))
continue;
// don't start the running phrase with a DT or similar phrase breakers
// this check is necessary so that the next check works as intended
if ((runningPhrase.size() == 0) && !tlGoodObjectTypes.contains(parse.getType()))
continue;
// don't start the running phrase with Sentiment adjectives
if ((runningPhrase.size() == 0) && (tlAdjectives.contains(parse.getType())) && sentenceflags.isSentiment(parse.getSpan()))
continue;
if (FloatVector.isIntensityToken(getLemma(parse)))
continue;
// Only tokens directly under an NP parent are considered entity parts.
if (!StringSupport.equals(parentType, "NP"))
continue;
runningPhrase.add(parse);
}
else
{
// Non-leaf child breaks the current run: flush it into chunks first.
if (runningPhrase.size() >= 1)
{
CSList<CSList<Parse>> foundchunks = null;
RefSupport<CSList<CSList<Parse>>> refVar41 = new RefSupport<CSList<CSList<Parse>>>();
findNounPhraseChunks(runningPhrase,refVar41);
foundchunks = refVar41.getValue();
goodEntities.addAll(foundchunks);
runningPhrase.clear();
}
// children
CSList<CSList<Parse>> childrenGoodEntities = new CSList<CSList<Parse>>();
CSList<Parse> children = new CSList<Parse>(parse.getChildren());
findGoodEntitiesInParses(children,sentenceflags,parse.getType(),childrenGoodEntities);
goodEntities.addAll(childrenGoodEntities);
//idioms
findIdiomEntities(parse,sentenceflags,goodEntities);
}
}
// Flush any phrase still running at the end of the sibling list.
if (runningPhrase.size() > 0)
{
CSList<CSList<Parse>> foundchunks = null;
RefSupport<CSList<CSList<Parse>>> outFoundchunks = new RefSupport<CSList<CSList<Parse>>>();
findNounPhraseChunks(runningPhrase,outFoundchunks);
foundchunks = outFoundchunks.getValue();
goodEntities.addAll(foundchunks);
}
}
/**
 * Splits a run of POS-tagged parses into noun-phrase chunks.
 * A chunk is cut by phrase-breaker tags, by "good chunker" connectors that
 * are not flanked by proper nouns, and by noun-kind changes handled in
 * {@link #attachParseToChunk}. High-value chunks are kept, and each long
 * chunk additionally contributes its stricter sub-chunks via findSubChunks.
 *
 * @param parses      the POS-tag parses of one running phrase
 * @param foundchunks out-parameter receiving the resulting chunks
 */
private void findNounPhraseChunks(CSList<Parse> parses, RefSupport<CSList<CSList<Parse>>> foundchunks) throws Exception {
    foundchunks.setValue(new CSList<CSList<Parse>>());
    CSList<CSList<Parse>> foundlongchunks = new CSList<CSList<Parse>>();
    NounChunkType nounChunkType = NounChunkType.Undetermined;
    // Primitives instead of boxed Integer/Boolean: avoids pointless
    // autoboxing on every loop iteration (behavior unchanged).
    boolean ignoreChangeOfPhrase = false;
    CSList<Parse> chunk = new CSList<Parse>();
    for (int i = 0; i < parses.size(); i++)
    {
        Parse parse = parses.get(i);
        String plabel = getParseLabelPennStyle(parse);
        if (tlGoodProperNounChunkers.contains(plabel))
        {
            // Connectors (e.g. "of") stay inside a chunk only when surrounded
            // by proper nouns on both sides; otherwise they end the chunk.
            if ((i > 0) && (i < parses.size() - 1) && tlProperNouns.contains(parses.get(i - 1).getType()) && tlProperNouns.contains(parses.get(i + 1).getType()))
            {
                chunk.add(parse);
            }
            else if (chunk.size() > 0)
            {
                if (isHighValueObject(chunk))
                {
                    CSList<Parse> newlist = new CSList<Parse>(chunk);
                    foundlongchunks.add(newlist);
                }
                chunk.clear();
            }
        }
        else if (tlPhraseBreakerTypes.contains(parse.getType()))
        {
            // Hard break: flush the current chunk if it is valuable.
            if (chunk.size() > 0)
            {
                if (isHighValueObject(chunk))
                {
                    CSList<Parse> newlist = new CSList<Parse>(chunk);
                    foundlongchunks.add(newlist);
                }
                chunk.clear();
            }
        }
        else if (StringSupport.equals(parse.getType(), "POS"))
        {
            // Possessive marker: the next noun-kind change is not a boundary.
            ignoreChangeOfPhrase = true;
        }
        else if (StringSupport.equals(parse.getType(), "CD"))
        {
            chunk.add(parse);
        }
        else if (tlProperNouns.contains(parse.getType()) || tlRegularNouns.contains(parse.getType()) || tlAdjectives.contains(parse.getType()))
        {
            boolean isLastParse = (i == (parses.size() - 1));
            RefSupport<NounChunkType> refChunkType = new RefSupport<NounChunkType>(nounChunkType);
            RefSupport<Boolean> refIgnoreChange = new RefSupport<Boolean>(ignoreChangeOfPhrase);
            attachParseToChunk(parse,chunk,foundlongchunks,isLastParse,refChunkType,refIgnoreChange);
            nounChunkType = refChunkType.getValue();
            ignoreChangeOfPhrase = refIgnoreChange.getValue();
        }
    }
    // Flush the final chunk, if any.
    if (chunk.size() > 0)
    {
        if (isHighValueObject(chunk))
        {
            CSList<Parse> newlist = new CSList<Parse>(chunk);
            foundlongchunks.add(newlist);
        }
    }
    for (CSList<Parse> longchunk : foundlongchunks)
    {
        // find smaller sub chunks that correspond to more strict conditions
        // we do that to calculate subchunk stats in larger blogs
        foundchunks.getValue().add(longchunk);
        findSubChunks(longchunk,foundchunks.getValue());
    }
}
/**
 * Adds one noun/adjective parse to the chunk being built, closing the chunk
 * first when the noun kind changes (proper vs. regular), unless the change
 * was pre-approved by a possessive marker or the parse is a trailing
 * non-proper token.
 *
 * @param parse               the parse to attach
 * @param chunk               the chunk under construction (mutated in place)
 * @param foundchunks         receives chunks closed by this call
 * @param isLastParse         true when this is the final parse of the run
 * @param nounChunkType       in/out: noun kind of the current chunk
 * @param ignoreChangeOfPhrase in/out: one-shot flag set by a preceding "POS" tag
 */
private void attachParseToChunk(Parse parse, CSList<Parse> chunk, CSList<CSList<Parse>> foundchunks, boolean isLastParse, RefSupport<NounChunkType> nounChunkType, RefSupport<Boolean> ignoreChangeOfPhrase) throws Exception {
    // Primitive boolean instead of boxed Boolean (behavior unchanged).
    boolean justAdd;
    if (nounChunkType.getValue() == NounChunkType.Undetermined)
    {
        if (tlProperNouns.contains(parse.getType()))
            nounChunkType.setValue(NounChunkType.ProperNoun);
        else if (tlRegularNouns.contains(parse.getType()))
            nounChunkType.setValue(NounChunkType.RegularNoun);
        justAdd = true;
    }
    else
    {
        if (tlProperNouns.contains(parse.getType()))
        {
            justAdd = (nounChunkType.getValue() == NounChunkType.ProperNoun);
            nounChunkType.setValue(NounChunkType.ProperNoun);
        }
        else if (tlRegularNouns.contains(parse.getType()))
        {
            justAdd = (nounChunkType.getValue() == NounChunkType.RegularNoun);
            nounChunkType.setValue(NounChunkType.RegularNoun);
        }
        else
        {
            // e.g. NNP NNP JJ NN: break at JJ
            justAdd = false;
            nounChunkType.setValue(NounChunkType.Undetermined);
        }
    }
    // order of conditions is important, so be careful!
    if (chunk.size() == 0)
        chunk.add(parse);
    else if (justAdd)
        chunk.add(parse);
    else if (ignoreChangeOfPhrase.getValue())
    {
        // Possessive-approved change: attach and consume the one-shot flag.
        chunk.add(parse);
        ignoreChangeOfPhrase.setValue(false);
    }
    else if (!tlProperNouns.contains(parse.getType()) && isLastParse)
        chunk.add(parse);
    else
    {
        // Noun kind changed: close the current chunk (if valuable) and start
        // a fresh one with this parse.
        if (isHighValueObject(chunk))
        {
            CSList<Parse> newlist = new CSList<Parse>(chunk);
            foundchunks.add(newlist);
        }
        chunk.clear();
        chunk.add(parse);
    }
}
// Extracts stricter sub-chunks from a long chunk: only noun tokens are kept,
// sub-chunk breaker tags cut, and a proper/regular noun-kind change cuts via
// attachParseToSubChunk. A sub-chunk is only recorded when it is a
// high-value object AND strictly shorter than the long chunk it came from.
private void findSubChunks(CSList<Parse> longchunk, CSList<CSList<Parse>> foundchunks) throws Exception {
boolean inProperNounPhrase = false;
CSList<Parse> subchunk = new CSList<Parse>();
for (int i = 0;i < longchunk.size();i++)
{
Parse parse = longchunk.get(i);
if (tlSubChunkBreakerTypes.contains(parse.getType()))
{
// Breaker tag: flush the running sub-chunk.
if (subchunk.size() > 0)
{
if (isHighValueObject(subchunk) && (subchunk.size() < longchunk.size()))
{
CSList<Parse> newlist = new CSList<Parse>(subchunk);
foundchunks.add(newlist);
}
subchunk.clear();
}
}
else if (tlProperNouns.contains(parse.getType()) || tlRegularNouns.contains(parse.getType()))
{
RefSupport<Boolean> refVar45 = new RefSupport<Boolean>(inProperNounPhrase);
attachParseToSubChunk(parse,subchunk,foundchunks,refVar45);
inProperNounPhrase = refVar45.getValue();
}
}
// Flush the trailing sub-chunk, if any.
if (subchunk.size() > 0)
{
if (isHighValueObject(subchunk) && (subchunk.size() < longchunk.size()))
{
CSList<Parse> newlist = new CSList<Parse>(subchunk);
foundchunks.add(newlist);
}
}
}
/**
 * Adds one noun parse to the sub-chunk being built, closing the sub-chunk
 * first when the noun kind flips between proper and regular.
 *
 * @param parse             the parse to attach (already known to be a noun)
 * @param subchunk          the sub-chunk under construction (mutated in place)
 * @param foundchunks       receives sub-chunks closed by this call
 * @param inProperNounPhrase in/out: whether the current sub-chunk is proper-noun-kind
 */
private void attachParseToSubChunk(Parse parse, CSList<Parse> subchunk, CSList<CSList<Parse>> foundchunks, RefSupport<Boolean> inProperNounPhrase) throws Exception {
    // Fix: the original compared two boxed Booleans with ==, a reference
    // comparison that only worked because Boolean.valueOf caches TRUE/FALSE.
    // Unboxing to primitives makes it a value comparison by construction.
    boolean parseIsProperNoun = tlProperNouns.contains(parse.getType());
    boolean justAdd = (inProperNounPhrase.getValue() == parseIsProperNoun);
    if (subchunk.size() == 0)
    {
        // First parse of a fresh sub-chunk: record its noun kind.
        subchunk.add(parse);
        inProperNounPhrase.setValue(parseIsProperNoun);
    }
    else
    {
        if (justAdd)
            subchunk.add(parse);
        else
        {
            // Noun kind flipped: close the current sub-chunk (if valuable)
            // and start a new one with this parse.
            if (isHighValueObject(subchunk))
            {
                CSList<Parse> newlist = new CSList<Parse>(subchunk);
                foundchunks.add(newlist);
            }
            subchunk.clear();
            subchunk.add(parse);
            inProperNounPhrase.setValue(parseIsProperNoun);
        }
    }
}
// Collects entity-type idioms occurring under 'parent'. Note the inverted
// filter compared to sentiment accumulation earlier in this class: here only
// idioms whose entity dimension HAS been set are kept.
private void findIdiomEntities(Parse parent, SentenceFlags sentenceflags, CSList<CSList<Parse>> idiomEntities) throws Exception {
if (!sentenceflags.isIdiomParent(parent.getSpan()))
return ;
CSList<IdiomOccurrence> idiomoccurences = sentenceflags.SpanFlags.get(LangUtils.spanKey(parent.getSpan())).IncludedIdioms;
for (IdiomOccurrence idiomoccurence : idiomoccurences)
{
FloatVector idiomvector = mDicts.Idioms.words(idiomoccurence.IdiomKey);
// check: if entity idiom, don't add idiom to sentiment
if (idiomvector.get(FloatVector.EntityDimension) != FloatVector.InitialValue)
continue;
if (isHighValueObject(idiomoccurence.Parses))
idiomEntities.add(idiomoccurence.Parses);
}
}
/**
 * Convenience wrapper: evaluates the phrase and returns only the
 * "good as tag" verdict, discarding the "good as topic" flag.
 */
private boolean isHighValueObject(CSList<Parse> phrase) throws Exception {
    RefSupport<Boolean> goodAsTagRef = new RefSupport<Boolean>();
    RefSupport<Boolean> goodAsTopicRef = new RefSupport<Boolean>();
    isHighValueObject(phrase, goodAsTagRef, goodAsTopicRef);
    return goodAsTagRef.getValue();
}
/**
 * Decides whether a phrase is a high-value object, dispatching on its
 * length. Defaults: not a tag; topic verdict undecided (null).
 */
private void isHighValueObject(CSList<Parse> phrase, RefSupport<Boolean> goodAsTag, RefSupport<Boolean> goodAsTopic) throws Exception {
    goodAsTag.setValue(false);
    goodAsTopic.setValue(null);
    int size = phrase.size();
    if (size >= 3) {
        // check if long phrase (3 "good" words are enough,
        // but calling function needs to use POS info to remove DT, POS, etc
        isHighValueObject3PlusParses(phrase, goodAsTag, goodAsTopic);
    } else if (size == 2) {
        isHighValueObject2Parses(phrase.get(0), phrase.get(1), goodAsTag, goodAsTopic);
    } else if (size == 1) {
        isHighValueObject1Parse(phrase.get(0), goodAsTag, goodAsTopic);
    }
}
// Single-word phrase verdict. Note the tri-state 'goodAsTopic': true/false
// are decisions, null means "needs the admin to decide" (see the inline
// comments below). Twitter handles at sentence start are rejected outright.
private void isHighValueObject1Parse(Parse parse, RefSupport<Boolean> goodAsTag, RefSupport<Boolean> goodAsTopic) throws Exception {
if (parse.getSpan().getStart() == 0 && isTwitterStyleName(parse))
{
goodAsTag.setValue(false);
goodAsTopic.setValue(false);
return ;
}
// Only nouns qualify as single-word objects.
if (!tlAllNouns.contains(parse.getType()))
{
goodAsTag.setValue(false);
goodAsTopic.setValue(false);
return ;
}
if (isTwitterStyleName(parse))
{
goodAsTopic.setValue(false);
goodAsTag.setValue(true);
}
else // good tag only if it does not start the sentence
if (tlProperNouns.contains(parse.getType()))
{
// single word NNPs are good tags and topics
goodAsTopic.setValue(true);
goodAsTag.setValue(true);
}
else if (parse.getSpan().length() >= HighValueMinimumLength)
{
goodAsTopic.setValue(null);
// need the admin to decide
goodAsTag.setValue(true);
}
else
{
// single words are ok as tags if they are long
goodAsTopic.setValue(null);
goodAsTag.setValue(false);
}
}
/**
 * Two-word phrase verdict. Numeric + noun and ordinal-adjective + noun are
 * tag-only; otherwise at least one noun and a minimum combined length are
 * required for both tag and topic.
 */
private void isHighValueObject2Parses(Parse parse1, Parse parse2, RefSupport<Boolean> goodAsTag, RefSupport<Boolean> goodAsTopic) throws Exception {
    if (parse1.getSpan().getStart() == 0 && isTwitterStyleName(parse1))
    {
        goodAsTag.setValue(false);
        goodAsTopic.setValue(false);
        return ;
    }
    // Primitive booleans/ints instead of boxed types (behavior unchanged).
    boolean parse2IsNoun = tlAllNouns.contains(parse2.getType());
    if (StringSupport.equals(parse1.getType(), "CD") && parse2IsNoun)
    {
        goodAsTag.setValue(true);
        goodAsTopic.setValue(false);
    }
    else if (StringSupport.equals(parse1.getType(), "JJ") && isOrdinalWithSuffix(getLemma(parse1)) && parse2IsNoun)
    {
        goodAsTag.setValue(true);
        goodAsTopic.setValue(false);
    }
    else if (parse2IsNoun || tlAllNouns.contains(parse1.getType()))
    {
        // one of them is noun
        // Combined length includes the separating space (+1).
        int length = parse1.getSpan().length() + 1 + parse2.getSpan().length();
        goodAsTag.setValue((length >= HighValueMinimumLength));
        goodAsTopic.setValue(goodAsTag.getValue());
    }
    else
    {
        // no nouns found
        goodAsTag.setValue(false);
        goodAsTopic.setValue(false);
    }
}
/**
 * Three-or-more-word phrase verdict: good as both tag and topic iff the
 * phrase contains at least one noun (and is not a leading Twitter handle).
 */
private void isHighValueObject3PlusParses(CSList<Parse> phrase, RefSupport<Boolean> goodAsTag, RefSupport<Boolean> goodAsTopic) throws Exception {
    if (phrase.get(0).getSpan().getStart() == 0 && isTwitterStyleName(phrase.get(0)))
    {
        goodAsTag.setValue(false);
        goodAsTopic.setValue(false);
        return ;
    }
    // Primitive boolean instead of boxed Boolean (behavior unchanged).
    boolean foundNoun = false;
    for (Parse parse : phrase)
    {
        // check if there is at least one Noun
        if (tlAllNouns.contains(parse.getType()))
        {
            foundNoun = true;
            break;
        }
    }
    goodAsTag.setValue(foundNoun);
    goodAsTopic.setValue(foundNoun);
}
/**
 * Tells whether the parse's covered text begins with '@'
 * (a Twitter-style handle).
 *
 * @param parse the parse to inspect
 * @return true when the first covered character is '@'
 */
private Boolean isTwitterStyleName(Parse parse) {
    int start = parse.getSpan().getStart();
    return parse.getText().charAt(start) == '@';
}
/**
 * Recognizes ordinals written with a two-letter suffix, e.g. "3rd", "21st":
 * a parseable number followed by a suffix from the known ordinal set.
 */
private boolean isOrdinalWithSuffix(String candidate) throws Exception {
    if (candidate.length() < 3) {
        return false;
    }
    String numberPart = candidate.substring(0, candidate.length() - 2);
    String suffix = candidate.substring(candidate.length() - 2).toUpperCase();
    RefSupport<Double> parsed = new RefSupport<Double>();
    return DoubleSupport.tryParse(numberPart, parsed) && tlOrdinalSuffixes.contains(suffix);
}
// Finds the nearest common ancestor of 'children' whose type is in
// 'parentTypes'. Walks upward recursively: if the immediate common parent is
// not of a wanted type (and is not the TOP node), it becomes the sole child
// of the next recursion step.
private Parse findCommonParentOfType(CSList<Parse> children, CSList<String> parentTypes) throws Exception {
if ((children == null) || (children.size() == 0))
return null;
Parse commonParent = null;
for (Parse child : children)
{
// on the first child node we set commonParent to the parent of child, and on other children we keep calling getCommonParent
if (commonParent == null)
commonParent = child.getParent();
else
commonParent = commonParent.getCommonParent(child);
}
if (commonParent == null)
return null;
if (parentTypes.contains(commonParent.getType()))
return commonParent;
else if (commonParent.getType().equals(AbstractBottomUpParser.TOP_NODE))
return null; // we reached the top and still haven't found the right parent
else {
// Keep climbing: re-run with the common parent as the only child.
CSList<Parse> nextlevel = new CSList<Parse>();
nextlevel.add(commonParent);
return findCommonParentOfType( nextlevel, parentTypes );
}
}
/**
 * Collects (non-recursively) every parse whose type is in 'types'.
 */
private void findParsesOfTypes(CSList<Parse> parses, CSList<String> types, CSList<Parse> foundParses) throws Exception {
    for (Parse parse : parses) {
        if (types.contains(parse.getType())) {
            foundParses.add(parse);
        }
    }
}
/**
 * Scans only the direct children of 'startNode' for the wanted types.
 * The result list is allocated lazily, so callers receive null (not an
 * empty list) when nothing matches.
 */
private void findImmediateChildren(Parse startNode, CSList<String> types, RefSupport<CSList<Parse>> foundChildren) throws Exception {
    foundChildren.setValue(null);
    for (Parse child : startNode.getChildren()) {
        if (!types.contains(child.getType())) {
            continue;
        }
        if (foundChildren.getValue() == null) {
            foundChildren.setValue(new CSList<Parse>());
        }
        foundChildren.getValue().add(child);
    }
}
/**
 * Depth-first search for descendants of the wanted types. A matching node
 * is collected and NOT descended into; non-matching nodes are searched
 * recursively.
 */
private void findChildren(Parse startNode, CSList<String> types, CSList<Parse> foundChildren) throws Exception {
    for (Parse child : startNode.getChildren()) {
        if (types.contains(child.getType())) {
            foundChildren.add(child);
        } else {
            findChildren(child, types, foundChildren);
        }
    }
}
/**
 * Returns true when every direct child of 'startNode' is a POS tag
 * (vacuously true for a node with no children).
 */
private boolean isAllChildrenPOSTags(Parse startNode) throws Exception {
    for (Parse child : startNode.getChildren()) {
        if (!child.isPosTag()) {
            return false;
        }
    }
    return true;
}
/**
 * Normalizes a raw sentence before parsing: trims, straightens curly
 * quotes, widens the U+2010 hyphen, and appends " ." when the sentence
 * does not already end with punctuation.
 *
 * @param input the raw sentence text
 * @return the normalized sentence; empty input yields an empty string
 */
private String normalizeSentence(String input) throws Exception {
    String res = StringSupport.Trim(input);
    res = res.replace('’', '\''); // right single quote (U+2019)
    res = res.replace('‘', '\''); // left single quote (U+2018)
    res = res.replace('”', '"'); // right double quote (U+201D)
    res = res.replace('“', '"'); // left double quote (U+201C)
    res = res.replace("‐", " - "); // replace hyphen (U+2010) with (U+002D) Hyphen-Minus
    // Fix: guard against empty/whitespace-only input — charAt(length - 1)
    // below would throw StringIndexOutOfBoundsException.
    if (res.isEmpty())
        return res;
    char last = res.charAt(res.length() - 1);
    if (!LangUtils.isCharPunctuation(last))
        res += " .";
    return res;
}
// Collects the POS-tag descendants of 'parse' whose spans fall entirely
// inside [spanFrom, spanTo]. Iteration relies on children being in span
// order: scanning stops early once a child extends past 'spanTo'.
private void findChildrenFromTo(Parse parse, int spanFrom, int spanTo, CSList<Parse> foundChildren) throws Exception {
Parse[] children = parse.getChildren();
if (children == null)
return ;
for (Parse child : children)
{
if (child.isPosTag())
{
if ((child.getSpan().getStart() >= spanFrom) && (child.getSpan().getEnd() <= spanTo))
foundChildren.add(child);
// Children are span-ordered; nothing after this can be in range.
if (child.getSpan().getEnd() > spanTo)
break;
}
else
{
// Recurse into a non-leaf child; the child's own start becomes the
// lower bound for its subtree.
if (child.getSpan().getStart() <= spanTo)
findChildrenFromTo(child, child.getSpan().getStart(), spanTo, foundChildren);
else
break;
}
}
}
/**
 * Splits a paragraph into sentences using the OpenNLP sentence detector.
 *
 * @param paragraph the text to split
 * @return the detected sentences, in order
 */
public String[] splitIntoSentences(String paragraph) throws Exception {
    return mSentenceDetector.sentDetect(paragraph);
}
/**
 * Tokenizes one sentence using the configured OpenNLP tokenizer.
 */
private String[] tokenizeSentence(String sentence) throws Exception {
    return mTokenizer.tokenize(sentence);
}
/**
 * POS-tags a token sequence; the result is index-aligned with 'tokens'.
 */
private String[] posTagTokens(String[] tokens) throws Exception {
    return mPosTagger.tag(tokens);
}
/**
 * Chunks a tagged sentence; 'tokens' and 'tags' must be index-aligned.
 */
private String[] chunkSentence(String[] tokens, String[] tags) {
    return mChunker.chunk(tokens, tags);
}
// Builds the initial flat parse required by the OpenNLP chunking parser
// (one TOK_NODE per token span under an INC_NODE root) and returns the
// parser's best full parse of the sentence.
private Parse parseSentence(String sentence, Span[] spans) throws Exception {
// hat tip to http://dpdearing.com/posts/2011/12/how-to-use-the-opennlp-1-5-0-parser/
Parse p = new Parse(sentence,
// a new span covering the entire text
new Span(0, sentence.length()),
// the label for the top if an incomplete node
AbstractBottomUpParser.INC_NODE,
// the probability of this parse
1,
// the token index of the head of this parse
0);
for (int idx=0; idx < spans.length; idx++) {
final Span span = spans[idx];
// flesh out the parse with individual token sub-parses
p.insert(new Parse(sentence,span,AbstractBottomUpParser.TOK_NODE,0,idx));
}
return mParser.parse(p);
}
}
| apache-2.0 |
oehme/analysing-gradle-performance | my-app/src/test/java/org/gradle/test/performance/mediummonolithicjavaproject/p464/Test9283.java | 2343 | package org.gradle.test.performance.mediummonolithicjavaproject.p464;
import org.gradle.test.performance.mediummonolithicjavaproject.p463.Production9274;
import org.gradle.test.performance.mediummonolithicjavaproject.p463.Production9278;
import org.junit.Test;
import static org.junit.Assert.*;
/**
 * Generated getter/setter round-trip tests for {@link Production9283}:
 * each test sets one property and asserts the getter returns it.
 */
public class Test9283 {

    /** Instance exercised by every test below. */
    Production9283 target = new Production9283();

    @Test
    public void testProperty0() {
        Production9274 expected = new Production9274();
        target.setProperty0(expected);
        assertEquals(expected, target.getProperty0());
    }

    @Test
    public void testProperty1() {
        Production9278 expected = new Production9278();
        target.setProperty1(expected);
        assertEquals(expected, target.getProperty1());
    }

    @Test
    public void testProperty2() {
        Production9282 expected = new Production9282();
        target.setProperty2(expected);
        assertEquals(expected, target.getProperty2());
    }

    @Test
    public void testProperty3() {
        String expected = "value";
        target.setProperty3(expected);
        assertEquals(expected, target.getProperty3());
    }

    @Test
    public void testProperty4() {
        String expected = "value";
        target.setProperty4(expected);
        assertEquals(expected, target.getProperty4());
    }

    @Test
    public void testProperty5() {
        String expected = "value";
        target.setProperty5(expected);
        assertEquals(expected, target.getProperty5());
    }

    @Test
    public void testProperty6() {
        String expected = "value";
        target.setProperty6(expected);
        assertEquals(expected, target.getProperty6());
    }

    @Test
    public void testProperty7() {
        String expected = "value";
        target.setProperty7(expected);
        assertEquals(expected, target.getProperty7());
    }

    @Test
    public void testProperty8() {
        String expected = "value";
        target.setProperty8(expected);
        assertEquals(expected, target.getProperty8());
    }

    @Test
    public void testProperty9() {
        String expected = "value";
        target.setProperty9(expected);
        assertEquals(expected, target.getProperty9());
    }
} | apache-2.0 |
ServerStarted/cat | cat-broker/src/main/java/com/dianping/cat/broker/api/page/MonitorManager.java | 5794 | package com.dianping.cat.broker.api.page;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import org.codehaus.plexus.logging.LogEnabled;
import org.codehaus.plexus.logging.Logger;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.unidal.helper.Threads;
import org.unidal.helper.Threads.Task;
import org.unidal.lookup.annotation.Inject;
import org.unidal.lookup.util.StringUtils;
import com.dianping.cat.Cat;
import com.dianping.cat.CatConstants;
import com.dianping.cat.Constants;
import com.dianping.cat.config.url.UrlPatternConfigManager;
import com.dianping.cat.message.Event;
import com.dianping.cat.message.Metric;
import com.dianping.cat.message.Transaction;
import com.dianping.cat.message.internal.DefaultMetric;
import com.dianping.cat.service.IpService;
import com.dianping.cat.service.IpService.IpInfo;
/**
 * Fans monitor entities out to a fixed pool of queues, each drained by its
 * own sender thread that converts entities into CAT metrics.
 */
public class MonitorManager implements Initializable, LogEnabled {

    private final int m_threadCounts = 20;

    /**
     * Fix: {@code offer()} can run on several threads concurrently, and the
     * original {@code volatile long m_total++} was a non-atomic
     * read-modify-write. AtomicLong makes the increment (and the subsequent
     * read for round-robin selection) race-free.
     */
    private final AtomicLong m_total = new AtomicLong(0);

    /** Dropped-entity counter; starts at -1 so the very first drop is logged. */
    private final AtomicLong m_errorCount = new AtomicLong(-1);

    private Map<Integer, BlockingQueue<MonitorEntity>> m_queues = new LinkedHashMap<Integer, BlockingQueue<MonitorEntity>>();

    @Inject
    private IpService m_ipService;

    @Inject
    private UrlPatternConfigManager m_patternManger;

    private Logger m_logger;

    /**
     * Translates one entity into CAT metrics, keyed by "province-city:channel"
     * prefixes: average duration, hit count, error count (non-200), and
     * per-HTTP-status / per-error-code counts.
     */
    private void buildMessage(MonitorEntity entity, String url, IpInfo ipInfo) {
        String city = ipInfo.getProvince() + "-" + ipInfo.getCity();
        String channel = ipInfo.getChannel();
        String httpStatus = entity.getHttpStatus();
        String errorCode = entity.getErrorCode();
        long timestamp = entity.getTimestamp();
        double duration = entity.getDuration();
        String group = url;
        int count = entity.getCount();

        if (duration > 0) {
            logMetricForAvg(timestamp, duration, group, city + ":" + channel + ":" + Constants.AVG);
        }

        String hitKey = city + ":" + channel + ":" + Constants.HIT;
        logMetricForCount(timestamp, group, hitKey, count);

        if (!"200".equals(httpStatus)) {
            String key = city + ":" + channel + ":" + Constants.ERROR;
            logMetricForCount(timestamp, group, key, count);
        }
        if (!StringUtils.isEmpty(httpStatus)) {
            String key = city + ":" + channel + ":" + Constants.HTTP_STATUS + "|" + httpStatus;
            logMetricForCount(timestamp, group, key, count);
        }
        if (!StringUtils.isEmpty(errorCode)) {
            String key = city + ":" + channel + ":" + Constants.ERROR_CODE + "|" + errorCode;
            logMetricForCount(timestamp, group, key, count);
        }
    }

    @Override
    public void enableLogging(Logger logger) {
        m_logger = logger;
    }

    @Override
    public void initialize() throws InitializationException {
        // One bounded queue plus one dedicated sender thread per slot.
        for (int i = 0; i < m_threadCounts; i++) {
            BlockingQueue<MonitorEntity> queue = new LinkedBlockingQueue<MonitorEntity>(10000);
            Threads.forGroup("cat").start(new MessageSender(queue, i));
            m_queues.put(i, queue);
        }
    }

    /** Emits an average-duration metric ("S,C" status, "count,sum" payload). */
    private void logMetricForAvg(long timestamp, double duration, String group, String key) {
        Metric metric = Cat.getProducer().newMetric(group, key);
        DefaultMetric defaultMetric = (DefaultMetric) metric;
        defaultMetric.setTimestamp(timestamp);
        defaultMetric.setStatus("S,C");
        defaultMetric.addData(String.format("%s,%.2f", 1, duration));
    }

    /** Emits a count metric ("C" status). */
    private void logMetricForCount(long timestamp, String group, String key, int count) {
        Metric metric = Cat.getProducer().newMetric(group, key);
        DefaultMetric defaultMetric = (DefaultMetric) metric;
        defaultMetric.setTimestamp(timestamp);
        defaultMetric.setStatus("C");
        defaultMetric.addData(String.valueOf(count));
    }

    /**
     * Offers an entity to the queues, starting at a round-robin slot and
     * falling through to the next slots when a queue is full. Returns false
     * when the entity has no target URL or every queue rejected it.
     */
    public boolean offer(MonitorEntity entity) {
        if (!StringUtils.isEmpty(entity.getTargetUrl())) {
            long seq = m_total.incrementAndGet();
            int index = (int) (seq % m_threadCounts);
            int retryTime = 0;
            while (retryTime < m_threadCounts) {
                BlockingQueue<MonitorEntity> queue = m_queues.get((index + retryTime) % m_threadCounts);
                boolean result = queue.offer(entity);

                if (result) {
                    return true;
                }
                retryTime++;
            }
            // All queues full: count the drop and log only every ERROR_COUNT drops.
            long errors = m_errorCount.incrementAndGet();
            if (errors % CatConstants.ERROR_COUNT == 0) {
                m_logger.error("Error when offer entity to queues, size:" + errors);
            }
        }
        return false;
    }

    /** Normalizes a raw URL to its configured pattern (may return null). */
    private String parseFormatUrl(String url) {
        String result = m_patternManger.handle(url);

        return result;
    }

    /**
     * Converts one entity into CAT metrics inside a "Monitor" transaction;
     * entities whose URL does not match a pattern, or whose IP cannot be
     * resolved, are skipped (the latter logs an event).
     */
    private void processOneEntity(MonitorEntity entity) {
        String targetUrl = entity.getTargetUrl();
        String url = parseFormatUrl(targetUrl);

        if (url != null) {
            Transaction t = Cat.newTransaction("Monitor", url);
            String ip = entity.getIp();
            IpInfo ipInfo = m_ipService.findIpInfoByString(ip);

            try {
                if (ipInfo != null) {
                    buildMessage(entity, url, ipInfo);
                } else {
                    Cat.logEvent("ip", "notFound", Event.SUCCESS, ip);
                }
                t.setStatus(Transaction.SUCCESS);
            } catch (Exception e) {
                Cat.logError(e);
                t.setStatus(e);
            } finally {
                t.complete();
            }
        }
    }

    /**
     * Drains one queue, processing entities until interrupted.
     */
    public class MessageSender implements Task {

        private BlockingQueue<MonitorEntity> m_queue;

        private int m_index;

        public MessageSender(BlockingQueue<MonitorEntity> queue, int index) {
            m_queue = queue;
            m_index = index;
        }

        @Override
        public String getName() {
            return "Message-Send-" + m_index;
        }

        @Override
        public void run() {
            while (true) {
                try {
                    // Short poll timeout keeps the loop responsive to interruption.
                    MonitorEntity entity = m_queue.poll(5, TimeUnit.MILLISECONDS);

                    if (entity != null) {
                        try {
                            processOneEntity(entity);
                        } catch (Exception e) {
                            Cat.logError(e);
                        }
                    }
                } catch (InterruptedException e) {
                    break;
                }
            }
        }

        @Override
        public void shutdown() {
        }
    }
}
| apache-2.0 |
XUSAIFEIX/coolweather | app/src/main/java/com/coolweather/android/db/City.java | 853 | package com.coolweather.android.db;
import org.litepal.crud.DataSupport;
/**
* Created by Administrator on 2017/3/6 0006.
*/
/**
 * LitePal model for a city row: local id, display name, a numeric city
 * code, and the id of the owning province.
 */
public class City extends DataSupport {

    // Local primary key.
    private int id;

    // Display name of the city.
    private String cityName;

    // Numeric code identifying the city — presumably used when requesting
    // weather data; confirm against callers.
    private int cityCode;

    // Id of the province this city belongs to.
    private int provinceId;

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getCityName() {
        return cityName;
    }

    public void setCityName(String cityName) {
        this.cityName = cityName;
    }

    public int getCityCode() {
        return cityCode;
    }

    public void setCityCode(int cityCode) {
        this.cityCode = cityCode;
    }

    public int getProvinceId() {
        return provinceId;
    }

    public void setProvinceId(int provinceId) {
        this.provinceId = provinceId;
    }
}
| apache-2.0 |
compomics/compomics-utilities | src/main/java/com/compomics/util/experiment/identification/TagFactory.java | 1243 | package com.compomics.util.experiment.identification;
import com.compomics.util.experiment.biology.aminoacids.AminoAcid;
import java.util.ArrayList;
/**
 * Convenience class for sequence tag generation.
 *
 * @author Marc Vaudel
 */
public class TagFactory {

    /**
     * Empty default constructor.
     */
    public TagFactory() {
    }

    /**
     * Returns all the amino acid combinations for a given tag length.
     *
     * @param length the length of the tag
     *
     * @return all the amino acid combinations
     */
    public static ArrayList<String> getAminoAcidCombinations(int length) {
        if (length < 0) {
            throw new IllegalArgumentException("Sequence length must be a positive number.");
        }
        if (length == 0) {
            return new ArrayList<>();
        }
        // Start from the single amino acids, then extend every prefix by one
        // residue per iteration until the requested length is reached.
        ArrayList<String> combinations = new ArrayList<>(AminoAcid.getAminoAcidsList());
        for (int i = 1; i < length; i++) {
            ArrayList<String> extended = new ArrayList<>();
            for (String prefix : combinations) {
                for (char aa : AminoAcid.getAminoAcids()) {
                    extended.add(prefix + aa);
                }
            }
            combinations = extended;
        }
        return combinations;
    }
}
| apache-2.0 |
McLeodMoores/starling | projects/analytics/src/main/java/com/opengamma/analytics/math/interpolation/Interpolator1D.java | 7047 | /**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*
* Modified by McLeod Moores Software Limited.
*
* Copyright (C) 2015-Present McLeod Moores Software Limited. All rights reserved.
*/
package com.opengamma.analytics.math.interpolation;
import java.io.Serializable;
import java.util.Map;
import java.util.SortedMap;
import org.apache.commons.lang.ArrayUtils;
import com.opengamma.analytics.math.interpolation.data.Interpolator1DDataBundle;
import com.opengamma.util.ArgumentChecker;
/**
 * A base class for interpolation in one dimension. This class also calculates the first derivatives of the data and the
 * sensitivity of the interpolated value to the y data by finite difference, although sub-classes can implement analytic
 * versions of this calculation.
 */
public abstract class Interpolator1D implements Interpolator<Interpolator1DDataBundle, Double>, Serializable {

    /** Serialization version */
    private static final long serialVersionUID = 1L;
    /** The default epsilon used for calculating the gradient via finite difference */
    private static final double EPS = 1e-6;

    /**
     * Interpolates a value at the given point from the data bundle.
     *
     * @param data interpolation data, not null
     * @param value the point at which to interpolate, not null
     * @return the interpolated value
     */
    @Override
    public abstract Double interpolate(Interpolator1DDataBundle data, Double value);

    /**
     * Computes the gradient of the interpolant at the value.
     * @param data interpolation data, not null
     * @param value the value for which the gradient is computed, not null
     * @return the gradient
     */
    public double firstDerivative(final Interpolator1DDataBundle data, final Double value) {
        ArgumentChecker.notNull(data, "data");
        ArgumentChecker.notNull(value, "value");
        final double vm = value - EPS;
        final double vp = value + EPS;
        if (vm < data.firstKey()) {
            // Too close to the lower bound of the data: use a one-sided forward difference.
            final double up = interpolate(data, value + EPS);
            final double mid = interpolate(data, value);
            return (up - mid) / EPS;
        } else if (vp > data.lastKey()) {
            // Too close to the upper bound of the data: use a one-sided backward difference.
            final double down = interpolate(data, vm);
            final double mid = interpolate(data, value);
            return (mid - down) / EPS;
        }
        // Interior point: symmetric central difference.
        final double up = interpolate(data, value + EPS);
        final double down = interpolate(data, vm);
        return (up - down) / 2 / EPS;
    }

    /**
     * Computes the sensitivities of the interpolated value to the input data y.
     * @param data the interpolation data, not null
     * @param value the value for which the interpolation is computed, not null
     * @param useFiniteDifferenceSensitivities use finite difference approximation if true
     * @return the sensitivities at each node
     */
    public double[] getNodeSensitivitiesForValue(final Interpolator1DDataBundle data, final Double value, final boolean useFiniteDifferenceSensitivities) {
        return useFiniteDifferenceSensitivities ? getFiniteDifferenceSensitivities(data, value) : getNodeSensitivitiesForValue(data, value);
    }

    /**
     * Computes the sensitivities of the interpolated value to the input data y by using a methodology defined in a respective subclass.
     * @param data the interpolation data, not null
     * @param value the value for which the interpolation is computed, not null
     * @return the sensitivities
     */
    public abstract double[] getNodeSensitivitiesForValue(Interpolator1DDataBundle data, Double value);

    /**
     * Computes the sensitivities of the interpolated value to the input data y by using central finite difference approximation.
     * @param data the interpolation data, not null
     * @param value the value for which the interpolation is computed, not null
     * @return the sensitivities
     */
    protected double[] getFiniteDifferenceSensitivities(final Interpolator1DDataBundle data, final Double value) {
        ArgumentChecker.notNull(data, "data");
        ArgumentChecker.notNull(value, "value");
        final double[] x = data.getKeys();
        final double[] y = data.getValues();
        final int n = x.length;
        final double[] result = new double[n];
        // Two working bundles whose y values are bumped up/down one node at a time.
        final Interpolator1DDataBundle dataUp = getDataBundleFromSortedArrays(x, y);
        final Interpolator1DDataBundle dataDown = getDataBundleFromSortedArrays(x, y);
        for (int i = 0; i < n; i++) {
            if (i != 0) {
                // Restore the node bumped in the previous iteration before bumping the next.
                dataUp.setYValueAtIndex(i - 1, y[i - 1]);
                dataDown.setYValueAtIndex(i - 1, y[i - 1]);
            }
            dataUp.setYValueAtIndex(i, y[i] + EPS);
            dataDown.setYValueAtIndex(i, y[i] - EPS);
            final double up = interpolate(dataUp, value);
            final double down = interpolate(dataDown, value);
            // Central difference of the interpolated value with respect to y[i].
            result[i] = (up - down) / 2 / EPS;
        }
        return result;
    }

    /**
     * Construct an {@link Interpolator1DDataBundle} from unsorted arrays. The bundle may contain information such as the derivatives
     * at each data point. The x data need not be sorted.
     * @param x x values of data, not null
     * @param y y values of data, not null
     * @return the data bundle
     */
    public abstract Interpolator1DDataBundle getDataBundle(double[] x, double[] y);

    /**
     * Construct an {@link Interpolator1DDataBundle} from sorted arrays, i.e, x[0] < x[1] < x[2]. The bundle may contain information such as the derivatives
     * at each data point.
     *
     * @param x
     *          x values of data, not null
     * @param y
     *          y values of data, not null
     * @return the data bundle
     */
    public abstract Interpolator1DDataBundle getDataBundleFromSortedArrays(double[] x, double[] y);

    /**
     * Constructs an {@link Interpolator1DDataBundle}. The bundle may contain information such as the derivatives
     * at each data point. The x data need not be sorted.
     * @param data data containing x values and y values, not null
     * @return Interpolator1DDataBundle
     */
    public Interpolator1DDataBundle getDataBundle(final Map<Double, Double> data) {
        ArgumentChecker.notEmpty(data, "data");
        if (data instanceof SortedMap) {
            // Already sorted: convert directly without re-sorting.
            final double[] keys = ArrayUtils.toPrimitive(data.keySet().toArray(new Double[data.size()]));
            final double[] values = ArrayUtils.toPrimitive(data.values().toArray(new Double[data.size()]));
            return getDataBundleFromSortedArrays(keys, values);
        }
        final double[] keys = new double[data.size()];
        final double[] values = new double[data.size()];
        int i = 0;
        for (final Map.Entry<Double, Double> entry : data.entrySet()) {
            keys[i] = entry.getKey();
            values[i] = entry.getValue();
            i++;
        }
        return getDataBundle(keys, values);
    }

    @Override
    public int hashCode() {
        // Interpolators carry no state: all instances of the same class are equal
        // (see equals below), so a constant hash code is consistent with the contract.
        final int prime = 31;
        int result = 1;
        result = prime * result;
        return result;
    }

    @Override
    public boolean equals(final Object obj) {
        // Equality is exact class identity only; interpolators are assumed stateless.
        if (this == obj) {
            return true;
        }
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        return true;
    }

    /**
     * @param o the reference class
     * @return true if two objects are the same class
     */
    protected boolean classEquals(final Object o) {
        if (o == null) {
            return false;
        }
        return getClass().equals(o.getClass());
    }
}
| apache-2.0 |
topicusonderwijs/pac4j | pac4j-oauth/src/main/java/org/pac4j/oauth/profile/facebook/FacebookProfileDefinition.java | 8962 | package org.pac4j.oauth.profile.facebook;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonNode;
import com.github.scribejava.core.model.OAuth2AccessToken;
import org.pac4j.core.exception.HttpAction;
import org.pac4j.core.exception.TechnicalException;
import org.pac4j.core.profile.converter.Converters;
import org.pac4j.core.profile.converter.DateConverter;
import org.pac4j.core.util.CommonHelper;
import org.pac4j.oauth.client.FacebookClient;
import org.pac4j.oauth.config.OAuth20Configuration;
import org.pac4j.oauth.profile.JsonHelper;
import org.pac4j.oauth.profile.converter.JsonConverter;
import org.pac4j.oauth.profile.definition.OAuth20ProfileDefinition;
import org.pac4j.oauth.profile.facebook.converter.FacebookRelationshipStatusConverter;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
/**
 * This class is the Facebook profile definition: it declares the Facebook Graph API
 * user attributes, registers their converters, and extracts a {@link FacebookProfile}
 * from the JSON body returned by the {@code /me} endpoint.
 *
 * @author Jerome Leleu
 * @since 1.1.0
 */
public class FacebookProfileDefinition extends OAuth20ProfileDefinition<FacebookProfile> {

    // Attribute names of the Facebook Graph API "user" object.
    public static final String NAME = "name";
    public static final String MIDDLE_NAME = "middle_name";
    public static final String LAST_NAME = "last_name";
    public static final String LANGUAGES = "languages";
    public static final String LINK = "link";
    public static final String THIRD_PARTY_ID = "third_party_id";
    public static final String TIMEZONE = "timezone";
    public static final String UPDATED_TIME = "updated_time";
    public static final String VERIFIED = "verified";
    public static final String ABOUT = "about";
    public static final String BIRTHDAY = "birthday";
    public static final String EDUCATION = "education";
    public static final String HOMETOWN = "hometown";
    public static final String INTERESTED_IN = "interested_in";
    public static final String POLITICAL = "political";
    public static final String FAVORITE_ATHLETES = "favorite_athletes";
    public static final String FAVORITE_TEAMS = "favorite_teams";
    public static final String QUOTES = "quotes";
    public static final String RELATIONSHIP_STATUS = "relationship_status";
    public static final String RELIGION = "religion";
    public static final String SIGNIFICANT_OTHER = "significant_other";
    public static final String WEBSITE = "website";
    public static final String WORK = "work";
    public static final String FRIENDS = "friends";
    public static final String MOVIES = "movies";
    public static final String MUSIC = "music";
    public static final String BOOKS = "books";
    public static final String LIKES = "likes";
    public static final String ALBUMS = "albums";
    public static final String EVENTS = "events";
    public static final String GROUPS = "groups";
    public static final String MUSIC_LISTENS = "music.listens";
    public static final String PICTURE = "picture";
    // Limit value meaning "do not add a limit parameter to the profile URL".
    public static final int DEFAULT_LIMIT = 0;
    protected static final String BASE_URL = "https://graph.facebook.com/v2.8/me";
    protected static final String APPSECRET_PARAMETER = "appsecret_proof";

    /**
     * Registers every attribute with its converter. Primary attributes come from the
     * top level of the {@code /me} response; secondary attributes come from connection
     * sub-objects (friends, likes, albums, ...).
     */
    public FacebookProfileDefinition() {
        super(x -> new FacebookProfile());
        // Plain string attributes.
        Arrays.stream(new String[] {
            NAME, MIDDLE_NAME, LAST_NAME, THIRD_PARTY_ID, ABOUT, POLITICAL, QUOTES, RELIGION, WEBSITE
        }).forEach(a -> primary(a, Converters.STRING));
        primary(TIMEZONE, Converters.INTEGER);
        primary(VERIFIED, Converters.BOOLEAN);
        primary(LINK, Converters.URL);
        // Shared converters for nested Facebook JSON objects.
        final JsonConverter<FacebookObject> objectConverter = new JsonConverter<>(FacebookObject.class);
        final JsonConverter multiObjectConverter = new JsonConverter(List.class, new TypeReference<List<FacebookObject>>() {});
        final JsonConverter multiInfoConverter = new JsonConverter(List.class, new TypeReference<List<FacebookInfo>>() {});
        primary(UPDATED_TIME, Converters.DATE_TZ_GENERAL);
        // Facebook formats birthdays as MM/dd/yyyy.
        primary(BIRTHDAY, new DateConverter("MM/dd/yyyy"));
        primary(RELATIONSHIP_STATUS, new FacebookRelationshipStatusConverter());
        primary(LANGUAGES, multiObjectConverter);
        primary(EDUCATION, new JsonConverter(List.class, new TypeReference<List<FacebookEducation>>() {}));
        primary(HOMETOWN, objectConverter);
        primary(INTERESTED_IN, new JsonConverter(List.class, new TypeReference<List<String>>() {}));
        primary(LOCATION, objectConverter);
        primary(FAVORITE_ATHLETES, multiObjectConverter);
        primary(FAVORITE_TEAMS, multiObjectConverter);
        primary(SIGNIFICANT_OTHER, objectConverter);
        primary(WORK, new JsonConverter(List.class, new TypeReference<List<FacebookWork>>() {}));
        secondary(FRIENDS, multiObjectConverter);
        secondary(MOVIES, multiInfoConverter);
        secondary(MUSIC, multiInfoConverter);
        secondary(BOOKS, multiInfoConverter);
        secondary(LIKES, multiInfoConverter);
        secondary(ALBUMS, new JsonConverter(List.class, new TypeReference<List<FacebookPhoto>>() {}));
        secondary(EVENTS, new JsonConverter(List.class, new TypeReference<List<FacebookEvent>>() {}));
        secondary(GROUPS, new JsonConverter(List.class, new TypeReference<List<FacebookGroup>>() {}));
        secondary(MUSIC_LISTENS, new JsonConverter(List.class, new TypeReference<List<FacebookMusicListen>>() {}));
        secondary(PICTURE, new JsonConverter<>(FacebookPicture.class));
    }

    /**
     * Builds the Graph API profile URL, appending the client's requested fields,
     * an optional limit and, if enabled, the {@code appsecret_proof} parameter.
     *
     * @param accessToken the current access token
     * @param configuration the current configuration
     * @return the URL of the profile endpoint
     */
    @Override
    public String getProfileUrl(final OAuth2AccessToken accessToken, final OAuth20Configuration configuration) {
        final FacebookClient client = (FacebookClient) configuration.getClient();
        String url = BASE_URL + "?fields=" + client.getFields();
        if (client.getLimit() > DEFAULT_LIMIT) {
            url += "&limit=" + client.getLimit();
        }
        // possibly include the appsecret_proof parameter
        if (client.getUseAppSecretProof()) {
            url = computeAppSecretProof(url, accessToken, configuration);
        }
        return url;
    }

    /**
     * The code in this method is based on this blog post:
     * https://www.sammyk.me/the-single-most-important-way-to-make-your-facebook-app-more-secure
     * and this answer: https://stackoverflow.com/questions/7124735/hmac-sha256-algorithm-for-signature-calculation
     *
     * @param url the URL to which we're adding the proof
     * @param token the application token we pass back and forth
     * @param configuration the current configuration
     * @return URL with the appsecret_proof parameter added
     */
    public String computeAppSecretProof(final String url, final OAuth2AccessToken token, final OAuth20Configuration configuration) {
        try {
            // HMAC-SHA256 of the access token, keyed with the app secret; both encoded
            // as UTF-8 explicitly rather than relying on charset-name lookup.
            final Mac hmac = Mac.getInstance("HmacSHA256");
            final SecretKeySpec secretKey = new SecretKeySpec(configuration.getSecret().getBytes(StandardCharsets.UTF_8), "HmacSHA256");
            hmac.init(secretKey);
            final String proof = org.apache.commons.codec.binary.Hex.encodeHexString(
                hmac.doFinal(token.getAccessToken().getBytes(StandardCharsets.UTF_8)));
            return CommonHelper.addParameter(url, APPSECRET_PARAMETER, proof);
        } catch (final Exception e) {
            throw new TechnicalException("Unable to compute appsecret_proof", e);
        }
    }

    /**
     * Builds a {@link FacebookProfile} from the raw JSON body of the {@code /me}
     * response: the id, all primary attributes, then the connection sub-objects.
     *
     * @param body the JSON response body
     * @return the populated profile
     * @throws HttpAction if an extra HTTP action is required
     */
    @Override
    public FacebookProfile extractUserProfile(final String body) throws HttpAction {
        final FacebookProfile profile = newProfile();
        final JsonNode json = JsonHelper.getFirstNode(body);
        if (json != null) {
            profile.setId(JsonHelper.getElement(json, "id"));
            for (final String attribute : getPrimaryAttributes()) {
                convertAndAdd(profile, attribute, JsonHelper.getElement(json, attribute));
            }
            extractData(profile, json, FacebookProfileDefinition.FRIENDS);
            extractData(profile, json, FacebookProfileDefinition.MOVIES);
            extractData(profile, json, FacebookProfileDefinition.MUSIC);
            extractData(profile, json, FacebookProfileDefinition.BOOKS);
            extractData(profile, json, FacebookProfileDefinition.LIKES);
            extractData(profile, json, FacebookProfileDefinition.ALBUMS);
            extractData(profile, json, FacebookProfileDefinition.EVENTS);
            extractData(profile, json, FacebookProfileDefinition.GROUPS);
            extractData(profile, json, FacebookProfileDefinition.MUSIC_LISTENS);
            extractData(profile, json, FacebookProfileDefinition.PICTURE);
        }
        return profile;
    }

    /**
     * Extracts a Facebook connection object: connections wrap their payload in a
     * "data" node, which is unwrapped here before conversion.
     *
     * @param profile the profile being populated
     * @param json the root JSON node
     * @param name the connection attribute name
     */
    protected void extractData(final FacebookProfile profile, final JsonNode json, final String name) {
        final JsonNode data = (JsonNode) JsonHelper.getElement(json, name);
        if (data != null) {
            convertAndAdd(profile, name, JsonHelper.getElement(data, "data"));
        }
    }
}
| apache-2.0 |
greghogan/flink | flink-formats/flink-json/src/test/java/org/apache/flink/formats/json/JsonRowDataSerDeSchemaTest.java | 26515 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.formats.json;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.table.data.GenericMapData;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.StringData;
import org.apache.flink.table.data.util.DataFormatConverters;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.types.Row;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ObjectNode;
import org.junit.Assert;
import org.junit.Test;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZoneOffset;
import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import static org.apache.flink.table.api.DataTypes.ARRAY;
import static org.apache.flink.table.api.DataTypes.BIGINT;
import static org.apache.flink.table.api.DataTypes.BOOLEAN;
import static org.apache.flink.table.api.DataTypes.BYTES;
import static org.apache.flink.table.api.DataTypes.DATE;
import static org.apache.flink.table.api.DataTypes.DECIMAL;
import static org.apache.flink.table.api.DataTypes.DOUBLE;
import static org.apache.flink.table.api.DataTypes.FIELD;
import static org.apache.flink.table.api.DataTypes.FLOAT;
import static org.apache.flink.table.api.DataTypes.INT;
import static org.apache.flink.table.api.DataTypes.MAP;
import static org.apache.flink.table.api.DataTypes.MULTISET;
import static org.apache.flink.table.api.DataTypes.ROW;
import static org.apache.flink.table.api.DataTypes.SMALLINT;
import static org.apache.flink.table.api.DataTypes.STRING;
import static org.apache.flink.table.api.DataTypes.TIME;
import static org.apache.flink.table.api.DataTypes.TIMESTAMP;
import static org.apache.flink.table.api.DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE;
import static org.apache.flink.table.api.DataTypes.TINYINT;
import static org.apache.flink.table.types.utils.TypeConversions.fromLogicalToDataType;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
/**
* Tests for {@link JsonRowDataDeserializationSchema} and {@link JsonRowDataSerializationSchema}.
*/
public class JsonRowDataSerDeSchemaTest {
/**
 * Round-trip test: deserializes a JSON document covering every supported logical
 * type into {@code RowData}, checks the converted {@code Row} field by field,
 * then serializes the row back and expects the original JSON bytes.
 */
@Test
public void testSerDe() throws Exception {
    // Scalar fixture values, one per tested logical type.
    byte tinyint = 'c';
    short smallint = 128;
    int intValue = 45536;
    float floatValue = 33.333F;
    long bigint = 1238123899121L;
    String name = "asdlkjasjkdla998y1122";
    byte[] bytes = new byte[1024];
    ThreadLocalRandom.current().nextBytes(bytes);
    BigDecimal decimal = new BigDecimal("123.456789");
    Double[] doubles = new Double[]{1.1, 2.2, 3.3};
    // Temporal fixtures for DATE, TIME, TIMESTAMP(3)/(9) and
    // TIMESTAMP WITH LOCAL TIME ZONE.
    LocalDate date = LocalDate.parse("1990-10-14");
    LocalTime time = LocalTime.parse("12:12:43");
    Timestamp timestamp3 = Timestamp.valueOf("1990-10-14 12:12:43.123");
    Timestamp timestamp9 = Timestamp.valueOf("1990-10-14 12:12:43.123456789");
    Instant timestampWithLocalZone =
        LocalDateTime.of(1990, 10, 14, 12, 12, 43, 123456789).
            atOffset(ZoneOffset.of("Z")).toInstant();
    // Map, multiset and nested-map fixtures.
    Map<String, Long> map = new HashMap<>();
    map.put("flink", 123L);
    Map<String, Integer> multiSet = new HashMap<>();
    multiSet.put("blink", 2);
    Map<String, Map<String, Integer>> nestedMap = new HashMap<>();
    Map<String, Integer> innerMap = new HashMap<>();
    innerMap.put("key", 234);
    nestedMap.put("inner_map", innerMap);
    ObjectMapper objectMapper = new ObjectMapper();
    ArrayNode doubleNode = objectMapper.createArrayNode().add(1.1D).add(2.2D).add(3.3D);
    // Root
    ObjectNode root = objectMapper.createObjectNode();
    root.put("bool", true);
    root.put("tinyint", tinyint);
    root.put("smallint", smallint);
    root.put("int", intValue);
    root.put("bigint", bigint);
    root.put("float", floatValue);
    root.put("name", name);
    root.put("bytes", bytes);
    root.put("decimal", decimal);
    root.set("doubles", doubleNode);
    root.put("date", "1990-10-14");
    root.put("time", "12:12:43");
    root.put("timestamp3", "1990-10-14T12:12:43.123");
    root.put("timestamp9", "1990-10-14T12:12:43.123456789");
    root.put("timestampWithLocalZone", "1990-10-14T12:12:43.123456789Z");
    root.putObject("map").put("flink", 123);
    root.putObject("multiSet").put("blink", 2);
    root.putObject("map2map").putObject("inner_map").put("key", 234);
    byte[] serializedJson = objectMapper.writeValueAsBytes(root);
    // Row schema matching the JSON document above, field for field.
    DataType dataType = ROW(
        FIELD("bool", BOOLEAN()),
        FIELD("tinyint", TINYINT()),
        FIELD("smallint", SMALLINT()),
        FIELD("int", INT()),
        FIELD("bigint", BIGINT()),
        FIELD("float", FLOAT()),
        FIELD("name", STRING()),
        FIELD("bytes", BYTES()),
        FIELD("decimal", DECIMAL(9, 6)),
        FIELD("doubles", ARRAY(DOUBLE())),
        FIELD("date", DATE()),
        FIELD("time", TIME(0)),
        FIELD("timestamp3", TIMESTAMP(3)),
        FIELD("timestamp9", TIMESTAMP(9)),
        FIELD("timestampWithLocalZone", TIMESTAMP_WITH_LOCAL_TIME_ZONE(9)),
        FIELD("map", MAP(STRING(), BIGINT())),
        FIELD("multiSet", MULTISET(STRING())),
        FIELD("map2map", MAP(STRING(), MAP(STRING(), INT()))));
    RowType schema = (RowType) dataType.getLogicalType();
    TypeInformation<RowData> resultTypeInfo = InternalTypeInfo.of(schema);
    JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
        schema, resultTypeInfo, false, false, TimestampFormat.ISO_8601);
    // Expected external Row: one field per JSON attribute, in schema order.
    Row expected = new Row(18);
    expected.setField(0, true);
    expected.setField(1, tinyint);
    expected.setField(2, smallint);
    expected.setField(3, intValue);
    expected.setField(4, bigint);
    expected.setField(5, floatValue);
    expected.setField(6, name);
    expected.setField(7, bytes);
    expected.setField(8, decimal);
    expected.setField(9, doubles);
    expected.setField(10, date);
    expected.setField(11, time);
    expected.setField(12, timestamp3.toLocalDateTime());
    expected.setField(13, timestamp9.toLocalDateTime());
    expected.setField(14, timestampWithLocalZone);
    expected.setField(15, map);
    expected.setField(16, multiSet);
    expected.setField(17, nestedMap);
    RowData rowData = deserializationSchema.deserialize(serializedJson);
    Row actual = convertToExternal(rowData, dataType);
    assertEquals(expected, actual);
    // test serialization
    JsonRowDataSerializationSchema serializationSchema =
        new JsonRowDataSerializationSchema(
            schema,
            TimestampFormat.ISO_8601,
            JsonOptions.MapNullKeyMode.LITERAL,
            "null");
    byte[] actualBytes = serializationSchema.serialize(rowData);
    assertEquals(new String(serializedJson), new String(actualBytes));
}
/**
 * Tests the deserialization slow path,
 * e.g. convert into string and use {@link Double#parseDouble(String)}.
 */
@Test
public void testSlowDeserialization() throws Exception {
    final Random rnd = new Random();
    final boolean boolValue = rnd.nextBoolean();
    final int intValue = rnd.nextInt();
    final long longValue = rnd.nextLong();
    final double dblValue = rnd.nextDouble();
    final float fltValue = rnd.nextFloat();

    // Encode every value as a JSON string (or BigDecimal node) so the deserializer
    // cannot take the fast primitive-node path and must parse text instead.
    final ObjectMapper mapper = new ObjectMapper();
    final ObjectNode json = mapper.createObjectNode();
    json.put("bool", String.valueOf(boolValue));
    json.put("int", String.valueOf(intValue));
    json.put("bigint", String.valueOf(longValue));
    json.put("double1", String.valueOf(dblValue));
    json.put("double2", new BigDecimal(dblValue));
    json.put("float1", String.valueOf(fltValue));
    json.put("float2", new BigDecimal(fltValue));
    final byte[] payload = mapper.writeValueAsBytes(json);

    final DataType dataType = ROW(
        FIELD("bool", BOOLEAN()),
        FIELD("int", INT()),
        FIELD("bigint", BIGINT()),
        FIELD("double1", DOUBLE()),
        FIELD("double2", DOUBLE()),
        FIELD("float1", FLOAT()),
        FIELD("float2", FLOAT())
    );
    final RowType rowType = (RowType) dataType.getLogicalType();
    final JsonRowDataDeserializationSchema deserializer = new JsonRowDataDeserializationSchema(
        rowType, InternalTypeInfo.of(rowType), false, false, TimestampFormat.ISO_8601);

    // The parsed row must carry the original primitive values regardless of encoding.
    final Row expectedRow = new Row(7);
    expectedRow.setField(0, boolValue);
    expectedRow.setField(1, intValue);
    expectedRow.setField(2, longValue);
    expectedRow.setField(3, dblValue);
    expectedRow.setField(4, dblValue);
    expectedRow.setField(5, fltValue);
    expectedRow.setField(6, fltValue);

    final Row actualRow = convertToExternal(deserializer.deserialize(payload), dataType);
    assertEquals(expectedRow, actualRow);
}
/**
 * Round-trips two different rows of a schema mixing scalars, a map, an array and
 * a nested row, checking that serialization reproduces the input bytes exactly
 * (including a null nested field in the second row).
 */
@Test
public void testSerDeMultiRows() throws Exception {
    RowType rowType = (RowType) ROW(
        FIELD("f1", INT()),
        FIELD("f2", BOOLEAN()),
        FIELD("f3", STRING()),
        FIELD("f4", MAP(STRING(), STRING())),
        FIELD("f5", ARRAY(STRING())),
        FIELD("f6", ROW(
            FIELD("f1", STRING()),
            FIELD("f2", INT())))
    ).getLogicalType();
    JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
        rowType, InternalTypeInfo.of(rowType), false, false, TimestampFormat.ISO_8601);
    JsonRowDataSerializationSchema serializationSchema =
        new JsonRowDataSerializationSchema(
            rowType, TimestampFormat.ISO_8601, JsonOptions.MapNullKeyMode.LITERAL, "null");
    ObjectMapper objectMapper = new ObjectMapper();
    // the first row
    {
        ObjectNode root = objectMapper.createObjectNode();
        root.put("f1", 1);
        root.put("f2", true);
        root.put("f3", "str");
        ObjectNode map = root.putObject("f4");
        map.put("hello1", "flink");
        ArrayNode array = root.putArray("f5");
        array.add("element1");
        array.add("element2");
        ObjectNode row = root.putObject("f6");
        row.put("f1", "this is row1");
        row.put("f2", 12);
        byte[] serializedJson = objectMapper.writeValueAsBytes(root);
        // Deserialize then re-serialize: the bytes must round-trip unchanged.
        RowData rowData = deserializationSchema.deserialize(serializedJson);
        byte[] actual = serializationSchema.serialize(rowData);
        assertEquals(new String(serializedJson), new String(actual));
    }
    // the second row
    {
        ObjectNode root = objectMapper.createObjectNode();
        root.put("f1", 10);
        root.put("f2", false);
        root.put("f3", "newStr");
        ObjectNode map = root.putObject("f4");
        map.put("hello2", "json");
        ArrayNode array = root.putArray("f5");
        array.add("element3");
        array.add("element4");
        ObjectNode row = root.putObject("f6");
        row.put("f1", "this is row2");
        // Explicit JSON null inside the nested row must survive the round trip.
        row.putNull("f2");
        byte[] serializedJson = objectMapper.writeValueAsBytes(root);
        RowData rowData = deserializationSchema.deserialize(serializedJson);
        byte[] actual = serializationSchema.serialize(rowData);
        assertEquals(new String(serializedJson), new String(actual));
    }
}
/**
 * Verifies that missing fields, invalid map entries and empty maps are mapped to
 * nulls when ignoreParseErrors is enabled, and that serialization then emits
 * explicit JSON nulls for them.
 */
@Test
public void testSerDeMultiRowsWithNullValues() throws Exception {
    final String[] inputs = new String[] {
        "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"metrics\":{\"k1\":10.01,\"k2\":\"invalid\"}}",
        "{\"svt\":\"2020-02-24T12:58:09.209+0800\", \"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}, " +
            "\"ids\":[1, 2, 3]}",
        "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"metrics\":{}}",
    };
    final String[] expectedOutputs = new String[] {
        "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null,\"metrics\":{\"k1\":10.01,\"k2\":null}}",
        "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":{\"id\":\"281708d0-4092-4c21-9233-931950b6eccf\"}," +
            "\"ids\":[1,2,3],\"metrics\":null}",
        "{\"svt\":\"2020-02-24T12:58:09.209+0800\",\"ops\":null,\"ids\":null,\"metrics\":{}}",
    };

    final RowType rowType = (RowType) ROW(
        FIELD("svt", STRING()),
        FIELD("ops", ROW(FIELD("id", STRING()))),
        FIELD("ids", ARRAY(INT())),
        FIELD("metrics", MAP(STRING(), DOUBLE()))
    ).getLogicalType();
    // ignoreParseErrors = true: unparsable values become nulls instead of failing.
    final JsonRowDataDeserializationSchema deserializer = new JsonRowDataDeserializationSchema(
        rowType, InternalTypeInfo.of(rowType), false, true, TimestampFormat.ISO_8601);
    final JsonRowDataSerializationSchema serializer =
        new JsonRowDataSerializationSchema(
            rowType, TimestampFormat.ISO_8601, JsonOptions.MapNullKeyMode.LITERAL, "null");

    for (int idx = 0; idx < inputs.length; idx++) {
        final RowData row = deserializer.deserialize(inputs[idx].getBytes());
        assertEquals(expectedOutputs[idx], new String(serializer.serialize(row)));
    }
}
/**
 * Checks the three missing-field policies of the deserializer: map the missing
 * field to null (default), fail fast (failOnMissingField), ignore parse errors —
 * plus the rule that failOnMissingField and ignoreParseErrors cannot both be on.
 */
@Test
public void testDeserializationMissingNode() throws Exception {
    ObjectMapper objectMapper = new ObjectMapper();
    // Root
    ObjectNode root = objectMapper.createObjectNode();
    root.put("id", 123123123);
    byte[] serializedJson = objectMapper.writeValueAsBytes(root);
    // The schema asks for "name", which the JSON above never provides.
    DataType dataType = ROW(FIELD("name", STRING()));
    RowType schema = (RowType) dataType.getLogicalType();
    // pass on missing field
    JsonRowDataDeserializationSchema deserializationSchema = new JsonRowDataDeserializationSchema(
        schema, InternalTypeInfo.of(schema), false, false, TimestampFormat.ISO_8601);
    Row expected = new Row(1);
    Row actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
    assertEquals(expected, actual);
    // fail on missing field
    deserializationSchema = new JsonRowDataDeserializationSchema(
        schema, InternalTypeInfo.of(schema), true, false, TimestampFormat.ISO_8601);
    String errorMessage = "Failed to deserialize JSON '{\"id\":123123123}'.";
    try {
        deserializationSchema.deserialize(serializedJson);
        fail("expecting exception message: " + errorMessage);
    } catch (Throwable t) {
        assertEquals(errorMessage, t.getMessage());
    }
    // ignore on parse error
    deserializationSchema = new JsonRowDataDeserializationSchema(
        schema, InternalTypeInfo.of(schema), false, true, TimestampFormat.ISO_8601);
    actual = convertToExternal(deserializationSchema.deserialize(serializedJson), dataType);
    assertEquals(expected, actual);
    errorMessage = "JSON format doesn't support failOnMissingField and ignoreParseErrors are both enabled.";
    try {
        // failOnMissingField and ignoreParseErrors both enabled
        new JsonRowDataDeserializationSchema(
            schema, InternalTypeInfo.of(schema), true, true, TimestampFormat.ISO_8601);
        fail("expecting exception message: " + errorMessage);
    } catch (Throwable t) {
        assertEquals(errorMessage, t.getMessage());
    }
}
/**
 * Round-trips SQL-formatted timestamps ("yyyy-MM-dd HH:mm:ss[.fraction]") through
 * the JSON deserializer and serializer and expects byte-identical output.
 */
@Test
public void testSerDeSQLTimestampFormat() throws Exception {
    // Build the input JSON first: four timestamp flavours in SQL notation.
    final ObjectMapper mapper = new ObjectMapper();
    final ObjectNode json = mapper.createObjectNode();
    json.put("timestamp3", "1990-10-14 12:12:43.123");
    json.put("timestamp9", "1990-10-14 12:12:43.123456789");
    json.put("timestamp_with_local_timezone3", "1990-10-14 12:12:43.123Z");
    json.put("timestamp_with_local_timezone9", "1990-10-14 12:12:43.123456789Z");
    final byte[] input = mapper.writeValueAsBytes(json);

    final RowType schema = (RowType) ROW(
        FIELD("timestamp3", TIMESTAMP(3)),
        FIELD("timestamp9", TIMESTAMP(9)),
        FIELD("timestamp_with_local_timezone3", TIMESTAMP_WITH_LOCAL_TIME_ZONE(3)),
        FIELD("timestamp_with_local_timezone9", TIMESTAMP_WITH_LOCAL_TIME_ZONE(9))
    ).getLogicalType();
    final JsonRowDataDeserializationSchema deserializer = new JsonRowDataDeserializationSchema(
        schema, InternalTypeInfo.of(schema), false, false, TimestampFormat.SQL);
    final JsonRowDataSerializationSchema serializer =
        new JsonRowDataSerializationSchema(
            schema, TimestampFormat.SQL, JsonOptions.MapNullKeyMode.LITERAL, "null");

    final RowData row = deserializer.deserialize(input);
    final byte[] reserialized = serializer.serialize(row);
    assertEquals(new String(input), new String(reserialized));
}
/**
 * Exercises the three map-null-key serialization modes on a nested map that
 * contains null keys at both nesting levels: FAIL must throw, DROP must omit
 * the null-key entries, and LITERAL must replace them with the configured text.
 */
@Test
public void testSerializationMapNullKey() throws Exception {
    RowType rowType = (RowType) ROW(
        FIELD("nestedMap", MAP(STRING(), MAP(STRING(), INT())))
    ).getLogicalType();
    // test data
    // use LinkedHashMap to make sure entries order
    Map<StringData, Integer> map = new LinkedHashMap<>();
    map.put(StringData.fromString("no-null key"), 1);
    map.put(StringData.fromString(null), 2);
    GenericMapData mapData = new GenericMapData(map);
    Map<StringData, GenericMapData> nestedMap = new LinkedHashMap<>();
    nestedMap.put(StringData.fromString("no-null key"), mapData);
    nestedMap.put(StringData.fromString(null), mapData);
    GenericMapData nestedMapData = new GenericMapData(nestedMap);
    GenericRowData rowData = new GenericRowData(1);
    rowData.setField(0, nestedMapData);
    // Mode FAIL: serialization must reject null keys outright.
    JsonRowDataSerializationSchema serializationSchema1 =
        new JsonRowDataSerializationSchema(
            rowType, TimestampFormat.SQL, JsonOptions.MapNullKeyMode.FAIL, "null");
    // expect message for serializationSchema1
    String errorMessage1 = "JSON format doesn't support to serialize map data with null keys."
        + " You can drop null key entries or encode null in literals by specifying map-null-key.mode option.";
    // Mode DROP: null-key entries are silently removed from the output.
    JsonRowDataSerializationSchema serializationSchema2 =
        new JsonRowDataSerializationSchema(
            rowType, TimestampFormat.SQL, JsonOptions.MapNullKeyMode.DROP, "null");
    // expect result for serializationSchema2
    String expectResult2 = "{\"nestedMap\":{\"no-null key\":{\"no-null key\":1}}}";
    // Mode LITERAL: null keys are replaced by the configured literal ("nullKey").
    JsonRowDataSerializationSchema serializationSchema3 =
        new JsonRowDataSerializationSchema(
            rowType, TimestampFormat.SQL, JsonOptions.MapNullKeyMode.LITERAL, "nullKey");
    // expect result for serializationSchema3
    String expectResult3 =
        "{\"nestedMap\":{\"no-null key\":{\"no-null key\":1,\"nullKey\":2},\"nullKey\":{\"no-null key\":1,\"nullKey\":2}}}";
    try {
        // throw exception when mapNullKey Mode is fail
        serializationSchema1.serialize(rowData);
        Assert.fail("expecting exception message: " + errorMessage1);
    } catch (Throwable t) {
        assertEquals(errorMessage1, t.getCause().getMessage());
    }
    // mapNullKey Mode is drop
    byte[] actual2 = serializationSchema2.serialize(rowData);
    assertEquals(expectResult2, new String(actual2));
    // mapNullKey Mode is literal
    byte[] actual3 = serializationSchema3.serialize(rowData);
    assertEquals(expectResult3, new String(actual3));
}
/**
 * Runs every {@code TestSpec} in {@code testData}: the lenient (ignore-errors)
 * path is exercised for all specs, the strict (failing) path only for specs
 * that declare an expected error message.
 */
@Test
public void testJsonParse() throws Exception {
    for (TestSpec spec : testData) {
        testIgnoreParseErrors(spec);
        if (spec.errorMessage != null) {
            testParseErrors(spec);
        }
    }
}
/**
 * Deserializes {@code spec.json} with {@code ignoreParseErrors} enabled:
 * unparsable fields must come back as null instead of raising an exception.
 */
private void testIgnoreParseErrors(TestSpec spec) throws Exception {
    // the parsing field should be null and no exception is thrown
    JsonRowDataDeserializationSchema ignoreErrorsSchema = new JsonRowDataDeserializationSchema(
        spec.rowType, InternalTypeInfo.of(spec.rowType), false, true,
        spec.timestampFormat);
    Row expected;
    if (spec.expected != null) {
        expected = spec.expected;
    } else {
        // No expected row declared: a one-field row with a null field is assumed.
        expected = new Row(1);
    }
    RowData rowData = ignoreErrorsSchema.deserialize(spec.json.getBytes());
    Row actual = convertToExternal(rowData, fromLogicalToDataType(spec.rowType));
    assertEquals("Test Ignore Parse Error: " + spec.json,
        expected,
        actual);
}
/**
 * Deserializes {@code spec.json} with {@code ignoreParseErrors} disabled and
 * verifies that the declared parse error is raised with the expected message.
 */
private void testParseErrors(TestSpec spec) throws Exception {
    // expect exception if parse error is not ignored
    JsonRowDataDeserializationSchema failingSchema = new JsonRowDataDeserializationSchema(
        spec.rowType, InternalTypeInfo.of(spec.rowType), false, false,
        spec.timestampFormat);
    try {
        failingSchema.deserialize(spec.json.getBytes());
        fail("expecting exception " + spec.errorMessage);
    } catch (Throwable t) {
        // JUnit's assertEquals takes the expected value first; the original
        // call had the arguments swapped, producing misleading failure output.
        assertEquals(spec.errorMessage, t.getMessage());
    }
}
// Table-driven specs for testJsonParse(). Each spec pairs a JSON document with
// the target row type and the expected row (lenient mode) and/or the expected
// error message (strict mode).
private static List<TestSpec> testData = Arrays.asList(
    TestSpec
        // Non-boolean text reads as false (per expected row) rather than failing.
        .json("{\"id\": \"trueA\"}")
        .rowType(ROW(FIELD("id", BOOLEAN())))
        .expect(Row.of(false)),
    TestSpec
        .json("{\"id\": true}")
        .rowType(ROW(FIELD("id", BOOLEAN())))
        .expect(Row.of(true)),
    TestSpec
        .json("{\"id\":\"abc\"}")
        .rowType(ROW(FIELD("id", INT())))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'."),
    TestSpec
        // Fractional input yields 112L when read as BIGINT (per expected row).
        .json("{\"id\":112.013}")
        .rowType(ROW(FIELD("id", BIGINT())))
        .expect(Row.of(112L)),
    TestSpec
        .json("{\"id\":\"long\"}")
        .rowType(ROW(FIELD("id", BIGINT())))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"long\"}'."),
    TestSpec
        .json("{\"id\":\"112.013.123\"}")
        .rowType(ROW(FIELD("id", FLOAT())))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"112.013.123\"}'."),
    TestSpec
        .json("{\"id\":\"112.013.123\"}")
        .rowType(ROW(FIELD("id", DOUBLE())))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"112.013.123\"}'."),
    TestSpec
        .json("{\"id\":\"18:00:243\"}")
        .rowType(ROW(FIELD("id", TIME())))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"18:00:243\"}'."),
    // NOTE(review): exact duplicate of the previous TIME spec -- likely copy-paste.
    TestSpec
        .json("{\"id\":\"18:00:243\"}")
        .rowType(ROW(FIELD("id", TIME())))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"18:00:243\"}'."),
    TestSpec
        .json("{\"id\":\"20191112\"}")
        .rowType(ROW(FIELD("id", DATE())))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"20191112\"}'."),
    // NOTE(review): exact duplicate of the previous DATE spec -- likely copy-paste.
    TestSpec
        .json("{\"id\":\"20191112\"}")
        .rowType(ROW(FIELD("id", DATE())))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"20191112\"}'."),
    // Scalars, arrays and objects can all be read as STRING verbatim.
    TestSpec
        .json("{\"id\":true}")
        .rowType(ROW(FIELD("id", STRING())))
        .expect(Row.of("true")),
    TestSpec
        .json("{\"id\":123.234}")
        .rowType(ROW(FIELD("id", STRING())))
        .expect(Row.of("123.234")),
    TestSpec
        .json("{\"id\":1234567}")
        .rowType(ROW(FIELD("id", STRING())))
        .expect(Row.of("1234567")),
    TestSpec
        .json("{\"id\":\"string field\"}")
        .rowType(ROW(FIELD("id", STRING())))
        .expect(Row.of("string field")),
    TestSpec
        .json("{\"id\":[\"array data1\",\"array data2\",123,234.345]}")
        .rowType(ROW(FIELD("id", STRING())))
        .expect(Row.of("[\"array data1\",\"array data2\",123,234.345]")),
    TestSpec
        .json("{\"id\":{\"k1\":123,\"k2\":234.234,\"k3\":\"string data\"}}")
        .rowType(ROW(FIELD("id", STRING())))
        .expect(Row.of("{\"k1\":123,\"k2\":234.234,\"k3\":\"string data\"}")),
    // Timestamp strings whose layout disagrees with the configured format fail.
    TestSpec
        .json("{\"id\":\"2019-11-12 18:00:12\"}")
        .rowType(ROW(FIELD("id", TIMESTAMP(0))))
        .timestampFormat(TimestampFormat.ISO_8601)
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12 18:00:12\"}'."),
    TestSpec
        .json("{\"id\":\"2019-11-12T18:00:12\"}")
        .rowType(ROW(FIELD("id", TIMESTAMP(0))))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12\"}'."),
    TestSpec
        .json("{\"id\":\"2019-11-12T18:00:12Z\"}")
        .rowType(ROW(FIELD("id", TIMESTAMP(0))))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12Z\"}'."),
    TestSpec
        .json("{\"id\":\"2019-11-12T18:00:12Z\"}")
        .rowType(ROW(FIELD("id", TIMESTAMP(0))))
        .timestampFormat(TimestampFormat.ISO_8601)
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12Z\"}'."),
    TestSpec
        .json("{\"id\":\"abc\"}")
        .rowType(ROW(FIELD("id", DECIMAL(10, 3))))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"abc\"}'."),
    TestSpec
        .json("{\"row\":{\"id\":\"abc\"}}")
        .rowType(ROW(FIELD("row", ROW(FIELD("id", BOOLEAN())))))
        .expect(Row.of(Row.of(false))),
    // Partially parsable containers: bad elements become null in lenient mode
    // but the same input still fails in strict mode.
    TestSpec
        .json("{\"array\":[123, \"abc\"]}")
        .rowType(ROW(FIELD("array", ARRAY(INT()))))
        .expect(Row.of((Object) new Integer[]{123, null}))
        .expectErrorMessage("Failed to deserialize JSON '{\"array\":[123, \"abc\"]}'."),
    TestSpec
        .json("{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}")
        .rowType(ROW(FIELD("map", MAP(STRING(), INT()))))
        .expect(Row.of(createHashMap("key1", 123, "key2", null)))
        .expectErrorMessage("Failed to deserialize JSON '{\"map\":{\"key1\":\"123\", \"key2\":\"abc\"}}'."),
    TestSpec
        .json("{\"id\":\"2019-11-12T18:00:12\"}")
        .rowType(ROW(FIELD("id", TIMESTAMP_WITH_LOCAL_TIME_ZONE(0))))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12\"}'."),
    TestSpec
        .json("{\"id\":\"2019-11-12T18:00:12+0800\"}")
        .rowType(ROW(FIELD("id", TIMESTAMP_WITH_LOCAL_TIME_ZONE(0))))
        .expectErrorMessage("Failed to deserialize JSON '{\"id\":\"2019-11-12T18:00:12+0800\"}'."),
    TestSpec
        // High-precision decimal must be preserved exactly.
        .json("{\"id\":1,\"factor\":799.929496989092949698}")
        .rowType(ROW(FIELD("id", INT()), FIELD("factor", DECIMAL(38, 18))))
        .expect(Row.of(1, new BigDecimal("799.929496989092949698")))
);
/**
 * Builds a mutable two-entry map from the given key/value pairs.
 * Null values (and keys) are permitted, unlike {@code Map.of}.
 */
private static Map<String, Integer> createHashMap(String k1, Integer v1, String k2, Integer v2) {
    String[] keys = {k1, k2};
    Integer[] values = {v1, v2};
    Map<String, Integer> entries = new HashMap<>();
    for (int i = 0; i < keys.length; i++) {
        entries.put(keys[i], values[i]);
    }
    return entries;
}
/**
 * Converts an internal {@code RowData} into an external {@code Row} using the
 * converter derived from the given logical {@code DataType}.
 */
@SuppressWarnings("unchecked")
private static Row convertToExternal(RowData rowData, DataType dataType) {
    return (Row) DataFormatConverters.getConverterForDataType(dataType).toExternal(rowData);
}
/**
 * One parse test case: a JSON payload plus the target row type, the expected
 * result row (lenient parsing) and/or the expected error message (strict
 * parsing). Instances are built fluently starting from {@link #json(String)}.
 */
private static class TestSpec {
    // The JSON document under test.
    private final String json;
    // Target logical row type the JSON is parsed into.
    private RowType rowType;
    // Timestamp parsing format; SQL unless overridden per spec.
    private TimestampFormat timestampFormat = TimestampFormat.SQL;
    // Expected row when parse errors are ignored (null means a one-field null row).
    private Row expected;
    // Expected failure message when parse errors are not ignored (null = none).
    private String errorMessage;

    private TestSpec(String json) {
        this.json = json;
    }

    /** Entry point of the fluent builder. */
    public static TestSpec json(String json) {
        return new TestSpec(json);
    }

    TestSpec expect(Row row) {
        this.expected = row;
        return this;
    }

    TestSpec rowType(DataType rowType) {
        this.rowType = (RowType) rowType.getLogicalType();
        return this;
    }

    TestSpec expectErrorMessage(String errorMessage) {
        this.errorMessage = errorMessage;
        return this;
    }

    TestSpec timestampFormat(TimestampFormat timestampFormat){
        this.timestampFormat = timestampFormat;
        return this;
    }
}
}
| apache-2.0 |
ywqian/project | src/com/xlink/linkwil/sdk/DeviceInfo.java | 345 | package com.xlink.linkwil.sdk;
/**
 * Plain data holder for device information reported through the SDK.
 * <p>
 * Fields are intentionally public and mutable; the type is used as a simple
 * transfer object with no behavior.
 */
public class DeviceInfo {
    // Human-readable device name.
    public String devName;
    // Unique device identifier.
    public String devId;
    // Device clock: date components.
    public int year;
    public int mon;
    public int day;
    // Time zone value as reported by the device (units not defined here).
    public int zone;
    // Device clock: time components.
    public int hour;
    public int min;
    public int sec;
    // Firmware version string.
    public String firmwareVer;
    // OEM flag reported by the device.
    public int oemFlag;
    // Numeric version code reported by the device.
    public int versionCode;
    // Firmware build date string.
    public String buildDate;
}
| apache-2.0 |
spring-projects/spring-data-examples | jpa/deferred/src/main/java/example/repo/Customer520Repository.java | 280 | package example.repo;
import example.model.Customer520;
import java.util.List;
import org.springframework.data.repository.CrudRepository;
/** Spring Data CRUD repository for {@code Customer520} entities. */
public interface Customer520Repository extends CrudRepository<Customer520, Long> {

    /** Derived query: returns all customers with the given last name. */
    List<Customer520> findByLastName(String lastName);
}
| apache-2.0 |
mjuhasz/BDSup2Sub | src/main/java/bdsup2sub/core/InputMode.java | 874 | /*
* Copyright 2014 Miklos Juhasz (mjuhasz)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package bdsup2sub.core;
/** Supported subtitle input stream formats. */
public enum InputMode {
    /** DVD SUB/IDX (VobSub) stream */
    VOBSUB,
    /** Blu-Ray SUP stream */
    BDSUP,
    /** Sony BDN XML (+PNGs) */
    XML,
    /** HD-DVD SUP stream */
    HDDVDSUP,
    /** DVD SUP/IFO stream */
    SUPIFO
}
| apache-2.0 |
awajid/daytrader | modules/web/src/main/java/org/apache/geronimo/samples/daytrader/web/TradeAppServlet.java | 8580 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geronimo.samples.daytrader.web;
import javax.servlet.*;
import javax.servlet.http.*;
import org.apache.geronimo.samples.daytrader.core.direct.*;
import org.apache.geronimo.samples.daytrader.util.*;
import java.io.IOException;
/**
*
* TradeAppServlet provides the standard web interface to Trade and can be
* accessed with the Go Trade! link. Driving benchmark load using this interface
* requires a sophisticated web load generator that is capable of filling HTML
* forms and posting dynamic data.
*/
public class TradeAppServlet extends HttpServlet {
/**
* Servlet initialization method.
*/
public void init(ServletConfig config) throws ServletException {
super.init(config);
java.util.Enumeration en = config.getInitParameterNames();
while (en.hasMoreElements()) {
String parm = (String) en.nextElement();
String value = config.getInitParameter(parm);
TradeConfig.setConfigParam(parm, value);
}
try {
if (TradeConfig.runTimeMode == TradeConfig.JDBC) {
TradeJDBCDirect.init();
} else if (TradeConfig.runTimeMode == TradeConfig.JPA) {
TradeJPADirect.init();
} else {
TradeJEEDirect.init();
}
} catch (Exception e) {
Log.error(e, "TradeAppServlet:init -- Error initializing TradeDirect");
}
}
/**
* Returns a string that contains information about TradeScenarioServlet
*
* @return The servlet information
*/
public java.lang.String getServletInfo() {
return "TradeAppServlet provides the standard web interface to Trade";
}
/**
* Process incoming HTTP GET requests
*
* @param request
* Object that encapsulates the request to the servlet
* @param response
* Object that encapsulates the response from the servlet
*/
public void doGet(javax.servlet.http.HttpServletRequest request,
javax.servlet.http.HttpServletResponse response)
throws ServletException, IOException {
performTask(request, response);
}
/**
* Process incoming HTTP POST requests
*
* @param request
* Object that encapsulates the request to the servlet
* @param response
* Object that encapsulates the response from the servlet
*/
public void doPost(javax.servlet.http.HttpServletRequest request,
javax.servlet.http.HttpServletResponse response)
throws ServletException, IOException {
performTask(request, response);
}
/**
* Main service method for TradeAppServlet
*
* @param request
* Object that encapsulates the request to the servlet
* @param response
* Object that encapsulates the response from the servlet
*/
public void performTask(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
String action = null;
String userID = null;
// String to create full dispatch path to TradeAppServlet w/ request
// Parameters
String dispPath = null; // Dispatch Path to TradeAppServlet
resp.setContentType("text/html");
TradeServletAction tsAction = new TradeServletAction();
// Dyna - need status string - prepended to output
action = req.getParameter("action");
ServletContext ctx = getServletConfig().getServletContext();
if (action == null) {
tsAction.doWelcome(ctx, req, resp, "");
return;
} else if (action.equals("login")) {
userID = req.getParameter("uid");
String passwd = req.getParameter("passwd");
String inScenario = req.getParameter("inScenario");
try {
tsAction.doLogin(ctx, req, resp, userID, passwd);
} catch (ServletException se) {
tsAction.doWelcome(ctx, req, resp, se.getMessage());
}
return;
} else if (action.equals("register")) {
userID = req.getParameter("user id");
String passwd = req.getParameter("passwd");
String cpasswd = req.getParameter("confirm passwd");
String fullname = req.getParameter("Full Name");
String ccn = req.getParameter("Credit Card Number");
String money = req.getParameter("money");
String email = req.getParameter("email");
String smail = req.getParameter("snail mail");
tsAction.doRegister(ctx, req, resp, userID, passwd, cpasswd,
fullname, ccn, money, email, smail);
return;
}
// The rest of the operations require the user to be logged in -
// Get the Session and validate the user.
HttpSession session = req.getSession();
userID = (String) session.getAttribute("uidBean");
if (userID == null) {
System.out
.println("TradeAppServlet service error: User Not Logged in");
tsAction.doWelcome(ctx, req, resp, "User Not Logged in");
return;
}
if (action.equals("quotes")) {
String symbols = req.getParameter("symbols");
tsAction.doQuotes(ctx, req, resp, userID, symbols);
} else if (action.equals("buy")) {
String symbol = req.getParameter("symbol");
String quantity = req.getParameter("quantity");
tsAction.doBuy(ctx, req, resp, userID, symbol, quantity);
} else if (action.equals("sell")) {
int holdingID = Integer.parseInt(req.getParameter("holdingID"));
tsAction.doSell(ctx, req, resp, userID, new Integer(holdingID));
} else if (action.equals("portfolio")
|| action.equals("portfolioNoEdge")) {
tsAction.doPortfolio(ctx, req, resp, userID, "Portfolio as of "
+ new java.util.Date());
} else if (action.equals("logout")) {
tsAction.doLogout(ctx, req, resp, userID);
} else if (action.equals("home")) {
tsAction.doHome(ctx, req, resp, userID, "Ready to Trade");
} else if (action.equals("account")) {
tsAction.doAccount(ctx, req, resp, userID, "");
} else if (action.equals("update_profile")) {
String password = req.getParameter("password");
String cpassword = req.getParameter("cpassword");
String fullName = req.getParameter("fullname");
String address = req.getParameter("address");
String creditcard = req.getParameter("creditcard");
String email = req.getParameter("email");
tsAction.doAccountUpdate(ctx, req, resp, userID,
password == null ? "" : password.trim(),
cpassword == null ? "" : cpassword.trim(),
fullName == null ? "" : fullName.trim(),
address == null ? "" : address.trim(),
creditcard == null ? "" : creditcard.trim(),
email == null ? "" : email.trim());
} else {
System.out.println("TradeAppServlet: Invalid Action=" + action);
tsAction.doWelcome(ctx, req, resp,
"TradeAppServlet: Invalid Action" + action);
}
}
private void sendRedirect(HttpServletResponse resp, String page)
throws ServletException, IOException {
resp.sendRedirect(resp.encodeRedirectURL(page));
}
// URL Path Prefix for dispatching to TradeAppServlet
private final static String tasPathPrefix = "/app?action=";
} | apache-2.0 |
thiagomarques2015/tenebris | src/main/java/view/RecommendationByProfileServlet.java | 1787 | package view;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import control.RequestManager;
import control.recomendation.rate.RecommendRequest;
import model.RecommendParameters;
/**
 * Servlet implementation class RecommendationByProfileServlet.
 * URL : {PATH}/obra/recomendacao/perfil/{PARAMS}
 */
@WebServlet("/obra/recomendacao/perfil")
public class RecommendationByProfileServlet extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /**
     * @see HttpServlet#HttpServlet()
     */
    public RecommendationByProfileServlet() {
        super();
    }

    /**
     * Builds the recommendation parameters from the request and executes a
     * profile-based recommendation, writing the result to the response.
     *
     * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        try {
            // Cria o pedido recomendacao
            RecommendParameters recommend = RecommendRequest.getInstance().create(request);
            // Executa a recomendacao
            RequestManager.getInstance()
                    .action(RequestManager.RECOMENDAR_POR_PERFIL)
                    .setResponse(response)
                    .setRecommend(recommend)
                    .execute();
        } catch (Exception e) {
            // Previously this only printed the stack trace and returned an
            // empty 200 response. Log through the container and report a 500
            // to the client instead (unless output has already been committed).
            log("Recommendation by profile failed", e);
            if (!response.isCommitted()) {
                response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
            }
        }
    }

    /**
     * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        doGet(request, response);
    }
}
| apache-2.0 |
jacksonTod/personalDesignPattern | DesignPattern/src/com/hu/structure/adapter/demo1/Target.java | 272 | package com.hu.structure.adapter.demo1;
/**
 * Target interface of the adapter pattern demo: the API clients program
 * against. An adapter implements this interface and delegates to the
 * adaptee (source class) where possible.
 */
public interface Target {
    /**
     * Method that the adaptee (source class) already provides.
     * (Original comment: 这是源类Adaptee也有的方法)
     */
    public void sampleOperation1();
    /**
     * Method that the adaptee (source class) does not provide; the adapter
     * must supply it. (Original comment: 这是源类Adaptee没有的方法)
     */
    public void sampleOperation2();
}
| apache-2.0 |
jomof/cdep | cdep/src/main/java/io/cdep/cdep/RewritingVisitor.java | 10433 | /*
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.cdep.cdep;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.abort;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.archive;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.array;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.assign;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.assignmentBlock;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.constant;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.ifSwitch;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.invoke;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.module;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.multi;
import static io.cdep.cdep.ast.finder.ExpressionBuilder.nop;
import io.cdep.annotations.NotNull;
import io.cdep.annotations.Nullable;
import io.cdep.cdep.ast.finder.AbortExpression;
import io.cdep.cdep.ast.finder.ArrayExpression;
import io.cdep.cdep.ast.finder.AssignmentBlockExpression;
import io.cdep.cdep.ast.finder.AssignmentExpression;
import io.cdep.cdep.ast.finder.AssignmentReferenceExpression;
import io.cdep.cdep.ast.finder.ConstantExpression;
import io.cdep.cdep.ast.finder.ExampleExpression;
import io.cdep.cdep.ast.finder.Expression;
import io.cdep.cdep.ast.finder.ExternalFunctionExpression;
import io.cdep.cdep.ast.finder.FindModuleExpression;
import io.cdep.cdep.ast.finder.FunctionTableExpression;
import io.cdep.cdep.ast.finder.GlobalBuildEnvironmentExpression;
import io.cdep.cdep.ast.finder.IfSwitchExpression;
import io.cdep.cdep.ast.finder.InvokeFunctionExpression;
import io.cdep.cdep.ast.finder.ModuleArchiveExpression;
import io.cdep.cdep.ast.finder.ModuleExpression;
import io.cdep.cdep.ast.finder.MultiStatementExpression;
import io.cdep.cdep.ast.finder.NopExpression;
import io.cdep.cdep.ast.finder.ParameterExpression;
import io.cdep.cdep.ast.finder.StatementExpression;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Base visitor that produces a rewritten copy of an {@code Expression} tree.
 * <p>
 * The default implementation of every visit method reconstructs the node from
 * the rewritten children, so a subclass only needs to override the node types
 * it wants to transform. Results are memoized per input node in
 * {@link #identity}, which also guarantees that a shared sub-expression is
 * rewritten once and keeps referential identity in the output tree.
 */
@SuppressWarnings("unused")
public class RewritingVisitor {
  // Memoization cache: original node -> rewritten node.
  private final Map<Expression, Expression> identity = new HashMap<>();

  /**
   * Rewrites {@code expr}, returning the cached result if this node was
   * already visited.
   */
  @NotNull
  public Expression visit(@NotNull Expression expr) {
    Expression prior = identity.get(expr);
    if (prior != null) {
      return prior;
    }
    // Compute once, record in the cache, then re-enter visit() which now
    // returns the cached value.
    identity.put(expr, visitNoIdentity(expr));
    return visit(expr);
  }

  /** Null-tolerant variant of {@link #visit(Expression)}. */
  @Nullable
  private Expression visitMaybeNull(@Nullable Expression expr) {
    if (expr == null) {
      return null;
    }
    return this.visit(expr);
  }

  /**
   * Dispatches to the type-specific visit method based on the node's exact
   * runtime class (no subclass matching). Throws for unknown node types.
   * <p>
   * NOTE(review): declared {@code @Nullable}, yet {@link #visit} is
   * {@code @NotNull} and stores the result unchecked -- confirm whether any
   * visit method can actually return null.
   */
  @Nullable
  private Expression visitNoIdentity(@NotNull Expression expr) {
    if (expr.getClass().equals(FunctionTableExpression.class)) {
      return visitFunctionTableExpression((FunctionTableExpression) expr);
    }
    if (expr.getClass().equals(FindModuleExpression.class)) {
      return visitFindModuleExpression((FindModuleExpression) expr);
    }
    if (expr.getClass().equals(ParameterExpression.class)) {
      return visitParameterExpression((ParameterExpression) expr);
    }
    if (expr.getClass().equals(IfSwitchExpression.class)) {
      return visitIfSwitchExpression((IfSwitchExpression) expr);
    }
    if (expr.getClass().equals(ConstantExpression.class)) {
      return visitConstantExpression((ConstantExpression) expr);
    }
    if (expr.getClass().equals(AssignmentExpression.class)) {
      return visitAssignmentExpression((AssignmentExpression) expr);
    }
    if (expr.getClass().equals(InvokeFunctionExpression.class)) {
      return visitInvokeFunctionExpression((InvokeFunctionExpression) expr);
    }
    if (expr.getClass().equals(ModuleExpression.class)) {
      return visitModuleExpression((ModuleExpression) expr);
    }
    if (expr.getClass().equals(AbortExpression.class)) {
      return visitAbortExpression((AbortExpression) expr);
    }
    if (expr.getClass().equals(ExampleExpression.class)) {
      return visitExampleExpression((ExampleExpression) expr);
    }
    if (expr.getClass().equals(ExternalFunctionExpression.class)) {
      return visitExternalFunctionExpression((ExternalFunctionExpression) expr);
    }
    if (expr.getClass().equals(ArrayExpression.class)) {
      return visitArrayExpression((ArrayExpression) expr);
    }
    if (expr.getClass().equals(ModuleArchiveExpression.class)) {
      return visitModuleArchiveExpression((ModuleArchiveExpression) expr);
    }
    if (expr.getClass().equals(AssignmentBlockExpression.class)) {
      return visitAssignmentBlockExpression((AssignmentBlockExpression) expr);
    }
    if (expr.getClass().equals(AssignmentReferenceExpression.class)) {
      return visitAssignmentReferenceExpression((AssignmentReferenceExpression) expr);
    }
    if (expr.getClass().equals(MultiStatementExpression.class)) {
      return visitMultiStatementExpression((MultiStatementExpression) expr);
    }
    if (expr.getClass().equals(NopExpression.class)) {
      return visitNopExpression((NopExpression) expr);
    }
    if (expr.getClass().equals(GlobalBuildEnvironmentExpression.class)) {
      return visitGlobalBuildEnvironmentExpression((GlobalBuildEnvironmentExpression) expr);
    }
    throw new RuntimeException("rw" + expr.getClass().toString());
  }

  // Leaf node: returned unchanged by default.
  protected Expression visitGlobalBuildEnvironmentExpression(GlobalBuildEnvironmentExpression expr) {
    return expr;
  }

  // Leaf node: returned unchanged by default.
  private Expression visitAssignmentReferenceExpression(AssignmentReferenceExpression expr) {
    return expr;
  }

  @NotNull
  private Expression visitAssignmentBlockExpression(@NotNull AssignmentBlockExpression expr) {
    return assignmentBlock(visitList(expr.assignments), (StatementExpression) visit(expr.statement));
  }

  /** Rewrites each assignment in order, preserving list order. */
  @NotNull
  private List<AssignmentExpression> visitList(@NotNull List<AssignmentExpression> assignments) {
    List<AssignmentExpression> result = new ArrayList<>();
    for (AssignmentExpression assignment : assignments) {
      result.add((AssignmentExpression) visit(assignment));
    }
    return result;
  }

  @NotNull
  private Expression visitArrayExpression(@NotNull ArrayExpression expr) {
    return array(visitArray(expr.elements));
  }

  private Expression visitExternalFunctionExpression(ExternalFunctionExpression expr) {
    // Don't rewrite since identity is used for lookup.
    return expr;
  }

  @NotNull
  private Expression visitExampleExpression(@NotNull ExampleExpression expr) {
    return new ExampleExpression(expr.sourceCode);
  }

  @NotNull
  private Expression visitAbortExpression(@NotNull AbortExpression expr) {
    return abort(expr.message, visitArray(expr.parameters));
  }

  protected Expression visitModuleExpression(@NotNull ModuleExpression expr) {
    return module((ModuleArchiveExpression) visit(expr.archive), expr.dependencies);
  }

  @NotNull
  private Expression visitModuleArchiveExpression(@NotNull ModuleArchiveExpression expr) {
    // Only includePath and libraryPaths are rewritten; the remaining fields
    // are copied through untouched.
    return archive(
        expr.file,
        expr.sha256,
        expr.size,
        expr.include,
        visitMaybeNull(expr.includePath),
        expr.libs,
        visitExpressionArray(expr.libraryPaths),
        expr.requires);
  }

  // NOTE(review): duplicates visitArray(Expression[]) below; one could call
  // the other.
  @NotNull
  private Expression[] visitExpressionArray(@NotNull Expression[] libraryPaths) {
    Expression result[] = new Expression[libraryPaths.length];
    for (int i = 0; i < libraryPaths.length; ++i) {
      result[i] = visit(libraryPaths[i]);
    }
    return result;
  }

  @NotNull
  protected Expression visitInvokeFunctionExpression(@NotNull InvokeFunctionExpression expr) {
    return invoke((ExternalFunctionExpression) visit(expr.function), visitArray(expr.parameters));
  }

  /** Rewrites every element of the array, preserving order. */
  @NotNull
  private Expression[] visitArray(@NotNull Expression[] array) {
    Expression result[] = new Expression[array.length];
    for (int i = 0; i < array.length; ++i) {
      result[i] = visit(array[i]);
    }
    return result;
  }

  @NotNull
  Expression visitAssignmentExpression(@NotNull AssignmentExpression expr) {
    return assign(expr.name, visit(expr.expression))	;
  }

  @NotNull
  private Expression visitConstantExpression(@NotNull ConstantExpression expr) {
    return constant(expr.value);
  }

  @NotNull
  Expression visitIfSwitchExpression(@NotNull IfSwitchExpression expr) {
    return ifSwitch(visitArray(expr.conditions), visitArray(expr.expressions), visit(expr.elseExpression));
  }

  // Leaf node: returned unchanged by default.
  @NotNull
  private Expression visitParameterExpression(@NotNull ParameterExpression expr) {
    return expr;
  }

  @NotNull
  Expression visitFindModuleExpression(@NotNull FindModuleExpression expr) {
    return new FindModuleExpression(
        (GlobalBuildEnvironmentExpression) visit(expr.globals),
        expr.coordinate,
        expr.headerArchive,
        expr.include,
        (StatementExpression) visit(expr.body));
  }

  /**
   * Rewrites every find-function and example in the table into a new table;
   * the globals reference is carried over as-is.
   */
  @NotNull
  public Expression visitFunctionTableExpression(@NotNull FunctionTableExpression expr) {
    FunctionTableExpression newExpr = new FunctionTableExpression(expr.globals);
    for (Coordinate coordinate : expr.findFunctions.keySet()) {
      newExpr.findFunctions.put(coordinate, (StatementExpression) visit(expr.findFunctions.get(coordinate)));
    }
    for (Coordinate coordinate : expr.examples.keySet()) {
      newExpr.examples.put(coordinate, (ExampleExpression) visit(expr.examples.get(coordinate)));
    }
    return newExpr;
  }

  @NotNull
  private StatementExpression[] visitStatementExpressionArray(@NotNull StatementExpression[] array) {
    StatementExpression result[] = new StatementExpression[array.length];
    for (int i = 0; i < array.length; ++i) {
      result[i] = (StatementExpression) visit(array[i]);
    }
    return result;
  }

  @NotNull
  private Expression visitMultiStatementExpression(@NotNull MultiStatementExpression expr) {
    return multi(visitStatementExpressionArray(expr.statements));
  }

  @NotNull
  private Expression visitNopExpression(@NotNull NopExpression expr) {
    return nop();
  }
}
| apache-2.0 |
liuzyw/study-hello | spring-cloud-feign-consume/src/main/java/com/study/cloud/feign/controller/FeignConsumeController.java | 1483 | package com.study.cloud.feign.controller;
import com.study.cloud.feign.FeignConsumeClient;
import com.study.cloud.feign.HelloClient;
import com.study.cloud.feign.entity.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
/**
* Created on 2018-02-20
*
* @author liuzhaoyuan
*/
/**
 * REST controller that exposes the remote "hello" service endpoints by
 * delegating every call to the Feign {@code HelloClient}.
 */
@RestController
public class FeignConsumeController {

    @Autowired
    private HelloClient helloClient;

    /** Delegates to the remote hello endpoint. */
    @RequestMapping(method = RequestMethod.GET, value = "/hello")
    public String hello() {
        return helloClient.hello();
    }

    /**
     * Looks up the age of the named user via the remote service.
     *
     * @param name user name taken from the URL path
     */
    @RequestMapping(method = RequestMethod.GET, value = "/getUserAge/{name}",
            produces = MediaType.APPLICATION_JSON_VALUE)
    public int getUserAge(@PathVariable("name") String name) {
        return helloClient.getUserAge(name);
    }

    /** Fetches a user object from the remote service. */
    @RequestMapping(method = RequestMethod.GET, value = "/getRestUser")
    public User getRestUser() {
        return helloClient.getRestUser();
    }
}
| apache-2.0 |
Ariah-Group/Finance | af_webapp/src/main/java/org/kuali/kfs/module/purap/util/PurapAccountingLineComparator.java | 1686 | /*
* Copyright 2009 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.module.purap.util;
import java.util.Comparator;
import org.kuali.kfs.sys.businessobject.AccountingLine;
public class PurapAccountingLineComparator implements Comparator<AccountingLine> {

    /**
     * Compares two accounting lines by account number, breaking ties with the
     * financial object code, in ascending order.
     * <p>
     * Any null line, null account number, or (on an account-number tie) null
     * object code makes the pair compare as equal (returns 0).
     * NOTE(review): treating nulls as equal makes this comparator formally
     * inconsistent for mixed null/non-null inputs -- confirm callers rely on
     * this leniency.
     *
     * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
     */
    public int compare(AccountingLine sal1, AccountingLine sal2) {
        if (sal1 == null || sal2 == null) {
            return 0;
        }
        String account1 = sal1.getAccountNumber();
        String account2 = sal2.getAccountNumber();
        if (account1 == null || account2 == null) {
            return 0;
        }
        int byAccount = account1.compareTo(account2);
        if (byAccount != 0) {
            return byAccount;
        }
        String objectCode1 = sal1.getFinancialObjectCode();
        String objectCode2 = sal2.getFinancialObjectCode();
        if (objectCode1 == null || objectCode2 == null) {
            return 0;
        }
        return objectCode1.compareTo(objectCode2);
    }
}
| apache-2.0 |
alibaba/nacos | test/naming-test/src/test/java/com/alibaba/nacos/test/naming/MultiTenant_InstanceAPI_ITCase.java | 26181 | /*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.nacos.test.naming;
import com.alibaba.nacos.Nacos;
import com.alibaba.nacos.api.PropertyKeyConst;
import com.alibaba.nacos.api.common.Constants;
import com.alibaba.nacos.api.naming.NamingFactory;
import com.alibaba.nacos.api.naming.NamingService;
import com.alibaba.nacos.api.naming.pojo.Instance;
import com.alibaba.nacos.client.naming.NacosNamingService;
import com.alibaba.nacos.common.utils.JacksonUtils;
import com.alibaba.nacos.test.base.Params;
import com.alibaba.nacos.test.utils.NamingTestUtils;
import com.fasterxml.jackson.databind.JsonNode;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.MultiValueMap;
import org.springframework.web.util.UriComponentsBuilder;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
import static com.alibaba.nacos.test.naming.NamingBase.TEST_GROUP_1;
import static com.alibaba.nacos.test.naming.NamingBase.TEST_GROUP_2;
import static com.alibaba.nacos.test.naming.NamingBase.TEST_PORT3_4_DOM_1;
import static com.alibaba.nacos.test.naming.NamingBase.randomDomainName;
/**
 * Integration tests for the instance HTTP API under multi-tenancy: instances
 * registered through clients bound to different namespaces (and groups) must be
 * isolated from one another when listed, fetched, updated or deleted over HTTP.
 *
 * @author nkorange
 */
@RunWith(SpringRunner.class)
@SpringBootTest(classes = Nacos.class, properties = {
        "server.servlet.context-path=/nacos"}, webEnvironment = SpringBootTest.WebEnvironment.DEFINED_PORT)
public class MultiTenant_InstanceAPI_ITCase {
    // Client bound to the default (public) namespace.
    private NamingService naming;
    // Client bound to "namespace-1".
    private NamingService naming1;
    // Client bound to "namespace-2".
    private NamingService naming2;
    @LocalServerPort
    private int port;
    @Autowired
    private TestRestTemplate restTemplate;
    private URL base;
    // NOTE(review): this field is never read by any test in this class.
    private final List<Instance> instances = Collections.emptyList();
    /**
     * Prepares the embedded server, waits until it reports "UP", then creates
     * one naming client per namespace used by the tests.
     */
    @Before
    public void init() throws Exception {
        NamingBase.prepareServer(port);
        String url = String.format("http://localhost:%d/", port);
        this.base = new URL(url);
        naming = NamingFactory.createNamingService("127.0.0.1" + ":" + port);
        // Poll until the server is ready to accept naming requests.
        while (true) {
            if (!"UP".equals(naming.getServerStatus())) {
                Thread.sleep(1000L);
                continue;
            }
            break;
        }
        Properties properties = new Properties();
        properties.put(PropertyKeyConst.NAMESPACE, "namespace-1");
        properties.put(PropertyKeyConst.SERVER_ADDR, "127.0.0.1" + ":" + port);
        naming1 = NamingFactory.createNamingService(properties);
        properties = new Properties();
        properties.put(PropertyKeyConst.NAMESPACE, "namespace-2");
        properties.put(PropertyKeyConst.SERVER_ADDR, "127.0.0.1" + ":" + port);
        naming2 = NamingFactory.createNamingService(properties);
    }
    /**
     * @TCDescription : multi-tenant instance registration, listInstance API
     * @TestStep : register the same service name in namespace-1, namespace-2 and the default namespace
     * @ExpectResult : each namespace only lists the instance registered within it
     */
    @Test
    public void multipleTenant_listInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, "22.22.22.22", 80);
        naming.registerInstance(serviceName, "33.33.33.33", 8888);
        TimeUnit.SECONDS.sleep(5L);
        String url = "/nacos/v1/ns/instance/list";
        ResponseEntity<String> response = request(url,
                Params.newParams().appendParam("serviceName", serviceName).appendParam("namespaceId", "namespace-1")
                        .done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals("11.11.11.11", json.get("hosts").get(0).get("ip").asText());
        // Querying without a namespaceId falls back to the default namespace.
        response = request(url, Params.newParams().appendParam("serviceName", serviceName).done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
    }
    /**
     * @TCDescription : multi-tenant registration across multiple groups, listInstance API
     * @TestStep : register the service under different groups in namespace-1 and the default namespace
     * @ExpectResult : listing is isolated both by namespace and by group
     */
    @Test
    public void multipleTenant_group_listInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, TEST_GROUP_1, "11.11.11.11", 80);
        naming1.registerInstance(serviceName, "22.22.22.22", 80);
        naming.registerInstance(serviceName, TEST_GROUP_1, "33.33.33.33", 8888);
        naming.registerInstance(serviceName, TEST_GROUP_2, "44.44.44.44", 8888);
        TimeUnit.SECONDS.sleep(5L);
        String url = "/nacos/v1/ns/instance/list";
        ResponseEntity<String> response = request(url,
                Params.newParams().appendParam("serviceName", serviceName).appendParam("namespaceId", "namespace-1")
                        .appendParam("groupName", TEST_GROUP_1).done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals("11.11.11.11", json.get("hosts").get(0).get("ip").asText());
        response = request(url,
                Params.newParams().appendParam("serviceName", serviceName).appendParam("groupName", TEST_GROUP_1)
                        .done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
        Assert.assertEquals("33.33.33.33", json.get("hosts").get(0).get("ip").asText());
    }
    /**
     * @TCDescription : multi-tenant instance registration, getInstance API
     * @TestStep : look up from namespace-2 an IP registered only in the default namespace
     * @ExpectResult : 404 for the cross-namespace lookup; the default namespace still resolves it
     */
    @Test
    public void multipleTenant_getInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, "22.22.22.22", 80);
        naming.registerInstance(serviceName, "33.33.33.33", 8888, "c1");
        TimeUnit.SECONDS.sleep(5L);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName)
                        .appendParam("ip", "33.33.33.33") // IP from another namespace — verifies tenant isolation
                        .appendParam("port", "8888").appendParam("namespaceId", "namespace-2").done(), String.class);
        Assert.assertEquals(HttpStatus.NOT_FOUND, response.getStatusCode());
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("clusters", "c1")
                        .appendParam("healthyOnly", "true").done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
        Assert.assertEquals("33.33.33.33", json.get("hosts").get(0).get("ip").asText());
    }
    /**
     * @TCDescription : multi-tenant registration across groups, getInstance API
     * @TestStep : look up from namespace-2 an IP only registered in the default namespace/group
     * @ExpectResult : 404 for the cross-namespace lookup; cluster filtering works in the default namespace
     */
    @Test
    public void multipleTenant_group_getInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, "22.22.22.22", 80);
        naming.registerInstance(serviceName, "33.33.33.33", 8888, "c1");
        naming.registerInstance(serviceName, "44.44.44.44", 8888, "c2");
        TimeUnit.SECONDS.sleep(5L);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("groupName", TEST_GROUP_1)
                        .appendParam("ip", "33.33.33.33") // IP absent from this namespace — verifies isolation
                        .appendParam("port", "8888").appendParam("namespaceId", "namespace-2").done(), String.class);
        Assert.assertEquals(HttpStatus.NOT_FOUND, response.getStatusCode());
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("clusters", "c2")
                        .appendParam("healthyOnly", "true").done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
        Assert.assertEquals("44.44.44.44", json.get("hosts").get(0).get("ip").asText());
    }
    /**
     * @TCDescription : multi-tenant registration, deleteInstance API — delete an IP that
     *                  does not exist in namespace-1
     * @TestStep :
     * @ExpectResult : the delete succeeds but the default namespace's instances are untouched
     */
    @Test
    @Ignore("nacos 2.0 will not use beat to ensure healthy status")
    public void multipleTenant_deleteInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, "22.22.22.22", 80);
        naming.registerInstance(serviceName, "33.33.33.33", 8888);
        naming.registerInstance(serviceName, "44.44.44.44", 8888);
        TimeUnit.SECONDS.sleep(3L);
        // In AP mode the client heartbeat must be removed before deleting an instance over HTTP.
        NacosNamingService namingServiceImpl = (NacosNamingService) naming2;
        NamingTestUtils.getBeatReactorByReflection(namingServiceImpl)
                .removeBeatInfo(Constants.DEFAULT_GROUP + Constants.SERVICE_INFO_SPLITER + serviceName, "33.33.33.33",
                        8888);
        TimeUnit.SECONDS.sleep(3L);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("ip", "33.33.33.33")
                        .appendParam("port", "8888").appendParam("namespaceId", "namespace-1") // delete an IP absent from namespace-1
                        .done(), String.class, HttpMethod.DELETE);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // fetch instances from the default namespace
                        .done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(2, json.get("hosts").size());
    }
    /**
     * @TCDescription : multi-tenant registration across groups, deleteInstance API —
     *                  normal deletion of a registered IP
     * @TestStep :
     * @ExpectResult : the instance is gone from its namespace/group after deletion
     */
    @Test
    @Ignore("nacos 2.0 will not use beat to ensure healthy status")
    public void multipleTenant_group_deleteInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, TEST_GROUP_1, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, TEST_GROUP_2, "22.22.22.22", 80);
        TimeUnit.SECONDS.sleep(5L);
        // In AP mode the client heartbeat must be removed before deleting an instance over HTTP.
        NacosNamingService namingServiceImpl = (NacosNamingService) naming2;
        NamingTestUtils.getBeatReactorByReflection(namingServiceImpl)
                .removeBeatInfo(TEST_GROUP_2 + Constants.SERVICE_INFO_SPLITER + serviceName, "22.22.22.22", 80);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName)
                        .appendParam("namespaceId", "namespace-2") // delete from namespace-2 / TEST_GROUP_2
                        .appendParam("groupName", TEST_GROUP_2).appendParam("ip", "22.22.22.22")
                        .appendParam("port", TEST_PORT3_4_DOM_1).done(), String.class, HttpMethod.DELETE);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // the deleted instance must be gone
                        .appendParam("namespaceId", "namespace-2").appendParam("groupName", TEST_GROUP_2).done(),
                String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(0, json.get("hosts").size());
    }
    /**
     * @TCDescription : multi-tenant registration, putInstance API
     * @TestStep : PUT an instance without a namespaceId (default namespace)
     * @ExpectResult : only the default namespace is affected; namespace-1/2 keep one instance each
     */
    @Test
    public void multipleTenant_putInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, "22.22.22.22", 80);
        naming.registerInstance(serviceName, "33.33.33.33", 8888);
        naming.registerInstance(serviceName, "44.44.44.44", 8888);
        TimeUnit.SECONDS.sleep(5L);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("ip", "33.33.33.33")
                        .appendParam("port", "8888").done(), String.class, HttpMethod.PUT);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // instances in namespace-1
                        .appendParam("namespaceId", "namespace-1").done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
        // instance count in namespace-2
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // instances in namespace-2
                        .appendParam("namespaceId", "namespace-2").done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        json = JacksonUtils.toObj(response.getBody());
        System.out.println(json);
        Assert.assertEquals(1, json.get("hosts").size());
    }
    /**
     * @TCDescription : multi-tenant registration across groups, putInstance API — update an instance
     * @TestStep : PUT a new weight for an instance in namespace-2 / TEST_GROUP_2
     * @ExpectResult : the instance's weight is updated to 8.0
     */
    @Test
    public void multipleTenant_group_putInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, TEST_GROUP_2, "22.22.22.22", 80);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("groupName", TEST_GROUP_2)
                        .appendParam("ip", "22.22.22.22").appendParam("port", "80")
                        .appendParam("namespaceId", "namespace-2").appendParam("weight", "8.0").done(), String.class,
                HttpMethod.PUT);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        TimeUnit.SECONDS.sleep(5L);
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // read back the updated instance
                        .appendParam("namespaceId", "namespace-2").appendParam("groupName", TEST_GROUP_2).done(),
                String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
        Assert.assertEquals("8.0", json.get("hosts").get(0).get("weight").asText());
    }
    /**
     * @TCDescription : multi-tenant registration across groups, patchInstance API — partial update
     * @TestStep : PUT a weight, then PATCH the instance without specifying the weight
     * @ExpectResult : the PATCH preserves the previously-set weight of 8.0
     */
    @Test
    public void multipleTenant_group_patchInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, TEST_GROUP_2, "22.22.22.22", 80);
        TimeUnit.SECONDS.sleep(3L);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("groupName", TEST_GROUP_2)
                        .appendParam("ip", "22.22.22.22").appendParam("port", "80")
                        .appendParam("namespaceId", "namespace-2").appendParam("weight", "8.0").done(), String.class,
                HttpMethod.PUT);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("groupName", TEST_GROUP_2)
                        .appendParam("ip", "22.22.22.22").appendParam("port", "80")
                        .appendParam("namespaceId", "namespace-2").done(), String.class, HttpMethod.PATCH);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        TimeUnit.SECONDS.sleep(3L);
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // read back the patched instance
                        .appendParam("namespaceId", "namespace-2").appendParam("groupName", TEST_GROUP_2).done(),
                String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
        Assert.assertEquals("8.0", json.get("hosts").get(0).get("weight").asText());
    }
    /**
     * @TCDescription : multi-tenant registration — update (POST) an instance that does not yet exist
     * @TestStep :
     * @ExpectResult : the instance is created in namespace-1 only
     */
    @Test
    public void multipleTenant_updateInstance_notExsitInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, "22.22.22.22", 80);
        naming.registerInstance(serviceName, "33.33.33.33", 8888);
        naming.registerInstance(serviceName, "44.44.44.44", 8888);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("ip", "33.33.33.33")
                        .appendParam("port", "8888").appendParam("namespaceId", "namespace-1") // register a new instance
                        .done(), String.class, HttpMethod.POST);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        TimeUnit.SECONDS.sleep(5L);
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // instances in namespace-1
                        .appendParam("namespaceId", "namespace-1").done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(2, json.get("hosts").size());
        // instance count in namespace-2
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // instances in namespace-2
                        .appendParam("namespaceId", "namespace-2").done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
    }
    /**
     * @TCDescription : multi-tenant, multi-group registration — register (POST) an instance
     *                  that does not yet exist in the target group
     * @TestStep :
     * @ExpectResult : the instance is created under namespace-1 / TEST_GROUP_1
     */
    @Test
    public void multipleTenant_group_updateInstance_notExsitInstance() throws Exception {
        String serviceName = randomDomainName();
        naming1.registerInstance(serviceName, "11.11.11.11", 80);
        naming2.registerInstance(serviceName, "22.22.22.22", 80);
        TimeUnit.SECONDS.sleep(5L);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("ip", "33.33.33.33")
                        .appendParam("port", "8888").appendParam("namespaceId", "namespace-1") // register a new instance
                        .appendParam("groupName", TEST_GROUP_1).done(), String.class, HttpMethod.POST);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // read back from namespace-1 / TEST_GROUP_1
                        .appendParam("namespaceId", "namespace-1").appendParam("groupName", TEST_GROUP_1).done(),
                String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals("33.33.33.33", json.get("hosts").get(0).get("ip").asText());
    }
    /**
     * @TCDescription : multi-tenant registration — update (POST) an already-existing instance
     * @TestStep :
     * @ExpectResult : namespace-1 and namespace-2 each end up with exactly one instance
     */
    @Test
    public void multipleTenant_updateInstance() throws Exception {
        String serviceName = randomDomainName();
        naming2.registerInstance(serviceName, "22.22.22.22", 80);
        naming.registerInstance(serviceName, "33.33.33.33", 8888);
        naming.registerInstance(serviceName, "44.44.44.44", 8888);
        TimeUnit.SECONDS.sleep(5L);
        ResponseEntity<String> response = request("/nacos/v1/ns/instance",
                Params.newParams().appendParam("serviceName", serviceName).appendParam("ip", "11.11.11.11")
                        .appendParam("port", "80").appendParam("namespaceId", "namespace-1") // register in namespace-1
                        .done(), String.class, HttpMethod.POST);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // instances in namespace-1
                        .appendParam("namespaceId", "namespace-1").done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        JsonNode json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
        // instance count in namespace-2
        response = request("/nacos/v1/ns/instance/list",
                Params.newParams().appendParam("serviceName", serviceName) // instances in namespace-2
                        .appendParam("namespaceId", "namespace-2").done(), String.class);
        Assert.assertTrue(response.getStatusCode().is2xxSuccessful());
        json = JacksonUtils.toObj(response.getBody());
        Assert.assertEquals(1, json.get("hosts").size());
    }
    /**
     * Polls (up to ~60s) until the given naming client reports exactly {@code size}
     * instances for {@code serviceName}.
     * NOTE(review): currently unreferenced by the tests in this class.
     */
    private void verifyInstanceListForNaming(NamingService naming, int size, String serviceName) throws Exception {
        int i = 0;
        while (i < 20) {
            List<Instance> instances = naming.getAllInstances(serviceName);
            if (instances.size() == size) {
                break;
            } else {
                TimeUnit.SECONDS.sleep(3);
                i++;
            }
        }
    }
    // Convenience overload: issues a GET request.
    private <T> ResponseEntity<T> request(String path, MultiValueMap<String, String> params, Class<T> clazz) {
        return request(path, params, clazz, HttpMethod.GET);
    }
    // Sends an HTTP request with the given method and query parameters to the embedded server.
    private <T> ResponseEntity<T> request(String path, MultiValueMap<String, String> params, Class<T> clazz,
            HttpMethod httpMethod) {
        HttpHeaders headers = new HttpHeaders();
        HttpEntity<?> entity = new HttpEntity<T>(headers);
        UriComponentsBuilder builder = UriComponentsBuilder.fromHttpUrl(this.base.toString() + path)
                .queryParams(params);
        return this.restTemplate.exchange(builder.toUriString(), httpMethod, entity, clazz);
    }
}
| apache-2.0 |
ua-eas/ua-rice-2.1.9 | krad/krad-web-framework/src/main/java/org/kuali/rice/krad/uif/container/NavigationGroup.java | 2355 | /**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.uif.container;
import org.kuali.rice.krad.uif.component.Component;
import org.kuali.rice.krad.uif.field.ActionField;
import java.util.HashSet;
import java.util.Set;
/**
* Special <code>Group</code> that renders a navigation section
*
* <p>
* Only supports <code>ActionField</code> instances within the container. These
* are used to provide the items (or individual links) within the navigation.
* The navigationType determines how the navigation will be rendered (menu,
* tabs, dropdown, ...)
* </p>
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
public class NavigationGroup extends Group {
    private static final long serialVersionUID = -7263923392768546340L;

    // Rendering hint (e.g. menu, tabs, dropdown) consumed by the view layer.
    private String navigationType;

    public NavigationGroup() {
        super();
    }

    /**
     * Restricts the container's contents to {@link ActionField} instances,
     * which supply the individual navigation links.
     *
     * @see org.kuali.rice.krad.uif.container.ContainerBase#getSupportedComponents()
     */
    @Override
    public Set<Class<? extends Component>> getSupportedComponents() {
        Set<Class<? extends Component>> allowed = new HashSet<Class<? extends Component>>();
        allowed.add(ActionField.class);
        return allowed;
    }

    /**
     * Returns the kind of navigation to render (menu, tab, dropdown, ...),
     * used by the rendering script to pick an appropriate plug-in.
     *
     * @return String navigation type
     * @see org.kuali.rice.krad.uif.UifConstants.NavigationType
     */
    public String getNavigationType() {
        return navigationType;
    }

    /**
     * Sets the navigation type.
     *
     * @param navigationType the navigation type to render
     */
    public void setNavigationType(String navigationType) {
        this.navigationType = navigationType;
    }
}
| apache-2.0 |
dads-software-brotherhood/sekc | src/test/java/mx/infotec/dads/sekc/web/rest/UserResourceIntTest.java | 19082 | package mx.infotec.dads.sekc.web.rest;
import mx.infotec.dads.sekc.SekcApp;
import mx.infotec.dads.sekc.domain.User;
import mx.infotec.dads.sekc.repository.UserRepository;
import mx.infotec.dads.sekc.service.MailService;
import mx.infotec.dads.sekc.service.UserService;
import mx.infotec.dads.sekc.web.rest.errors.ExceptionTranslator;
import mx.infotec.dads.sekc.web.rest.vm.ManagedUserVM;
import static mx.infotec.dads.sekc.web.rest.util.ApiConstant.API_PATH;
import org.apache.commons.lang3.RandomStringUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.data.web.PageableHandlerMethodArgumentResolver;
import org.springframework.http.MediaType;
import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.delete;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hamcrest.Matchers.hasItem;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.*;
/**
* Test class for the UserResource REST controller.
*
* @see UserResource
*/
@RunWith(SpringRunner.class)
@SpringBootTest(classes = SekcApp.class)
public class UserResourceIntTest {
private static final String DEFAULT_LOGIN = "johndoe";
private static final String UPDATED_LOGIN = "jhipster";
private static final String DEFAULT_PASSWORD = "passjohndoe";
private static final String UPDATED_PASSWORD = "passjhipster";
private static final String DEFAULT_EMAIL = "johndoe@localhost";
private static final String UPDATED_EMAIL = "jhipster@localhost";
private static final String DEFAULT_FIRSTNAME = "john";
private static final String UPDATED_FIRSTNAME = "jhipsterFirstName";
private static final String DEFAULT_LASTNAME = "doe";
private static final String UPDATED_LASTNAME = "jhipsterLastName";
private static final String DEFAULT_IMAGEURL = "http://placehold.it/50x50";
private static final String UPDATED_IMAGEURL = "http://placehold.it/40x40";
private static final String DEFAULT_LANGKEY = "en";
private static final String UPDATED_LANGKEY = "fr";
@Autowired
private UserRepository userRepository;
@Autowired
private MailService mailService;
@Autowired
private UserService userService;
@Autowired
private MappingJackson2HttpMessageConverter jacksonMessageConverter;
@Autowired
private PageableHandlerMethodArgumentResolver pageableArgumentResolver;
@Autowired
private ExceptionTranslator exceptionTranslator;
private MockMvc restUserMockMvc;
private User user;
    // Builds a standalone MockMvc around UserResource so requests are handled
    // in-process, without a real servlet container.
    @Before
    public void setup() {
        MockitoAnnotations.initMocks(this);
        UserResource userResource = new UserResource(userRepository, mailService, userService);
        this.restUserMockMvc = MockMvcBuilders.standaloneSetup(userResource)
            .setCustomArgumentResolvers(pageableArgumentResolver)
            .setControllerAdvice(exceptionTranslator)
            .setMessageConverters(jacksonMessageConverter)
            .build();
    }
    /**
     * Creates an activated {@code User} populated with the DEFAULT_* constants
     * and a random 60-character password.
     *
     * <p>This is a static method, as tests for other entities might also need it,
     * if they test an entity which has a required relationship to the User entity.
     *
     * @return a new, unsaved user entity
     */
    public static User createEntity() {
        User user = new User();
        user.setLogin(DEFAULT_LOGIN);
        user.setPassword(RandomStringUtils.random(60));
        user.setActivated(true);
        user.setEmail(DEFAULT_EMAIL);
        user.setFirstName(DEFAULT_FIRSTNAME);
        user.setLastName(DEFAULT_LASTNAME);
        user.setImageUrl(DEFAULT_IMAGEURL);
        user.setLangKey(DEFAULT_LANGKEY);
        return user;
    }
    // Starts each test from an empty user collection with a fresh (unsaved) entity.
    @Before
    public void initTest() {
        userRepository.deleteAll();
        user = createEntity();
    }
    // POST /users with a valid payload must return 201 and persist the user
    // with all the submitted attributes.
    @Test
    public void createUser() throws Exception {
        int databaseSizeBeforeCreate = userRepository.findAll().size();

        // Create the User
        Set<String> autorities = new HashSet<>();
        autorities.add("ROLE_USER");
        ManagedUserVM managedUserVM = new ManagedUserVM(
            null,
            DEFAULT_LOGIN,
            DEFAULT_PASSWORD,
            DEFAULT_FIRSTNAME,
            DEFAULT_LASTNAME,
            DEFAULT_EMAIL,
            true,
            DEFAULT_IMAGEURL,
            DEFAULT_LANGKEY,
            null,
            null,
            null,
            null,
            autorities);

        restUserMockMvc.perform(post(API_PATH + "/users")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
            .andExpect(status().isCreated());

        // Validate the User in the database
        List<User> userList = userRepository.findAll();
        assertThat(userList).hasSize(databaseSizeBeforeCreate + 1);
        User testUser = userList.get(userList.size() - 1);
        assertThat(testUser.getLogin()).isEqualTo(DEFAULT_LOGIN);
        assertThat(testUser.getFirstName()).isEqualTo(DEFAULT_FIRSTNAME);
        assertThat(testUser.getLastName()).isEqualTo(DEFAULT_LASTNAME);
        assertThat(testUser.getEmail()).isEqualTo(DEFAULT_EMAIL);
        assertThat(testUser.getImageUrl()).isEqualTo(DEFAULT_IMAGEURL);
        assertThat(testUser.getLangKey()).isEqualTo(DEFAULT_LANGKEY);
    }
    // POST /users with a client-supplied id must be rejected with 400 and
    // must not create anything.
    @Test
    public void createUserWithExistingId() throws Exception {
        int databaseSizeBeforeCreate = userRepository.findAll().size();

        Set<String> autorities = new HashSet<>();
        autorities.add("ROLE_USER");
        ManagedUserVM managedUserVM = new ManagedUserVM(
            "1L",
            DEFAULT_LOGIN,
            DEFAULT_PASSWORD,
            DEFAULT_FIRSTNAME,
            DEFAULT_LASTNAME,
            DEFAULT_EMAIL,
            true,
            DEFAULT_IMAGEURL,
            DEFAULT_LANGKEY,
            null,
            null,
            null,
            null,
            autorities);

        // An entity with an existing ID cannot be created, so this API call must fail
        restUserMockMvc.perform(post(API_PATH + "/users")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
            .andExpect(status().isBadRequest());

        // Validate the User in the database
        List<User> userList = userRepository.findAll();
        assertThat(userList).hasSize(databaseSizeBeforeCreate);
    }
    // POST /users reusing an existing login (with a fresh email) must return 400
    // and leave the database unchanged.
    @Test
    public void createUserWithExistingLogin() throws Exception {
        // Initialize the database
        userRepository.save(user);
        int databaseSizeBeforeCreate = userRepository.findAll().size();

        Set<String> autorities = new HashSet<>();
        autorities.add("ROLE_USER");
        ManagedUserVM managedUserVM = new ManagedUserVM(
            null,
            DEFAULT_LOGIN, // this login should already be used
            DEFAULT_PASSWORD,
            DEFAULT_FIRSTNAME,
            DEFAULT_LASTNAME,
            "anothermail@localhost",
            true,
            DEFAULT_IMAGEURL,
            DEFAULT_LANGKEY,
            null,
            null,
            null,
            null,
            autorities);

        // Create the User
        restUserMockMvc.perform(post(API_PATH + "/users")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
            .andExpect(status().isBadRequest());

        // Validate the User in the database
        List<User> userList = userRepository.findAll();
        assertThat(userList).hasSize(databaseSizeBeforeCreate);
    }
    // POST /users reusing an existing email (with a fresh login) must return 400
    // and leave the database unchanged.
    @Test
    public void createUserWithExistingEmail() throws Exception {
        // Initialize the database
        userRepository.save(user);
        int databaseSizeBeforeCreate = userRepository.findAll().size();

        Set<String> autorities = new HashSet<>();
        autorities.add("ROLE_USER");
        ManagedUserVM managedUserVM = new ManagedUserVM(
            null,
            "anotherlogin",
            DEFAULT_PASSWORD,
            DEFAULT_FIRSTNAME,
            DEFAULT_LASTNAME,
            DEFAULT_EMAIL, // this email should already be used
            true,
            DEFAULT_IMAGEURL,
            DEFAULT_LANGKEY,
            null,
            null,
            null,
            null,
            autorities);

        // Create the User
        restUserMockMvc.perform(post(API_PATH + "/users")
            .contentType(TestUtil.APPLICATION_JSON_UTF8)
            .content(TestUtil.convertObjectToJsonBytes(managedUserVM)))
            .andExpect(status().isBadRequest());

        // Validate the User in the database
        List<User> userList = userRepository.findAll();
        assertThat(userList).hasSize(databaseSizeBeforeCreate);
    }
    // GET /users must return 200 with a JSON array containing the saved user's attributes.
    @Test
    public void getAllUsers() throws Exception {
        // Initialize the database
        userRepository.save(user);

        // Get all the users
        restUserMockMvc.perform(get(API_PATH + "/users")
            .accept(MediaType.APPLICATION_JSON))
            .andExpect(status().isOk())
            .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
            .andExpect(jsonPath("$.[*].login").value(hasItem(DEFAULT_LOGIN)))
            .andExpect(jsonPath("$.[*].firstName").value(hasItem(DEFAULT_FIRSTNAME)))
            .andExpect(jsonPath("$.[*].lastName").value(hasItem(DEFAULT_LASTNAME)))
            .andExpect(jsonPath("$.[*].email").value(hasItem(DEFAULT_EMAIL)))
            .andExpect(jsonPath("$.[*].imageUrl").value(hasItem(DEFAULT_IMAGEURL)))
            .andExpect(jsonPath("$.[*].langKey").value(hasItem(DEFAULT_LANGKEY)));
    }
@Test
public void getUser() throws Exception {
    // Initialize the database
    userRepository.save(user);
    // Fetch the user by login and verify every exposed attribute of the
    // returned JSON object matches what was persisted.
    restUserMockMvc.perform(get(API_PATH + "/users/{login}", user.getLogin()))
        .andExpect(status().isOk())
        .andExpect(content().contentType(MediaType.APPLICATION_JSON_UTF8_VALUE))
        .andExpect(jsonPath("$.login").value(user.getLogin()))
        .andExpect(jsonPath("$.firstName").value(DEFAULT_FIRSTNAME))
        .andExpect(jsonPath("$.lastName").value(DEFAULT_LASTNAME))
        .andExpect(jsonPath("$.email").value(DEFAULT_EMAIL))
        .andExpect(jsonPath("$.imageUrl").value(DEFAULT_IMAGEURL))
        .andExpect(jsonPath("$.langKey").value(DEFAULT_LANGKEY));
}
@Test
public void getNonExistingUser() throws Exception {
    // Requesting a login that was never created must yield 404 Not Found.
    restUserMockMvc.perform(get(API_PATH + "/users/unknown"))
        .andExpect(status().isNotFound());
}
@Test
public void updateUser() throws Exception {
    // Persist the user that will be modified.
    userRepository.save(user);
    int sizeBefore = userRepository.findAll().size();

    User existing = userRepository.findOne(user.getId());
    Set<String> authorities = new HashSet<>();
    authorities.add("ROLE_USER");
    // Keep id, login and audit fields; change every editable attribute.
    ManagedUserVM update = new ManagedUserVM(
        existing.getId(),
        existing.getLogin(),
        UPDATED_PASSWORD,
        UPDATED_FIRSTNAME,
        UPDATED_LASTNAME,
        UPDATED_EMAIL,
        existing.getActivated(),
        UPDATED_IMAGEURL,
        UPDATED_LANGKEY,
        existing.getCreatedBy(),
        existing.getCreatedDate(),
        existing.getLastModifiedBy(),
        existing.getLastModifiedDate(),
        authorities);

    restUserMockMvc.perform(put(API_PATH + "/users")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(update)))
        .andExpect(status().isOk());

    // The update must modify the existing row rather than create a new one.
    List<User> users = userRepository.findAll();
    assertThat(users).hasSize(sizeBefore);
    User reloaded = users.get(users.size() - 1);
    assertThat(reloaded.getFirstName()).isEqualTo(UPDATED_FIRSTNAME);
    assertThat(reloaded.getLastName()).isEqualTo(UPDATED_LASTNAME);
    assertThat(reloaded.getEmail()).isEqualTo(UPDATED_EMAIL);
    assertThat(reloaded.getImageUrl()).isEqualTo(UPDATED_IMAGEURL);
    assertThat(reloaded.getLangKey()).isEqualTo(UPDATED_LANGKEY);
}
@Test
public void updateUserLogin() throws Exception {
    // Persist the user that will be modified.
    userRepository.save(user);
    int sizeBefore = userRepository.findAll().size();

    User existing = userRepository.findOne(user.getId());
    Set<String> authorities = new HashSet<>();
    authorities.add("ROLE_USER");
    // Unlike updateUser(), this variant also changes the login itself.
    ManagedUserVM update = new ManagedUserVM(
        existing.getId(),
        UPDATED_LOGIN,
        UPDATED_PASSWORD,
        UPDATED_FIRSTNAME,
        UPDATED_LASTNAME,
        UPDATED_EMAIL,
        existing.getActivated(),
        UPDATED_IMAGEURL,
        UPDATED_LANGKEY,
        existing.getCreatedBy(),
        existing.getCreatedDate(),
        existing.getLastModifiedBy(),
        existing.getLastModifiedDate(),
        authorities);

    restUserMockMvc.perform(put(API_PATH + "/users")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(update)))
        .andExpect(status().isOk());

    // The update must modify the existing row rather than create a new one.
    List<User> users = userRepository.findAll();
    assertThat(users).hasSize(sizeBefore);
    User reloaded = users.get(users.size() - 1);
    assertThat(reloaded.getLogin()).isEqualTo(UPDATED_LOGIN);
    assertThat(reloaded.getFirstName()).isEqualTo(UPDATED_FIRSTNAME);
    assertThat(reloaded.getLastName()).isEqualTo(UPDATED_LASTNAME);
    assertThat(reloaded.getEmail()).isEqualTo(UPDATED_EMAIL);
    assertThat(reloaded.getImageUrl()).isEqualTo(UPDATED_IMAGEURL);
    assertThat(reloaded.getLangKey()).isEqualTo(UPDATED_LANGKEY);
}
@Test
public void updateUserExistingEmail() throws Exception {
    // Persist two users; the second one owns the e-mail we will collide with.
    userRepository.save(user);

    User anotherUser = new User();
    anotherUser.setLogin("jhipster");
    anotherUser.setPassword(RandomStringUtils.random(60));
    anotherUser.setActivated(true);
    anotherUser.setEmail("jhipster@localhost");
    anotherUser.setFirstName("java");
    anotherUser.setLastName("hipster");
    anotherUser.setImageUrl("");
    anotherUser.setLangKey("en");
    userRepository.save(anotherUser);

    int sizeBefore = userRepository.findAll().size();

    User existing = userRepository.findOne(user.getId());
    Set<String> authorities = new HashSet<>();
    authorities.add("ROLE_USER");
    // Everything unchanged except the e-mail, which collides with anotherUser.
    ManagedUserVM conflictingUpdate = new ManagedUserVM(
        existing.getId(),
        existing.getLogin(),
        existing.getPassword(),
        existing.getFirstName(),
        existing.getLastName(),
        "jhipster@localhost", // this email should already be used by anotherUser
        existing.getActivated(),
        existing.getImageUrl(),
        existing.getLangKey(),
        existing.getCreatedBy(),
        existing.getCreatedDate(),
        existing.getLastModifiedBy(),
        existing.getLastModifiedDate(),
        authorities);

    // Updating the first user's e-mail to the second user's address must fail.
    restUserMockMvc.perform(put(API_PATH + "/users")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(conflictingUpdate)))
        .andExpect(status().isBadRequest());
}
@Test
public void updateUserExistingLogin() throws Exception {
    // Persist two users; the second one owns the login we will collide with.
    userRepository.save(user);

    User anotherUser = new User();
    anotherUser.setLogin("jhipster");
    anotherUser.setPassword(RandomStringUtils.random(60));
    anotherUser.setActivated(true);
    anotherUser.setEmail("jhipster@localhost");
    anotherUser.setFirstName("java");
    anotherUser.setLastName("hipster");
    anotherUser.setImageUrl("");
    anotherUser.setLangKey("en");
    userRepository.save(anotherUser);

    int sizeBefore = userRepository.findAll().size();

    User existing = userRepository.findOne(user.getId());
    Set<String> authorities = new HashSet<>();
    authorities.add("ROLE_USER");
    // Everything unchanged except the login, which collides with anotherUser.
    ManagedUserVM conflictingUpdate = new ManagedUserVM(
        existing.getId(),
        "jhipster", // this login should already be used by anotherUser
        existing.getPassword(),
        existing.getFirstName(),
        existing.getLastName(),
        existing.getEmail(),
        existing.getActivated(),
        existing.getImageUrl(),
        existing.getLangKey(),
        existing.getCreatedBy(),
        existing.getCreatedDate(),
        existing.getLastModifiedBy(),
        existing.getLastModifiedDate(),
        authorities);

    // Updating the first user's login to the second user's login must fail.
    restUserMockMvc.perform(put(API_PATH + "/users")
        .contentType(TestUtil.APPLICATION_JSON_UTF8)
        .content(TestUtil.convertObjectToJsonBytes(conflictingUpdate)))
        .andExpect(status().isBadRequest());
}
@Test
public void deleteUser() throws Exception {
    // Persist the user that will be deleted.
    userRepository.save(user);
    int sizeBefore = userRepository.findAll().size();

    restUserMockMvc.perform(delete(API_PATH + "/users/{login}", user.getLogin())
        .accept(TestUtil.APPLICATION_JSON_UTF8))
        .andExpect(status().isOk());

    // Exactly one row must have been removed.
    assertThat(userRepository.findAll()).hasSize(sizeBefore - 1);
}
@Test
public void equalsVerifier() throws Exception {
    // Two users with different logins must not compare equal.
    User first = new User();
    first.setLogin("AAA");
    User second = new User();
    second.setLogin("BBB");
    assertThat(first).isNotEqualTo(second);
}
}
| apache-2.0 |
WASdev/standards.jsr352.tck | com.ibm.jbatch.tck/src/main/java/com/ibm/jbatch/tck/artifacts/specialized/DoSomethingArrayItemProcessorImpl.java | 2022 | /*
* Copyright 2012 International Business Machines Corp.
*
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.jbatch.tck.artifacts.specialized;
import java.util.logging.Logger;
import javax.batch.api.BatchProperty;
import javax.batch.api.chunk.ItemProcessor;
import javax.inject.Inject;
import com.ibm.jbatch.tck.artifacts.chunktypes.ReadRecord;
@javax.inject.Named("doSomethingArrayItemProcessorImpl")
public class DoSomethingArrayItemProcessorImpl implements ItemProcessor {

	private final static Logger logger = Logger.getLogger(DoSomethingArrayItemProcessorImpl.class.getName());

	// Optional batch property: one-based position of the single item to filter out.
	@Inject
	@BatchProperty(name="app.processFilterItem")
	String appProcessFilterItem;

	// Parsed value of appProcessFilterItem; only meaningful once initSkipNumber is true.
	int filterNumber;
	boolean initSkipNumber = false;
	// One-based position of the item currently being processed.
	int count = 1;
	// Amount added to each surviving record's counter.
	private int update = 100;

	/**
	 * Adds {@code update} to the record's counter, filtering out (returning
	 * null for) the item at the configured one-based position, if any.
	 */
	@Override
	public ReadRecord processItem(Object record) throws Exception {
		// Lazily parse the injected filter position the first time an item arrives.
		if (appProcessFilterItem != null && !initSkipNumber) {
			filterNumber = Integer.parseInt(appProcessFilterItem);
			initSkipNumber = true;
		}
		// Returning null tells the batch runtime to drop this item.
		if (initSkipNumber && filterNumber == count) {
			logger.fine("AJM: filtering out #" + filterNumber);
			count++;
			return null;
		}
		count++;
		ReadRecord processed = (ReadRecord) record;
		processed.setRecord(processed.getCount() + update);
		return processed;
	}
}
| apache-2.0 |
HiDrive/hidrive-android-sdk | src/com/strato/hidrive/api/bll/sharelink/DeleteShareLinkGateway.java | 1367 | /**
* Copyright 2014 STRATO AG
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.strato.hidrive.api.bll.sharelink;
import com.strato.hidrive.api.connection.gateway.SingleResultGateway;
import com.strato.hidrive.api.connection.httpgateway.request.DeleteRequest;
import com.strato.hidrive.api.connection.httpgateway.request.Param;
import com.strato.hidrive.api.connection.httpgateway.request.Request;
import com.strato.hidrive.api.interfaces.DataReader;
/**
 * Gateway that issues a DELETE for a single share link, identified by its id.
 */
public class DeleteShareLinkGateway extends SingleResultGateway<String> {

	/** Identifier of the share link to remove. */
	private final String linkId;

	public DeleteShareLinkGateway(String linkId) {
		super();
		this.linkId = linkId;
	}

	@Override
	protected String prepareObject(DataReader datareader) {
		// The result of a successful delete is simply the id that was removed.
		return linkId;
	}

	@Override
	protected Request prepareRequest() {
		// DELETE sharelink?id=<linkId>
		return new DeleteRequest("sharelink", new Param("id", linkId));
	}
}
| apache-2.0 |
romanzenka/swift | lib/unimod/src/main/java/edu/mayo/mprc/unimod/UnimodHandler.java | 2466 | package edu.mayo.mprc.unimod;
import org.xml.sax.Attributes;
import org.xml.sax.ContentHandler;
import org.xml.sax.SAXException;
import org.xml.sax.helpers.DefaultHandler;
/**
 * Entry point to unimod parsing. Based on the version, it picks one of the two actual implementations.
 * <p>
 * The handler inspects the {@code majorVersion} attribute of the root
 * {@code <unimod>} element and forwards all subsequent SAX events to a
 * version-specific delegate. Events arriving before the root element, or for
 * an unsupported major version, are silently ignored.
 *
 * @author Roman Zenka
 */
public final class UnimodHandler extends DefaultHandler {
	/** Target object the parsed data is written into. */
	private final Unimod into;
	/** Version-specific delegate; {@code null} until the root element has been seen. */
	private ContentHandler actualHandler;

	public UnimodHandler(final Unimod into) {
		this.into = into;
	}

	/**
	 * Reads the named (un-namespaced) attribute as a boolean flag.
	 *
	 * @return {@code false} when the attribute value is exactly "0",
	 *         {@code true} for any other present value, or {@code null} when
	 *         the attribute is missing.
	 */
	public static Boolean getBooleanValue(final Attributes attr, final String attrName) {
		// Attributes.getValue returns null for a missing attribute; test it
		// explicitly instead of relying on a swallowed NullPointerException.
		final String strValue = attr.getValue("", attrName);
		if (strValue == null) {
			return null;
		}
		return !"0".equals(strValue);
	}

	/**
	 * Reads the named (un-namespaced) attribute as a {@link Double}.
	 *
	 * @return the parsed value, or {@code null} when the attribute is missing
	 *         or not a valid number.
	 */
	public static Double getDoubleValue(final Attributes attr, final String attrName) {
		final String strValue = attr.getValue("", attrName);
		if (strValue == null) {
			return null;
		}
		try {
			return Double.valueOf(strValue);
		} catch (final NumberFormatException ignored) {
			// Malformed number: treat the attribute as absent.
			return null;
		}
	}

	/**
	 * Reads the named (un-namespaced) attribute as an {@link Integer}.
	 *
	 * @return the parsed value, or {@code null} when the attribute is missing
	 *         or not a valid integer.
	 */
	public static Integer getIntegerValue(final Attributes attr, final String attrName) {
		final String strValue = attr.getValue("", attrName);
		if (strValue == null) {
			return null;
		}
		try {
			return Integer.valueOf(strValue);
		} catch (final NumberFormatException ignored) {
			// Malformed number: treat the attribute as absent.
			return null;
		}
	}

	@Override
	public void startElement(final String namespaceURI, final String localName, final String qualifiedName, final Attributes attr) throws SAXException {
		if (actualHandler == null) {
			// Still waiting for the root element to learn the format version.
			if ("unimod".equals(localName)) {
				into.setMajorVersion(attr.getValue("majorVersion"));
				into.setMinorVersion(attr.getValue("minorVersion"));
				if ("1".equals(into.getMajorVersion())) {
					actualHandler = new Unimod1Handler(into);
				} else if ("2".equals(into.getMajorVersion())) {
					actualHandler = new Unimod2Handler(into);
				}
				// Unsupported major versions leave actualHandler null, so the
				// rest of the document is ignored.
			}
		} else {
			actualHandler.startElement(namespaceURI, localName, qualifiedName, attr);
		}
	}

	@Override
	public void characters(final char[] ch, final int start, final int length) throws SAXException {
		if (actualHandler != null) {
			actualHandler.characters(ch, start, length);
		}
	}

	@Override
	public void endElement(final String namespaceURI, final String localName, final String qualifiedName) throws SAXException {
		if (actualHandler != null) {
			actualHandler.endElement(namespaceURI, localName, qualifiedName);
		}
	}
}
| apache-2.0 |
Sausure/WIFIADB | WIFIADBIntelliJPlugin/src/main/java/adb/wifi/woaiwhz/base/compat/IOsCompat.java | 202 | package adb.wifi.woaiwhz.base.compat;
import org.jetbrains.annotations.NotNull;
/**
* Created by wanghuazhou on 01/01/2017.
*/
public interface IOsCompat {
    /**
     * Returns the location of the adb executable within the Android SDK.
     * NOTE(review): implementations are presumably OS-specific (e.g. an
     * ".exe" suffix on Windows) -- not verifiable from this file; confirm
     * against the concrete implementors.
     */
    @NotNull
    String getADBinSdk();
}
| apache-2.0 |
reines/home | component-calendar-google/src/main/java/com/furnaghan/home/component/calendar/google/GoogleCalendarConfiguration.java | 1679 | package com.furnaghan.home.component.calendar.google;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.furnaghan.home.component.Configuration;
import com.google.api.client.util.SecurityUtils;
import com.google.common.io.Files;
import io.dropwizard.util.Duration;
import org.hibernate.validator.constraints.NotEmpty;
import javax.validation.constraints.NotNull;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.PrivateKey;
/**
 * Configuration for the Google Calendar component. Field names double as the
 * JSON/YAML property names (Jackson {@code @JsonProperty} binding), so they
 * must not be renamed.
 */
public class GoogleCalendarConfiguration implements Configuration {

    // Account e-mail address used to authenticate against the Google API.
    @NotEmpty
    @JsonProperty
    private String email;

    // PKCS#12 key store file containing the private key.
    @NotNull
    @JsonProperty
    private File keyStore;

    // Key store password. "notasecret" / "privatekey" below look like the
    // defaults for keys issued by the Google API console -- TODO confirm for
    // the key actually deployed.
    @NotNull
    @JsonProperty
    private String storePass = "notasecret";

    // Alias of the private key entry inside the key store.
    @NotNull
    @JsonProperty
    private String keyAlias = "privatekey";

    // Password protecting the private key entry itself.
    @NotNull
    @JsonProperty
    private String keyPass = "notasecret";

    // Identifier of the calendar to read.
    @NotEmpty
    @JsonProperty
    private String calendarId;

    // How often the calendar is polled; defaults to once a minute.
    @NotNull
    @JsonProperty
    private Duration pollInterval = Duration.minutes(1);

    public String getEmail() {
        return email;
    }

    /**
     * Loads the private key from the configured PKCS#12 key store.
     * The stream is closed by try-with-resources even when loading fails.
     *
     * @throws GeneralSecurityException if the store/key cannot be decrypted
     * @throws IOException if the key store file cannot be read
     */
    public PrivateKey getPrivateKey() throws GeneralSecurityException, IOException {
        try (final InputStream in = Files.asByteSource(keyStore).openStream()) {
            return SecurityUtils.loadPrivateKeyFromKeyStore(
                    SecurityUtils.getPkcs12KeyStore(), in,
                    storePass, keyAlias, keyPass
            );
        }
    }

    public String getCalendarId() {
        return calendarId;
    }

    public Duration getPollInterval() {
        return pollInterval;
    }
}
| apache-2.0 |
doctester/doctester | doctester-core/src/main/java/org/r10r/doctester/testbrowser/TestBrowserImpl.java | 11491 | /**
* Copyright (C) 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.r10r.doctester.testbrowser;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpVersion;
import org.apache.http.ParseException;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.*;
import org.apache.http.cookie.Cookie;
import org.apache.http.entity.StringEntity;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.CoreProtocolPNames;
import org.apache.http.params.HttpParams;
import org.apache.http.util.EntityUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.management.RuntimeErrorException;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import static org.r10r.doctester.testbrowser.HttpConstants.*;
/**
 * {@link TestBrowser} implementation backed by Apache HttpClient 4.x.
 * <p>
 * A single {@link DefaultHttpClient} instance is reused for every request, so
 * cookies received from one response are automatically sent with subsequent
 * requests. The instance is not thread-safe.
 */
public class TestBrowserImpl implements TestBrowser {

    private static final Logger logger = LoggerFactory.getLogger(TestBrowserImpl.class);

    /** Per-request HttpClient parameter that toggles automatic redirect following. */
    private static final String HANDLE_REDIRECTS = "http.protocol.handle-redirects";

    private final DefaultHttpClient httpClient;

    public TestBrowserImpl() {
        httpClient = new DefaultHttpClient();
    }

    @Override
    public List<Cookie> getCookies() {
        return httpClient.getCookieStore().getCookies();
    }

    /**
     * Returns the cookie with the given name, or {@code null} if the cookie
     * store holds no such cookie.
     */
    @Override
    public Cookie getCookieWithName(String name) {
        List<Cookie> cookies = getCookies();
        // skip through cookies and return cookie you want
        for (Cookie cookie : cookies) {
            if (cookie.getName().equals(name)) {
                return cookie;
            }
        }
        return null;
    }

    @Override
    public void clearCookies() {
        httpClient.getCookieStore().clear();
    }

    /**
     * Executes the given request, dispatching on its HTTP method.
     *
     * @throws RuntimeErrorException when the method is not one of
     *         HEAD/GET/DELETE/POST/PUT/PATCH
     */
    @Override
    public Response makeRequest(Request httpRequest) {
        Response httpResponse;
        if (Sets.newHashSet(HEAD, GET, DELETE).contains(httpRequest.httpRequestType)) {
            httpResponse = makeHeadGetOrDeleteRequest(httpRequest);
        } else if (Sets.newHashSet(POST, PUT, PATCH).contains(httpRequest.httpRequestType)) {
            httpResponse = makePatchPostOrPutRequest(httpRequest);
        } else {
            throw new RuntimeErrorException(new Error("Your requested httpRequest.httpRequestType is not supported"));
        }
        return httpResponse;
    }

    /** Executes a HEAD, GET or DELETE request (methods without a request body). */
    private Response makeHeadGetOrDeleteRequest(Request request) {
        Response response;
        org.apache.http.HttpResponse apacheHttpClientResponse;
        try {
            HttpUriRequest apacheHttpRequest;
            httpClient.getParams().setParameter(
                    CoreProtocolPNames.PROTOCOL_VERSION, HttpVersion.HTTP_1_1);
            if (GET.equalsIgnoreCase(request.httpRequestType)) {
                apacheHttpRequest = new HttpGet(request.uri);
            } else if (DELETE.equalsIgnoreCase(request.httpRequestType)) {
                apacheHttpRequest = new HttpDelete(request.uri);
            } else {
                apacheHttpRequest = new HttpHead(request.uri);
            }
            if (request.headers != null) {
                // add all headers
                for (Entry<String, String> header : request.headers.entrySet()) {
                    apacheHttpRequest.addHeader(header.getKey(), header.getValue());
                }
            }
            setHandleRedirect(apacheHttpRequest, request.followRedirects);
            apacheHttpClientResponse = httpClient.execute(apacheHttpRequest);
            response = convertFromApacheHttpResponseToDocTesterHttpResponse(apacheHttpClientResponse);
            if (apacheHttpRequest instanceof HttpRequestBase) {
                // return the connection so the shared client can be reused
                ((HttpRequestBase) apacheHttpRequest).releaseConnection();
            }
        } catch (IOException e) {
            logger.error("Fatal problem creating GET or DELETE request in TestBrowser", e);
            throw new RuntimeException(e);
        }
        return response;
    }

    /**
     * Executes a PATCH, POST or PUT request. The body is taken from form
     * parameters, multipart file uploads, or the raw payload (serialized as
     * JSON or XML when the matching Content-Type header is present).
     */
    private Response makePatchPostOrPutRequest(Request httpRequest) {
        org.apache.http.HttpResponse apacheHttpClientResponse;
        Response response = null;
        try {
            httpClient.getParams().setParameter(
                    CoreProtocolPNames.PROTOCOL_VERSION, HttpVersion.HTTP_1_1);
            HttpEntityEnclosingRequestBase apacheHttpRequest;
            if (PATCH.equalsIgnoreCase(httpRequest.httpRequestType)) {
                apacheHttpRequest = new HttpPatch(httpRequest.uri);
            } else if (POST.equalsIgnoreCase(httpRequest.httpRequestType)) {
                apacheHttpRequest = new HttpPost(httpRequest.uri);
            } else {
                apacheHttpRequest = new HttpPut(httpRequest.uri);
            }
            if (httpRequest.headers != null) {
                // add all headers
                for (Entry<String, String> header : httpRequest.headers.entrySet()) {
                    apacheHttpRequest.addHeader(header.getKey(), header.getValue());
                }
            }
            ///////////////////////////////////////////////////////////////////
            // Either add form parameters...
            ///////////////////////////////////////////////////////////////////
            if (httpRequest.formParameters != null) {
                List<BasicNameValuePair> formparams = Lists.newArrayList();
                for (Entry<String, String> parameter : httpRequest.formParameters.entrySet()) {
                    formparams.add(new BasicNameValuePair(parameter.getKey(),
                            parameter.getValue()));
                }
                // encode form parameters and add
                UrlEncodedFormEntity entity = new UrlEncodedFormEntity(formparams);
                apacheHttpRequest.setEntity(entity);
            }
            ///////////////////////////////////////////////////////////////////
            // Or add multipart file upload
            ///////////////////////////////////////////////////////////////////
            if (httpRequest.filesToUpload != null) {
                MultipartEntity entity = new MultipartEntity(
                        HttpMultipartMode.BROWSER_COMPATIBLE);
                for (Map.Entry<String, File> entry : httpRequest.filesToUpload.entrySet()) {
                    // For File parameters
                    entity.addPart(entry.getKey(),
                            new FileBody((File) entry.getValue()));
                }
                apacheHttpRequest.setEntity(entity);
            }
            ///////////////////////////////////////////////////////////////////
            // Or add payload and convert if Json or Xml
            ///////////////////////////////////////////////////////////////////
            if (httpRequest.payload != null) {
                if (httpRequest.headers.containsKey(HEADER_CONTENT_TYPE)
                        && httpRequest.headers.containsValue(APPLICATION_JSON_WITH_CHARSET_UTF8)) {
                    String string = new ObjectMapper().writeValueAsString(httpRequest.payload);
                    StringEntity entity = new StringEntity(string, "utf-8");
                    entity.setContentType("application/json; charset=utf-8");
                    apacheHttpRequest.setEntity(entity);
                } else if (httpRequest.headers.containsKey(HEADER_CONTENT_TYPE)
                        && httpRequest.headers.containsValue(APPLICATION_XML_WITH_CHARSET_UTF_8)) {
                    String string = new XmlMapper().writeValueAsString(httpRequest.payload);
                    StringEntity entity = new StringEntity(string, "utf-8");
                    entity.setContentType(APPLICATION_XML_WITH_CHARSET_UTF_8);
                    // BUGFIX: install the entity whose content type was just set.
                    // Previously a fresh StringEntity without the XML content type
                    // was installed here, so the Content-Type was silently dropped.
                    apacheHttpRequest.setEntity(entity);
                } else if (httpRequest.payload instanceof String) {
                    StringEntity entity = new StringEntity((String) httpRequest.payload, "utf-8");
                    apacheHttpRequest.setEntity(entity);
                } else {
                    StringEntity entity = new StringEntity(httpRequest.payload.toString(), "utf-8");
                    apacheHttpRequest.setEntity(entity);
                }
            }
            setHandleRedirect(apacheHttpRequest, httpRequest.followRedirects);
            // Here we go!
            apacheHttpClientResponse = httpClient.execute(apacheHttpRequest);
            response = convertFromApacheHttpResponseToDocTesterHttpResponse(apacheHttpClientResponse);
            apacheHttpRequest.releaseConnection();
        } catch (IOException e) {
            logger.error("Fatal problem creating PATCH, POST or PUT request in TestBrowser", e);
            throw new RuntimeException(e);
        }
        return response;
    }

    /**
     * Copies status, headers and the UTF-8 decoded body of an Apache
     * HttpClient response into a DocTester {@link Response}. A response
     * without an entity (e.g. HEAD) yields a {@code null} body.
     */
    private org.r10r.doctester.testbrowser.Response convertFromApacheHttpResponseToDocTesterHttpResponse(org.apache.http.HttpResponse httpResponse) {
        Map<String, String> headers = Maps.newHashMap();
        for (Header header : httpResponse.getAllHeaders()) {
            headers.put(header.getName(), header.getValue());
        }
        int httpStatus = httpResponse.getStatusLine().getStatusCode();
        String body = null;
        HttpEntity entity = httpResponse.getEntity();
        if (entity != null) {
            try {
                body = EntityUtils.toString(entity, "UTF-8");
            } catch (IOException | ParseException e) {
                logger.error("Error while converting ApacheHttpClient response body to a String we can use", e);
            }
        }
        org.r10r.doctester.testbrowser.Response doctestJHttpResponse = new org.r10r.doctester.testbrowser.Response(
                headers, httpStatus, body);
        return doctestJHttpResponse;
    }

    /**
     * Tells ApacheHttpClient whether to follow redirects. See also:
     * http://stackoverflow.com/questions/1519392/how-to-prevent-apache-http-client-from-following-a-redirect
     */
    private void setHandleRedirect(HttpUriRequest httpUriRequest, boolean handleRedirect) {
        HttpParams params = new BasicHttpParams();
        params.setParameter(HANDLE_REDIRECTS, handleRedirect);
        httpUriRequest.setParams(params);
    }
}
| apache-2.0 |
yurloc/arquillian-graphene | ftest/src/test/java/org/jboss/arquillian/graphene/ftest/drone/GrapheneDroneWebDriverIntegrationTestCase.java | 2794 | /**
* JBoss, Home of Professional Open Source
* Copyright 2013, Red Hat, Inc. and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.jboss.arquillian.graphene.ftest.drone;
import org.jboss.arquillian.drone.api.annotation.Default;
import static org.junit.Assert.assertTrue;
import org.jboss.arquillian.drone.api.annotation.Drone;
import org.jboss.arquillian.graphene.context.GrapheneContext;
import org.jboss.arquillian.graphene.proxy.GrapheneProxyInstance;
import org.jboss.arquillian.junit.Arquillian;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.openqa.selenium.WebDriver;
/**
* @author Lukas Fryc
* @author Jan Papousek
*/
@RunWith(Arquillian.class)
public class GrapheneDroneWebDriverIntegrationTestCase {

    /** Browser instance injected by Drone; expected to be a Graphene proxy. */
    @Drone
    WebDriver browser;

    @Test
    public void created_instance_should_be_instance_of_requested_driver() {
        assertTrue("browser must be WebDriver", browser instanceof WebDriver);
    }

    @Test
    public void created_instance_should_be_instance_of_GrapheneProxyInstance() {
        assertTrue("browser must be proxy", browser instanceof GrapheneProxyInstance);
    }

    @Test
    public void created_instance_should_be_able_to_navigate_to_some_page() {
        browser.navigate().to("http://127.0.0.1:14444");
    }

    @Test
    public void context_instance_should_be_instance_of_requested_driver() {
        assertTrue("context browser must be WebDriver", GrapheneContext.getContextFor(Default.class).getWebDriver() instanceof WebDriver);
    }

    @Test
    public void context_instance_should_be_instance_of_GrapheneProxyInstance() {
        // BUGFIX: this test previously asserted on the injected "browser" field
        // (copy-paste from created_instance_...), so the context-provided
        // instance was never actually checked.
        assertTrue("context browser must be proxy", GrapheneContext.getContextFor(Default.class).getWebDriver() instanceof GrapheneProxyInstance);
    }

    @Test
    public void context_instance_should_be_able_to_navigate_to_some_page() {
        GrapheneContext.getContextFor(Default.class).getWebDriver().navigate().to("http://127.0.0.1:14444");
    }
} | apache-2.0 |
icza/sc2gears | src/hu/belicza/andras/sc2gears/ui/dialogs/AboutDialog.java | 7435 | /*
* Project Sc2gears
*
* Copyright (c) 2010 Andras Belicza <iczaaa@gmail.com>
*
* This software is the property of Andras Belicza.
* Copying, modifying, distributing, refactoring without the authors permission
* is prohibited and protected by Law.
*/
package hu.belicza.andras.sc2gears.ui.dialogs;
import hu.belicza.andras.sc2gears.Consts;
import hu.belicza.andras.sc2gears.language.Language;
import hu.belicza.andras.sc2gears.sc2replay.EapmUtils;
import hu.belicza.andras.sc2gears.sc2replay.ReplayFactory;
import hu.belicza.andras.sc2gears.services.plugins.GeneralServicesImpl;
import hu.belicza.andras.sc2gears.services.plugins.PluginServicesImpl;
import hu.belicza.andras.sc2gears.services.streaming.PrivateVideoStreaming;
import hu.belicza.andras.sc2gears.settings.Settings;
import hu.belicza.andras.sc2gears.sound.Sounds;
import hu.belicza.andras.sc2gears.sound.Sounds.VoiceDescription;
import hu.belicza.andras.sc2gears.ui.GuiUtils;
import hu.belicza.andras.sc2gears.ui.MainFrame;
import hu.belicza.andras.sc2gears.ui.icons.Icons;
import hu.belicza.andras.sc2gears.ui.mousepracticegame.MousePracticeGameFrame;
import hu.belicza.andras.sc2gears.util.GeneralUtils;
import hu.belicza.andras.sc2gears.util.ProfileCache;
import hu.belicza.andras.sc2gears.util.ReplayCache;
import hu.belicza.andras.sc2gears.util.TemplateEngine;
import hu.belicza.andras.sc2gearspluginapi.Plugin;
import hu.belicza.andras.smpd.SmpdUtil;
import hu.belicza.andras.smpd.SmpdUtil.SmpdVer;
import java.awt.BorderLayout;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTable;
import javax.swing.table.DefaultTableModel;
/**
 * About information dialog.
 *
 * <p>Left column: animated logo, credits, copyright and trademark notices,
 * elapsed-uptime line and an OK button. Right column: a non-editable table of
 * version/environment properties. Plays a "thank you" voice sample on open if
 * voice notifications are enabled.
 *
 * @author Andras Belicza
 */
@SuppressWarnings("serial")
public class AboutDialog extends BaseDialog {
	/**
	 * Creates a new AboutDialog.
	 */
	public AboutDialog() {
		super( "about.title", new Object[] { Consts.APPLICATION_NAME } );
		// Left column: vertically stacked logo, legal notices and links.
		final Box box = Box.createVerticalBox();
		box.setBorder( BorderFactory.createEmptyBorder( 15, 20, 15, 10 ) );
		box.add( GuiUtils.wrapInPanel( GeneralUtils.createAnimatedLogoLabel() ) );
		box.add( Box.createVerticalStrut( 10 ) );
		box.add( GuiUtils.wrapInPanel( new JLabel( Language.getText( "about.usedIcons", Consts.APPLICATION_NAME + "™" ) ) ) );
		box.add( Box.createVerticalStrut( 5 ) );
		box.add( GuiUtils.wrapInPanel( new JLabel( Language.getText( "about.sc2ImagesCopyrightCompany", "© Blizzard Entertainment, Inc." ) ) ) );
		box.add( Box.createVerticalStrut( 15 ) );
		// Empty BorderLayout panels act as stretchable spacers around the uptime line.
		box.add( new JPanel( new BorderLayout() ) );
		box.add( GuiUtils.wrapInPanel( GuiUtils.changeFontToBold( new JLabel( Language.getText( "about.elapsedTimeSinceStart", Consts.APPLICATION_NAME, GeneralUtils.formatLongSeconds( ( System.currentTimeMillis() - MainFrame.APPLICATION_START_TIME.getTime() ) / 1000 ) ) ) ) ) );
		box.add( new JPanel( new BorderLayout() ) );
		box.add( Box.createVerticalStrut( 15 ) );
		box.add( GuiUtils.wrapInPanel( new JLabel( Language.getText( "welcome.thankYou", Consts.APPLICATION_NAME + "™" ) ) ) );
		box.add( Box.createVerticalStrut( 10 ) );
		final JLabel copyrightLabel = new JLabel( "Copyright © " + Language.formatPersonName( Consts.AUTHOR_FIRST_NAME, Consts.AUTHOR_LAST_NAME ) + ", 2010-2014" );
		GuiUtils.changeFontToItalic( copyrightLabel );
		box.add( GuiUtils.wrapInPanel( copyrightLabel ) );
		box.add( Box.createVerticalStrut( 10 ) );
		box.add( GuiUtils.wrapInPanel( new JLabel( Consts.APPLICATION_NAME + " is a trademark of " + Language.formatPersonName( Consts.AUTHOR_FIRST_NAME, Consts.AUTHOR_LAST_NAME ) + "." ) ) );
		//box.add( new JSeparator() );
		box.add( Box.createVerticalStrut( 10 ) );
		// External link to the successor application's home page.
		final JLabel visitScelightLabel = GeneralUtils.createLinkLabel( "Visit Scelight™, the successor to " + Consts.APPLICATION_NAME, Consts.URL_SCELIGHT_HOME_PAGE );
		visitScelightLabel.setIcon( Icons.SCELIGHT );
		box.add( GuiUtils.wrapInPanel( visitScelightLabel ) );
		box.add( Box.createVerticalStrut( 10 ) );
		final JButton okButton = createCloseButton( "button.ok" );
		box.add( GuiUtils.wrapInPanel( okButton ) );
		getContentPane().add( box, BorderLayout.WEST );
		// Right column: property table listing component versions of the app.
		final VoiceDescription currentVoiceDesc = Sounds.getVoiceDescription( Settings.getString( Settings.KEY_SETTINGS_VOICE ) );
		final JTable infoTable = GuiUtils.createNonEditableTable();
		( (DefaultTableModel) infoTable.getModel() ).setDataVector(
			new Object[][] {
				{ Language.getText( "about.author" ), Language.formatPersonName( Consts.AUTHOR_FIRST_NAME, Consts.AUTHOR_LAST_NAME ) },
				{ Language.getText( "about.email" ), Consts.AUTHOR_EMAIL },
				{ Language.getText( "about.version" ), Consts.APPLICATION_VERSION },
				{ Language.getText( "about.releasedOn" ), Language.formatDate( Consts.APPLICATION_RELEASE_DATE ) },
				{ Language.getText( "about.currentLanguage" ), Language.getLanguageName() },
				{ Language.getText( "about.currentTranslator" ), Language.getTranslatorName() },
				{ Language.getText( "about.languageFileVersion" ), Language.getLanguageFileVersion() + " (" + Language.getLanguageFileSubversion() + ")" },
				{ Language.getText( "about.currentVoice" ), currentVoiceDesc.displayName },
				{ Language.getText( "about.authorOfCurrentVoice" ), Language.formatPersonName( currentVoiceDesc.authorFirstName, currentVoiceDesc.authorLastName ) },
				{ Language.getText( "about.updaterVersion", Consts.UPDATER_NAME ), Consts.UPDATER_VERSION },
				{ Language.getText( "about.replayParserVersion" ), ReplayFactory.VERSION },
				{ Language.getText( "about.replayCacheVersion" ), ReplayCache.CACHE_VERSION },
				{ Language.getText( "about.profileCacheVersion" ), ProfileCache.CACHE_VERSION },
				{ Language.getText( "about.nameTemplateEngineVersion" ), TemplateEngine.ENGINE_VERSION },
				{ Language.getText( "about.pluginApiVersion" ), Plugin.API_VERSION },
				{ Language.getText( "about.pluginServicesImplVersion" ), PluginServicesImpl.IMPL_VERSION },
				{ Language.getText( "about.generalServicesImplVersion" ), GeneralServicesImpl.IMPL_VERSION },
				{ Language.getText( "about.eapmAlgorithmVersion" ), EapmUtils.ALGORITHM_VERSION },
				{ Language.getText( "about.mousePracticeGameVersion" ), MousePracticeGameFrame.GAME_VERSION },
				{ Language.getText( "about.smpdFormatVersion" ), SmpdUtil.getVersionString( SmpdVer.V11.binaryValue ) },
				{ Language.getText( "about.privateVideoStreamingVersion" ), PrivateVideoStreaming.VERSION }
			}, new Object[] { "Property", "Value" } );
		GuiUtils.packTable( infoTable );
		final JPanel tableWrapper = new JPanel( new BorderLayout() );
		tableWrapper.add( infoTable );
		tableWrapper.setBorder( BorderFactory.createCompoundBorder( BorderFactory.createEmptyBorder( 15, 15, 15, 15 ), BorderFactory.createEtchedBorder() ) );
		getContentPane().add( tableWrapper, BorderLayout.CENTER );
		if ( Settings.getBoolean( Settings.KEY_SETTINGS_ENABLE_VOICE_NOTIFICATIONS ) )
			Sounds.playSoundSample( Sounds.SAMPLE_THANK_YOU, false );
		packAndShow( okButton, false );
	}
}
| apache-2.0 |
Quantiply/rico | avro/src/test/java/com/quantiply/avro/JoinTest.java | 2788 | /*
* Copyright 2014-2015 Quantiply Corporation. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.quantiply.avro;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
public class JoinTest {

    /** Target schema of the join: "foo" + "bar" from the inputs, plus "charlie". */
    protected Schema getJoinedSchema() {
        return SchemaBuilder
                .record("Joined").namespace("com.quantiply.test")
                .fields()
                .name("foo").type().stringType().noDefault()
                .name("bar").type("int").noDefault()
                .name("charlie").type().stringType().noDefault()
                .endRecord();
    }

    /** First input schema: contributes "foo"; "left_out" must be dropped by the join. */
    protected Schema getInput1Schema() {
        return SchemaBuilder
                .record("In1").namespace("com.quantiply.test")
                .fields()
                .name("foo").type().stringType().noDefault()
                .name("left_out").type().stringType().noDefault()
                .endRecord();
    }

    // NOTE(review): record name "In1" looks copy-pasted from input 1 — verify intent.
    /** Second input schema: contributes "bar". */
    protected Schema getInput2Schema() {
        return SchemaBuilder
                .record("In1").namespace("com.quantiply.test")
                .fields()
                .name("bar").type("int").noDefault()
                .endRecord();
    }

    /** Merges both inputs, sets the extra field by hand, and checks all three fields. */
    @Test
    public void testJoin() throws IOException {
        GenericRecord joined = new Join(getJoinedSchema())
                .merge(getIn1())
                .merge(getIn2())
                .getBuilder()
                .set("charlie", "blah blah")
                .build();
        assertEquals("yo yo", joined.get("foo"));
        assertEquals(5, joined.get("bar"));
        assertEquals("blah blah", joined.get("charlie"));
    }

    /** Builds the first input record, including the field the join must discard. */
    private GenericRecord getIn1() {
        return new GenericRecordBuilder(getInput1Schema())
                .set("foo", "yo yo")
                .set("left_out", "forget me")
                .build();
    }

    /** Builds the second input record. */
    private GenericRecord getIn2() {
        return new GenericRecordBuilder(getInput2Schema())
                .set("bar", 5)
                .build();
    }
}
| apache-2.0 |
MeLays/MTTT | src/de/melays/ttt/Arena.java | 47584 | package de.melays.ttt;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import java.util.UUID;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Effect;
import org.bukkit.GameMode;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.block.Chest;
import org.bukkit.block.Sign;
import org.bukkit.entity.ArmorStand;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Item;
import org.bukkit.entity.Player;
import org.bukkit.event.block.Action;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.PlayerDeathEvent;
import org.bukkit.event.entity.ProjectileHitEvent;
import org.bukkit.event.inventory.InventoryClickEvent;
import org.bukkit.event.player.PlayerDropItemEvent;
import org.bukkit.event.player.PlayerInteractAtEntityEvent;
import org.bukkit.event.player.PlayerInteractEvent;
import org.bukkit.event.player.PlayerMoveEvent;
import org.bukkit.inventory.ItemStack;
import org.bukkit.inventory.PlayerInventory;
import org.bukkit.inventory.meta.ItemMeta;
import org.bukkit.inventory.meta.SkullMeta;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import org.bukkit.scheduler.BukkitScheduler;
import org.bukkit.scoreboard.NameTagVisibility;
import org.bukkit.scoreboard.Scoreboard;
import org.bukkit.scoreboard.ScoreboardManager;
import org.bukkit.scoreboard.Team;
import de.melays.Shop.Shop;
import de.melays.Shop.ShopGUI;
import de.melays.Shop.ShopItem;
import de.melays.itembuilder.ItemBuilder;
import me.blvckbytes.autonicker.NickSession;
import me.blvckbytes.autonicker.api.NickAPI;
public class Arena
{
Location spectator;
Location game;
Location back;
public Location testerbutton;
public Location testerlocation;
Location lamp1;
Location lamp2;
int startplayers= 0;
boolean ended = false;
public boolean tester = false;
boolean testeruse = false;
ArrayList<Player> spoofs = new ArrayList<Player>();
ArrayList<Player> testerdontmove = new ArrayList<Player>();
ArrayList<Block> blocks = new ArrayList<Block>();
ArrayList<Block> eblocks = new ArrayList<Block>();
ArrayList<Entity> creepers = new ArrayList<Entity>();
String prefix ;
public ArrayList<Player> usedpass = new ArrayList<Player>();
public ArrayList<Player> usedpass_detective = new ArrayList<Player>();
public ArrayList<Player> specs = new ArrayList<Player>();
public ArrayList<Player> players = new ArrayList<Player>();
public ArrayList<Player> detectives = new ArrayList<Player>();
public ArrayList<Player> traitors = new ArrayList<Player>();
ArrayList<ArmorStand> stands = new ArrayList<ArmorStand>();
String traitors_save = "";
public ArrayList<Player> innocents = new ArrayList<Player>();
public String gamestate = "waiting";
public int min;
HashMap<Player , ItemStack[]> inventorys = new HashMap<Player , ItemStack[]>();
HashMap<Player , ItemStack[]> armorinventorys = new HashMap<Player , ItemStack[]>();
public HashMap<Player , Integer> openedChests = new HashMap<Player , Integer>();
HashMap<Player , Integer> startkarma = new HashMap<Player , Integer>();
Team team;
int max_players = 0;
boolean new_specmode;
//Countdown
int counter;
int counterb = -1;
public RoleManager rm;
public Shop shop;
public ShopGUI shopgui;
public main plugin;
public SpecInventory specinv;
public void setMSignsMOTD (){
MOTDReflector.setMOTD(",,," + name + "," + gamestate+",MTTT-MSIGNS-HOOK");
}
public void leaveAll (){
for (Player p : new ArrayList<Player>(getPlayerList())){
p.teleport(back);
leave(p , true);
}
ArrayList<Player> specss = new ArrayList<Player>();
specss.addAll(specs);
for (Player p : new ArrayList<Player>(specss)){
p.setGameMode(GameMode.SURVIVAL);
p.teleport(back);
leave(p , true);
}
for (Player p : new ArrayList<Player>(players)){
p.setGameMode(GameMode.SURVIVAL);
p.teleport(back);
leave(p , true);
}
}
boolean forcestart = false;
    /**
     * Handles a user-triggered force start.
     *
     * Returns one of: "complete" (start accepted), "missingplayers" (fewer than
     * two participants), "started" (already running, or the lobby phase was
     * already skipped).
     */
    public String userStart(){
        if (this.gamestate.equals("waiting")){
            if (getCompleteList().size() >= 2){
                if (!lobbymode){
                    // No lobby phase: just shorten the countdown to 10s.
                    forcestart = true;
                    this.counter = 10;
                    return "complete";
                }
                else{
                    if (lobby){
                        // Skip the lobby half of the countdown and move everyone in.
                        forcestart = true;
                        this.counter = plugin.getConfig().getInt("waitingtime") / 2;
                        lobby = false;
                        this.moveAllFromLobby();
                    }
                    else{
                        // Lobby phase already over — the start is in progress.
                        return "started";
                    }
                }
                return "complete";
            }
            else{
                return "missingplayers";
            }
        }
        else{
            return "started";
        }
    }
    /**
     * Move-event hook for this arena.
     *
     * Players locked by the tester are held in place until the block under
     * their "from" position becomes air (the trapdoor opened), at which point
     * the tester is released and its scheduler task cancelled. Spectators are
     * kept inside the spectator world and within a 130-block radius of the
     * spectator spawn.
     */
    public void callMoveEvent (PlayerMoveEvent e){
        if (this.testerdontmove.contains(e.getPlayer())){
            if (e.getFrom().getBlock().getRelative(BlockFace.DOWN).getType().equals(Material.AIR)){
                // Floor opened: release the tester lock and stop the tester task.
                testeruse = false;
                testerdontmove.remove(e.getPlayer());
                Bukkit.getScheduler().cancelTask(this.testerscheduler);
            }
            else{
                // Still locked — snap the player back.
                e.getPlayer().teleport(e.getFrom());
            }
        }
        if (specs.contains(e.getPlayer())){
            if (!e.getPlayer().getWorld().equals(spectator.getWorld())){
                e.getPlayer().teleport(spectator);
                return;
            }
            // Keep spectators within range of the spectator spawn.
            if (e.getPlayer().getLocation().distance(this.spectator) > 130){
                e.getPlayer().teleport(spectator);
            }
        }
    }
Location sign;
public String name = "";
Location lobbyloc;
    /**
     * Creates a new arena and immediately starts its 1-second game loop.
     *
     * Reads per-arena settings from the plugin config: max player count
     * ("&lt;name&gt;.max"), lobby mode plus the "&lt;name&gt;.lobby.*" location, and the
     * global waiting time. Also wires up the role manager, shop, spectator
     * inventory, the (optional) traitor tester and a scoreboard team used to
     * hide name tags.
     */
    public Arena(main m , Location spectator, Location game,
            ShortestPathProblem problem, int seed, double tryGoalRatio) {
        super(problem, seed, tryGoalRatio);
        this.graph = graph;
    }
public ArenaTester atester;
public void sendRadiusMessage(Player p , String msg){
double maxDist = 10;
for (Player other : Bukkit.getOnlinePlayers()) {
if (other.getWorld().equals(game.getWorld()))
if (other.getLocation().distance(p.getLocation()) <= maxDist) {
if (getPlayerList().contains(other) || specs.contains(other)){
other.sendMessage(msg);
}
}
}
}
public void sendArenaMessage(String msg){
for (Player p : specs){
p.sendMessage(msg);
}
for (Player p : players){
p.sendMessage(msg);
}
for (Player p : detectives){
p.sendMessage(msg);
}
for (Player p : traitors){
p.sendMessage(msg);
}
for (Player p : innocents){
p.sendMessage(msg);
}
}
public void sendSpecMessage(String msg){
for (Player p : specs){
p.sendMessage(msg);
}
if (gamestate == "end"){
for (Player p : players){
p.sendMessage(msg);
}
for (Player p : detectives){
p.sendMessage(msg);
}
for (Player p : traitors){
p.sendMessage(msg);
}
for (Player p : innocents){
p.sendMessage(msg);
}
}
}
// public void refreshTags (){
// for (Player p : getPlayerList()){
// TagAPI.refreshPlayer(p);
// }
// }
//
// public void callNameApi (AsyncPlayerReceiveNameTagEvent e){
// System.out.println("called.2");
// Player p = e.getPlayer();
// Player aim = e.getNamedPlayer();
// if (rm.getRole(p).equals("INNOCENT") || rm.getRole(p).equals("DETECTIVE")){
// if (rm.getRole(aim).equals("DETECTIVE")){
// e.setTag(ChatColor.BLUE+"");
// }
// else{
// e.setTag(ChatColor.YELLOW+"");
// }
// }
// else if(rm.getRole(p).equals("TRAITOR")){
// if (rm.getRole(aim).equals("TRAITOR")){
// e.setTag(ChatColor.RED+"");
// }
// else if (rm.getRole(aim).equals("DETECTIVE")){
// e.setTag(ChatColor.BLUE+"");
// }
// else{
// e.setTag(ChatColor.GREEN+"");
// }
// }
// else{
// e.setTag(ChatColor.GRAY+"");
// }
// }
public void createDeathStand (Player p, Location loc, String role , Player killer , String rolekiller){
ArmorStand am = (ArmorStand) loc.getWorld().spawn(loc, ArmorStand.class);
am.setVisible(true);
am.setArms(false);
am.setCustomName(ChatColor.YELLOW+"Dead Body");
am.setCustomNameVisible(false);
am.setGravity(true);
ItemStack is = new ItemStack (Material.LEATHER_HELMET);
ItemMeta meta = is.getItemMeta();
meta.setDisplayName(role+"#"+p.getName());
is.setItemMeta(meta);
am.setHelmet(is);
ItemStack isc = new ItemStack (Material.LEATHER_CHESTPLATE);
ItemMeta metac = isc.getItemMeta();
try{
metac.setDisplayName(killer.getName());
}
catch (Exception e){
metac.setDisplayName("Nobody");
}
isc.setItemMeta(metac);
am.setChestplate(isc);
am.setLeggings(new ItemStack (Material.LEATHER_LEGGINGS));
am.setBoots(new ItemStack (Material.LEATHER_BOOTS));
stands.add(am);
}
public ArrayList<Player> getPlayerList(){
ArrayList<Player> p = new ArrayList<Player>();
p.addAll(traitors);
p.addAll(innocents);
p.addAll(detectives);
return p;
}
public int getCompleteSize(){
return getPlayerList().size() + specs.size() + players.size();
}
public ArrayList<Player> getCompleteList(){
ArrayList<Player> p = new ArrayList<Player>();
p.addAll(traitors);
p.addAll(innocents);
p.addAll(detectives);
p.addAll(specs);
p.addAll(players);
return p;
}
boolean lobbymode = false;
boolean lobby = false;
public void movetoLobby(Player p){
p.teleport(lobbyloc);
}
public void moveAllFromLobby(){
for (Player p : getCompleteList()){
if (plugin.ms.checkReady(this)){
p.teleport(plugin.ms.randomSpawn(this));
}
else{
p.teleport(game);
}
}
}
public boolean checkForceStart(){
if (!forcestart){
return false;
}
if (getCompleteList().size() <= 1){
return false;
}
return true;
}
public static String replaceLast(String text, String regex, String replacement) {
return text.replaceFirst("(?s)(.*)" + regex, "$1" + replacement);
}
public String commaString (String s , String strs){
String t = s;
t = t.replace(" ", ChatColor.translateAlternateColorCodes('&', plugin.mf.getMessage("commacolor", false))+", "+strs);
t = replaceLast(t , ", " , ".");
return t;
}
public void gameLoop(){
final Arena a = this;
updateSpecVisibility();
plugin.getServer().getScheduler().scheduleSyncRepeatingTask(plugin, new Runnable() {
public void run() {
if (gamestate == "waiting"){
if(counter == plugin.getConfig().getInt("waitingtime") && players.size() < min){
if (lobbymode){
lobby = true;
}
}
else{
counter -= 1;
}
traitors_save = "";
if (counter == plugin.getConfig().getInt("waitingtime") / 2 && lobby && lobbymode){
if (players.size() < min){
sendArenaMessage(plugin.mf.getMessage("waiting", true));
counter = plugin.getConfig().getInt("waitingtime");
}
else{
moveAllFromLobby();
lobby = false;
}
}
if (counter == 0){
if (players.size() < min && !(checkForceStart())){
sendArenaMessage(plugin.mf.getMessage("waiting", true));
counter = plugin.getConfig().getInt("waitingtime");
if (lobbymode){
lobby = true;
for (Block b : blocks){
b.setType(Material.CHEST);
if (b instanceof Chest){
Chest chest = (Chest) b.getState();
chest.getInventory().clear();
}
}
for (Block b : eblocks){
b.setType(Material.ENDER_CHEST);
}
for (Player p : getCompleteList()){
movetoLobby(p);
for (PotionEffect effect : p.getActivePotionEffects()){
try {
p.removePotionEffect(effect.getType());
}
catch(Exception ex){
}
}
startkarma.put(p, plugin.karma.getKarma(p));
openedChests.put(p, 0);
p.setGameMode(GameMode.SURVIVAL);
p.getInventory().clear();
p.getInventory().setArmorContents(new ItemStack[p.getInventory().getArmorContents().length]);
}
}
}
else{
ArenaStateChangeEvent event = new ArenaStateChangeEvent(a , "waiting" , "ingame");
Bukkit.getServer().getPluginManager().callEvent(event);
gamestate = "ingame";
counter = plugin.getConfig().getInt("gametime");
for (Player p : players){
plugin.rm.reward(p, "start");
plugin.karma.addAdvancedStat(p.getUniqueId(), "games", 1);
}
int slotrole = plugin.getConfig().getInt("roleitem_slot");
int slotleave = plugin.getConfig().getInt("leaveitem_slot");
if (plugin.getConfig().getBoolean("leave_item")){
for (Player p : players){
p.getInventory().setItem(slotleave-1, new ItemStack(Material.AIR));
}
}
if (plugin.getConfig().getBoolean("role_item")){
for (Player p : players){
p.getInventory().setItem(slotrole-1, new ItemStack(Material.AIR));
}
}
rm.setRoles(usedpass , usedpass_detective);
}
}
else if ((counter % 10 == 0 || counter <= 5) && !(counter == plugin.getConfig().getInt("waitingtime") && players.size() < min)){
if (lobby){
sendArenaMessage(plugin.mf.getMessage("countdownstart", true).replace("%counter%", Integer.toString(counter- (plugin.getConfig().getInt("waitingtime") / 2))));
}
else{
sendArenaMessage(plugin.mf.getMessage("countdownstart", true).replace("%counter%", Integer.toString(counter)));
}
}
for (Player p : players){
p.setGameMode(GameMode.SURVIVAL);
if (lobby){
setPlayerLevel(p,counter - (plugin.getConfig().getInt("waitingtime") / 2));
}
else{
setPlayerLevel(p,counter);
}
p.setFoodLevel(20);
}
if (sign != null){
if (sign.getBlock().getType() == Material.SIGN || sign.getBlock().getType() == Material.SIGN_POST || sign.getBlock().getType() == Material.WALL_SIGN){
Sign s = (Sign) sign.getBlock().getState();
s.setLine(0, plugin.mf.getMessage("signtop", false));
s.setLine(1, plugin.mf.getMessage("signwaiting", false).replace("%counter%" , Integer.toString(counter)));
s.setLine(2, name);
s.setLine(3, plugin.mf.getMessage("signonline", false).replace("%amount%" , Integer.toString(getCompleteSize()).replace("%min%", min+"")));
s.update();
}
}
}
if (gamestate == "ingame"){
counter -= 1;
if (randInt(0,50) == 1){
spawnLegendaryBlock();
}
if (counter == 0){
endGame(true);
counter = plugin.getConfig().getInt("restarttime");
for (Player p : getPlayerList()){
p.setHealth(20);
}
}
if (counter == 350 || counter == 60 || counter == 30 || counter == 10 || counter <= 5){
sendArenaMessage(plugin.mf.getMessage("countdownend", true).replace("%counter%", Integer.toString(counter)));
}
if (sign != null){
if (sign.getBlock().getType() == Material.SIGN || sign.getBlock().getType() == Material.SIGN_POST || sign.getBlock().getType() == Material.WALL_SIGN){
Sign s = (Sign) sign.getBlock().getState();
s.setLine(0, plugin.mf.getMessage("signtop", false));
s.setLine(1, plugin.mf.getMessage("signingame", false).replace("%counter%" , Integer.toString(counter)));
s.setLine(2, name);
s.setLine(3, plugin.mf.getMessage("signonline", false).replace("%amount%" , Integer.toString(getCompleteSize()).replace("%min%", min+"")));
s.update();
}
}
for (Player p : specs){
if (new_specmode){
p.setGameMode(GameMode.SURVIVAL);
p.setAllowFlight(true);
}
else
p.setGameMode(GameMode.SPECTATOR);
}
for (Player p : getPlayerList()){
p.setExp(0);
int level = plugin.karma.getKarma(p);
setPlayerLevel(p,level);
p.setFoodLevel(20);
}
}
if (gamestate == "end"){
if (!ended){
counter -= 1;
if (counter == 0){
restartArena();
}
if (counter == 10){
if (plugin.nicknamer){
for (String name : realnicks.keySet()){
sendArenaMessage(plugin.mf.getMessage("nickresolve", true).replaceAll("%nick%", realnicks.get(name)).replaceAll("%player%", plugin.names.get(UUID.fromString(name))));
}
}
else{
sendArenaMessage(plugin.mf.getMessage("countdownrestart", true).replace("%counter%", Integer.toString(counter)));
}
}
if (counter <= 5){
sendArenaMessage(plugin.mf.getMessage("countdownrestart", true).replace("%counter%", Integer.toString(counter)));
}
for (Player p : getPlayerList()){
setPlayerLevel(p,counter);
p.setFoodLevel(20);
}
if (sign != null){
if (sign.getBlock().getType() == Material.SIGN || sign.getBlock().getType() == Material.SIGN_POST || sign.getBlock().getType() == Material.WALL_SIGN){
Sign s = (Sign) sign.getBlock().getState();
s.setLine(0, plugin.mf.getMessage("signtop", false));
s.setLine(1, plugin.mf.getMessage("signending", false).replace("%counter%" , Integer.toString(counter)));
s.setLine(2, name);
s.setLine(3, plugin.mf.getMessage("signonline", false).replace("%amount%" , Integer.toString(getCompleteSize()).replace("%min%", min+"")));
s.update();
}
}
}
}
}
}, 0L, 20L);
}
public void karmaToLevel (Player p){
setPlayerLevel(p , plugin.karma.getKarma(p));
}
public void setPlayerLevel (Player p , int level){
p.setExp(0);
p.setLevel(0);
p.setLevel(level);
}
public int getAmountPlaying (){
return players.size() + traitors.size() + detectives.size() + innocents.size();
}
public void callHitEvent(EntityDamageByEntityEvent e){
Player hitter = (Player)e.getDamager();
if (specs.contains(hitter))
e.setCancelled(true);
if (gamestate == "waiting" || gamestate == "end"){
e.setCancelled(true);
}
}
public void movetoBungeeServer(Player p){
Bukkit.getMessenger().registerOutgoingPluginChannel(plugin, "BungeeCord");
System.out.println("Trying to send Player to Server "+plugin.getConfig().getString("bungeeserver"));
ByteArrayOutputStream b = new ByteArrayOutputStream();
DataOutputStream out = new DataOutputStream(b);
try {
out.writeUTF("Connect");
out.writeUTF(plugin.getConfig().getString("bungeeserver"));
} catch (IOException e) {
}
p.sendPluginMessage(plugin, "BungeeCord", b.toByteArray());
}
public void updateSpecVisibility(){
for (Player p : this.getCompleteList()){
for (Player s : this.getCompleteList()){
p.showPlayer(s);
}
}
for (Player s : specs){
for (Player p : this.getPlayerList()){
p.hidePlayer(s);
}
ArrayList<Player> update = new ArrayList<Player>();
update.add(s);
ColorTabAPI.setTabStyle(s, ChatColor.ITALIC+"" , "", 1000 , update);
}
for (Player s : specs){
for (Player p : specs){
if (s != p){
s.hidePlayer(p);
}
}
}
}
public void restartArena (){
fixPlayers();
if (plugin.getConfig().getBoolean("lobbymode")){
if (!plugin.getConfig().getString("bungeeserver").equals("false")){
for (Player p : getCompleteList()){
movetoBungeeServer(p);
}
}
this.leaveAll();
}
for (Player p : getCompleteList()){
plugin.rm.reward(p, "end");
}
if (plugin.getConfig().getBoolean("stopserver")){
Bukkit.getServer().shutdown();
}
players.addAll(traitors);
players.addAll(innocents);
players.addAll(detectives);
for (Player p : specs){
p.setFlying(false);
p.setAllowFlight(false);
p.setGameMode(GameMode.SURVIVAL);
}
players.addAll(specs);
innocents = new ArrayList<Player>();
detectives = new ArrayList<Player>();
specs = new ArrayList<Player>();
traitors = new ArrayList<Player>();
realnicks = new HashMap<String , String>();
for (Player p : players){
p.setFlying(false);
p.setAllowFlight(false);
p.setGameMode(GameMode.SURVIVAL);
for (PotionEffect effect : p.getActivePotionEffects()){
try {
p.removePotionEffect(effect.getType());
}
catch(Exception ex){
}
}
startkarma.put(p, plugin.karma.getKarma(p));
openedChests.put(p, 0);
p.setGameMode(GameMode.SURVIVAL);
if (lobbymode){
movetoLobby(p);
}
else if (plugin.ms.checkReady(this)){
p.teleport(plugin.ms.randomSpawn(this));
}
else{
p.teleport(game);
}
p.getInventory().clear();
p.getInventory().setArmorContents(new ItemStack[p.getInventory().getArmorContents().length]);
if (plugin.nicknamer){
NickSession session = NickAPI.getSession(p);
String nick = session.getRandomNickname();
session.setName(nick, true);
realnicks.put(session.holder.getUniqueId().toString() , session.current_nick);
p.sendMessage(plugin.mf.getMessage("randomnick", true).replaceAll("%nick%", session.current_nick));
}
int slotrole = plugin.getConfig().getInt("roleitem_slot");
int slotleave = plugin.getConfig().getInt("leaveitem_slot");
if (plugin.getConfig().getBoolean("role_item")){
p.getInventory().setItem(slotrole-1, new ItemBuilder(Material.getMaterial(plugin.getConfig().getString("role_item_type"))).setName(plugin.mf.getMessage("role_item", true)).toItemStack());
}
if (plugin.getConfig().getBoolean("leave_item")){
p.getInventory().setItem(slotleave-1, new ItemBuilder(Material.getMaterial(plugin.getConfig().getString("leave_item_type"))).setName(plugin.mf.getMessage("leave_item", true)).toItemStack());
}
}
ArenaStateChangeEvent event = new ArenaStateChangeEvent(this , "end" , "waiting");
Bukkit.getServer().getPluginManager().callEvent(event);
gamestate = "waiting";
counter = plugin.getConfig().getInt("waitingtime");
for (Player p : this.getCompleteList()){
try{
ColorTabAPI.clearTabStyle(p, Bukkit.getOnlinePlayers());
}
catch(Exception ex){
}
}
for (Player s : this.getCompleteList()){
for (Player p : this.getCompleteList()){
p.showPlayer(s);
}
}
updateSpecVisibility();
}
public void callClickEvent (InventoryClickEvent e){
this.specinv.onClick(e);
if (e.getInventory().getName().equals("Shop") || e.getWhoClicked().getOpenInventory().getTopInventory().getName().equals("Shop")){
e.setCancelled(true);
if (e.getInventory().getName().equals("Shop")){
shopgui.callClickEvent(e);
}
}
}
public void fixPlayers(){
ArrayList<Player> all = new ArrayList<Player>(getPlayerList());
all.addAll(specs);
for (Player p : all){
for (Player p2 : all){
if (p == p2){
for (Player pp : new ArrayList<Player>(traitors)){
if (pp == p2){
traitors.remove(pp);
}
}
for (Player pp : new ArrayList<Player>(innocents)){
if (pp == p2){
innocents.remove(pp);
}
}
for (Player pp : new ArrayList<Player>(detectives)){
if (pp == p2){
detectives.remove(pp);
}
}
for (Player pp : new ArrayList<Player>(specs)){
if (pp == p2){
specs.remove(pp);
}
}
for (Player pp : new ArrayList<Player>(players)){
if (pp == p2){
players.remove(pp);
}
}
players.add(p2);
}
}
}
}
public void callDeathByPlayerEvent(PlayerDeathEvent e){
e.setDeathMessage(null);
Player aim = e.getEntity();
Player killer = e.getEntity().getKiller();
createDeathStand(aim , aim.getLocation() , rm.getRole(aim) , killer , rm.getRole(killer));
for (ItemStack i : e.getDrops()){
dropped.add(aim.getWorld().dropItem(aim.getLocation(), i));
}
e.getDrops().clear();
e.setDroppedExp(0);
rm.dropKillMessage(aim, killer);
aim.sendMessage(plugin.mf.getMessage("playerdiedspec", true));
try {
BypassRespawnAPI.sendRespawnPacket(aim);
} catch (Exception e1) {
}
rm.removePlayer(aim);
setSpec(aim);
String end = rm.checkEnd();
if (end != null){
endGame(false);
}
}
ArrayList<Item> dropped = new ArrayList<Item>();
public void callDropEvent (PlayerDropItemEvent e){
if (plugin.getConfig().getBoolean("itemdrop")){
dropped.add(e.getItemDrop());
}
else{
e.setCancelled(true);
}
}
public void removeItems(){
for (Item i : dropped){
if (i != null){
i.remove();
}
}
dropped = new ArrayList<Item>();
}
public void callInteract (PlayerInteractEvent e){
if (specs.contains(e.getPlayer())){
e.setCancelled(true);
if (e.getAction() == Action.RIGHT_CLICK_BLOCK || e.getAction() == Action.RIGHT_CLICK_AIR){
Player p = e.getPlayer();
if ( p.getItemInHand().getType().equals(Material.COMPASS)){
this.specinv.openInventory(p , this.getPlayerList());
}
}
}
else if (e.getAction() == Action.RIGHT_CLICK_BLOCK || e.getAction() == Action.RIGHT_CLICK_AIR){
Player p = e.getPlayer();
if ( p.getItemInHand().getType().equals(Material.getMaterial(plugin.getConfig().getString("role_item_type"))) && this.gamestate.equals("waiting")){
p.performCommand("role");
e.setCancelled(true);
}
if ( p.getItemInHand().getType().equals(Material.getMaterial(plugin.getConfig().getString("leave_item_type"))) && this.gamestate.equals("waiting")){
p.performCommand("ttt leave");
e.setCancelled(true);
}
}
}
int testerscheduler = 0;
public ArrayList<Location> getCircle(Location center, double radius, int amount)
{
World world = center.getWorld();
double increment = (2 * Math.PI) / amount;
ArrayList<Location> locations = new ArrayList<Location>();
for(int i = 0;i < amount; i++)
{
double angle = i * increment;
double x = center.getX() + (radius * Math.cos(angle));
double z = center.getZ() + (radius * Math.sin(angle));
locations.add(new Location(world, x, center.getY(), z));
}
return locations;
}
HashMap<Location,Integer> legendscheduler = new HashMap<Location,Integer>();
HashMap<Integer,Integer> position = new HashMap<Integer,Integer>();
    /**
     * Turns a random ender-chest location into a "legendary" loot block and
     * starts a repeating task (every 3 ticks) that blinks the block between
     * sea lantern and redstone block while playing a particle effect that
     * walks around a 25-point circle centered on the block.
     */
    public void spawnLegendaryBlock(){
        try{
            // Pick a random previously-opened ender-chest location.
            Collections.shuffle(eblocks);
            Location loc = eblocks.get(0).getLocation();
            double x = loc.getX();
            double z = loc.getZ();
            // Offset the particle circle from the block corner; the sign-dependent
            // half-block shift presumably centers it on the block — TODO confirm.
            Location particle = loc.clone();
            particle.add(0, 0.5, 1);
            particle.add(x > 0 ? 0.5 : -0.5, 0.0, z > 0 ? 0.5 : -0.5);
            ArrayList<Location> particles = getCircle(particle, 1 , 25);
            // Already blinking here? Don't spawn a second task for the same block.
            if (loc.getBlock().getType() == Material.SEA_LANTERN || loc.getBlock().getType() == Material.REDSTONE_BLOCK) return;
            loc.getBlock().setType(Material.SEA_LANTERN);
            plugin.sd.playSound(particle.getWorld() , particle , "EXPLODE", "ENTITY_ENDERDRAGON_FIREBALL_EXPLODE" );
            BukkitScheduler scheduler = plugin.getServer().getScheduler();
            int id = 0;
            id = scheduler.scheduleSyncRepeatingTask(plugin, new Runnable() {
                @Override
                public void run() {
                    // Blink the block; if something else replaced it, cancel this task.
                    if (loc.getBlock().getType().equals(Material.SEA_LANTERN)){
                        loc.getBlock().setType(Material.REDSTONE_BLOCK);
                    }
                    else if (loc.getBlock().getType().equals(Material.REDSTONE_BLOCK)){
                        loc.getBlock().setType(Material.SEA_LANTERN);
                    }
                    else{
                        Bukkit.getScheduler().cancelTask(legendscheduler.get(loc));
                    }
                    // Advance one step around the particle circle (wraps at index 24).
                    int id = legendscheduler.get(loc);
                    particles.get(position.get(id)).getWorld().playEffect(particles.get(position.get(id)), Effect.HAPPY_VILLAGER, 2);
                    if (position.get(id) == 24){
                        position.put(id, 0);
                    }
                    else{
                        position.put(id, position.get(id) + 1 );
                    }
                }
            }, 3L, 3L);
            // Remember task id and particle index so the task can look itself up
            // and be cancelled later.
            position.put(id, 0);
            legendscheduler.put(loc , id);
        }catch(Exception ex){}
        // NOTE(review): the empty catch hides real failures (e.g. empty eblocks
        // list); consider logging.
    }
public boolean isLegendary (Location loc){
for (Block b : this.eblocks){
if (b.getLocation().getBlock().getLocation().equals(loc.getBlock().getLocation())) return true;
}
return false;
}
public void callClickChest (final PlayerInteractEvent e){
if (e.getAction() == Action.RIGHT_CLICK_BLOCK){
if (!(counter == plugin.getConfig().getInt("waitingtime") && players.size() < min)){
if (e.getClickedBlock().getType() == Material.CHEST){
if (plugin.getConfig().getBoolean("chestlimit.enable")) {
if (!this.openedChests.containsKey(e.getPlayer())) this.openedChests.put(e.getPlayer(), 0);
if (this.openedChests.get(e.getPlayer()) >= plugin.getConfig().getInt("chestlimit.amount")) {
e.getPlayer().sendMessage(plugin.mf.getMessage("toomanychestsopened", true));
return;
}
}
blocks.add(e.getClickedBlock());
if (plugin.wf.giveRandomChestItem(e.getPlayer() , "chest")){
e.getClickedBlock().setType(Material.AIR);
plugin.sd.playSound(e.getPlayer() , "CHEST_OPEN", "BLOCK_CHEST_OPEN" );
//e.getPlayer().playSound(e.getClickedBlock().getLocation(), Sound.CHEST_OPEN, 1, 1);
}
e.setCancelled(true);
}
else if (e.getClickedBlock().getType() == Material.ENDER_CHEST && gamestate != "waiting"){
eblocks.add(e.getClickedBlock());
if (plugin.wf.giveRandomChestItem(e.getPlayer() , "ec")){
e.getClickedBlock().setType(Material.AIR);
plugin.sd.playSound(e.getPlayer() , "CHEST_OPEN", "BLOCK_CHEST_OPEN" );
//e.getPlayer().playSound(e.getClickedBlock().getLocation(), Sound.CHEST_OPEN, 1, 1);
}
e.setCancelled(true);
}
else if ((e.getClickedBlock().getType() == Material.SEA_LANTERN || e.getClickedBlock().getType() == Material.REDSTONE_BLOCK)&& isLegendary(e.getClickedBlock().getLocation()) && gamestate != "waiting"){
eblocks.add(e.getClickedBlock());
if (plugin.wf.giveRandomChestItem(e.getPlayer() , "legend")){
e.getClickedBlock().setType(Material.AIR);
plugin.sd.playSound(e.getPlayer() , "LEVEL_UP", "ENTITY_PLAYER_LEVELUP" );
//e.getPlayer().playSound(e.getClickedBlock().getLocation(), Sound.CHEST_OPEN, 1, 1);
}
e.setCancelled(true);
}
else if (e.getClickedBlock().getType() == Material.STONE_BUTTON){
if (this.atester.isButton(e.getClickedBlock().getLocation()) && atester.enabled){
this.atester.testPlayer(e.getPlayer());
}
else if (tester){
if (!testeruse){
if (startplayers >= plugin.getConfig().getInt("minplayerstotest")){
if (e.getClickedBlock().getLocation().equals(testerbutton) && gamestate.equals("ingame")){
e.getPlayer().teleport(this.testerlocation);
testerdontmove = new ArrayList<Player>();
testerdontmove.add(e.getPlayer());
sendRadiusMessage(e.getPlayer(), plugin.mf.getMessage("enteredtester", true).replace("%player%", e.getPlayer().getName()));
testeruse = true;
this.testerscheduler = plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
public void run() {
testeruse = false;
testerdontmove.remove(e.getPlayer());
try{
if (traitors.contains(e.getPlayer())){
if (!e.getPlayer().getInventory().contains(new ShopItem().getSpoofer(false))){
testerLamp();
}
else{
e.getPlayer().getInventory().remove(new ShopItem().getSpoofer(false));
e.getPlayer().sendMessage(plugin.mf.getMessage("spoofer", true));
}
}
}
catch(Exception e){
}
}
}, 100L);
}
else{
if (!traitors.contains(e.getPlayer())){
e.setCancelled(true);
}
}
}
else{
if (e.getClickedBlock().getLocation().equals(testerbutton) && gamestate.equals("ingame")){
e.getPlayer().sendMessage(plugin.mf.getMessage("testerdisabled", true));
}
else if (!traitors.contains(e.getPlayer())){
e.setCancelled(true);
}
}
}
}
else{
if (!traitors.contains(e.getPlayer())){
e.setCancelled(true);
}
}
}
}
else if (e.getClickedBlock().getType() == Material.CHEST){
plugin.sd.playSound(e.getPlayer() , "CLICK", "BLOCK_LEVER_CLICK" );
//e.getPlayer().playSound(e.getClickedBlock().getLocation(), Sound.CLICK, 1, 1);
e.setCancelled(true);
}
else if (!traitors.contains(e.getPlayer())){
e.setCancelled(true);
}
}
}
public void tryDetectiveResearch(Player p , ArmorStand a){
p.sendMessage(plugin.mf.getMessage("detectiveresearch", true).replace("%killer%", a.getChestplate().getItemMeta().getDisplayName()));
}
public void removeInventoryItems(PlayerInventory inv, Material type, int amount) {
for (ItemStack is : inv.getContents()) {
if (is != null && is.getType() == type) {
int newamount = is.getAmount() - amount;
if (newamount > 0) {
is.setAmount(newamount);
break;
} else {
inv.remove(is);
amount = -newamount;
if (amount == 0) break;
}
}
}
}
public void callArrowHitEvent (ProjectileHitEvent e){
Player p = (Player)e.getEntity().getShooter();
if (p.getInventory().contains(Material.TNT) && traitors.contains(p)){
removeInventoryItems(p.getInventory() , Material.TNT , 1);
plugin.sd.playSound(e.getEntity().getWorld(), e.getEntity().getLocation() , "EXPLODE", "ENTITY_GENERIC_EXPLODE" );
e.getEntity().getWorld().playEffect(e.getEntity().getLocation(), Effect.EXPLOSION , 17);
//e.getEntity().getWorld().playSound(e.getEntity().getLocation(), Sound.EXPLODE, 2, 1);
List<Entity> list = e.getEntity().getNearbyEntities(1.5, 1.5, 1.5);
for (Entity en : list){
if (en instanceof Player){
((Player) en).addPotionEffect(new PotionEffect (PotionEffectType.HARM , 1 , 1));
}
}
}
}
public void testerLamp (){
lamp1.getBlock().setType(Material.GLOWSTONE);
lamp2.getBlock().setType(Material.GLOWSTONE);
plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
public void run() {
lamp1.getBlock().setType(Material.REDSTONE_LAMP_OFF);
lamp2.getBlock().setType(Material.REDSTONE_LAMP_OFF);
}
}, 40L);
}
public static int randInt(int min, int max) {
Random rand = new Random();
int randomNum = rand.nextInt((max - min) + 1) + min;
return randomNum;
}
public String getRoleColor (String role){
if (role.equals("TRAITOR")){
return ChatColor.RED+"";
}
if (role.equals("INNOCENT")){
return ChatColor.GREEN+"";
}
if (role.equals("DETECTIVE")){
return ChatColor.BLUE+"";
}
else{
return "";
}
}
    /**
     * Handles right-clicks on entities, primarily corpse armour stands:
     * sneaking players can pick up / drop a stand (carry the corpse);
     * detectives holding a stick consume it to reveal the killer; otherwise
     * the corpse is identified — its helmet display name (encoded as
     * "ROLE#player") is decoded, announced and replaced by a named skull.
     */
    public void callRkEEvent(PlayerInteractAtEntityEvent e){
        if (e.getRightClicked().getType() == EntityType.ARMOR_STAND && e.getPlayer().isSneaking() && e.getPlayer().getPassenger() == null){
            // Sneak + click on a stand: carry it as a passenger.
            e.getPlayer().setPassenger(e.getRightClicked());
        }
        else if (e.getPlayer().isSneaking() || e.getPlayer().getPassenger() != null){
            // Sneak-click elsewhere (or already carrying something): drop it.
            e.getPlayer().eject();
        }
        else if (e.getRightClicked().getType() == EntityType.ARMOR_STAND){
            ArmorStand am = (ArmorStand)e.getRightClicked();
            if (detectives.contains(e.getPlayer()) && e.getPlayer().getItemInHand().getType().equals(Material.STICK)){
                try{
                    // Detective research consumes one stick.
                    tryDetectiveResearch(e.getPlayer() , am);
                    this.removeInventoryItems(e.getPlayer().getInventory(), Material.STICK , 1);
                }
                catch(Exception ex){}
            }
            else{
                e.setCancelled(true);
                // Optionally restrict corpse identification to detectives.
                if (plugin.getConfig().getBoolean("only_detective_corpsescan")){
                    if (!detectives.contains(e.getPlayer())){
                        return;
                    }
                }
                // Helmet display name carries "ROLE#playername".
                ItemStack helmet = am.getHelmet();
                String name = helmet.getItemMeta().getDisplayName();
                String[] parts = name.split("#");
                String role = parts[0];
                String player = parts[1];
                if (role.equals("TRAITOR") || role.equals("INNOCENT") || role.equals("DETECTIVE")){
                    // A leather helmet marks a not-yet-identified corpse: announce
                    // to the whole arena; repeated scans only notify the clicker.
                    if (am.getHelmet().getType().equals(Material.LEATHER_HELMET)){
                        sendArenaMessage(plugin.mf.getMessage("corpsefound", true).replace("%role%", plugin.getDisplay(role , false)).replace("%player%", player));
                    }
                    else{
                        e.getPlayer().sendMessage(plugin.mf.getMessage("corpsefoundtwice", true).replace("%role%", plugin.getDisplay(role , false)).replace("%player%", player));
                    }
                    // Replace the marker helmet with the victim's skull and show
                    // a coloured name tag above the corpse.
                    ItemStack skull = new ItemStack(Material.SKULL_ITEM);
                    String headName = player;
                    skull.setDurability((short)3);
                    SkullMeta sm = (SkullMeta)skull.getItemMeta();
                    sm.setOwner(headName);
                    sm.setDisplayName(name);
                    skull.setItemMeta(sm);
                    am.setHelmet(skull);
                    am.setCustomName(getRoleColor(role)+player);
                    am.setCustomNameVisible(true);
                    e.setCancelled(true);
                }
                e.setCancelled(true);
            }
        }
    }
public void callDeathEvent(PlayerDeathEvent e){
Player p = e.getEntity();
if (!specs.contains(p)){
e.setDeathMessage(null);
rm.dropKillMessage(p, null);
for (ItemStack i : e.getDrops()){
dropped.add(p.getWorld().dropItem(p.getLocation(), i));
}
e.getDrops().clear();
e.setDroppedExp(0);
createDeathStand(p , p.getLocation() , rm.getRole(p) , null , null);
try {
BypassRespawnAPI.sendRespawnPacket(p);
} catch (Exception e1) {
}
rm.removePlayer(p);
setSpec(p);
String end = rm.checkEnd();
if (end != null){
if (end == "traitors"){
endGame(false);
}
else{
endGame(false);
}
}
}
}
@SuppressWarnings("deprecation")
public void leave(Player p , boolean silent){
if (!silent){
rm.dropKillMessage(p, null);
}
rm.removePlayer(p);
p.eject();
p.setGameMode(GameMode.SURVIVAL);
p.teleport(back);
p.getInventory().clear();
p.setHealth(20);
p.setLevel(0);
p.setAllowFlight(false);
String end = rm.checkEnd();
if (end != null){
if (end == "traitors"){
endGame(false);
}
else{
endGame(false);
}
}
for (Player p2 :Bukkit.getOnlinePlayers()){
p.showPlayer(p2);
p2.showPlayer(p);
}
updateSpecVisibility();
if (plugin.getConfig().getBoolean("hide_players_outside_arena")){
for (Player p2 : this.getCompleteList()){
p2.hidePlayer(p);
}
for (Player p2 :Bukkit.getOnlinePlayers()){
if (plugin.m.searchPlayer(p2) == null){
p.showPlayer(p2);
}
}
}
p.getInventory().setContents(inventorys.get(p));
p.getInventory().setArmorContents(armorinventorys.get(p));
team.removePlayer(p);
}
    /** Stores the location of the arena's join/status sign. */
    public void setSign (Location sign){
        this.sign = sign;
    }
public void endGame(boolean preend){
ArenaStateChangeEvent event = new ArenaStateChangeEvent(this , "ingame" , "end");
Bukkit.getServer().getPluginManager().callEvent(event);
gamestate = "end";
counter = plugin.getConfig().getInt("restarttime");
shop = new Shop();
spoofs = new ArrayList<Player>();
for (Player p : getPlayerList()){
p.eject();
p.getInventory().clear();
p.setLevel(0);
}
for (Block b : blocks){
b.setType(Material.CHEST);
if (b instanceof Chest){
Chest chest = (Chest) b.getState();
chest.getInventory().clear();
}
}
for (Block b : eblocks){
b.setType(Material.ENDER_CHEST);
}
for (Entity e : creepers){
if (e != null){
e.remove();
}
}
for (ArmorStand am : stands){
am.remove();
}
removeItems();
// for (Player p : detectives){
// rm.boardi.resetScores(p);
// }
// for (Player p : innocents){
// rm.boardi.resetScores(p);
// }
}
    /** @return the current arena state string ("waiting", "ingame" or "end"). */
    public String state()
    {
        return this.gamestate;
    }
public void addPlayer(Player p)
{
join (p);
}
    /**
     * Puts a player into spectator state, or removes them from the arena when
     * spectating is disabled in the config. Depending on {@code new_specmode}
     * either a legacy "survival + flight" mode or the native SPECTATOR
     * gamemode is used.
     */
    public void setSpec (final Player p){
        if (plugin.getConfig().getBoolean("spectatormode")){
            specs.add(p);
            p.eject();
            if (new_specmode){
                p.setGameMode(GameMode.SURVIVAL);
                p.setAllowFlight(true);
                p.setFlying(true);
            }
            else
                p.setGameMode(GameMode.SPECTATOR);
            p.teleport(spectator);
            // Re-apply gamemode/teleport 5 ticks later — presumably because
            // respawn handling can overwrite it (TODO confirm); also hands out
            // the spectator compass again.
            plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
                public void run() {
                    if (new_specmode){
                        p.setGameMode(GameMode.SURVIVAL);
                        p.setAllowFlight(true);
                    }
                    else
                        p.setGameMode(GameMode.SPECTATOR);
                    p.teleport(spectator);
                    p.getInventory().addItem(new ItemBuilder(Material.COMPASS).setName(plugin.mf.getMessage("specitem", true)).toItemStack());
                }
            },5L);
            p.getInventory().addItem(new ItemBuilder(Material.COMPASS).setName(plugin.mf.getMessage("specitem", true)).toItemStack());
            updateSpecVisibility();
            try{
                // Optional integration: strip coloured tab-list styling.
                ColorTabAPI.clearTabStyle(p, Bukkit.getOnlinePlayers());
            }
            catch(Exception ex){
            }
        }
        else{
            this.leave(p, true);
        }
    }
HashMap<String , String> realnicks = new HashMap<String , String>();
    /**
     * Adds a player to this arena. Handles the arena-full case (premium
     * bypass, optional BungeeCord fallback server), snapshots the player's
     * inventory/armour/karma for later restoration, optionally applies a
     * random nickname, and then either places the player in the waiting
     * phase or attaches them as a spectator to a running game.
     */
    @SuppressWarnings("deprecation")
    public void join(Player p){
        updateSpecVisibility();
        // Arena full? Players without the premium permission are bounced.
        if (this.max_players != 0){
            if (this.max_players <= this.getCompleteSize()){
                if (!p.hasPermission("ttt.premiumjoin")){
                    if (!plugin.getConfig().getString("bungeeserver").equals("false")){
                        System.out.println("[MTTT] Trying to send Player to Server "+plugin.getConfig().getString("bungeeserver"));
                        ByteArrayOutputStream b = new ByteArrayOutputStream();
                        DataOutputStream out = new DataOutputStream(b);
                        try {
                            out.writeUTF("Connect");
                            out.writeUTF(plugin.getConfig().getString("bungeeserver"));
                        } catch (IOException e) {
                        }
                        p.sendPluginMessage(plugin, "BungeeCord", b.toByteArray());
                        // Kick 20 ticks later in case the Bungee transfer failed.
                        plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() {
                            public void run() {
                                if (p.isOnline()){
                                    p.kickPlayer(plugin.prefix + ChatColor.RED + " This Server is full!");
                                }
                            }
                        }, 20L);
                    }
                    else{
                        p.sendMessage(plugin.mf.getMessage("arenafull", true));
                    }
                    return;
                }
            }
        }
        // Snapshot state that is restored when the player leaves.
        inventorys.put(p, p.getInventory().getContents());
        startkarma.put(p, plugin.karma.getKarma(p));
        openedChests.put(p, 0);
        armorinventorys.put(p, p.getInventory().getArmorContents());
        // Optional random-nickname integration.
        if (plugin.nicknamer){
            NickSession session = NickAPI.getSession(p);
            String nick = session.getRandomNickname();
            session.setName(nick, true);
            realnicks.put(session.holder.getUniqueId().toString() , session.current_nick);
            p.sendMessage(plugin.mf.getMessage("randomnick", true).replaceAll("%nick%", session.current_nick));
        }
        team.addPlayer(p);
        if (gamestate.equals("waiting")){
            sendArenaMessage(plugin.mf.getMessage("join", true).replace("%player%", p.getName()));
            players.add(p);
            p.getInventory().clear();
            p.getInventory().setArmorContents(new ItemStack[p.getInventory().getArmorContents().length]);
            // Configurable hotbar items for role info and leaving the arena
            // (slots in the config are 1-based).
            int slotrole = plugin.getConfig().getInt("roleitem_slot");
            int slotleave = plugin.getConfig().getInt("leaveitem_slot");
            if (plugin.getConfig().getBoolean("role_item")){
                p.getInventory().setItem(slotrole-1, new ItemBuilder(Material.getMaterial(plugin.getConfig().getString("role_item_type"))).setName(plugin.mf.getMessage("role_item", true)).toItemStack());
            }
            if (plugin.getConfig().getBoolean("leave_item")){
                p.getInventory().setItem(slotleave-1, new ItemBuilder(Material.getMaterial(plugin.getConfig().getString("leave_item_type"))).setName(plugin.mf.getMessage("leave_item", true)).toItemStack());
            }
            if (lobby){
                this.movetoLobby(p);
            }
            else if (plugin.ms.checkReady(this)){
                p.teleport(plugin.ms.randomSpawn(this));
            }
            else{
                p.teleport(game);
            }
        }
        else{
            // Game already running: join as spectator if allowed.
            if (plugin.getConfig().getBoolean("spectatormode")){
                setSpec(p);
            }
            else{
                p.sendMessage(plugin.mf.getMessage("specdisabled", true));
            }
        }
        // Visibility: arena members only see fellow arena members.
        if (plugin.getConfig().getBoolean("hide_players_outside_arena")){
            for (Player p2 : Bukkit.getOnlinePlayers()){
                p.hidePlayer(p2);
            }
            for (Player p2 : this.getCompleteList()){
                p.showPlayer(p2);
                p2.showPlayer(p);
            }
        }
    }
}
| apache-2.0 |
crate/crate | server/src/test/java/io/crate/planner/PlannerTest.java | 4555 | /*
* Licensed to Crate.io GmbH ("Crate") under one or more contributor
* license agreements. See the NOTICE file distributed with this work for
* additional information regarding copyright ownership. Crate licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial agreement.
*/
package io.crate.planner;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.core.Is.is;
import static org.junit.jupiter.api.Assertions.assertThrows;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import org.elasticsearch.common.Randomness;
import org.junit.Before;
import org.junit.Test;
import io.crate.action.sql.SessionContext;
import io.crate.data.Row1;
import io.crate.exceptions.ConversionException;
import io.crate.execution.dsl.phases.NodeOperationTree;
import io.crate.expression.symbol.Literal;
import io.crate.metadata.CoordinatorTxnCtx;
import io.crate.metadata.RoutingProvider;
import io.crate.planner.node.ddl.UpdateSettingsPlan;
import io.crate.planner.operators.LogicalPlan;
import io.crate.planner.operators.LogicalPlanner;
import io.crate.planner.operators.SubQueryResults;
import io.crate.sql.tree.Assignment;
import io.crate.test.integration.CrateDummyClusterServiceUnitTest;
import io.crate.testing.Asserts;
import io.crate.testing.SQLExecutor;
import static org.mockito.Mockito.mock;
/**
 * Planner unit tests that run against a dummy cluster service: SET statement
 * planning, planner-context execution-phase id allocation, DEALLOCATE
 * handling, and error reporting for unconvertible parameters.
 */
public class PlannerTest extends CrateDummyClusterServiceUnitTest {

    private SQLExecutor e;

    @Before
    public void prepare() {
        e = SQLExecutor.builder(clusterService).build();
    }

    @Test
    public void testSetPlan() throws Exception {
        // PERSISTENT settings keep their assignments and are flagged persistent.
        UpdateSettingsPlan plan = e.plan("set GLOBAL PERSISTENT stats.jobs_log_size=1024");
        assertThat(plan.settings(), contains(new Assignment<>(Literal.of("stats.jobs_log_size"), List.of(Literal.of(1024)))));
        assertThat(plan.isPersistent(), is(true));

        // TRANSIENT settings carry all assignments but are not persistent.
        plan = e.plan("set GLOBAL TRANSIENT stats.enabled=false,stats.jobs_log_size=0");
        assertThat(plan.settings().size(), is(2));
        assertThat(plan.isPersistent(), is(false));
    }

    @Test
    public void testSetSessionTransactionModeIsNoopPlan() throws Exception {
        // Transaction characteristics are accepted but have no effect.
        Plan plan = e.plan("SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL READ UNCOMMITTED");
        assertThat(plan, instanceOf(NoopPlan.class));
    }

    @Test
    public void testExecutionPhaseIdSequence() throws Exception {
        // Each planner context hands out execution phase ids starting at 0.
        PlannerContext plannerContext = new PlannerContext(
            clusterService.state(),
            new RoutingProvider(Randomness.get().nextInt(), Collections.emptyList()),
            UUID.randomUUID(),
            new CoordinatorTxnCtx(SessionContext.systemSessionContext()),
            e.nodeCtx,
            0,
            null
        );

        assertThat(plannerContext.nextExecutionPhaseId(), is(0));
        assertThat(plannerContext.nextExecutionPhaseId(), is(1));
    }

    @Test
    public void testDeallocate() {
        // DEALLOCATE is handled in the session layer; planning is a no-op.
        assertThat(e.plan("deallocate all"), instanceOf(NoopPlan.class));
        assertThat(e.plan("deallocate test_prep_stmt"), instanceOf(NoopPlan.class));
    }

    @Test
    public void test_invalid_any_param_leads_to_clear_error_message() throws Exception {
        // Binding a String where a text_array is required must surface a
        // ConversionException naming the offending value and target type.
        LogicalPlan plan = e.logicalPlan("select name = ANY(?) from sys.cluster");
        Asserts.assertThrowsMatches(
            () -> {
                LogicalPlanner.getNodeOperationTree(
                    plan,
                    mock(DependencyCarrier.class),
                    e.getPlannerContext(clusterService.state()),
                    new Row1("foo"),
                    SubQueryResults.EMPTY
                );
            },
            ConversionException.class,
            "Cannot cast value `foo` to type `text_array`"
        );
    }
}
| apache-2.0 |
rbieniek/BGP4J | osgi-bundles/bgp4j-common-network/src/main/java/org/bgp4j/net/packets/KeepalivePacket.java | 912 | /**
* Copyright 2012 Rainer Bieniek (Rainer.Bieniek@web.de)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.bgp4j.net.packets;
import org.bgp4j.net.BGPv4Constants;
/**
 * BGP KEEPALIVE protocol packet. This subclass only contributes its message
 * type code; it declares no payload fields of its own (encoding is handled by
 * the {@link BGPv4Packet} base class).
 *
 * @author Rainer Bieniek (Rainer.Bieniek@web.de)
 */
public class KeepalivePacket extends BGPv4Packet {

	/**
	 * @return the BGP message type code for KEEPALIVE messages
	 */
	@Override
	public int getType() {
		return BGPv4Constants.BGP_PACKET_TYPE_KEEPALIVE;
	}
}
| apache-2.0 |
lshmouse/hbase | hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFile.java | 57082 | /**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import java.io.DataInput;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.Map;
import java.util.SortedSet;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HDFSBlocksDistribution;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.regionserver.compactions.Compactor;
import org.apache.hadoop.hbase.util.BloomFilter;
import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.BloomFilterWriter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.io.WritableUtils;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Ordering;
/**
* A Store data file. Stores usually have one or more of these files. They
* are produced by flushing the memstore to disk. To
* create, instantiate a writer using {@link StoreFile.WriterBuilder}
* and append data. Be sure to add any metadata before calling close on the
* Writer (Use the appendMetadata convenience methods). On close, a StoreFile
* is sitting in the Filesystem. To refer to it, create a StoreFile instance
* passing filesystem and path. To read, call {@link #createReader()}.
* <p>StoreFiles may also reference store files in another Store.
*
* The reason for this weird pattern where you use a different instance for the
* writer and a reader is that we write once but read a lot more.
*/
@InterfaceAudience.LimitedPrivate("Coprocessor")
public class StoreFile {
private static final Log LOG = LogFactory.getLog(StoreFile.class.getName());
// Keys for fileinfo values in HFile
/** Max Sequence ID in FileInfo */
public static final byte [] MAX_SEQ_ID_KEY = Bytes.toBytes("MAX_SEQ_ID_KEY");
/** Major compaction flag in FileInfo */
public static final byte[] MAJOR_COMPACTION_KEY =
Bytes.toBytes("MAJOR_COMPACTION_KEY");
/** Minor compaction flag in FileInfo */
public static final byte[] EXCLUDE_FROM_MINOR_COMPACTION_KEY =
Bytes.toBytes("EXCLUDE_FROM_MINOR_COMPACTION");
/** Bloom filter Type in FileInfo */
public static final byte[] BLOOM_FILTER_TYPE_KEY =
Bytes.toBytes("BLOOM_FILTER_TYPE");
/** Delete Family Count in FileInfo */
public static final byte[] DELETE_FAMILY_COUNT =
Bytes.toBytes("DELETE_FAMILY_COUNT");
/** Last Bloom filter key in FileInfo */
private static final byte[] LAST_BLOOM_KEY = Bytes.toBytes("LAST_BLOOM_KEY");
/** Key for Timerange information in metadata*/
public static final byte[] TIMERANGE_KEY = Bytes.toBytes("TIMERANGE");
/** Key for timestamp of earliest-put in metadata*/
public static final byte[] EARLIEST_PUT_TS = Bytes.toBytes("EARLIEST_PUT_TS");
private final StoreFileInfo fileInfo;
private final FileSystem fs;
// Block cache configuration and reference.
private final CacheConfig cacheConf;
// Keys for metadata stored in backing HFile.
// Set when we obtain a Reader.
private long sequenceid = -1;
// max of the MemstoreTS in the KV's in this store
// Set when we obtain a Reader.
private long maxMemstoreTS = -1;
  /** @return the largest memstore timestamp among this file's KVs (set when a reader is obtained). */
  public long getMaxMemstoreTS() {
    return maxMemstoreTS;
  }
  /** Sets the largest memstore timestamp recorded for this file's KVs. */
  public void setMaxMemstoreTS(long maxMemstoreTS) {
    this.maxMemstoreTS = maxMemstoreTS;
  }
// If true, this file was product of a major compaction. Its then set
// whenever you get a Reader.
private AtomicBoolean majorCompaction = null;
// If true, this file should not be included in minor compactions.
// It's set whenever you get a Reader.
private boolean excludeFromMinorCompaction = false;
/** Meta key set when store file is a result of a bulk load */
public static final byte[] BULKLOAD_TASK_KEY =
Bytes.toBytes("BULKLOAD_SOURCE_TASK");
public static final byte[] BULKLOAD_TIME_KEY =
Bytes.toBytes("BULKLOAD_TIMESTAMP");
/**
* Map of the metadata entries in the corresponding HFile
*/
private Map<byte[], byte[]> metadataMap;
// StoreFile.Reader
private volatile Reader reader;
/**
* Bloom filter type specified in column family configuration. Does not
* necessarily correspond to the Bloom filter type present in the HFile.
*/
private final BloomType cfBloomType;
/**
* Constructor, loads a reader and it's indices, etc. May allocate a
* substantial amount of ram depending on the underlying files (10-20MB?).
*
* @param fs The current file system to use.
* @param p The path of the file.
* @param conf The current configuration.
* @param cacheConf The cache configuration and block cache reference.
* @param cfBloomType The bloom type to use for this store file as specified
* by column family configuration. This may or may not be the same
* as the Bloom filter type actually present in the HFile, because
* column family configuration might change. If this is
* {@link BloomType#NONE}, the existing Bloom filter is ignored.
* @throws IOException When opening the reader fails.
*/
  public StoreFile(final FileSystem fs, final Path p, final Configuration conf,
        final CacheConfig cacheConf, final BloomType cfBloomType) throws IOException {
    // Derive the StoreFileInfo from the path and delegate to the main constructor.
    this(fs, new StoreFileInfo(conf, fs, p), conf, cacheConf, cfBloomType);
  }
/**
* Constructor, loads a reader and it's indices, etc. May allocate a
* substantial amount of ram depending on the underlying files (10-20MB?).
*
* @param fs The current file system to use.
* @param fileInfo The store file information.
* @param conf The current configuration.
* @param cacheConf The cache configuration and block cache reference.
* @param cfBloomType The bloom type to use for this store file as specified
* by column family configuration. This may or may not be the same
* as the Bloom filter type actually present in the HFile, because
* column family configuration might change. If this is
* {@link BloomType#NONE}, the existing Bloom filter is ignored.
* @throws IOException When opening the reader fails.
*/
  public StoreFile(final FileSystem fs, final StoreFileInfo fileInfo, final Configuration conf,
      final CacheConfig cacheConf, final BloomType cfBloomType) throws IOException {
    this.fs = fs;
    this.fileInfo = fileInfo;
    this.cacheConf = cacheConf;

    // Honour the column-family Bloom type only when general Bloom filters are
    // enabled globally; otherwise force NONE so any Bloom filter present in
    // the HFile is ignored.
    if (BloomFilterFactory.isGeneralBloomEnabled(conf)) {
      this.cfBloomType = cfBloomType;
    } else {
      LOG.info("Ignoring bloom filter check for file " + this.getPath() + ": " +
          "cfBloomType=" + cfBloomType + " (disabled in config)");
      this.cfBloomType = BloomType.NONE;
    }
  }
/**
* Clone
* @param other The StoreFile to clone from
*/
  public StoreFile(final StoreFile other) {
    // Shallow copy: shares fs, fileInfo, cache config and Bloom type; note
    // that reader state is not carried over here.
    this.fs = other.fs;
    this.fileInfo = other.fileInfo;
    this.cacheConf = other.cacheConf;
    this.cfBloomType = other.cfBloomType;
  }
  /**
   * @return the {@link StoreFileInfo} this StoreFile was created from
   */
  public StoreFileInfo getFileInfo() {
    return this.fileInfo;
  }
  /**
   * @return the path of this store file (delegates to the backing
   *     {@link StoreFileInfo})
   */
  public Path getPath() {
    return this.fileInfo.getPath();
  }
/**
* @return Returns the qualified path of this StoreFile
*/
public Path getQualifiedPath() {
return this.fileInfo.getPath().makeQualified(fs);
}
  /**
   * @return True if this is a StoreFile Reference; call after {@link #open()}
   * else may get wrong answer.
   */
  public boolean isReference() {
    // Delegates to the file info, which knows how the file was named/created.
    return this.fileInfo.isReference();
  }
/**
* @return True if this file was made by a major compaction.
*/
public boolean isMajorCompaction() {
if (this.majorCompaction == null) {
throw new NullPointerException("This has not been set yet");
}
return this.majorCompaction.get();
}
  /**
   * @return True if this file should not be part of a minor compaction.
   */
  public boolean excludeFromMinorCompaction() {
    // Loaded from the EXCLUDE_FROM_MINOR_COMPACTION_KEY file metadata in open().
    return this.excludeFromMinorCompaction;
  }
  /**
   * @return This files maximum edit sequence id.
   */
  public long getMaxSequenceId() {
    // Derived in open() from MAX_SEQ_ID_KEY metadata (+1 for top references).
    return this.sequenceid;
  }
public long getModificationTimeStamp() throws IOException {
return (fileInfo == null) ? 0 : fileInfo.getModificationTime();
}
  /**
   * Only used by the Striped Compaction Policy
   * @param key the HFile metadata key to look up
   * @return value associated with the metadata key, or null if absent
   */
  public byte[] getMetadataValue(byte[] key) {
    return metadataMap.get(key);
  }
/**
* Return the largest memstoreTS found across all storefiles in
* the given list. Store files that were created by a mapreduce
* bulk load are ignored, as they do not correspond to any specific
* put operation, and thus do not have a memstoreTS associated with them.
* @return 0 if no non-bulk-load files are provided or, this is Store that
* does not yet have any store files.
*/
public static long getMaxMemstoreTSInList(Collection<StoreFile> sfs) {
long max = 0;
for (StoreFile sf : sfs) {
if (!sf.isBulkLoadResult()) {
max = Math.max(max, sf.getMaxMemstoreTS());
}
}
return max;
}
/**
* Return the highest sequence ID found across all storefiles in
* the given list.
* @param sfs
* @return 0 if no non-bulk-load files are provided or, this is Store that
* does not yet have any store files.
*/
public static long getMaxSequenceIdInList(Collection<StoreFile> sfs) {
long max = 0;
for (StoreFile sf : sfs) {
max = Math.max(max, sf.getMaxSequenceId());
}
return max;
}
/**
* Check if this storefile was created by bulk load.
* When a hfile is bulk loaded into HBase, we append
* '_SeqId_<id-when-loaded>' to the hfile name, unless
* "hbase.mapreduce.bulkload.assign.sequenceNumbers" is
* explicitly turned off.
* If "hbase.mapreduce.bulkload.assign.sequenceNumbers"
* is turned off, fall back to BULKLOAD_TIME_KEY.
* @return true if this storefile was created by bulk load.
*/
boolean isBulkLoadResult() {
boolean bulkLoadedHFile = false;
String fileName = this.getPath().getName();
int startPos = fileName.indexOf("SeqId_");
if (startPos != -1) {
bulkLoadedHFile = true;
}
return bulkLoadedHFile || metadataMap.containsKey(BULKLOAD_TIME_KEY);
}
/**
* Return the timestamp at which this bulk load file was generated.
*/
public long getBulkLoadTimestamp() {
byte[] bulkLoadTimestamp = metadataMap.get(BULKLOAD_TIME_KEY);
return (bulkLoadTimestamp == null) ? 0 : Bytes.toLong(bulkLoadTimestamp);
}
  /**
   * @return the cached value of HDFS blocks distribution. The cached value is
   * calculated when store file is opened.
   */
  public HDFSBlocksDistribution getHDFSBlockDistribution() {
    // Cached on the StoreFileInfo; no recomputation happens here.
    return this.fileInfo.getHDFSBlockDistribution();
  }
/**
 * Opens reader on this store file. Called by Constructor.
 * Loads the file-info map, derives the sequence id (from metadata or, for
 * bulk-loaded files, from the file name), reads compaction flags, and loads
 * Bloom filters and the timestamp range tracker.
 * @return Reader for the store file.
 * @throws IOException
 * @see #closeReader(boolean)
 */
private Reader open() throws IOException {
  if (this.reader != null) {
    throw new IllegalAccessError("Already open");
  }
  // Open the StoreFile.Reader
  this.reader = fileInfo.open(this.fs, this.cacheConf);
  // Load up indices and fileinfo. This also loads Bloom filter type.
  metadataMap = Collections.unmodifiableMap(this.reader.loadFileInfo());
  // Read in our metadata.
  byte [] b = metadataMap.get(MAX_SEQ_ID_KEY);
  if (b != null) {
    // By convention, if halfhfile, top half has a sequence number > bottom
    // half. Thats why we add one in below. Its done for case the two halves
    // are ever merged back together --rare. Without it, on open of store,
    // since store files are distinguished by sequence id, the one half would
    // subsume the other.
    this.sequenceid = Bytes.toLong(b);
    if (fileInfo.isTopReference()) {
      this.sequenceid += 1;
    }
  }
  if (isBulkLoadResult()){
    // generate the sequenceId from the fileName
    // fileName is of the form <randomName>_SeqId_<id-when-loaded>_
    String fileName = this.getPath().getName();
    // Use lastIndexOf() to get the last, most recent bulk load seqId.
    int startPos = fileName.lastIndexOf("SeqId_");
    if (startPos != -1) {
      // Sequence id is the number between "SeqId_" and the next underscore.
      this.sequenceid = Long.parseLong(fileName.substring(startPos + 6,
          fileName.indexOf('_', startPos + 6)));
      // Handle reference files as done above.
      if (fileInfo.isTopReference()) {
        this.sequenceid += 1;
      }
    }
    this.reader.setBulkLoaded(true);
  }
  this.reader.setSequenceID(this.sequenceid);
  b = metadataMap.get(HFile.Writer.MAX_MEMSTORE_TS_KEY);
  if (b != null) {
    this.maxMemstoreTS = Bytes.toLong(b);
  }
  b = metadataMap.get(MAJOR_COMPACTION_KEY);
  if (b != null) {
    boolean mc = Bytes.toBoolean(b);
    if (this.majorCompaction == null) {
      this.majorCompaction = new AtomicBoolean(mc);
    } else {
      this.majorCompaction.set(mc);
    }
  } else {
    // Presume it is not major compacted if it doesn't explicity say so
    // HFileOutputFormat explicitly sets the major compacted key.
    this.majorCompaction = new AtomicBoolean(false);
  }
  b = metadataMap.get(EXCLUDE_FROM_MINOR_COMPACTION_KEY);
  this.excludeFromMinorCompaction = (b != null && Bytes.toBoolean(b));
  BloomType hfileBloomType = reader.getBloomFilterType();
  if (cfBloomType != BloomType.NONE) {
    // Column family wants a Bloom: load it, and warn if the on-disk type
    // disagrees with the CF configuration.
    reader.loadBloomfilter(BlockType.GENERAL_BLOOM_META);
    if (hfileBloomType != cfBloomType) {
      LOG.info("HFile Bloom filter type for "
          + reader.getHFileReader().getName() + ": " + hfileBloomType
          + ", but " + cfBloomType + " specified in column family "
          + "configuration");
    }
  } else if (hfileBloomType != BloomType.NONE) {
    LOG.info("Bloom filter turned off by CF config for "
        + reader.getHFileReader().getName());
  }
  // load delete family bloom filter
  reader.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META);
  try {
    byte [] timerangeBytes = metadataMap.get(TIMERANGE_KEY);
    if (timerangeBytes != null) {
      this.reader.timeRangeTracker = new TimeRangeTracker();
      Writables.copyWritable(timerangeBytes, this.reader.timeRangeTracker);
    }
  } catch (IllegalArgumentException e) {
    // Corrupt time range metadata is non-fatal; a null tracker simply
    // disables the time range filter.
    LOG.error("Error reading timestamp range data from meta -- " +
        "proceeding without", e);
    this.reader.timeRangeTracker = null;
  }
  return this.reader;
}
/**
 * Lazily open and cache the reader for this store file.
 * @return Reader for StoreFile. creates if necessary
 * @throws IOException if opening fails; a secondary failure during cleanup
 *     of the half-open reader is deliberately suppressed so the root cause
 *     is what propagates
 */
public Reader createReader() throws IOException {
  if (this.reader != null) {
    return this.reader;
  }
  try {
    this.reader = open();
  } catch (IOException e) {
    try {
      // Best-effort cleanup; keep the original open() failure.
      this.closeReader(true);
    } catch (IOException ignored) {
      // Intentionally swallowed: rethrowing would mask the root cause.
    }
    throw e;
  }
  return this.reader;
}
/**
 * @return Current reader. Must call createReader first else returns null.
 * @see #createReader()
 */
public Reader getReader() {
  return this.reader;
}
/**
 * Close the underlying reader, if one is currently open; no-op otherwise.
 * @param evictOnClose whether to evict blocks belonging to this file
 * @throws IOException if closing the reader fails
 */
public synchronized void closeReader(boolean evictOnClose)
    throws IOException {
  if (this.reader == null) {
    return;
  }
  this.reader.close(evictOnClose);
  this.reader = null;
}
/**
 * Delete this file: closes the reader (evicting cached blocks) and then
 * recursively removes the file from the filesystem.
 * @throws IOException
 */
public void deleteReader() throws IOException {
  closeReader(true);
  this.fs.delete(getPath(), true);
}
// Delegates to the file info's string form (the store file path).
@Override
public String toString() {
  return this.fileInfo.toString();
}
/**
 * @return a length description of this StoreFile, suitable for debug output
 */
public String toStringDetailed() {
  // Hoist the repeated isBulkLoadResult() call: it re-scans the file name
  // and metadata map on every invocation.
  boolean bulkLoaded = isBulkLoadResult();
  StringBuilder sb = new StringBuilder();
  sb.append(this.getPath().toString());
  sb.append(", isReference=").append(isReference());
  sb.append(", isBulkLoadResult=").append(bulkLoaded);
  if (bulkLoaded) {
    sb.append(", bulkLoadTS=").append(getBulkLoadTimestamp());
  } else {
    sb.append(", seqid=").append(getMaxSequenceId());
  }
  sb.append(", majorCompaction=").append(isMajorCompaction());
  return sb.toString();
}
/**
 * Fluent builder for {@link Writer}. Collects configuration (output
 * location, comparator, bloom type, expected key count, favored nodes,
 * file context) and constructs the writer via {@link #build()}.
 */
public static class WriterBuilder {
  private final Configuration conf;
  private final CacheConfig cacheConf;
  private final FileSystem fs;
  // Defaults used when the caller does not override them.
  private CellComparator comparator = CellComparator.COMPARATOR;
  private BloomType bloomType = BloomType.NONE;
  private long maxKeyCount = 0;
  // Exactly one of dir / filePath must be set before build().
  private Path dir;
  private Path filePath;
  private InetSocketAddress[] favoredNodes;
  private HFileContext fileContext;
  public WriterBuilder(Configuration conf, CacheConfig cacheConf,
      FileSystem fs) {
    this.conf = conf;
    this.cacheConf = cacheConf;
    this.fs = fs;
  }
  /**
   * Use either this method or {@link #withFilePath}, but not both.
   * @param dir Path to column family directory. The directory is created if
   *          does not exist. The file is given a unique name within this
   *          directory.
   * @return this (for chained invocation)
   */
  public WriterBuilder withOutputDir(Path dir) {
    Preconditions.checkNotNull(dir);
    this.dir = dir;
    return this;
  }
  /**
   * Use either this method or {@link #withOutputDir}, but not both.
   * @param filePath the StoreFile path to write
   * @return this (for chained invocation)
   */
  public WriterBuilder withFilePath(Path filePath) {
    Preconditions.checkNotNull(filePath);
    this.filePath = filePath;
    return this;
  }
  /**
   * @param favoredNodes an array of favored nodes or possibly null
   * @return this (for chained invocation)
   */
  public WriterBuilder withFavoredNodes(InetSocketAddress[] favoredNodes) {
    this.favoredNodes = favoredNodes;
    return this;
  }
  /** @param comparator cell comparator; must be non-null. */
  public WriterBuilder withComparator(CellComparator comparator) {
    Preconditions.checkNotNull(comparator);
    this.comparator = comparator;
    return this;
  }
  /** @param bloomType bloom filter setting; must be non-null. */
  public WriterBuilder withBloomType(BloomType bloomType) {
    Preconditions.checkNotNull(bloomType);
    this.bloomType = bloomType;
    return this;
  }
  /**
   * @param maxKeyCount estimated maximum number of keys we expect to add
   * @return this (for chained invocation)
   */
  public WriterBuilder withMaxKeyCount(long maxKeyCount) {
    this.maxKeyCount = maxKeyCount;
    return this;
  }
  /** @param fileContext the HFile context to write with. */
  public WriterBuilder withFileContext(HFileContext fileContext) {
    this.fileContext = fileContext;
    return this;
  }
  /**
   * Create a store file writer. Client is responsible for closing file when
   * done. If metadata, add BEFORE closing using
   * {@link Writer#appendMetadata}.
   * @throws IllegalArgumentException unless exactly one of the output
   *     directory and the file path was specified
   */
  public Writer build() throws IOException {
    // XOR check: exactly one of dir / filePath must have been provided.
    if ((dir == null ? 0 : 1) + (filePath == null ? 0 : 1) != 1) {
      throw new IllegalArgumentException("Either specify parent directory " +
          "or file path");
    }
    if (dir == null) {
      dir = filePath.getParent();
    }
    if (!fs.exists(dir)) {
      fs.mkdirs(dir);
    }
    if (filePath == null) {
      filePath = getUniqueFile(fs, dir);
      // NOTE(review): the general-bloom-enabled check only runs on this
      // branch (no explicit file path), so a caller-supplied filePath keeps
      // its configured bloomType regardless of the global switch — confirm
      // this asymmetry is intentional.
      if (!BloomFilterFactory.isGeneralBloomEnabled(conf)) {
        bloomType = BloomType.NONE;
      }
    }
    if (comparator == null) {
      comparator = CellComparator.COMPARATOR;
    }
    return new Writer(fs, filePath,
        conf, cacheConf, comparator, bloomType, maxKeyCount, favoredNodes, fileContext);
  }
}
/**
 * Produce a unique, random file name inside the given directory.
 * @param fs filesystem used to verify that <code>dir</code> is a directory
 * @param dir Directory to create file in.
 * @return random filename inside passed <code>dir</code>
 * @throws IOException if <code>dir</code> is not a directory
 */
public static Path getUniqueFile(final FileSystem fs, final Path dir)
    throws IOException {
  if (!fs.getFileStatus(dir).isDirectory()) {
    throw new IOException("Expecting " + dir.toString() +
      " to be a directory");
  }
  // UUID without dashes keeps the name short and filesystem-safe.
  String name = UUID.randomUUID().toString().replaceAll("-", "");
  return new Path(dir, name);
}
/**
 * @return the minimum timestamp recorded by this file's time range
 *     tracker, or {@code null} when no tracker is available — including
 *     the case where the reader has not been created yet (the original
 *     code called getReader() twice and would NPE on a null reader).
 */
public Long getMinimumTimestamp() {
  // Cache the reader locally: getReader() returns null until
  // createReader() has been called.
  Reader r = getReader();
  return (r == null || r.timeRangeTracker == null)
      ? null
      : r.timeRangeTracker.getMinimumTimestamp();
}
/**
 * Gets the approximate mid-point of this file that is optimal for use in splitting it.
 * @param comparator Comparator used to compare KVs.
 * @return The split point row, or null if splitting is not possible, or reader is null.
 */
@SuppressWarnings("deprecation")
byte[] getFileSplitPoint(CellComparator comparator) throws IOException {
  if (this.reader == null) {
    LOG.warn("Storefile " + this + " Reader is null; cannot get split point");
    return null;
  }
  // Get first, last, and mid keys. Midkey is the key that starts block
  // in middle of hfile. Has column and timestamp. Need to return just
  // the row we want to split on as midkey.
  Cell midkey = this.reader.midkey();
  if (midkey != null) {
    Cell firstKey = this.reader.getFirstKey();
    Cell lastKey = this.reader.getLastKey();
    // if the midkey is the same as the first or last keys, we cannot (ever) split this region.
    // (splitting there would leave an empty daughter region)
    if (comparator.compareRows(midkey, firstKey) == 0
        || comparator.compareRows(midkey, lastKey) == 0) {
      if (LOG.isDebugEnabled()) {
        LOG.debug("cannot split because midkey is the same as first or last row");
      }
      return null;
    }
    // Only the row portion of the midkey is the split point.
    return CellUtil.cloneRow(midkey);
  }
  return null;
}
/**
 * A StoreFile writer. Use this to read/write HBase Store Files. It is package
 * local because it is an implementation detail of the HBase regionserver.
 * Wraps an {@link HFile.Writer} and additionally maintains the general and
 * delete-family Bloom filters plus timestamp range metadata.
 */
public static class Writer implements Compactor.CellSink {
  private final BloomFilterWriter generalBloomFilterWriter;
  private final BloomFilterWriter deleteFamilyBloomFilterWriter;
  private final BloomType bloomType;
  // Coordinates of the last key added to the general Bloom; used to enforce
  // strictly increasing Bloom keys and persisted as LAST_BLOOM_KEY on close.
  private byte[] lastBloomKey;
  private int lastBloomKeyOffset, lastBloomKeyLen;
  // Last cell appended, for de-duplicating Bloom insertions.
  private Cell lastCell = null;
  private long earliestPutTs = HConstants.LATEST_TIMESTAMP;
  private Cell lastDeleteFamilyCell = null;
  // Count of delete-family cells written; persisted even without a Bloom.
  private long deleteFamilyCnt = 0;
  /** Bytes per Checksum */
  protected int bytesPerChecksum;
  TimeRangeTracker timeRangeTracker = new TimeRangeTracker();
  /* isTimeRangeTrackerSet keeps track if the timeRange has already been set
   * When flushing a memstore, we set TimeRange and use this variable to
   * indicate that it doesn't need to be calculated again while
   * appending KeyValues.
   * It is not set in cases of compactions when it is recalculated using only
   * the appended KeyValues*/
  boolean isTimeRangeTrackerSet = false;
  protected HFile.Writer writer;
  // Reusable key-only KV for ROWCOL Bloom comparisons (avoids allocation).
  private KeyValue.KeyOnlyKeyValue lastBloomKeyOnlyKV = null;
  /**
   * Creates an HFile.Writer that also write helpful meta data.
   * @param fs file system to write to
   * @param path file name to create
   * @param conf user configuration
   * @param comparator key comparator
   * @param bloomType bloom filter setting
   * @param maxKeys the expected maximum number of keys to be added. Was used
   *        for Bloom filter size in {@link HFile} format version 1.
   * @param favoredNodes
   * @param fileContext - The HFile context
   * @throws IOException problem writing to FS
   */
  private Writer(FileSystem fs, Path path,
      final Configuration conf,
      CacheConfig cacheConf,
      final CellComparator comparator, BloomType bloomType, long maxKeys,
      InetSocketAddress[] favoredNodes, HFileContext fileContext)
          throws IOException {
    writer = HFile.getWriterFactory(conf, cacheConf)
        .withPath(fs, path)
        .withComparator(comparator)
        .withFavoredNodes(favoredNodes)
        .withFileContext(fileContext)
        .create();
    generalBloomFilterWriter = BloomFilterFactory.createGeneralBloomAtWrite(
        conf, cacheConf, bloomType,
        (int) Math.min(maxKeys, Integer.MAX_VALUE), writer);
    if (generalBloomFilterWriter != null) {
      this.bloomType = bloomType;
      if(this.bloomType == BloomType.ROWCOL) {
        lastBloomKeyOnlyKV = new KeyValue.KeyOnlyKeyValue();
      }
      if (LOG.isTraceEnabled()) LOG.trace("Bloom filter type for " + path + ": " +
        this.bloomType + ", " + generalBloomFilterWriter.getClass().getSimpleName());
    } else {
      // Not using Bloom filters.
      this.bloomType = BloomType.NONE;
    }
    // initialize delete family Bloom filter when there is NO RowCol Bloom
    // filter
    if (this.bloomType != BloomType.ROWCOL) {
      this.deleteFamilyBloomFilterWriter = BloomFilterFactory
          .createDeleteBloomAtWrite(conf, cacheConf,
              (int) Math.min(maxKeys, Integer.MAX_VALUE), writer);
    } else {
      deleteFamilyBloomFilterWriter = null;
    }
    if (deleteFamilyBloomFilterWriter != null) {
      if (LOG.isTraceEnabled()) LOG.trace("Delete Family Bloom filter type for " + path + ": "
          + deleteFamilyBloomFilterWriter.getClass().getSimpleName());
    }
  }
  /**
   * Writes meta data.
   * Call before {@link #close()} since its written as meta data to this file.
   * @param maxSequenceId Maximum sequence id.
   * @param majorCompaction True if this file is product of a major compaction
   * @throws IOException problem writing to FS
   */
  public void appendMetadata(final long maxSequenceId, final boolean majorCompaction)
  throws IOException {
    writer.appendFileInfo(MAX_SEQ_ID_KEY, Bytes.toBytes(maxSequenceId));
    writer.appendFileInfo(MAJOR_COMPACTION_KEY,
        Bytes.toBytes(majorCompaction));
    appendTrackedTimestampsToMetadata();
  }
  /**
   * Add TimestampRange and earliest put timestamp to Metadata
   */
  public void appendTrackedTimestampsToMetadata() throws IOException {
    appendFileInfo(TIMERANGE_KEY,WritableUtils.toByteArray(timeRangeTracker));
    appendFileInfo(EARLIEST_PUT_TS, Bytes.toBytes(earliestPutTs));
  }
  /**
   * Set TimeRangeTracker.
   * Marks the tracker as externally provided so trackTimestamps() will not
   * recompute it per cell.
   * @param trt
   */
  public void setTimeRangeTracker(final TimeRangeTracker trt) {
    this.timeRangeTracker = trt;
    isTimeRangeTrackerSet = true;
  }
  /**
   * Record the earlest Put timestamp.
   *
   * If the timeRangeTracker is not set,
   * update TimeRangeTracker to include the timestamp of this key
   * @param cell
   */
  public void trackTimestamps(final Cell cell) {
    if (KeyValue.Type.Put.getCode() == cell.getTypeByte()) {
      earliestPutTs = Math.min(earliestPutTs, cell.getTimestamp());
    }
    if (!isTimeRangeTrackerSet) {
      timeRangeTracker.includeTimestamp(cell);
    }
  }
  // Adds the cell's key to the general Bloom filter, de-duplicating
  // consecutive cells with the same row (ROW) or row+column (ROWCOL), and
  // verifying keys arrive in strictly increasing order.
  private void appendGeneralBloomfilter(final Cell cell) throws IOException {
    if (this.generalBloomFilterWriter != null) {
      // only add to the bloom filter on a new, unique key
      boolean newKey = true;
      if (this.lastCell != null) {
        switch(bloomType) {
        case ROW:
          newKey = ! CellUtil.matchingRows(cell, lastCell);
          break;
        case ROWCOL:
          newKey = ! CellUtil.matchingRowColumn(cell, lastCell);
          break;
        case NONE:
          newKey = false;
          break;
        default:
          throw new IOException("Invalid Bloom filter type: " + bloomType +
              " (ROW or ROWCOL expected)");
        }
      }
      if (newKey) {
        /*
         * http://2.bp.blogspot.com/_Cib_A77V54U/StZMrzaKufI/AAAAAAAAADo/ZhK7bGoJdMQ/s400/KeyValue.png
         * Key = RowLen + Row + FamilyLen + Column [Family + Qualifier] + TimeStamp
         *
         * 2 Types of Filtering:
         *  1. Row = Row
         *  2. RowCol = Row + Qualifier
         */
        byte[] bloomKey = null;
        // Used with ROW_COL bloom
        KeyValue bloomKeyKV = null;
        int bloomKeyOffset, bloomKeyLen;
        switch (bloomType) {
        case ROW:
          bloomKey = cell.getRowArray();
          bloomKeyOffset = cell.getRowOffset();
          bloomKeyLen = cell.getRowLength();
          break;
        case ROWCOL:
          // merge(row, qualifier)
          // TODO: could save one buffer copy in case of compound Bloom
          // filters when this involves creating a KeyValue
          bloomKeyKV = KeyValueUtil.createFirstOnRow(cell.getRowArray(), cell.getRowOffset(),
              cell.getRowLength(),
              HConstants.EMPTY_BYTE_ARRAY, 0, 0, cell.getQualifierArray(),
              cell.getQualifierOffset(),
              cell.getQualifierLength());
          bloomKey = bloomKeyKV.getBuffer();
          bloomKeyOffset = bloomKeyKV.getKeyOffset();
          bloomKeyLen = bloomKeyKV.getKeyLength();
          break;
        default:
          throw new IOException("Invalid Bloom filter type: " + bloomType +
              " (ROW or ROWCOL expected)");
        }
        generalBloomFilterWriter.add(bloomKey, bloomKeyOffset, bloomKeyLen);
        if (lastBloomKey != null) {
          int res = 0;
          // hbase:meta does not have blooms. So we need not have special interpretation
          // of the hbase:meta cells.  We can safely use Bytes.BYTES_RAWCOMPARATOR for ROW Bloom
          if (bloomType == BloomType.ROW) {
            res = Bytes.BYTES_RAWCOMPARATOR.compare(bloomKey, bloomKeyOffset, bloomKeyLen,
                lastBloomKey, lastBloomKeyOffset, lastBloomKeyLen);
          } else {
            // TODO : Caching of kv components becomes important in these cases
            res = CellComparator.COMPARATOR.compare(bloomKeyKV, lastBloomKeyOnlyKV);
          }
          if (res <= 0) {
            throw new IOException("Non-increasing Bloom keys: "
                + Bytes.toStringBinary(bloomKey, bloomKeyOffset, bloomKeyLen) + " after "
                + Bytes.toStringBinary(lastBloomKey, lastBloomKeyOffset, lastBloomKeyLen));
          }
        }
        lastBloomKey = bloomKey;
        lastBloomKeyOffset = bloomKeyOffset;
        lastBloomKeyLen = bloomKeyLen;
        if (bloomType == BloomType.ROWCOL) {
          lastBloomKeyOnlyKV.setKey(bloomKey, bloomKeyOffset, bloomKeyLen);
        }
        // Only update on new keys: lastCell is the last cell whose key was
        // actually added to the Bloom.
        this.lastCell = cell;
      }
    }
  }
  // Counts delete-family cells and adds their row to the delete-family
  // Bloom filter (de-duplicating consecutive same-row cells).
  private void appendDeleteFamilyBloomFilter(final Cell cell)
      throws IOException {
    if (!CellUtil.isDeleteFamily(cell) && !CellUtil.isDeleteFamilyVersion(cell)) {
      return;
    }
    // increase the number of delete family in the store file
    deleteFamilyCnt++;
    if (null != this.deleteFamilyBloomFilterWriter) {
      boolean newKey = true;
      if (lastDeleteFamilyCell != null) {
        // hbase:meta does not have blooms. So we need not have special interpretation
        // of the hbase:meta cells
        newKey = !CellUtil.matchingRows(cell, lastDeleteFamilyCell);
      }
      if (newKey) {
        this.deleteFamilyBloomFilterWriter.add(cell.getRowArray(),
            cell.getRowOffset(), cell.getRowLength());
        this.lastDeleteFamilyCell = cell;
      }
    }
  }
  /**
   * Appends a cell: updates both Bloom filters, writes the cell to the
   * underlying HFile, and folds its timestamp into the time range.
   */
  public void append(final Cell cell) throws IOException {
    appendGeneralBloomfilter(cell);
    appendDeleteFamilyBloomFilter(cell);
    writer.append(cell);
    trackTimestamps(cell);
  }
  /** @return path of the underlying HFile being written. */
  public Path getPath() {
    return this.writer.getPath();
  }
  /** @return true if a general Bloom filter is being written. */
  boolean hasGeneralBloom() {
    return this.generalBloomFilterWriter != null;
  }
  /**
   * For unit testing only.
   *
   * @return the Bloom filter used by this writer.
   */
  BloomFilterWriter getGeneralBloomWriter() {
    return generalBloomFilterWriter;
  }
  // Compacts the given Bloom writer if it holds any keys; returns whether a
  // non-empty Bloom exists.
  private boolean closeBloomFilter(BloomFilterWriter bfw) throws IOException {
    boolean haveBloom = (bfw != null && bfw.getKeyCount() > 0);
    if (haveBloom) {
      bfw.compactBloom();
    }
    return haveBloom;
  }
  private boolean closeGeneralBloomFilter() throws IOException {
    boolean hasGeneralBloom = closeBloomFilter(generalBloomFilterWriter);
    // add the general Bloom filter writer and append file info
    if (hasGeneralBloom) {
      writer.addGeneralBloomFilter(generalBloomFilterWriter);
      writer.appendFileInfo(BLOOM_FILTER_TYPE_KEY,
          Bytes.toBytes(bloomType.toString()));
      if (lastBloomKey != null) {
        writer.appendFileInfo(LAST_BLOOM_KEY, Arrays.copyOfRange(
            lastBloomKey, lastBloomKeyOffset, lastBloomKeyOffset
                + lastBloomKeyLen));
      }
    }
    return hasGeneralBloom;
  }
  private boolean closeDeleteFamilyBloomFilter() throws IOException {
    boolean hasDeleteFamilyBloom = closeBloomFilter(deleteFamilyBloomFilterWriter);
    // add the delete family Bloom filter writer
    if (hasDeleteFamilyBloom) {
      writer.addDeleteFamilyBloomFilter(deleteFamilyBloomFilterWriter);
    }
    // append file info about the number of delete family kvs
    // even if there is no delete family Bloom.
    writer.appendFileInfo(DELETE_FAMILY_COUNT,
        Bytes.toBytes(this.deleteFamilyCnt));
    return hasDeleteFamilyBloom;
  }
  /**
   * Finalizes both Bloom filters and closes the underlying HFile writer.
   */
  public void close() throws IOException {
    boolean hasGeneralBloom = this.closeGeneralBloomFilter();
    boolean hasDeleteFamilyBloom = this.closeDeleteFamilyBloomFilter();
    writer.close();
    // Log final Bloom filter statistics. This needs to be done after close()
    // because compound Bloom filters might be finalized as part of closing.
    if (StoreFile.LOG.isTraceEnabled()) {
      StoreFile.LOG.trace((hasGeneralBloom ? "" : "NO ") + "General Bloom and " +
        (hasDeleteFamilyBloom ? "" : "NO ") + "DeleteFamily" + " was added to HFile " +
        getPath());
    }
  }
  /** Appends an arbitrary key/value pair to the HFile's file info block. */
  public void appendFileInfo(byte[] key, byte[] value) throws IOException {
    writer.appendFileInfo(key, value);
  }
  /** For use in testing, e.g. {@link org.apache.hadoop.hbase.regionserver.CreateRandomStoreFile}
   */
  HFile.Writer getHFileWriter() {
    return writer;
  }
}
/**
* Reader for a StoreFile.
*/
public static class Reader {
private static final Log LOG = LogFactory.getLog(Reader.class.getName());
protected BloomFilter generalBloomFilter = null;
protected BloomFilter deleteFamilyBloomFilter = null;
protected BloomType bloomFilterType;
private final HFile.Reader reader;
protected TimeRangeTracker timeRangeTracker = null;
protected long sequenceID = -1;
private byte[] lastBloomKey;
private long deleteFamilyCnt = -1;
private boolean bulkLoadResult = false;
private KeyValue.KeyOnlyKeyValue lastBloomKeyOnlyKV = null;
/** Opens a reader over the HFile at {@code path}; Bloom type starts as NONE
 *  until {@link #loadFileInfo()} reads it from the file metadata. */
public Reader(FileSystem fs, Path path, CacheConfig cacheConf, Configuration conf)
    throws IOException {
  reader = HFile.createReader(fs, path, cacheConf, conf);
  bloomFilterType = BloomType.NONE;
}
/** Opens a reader over an already-open input stream of known size. */
public Reader(FileSystem fs, Path path, FSDataInputStreamWrapper in, long size,
    CacheConfig cacheConf, Configuration conf) throws IOException {
  reader = HFile.createReader(fs, path, in, size, cacheConf, conf);
  bloomFilterType = BloomType.NONE;
}
/**
 * ONLY USE DEFAULT CONSTRUCTOR FOR UNIT TESTS
 * (leaves the underlying HFile reader null).
 */
Reader() {
  this.reader = null;
}
/** @return the comparator used by the underlying HFile reader. */
public CellComparator getComparator() {
  return reader.getComparator();
}
/**
 * Get a scanner to scan over this StoreFile. Do not use
 * this overload if using this scanner for compactions.
 *
 * @param cacheBlocks should this scanner cache blocks?
 * @param pread use pread (for highly concurrent small readers)
 * @return a scanner
 */
public StoreFileScanner getStoreFileScanner(boolean cacheBlocks,
                                           boolean pread) {
  return getStoreFileScanner(cacheBlocks, pread, false,
    // 0 is passed as readpoint because this method is only used by test
    // where StoreFile is directly operated upon
    0);
}
/**
 * Get a scanner to scan over this StoreFile.
 *
 * @param cacheBlocks should this scanner cache blocks?
 * @param pread use pread (for highly concurrent small readers)
 * @param isCompaction is scanner being used for compaction?
 * @param readPt MVCC read point for the scan
 * @return a scanner
 */
public StoreFileScanner getStoreFileScanner(boolean cacheBlocks,
                                           boolean pread,
                                           boolean isCompaction, long readPt) {
  return new StoreFileScanner(this,
                             getScanner(cacheBlocks, pread, isCompaction),
                             !isCompaction, reader.hasMVCCInfo(), readPt);
}
/**
 * Warning: Do not write further code which depends on this call. Instead
 * use getStoreFileScanner() which uses the StoreFileScanner class/interface
 * which is the preferred way to scan a store with higher level concepts.
 *
 * @param cacheBlocks should we cache the blocks?
 * @param pread use pread (for concurrent small readers)
 * @return the underlying HFileScanner
 */
@Deprecated
public HFileScanner getScanner(boolean cacheBlocks, boolean pread) {
  return getScanner(cacheBlocks, pread, false);
}
/**
 * Warning: Do not write further code which depends on this call. Instead
 * use getStoreFileScanner() which uses the StoreFileScanner class/interface
 * which is the preferred way to scan a store with higher level concepts.
 *
 * @param cacheBlocks
 *          should we cache the blocks?
 * @param pread
 *          use pread (for concurrent small readers)
 * @param isCompaction
 *          is scanner being used for compaction?
 * @return the underlying HFileScanner
 */
@Deprecated
public HFileScanner getScanner(boolean cacheBlocks, boolean pread,
    boolean isCompaction) {
  return reader.getScanner(cacheBlocks, pread, isCompaction);
}
/** Closes the underlying HFile reader.
 *  @param evictOnClose whether to evict this file's cached blocks */
public void close(boolean evictOnClose) throws IOException {
  reader.close(evictOnClose);
}
/**
 * Check if this storeFile may contain keys within the TimeRange that
 * have not expired (i.e. not older than oldestUnexpiredTS).
 * @param scan the current scan
 * @param oldestUnexpiredTS the oldest timestamp that is not expired, as
 *          determined by the column family's TTL
 * @return false if queried keys definitely don't exist in this StoreFile
 */
boolean passesTimerangeFilter(Scan scan, long oldestUnexpiredTS) {
  // Without a tracker we cannot rule anything out.
  if (timeRangeTracker == null) {
    return true;
  }
  if (!timeRangeTracker.includesTimeRange(scan.getTimeRange())) {
    return false;
  }
  return timeRangeTracker.getMaximumTimestamp() >= oldestUnexpiredTS;
}
/**
 * Checks whether the given scan passes the Bloom filter (if present). Only
 * checks Bloom filters for single-row or single-row-column scans. Bloom
 * filter checking for multi-gets is implemented as part of the store
 * scanner system (see {@link StoreFileScanner#seekExactly}) and uses
 * the lower-level API {@link #passesGeneralBloomFilter(byte[], int, int, byte[],
 * int, int)}.
 *
 * @param scan the scan specification. Used to determine the row, and to
 *          check whether this is a single-row ("get") scan.
 * @param columns the set of columns. Only used for row-column Bloom
 *          filters.
 * @return true if the scan with the given column set passes the Bloom
 *         filter, or if the Bloom filter is not applicable for the scan.
 *         False if the Bloom filter is applicable and the scan fails it.
 */
boolean passesBloomFilter(Scan scan,
    final SortedSet<byte[]> columns) {
  // Multi-column non-get scans will use Bloom filters through the
  // lower-level API function that this function calls.
  if (!scan.isGetScan()) {
    return true;
  }
  byte[] row = scan.getStartRow();
  if (this.bloomFilterType == BloomType.ROW) {
    return passesGeneralBloomFilter(row, 0, row.length, null, 0, 0);
  }
  if (this.bloomFilterType == BloomType.ROWCOL) {
    if (columns != null && columns.size() == 1) {
      byte[] column = columns.first();
      return passesGeneralBloomFilter(row, 0, row.length, column, 0,
          column.length);
    }
    // For multi-column queries the Bloom filter is checked from the
    // seekExact operation.
    return true;
  }
  // No applicable Bloom filter type: nothing can be ruled out.
  return true;
}
/**
 * Checks the delete-family Bloom filter for the given row.
 * @return false only when the file is provably free of delete-family cells
 *     for the row (empty file, zero delete-family count, or a Bloom miss);
 *     true in all uncertain cases, including Bloom read errors.
 */
public boolean passesDeleteFamilyBloomFilter(byte[] row, int rowOffset,
    int rowLen) {
  // Cache Bloom filter as a local variable in case it is set to null by
  // another thread on an IO error.
  BloomFilter bloomFilter = this.deleteFamilyBloomFilter;
  // Empty file or there is no delete family at all
  if (reader.getTrailer().getEntryCount() == 0 || deleteFamilyCnt == 0) {
    return false;
  }
  if (bloomFilter == null) {
    return true;
  }
  try {
    if (!bloomFilter.supportsAutoLoading()) {
      return true;
    }
    return bloomFilter.contains(row, rowOffset, rowLen, null);
  } catch (IllegalArgumentException e) {
    // Corrupt Bloom data: disable the filter and fail open.
    LOG.error("Bad Delete Family bloom filter data -- proceeding without",
        e);
    setDeleteFamilyBloomFilterFaulty();
  }
  return true;
}
/**
 * A method for checking Bloom filters. Called directly from
 * StoreFileScanner in case of a multi-column query.
 * Fails open (returns true) on any Bloom read error.
 *
 * @param row
 * @param rowOffset
 * @param rowLen
 * @param col
 * @param colOffset
 * @param colLen
 * @return True if passes
 */
public boolean passesGeneralBloomFilter(byte[] row, int rowOffset,
    int rowLen, byte[] col, int colOffset, int colLen) {
  // Cache Bloom filter as a local variable in case it is set to null by
  // another thread on an IO error.
  BloomFilter bloomFilter = this.generalBloomFilter;
  if (bloomFilter == null) {
    return true;
  }
  // Used in ROW bloom
  byte[] key = null;
  // Used in ROW_COL bloom
  KeyValue kvKey = null;
  switch (bloomFilterType) {
    case ROW:
      if (col != null) {
        throw new RuntimeException("Row-only Bloom filter called with " +
            "column specified");
      }
      if (rowOffset != 0 || rowLen != row.length) {
          throw new AssertionError("For row-only Bloom filters the row "
              + "must occupy the whole array");
      }
      key = row;
      break;
    case ROWCOL:
      kvKey = KeyValueUtil.createFirstOnRow(row, rowOffset, rowLen,
          HConstants.EMPTY_BYTE_ARRAY, 0, 0, col, colOffset,
          colLen);
      break;
    default:
      // Unknown/NONE Bloom type: cannot filter anything out.
      return true;
  }
  // Empty file
  if (reader.getTrailer().getEntryCount() == 0)
    return false;
  try {
    boolean shouldCheckBloom;
    ByteBuffer bloom;
    if (bloomFilter.supportsAutoLoading()) {
      bloom = null;
      shouldCheckBloom = true;
    } else {
      // Legacy meta-block Bloom: must load the whole block first.
      bloom = reader.getMetaBlock(HFile.BLOOM_FILTER_DATA_KEY,
          true);
      shouldCheckBloom = bloom != null;
    }
    if (shouldCheckBloom) {
      boolean exists;
      // Whether the primary Bloom key is greater than the last Bloom key
      // from the file info. For row-column Bloom filters this is not yet
      // a sufficient condition to return false.
      boolean keyIsAfterLast = (lastBloomKey != null);
      // hbase:meta does not have blooms. So we need not have special interpretation
      // of the hbase:meta cells.  We can safely use Bytes.BYTES_RAWCOMPARATOR for ROW Bloom
      if (keyIsAfterLast) {
        if (bloomFilterType == BloomType.ROW) {
          keyIsAfterLast = (Bytes.BYTES_RAWCOMPARATOR.compare(key, lastBloomKey) > 0);
        } else {
          keyIsAfterLast = (CellComparator.COMPARATOR.compare(kvKey, lastBloomKeyOnlyKV)) > 0;
        }
      }
      if (bloomFilterType == BloomType.ROWCOL) {
        // Since a Row Delete is essentially a DeleteFamily applied to all
        // columns, a file might be skipped if using row+col Bloom filter.
        // In order to ensure this file is included an additional check is
        // required looking only for a row bloom.
        KeyValue rowBloomKey = KeyValueUtil.createFirstOnRow(row, rowOffset, rowLen,
            HConstants.EMPTY_BYTE_ARRAY, 0, 0, HConstants.EMPTY_BYTE_ARRAY, 0, 0);
        // hbase:meta does not have blooms. So we need not have special interpretation
        // of the hbase:meta cells.  We can safely use Bytes.BYTES_RAWCOMPARATOR for ROW Bloom
        if (keyIsAfterLast
            && (CellComparator.COMPARATOR.compare(rowBloomKey, lastBloomKeyOnlyKV)) > 0) {
          exists = false;
        } else {
          exists =
              bloomFilter.contains(kvKey, bloom) ||
              bloomFilter.contains(rowBloomKey, bloom);
        }
      } else {
        exists = !keyIsAfterLast
            && bloomFilter.contains(key, 0, key.length, bloom);
      }
      return exists;
    }
  } catch (IOException e) {
    LOG.error("Error reading bloom filter data -- proceeding without",
        e);
    setGeneralBloomFilterFaulty();
  } catch (IllegalArgumentException e) {
    LOG.error("Bad bloom filter data -- proceeding without", e);
    setGeneralBloomFilterFaulty();
  }
  return true;
}
/**
 * Checks whether the given scan rowkey range overlaps with the current storefile's
 * @param scan the scan specification. Used to determine the rowkey range.
 * @return true if there is overlap, false otherwise
 */
public boolean passesKeyRangeFilter(Scan scan) {
  if (this.getFirstKey() == null || this.getLastKey() == null) {
    // the file is empty
    return false;
  }
  // Unbounded scan always overlaps a non-empty file.
  if (Bytes.equals(scan.getStartRow(), HConstants.EMPTY_START_ROW)
      && Bytes.equals(scan.getStopRow(), HConstants.EMPTY_END_ROW)) {
    return true;
  }
  // For reversed scans the stop row is the logical lower bound and the
  // start row the upper bound, hence the swapped selection below.
  KeyValue smallestScanKeyValue = scan.isReversed() ? KeyValueUtil
      .createFirstOnRow(scan.getStopRow()) : KeyValueUtil.createFirstOnRow(scan
      .getStartRow());
  KeyValue largestScanKeyValue = scan.isReversed() ? KeyValueUtil
      .createLastOnRow(scan.getStartRow()) : KeyValueUtil.createLastOnRow(scan
      .getStopRow());
  Cell firstKeyKV = this.getFirstKey();
  Cell lastKeyKV = this.getLastKey();
  // Non-overlap: file starts after the scan's upper bound (unless the scan
  // is open-ended on that side) or ends before the scan's lower bound.
  boolean nonOverLapping = ((getComparator().compare(firstKeyKV, largestScanKeyValue)) > 0
      && !Bytes
      .equals(scan.isReversed() ? scan.getStartRow() : scan.getStopRow(),
          HConstants.EMPTY_END_ROW))
      || (getComparator().compare(lastKeyKV, smallestScanKeyValue)) < 0;
  return !nonOverLapping;
}
/**
 * Loads the file-info block from the underlying HFile and caches the
 * Bloom-related entries (Bloom type, last Bloom key, delete-family count).
 *
 * @return the file-info map as loaded from the HFile
 * @throws IOException if the file info cannot be read
 */
public Map<byte[], byte[]> loadFileInfo() throws IOException {
  Map<byte [], byte []> fi = reader.loadFileInfo();
  byte[] b = fi.get(BLOOM_FILTER_TYPE_KEY);
  if (b != null) {
    bloomFilterType = BloomType.valueOf(Bytes.toString(b));
  }
  lastBloomKey = fi.get(LAST_BLOOM_KEY);
  // Guard against files that declare a ROWCOL bloom type but carry no
  // LAST_BLOOM_KEY entry (foreign or corrupt metadata): the original
  // unconditional constructor call would NPE on a null lastBloomKey.
  // Leaving lastBloomKeyOnlyKV null is safe because it is only consulted
  // when lastBloomKey is non-null (see passesGeneralBloomFilter).
  if (bloomFilterType == BloomType.ROWCOL && lastBloomKey != null) {
    lastBloomKeyOnlyKV = new KeyValue.KeyOnlyKeyValue(lastBloomKey, 0, lastBloomKey.length);
  }
  byte[] cnt = fi.get(DELETE_FAMILY_COUNT);
  if (cnt != null) {
    deleteFamilyCnt = Bytes.toLong(cnt);
  }
  return fi;
}
/** Loads both the general and the delete-family Bloom filters. */
public void loadBloomfilter() {
  this.loadBloomfilter(BlockType.GENERAL_BLOOM_META);
  this.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META);
}
/**
 * Loads the Bloom filter identified by {@code blockType} from the HFile's
 * Bloom metadata, if present and not already loaded. On any read or
 * format error the corresponding filter is marked faulty (set to null)
 * and scanning proceeds without it.
 */
private void loadBloomfilter(BlockType blockType) {
  try {
    if (blockType == BlockType.GENERAL_BLOOM_META) {
      if (this.generalBloomFilter != null)
        return; // Bloom has been loaded
      DataInput bloomMeta = reader.getGeneralBloomFilterMetadata();
      if (bloomMeta != null) {
        // sanity check for NONE Bloom filter
        if (bloomFilterType == BloomType.NONE) {
          throw new IOException(
              "valid bloom filter type not found in FileInfo");
        } else {
          generalBloomFilter = BloomFilterFactory.createFromMeta(bloomMeta,
              reader);
          if (LOG.isTraceEnabled()) {
            LOG.trace("Loaded " + bloomFilterType.toString() + " "
              + generalBloomFilter.getClass().getSimpleName()
              + " metadata for " + reader.getName());
          }
        }
      }
    } else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
      if (this.deleteFamilyBloomFilter != null)
        return; // Bloom has been loaded
      DataInput bloomMeta = reader.getDeleteBloomFilterMetadata();
      if (bloomMeta != null) {
        deleteFamilyBloomFilter = BloomFilterFactory.createFromMeta(
            bloomMeta, reader);
        LOG.info("Loaded Delete Family Bloom ("
            + deleteFamilyBloomFilter.getClass().getSimpleName()
            + ") metadata for " + reader.getName());
      }
    } else {
      throw new RuntimeException("Block Type: " + blockType.toString()
          + "is not supported for Bloom filter");
    }
  } catch (IOException e) {
    LOG.error("Error reading bloom filter meta for " + blockType
        + " -- proceeding without", e);
    setBloomFilterFaulty(blockType);
  } catch (IllegalArgumentException e) {
    LOG.error("Bad bloom filter meta " + blockType
        + " -- proceeding without", e);
    setBloomFilterFaulty(blockType);
  }
}
private void setBloomFilterFaulty(BlockType blockType) {
if (blockType == BlockType.GENERAL_BLOOM_META) {
setGeneralBloomFilterFaulty();
} else if (blockType == BlockType.DELETE_FAMILY_BLOOM_META) {
setDeleteFamilyBloomFilterFaulty();
}
}
/**
* The number of Bloom filter entries in this store file, or an estimate
* thereof, if the Bloom filter is not loaded. This always returns an upper
* bound of the number of Bloom filter entries.
*
* @return an estimate of the number of Bloom filter entries in this file
*/
public long getFilterEntries() {
return generalBloomFilter != null ? generalBloomFilter.getKeyCount()
: reader.getEntries();
}
    /** Discards the general Bloom filter; subsequent reads proceed without it. */
    public void setGeneralBloomFilterFaulty() {
      generalBloomFilter = null;
    }

    /** Discards the Delete Family Bloom filter; subsequent reads proceed without it. */
    public void setDeleteFamilyBloomFilterFaulty() {
      this.deleteFamilyBloomFilter = null;
    }

    /** @return the last key in the file, delegated to the underlying HFile reader */
    public Cell getLastKey() {
      return reader.getLastKey();
    }

    /** @return the last row key in the file, from the underlying HFile reader */
    public byte[] getLastRowKey() {
      return reader.getLastRowKey();
    }

    /** @return the approximate middle key of the file, from the underlying HFile reader */
    public Cell midkey() throws IOException {
      return reader.midkey();
    }

    /** @return the length in bytes as reported by the underlying HFile reader */
    public long length() {
      return reader.length();
    }

    /** @return total uncompressed bytes, read from the HFile trailer */
    public long getTotalUncompressedBytes() {
      return reader.getTrailer().getTotalUncompressedBytes();
    }

    /** @return the number of entries, as reported by the underlying HFile reader */
    public long getEntries() {
      return reader.getEntries();
    }

    /** @return the delete-family count parsed from this file's metadata (DELETE_FAMILY_COUNT) */
    public long getDeleteFamilyCnt() {
      return deleteFamilyCnt;
    }

    /** @return the first key in the file, delegated to the underlying HFile reader */
    public Cell getFirstKey() {
      return reader.getFirstKey();
    }

    /** @return the index size, as reported by the underlying HFile reader */
    public long indexSize() {
      return reader.indexSize();
    }

    /** @return the Bloom filter type parsed from this file's metadata */
    public BloomType getBloomFilterType() {
      return this.bloomFilterType;
    }

    /** @return the sequence id assigned via {@link #setSequenceID(long)} */
    public long getSequenceID() {
      return sequenceID;
    }

    /** Sets the sequence id associated with this reader. */
    public void setSequenceID(long sequenceID) {
      this.sequenceID = sequenceID;
    }

    /** Records whether this file was bulk loaded. */
    public void setBulkLoaded(boolean bulkLoadResult) {
      this.bulkLoadResult = bulkLoadResult;
    }

    /** @return true if this file was flagged as bulk loaded via {@link #setBulkLoaded(boolean)} */
    public boolean isBulkLoaded() {
      return this.bulkLoadResult;
    }

    // package-private accessor; returns null if the filter is unloaded or faulty
    BloomFilter getGeneralBloomFilter() {
      return generalBloomFilter;
    }

    // package-private accessor; value comes from the HFile trailer
    long getUncompressedDataIndexSize() {
      return reader.getTrailer().getUncompressedDataIndexSize();
    }

    /** @return byte size of the general Bloom filter, or 0 when it is not loaded */
    public long getTotalBloomSize() {
      if (generalBloomFilter == null)
        return 0;
      return generalBloomFilter.getByteSize();
    }

    /** @return the HFile major version, from the trailer */
    public int getHFileVersion() {
      return reader.getTrailer().getMajorVersion();
    }

    /** @return the HFile minor version, from the trailer */
    public int getHFileMinorVersion() {
      return reader.getTrailer().getMinorVersion();
    }

    /** @return the underlying HFile reader */
    public HFile.Reader getHFileReader() {
      return reader;
    }

    // test hook: drops both Bloom filters so reads behave as if none were written
    void disableBloomFilterForTesting() {
      generalBloomFilter = null;
      this.deleteFamilyBloomFilter = null;
    }

    /** @return the maximum timestamp tracked for this file, or Long.MAX_VALUE when no tracker exists */
    public long getMaxTimestamp() {
      return timeRangeTracker == null ? Long.MAX_VALUE : timeRangeTracker.getMaximumTimestamp();
    }
}
  /**
   * Useful comparators for comparing StoreFiles.
   */
  public abstract static class Comparators {
    /**
     * Comparator that compares based on the Sequence Ids of the
     * StoreFiles. Bulk loads that did not request a seq ID
     * are given a seq id of -1; thus, they are placed before all non-
     * bulk loads, and bulk loads with sequence Id. Among these files,
     * the size is used to determine the ordering, then bulkLoadTime.
     * If there are ties, the path name is used as a tie-breaker.
     */
    public static final Comparator<StoreFile> SEQ_ID =
      Ordering.compound(ImmutableList.of(
          Ordering.natural().onResultOf(new GetSeqId()),
          Ordering.natural().onResultOf(new GetFileSize()).reverse(),
          Ordering.natural().onResultOf(new GetBulkTime()),
          Ordering.natural().onResultOf(new GetPathName())
      ));

    /** Primary sort key: the file's max sequence id (ascending). */
    private static class GetSeqId implements Function<StoreFile, Long> {
      @Override
      public Long apply(StoreFile sf) {
        return sf.getMaxSequenceId();
      }
    }

    /** Secondary sort key: file size; reversed above, so larger files sort first. */
    private static class GetFileSize implements Function<StoreFile, Long> {
      @Override
      public Long apply(StoreFile sf) {
        return sf.getReader().length();
      }
    }

    /** Tertiary sort key: bulk load time; non-bulk-loaded files sort last (MAX_VALUE). */
    private static class GetBulkTime implements Function<StoreFile, Long> {
      @Override
      public Long apply(StoreFile sf) {
        if (!sf.isBulkLoadResult()) return Long.MAX_VALUE;
        return sf.getBulkLoadTimestamp();
      }
    }

    /** Final tie-breaker: the file's path name. */
    private static class GetPathName implements Function<StoreFile, String> {
      @Override
      public String apply(StoreFile sf) {
        return sf.getPath().getName();
      }
    }
  }
}
| apache-2.0 |
hortonworks/cloudbreak | telemetry-common/src/main/java/com/sequenceiq/cloudbreak/telemetry/orchestrator/TelemetryConfigProvider.java | 560 | package com.sequenceiq.cloudbreak.telemetry.orchestrator;
import java.util.Map;
import java.util.Set;
import com.sequenceiq.cloudbreak.orchestrator.exception.CloudbreakOrchestratorFailedException;
import com.sequenceiq.cloudbreak.orchestrator.model.SaltPillarProperties;
import com.sequenceiq.cloudbreak.telemetry.TelemetryComponentType;
/**
 * Contract for building Salt pillar configuration for telemetry components of a stack.
 */
public interface TelemetryConfigProvider {

    /**
     * Creates the Salt pillar properties for the given telemetry components of a stack.
     *
     * @param stackId id of the stack to create configuration for
     * @param components the telemetry component types to configure
     * @return pillar properties keyed by a String identifier
     *         (presumably the pillar name -- confirm against implementations)
     * @throws CloudbreakOrchestratorFailedException if config creation fails
     */
    Map<String, SaltPillarProperties> createTelemetryConfigs(Long stackId, Set<TelemetryComponentType> components)
            throws CloudbreakOrchestratorFailedException;
}
| apache-2.0 |
nmonvisualizer/nmonvisualizer | src/com/ibm/nmon/data/definition/DefaultDataDefinition.java | 9242 | package com.ibm.nmon.data.definition;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import com.ibm.nmon.data.DataSet;
import com.ibm.nmon.data.DataType;
import com.ibm.nmon.data.matcher.FieldMatcher;
import com.ibm.nmon.data.matcher.HostMatcher;
import com.ibm.nmon.data.matcher.TypeMatcher;
import com.ibm.nmon.data.transform.name.NameTransformer;
import com.ibm.nmon.analysis.Statistic;
/**
* <p>
 * Standard {@link DataDefinition} that uses various <code>matcher</code> classes to define the data this class will
* match. Data is renamed using various {@link NameTransformer} classes.
* </p>
*
* <p>
* By default, this definition matches all hosts, types and fields; it does no renaming.
* </p>
*
* @see HostMatcher
* @see TypeMatcher
* @see FieldMatcher
* @see NameTransformer
*/
public final class DefaultDataDefinition extends DataDefinition {
    /**
     * Use this key to specify the <code>NameTransformer</code> to use by default in the rename methods.
     */
    public static final String DEFAULT_NAME_TRANSFORMER_KEY = "$ALL";

    // lazily created lookups of host name / type id / field name => renaming transformer;
    // a null map means no renames have been registered for that category
    private Map<String, NameTransformer> hostTransformers;
    private Map<String, NameTransformer> typeTransformers;
    private Map<String, NameTransformer> fieldTransformers;

    private final HostMatcher hostMatcher;
    private final TypeMatcher typeMatcher;
    private final FieldMatcher fieldMatcher;

    /**
     * Creates a definition with the given matchers; any <code>null</code> matcher is replaced
     * by the corresponding match-everything default.
     */
    public DefaultDataDefinition(HostMatcher hostMatcher, TypeMatcher typeMatcher, FieldMatcher fieldMatcher,
            Statistic stat, boolean useSecondaryYAxis) {
        super(stat, useSecondaryYAxis);

        this.hostMatcher = hostMatcher == null ? HostMatcher.ALL : hostMatcher;
        this.typeMatcher = typeMatcher == null ? TypeMatcher.ALL : typeMatcher;
        this.fieldMatcher = fieldMatcher == null ? FieldMatcher.ALL : fieldMatcher;
    }

    /** Copy constructor backing the withNewXXX() methods: new matchers, copied transformers. */
    private DefaultDataDefinition(DefaultDataDefinition copy, HostMatcher hostMatcher, TypeMatcher typeMatcher,
            FieldMatcher fieldMatcher) {
        this(hostMatcher, typeMatcher, fieldMatcher, copy.getStatistic(), copy.usesSecondaryYAxis());

        this.hostTransformers = copyTransformers(copy.hostTransformers);
        this.typeTransformers = copyTransformers(copy.typeTransformers);
        this.fieldTransformers = copyTransformers(copy.fieldTransformers);
    }

    // defensive copy of a transformer map; preserves null as "no transformers registered"
    private static Map<String, NameTransformer> copyTransformers(Map<String, NameTransformer> toCopy) {
        return toCopy == null ? null : new java.util.HashMap<String, NameTransformer>(toCopy);
    }

    /**
     * Registers a transformer used to rename the given hostname. Register under
     * {@link #DEFAULT_NAME_TRANSFORMER_KEY} to apply to hosts without a specific transformer.
     */
    public void addHostnameTransformer(String hostname, NameTransformer transformer) {
        hostTransformers = addTransformer(hostTransformers, hostname, transformer);
    }

    /** Registers a transformer used to rename the given type id. */
    public void addTypeTransformer(String typeId, NameTransformer transformer) {
        typeTransformers = addTransformer(typeTransformers, typeId, transformer);
    }

    /** Registers a transformer used to rename the given field. */
    public void addFieldTransformer(String field, NameTransformer transformer) {
        fieldTransformers = addTransformer(fieldTransformers, field, transformer);
    }

    // shared implementation for the addXXXTransformer methods; creates the map lazily and
    // silently ignores null transformers, exactly as the original per-category code did
    private static Map<String, NameTransformer> addTransformer(Map<String, NameTransformer> transformers,
            String key, NameTransformer transformer) {
        if (transformer != null) {
            if (transformers == null) {
                transformers = new java.util.HashMap<String, NameTransformer>(2);
            }

            transformers.put(key, transformer);
        }

        return transformers;
    }

    public HostMatcher getHostMatcher() {
        return hostMatcher;
    }

    public TypeMatcher getTypeMatcher() {
        return typeMatcher;
    }

    public FieldMatcher getFieldMatcher() {
        return fieldMatcher;
    }

    @Override
    public boolean matchesHost(DataSet data) {
        return hostMatcher.matchesHost(data);
    }

    @Override
    public List<DataSet> getMatchingHosts(Collection<DataSet> toMatch) {
        return hostMatcher.getMatchingHosts(toMatch);
    }

    @Override
    public List<DataType> getMatchingTypes(DataSet data) {
        return typeMatcher.getMatchingTypes(data);
    }

    @Override
    public List<String> getMatchingFields(DataType type) {
        return fieldMatcher.getMatchingFields(type);
    }

    @Override
    public String renameHost(DataSet data) {
        return transformName(hostTransformers, data.getHostname());
    }

    @Override
    public String renameType(DataType type) {
        // the superclass rename is applied first; transformers are keyed on its result
        return transformName(typeTransformers, super.renameType(type));
    }

    // CONSISTENCY FIX: @Override was missing here although the sibling rename methods have it
    @Override
    public String renameField(String field) {
        return transformName(fieldTransformers, super.renameField(field));
    }

    // shared renaming logic, previously triplicated: prefer a transformer registered under the
    // name itself, then fall back to DEFAULT_NAME_TRANSFORMER_KEY, then return the name unchanged
    private static String transformName(Map<String, NameTransformer> transformers, String name) {
        if (transformers == null) {
            return name;
        }

        NameTransformer transformer = transformers.get(name);

        if (transformer == null) {
            transformer = transformers.get(DEFAULT_NAME_TRANSFORMER_KEY);
        }

        return transformer == null ? name : transformer.transform(name);
    }

    /**
     * @return a copy of this definition matching the given hosts, or <code>this</code> when the
     *         matcher is <code>null</code> or unchanged
     */
    public DefaultDataDefinition withNewHosts(HostMatcher matcher) {
        if ((matcher == null) || hostMatcher.equals(matcher)) {
            return this;
        }

        return new DefaultDataDefinition(this, matcher, this.typeMatcher, this.fieldMatcher);
    }

    /**
     * @return a copy of this definition matching the given types, or <code>this</code> when the
     *         matcher is <code>null</code> or unchanged
     */
    public DefaultDataDefinition withNewTypes(TypeMatcher matcher) {
        if ((matcher == null) || typeMatcher.equals(matcher)) {
            return this;
        }

        return new DefaultDataDefinition(this, this.hostMatcher, matcher, this.fieldMatcher);
    }

    /**
     * @return a copy of this definition matching the given fields, or <code>this</code> when the
     *         matcher is <code>null</code> or unchanged
     */
    public DefaultDataDefinition withNewFields(FieldMatcher matcher) {
        if ((matcher == null) || fieldMatcher.equals(matcher)) {
            return this;
        }

        return new DefaultDataDefinition(this, this.hostMatcher, this.typeMatcher, matcher);
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder(256);

        builder.append('{');

        builder.append("host: ");
        builder.append("match=");
        builder.append(hostMatcher);

        if (hostTransformers != null) {
            builder.append(", ");
            builder.append("transforms=");
            builder.append(hostTransformers);
        }

        builder.append("; ");

        builder.append("type: ");
        builder.append("match=");
        builder.append(typeMatcher);

        if (typeTransformers != null) {
            builder.append(", ");
            builder.append("transforms=");
            builder.append(typeTransformers);
        }

        builder.append("; ");

        builder.append("field: ");
        builder.append("match=");
        builder.append(fieldMatcher);

        if (fieldTransformers != null) {
            builder.append(", ");
            builder.append("transforms=");
            builder.append(fieldTransformers);
        }

        // the statistic is only shown when it differs from the default (AVERAGE)
        if (!Statistic.AVERAGE.equals(getStatistic())) {
            builder.append("; ");
            builder.append("stat=");
            builder.append(getStatistic());
        }

        builder.append('}');

        return builder.toString();
    }
}
| apache-2.0 |
opensciencegrid/oim | src/edu/iu/grid/oim/view/divrep/form/RARequestForm.java | 3218 | package edu.iu.grid.oim.view.divrep.form;
import java.io.InputStream;
import org.apache.log4j.Logger;
import com.divrep.common.DivRepStaticContent;
import com.divrep.common.DivRepTextArea;
import edu.iu.grid.oim.lib.Authorization;
import edu.iu.grid.oim.lib.Footprints;
import edu.iu.grid.oim.lib.ResourceReader;
import edu.iu.grid.oim.lib.StaticConfig;
import edu.iu.grid.oim.lib.Footprints.FPTicket;
import edu.iu.grid.oim.model.UserContext;
import edu.iu.grid.oim.model.db.CertificateRequestUserModel;
import edu.iu.grid.oim.model.db.record.ContactRecord;
import edu.iu.grid.oim.model.db.record.SCRecord;
import edu.iu.grid.oim.model.db.record.VORecord;
import edu.iu.grid.oim.view.divrep.BootDialogForm;
//assume user is logged in
public class RARequestForm extends BootDialogForm {
    // BUGFIX: the logger was created for CertificateRequestUserModel.class (copy/paste
    // leftover); it now logs under this class's own category
    static Logger log = Logger.getLogger(RARequestForm.class);

    // free-form request detail the user edits before submitting
    private DivRepTextArea desc;
    private UserContext context;

    // VO manager to CC on the ticket (may be null), plus the VO and SC the request concerns
    private ContactRecord vomanager;
    private VORecord vorec;
    private SCRecord screc;

    /**
     * Builds the RA enrollment dialog, pre-filling the request-detail text area with
     * the osg.certificate.ra.txt template shipped alongside this class.
     */
    public RARequestForm(UserContext context, ContactRecord vomanager, VORecord vorec, SCRecord screc) {
        super(context.getPageRoot());
        this.context = context;
        this.vomanager = vomanager;
        this.vorec = vorec;
        this.screc = screc;

        setTitle("RA Enrollment Request");

        new DivRepStaticContent(this, "<p class=\"help-block\">Please update the following request template and submit.</p>");

        desc = new DivRepTextArea(this);
        desc.setLabel("Request Detail");

        // load the request template from the classpath
        InputStream template = getClass().getResourceAsStream("osg.certificate.ra.txt");
        StringBuilder request = ResourceReader.loadContent(template);
        desc.setValue(request.toString());
        desc.setHeight(300);
        desc.setWidth(450);
        desc.setRequired(true);
    }

    /**
     * Opens a Footprints ticket for the RA enrollment request on behalf of the
     * logged-in user. Guests are rejected with an alert.
     *
     * @return true if the ticket was opened, false for guest users
     */
    @Override
    protected boolean doSubmit() {
        Authorization auth = context.getAuthorization();
        if (auth.isUser()) {
            Footprints fp = new Footprints(context);
            FPTicket ticket = fp.new FPTicket();
            ContactRecord requester = auth.getContact();
            ticket.title = "RA Request for " + requester.name;
            ticket.name = requester.name;
            ticket.email = requester.primary_email;
            ticket.phone = requester.primary_phone;
            ticket.description = desc.getValue();

            ticket.metadata.put("SUBMITTED_VIA", "OIM/RARequestForm");
            ticket.metadata.put("SUBMITTER_NAME", requester.name);
            ticket.metadata.put("SUBMITTER_DN", auth.getUserDN());
            ticket.metadata.put("ASSOCIATED_VO_ID", vorec.id.toString());
            ticket.metadata.put("ASSOCIATED_VO_NAME", vorec.name);
            ticket.metadata.put("SUPPORTING_SC_ID", screc.id.toString());
            ticket.metadata.put("SUPPORTING_SC_NAME", screc.name);

            ticket.assignees.add(StaticConfig.conf.getProperty("certrequest.user.assignee"));

            // CC the VO manager (when known) and the OSG RA list
            if (vomanager != null) {
                ticket.ccs.add(vomanager.primary_email);
            }
            ticket.ccs.add("osg-ra@opensciencegrid.org"); // TODO - make this configurable

            ticket.nextaction = "OSG RA to process request";

            String ticket_id = fp.open(ticket);
            // BUGFIX: the log line previously said "GridAdmin Request" (another
            // copy/paste leftover) although this form opens RA requests
            log.info("Opened RA Request ticket with ID:" + ticket_id);
            alert("Opened ticket ID:" + ticket_id);
            return true;
        } else {
            alert("guest can't submit this.");
            return false;
        }
    }
}
| apache-2.0 |
87439247/pinpoint | plugins/redisson/src/main/java/com/navercorp/pinpoint/plugin/redisson/RedissonPlugin.java | 3886 | /*
* Copyright 2014 NAVER Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.navercorp.pinpoint.plugin.redisson;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentClass;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentException;
import com.navercorp.pinpoint.bootstrap.instrument.InstrumentMethod;
import com.navercorp.pinpoint.bootstrap.instrument.Instrumentor;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformCallback;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplate;
import com.navercorp.pinpoint.bootstrap.instrument.transformer.TransformTemplateAware;
import com.navercorp.pinpoint.bootstrap.logging.PLogger;
import com.navercorp.pinpoint.bootstrap.logging.PLoggerFactory;
import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPlugin;
import com.navercorp.pinpoint.bootstrap.plugin.ProfilerPluginSetupContext;
import java.security.ProtectionDomain;
/**
* @author jaehong.kim
*/
public class RedissonPlugin implements ProfilerPlugin, TransformTemplateAware {
    private final PLogger logger = PLoggerFactory.getLogger(this.getClass());

    // injected via setTransformTemplate() by the plugin framework
    private TransformTemplate transformTemplate;

    /**
     * Registers the Redisson class transformers unless disabled via the
     * "profiler.redisson" configuration flag (enabled by default).
     */
    @Override
    public void setup(ProfilerPluginSetupContext context) {
        boolean enable = context.getConfig().readBoolean("profiler.redisson", true);
        if (enable) {
            addRedissonClassEditors();
        }
    }

    // Instruments org.redisson.spring.cache.RedissonCache.
    // NOTE(review): the original comment here said "Jedis & BinaryJedis" -- apparently
    // a copy/paste leftover from the Jedis plugin; this method has nothing to do with Jedis.
    private void addRedissonClassEditors() {
        transformTemplate.transform("org.redisson.spring.cache.RedissonCache", new TransformCallback() {
            @Override
            public byte[] doInTransform(Instrumentor instrumentor, ClassLoader classLoader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws InstrumentException {
                InstrumentClass target = instrumentor.getInstrumentClass(classLoader, className, classfileBuffer);
                // expose the cache endpoint on the instrumented class
                target.addField(RedissonConstants.END_POINT_ACCESSOR);

                // constructors: intercepted to capture endpoint info at cache creation
                InstrumentMethod constructor1 = target.getConstructor("org.redisson.api.RedissonClient", "org.redisson.api.RMap");
                if (constructor1 != null) {
                    constructor1.addInterceptor("com.navercorp.pinpoint.plugin.redisson.interceptor.RedissonCacheConstructorInterceptor");
                }
                InstrumentMethod constructor2 = target.getConstructor("org.redisson.api.RedissonClient", "org.redisson.api.RMapCache", "org.redisson.spring.cache.CacheConfig");
                if (constructor2 != null) {
                    constructor2.addInterceptor("com.navercorp.pinpoint.plugin.redisson.interceptor.RedissonCacheConstructorInterceptor");
                }
                // get method: intercepted to trace cache reads
                final InstrumentMethod getMethod = target.getDeclaredMethod("get", "java.lang.Object");
                if (getMethod != null) {
                    getMethod.addInterceptor("com.navercorp.pinpoint.plugin.redisson.interceptor.RedissonCacheMethodInterceptor");
                }
                return target.toBytecode();
            }
        });
    }

    @Override
    public void setTransformTemplate(TransformTemplate transformTemplate) {
        this.transformTemplate = transformTemplate;
    }
} | apache-2.0 |