gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.geronimo.gshell.console;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.lang.NullArgumentException;
/**
 * Provides the framework to interactively get input from a console
* and "do something" with the line that was read.
*
* @version $Rev$ $Date$
*/
public class InteractiveConsole
    implements Runnable
{
    //
    // TODO: Rename to *Runner, since this is not really a Console impl
    //

    private static final Log log = LogFactory.getLog(InteractiveConsole.class);

    private final Console console;

    private final Executor executor;

    private final Prompter prompter;

    /**
     * True while the interactive loop is active.  Volatile because run()
     * typically executes on a dedicated thread while other threads call
     * {@link #isRunning} or influence shutdown.
     */
    private volatile boolean running = false;

    /** When true, reading a null line from the console stops the loop. */
    private volatile boolean shutdownOnNull = true;

    /**
     * Construct an interactive console runner.
     *
     * @param console   Source of input lines; must not be null
     * @param executor  Handles each line that is read; must not be null
     * @param prompter  Supplies the prompt to display; must not be null
     */
    public InteractiveConsole(final Console console, final Executor executor, final Prompter prompter) {
        if (console == null) {
            throw new NullArgumentException("console");
        }
        if (executor == null) {
            throw new NullArgumentException("executor");
        }
        if (prompter == null) {
            throw new NullArgumentException("prompter");
        }

        this.console = console;
        this.executor = executor;
        this.prompter = prompter;
    }

    /**
     * Enable or disable shutting down the interactive loop when
     * a null value is read from the given console.
     *
     * @param flag True to shutdown when a null is received; else false
     */
    public void setShutdownOnNull(final boolean flag) {
        this.shutdownOnNull = flag;
    }

    /**
     * @see #setShutdownOnNull
     */
    public boolean isShutdownOnNull() {
        return shutdownOnNull;
    }

    public boolean isRunning() {
        return running;
    }

    //
    // abort() ?
    //

    /**
     * Run the read/execute loop until the executor requests a stop or
     * (optionally) a null line is read.  An Exception or Error raised by a
     * single iteration is logged and the loop continues.
     */
    public void run() {
        log.info("Running...");
        running = true;

        while (running) {
            try {
                doRun();
            }
            catch (Exception e) {
                log.error("Exception", e);
            }
            catch (Error e) {
                log.error("Error", e);
            }
        }

        log.info("Stopped");
    }

    /**
     * One pass of the read/execute loop: read lines until null or until the
     * executor asks to stop.
     */
    private void doRun() throws Exception {
        boolean debug = log.isDebugEnabled();
        String line;

        while ((line = console.readLine(doGetPrompt())) != null) {
            if (debug) {
                log.debug("Read line: " + line);

                // Log the line as hex
                StringBuilder idx = new StringBuilder();
                StringBuilder hex = new StringBuilder();
                byte[] bytes = line.getBytes();

                for (byte b : bytes) {
                    // Mask to unsigned: Integer.toHexString on a negative byte
                    // would produce a sign-extended 8-digit value (e.g. ffffff80)
                    String h = Integer.toHexString(b & 0xFF);
                    hex.append("x").append(h).append(" ");
                    idx.append(" ").append((char)b).append(" ");
                }

                log.debug("HEX: " + hex);
                log.debug(" " + idx);
            }

            Executor.Result result = doExecute(line);

            // Allow executor to request that the loop stop
            if (result == Executor.Result.STOP) {
                log.debug("Executor requested STOP");
                running = false;
                // Return (not break) so the null-line shutdown handling below is
                // skipped; previously it ran after STOP and logged a misleading
                // "Input was null" message.
                return;
            }
        }

        // Line was null, maybe shutdown
        if (shutdownOnNull) {
            log.debug("Input was null; which will cause shutdown");
            running = false;
        }

        //
        // TODO: Probably need to expose more configurability for handing/rejecting shutdown
        //
        // Use-case is that Shell might want to disallow and print a "use exit command",
        // but Script interp wants this to exit and return control to Shell.
        //
    }

    protected Executor.Result doExecute(final String line) throws Exception {
        return executor.execute(line);
    }

    protected String doGetPrompt() {
        return prompter.getPrompt();
    }

    //
    // Executor
    //

    /**
     * Allows custom processing, the "do something".
     */
    public static interface Executor
    {
        enum Result {
            CONTINUE,
            STOP
        }

        Result execute(String line) throws Exception;
    }

    //
    // Prompter
    //

    /**
     * Allows custom prompt handling.
     */
    public static interface Prompter
    {
        /**
         * Return the prompt to be displayed.
         *
         * @return The prompt to be displayed; must not be null
         */
        String getPrompt();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.mapreduce;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.EOFException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.wal.HLog;
import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
/**
* Simple {@link InputFormat} for {@link HLog} files.
*/
@InterfaceAudience.Public
public class HLogInputFormat extends InputFormat<HLogKey, WALEdit> {
private static Log LOG = LogFactory.getLog(HLogInputFormat.class);
public static String START_TIME_KEY = "hlog.start.time";
public static String END_TIME_KEY = "hlog.end.time";
/**
* {@link InputSplit} for {@link HLog} files. Each split represent
* exactly one log file.
*/
static class HLogSplit extends InputSplit implements Writable {
private String logFileName;
private long fileSize;
private long startTime;
private long endTime;
/** for serialization */
public HLogSplit() {}
/**
* Represent an HLogSplit, i.e. a single HLog file.
* Start- and EndTime are managed by the split, so that HLog files can be
* filtered before WALEdits are passed to the mapper(s).
* @param logFileName
* @param fileSize
* @param startTime
* @param endTime
*/
public HLogSplit(String logFileName, long fileSize, long startTime, long endTime) {
this.logFileName = logFileName;
this.fileSize = fileSize;
this.startTime = startTime;
this.endTime = endTime;
}
@Override
public long getLength() throws IOException, InterruptedException {
return fileSize;
}
@Override
public String[] getLocations() throws IOException, InterruptedException {
// TODO: Find the data node with the most blocks for this HLog?
return new String[] {};
}
public String getLogFileName() {
return logFileName;
}
public long getStartTime() {
return startTime;
}
public long getEndTime() {
return endTime;
}
@Override
public void readFields(DataInput in) throws IOException {
logFileName = in.readUTF();
fileSize = in.readLong();
startTime = in.readLong();
endTime = in.readLong();
}
@Override
public void write(DataOutput out) throws IOException {
out.writeUTF(logFileName);
out.writeLong(fileSize);
out.writeLong(startTime);
out.writeLong(endTime);
}
@Override
public String toString() {
return logFileName + " (" + startTime + ":" + endTime + ") length:" + fileSize;
}
}
/**
* {@link RecordReader} for an {@link HLog} file.
*/
static class HLogRecordReader extends RecordReader<HLogKey, WALEdit> {
private HLog.Reader reader = null;
private HLog.Entry currentEntry = new HLog.Entry();
private long startTime;
private long endTime;
@Override
public void initialize(InputSplit split, TaskAttemptContext context)
throws IOException, InterruptedException {
HLogSplit hsplit = (HLogSplit)split;
Path logFile = new Path(hsplit.getLogFileName());
Configuration conf = context.getConfiguration();
LOG.info("Opening reader for "+split);
try {
this.reader = HLog.getReader(logFile.getFileSystem(conf), logFile, conf);
} catch (EOFException x) {
LOG.info("Ignoring corrupted HLog file: " + logFile
+ " (This is normal when a RegionServer crashed.)");
}
this.startTime = hsplit.getStartTime();
this.endTime = hsplit.getEndTime();
}
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
if (reader == null) return false;
HLog.Entry temp;
long i = -1;
do {
// skip older entries
try {
temp = reader.next(currentEntry);
i++;
} catch (EOFException x) {
LOG.info("Corrupted entry detected. Ignoring the rest of the file."
+ " (This is normal when a RegionServer crashed.)");
return false;
}
}
while(temp != null && temp.getKey().getWriteTime() < startTime);
if (temp == null) {
if (i > 0) LOG.info("Skipped " + i + " entries.");
LOG.info("Reached end of file.");
return false;
} else if (i > 0) {
LOG.info("Skipped " + i + " entries, until ts: " + temp.getKey().getWriteTime() + ".");
}
boolean res = temp.getKey().getWriteTime() <= endTime;
if (!res) {
LOG.info("Reached ts: " + temp.getKey().getWriteTime() + " ignoring the rest of the file.");
}
return res;
}
@Override
public HLogKey getCurrentKey() throws IOException, InterruptedException {
return currentEntry.getKey();
}
@Override
public WALEdit getCurrentValue() throws IOException, InterruptedException {
return currentEntry.getEdit();
}
@Override
public float getProgress() throws IOException, InterruptedException {
// N/A depends on total number of entries, which is unknown
return 0;
}
@Override
public void close() throws IOException {
LOG.info("Closing reader");
if (reader != null) this.reader.close();
}
}
@Override
public List<InputSplit> getSplits(JobContext context) throws IOException,
InterruptedException {
Configuration conf = context.getConfiguration();
Path inputDir = new Path(conf.get("mapred.input.dir"));
long startTime = conf.getLong(START_TIME_KEY, Long.MIN_VALUE);
long endTime = conf.getLong(END_TIME_KEY, Long.MAX_VALUE);
FileSystem fs = inputDir.getFileSystem(conf);
List<FileStatus> files = getFiles(fs, inputDir, startTime, endTime);
List<InputSplit> splits = new ArrayList<InputSplit>(files.size());
for (FileStatus file : files) {
splits.add(new HLogSplit(file.getPath().toString(), file.getLen(), startTime, endTime));
}
return splits;
}
private List<FileStatus> getFiles(FileSystem fs, Path dir, long startTime, long endTime)
throws IOException {
List<FileStatus> result = new ArrayList<FileStatus>();
LOG.debug("Scanning " + dir.toString() + " for HLog files");
FileStatus[] files = fs.listStatus(dir);
if (files == null) return Collections.emptyList();
for (FileStatus file : files) {
if (file.isDir()) {
// recurse into sub directories
result.addAll(getFiles(fs, file.getPath(), startTime, endTime));
} else {
String name = file.getPath().toString();
int idx = name.lastIndexOf('.');
if (idx > 0) {
try {
long fileStartTime = Long.parseLong(name.substring(idx+1));
if (fileStartTime <= endTime) {
LOG.info("Found: " + name);
result.add(file);
}
} catch (NumberFormatException x) {
idx = 0;
}
}
if (idx == 0) {
LOG.warn("File " + name + " does not appear to be an HLog file. Skipping...");
}
}
}
return result;
}
@Override
public RecordReader<HLogKey, WALEdit> createRecordReader(InputSplit split,
TaskAttemptContext context) throws IOException, InterruptedException {
return new HLogRecordReader();
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.synapse.startup.quartz;
import org.apache.axiom.om.OMElement;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.synapse.ManagedLifecycle;
import org.apache.synapse.SynapseException;
import org.apache.synapse.commons.util.PropertyHelper;
import org.apache.synapse.core.SynapseEnvironment;
import org.apache.synapse.startup.AbstractStartup;
import org.apache.synapse.task.*;
import javax.xml.namespace.QName;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.*;
public class StartUpController extends AbstractStartup {

    private static final Log logger = LogFactory.getLog(StartUpController.class.getName());

    /** Static description of the task: name, group, impl class, properties. */
    private TaskDescription taskDescription;

    /** Task manager obtained from the Synapse environment during init(). */
    private SynapseTaskManager synapseTaskManager;

    /** The instantiated task implementation (see resolveTaskImpl). */
    private Object task = null;

    public QName getTagQName() {
        return SimpleQuartzFactory.TASK;
    }

    /**
     * Destroys the task instance and removes its schedule and description
     * from the task infrastructure, when the infrastructure was initialized.
     */
    public void destroy() {
        if (!destroyTask()) {
            return;
        }
        // Guard against destroy() being called before init() populated the
        // manager; previously this dereferenced a null synapseTaskManager.
        if (synapseTaskManager != null && synapseTaskManager.isInitialized()) {
            TaskScheduler taskScheduler = synapseTaskManager.getTaskScheduler();
            if (taskScheduler != null && taskScheduler.isInitialized()) {
                taskScheduler.deleteTask(taskDescription.getName(), taskDescription.getTaskGroup());
            }
            TaskDescriptionRepository repository = synapseTaskManager.getTaskDescriptionRepository();
            if (repository != null) {
                repository.removeTaskDescription(taskDescription.getName());
            }
        }
    }

    /**
     * Initializes the startup: registers the task description, resolves and
     * configures the task implementation, and submits it to the scheduler.
     *
     * @param synapseEnvironment the Synapse environment this startup runs in
     */
    public void init(SynapseEnvironment synapseEnvironment) {
        if (taskDescription == null) {
            handleException("Error while initializing the startup. TaskDescription is null.");
        }
        initSynapseTaskManager(synapseEnvironment);
        TaskDescriptionRepository repository = synapseTaskManager.getTaskDescriptionRepository();
        if (repository == null) {
            handleException("Task Description Repository cannot be found");
            return;
        }
        repository.addTaskDescription(taskDescription);
        if (!processPinnedServers(taskDescription, synapseEnvironment)) {
            // This server is not in the task's pinned-servers list.
            return;
        }
        resolveTaskImpl(taskDescription, synapseEnvironment);
        loadTaskProperties();
        initializeTask(synapseEnvironment);
        // Record the resolved instance and class on the description unless set.
        if (taskDescription.getResource(TaskDescription.INSTANCE) == null
                || taskDescription.getResource(TaskDescription.CLASSNAME) == null) {
            taskDescription.addResource(TaskDescription.INSTANCE, task);
            taskDescription.addResource(TaskDescription.CLASSNAME, task.getClass().getName());
        }
        try {
            Map<String, Object> map = new HashMap<String, Object>();
            map.put(TaskConstants.SYNAPSE_ENV, synapseEnvironment);
            TaskScheduler taskScheduler = synapseTaskManager.getTaskScheduler();
            TaskManager taskManager = synapseTaskManager.getTaskManagerImpl();
            if (taskManager == null) {
                logger.error("Could not initialize Start up controller. TaskManager not found.");
                return;
            }
            taskManager.setProperties(map);
            // NOTE(review): taskScheduler may be null here; submitTask() handles
            // that, but taskScheduler.init() would NPE first (caught below and
            // rethrown as SynapseException) — confirm the manager always supplies
            // a scheduler once initialized.
            taskScheduler.init(synapseEnvironment.getSynapseConfiguration().getProperties(),
                    taskManager);
            submitTask(taskScheduler, taskDescription);
            if (logger.isDebugEnabled()) {
                logger.debug("Submitted task [" + taskDescription.getName() + "] to Synapse task scheduler.");
            }
        } catch (Exception e) {
            String msg = "Error starting up Scheduler : " + e.getLocalizedMessage();
            logger.fatal(msg, e);
            throw new SynapseException(msg, e);
        }
    }

    /** Runs the task's ManagedLifecycle init hook, when it has one. */
    private void initializeTask(SynapseEnvironment synapseEnvironment) {
        if (task instanceof ManagedLifecycle) {
            ((ManagedLifecycle) task).init(synapseEnvironment);
        }
    }

    /**
     * Runs the task's ManagedLifecycle destroy hook.
     *
     * @return false when there is no task description to clean up
     */
    private boolean destroyTask() {
        if (taskDescription == null) {
            logger.debug("No task found to delete.");
            return false;
        }
        if (task instanceof ManagedLifecycle) {
            ((ManagedLifecycle) task).destroy();
        }
        return true;
    }

    /** Applies the description's XML-defined properties to the task instance. */
    private void loadTaskProperties() {
        Set<?> properties = taskDescription.getXmlProperties();
        for (Object property : properties) {
            OMElement prop = (OMElement) property;
            if (logger.isDebugEnabled()) {
                logger.debug("loaded task property : " + prop.toString());
            }
            PropertyHelper.setStaticProperty(prop, task);
        }
    }

    /**
     * Obtains the SynapseTaskManager from the environment, initializing it with
     * default parameters when it has not been initialized yet.
     */
    private boolean initSynapseTaskManager(SynapseEnvironment synapseEnvironment) {
        synapseTaskManager = synapseEnvironment.getTaskManager();
        if (!synapseTaskManager.isInitialized()) {
            logger.warn("SynapseTaskManager is not properly initialized. Initializing now with " +
                    "default parameters.");
            synapseTaskManager.init(null, null, null);
        }
        return true;
    }

    /**
     * Schedules the task with the given scheduler.
     *
     * @return true when the task was scheduled; false when either argument
     *         prevents scheduling
     */
    private boolean submitTask(TaskScheduler taskScheduler, TaskDescription taskDescription) {
        if (taskDescription == null) {
            return false;
        }
        if (taskScheduler != null) {
            return taskScheduler.scheduleTask(taskDescription);
        } else {
            if (logger.isDebugEnabled()) {
                logger.debug("TaskScheduler cannot be found for :" +
                        TaskConstants.TASK_SCHEDULER + " , " +
                        "therefore ignore scheduling of Task " + taskDescription);
            }
            return false;
        }
    }

    /**
     * Checks whether this server may run the task: when the description pins
     * the task to specific servers, this server's name must be among them.
     *
     * @return true when the task may be scheduled on this server
     */
    private boolean processPinnedServers(TaskDescription taskDescription, SynapseEnvironment synapseEnvironment) {
        String thisServerName = synapseEnvironment.getServerContextInformation()
                .getServerConfigurationInformation().getServerName();
        if (thisServerName == null || thisServerName.equals("")) {
            // Fall back to the host name, then to "localhost".
            try {
                InetAddress address = InetAddress.getLocalHost();
                thisServerName = address.getHostName();
            } catch (UnknownHostException e) {
                logger.warn("Could not get the host name", e);
            }
            if (thisServerName == null || thisServerName.equals("")) {
                thisServerName = "localhost";
            }
        }
        logger.debug("Synapse server name : " + thisServerName);
        List<?> pinnedServers = taskDescription.getPinnedServers();
        if (pinnedServers != null && !pinnedServers.isEmpty()) {
            if (!pinnedServers.contains(thisServerName)) {
                logger.info("Server name not in pinned servers list. Not starting Task : " +
                        getName());
                return false; // do not continue the caller of this method.
            }
        }
        return true;
    }

    /**
     * Instantiates the task implementation class named in the description,
     * defaulting to MessageInjector when none is configured.
     */
    private boolean resolveTaskImpl(TaskDescription taskDescription, SynapseEnvironment synapseEnvironment) {
        if (synapseEnvironment == null) {
            return false;
        }
        String taskImplClassName = taskDescription.getTaskImplClassName();
        if (taskImplClassName == null || taskImplClassName.isEmpty()) {
            taskImplClassName = "org.apache.synapse.startup.tasks.MessageInjector";
        }
        taskDescription.setTaskImplClassName(taskImplClassName);
        try {
            task = getClass().getClassLoader().loadClass(
                    taskDescription.getTaskImplClassName()).newInstance();
            if (!(task instanceof Task)) {
                logger.warn("Task implementation is not a Synapse Task.");
            }
        } catch (Exception e) {
            handleException("Cannot instantiate task : " + taskDescription.getTaskImplClassName(), e);
        }
        return true;
    }

    /** Logs the message and throws a SynapseException. */
    private static void handleException(String message) {
        logger.error(message);
        throw new SynapseException(message);
    }

    /** Logs the message with its cause and throws a SynapseException. */
    private static void handleException(String message, Exception e) {
        logger.error(message, e);
        throw new SynapseException(message, e);
    }

    public TaskDescription getTaskDescription() {
        return taskDescription;
    }

    public void setTaskDescription(TaskDescription taskDescription) {
        this.taskDescription = taskDescription;
    }
}
| |
package com.runetooncraft.warpigeon.engine.utils;
import java.awt.Canvas;
import java.awt.Dimension;
import javax.swing.JFrame;
import com.runetooncraft.warpigeon.engine.GameType;
import com.runetooncraft.warpigeon.engine.WPEngine4;
public class BasicGameWindow extends Canvas implements Window {
    private static final long serialVersionUID = 1L;

    // Unscaled (logical) game size; height defaults to a 16:9 ratio of width.
    int width = 300;
    int height = width / 16 * 9;
    public int scale = 3;

    JFrame frame;       // whichever frame implementation is currently active
    BasicFrame BFrame;  // never assigned here (BasicFrame path is commented out)
    SDKFrame SDK;       // frame used for the PIGION_SDK game type
    Dimension size = null;      // scaled pixel size of the canvas
    private GameType gametype;
    public Dimension GameFrame; // scaled game-area dimensions

    /**
     * for PigionSDK
     *
     * Builds the window for the given game type: a SDKFrame for PIGION_SDK,
     * or a plain JFrame for the free-roam and 3D game types.
     *
     * NOTE(review): all three branches compute
     * size = width * (scale / 1000) — integer division, so any Scale below
     * 1000 yields a 0x0 size.  Presumably callers pass Scale in thousandths;
     * confirm, and note this differs from SetWidth/SetHeight below, which
     * multiply by scale directly.
     */
    public BasicGameWindow(int Width, int Height, int Scale, GameType gametype) {
        this.gametype = gametype;
        if(gametype.equals(GameType.PIGION_SDK)) {
            this.width = Width;
            this.height = Height;
            this.scale = Scale;
            size = new Dimension(width * (scale / 1000), height * (scale / 1000));
            //BFrame = new BasicFrame();
            SDK = new SDKFrame();
            //frame = BFrame;
            frame = SDK;
            GameFrame = new Dimension(size.width,size.height);
            setPreferredSize(GameFrame);
            SDK.GamePanel.add(this);
            SDK.GamePanel.setSize(GameFrame);
            //BFrame.gridBagLayout.columnWidths = new int[]{size.width, 0};
            //BFrame.gridBagLayout.rowHeights = new int[]{size.height, 116, 0};
            SDK.setGameSize(GameFrame.width,GameFrame.height, scale);
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setLocationRelativeTo(null);
            frame.setVisible(true);
        } else if(gametype.equals(GameType.FREE_ROAM_TILE_BASED)) {
            this.width = Width;
            this.height = Height;
            this.scale = Scale;
            size = new Dimension(width * (scale / 1000), height * (scale / 1000));
            setPreferredSize(size);
            frame = new JFrame();
            frame.setSize(size);
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setLocationRelativeTo(null);
            frame.setVisible(true);
        } else if(gametype.equals(GameType.THREE_DIMENSIONAL_GAME)) {
            this.width = Width;
            this.height = Height;
            this.scale = Scale;
            size = new Dimension(width * (scale / 1000), height * (scale / 1000));
            setPreferredSize(size);
            setMinimumSize(size);
            setMaximumSize(size);
            frame = new JFrame();
            // NOTE(review): pack() before setSize(); setSize later overrides the
            // packed size — confirm pack() is needed at all here.
            frame.pack();
            frame.setResizable(true);
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.setLocationRelativeTo(null);
            frame.setSize(size);
            frame.setVisible(true);
        }
    }

    /**
     * Resizes the game area and rebuilds the buffer strategy, toggling the
     * engine through its SCREEN_RESETTING state for the duration.
     * Only valid for the PIGION_SDK path: dereferences SDK unconditionally.
     *
     * @param dimension new scaled game-area size
     * @param scale     new scale factor
     * @param engine    engine whose state is toggled around the resize
     */
    @SuppressWarnings("static-access")
    public void setDimension(Dimension dimension, int scale, WPEngine4 engine) {
        // static-access: engine.state.* accesses what appears to be a static
        // member through an instance (hence the suppression above).
        engine.state = engine.state.SCREEN_RESETTING;
        this.scale = scale;
        GameFrame = dimension;
        this.width = GameFrame.width;
        this.height = GameFrame.height;
        setPreferredSize(GameFrame);
        SDK.GamePanel.setSize(GameFrame);
        SDK.setGameSize(GameFrame.width,GameFrame.height, scale);
        createBufferStrategy(3);
        engine.state = engine.state.PLAY;
    }

    /**
     * Sets the unscaled width and propagates the recomputed scaled size to the
     * SDK frame.  NOTE(review): multiplies by scale directly, unlike the
     * constructor's (scale / 1000) — confirm which convention is intended.
     */
    @Override
    public void SetWidth(int width) {
        this.width = width;
        size.setSize(this.width * scale, this.height * scale);
        GameFrame = new Dimension(size.width,size.height);
        setPreferredSize(GameFrame);
        SDK.GamePanel.setSize(GameFrame);
        SDK.setGameSize(GameFrame.width,GameFrame.height, scale);
    }

    /**
     * Sets the unscaled height; see SetWidth for the scaling caveat.
     */
    @Override
    public void SetHeight(int height) {
        this.height = height;
        size.setSize(this.width * scale, this.height * scale);
        GameFrame = new Dimension(size.width,size.height);
        setPreferredSize(GameFrame);
        SDK.GamePanel.setSize(GameFrame);
        SDK.setGameSize(GameFrame.width,GameFrame.height, scale);
    }

    /**
     * @return the scaled game-area width for PIGION_SDK, otherwise the
     *         enclosing frame's width
     */
    @Override
    public int getWidth() {
        if(gametype.equals(GameType.PIGION_SDK)) {
            return GameFrame.width;
        }
        return frame.getWidth(); //because scaling is not included
    }

    /**
     * @return the scaled game-area height for PIGION_SDK, otherwise the
     *         enclosing frame's height
     */
    @Override
    public int getHeight() {
        if(gametype.equals(GameType.PIGION_SDK)) {
            return GameFrame.height;
        }
        return frame.getHeight();
    }

    /**
     * @return JFrame the active window frame
     */
    public JFrame GetFrame() {
        return frame;
    }

    /**
     * Set if you want the JFrame to be resizable or not
     * @param bool true to allow resizing
     */
    public void SetWindowResizable(boolean bool) {
        frame.setResizable(bool);
    }

    /**
     * Set the game window title
     * @param title the window title text
     */
    public void SetWindowTitle(String title) {
        frame.setTitle(title);
    }

    /**
     * for JFrame purposes, please use this to set your class as the JFrame's class instantiation
     *
     * NOTE(review): the PigionSDK branch is entirely commented out, so passing
     * PigionSDK=true is currently a no-op — confirm whether that is intended.
     *
     * @param extendedclass canvas to add to the frame
     * @param PigionSDK     true for the (currently disabled) SDK path
     */
    public void SetClassInstance(Canvas extendedclass, Boolean PigionSDK) {
        if(PigionSDK) {
            //BFrame.GamePanel.add(extendedclass);
            //SDK.GamePanel.add(extendedclass);
        } else {
            frame.add(extendedclass);
        }
    }

    /**
     * Sets the Frame size the same as the component
     */
    /**
     * By default true
     * Sets the window visibility
     * @param visibile true to show the window
     */
    public void SetVisible(boolean visibile) {
        frame.setVisible(visibile);
    }

    /** @return the unscaled (logical) width */
    @Override
    public int getUnscaledWidth() {
        return width;
    }

    /** @return the unscaled (logical) height */
    @Override
    public int getUnscaledHeight() {
        return height;
    }

    /**
     * Gets window title
     * @return the active frame's title
     */
    public String GetWindowTitle() {
        return frame.getTitle();
    }

    /**
     * Used by PigionSDK to swap the active frame.
     */
    public void SetViewedFrame(JFrame frame) {
        this.frame = frame;
    }

    /**
     * Returns the BasicFrame instance (currently never assigned; see BFrame).
     */
    public BasicFrame getBasicFrame() {
        return BFrame;
    }

    /**
     * Returns the SDKFrame instance.
     */
    public SDKFrame getSDKFrame() {
        return SDK;
    }
}
| |
/* Copyright (C) 2014 TU Dortmund
* This file is part of LearnLib, http://www.learnlib.de/.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.learnlib.algorithms.discriminationtree.hypothesis;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import net.automatalib.automata.UniversalDeterministicAutomaton;
import net.automatalib.automata.concepts.StateIDs;
import net.automatalib.graphs.Graph;
import net.automatalib.graphs.concepts.NodeIDs;
import net.automatalib.graphs.dot.DefaultDOTHelper;
import net.automatalib.graphs.dot.GraphDOTHelper;
import net.automatalib.words.Alphabet;
import net.automatalib.words.Word;
import de.learnlib.api.AccessSequenceTransformer;
/**
* Basic hypothesis data structure for Discrimination Tree learning algorithms.
*
* @author Malte Isberner
*
* @param <I> input symbol type
* @param <O> SUL output type
* @param <SP> state property type
* @param <TP> transition property type
*/
public class DTLearnerHypothesis<I, O, SP, TP> implements
        UniversalDeterministicAutomaton<HState<I,O,SP,TP>, I, HTransition<I,O,SP,TP>, SP, TP>,
        AccessSequenceTransformer<I>,
        StateIDs<HState<I,O,SP,TP>> {

    private final Alphabet<I> alphabet;
    private final HState<I, O, SP, TP> root;
    private final List<HState<I, O, SP, TP>> nodes = new ArrayList<>();

    /**
     * Creates a hypothesis automaton consisting solely of the root
     * (initial) state.
     *
     * @param alphabet the input alphabet
     */
    public DTLearnerHypothesis(Alphabet<I> alphabet) {
        this.alphabet = alphabet;
        this.root = new HState<>(alphabet.size());
        this.nodes.add(root);
    }

    /**
     * Creates and registers a new state, promoting the given transition to
     * the spanning-tree edge leading into it.
     *
     * @param treeIncoming the incoming transition of the new state
     * @return the freshly created state
     */
    public HState<I, O, SP, TP> createState(
            HTransition<I, O, SP, TP> treeIncoming) {
        HState<I, O, SP, TP> created = new HState<>(alphabet.size(),
                nodes.size(), treeIncoming);
        nodes.add(created);
        treeIncoming.makeTree(created);
        return created;
    }

    @Override
    public HTransition<I, O, SP, TP> getTransition(
            HState<I, O, SP, TP> state, I symbol) {
        return state.getTransition(alphabet.getSymbolIndex(symbol));
    }

    @Override
    public Collection<HState<I, O, SP, TP>> getStates() {
        return Collections.unmodifiableCollection(nodes);
    }

    @Override
    public StateIDs<HState<I, O, SP, TP>> stateIDs() {
        return this;
    }

    @Override
    public HState<I, O, SP, TP> getInitialState() {
        return root;
    }

    @Override
    public SP getStateProperty(HState<I, O, SP, TP> state) {
        return state.getProperty();
    }

    @Override
    public HState<I, O, SP, TP> getState(int id) {
        return nodes.get(id);
    }

    @Override
    public int getStateId(HState<I, O, SP, TP> state) {
        return state.getId();
    }

    /**
     * A word is an access sequence iff every step from the root follows a
     * spanning-tree transition.
     */
    @Override
    public boolean isAccessSequence(Word<I> word) {
        HState<I, O, SP, TP> current = root;
        for (I symbol : word) {
            HTransition<I, O, SP, TP> transition =
                    current.getTransition(alphabet.getSymbolIndex(symbol));
            if (!transition.isTree()) {
                return false;
            }
            current = transition.getTreeTarget();
        }
        return true;
    }

    @Override
    public Word<I> transformAccessSequence(Word<I> word) {
        return getState(word).getAccessSequence();
    }

    @Override
    public HState<I, O, SP, TP> getSuccessor(HTransition<I, O, SP, TP> trans) {
        return trans.currentTarget();
    }

    @Override
    public TP getTransitionProperty(HTransition<I, O, SP, TP> trans) {
        return trans.getProperty();
    }

    /**
     * Graph view of the hypothesis, primarily for DOT visualization.
     */
    public class GraphView implements Graph<HState<I,O,SP,TP>, HTransition<I, O, SP, TP>>,
            NodeIDs<HState<I,O,SP,TP>> {

        @Override
        public Collection<HState<I, O, SP, TP>> getNodes() {
            return Collections.unmodifiableCollection(nodes);
        }

        @Override
        public Collection<HTransition<I, O, SP, TP>> getOutgoingEdges(
                HState<I, O, SP, TP> node) {
            return node.getOutgoingTransitions();
        }

        @Override
        public HState<I, O, SP, TP> getTarget(HTransition<I, O, SP, TP> edge) {
            return edge.currentTarget();
        }

        @Override
        public NodeIDs<HState<I, O, SP, TP>> nodeIDs() {
            return this;
        }

        @Override
        public HState<I, O, SP, TP> getNode(int id) {
            return nodes.get(id);
        }

        @Override
        public int getNodeId(HState<I, O, SP, TP> node) {
            return node.getId();
        }

        @Override
        public GraphDOTHelper<HState<I, O, SP, TP>, HTransition<I, O, SP, TP>> getGraphDOTHelper() {
            return new DefaultDOTHelper<HState<I, O, SP, TP>, HTransition<I, O, SP, TP>>() {

                // The root is the sole initial node.
                @Override
                protected Collection<? extends HState<I, O, SP, TP>> initialNodes() {
                    return Collections.singleton(root);
                }

                // Label each node with its string representation.
                @Override
                public boolean getNodeProperties(HState<I, O, SP, TP> node,
                        Map<String, String> properties) {
                    if (!super.getNodeProperties(node, properties)) {
                        return false;
                    }
                    properties.put(NodeAttrs.LABEL, node.toString());
                    return true;
                }

                // Label edges with their input symbol; render tree edges bold.
                @Override
                public boolean getEdgeProperties(HState<I, O, SP, TP> src,
                        HTransition<I, O, SP, TP> edge,
                        HState<I, O, SP, TP> tgt, Map<String, String> properties) {
                    if (!super.getEdgeProperties(src, edge, tgt, properties)) {
                        return false;
                    }
                    properties.put(EdgeAttrs.LABEL, String.valueOf(edge.getSymbol()));
                    if (edge.isTree()) {
                        properties.put(EdgeAttrs.STYLE, "bold");
                    }
                    return true;
                }
            };
        }
    }

    public GraphView graphView() {
        return new GraphView();
    }
}
| |
/**
* SAHARA Scheduling Server
*
* Schedules and assigns local laboratory rigs.
*
* @license See LICENSE in the top level directory for complete license terms.
*
* Copyright (c) 2009, University of Technology, Sydney
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the University of Technology, Sydney nor the names
* of its contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* @author Michael Diponio (mdiponio)
* @date 3rd March 2009
*/
package au.edu.uts.eng.remotelabs.schedserver.permissions.intf.types;
import java.io.Serializable;
import java.util.ArrayList;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamReader;
import javax.xml.stream.XMLStreamWriter;
import org.apache.axiom.om.OMConstants;
import org.apache.axiom.om.OMDataSource;
import org.apache.axiom.om.OMElement;
import org.apache.axiom.om.OMFactory;
import org.apache.axiom.om.impl.llom.OMSourcedElementImpl;
import org.apache.axis2.databinding.ADBBean;
import org.apache.axis2.databinding.ADBDataSource;
import org.apache.axis2.databinding.ADBException;
import org.apache.axis2.databinding.utils.BeanUtil;
import org.apache.axis2.databinding.utils.ConverterUtil;
import org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl;
import org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter;
/**
* UserClassIDType bean class.
*/
public class UserClassIDType extends OperationRequestType implements ADBBean
{
    /*
     * This type was generated from the piece of schema that had
     * name = UserClassIDType
     * Namespace URI = http://remotelabs.eng.uts.edu.au/schedserver/permissions
     * Namespace Prefix = ns1
     */

    private static final long serialVersionUID = -8732214636385329013L;

    /** User class identifier. Integer.MIN_VALUE acts as the "unset" sentinel. */
    protected int userClassID;

    /** Whether userClassID has been set and should be serialized. */
    protected boolean userClassIDTracker = false;

    /** Optional user class name. */
    protected String userClassName;

    /** Whether userClassName has been set and should be serialized. */
    protected boolean userClassNameTracker = false;

    /**
     * Gets the user class identifier.
     *
     * @return user class identifier, Integer.MIN_VALUE if unset
     */
    public int getUserClassID()
    {
        return this.userClassID;
    }

    /**
     * Sets the user class identifier. Passing Integer.MIN_VALUE clears the
     * tracker so the element is omitted from the serialized form.
     *
     * @param param user class identifier
     */
    public void setUserClassID(final int param)
    {
        if (param == Integer.MIN_VALUE)
        {
            this.userClassIDTracker = false;
        }
        else
        {
            this.userClassIDTracker = true;
        }
        this.userClassID = param;
    }

    /**
     * Gets the user class name.
     *
     * @return user class name, may be null if unset
     */
    public String getUserClassName()
    {
        return this.userClassName;
    }

    /**
     * Sets the user class name. Passing null clears the tracker so the
     * optional element is omitted from the serialized form.
     *
     * @param param user class name
     */
    public void setUserClassName(final String param)
    {
        if (param != null)
        {
            this.userClassNameTracker = true;
        }
        else
        {
            this.userClassNameTracker = false;
        }
        this.userClassName = param;
    }

    /**
     * Returns the canonical prefix ("ns1") for the permissions schema
     * namespace, or a freshly generated unique prefix for any other namespace.
     *
     * @param namespace namespace URI
     * @return prefix to use
     */
    private static String generatePrefix(final String namespace)
    {
        if (namespace.equals("http://remotelabs.eng.uts.edu.au/schedserver/permissions"))
        {
            return "ns1";
        }
        return BeanUtil.getUniquePrefix();
    }

    /**
     * Checks whether the given reader is MTOM aware (i.e. supports the
     * IS_DATA_HANDLERS_AWARE property for binary content).
     *
     * @param reader stream reader to test
     * @return true if the reader is MTOM aware
     */
    public static boolean isReaderMTOMAware(final XMLStreamReader reader)
    {
        boolean isReaderMTOMAware = false;
        try
        {
            isReaderMTOMAware = Boolean.TRUE.equals(reader.getProperty(OMConstants.IS_DATA_HANDLERS_AWARE));
        }
        catch (final IllegalArgumentException e)
        {
            // Reader does not recognise the property - treat as not MTOM aware.
            isReaderMTOMAware = false;
        }
        return isReaderMTOMAware;
    }

    /**
     * Wraps this bean in a lazily-serialized OMElement backed by an
     * ADBDataSource, so serialization is deferred until the element is read.
     */
    @Override
    public OMElement getOMElement(final QName parentQName, final OMFactory factory) throws ADBException
    {
        final OMDataSource dataSource = new ADBDataSource(this, parentQName)
        {
            @Override
            public void serialize(final MTOMAwareXMLStreamWriter xmlWriter) throws XMLStreamException
            {
                UserClassIDType.this.serialize(this.parentQName, factory, xmlWriter);
            }
        };
        return new OMSourcedElementImpl(parentQName, factory, dataSource);
    }

    @Override
    public void serialize(final QName parentQName, final OMFactory factory, final MTOMAwareXMLStreamWriter xmlWriter)
            throws XMLStreamException, ADBException
    {
        this.serialize(parentQName, factory, xmlWriter, false);
    }

    /**
     * Serializes this bean to XML. Writes the start element (registering a
     * namespace prefix if needed), an xsi:type attribute, then each tracked
     * field as a child element in schema order.
     *
     * NOTE(review): the serializeType parameter is never consulted - the
     * xsi:type attribute is always written; confirm this matches the peer
     * generated types.
     */
    @Override
    public void serialize(final QName parentQName, final OMFactory factory, final MTOMAwareXMLStreamWriter xmlWriter,
            final boolean serializeType) throws XMLStreamException, ADBException
    {
        String prefix = parentQName.getPrefix();
        String namespace = parentQName.getNamespaceURI();
        if ((namespace != null) && (namespace.trim().length() > 0))
        {
            final String writerPrefix = xmlWriter.getPrefix(namespace);
            if (writerPrefix != null)
            {
                // Namespace already bound on the writer - reuse its prefix.
                xmlWriter.writeStartElement(namespace, parentQName.getLocalPart());
            }
            else
            {
                if (prefix == null)
                {
                    prefix = UserClassIDType.generatePrefix(namespace);
                }
                xmlWriter.writeStartElement(prefix, parentQName.getLocalPart(), namespace);
                xmlWriter.writeNamespace(prefix, namespace);
                xmlWriter.setPrefix(prefix, namespace);
            }
        }
        else
        {
            // No namespace on the parent - write an unqualified element.
            xmlWriter.writeStartElement(parentQName.getLocalPart());
        }
        final String namespacePrefix = this.registerPrefix(xmlWriter,
                "http://remotelabs.eng.uts.edu.au/schedserver/permissions");
        if ((namespacePrefix != null) && (namespacePrefix.trim().length() > 0))
        {
            this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", namespacePrefix
                    + ":UserClassIDType", xmlWriter);
        }
        else
        {
            this.writeAttribute("xsi", "http://www.w3.org/2001/XMLSchema-instance", "type", "UserClassIDType",
                    xmlWriter);
        }
        // requestorID (inherited) - only serialized when its tracker is set.
        if (this.requestorIDTracker)
        {
            namespace = "";
            // Generated dead branch: namespace is always "" here.
            if (!namespace.equals(""))
            {
                prefix = xmlWriter.getPrefix(namespace);
                if (prefix == null)
                {
                    prefix = UserClassIDType.generatePrefix(namespace);
                    xmlWriter.writeStartElement(prefix, "requestorID", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                }
                else
                {
                    xmlWriter.writeStartElement(namespace, "requestorID");
                }
            }
            else
            {
                xmlWriter.writeStartElement("requestorID");
            }
            if (this.requestorID == Integer.MIN_VALUE)
            {
                throw new ADBException("requestorID cannot be null!!");
            }
            else
            {
                xmlWriter.writeCharacters(ConverterUtil.convertToString(this.requestorID));
            }
            xmlWriter.writeEndElement();
        }
        // Inherited name/namespace sequence - delegated to its own serializer.
        if (this.nameNameSpaceSequenceTracker)
        {
            if (this.nameNameSpaceSequence == null)
            {
                throw new ADBException("OperationRequestTypeSequence_type0 cannot be null!!");
            }
            this.nameNameSpaceSequence.serialize(null, factory, xmlWriter);
        }
        // requestorQName (inherited) - only serialized when its tracker is set.
        if (this.requestorQNameTracker)
        {
            namespace = "";
            if (!namespace.equals(""))
            {
                prefix = xmlWriter.getPrefix(namespace);
                if (prefix == null)
                {
                    prefix = UserClassIDType.generatePrefix(namespace);
                    xmlWriter.writeStartElement(prefix, "requestorQName", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                }
                else
                {
                    xmlWriter.writeStartElement(namespace, "requestorQName");
                }
            }
            else
            {
                xmlWriter.writeStartElement("requestorQName");
            }
            if (this.requestorQName == null)
            {
                throw new ADBException("requestorQName cannot be null!!");
            }
            else
            {
                xmlWriter.writeCharacters(this.requestorQName);
            }
            xmlWriter.writeEndElement();
        }
        // userClassID - only serialized when set.
        if (this.userClassIDTracker)
        {
            namespace = "";
            if (!namespace.equals(""))
            {
                prefix = xmlWriter.getPrefix(namespace);
                if (prefix == null)
                {
                    prefix = UserClassIDType.generatePrefix(namespace);
                    xmlWriter.writeStartElement(prefix, "userClassID", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                }
                else
                {
                    xmlWriter.writeStartElement(namespace, "userClassID");
                }
            }
            else
            {
                xmlWriter.writeStartElement("userClassID");
            }
            if (this.userClassID == Integer.MIN_VALUE)
            {
                throw new ADBException("userClassID cannot be null!!");
            }
            else
            {
                xmlWriter.writeCharacters(ConverterUtil.convertToString(this.userClassID));
            }
            xmlWriter.writeEndElement();
        }
        // userClassName - only serialized when set.
        if (this.userClassNameTracker)
        {
            namespace = "";
            if (!namespace.equals(""))
            {
                prefix = xmlWriter.getPrefix(namespace);
                if (prefix == null)
                {
                    prefix = UserClassIDType.generatePrefix(namespace);
                    xmlWriter.writeStartElement(prefix, "userClassName", namespace);
                    xmlWriter.writeNamespace(prefix, namespace);
                    xmlWriter.setPrefix(prefix, namespace);
                }
                else
                {
                    xmlWriter.writeStartElement(namespace, "userClassName");
                }
            }
            else
            {
                xmlWriter.writeStartElement("userClassName");
            }
            if (this.userClassName == null)
            {
                throw new ADBException("userClassName cannot be null!!");
            }
            else
            {
                xmlWriter.writeCharacters(this.userClassName);
            }
            xmlWriter.writeEndElement();
        }
        xmlWriter.writeEndElement();
    }

    /**
     * Writes a namespaced attribute, binding the prefix on the writer first
     * if the namespace is not yet associated with one.
     */
    private void writeAttribute(final String prefix, final String namespace, final String attName,
            final String attValue, final XMLStreamWriter xmlWriter) throws XMLStreamException
    {
        if (xmlWriter.getPrefix(namespace) == null)
        {
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        xmlWriter.writeAttribute(namespace, attName, attValue);
    }

    /**
     * Registers (or reuses) a prefix for the given namespace on the writer,
     * generating new prefixes until an unbound one is found.
     *
     * @return the prefix bound to the namespace
     */
    private String registerPrefix(final XMLStreamWriter xmlWriter, final String namespace) throws XMLStreamException
    {
        String prefix = xmlWriter.getPrefix(namespace);
        if (prefix == null)
        {
            prefix = UserClassIDType.generatePrefix(namespace);
            // Keep generating until we find a prefix not already bound to
            // some other namespace in the current context.
            while (xmlWriter.getNamespaceContext().getNamespaceURI(prefix) != null)
            {
                prefix = BeanUtil.getUniquePrefix();
            }
            xmlWriter.writeNamespace(prefix, namespace);
            xmlWriter.setPrefix(prefix, namespace);
        }
        return prefix;
    }

    /**
     * Returns a pull parser over this bean's state: alternating element
     * QName / value pairs for each tracked field, plus the xsi:type
     * attribute pair.
     */
    @Override
    public XMLStreamReader getPullParser(final QName qName) throws ADBException
    {
        final ArrayList<Serializable> elementList = new ArrayList<Serializable>();
        final ArrayList<QName> attribList = new ArrayList<QName>();
        // Attribute list holds the xsi:type attribute name followed by its value QName.
        attribList.add(new QName("http://www.w3.org/2001/XMLSchema-instance", "type"));
        attribList.add(new QName("http://remotelabs.eng.uts.edu.au/schedserver/permissions", "UserClassIDType"));
        if (this.requestorIDTracker)
        {
            elementList.add(new QName("", "requestorID"));
            elementList.add(ConverterUtil.convertToString(this.requestorID));
        }
        if (this.nameNameSpaceSequenceTracker)
        {
            elementList.add(new QName("http://remotelabs.eng.uts.edu.au/schedserver/permissions",
                    "OperationRequestTypeSequence_type0"));
            if (this.nameNameSpaceSequence == null)
            {
                throw new ADBException("OperationRequestTypeSequence_type0 cannot be null!!");
            }
            elementList.add(this.nameNameSpaceSequence);
        }
        if (this.requestorQNameTracker)
        {
            elementList.add(new QName("", "requestorQName"));
            if (this.requestorQName != null)
            {
                elementList.add(ConverterUtil.convertToString(this.requestorQName));
            }
            else
            {
                throw new ADBException("requestorQName cannot be null!!");
            }
        }
        if (this.userClassIDTracker)
        {
            elementList.add(new QName("", "userClassID"));
            elementList.add(ConverterUtil.convertToString(this.userClassID));
        }
        if (this.userClassNameTracker)
        {
            elementList.add(new QName("", "userClassName"));
            if (this.userClassName != null)
            {
                elementList.add(ConverterUtil.convertToString(this.userClassName));
            }
            else
            {
                throw new ADBException("userClassName cannot be null!!");
            }
        }
        return new ADBXMLStreamReaderImpl(qName, elementList.toArray(), attribList.toArray());
    }

    /**
     * Factory class that deserializes a UserClassIDType from an XML stream.
     */
    public static class Factory
    {
        /**
         * Parses a UserClassIDType from the reader. If an xsi:type attribute
         * names a different type, parsing is delegated to the
         * ExtensionMapper. Elements are consumed in schema order; missing
         * optional int elements are recorded as Integer.MIN_VALUE.
         *
         * @param reader stream positioned at (or before) the element
         * @return populated bean
         * @throws Exception on stream errors or unexpected content
         */
        public static UserClassIDType parse(final XMLStreamReader reader) throws Exception
        {
            final UserClassIDType object = new UserClassIDType();
            try
            {
                // Advance to the first element boundary.
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance", "type") != null)
                {
                    final String fullTypeName = reader.getAttributeValue("http://www.w3.org/2001/XMLSchema-instance",
                            "type");
                    if (fullTypeName != null)
                    {
                        String nsPrefix = null;
                        if (fullTypeName.indexOf(":") > -1)
                        {
                            nsPrefix = fullTypeName.substring(0, fullTypeName.indexOf(":"));
                        }
                        nsPrefix = nsPrefix == null ? "" : nsPrefix;
                        final String type = fullTypeName.substring(fullTypeName.indexOf(":") + 1);
                        if (!"UserClassIDType".equals(type))
                        {
                            // xsi:type points at a subtype - delegate to the mapper.
                            final String nsUri = reader.getNamespaceContext().getNamespaceURI(nsPrefix);
                            return (UserClassIDType) ExtensionMapper.getTypeObject(nsUri, type, reader);
                        }
                    }
                }
                reader.next();
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement() && new QName("", "requestorID").equals(reader.getName()))
                {
                    final String content = reader.getElementText();
                    object.setRequestorID(ConverterUtil.convertToInt(content));
                    reader.next();
                }
                else
                {
                    // Element absent - record the "unset" sentinel.
                    object.setRequestorID(Integer.MIN_VALUE);
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                try
                {
                    if (reader.isStartElement())
                    {
                        object.setOperationRequestTypeSequence(OperationRequestTypeSequence.Factory
                                .parse(reader));
                    }
                }
                catch (final Exception e)
                {
                    /* Optional - the sequence element may legitimately be absent. */
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement() && new QName("", "requestorQName").equals(reader.getName()))
                {
                    final String content = reader.getElementText();
                    object.setRequestorQName(ConverterUtil.convertToString(content));
                    reader.next();
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement() && new QName("", "userClassID").equals(reader.getName()))
                {
                    final String content = reader.getElementText();
                    object.setUserClassID(ConverterUtil.convertToInt(content));
                    reader.next();
                }
                else
                {
                    object.setUserClassID(Integer.MIN_VALUE);
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement() && new QName("", "userClassName").equals(reader.getName()))
                {
                    final String content = reader.getElementText();
                    object.setUserClassName(ConverterUtil.convertToString(content));
                    reader.next();
                }
                while (!reader.isStartElement() && !reader.isEndElement())
                {
                    reader.next();
                }
                if (reader.isStartElement())
                {
                    // Any remaining element is not part of this type's schema.
                    throw new ADBException("Unexpected subelement " + reader.getLocalName());
                }
            }
            catch (final XMLStreamException e)
            {
                throw new Exception(e);
            }
            return object;
        }
    }
}
| |
/**
* Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.integration.marketdata.manipulator.dsl;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import org.joda.beans.Bean;
import org.joda.beans.BeanDefinition;
import org.joda.beans.ImmutableBean;
import org.joda.beans.ImmutableConstructor;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaProperty;
import org.joda.beans.Property;
import org.joda.beans.PropertyDefinition;
import org.joda.beans.impl.direct.DirectFieldsBeanBuilder;
import org.joda.beans.impl.direct.DirectMetaBean;
import org.joda.beans.impl.direct.DirectMetaProperty;
import org.joda.beans.impl.direct.DirectMetaPropertyMap;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.threeten.bp.Period;
import org.threeten.bp.ZonedDateTime;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.opengamma.engine.function.FunctionExecutionContext;
import com.opengamma.engine.marketdata.manipulator.function.StructureManipulator;
import com.opengamma.engine.value.ValueSpecification;
import com.opengamma.financial.analytics.ircurve.FixedIncomeStripWithSecurity;
import com.opengamma.financial.analytics.ircurve.StripInstrumentType;
import com.opengamma.financial.analytics.ircurve.YieldCurveData;
import com.opengamma.id.ExternalId;
import com.opengamma.id.ExternalIdBundle;
import com.opengamma.util.ArgumentChecker;
import java.util.Arrays;
/**
* A {@link StructureManipulator} which performs a list of bucketed shifts on {@link YieldCurveData}.
*/
@BeanDefinition
public final class YieldCurveDataBucketedShiftManipulator implements ImmutableBean, StructureManipulator<YieldCurveData> {
private static final Logger s_logger = LoggerFactory.getLogger(YieldCurveDataBucketedShiftManipulator.class);
/** Shift type */
@PropertyDefinition(validate = "notNull")
private final ScenarioShiftType _shiftType;
/** Shifts to apply */
@PropertyDefinition(validate = "notNull")
private final ImmutableList<YieldCurveBucketedShift> _shifts;
@ImmutableConstructor
public YieldCurveDataBucketedShiftManipulator(ScenarioShiftType shiftType, List<YieldCurveBucketedShift> shifts) {
_shiftType = ArgumentChecker.notNull(shiftType, "shiftType");
_shifts = ImmutableList.copyOf(ArgumentChecker.notEmpty(shifts, "shifts"));
}
@Override
public YieldCurveData execute(YieldCurveData curveData,
ValueSpecification valueSpecification,
FunctionExecutionContext executionContext) {
ZonedDateTime valuationTime = ZonedDateTime.now(executionContext.getValuationClock());
Map<ExternalIdBundle, Double> data = Maps.newHashMap(curveData.getDataPoints());
Map<ExternalId, ExternalIdBundle> index = curveData.getIndex();
for (YieldCurveBucketedShift shift : _shifts) {
for (FixedIncomeStripWithSecurity strip : curveData.getCurveSpecification().getStrips()) {
Period stripPeriod = strip.getTenor().getPeriod();
Period shiftStart = shift.getStart();
Period shiftEnd = shift.getEnd();
ZonedDateTime stripTime = valuationTime.plus(stripPeriod);
ZonedDateTime shiftStartTime = valuationTime.plus(shiftStart);
ZonedDateTime shiftEndTime = valuationTime.plus(shiftEnd);
if (stripTime.compareTo(shiftStartTime) >= 0 && stripTime.compareTo(shiftEndTime) <= 0) {
ExternalIdBundle bundle = index.get(strip.getSecurityIdentifier());
boolean future = (strip.getInstrumentType() == StripInstrumentType.FUTURE);
Double originalData = data.get(bundle);
Double stripData;
// futures are quoted the other way round from other instruments
if (future) {
stripData = 1 - originalData;
} else {
stripData = originalData;
}
Double shiftedData;
if (_shiftType == ScenarioShiftType.RELATIVE) {
// add shift amount to 1. i.e. 10.pc actualy means 'value * 1.1' and -10.pc means 'value * 0.9'
shiftedData = stripData * (shift.getShift() + 1);
} else {
shiftedData = stripData + shift.getShift();
}
Double shiftedStripData;
if (future) {
shiftedStripData = 1 - shiftedData;
} else {
shiftedStripData = shiftedData;
}
data.put(bundle, shiftedStripData);
s_logger.debug("Shifting data {}, tenor {} by {} from {} to {}",
strip.getSecurityIdentifier(), strip.getTenor(), shift.getShift(), originalData, shiftedStripData);
}
}
}
return new YieldCurveData(curveData.getCurveSpecification(), data);
}
@Override
public Class<YieldCurveData> getExpectedType() {
return YieldCurveData.class;
}
//------------------------- AUTOGENERATED START -------------------------
///CLOVER:OFF
/**
* The meta-bean for {@code YieldCurveDataBucketedShiftManipulator}.
* @return the meta-bean, not null
*/
public static YieldCurveDataBucketedShiftManipulator.Meta meta() {
return YieldCurveDataBucketedShiftManipulator.Meta.INSTANCE;
}
static {
JodaBeanUtils.registerMetaBean(YieldCurveDataBucketedShiftManipulator.Meta.INSTANCE);
}
/**
* Returns a builder used to create an instance of the bean.
* @return the builder, not null
*/
public static YieldCurveDataBucketedShiftManipulator.Builder builder() {
return new YieldCurveDataBucketedShiftManipulator.Builder();
}
@Override
public YieldCurveDataBucketedShiftManipulator.Meta metaBean() {
return YieldCurveDataBucketedShiftManipulator.Meta.INSTANCE;
}
@Override
public <R> Property<R> property(String propertyName) {
return metaBean().<R>metaProperty(propertyName).createProperty(this);
}
@Override
public Set<String> propertyNames() {
return metaBean().metaPropertyMap().keySet();
}
//-----------------------------------------------------------------------
/**
* Gets shift type
* @return the value of the property, not null
*/
public ScenarioShiftType getShiftType() {
return _shiftType;
}
//-----------------------------------------------------------------------
/**
* Gets shifts to apply
* @return the value of the property, not null
*/
public ImmutableList<YieldCurveBucketedShift> getShifts() {
return _shifts;
}
//-----------------------------------------------------------------------
/**
* Returns a builder that allows this bean to be mutated.
* @return the mutable builder, not null
*/
public Builder toBuilder() {
return new Builder(this);
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
}
if (obj != null && obj.getClass() == this.getClass()) {
YieldCurveDataBucketedShiftManipulator other = (YieldCurveDataBucketedShiftManipulator) obj;
return JodaBeanUtils.equal(getShiftType(), other.getShiftType()) &&
JodaBeanUtils.equal(getShifts(), other.getShifts());
}
return false;
}
@Override
public int hashCode() {
int hash = getClass().hashCode();
hash = hash * 31 + JodaBeanUtils.hashCode(getShiftType());
hash = hash * 31 + JodaBeanUtils.hashCode(getShifts());
return hash;
}
@Override
public String toString() {
StringBuilder buf = new StringBuilder(96);
buf.append("YieldCurveDataBucketedShiftManipulator{");
buf.append("shiftType").append('=').append(getShiftType()).append(',').append(' ');
buf.append("shifts").append('=').append(JodaBeanUtils.toString(getShifts()));
buf.append('}');
return buf.toString();
}
//-----------------------------------------------------------------------
/**
* The meta-bean for {@code YieldCurveDataBucketedShiftManipulator}.
*/
public static final class Meta extends DirectMetaBean {
/**
* The singleton instance of the meta-bean.
*/
static final Meta INSTANCE = new Meta();
/**
* The meta-property for the {@code shiftType} property.
*/
private final MetaProperty<ScenarioShiftType> _shiftType = DirectMetaProperty.ofImmutable(
this, "shiftType", YieldCurveDataBucketedShiftManipulator.class, ScenarioShiftType.class);
/**
* The meta-property for the {@code shifts} property.
*/
@SuppressWarnings({"unchecked", "rawtypes" })
private final MetaProperty<ImmutableList<YieldCurveBucketedShift>> _shifts = DirectMetaProperty.ofImmutable(
this, "shifts", YieldCurveDataBucketedShiftManipulator.class, (Class) ImmutableList.class);
/**
* The meta-properties.
*/
private final Map<String, MetaProperty<?>> _metaPropertyMap$ = new DirectMetaPropertyMap(
this, null,
"shiftType",
"shifts");
/**
* Restricted constructor.
*/
private Meta() {
}
@Override
protected MetaProperty<?> metaPropertyGet(String propertyName) {
switch (propertyName.hashCode()) {
case 893345500: // shiftType
return _shiftType;
case -903338959: // shifts
return _shifts;
}
return super.metaPropertyGet(propertyName);
}
@Override
public YieldCurveDataBucketedShiftManipulator.Builder builder() {
return new YieldCurveDataBucketedShiftManipulator.Builder();
}
@Override
public Class<? extends YieldCurveDataBucketedShiftManipulator> beanType() {
return YieldCurveDataBucketedShiftManipulator.class;
}
@Override
public Map<String, MetaProperty<?>> metaPropertyMap() {
return _metaPropertyMap$;
}
//-----------------------------------------------------------------------
/**
* The meta-property for the {@code shiftType} property.
* @return the meta-property, not null
*/
public MetaProperty<ScenarioShiftType> shiftType() {
return _shiftType;
}
/**
* The meta-property for the {@code shifts} property.
* @return the meta-property, not null
*/
public MetaProperty<ImmutableList<YieldCurveBucketedShift>> shifts() {
return _shifts;
}
//-----------------------------------------------------------------------
@Override
protected Object propertyGet(Bean bean, String propertyName, boolean quiet) {
switch (propertyName.hashCode()) {
case 893345500: // shiftType
return ((YieldCurveDataBucketedShiftManipulator) bean).getShiftType();
case -903338959: // shifts
return ((YieldCurveDataBucketedShiftManipulator) bean).getShifts();
}
return super.propertyGet(bean, propertyName, quiet);
}
@Override
protected void propertySet(Bean bean, String propertyName, Object newValue, boolean quiet) {
metaProperty(propertyName);
if (quiet) {
return;
}
throw new UnsupportedOperationException("Property cannot be written: " + propertyName);
}
}
//-----------------------------------------------------------------------
/**
* The bean-builder for {@code YieldCurveDataBucketedShiftManipulator}.
*/
public static final class Builder extends DirectFieldsBeanBuilder<YieldCurveDataBucketedShiftManipulator> {
private ScenarioShiftType _shiftType;
private List<YieldCurveBucketedShift> _shifts = new ArrayList<YieldCurveBucketedShift>();
/**
* Restricted constructor.
*/
private Builder() {
}
/**
* Restricted copy constructor.
* @param beanToCopy the bean to copy from, not null
*/
private Builder(YieldCurveDataBucketedShiftManipulator beanToCopy) {
this._shiftType = beanToCopy.getShiftType();
this._shifts = new ArrayList<YieldCurveBucketedShift>(beanToCopy.getShifts());
}
//-----------------------------------------------------------------------
@Override
public Object get(String propertyName) {
switch (propertyName.hashCode()) {
case 893345500: // shiftType
return _shiftType;
case -903338959: // shifts
return _shifts;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
}
@SuppressWarnings("unchecked")
@Override
public Builder set(String propertyName, Object newValue) {
switch (propertyName.hashCode()) {
case 893345500: // shiftType
this._shiftType = (ScenarioShiftType) newValue;
break;
case -903338959: // shifts
this._shifts = (List<YieldCurveBucketedShift>) newValue;
break;
default:
throw new NoSuchElementException("Unknown property: " + propertyName);
}
return this;
}
@Override
public Builder set(MetaProperty<?> property, Object value) {
super.set(property, value);
return this;
}
@Override
public Builder setString(String propertyName, String value) {
setString(meta().metaProperty(propertyName), value);
return this;
}
@Override
public Builder setString(MetaProperty<?> property, String value) {
super.setString(property, value);
return this;
}
@Override
public Builder setAll(Map<String, ? extends Object> propertyValueMap) {
super.setAll(propertyValueMap);
return this;
}
@Override
public YieldCurveDataBucketedShiftManipulator build() {
return new YieldCurveDataBucketedShiftManipulator(
_shiftType,
_shifts);
}
//-----------------------------------------------------------------------
/**
* Sets the {@code shiftType} property in the builder.
* @param shiftType the new value, not null
* @return this, for chaining, not null
*/
public Builder shiftType(ScenarioShiftType shiftType) {
JodaBeanUtils.notNull(shiftType, "shiftType");
this._shiftType = shiftType;
return this;
}
/**
* Sets the {@code shifts} property in the builder.
* @param shifts the new value, not null
* @return this, for chaining, not null
*/
public Builder shifts(List<YieldCurveBucketedShift> shifts) {
JodaBeanUtils.notNull(shifts, "shifts");
this._shifts = shifts;
return this;
}
/**
* Sets the {@code shifts} property in the builder
* from an array of objects.
* @param shifts the new value, not null
* @return this, for chaining, not null
*/
public Builder shifts(YieldCurveBucketedShift... shifts) {
return shifts(Arrays.asList(shifts));
}
//-----------------------------------------------------------------------
@Override
public String toString() {
StringBuilder buf = new StringBuilder(96);
buf.append("YieldCurveDataBucketedShiftManipulator.Builder{");
buf.append("shiftType").append('=').append(JodaBeanUtils.toString(_shiftType)).append(',').append(' ');
buf.append("shifts").append('=').append(JodaBeanUtils.toString(_shifts));
buf.append('}');
return buf.toString();
}
}
///CLOVER:ON
//-------------------------- AUTOGENERATED END --------------------------
}
| |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.marshalling.impl;
import com.google.protobuf.ExtensionRegistry;
import org.drools.core.SessionConfiguration;
import org.drools.core.common.ActivationsFilter;
import org.drools.core.common.AgendaGroupQueueImpl;
import org.drools.core.common.DefaultFactHandle;
import org.drools.core.common.EqualityKey;
import org.drools.core.common.EventFactHandle;
import org.drools.core.common.InternalAgenda;
import org.drools.core.common.InternalFactHandle;
import org.drools.core.common.InternalWorkingMemory;
import org.drools.core.common.InternalWorkingMemoryEntryPoint;
import org.drools.core.common.NamedEntryPoint;
import org.drools.core.common.ObjectStore;
import org.drools.core.common.PropagationContextFactory;
import org.drools.core.common.QueryElementFactHandle;
import org.drools.core.common.TruthMaintenanceSystem;
import org.drools.core.common.WorkingMemoryAction;
import org.drools.core.common.WorkingMemoryFactory;
import org.drools.core.impl.EnvironmentFactory;
import org.drools.core.impl.StatefulKnowledgeSessionImpl;
import org.drools.core.marshalling.impl.ProtobufMessages.FactHandle;
import org.drools.core.marshalling.impl.ProtobufMessages.ObjectTypeConfiguration;
import org.drools.core.marshalling.impl.ProtobufMessages.RuleData;
import org.drools.core.marshalling.impl.ProtobufMessages.Timers.Timer;
import org.drools.core.phreak.PhreakTimerNode.Scheduler;
import org.drools.core.phreak.RuleAgendaItem;
import org.drools.core.phreak.RuleExecutor;
import org.drools.core.phreak.StackEntry;
import org.drools.core.process.instance.WorkItem;
import org.drools.core.reteoo.LeftTuple;
import org.drools.core.reteoo.ObjectTypeConf;
import org.drools.core.reteoo.TerminalNode;
import org.drools.core.rule.EntryPointId;
import org.drools.core.spi.Activation;
import org.drools.core.spi.FactHandleFactory;
import org.drools.core.spi.GlobalResolver;
import org.drools.core.spi.PropagationContext;
import org.drools.core.time.Trigger;
import org.drools.core.time.impl.CronTrigger;
import org.drools.core.time.impl.IntervalTrigger;
import org.drools.core.time.impl.PointInTimeTrigger;
import org.drools.core.time.impl.PseudoClockScheduler;
import org.kie.api.marshalling.ObjectMarshallingStrategy;
import org.kie.api.runtime.Environment;
import org.kie.api.runtime.EnvironmentName;
import org.kie.api.runtime.rule.AgendaFilter;
import org.kie.api.runtime.rule.EntryPoint;
import org.kie.api.runtime.rule.Match;
import org.kie.api.runtime.rule.RuleRuntime;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.TimeUnit;
/**
* An input marshaller that uses protobuf.
*
* @author etirelli
*/
public class ProtobufInputMarshaller {
// NOTE: all variables prefixed with _ (underscore) are protobuf structs
private static ProcessMarshaller processMarshaller = createProcessMarshaller();
private static ProcessMarshaller createProcessMarshaller() {
try {
return ProcessMarshallerFactory.newProcessMarshaller();
} catch ( IllegalArgumentException e ) {
return null;
}
}
/**
* Stream the data into an existing session
*
* @param session
* @param context
* @return
* @throws IOException
* @throws ClassNotFoundException
*/
public static StatefulKnowledgeSessionImpl readSession(StatefulKnowledgeSessionImpl session,
MarshallerReaderContext context) throws IOException,
ClassNotFoundException {
ProtobufMessages.KnowledgeSession _session = loadAndParseSession( context );
InternalAgenda agenda = resetSession( session,
context,
_session );
readSession( _session,
session,
agenda,
context );
return session;
}
/**
* Create a new session into which to read the stream data
*/
public static StatefulKnowledgeSessionImpl readSession(MarshallerReaderContext context,
int id) throws IOException,
ClassNotFoundException {
StatefulKnowledgeSessionImpl session = readSession( context,
id,
EnvironmentFactory.newEnvironment(),
SessionConfiguration.getDefaultInstance() );
return session;
}
public static StatefulKnowledgeSessionImpl readSession(MarshallerReaderContext context,
int id,
Environment environment,
SessionConfiguration config) throws IOException,
ClassNotFoundException {
ProtobufMessages.KnowledgeSession _session = loadAndParseSession( context );
StatefulKnowledgeSessionImpl session = createAndInitializeSession( context,
id,
environment,
config,
_session );
return readSession( _session,
session,
(InternalAgenda) session.getAgenda(),
context );
}
private static InternalAgenda resetSession(StatefulKnowledgeSessionImpl session,
MarshallerReaderContext context,
ProtobufMessages.KnowledgeSession _session) {
session.reset( _session.getRuleData().getLastId(),
_session.getRuleData().getLastRecency(),
1 );
InternalAgenda agenda = (InternalAgenda) session.getAgenda();
readAgenda( context,
_session.getRuleData(),
agenda );
return agenda;
}
private static StatefulKnowledgeSessionImpl createAndInitializeSession(MarshallerReaderContext context,
int id,
Environment environment,
SessionConfiguration config,
ProtobufMessages.KnowledgeSession _session) throws IOException {
FactHandleFactory handleFactory = context.kBase.newFactHandleFactory( _session.getRuleData().getLastId(),
_session.getRuleData().getLastRecency() );
InternalAgenda agenda = context.kBase.getConfiguration().getComponentFactory().getAgendaFactory().createAgenda( context.kBase, false );
readAgenda( context,
_session.getRuleData(),
agenda );
WorkingMemoryFactory wmFactory = context.kBase.getConfiguration().getComponentFactory().getWorkingMemoryFactory();
StatefulKnowledgeSessionImpl session = ( StatefulKnowledgeSessionImpl ) wmFactory.createWorkingMemory( id,
context.kBase,
handleFactory,
null,
1, // pCTx starts at 1, as InitialFact is 0
config,
agenda,
environment );
return session;
}
private static ProtobufMessages.KnowledgeSession loadAndParseSession(MarshallerReaderContext context) throws IOException,
ClassNotFoundException {
ExtensionRegistry registry = PersisterHelper.buildRegistry( context, processMarshaller );
ProtobufMessages.Header _header = PersisterHelper.readFromStreamWithHeaderPreloaded( context, registry );
return ProtobufMessages.KnowledgeSession.parseFrom( _header.getPayload(), registry );
}
    /**
     * Populates an existing session with the state carried by the parsed protobuf
     * message: global resolver, pseudo-clock time, node memories, per-entry-point
     * fact handles and truth maintenance state, the working memory action queue,
     * process data (when a process marshaller is available) and timers.
     *
     * @param _session parsed protobuf image of the marshalled session
     * @param session  the session instance to populate
     * @param agenda   the session's agenda; its activations filter is cleared on exit
     * @param context  reader context holding the stream, strategies and shared caches
     * @return the populated session (the same instance passed in as {@code session})
     * @throws IOException            if reading serialized data fails
     * @throws ClassNotFoundException if a class referenced by the stream cannot be loaded
     */
    public static StatefulKnowledgeSessionImpl readSession(ProtobufMessages.KnowledgeSession _session,
                                                           StatefulKnowledgeSessionImpl session,
                                                           InternalAgenda agenda,
                                                           MarshallerReaderContext context) throws IOException,
                                                                                                   ClassNotFoundException {
        // restore the caller-provided global resolver, if one was placed in the environment
        GlobalResolver globalResolver = (GlobalResolver) context.env.get( EnvironmentName.GLOBALS );
        if ( globalResolver != null ) {
            session.setGlobalResolver( globalResolver );
        }
        // only a pseudo clock can be fast-forwarded to the marshalled session time
        if ( session.getTimerService() instanceof PseudoClockScheduler ) {
            PseudoClockScheduler clock = (PseudoClockScheduler) session.getTimerService();
            clock.advanceTime( _session.getTime(),
                               TimeUnit.MILLISECONDS );
        }
        // RuleFlowGroups need to reference the session
        //        for ( InternalAgendaGroup group : agenda.getAgendaGroupsMap().values() ) {
        //            ((RuleFlowGroupImpl) group).setWorkingMemory( session );
        //        }

        // make the session visible to the helper readers invoked below
        context.wm = session;
        // need to read node memories before reading the fact handles
        // because this data is required during fact propagation
        readNodeMemories( context,
                          _session.getRuleData() );
        List<PropagationContext> pctxs = new ArrayList<PropagationContext>();
        if ( context.kBase.getConfiguration().isPhreakEnabled() || _session.getRuleData().hasInitialFact() ) {
            ((StatefulKnowledgeSessionImpl)context.wm).initInitialFact(context.kBase, context);
            context.handles.put( session.getInitialFactHandle().getId(), session.getInitialFactHandle() );
        }
        // restore fact handles and truth-maintenance state entry point by entry point
        for ( ProtobufMessages.EntryPoint _ep : _session.getRuleData().getEntryPointList() ) {
            EntryPoint wmep = ((StatefulKnowledgeSessionImpl)context.wm).getEntryPointMap().get(_ep.getEntryPointId());
            readFactHandles( context,
                             _ep,
                             ((InternalWorkingMemoryEntryPoint) wmep).getObjectStore(),
                             pctxs );
            readTruthMaintenanceSystem( context,
                                        wmep,
                                        _ep,
                                        pctxs );
        }
        cleanReaderContexts( pctxs );
        readActionQueue( context,
                         _session.getRuleData() );
        if ( processMarshaller != null ) {
            if ( _session.hasProcessData() ) {
                context.parameterObject = _session.getProcessData();
                processMarshaller.readProcessInstances( context );
                context.parameterObject = _session.getProcessData();
                processMarshaller.readWorkItems( context );
                // This actually does ALL timers, due to backwards compatibility issues
                // It will read in old JBPM binaries, but always write to the new binary format.
                context.parameterObject = _session.getProcessData();
                processMarshaller.readProcessTimers( context );
            }
        } else {
            // process data in the stream but no marshaller registered: fail loudly
            if ( _session.hasProcessData() ) {
                throw new IllegalStateException( "No process marshaller, unable to unmarshall process data." );
            }
        }
        if ( _session.hasTimers() ) {
            for ( ProtobufMessages.Timers.Timer _timer : _session.getTimers().getTimerList() ) {
                readTimer( context,
                           _timer );
            }
        }
        // need to process any eventual left over timer node timers
        if( ! context.timerNodeSchedulers.isEmpty() ) {
            for( Map<TupleKey, Scheduler> schedulers : context.timerNodeSchedulers.values() ) {
                for( Scheduler scheduler : schedulers.values() ) {
                    scheduler.schedule( scheduler.getTrigger() );
                }
            }
            context.timerNodeSchedulers.clear();
        }
        // remove the activations filter
        agenda.setActivationsFilter( null );
        return session;
    }
    /**
     * Decodes the serialized per-node memories (accumulate, RIA, from and query
     * element nodes) and stages them in {@code context.nodeMemories}, keyed by
     * node id, so they are available while facts are re-propagated.
     *
     * @param context  reader context receiving the staged memories
     * @param _session serialized rule data holding the node memory entries
     * @throws IllegalArgumentException if an unknown node type is encountered
     */
    private static void readNodeMemories(MarshallerReaderContext context,
                                         RuleData _session) {
        for ( ProtobufMessages.NodeMemory _node : _session.getNodeMemoryList() ) {
            Object memory = null;
            switch ( _node.getNodeType() ) {
                case ACCUMULATE : {
                    // tuple key -> serialized result handle of the accumulate
                    Map<TupleKey, ProtobufMessages.FactHandle> map = new HashMap<TupleKey, ProtobufMessages.FactHandle>();
                    for ( ProtobufMessages.NodeMemory.AccumulateNodeMemory.AccumulateContext _ctx : _node.getAccumulate().getContextList() ) {
                        map.put( PersisterHelper.createTupleKey( _ctx.getTuple() ), _ctx.getResultHandle() );
                    }
                    memory = map;
                    break;
                }
                case RIA : {
                    // tuple key -> serialized result handle of the right-input adapter
                    Map<TupleKey, ProtobufMessages.FactHandle> map = new HashMap<TupleKey, ProtobufMessages.FactHandle>();
                    for ( ProtobufMessages.NodeMemory.RIANodeMemory.RIAContext _ctx : _node.getRia().getContextList() ) {
                        map.put( PersisterHelper.createTupleKey( _ctx.getTuple() ), _ctx.getResultHandle() );
                    }
                    memory = map;
                    break;
                }
                case FROM : {
                    // tuple key -> serialized handles produced by the "from" node
                    Map<TupleKey, List<ProtobufMessages.FactHandle>> map = new HashMap<TupleKey, List<ProtobufMessages.FactHandle>>();
                    for ( ProtobufMessages.NodeMemory.FromNodeMemory.FromContext _ctx : _node.getFrom().getContextList() ) {
                        // have to instantiate a modifiable list
                        map.put( PersisterHelper.createTupleKey( _ctx.getTuple() ), new LinkedList<ProtobufMessages.FactHandle>( _ctx.getHandleList() ) );
                    }
                    memory = map;
                    break;
                }
                case QUERY_ELEMENT : {
                    Map<TupleKey, QueryElementContext> map = new HashMap<TupleKey, QueryElementContext>();
                    for ( ProtobufMessages.NodeMemory.QueryElementNodeMemory.QueryContext _ctx : _node.getQueryElement().getContextList() ) {
                        // we have to use a "cloned" query element context as we need to write on it during deserialization process and the
                        // protobuf one is read-only
                        map.put( PersisterHelper.createTupleKey( _ctx.getTuple() ), new QueryElementContext( _ctx ) );
                    }
                    memory = map;
                    break;
                }
                default : {
                    throw new IllegalArgumentException( "Unknown node type " + _node.getNodeType() + " while deserializing session." );
                }
            }
            context.nodeMemories.put( _node.getNodeId(), memory );
        }
    }
    /**
     * Mutable copy of a serialized query element context. The protobuf message is
     * read-only, so its handle and result list are copied into this class, which
     * can then be modified while the session is being deserialized.
     */
    public static class QueryElementContext {
        // serialized fact handle associated with this query context
        public final ProtobufMessages.FactHandle handle;
        // modifiable copy of the serialized result handles
        public final LinkedList<ProtobufMessages.FactHandle> results;
        public QueryElementContext(ProtobufMessages.NodeMemory.QueryElementNodeMemory.QueryContext _ctx) {
            this.handle = _ctx.getHandle();
            this.results = new LinkedList<ProtobufMessages.FactHandle>( _ctx.getResultList() );
        }
    }
private static void readInitialFactHandle(MarshallerReaderContext context,
RuleData _session,
List<PropagationContext> pctxs) {
int ifhId = context.wm.getInitialFactHandle().getId();
context.handles.put( ifhId,
context.wm.getInitialFactHandle() );
// special case we have to handle for the initial fact
boolean initialFactPropagated = true;
for ( ProtobufMessages.ActionQueue.Action _action : _session.getActionQueue().getActionList() ) {
if ( _action.getType() == ProtobufMessages.ActionQueue.ActionType.ASSERT ) {
if ( _action.getAssert().getHandleId() == ifhId ) {
initialFactPropagated = false;
break;
}
}
}
if ( initialFactPropagated ) {
assertHandleIntoOTN( context,
context.wm,
context.wm.getInitialFactHandle(),
pctxs );
}
}
    /**
     * Restores the agenda state: agenda groups (including activation flags and
     * process node instances), the focus stack, rule flow groups and the
     * serialized activations. Finally installs {@code context.filter} as the
     * agenda's activations filter so dormant activations are not re-fired while
     * facts are re-propagated.
     *
     * @param context   reader context carrying the knowledge base and filter
     * @param _ruleData serialized rule data holding the agenda image
     * @param agenda    the agenda instance to populate
     */
    public static void readAgenda(MarshallerReaderContext context,
                                  RuleData _ruleData,
                                  InternalAgenda agenda) {
        ProtobufMessages.Agenda _agenda = _ruleData.getAgenda();
        for ( org.drools.core.marshalling.impl.ProtobufMessages.Agenda.AgendaGroup _agendaGroup : _agenda.getAgendaGroupList() ) {
            AgendaGroupQueueImpl group = (AgendaGroupQueueImpl) agenda.getAgendaGroup( _agendaGroup.getName(), context.kBase );
            group.setActive( _agendaGroup.getIsActive() );
            group.setAutoDeactivate( _agendaGroup.getIsAutoDeactivate() );
            group.setClearedForRecency( _agendaGroup.getClearedForRecency() );
            group.hasRuleFlowListener( _agendaGroup.getHasRuleFlowLister() );
            group.setActivatedForRecency( _agendaGroup.getActivatedForRecency() );
            // restore the process node instances attached to this group
            for ( org.drools.core.marshalling.impl.ProtobufMessages.Agenda.AgendaGroup.NodeInstance _nodeInstance : _agendaGroup.getNodeInstanceList() ) {
                group.addNodeInstance( _nodeInstance.getProcessInstanceId(),
                                       _nodeInstance.getNodeInstanceId() );
            }
            agenda.getAgendaGroupsMap().put( group.getName(),
                                             group );
        }
        // rebuild the focus stack in its serialized order
        for ( String _groupName : _agenda.getFocusStack().getGroupNameList() ) {
            agenda.addAgendaGroupOnStack( agenda.getAgendaGroup( _groupName ) );
        }
        for ( ProtobufMessages.Agenda.RuleFlowGroup _ruleFlowGroup : _agenda.getRuleFlowGroupList() ) {
            AgendaGroupQueueImpl group = (AgendaGroupQueueImpl) agenda.getAgendaGroup( _ruleFlowGroup.getName(), context.kBase );
            group.setActive( _ruleFlowGroup.getIsActive() );
            group.setAutoDeactivate( _ruleFlowGroup.getIsAutoDeactivate() );
            for ( org.drools.core.marshalling.impl.ProtobufMessages.Agenda.RuleFlowGroup.NodeInstance _nodeInstance : _ruleFlowGroup.getNodeInstanceList() ) {
                group.addNodeInstance( _nodeInstance.getProcessInstanceId(),
                                       _nodeInstance.getNodeInstanceId() );
            }
            agenda.getAgendaGroupsMap().put( group.getName(),
                                             group );
            // active rule flow groups also go back on the focus stack
            if (group.isActive()) {
                agenda.addAgendaGroupOnStack( agenda.getAgendaGroup( group.getName() ) );
            }
        }
        readActivations( context,
                         _agenda.getMatchList(),
                         _agenda.getRuleActivationList() );
        agenda.setActivationsFilter( context.filter );
    }
public static void readActionQueue(MarshallerReaderContext context,
RuleData _session) throws IOException,
ClassNotFoundException {
StatefulKnowledgeSessionImpl wm = (StatefulKnowledgeSessionImpl) context.wm;
Queue<WorkingMemoryAction> actionQueue = wm.getActionQueue();
for ( ProtobufMessages.ActionQueue.Action _action : _session.getActionQueue().getActionList() ) {
actionQueue.offer( PersisterHelper.deserializeWorkingMemoryAction( context,
_action ) );
}
}
    /**
     * Recreates the fact handles for a single entry point, caches them in
     * {@code context.handles} and re-asserts the non-justified ones into the
     * network. Justified handles are added later, when the truth maintenance
     * system is restored (see {@code readTruthMaintenanceSystem}).
     *
     * @param context     reader context caching the recreated handles
     * @param _ep         serialized entry point holding the handles
     * @param objectStore the entry point's object store to populate
     * @param pctxs       collector for propagation contexts needing later cleanup
     * @throws IOException            if unmarshalling a handle's object fails
     * @throws ClassNotFoundException if a handle references an unknown class
     */
    public static void readFactHandles(MarshallerReaderContext context,
                                       org.drools.core.marshalling.impl.ProtobufMessages.EntryPoint _ep,
                                       ObjectStore objectStore,
                                       List<PropagationContext> pctxs) throws IOException,
                                                                      ClassNotFoundException {
        InternalWorkingMemory wm = context.wm;
        EntryPoint entryPoint = ((StatefulKnowledgeSessionImpl)context.wm).getEntryPointMap().get(_ep.getEntryPointId());
        // load the handles
        for ( ProtobufMessages.FactHandle _handle : _ep.getHandleList() ) {
            InternalFactHandle handle = readFactHandle( context,
                                                        entryPoint,
                                                        _handle );
            context.handles.put( handle.getId(),
                                 handle );
            if ( !_handle.getIsJustified() ) {
                // BeliefSystem handles the Object type
                if ( handle.getObject() != null ) {
                    objectStore.addHandle( handle,
                                           handle.getObject() );
                }
                // add handle to object type node
                assertHandleIntoOTN( context,
                                     wm,
                                     handle,
                                     pctxs );
            }
        }
    }
private static void assertHandleIntoOTN(MarshallerReaderContext context,
InternalWorkingMemory wm,
InternalFactHandle handle,
List<PropagationContext> pctxs) {
Object object = handle.getObject();
InternalWorkingMemoryEntryPoint ep = (InternalWorkingMemoryEntryPoint) handle.getEntryPoint();
ObjectTypeConf typeConf = ((InternalWorkingMemoryEntryPoint) handle.getEntryPoint()).getObjectTypeConfigurationRegistry().getObjectTypeConf( ep.getEntryPoint(), object );
PropagationContextFactory pctxFactory = wm.getKnowledgeBase().getConfiguration().getComponentFactory().getPropagationContextFactory();
PropagationContext propagationContext = pctxFactory.createPropagationContext(wm.getNextPropagationIdCounter(), PropagationContext.INSERTION, null, null, handle, ep.getEntryPoint(), context);
// keeping this list for a later cleanup is necessary because of the lazy propagations that might occur
pctxs.add( propagationContext );
ep.getEntryPointNode().assertObject( handle,
propagationContext,
typeConf,
wm );
propagationContext.evaluateActionQueue( wm );
wm.executeQueuedActions();
}
private static void cleanReaderContexts(List<PropagationContext> pctxs) {
for ( PropagationContext ctx : pctxs ) {
ctx.cleanReaderContext();
}
}
public static InternalFactHandle readFactHandle(MarshallerReaderContext context,
EntryPoint entryPoint,
FactHandle _handle) throws IOException,
ClassNotFoundException {
Object object = null;
ObjectMarshallingStrategy strategy = null;
if ( _handle.hasStrategyIndex() ) {
strategy = context.usedStrategies.get( _handle.getStrategyIndex() );
object = strategy.unmarshal( context.strategyContexts.get( strategy ),
context,
_handle.getObject().toByteArray(),
(context.kBase == null) ? null : context.kBase.getRootClassLoader() );
}
EntryPointId confEP;
if ( entryPoint != null ) {
confEP = ((NamedEntryPoint) entryPoint).getEntryPoint();
} else {
confEP = context.wm.getEntryPoint();
}
ObjectTypeConf typeConf = context.wm.getObjectTypeConfigurationRegistry().getObjectTypeConf( confEP, object );
InternalFactHandle handle = null;
switch ( _handle.getType() ) {
case FACT : {
handle = new DefaultFactHandle( _handle.getId(),
object,
_handle.getRecency(),
entryPoint,
typeConf != null && typeConf.isTrait() );
break;
}
case QUERY : {
handle = new QueryElementFactHandle( object,
_handle.getId(),
_handle.getRecency() );
break;
}
case EVENT : {
handle = new EventFactHandle( _handle.getId(),
object,
_handle.getRecency(),
_handle.getTimestamp(),
_handle.getDuration(),
entryPoint,
typeConf != null && typeConf.isTrait() );
((EventFactHandle) handle).setExpired( _handle.getIsExpired() );
// the event is re-propagated through the network, so the activations counter will be recalculated
//((EventFactHandle) handle).setActivationsCount( _handle.getActivationsCount() );
break;
}
default : {
throw new IllegalStateException( "Unable to marshal FactHandle, as type does not exist:" + _handle.getType() );
}
}
return handle;
}
    /**
     * Restores the truth maintenance system of a single entry point: rebuilds the
     * equality keys, links them to the already-recreated fact handles, adds
     * justified handles to the object stores and network, and reads belief sets.
     *
     * @param context reader context holding the recreated handles
     * @param wmep    the live entry point whose TMS is restored
     * @param _ep     the serialized entry point
     * @param pctxs   collector for propagation contexts needing later cleanup
     * @throws IOException            if belief-set payloads cannot be read
     * @throws ClassNotFoundException if a serialized class cannot be resolved
     */
    public static void readTruthMaintenanceSystem(MarshallerReaderContext context,
                                                  EntryPoint wmep,
                                                  ProtobufMessages.EntryPoint _ep,
                                                  List<PropagationContext> pctxs) throws IOException,
                                                                                 ClassNotFoundException {
        TruthMaintenanceSystem tms = ((NamedEntryPoint) wmep).getTruthMaintenanceSystem();

        boolean wasOTCSerialized = _ep.getOtcCount() > 0; // if 0, then the OTC was not serialized (older versions of drools)
        // types that had TMS enabled at marshalling time (only known when OTCs were serialized)
        Set<String> tmsEnabled = new HashSet<String>();
        for( ObjectTypeConfiguration _otc : _ep.getOtcList() ) {
            if( _otc.getTmsEnabled() ) {
                tmsEnabled.add( _otc.getType() );
            }
        }

        ProtobufMessages.TruthMaintenanceSystem _tms = _ep.getTms();
        for ( ProtobufMessages.EqualityKey _key : _tms.getKeyList() ) {
            InternalFactHandle handle = (InternalFactHandle) context.handles.get( _key.getHandleId() );

            // ObjectTypeConf state is not marshalled, so it needs to be re-determined
            ObjectTypeConf typeConf = context.wm.getObjectTypeConfigurationRegistry().getObjectTypeConf( ((NamedEntryPoint) handle.getEntryPoint()).getEntryPoint(),
                                                                                                         handle.getObject() );
            if ( !typeConf.isTMSEnabled() && (!wasOTCSerialized || tmsEnabled.contains(typeConf.getTypeName()) ) ) {
                typeConf.enableTMS();
            }

            EqualityKey key = new EqualityKey( handle,
                                               _key.getStatus() );
            handle.setEqualityKey( key );
            if ( key.getStatus() == EqualityKey.JUSTIFIED ) {
                // not yet added to the object stores
                ((NamedEntryPoint) handle.getEntryPoint()).getObjectStore().addHandle( handle,
                                                                                       handle.getObject() );
                // add handle to object type node
                assertHandleIntoOTN( context,
                                     context.wm,
                                     handle,
                                     pctxs );
            }
            // link every additional handle sharing this equality key
            for ( Integer factHandleId : _key.getOtherHandleList() ) {
                handle = (InternalFactHandle) context.handles.get( factHandleId.intValue() );
                key.addFactHandle( handle );
                handle.setEqualityKey( key );
            }
            tms.put( key );

            readBeliefSet( context, tms, key, _key );
        }

    }
    /**
     * Restores the belief set attached to an equality key, re-linking each logical
     * dependency to its (already re-created) activation and unmarshalling the
     * dependency's object and value payloads with the recorded strategies.
     *
     * @param context reader context holding handles, strategies and the filter's tuple cache
     * @param tms     the entry point's truth maintenance system
     * @param key     the equality key being restored
     * @param _key    the serialized equality key, possibly carrying a belief set
     * @throws IOException            if a payload cannot be unmarshalled
     * @throws ClassNotFoundException if a serialized class cannot be resolved
     */
    private static void readBeliefSet(MarshallerReaderContext context,
                                      TruthMaintenanceSystem tms,
                                      EqualityKey key,
                                      ProtobufMessages.EqualityKey _key) throws IOException,
                                                                                ClassNotFoundException {
        if( _key.hasBeliefSet() ) {
            ProtobufMessages.BeliefSet _beliefSet = _key.getBeliefSet();
            InternalFactHandle handle = (InternalFactHandle) context.handles.get( _key.getHandleId() );
            // phreak might serialize empty belief sets, so we have to handle it during deserialization
            if( _beliefSet.getLogicalDependencyCount() > 0 ) {
                for ( ProtobufMessages.LogicalDependency _logicalDependency : _beliefSet.getLogicalDependencyList() ) {
                    ProtobufMessages.Activation _activation = _logicalDependency.getActivation();
                    // the activation was cached by the filter while the network was repopulated
                    Activation activation = (Activation) context.filter.getTuplesCache().get(
                                                                                              PersisterHelper.createActivationKey( _activation.getPackageName(),
                                                                                                                                   _activation.getRuleName(),
                                                                                                                                   _activation.getTuple() ) ).getObject();

                    Object object = null;
                    ObjectMarshallingStrategy strategy = null;
                    if ( _logicalDependency.hasObjectStrategyIndex() ) {
                        strategy = context.usedStrategies.get( _logicalDependency.getObjectStrategyIndex() );
                        object = strategy.unmarshal( context.strategyContexts.get( strategy ),
                                                     context,
                                                     _logicalDependency.getObject().toByteArray(),
                                                     (context.kBase == null) ? null : context.kBase.getRootClassLoader() );
                    }

                    Object value = null;
                    if ( _logicalDependency.hasValueStrategyIndex() ) {
                        strategy = context.usedStrategies.get( _logicalDependency.getValueStrategyIndex() );
                        value = strategy.unmarshal( context.strategyContexts.get( strategy ),
                                                    context,
                                                    _logicalDependency.getValue().toByteArray(),
                                                    (context.kBase == null) ? null : context.kBase.getRootClassLoader() );
                    }

                    ObjectTypeConf typeConf = context.wm.getObjectTypeConfigurationRegistry().getObjectTypeConf( ((NamedEntryPoint) handle.getEntryPoint()).getEntryPoint(),
                                                                                                                 handle.getObject() );
                    tms.readLogicalDependency( handle,
                                               object,
                                               value,
                                               activation,
                                               activation.getPropagationContext(),
                                               activation.getRule(),
                                               typeConf );
                }
            } else {
                // empty belief set: just attach a fresh one to the key
                handle.getEqualityKey().setBeliefSet( tms.getBeliefSystem().newBeliefSet( handle ) );
            }
        }
    }
private static void readActivations(MarshallerReaderContext context,
List<ProtobufMessages.Activation> _dormant,
List<ProtobufMessages.Activation> _rneas) {
for ( ProtobufMessages.Activation _activation : _dormant ) {
// this is a dormant activation
context.filter.getDormantActivationsMap().put( PersisterHelper.createActivationKey( _activation.getPackageName(),
_activation.getRuleName(),
_activation.getTuple() ),
_activation );
}
for ( ProtobufMessages.Activation _activation : _rneas ) {
// this is an active rule network evaluator
context.filter.getRneActivations().put( PersisterHelper.createActivationKey( _activation.getPackageName(),
_activation.getRuleName(),
_activation.getTuple() ),
_activation );
}
}
public static void readTimer(MarshallerReaderContext inCtx,
Timer _timer) throws IOException,
ClassNotFoundException {
TimersInputMarshaller reader = inCtx.readersByInt.get( _timer.getType().getNumber() );
reader.deserialize( inCtx, _timer );
}
    /**
     * Rebuilds a timer trigger (cron, interval or point-in-time) from its
     * serialized form.
     *
     * @param inCtx    reader context (currently unused for trigger decoding)
     * @param _trigger the serialized trigger
     * @return the reconstructed trigger
     * @throws RuntimeException if the serialized trigger type is unknown
     */
    public static Trigger readTrigger(MarshallerReaderContext inCtx,
                                      ProtobufMessages.Trigger _trigger) {
        switch ( _trigger.getType() ) {
            case CRON : {
                // cron expression based trigger; end time and next fire time are optional
                ProtobufMessages.Trigger.CronTrigger _cron = _trigger.getCron();
                CronTrigger trigger = new CronTrigger();
                trigger.setStartTime( new Date( _cron.getStartTime() ) );
                if ( _cron.hasEndTime() ) {
                    trigger.setEndTime( new Date( _cron.getEndTime() ) );
                }
                trigger.setRepeatLimit( _cron.getRepeatLimit() );
                trigger.setRepeatCount( _cron.getRepeatCount() );
                trigger.setCronExpression( _cron.getCronExpression() );
                if ( _cron.hasNextFireTime() ) {
                    trigger.setNextFireTime( new Date( _cron.getNextFireTime() ) );
                }
                String[] calendarNames = new String[_cron.getCalendarNameCount()];
                for ( int i = 0; i < calendarNames.length; i++ ) {
                    calendarNames[i] = _cron.getCalendarName( i );
                }
                trigger.setCalendarNames( calendarNames );
                return trigger;
            }
            case INTERVAL : {
                // fixed-period trigger; end time and next fire time are optional
                ProtobufMessages.Trigger.IntervalTrigger _interval = _trigger.getInterval();
                IntervalTrigger trigger = new IntervalTrigger();
                trigger.setStartTime( new Date( _interval.getStartTime() ) );
                if ( _interval.hasEndTime() ) {
                    trigger.setEndTime( new Date( _interval.getEndTime() ) );
                }
                trigger.setRepeatLimit( _interval.getRepeatLimit() );
                trigger.setRepeatCount( _interval.getRepeatCount() );
                if ( _interval.hasNextFireTime() ) {
                    trigger.setNextFireTime( new Date( _interval.getNextFireTime() ) );
                }
                trigger.setPeriod( _interval.getPeriod() );
                String[] calendarNames = new String[_interval.getCalendarNameCount()];
                for ( int i = 0; i < calendarNames.length; i++ ) {
                    calendarNames[i] = _interval.getCalendarName( i );
                }
                trigger.setCalendarNames( calendarNames );
                return trigger;
            }
            case POINT_IN_TIME : {
                // fires exactly once at the recorded time
                PointInTimeTrigger trigger = new PointInTimeTrigger( _trigger.getPit().getNextFireTime(), null, null );
                return trigger;
            }
        }
        throw new RuntimeException( "Unable to deserialize Trigger for type: " + _trigger.getType() );
    }
    /**
     * Delegates reading of a single work item to the process marshaller.
     * NOTE(review): assumes {@code processMarshaller} is non-null — callers must
     * ensure a process marshaller has been registered; confirm at call sites.
     *
     * @param context reader context positioned at a serialized work item
     * @return the deserialized work item
     */
    public static WorkItem readWorkItem( MarshallerReaderContext context ) {
        return processMarshaller.readWorkItem( context );
    }
    /**
     * Activations filter used while a session is being deserialized. It prevents
     * activations that were dormant at marshalling time from re-firing, caches
     * the recreated tuples for correlation with the serialized data, and collects
     * the rule network evaluators that need to be re-evaluated afterwards.
     */
    public static class PBActivationsFilter
            implements
            ActivationsFilter,
            AgendaFilter {
        // serialized dormant activations, keyed by (package, rule, tuple)
        private Map<ActivationKey, ProtobufMessages.Activation> dormantActivations;
        // serialized rule network evaluator activations, same keying
        private Map<ActivationKey, ProtobufMessages.Activation> rneActivations;
        // tuples recreated during deserialization, for later correlation
        private Map<ActivationKey, LeftTuple> tuplesCache;
        // rule agenda items queued for re-evaluation by fireRNEAs()
        private Queue<RuleAgendaItem> rneaToFire;
        public PBActivationsFilter() {
            this.dormantActivations = new HashMap<ProtobufInputMarshaller.ActivationKey, ProtobufMessages.Activation>();
            this.rneActivations = new HashMap<ProtobufInputMarshaller.ActivationKey, ProtobufMessages.Activation>();
            this.tuplesCache = new HashMap<ProtobufInputMarshaller.ActivationKey, LeftTuple>();
            this.rneaToFire = new ConcurrentLinkedQueue<RuleAgendaItem>();
        }
        public Map<ActivationKey, ProtobufMessages.Activation> getDormantActivationsMap() {
            return this.dormantActivations;
        }
        /**
         * Accepts every rule agenda item (queuing those whose serialized
         * counterpart was evaluated, or is absent, for re-evaluation), and accepts
         * a regular activation only when it was not dormant at marshalling time.
         */
        public boolean accept(Activation activation,
                              InternalWorkingMemory workingMemory,
                              TerminalNode rtn) {
            if ( activation.isRuleAgendaItem() ) {
                ActivationKey key = PersisterHelper.createActivationKey( activation.getRule().getPackageName(), activation.getRule().getName(), activation.getTuple() );
                if ( !this.rneActivations.containsKey( key ) || this.rneActivations.get( key ).getEvaluated() ) {
                    rneaToFire.add( (RuleAgendaItem) activation );
                }
                return true;
            } else {
                ActivationKey key = PersisterHelper.createActivationKey( rtn.getRule().getPackageName(), rtn.getRule().getName(), activation.getTuple() );
                // add the tuple to the cache for correlation
                this.tuplesCache.put( key, activation.getTuple() );
                // check if there was an active activation for it
                return !this.dormantActivations.containsKey( key );
            }
        }
        public Map<ActivationKey, LeftTuple> getTuplesCache() {
            return tuplesCache;
        }
        public Map<ActivationKey, ProtobufMessages.Activation> getRneActivations() {
            return rneActivations;
        }
        /**
         * Re-evaluates every queued rule network evaluator against the restored
         * working memory, draining the queue as it goes.
         */
        public void fireRNEAs(final InternalWorkingMemory wm) {
            RuleAgendaItem rai = null;
            while ( (rai = rneaToFire.poll()) != null ) {
                RuleExecutor ruleExecutor = rai.getRuleExecutor();
                ruleExecutor.reEvaluateNetwork( wm, new org.drools.core.util.LinkedList<StackEntry>(), false );
                ruleExecutor.removeRuleAgendaItemWhenEmpty( wm );
            }
        }
        /**
         * AgendaFilter variant: accepts a match only when it was not dormant at
         * marshalling time, caching its tuple for correlation.
         */
        @Override
        public boolean accept(Match match) {
            LeftTuple tuple = ((Activation)match).getTuple();
            ActivationKey key = PersisterHelper.createActivationKey( match.getRule().getPackageName(),
                                                                     match.getRule().getName(),
                                                                     tuple );
            // add the tuple to the cache for correlation
            this.tuplesCache.put( key, tuple );
            // check if there was an active activation for it
            return !this.dormantActivations.containsKey( key );
        }
    }
public static class ActivationKey {
private final String pkgName;
private final String ruleName;
private final int[] tuple;
public ActivationKey(String pkgName,
String ruleName,
int[] tuple) {
this.pkgName = pkgName;
this.ruleName = ruleName;
this.tuple = tuple;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((pkgName == null) ? 0 : pkgName.hashCode());
result = prime * result + ((ruleName == null) ? 0 : ruleName.hashCode());
result = prime * result + Arrays.hashCode( tuple );
return result;
}
@Override
public boolean equals(Object obj) {
if ( this == obj ) return true;
if ( obj == null ) return false;
if ( getClass() != obj.getClass() ) return false;
ActivationKey other = (ActivationKey) obj;
if ( pkgName == null ) {
if ( other.pkgName != null ) return false;
} else if ( !pkgName.equals( other.pkgName ) ) return false;
if ( ruleName == null ) {
if ( other.ruleName != null ) return false;
} else if ( !ruleName.equals( other.ruleName ) ) return false;
if ( !Arrays.equals( tuple, other.tuple ) ) return false;
return true;
}
}
public static class TupleKey {
private final int[] tuple;
public TupleKey(int[] tuple) {
super();
this.tuple = tuple;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + Arrays.hashCode( tuple );
return result;
}
@Override
public boolean equals(Object obj) {
if ( this == obj ) return true;
if ( obj == null ) return false;
if ( getClass() != obj.getClass() ) return false;
TupleKey other = (TupleKey) obj;
if ( !Arrays.equals( tuple, other.tuple ) ) return false;
return true;
}
}
}
| |
package ru.shutoff.caralarm;
import android.app.Activity;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.media.Ringtone;
import android.media.RingtoneManager;
import android.net.Uri;
import android.os.Vibrator;
import android.preference.PreferenceManager;
import android.support.v4.app.NotificationCompat;
import android.telephony.SmsManager;
import android.telephony.SmsMessage;
import java.util.HashMap;
import java.util.Map;
/**
 * Broadcast receiver that watches incoming SMS messages from the configured car
 * alarm phone numbers and the delivery status of command SMS sent by the app.
 *
 * Recognized incoming messages are surfaced either as a status-bar notification
 * or as a full-screen alarm activity; command replies are re-broadcast as
 * {@link #SMS_ANSWER} intents.
 */
public class SmsMonitor extends BroadcastReceiver {
    private static final String ACTION = "android.provider.Telephony.SMS_RECEIVED";
    private static final String SMS_SENT = "ru.shutoff.caralarm.SMS_SENT";

    static final String SMS_ANSWER = "ru.shutoff.caralarm.SMS_ANSWER";
    static final int INCORRECT_MESSAGE = 10001;

    // expected SMS answer per car id; filled when a command SMS is sent
    static Map<String, String> answers;

    @Override
    public void onReceive(Context context, Intent intent) {
        if (intent == null)
            return;
        String action = intent.getAction();
        if (action == null)
            return;
        if (action.equals(SMS_SENT)) {
            // delivery report for a command SMS previously sent via sendSMS()
            int result_code = getResultCode();
            if (result_code != Activity.RESULT_OK) {
                // sending failed - propagate the error code to listeners
                Intent i = new Intent(SMS_ANSWER);
                i.putExtra(Names.ANSWER, result_code);
                i.putExtra(Names.ID, intent.getStringExtra(Names.ID));
                context.sendBroadcast(i);
            }
            if (answers == null)
                answers = new HashMap<String, String>();
            // remember which answer text is expected from this car
            answers.put(intent.getStringExtra(Names.ID), intent.getStringExtra(Names.ANSWER));
            return;
        }
        if (action.equals(ACTION)) {
            // BUG FIX: guard against a broadcast delivered without extras/pdus,
            // which previously caused a NullPointerException
            if (intent.getExtras() == null)
                return;
            Object[] pduArray = (Object[]) intent.getExtras().get("pdus");
            if ((pduArray == null) || (pduArray.length == 0))
                return;
            // a long SMS may span several PDUs; concatenate their bodies
            SmsMessage[] messages = new SmsMessage[pduArray.length];
            for (int i = 0; i < pduArray.length; i++) {
                messages[i] = SmsMessage.createFromPdu((byte[]) pduArray[i]);
            }
            String sms_from = messages[0].getOriginatingAddress();
            StringBuilder bodyText = new StringBuilder();
            for (SmsMessage m : messages) {
                bodyText.append(m.getMessageBody());
            }
            String body = bodyText.toString();
            SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
            String[] cars = preferences.getString(Names.CARS, "").split(",");
            for (String car : cars) {
                // compare digits-only so formatting differences don't matter
                String phone_config = digitsOnly(preferences.getString(Names.CAR_PHONE + car, ""));
                if ((phone_config.length() > 0) && phone_config.equals(digitsOnly(sms_from))) {
                    if (processCarMessage(context, body, car))
                        abortBroadcast(); // consume messages we fully handled
                    return;
                }
            }
        }
    }

    /**
     * Strips every non-digit character from a phone number so differently
     * formatted numbers can be compared.
     */
    String digitsOnly(String phone) {
        return phone.replaceAll("[^0-9]", "");
    }

    // message prefixes treated as notifications (index-aligned with R.string.notification)
    static String[] notifications = {
            "ALARM Light shock",
            "Low Card Battery",
            "Supply reserve",
            "Supply regular",
            "ERROR LAN-devices",
            "Low reserve voltage",
            "Roaming. Internet OFF"
    };

    // message prefixes treated as alarms (index-aligned with R.string.alarm)
    static String[] alarms = {
            "ALARM Heavy shock",
            "ALARM Trunk",
            "ALARM Hood",
            "ALARM Doors",
            "ALARM Lock",
            "ALARM MovTilt sensor",
            "ALARM Rogue"
    };

    /**
     * Handles an SMS known to originate from one of the configured cars.
     *
     * @return true when the message was recognized (command answer, notification
     *         or alarm) and fully handled
     */
    boolean processCarMessage(Context context, String body, String car_id) {
        if ((answers != null) && answers.containsKey(car_id)) {
            String answer = answers.get(car_id);
            // BUG FIX: the original used body.substring(0, answer.length()), which
            // throws StringIndexOutOfBoundsException when the reply is shorter
            // than the expected answer; compare() performs a guarded prefix check
            if ((answer != null) && compare(body, answer)) {
                answers.remove(car_id);
                Intent i = new Intent(SMS_ANSWER);
                i.putExtra(Names.ANSWER, Activity.RESULT_OK);
                i.putExtra(Names.ID, car_id);
                context.sendBroadcast(i);
                return true;
            }
            if (body.equals("Incorrect Message")) {
                answers.remove(car_id);
                Intent i = new Intent(SMS_ANSWER);
                i.putExtra(Names.ANSWER, INCORRECT_MESSAGE);
                i.putExtra(Names.ID, car_id);
                context.sendBroadcast(i);
                return true;
            }
        }
        for (int i = 0; i < notifications.length; i++) {
            if (compare(body, notifications[i])) {
                String[] msg = context.getString(R.string.notification).split("\\|");
                // BUG FIX: fall back to the raw message when the localized string
                // has fewer segments than expected (avoids ArrayIndexOutOfBounds)
                showNotification(context, (i < msg.length) ? msg[i] : notifications[i], car_id);
                return true;
            }
        }
        for (int i = 0; i < alarms.length; i++) {
            if (compare(body, alarms[i])) {
                String[] msg = context.getString(R.string.alarm).split("\\|");
                showAlarm(context, (i < msg.length) ? msg[i] : alarms[i], car_id);
                return true;
            }
        }
        return false;
    }

    /**
     * Case-insensitive prefix check: true when body starts with message.
     */
    static boolean compare(String body, String message) {
        if (body.length() < message.length())
            return false;
        return body.substring(0, message.length()).equalsIgnoreCase(message);
    }

    /**
     * Shows a status-bar notification for the given text, plays the configured
     * notification sound (if the notification stream is audible) and vibrates.
     */
    private void showNotification(Context context, String text, String car_id) {
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
        String title = "Car Alarm";
        String[] cars = preferences.getString(Names.CARS, "").split(",");
        if (cars.length > 1) {
            // several cars configured - identify which one this message is about
            title = preferences.getString(Names.CAR_NAME, "");
            if (title.length() == 0) {
                title = context.getString(R.string.car);
                if (car_id.length() > 0)
                    title += " " + car_id;
            }
        }
        NotificationCompat.Builder builder =
                new NotificationCompat.Builder(context)
                        .setSmallIcon(R.drawable.ic_launcher)
                        .setContentTitle(title)
                        .setContentText(text);
        Intent notificationIntent = new Intent(context, MainActivity.class);
        notificationIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        notificationIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        PendingIntent contentIntent = PendingIntent.getActivity(context, 0, notificationIntent,
                PendingIntent.FLAG_UPDATE_CURRENT);
        builder.setContentIntent(contentIntent);
        // allocate a fresh notification id and remember an SMS alarm happened
        int id = preferences.getInt(Names.IDS, 0);
        id++;
        SharedPreferences.Editor ed = preferences.edit();
        ed.putInt(Names.IDS, id);
        ed.putBoolean(Names.SMS_ALARM, true);
        ed.commit();
        // Add as notification
        NotificationManager manager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
        manager.notify(id, builder.build());
        String sound = preferences.getString(Names.NOTIFY, "");
        Uri uri = Uri.parse(sound);
        Ringtone ringtone = RingtoneManager.getRingtone(context, uri);
        if (ringtone == null)
            uri = RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION);
        try {
            AudioManager audioManager = (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
            if (audioManager.getStreamVolume(AudioManager.STREAM_NOTIFICATION) != 0) {
                MediaPlayer player = new MediaPlayer();
                player.setDataSource(context, uri);
                player.setAudioStreamType(AudioManager.STREAM_NOTIFICATION);
                player.setLooping(false);
                // BUG FIX: release the player once playback completes; the
                // original leaked the MediaPlayer instance
                player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                    @Override
                    public void onCompletion(MediaPlayer mp) {
                        mp.release();
                    }
                });
                player.prepare();
                player.start();
            }
            Vibrator vibrator = (Vibrator) context.getSystemService(Context.VIBRATOR_SERVICE);
            if (vibrator != null)
                vibrator.vibrate(500);
        } catch (Exception err) {
            // best effort: a missing/broken ringtone must not crash the receiver
        }
    }

    /**
     * Launches the full-screen alarm activity for the given alarm text.
     */
    private void showAlarm(Context context, String text, String car_id) {
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
        SharedPreferences.Editor ed = preferences.edit();
        ed.putBoolean(Names.SMS_ALARM, true);
        ed.commit();
        Intent alarmIntent = new Intent(context, Alarm.class);
        alarmIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        alarmIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
        alarmIntent.putExtra(Names.ALARM, text);
        alarmIntent.putExtra(Names.ID, car_id);
        context.startActivity(alarmIntent);
    }

    /**
     * Sends a command SMS to the given car. The expected answer text travels in
     * the SMS_SENT pending intent and is registered in {@link #answers} when the
     * radio reports the send result (see onReceive()).
     */
    static void sendSMS(Context context, String car_id, String sms, String answer) {
        Intent intent = new Intent(SMS_SENT);
        intent.putExtra(Names.ID, car_id);
        intent.putExtra(Names.ANSWER, answer);
        PendingIntent sendPI = PendingIntent.getBroadcast(context, 0, intent, PendingIntent.FLAG_UPDATE_CURRENT);
        SmsManager smsManager = SmsManager.getDefault();
        SharedPreferences preferences = PreferenceManager.getDefaultSharedPreferences(context);
        String phoneNumber = preferences.getString(Names.CAR_PHONE + car_id, "");
        try {
            smsManager.sendTextMessage(phoneNumber, null, sms, sendPI, null);
        } catch (Exception ex) {
            // the SMS could not even be handed to the radio - report cancellation
            try {
                sendPI.send(context, Activity.RESULT_CANCELED, intent);
            } catch (Exception e) {
                // nothing more we can do here
            }
        }
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.wss4j.dom.message;
import java.io.File;
import java.security.cert.X509Certificate;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import javax.security.auth.callback.CallbackHandler;
import javax.xml.crypto.dom.DOMCryptoContext;
import javax.xml.crypto.dom.DOMStructure;
import javax.xml.crypto.dsig.XMLSignatureFactory;
import javax.xml.crypto.dsig.keyinfo.KeyInfo;
import javax.xml.crypto.dsig.keyinfo.KeyInfoFactory;
import javax.xml.crypto.dsig.keyinfo.X509Data;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.wss4j.common.WSEncryptionPart;
import org.apache.wss4j.common.bsp.BSPRule;
import org.apache.wss4j.common.crypto.Crypto;
import org.apache.wss4j.common.crypto.CryptoFactory;
import org.apache.wss4j.common.crypto.CryptoType;
import org.apache.wss4j.common.ext.WSSecurityException;
import org.apache.wss4j.common.token.Reference;
import org.apache.wss4j.common.token.SecurityTokenReference;
import org.apache.wss4j.common.util.XMLUtils;
import org.apache.wss4j.dom.WSConstants;
import org.apache.wss4j.dom.common.CustomHandler;
import org.apache.wss4j.dom.common.KeystoreCallbackHandler;
import org.apache.wss4j.dom.common.SOAPUtil;
import org.apache.wss4j.dom.common.SecurityTestUtil;
import org.apache.wss4j.dom.engine.WSSConfig;
import org.apache.wss4j.dom.engine.WSSecurityEngine;
import org.apache.wss4j.dom.engine.WSSecurityEngineResult;
import org.apache.wss4j.dom.handler.HandlerAction;
import org.apache.wss4j.dom.handler.RequestData;
import org.apache.wss4j.dom.handler.WSHandlerConstants;
import org.apache.wss4j.dom.handler.WSHandlerResult;
import org.apache.wss4j.dom.str.STRParser.REFERENCE_TYPE;
import org.apache.wss4j.dom.util.WSSecurityUtil;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
/**
* A set of test-cases for signing and verifying SOAP requests.
*/
public class SignatureTest {
// Class-wide logger used by all signature test cases.
private static final org.slf4j.Logger LOG =
    org.slf4j.LoggerFactory.getLogger(SignatureTest.class);

// Shared engine used by verify(); individual tests may create their own.
private WSSecurityEngine secEngine = new WSSecurityEngine();
// Supplies keystore passwords during verification.
private CallbackHandler callbackHandler = new KeystoreCallbackHandler();
// Default Crypto instance, loaded once in the constructor.
private Crypto crypto;

/** Releases shared test resources once all tests in the class have run. */
@AfterAll
public static void cleanup() throws Exception {
    SecurityTestUtil.cleanup();
}

/** Initialises the WSS4J configuration and loads the default Crypto instance. */
public SignatureTest() throws Exception {
    WSSConfig.init();
    crypto = CryptoFactory.getInstance();
}
/**
 * Signs and verifies a SOAP envelope using the Issuer Serial key identifier type.
 * <p/>
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testX509SignatureIS() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.ISSUER_SERIAL);
    LOG.info("Before Signing IS....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message with IssuerSerial key identifier:");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    LOG.info("After Signing IS....");
    WSHandlerResult results = verify(signedDoc);
    // The engine must report both the certificate used and how it was referenced.
    WSSecurityEngineResult actionResult =
        results.getActionResults().get(WSConstants.SIGN).get(0);
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_CERTIFICATE));
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE));
    REFERENCE_TYPE referenceType =
        (REFERENCE_TYPE)actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE);
    assertTrue(referenceType == REFERENCE_TYPE.ISSUER_SERIAL);
}

/**
 * As {@link #testX509SignatureIS()}, but the signing certificate is also
 * attached to the message (setIncludeSignatureToken) alongside the
 * IssuerSerial reference.
 */
@Test
public void testX509SignatureISAttached() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.ISSUER_SERIAL);
    builder.setIncludeSignatureToken(true);
    LOG.info("Before Signing IS....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message with IssuerSerial key identifier:");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    LOG.info("After Signing IS....");
    WSHandlerResult results = verify(signedDoc);
    WSSecurityEngineResult actionResult =
        results.getActionResults().get(WSConstants.SIGN).get(0);
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_CERTIFICATE));
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE));
    REFERENCE_TYPE referenceType =
        (REFERENCE_TYPE)actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE);
    assertTrue(referenceType == REFERENCE_TYPE.ISSUER_SERIAL);
}
/**
 * Test that signs (twice) and verifies a WS-Security envelope.
 * <p/>
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testDoubleX509SignatureIS() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    // First build adds one Signature; the second build must succeed on the
    // already-signed document and both signatures must verify.
    builder.build(crypto);
    Document signedDoc1 = builder.build(crypto);
    verify(signedDoc1);
}
/**
 * Signs a SOAP envelope with the default key identifier type and then runs
 * the result back through the security engine for verification.
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testIssuerSerialSignature() throws Exception {
    final Document envelope = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    final WSSecHeader header = new WSSecHeader(envelope);
    header.insertSecurityHeader();

    final WSSecSignature signer = new WSSecSignature(header);
    signer.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");

    LOG.info("Before Signing....");
    final Document signed = signer.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("After Signing....");
        LOG.debug(XMLUtils.prettyDocumentToString(signed));
    }
    verify(signed);
}
/**
 * Signs with inclusive (non-exclusive) XML canonicalization, which violates
 * Basic Security Profile rules R5404/R5406: verification must fail unless
 * those BSP rules are explicitly ignored.
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testSignatureInclusiveC14N() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setSigCanonicalization(WSConstants.C14N_OMIT_COMMENTS);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    LOG.info("Before Signing....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("After Signing....");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    WSSecurityEngine newEngine = new WSSecurityEngine();
    // Default BSP-compliant processing must reject the inclusive c14n algorithm.
    try {
        newEngine.processSecurityHeader(doc, null, null, crypto);
        fail("Failure expected on a bad c14n algorithm");
    } catch (WSSecurityException ex) {
        assertTrue(ex.getErrorCode() == WSSecurityException.ErrorCode.INVALID_SECURITY);
    }
    // With the relevant BSP rules ignored, the same message must verify.
    RequestData data = new RequestData();
    data.setSigVerCrypto(crypto);
    List<BSPRule> ignoredRules = new ArrayList<>();
    ignoredRules.add(BSPRule.R5404);
    ignoredRules.add(BSPRule.R5406);
    data.setIgnoredBSPRules(ignoredRules);
    newEngine.processSecurityHeader(doc, data);
}

/**
 * Signs and verifies a WS-Security envelope with InclusiveNamespaces
 * PrefixLists added to the canonicalization elements.
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testSignatureInclusivePrefixes() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setAddInclusivePrefixes(true);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    LOG.info("Before Signing....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("After Signing....");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    verify(signedDoc);
}

/**
 * Signs and verifies using a direct reference to an attached
 * BinarySecurityToken.
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testBSTSignature() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.BST_DIRECT_REFERENCE);
    LOG.info("Before Signing....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("After Signing....");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    WSHandlerResult results = verify(signedDoc);
    WSSecurityEngineResult actionResult =
        results.getActionResults().get(WSConstants.SIGN).get(0);
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_CERTIFICATE));
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE));
    REFERENCE_TYPE referenceType =
        (REFERENCE_TYPE)actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE);
    assertTrue(referenceType == REFERENCE_TYPE.DIRECT_REF);
}
/**
 * Signs with a full PKIPath BinarySecurityToken (certificate chain,
 * setUseSingleCertificate(false)) using the "wss40" key, and verifies with
 * the matching PKI crypto configuration.
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testBSTPKIPathSignature() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("wss40", "security");
    builder.setKeyIdentifierType(WSConstants.BST_DIRECT_REFERENCE);
    builder.setUseSingleCertificate(false);
    LOG.info("Before Signing....");
    Crypto pkiCrypto = CryptoFactory.getInstance("wss40.properties");
    Document signedDoc = builder.build(pkiCrypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("After PKI Signing....");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    secEngine.processSecurityHeader(doc, null, callbackHandler, pkiCrypto, null);
}

/**
 * Signs and verifies using the X509 Key Identifier (SKI-style direct key
 * identifier) reference type.
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testX509Signature() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.X509_KEY_IDENTIFIER);
    LOG.info("Before Signing....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("After Signing....");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    WSSecurityEngine newEngine = new WSSecurityEngine();
    WSHandlerResult results =
        newEngine.processSecurityHeader(doc, null, null, crypto);
    WSSecurityEngineResult actionResult =
        results.getActionResults().get(WSConstants.SIGN).get(0);
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_CERTIFICATE));
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE));
    REFERENCE_TYPE referenceType =
        (REFERENCE_TYPE)actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE);
    assertTrue(referenceType == REFERENCE_TYPE.KEY_IDENTIFIER);
}
/**
 * Test that signs and verifies a WS-Security envelope.
 * The test uses the ThumbprintSHA1 key identifier type.
 * <p/>
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testX509SignatureThumb() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.THUMBPRINT_IDENTIFIER);
    LOG.info("Before Signing ThumbprintSHA1....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message with ThumbprintSHA1 key identifier:");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    LOG.info("After Signing ThumbprintSHA1....");
    WSHandlerResult results = verify(signedDoc);
    WSSecurityEngineResult actionResult =
        results.getActionResults().get(WSConstants.SIGN).get(0);
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_CERTIFICATE));
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE));
    REFERENCE_TYPE referenceType =
        (REFERENCE_TYPE)actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE);
    assertTrue(referenceType == REFERENCE_TYPE.THUMBPRINT_SHA1);
}

/**
 * As {@link #testX509SignatureThumb()}, but with the signature token also
 * attached to the message (setIncludeSignatureToken).
 */
@Test
public void testX509SignatureThumbAttached() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.THUMBPRINT_IDENTIFIER);
    builder.setIncludeSignatureToken(true);
    LOG.info("Before Signing ThumbprintSHA1....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message with ThumbprintSHA1 key identifier:");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    LOG.info("After Signing ThumbprintSHA1....");
    WSHandlerResult results = verify(signedDoc);
    WSSecurityEngineResult actionResult =
        results.getActionResults().get(WSConstants.SIGN).get(0);
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_CERTIFICATE));
    assertNotNull(actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE));
    REFERENCE_TYPE referenceType =
        (REFERENCE_TYPE)actionResult.get(WSSecurityEngineResult.TAG_X509_REFERENCE_TYPE);
    assertTrue(referenceType == REFERENCE_TYPE.THUMBPRINT_SHA1);
}

/**
 * Test that signs (twice) and verifies a WS-Security envelope.
 * The test uses the ThumbprintSHA1 key identifier type.
 * <p/>
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testDoubleX509SignatureThumb() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.THUMBPRINT_IDENTIFIER);
    // Two successive builds: the second signs the already-signed document.
    builder.build(crypto);
    Document signedDoc1 = builder.build(crypto);
    verify(signedDoc1);
}
/**
 * Test that signs and verifies a Timestamp. The request is then modified so that the
 * Timestamp has a default (WSU) namespace inserted. The signature validation should still
 * pass due to c14n (see WSS-181).
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testValidModifiedSignature() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    LOG.info("Before Signing....");
    WSSecTimestamp timestamp = new WSSecTimestamp(secHeader);
    timestamp.setTimeToLive(300);
    timestamp.build();
    // Sign the Timestamp element in addition to the default Body part.
    WSEncryptionPart encP =
        new WSEncryptionPart(
            "Timestamp",
            WSConstants.WSU_NS,
            "");
    builder.getParts().add(encP);
    Document signedDoc = builder.build(crypto);
    // Post-signing modification: redeclare the WSU namespace as the default
    // namespace on the Timestamp. Canonicalization should absorb this change.
    Element secHeaderElement = secHeader.getSecurityHeaderElement();
    Node timestampNode =
        secHeaderElement.getElementsByTagNameNS(WSConstants.WSU_NS, "Timestamp").item(0);
    ((Element)timestampNode).setAttributeNS(
        WSConstants.XMLNS_NS, "xmlns", WSConstants.WSU_NS
    );
    if (LOG.isDebugEnabled()) {
        LOG.debug("After Signing....");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    verify(signedDoc);
}
/**
 * Sign using a different digest algorithm (SHA-256).
 * <p/>
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testX509SignatureSha256() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.ISSUER_SERIAL);
    // RSA-SHA256 signature algorithm with a SHA-256 digest (defaults are SHA-1 era).
    builder.setSignatureAlgorithm("http://www.w3.org/2001/04/xmldsig-more#rsa-sha256");
    builder.setDigestAlgo("http://www.w3.org/2001/04/xmlenc#sha256");
    LOG.info("Before Signing IS....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message with IssuerSerial key identifier:");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    LOG.info("After Signing IS....");
    verify(signedDoc);
}
/**
 * A test for "SignatureAction does not set DigestAlgorithm on WSSecSignature instance".
 * Configures signature and digest algorithms through handler properties and
 * asserts that both URIs appear in the signed message.
 */
@Test
public void
testWSS170() throws Exception {
    final WSSConfig cfg = WSSConfig.getNewInstance();
    final RequestData reqData = new RequestData();
    reqData.setWssConfig(cfg);
    reqData.setUsername("16c73ab6-b892-458f-abf5-2f875f74882e");
    java.util.Map<String, Object> config = new java.util.TreeMap<>();
    config.put(WSHandlerConstants.SIG_PROP_FILE, "crypto.properties");
    config.put("password", "security");
    config.put(
        WSHandlerConstants.SIG_ALGO,
        "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"
    );
    config.put(
        WSHandlerConstants.SIG_DIGEST_ALGO,
        "http://www.w3.org/2001/04/xmlenc#sha256"
    );
    reqData.setMsgContext(config);
    final Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    CustomHandler handler = new CustomHandler();
    HandlerAction action = new HandlerAction(WSConstants.SIGN);
    handler.send(
        doc,
        reqData,
        Collections.singletonList(action),
        true
    );
    String outputString =
        XMLUtils.prettyDocumentToString(doc);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message:");
        LOG.debug(outputString);
    }
    // The configured algorithms must have been honoured by the handler.
    assertTrue(
        outputString.contains("http://www.w3.org/2001/04/xmldsig-more#rsa-sha256")
    );
    assertTrue(
        outputString.contains("http://www.w3.org/2001/04/xmlenc#sha256")
    );
    verify(doc);
}
/**
 * This is a test for WSS-234 -
 * "When a document contains a comment as its first child element,
 * wss4j will not find the SOAP body."
 */
@Test
public void testWSS234() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    LOG.info("Before Signing....");
    Document signedDoc = builder.build(crypto);
    // Add a comment node as the first node element
    Node firstChild = signedDoc.getFirstChild();
    Node newNode = signedDoc.removeChild(firstChild);
    Node commentNode = signedDoc.createComment("This is a comment");
    signedDoc.appendChild(commentNode);
    signedDoc.appendChild(newNode);
    if (LOG.isDebugEnabled()) {
        LOG.debug("After Signing....");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    // Verification must still locate the SOAP Body despite the leading comment.
    verify(signedDoc);
}

/**
 * Test that signs and verifies a Timestamp. The Signature element is appended to the security
 * header, and so appears after the Timestamp element.
 */
@Test
public void testSignedTimestamp() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecTimestamp timestamp = new WSSecTimestamp(secHeader);
    timestamp.setTimeToLive(300);
    timestamp.build();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    WSEncryptionPart encP =
        new WSEncryptionPart(
            "Timestamp",
            WSConstants.WSU_NS,
            "");
    builder.getParts().add(encP);
    // Use the prepare/addReferences/computeSignature API so the Signature is
    // appended (prepend == false) after the Timestamp in the header.
    builder.prepare(crypto);
    List<javax.xml.crypto.dsig.Reference> referenceList =
        builder.addReferencesToSign(builder.getParts());
    builder.computeSignature(referenceList, false, null);
    if (LOG.isDebugEnabled()) {
        LOG.debug("After Signing....");
        String outputString =
            XMLUtils.prettyDocumentToString(doc);
        LOG.debug(outputString);
    }
    verify(doc);
}
/**
 * This is a test for WSS-283 - "ClassCastException when signing message with existing
 * WSSE header containing Text as first child":
 *
 * https://issues.apache.org/jira/browse/WSS-283
 */
@Test
public void testWSS283() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    Element secHeaderElement = secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.ISSUER_SERIAL);
    // A text node as the security header's first child used to trigger a
    // ClassCastException during signing.
    Node textNode = doc.createTextNode("This is a text node");
    secHeaderElement.appendChild(textNode);
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message with text node:");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    verify(signedDoc);
}

/**
 * Create a signature that uses a custom SecurityTokenReference.
 * Note: the result is only built, not verified — the custom "custom-uri"
 * reference cannot be resolved by the stock engine.
 */
@Test
public void testCustomSTR() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setKeyIdentifierType(WSConstants.ISSUER_SERIAL);
    LOG.info("Before Signing IS....");
    SecurityTokenReference secRef = new SecurityTokenReference(doc);
    Reference ref = new Reference(doc);
    ref.setURI("custom-uri");
    secRef.setReference(ref);
    builder.setSecurityTokenReference(secRef);
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
}
/**
 * Signs using the crypto's default X.509 identifier with a null password
 * (the password is resolved from the crypto configuration), using the
 * Issuer Serial key identifier type.
 * <p/>
 *
 * @throws java.lang.Exception Thrown when there is any problem in signing or verification
 */
@Test
public void testX509SignatureDefaultPassword() throws Exception {
    Crypto passwordCrypto = CryptoFactory.getInstance("alice.properties");
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo(passwordCrypto.getDefaultX509Identifier(), null);
    builder.setKeyIdentifierType(WSConstants.ISSUER_SERIAL);
    LOG.info("Before Signing IS....");
    Document signedDoc = builder.build(passwordCrypto);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message with IssuerSerial key identifier:");
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    LOG.info("After Signing IS....");
    WSSecurityEngine newEngine = new WSSecurityEngine();
    newEngine.processSecurityHeader(doc, null, null, passwordCrypto);
}

/**
 * A test for "There is an issue with the position of the <Timestamp> element in the
 * <Security> header when using WSS4J calling .NET Web Services with WS-Security."
 */
@Test
public void
testWSS231() throws Exception {
    final WSSConfig cfg = WSSConfig.getNewInstance();
    final RequestData reqData = new RequestData();
    reqData.setWssConfig(cfg);
    reqData.setUsername("16c73ab6-b892-458f-abf5-2f875f74882e");
    java.util.Map<String, Object> config = new java.util.TreeMap<>();
    config.put(WSHandlerConstants.SIG_PROP_FILE, "crypto.properties");
    config.put("password", "security");
    config.put(
        WSHandlerConstants.SIGNATURE_PARTS, "{}{" + WSConstants.WSU_NS + "}Timestamp"
    );
    reqData.setMsgContext(config);
    final Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    CustomHandler handler = new CustomHandler();
    List<HandlerAction> actions = new ArrayList<>();
    actions.add(new HandlerAction(WSConstants.SIGN));
    actions.add(new HandlerAction(WSConstants.TS));
    handler.send(
        doc,
        reqData,
        actions,
        true
    );
    String outputString =
        XMLUtils.prettyDocumentToString(doc);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message:");
        LOG.debug(outputString);
    }
    WSHandlerResult results = verify(doc);
    // Both the signature and the timestamp action must show up in the results.
    List<Integer> receivedActions = new ArrayList<>();
    receivedActions.add(WSConstants.SIGN);
    receivedActions.add(WSConstants.TS);
    assertTrue(handler.checkResults(results.getResults(), receivedActions));
}
/**
 * Builds a message with the SIGN, ENCR, TS action order and checks that the
 * handler processes the actions without error (output inspected via debug log only).
 */
@Test
public void
testSignatureEncryptTimestampOrder() throws Exception {
    final WSSConfig cfg = WSSConfig.getNewInstance();
    final RequestData reqData = new RequestData();
    reqData.setWssConfig(cfg);
    reqData.setUsername("16c73ab6-b892-458f-abf5-2f875f74882e");
    java.util.Map<String, Object> config = new java.util.TreeMap<>();
    config.put(WSHandlerConstants.SIG_PROP_FILE, "crypto.properties");
    config.put(WSHandlerConstants.ENC_PROP_FILE, "crypto.properties");
    config.put("password", "security");
    config.put(
        WSHandlerConstants.SIGNATURE_PARTS, "{}{" + WSConstants.WSU_NS + "}Timestamp"
    );
    reqData.setMsgContext(config);
    final Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    CustomHandler handler = new CustomHandler();
    List<HandlerAction> actions = new ArrayList<>();
    actions.add(new HandlerAction(WSConstants.SIGN));
    actions.add(new HandlerAction(WSConstants.ENCR));
    actions.add(new HandlerAction(WSConstants.TS));
    handler.send(
        doc,
        reqData,
        actions,
        true
    );
    String outputString =
        XMLUtils.prettyDocumentToString(doc);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message:");
        LOG.debug(outputString);
    }
}

/**
 * As above but with the ENCR, SIGN, TS action order.
 */
@Test
public void
testEncryptSignatureTimestampOrder() throws Exception {
    final WSSConfig cfg = WSSConfig.getNewInstance();
    final RequestData reqData = new RequestData();
    reqData.setWssConfig(cfg);
    reqData.setUsername("16c73ab6-b892-458f-abf5-2f875f74882e");
    java.util.Map<String, Object> config = new java.util.TreeMap<>();
    config.put(WSHandlerConstants.SIG_PROP_FILE, "crypto.properties");
    config.put(WSHandlerConstants.ENC_PROP_FILE, "crypto.properties");
    config.put("password", "security");
    config.put(
        WSHandlerConstants.SIGNATURE_PARTS, "{}{" + WSConstants.WSU_NS + "}Timestamp"
    );
    reqData.setMsgContext(config);
    final Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    CustomHandler handler = new CustomHandler();
    List<HandlerAction> actions = new ArrayList<>();
    actions.add(new HandlerAction(WSConstants.ENCR));
    actions.add(new HandlerAction(WSConstants.SIGN));
    actions.add(new HandlerAction(WSConstants.TS));
    handler.send(
        doc,
        reqData,
        actions,
        true
    );
    String outputString =
        XMLUtils.prettyDocumentToString(doc);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message:");
        LOG.debug(outputString);
    }
}
/**
 * Signs via the handler with an explicit (inclusive, with-comments) c14n
 * algorithm and verifies with the corresponding BSP rules disabled.
 */
@Test
public void testWSHandlerSignatureCanonicalization() throws Exception {
    final WSSConfig cfg = WSSConfig.getNewInstance();
    final RequestData reqData = new RequestData();
    reqData.setWssConfig(cfg);
    reqData.setUsername("16c73ab6-b892-458f-abf5-2f875f74882e");
    java.util.Map<String, Object> config = new java.util.TreeMap<>();
    config.put(WSHandlerConstants.SIG_PROP_FILE, "crypto.properties");
    config.put(WSHandlerConstants.SIG_C14N_ALGO, WSConstants.C14N_WITH_COMMENTS);
    config.put("password", "security");
    reqData.setMsgContext(config);
    final Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    CustomHandler handler = new CustomHandler();
    HandlerAction action = new HandlerAction(WSConstants.SIGN);
    handler.send(
        doc,
        reqData,
        Collections.singletonList(action),
        true
    );
    String outputString =
        XMLUtils.prettyDocumentToString(doc);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message:");
        LOG.debug(outputString);
    }
    // Inclusive c14n violates BSP rules R5404/R5406, so they must be ignored
    // for verification to succeed.
    RequestData data = new RequestData();
    data.setWssConfig(WSSConfig.getNewInstance());
    data.setSigVerCrypto(crypto);
    List<BSPRule> disabledRules = new ArrayList<>();
    disabledRules.add(BSPRule.R5404);
    disabledRules.add(BSPRule.R5406);
    data.setIgnoredBSPRules(disabledRules);
    WSSecurityEngine newSecEngine = new WSSecurityEngine();
    WSHandlerResult results =
        newSecEngine.processSecurityHeader(doc, data);
    assertTrue(handler.checkResults(results.getResults(),
                                    Collections.singletonList(WSConstants.SIGN)));
}
// See WSS-540
/**
 * Loads the signature crypto properties via an absolute file-system path
 * (rather than from the classpath) and checks signing/verification still work.
 */
@Test
public void testLoadSignaturePropertiesFromFileSystem() throws Exception {
    final WSSConfig cfg = WSSConfig.getNewInstance();
    final RequestData reqData = new RequestData();
    reqData.setWssConfig(cfg);
    reqData.setUsername("16c73ab6-b892-458f-abf5-2f875f74882e");
    java.util.Map<String, Object> config = new java.util.TreeMap<>();
    // "basedir" is set by the Maven build; fall back to the CWD when run standalone.
    String basedir = System.getProperty("basedir");
    if (basedir == null) {
        basedir = new File(".").getCanonicalPath();
    }
    File propsFile = new File(basedir + "/src/test/resources/crypto.properties");
    config.put(WSHandlerConstants.SIG_PROP_FILE, propsFile.getPath());
    config.put("password", "security");
    reqData.setMsgContext(config);
    final Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    CustomHandler handler = new CustomHandler();
    HandlerAction action = new HandlerAction(WSConstants.SIGN);
    handler.send(
        doc,
        reqData,
        Collections.singletonList(action),
        true
    );
    String outputString =
        XMLUtils.prettyDocumentToString(doc);
    if (LOG.isDebugEnabled()) {
        LOG.debug("Signed message:");
        LOG.debug(outputString);
    }
    RequestData data = new RequestData();
    data.setWssConfig(WSSConfig.getNewInstance());
    data.setSigVerCrypto(crypto);
    WSSecurityEngine newSecEngine = new WSSecurityEngine();
    WSHandlerResult results =
        newSecEngine.processSecurityHeader(doc, data);
    assertTrue(handler.checkResults(results.getResults(),
                                    Collections.singletonList(WSConstants.SIGN)));
}

/**
 * Adds a comment node inside the (signed) SOAP Body after signing; the
 * signature must still verify.
 */
@Test
public void testCommentInSOAPBody() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    LOG.info("Before Signing....");
    Document signedDoc = builder.build(crypto);
    // Add a comment node
    Element body = WSSecurityUtil.findBodyElement(signedDoc);
    Node commentNode = signedDoc.createComment("This is a comment");
    body.getFirstChild().appendChild(commentNode);
    if (LOG.isDebugEnabled()) {
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    verify(signedDoc);
}
/**
 * Builds a KeyInfo element manually via the JSR-105 KeyInfoFactory and
 * injects it into the signature with setCustomKeyInfoElement. The result is
 * only built, not verified.
 */
@Test
public void testCustomKeyInfoElementCreation() throws Exception {
    Document doc = SOAPUtil.toSOAPPart(SOAPUtil.SAMPLE_SOAP_MSG);
    WSSecHeader secHeader = new WSSecHeader(doc);
    secHeader.insertSecurityHeader();
    // Create the KeyInfo
    DocumentBuilderFactory docBuilderFactory =
        DocumentBuilderFactory.newInstance();
    docBuilderFactory.setNamespaceAware(true);
    DocumentBuilder docBuilder = docBuilderFactory.newDocumentBuilder();
    Document keyInfoDoc = docBuilder.newDocument();
    CryptoType cryptoType = new CryptoType(CryptoType.TYPE.ALIAS);
    cryptoType.setAlias("16c73ab6-b892-458f-abf5-2f875f74882e");
    X509Certificate[] certs = crypto.getX509Certificates(cryptoType);
    KeyInfoFactory keyInfoFactory =
        XMLSignatureFactory.getInstance("DOM", "ApacheXMLDSig").getKeyInfoFactory();
    // X.509
    X509Data x509Data = keyInfoFactory.newX509Data(Collections.singletonList(certs[0]));
    KeyInfo keyInfo = keyInfoFactory.newKeyInfo(Collections.singletonList(x509Data), null);
    // Marshal the KeyInfo to DOM
    // A throwaway "temp" parent is used because marshal() needs a DOM anchor.
    Element parent = keyInfoDoc.createElement("temp");
    DOMCryptoContext cryptoContext = new DOMCryptoContext() { };
    cryptoContext.putNamespacePrefix(WSConstants.SIG_NS, WSConstants.SIG_PREFIX);
    keyInfo.marshal(new DOMStructure(parent), cryptoContext);
    Element keyInfoElement = (Element)parent.getFirstChild();
    WSSecSignature builder = new WSSecSignature(secHeader);
    builder.setUserInfo("16c73ab6-b892-458f-abf5-2f875f74882e", "security");
    builder.setCustomKeyInfoElement(keyInfoElement);
    LOG.info("Before Signing IS....");
    Document signedDoc = builder.build(crypto);
    if (LOG.isDebugEnabled()) {
        String outputString =
            XMLUtils.prettyDocumentToString(signedDoc);
        LOG.debug(outputString);
    }
    assertNotNull(signedDoc);
}
    /**
     * Verifies the security header of the given SOAP envelope.
     * All signatures contained in the document are checked.
     *
     * @param doc the signed SOAP envelope as a DOM document
     * @return the results of processing the security header
     * @throws java.lang.Exception Thrown when there is a problem in verification
     */
    private WSHandlerResult verify(Document doc) throws Exception {
        return secEngine.processSecurityHeader(doc, null, null, crypto);
    }
}
| |
package macrobase.analysis.summary.itemset;
import com.codahale.metrics.Timer;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import macrobase.MacroBase;
import macrobase.analysis.summary.itemset.result.ItemsetWithCount;
import macrobase.datamodel.Datum;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
import java.util.stream.Collectors;
import static com.codahale.metrics.MetricRegistry.name;
public class FPGrowth {
    private static final Logger log = LoggerFactory.getLogger(FPGrowth.class);

    // Metrics timers covering the three phases of a mining run.
    private final Timer singleItemCounts = MacroBase.metrics.timer(name(FPGrowth.class, "itemCounts"));
    private final Timer insertTransactions = MacroBase.metrics.timer(name(FPGrowth.class, "insertTransactions"));
    private final Timer fpMine = MacroBase.metrics.timer(name(FPGrowth.class, "fpMine"));

    /**
     * An FP-tree: a prefix tree of transactions whose items are sorted by
     * descending frequency, plus per-item header chains linking every node
     * that carries a given item (used for bottom-up conditional mining).
     */
    class FPTree {
        private FPTreeNode root = new FPTreeNode(-1, null, 0);

        // used to calculate the order
        private Map<Integer, Double> frequentItemCounts = new HashMap<>();

        // item order -- need canonical to break ties; 0 is smallest, N is largest
        private Map<Integer, Integer> frequentItemOrder = new HashMap<>();

        // head of the linked list of tree nodes for each item
        protected Map<Integer, FPTreeNode> nodeHeaders = new HashMap<>();

        // Log the frequent-item counts and the full tree shape at debug level.
        protected void printTreeDebug() {
            log.debug("Frequent Item Counts:");
            frequentItemCounts.entrySet().forEach(e -> log.debug(String.format("%d: %f", e.getKey(), e.getValue())));
            walkTree(root, 1);
        }

        // Recursively log one node per line, indented one tab per tree level.
        private void walkTree(FPTreeNode start, int treeDepth) {
            log.debug(String.format("%s node: %d, count: %f",
                                    new String(new char[treeDepth]).replaceAll("\0", "\t"),
                                    start.getItem(), start.getCount()));

            if (start.getChildren() != null) {
                for (FPTreeNode child : start.getChildren()) {
                    walkTree(child, treeDepth + 1);
                }
            }
        }

        private class FPTreeNode {
            private int item;
            private double count;
            // next node with the same item in this item's header chain
            private FPTreeNode nextLink;
            private FPTreeNode parent;
            private List<FPTreeNode> children;

            public FPTreeNode(int item, FPTreeNode parent, int initialCount) {
                this.item = item;
                this.parent = parent;
                this.count = initialCount;
            }

            public int getItem() {
                return item;
            }

            public double getCount() {
                return count;
            }

            public void incrementCount(double by) {
                count += by;
            }

            public void setNextLink(FPTreeNode nextLink) {
                this.nextLink = nextLink;
            }

            public FPTreeNode getNextLink() {
                return nextLink;
            }

            public FPTreeNode getParent() {
                return parent;
            }

            public List<FPTreeNode> getChildren() {
                return children;
            }

            // insert the transaction at this node starting with transaction[currentIndex]
            // then find the child that matches
            public void insertTransaction(List<Integer> fullTransaction,
                                          int currentIndex,
                                          final double transactionCount) {
                incrementCount(transactionCount);

                if (currentIndex == fullTransaction.size()) {
                    return;
                }

                int currentItem = fullTransaction.get(currentIndex);

                FPTreeNode matchingChild = null;

                if (children != null) {
                    for (FPTreeNode child : children) {
                        if (child.getItem() == currentItem) {
                            matchingChild = child;
                            break;
                        }
                    }
                }

                if (matchingChild == null) {
                    matchingChild = new FPTreeNode(currentItem, this, 0);

                    // thread the new node onto the front of its item's header chain
                    FPTreeNode prevHeader = nodeHeaders.get(currentItem);
                    nodeHeaders.put(currentItem, matchingChild);

                    if (prevHeader != null) {
                        matchingChild.setNextLink(prevHeader);
                    }

                    if (children == null) {
                        children = new ArrayList<>();
                    }

                    children.add(matchingChild);
                }

                matchingChild.insertTransaction(fullTransaction, currentIndex + 1, transactionCount);
            }
        }

        /**
         * Uses caller-supplied frequent-item counts verbatim instead of
         * deriving them from the transactions.
         */
        public void setFrequentCounts(Map<Integer, Double> counts) {
            frequentItemCounts = counts;
            sortFrequentItems();
        }

        /**
         * Counts item occurrences across the transactions (1 per occurrence)
         * and retains only items meeting the support threshold.
         */
        public void insertFrequentItems(List<Set<Integer>> transactions,
                                        int countRequiredForSupport) {
            Map<Integer, Double> itemCounts = new HashMap<>();
            for (Set<Integer> t : transactions) {
                for (Integer item : t) {
                    itemCounts.compute(item, (k, v) -> v == null ? 1 : v + 1);
                }
            }

            for (Map.Entry<Integer, Double> e : itemCounts.entrySet()) {
                if (e.getValue() >= countRequiredForSupport) {
                    frequentItemCounts.put(e.getKey(), e.getValue());
                }
            }

            sortFrequentItems();
        }

        private void sortFrequentItems() {
            // we have to materialize a canonical order so that items with equal counts
            // are consistently ordered when they are sorted during transaction insertion
            List<Map.Entry<Integer, Double>> sortedItemCounts = Lists.newArrayList(frequentItemCounts.entrySet());
            sortedItemCounts.sort((i1, i2) -> frequentItemCounts.get(i1.getKey())
                    .compareTo(frequentItemCounts.get(i2.getKey())));
            for (int i = 0; i < sortedItemCounts.size(); ++i) {
                frequentItemOrder.put(sortedItemCounts.get(i).getKey(), i);
            }
        }

        /**
         * As {@link #insertFrequentItems}, but each pattern contributes its own
         * weight (its count) rather than 1 per occurrence.
         */
        public void insertConditionalFrequentItems(List<ItemsetWithCount> patterns,
                                                   int countRequiredForSupport) {
            Map<Integer, Double> itemCounts = new HashMap<>();

            for (ItemsetWithCount i : patterns) {
                for (Integer item : i.getItems()) {
                    itemCounts.compute(item, (k, v) -> v == null ? i.getCount() : v + i.getCount());
                }
            }

            for (Map.Entry<Integer, Double> e : itemCounts.entrySet()) {
                if (e.getValue() >= countRequiredForSupport) {
                    frequentItemCounts.put(e.getKey(), e.getValue());
                }
            }

            // reuse the shared canonical-ordering logic (was duplicated inline)
            sortFrequentItems();
        }

        public void insertDatum(List<Datum> datums) {
            for (Datum d : datums) {
                // keep only attributes that met the support threshold
                List<Integer> filtered = new ArrayList<>();
                for (Integer attr : d.attributes()) {
                    if (frequentItemCounts.containsKey(attr)) {
                        filtered.add(attr);
                    }
                }

                if (!filtered.isEmpty()) {
                    // sort by descending frequency order before insertion
                    filtered.sort((i1, i2) -> frequentItemOrder.get(i2).compareTo(frequentItemOrder.get(i1)));
                    root.insertTransaction(filtered, 0, 1);
                }
            }
        }

        public void insertConditionalFrequentPatterns(List<ItemsetWithCount> patterns) {
            for (ItemsetWithCount is : patterns) {
                List<Integer> filtered = is.getItems().stream().filter(i -> frequentItemCounts.containsKey(i)).collect(
                        Collectors.toList());
                filtered.sort((i1, i2) -> frequentItemOrder.get(i2).compareTo(frequentItemOrder.get(i1)));
                root.insertTransaction(filtered, 0, is.getCount());
            }
        }

        public void insertTransactions(List<Set<Integer>> transactions) {
            for (Set<Integer> t : transactions) {
                List<Integer> filtered = t.stream().filter(i -> frequentItemCounts.containsKey(i)).collect(
                        Collectors.toList());

                if (!filtered.isEmpty()) {
                    filtered.sort((i1, i2) -> frequentItemOrder.get(i2).compareTo(frequentItemOrder.get(i1)));
                    root.insertTransaction(filtered, 0, 1);
                }
            }
        }

        /**
         * Returns the total support of the given pattern in this tree, or 0 if
         * any of its items is not frequent.
         */
        public int getSupport(Set<Integer> pattern) {
            for (Integer i : pattern) {
                if (!frequentItemCounts.containsKey(i)) {
                    return 0;
                }
            }

            List<Integer> plist = Lists.newArrayList(pattern);
            // traverse bottom to top
            plist.sort((i1, i2) -> frequentItemOrder.get(i1).compareTo(frequentItemOrder.get(i2)));

            int count = 0;

            // walk the header chain of the least-frequent (deepest) item; each
            // chain node roots one candidate path toward the tree root
            FPTreeNode pathHead = nodeHeaders.get(plist.get(0));
            while (pathHead != null) {
                FPTreeNode curNode = pathHead;
                int itemsToFind = plist.size();

                while (curNode != null) {
                    if (pattern.contains(curNode.getItem())) {
                        itemsToFind -= 1;
                    }

                    // found the whole pattern along this path; it occurs as
                    // often as the deepest (head) node does
                    if (itemsToFind == 0) {
                        count += pathHead.count;
                        break;
                    }

                    curNode = curNode.getParent();
                }

                pathHead = pathHead.getNextLink();
            }

            return count;
        }

        List<ItemsetWithCount> mineItemsets(Integer supportCountRequired) {
            List<ItemsetWithCount> singlePathItemsets = new ArrayList<>();
            List<ItemsetWithCount> branchingItemsets = new ArrayList<>();

            // mine single-path itemsets first: walk down from the root until the
            // first node with more than one child
            FPTreeNode curNode = root;
            FPTreeNode nodeOfBranching = null;
            Set<FPTreeNode> singlePathNodes = new HashSet<>();
            while (true) {
                if (curNode.children != null && curNode.children.size() > 1) {
                    nodeOfBranching = curNode;
                    break;
                }

                if (curNode != root) {
                    singlePathNodes.add(curNode);
                }

                if (curNode.children == null || curNode.children.size() == 0) {
                    break;
                } else {
                    curNode = curNode.children.get(0);
                }
            }

            // every non-empty subset of the single path is frequent, with support
            // equal to the minimum count along the subset
            for (Set<FPTreeNode> subset : Sets.powerSet(singlePathNodes)) {
                if (subset.isEmpty()) {
                    continue;
                }

                double minSupportInSubset = -1;
                Set<Integer> items = new HashSet<>();
                for (FPTreeNode n : subset) {
                    items.add(n.getItem());

                    if (minSupportInSubset == -1 || n.getCount() < minSupportInSubset) {
                        minSupportInSubset = n.getCount();
                    }
                }

                assert (minSupportInSubset >= supportCountRequired);
                singlePathItemsets.add(new ItemsetWithCount(items, minSupportInSubset));
            }

            // the entire tree was a single path...
            if (nodeOfBranching == null) {
                return singlePathItemsets;
            }

            // all of the items in the single path will have been mined now
            // due to the descending frequency count of the FPTree structure, so
            // we remove them from consideration in the rest

            // instead of destructively removing the nodes from NodeHeader table
            // which would be valid but would make mining non-idempotent, we
            // instead store the nodes to skip in a separate set
            Set<Integer> alreadyMinedItems = new HashSet<>();
            for (FPTreeNode node : singlePathNodes) {
                alreadyMinedItems.add(node.getItem());
            }

            for (Map.Entry<Integer, FPTreeNode> header : nodeHeaders.entrySet()) {
                if (alreadyMinedItems.contains(header.getKey())) {
                    continue;
                }

                // add the singleton item set
                branchingItemsets.add(new ItemsetWithCount(Sets.newHashSet(header.getKey()),
                                                           frequentItemCounts.get(header.getKey())));

                List<ItemsetWithCount> conditionalPatternBase = new ArrayList<>();

                // walk each "leaf" node
                FPTreeNode conditionalNode = header.getValue();
                while (conditionalNode != null) {
                    final double leafSupport = conditionalNode.getCount();

                    // walk the tree up to the branch node
                    Set<Integer> conditionalPattern = new HashSet<>();
                    FPTreeNode walkNode = conditionalNode.getParent();
                    while (walkNode != nodeOfBranching.getParent() && walkNode != root) {
                        conditionalPattern.add(walkNode.getItem());
                        walkNode = walkNode.getParent();
                    }

                    if (conditionalPattern.size() > 0) {
                        conditionalPatternBase.add(new ItemsetWithCount(conditionalPattern, leafSupport));
                    }

                    conditionalNode = conditionalNode.getNextLink();
                }

                if (conditionalPatternBase.isEmpty()) {
                    continue;
                }

                // build and mine the conditional FPTree
                FPTree conditionalTree = new FPTree();
                conditionalTree.insertConditionalFrequentItems(conditionalPatternBase, supportCountRequired);
                conditionalTree.insertConditionalFrequentPatterns(conditionalPatternBase);
                List<ItemsetWithCount> conditionalFrequentItemsets = conditionalTree.mineItemsets(supportCountRequired);

                if (!conditionalFrequentItemsets.isEmpty()) {
                    // extend each conditional itemset with the item it was conditioned on
                    for (ItemsetWithCount is : conditionalFrequentItemsets) {
                        is.getItems().add(header.getKey());
                    }

                    branchingItemsets.addAll(conditionalFrequentItemsets);
                }
            }

            if (singlePathItemsets.isEmpty()) {
                return branchingItemsets;
            }

            // take the cross product of the mined itemsets
            List<ItemsetWithCount> ret = new ArrayList<>();
            ret.addAll(singlePathItemsets);
            ret.addAll(branchingItemsets);

            for (ItemsetWithCount i : singlePathItemsets) {
                for (ItemsetWithCount j : branchingItemsets) {
                    Set<Integer> combinedItems = new HashSet<>();
                    combinedItems.addAll(i.getItems());
                    combinedItems.addAll(j.getItems());
                    ret.add(new ItemsetWithCount(combinedItems, Math.min(i.getCount(), j.getCount())));
                }
            }

            return ret;
        }
    }

    /** Mines itemsets supported by at least supportRatio * |transactions|. */
    public List<ItemsetWithCount> getItemsetsWithSupportRatio(List<Set<Integer>> transactions,
                                                              Double supportRatio) {
        return getItemsetsWithSupportRatio(transactions, null, supportRatio);
    }

    public List<ItemsetWithCount> getItemsetsWithSupportRatio(List<Set<Integer>> transactions,
                                                              Map<Integer, Double> initialCounts,
                                                              Double supportRatio) {
        return getItemsetsWithSupportCount(transactions, initialCounts, supportRatio * transactions.size());
    }

    public List<ItemsetWithCount> getItemsetsWithSupportCount(List<Set<Integer>> transactions,
                                                              Double supportCount) {
        return getItemsetsWithSupportCount(transactions, null, supportCount);
    }

    public List<ItemsetWithCount> getItemsetsWithSupportCount(List<Set<Integer>> transactions,
                                                              Map<Integer, Double> initialCounts,
                                                              Double supportCount) {
        return getItemsetsWithSupportCount(transactions, initialCounts, supportCount, false);
    }

    // Builds an FPTree over the transactions with the given absolute support.
    protected FPTree constructTree(List<Set<Integer>> transactions, int supportCount) {
        FPTree fp = new FPTree();
        fp.insertFrequentItems(transactions, supportCount);
        fp.insertTransactions(transactions);
        return fp;
    }

    /**
     * Mines frequent itemsets with at least supportCount absolute support.
     *
     * @param transactions   the transactions to mine
     * @param initialCounts  optional precomputed frequent-item counts; when
     *                       non-null they are used verbatim instead of being
     *                       derived from the transactions
     * @param supportCount   minimum absolute support (truncated to an int)
     * @param printTreeDebug whether to dump the loaded tree to the debug log
     * @return the frequent itemsets with their supports
     */
    public List<ItemsetWithCount> getItemsetsWithSupportCount(List<Set<Integer>> transactions,
                                                              Map<Integer, Double> initialCounts,
                                                              Double supportCount,
                                                              boolean printTreeDebug) {
        FPTree fp = new FPTree();

        int countRequiredForSupport = supportCount.intValue();
        log.debug("count required: {}", countRequiredForSupport);

        long st = System.currentTimeMillis();

        Timer.Context context = singleItemCounts.time();
        // BUGFIX: insertFrequentItems was previously invoked again unconditionally
        // after this branch, which was redundant when initialCounts was null and
        // clobbered the caller-supplied counts when it was not.
        if (initialCounts == null) {
            fp.insertFrequentItems(transactions, countRequiredForSupport);
        } else {
            fp.setFrequentCounts(initialCounts);
        }
        context.stop();

        context = insertTransactions.time();
        fp.insertTransactions(transactions);
        context.stop();

        long en = System.currentTimeMillis();
        log.debug("FPTree load: {}", en - st);

        // honor the debug flag (the call was previously commented out,
        // leaving the parameter ignored)
        if (printTreeDebug) {
            fp.printTreeDebug();
        }

        st = System.currentTimeMillis();
        context = fpMine.time();
        List<ItemsetWithCount> ret = fp.mineItemsets(countRequiredForSupport);
        context.stop();
        en = System.currentTimeMillis();
        log.debug("FPTree mine: {}", en - st);

        return ret;
    }

    // ugh, this is a really ugly function sig, but it's efficient
    /**
     * Counts the support of each itemset in toCount over the given transactions,
     * considering only the attributes in targetItems.
     *
     * @param transactions  the data to count over
     * @param initialCounts seed counts per item; items absent here start at zero
     * @param targetItems   the attributes to track while building the count tree
     * @param toCount       the itemsets whose supports should be computed
     * @return one ItemsetWithCount per input itemset, carrying its support
     */
    public List<ItemsetWithCount> getCounts(
            List<Datum> transactions,
            Map<Integer, Double> initialCounts,
            Set<Integer> targetItems,
            List<ItemsetWithCount> toCount) {
        FPTree countTree = new FPTree();

        Map<Integer, Double> frequentCounts = new HashMap<>();
        for (Integer i : targetItems) {
            // default missing items to zero so they are still tracked
            Double initialCount = initialCounts.get(i);
            if (initialCount == null) {
                initialCount = 0.;
            }
            frequentCounts.put(i, initialCount);
        }

        countTree.setFrequentCounts(frequentCounts);
        countTree.insertDatum(transactions);

        List<ItemsetWithCount> ret = new ArrayList<>();
        for (ItemsetWithCount c : toCount) {
            ret.add(new ItemsetWithCount(c.getItems(), countTree.getSupport(c.getItems())));
        }

        return ret;
    }
}
| |
// GENERATED FILE - DO NOT EDIT
package com.google.common.net;
import java.util.Set;
import com.google.common.annotations.GwtCompatible;
import com.google.common.collect.ImmutableSet;
/**
* A generated static class containing public members which provide domain
* name patterns used in determining whether a given domain name is an
* effective top-level domain (TLD).
*/
@GwtCompatible
class TldPatterns {
private TldPatterns() {
// Prevent instantiation.
}
/**
* If a hostname is contained in this set, it is a TLD.
*/
static final Set<String> EXACT = ImmutableSet.of(
"ac",
"com.ac",
"edu.ac",
"gov.ac",
"net.ac",
"mil.ac",
"org.ac",
"ad",
"nom.ad",
"ae",
"co.ae",
"net.ae",
"org.ae",
"sch.ae",
"ac.ae",
"gov.ae",
"mil.ae",
"aero",
"accident-investigation.aero",
"accident-prevention.aero",
"aerobatic.aero",
"aeroclub.aero",
"aerodrome.aero",
"agents.aero",
"aircraft.aero",
"airline.aero",
"airport.aero",
"air-surveillance.aero",
"airtraffic.aero",
"air-traffic-control.aero",
"ambulance.aero",
"amusement.aero",
"association.aero",
"author.aero",
"ballooning.aero",
"broker.aero",
"caa.aero",
"cargo.aero",
"catering.aero",
"certification.aero",
"championship.aero",
"charter.aero",
"civilaviation.aero",
"club.aero",
"conference.aero",
"consultant.aero",
"consulting.aero",
"control.aero",
"council.aero",
"crew.aero",
"design.aero",
"dgca.aero",
"educator.aero",
"emergency.aero",
"engine.aero",
"engineer.aero",
"entertainment.aero",
"equipment.aero",
"exchange.aero",
"express.aero",
"federation.aero",
"flight.aero",
"freight.aero",
"fuel.aero",
"gliding.aero",
"government.aero",
"groundhandling.aero",
"group.aero",
"hanggliding.aero",
"homebuilt.aero",
"insurance.aero",
"journal.aero",
"journalist.aero",
"leasing.aero",
"logistics.aero",
"magazine.aero",
"maintenance.aero",
"marketplace.aero",
"media.aero",
"microlight.aero",
"modelling.aero",
"navigation.aero",
"parachuting.aero",
"paragliding.aero",
"passenger-association.aero",
"pilot.aero",
"press.aero",
"production.aero",
"recreation.aero",
"repbody.aero",
"res.aero",
"research.aero",
"rotorcraft.aero",
"safety.aero",
"scientist.aero",
"services.aero",
"show.aero",
"skydiving.aero",
"software.aero",
"student.aero",
"taxi.aero",
"trader.aero",
"trading.aero",
"trainer.aero",
"union.aero",
"workinggroup.aero",
"works.aero",
"af",
"gov.af",
"com.af",
"org.af",
"net.af",
"edu.af",
"ag",
"com.ag",
"org.ag",
"net.ag",
"co.ag",
"nom.ag",
"ai",
"off.ai",
"com.ai",
"net.ai",
"org.ai",
"al",
"com.al",
"edu.al",
"gov.al",
"mil.al",
"net.al",
"org.al",
"am",
"an",
"com.an",
"net.an",
"org.an",
"edu.an",
"ao",
"ed.ao",
"gv.ao",
"og.ao",
"co.ao",
"pb.ao",
"it.ao",
"aq",
"e164.arpa",
"in-addr.arpa",
"ip6.arpa",
"iris.arpa",
"uri.arpa",
"urn.arpa",
"as",
"gov.as",
"asia",
"at",
"ac.at",
"co.at",
"gv.at",
"or.at",
"biz.at",
"info.at",
"priv.at",
"act.edu.au",
"nsw.edu.au",
"nt.edu.au",
"qld.edu.au",
"sa.edu.au",
"tas.edu.au",
"vic.edu.au",
"wa.edu.au",
"act.gov.au",
"nt.gov.au",
"qld.gov.au",
"sa.gov.au",
"tas.gov.au",
"vic.gov.au",
"wa.gov.au",
"act.au",
"nsw.au",
"nt.au",
"qld.au",
"sa.au",
"tas.au",
"vic.au",
"wa.au",
"aw",
"com.aw",
"ax",
"az",
"com.az",
"net.az",
"int.az",
"gov.az",
"org.az",
"edu.az",
"info.az",
"pp.az",
"mil.az",
"name.az",
"pro.az",
"biz.az",
"ba",
"org.ba",
"net.ba",
"edu.ba",
"gov.ba",
"mil.ba",
"unsa.ba",
"unbi.ba",
"co.ba",
"com.ba",
"rs.ba",
"bb",
"biz.bb",
"com.bb",
"edu.bb",
"gov.bb",
"info.bb",
"net.bb",
"org.bb",
"store.bb",
"be",
"ac.be",
"bf",
"gov.bf",
"bg",
"a.bg",
"b.bg",
"c.bg",
"d.bg",
"e.bg",
"f.bg",
"g.bg",
"h.bg",
"i.bg",
"j.bg",
"k.bg",
"l.bg",
"m.bg",
"n.bg",
"o.bg",
"p.bg",
"q.bg",
"r.bg",
"s.bg",
"t.bg",
"u.bg",
"v.bg",
"w.bg",
"x.bg",
"y.bg",
"z.bg",
"0.bg",
"1.bg",
"2.bg",
"3.bg",
"4.bg",
"5.bg",
"6.bg",
"7.bg",
"8.bg",
"9.bg",
"bh",
"com.bh",
"edu.bh",
"net.bh",
"org.bh",
"gov.bh",
"bi",
"co.bi",
"com.bi",
"edu.bi",
"or.bi",
"org.bi",
"biz",
"bj",
"asso.bj",
"barreau.bj",
"gouv.bj",
"bm",
"com.bm",
"edu.bm",
"gov.bm",
"net.bm",
"org.bm",
"bo",
"com.bo",
"edu.bo",
"gov.bo",
"gob.bo",
"int.bo",
"org.bo",
"net.bo",
"mil.bo",
"tv.bo",
"br",
"adm.br",
"adv.br",
"agr.br",
"am.br",
"arq.br",
"art.br",
"ato.br",
"bio.br",
"blog.br",
"bmd.br",
"can.br",
"cim.br",
"cng.br",
"cnt.br",
"com.br",
"coop.br",
"ecn.br",
"edu.br",
"eng.br",
"esp.br",
"etc.br",
"eti.br",
"far.br",
"flog.br",
"fm.br",
"fnd.br",
"fot.br",
"fst.br",
"g12.br",
"ggf.br",
"gov.br",
"imb.br",
"ind.br",
"inf.br",
"jor.br",
"jus.br",
"lel.br",
"mat.br",
"med.br",
"mil.br",
"mus.br",
"net.br",
"nom.br",
"not.br",
"ntr.br",
"odo.br",
"org.br",
"ppg.br",
"pro.br",
"psc.br",
"psi.br",
"qsl.br",
"rec.br",
"slg.br",
"srv.br",
"tmp.br",
"trd.br",
"tur.br",
"tv.br",
"vet.br",
"vlog.br",
"wiki.br",
"zlg.br",
"bs",
"com.bs",
"net.bs",
"org.bs",
"edu.bs",
"gov.bs",
"bw",
"co.bw",
"org.bw",
"by",
"gov.by",
"mil.by",
"com.by",
"of.by",
"bz",
"com.bz",
"net.bz",
"org.bz",
"edu.bz",
"gov.bz",
"ca",
"ab.ca",
"bc.ca",
"mb.ca",
"nb.ca",
"nf.ca",
"nl.ca",
"ns.ca",
"nt.ca",
"nu.ca",
"on.ca",
"pe.ca",
"qc.ca",
"sk.ca",
"yk.ca",
"gc.ca",
"cat",
"cc",
"cd",
"gov.cd",
"cf",
"cg",
"ch",
"ci",
"org.ci",
"or.ci",
"com.ci",
"co.ci",
"edu.ci",
"ed.ci",
"ac.ci",
"net.ci",
"go.ci",
"asso.ci",
"a\u00e9roport.ci",
"int.ci",
"presse.ci",
"md.ci",
"gouv.ci",
"cl",
"gov.cl",
"gob.cl",
"cm",
"gov.cm",
"cn",
"ac.cn",
"com.cn",
"edu.cn",
"gov.cn",
"net.cn",
"org.cn",
"mil.cn",
"\u516c\u53f8.cn",
"\u7f51\u7edc.cn",
"\u7db2\u7d61.cn",
"ah.cn",
"bj.cn",
"cq.cn",
"fj.cn",
"gd.cn",
"gs.cn",
"gz.cn",
"gx.cn",
"ha.cn",
"hb.cn",
"he.cn",
"hi.cn",
"hl.cn",
"hn.cn",
"jl.cn",
"js.cn",
"jx.cn",
"ln.cn",
"nm.cn",
"nx.cn",
"qh.cn",
"sc.cn",
"sd.cn",
"sh.cn",
"sn.cn",
"sx.cn",
"tj.cn",
"xj.cn",
"xz.cn",
"yn.cn",
"zj.cn",
"hk.cn",
"mo.cn",
"tw.cn",
"co",
"arts.co",
"com.co",
"edu.co",
"firm.co",
"gov.co",
"info.co",
"int.co",
"mil.co",
"net.co",
"nom.co",
"org.co",
"rec.co",
"web.co",
"com",
"ar.com",
"br.com",
"cn.com",
"de.com",
"eu.com",
"gb.com",
"hu.com",
"jpn.com",
"kr.com",
"no.com",
"qc.com",
"ru.com",
"sa.com",
"se.com",
"uk.com",
"us.com",
"uy.com",
"za.com",
"operaunite.com",
"coop",
"cr",
"ac.cr",
"co.cr",
"ed.cr",
"fi.cr",
"go.cr",
"or.cr",
"sa.cr",
"cu",
"com.cu",
"edu.cu",
"org.cu",
"net.cu",
"gov.cu",
"inf.cu",
"cv",
"cx",
"gov.cx",
"cz",
"de",
"dj",
"dk",
"dm",
"com.dm",
"net.dm",
"org.dm",
"edu.dm",
"gov.dm",
"dz",
"com.dz",
"org.dz",
"net.dz",
"gov.dz",
"edu.dz",
"asso.dz",
"pol.dz",
"art.dz",
"ec",
"com.ec",
"info.ec",
"net.ec",
"fin.ec",
"k12.ec",
"med.ec",
"pro.ec",
"org.ec",
"edu.ec",
"gov.ec",
"mil.ec",
"edu",
"ee",
"edu.ee",
"gov.ee",
"riik.ee",
"lib.ee",
"med.ee",
"com.ee",
"pri.ee",
"aip.ee",
"org.ee",
"fie.ee",
"es",
"com.es",
"nom.es",
"org.es",
"gob.es",
"edu.es",
"eu",
"fi",
"aland.fi",
"iki.fi",
"fm",
"fo",
"fr",
"com.fr",
"asso.fr",
"nom.fr",
"prd.fr",
"presse.fr",
"tm.fr",
"aeroport.fr",
"assedic.fr",
"avocat.fr",
"avoues.fr",
"cci.fr",
"chambagri.fr",
"chirurgiens-dentistes.fr",
"experts-comptables.fr",
"geometre-expert.fr",
"gouv.fr",
"greta.fr",
"huissier-justice.fr",
"medecin.fr",
"notaires.fr",
"pharmacien.fr",
"port.fr",
"veterinaire.fr",
"ga",
"gd",
"ge",
"com.ge",
"edu.ge",
"gov.ge",
"org.ge",
"mil.ge",
"net.ge",
"pvt.ge",
"gf",
"gg",
"co.gg",
"org.gg",
"net.gg",
"sch.gg",
"gov.gg",
"gh",
"com.gh",
"edu.gh",
"gov.gh",
"org.gh",
"mil.gh",
"gi",
"com.gi",
"ltd.gi",
"gov.gi",
"mod.gi",
"edu.gi",
"org.gi",
"gl",
"gm",
"ac.gn",
"com.gn",
"edu.gn",
"gov.gn",
"org.gn",
"net.gn",
"gov",
"gp",
"com.gp",
"net.gp",
"mobi.gp",
"edu.gp",
"org.gp",
"asso.gp",
"gq",
"gr",
"com.gr",
"edu.gr",
"net.gr",
"org.gr",
"gov.gr",
"gs",
"gw",
"gy",
"co.gy",
"com.gy",
"net.gy",
"hk",
"com.hk",
"edu.hk",
"gov.hk",
"idv.hk",
"net.hk",
"org.hk",
"\u516c\u53f8.hk",
"\u6559\u80b2.hk",
"\u654e\u80b2.hk",
"\u653f\u5e9c.hk",
"\u500b\u4eba.hk",
"\u4e2a\u4eba.hk",
"\u7b87\u4eba.hk",
"\u7db2\u7edc.hk",
"\u7f51\u7edc.hk",
"\u7ec4\u7e54.hk",
"\u7db2\u7d61.hk",
"\u7f51\u7d61.hk",
"\u7ec4\u7ec7.hk",
"\u7d44\u7e54.hk",
"\u7d44\u7ec7.hk",
"hm",
"hn",
"com.hn",
"edu.hn",
"org.hn",
"net.hn",
"mil.hn",
"gob.hn",
"hr",
"iz.hr",
"from.hr",
"name.hr",
"com.hr",
"ht",
"com.ht",
"shop.ht",
"firm.ht",
"info.ht",
"adult.ht",
"net.ht",
"pro.ht",
"org.ht",
"med.ht",
"art.ht",
"coop.ht",
"pol.ht",
"asso.ht",
"edu.ht",
"rel.ht",
"gouv.ht",
"perso.ht",
"hu",
"co.hu",
"info.hu",
"org.hu",
"priv.hu",
"sport.hu",
"tm.hu",
"2000.hu",
"agrar.hu",
"bolt.hu",
"casino.hu",
"city.hu",
"erotica.hu",
"erotika.hu",
"film.hu",
"forum.hu",
"games.hu",
"hotel.hu",
"ingatlan.hu",
"jogasz.hu",
"konyvelo.hu",
"lakas.hu",
"media.hu",
"news.hu",
"reklam.hu",
"sex.hu",
"shop.hu",
"suli.hu",
"szex.hu",
"tozsde.hu",
"utazas.hu",
"video.hu",
"ie",
"gov.ie",
"im",
"co.im",
"ltd.co.im",
"plc.co.im",
"net.im",
"gov.im",
"org.im",
"nic.im",
"ac.im",
"in",
"co.in",
"firm.in",
"net.in",
"org.in",
"gen.in",
"ind.in",
"nic.in",
"ac.in",
"edu.in",
"res.in",
"gov.in",
"mil.in",
"info",
"int",
"eu.int",
"io",
"com.io",
"iq",
"gov.iq",
"edu.iq",
"mil.iq",
"com.iq",
"org.iq",
"net.iq",
"ir",
"ac.ir",
"co.ir",
"gov.ir",
"id.ir",
"net.ir",
"org.ir",
"sch.ir",
"\u0627\u06cc\u0631\u0627\u0646.ir",
"\u0627\u064a\u0631\u0627\u0646.ir",
"is",
"net.is",
"com.is",
"edu.is",
"gov.is",
"org.is",
"int.is",
"it",
"gov.it",
"edu.it",
"agrigento.it",
"ag.it",
"alessandria.it",
"al.it",
"ancona.it",
"an.it",
"aosta.it",
"aoste.it",
"ao.it",
"arezzo.it",
"ar.it",
"ascoli-piceno.it",
"ascolipiceno.it",
"ap.it",
"asti.it",
"at.it",
"avellino.it",
"av.it",
"bari.it",
"ba.it",
"barlettaandriatrani.it",
"barletta-andria-trani.it",
"belluno.it",
"bl.it",
"benevento.it",
"bn.it",
"bergamo.it",
"bg.it",
"biella.it",
"bi.it",
"bologna.it",
"bo.it",
"bolzano.it",
"bozen.it",
"balsan.it",
"alto-adige.it",
"altoadige.it",
"suedtirol.it",
"bz.it",
"brescia.it",
"bs.it",
"brindisi.it",
"br.it",
"cagliari.it",
"ca.it",
"caltanissetta.it",
"cl.it",
"campobasso.it",
"cb.it",
"caserta.it",
"ce.it",
"catania.it",
"ct.it",
"catanzaro.it",
"cz.it",
"chieti.it",
"ch.it",
"como.it",
"co.it",
"cosenza.it",
"cs.it",
"cremona.it",
"cr.it",
"crotone.it",
"kr.it",
"cuneo.it",
"cn.it",
"enna.it",
"en.it",
"fermo.it",
"ferrara.it",
"fe.it",
"firenze.it",
"florence.it",
"fi.it",
"foggia.it",
"fg.it",
"forli-cesena.it",
"forlicesena.it",
"fc.it",
"frosinone.it",
"fr.it",
"genova.it",
"genoa.it",
"ge.it",
"gorizia.it",
"go.it",
"grosseto.it",
"gr.it",
"imperia.it",
"im.it",
"isernia.it",
"is.it",
"laquila.it",
"aquila.it",
"aq.it",
"la-spezia.it",
"laspezia.it",
"sp.it",
"latina.it",
"lt.it",
"lecce.it",
"le.it",
"lecco.it",
"lc.it",
"livorno.it",
"li.it",
"lodi.it",
"lo.it",
"lucca.it",
"lu.it",
"macerata.it",
"mc.it",
"mantova.it",
"mn.it",
"massa-carrara.it",
"massacarrara.it",
"ms.it",
"matera.it",
"mt.it",
"messina.it",
"me.it",
"milano.it",
"milan.it",
"mi.it",
"modena.it",
"mo.it",
"monza.it",
"napoli.it",
"naples.it",
"na.it",
"novara.it",
"no.it",
"nuoro.it",
"nu.it",
"oristano.it",
"or.it",
"padova.it",
"padua.it",
"pd.it",
"palermo.it",
"pa.it",
"parma.it",
"pr.it",
"pavia.it",
"pv.it",
"perugia.it",
"pg.it",
"pescara.it",
"pe.it",
"pesaro-urbino.it",
"pesarourbino.it",
"pu.it",
"piacenza.it",
"pc.it",
"pisa.it",
"pi.it",
"pistoia.it",
"pt.it",
"pordenone.it",
"pn.it",
"potenza.it",
"pz.it",
"prato.it",
"po.it",
"ragusa.it",
"rg.it",
"ravenna.it",
"ra.it",
"reggio-calabria.it",
"reggiocalabria.it",
"rc.it",
"reggio-emilia.it",
"reggioemilia.it",
"re.it",
"rieti.it",
"ri.it",
"rimini.it",
"rn.it",
"roma.it",
"rome.it",
"rm.it",
"rovigo.it",
"ro.it",
"salerno.it",
"sa.it",
"sassari.it",
"ss.it",
"savona.it",
"sv.it",
"siena.it",
"si.it",
"siracusa.it",
"sr.it",
"sondrio.it",
"so.it",
"taranto.it",
"ta.it",
"teramo.it",
"te.it",
"terni.it",
"tr.it",
"torino.it",
"turin.it",
"to.it",
"trapani.it",
"tp.it",
"trento.it",
"trentino.it",
"tn.it",
"treviso.it",
"tv.it",
"trieste.it",
"ts.it",
"udine.it",
"ud.it",
"varese.it",
"va.it",
"venezia.it",
"venice.it",
"ve.it",
"verbania.it",
"vb.it",
"vercelli.it",
"vc.it",
"verona.it",
"vr.it",
"vibo-valentia.it",
"vibovalentia.it",
"vv.it",
"vicenza.it",
"vi.it",
"viterbo.it",
"vt.it",
"je",
"co.je",
"org.je",
"net.je",
"sch.je",
"gov.je",
"jo",
"com.jo",
"org.jo",
"net.jo",
"edu.jo",
"sch.jo",
"gov.jo",
"mil.jo",
"name.jo",
"jobs",
"jp",
"ac.jp",
"ad.jp",
"co.jp",
"ed.jp",
"go.jp",
"gr.jp",
"lg.jp",
"ne.jp",
"or.jp",
"kg",
"org.kg",
"net.kg",
"com.kg",
"edu.kg",
"gov.kg",
"mil.kg",
"ki",
"edu.ki",
"biz.ki",
"net.ki",
"org.ki",
"gov.ki",
"info.ki",
"com.ki",
"km",
"org.km",
"nom.km",
"gov.km",
"prd.km",
"tm.km",
"edu.km",
"mil.km",
"ass.km",
"com.km",
"coop.km",
"asso.km",
"presse.km",
"medecin.km",
"notaires.km",
"pharmaciens.km",
"veterinaire.km",
"gouv.km",
"kn",
"net.kn",
"org.kn",
"edu.kn",
"gov.kn",
"kr",
"ac.kr",
"co.kr",
"es.kr",
"go.kr",
"hs.kr",
"kg.kr",
"mil.kr",
"ms.kr",
"ne.kr",
"or.kr",
"pe.kr",
"re.kr",
"sc.kr",
"busan.kr",
"chungbuk.kr",
"chungnam.kr",
"daegu.kr",
"daejeon.kr",
"gangwon.kr",
"gwangju.kr",
"gyeongbuk.kr",
"gyeonggi.kr",
"gyeongnam.kr",
"incheon.kr",
"jeju.kr",
"jeonbuk.kr",
"jeonnam.kr",
"seoul.kr",
"ulsan.kr",
"ky",
"edu.ky",
"gov.ky",
"com.ky",
"org.ky",
"net.ky",
"kz",
"org.kz",
"edu.kz",
"net.kz",
"gov.kz",
"mil.kz",
"com.kz",
"la",
"int.la",
"net.la",
"info.la",
"edu.la",
"gov.la",
"per.la",
"com.la",
"org.la",
"c.la",
"com.lb",
"edu.lb",
"gov.lb",
"net.lb",
"org.lb",
"lc",
"com.lc",
"net.lc",
"co.lc",
"org.lc",
"edu.lc",
"gov.lc",
"li",
"lk",
"gov.lk",
"sch.lk",
"net.lk",
"int.lk",
"com.lk",
"org.lk",
"edu.lk",
"ngo.lk",
"soc.lk",
"web.lk",
"ltd.lk",
"assn.lk",
"grp.lk",
"hotel.lk",
"local",
"com.lr",
"edu.lr",
"gov.lr",
"org.lr",
"net.lr",
"ls",
"co.ls",
"org.ls",
"lt",
"gov.lt",
"lu",
"lv",
"com.lv",
"edu.lv",
"gov.lv",
"org.lv",
"mil.lv",
"id.lv",
"net.lv",
"asn.lv",
"conf.lv",
"ly",
"com.ly",
"net.ly",
"gov.ly",
"plc.ly",
"edu.ly",
"sch.ly",
"med.ly",
"org.ly",
"id.ly",
"ma",
"co.ma",
"net.ma",
"gov.ma",
"org.ma",
"ac.ma",
"press.ma",
"mc",
"tm.mc",
"asso.mc",
"md",
"me",
"co.me",
"net.me",
"org.me",
"edu.me",
"ac.me",
"gov.me",
"its.me",
"priv.me",
"mg",
"org.mg",
"nom.mg",
"gov.mg",
"prd.mg",
"tm.mg",
"edu.mg",
"mil.mg",
"com.mg",
"mh",
"mil",
"mk",
"com.mk",
"org.mk",
"net.mk",
"edu.mk",
"gov.mk",
"inf.mk",
"name.mk",
"ml",
"com.ml",
"edu.ml",
"gouv.ml",
"gov.ml",
"net.ml",
"org.ml",
"presse.ml",
"mn",
"gov.mn",
"edu.mn",
"org.mn",
"mo",
"com.mo",
"net.mo",
"org.mo",
"edu.mo",
"gov.mo",
"mobi",
"mp",
"mq",
"mr",
"gov.mr",
"ms",
"mu",
"com.mu",
"net.mu",
"org.mu",
"gov.mu",
"ac.mu",
"co.mu",
"or.mu",
"museum",
"academy.museum",
"agriculture.museum",
"air.museum",
"airguard.museum",
"alabama.museum",
"alaska.museum",
"amber.museum",
"ambulance.museum",
"american.museum",
"americana.museum",
"americanantiques.museum",
"americanart.museum",
"amsterdam.museum",
"and.museum",
"annefrank.museum",
"anthro.museum",
"anthropology.museum",
"antiques.museum",
"aquarium.museum",
"arboretum.museum",
"archaeological.museum",
"archaeology.museum",
"architecture.museum",
"art.museum",
"artanddesign.museum",
"artcenter.museum",
"artdeco.museum",
"arteducation.museum",
"artgallery.museum",
"arts.museum",
"artsandcrafts.museum",
"asmatart.museum",
"assassination.museum",
"assisi.museum",
"association.museum",
"astronomy.museum",
"atlanta.museum",
"austin.museum",
"australia.museum",
"automotive.museum",
"aviation.museum",
"axis.museum",
"badajoz.museum",
"baghdad.museum",
"bahn.museum",
"bale.museum",
"baltimore.museum",
"barcelona.museum",
"baseball.museum",
"basel.museum",
"baths.museum",
"bauern.museum",
"beauxarts.museum",
"beeldengeluid.museum",
"bellevue.museum",
"bergbau.museum",
"berkeley.museum",
"berlin.museum",
"bern.museum",
"bible.museum",
"bilbao.museum",
"bill.museum",
"birdart.museum",
"birthplace.museum",
"bonn.museum",
"boston.museum",
"botanical.museum",
"botanicalgarden.museum",
"botanicgarden.museum",
"botany.museum",
"brandywinevalley.museum",
"brasil.museum",
"bristol.museum",
"british.museum",
"britishcolumbia.museum",
"broadcast.museum",
"brunel.museum",
"brussel.museum",
"brussels.museum",
"bruxelles.museum",
"building.museum",
"burghof.museum",
"bus.museum",
"bushey.museum",
"cadaques.museum",
"california.museum",
"cambridge.museum",
"can.museum",
"canada.museum",
"capebreton.museum",
"carrier.museum",
"cartoonart.museum",
"casadelamoneda.museum",
"castle.museum",
"castres.museum",
"celtic.museum",
"center.museum",
"chattanooga.museum",
"cheltenham.museum",
"chesapeakebay.museum",
"chicago.museum",
"children.museum",
"childrens.museum",
"childrensgarden.museum",
"chiropractic.museum",
"chocolate.museum",
"christiansburg.museum",
"cincinnati.museum",
"cinema.museum",
"circus.museum",
"civilisation.museum",
"civilization.museum",
"civilwar.museum",
"clinton.museum",
"clock.museum",
"coal.museum",
"coastaldefence.museum",
"cody.museum",
"coldwar.museum",
"collection.museum",
"colonialwilliamsburg.museum",
"coloradoplateau.museum",
"columbia.museum",
"columbus.museum",
"communication.museum",
"communications.museum",
"community.museum",
"computer.museum",
"computerhistory.museum",
"comunica\u00e7\u00f5es.museum",
"contemporary.museum",
"contemporaryart.museum",
"convent.museum",
"copenhagen.museum",
"corporation.museum",
"correios-e-telecomunica\u00e7\u00f5es.museum",
"corvette.museum",
"costume.museum",
"countryestate.museum",
"county.museum",
"crafts.museum",
"cranbrook.museum",
"creation.museum",
"cultural.museum",
"culturalcenter.museum",
"culture.museum",
"cyber.museum",
"cymru.museum",
"dali.museum",
"dallas.museum",
"database.museum",
"ddr.museum",
"decorativearts.museum",
"delaware.museum",
"delmenhorst.museum",
"denmark.museum",
"depot.museum",
"design.museum",
"detroit.museum",
"dinosaur.museum",
"discovery.museum",
"dolls.museum",
"donostia.museum",
"durham.museum",
"eastafrica.museum",
"eastcoast.museum",
"education.museum",
"educational.museum",
"egyptian.museum",
"eisenbahn.museum",
"elburg.museum",
"elvendrell.museum",
"embroidery.museum",
"encyclopedic.museum",
"england.museum",
"entomology.museum",
"environment.museum",
"environmentalconservation.museum",
"epilepsy.museum",
"essex.museum",
"estate.museum",
"ethnology.museum",
"exeter.museum",
"exhibition.museum",
"family.museum",
"farm.museum",
"farmequipment.museum",
"farmers.museum",
"farmstead.museum",
"field.museum",
"figueres.museum",
"filatelia.museum",
"film.museum",
"fineart.museum",
"finearts.museum",
"finland.museum",
"flanders.museum",
"florida.museum",
"force.museum",
"fortmissoula.museum",
"fortworth.museum",
"foundation.museum",
"francaise.museum",
"frankfurt.museum",
"franziskaner.museum",
"freemasonry.museum",
"freiburg.museum",
"fribourg.museum",
"frog.museum",
"fundacio.museum",
"furniture.museum",
"gallery.museum",
"garden.museum",
"gateway.museum",
"geelvinck.museum",
"gemological.museum",
"geology.museum",
"georgia.museum",
"giessen.museum",
"glas.museum",
"glass.museum",
"gorge.museum",
"grandrapids.museum",
"graz.museum",
"guernsey.museum",
"halloffame.museum",
"hamburg.museum",
"handson.museum",
"harvestcelebration.museum",
"hawaii.museum",
"health.museum",
"heimatunduhren.museum",
"hellas.museum",
"helsinki.museum",
"hembygdsforbund.museum",
"heritage.museum",
"histoire.museum",
"historical.museum",
"historicalsociety.museum",
"historichouses.museum",
"historisch.museum",
"historisches.museum",
"history.museum",
"historyofscience.museum",
"horology.museum",
"house.museum",
"humanities.museum",
"illustration.museum",
"imageandsound.museum",
"indian.museum",
"indiana.museum",
"indianapolis.museum",
"indianmarket.museum",
"intelligence.museum",
"interactive.museum",
"iraq.museum",
"iron.museum",
"isleofman.museum",
"jamison.museum",
"jefferson.museum",
"jerusalem.museum",
"jewelry.museum",
"jewish.museum",
"jewishart.museum",
"jfk.museum",
"journalism.museum",
"judaica.museum",
"judygarland.museum",
"juedisches.museum",
"juif.museum",
"karate.museum",
"karikatur.museum",
"kids.museum",
"koebenhavn.museum",
"koeln.museum",
"kunst.museum",
"kunstsammlung.museum",
"kunstunddesign.museum",
"labor.museum",
"labour.museum",
"lajolla.museum",
"lancashire.museum",
"landes.museum",
"lans.museum",
"l\u00e4ns.museum",
"larsson.museum",
"lewismiller.museum",
"lincoln.museum",
"linz.museum",
"living.museum",
"livinghistory.museum",
"localhistory.museum",
"london.museum",
"losangeles.museum",
"louvre.museum",
"loyalist.museum",
"lucerne.museum",
"luxembourg.museum",
"luzern.museum",
"mad.museum",
"madrid.museum",
"mallorca.museum",
"manchester.museum",
"mansion.museum",
"mansions.museum",
"manx.museum",
"marburg.museum",
"maritime.museum",
"maritimo.museum",
"maryland.museum",
"marylhurst.museum",
"media.museum",
"medical.museum",
"medizinhistorisches.museum",
"meeres.museum",
"memorial.museum",
"mesaverde.museum",
"michigan.museum",
"midatlantic.museum",
"military.museum",
"mill.museum",
"miners.museum",
"mining.museum",
"minnesota.museum",
"missile.museum",
"missoula.museum",
"modern.museum",
"moma.museum",
"money.museum",
"monmouth.museum",
"monticello.museum",
"montreal.museum",
"moscow.museum",
"motorcycle.museum",
"muenchen.museum",
"muenster.museum",
"mulhouse.museum",
"muncie.museum",
"museet.museum",
"museumcenter.museum",
"museumvereniging.museum",
"music.museum",
"national.museum",
"nationalfirearms.museum",
"nationalheritage.museum",
"nativeamerican.museum",
"naturalhistory.museum",
"naturalhistorymuseum.museum",
"naturalsciences.museum",
"nature.museum",
"naturhistorisches.museum",
"natuurwetenschappen.museum",
"naumburg.museum",
"naval.museum",
"nebraska.museum",
"neues.museum",
"newhampshire.museum",
"newjersey.museum",
"newmexico.museum",
"newport.museum",
"newspaper.museum",
"newyork.museum",
"niepce.museum",
"norfolk.museum",
"north.museum",
"nrw.museum",
"nuernberg.museum",
"nuremberg.museum",
"nyc.museum",
"nyny.museum",
"oceanographic.museum",
"oceanographique.museum",
"omaha.museum",
"online.museum",
"ontario.museum",
"openair.museum",
"oregon.museum",
"oregontrail.museum",
"otago.museum",
"oxford.museum",
"pacific.museum",
"paderborn.museum",
"palace.museum",
"paleo.museum",
"palmsprings.museum",
"panama.museum",
"paris.museum",
"pasadena.museum",
"pharmacy.museum",
"philadelphia.museum",
"philadelphiaarea.museum",
"philately.museum",
"phoenix.museum",
"photography.museum",
"pilots.museum",
"pittsburgh.museum",
"planetarium.museum",
"plantation.museum",
"plants.museum",
"plaza.museum",
"portal.museum",
"portland.museum",
"portlligat.museum",
"posts-and-telecommunications.museum",
"preservation.museum",
"presidio.museum",
"press.museum",
"project.museum",
"public.museum",
"pubol.museum",
"quebec.museum",
"railroad.museum",
"railway.museum",
"research.museum",
"resistance.museum",
"riodejaneiro.museum",
"rochester.museum",
"rockart.museum",
"roma.museum",
"russia.museum",
"saintlouis.museum",
"salem.museum",
"salvadordali.museum",
"salzburg.museum",
"sandiego.museum",
"sanfrancisco.museum",
"santabarbara.museum",
"santacruz.museum",
"santafe.museum",
"saskatchewan.museum",
"satx.museum",
"savannahga.museum",
"schlesisches.museum",
"schoenbrunn.museum",
"schokoladen.museum",
"school.museum",
"schweiz.museum",
"science.museum",
"scienceandhistory.museum",
"scienceandindustry.museum",
"sciencecenter.museum",
"sciencecenters.museum",
"science-fiction.museum",
"sciencehistory.museum",
"sciences.museum",
"sciencesnaturelles.museum",
"scotland.museum",
"seaport.museum",
"settlement.museum",
"settlers.museum",
"shell.museum",
"sherbrooke.museum",
"sibenik.museum",
"silk.museum",
"ski.museum",
"skole.museum",
"society.museum",
"sologne.museum",
"soundandvision.museum",
"southcarolina.museum",
"southwest.museum",
"space.museum",
"spy.museum",
"square.museum",
"stadt.museum",
"stalbans.museum",
"starnberg.museum",
"state.museum",
"stateofdelaware.museum",
"station.museum",
"steam.museum",
"steiermark.museum",
"stjohn.museum",
"stockholm.museum",
"stpetersburg.museum",
"stuttgart.museum",
"suisse.museum",
"surgeonshall.museum",
"surrey.museum",
"svizzera.museum",
"sweden.museum",
"sydney.museum",
"tank.museum",
"tcm.museum",
"technology.museum",
"telekommunikation.museum",
"television.museum",
"texas.museum",
"textile.museum",
"theater.museum",
"time.museum",
"timekeeping.museum",
"topology.museum",
"torino.museum",
"touch.museum",
"town.museum",
"transport.museum",
"tree.museum",
"trolley.museum",
"trust.museum",
"trustee.museum",
"uhren.museum",
"ulm.museum",
"undersea.museum",
"university.museum",
"usa.museum",
"usantiques.museum",
"usarts.museum",
"uscountryestate.museum",
"usculture.museum",
"usdecorativearts.museum",
"usgarden.museum",
"ushistory.museum",
"ushuaia.museum",
"uslivinghistory.museum",
"utah.museum",
"uvic.museum",
"valley.museum",
"vantaa.museum",
"versailles.museum",
"viking.museum",
"village.museum",
"virginia.museum",
"virtual.museum",
"virtuel.museum",
"vlaanderen.museum",
"volkenkunde.museum",
"wales.museum",
"wallonie.museum",
"war.museum",
"washingtondc.museum",
"watchandclock.museum",
"watch-and-clock.museum",
"western.museum",
"westfalen.museum",
"whaling.museum",
"wildlife.museum",
"williamsburg.museum",
"windmill.museum",
"workshop.museum",
"york.museum",
"yorkshire.museum",
"yosemite.museum",
"youth.museum",
"zoological.museum",
"zoology.museum",
"\u05d9\u05e8\u05d5\u05e9\u05dc\u05d9\u05dd.museum",
"\u0438\u043a\u043e\u043c.museum",
"mv",
"aero.mv",
"biz.mv",
"com.mv",
"coop.mv",
"edu.mv",
"gov.mv",
"info.mv",
"int.mv",
"mil.mv",
"museum.mv",
"name.mv",
"net.mv",
"org.mv",
"pro.mv",
"mw",
"ac.mw",
"biz.mw",
"co.mw",
"com.mw",
"coop.mw",
"edu.mw",
"gov.mw",
"int.mw",
"museum.mw",
"net.mw",
"org.mw",
"mx",
"com.mx",
"org.mx",
"gob.mx",
"edu.mx",
"net.mx",
"my",
"com.my",
"net.my",
"org.my",
"gov.my",
"edu.my",
"mil.my",
"name.my",
"na",
"info.na",
"pro.na",
"name.na",
"school.na",
"or.na",
"dr.na",
"us.na",
"mx.na",
"ca.na",
"in.na",
"cc.na",
"tv.na",
"ws.na",
"mobi.na",
"co.na",
"com.na",
"org.na",
"name",
"nc",
"asso.nc",
"ne",
"net",
"gb.net",
"se.net",
"uk.net",
"za.net",
"nf",
"com.nf",
"net.nf",
"per.nf",
"rec.nf",
"web.nf",
"arts.nf",
"firm.nf",
"info.nf",
"other.nf",
"store.nf",
"ac.ng",
"com.ng",
"edu.ng",
"gov.ng",
"net.ng",
"org.ng",
"nl",
"no",
"fhs.no",
"vgs.no",
"fylkesbibl.no",
"folkebibl.no",
"museum.no",
"idrett.no",
"priv.no",
"mil.no",
"stat.no",
"dep.no",
"kommune.no",
"herad.no",
"aa.no",
"ah.no",
"bu.no",
"fm.no",
"hl.no",
"hm.no",
"jan-mayen.no",
"mr.no",
"nl.no",
"nt.no",
"of.no",
"ol.no",
"oslo.no",
"rl.no",
"sf.no",
"st.no",
"svalbard.no",
"tm.no",
"tr.no",
"va.no",
"vf.no",
"gs.aa.no",
"gs.ah.no",
"gs.bu.no",
"gs.fm.no",
"gs.hl.no",
"gs.hm.no",
"gs.jan-mayen.no",
"gs.mr.no",
"gs.nl.no",
"gs.nt.no",
"gs.of.no",
"gs.ol.no",
"gs.oslo.no",
"gs.rl.no",
"gs.sf.no",
"gs.st.no",
"gs.svalbard.no",
"gs.tm.no",
"gs.tr.no",
"gs.va.no",
"gs.vf.no",
"akrehamn.no",
"\u00e5krehamn.no",
"algard.no",
"\u00e5lg\u00e5rd.no",
"arna.no",
"brumunddal.no",
"bryne.no",
"bronnoysund.no",
"br\u00f8nn\u00f8ysund.no",
"drobak.no",
"dr\u00f8bak.no",
"egersund.no",
"fetsund.no",
"floro.no",
"flor\u00f8.no",
"fredrikstad.no",
"hokksund.no",
"honefoss.no",
"h\u00f8nefoss.no",
"jessheim.no",
"jorpeland.no",
"j\u00f8rpeland.no",
"kirkenes.no",
"kopervik.no",
"krokstadelva.no",
"langevag.no",
"langev\u00e5g.no",
"leirvik.no",
"mjondalen.no",
"mj\u00f8ndalen.no",
"mo-i-rana.no",
"mosjoen.no",
"mosj\u00f8en.no",
"nesoddtangen.no",
"orkanger.no",
"osoyro.no",
"os\u00f8yro.no",
"raholt.no",
"r\u00e5holt.no",
"sandnessjoen.no",
"sandnessj\u00f8en.no",
"skedsmokorset.no",
"slattum.no",
"spjelkavik.no",
"stathelle.no",
"stavern.no",
"stjordalshalsen.no",
"stj\u00f8rdalshalsen.no",
"tananger.no",
"tranby.no",
"vossevangen.no",
"afjord.no",
"\u00e5fjord.no",
"agdenes.no",
"al.no",
"\u00e5l.no",
"alesund.no",
"\u00e5lesund.no",
"alstahaug.no",
"alta.no",
"\u00e1lt\u00e1.no",
"alaheadju.no",
"\u00e1laheadju.no",
"alvdal.no",
"amli.no",
"\u00e5mli.no",
"amot.no",
"\u00e5mot.no",
"andebu.no",
"andoy.no",
"and\u00f8y.no",
"andasuolo.no",
"ardal.no",
"\u00e5rdal.no",
"aremark.no",
"arendal.no",
"\u00e5s.no",
"aseral.no",
"\u00e5seral.no",
"asker.no",
"askim.no",
"askvoll.no",
"askoy.no",
"ask\u00f8y.no",
"asnes.no",
"\u00e5snes.no",
"audnedaln.no",
"aukra.no",
"aure.no",
"aurland.no",
"aurskog-holand.no",
"aurskog-h\u00f8land.no",
"austevoll.no",
"austrheim.no",
"averoy.no",
"aver\u00f8y.no",
"balestrand.no",
"ballangen.no",
"balat.no",
"b\u00e1l\u00e1t.no",
"balsfjord.no",
"bahccavuotna.no",
"b\u00e1hccavuotna.no",
"bamble.no",
"bardu.no",
"beardu.no",
"beiarn.no",
"bajddar.no",
"b\u00e1jddar.no",
"baidar.no",
"b\u00e1id\u00e1r.no",
"berg.no",
"bergen.no",
"berlevag.no",
"berlev\u00e5g.no",
"bearalvahki.no",
"bearalv\u00e1hki.no",
"bindal.no",
"birkenes.no",
"bjarkoy.no",
"bjark\u00f8y.no",
"bjerkreim.no",
"bjugn.no",
"bodo.no",
"bod\u00f8.no",
"badaddja.no",
"b\u00e5d\u00e5ddj\u00e5.no",
"budejju.no",
"bokn.no",
"bremanger.no",
"bronnoy.no",
"br\u00f8nn\u00f8y.no",
"bygland.no",
"bykle.no",
"barum.no",
"b\u00e6rum.no",
"bo.telemark.no",
"b\u00f8.telemark.no",
"bo.nordland.no",
"b\u00f8.nordland.no",
"bievat.no",
"biev\u00e1t.no",
"bomlo.no",
"b\u00f8mlo.no",
"batsfjord.no",
"b\u00e5tsfjord.no",
"bahcavuotna.no",
"b\u00e1hcavuotna.no",
"dovre.no",
"drammen.no",
"drangedal.no",
"dyroy.no",
"dyr\u00f8y.no",
"donna.no",
"d\u00f8nna.no",
"eid.no",
"eidfjord.no",
"eidsberg.no",
"eidskog.no",
"eidsvoll.no",
"eigersund.no",
"elverum.no",
"enebakk.no",
"engerdal.no",
"etne.no",
"etnedal.no",
"evenes.no",
"evenassi.no",
"even\u00e1\u0161\u0161i.no",
"evje-og-hornnes.no",
"farsund.no",
"fauske.no",
"fuossko.no",
"fuoisku.no",
"fedje.no",
"fet.no",
"finnoy.no",
"finn\u00f8y.no",
"fitjar.no",
"fjaler.no",
"fjell.no",
"flakstad.no",
"flatanger.no",
"flekkefjord.no",
"flesberg.no",
"flora.no",
"fla.no",
"fl\u00e5.no",
"folldal.no",
"forsand.no",
"fosnes.no",
"frei.no",
"frogn.no",
"froland.no",
"frosta.no",
"frana.no",
"fr\u00e6na.no",
"froya.no",
"fr\u00f8ya.no",
"fusa.no",
"fyresdal.no",
"forde.no",
"f\u00f8rde.no",
"gamvik.no",
"gangaviika.no",
"g\u00e1\u014bgaviika.no",
"gaular.no",
"gausdal.no",
"gildeskal.no",
"gildesk\u00e5l.no",
"giske.no",
"gjemnes.no",
"gjerdrum.no",
"gjerstad.no",
"gjesdal.no",
"gjovik.no",
"gj\u00f8vik.no",
"gloppen.no",
"gol.no",
"gran.no",
"grane.no",
"granvin.no",
"gratangen.no",
"grimstad.no",
"grong.no",
"kraanghke.no",
"kr\u00e5anghke.no",
"grue.no",
"gulen.no",
"hadsel.no",
"halden.no",
"halsa.no",
"hamar.no",
"hamaroy.no",
"habmer.no",
"h\u00e1bmer.no",
"hapmir.no",
"h\u00e1pmir.no",
"hammerfest.no",
"hammarfeasta.no",
"h\u00e1mm\u00e1rfeasta.no",
"haram.no",
"hareid.no",
"harstad.no",
"hasvik.no",
"aknoluokta.no",
"\u00e1k\u014boluokta.no",
"hattfjelldal.no",
"aarborte.no",
"haugesund.no",
"hemne.no",
"hemnes.no",
"hemsedal.no",
"heroy.more-og-romsdal.no",
"her\u00f8y.m\u00f8re-og-romsdal.no",
"heroy.nordland.no",
"her\u00f8y.nordland.no",
"hitra.no",
"hjartdal.no",
"hjelmeland.no",
"hobol.no",
"hob\u00f8l.no",
"hof.no",
"hol.no",
"hole.no",
"holmestrand.no",
"holtalen.no",
"holt\u00e5len.no",
"hornindal.no",
"horten.no",
"hurdal.no",
"hurum.no",
"hvaler.no",
"hyllestad.no",
"hagebostad.no",
"h\u00e6gebostad.no",
"hoyanger.no",
"h\u00f8yanger.no",
"hoylandet.no",
"h\u00f8ylandet.no",
"ha.no",
"h\u00e5.no",
"ibestad.no",
"inderoy.no",
"inder\u00f8y.no",
"iveland.no",
"jevnaker.no",
"jondal.no",
"jolster.no",
"j\u00f8lster.no",
"karasjok.no",
"karasjohka.no",
"k\u00e1r\u00e1\u0161johka.no",
"karlsoy.no",
"galsa.no",
"g\u00e1ls\u00e1.no",
"karmoy.no",
"karm\u00f8y.no",
"kautokeino.no",
"guovdageaidnu.no",
"klepp.no",
"klabu.no",
"kl\u00e6bu.no",
"kongsberg.no",
"kongsvinger.no",
"kragero.no",
"krager\u00f8.no",
"kristiansand.no",
"kristiansund.no",
"krodsherad.no",
"kr\u00f8dsherad.no",
"kvalsund.no",
"rahkkeravju.no",
"r\u00e1hkker\u00e1vju.no",
"kvam.no",
"kvinesdal.no",
"kvinnherad.no",
"kviteseid.no",
"kvitsoy.no",
"kvits\u00f8y.no",
"kvafjord.no",
"kv\u00e6fjord.no",
"giehtavuoatna.no",
"kvanangen.no",
"kv\u00e6nangen.no",
"navuotna.no",
"n\u00e1vuotna.no",
"kafjord.no",
"k\u00e5fjord.no",
"gaivuotna.no",
"g\u00e1ivuotna.no",
"larvik.no",
"lavangen.no",
"lavagis.no",
"loabat.no",
"loab\u00e1t.no",
"lebesby.no",
"davvesiida.no",
"leikanger.no",
"leirfjord.no",
"leka.no",
"leksvik.no",
"lenvik.no",
"leangaviika.no",
"lea\u014bgaviika.no",
"lesja.no",
"levanger.no",
"lier.no",
"lierne.no",
"lillehammer.no",
"lillesand.no",
"lindesnes.no",
"lindas.no",
"lind\u00e5s.no",
"lom.no",
"loppa.no",
"lahppi.no",
"l\u00e1hppi.no",
"lund.no",
"lunner.no",
"luroy.no",
"lur\u00f8y.no",
"luster.no",
"lyngdal.no",
"lyngen.no",
"ivgu.no",
"lardal.no",
"lerdal.no",
"l\u00e6rdal.no",
"lodingen.no",
"l\u00f8dingen.no",
"lorenskog.no",
"l\u00f8renskog.no",
"loten.no",
"l\u00f8ten.no",
"malvik.no",
"masoy.no",
"m\u00e5s\u00f8y.no",
"muosat.no",
"muos\u00e1t.no",
"mandal.no",
"marker.no",
"marnardal.no",
"masfjorden.no",
"meland.no",
"meldal.no",
"melhus.no",
"meloy.no",
"mel\u00f8y.no",
"meraker.no",
"mer\u00e5ker.no",
"moareke.no",
"mo\u00e5reke.no",
"midsund.no",
"midtre-gauldal.no",
"modalen.no",
"modum.no",
"molde.no",
"moskenes.no",
"moss.no",
"mosvik.no",
"malselv.no",
"m\u00e5lselv.no",
"malatvuopmi.no",
"m\u00e1latvuopmi.no",
"namdalseid.no",
"aejrie.no",
"namsos.no",
"namsskogan.no",
"naamesjevuemie.no",
"n\u00e5\u00e5mesjevuemie.no",
"laakesvuemie.no",
"nannestad.no",
"narvik.no",
"narviika.no",
"naustdal.no",
"nedre-eiker.no",
"nes.akershus.no",
"nes.buskerud.no",
"nesna.no",
"nesodden.no",
"nesseby.no",
"unjarga.no",
"unj\u00e1rga.no",
"nesset.no",
"nissedal.no",
"nittedal.no",
"nord-aurdal.no",
"nord-fron.no",
"nord-odal.no",
"norddal.no",
"nordkapp.no",
"davvenjarga.no",
"davvenj\u00e1rga.no",
"nordre-land.no",
"nordreisa.no",
"raisa.no",
"r\u00e1isa.no",
"nore-og-uvdal.no",
"notodden.no",
"naroy.no",
"n\u00e6r\u00f8y.no",
"notteroy.no",
"n\u00f8tter\u00f8y.no",
"odda.no",
"oksnes.no",
"\u00f8ksnes.no",
"oppdal.no",
"oppegard.no",
"oppeg\u00e5rd.no",
"orkdal.no",
"orland.no",
"\u00f8rland.no",
"orskog.no",
"\u00f8rskog.no",
"orsta.no",
"\u00f8rsta.no",
"os.hedmark.no",
"os.hordaland.no",
"osen.no",
"osteroy.no",
"oster\u00f8y.no",
"ostre-toten.no",
"\u00f8stre-toten.no",
"overhalla.no",
"ovre-eiker.no",
"\u00f8vre-eiker.no",
"oyer.no",
"\u00f8yer.no",
"oygarden.no",
"\u00f8ygarden.no",
"oystre-slidre.no",
"\u00f8ystre-slidre.no",
"porsanger.no",
"porsangu.no",
"pors\u00e1\u014bgu.no",
"porsgrunn.no",
"radoy.no",
"rad\u00f8y.no",
"rakkestad.no",
"rana.no",
"ruovat.no",
"randaberg.no",
"rauma.no",
"rendalen.no",
"rennebu.no",
"rennesoy.no",
"rennes\u00f8y.no",
"rindal.no",
"ringebu.no",
"ringerike.no",
"ringsaker.no",
"rissa.no",
"risor.no",
"ris\u00f8r.no",
"roan.no",
"rollag.no",
"rygge.no",
"ralingen.no",
"r\u00e6lingen.no",
"rodoy.no",
"r\u00f8d\u00f8y.no",
"romskog.no",
"r\u00f8mskog.no",
"roros.no",
"r\u00f8ros.no",
"rost.no",
"r\u00f8st.no",
"royken.no",
"r\u00f8yken.no",
"royrvik.no",
"r\u00f8yrvik.no",
"rade.no",
"r\u00e5de.no",
"salangen.no",
"siellak.no",
"saltdal.no",
"salat.no",
"s\u00e1l\u00e1t.no",
"s\u00e1lat.no",
"samnanger.no",
"sande.more-og-romsdal.no",
"sande.m\u00f8re-og-romsdal.no",
"sande.vestfold.no",
"sandefjord.no",
"sandnes.no",
"sandoy.no",
"sand\u00f8y.no",
"sarpsborg.no",
"sauda.no",
"sauherad.no",
"sel.no",
"selbu.no",
"selje.no",
"seljord.no",
"sigdal.no",
"siljan.no",
"sirdal.no",
"skaun.no",
"skedsmo.no",
"ski.no",
"skien.no",
"skiptvet.no",
"skjervoy.no",
"skjerv\u00f8y.no",
"skierva.no",
"skierv\u00e1.no",
"skjak.no",
"skj\u00e5k.no",
"skodje.no",
"skanland.no",
"sk\u00e5nland.no",
"skanit.no",
"sk\u00e1nit.no",
"smola.no",
"sm\u00f8la.no",
"snillfjord.no",
"snasa.no",
"sn\u00e5sa.no",
"snoasa.no",
"snaase.no",
"sn\u00e5ase.no",
"sogndal.no",
"sokndal.no",
"sola.no",
"solund.no",
"songdalen.no",
"sortland.no",
"spydeberg.no",
"stange.no",
"stavanger.no",
"steigen.no",
"steinkjer.no",
"stjordal.no",
"stj\u00f8rdal.no",
"stokke.no",
"stor-elvdal.no",
"stord.no",
"stordal.no",
"storfjord.no",
"omasvuotna.no",
"strand.no",
"stranda.no",
"stryn.no",
"sula.no",
"suldal.no",
"sund.no",
"sunndal.no",
"surnadal.no",
"sveio.no",
"svelvik.no",
"sykkylven.no",
"sogne.no",
"s\u00f8gne.no",
"somna.no",
"s\u00f8mna.no",
"sondre-land.no",
"s\u00f8ndre-land.no",
"sor-aurdal.no",
"s\u00f8r-aurdal.no",
"sor-fron.no",
"s\u00f8r-fron.no",
"sor-odal.no",
"s\u00f8r-odal.no",
"sor-varanger.no",
"s\u00f8r-varanger.no",
"matta-varjjat.no",
"m\u00e1tta-v\u00e1rjjat.no",
"sorfold.no",
"s\u00f8rfold.no",
"sorreisa.no",
"s\u00f8rreisa.no",
"sorum.no",
"s\u00f8rum.no",
"tana.no",
"deatnu.no",
"time.no",
"tingvoll.no",
"tinn.no",
"tjeldsund.no",
"dielddanuorri.no",
"tjome.no",
"tj\u00f8me.no",
"tokke.no",
"tolga.no",
"torsken.no",
"tranoy.no",
"tran\u00f8y.no",
"tromso.no",
"troms\u00f8.no",
"tromsa.no",
"romsa.no",
"trondheim.no",
"troandin.no",
"trysil.no",
"trana.no",
"tr\u00e6na.no",
"trogstad.no",
"tr\u00f8gstad.no",
"tvedestrand.no",
"tydal.no",
"tynset.no",
"tysfjord.no",
"divtasvuodna.no",
"divttasvuotna.no",
"tysnes.no",
"tysvar.no",
"tysv\u00e6r.no",
"tonsberg.no",
"t\u00f8nsberg.no",
"ullensaker.no",
"ullensvang.no",
"ulvik.no",
"utsira.no",
"vadso.no",
"vads\u00f8.no",
"cahcesuolo.no",
"\u010d\u00e1hcesuolo.no",
"vaksdal.no",
"valle.no",
"vang.no",
"vanylven.no",
"vardo.no",
"vard\u00f8.no",
"varggat.no",
"v\u00e1rgg\u00e1t.no",
"vefsn.no",
"vaapste.no",
"vega.no",
"vegarshei.no",
"veg\u00e5rshei.no",
"vennesla.no",
"verdal.no",
"verran.no",
"vestby.no",
"vestnes.no",
"vestre-slidre.no",
"vestre-toten.no",
"vestvagoy.no",
"vestv\u00e5g\u00f8y.no",
"vevelstad.no",
"vik.no",
"vikna.no",
"vindafjord.no",
"volda.no",
"voss.no",
"varoy.no",
"v\u00e6r\u00f8y.no",
"vagan.no",
"v\u00e5gan.no",
"voagat.no",
"vagsoy.no",
"v\u00e5gs\u00f8y.no",
"vaga.no",
"v\u00e5g\u00e5.no",
"valer.ostfold.no",
"v\u00e5ler.\u00f8stfold.no",
"valer.hedmark.no",
"v\u00e5ler.hedmark.no",
"nr",
"biz.nr",
"info.nr",
"gov.nr",
"edu.nr",
"org.nr",
"net.nr",
"com.nr",
"nu",
"org",
"ae.org",
"za.org",
"pa",
"ac.pa",
"gob.pa",
"com.pa",
"org.pa",
"sld.pa",
"edu.pa",
"net.pa",
"ing.pa",
"abo.pa",
"med.pa",
"nom.pa",
"pe",
"edu.pe",
"gob.pe",
"nom.pe",
"mil.pe",
"org.pe",
"com.pe",
"net.pe",
"pf",
"com.pf",
"org.pf",
"edu.pf",
"ph",
"com.ph",
"net.ph",
"org.ph",
"gov.ph",
"edu.ph",
"ngo.ph",
"mil.ph",
"i.ph",
"pk",
"com.pk",
"net.pk",
"edu.pk",
"org.pk",
"fam.pk",
"biz.pk",
"web.pk",
"gov.pk",
"gob.pk",
"gok.pk",
"gon.pk",
"gop.pk",
"gos.pk",
"info.pk",
"pl",
"aid.pl",
"agro.pl",
"atm.pl",
"auto.pl",
"biz.pl",
"com.pl",
"edu.pl",
"gmina.pl",
"gsm.pl",
"info.pl",
"mail.pl",
"miasta.pl",
"media.pl",
"mil.pl",
"net.pl",
"nieruchomosci.pl",
"nom.pl",
"org.pl",
"pc.pl",
"powiat.pl",
"priv.pl",
"realestate.pl",
"rel.pl",
"sex.pl",
"shop.pl",
"sklep.pl",
"sos.pl",
"szkola.pl",
"targi.pl",
"tm.pl",
"tourism.pl",
"travel.pl",
"turystyka.pl",
"6bone.pl",
"art.pl",
"mbone.pl",
"gov.pl",
"uw.gov.pl",
"um.gov.pl",
"ug.gov.pl",
"upow.gov.pl",
"starostwo.gov.pl",
"so.gov.pl",
"sr.gov.pl",
"po.gov.pl",
"pa.gov.pl",
"ngo.pl",
"irc.pl",
"usenet.pl",
"augustow.pl",
"babia-gora.pl",
"bedzin.pl",
"beskidy.pl",
"bialowieza.pl",
"bialystok.pl",
"bielawa.pl",
"bieszczady.pl",
"boleslawiec.pl",
"bydgoszcz.pl",
"bytom.pl",
"cieszyn.pl",
"czeladz.pl",
"czest.pl",
"dlugoleka.pl",
"elblag.pl",
"elk.pl",
"glogow.pl",
"gniezno.pl",
"gorlice.pl",
"grajewo.pl",
"ilawa.pl",
"jaworzno.pl",
"jelenia-gora.pl",
"jgora.pl",
"kalisz.pl",
"kazimierz-dolny.pl",
"karpacz.pl",
"kartuzy.pl",
"kaszuby.pl",
"katowice.pl",
"kepno.pl",
"ketrzyn.pl",
"klodzko.pl",
"kobierzyce.pl",
"kolobrzeg.pl",
"konin.pl",
"konskowola.pl",
"kutno.pl",
"lapy.pl",
"lebork.pl",
"legnica.pl",
"lezajsk.pl",
"limanowa.pl",
"lomza.pl",
"lowicz.pl",
"lubin.pl",
"lukow.pl",
"malbork.pl",
"malopolska.pl",
"mazowsze.pl",
"mazury.pl",
"mielec.pl",
"mielno.pl",
"mragowo.pl",
"naklo.pl",
"nowaruda.pl",
"nysa.pl",
"olawa.pl",
"olecko.pl",
"olkusz.pl",
"olsztyn.pl",
"opoczno.pl",
"opole.pl",
"ostroda.pl",
"ostroleka.pl",
"ostrowiec.pl",
"ostrowwlkp.pl",
"pila.pl",
"pisz.pl",
"podhale.pl",
"podlasie.pl",
"polkowice.pl",
"pomorze.pl",
"pomorskie.pl",
"prochowice.pl",
"pruszkow.pl",
"przeworsk.pl",
"pulawy.pl",
"radom.pl",
"rawa-maz.pl",
"rybnik.pl",
"rzeszow.pl",
"sanok.pl",
"sejny.pl",
"siedlce.pl",
"slask.pl",
"slupsk.pl",
"sosnowiec.pl",
"stalowa-wola.pl",
"skoczow.pl",
"starachowice.pl",
"stargard.pl",
"suwalki.pl",
"swidnica.pl",
"swiebodzin.pl",
"swinoujscie.pl",
"szczecin.pl",
"szczytno.pl",
"tarnobrzeg.pl",
"tgory.pl",
"turek.pl",
"tychy.pl",
"ustka.pl",
"walbrzych.pl",
"warmia.pl",
"warszawa.pl",
"waw.pl",
"wegrow.pl",
"wielun.pl",
"wlocl.pl",
"wloclawek.pl",
"wodzislaw.pl",
"wolomin.pl",
"wroclaw.pl",
"zachpomor.pl",
"zagan.pl",
"zarow.pl",
"zgora.pl",
"zgorzelec.pl",
"gda.pl",
"gdansk.pl",
"gdynia.pl",
"med.pl",
"sopot.pl",
"gliwice.pl",
"poznan.pl",
"wroc.pl",
"zakopane.pl",
"pn",
"gov.pn",
"co.pn",
"org.pn",
"edu.pn",
"net.pn",
"pr",
"com.pr",
"net.pr",
"org.pr",
"gov.pr",
"edu.pr",
"isla.pr",
"pro.pr",
"biz.pr",
"info.pr",
"name.pr",
"est.pr",
"prof.pr",
"ac.pr",
"pro",
"aca.pro",
"bar.pro",
"cpa.pro",
"jur.pro",
"law.pro",
"med.pro",
"eng.pro",
"ps",
"edu.ps",
"gov.ps",
"sec.ps",
"plo.ps",
"com.ps",
"org.ps",
"net.ps",
"pt",
"net.pt",
"gov.pt",
"org.pt",
"edu.pt",
"int.pt",
"publ.pt",
"com.pt",
"nome.pt",
"pw",
"co.pw",
"ne.pw",
"or.pw",
"ed.pw",
"go.pw",
"belau.pw",
"re",
"com.re",
"asso.re",
"nom.re",
"ro",
"com.ro",
"org.ro",
"tm.ro",
"nt.ro",
"nom.ro",
"info.ro",
"rec.ro",
"arts.ro",
"firm.ro",
"store.ro",
"www.ro",
"rs",
"co.rs",
"org.rs",
"edu.rs",
"ac.rs",
"gov.rs",
"in.rs",
"ru",
"ac.ru",
"com.ru",
"edu.ru",
"int.ru",
"net.ru",
"org.ru",
"pp.ru",
"adygeya.ru",
"altai.ru",
"amur.ru",
"arkhangelsk.ru",
"astrakhan.ru",
"bashkiria.ru",
"belgorod.ru",
"bir.ru",
"bryansk.ru",
"buryatia.ru",
"cbg.ru",
"chel.ru",
"chelyabinsk.ru",
"chita.ru",
"chukotka.ru",
"chuvashia.ru",
"dagestan.ru",
"dudinka.ru",
"e-burg.ru",
"grozny.ru",
"irkutsk.ru",
"ivanovo.ru",
"izhevsk.ru",
"jar.ru",
"joshkar-ola.ru",
"kalmykia.ru",
"kaluga.ru",
"kamchatka.ru",
"karelia.ru",
"kazan.ru",
"kchr.ru",
"kemerovo.ru",
"khabarovsk.ru",
"khakassia.ru",
"khv.ru",
"kirov.ru",
"koenig.ru",
"komi.ru",
"kostroma.ru",
"krasnoyarsk.ru",
"kuban.ru",
"kurgan.ru",
"kursk.ru",
"lipetsk.ru",
"magadan.ru",
"mari.ru",
"mari-el.ru",
"marine.ru",
"mordovia.ru",
"mosreg.ru",
"msk.ru",
"murmansk.ru",
"nalchik.ru",
"nnov.ru",
"nov.ru",
"novosibirsk.ru",
"nsk.ru",
"omsk.ru",
"orenburg.ru",
"oryol.ru",
"palana.ru",
"penza.ru",
"perm.ru",
"pskov.ru",
"ptz.ru",
"rnd.ru",
"ryazan.ru",
"sakhalin.ru",
"samara.ru",
"saratov.ru",
"simbirsk.ru",
"smolensk.ru",
"spb.ru",
"stavropol.ru",
"stv.ru",
"surgut.ru",
"tambov.ru",
"tatarstan.ru",
"tom.ru",
"tomsk.ru",
"tsaritsyn.ru",
"tsk.ru",
"tula.ru",
"tuva.ru",
"tver.ru",
"tyumen.ru",
"udm.ru",
"udmurtia.ru",
"ulan-ude.ru",
"vladikavkaz.ru",
"vladimir.ru",
"vladivostok.ru",
"volgograd.ru",
"vologda.ru",
"voronezh.ru",
"vrn.ru",
"vyatka.ru",
"yakutia.ru",
"yamal.ru",
"yaroslavl.ru",
"yekaterinburg.ru",
"yuzhno-sakhalinsk.ru",
"amursk.ru",
"baikal.ru",
"cmw.ru",
"fareast.ru",
"jamal.ru",
"kms.ru",
"k-uralsk.ru",
"kustanai.ru",
"kuzbass.ru",
"magnitka.ru",
"mytis.ru",
"nakhodka.ru",
"nkz.ru",
"norilsk.ru",
"oskol.ru",
"pyatigorsk.ru",
"rubtsovsk.ru",
"snz.ru",
"syzran.ru",
"vdonsk.ru",
"zgrad.ru",
"gov.ru",
"mil.ru",
"test.ru",
"rw",
"gov.rw",
"net.rw",
"edu.rw",
"ac.rw",
"com.rw",
"co.rw",
"int.rw",
"mil.rw",
"gouv.rw",
"sa",
"com.sa",
"net.sa",
"org.sa",
"gov.sa",
"med.sa",
"pub.sa",
"edu.sa",
"sch.sa",
"sb",
"com.sb",
"edu.sb",
"gov.sb",
"net.sb",
"org.sb",
"sc",
"com.sc",
"gov.sc",
"net.sc",
"org.sc",
"edu.sc",
"sd",
"com.sd",
"net.sd",
"org.sd",
"edu.sd",
"med.sd",
"gov.sd",
"info.sd",
"se",
"a.se",
"ac.se",
"b.se",
"bd.se",
"brand.se",
"c.se",
"d.se",
"e.se",
"f.se",
"fh.se",
"fhsk.se",
"fhv.se",
"g.se",
"h.se",
"i.se",
"k.se",
"komforb.se",
"kommunalforbund.se",
"komvux.se",
"l.se",
"lanbib.se",
"m.se",
"n.se",
"naturbruksgymn.se",
"o.se",
"org.se",
"p.se",
"parti.se",
"pp.se",
"press.se",
"r.se",
"s.se",
"sshn.se",
"t.se",
"tm.se",
"u.se",
"w.se",
"x.se",
"y.se",
"z.se",
"sg",
"com.sg",
"net.sg",
"org.sg",
"gov.sg",
"edu.sg",
"per.sg",
"sh",
"si",
"sk",
"sl",
"com.sl",
"net.sl",
"edu.sl",
"gov.sl",
"org.sl",
"sm",
"sn",
"art.sn",
"com.sn",
"edu.sn",
"gouv.sn",
"org.sn",
"perso.sn",
"univ.sn",
"sr",
"st",
"co.st",
"com.st",
"consulado.st",
"edu.st",
"embaixada.st",
"gov.st",
"mil.st",
"net.st",
"org.st",
"principe.st",
"saotome.st",
"store.st",
"su",
"sy",
"edu.sy",
"gov.sy",
"net.sy",
"mil.sy",
"com.sy",
"org.sy",
"sz",
"co.sz",
"ac.sz",
"org.sz",
"tc",
"td",
"tel",
"tf",
"tg",
"th",
"ac.th",
"co.th",
"go.th",
"in.th",
"mi.th",
"net.th",
"or.th",
"tj",
"ac.tj",
"biz.tj",
"co.tj",
"com.tj",
"edu.tj",
"go.tj",
"gov.tj",
"int.tj",
"mil.tj",
"name.tj",
"net.tj",
"nic.tj",
"org.tj",
"test.tj",
"web.tj",
"tk",
"tl",
"gov.tl",
"tm",
"tn",
"com.tn",
"ens.tn",
"fin.tn",
"gov.tn",
"ind.tn",
"intl.tn",
"nat.tn",
"net.tn",
"org.tn",
"info.tn",
"perso.tn",
"tourism.tn",
"edunet.tn",
"rnrt.tn",
"rns.tn",
"rnu.tn",
"mincom.tn",
"agrinet.tn",
"defense.tn",
"turen.tn",
"to",
"com.to",
"gov.to",
"net.to",
"org.to",
"edu.to",
"mil.to",
"gov.nc.tr",
"travel",
"tt",
"co.tt",
"com.tt",
"org.tt",
"net.tt",
"biz.tt",
"info.tt",
"pro.tt",
"int.tt",
"coop.tt",
"jobs.tt",
"mobi.tt",
"travel.tt",
"museum.tt",
"aero.tt",
"name.tt",
"gov.tt",
"edu.tt",
"tv",
"tw",
"edu.tw",
"gov.tw",
"mil.tw",
"com.tw",
"net.tw",
"org.tw",
"idv.tw",
"game.tw",
"ebiz.tw",
"club.tw",
"\u7db2\u8def.tw",
"\u7d44\u7e54.tw",
"\u5546\u696d.tw",
"ac.tz",
"co.tz",
"go.tz",
"ne.tz",
"or.tz",
"ua",
"com.ua",
"edu.ua",
"gov.ua",
"in.ua",
"net.ua",
"org.ua",
"cherkassy.ua",
"chernigov.ua",
"chernovtsy.ua",
"ck.ua",
"cn.ua",
"crimea.ua",
"cv.ua",
"dn.ua",
"dnepropetrovsk.ua",
"donetsk.ua",
"dp.ua",
"if.ua",
"ivano-frankivsk.ua",
"kh.ua",
"kharkov.ua",
"kherson.ua",
"khmelnitskiy.ua",
"kiev.ua",
"kirovograd.ua",
"km.ua",
"kr.ua",
"ks.ua",
"kv.ua",
"lg.ua",
"lugansk.ua",
"lutsk.ua",
"lviv.ua",
"mk.ua",
"nikolaev.ua",
"od.ua",
"odessa.ua",
"pl.ua",
"poltava.ua",
"rovno.ua",
"rv.ua",
"sebastopol.ua",
"sumy.ua",
"te.ua",
"ternopil.ua",
"uzhgorod.ua",
"vinnica.ua",
"vn.ua",
"zaporizhzhe.ua",
"zp.ua",
"zhitomir.ua",
"zt.ua",
"ug",
"co.ug",
"ac.ug",
"sc.ug",
"go.ug",
"ne.ug",
"or.ug",
"us",
"dni.us",
"fed.us",
"isa.us",
"kids.us",
"nsn.us",
"ak.us",
"al.us",
"ar.us",
"as.us",
"az.us",
"ca.us",
"co.us",
"ct.us",
"dc.us",
"de.us",
"fl.us",
"ga.us",
"gu.us",
"hi.us",
"ia.us",
"id.us",
"il.us",
"in.us",
"ks.us",
"ky.us",
"la.us",
"ma.us",
"md.us",
"me.us",
"mi.us",
"mn.us",
"mo.us",
"ms.us",
"mt.us",
"nc.us",
"nd.us",
"ne.us",
"nh.us",
"nj.us",
"nm.us",
"nv.us",
"ny.us",
"oh.us",
"ok.us",
"or.us",
"pa.us",
"pr.us",
"ri.us",
"sc.us",
"sd.us",
"tn.us",
"tx.us",
"ut.us",
"vi.us",
"vt.us",
"va.us",
"wa.us",
"wi.us",
"wv.us",
"wy.us",
"uz",
"com.uz",
"co.uz",
"va",
"vc",
"com.vc",
"net.vc",
"org.vc",
"gov.vc",
"mil.vc",
"edu.vc",
"vg",
"vi",
"co.vi",
"com.vi",
"k12.vi",
"net.vi",
"org.vi",
"vn",
"com.vn",
"net.vn",
"org.vn",
"edu.vn",
"gov.vn",
"int.vn",
"ac.vn",
"biz.vn",
"info.vn",
"name.vn",
"pro.vn",
"health.vn",
"vu",
"ws",
"com.ws",
"net.ws",
"org.ws",
"gov.ws",
"edu.ws",
"\u0627\u0645\u0627\u0631\u0627\u062a",
"\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629",
"\u0440\u0444",
"\u0645\u0635\u0631"
);
  /**
   * Names whose immediate children are effective TLDs (public suffixes).
   *
   * <p>If a hostname is not in the {@code EXCLUDED} set, and removing its
   * leftmost component yields a name contained in this set, then the
   * hostname itself is treated as a TLD. For example, {@code anything.ar}
   * is a TLD because {@code "ar"} is listed here — unless the full name
   * appears in {@code EXCLUDED} (e.g. {@code nic.ar}).
   *
   * <p>Entries are lower-case; internationalized entries use Unicode
   * escapes. The set is immutable and safe for concurrent reads.
   */
  static final Set<String> UNDER = ImmutableSet.of(
      "ar",
      "au",
      "bd",
      "bn",
      "bt",
      "ck",
      "cy",
      "do",
      "eg",
      "er",
      "et",
      "fj",
      "fk",
      "gt",
      "gu",
      "id",
      "il",
      "jm",
      "aichi.jp",
      "akita.jp",
      "aomori.jp",
      "chiba.jp",
      "ehime.jp",
      "fukui.jp",
      "fukuoka.jp",
      "fukushima.jp",
      "gifu.jp",
      "gunma.jp",
      "hiroshima.jp",
      "hokkaido.jp",
      "hyogo.jp",
      "ibaraki.jp",
      "ishikawa.jp",
      "iwate.jp",
      "kagawa.jp",
      "kagoshima.jp",
      "kanagawa.jp",
      "kawasaki.jp",
      "kitakyushu.jp",
      "kobe.jp",
      "kochi.jp",
      "kumamoto.jp",
      "kyoto.jp",
      "mie.jp",
      "miyagi.jp",
      "miyazaki.jp",
      "nagano.jp",
      "nagasaki.jp",
      "nagoya.jp",
      "nara.jp",
      "niigata.jp",
      "oita.jp",
      "okayama.jp",
      "okinawa.jp",
      "osaka.jp",
      "saga.jp",
      "saitama.jp",
      "sapporo.jp",
      "sendai.jp",
      "shiga.jp",
      "shimane.jp",
      "shizuoka.jp",
      "tochigi.jp",
      "tokushima.jp",
      "tokyo.jp",
      "tottori.jp",
      "toyama.jp",
      "wakayama.jp",
      "yamagata.jp",
      "yamaguchi.jp",
      "yamanashi.jp",
      "yokohama.jp",
      "ke",
      "kh",
      "kw",
      "mm",
      "mt",
      "mz",
      "ni",
      "np",
      "nz",
      "om",
      "pg",
      "py",
      "qa",
      "sv",
      "tr",
      "uk",
      "sch.uk",
      "uy",
      "ve",
      "ye",
      "yu",
      "za",
      "zm",
      "zw"
  );
  /**
   * Exceptions to the {@code UNDER} rule.
   *
   * <p>Each element here would pass the {@code UNDER} test (its parent
   * domain is listed in {@code UNDER}), but is a registrable domain
   * rather than a TLD — e.g. {@code nic.ar} under {@code ar}, or
   * {@code metro.tokyo.jp} under {@code tokyo.jp} — and is thus excluded
   * from consideration.
   *
   * <p>The set is immutable and safe for concurrent reads.
   */
  static final Set<String> EXCLUDED = ImmutableSet.of(
      "congresodelalengua3.ar",
      "educ.ar",
      "gobiernoelectronico.ar",
      "mecon.ar",
      "nacion.ar",
      "nic.ar",
      "promocion.ar",
      "retina.ar",
      "uba.ar",
      "metro.tokyo.jp",
      "pref.aichi.jp",
      "pref.akita.jp",
      "pref.aomori.jp",
      "pref.chiba.jp",
      "pref.ehime.jp",
      "pref.fukui.jp",
      "pref.fukuoka.jp",
      "pref.fukushima.jp",
      "pref.gifu.jp",
      "pref.gunma.jp",
      "pref.hiroshima.jp",
      "pref.hokkaido.jp",
      "pref.hyogo.jp",
      "pref.ibaraki.jp",
      "pref.ishikawa.jp",
      "pref.iwate.jp",
      "pref.kagawa.jp",
      "pref.kagoshima.jp",
      "pref.kanagawa.jp",
      "pref.kochi.jp",
      "pref.kumamoto.jp",
      "pref.kyoto.jp",
      "pref.mie.jp",
      "pref.miyagi.jp",
      "pref.miyazaki.jp",
      "pref.nagano.jp",
      "pref.nagasaki.jp",
      "pref.nara.jp",
      "pref.niigata.jp",
      "pref.oita.jp",
      "pref.okayama.jp",
      "pref.okinawa.jp",
      "pref.osaka.jp",
      "pref.saga.jp",
      "pref.saitama.jp",
      "pref.shiga.jp",
      "pref.shimane.jp",
      "pref.shizuoka.jp",
      "pref.tochigi.jp",
      "pref.tokushima.jp",
      "pref.tottori.jp",
      "pref.toyama.jp",
      "pref.wakayama.jp",
      "pref.yamagata.jp",
      "pref.yamaguchi.jp",
      "pref.yamanashi.jp",
      "city.chiba.jp",
      "city.fukuoka.jp",
      "city.hiroshima.jp",
      "city.kawasaki.jp",
      "city.kitakyushu.jp",
      "city.kobe.jp",
      "city.kyoto.jp",
      "city.nagoya.jp",
      "city.niigata.jp",
      "city.okayama.jp",
      "city.osaka.jp",
      "city.saitama.jp",
      "city.sapporo.jp",
      "city.sendai.jp",
      "city.shizuoka.jp",
      "city.yokohama.jp",
      "bl.uk",
      "british-library.uk",
      "icnet.uk",
      "jet.uk",
      "nel.uk",
      "nhs.uk",
      "nls.uk",
      "national-library-scotland.uk",
      "parliament.uk"
  );
}
| |
package net.cupmanager.jangular.nodes;
import java.io.Serializable;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import net.cupmanager.jangular.Scope;
import net.cupmanager.jangular.compiler.CompilerSession;
import net.cupmanager.jangular.compiler.JangularCompilerUtils;
import net.cupmanager.jangular.compiler.templateloader.NoSuchScopeFieldException;
import net.cupmanager.jangular.exceptions.CompileExpressionException;
import net.cupmanager.jangular.exceptions.EvaluationException;
import net.cupmanager.jangular.injection.EvaluationContext;
import org.mvel2.CompileException;
import org.mvel2.MVEL;
import org.mvel2.ParserConfiguration;
import org.mvel2.ParserContext;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;
import org.objectweb.asm.signature.SignatureReader;
import org.objectweb.asm.signature.SignatureVisitor;
import org.objectweb.asm.signature.SignatureWriter;
public class RepeatNode extends JangularNode {
public static class RepeatNodeScope extends Scope {
public void iterate(Scope parent, int i, Object object){}
public void iterate(int i, Object object){}
}
private String varName;
private Serializable listExpression;
private JangularNode node;
private ParserContext pc;
private Class<? extends RepeatNodeScope> nodeScopeClass;
private String listExpressionString;
private String listVarName;
public RepeatNode(String varName, Serializable listExpression, JangularNode node) {
this.varName = varName;
this.listExpression = listExpression;
this.node = node;
}
public RepeatNode(String expression, JangularNode node) {
String[] parts = expression.split(" in ");
this.varName = parts[0].trim();
this.listExpressionString = parts[1];
int indexOfIf = listExpressionString.indexOf(" if ");
if (indexOfIf > -1) {
this.listVarName = listExpressionString.substring(0, indexOfIf);
listExpressionString = "($ in " + listExpressionString + ")";
} else {
this.listVarName = listExpressionString;
}
listExpressionString = listExpressionString.replace(listVarName, "this."+listVarName);
this.node = node;
}
private RepeatNode() {
}
@Override
public synchronized void eval(Scope scope, StringBuilder sb, EvaluationContext context, EvaluationSession session) throws EvaluationException {
try {
Collection<?> list = (Collection<?>)MVEL.executeExpression(listExpression, scope);
RepeatNodeScope nodeScope = nodeScopeClass.newInstance();
if (list != null) {
int i = 0;
for (Object o : list) {
if( i == 0 ){
nodeScope.iterate(scope, i, o);
} else {
nodeScope.iterate(i, o);
}
session.eval(node, nodeScope, sb, context);
i++;
}
}
} catch (InstantiationException e) {
throw new EvaluationException(node, e);
} catch (IllegalAccessException e) {
throw new EvaluationException(node, e);
} catch (RuntimeException e) {
System.err.println("Exception for expression: " + listExpressionString);
throw e;
}
}
@Override
public Collection<String> getReferencedVariables() throws CompileExpressionException {
Set<String> variables = new HashSet<String>();
variables.addAll(node.getReferencedVariables());
// if listExpression is like "this.base.years", add "base" to variables.
int thisIndex = listExpressionString.indexOf("this.");
String e = listExpressionString.substring(thisIndex+5);
int idx = e.indexOf(".");
if (idx < 0) idx=e.length();
variables.add(e.substring(0, idx));
// variables.addAll(pc.getInputs().keySet());
variables.remove("$index");
variables.remove(varName);
variables.remove("$");
return variables;
}
public static AtomicInteger repeatScopeSuffix = new AtomicInteger();
@Override
public void compileScope(Class<? extends Scope> parentScopeClass,
Class<? extends EvaluationContext> evaluationContextClass,
CompilerSession session) throws NoSuchScopeFieldException, CompileExpressionException {
Class<?> varType;
try {
ParserConfiguration conf = new ParserConfiguration();
conf.setClassLoader(session.getClassLoader());
this.pc = new ParserContext(conf);
pc.withInput(varName, Iterable.class);
pc.setStrictTypeEnforcement(true);
pc.addInput("this", parentScopeClass);
this.listExpression = MVEL.compileExpression("" + listExpressionString , pc);
varType = MVEL.analyze("this." + listVarName + ".iterator().next()", pc);
} catch (CompileException e ) {
System.err.println("Tried to analyze: this."+listVarName);
throw new CompileExpressionException(e);
}
String className = "RepeatScope" + (repeatScopeSuffix.incrementAndGet());
String parentClassName = parentScopeClass.getName().replace('.', '/');
Class<? extends RepeatNodeScope> cl = createRepeatScopeClass(parentScopeClass, session, varType, className, parentClassName);
this.nodeScopeClass = cl;
node.compileScope(cl, evaluationContextClass, session);
}
private Class<? extends RepeatNodeScope> createRepeatScopeClass(
Class<? extends Scope> parentScopeClass, CompilerSession session,
Class<?> varType, String className, String parentClassName) throws NoSuchScopeFieldException, CompileExpressionException {
ClassWriter cw = new ClassWriter(0);
FieldVisitor fv;
MethodVisitor mv;
cw.visit(Opcodes.V1_5, Opcodes.ACC_PUBLIC + Opcodes.ACC_SUPER, className, null, RepeatNodeScope.class.getName().replace('.', '/'), null);
fv = cw.visitField(Opcodes.ACC_PUBLIC, "$index", "I", null, null);
fv.visitEnd();
fv = cw.visitField(Opcodes.ACC_PUBLIC, varName, Type.getDescriptor(varType), null, null);
fv.visitEnd();
for (String field : getReferencedVariables()) {
try {
Field f = parentScopeClass.getField(field);
Type type = Type.getType(f.getType());
java.lang.reflect.Type t = f.getGenericType();
fv = cw.visitField(Opcodes.ACC_PUBLIC, field, type.getDescriptor(), getSignature(t), null);
fv.visitEnd();
} catch (NoSuchFieldException e) {
throw new NoSuchScopeFieldException(e);
} catch (SecurityException e) {
throw new NoSuchScopeFieldException(e);
}
}
// CONSTRUCTOR
mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "<init>", "()V", null, null);
mv.visitCode();
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitMethodInsn(Opcodes.INVOKESPECIAL, RepeatNodeScope.class.getName().replace('.', '/'), "<init>", "()V");
mv.visitInsn(Opcodes.RETURN);
mv.visitMaxs(1, 1);
mv.visitEnd();
// ITERATE()
mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "iterate", "(Lnet/cupmanager/jangular/Scope;ILjava/lang/Object;)V", null, null);
mv.visitCode();
mv.visitVarInsn(Opcodes.ALOAD, 1);
mv.visitTypeInsn(Opcodes.CHECKCAST, parentClassName);
mv.visitVarInsn(Opcodes.ASTORE, 4);
for (String field : getReferencedVariables()) {
try {
Type type = Type.getType(parentScopeClass.getField(field).getType());
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitVarInsn(Opcodes.ALOAD, 4);
mv.visitFieldInsn(Opcodes.GETFIELD, parentClassName, field, type.getDescriptor());
mv.visitFieldInsn(Opcodes.PUTFIELD, className, field, type.getDescriptor());
} catch (NoSuchFieldException e) {
throw new NoSuchScopeFieldException(e);
} catch (SecurityException e) {
throw new NoSuchScopeFieldException(e);
}
}
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitVarInsn(Opcodes.ILOAD, 2);
mv.visitFieldInsn(Opcodes.PUTFIELD, className, "$index", "I");
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitVarInsn(Opcodes.ALOAD, 3);
mv.visitTypeInsn(Opcodes.CHECKCAST, Type.getInternalName(varType));
mv.visitFieldInsn(Opcodes.PUTFIELD, className, varName, Type.getDescriptor(varType));
mv.visitInsn(Opcodes.RETURN);
mv.visitMaxs(2, 5);
mv.visitEnd();
cw.visitEnd();
// ITERATE2()
mv = cw.visitMethod(Opcodes.ACC_PUBLIC, "iterate", "(ILjava/lang/Object;)V", null, null);
mv.visitCode();
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitVarInsn(Opcodes.ILOAD, 1);
mv.visitFieldInsn(Opcodes.PUTFIELD, className, "$index", "I");
mv.visitVarInsn(Opcodes.ALOAD, 0);
mv.visitVarInsn(Opcodes.ALOAD, 2);
mv.visitTypeInsn(Opcodes.CHECKCAST, Type.getInternalName(varType));
mv.visitFieldInsn(Opcodes.PUTFIELD, className, varName, Type.getDescriptor(varType));
mv.visitInsn(Opcodes.RETURN);
mv.visitMaxs(2, 3);
mv.visitEnd();
cw.visitEnd();
Class<? extends RepeatNodeScope> cl = JangularCompilerUtils.loadScopeClass(session.getClassLoader(), cw.toByteArray(), className);
return cl;
}
public static String getSignature(java.lang.reflect.Type t) {
String signature = null;
if (t instanceof Class) {
signature = Type.getType((Class)t).getDescriptor();
} else if (t instanceof ParameterizedType) {
ParameterizedType par_t = (ParameterizedType) t;
java.lang.reflect.Type[] generictypes = par_t.getActualTypeArguments();
String s = "";
for (java.lang.reflect.Type gt : generictypes) {
s += getSignature(gt);
}
Type type = Type.getType((Class) par_t.getRawType());
String descr = type.getDescriptor();
descr = descr.substring(0, descr.length()-1);
signature = descr + "<"+s+">" + ";";
} else {
throw new IllegalArgumentException("t was unknown type: " + t.getClass());
}
return signature;
}
@Override
public JangularNode clone() {
RepeatNode rn = new RepeatNode();
rn.varName = varName;
rn.listExpression = listExpression;
rn.node = node.clone();
rn.pc = pc;
rn.nodeScopeClass = nodeScopeClass;
rn.listExpressionString = listExpressionString;
rn.listVarName = listVarName;
return rn;
}
public String toString() {
return getClass() + " repeat: " + varName + " in " + listExpressionString;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.regionserver.HStoreFile.BULKLOAD_TIME_KEY;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.MultithreadedTestUtil.RepeatingTestThread;
import org.apache.hadoop.hbase.MultithreadedTestUtil.TestContext;
import org.apache.hadoop.hbase.StartTestingClusterOption;
import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
import org.apache.hadoop.hbase.regionserver.wal.TestWALActionsListener;
import org.apache.hadoop.hbase.testclassification.LargeTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.tool.BulkLoadHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.wal.WAL;
import org.apache.hadoop.hbase.wal.WALEdit;
import org.apache.hadoop.hbase.wal.WALKey;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
/**
 * Tests bulk loading of HFiles and shows the atomicity or lack of atomicity of
 * the region server's bulkLoad functionality.
 */
@RunWith(Parameterized.class)
@Category({RegionServerTests.class, LargeTests.class})
public class TestHRegionServerBulkLoad {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHRegionServerBulkLoad.class);
private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class);
protected static HBaseTestingUtil UTIL = new HBaseTestingUtil();
protected final static Configuration conf = UTIL.getConfiguration();
protected final static byte[] QUAL = Bytes.toBytes("qual");
protected final static int NUM_CFS = 10;
private int sleepDuration;
public static int BLOCKSIZE = 64 * 1024;
public static Algorithm COMPRESSION = Compression.Algorithm.NONE;
protected final static byte[][] families = new byte[NUM_CFS][];
static {
for (int i = 0; i < NUM_CFS; i++) {
families[i] = Bytes.toBytes(family(i));
}
}
@Parameters
public static final Collection<Object[]> parameters() {
int[] sleepDurations = new int[] { 0, 30000 };
List<Object[]> configurations = new ArrayList<>();
for (int i : sleepDurations) {
configurations.add(new Object[] { i });
}
return configurations;
}
public TestHRegionServerBulkLoad(int duration) {
this.sleepDuration = duration;
}
@BeforeClass
public static void setUpBeforeClass() throws Exception {
conf.setInt("hbase.rpc.timeout", 10 * 1000);
}
/**
* Create a rowkey compatible with
* {@link #createHFile(FileSystem, Path, byte[], byte[], byte[], int)}.
*/
public static byte[] rowkey(int i) {
return Bytes.toBytes(String.format("row_%08d", i));
}
static String family(int i) {
return String.format("family_%04d", i);
}
/**
* Create an HFile with the given number of rows with a specified value.
*/
public static void createHFile(FileSystem fs, Path path, byte[] family,
byte[] qualifier, byte[] value, int numRows) throws IOException {
HFileContext context = new HFileContextBuilder().withBlockSize(BLOCKSIZE)
.withCompression(COMPRESSION)
.build();
HFile.Writer writer = HFile
.getWriterFactory(conf, new CacheConfig(conf))
.withPath(fs, path)
.withFileContext(context)
.create();
long now = EnvironmentEdgeManager.currentTime();
try {
// subtract 2 since iterateOnSplits doesn't include boundary keys
for (int i = 0; i < numRows; i++) {
KeyValue kv = new KeyValue(rowkey(i), family, qualifier, now, value);
writer.append(kv);
}
writer.appendFileInfo(BULKLOAD_TIME_KEY, Bytes.toBytes(now));
} finally {
writer.close();
}
}
/**
* Thread that does full scans of the table looking for any partially
* completed rows.
*
* Each iteration of this loads 10 hdfs files, which occupies 5 file open file
* handles. So every 10 iterations (500 file handles) it does a region
* compaction to reduce the number of open file handles.
*/
public static class AtomicHFileLoader extends RepeatingTestThread {
final AtomicLong numBulkLoads = new AtomicLong();
final AtomicLong numCompactions = new AtomicLong();
private TableName tableName;
public AtomicHFileLoader(TableName tableName, TestContext ctx,
byte targetFamilies[][]) throws IOException {
super(ctx);
this.tableName = tableName;
}
@Override
public void doAnAction() throws Exception {
long iteration = numBulkLoads.getAndIncrement();
Path dir = UTIL.getDataTestDirOnTestFS(String.format("bulkLoad_%08d",
iteration));
// create HFiles for different column families
FileSystem fs = UTIL.getTestFileSystem();
byte[] val = Bytes.toBytes(String.format("%010d", iteration));
Map<byte[], List<Path>> family2Files = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (int i = 0; i < NUM_CFS; i++) {
Path hfile = new Path(dir, family(i));
byte[] fam = Bytes.toBytes(family(i));
createHFile(fs, hfile, fam, QUAL, val, 1000);
family2Files.put(fam, Collections.singletonList(hfile));
}
// bulk load HFiles
BulkLoadHFiles.create(UTIL.getConfiguration()).bulkLoad(tableName, family2Files);
final Connection conn = UTIL.getConnection();
// Periodically do compaction to reduce the number of open file handles.
if (numBulkLoads.get() % 5 == 0) {
// 5 * 50 = 250 open file handles!
try (RegionLocator locator = conn.getRegionLocator(tableName)) {
HRegionLocation loc = locator.getRegionLocation(Bytes.toBytes("aaa"), true);
conn.getAdmin().compactRegion(loc.getRegion().getRegionName());
numCompactions.incrementAndGet();
}
}
}
}
public static class MyObserver implements RegionCoprocessor, RegionObserver {
static int sleepDuration;
@Override
public Optional<RegionObserver> getRegionObserver() {
return Optional.of(this);
}
@Override
public InternalScanner preCompact(ObserverContext<RegionCoprocessorEnvironment> e, Store store,
InternalScanner scanner, ScanType scanType, CompactionLifeCycleTracker tracker,
CompactionRequest request)
throws IOException {
try {
Thread.sleep(sleepDuration);
} catch (InterruptedException ie) {
IOException ioe = new InterruptedIOException();
ioe.initCause(ie);
throw ioe;
}
return scanner;
}
}
/**
* Thread that does full scans of the table looking for any partially
* completed rows.
*/
public static class AtomicScanReader extends RepeatingTestThread {
byte targetFamilies[][];
Table table;
AtomicLong numScans = new AtomicLong();
AtomicLong numRowsScanned = new AtomicLong();
TableName TABLE_NAME;
public AtomicScanReader(TableName TABLE_NAME, TestContext ctx,
byte targetFamilies[][]) throws IOException {
super(ctx);
this.TABLE_NAME = TABLE_NAME;
this.targetFamilies = targetFamilies;
table = UTIL.getConnection().getTable(TABLE_NAME);
}
@Override
public void doAnAction() throws Exception {
Scan s = new Scan();
for (byte[] family : targetFamilies) {
s.addFamily(family);
}
ResultScanner scanner = table.getScanner(s);
for (Result res : scanner) {
byte[] lastRow = null, lastFam = null, lastQual = null;
byte[] gotValue = null;
for (byte[] family : targetFamilies) {
byte qualifier[] = QUAL;
byte thisValue[] = res.getValue(family, qualifier);
if (gotValue != null && thisValue != null
&& !Bytes.equals(gotValue, thisValue)) {
StringBuilder msg = new StringBuilder();
msg.append("Failed on scan ").append(numScans)
.append(" after scanning ").append(numRowsScanned)
.append(" rows!\n");
msg.append("Current was " + Bytes.toString(res.getRow()) + "/"
+ Bytes.toString(family) + ":" + Bytes.toString(qualifier)
+ " = " + Bytes.toString(thisValue) + "\n");
msg.append("Previous was " + Bytes.toString(lastRow) + "/"
+ Bytes.toString(lastFam) + ":" + Bytes.toString(lastQual)
+ " = " + Bytes.toString(gotValue));
throw new RuntimeException(msg.toString());
}
lastFam = family;
lastQual = qualifier;
lastRow = res.getRow();
gotValue = thisValue;
}
numRowsScanned.getAndIncrement();
}
numScans.getAndIncrement();
}
}
/**
* Creates a table with given table name and specified number of column
* families if the table does not already exist.
*/
public void setupTable(TableName table, int cfs) throws IOException {
try {
LOG.info("Creating table " + table);
TableDescriptorBuilder tableDescriptorBuilder =
TableDescriptorBuilder.newBuilder(table);
tableDescriptorBuilder.setCoprocessor(MyObserver.class.getName());
MyObserver.sleepDuration = this.sleepDuration;
for (int i = 0; i < 10; i++) {
ColumnFamilyDescriptor columnFamilyDescriptor =
ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family(i))).build();
tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
}
UTIL.getAdmin().createTable(tableDescriptorBuilder.build());
} catch (TableExistsException tee) {
LOG.info("Table " + table + " already exists");
}
}
/**
* Atomic bulk load.
*/
@Test
public void testAtomicBulkLoad() throws Exception {
TableName TABLE_NAME = TableName.valueOf("atomicBulkLoad");
int millisToRun = 30000;
int numScanners = 50;
// Set createWALDir to true and use default values for other options.
UTIL.startMiniCluster(StartTestingClusterOption.builder().createWALDir(true).build());
try {
WAL log = UTIL.getHBaseCluster().getRegionServer(0).getWAL(null);
FindBulkHBaseListener listener = new FindBulkHBaseListener();
log.registerWALActionsListener(listener);
runAtomicBulkloadTest(TABLE_NAME, millisToRun, numScanners);
assertThat(listener.isFound(), is(true));
} finally {
UTIL.shutdownMiniCluster();
}
}
void runAtomicBulkloadTest(TableName tableName, int millisToRun, int numScanners)
throws Exception {
setupTable(tableName, 10);
TestContext ctx = new TestContext(UTIL.getConfiguration());
AtomicHFileLoader loader = new AtomicHFileLoader(tableName, ctx, null);
ctx.addThread(loader);
List<AtomicScanReader> scanners = Lists.newArrayList();
for (int i = 0; i < numScanners; i++) {
AtomicScanReader scanner = new AtomicScanReader(tableName, ctx, families);
scanners.add(scanner);
ctx.addThread(scanner);
}
ctx.startThreads();
ctx.waitFor(millisToRun);
ctx.stop();
LOG.info("Loaders:");
LOG.info(" loaded " + loader.numBulkLoads.get());
LOG.info(" compations " + loader.numCompactions.get());
LOG.info("Scanners:");
for (AtomicScanReader scanner : scanners) {
LOG.info(" scanned " + scanner.numScans.get());
LOG.info(" verified " + scanner.numRowsScanned.get() + " rows");
}
}
/**
* Run test on an HBase instance for 5 minutes. This assumes that the table
* under test only has a single region.
*/
public static void main(String args[]) throws Exception {
try {
Configuration c = HBaseConfiguration.create();
TestHRegionServerBulkLoad test = new TestHRegionServerBulkLoad(0);
test.setConf(c);
test.runAtomicBulkloadTest(TableName.valueOf("atomicTableTest"), 5 * 60 * 1000, 50);
} finally {
System.exit(0); // something hangs (believe it is lru threadpool)
}
}
private void setConf(Configuration c) {
UTIL = new HBaseTestingUtil(c);
}
static class FindBulkHBaseListener extends TestWALActionsListener.DummyWALActionsListener {
private boolean found = false;
@Override
public void visitLogEntryBeforeWrite(RegionInfo info, WALKey logKey, WALEdit logEdit) {
for (Cell cell : logEdit.getCells()) {
KeyValue kv = KeyValueUtil.ensureKeyValue(cell);
for (Map.Entry entry : kv.toStringMap().entrySet()) {
if (entry.getValue().equals(Bytes.toString(WALEdit.BULK_LOAD))) {
found = true;
}
}
}
}
public boolean isFound() {
return found;
}
}
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.slicer;
import com.intellij.icons.AllIcons;
import com.intellij.ide.CommonActionsManager;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.actions.CloseTabToolbarAction;
import com.intellij.ide.actions.RefreshAction;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.wm.ToolWindow;
import com.intellij.openapi.wm.ToolWindowAnchor;
import com.intellij.openapi.wm.ex.ToolWindowManagerEx;
import com.intellij.openapi.wm.ex.ToolWindowManagerListener;
import com.intellij.pom.Navigatable;
import com.intellij.ui.*;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewBundle;
import com.intellij.usages.Usage;
import com.intellij.usages.UsageViewPresentation;
import com.intellij.usages.UsageViewSettings;
import com.intellij.usages.impl.UsagePreviewPanel;
import com.intellij.util.EditSourceOnDoubleClickHandler;
import com.intellij.util.ui.UIUtil;
import com.intellij.util.ui.tree.TreeUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import javax.swing.event.TreeExpansionEvent;
import javax.swing.event.TreeWillExpandListener;
import javax.swing.tree.*;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.*;
import java.util.List;
/**
* @author cdr
*/
public abstract class SlicePanel extends JPanel implements TypeSafeDataProvider, Disposable {
private final SliceTreeBuilder myBuilder;
private final JTree myTree;
private final AutoScrollToSourceHandler myAutoScrollToSourceHandler = new AutoScrollToSourceHandler() {
@Override
protected boolean isAutoScrollMode() {
return isAutoScroll();
}
@Override
protected void setAutoScrollMode(final boolean state) {
setAutoScroll(state);
}
};
private UsagePreviewPanel myUsagePreviewPanel;
private final Project myProject;
private boolean isDisposed;
private final ToolWindow myToolWindow;
private final SliceLanguageSupportProvider myProvider;
protected SlicePanel(@NotNull final Project project,
boolean dataFlowToThis,
@NotNull SliceNode rootNode,
boolean splitByLeafExpressions,
@NotNull final ToolWindow toolWindow) {
super(new BorderLayout());
myProvider = rootNode.getProvider();
myToolWindow = toolWindow;
final ToolWindowManagerListener listener = new ToolWindowManagerListener() {
ToolWindowAnchor myAnchor = toolWindow.getAnchor();
@Override
public void toolWindowRegistered(@NotNull String id) {
}
@Override
public void stateChanged() {
if (!project.isOpen()) return;
if (toolWindow.getAnchor() != myAnchor) {
myAnchor = myToolWindow.getAnchor();
layoutPanel();
}
}
};
ToolWindowManagerEx.getInstanceEx(project).addToolWindowManagerListener(listener, this);
ApplicationManager.getApplication().assertIsDispatchThread();
myProject = project;
myTree = createTree();
myBuilder = new SliceTreeBuilder(myTree, project, dataFlowToThis, rootNode, splitByLeafExpressions);
myBuilder.setCanYieldUpdate(!ApplicationManager.getApplication().isUnitTestMode());
Disposer.register(this, myBuilder);
myBuilder.addSubtreeToUpdate((DefaultMutableTreeNode)myTree.getModel().getRoot(), () -> {
if (isDisposed || myBuilder.isDisposed() || myProject.isDisposed()) return;
final SliceNode rootNode1 = myBuilder.getRootSliceNode();
myBuilder.expand(rootNode1, new Runnable() {
@Override
public void run() {
if (isDisposed || myBuilder.isDisposed() || myProject.isDisposed()) return;
myBuilder.select(rootNode1.myCachedChildren.get(0)); //first there is ony one child
}
});
treeSelectionChanged();
});
layoutPanel();
}
private void layoutPanel() {
if (myUsagePreviewPanel != null) {
Disposer.dispose(myUsagePreviewPanel);
}
removeAll();
JScrollPane pane = ScrollPaneFactory.createScrollPane(myTree);
if (isPreview()) {
pane.setBorder(IdeBorderFactory.createBorder(SideBorder.LEFT | SideBorder.RIGHT));
boolean vertical = myToolWindow.getAnchor() == ToolWindowAnchor.LEFT || myToolWindow.getAnchor() == ToolWindowAnchor.RIGHT;
Splitter splitter = new Splitter(vertical, UsageViewSettings.getInstance().getPreviewUsagesSplitterProportion());
splitter.setFirstComponent(pane);
myUsagePreviewPanel = new UsagePreviewPanel(myProject, new UsageViewPresentation());
myUsagePreviewPanel.setBorder(IdeBorderFactory.createBorder(SideBorder.LEFT));
Disposer.register(this, myUsagePreviewPanel);
splitter.setSecondComponent(myUsagePreviewPanel);
add(splitter, BorderLayout.CENTER);
}
else {
pane.setBorder(IdeBorderFactory.createBorder(SideBorder.LEFT));
add(pane, BorderLayout.CENTER);
}
add(createToolbar().getComponent(), BorderLayout.WEST);
myTree.getParent().setBackground(UIManager.getColor("Tree.background"));
revalidate();
}
@Override
public void dispose() {
if (myUsagePreviewPanel != null) {
UsageViewSettings.getInstance().setPreviewUsagesSplitterProportion(((Splitter)myUsagePreviewPanel.getParent()).getProportion());
myUsagePreviewPanel = null;
}
isDisposed = true;
ToolTipManager.sharedInstance().unregisterComponent(myTree);
}
static class MultiLanguageTreeCellRenderer implements TreeCellRenderer {
@NotNull
private final SliceUsageCellRendererBase rootRenderer;
@NotNull
private final Map<SliceLanguageSupportProvider, SliceUsageCellRendererBase> providersToRenderers = new HashMap<>();
public MultiLanguageTreeCellRenderer(@NotNull SliceUsageCellRendererBase rootRenderer) {
this.rootRenderer = rootRenderer;
rootRenderer.setOpaque(false);
}
@NotNull
private SliceUsageCellRendererBase getRenderer(Object value) {
if (!(value instanceof DefaultMutableTreeNode)) return rootRenderer;
Object userObject = ((DefaultMutableTreeNode)value).getUserObject();
if (!(userObject instanceof SliceNode)) return rootRenderer;
SliceLanguageSupportProvider provider = ((SliceNode)userObject).getProvider();
if (provider == null) return rootRenderer;
SliceUsageCellRendererBase renderer = providersToRenderers.get(provider);
if (renderer == null) {
renderer = provider.getRenderer();
renderer.setOpaque(false);
providersToRenderers.put(provider, renderer);
}
return renderer;
}
@Override
public Component getTreeCellRendererComponent(JTree tree,
Object value,
boolean selected,
boolean expanded,
boolean leaf,
int row,
boolean hasFocus) {
return getRenderer(value).getTreeCellRendererComponent(tree, value, selected, expanded, leaf, row, hasFocus);
}
}
/**
 * Builds the slice tree and wires all of its behavior: multi-language cell
 * rendering, source navigation (double click and ENTER), speed search,
 * tooltips, auto-scroll-to-source, selection-driven preview updates and lazy
 * duplicate-node computation on expand.
 *
 * @return the fully configured tree; the invisible root is pre-selected
 */
@NotNull
private JTree createTree() {
  DefaultMutableTreeNode root = new DefaultMutableTreeNode();
  final Tree tree = new Tree(new DefaultTreeModel(root));
  tree.setOpaque(false);
  // -1 disables expand/collapse on row clicks; only the handles toggle nodes.
  tree.setToggleClickCount(-1);
  tree.setCellRenderer(new MultiLanguageTreeCellRenderer(myProvider.getRenderer()));
  UIUtil.setLineStyleAngled(tree);
  tree.setRootVisible(false);
  tree.setShowsRootHandles(true);
  tree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
  tree.setSelectionPath(new TreePath(root.getPath()));
  EditSourceOnDoubleClickHandler.install(tree);
  new TreeSpeedSearch(tree);
  TreeUtil.installActions(tree);
  ToolTipManager.sharedInstance().registerComponent(tree);
  myAutoScrollToSourceHandler.install(tree);
  tree.getSelectionModel().addTreeSelectionListener(e -> treeSelectionChanged());
  // ENTER navigates to the source of every selected usage and highlights it.
  tree.addKeyListener(new KeyAdapter() {
    @Override
    public void keyPressed(KeyEvent e) {
      if (KeyEvent.VK_ENTER == e.getKeyCode()) {
        List<Navigatable> navigatables = getNavigatables();
        if (navigatables.isEmpty()) return;
        for (Navigatable navigatable : navigatables) {
          // Unwrap tree nodes whose value is the actual Usage.
          if (navigatable instanceof AbstractTreeNode && ((AbstractTreeNode)navigatable).getValue() instanceof Usage) {
            navigatable = (Usage)((AbstractTreeNode)navigatable).getValue();
          }
          if (navigatable.canNavigateToSource()) {
            navigatable.navigate(false);
            if (navigatable instanceof Usage) {
              ((Usage)navigatable).highlightInEditor();
            }
          }
        }
        e.consume();
      }
    }
  });
  tree.addTreeWillExpandListener(new TreeWillExpandListener() {
    @Override
    public void treeWillCollapse(TreeExpansionEvent event) {
    }

    @Override
    public void treeWillExpand(TreeExpansionEvent event) {
      SliceNode node = fromPath(event.getPath());
      // fromPath() returns null when the path does not end in a SliceNode
      // (e.g. the invisible root); guard against the NPE the old code risked.
      if (node != null) {
        node.calculateDupNode();
      }
    }
  });
  return tree;
}
/**
 * Refreshes the usage preview for the current tree selection. The work is
 * posted via invokeLater so it runs after the selection model has settled.
 */
private void treeSelectionChanged() {
  SwingUtilities.invokeLater(() -> {
    if (isDisposed) return;
    List<UsageInfo> infos = getSelectedUsageInfos();
    if (infos == null || myUsagePreviewPanel == null) return;
    myUsagePreviewPanel.updateLayout(infos);
  });
}
/**
 * Extracts the {@link SliceNode} user object from the last component of
 * {@code path}, or returns {@code null} when the path does not end in a
 * DefaultMutableTreeNode holding a SliceNode.
 */
private static SliceNode fromPath(TreePath path) {
  Object last = path.getLastPathComponent();
  if (!(last instanceof DefaultMutableTreeNode)) {
    return null;
  }
  Object userObject = ((DefaultMutableTreeNode)last).getUserObject();
  return userObject instanceof SliceNode ? (SliceNode)userObject : null;
}
/**
 * Collects the {@link UsageInfo}s behind the selected tree paths.
 *
 * @return the usage infos, or {@code null} when nothing is selected or no
 *         selected path resolves to a SliceNode
 */
@Nullable
private List<UsageInfo> getSelectedUsageInfos() {
  TreePath[] selection = myTree.getSelectionPaths();
  if (selection == null) return null;
  List<UsageInfo> usageInfos = new ArrayList<>();
  for (TreePath path : selection) {
    SliceNode node = fromPath(path);
    if (node != null) {
      usageInfos.add(node.getValue().getUsageInfo());
    }
  }
  return usageInfos.isEmpty() ? null : usageInfos;
}
/**
 * Supplies selected tree nodes as a Navigatable array for the platform's
 * navigation actions; all other data keys are ignored.
 */
@Override
public void calcData(DataKey key, DataSink sink) {
  if (key != CommonDataKeys.NAVIGATABLE_ARRAY) return;
  List<Navigatable> navigatables = getNavigatables();
  if (navigatables.isEmpty()) return;
  sink.put(CommonDataKeys.NAVIGATABLE_ARRAY, navigatables.toArray(new Navigatable[0]));
}
/**
 * Collects Navigatables from the current selection: the node's user object if
 * it is navigatable, otherwise the tree node itself when it implements
 * Navigatable. Never returns null.
 */
@NotNull
private List<Navigatable> getNavigatables() {
  TreePath[] selection = myTree.getSelectionPaths();
  if (selection == null) return Collections.emptyList();
  List<Navigatable> result = new ArrayList<>();
  for (TreePath path : selection) {
    Object component = path.getLastPathComponent();
    if (!(component instanceof DefaultMutableTreeNode)) continue;
    DefaultMutableTreeNode treeNode = (DefaultMutableTreeNode)component;
    Object userObject = treeNode.getUserObject();
    if (userObject instanceof Navigatable) {
      result.add((Navigatable)userObject);
    }
    else if (treeNode instanceof Navigatable) {
      result.add((Navigatable)treeNode);
    }
  }
  return result;
}
/**
 * Builds the panel's vertical toolbar: refresh, plus optional auto-scroll,
 * close and preview toggles, provider-specific extra actions, and an
 * export-to-text-file action.
 */
@NotNull
private ActionToolbar createToolbar() {
  final DefaultActionGroup actionGroup = new DefaultActionGroup();
  actionGroup.add(new MyRefreshAction(myTree));
  if (isToShowAutoScrollButton()) {
    actionGroup.add(myAutoScrollToSourceHandler.createToggleAction());
  }
  if (isToShowCloseButton()) {
    actionGroup.add(new CloseAction());
  }
  if (isToShowPreviewButton()) {
    // Toggle for the usage preview pane; switching it re-runs layoutPanel().
    actionGroup.add(new ToggleAction(UsageViewBundle.message("preview.usages.action.text", "usages"), "preview", AllIcons.Actions.PreviewDetails) {
      @Override
      public boolean isSelected(AnActionEvent e) {
        return isPreview();
      }

      @Override
      public void setSelected(AnActionEvent e, boolean state) {
        setPreview(state);
        layoutPanel();
      }
    });
  }
  // Let the language provider contribute its own actions.
  myProvider.registerExtraPanelActions(actionGroup, myBuilder);
  actionGroup.add(CommonActionsManager.getInstance().createExportToTextFileAction(new SliceToTextFileExporter(myBuilder, UsageViewSettings.getInstance())));
  //actionGroup.add(new ContextHelpAction(HELP_ID));
  return ActionManager.getInstance().createActionToolbar(ActionPlaces.TYPE_HIERARCHY_VIEW_TOOLBAR, actionGroup, false);
}
// Subclass hooks controlling which toolbar toggles are shown and backing the
// auto-scroll / preview state (persistence is up to concrete implementations).
public boolean isToShowAutoScrollButton() {return true;}
public abstract boolean isAutoScroll();
public abstract void setAutoScroll(boolean autoScroll);
public boolean isToShowCloseButton() {return true;}
public boolean isToShowPreviewButton() {return true;}
public abstract boolean isPreview();
public abstract void setPreview(boolean preview);
/** Toolbar action that closes this slice panel via {@link #close()}. */
private class CloseAction extends CloseTabToolbarAction {
  @Override
  public final void actionPerformed(final AnActionEvent e) {
    close();
  }
}
/**
 * Closes the panel by cancelling the builder's in-flight slice computation,
 * if one is currently running.
 */
protected void close() {
  ProgressIndicator progress = myBuilder.getUi().getProgress();
  if (progress == null) return;
  progress.cancel();
}
/**
 * Toolbar action that re-runs the slice analysis: marks the root node as
 * changed and schedules a rebuild of the whole tree.
 */
private final class MyRefreshAction extends RefreshAction {
  private MyRefreshAction(JComponent tree) {
    super(IdeBundle.message("action.refresh"), IdeBundle.message("action.refresh"), AllIcons.Actions.Refresh);
    // Bind the standard refresh shortcut while the tree has focus.
    registerShortcutOn(tree);
  }

  @Override
  public final void actionPerformed(final AnActionEvent e) {
    SliceNode rootNode = (SliceNode)myBuilder.getRootNode().getUserObject();
    // setChanged() presumably invalidates cached slice results so the
    // subtree update recomputes children — confirm against SliceNode.
    rootNode.setChanged();
    myBuilder.addSubtreeToUpdate(myBuilder.getRootNode());
  }

  @Override
  public final void update(final AnActionEvent event) {
    final Presentation presentation = event.getPresentation();
    // Refresh is always available.
    presentation.setEnabled(true);
  }
}
/** Exposes the underlying tree builder; for tests only. */
@TestOnly
public SliceTreeBuilder getBuilder() {
  return myBuilder;
}
}
| |
/**
*
*/
package com.perforce.p4java.option.client;
import java.util.List;
import com.perforce.p4java.core.IChangelist;
import com.perforce.p4java.exception.OptionsException;
import com.perforce.p4java.option.Options;
import com.perforce.p4java.server.IServer;
/**
* Options for the IClient.mergeFiles method.<p>
*
* Note also that the current implementation makes no attempt
* to validate the sanity or otherwise of the various options and
* their combination.
*
* @see com.perforce.p4java.client.IClient#mergeFiles(com.perforce.p4java.core.file.IFileSpec, com.perforce.p4java.core.file.IFileSpec, String, com.perforce.p4java.option.client.MergeFilesOptions)
*/
public class MergeFilesOptions extends Options {

  /**
   * Options: -c[changelist], -n, -q, -m[max], -b[branch], -S[stream], -P[parentStream], -F, -r, -s
   * <p>
   * NOTE: the order of specs here must match the argument order passed to
   * processFields() in {@link #processOptions(IServer)}.
   */
  public static final String OPTIONS_SPECS = "i:c:clz b:n b:q i:m:gtz s:b s:S s:P b:F b:r b:s";

  /**
   * If positive, the integrated files are opened in the numbered
   * pending changelist instead of the default changelist.
   */
  protected int changelistId = IChangelist.UNKNOWN;

  /**
   * Cause the branch view to work bidirectionally, where the scope of
   * the command is limited to integrations whose 'from' files match
   * fromFile[revRange]. Corresponds to the -s flag, with the fromFile
   * arg being specified in the main method fromFile parameter.
   */
  protected boolean bidirectionalMerge = false;

  /**
   * Display what integrations would be necessary but don't
   * actually do them. Corresponds to the -n flag.
   */
  protected boolean showActionsOnly = false;

  /**
   * If true, suppresses normal output messages. Messages regarding
   * errors or exceptional conditions are not suppressed.
   * Corresponds to -q flag.
   */
  protected boolean quiet = false;

  /**
   * Reverse the mappings in the branch view, with the
   * target files and source files exchanging place.
   * Corresponds to the -r flag.
   */
  protected boolean reverseMapping = false;

  /**
   * If positive, integrate only the first maxFiles files.
   * Corresponds to -m flag.
   */
  protected int maxFiles = 0;

  /**
   * If non-null, use a user-defined branch view. The source is the left
   * side of the branch view and the target is the right side. With -r,
   * the direction is reversed. Corresponds to -b flag.
   */
  protected String branch = null;

  /**
   * If not null, makes 'p4 integrate' use a stream's branch view. The
   * source is the stream itself, and the target is the stream's parent.
   * With -r, the direction is reversed. -P can be used to specify a
   * parent stream other than the stream's actual parent. Note that to
   * submit integrated stream files, the current client must be dedicated
   * to the target stream. Corresponds to -S flag.
   */
  protected String stream = null;

  /**
   * If non-null, specify a parent stream other than the stream's actual
   * parent. Corresponds to -P flag.
   */
  protected String parentStream = null;

  /**
   * If true, force merging even though the stream does not expect a merge
   * to occur in the direction indicated. Normally 'p4 merge' enforces the
   * expected flow of change dictated by the stream's spec. The 'p4 istat'
   * command summarizes a stream's expected flow of change.
   * Corresponds to the -F flag.
   */
  protected boolean forceStreamMerge = false;

  /**
   * Default constructor.
   */
  public MergeFilesOptions() {
    super();
  }

  /**
   * Strings-based constructor; see 'p4 help [command]' for possible options.
   * <p>
   *
   * <b>WARNING: you should not pass more than one option or argument in each
   * string parameter. Each option or argument should be passed-in as its own
   * separate string parameter, without any spaces between the option and the
   * option value (if any).</b>
   * <p>
   *
   * <b>NOTE: setting options this way always bypasses the internal options
   * values, and getter methods against the individual values corresponding to
   * the strings passed in to this constructor will not normally reflect the
   * string's setting. Do not use this constructor unless you know what you're
   * doing and / or you do not also use the field getters and setters.</b>
   *
   * @see com.perforce.p4java.option.Options#Options(String...)
   */
  public MergeFilesOptions(String... options) {
    super(options);
  }

  /**
   * Explicit-value constructor for use with a branch.
   */
  public MergeFilesOptions(int changelistId, boolean showActionsOnly,
                           int maxFiles, String branch,
                           boolean reverseMapping, boolean bidirectionalInteg) {
    super();
    this.changelistId = changelistId;
    this.showActionsOnly = showActionsOnly;
    this.maxFiles = maxFiles;
    this.branch = branch;
    this.reverseMapping = reverseMapping;
    this.bidirectionalMerge = bidirectionalInteg;
  }

  /**
   * Explicit-value constructor for use with a stream.
   */
  public MergeFilesOptions(int changelistId, boolean showActionsOnly,
                           int maxFiles, String stream, String parentStream,
                           boolean forceStreamMerge, boolean reverseMapping) {
    super();
    this.changelistId = changelistId;
    this.showActionsOnly = showActionsOnly;
    this.maxFiles = maxFiles;
    this.stream = stream;
    this.parentStream = parentStream;
    this.forceStreamMerge = forceStreamMerge;
    this.reverseMapping = reverseMapping;
  }

  /**
   * Turn the current field values into the server option list.
   * The argument order below must stay in sync with OPTIONS_SPECS.
   *
   * @see com.perforce.p4java.option.Options#processOptions(com.perforce.p4java.server.IServer)
   */
  public List<String> processOptions(IServer server) throws OptionsException {
    this.optionList = this.processFields(OPTIONS_SPECS,
        this.getChangelistId(),
        this.isShowActionsOnly(),
        this.isQuiet(),
        this.getMaxFiles(),
        this.getBranch(),
        this.getStream(),
        this.getParentStream(),
        this.isForceStreamMerge(),
        this.isReverseMapping(),
        this.isBidirectionalInteg());
    return this.optionList;
  }

  // ---- Accessors. Setters return 'this' to allow fluent chaining; note
  // ---- that none of them mark the options as dirty — see Options docs.

  public int getChangelistId() {
    return changelistId;
  }

  public MergeFilesOptions setChangelistId(int changelistId) {
    this.changelistId = changelistId;
    return this;
  }

  public boolean isShowActionsOnly() {
    return showActionsOnly;
  }

  public MergeFilesOptions setShowActionsOnly(boolean showActionsOnly) {
    this.showActionsOnly = showActionsOnly;
    return this;
  }

  public boolean isQuiet() {
    return quiet;
  }

  public MergeFilesOptions setQuiet(boolean quiet) {
    this.quiet = quiet;
    return this;
  }

  public boolean isReverseMapping() {
    return reverseMapping;
  }

  public MergeFilesOptions setReverseMapping(boolean reverseMapping) {
    this.reverseMapping = reverseMapping;
    return this;
  }

  public boolean isBidirectionalInteg() {
    return bidirectionalMerge;
  }

  public MergeFilesOptions setBidirectionalInteg(boolean bidirectionalInteg) {
    this.bidirectionalMerge = bidirectionalInteg;
    return this;
  }

  public int getMaxFiles() {
    return maxFiles;
  }

  public MergeFilesOptions setMaxFiles(int maxFiles) {
    this.maxFiles = maxFiles;
    return this;
  }

  public String getBranch() {
    return branch;
  }

  public MergeFilesOptions setBranch(String branch) {
    this.branch = branch;
    return this;
  }

  public String getStream() {
    return stream;
  }

  public MergeFilesOptions setStream(String stream) {
    this.stream = stream;
    return this;
  }

  public String getParentStream() {
    return parentStream;
  }

  public MergeFilesOptions setParentStream(String parentStream) {
    this.parentStream = parentStream;
    return this;
  }

  public boolean isForceStreamMerge() {
    return forceStreamMerge;
  }

  public MergeFilesOptions setForceStreamMerge(boolean forceStreamMerge) {
    this.forceStreamMerge = forceStreamMerge;
    return this;
  }
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.collect;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.collect.BoundType.CLOSED;
import static com.google.common.collect.BoundType.OPEN;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.Multiset.Entry;
import java.util.Comparator;
import java.util.Iterator;
import java.util.NavigableSet;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.SortedSet;
import javax.annotation.Nullable;
/**
* Provides static utility methods for creating and working with
* {@link SortedMultiset} instances.
*
* @author Louis Wasserman
*/
@GwtCompatible(emulated = true)
final class SortedMultisets {
  private SortedMultisets() {
  }

  /**
   * A skeleton implementation for {@link SortedMultiset#elementSet}.
   */
  static abstract class ElementSet<E> extends Multisets.ElementSet<E> implements
      SortedSet<E> {
    @Override abstract SortedMultiset<E> multiset();

    @Override public Comparator<? super E> comparator() {
      return multiset().comparator();
    }

    // SortedSet.subSet is half-open [from, to) — hence CLOSED lower / OPEN upper.
    @Override public SortedSet<E> subSet(E fromElement, E toElement) {
      return multiset().subMultiset(fromElement, CLOSED, toElement, OPEN).elementSet();
    }

    @Override public SortedSet<E> headSet(E toElement) {
      return multiset().headMultiset(toElement, OPEN).elementSet();
    }

    @Override public SortedSet<E> tailSet(E fromElement) {
      return multiset().tailMultiset(fromElement, CLOSED).elementSet();
    }

    // first()/last() throw NoSuchElementException on an empty multiset,
    // as required by the SortedSet contract.
    @Override public E first() {
      return getElementOrThrow(multiset().firstEntry());
    }

    @Override public E last() {
      return getElementOrThrow(multiset().lastEntry());
    }
  }

  /**
   * A skeleton navigable implementation for {@link SortedMultiset#elementSet}.
   */
  @GwtIncompatible("Navigable")
  static class NavigableElementSet<E> extends ElementSet<E> implements NavigableSet<E> {
    private final SortedMultiset<E> multiset;

    NavigableElementSet(SortedMultiset<E> multiset) {
      this.multiset = checkNotNull(multiset);
    }

    @Override final SortedMultiset<E> multiset() {
      return multiset;
    }

    // Each NavigableSet query maps onto a head/tail sub-multiset boundary
    // entry; getElementOrNull converts "no such entry" into null per contract.
    @Override
    public E lower(E e) {
      return getElementOrNull(multiset().headMultiset(e, OPEN).lastEntry());
    }

    @Override
    public E floor(E e) {
      return getElementOrNull(multiset().headMultiset(e, CLOSED).lastEntry());
    }

    @Override
    public E ceiling(E e) {
      return getElementOrNull(multiset().tailMultiset(e, CLOSED).firstEntry());
    }

    @Override
    public E higher(E e) {
      return getElementOrNull(multiset().tailMultiset(e, OPEN).firstEntry());
    }

    @Override
    public NavigableSet<E> descendingSet() {
      return new NavigableElementSet<E>(multiset().descendingMultiset());
    }

    @Override
    public Iterator<E> descendingIterator() {
      return descendingSet().iterator();
    }

    // poll* removes ALL occurrences of the boundary element from the multiset.
    @Override
    public E pollFirst() {
      return getElementOrNull(multiset().pollFirstEntry());
    }

    @Override
    public E pollLast() {
      return getElementOrNull(multiset().pollLastEntry());
    }

    @Override
    public NavigableSet<E> subSet(
        E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) {
      return new NavigableElementSet<E>(multiset().subMultiset(
          fromElement, BoundType.forBoolean(fromInclusive),
          toElement, BoundType.forBoolean(toInclusive)));
    }

    @Override
    public NavigableSet<E> headSet(E toElement, boolean inclusive) {
      return new NavigableElementSet<E>(
          multiset().headMultiset(toElement, BoundType.forBoolean(inclusive)));
    }

    @Override
    public NavigableSet<E> tailSet(E fromElement, boolean inclusive) {
      return new NavigableElementSet<E>(
          multiset().tailMultiset(fromElement, BoundType.forBoolean(inclusive)));
    }
  }

  // Converts a nullable boundary entry to its element, throwing as required
  // by SortedSet.first()/last().
  private static <E> E getElementOrThrow(Entry<E> entry) {
    if (entry == null) {
      throw new NoSuchElementException();
    }
    return entry.getElement();
  }

  // Converts a nullable boundary entry to its element or null (NavigableSet style).
  private static <E> E getElementOrNull(@Nullable Entry<E> entry) {
    return (entry == null) ? null : entry.getElement();
  }

  /**
   * A skeleton implementation of a descending multiset. Only needs
   * {@code forwardMultiset()} and {@code entryIterator()}.
   */
  static abstract class DescendingMultiset<E> extends ForwardingMultiset<E>
      implements SortedMultiset<E> {
    abstract SortedMultiset<E> forwardMultiset();

    // Lazily-computed views, cached in transient fields. The computation is
    // idempotent, so a lost-update race just recomputes — presumably the usual
    // Guava benign-race idiom; confirm before relying on it across threads.
    private transient Comparator<? super E> comparator;

    @Override public Comparator<? super E> comparator() {
      Comparator<? super E> result = comparator;
      if (result == null) {
        return comparator =
            Ordering.from(forwardMultiset().comparator()).<E>reverse();
      }
      return result;
    }

    private transient SortedSet<E> elementSet;

    @Override public SortedSet<E> elementSet() {
      SortedSet<E> result = elementSet;
      if (result == null) {
        return elementSet = new SortedMultisets.ElementSet<E>() {
          @Override SortedMultiset<E> multiset() {
            return DescendingMultiset.this;
          }
        };
      }
      return result;
    }

    // All ordered operations delegate to the forward multiset with the
    // direction (and sub-range endpoints) swapped.
    @Override public Entry<E> pollFirstEntry() {
      return forwardMultiset().pollLastEntry();
    }

    @Override public Entry<E> pollLastEntry() {
      return forwardMultiset().pollFirstEntry();
    }

    @Override public SortedMultiset<E> headMultiset(E toElement,
        BoundType boundType) {
      return forwardMultiset().tailMultiset(toElement, boundType)
          .descendingMultiset();
    }

    @Override public SortedMultiset<E> subMultiset(E fromElement,
        BoundType fromBoundType, E toElement, BoundType toBoundType) {
      return forwardMultiset().subMultiset(toElement, toBoundType, fromElement,
          fromBoundType).descendingMultiset();
    }

    @Override public SortedMultiset<E> tailMultiset(E fromElement,
        BoundType boundType) {
      return forwardMultiset().headMultiset(fromElement, boundType)
          .descendingMultiset();
    }

    @Override protected Multiset<E> delegate() {
      return forwardMultiset();
    }

    @Override public SortedMultiset<E> descendingMultiset() {
      return forwardMultiset();
    }

    @Override public Entry<E> firstEntry() {
      return forwardMultiset().lastEntry();
    }

    @Override public Entry<E> lastEntry() {
      return forwardMultiset().firstEntry();
    }

    abstract Iterator<Entry<E>> entryIterator();

    private transient Set<Entry<E>> entrySet;

    @Override public Set<Entry<E>> entrySet() {
      Set<Entry<E>> result = entrySet;
      return (result == null) ? entrySet = createEntrySet() : result;
    }

    Set<Entry<E>> createEntrySet() {
      return new Multisets.EntrySet<E>() {
        @Override Multiset<E> multiset() {
          return DescendingMultiset.this;
        }

        @Override public Iterator<Entry<E>> iterator() {
          return entryIterator();
        }

        @Override public int size() {
          return forwardMultiset().entrySet().size();
        }
      };
    }

    @Override public Iterator<E> iterator() {
      return Multisets.iteratorImpl(this);
    }

    @Override public Object[] toArray() {
      return standardToArray();
    }

    @Override public <T> T[] toArray(T[] array) {
      return standardToArray(array);
    }

    @Override public String toString() {
      return entrySet().toString();
    }
  }
}
| |
package com.twitter.elephantbird.pig.util;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.pig.LoadFunc;
import org.apache.pig.backend.executionengine.ExecException;
import org.apache.pig.data.BagFactory;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DataByteArray;
import org.apache.pig.data.DataType;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.logicalLayer.schema.Schema;
import org.apache.pig.impl.logicalLayer.schema.Schema.FieldSchema;
import org.apache.thrift.TBase;
import org.apache.thrift.protocol.TType;
import com.google.common.collect.Lists;
import com.twitter.elephantbird.pig.load.ThriftPigLoader;
import com.twitter.elephantbird.thrift.TStructDescriptor;
import com.twitter.elephantbird.thrift.TStructDescriptor.Field;
import com.twitter.elephantbird.util.ThriftUtils;
import com.twitter.elephantbird.util.TypeRef;
/**
* <li> converts a Thrift struct to a Pig tuple
* <li> utilities to provide schema for Pig loaders and Pig scripts
*/
public class ThriftToPig<M extends TBase<?, ?>> {

  public static final Logger LOG = LogManager.getLogger(ThriftToPig.class);

  private static BagFactory bagFactory = BagFactory.getInstance();
  private static TupleFactory tupleFactory = TupleFactory.getInstance();

  // Field metadata for the Thrift class M, used for all conversions below.
  private TStructDescriptor structDesc;

  /** Static factory taking the Thrift class directly. */
  public static <M extends TBase<?, ?>> ThriftToPig<M> newInstance(Class<M> tClass) {
    return new ThriftToPig<M>(tClass);
  }

  /** Static factory taking a {@link TypeRef} wrapping the Thrift class. */
  public static <M extends TBase<?, ?>> ThriftToPig<M> newInstance(TypeRef<M> typeRef) {
    return new ThriftToPig<M>(typeRef.getRawClass());
  }

  public ThriftToPig(Class<M> tClass) {
    structDesc = TStructDescriptor.getInstance(tClass);
  }

  public TStructDescriptor getTStructDescriptor() {
    return structDesc;
  }

  /**
   * Converts a thrift object to Pig tuple.
   * All the fields are deserialized.
   * It might be better to use getLazyTuple() if not all fields
   * are required.
   */
  public Tuple getPigTuple(M thriftObj) {
    return toTuple(structDesc, thriftObj);
  }

  /**
   * Similar to {@link #getPigTuple(TBase)}. This delays
   * serialization of tuple contents until they are requested.
   *
   * @param thriftObj the Thrift object to wrap
   * @return a tuple whose fields are converted on first access
   */
  public Tuple getLazyTuple(M thriftObj) {
    return new LazyTuple(structDesc, thriftObj);
  }

  // Eagerly converts every field of tObj into a Pig tuple.
  @SuppressWarnings("unchecked")
  private static <T extends TBase>Tuple toTuple(TStructDescriptor tDesc, T tObj) {
    int size = tDesc.getFields().size();
    Tuple tuple = tupleFactory.newTuple(size);
    for (int i=0; i<size; i++) {
      Field field = tDesc.getFieldAt(i);
      Object value = tDesc.getFieldValue(i, tObj);
      try {
        tuple.set(i, toPigObject(field, value, false));
      } catch (ExecException e) { // not expected: index is always within bounds
        throw new RuntimeException(e);
      }
    }
    return tuple;
  }

  /**
   * Converts one Thrift field value to its Pig representation:
   * bool -> int (1/0), byte/i16 -> int, enum -> String, struct -> Tuple,
   * map -> Map with String keys, set/list -> DataBag; other types pass through.
   * Returns null for a null input.
   *
   * @param lazy when true, nested structs become LazyTuples
   */
  @SuppressWarnings("unchecked")
  static Object toPigObject(Field field, Object value, boolean lazy) {
    if (value == null) {
      return null;
    }
    switch (field.getType()) {
    case TType.BOOL:
      return Integer.valueOf((Boolean)value ? 1 : 0);
    case TType.BYTE :
      return Integer.valueOf((Byte)value);
    case TType.I16 :
      return Integer.valueOf((Short)value);
    case TType.STRING:
      return stringTypeToPig(value);
    case TType.STRUCT:
      if (lazy) {
        return new LazyTuple(field.gettStructDescriptor(), (TBase<?, ?>)value);
      } else {
        return toTuple(field.gettStructDescriptor(), (TBase<?, ?>)value);
      }
    case TType.MAP:
      return toPigMap(field, (Map<Object, Object>)value, lazy);
    case TType.SET:
      return toPigBag(field.getSetElemField(), (Collection<Object>)value, lazy);
    case TType.LIST:
      return toPigBag(field.getListElemField(), (Collection<Object>)value, lazy);
    case TType.ENUM:
      return value.toString();
    default:
      // standard types : I32, I64, DOUBLE, etc.
      return value;
    }
  }

  /**
   * TType.STRING is a mess in Thrift. It could be byte[], ByteBuffer,
   * or even a String!.
   * Returns null for any other runtime type.
   */
  private static Object stringTypeToPig(Object value) {
    if (value instanceof String) {
      return value;
    }
    if (value instanceof byte[]) {
      // copy so the DataByteArray does not alias the Thrift object's buffer
      byte[] buf = (byte[])value;
      return new DataByteArray(Arrays.copyOf(buf, buf.length));
    }
    if (value instanceof ByteBuffer) {
      // mark/reset leaves the buffer's position untouched for other readers
      ByteBuffer bin = (ByteBuffer)value;
      byte[] buf = new byte[bin.remaining()];
      bin.mark();
      bin.get(buf);
      bin.reset();
      return new DataByteArray(buf);
    }
    return null;
  }

  // Converts a Thrift map to a Pig map. Pig map keys must be Strings, so
  // keys are stringified with toString(); a collision after stringification
  // is treated as a hard error.
  private static Map<String, Object> toPigMap(Field field,
                                              Map<Object, Object> map,
                                              boolean lazy) {
    // PIG map's key always a String. just use toString() and hope
    // things would work out ok.
    HashMap<String, Object> out = new HashMap<String, Object>(map.size());
    Field valueField = field.getMapValueField();
    for(Entry<Object, Object> e : map.entrySet()) {
      Object prev = out.put(e.getKey().toString(),
                            toPigObject(valueField, e.getValue(), lazy));
      if (prev != null) {
        String msg = "Duplicate keys while converting to String while "
          + " processing map " + field.getName() + " (key type : "
          + field.getMapKeyField().getType() + " value type : "
          + field.getMapValueField().getType() + ")";
        LOG.warn(msg);
        throw new RuntimeException(msg);
      }
    }
    return out;
  }

  // Converts a Thrift set/list to a DataBag. Non-tuple elements are wrapped
  // in single-field tuples, since a DataBag may only contain Tuples.
  private static DataBag toPigBag(Field field,
                                  Collection<Object> values,
                                  boolean lazy) {
    List<Tuple> tuples = Lists.newArrayListWithExpectedSize(values.size());
    for(Object value : values) {
      Object pValue = toPigObject(field, value, lazy);
      if (pValue instanceof Tuple) { // DataBag should contain Tuples
        tuples.add((Tuple)pValue);
      } else {
        tuples.add(tupleFactory.newTuple(pValue));
      }
    }
    return bagFactory.newDefaultBag(tuples);
  }

  @SuppressWarnings("serial")
  /**
   * Delays serialization of Thrift fields until they are requested.
   */
  private static class LazyTuple extends AbstractLazyTuple {
    /* NOTE : This is only a partial optimization. The other part
     * is to avoid deserialization of the Thrift fields from the
     * binary buffer.
     *
     * Currently TDeserializer allows deserializing just one field,
     * pseudo-skipping over the fields before it.
     * But if we are going deserialize 5 fields out of 20, we will be
     * skipping over same set of fields multiple times. OTOH this might
     * still be better than a full deserialization.
     *
     * We need to write our own version of TBinaryProtocol that truly skips.
     * Even TDeserializer 'skips'/ignores only after deserializing fields.
     * (e.g. Strings, Integers, buffers etc).
     */
    private TBase<?, ?> tObject;
    private TStructDescriptor desc;

    LazyTuple(TStructDescriptor desc, TBase<?, ?> tObject) {
      initRealTuple(desc.getFields().size());
      this.tObject = tObject;
      this.desc = desc;
    }

    // Invoked by AbstractLazyTuple on first access to a field index.
    @Override
    protected Object getObjectAt(int index) {
      Field field = desc.getFieldAt(index);
      return toPigObject(field, desc.getFieldValue(index, tObject), true);
    }
  }

  /**
   * Returns Pig schema for the Thrift struct.
   */
  public static Schema toSchema(Class<? extends TBase<?, ?>> tClass) {
    return toSchema(TStructDescriptor.getInstance(tClass));
  }

  /** Returns Pig schema for this converter's Thrift struct. */
  public Schema toSchema() {
    return toSchema(structDesc);
  }

  /**
   * Builds a Pig {@link Schema} from the descriptor: nested structs become
   * TUPLE fields, everything else is delegated to singleFieldToFieldSchema().
   */
  public static Schema toSchema(TStructDescriptor tDesc) {
    Schema schema = new Schema();
    try {
      for(Field field : tDesc.getFields()) {
        String fieldName = field.getName();
        if (field.isStruct()) {
          schema.add(new FieldSchema(fieldName, toSchema(field.gettStructDescriptor()), DataType.TUPLE));
        } else {
          schema.add(singleFieldToFieldSchema(fieldName, field));
        }
      }
    } catch (FrontendException t) {
      throw new RuntimeException(t);
    }
    return schema;
  }

  //TODO we should probably implement better naming, the current system is pretty nonsensical now
  // Maps one non-struct Thrift field to a Pig FieldSchema: list/set -> BAG of
  // tuples, map -> MAP (untyped; non-string keys get a warning), rest atomic.
  private static FieldSchema singleFieldToFieldSchema(String fieldName, Field field) throws FrontendException {
    switch (field.getType()) {
      case TType.LIST:
        return new FieldSchema(fieldName, singleFieldToTupleSchema(fieldName + "_tuple", field.getListElemField()), DataType.BAG);
      case TType.SET:
        return new FieldSchema(fieldName, singleFieldToTupleSchema(fieldName + "_tuple", field.getSetElemField()), DataType.BAG);
      case TType.MAP:
        // can not specify types for maps in Pig.
        if (field.getMapKeyField().getType() != TType.STRING
            && field.getMapKeyField().getType() != TType.ENUM) {
          LOG.warn("Using a map with non-string key for field " + field.getName()
              + ". while converting to PIG Tuple, toString() is used for the key."
              + " It could result in incorrect maps.");
        }
        return new FieldSchema(fieldName, null, DataType.MAP);
      default:
        return new FieldSchema(fieldName, null, getPigDataType(field));
    }
  }

  /**
   * A helper function which wraps a Schema in a tuple (for Pig bags) if our version of pig makes it necessary
   */
  private static Schema wrapInTupleIfPig9(Schema schema) throws FrontendException {
    if (PigUtil.Pig9orNewer) {
      return new Schema(new FieldSchema("t",schema,DataType.TUPLE));
    } else {
      return schema;
    }
  }

  /**
   * Returns a schema with single tuple (for Pig bags).
   */
  private static Schema singleFieldToTupleSchema(String fieldName, Field field) throws FrontendException {
    switch (field.getType()) {
      case TType.STRUCT:
        return wrapInTupleIfPig9(toSchema(field.gettStructDescriptor()));
      case TType.LIST:
        return wrapInTupleIfPig9(new Schema(singleFieldToFieldSchema(fieldName, field)));
      case TType.SET:
        return wrapInTupleIfPig9(new Schema(singleFieldToFieldSchema(fieldName, field)));
      default:
        return wrapInTupleIfPig9(new Schema(new FieldSchema(fieldName, null, getPigDataType(field))));
    }
  }

  // Maps an atomic Thrift type to its Pig DataType constant; mirrors the
  // value conversions in toPigObject() (bool/byte/i16/i32 -> int, etc.).
  private static byte getPigDataType(Field field) {
    switch (field.getType()) {
      case TType.BOOL:
      case TType.BYTE:
      case TType.I16:
      case TType.I32:
        return DataType.INTEGER;
      case TType.ENUM:
        return DataType.CHARARRAY;
      case TType.I64:
        return DataType.LONG;
      case TType.DOUBLE:
        return DataType.DOUBLE;
      case TType.STRING:
        return field.isBuffer() ? DataType.BYTEARRAY : DataType.CHARARRAY;
      default:
        throw new IllegalArgumentException("Unexpected type where a simple type is expected : " + field.getType());
    }
  }

  /**
   * Turn a Thrift Struct into a loading schema for a pig script.
   */
  public static String toPigScript(Class<? extends TBase<?, ?>> thriftClass,
                                   Class<? extends LoadFunc> pigLoader) {
    StringBuilder sb = new StringBuilder();
    /* we are commenting out explicit schema specification. The schema is
     * included mainly to help the readers of the pig script. Pig learns the
     * schema directly from the loader.
     * If explicit schema is not commented, we might have surprising results
     * when a Thrift class (possibly in control of another team) changes,
     * but the Pig script is not updated. Commenting it out avoids this.
     */
    StringBuilder prefix = new StringBuilder("       -- ");
    sb.append("raw_data = load '$INPUT_FILES' using ")
      .append(pigLoader.getName())
      .append("('")
      .append(thriftClass.getName())
      .append("');\n")
      .append(prefix)
      .append("as ");
    prefix.append("   ");
    try {
      stringifySchema(sb, toSchema(thriftClass), DataType.TUPLE, prefix);
    } catch (FrontendException e) {
      throw new RuntimeException(e);
    }
    sb.append("\n");
    return sb.toString();
  }

  /**
   * Print formatted schema. This is a modified version of
   * {@link Schema#stringifySchema(StringBuilder, Schema, byte)}
   * with support for (indented) pretty printing.
   */
  // This is used for building up output string
  // type can only be BAG or TUPLE
  public static void stringifySchema(StringBuilder sb,
                                     Schema schema,
                                     byte type,
                                     StringBuilder prefix)
                                        throws FrontendException{
    // this is a modified version of {@link Schema#stringifySchema(StringBuilder, Schema, byte)}
    if (type == DataType.TUPLE) {
      sb.append("(") ;
    }
    else if (type == DataType.BAG) {
      sb.append("{") ;
    }
    // deepen the indent for the fields of this tuple/bag
    prefix.append("  ");
    sb.append("\n").append(prefix);

    if (schema == null) {
      sb.append("null") ;
    }
    else {
      boolean isFirst = true ;
      for (int i=0; i< schema.size() ;i++) {

        if (!isFirst) {
          sb.append(",\n").append(prefix);
        }
        else {
          isFirst = false ;
        }

        FieldSchema fs = schema.getField(i) ;

        if(fs == null) {
          sb.append("null");
          continue;
        }

        if (fs.alias != null) {
          sb.append(fs.alias);
          sb.append(": ");
        }

        if (DataType.isAtomic(fs.type)) {
          sb.append(DataType.findTypeName(fs.type)) ;
        }
        else if ( (fs.type == DataType.TUPLE) ||
                  (fs.type == DataType.BAG) ) {
          // safety net
          if (schema != fs.schema) {
            stringifySchema(sb, fs.schema, fs.type, prefix) ;
          }
          else {
            throw new AssertionError("Schema refers to itself "
                                     + "as inner schema") ;
          }
        } else if (fs.type == DataType.MAP) {
          sb.append(DataType.findTypeName(fs.type) + "[ ]") ;
        } else {
          sb.append(DataType.findTypeName(fs.type)) ;
        }
      }
    }
    // restore the caller's indent before closing the bracket
    prefix.setLength(prefix.length()-2);
    sb.append("\n").append(prefix);

    if (type == DataType.TUPLE) {
      sb.append(")") ;
    }
    else if (type == DataType.BAG) {
      sb.append("}") ;
    }
  }

  // CLI helper: prints the Pig schema and a sample load script for the
  // Thrift class named in args[0].
  public static void main(String[] args) throws Exception {
    if (args.length > 0) {
      Class<? extends TBase<?, ?>> tClass = ThriftUtils.getTypeRef(args[0]).getRawClass();
      System.out.println(args[0] + " : " + toSchema(tClass).toString());
      System.out.println(toPigScript(tClass, ThriftPigLoader.class));
    }
  }
}
| |
/*
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cloudfoundry.reactor.client.v2.shareddomains;
import org.cloudfoundry.client.v2.Metadata;
import org.cloudfoundry.client.v2.jobs.JobEntity;
import org.cloudfoundry.client.v2.shareddomains.CreateSharedDomainRequest;
import org.cloudfoundry.client.v2.shareddomains.CreateSharedDomainResponse;
import org.cloudfoundry.client.v2.shareddomains.DeleteSharedDomainRequest;
import org.cloudfoundry.client.v2.shareddomains.DeleteSharedDomainResponse;
import org.cloudfoundry.client.v2.shareddomains.GetSharedDomainRequest;
import org.cloudfoundry.client.v2.shareddomains.GetSharedDomainResponse;
import org.cloudfoundry.client.v2.shareddomains.ListSharedDomainsRequest;
import org.cloudfoundry.client.v2.shareddomains.ListSharedDomainsResponse;
import org.cloudfoundry.client.v2.shareddomains.SharedDomainEntity;
import org.cloudfoundry.client.v2.shareddomains.SharedDomainResource;
import org.cloudfoundry.reactor.InteractionContext;
import org.cloudfoundry.reactor.TestRequest;
import org.cloudfoundry.reactor.TestResponse;
import org.cloudfoundry.reactor.client.AbstractClientApiTest;
import org.junit.Test;
import reactor.test.StepVerifier;
import java.time.Duration;
import java.util.Collections;
import static io.netty.handler.codec.http.HttpMethod.DELETE;
import static io.netty.handler.codec.http.HttpMethod.GET;
import static io.netty.handler.codec.http.HttpMethod.POST;
import static io.netty.handler.codec.http.HttpResponseStatus.ACCEPTED;
import static io.netty.handler.codec.http.HttpResponseStatus.NO_CONTENT;
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
/**
 * Unit tests for {@code ReactorSharedDomains} against the Cloud Foundry v2
 * {@code /shared_domains} endpoints. Each test mocks a single HTTP
 * request/response pair (payloads under {@code fixtures/client/v2/shared_domains/})
 * and verifies the reactive client maps it to the expected response object.
 */
public final class ReactorSharedDomainsTest extends AbstractClientApiTest {
    private final ReactorSharedDomains sharedDomains = new ReactorSharedDomains(CONNECTION_CONTEXT, this.root, TOKEN_PROVIDER, Collections.emptyMap());
    // POST /shared_domains: creates a shared domain and maps the 200 payload.
    @Test
    public void create() {
        mockRequest(InteractionContext.builder()
            .request(TestRequest.builder()
                .method(POST).path("/shared_domains")
                .payload("fixtures/client/v2/shared_domains/POST_request.json")
                .build())
            .response(TestResponse.builder()
                .status(OK)
                .payload("fixtures/client/v2/shared_domains/POST_response.json")
                .build())
            .build());
        this.sharedDomains
            .create(CreateSharedDomainRequest.builder()
                .name("shared-domain.com")
                .routerGroupId("random-guid")
                .build())
            .as(StepVerifier::create)
            .expectNext(CreateSharedDomainResponse.builder()
                .metadata(Metadata.builder()
                    .id("d6c7d452-70bb-4edd-bbf1-a925dd51732c")
                    .url("/v2/shared_domains/d6c7d452-70bb-4edd-bbf1-a925dd51732c")
                    .createdAt("2016-04-22T19:33:17Z")
                    .build())
                .entity(SharedDomainEntity.builder()
                    .name("example.com")
                    .routerGroupId("my-random-guid")
                    .routerGroupType("tcp")
                    .build())
                .build())
            .expectComplete()
            .verify(Duration.ofSeconds(5));
    }
    // DELETE /shared_domains/{id}: synchronous delete returns 204 and no body.
    @Test
    public void delete() {
        mockRequest(InteractionContext.builder()
            .request(TestRequest.builder()
                .method(DELETE).path("/shared_domains/fa1385de-55ba-41d3-beb2-f83919c634d6")
                .build())
            .response(TestResponse.builder()
                .status(NO_CONTENT)
                .build())
            .build());
        this.sharedDomains
            .delete(DeleteSharedDomainRequest.builder()
                .sharedDomainId("fa1385de-55ba-41d3-beb2-f83919c634d6")
                .build())
            .as(StepVerifier::create)
            .expectComplete()
            .verify(Duration.ofSeconds(5));
    }
    // DELETE /shared_domains/{id}?async=true: async delete returns 202 with a job entity.
    @Test
    public void deleteAsync() {
        mockRequest(InteractionContext.builder()
            .request(TestRequest.builder()
                .method(DELETE).path("/shared_domains/fa1385de-55ba-41d3-beb2-f83919c634d6?async=true")
                .build())
            .response(TestResponse.builder()
                .status(ACCEPTED)
                .payload("fixtures/client/v2/shared_domains/DELETE_{id}_async_response.json")
                .build())
            .build());
        this.sharedDomains
            .delete(DeleteSharedDomainRequest.builder()
                .async(true)
                .sharedDomainId("fa1385de-55ba-41d3-beb2-f83919c634d6")
                .build())
            .as(StepVerifier::create)
            .expectNext(DeleteSharedDomainResponse.builder()
                .metadata(Metadata.builder()
                    .id("2d9707ba-6f0b-4aef-a3de-fe9bdcf0c9d1")
                    .createdAt("2016-02-02T17:16:31Z")
                    .url("/v2/jobs/2d9707ba-6f0b-4aef-a3de-fe9bdcf0c9d1")
                    .build())
                .entity(JobEntity.builder()
                    .id("2d9707ba-6f0b-4aef-a3de-fe9bdcf0c9d1")
                    .status("queued")
                    .build())
                .build())
            .expectComplete()
            .verify(Duration.ofSeconds(5));
    }
    // GET /shared_domains?page=-1: list maps a five-resource page payload.
    @Test
    public void listSharedDomains() {
        mockRequest(InteractionContext.builder()
            .request(TestRequest.builder()
                .method(GET).path("/shared_domains?page=-1")
                .build())
            .response(TestResponse.builder()
                .status(OK)
                .payload("fixtures/client/v2/shared_domains/GET_response.json")
                .build())
            .build());
        this.sharedDomains
            .list(ListSharedDomainsRequest.builder()
                .page(-1)
                .build())
            .as(StepVerifier::create)
            .expectNext(ListSharedDomainsResponse.builder()
                .totalResults(5)
                .totalPages(1)
                .resource(SharedDomainResource.builder()
                    .metadata(Metadata.builder()
                        .id("f01b174d-c750-46b0-9ddf-3aeb2064d796")
                        .url("/v2/shared_domains/f01b174d-c750-46b0-9ddf-3aeb2064d796")
                        .createdAt("2015-11-30T23:38:35Z")
                        .build())
                    .entity(SharedDomainEntity.builder()
                        .name("customer-app-domain1.com")
                        .build())
                    .build())
                .resource(SharedDomainResource.builder()
                    .metadata(Metadata.builder()
                        .id("3595f6cb-81cf-424e-a546-533877ccccfd")
                        .url("/v2/shared_domains/3595f6cb-81cf-424e-a546-533877ccccfd")
                        .createdAt("2015-11-30T23:38:35Z")
                        .build())
                    .entity(SharedDomainEntity.builder()
                        .name("customer-app-domain2.com")
                        .build())
                    .build())
                .resource(SharedDomainResource.builder()
                    .metadata(Metadata.builder()
                        .id("d0d28c59-86ee-4415-9269-500976f18e72")
                        .url("/v2/shared_domains/d0d28c59-86ee-4415-9269-500976f18e72")
                        .createdAt("2015-11-30T23:38:35Z")
                        .build())
                    .entity(SharedDomainEntity.builder()
                        .name("domain-19.example.com")
                        .build())
                    .build())
                .resource(SharedDomainResource.builder()
                    .metadata(Metadata.builder()
                        .id("b7242cdb-f81a-4469-b897-d5a218470fdf")
                        .url("/v2/shared_domains/b7242cdb-f81a-4469-b897-d5a218470fdf")
                        .createdAt("2015-11-30T23:38:35Z")
                        .build())
                    .entity(SharedDomainEntity.builder()
                        .name("domain-20.example.com")
                        .build())
                    .build())
                .resource(SharedDomainResource.builder()
                    .metadata(Metadata.builder()
                        .id("130c193c-c1c6-41c9-98c2-4a0e16a948bf")
                        .url("/v2/shared_domains/130c193c-c1c6-41c9-98c2-4a0e16a948bf")
                        .createdAt("2015-11-30T23:38:35Z")
                        .build())
                    .entity(SharedDomainEntity.builder()
                        .name("domain-21.example.com")
                        .build())
                    .build())
                .build())
            .expectComplete()
            .verify(Duration.ofSeconds(5));
    }
    // NOTE(review): plain JUnit 4 does not execute @Test methods inside a
    // nested class unless the outer class runs with the Enclosed runner —
    // confirm this Get test actually runs in the build.
    public static final class Get extends AbstractClientApiTest {
        private final ReactorSharedDomains sharedDomains = new ReactorSharedDomains(CONNECTION_CONTEXT, this.root, TOKEN_PROVIDER, Collections.emptyMap());
        // GET /shared_domains/{id}: fetch a single shared domain by id.
        @Test
        public void get() {
            mockRequest(interactionContext());
            this.sharedDomains
                .get(GetSharedDomainRequest.builder()
                    .sharedDomainId("fa1385de-55ba-41d3-beb2-f83919c634d6")
                    .build())
                .as(StepVerifier::create)
                .expectNext(GetSharedDomainResponse.builder()
                    .metadata(Metadata.builder()
                        .id("fa1385de-55ba-41d3-beb2-f83919c634d6")
                        .url("/v2/shared_domains/fa1385de-55ba-41d3-beb2-f83919c634d6")
                        .createdAt("2016-06-08T16:41:33Z")
                        .build())
                    .entity(SharedDomainEntity.builder()
                        .name("customer-app-domain1.com")
                        .build())
                    .build())
                .expectComplete()
                .verify(Duration.ofSeconds(5));
        }
        // Builds the mocked GET request/response pair used by get().
        public InteractionContext interactionContext() {
            return InteractionContext.builder()
                .request(TestRequest.builder()
                    .method(GET).path("/shared_domains/fa1385de-55ba-41d3-beb2-f83919c634d6")
                    .build())
                .response(TestResponse.builder()
                    .status(OK)
                    .payload("fixtures/client/v2/shared_domains/GET_{id}_response.json")
                    .build())
                .build();
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/speech/v1beta1/cloud_speech.proto
package com.google.cloud.speech.v1beta1;
/**
* <pre>
* Describes the progress of a long-running `AsyncRecognize` call. It is
* included in the `metadata` field of the `Operation` returned by the
* `GetOperation` call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v1beta1.AsyncRecognizeMetadata}
*/
public final class AsyncRecognizeMetadata extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.cloud.speech.v1beta1.AsyncRecognizeMetadata)
AsyncRecognizeMetadataOrBuilder {
  // Use AsyncRecognizeMetadata.newBuilder() to construct.
  private AsyncRecognizeMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used for the default instance; timestamps stay null (absent).
  private AsyncRecognizeMetadata() {
    progressPercent_ = 0;
  }
  // Generated code for this proto version does not retain unknown fields
  // (the parsing constructor skips them), so this always returns the empty set.
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
  }
  // Wire-format parsing constructor (generated). Tags: 8 = progress_percent
  // (varint), 18 = start_time, 26 = last_update_time (length-delimited
  // google.protobuf.Timestamp messages).
  private AsyncRecognizeMetadata(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    int mutable_bitField0_ = 0;
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // tag 0 marks end of input
            done = true;
            break;
          default: {
            // unrecognized field: skip it; stop when it cannot be skipped
            if (!input.skipField(tag)) {
              done = true;
            }
            break;
          }
          case 8: {
            progressPercent_ = input.readInt32();
            break;
          }
          case 18: {
            // per proto merge semantics, a repeated occurrence of the field
            // is merged into any previously parsed start_time value
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (startTime_ != null) {
              subBuilder = startTime_.toBuilder();
            }
            startTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(startTime_);
              startTime_ = subBuilder.buildPartial();
            }
            break;
          }
          case 26: {
            // same merge handling for last_update_time
            com.google.protobuf.Timestamp.Builder subBuilder = null;
            if (lastUpdateTime_ != null) {
              subBuilder = lastUpdateTime_.toBuilder();
            }
            lastUpdateTime_ = input.readMessage(com.google.protobuf.Timestamp.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(lastUpdateTime_);
              lastUpdateTime_ = subBuilder.buildPartial();
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // attach the partially parsed message so callers can inspect it
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      makeExtensionsImmutable();
    }
  }
  // Protobuf reflection support: descriptor and field-accessor table for
  // this message type, backed by SpeechProto.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.cloud.speech.v1beta1.SpeechProto.internal_static_google_cloud_speech_v1beta1_AsyncRecognizeMetadata_descriptor;
  }
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.speech.v1beta1.SpeechProto.internal_static_google_cloud_speech_v1beta1_AsyncRecognizeMetadata_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata.class, com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata.Builder.class);
  }
  public static final int PROGRESS_PERCENT_FIELD_NUMBER = 1;
  // Backing field for progress_percent; 0 is the proto3 default ("unset").
  private int progressPercent_;
  /**
   * <pre>
   * Approximate percentage of audio processed thus far. Guaranteed to be 100
   * when the audio is fully processed and the results are available.
   * </pre>
   *
   * <code>int32 progress_percent = 1;</code>
   */
  public int getProgressPercent() {
    return progressPercent_;
  }
  public static final int START_TIME_FIELD_NUMBER = 2;
  // Backing field for start_time; null means the field is absent.
  private com.google.protobuf.Timestamp startTime_;
  /**
   * <pre>
   * Time when the request was received.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp start_time = 2;</code>
   */
  public boolean hasStartTime() {
    return startTime_ != null;
  }
  /**
   * <pre>
   * Time when the request was received.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp start_time = 2;</code>
   */
  public com.google.protobuf.Timestamp getStartTime() {
    // never returns null: absent field yields the default Timestamp instance
    return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
  }
  /**
   * <pre>
   * Time when the request was received.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp start_time = 2;</code>
   */
  public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {
    return getStartTime();
  }
  public static final int LAST_UPDATE_TIME_FIELD_NUMBER = 3;
  // Backing field for last_update_time; null means the field is absent.
  private com.google.protobuf.Timestamp lastUpdateTime_;
  /**
   * <pre>
   * Time of the most recent processing update.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
   */
  public boolean hasLastUpdateTime() {
    return lastUpdateTime_ != null;
  }
  /**
   * <pre>
   * Time of the most recent processing update.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
   */
  public com.google.protobuf.Timestamp getLastUpdateTime() {
    // never returns null: absent field yields the default Timestamp instance
    return lastUpdateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : lastUpdateTime_;
  }
  /**
   * <pre>
   * Time of the most recent processing update.
   * </pre>
   *
   * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
   */
  public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() {
    return getLastUpdateTime();
  }
  // Memoized initialization check: -1 = unknown, 0 = not initialized,
  // 1 = initialized. This message has no required fields, so it is
  // always initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes this message; per proto3, fields at their default value
  // (0 / absent message) are not written to the wire.
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    if (progressPercent_ != 0) {
      output.writeInt32(1, progressPercent_);
    }
    if (startTime_ != null) {
      output.writeMessage(2, getStartTime());
    }
    if (lastUpdateTime_ != null) {
      output.writeMessage(3, getLastUpdateTime());
    }
  }
  // Computes (and memoizes in memoizedSize) the serialized byte size,
  // mirroring the non-default-field logic in writeTo.
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (progressPercent_ != 0) {
      size += com.google.protobuf.CodedOutputStream
        .computeInt32Size(1, progressPercent_);
    }
    if (startTime_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(2, getStartTime());
    }
    if (lastUpdateTime_ != null) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(3, getLastUpdateTime());
    }
    memoizedSize = size;
    return size;
  }
  private static final long serialVersionUID = 0L;
  // Value equality: progress percent plus presence-and-value of each timestamp.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata)) {
      return super.equals(obj);
    }
    com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata other = (com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata) obj;
    boolean result = true;
    result = result && (getProgressPercent()
        == other.getProgressPercent());
    result = result && (hasStartTime() == other.hasStartTime());
    if (hasStartTime()) {
      result = result && getStartTime()
          .equals(other.getStartTime());
    }
    result = result && (hasLastUpdateTime() == other.hasLastUpdateTime());
    if (hasLastUpdateTime()) {
      result = result && getLastUpdateTime()
          .equals(other.getLastUpdateTime());
    }
    return result;
  }
  // Hash over the same fields equals() compares; memoized in memoizedHashCode
  // (0 means "not yet computed").
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PROGRESS_PERCENT_FIELD_NUMBER;
    hash = (53 * hash) + getProgressPercent();
    if (hasStartTime()) {
      hash = (37 * hash) + START_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getStartTime().hashCode();
    }
    if (hasLastUpdateTime()) {
      hash = (37 * hash) + LAST_UPDATE_TIME_FIELD_NUMBER;
      hash = (53 * hash) + getLastUpdateTime().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom/parseDelimitedFrom overloads, covering
  // ByteBuffer, ByteString, byte[], InputStream, and CodedInputStream
  // sources, each with and without an extension registry.
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Builder factory methods: fresh builders start from the default instance;
  // toBuilder() on the default instance returns an empty Builder rather than
  // merging from itself.
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Describes the progress of a long-running `AsyncRecognize` call. It is
* included in the `metadata` field of the `Operation` returned by the
* `GetOperation` call of the `google::longrunning::Operations` service.
* </pre>
*
* Protobuf type {@code google.cloud.speech.v1beta1.AsyncRecognizeMetadata}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.cloud.speech.v1beta1.AsyncRecognizeMetadata)
com.google.cloud.speech.v1beta1.AsyncRecognizeMetadataOrBuilder {
    // Builder-side reflection support, identical to the message's descriptor.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.cloud.speech.v1beta1.SpeechProto.internal_static_google_cloud_speech_v1beta1_AsyncRecognizeMetadata_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.speech.v1beta1.SpeechProto.internal_static_google_cloud_speech_v1beta1_AsyncRecognizeMetadata_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata.class, com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata.Builder.class);
    }
    // Construct using com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested field builders only when the runtime requires
    // it (alwaysUseFieldBuilders); no-op here since this message pre-creates none.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    // Resets all fields to their defaults and drops any nested field builders.
    public Builder clear() {
      super.clear();
      progressPercent_ = 0;
      if (startTimeBuilder_ == null) {
        startTime_ = null;
      } else {
        startTime_ = null;
        startTimeBuilder_ = null;
      }
      if (lastUpdateTimeBuilder_ == null) {
        lastUpdateTime_ = null;
      } else {
        lastUpdateTime_ = null;
        lastUpdateTimeBuilder_ = null;
      }
      return this;
    }
    // Descriptor and default instance for this builder's message type.
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.cloud.speech.v1beta1.SpeechProto.internal_static_google_cloud_speech_v1beta1_AsyncRecognizeMetadata_descriptor;
    }
    public com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata getDefaultInstanceForType() {
      return com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata.getDefaultInstance();
    }
    // build() enforces initialization; buildPartial() copies current builder
    // state (preferring nested builders over raw fields) into a new message.
    public com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata build() {
      com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    public com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata buildPartial() {
      com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata result = new com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata(this);
      result.progressPercent_ = progressPercent_;
      if (startTimeBuilder_ == null) {
        result.startTime_ = startTime_;
      } else {
        result.startTime_ = startTimeBuilder_.build();
      }
      if (lastUpdateTimeBuilder_ == null) {
        result.lastUpdateTime_ = lastUpdateTime_;
      } else {
        result.lastUpdateTime_ = lastUpdateTimeBuilder_.build();
      }
      onBuilt();
      return result;
    }
    // Boilerplate overrides narrowing the superclass return type to Builder.
    public Builder clone() {
      return (Builder) super.clone();
    }
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    // Dispatches to the typed merge when possible; otherwise falls back to
    // the reflective merge in the superclass.
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata) {
        return mergeFrom((com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: copies only non-default fields from `other`, per proto
    // merge semantics; merging the default instance is a no-op.
    public Builder mergeFrom(com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata other) {
      if (other == com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata.getDefaultInstance()) return this;
      if (other.getProgressPercent() != 0) {
        setProgressPercent(other.getProgressPercent());
      }
      if (other.hasStartTime()) {
        mergeStartTime(other.getStartTime());
      }
      if (other.hasLastUpdateTime()) {
        mergeLastUpdateTime(other.getLastUpdateTime());
      }
      onChanged();
      return this;
    }
    // No required fields, so a builder is always initialized.
    public final boolean isInitialized() {
      return true;
    }
    // Parses from a stream and merges the result in, even when parsing
    // failed part-way (the partial message is still merged in `finally`).
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Builder-side backing field for progress_percent (proto3 default 0).
    private int progressPercent_ ;
    /**
     * <pre>
     * Approximate percentage of audio processed thus far. Guaranteed to be 100
     * when the audio is fully processed and the results are available.
     * </pre>
     *
     * <code>int32 progress_percent = 1;</code>
     */
    public int getProgressPercent() {
      return progressPercent_;
    }
    /**
     * <pre>
     * Approximate percentage of audio processed thus far. Guaranteed to be 100
     * when the audio is fully processed and the results are available.
     * </pre>
     *
     * <code>int32 progress_percent = 1;</code>
     */
    public Builder setProgressPercent(int value) {
      progressPercent_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Approximate percentage of audio processed thus far. Guaranteed to be 100
     * when the audio is fully processed and the results are available.
     * </pre>
     *
     * <code>int32 progress_percent = 1;</code>
     */
    public Builder clearProgressPercent() {
      progressPercent_ = 0;
      onChanged();
      return this;
    }
    // start_time is held either as a raw message (startTime_) or, once a
    // nested builder is requested, via startTimeBuilder_ — never both.
    private com.google.protobuf.Timestamp startTime_ = null;
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> startTimeBuilder_;
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    public boolean hasStartTime() {
      return startTimeBuilder_ != null || startTime_ != null;
    }
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    public com.google.protobuf.Timestamp getStartTime() {
      if (startTimeBuilder_ == null) {
        return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
      } else {
        return startTimeBuilder_.getMessage();
      }
    }
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    public Builder setStartTime(com.google.protobuf.Timestamp value) {
      if (startTimeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        startTime_ = value;
        onChanged();
      } else {
        startTimeBuilder_.setMessage(value);
      }
      return this;
    }
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    public Builder setStartTime(
        com.google.protobuf.Timestamp.Builder builderForValue) {
      if (startTimeBuilder_ == null) {
        startTime_ = builderForValue.build();
        onChanged();
      } else {
        startTimeBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    public Builder mergeStartTime(com.google.protobuf.Timestamp value) {
      if (startTimeBuilder_ == null) {
        if (startTime_ != null) {
          // merge into the existing value per proto message-merge semantics
          startTime_ =
            com.google.protobuf.Timestamp.newBuilder(startTime_).mergeFrom(value).buildPartial();
        } else {
          startTime_ = value;
        }
        onChanged();
      } else {
        startTimeBuilder_.mergeFrom(value);
      }
      return this;
    }
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    public Builder clearStartTime() {
      if (startTimeBuilder_ == null) {
        startTime_ = null;
        onChanged();
      } else {
        startTime_ = null;
        startTimeBuilder_ = null;
      }
      return this;
    }
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() {
      onChanged();
      return getStartTimeFieldBuilder().getBuilder();
    }
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() {
      if (startTimeBuilder_ != null) {
        return startTimeBuilder_.getMessageOrBuilder();
      } else {
        return startTime_ == null ?
            com.google.protobuf.Timestamp.getDefaultInstance() : startTime_;
      }
    }
    /**
     * <pre>
     * Time when the request was received.
     * </pre>
     *
     * <code>.google.protobuf.Timestamp start_time = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>
        getStartTimeFieldBuilder() {
      if (startTimeBuilder_ == null) {
        // lazily create the nested builder and hand the raw field over to it
        startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>(
                getStartTime(),
                getParentForChildren(),
                isClean());
        startTime_ = null;
      }
      return startTimeBuilder_;
    }
private com.google.protobuf.Timestamp lastUpdateTime_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> lastUpdateTimeBuilder_;
/**
* <pre>
* Time of the most recent processing update.
* </pre>
*
* <code>.google.protobuf.Timestamp last_update_time = 3;</code>
*/
public boolean hasLastUpdateTime() {
return lastUpdateTimeBuilder_ != null || lastUpdateTime_ != null;
}
/**
* <pre>
* Time of the most recent processing update.
* </pre>
*
* <code>.google.protobuf.Timestamp last_update_time = 3;</code>
*/
public com.google.protobuf.Timestamp getLastUpdateTime() {
if (lastUpdateTimeBuilder_ == null) {
return lastUpdateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : lastUpdateTime_;
} else {
return lastUpdateTimeBuilder_.getMessage();
}
}
/**
* <pre>
* Time of the most recent processing update.
* </pre>
*
* <code>.google.protobuf.Timestamp last_update_time = 3;</code>
*/
public Builder setLastUpdateTime(com.google.protobuf.Timestamp value) {
if (lastUpdateTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
lastUpdateTime_ = value;
onChanged();
} else {
lastUpdateTimeBuilder_.setMessage(value);
}
return this;
}
/**
 * <pre>
 * Time of the most recent processing update.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
 *
 * @param builderForValue builder whose {@code build()} result becomes the
 *     new field value
 * @return this builder, for chaining
 */
public Builder setLastUpdateTime(
com.google.protobuf.Timestamp.Builder builderForValue) {
if (lastUpdateTimeBuilder_ == null) {
lastUpdateTime_ = builderForValue.build();
onChanged();
} else {
lastUpdateTimeBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
 * <pre>
 * Time of the most recent processing update.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
 *
 * @param value the message to merge into the current value; if the field is
 *     currently unset, {@code value} simply becomes the field value
 * @return this builder, for chaining
 */
public Builder mergeLastUpdateTime(com.google.protobuf.Timestamp value) {
if (lastUpdateTimeBuilder_ == null) {
if (lastUpdateTime_ != null) {
// Merge field-by-field via a fresh builder seeded with the old value.
lastUpdateTime_ =
com.google.protobuf.Timestamp.newBuilder(lastUpdateTime_).mergeFrom(value).buildPartial();
} else {
lastUpdateTime_ = value;
}
onChanged();
} else {
lastUpdateTimeBuilder_.mergeFrom(value);
}
return this;
}
/**
 * <pre>
 * Time of the most recent processing update.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
 *
 * @return this builder, for chaining
 */
public Builder clearLastUpdateTime() {
if (lastUpdateTimeBuilder_ == null) {
lastUpdateTime_ = null;
onChanged();
} else {
// Discard the field builder as well so hasLastUpdateTime() reports false.
lastUpdateTime_ = null;
lastUpdateTimeBuilder_ = null;
}
return this;
}
/**
 * <pre>
 * Time of the most recent processing update.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
 *
 * @return a mutable builder for the field; forces creation of the lazy field
 *     builder and marks this message builder as changed
 */
public com.google.protobuf.Timestamp.Builder getLastUpdateTimeBuilder() {
onChanged();
return getLastUpdateTimeFieldBuilder().getBuilder();
}
/**
 * <pre>
 * Time of the most recent processing update.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
 *
 * @return a read-only view of the field without forcing builder creation;
 *     the Timestamp default instance when unset
 */
public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() {
if (lastUpdateTimeBuilder_ != null) {
return lastUpdateTimeBuilder_.getMessageOrBuilder();
} else {
return lastUpdateTime_ == null ?
com.google.protobuf.Timestamp.getDefaultInstance() : lastUpdateTime_;
}
}
/**
 * <pre>
 * Time of the most recent processing update.
 * </pre>
 *
 * <code>.google.protobuf.Timestamp last_update_time = 3;</code>
 */
// Lazily creates the single-field builder for last_update_time. Mirrors
// getStartTimeFieldBuilder(): after creation the builder owns the value and
// the direct lastUpdateTime_ reference is dropped.
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>
getLastUpdateTimeFieldBuilder() {
if (lastUpdateTimeBuilder_ == null) {
lastUpdateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>(
getLastUpdateTime(),
getParentForChildren(),
isClean());
lastUpdateTime_ = null;
}
return lastUpdateTimeBuilder_;
}
// Intentional no-op: this generated builder discards unknown fields
// (the argument is ignored and nothing is stored).
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// Intentional no-op: unknown fields are not retained by this builder.
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return this;
}
// @@protoc_insertion_point(builder_scope:google.cloud.speech.v1beta1.AsyncRecognizeMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.speech.v1beta1.AsyncRecognizeMetadata)
// Singleton default instance (all fields unset), created eagerly at class
// initialization time.
private static final com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata();
}
// Returns the shared immutable default instance of AsyncRecognizeMetadata.
public static com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser; delegates to the stream-based message constructor.
private static final com.google.protobuf.Parser<AsyncRecognizeMetadata>
PARSER = new com.google.protobuf.AbstractParser<AsyncRecognizeMetadata>() {
public AsyncRecognizeMetadata parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new AsyncRecognizeMetadata(input, extensionRegistry);
}
};
// Static accessor for the shared PARSER instance.
public static com.google.protobuf.Parser<AsyncRecognizeMetadata> parser() {
return PARSER;
}
// Instance-level accessor required by the Message interface; returns the
// same shared PARSER as parser().
@java.lang.Override
public com.google.protobuf.Parser<AsyncRecognizeMetadata> getParserForType() {
return PARSER;
}
// Instance-level accessor for the shared default instance.
public com.google.cloud.speech.v1beta1.AsyncRecognizeMetadata getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2014 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.refactoring;
import com.google.javascript.jscomp.NodeUtil;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSDocInfo.Visibility;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.jstype.JSType;
import com.google.javascript.rhino.jstype.JSTypeNative;
/**
 * Class that contains common Matchers that are useful to everyone.
 *
 * @author mknichel@google.com (Mark Knichel)
 */
public final class Matchers {

  // TODO(mknichel): Make sure all this code works with goog.scope.

  /**
   * Returns a Matcher that matches every node.
   */
  public static Matcher anything() {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        return true;
      }
    };
  }

  /**
   * Returns a Matcher that returns true only if all of the provided
   * matchers match.
   */
  public static Matcher allOf(final Matcher... matchers) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        for (Matcher m : matchers) {
          if (!m.matches(node, metadata)) {
            return false;
          }
        }
        return true;
      }
    };
  }

  /**
   * Returns a Matcher that returns true if any of the provided matchers match.
   */
  public static Matcher anyOf(final Matcher... matchers) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        for (Matcher m : matchers) {
          if (m.matches(node, metadata)) {
            return true;
          }
        }
        return false;
      }
    };
  }

  /**
   * Returns a Matcher that matches the opposite of the provided matcher.
   */
  public static Matcher not(final Matcher matcher) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        return !matcher.matches(node, metadata);
      }
    };
  }

  /**
   * Returns a matcher that matches any constructor definitions.
   */
  public static Matcher constructor() {
    return constructor(null);
  }

  /**
   * Returns a matcher that matches constructor definitions of the specified
   * name.
   * @param name The name of the class constructor to match, or null to match
   *     any constructor.
   */
  public static Matcher constructor(final String name) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        JSDocInfo info = node.getJSDocInfo();
        if (info != null && info.isConstructor()) {
          // TODO(mknichel): Make sure this works with the following cases:
          // ns = {
          //   /** @constructor */
          //   name: function() {}
          // }
          if (name == null) {
            return true;
          }
          Node firstChild = node.getFirstChild();
          // Leaf nodes have no first child; without this guard a childless
          // node carrying @constructor JSDoc caused a NullPointerException.
          if (firstChild == null) {
            return false;
          }
          if ((firstChild.isGetProp() || firstChild.isName())
              && firstChild.matchesQualifiedName(name)) {
            return true;
          }
        }
        return false;
      }
    };
  }

  /**
   * Returns a Matcher that matches constructing new objects. This will match
   * the NEW node of the JS Compiler AST.
   */
  public static Matcher newClass() {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        return node.isNew();
      }
    };
  }

  /**
   * Returns a Matcher that matches constructing objects of the provided class
   * name. This will match the NEW node of the JS Compiler AST.
   * @param className The name of the class to return matching NEW nodes.
   */
  public static Matcher newClass(final String className) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        if (!node.isNew()) {
          return false;
        }
        JSType providedJsType = getJsType(metadata, className);
        if (providedJsType == null) {
          return false;
        }
        JSType jsType = node.getJSType();
        if (jsType == null) {
          return false;
        }
        jsType = jsType.restrictByNotNullOrUndefined();
        return areTypesEquivalentIgnoringGenerics(jsType, providedJsType);
      }
    };
  }

  /**
   * Returns a Matcher that matches any function call.
   */
  public static Matcher functionCall() {
    return functionCall(null);
  }

  /**
   * Returns a Matcher that matches any function call that has the given
   * number of arguments.
   */
  public static Matcher functionCallWithNumArgs(final int numArgs) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        // A CALL node's first child is the callee, so argument count is
        // child count minus one.
        return node.isCall() && (node.getChildCount() - 1) == numArgs;
      }
    };
  }

  /**
   * Returns a Matcher that matches any function call that has the given
   * number of arguments and the given name.
   * @param name The name of the function to match. For non-static functions,
   *     this must be the fully qualified name that includes the type of the
   *     object. For instance: {@code ns.AppContext.prototype.get} will match
   *     {@code appContext.get} and {@code this.get} when called from the
   *     AppContext class.
   */
  public static Matcher functionCallWithNumArgs(final String name, final int numArgs) {
    return allOf(functionCallWithNumArgs(numArgs), functionCall(name));
  }

  /**
   * Returns a Matcher that matches all nodes that are function calls that match
   * the provided name.
   * @param name The name of the function to match. For non-static functions,
   *     this must be the fully qualified name that includes the type of the
   *     object. For instance: {@code ns.AppContext.prototype.get} will match
   *     {@code appContext.get} and {@code this.get} when called from the
   *     AppContext class.
   */
  public static Matcher functionCall(final String name) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        // TODO(mknichel): Handle the case when functions are applied through .call or .apply.
        return node.isCall() && propertyAccess(name).matches(node.getFirstChild(), metadata);
      }
    };
  }

  /**
   * Returns a Matcher that matches a {@code goog.require} call for the given
   * namespace.
   * @param namespace The namespace string that must appear as the sole
   *     argument of the call.
   */
  public static Matcher googRequire(final String namespace) {
    return new Matcher() {
      @Override
      public boolean matches(Node node, NodeMetadata metadata) {
        if (!functionCall("goog.require").matches(node, metadata)) {
          return false;
        }
        // A bare goog.require() call has no argument node; without this guard
        // the matcher threw a NullPointerException instead of returning false.
        Node arg = node.getSecondChild();
        return arg != null
            && arg.isString()
            && arg.getString().equals(namespace);
      }
    };
  }

  /**
   * Returns a Matcher that matches any {@code goog.require} call.
   */
  public static Matcher googRequire() {
    return functionCall("goog.require");
  }

  /**
   * Returns a Matcher that matches any {@code goog.module} or
   * {@code goog.provide} call.
   */
  public static Matcher googModuleOrProvide() {
    return anyOf(functionCall("goog.module"), functionCall("goog.provide"));
  }

  /**
   * Returns a Matcher that matches any property access.
   */
  public static Matcher propertyAccess() {
    return propertyAccess(null);
  }

  /**
   * Returns a Matcher that matches nodes representing a GETPROP access of
   * an object property.
   * @param name The name of the property to match, or null to match any
   *     property access. For non-static properties, this must be the fully
   *     qualified name that includes the type of the object. For instance:
   *     {@code ns.AppContext.prototype.root} will match
   *     {@code appContext.root} and {@code this.root} when accessed
   *     from the AppContext.
   */
  public static Matcher propertyAccess(final String name) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        if (node.isGetProp()) {
          if (name == null) {
            return true;
          }
          if (name.equals(node.getQualifiedName())) {
            return true;
          } else if (name.contains(".prototype.")) {
            return matchesPrototypeInstanceVar(node, metadata, name);
          }
        }
        return false;
      }
    };
  }

  /**
   * Returns a Matcher that matches definitions of any enum.
   */
  public static Matcher enumDefinition() {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        JSType jsType = node.getJSType();
        return jsType != null && jsType.isEnumType();
      }
    };
  }

  /**
   * Returns a Matcher that matches definitions of an enum of the given type.
   */
  public static Matcher enumDefinitionOfType(final String type) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        JSType providedJsType = getJsType(metadata, type);
        if (providedJsType == null) {
          return false;
        }
        providedJsType = providedJsType.restrictByNotNullOrUndefined();
        JSType jsType = node.getJSType();
        return jsType != null && jsType.isEnumType() && providedJsType.isEquivalentTo(
            jsType.toMaybeEnumType().getElementsType().getPrimitiveType());
      }
    };
  }

  /**
   * Returns a Matcher that matches an ASSIGN node where the RHS of the assignment matches the given
   * rhsMatcher.
   */
  public static Matcher assignmentWithRhs(final Matcher rhsMatcher) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        return node.isAssign() && rhsMatcher.matches(node.getLastChild(), metadata);
      }
    };
  }

  /**
   * Returns a Matcher that matches a declaration of a variable on the
   * prototype of a class.
   */
  public static Matcher prototypeVariableDeclaration() {
    return matcherForPrototypeDeclaration(false /* requireFunctionType */);
  }

  /**
   * Returns a Matcher that matches a declaration of a method on the
   * prototype of a class.
   */
  public static Matcher prototypeMethodDeclaration() {
    return matcherForPrototypeDeclaration(true /* requireFunctionType */);
  }

  /**
   * Returns a Matcher that matches nodes that contain JS Doc that specify the
   * {@code @type} annotation equivalent to the provided type.
   */
  public static Matcher jsDocType(final String type) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        JSType providedJsType = getJsType(metadata, type);
        if (providedJsType == null) {
          return false;
        }
        providedJsType = providedJsType.restrictByNotNullOrUndefined();
        // The JSDoc for a var declaration is on the VAR node, but the type only
        // exists on the NAME node.
        // NOTE(review): assumes node has a parent (true for nodes attached to
        // an AST); a detached node would NPE here — confirm with callers.
        // TODO(mknichel): Make NodeUtil.getBestJSDoc public and use that.
        JSDocInfo jsDoc = node.getParent().isVar()
            ? node.getParent().getJSDocInfo() : node.getJSDocInfo();
        JSType jsType = node.getJSType();
        return jsDoc != null && jsDoc.hasType() && jsType != null
            && providedJsType.isEquivalentTo(jsType.restrictByNotNullOrUndefined());
      }
    };
  }

  /**
   * Returns a Matcher that matches against properties that are declared in the constructor.
   */
  public static Matcher constructorPropertyDeclaration() {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        // This will match against code that looks like:
        // /** @constructor */
        // function constructor() {
        //   this.variable = 3;
        // }
        if (!node.isAssign()
            || !node.getFirstChild().isGetProp()
            || !node.getFirstFirstChild().isThis()) {
          return false;
        }
        // Walk up to the enclosing function and check whether it is annotated
        // as a constructor.
        while (node != null && !node.isFunction()) {
          node = node.getParent();
        }
        if (node != null && node.isFunction()) {
          JSDocInfo jsDoc = NodeUtil.getBestJSDocInfo(node);
          if (jsDoc != null) {
            return jsDoc.isConstructor();
          }
        }
        return false;
      }
    };
  }

  /**
   * Returns a Matcher that matches against nodes that are declared {@code @private}.
   */
  public static Matcher isPrivate() {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        JSDocInfo jsDoc = NodeUtil.getBestJSDocInfo(node);
        if (jsDoc != null) {
          return jsDoc.getVisibility() == Visibility.PRIVATE;
        }
        return false;
      }
    };
  }

  /** Resolves a type name against the compiler's type registry; may return null. */
  private static JSType getJsType(NodeMetadata metadata, String type) {
    return metadata.getCompiler().getTypeRegistry().getType(type);
  }

  /** Resolves a native type against the compiler's type registry. */
  private static JSType getJsType(NodeMetadata metadata, JSTypeNative nativeType) {
    return metadata.getCompiler().getTypeRegistry().getNativeType(nativeType);
  }

  /**
   * Returns true if the two types are equivalent, additionally treating a
   * templatized type as equivalent to its raw referenced type.
   */
  private static boolean areTypesEquivalentIgnoringGenerics(JSType a, JSType b) {
    boolean equivalent = a.isEquivalentTo(b);
    if (equivalent) {
      return true;
    }
    if (a.isTemplatizedType()) {
      return a.toMaybeTemplatizedType().getReferencedType().isEquivalentTo(b);
    }
    return false;
  }

  /**
   * Checks to see if the node represents an access of an instance variable
   * on an object given a prototype declaration of an object. For instance,
   * {@code ns.AppContext.prototype.get} will match {@code appContext.get}
   * or {@code this.get} when accessed from within the AppContext object.
   */
  private static boolean matchesPrototypeInstanceVar(Node node, NodeMetadata metadata,
      String name) {
    // Split on the literal ".prototype." infix. Note: String#split takes a
    // regular expression, so the previous name.split(".prototype.") treated
    // each '.' as a wildcard and could split at unintended positions (and
    // could throw if nothing followed the infix); indexOf avoids both.
    int prototypeIndex = name.indexOf(".prototype.");
    if (prototypeIndex < 0) {
      return false;
    }
    String className = name.substring(0, prototypeIndex);
    String propertyName = name.substring(prototypeIndex + ".prototype.".length());
    JSType providedJsType = getJsType(metadata, className);
    if (providedJsType == null) {
      return false;
    }
    JSType jsType = null;
    if (node.hasChildren()) {
      jsType = node.getFirstChild().getJSType();
    }
    if (jsType == null) {
      return false;
    }
    jsType = jsType.restrictByNotNullOrUndefined();
    if (!jsType.isUnknownType()
        && !jsType.isAllType()
        && jsType.isSubtype(providedJsType)) {
      if (node.isName() && propertyName.equals(node.getString())) {
        return true;
      } else if (node.isGetProp()
          && propertyName.equals(node.getLastChild().getString())) {
        return true;
      }
    }
    return false;
  }

  /**
   * Builds the matcher shared by {@link #prototypeVariableDeclaration} and
   * {@link #prototypeMethodDeclaration}.
   * @param requireFunctionType when true, only function-typed prototype
   *     properties match (methods); when false, only non-function-typed ones
   *     do (variables).
   */
  private static Matcher matcherForPrototypeDeclaration(final boolean requireFunctionType) {
    return new Matcher() {
      @Override public boolean matches(Node node, NodeMetadata metadata) {
        // TODO(mknichel): Figure out which node is the best to return for this
        // function: the GETPROP node, or the ASSIGN node when the property is
        // being assigned to.
        // TODO(mknichel): Support matching:
        // foo.prototype = {
        //   bar: 1
        // };
        Node firstChild = node.getFirstChild();
        if (node.isGetProp() && firstChild.isGetProp()
            && firstChild.getLastChild().isString()
            && "prototype".equals(firstChild.getLastChild().getString())) {
          JSType fnJsType = getJsType(metadata, JSTypeNative.FUNCTION_FUNCTION_TYPE);
          JSType jsType = node.getJSType();
          if (jsType == null) {
            return false;
          } else if (requireFunctionType) {
            return jsType.canCastTo(fnJsType);
          } else {
            return !jsType.canCastTo(fnJsType);
          }
        }
        return false;
      }
    };
  }

  // TODO(mknichel): Add matchers for:
  // - Constructor with argument types
  // - Function call with argument types
  // - Function definitions.
  // - Property definitions, references
  // - IsStatic
  // - JsDocMatcher

  /** Prevent instantiation. */
  private Matchers() {}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/pubsub/v1/pubsub.proto
package com.google.pubsub.v1;
public final class PubsubProto {
// Static holder for generated descriptors; never instantiated.
private PubsubProto() {}
// Intentionally empty: pubsub.proto declares no extensions, so there is
// nothing to register. Kept for API uniformity across generated files.
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistryLite registry) {
}
// Full-registry overload; delegates to the Lite overload above.
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions(
(com.google.protobuf.ExtensionRegistryLite) registry);
}
// Descriptor / FieldAccessorTable pair for each message type declared in
// google/pubsub/v1/pubsub.proto. Presumably assigned during class static
// initialization once the file descriptor is built — the assignments are not
// visible in this fragment; TODO confirm against the full generated file.
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_Topic_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_Topic_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_PubsubMessage_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_PubsubMessage_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_PubsubMessage_AttributesEntry_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_PubsubMessage_AttributesEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_GetTopicRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_GetTopicRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_PublishRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_PublishRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_PublishResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_PublishResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ListTopicsRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ListTopicsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ListTopicsResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ListTopicsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ListTopicSubscriptionsRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ListTopicSubscriptionsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ListTopicSubscriptionsResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ListTopicSubscriptionsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_DeleteTopicRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_DeleteTopicRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_Subscription_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_Subscription_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_PushConfig_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_PushConfig_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_PushConfig_AttributesEntry_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_PushConfig_AttributesEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ReceivedMessage_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ReceivedMessage_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_GetSubscriptionRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_GetSubscriptionRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_UpdateSubscriptionRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_UpdateSubscriptionRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ListSubscriptionsRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ListSubscriptionsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ListSubscriptionsResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ListSubscriptionsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_DeleteSubscriptionRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_DeleteSubscriptionRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ModifyPushConfigRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ModifyPushConfigRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_PullRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_PullRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_PullResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_PullResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ModifyAckDeadlineRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ModifyAckDeadlineRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_AcknowledgeRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_AcknowledgeRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_StreamingPullRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_StreamingPullRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_StreamingPullResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_StreamingPullResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_CreateSnapshotRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_CreateSnapshotRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_Snapshot_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_Snapshot_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ListSnapshotsRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ListSnapshotsRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_ListSnapshotsResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_ListSnapshotsResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_DeleteSnapshotRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_DeleteSnapshotRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_SeekRequest_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_SeekRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_google_pubsub_v1_SeekResponse_descriptor;
static final
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_pubsub_v1_SeekResponse_fieldAccessorTable;
// Returns the file descriptor for google/pubsub/v1/pubsub.proto.
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
// File descriptor instance, built from the serialized descriptorData in the
// static initializer below.
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
// Serialized FileDescriptorProto for google/pubsub/v1/pubsub.proto, split
// into string chunks by the protobuf code generator. These bytes are
// load-bearing: do not edit them by hand.
java.lang.String[] descriptorData = {
"\n\035google/pubsub/v1/pubsub.proto\022\020google." +
"pubsub.v1\032\034google/api/annotations.proto\032" +
"\036google/protobuf/duration.proto\032\033google/" +
"protobuf/empty.proto\032 google/protobuf/fi" +
"eld_mask.proto\032\037google/protobuf/timestam" +
"p.proto\"\025\n\005Topic\022\014\n\004name\030\001 \001(\t\"\333\001\n\rPubsu" +
"bMessage\022\014\n\004data\030\001 \001(\014\022C\n\nattributes\030\002 \003" +
"(\0132/.google.pubsub.v1.PubsubMessage.Attr" +
"ibutesEntry\022\022\n\nmessage_id\030\003 \001(\t\0220\n\014publi" +
"sh_time\030\004 \001(\0132\032.google.protobuf.Timestam",
"p\0321\n\017AttributesEntry\022\013\n\003key\030\001 \001(\t\022\r\n\005val" +
"ue\030\002 \001(\t:\0028\001\" \n\017GetTopicRequest\022\r\n\005topic" +
"\030\001 \001(\t\"R\n\016PublishRequest\022\r\n\005topic\030\001 \001(\t\022" +
"1\n\010messages\030\002 \003(\0132\037.google.pubsub.v1.Pub" +
"subMessage\"&\n\017PublishResponse\022\023\n\013message" +
"_ids\030\001 \003(\t\"K\n\021ListTopicsRequest\022\017\n\007proje" +
"ct\030\001 \001(\t\022\021\n\tpage_size\030\002 \001(\005\022\022\n\npage_toke" +
"n\030\003 \001(\t\"V\n\022ListTopicsResponse\022\'\n\006topics\030" +
"\001 \003(\0132\027.google.pubsub.v1.Topic\022\027\n\017next_p" +
"age_token\030\002 \001(\t\"U\n\035ListTopicSubscription",
"sRequest\022\r\n\005topic\030\001 \001(\t\022\021\n\tpage_size\030\002 \001" +
"(\005\022\022\n\npage_token\030\003 \001(\t\"P\n\036ListTopicSubsc" +
"riptionsResponse\022\025\n\rsubscriptions\030\001 \003(\t\022" +
"\027\n\017next_page_token\030\002 \001(\t\"#\n\022DeleteTopicR" +
"equest\022\r\n\005topic\030\001 \001(\t\"\332\001\n\014Subscription\022\014" +
"\n\004name\030\001 \001(\t\022\r\n\005topic\030\002 \001(\t\0221\n\013push_conf" +
"ig\030\004 \001(\0132\034.google.pubsub.v1.PushConfig\022\034" +
"\n\024ack_deadline_seconds\030\005 \001(\005\022\035\n\025retain_a" +
"cked_messages\030\007 \001(\010\022=\n\032message_retention" +
"_duration\030\010 \001(\0132\031.google.protobuf.Durati",
"on\"\230\001\n\nPushConfig\022\025\n\rpush_endpoint\030\001 \001(\t" +
"\022@\n\nattributes\030\002 \003(\0132,.google.pubsub.v1." +
"PushConfig.AttributesEntry\0321\n\017Attributes" +
"Entry\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t:\0028\001\"S\n" +
"\017ReceivedMessage\022\016\n\006ack_id\030\001 \001(\t\0220\n\007mess" +
"age\030\002 \001(\0132\037.google.pubsub.v1.PubsubMessa" +
"ge\".\n\026GetSubscriptionRequest\022\024\n\014subscrip" +
"tion\030\001 \001(\t\"\202\001\n\031UpdateSubscriptionRequest" +
"\0224\n\014subscription\030\001 \001(\0132\036.google.pubsub.v" +
"1.Subscription\022/\n\013update_mask\030\002 \001(\0132\032.go",
"ogle.protobuf.FieldMask\"R\n\030ListSubscript" +
"ionsRequest\022\017\n\007project\030\001 \001(\t\022\021\n\tpage_siz" +
"e\030\002 \001(\005\022\022\n\npage_token\030\003 \001(\t\"k\n\031ListSubsc" +
"riptionsResponse\0225\n\rsubscriptions\030\001 \003(\0132" +
"\036.google.pubsub.v1.Subscription\022\027\n\017next_" +
"page_token\030\002 \001(\t\"1\n\031DeleteSubscriptionRe" +
"quest\022\024\n\014subscription\030\001 \001(\t\"b\n\027ModifyPus" +
"hConfigRequest\022\024\n\014subscription\030\001 \001(\t\0221\n\013" +
"push_config\030\002 \001(\0132\034.google.pubsub.v1.Pus" +
"hConfig\"U\n\013PullRequest\022\024\n\014subscription\030\001",
" \001(\t\022\032\n\022return_immediately\030\002 \001(\010\022\024\n\014max_" +
"messages\030\003 \001(\005\"L\n\014PullResponse\022<\n\021receiv" +
"ed_messages\030\001 \003(\0132!.google.pubsub.v1.Rec" +
"eivedMessage\"_\n\030ModifyAckDeadlineRequest" +
"\022\024\n\014subscription\030\001 \001(\t\022\017\n\007ack_ids\030\004 \003(\t\022" +
"\034\n\024ack_deadline_seconds\030\003 \001(\005\";\n\022Acknowl" +
"edgeRequest\022\024\n\014subscription\030\001 \001(\t\022\017\n\007ack" +
"_ids\030\002 \003(\t\"\244\001\n\024StreamingPullRequest\022\024\n\014s" +
"ubscription\030\001 \001(\t\022\017\n\007ack_ids\030\002 \003(\t\022\037\n\027mo" +
"dify_deadline_seconds\030\003 \003(\005\022\037\n\027modify_de",
"adline_ack_ids\030\004 \003(\t\022#\n\033stream_ack_deadl" +
"ine_seconds\030\005 \001(\005\"U\n\025StreamingPullRespon" +
"se\022<\n\021received_messages\030\001 \003(\0132!.google.p" +
"ubsub.v1.ReceivedMessage\";\n\025CreateSnapsh" +
"otRequest\022\014\n\004name\030\001 \001(\t\022\024\n\014subscription\030" +
"\002 \001(\t\"X\n\010Snapshot\022\014\n\004name\030\001 \001(\t\022\r\n\005topic" +
"\030\002 \001(\t\022/\n\013expire_time\030\003 \001(\0132\032.google.pro" +
"tobuf.Timestamp\"N\n\024ListSnapshotsRequest\022" +
"\017\n\007project\030\001 \001(\t\022\021\n\tpage_size\030\002 \001(\005\022\022\n\np" +
"age_token\030\003 \001(\t\"_\n\025ListSnapshotsResponse",
"\022-\n\tsnapshots\030\001 \003(\0132\032.google.pubsub.v1.S" +
"napshot\022\027\n\017next_page_token\030\002 \001(\t\")\n\025Dele" +
"teSnapshotRequest\022\020\n\010snapshot\030\001 \001(\t\"m\n\013S" +
"eekRequest\022\024\n\014subscription\030\001 \001(\t\022*\n\004time" +
"\030\002 \001(\0132\032.google.protobuf.TimestampH\000\022\022\n\010" +
"snapshot\030\003 \001(\tH\000B\010\n\006target\"\016\n\014SeekRespon" +
"se2\350\017\n\nSubscriber\022\206\001\n\022CreateSubscription" +
"\022\036.google.pubsub.v1.Subscription\032\036.googl" +
"e.pubsub.v1.Subscription\"0\202\323\344\223\002*\032%/v1/{n" +
"ame=projects/*/subscriptions/*}:\001*\022\222\001\n\017G",
"etSubscription\022(.google.pubsub.v1.GetSub" +
"scriptionRequest\032\036.google.pubsub.v1.Subs" +
"cription\"5\202\323\344\223\002/\022-/v1/{subscription=proj" +
"ects/*/subscriptions/*}\022\240\001\n\022UpdateSubscr" +
"iption\022+.google.pubsub.v1.UpdateSubscrip" +
"tionRequest\032\036.google.pubsub.v1.Subscript" +
"ion\"=\202\323\344\223\002722/v1/{subscription.name=proj" +
"ects/*/subscriptions/*}:\001*\022\234\001\n\021ListSubsc" +
"riptions\022*.google.pubsub.v1.ListSubscrip" +
"tionsRequest\032+.google.pubsub.v1.ListSubs",
"criptionsResponse\".\202\323\344\223\002(\022&/v1/{project=" +
"projects/*}/subscriptions\022\220\001\n\022DeleteSubs" +
"cription\022+.google.pubsub.v1.DeleteSubscr" +
"iptionRequest\032\026.google.protobuf.Empty\"5\202" +
"\323\344\223\002/*-/v1/{subscription=projects/*/subs" +
"criptions/*}\022\243\001\n\021ModifyAckDeadline\022*.goo" +
"gle.pubsub.v1.ModifyAckDeadlineRequest\032\026" +
".google.protobuf.Empty\"J\202\323\344\223\002D\"?/v1/{sub" +
"scription=projects/*/subscriptions/*}:mo" +
"difyAckDeadline:\001*\022\221\001\n\013Acknowledge\022$.goo",
"gle.pubsub.v1.AcknowledgeRequest\032\026.googl" +
"e.protobuf.Empty\"D\202\323\344\223\002>\"9/v1/{subscript" +
"ion=projects/*/subscriptions/*}:acknowle" +
"dge:\001*\022\204\001\n\004Pull\022\035.google.pubsub.v1.PullR" +
"equest\032\036.google.pubsub.v1.PullResponse\"=" +
"\202\323\344\223\0027\"2/v1/{subscription=projects/*/sub" +
"scriptions/*}:pull:\001*\022d\n\rStreamingPull\022&" +
".google.pubsub.v1.StreamingPullRequest\032\'" +
".google.pubsub.v1.StreamingPullResponse(" +
"\0010\001\022\240\001\n\020ModifyPushConfig\022).google.pubsub",
".v1.ModifyPushConfigRequest\032\026.google.pro" +
"tobuf.Empty\"I\202\323\344\223\002C\">/v1/{subscription=p" +
"rojects/*/subscriptions/*}:modifyPushCon" +
"fig:\001*\022\214\001\n\rListSnapshots\022&.google.pubsub" +
".v1.ListSnapshotsRequest\032\'.google.pubsub" +
".v1.ListSnapshotsResponse\"*\202\323\344\223\002$\022\"/v1/{" +
"project=projects/*}/snapshots\022\203\001\n\016Create" +
"Snapshot\022\'.google.pubsub.v1.CreateSnapsh" +
"otRequest\032\032.google.pubsub.v1.Snapshot\",\202" +
"\323\344\223\002&\032!/v1/{name=projects/*/snapshots/*}",
":\001*\022\200\001\n\016DeleteSnapshot\022\'.google.pubsub.v" +
"1.DeleteSnapshotRequest\032\026.google.protobu" +
"f.Empty\"-\202\323\344\223\002\'*%/v1/{snapshot=projects/" +
"*/snapshots/*}\022\204\001\n\004Seek\022\035.google.pubsub." +
"v1.SeekRequest\032\036.google.pubsub.v1.SeekRe" +
"sponse\"=\202\323\344\223\0027\"2/v1/{subscription=projec" +
"ts/*/subscriptions/*}:seek:\001*2\233\006\n\tPublis" +
"her\022j\n\013CreateTopic\022\027.google.pubsub.v1.To" +
"pic\032\027.google.pubsub.v1.Topic\")\202\323\344\223\002#\032\036/v" +
"1/{name=projects/*/topics/*}:\001*\022\202\001\n\007Publ",
"ish\022 .google.pubsub.v1.PublishRequest\032!." +
"google.pubsub.v1.PublishResponse\"2\202\323\344\223\002," +
"\"\'/v1/{topic=projects/*/topics/*}:publis" +
"h:\001*\022o\n\010GetTopic\022!.google.pubsub.v1.GetT" +
"opicRequest\032\027.google.pubsub.v1.Topic\"\'\202\323" +
"\344\223\002!\022\037/v1/{topic=projects/*/topics/*}\022\200\001" +
"\n\nListTopics\022#.google.pubsub.v1.ListTopi" +
"csRequest\032$.google.pubsub.v1.ListTopicsR" +
"esponse\"\'\202\323\344\223\002!\022\037/v1/{project=projects/*" +
"}/topics\022\262\001\n\026ListTopicSubscriptions\022/.go",
"ogle.pubsub.v1.ListTopicSubscriptionsReq" +
"uest\0320.google.pubsub.v1.ListTopicSubscri" +
"ptionsResponse\"5\202\323\344\223\002/\022-/v1/{topic=proje" +
"cts/*/topics/*}/subscriptions\022t\n\013DeleteT" +
"opic\022$.google.pubsub.v1.DeleteTopicReque" +
"st\032\026.google.protobuf.Empty\"\'\202\323\344\223\002!*\037/v1/" +
"{topic=projects/*/topics/*}By\n\024com.googl" +
"e.pubsub.v1B\013PubsubProtoP\001Z6google.golan" +
"g.org/genproto/googleapis/pubsub/v1;pubs" +
"ub\370\001\001\252\002\026Google.Cloud.PubSub.V1b\006proto3"
};
// Callback that captures the freshly built FileDescriptor into the
// 'descriptor' field; returning null means no extension registry is needed
// at build time (the http extension is registered further below).
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
// Build the file descriptor, resolving its imports against the descriptors
// of the five .proto files it depends on (same order as the imports in the
// serialized data above).
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.protobuf.DurationProto.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
}, assigner);
// Populate one (descriptor, FieldAccessorTable) pair per message type, in
// the order the messages are declared in the .proto file; map entry types
// (AttributesEntry) come from their parent's nested types.
internal_static_google_pubsub_v1_Topic_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_google_pubsub_v1_Topic_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_Topic_descriptor,
new java.lang.String[] { "Name", });
internal_static_google_pubsub_v1_PubsubMessage_descriptor =
getDescriptor().getMessageTypes().get(1);
internal_static_google_pubsub_v1_PubsubMessage_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_PubsubMessage_descriptor,
new java.lang.String[] { "Data", "Attributes", "MessageId", "PublishTime", });
internal_static_google_pubsub_v1_PubsubMessage_AttributesEntry_descriptor =
internal_static_google_pubsub_v1_PubsubMessage_descriptor.getNestedTypes().get(0);
internal_static_google_pubsub_v1_PubsubMessage_AttributesEntry_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_PubsubMessage_AttributesEntry_descriptor,
new java.lang.String[] { "Key", "Value", });
internal_static_google_pubsub_v1_GetTopicRequest_descriptor =
getDescriptor().getMessageTypes().get(2);
internal_static_google_pubsub_v1_GetTopicRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_GetTopicRequest_descriptor,
new java.lang.String[] { "Topic", });
internal_static_google_pubsub_v1_PublishRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_google_pubsub_v1_PublishRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_PublishRequest_descriptor,
new java.lang.String[] { "Topic", "Messages", });
internal_static_google_pubsub_v1_PublishResponse_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_google_pubsub_v1_PublishResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_PublishResponse_descriptor,
new java.lang.String[] { "MessageIds", });
internal_static_google_pubsub_v1_ListTopicsRequest_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_google_pubsub_v1_ListTopicsRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ListTopicsRequest_descriptor,
new java.lang.String[] { "Project", "PageSize", "PageToken", });
internal_static_google_pubsub_v1_ListTopicsResponse_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_google_pubsub_v1_ListTopicsResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ListTopicsResponse_descriptor,
new java.lang.String[] { "Topics", "NextPageToken", });
internal_static_google_pubsub_v1_ListTopicSubscriptionsRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_google_pubsub_v1_ListTopicSubscriptionsRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ListTopicSubscriptionsRequest_descriptor,
new java.lang.String[] { "Topic", "PageSize", "PageToken", });
internal_static_google_pubsub_v1_ListTopicSubscriptionsResponse_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_google_pubsub_v1_ListTopicSubscriptionsResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ListTopicSubscriptionsResponse_descriptor,
new java.lang.String[] { "Subscriptions", "NextPageToken", });
internal_static_google_pubsub_v1_DeleteTopicRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_google_pubsub_v1_DeleteTopicRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_DeleteTopicRequest_descriptor,
new java.lang.String[] { "Topic", });
internal_static_google_pubsub_v1_Subscription_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_google_pubsub_v1_Subscription_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_Subscription_descriptor,
new java.lang.String[] { "Name", "Topic", "PushConfig", "AckDeadlineSeconds", "RetainAckedMessages", "MessageRetentionDuration", });
internal_static_google_pubsub_v1_PushConfig_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_google_pubsub_v1_PushConfig_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_PushConfig_descriptor,
new java.lang.String[] { "PushEndpoint", "Attributes", });
internal_static_google_pubsub_v1_PushConfig_AttributesEntry_descriptor =
internal_static_google_pubsub_v1_PushConfig_descriptor.getNestedTypes().get(0);
internal_static_google_pubsub_v1_PushConfig_AttributesEntry_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_PushConfig_AttributesEntry_descriptor,
new java.lang.String[] { "Key", "Value", });
internal_static_google_pubsub_v1_ReceivedMessage_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_google_pubsub_v1_ReceivedMessage_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ReceivedMessage_descriptor,
new java.lang.String[] { "AckId", "Message", });
internal_static_google_pubsub_v1_GetSubscriptionRequest_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_google_pubsub_v1_GetSubscriptionRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_GetSubscriptionRequest_descriptor,
new java.lang.String[] { "Subscription", });
internal_static_google_pubsub_v1_UpdateSubscriptionRequest_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_google_pubsub_v1_UpdateSubscriptionRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_UpdateSubscriptionRequest_descriptor,
new java.lang.String[] { "Subscription", "UpdateMask", });
internal_static_google_pubsub_v1_ListSubscriptionsRequest_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_google_pubsub_v1_ListSubscriptionsRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ListSubscriptionsRequest_descriptor,
new java.lang.String[] { "Project", "PageSize", "PageToken", });
internal_static_google_pubsub_v1_ListSubscriptionsResponse_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_google_pubsub_v1_ListSubscriptionsResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ListSubscriptionsResponse_descriptor,
new java.lang.String[] { "Subscriptions", "NextPageToken", });
internal_static_google_pubsub_v1_DeleteSubscriptionRequest_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_google_pubsub_v1_DeleteSubscriptionRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_DeleteSubscriptionRequest_descriptor,
new java.lang.String[] { "Subscription", });
internal_static_google_pubsub_v1_ModifyPushConfigRequest_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_google_pubsub_v1_ModifyPushConfigRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ModifyPushConfigRequest_descriptor,
new java.lang.String[] { "Subscription", "PushConfig", });
internal_static_google_pubsub_v1_PullRequest_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_google_pubsub_v1_PullRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_PullRequest_descriptor,
new java.lang.String[] { "Subscription", "ReturnImmediately", "MaxMessages", });
internal_static_google_pubsub_v1_PullResponse_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_google_pubsub_v1_PullResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_PullResponse_descriptor,
new java.lang.String[] { "ReceivedMessages", });
internal_static_google_pubsub_v1_ModifyAckDeadlineRequest_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_google_pubsub_v1_ModifyAckDeadlineRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ModifyAckDeadlineRequest_descriptor,
new java.lang.String[] { "Subscription", "AckIds", "AckDeadlineSeconds", });
internal_static_google_pubsub_v1_AcknowledgeRequest_descriptor =
getDescriptor().getMessageTypes().get(22);
internal_static_google_pubsub_v1_AcknowledgeRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_AcknowledgeRequest_descriptor,
new java.lang.String[] { "Subscription", "AckIds", });
internal_static_google_pubsub_v1_StreamingPullRequest_descriptor =
getDescriptor().getMessageTypes().get(23);
internal_static_google_pubsub_v1_StreamingPullRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_StreamingPullRequest_descriptor,
new java.lang.String[] { "Subscription", "AckIds", "ModifyDeadlineSeconds", "ModifyDeadlineAckIds", "StreamAckDeadlineSeconds", });
internal_static_google_pubsub_v1_StreamingPullResponse_descriptor =
getDescriptor().getMessageTypes().get(24);
internal_static_google_pubsub_v1_StreamingPullResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_StreamingPullResponse_descriptor,
new java.lang.String[] { "ReceivedMessages", });
internal_static_google_pubsub_v1_CreateSnapshotRequest_descriptor =
getDescriptor().getMessageTypes().get(25);
internal_static_google_pubsub_v1_CreateSnapshotRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_CreateSnapshotRequest_descriptor,
new java.lang.String[] { "Name", "Subscription", });
internal_static_google_pubsub_v1_Snapshot_descriptor =
getDescriptor().getMessageTypes().get(26);
internal_static_google_pubsub_v1_Snapshot_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_Snapshot_descriptor,
new java.lang.String[] { "Name", "Topic", "ExpireTime", });
internal_static_google_pubsub_v1_ListSnapshotsRequest_descriptor =
getDescriptor().getMessageTypes().get(27);
internal_static_google_pubsub_v1_ListSnapshotsRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ListSnapshotsRequest_descriptor,
new java.lang.String[] { "Project", "PageSize", "PageToken", });
internal_static_google_pubsub_v1_ListSnapshotsResponse_descriptor =
getDescriptor().getMessageTypes().get(28);
internal_static_google_pubsub_v1_ListSnapshotsResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_ListSnapshotsResponse_descriptor,
new java.lang.String[] { "Snapshots", "NextPageToken", });
internal_static_google_pubsub_v1_DeleteSnapshotRequest_descriptor =
getDescriptor().getMessageTypes().get(29);
internal_static_google_pubsub_v1_DeleteSnapshotRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_DeleteSnapshotRequest_descriptor,
new java.lang.String[] { "Snapshot", });
internal_static_google_pubsub_v1_SeekRequest_descriptor =
getDescriptor().getMessageTypes().get(30);
internal_static_google_pubsub_v1_SeekRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_SeekRequest_descriptor,
new java.lang.String[] { "Subscription", "Time", "Snapshot", "Target", });
internal_static_google_pubsub_v1_SeekResponse_descriptor =
getDescriptor().getMessageTypes().get(31);
internal_static_google_pubsub_v1_SeekResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_google_pubsub_v1_SeekResponse_descriptor,
new java.lang.String[] { });
// Re-parse custom options with the google.api.http extension registered so
// the HTTP annotations on the service methods become accessible.
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.AnnotationsProto.http);
com.google.protobuf.Descriptors.FileDescriptor
.internalUpdateFileDescriptor(descriptor, registry);
// Touch the dependency outer classes to force their static initialization.
com.google.api.AnnotationsProto.getDescriptor();
com.google.protobuf.DurationProto.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
| |
/*
* Copyright 1999-2015 dangdang.com.
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* </p>
*/
package com.dangdang.ddframe.rdb.sharding.merger.fixture;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.SQLFeatureNotSupportedException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
/**
 * Test fixture: an in-memory {@code ResultSet} backed by a list of row maps.
 *
 * <p>Each row is a {@code Map} from column label to value; column labels are taken
 * from the first row (or fixed to the single column {@code "name"} for the varargs
 * constructor). Only forward iteration via {@link #next()} is supported; all other
 * operations are inherited from {@code AbstractUnsupportedOperationMockResultSet}.
 *
 * @param <T> value type stored in the rows
 */
public final class MockResultSet<T> extends AbstractUnsupportedOperationMockResultSet {

    /** Column labels in declaration order; drives metadata and index lookups. */
    private final List<String> columnNamesMetaData;

    /** Forward-only cursor over the rows. */
    private final Iterator<Map<String, T>> data;

    /** Row the cursor currently points at; {@code null} before the first {@link #next()}. */
    private Map<String, T> currentValue;

    /** Set by {@link #close()}; reported by {@link #isClosed()}. */
    private boolean isClosed;

    /** Total row count, reported via {@link #getFetchSize()}. */
    private final int size;

    /**
     * Creates a single-column ("name") result set with one row per given value.
     *
     * @param data values for the "name" column, one row each
     */
    @SafeVarargs
    public MockResultSet(@SuppressWarnings("unchecked") final T... data) {
        columnNamesMetaData = new ArrayList<>(1);
        columnNamesMetaData.add("name");
        List<Map<String, T>> list = new ArrayList<>(data.length);
        for (T each : data) {
            Map<String, T> map = new LinkedHashMap<>(1);
            map.put("name", each);
            list.add(map);
        }
        size = list.size();
        this.data = list.iterator();
    }

    /**
     * Creates a result set over the given rows; column labels come from the first row.
     *
     * @param data rows, each a map from column label to value
     */
    public MockResultSet(final List<Map<String, T>> data) {
        columnNamesMetaData = new ArrayList<>();
        if (!data.isEmpty()) {
            columnNamesMetaData.addAll(data.get(0).keySet());
        }
        size = data.size();
        this.data = data.iterator();
    }

    /** Creates an empty result set with no columns and no rows. */
    public MockResultSet() {
        this(Collections.<Map<String, T>>emptyList());
    }

    @Override
    public boolean next() throws SQLException {
        boolean result = data.hasNext();
        if (result) {
            currentValue = data.next();
        }
        return result;
    }

    @Override
    public void close() throws SQLException {
        isClosed = true;
    }

    @Override
    public boolean isClosed() throws SQLException {
        return isClosed;
    }

    @Override
    public int getInt(final int columnIndex) throws SQLException {
        return (Integer) find(columnIndex);
    }

    @Override
    public int getInt(final String columnLabel) throws SQLException {
        validateColumn(columnLabel);
        return (Integer) currentValue.get(columnLabel);
    }

    @Override
    public String getString(final int columnIndex) throws SQLException {
        return (String) find(columnIndex);
    }

    @Override
    public String getString(final String columnLabel) throws SQLException {
        validateColumn(columnLabel);
        return (String) currentValue.get(columnLabel);
    }

    @Override
    public Object getObject(final int columnIndex) throws SQLException {
        return find(columnIndex);
    }

    @Override
    public Object getObject(final String columnLabel) throws SQLException {
        validateColumn(columnLabel);
        return currentValue.get(columnLabel);
    }

    @Override
    public int findColumn(final String columnLabel) throws SQLException {
        // Per the JDBC contract, an unknown label must raise SQLException rather
        // than yield the invalid column index 0 (which indexOf(-1) + 1 produced).
        validateColumn(columnLabel);
        return columnNamesMetaData.indexOf(columnLabel) + 1;
    }

    /**
     * Fails fast when the label is not one of this result set's columns.
     *
     * @throws SQLException if the column label is unknown
     */
    private void validateColumn(final String columnLabel) throws SQLException {
        if (!columnNamesMetaData.contains(columnLabel)) {
            throw new SQLException(String.format("can not find column %s, columns are %s", columnLabel, columnNamesMetaData));
        }
    }

    /**
     * Returns the value at the 1-based column index of the current row,
     * or {@code null} when the index is out of range.
     */
    private T find(final int columnIndex) {
        int count = 1;
        for (Entry<String, T> entry : currentValue.entrySet()) {
            if (count == columnIndex) {
                return entry.getValue();
            }
            count++;
        }
        return null;
    }

    @Override
    public int getFetchSize() throws SQLException {
        // Repurposed in this fixture to expose the total row count.
        return size;
    }

    @Override
    public Statement getStatement() throws SQLException {
        return null;
    }

    @Override
    public ResultSetMetaData getMetaData() throws SQLException {
        return new MockResultSetMetaData();
    }

    /**
     * Minimal metadata view: only column count and labels are supported; every
     * other operation throws {@link SQLFeatureNotSupportedException}.
     */
    public class MockResultSetMetaData implements ResultSetMetaData {

        @Override
        public int getColumnCount() throws SQLException {
            return columnNamesMetaData.size();
        }

        @Override
        public String getColumnLabel(final int column) throws SQLException {
            // JDBC column numbers are 1-based.
            return columnNamesMetaData.get(column - 1);
        }

        @Override
        public boolean isAutoIncrement(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public boolean isCaseSensitive(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public boolean isSearchable(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public boolean isCurrency(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public int isNullable(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public boolean isSigned(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public int getColumnDisplaySize(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public String getColumnName(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public String getSchemaName(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public int getPrecision(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public int getScale(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public String getTableName(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public String getCatalogName(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public int getColumnType(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public String getColumnTypeName(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public boolean isReadOnly(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public boolean isWritable(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public boolean isDefinitelyWritable(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public String getColumnClassName(final int column) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public <I> I unwrap(final Class<I> iface) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }

        @Override
        public boolean isWrapperFor(final Class<?> iface) throws SQLException {
            throw new SQLFeatureNotSupportedException();
        }
    }
}
| |
package com.jivesoftware.os.amza.ui.region;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.jivesoftware.os.amza.api.AmzaInterner;
import com.jivesoftware.os.amza.api.IoStats;
import com.jivesoftware.os.amza.api.filer.UIO;
import com.jivesoftware.os.amza.api.partition.PartitionName;
import com.jivesoftware.os.amza.api.partition.RemoteVersionedState;
import com.jivesoftware.os.amza.api.partition.VersionedPartitionName;
import com.jivesoftware.os.amza.api.ring.RingMember;
import com.jivesoftware.os.amza.api.ring.RingMemberAndHost;
import com.jivesoftware.os.amza.api.scan.RowStream;
import com.jivesoftware.os.amza.api.stream.RowType;
import com.jivesoftware.os.amza.api.wal.WALHighwater;
import com.jivesoftware.os.amza.service.AmzaService;
import com.jivesoftware.os.amza.service.PartitionIsDisposedException;
import com.jivesoftware.os.amza.service.replication.PartitionStripeProvider;
import com.jivesoftware.os.amza.service.ring.AmzaRingReader;
import com.jivesoftware.os.amza.service.ring.RingTopology;
import com.jivesoftware.os.amza.service.stats.AmzaStats;
import com.jivesoftware.os.amza.service.stats.AmzaStats.CompactionFamily;
import com.jivesoftware.os.amza.service.stats.AmzaStats.Totals;
import com.jivesoftware.os.amza.service.stats.NetStats;
import com.jivesoftware.os.amza.service.take.HighwaterStorage;
import com.jivesoftware.os.amza.ui.soy.SoyRenderer;
import com.jivesoftware.os.aquarium.LivelyEndState;
import com.jivesoftware.os.aquarium.State;
import com.jivesoftware.os.aquarium.Waterline;
import com.jivesoftware.os.jive.utils.ordered.id.IdPacker;
import com.jivesoftware.os.jive.utils.ordered.id.TimestampProvider;
import com.jivesoftware.os.mlogger.core.MetricLogger;
import com.jivesoftware.os.mlogger.core.MetricLoggerFactory;
import java.lang.management.GarbageCollectorMXBean;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.management.RuntimeMXBean;
import java.nio.charset.StandardCharsets;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.LongAdder;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.InstanceNotFoundException;
import javax.management.MBeanServer;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
/**
*
*/
// soy.page.healthPluginRegion
public class MetricsPluginRegion implements PageRegion<MetricsPluginRegion.MetricsPluginRegionInput> {
// Shared metrics logger for this plugin region.
private static final MetricLogger LOG = MetricLoggerFactory.getLogger();

// Pretty-prints counters for display. NOTE(review): NumberFormat is not thread-safe —
// confirm this region is rendered by a single thread at a time.
private final NumberFormat numberFormat = NumberFormat.getInstance();
// Soy template names for the four views this region can render.
private final String template;
private final String partitionMetricsTemplate;
private final String statsTemplate;
private final String visualizePartitionTemplate;
// Renders soy templates to HTML.
private final SoyRenderer renderer;
// Reads ring topology; used to describe a partition's ring membership.
private final AmzaRingReader ringReader;
// Primary service handle; stats are read via amzaService.amzaStats at render time.
private final AmzaService amzaService;
// Supplies approximate timestamps used when computing take latencies.
private final TimestampProvider timestampProvider;
// Unpacks packed tx ids into their timestamp component.
private final IdPacker idPacker;
// Interner whose size is reported in the overview ("baIntern" badge).
private final AmzaInterner amzaInterner;
// JVM management beans backing the CPU / heap / GC badges in renderOverview().
private final List<GarbageCollectorMXBean> garbageCollectors;
private final MemoryMXBean memoryBean;
private final RuntimeMXBean runtimeBean;
/**
 * Wires the region to its templates, renderer, and amza services, and captures
 * the JVM management beans used by the overview page.
 *
 * NOTE(review): the {@code amzaStats} parameter is accepted but never stored;
 * stats are read later via {@code amzaService.amzaStats} — confirm the parameter
 * is intentionally unused before removing it from callers.
 */
public MetricsPluginRegion(String template,
    String partitionMetricsTemplate,
    String statsTemplate,
    String visualizePartitionTemplate,
    SoyRenderer renderer,
    AmzaRingReader ringReader,
    AmzaService amzaService,
    AmzaStats amzaStats,
    TimestampProvider timestampProvider,
    IdPacker idPacker,
    AmzaInterner amzaInterner) {

    this.template = template;
    this.partitionMetricsTemplate = partitionMetricsTemplate;
    this.statsTemplate = statsTemplate;
    this.visualizePartitionTemplate = visualizePartitionTemplate;
    this.renderer = renderer;
    this.ringReader = ringReader;
    this.amzaService = amzaService;
    this.timestampProvider = timestampProvider;
    this.idPacker = idPacker;
    this.amzaInterner = amzaInterner;
    // Snapshot the platform management beans once; they are stable for the JVM lifetime.
    garbageCollectors = ManagementFactory.getGarbageCollectorMXBeans();
    memoryBean = ManagementFactory.getMemoryMXBean();
    runtimeBean = ManagementFactory.getRuntimeMXBean();
}
/**
 * Asks the underlying service to abandon the given partition on this node.
 *
 * @param partitionName partition to abandon
 * @return whatever the service reports about the abandonment
 * @throws Exception propagated from the service call
 */
public boolean abandonPartition(PartitionName partitionName) throws Exception {
    LOG.info("Abandoning {}", partitionName);
    boolean abandoned = amzaService.abandonPartition(partitionName);
    return abandoned;
}
/**
 * Immutable input for {@link #render}: which ring/partition to display and how.
 */
public static class MetricsPluginRegionInput {

    final String ringName;
    final String partitionName;
    // When true the partition name must match exactly; otherwise a substring match is used
    // (see renderStats for the matching logic).
    final boolean exact;
    // When true, render the raw row-run visualization instead of partition metrics.
    final boolean visualize;

    public MetricsPluginRegionInput(String ringName, String partitionName, boolean exact, boolean visualize) {
        this.ringName = ringName;
        this.partitionName = partitionName;
        this.exact = exact;
        this.visualize = visualize;
    }
}
/**
 * RowStream that coalesces consecutive rows of the same {@link RowType} into "runs"
 * for the visualize-partition view. Call {@link #done} after streaming to flush the
 * final run and publish results into the template data map.
 */
class VisualizePartition implements RowStream {

    List<Run> runs = new ArrayList<>();
    Run lastRun;   // run currently being accumulated; flushed by done()
    long rowCount; // total rows streamed

    @Override
    public boolean row(long rowFP, long rowTxId, RowType rowType, byte[] row) throws Exception {
        rowCount++;
        String subType = null;
        if (rowType == RowType.system) {
            long[] parts = UIO.bytesLongs(row);
            // System rows whose first long is 2 are leap rows; label them with their last txId.
            if (parts[0] == 2) {
                subType = "leaps: lastTx:" + Long.toHexString(parts[1]);
            }
        }
        // Start a new run on a row-type change; a labeled (leap) row also forces a break
        // so each leap row begins its own run.
        if (lastRun == null || lastRun.rowType != rowType || subType != null) {
            if (lastRun != null) {
                runs.add(lastRun);
            }
            lastRun = new Run(rowType, rowFP, rowTxId);
        }
        if (subType != null) {
            lastRun.subType = subType;
        }
        lastRun.bytes += row.length;
        lastRun.count++;
        return true;
    }

    /**
     * Flushes the in-progress run and publishes "runs" and "rowCount" into {@code data}.
     */
    public void done(Map<String, Object> data) {
        // May add null when no rows were streamed; nulls are filtered in the loop below.
        runs.add(lastRun);
        List<Map<String, String>> runMaps = new ArrayList<>();
        for (Run r : runs) {
            if (r != null) {
                Map<String, String> run = new HashMap<>();
                run.put("rowType", r.rowType.name());
                run.put("subType", r.subType);
                run.put("startFp", String.valueOf(r.startFp));
                run.put("rowTxId", Long.toHexString(r.rowTxId));
                run.put("bytes", numberFormat.format(r.bytes));
                run.put("count", numberFormat.format(r.count));
                runMaps.add(run);
            }
        }
        data.put("runs", runMaps);
        data.put("rowCount", numberFormat.format(rowCount));
    }

    /** A contiguous stretch of rows sharing one row type. */
    class Run {

        RowType rowType;
        String subType = "";
        long startFp;  // file pointer of the first row in the run
        long rowTxId;  // txId of the first row in the run
        long bytes;    // total payload bytes across the run
        long count;    // number of rows in the run

        public Run(RowType rowType, long startFp, long rowTxId) {
            this.rowType = rowType;
            this.startFp = startFp;
            this.rowTxId = rowTxId;
        }
    }
}
/**
 * Renders one of three views depending on the input: the row-run visualization
 * (partition name + visualize), the per-partition metrics page (partition name only),
 * or the top-level metrics page with grand totals and system-partition totals.
 * Any failure is logged and collapses to the literal string "Error".
 */
@Override
public String render(MetricsPluginRegionInput input) {
    Map<String, Object> data = Maps.newHashMap();
    try {
        AmzaStats amzaStats = amzaService.amzaStats;
        data.put("ringName", input.ringName);
        data.put("partitionName", input.partitionName);
        data.put("exact", input.exact);
        if (input.partitionName.length() > 0) {
            if (input.visualize) {
                VisualizePartition visualizePartition = new VisualizePartition();
                // NOTE(review): getBytes() uses the platform default charset — confirm this
                // matches how partition/ring names were encoded on write.
                amzaService.visualizePartition(input.ringName.getBytes(), input.partitionName.getBytes(), visualizePartition);
                visualizePartition.done(data);
                return renderer.render(visualizePartitionTemplate, data);
            } else {
                return renderer.render(partitionMetricsTemplate, data);
            }
        } else {
            data.put("addMember", numberFormat.format(amzaStats.addMember.longValue()));
            data.put("removeMember", numberFormat.format(amzaStats.removeMember.longValue()));
            data.put("getRing", numberFormat.format(amzaStats.getRing.longValue()));
            data.put("rowsStream", numberFormat.format(amzaStats.rowsStream.longValue()));
            data.put("availableRowsStream", numberFormat.format(amzaStats.availableRowsStream.longValue()));
            data.put("rowsTaken", numberFormat.format(amzaStats.rowsTaken.longValue()));
            // Per-member long-poll counts; -1 marks a member with no availability counter yet.
            List<Map<String, String>> longPolled = new ArrayList<>();
            for (Entry<RingMember, LongAdder> polled : amzaStats.longPolled.entrySet()) {
                LongAdder longPollAvailables = amzaStats.longPollAvailables.get(polled.getKey());
                longPolled.add(ImmutableMap.of("member", polled.getKey().getMember(),
                    "longPolled", numberFormat.format(polled.getValue().longValue()),
                    "longPollAvailables", numberFormat.format(longPollAvailables == null ? -1L : longPollAvailables.longValue())));
            }
            data.put("longPolled", longPolled);
            // includeCount=false: skip expensive per-partition key/clobber counts on this page.
            data.put("grandTotals", regionTotals(null, amzaStats.getGrandTotal(), false));
            List<Map<String, Object>> regionTotals = new ArrayList<>();
            List<PartitionName> partitionNames = Lists.newArrayList();
            // Only system partitions are listed here; member partitions are intentionally
            // commented out (renderStats covers them on demand).
            Iterables.addAll(partitionNames, amzaService.getSystemPartitionNames());
            //Iterables.addAll(partitionNames, amzaService.getMemberPartitionNames());
            Collections.sort(partitionNames);
            for (PartitionName partitionName : partitionNames) {
                Totals totals = amzaStats.getPartitionTotals().get(partitionName);
                if (totals == null) {
                    totals = new Totals();
                }
                regionTotals.add(regionTotals(partitionName, totals, false));
            }
            data.put("regionTotals", regionTotals);
        }
        return renderer.render(template, data);
    } catch (Exception e) {
        LOG.error("Unable to retrieve data", e);
        return "Error";
    }
}
/**
 * Renders the stats template for every partition whose name matches {@code filter}
 * (exact or substring match, per {@code exact}), including expensive counts.
 * Failures are logged and the template is still rendered with whatever data was built.
 *
 * NOTE(review): a null {@code filter} would NPE in the match below — confirm callers
 * always pass a non-null filter.
 */
public String renderStats(String filter, boolean exact) {
    Map<String, Object> data = Maps.newHashMap();
    try {
        AmzaStats amzaStats = amzaService.amzaStats;
        // Per-member long-poll counts; -1 marks a member with no availability counter yet.
        List<Map<String, String>> longPolled = new ArrayList<>();
        for (Entry<RingMember, LongAdder> polled : amzaStats.longPolled.entrySet()) {
            LongAdder longPollAvailables = amzaStats.longPollAvailables.get(polled.getKey());
            longPolled.add(ImmutableMap.of("member", polled.getKey().getMember(),
                "longPolled", numberFormat.format(polled.getValue().longValue()),
                "longPollAvailables", numberFormat.format(longPollAvailables == null ? -1L : longPollAvailables.longValue())));
        }
        data.put("longPolled", longPolled);
        // includeCount=true: this page pays for key/clobber counts and highwaters.
        data.put("grandTotals", regionTotals(null, amzaStats.getGrandTotal(), true));
        List<Map<String, Object>> regionTotals = new ArrayList<>();
        List<PartitionName> partitionNames = Lists.newArrayList();
        Iterables.addAll(partitionNames, amzaService.getSystemPartitionNames());
        Iterables.addAll(partitionNames, amzaService.getMemberPartitionNames());
        Collections.sort(partitionNames);
        for (PartitionName partitionName : partitionNames) {
            String name = new String(partitionName.getName(), StandardCharsets.UTF_8);
            if (exact && name.equals(filter) || !exact && name.contains(filter)) {
                Totals totals = amzaStats.getPartitionTotals().get(partitionName);
                if (totals == null) {
                    totals = new Totals();
                }
                regionTotals.add(regionTotals(partitionName, totals, true));
            }
        }
        data.put("regionTotals", regionTotals);
    } catch (Exception e) {
        LOG.error("Unable to retrieve data", e);
    }
    return renderer.render(statsTemplate, data);
}
/**
 * Builds the template model for one partition's totals, or for the grand totals when
 * {@code name} is null (the per-partition sections are then skipped entirely).
 *
 * @param name         partition to describe, or null for grand totals only
 * @param totals       the counters to render
 * @param includeCount when true, pay for expensive data (row counts, took latencies,
 *                     highwaters); when false those fields render as "(requires watch)"
 * @return a mutable map of template fields
 * @throws Exception propagated from ring/stripe lookups
 */
public Map<String, Object> regionTotals(PartitionName name, AmzaStats.Totals totals, boolean includeCount) throws Exception {
    Map<String, Object> map = new HashMap<>();
    if (name != null) {
        map.put("type", name.isSystemPartition() ? "SYSTEM" : "USER");
        // NOTE(review): new String(bytes) uses the platform default charset here, while
        // renderStats decodes the same bytes as UTF-8 — confirm which is intended.
        map.put("name", new String(name.getName()));
        map.put("ringName", new String(name.getRingName()));
        // Describe the partition's ring membership.
        RingTopology ring = ringReader.getRing(name.getRingName(), -1);
        List<Map<String, String>> ringMaps = new ArrayList<>();
        for (RingMemberAndHost entry : ring.entries) {
            ringMaps.add(ImmutableMap.of("member", entry.ringMember.getMember(),
                "host", entry.ringHost.getHost(),
                "port", String.valueOf(entry.ringHost.getPort())));
        }
        map.put("ring", ringMaps);
        // Snapshot take-coordinator categories and call counts for all versioned partitions;
        // looked up below once we know this partition's versioned name.
        Map<VersionedPartitionName, Integer> categories = Maps.newHashMap();
        Map<VersionedPartitionName, Long> ringCallCounts = Maps.newHashMap();
        Map<VersionedPartitionName, Long> partitionCallCounts = Maps.newHashMap();
        amzaService.getTakeCoordinator().streamCategories((versionedPartitionName, category, ringCallCount, partitionCallCount) -> {
            categories.put(versionedPartitionName, category);
            ringCallCounts.put(versionedPartitionName, ringCallCount);
            partitionCallCounts.put(versionedPartitionName, partitionCallCount);
            return true;
        });
        PartitionStripeProvider partitionStripeProvider = amzaService.getPartitionStripeProvider();
        try {
            partitionStripeProvider.txPartition(name, (txPartitionStripe, highwaterStorage, versionedAquarium) -> {
                // Everything below defends against a null aquarium/waterline with "unknown"/"none".
                VersionedPartitionName versionedPartitionName = versionedAquarium == null ? null : versionedAquarium.getVersionedPartitionName();
                LivelyEndState livelyEndState = versionedAquarium == null ? null : versionedAquarium.getLivelyEndState();
                Waterline currentWaterline = livelyEndState != null ? livelyEndState.getCurrentWaterline() : null;
                map.put("state", currentWaterline == null ? "unknown" : currentWaterline.getState());
                map.put("quorum", currentWaterline == null ? "unknown" : currentWaterline.isAtQuorum());
                //map.put("timestamp", currentWaterline == null ? "unknown" : String.valueOf(currentWaterline.getTimestamp()));
                //map.put("version", currentWaterline == null ? "unknown" : String.valueOf(currentWaterline.getVersion()));
                map.put("partitionVersion", versionedPartitionName == null ? "none" : Long.toHexString(versionedPartitionName.getPartitionVersion()));
                State currentState = livelyEndState == null ? State.bootstrap : livelyEndState.getCurrentState();
                map.put("isOnline", livelyEndState != null && livelyEndState.isOnline());
                long[] stripeVersion = new long[1];
                txPartitionStripe.tx((deltaIndex, stripeIndex, partitionStripe) -> {
                    if (includeCount) {
                        map.put("count", partitionStripe == null ? "-1" : numberFormat.format(partitionStripe.count(versionedAquarium)));
                        map.put("keyCount", partitionStripe == null ? "-1" : numberFormat.format(partitionStripe.keyCount(versionedAquarium)));
                        map.put("clobberCount", partitionStripe == null ? "-1" : numberFormat.format(partitionStripe.clobberCount(versionedAquarium)));
                    } else {
                        map.put("count", "(requires watch)");
                    }
                    // Smuggle the stripe index out of the lambda for the localState block below.
                    stripeVersion[0] = stripeIndex; // yawn
                    map.put("highestTxId", partitionStripe == null ? "-1" : String.valueOf(partitionStripe.highestTxId(versionedPartitionName)));
                    return null;
                });
                // -1 sentinels mean the take coordinator had no entry for this versioned partition.
                int category = categories.getOrDefault(versionedPartitionName, -1);
                long ringCallCount = ringCallCounts.getOrDefault(versionedPartitionName, -1L);
                long partitionCallCount = partitionCallCounts.getOrDefault(versionedPartitionName, -1L);
                map.put("category", category != -1 ? String.valueOf(category) : "unknown");
                map.put("ringCallCount", String.valueOf(ringCallCount));
                map.put("partitionCallCount", String.valueOf(partitionCallCount));
                // Per-member take latencies, only when paying for expensive data.
                List<Map<String, Object>> tookLatencies = Lists.newArrayList();
                if (includeCount) {
                    long currentTime = timestampProvider.getApproximateTimestamp(System.currentTimeMillis());
                    amzaService.getTakeCoordinator().streamTookLatencies(versionedPartitionName,
                        (ringMember, lastOfferedTxId, category1, tooSlowTxId, takeSessionId, online, steadyState, lastOfferedMillis,
                            lastTakenMillis, lastCategoryCheckMillis) -> {
                            Builder<String, Object> builder = ImmutableMap.<String, Object>builder();
                            builder.put("member", ringMember.getMember());
                            long tooSlowTimestamp = -1;
                            long latencyInMillis = -1;
                            if (lastOfferedTxId != -1) {
                                // Packed tx ids carry their timestamp in slot 0.
                                long lastOfferedTimestamp = idPacker.unpack(lastOfferedTxId)[0];
                                tooSlowTimestamp = idPacker.unpack(tooSlowTxId)[0];
                                latencyInMillis = currentTime - lastOfferedTimestamp;
                            }
                            // Prefix with '-' when the latency came out negative (clock skew).
                            String latency = ((latencyInMillis < 0) ? '-' : ' ') + getDurationBreakdown(Math.abs(latencyInMillis));
                            builder
                                .put("latency", (lastOfferedTxId == -1) ? "never" : latency)
                                .put("category", String.valueOf(category1))
                                .put("tooSlow", (lastOfferedTxId == -1) ? "never" : getDurationBreakdown(tooSlowTimestamp))
                                .put("takeSessionId", String.valueOf(takeSessionId))
                                .put("online", online)
                                .put("steadyState", steadyState);
                            tookLatencies.add(builder.build());
                            return true;
                        });
                }
                map.put("tookLatencies", tookLatencies);
                if (includeCount) {
                    if (versionedPartitionName == null) {
                        map.put("highwaters", "none");
                    } else if (name.isSystemPartition()) {
                        // System partitions keep their highwaters in dedicated storage.
                        HighwaterStorage systemHighwaterStorage = amzaService.getSystemHighwaterStorage();
                        WALHighwater partitionHighwater = systemHighwaterStorage.getPartitionHighwater(versionedPartitionName, true);
                        map.put("highwaters", renderHighwaters(partitionHighwater));
                    } else {
                        WALHighwater partitionHighwater = highwaterStorage.getPartitionHighwater(versionedPartitionName, true);
                        map.put("highwaters", renderHighwaters(partitionHighwater));
                    }
                } else {
                    map.put("highwaters", "(requires watch)");
                }
                map.put("localState", ImmutableMap.of("online", livelyEndState != null && livelyEndState.isOnline(),
                    "state", currentState != null ? currentState.name() : "unknown",
                    "name", new String(amzaService.getRingReader().getRingMember().asAquariumMember().getMember()),
                    "partitionVersion", versionedPartitionName == null ? "none" : String.valueOf(versionedPartitionName.getPartitionVersion()),
                    "stripeVersion", versionedAquarium == null ? "none" : String.valueOf(stripeVersion[0])));
                return -1;
            });
            // Describe neighbors' view of this partition.
            List<Map<String, Object>> neighborStates = new ArrayList<>();
            Set<RingMember> neighboringRingMembers = amzaService.getRingReader().getNeighboringRingMembers(name.getRingName(), -1);
            for (RingMember ringMember : neighboringRingMembers) {
                RemoteVersionedState neighborState = partitionStripeProvider.getRemoteVersionedState(ringMember, name);
                neighborStates.add(ImmutableMap.of("version", neighborState != null ? String.valueOf(neighborState.version) : "unknown",
                    "state", neighborState != null && neighborState.waterline != null ? neighborState.waterline.getState().name() : "unknown",
                    "name", new String(ringMember.getMember().getBytes())));
            }
            map.put("neighborStates", neighborStates);
        } catch (PartitionIsDisposedException e) {
            // Disposed partitions can't be transacted against; fill every field the
            // template expects with "disposed" placeholders instead.
            //TODO just make soy more tolerant
            map.put("state", "disposed");
            map.put("quorum", "disposed");
            map.put("partitionVersion", "disposed");
            map.put("isOnline", false);
            map.put("count", 0);
            map.put("highestTxId", "-1");
            map.put("category", "disposed");
            map.put("ringCallCount", "-1");
            map.put("partitionCallCount", "-1");
            map.put("tookLatencies", Collections.emptyList());
            map.put("highwaters", "disposed");
            map.put("localState", ImmutableMap.of("online", false,
                "state", "disposed",
                "name", new String(amzaService.getRingReader().getRingMember().asAquariumMember().getMember()),
                "partitionVersion", "disposed",
                "stripeVersion", "disposed"));
            map.put("neighborStates", Collections.emptyList());
        }
    }
    // Counter/latency pairs common to both the grand-totals and per-partition views.
    map.put("gets", numberFormat.format(totals.gets.longValue()));
    map.put("getsLag", getDurationBreakdown(totals.getsLatency));
    map.put("scans", numberFormat.format(totals.scans.longValue()));
    map.put("scansLag", getDurationBreakdown(totals.scansLatency));
    map.put("scanKeys", numberFormat.format(totals.scanKeys.longValue()));
    map.put("scanKeysLag", getDurationBreakdown(totals.scanKeysLatency));
    map.put("directApplies", numberFormat.format(totals.directApplies.longValue()));
    map.put("directAppliesLag", getDurationBreakdown(totals.directAppliesLag));
    map.put("updates", numberFormat.format(totals.updates.longValue()));
    map.put("updatesLag", getDurationBreakdown(totals.updatesLag));
    map.put("offers", numberFormat.format(totals.offers.longValue()));
    map.put("offersLag", getDurationBreakdown(totals.offersLag));
    map.put("takes", numberFormat.format(totals.takes.longValue()));
    map.put("takesLag", getDurationBreakdown(totals.takesLag));
    map.put("takeApplies", numberFormat.format(totals.takeApplies.longValue()));
    map.put("takeAppliesLag", getDurationBreakdown(totals.takeAppliesLag));
    map.put("acks", numberFormat.format(totals.acks.longValue()));
    map.put("acksLag", getDurationBreakdown(totals.acksLag));
    map.put("quorums", numberFormat.format(totals.quorums.longValue()));
    map.put("quorumsLag", getDurationBreakdown(totals.quorumsLatency));
    map.put("quorumTimeouts", numberFormat.format(totals.quorumTimeouts.longValue()));
    return map;
}
/**
 * Renders one HTML paragraph per ring-member highwater, as
 * {@code <p>member=hexTxId\n</p>}.
 */
public String renderHighwaters(WALHighwater walHighwater) {
    StringBuilder html = new StringBuilder();
    for (WALHighwater.RingMemberHighwater highwater : walHighwater.ringMemberHighwater) {
        html.append("<p>")
            .append(highwater.ringMember.getMember())
            .append("=")
            .append(Long.toHexString(highwater.transactionId))
            .append("\n")
            .append("</p>");
    }
    return html.toString();
}
/**
 * Placeholder for a partition visualization model; currently always empty.
 * The {@code startFp}/{@code endFp} range is accepted but not yet used.
 */
public Map<String, Object> renderPartition(PartitionName partitionName, long startFp, long endFp) {
    //amzaService.getPartitionCreator().
    return new HashMap<>();
}
/**
 * Renders the HTML overview: uptime/intern badges, CPU/heap/GC progress bars,
 * then the striped and system stats sections.
 *
 * @param expandKeys which per-member sub-sections ("offers", "acks", "quorums")
 *                   to expand in the totals
 */
public String renderOverview(Set<String> expandKeys) throws Exception {
    StringBuilder sb = new StringBuilder();
    sb.append("<p>uptime<span class=\"badge\">").append(getDurationBreakdown(runtimeBean.getUptime())).append("</span>");
    sb.append(" deltaRem1<span class=\"badge\">").append(amzaService.amzaStats.deltaFirstCheckRemoves.longValue()).append("</span>");
    sb.append(" deltaRem2<span class=\"badge\">").append(amzaService.amzaStats.deltaSecondCheckRemoves.longValue()).append(
        "</span>");
    sb.append(" baIntern<span class=\"badge\">").append(amzaInterner.size()).append("</span>");
    // getProcessCpuLoad() already returns a 0..100 percentage, so no scaling here.
    double processCpuLoad = getProcessCpuLoad();
    sb.append(progress("CPU",
        (int) (processCpuLoad),
        numberFormat.format(processCpuLoad) + " cpu load",
        null, null));
    double memoryLoad = (double) memoryBean.getHeapMemoryUsage().getUsed() / (double) memoryBean.getHeapMemoryUsage().getMax();
    sb.append(progress("Heap",
        (int) (memoryLoad * 100),
        humanReadableByteCount(memoryBean.getHeapMemoryUsage().getUsed(), false)
            + " used out of " + humanReadableByteCount(memoryBean.getHeapMemoryUsage().getMax(), false),
        null, null));
    // GC load = cumulative collection time across all collectors divided by uptime.
    long s = 0;
    for (GarbageCollectorMXBean gc : garbageCollectors) {
        s += gc.getCollectionTime();
    }
    double gcLoad = (double) s / (double) runtimeBean.getUptime();
    sb.append(progress("GC",
        (int) (gcLoad * 100),
        getDurationBreakdown(s) + " total gc",
        null, null));
    sb.append("</p>");
    sb.append("<p><h3> Striped </h3></p>");
    renderOverview(sb, amzaService.amzaStats, expandKeys, false);
    sb.append("<p><h3> System </h3></p>");
    renderOverview(sb, amzaService.amzaSystemStats, expandKeys, true);
    return sb.toString();
}
/**
 * Appends one stats section (io/net badges, totals, activity and compaction
 * progress bars) for the given {@link AmzaStats} instance.
 *
 * @param includePartitionTotals when true, also emit a totals row per partition
 */
private void renderOverview(StringBuilder sb, AmzaStats amzaStats, Set<String> expandKeys, boolean includePartitionTotals) throws Exception {
    sb.append("<p>");
    addIoStats("load-", amzaStats.loadIoStats, sb);
    addIoStats("take-", amzaStats.takeIoStats, sb);
    addIoStats("get-", amzaStats.getIoStats, sb);
    addIoStats("merge-", amzaStats.mergeIoStats, sb);
    addIoStats("update-", amzaStats.updateIoStats, sb);
    addIoStats("compact-ts-", amzaStats.compactTombstoneIoStats, sb);
    addNetStats("", amzaStats.netStats, sb);
    sb.append("</p>");
    sb.append("<p>");
    Totals grandTotal = amzaStats.getGrandTotal();
    addTotals(sb, "*", expandKeys, grandTotal);
    if (includePartitionTotals) {
        for (Entry<PartitionName, Totals> partitionNameTotalsEntry : amzaStats.getPartitionTotals().entrySet()) {
            PartitionName partitionName = partitionNameTotalsEntry.getKey();
            addTotals(sb, PartitionName.toHumanReadableString(partitionName), expandKeys, partitionNameTotalsEntry.getValue());
        }
    }
    // NOTE(review): when takes is 0 the double division yields NaN and the (int) cast
    // makes the bar 0 — harmless, but worth knowing.
    sb.append(progress("Took Average Rows (" + numberFormat.format(amzaStats.takes.longValue()) + ")",
        (int) (((double) amzaStats.takeExcessRows.longValue() / amzaStats.takes.longValue()) / 4096 * 100),
        numberFormat.format(amzaStats.takeExcessRows.longValue()),
        null, null));
    // The activity bars below scale against hard-coded expected maxima (100, 10000, ...).
    sb.append(progress("Active Long Polls (" + numberFormat.format(amzaStats.availableRowsStream.longValue()) + ")",
        (int) ((amzaStats.availableRowsStream.longValue() / 100d) * 100), "",
        null, null));
    sb.append(progress("Active Row Streaming (" + numberFormat.format(amzaStats.rowsStream.longValue()) + ")",
        (int) ((amzaStats.rowsStream.longValue() / 100d) * 100), "" + numberFormat.format(amzaStats.completedRowsStream.longValue()),
        null, null));
    sb.append(progress("Active Row Acknowledging (" + numberFormat.format(amzaStats.rowsTaken.longValue()) + ")",
        (int) ((amzaStats.rowsTaken.longValue() / 100d) * 100), "" + numberFormat.format(amzaStats.completedRowsTake.longValue()),
        null, null));
    sb.append(progress("Back Pressure (" + numberFormat.format(amzaStats.backPressure.longValue()) + ")",
        (int) ((amzaStats.backPressure.longValue() / 10000d) * 100), "" + amzaStats.pushBacks.longValue(),
        null, null));
    // Per-stripe delta load and pending-merge bars; the arrays are expected to be parallel.
    long[] count = amzaStats.deltaStripeMergeLoaded;
    double[] load = amzaStats.deltaStripeLoad;
    long[] mergeCount = amzaStats.deltaStripeMergePending;
    double[] mergeLoad = amzaStats.deltaStripeMerge;
    if (count.length == load.length) {
        for (int i = 0; i < load.length; i++) {
            sb.append(progress(" Delta Stripe " + i + " (" + load[i] + ")", (int) (load[i] * 100), "" + numberFormat.format(count[i]), null, null));
            if (mergeLoad.length > i && mergeCount.length > i) {
                sb.append(progress("Merge Stripe " + i + " (" + numberFormat.format(mergeLoad[i]) + ")", (int) (mergeLoad[i] * 100),
                    numberFormat.format(mergeCount[i]) + " partitions",
                    null, null));
            }
        }
    } else {
        LOG.warn("BUG count.length={} should equal load.length={}", count.length, load.length);
    }
    int tombostoneCompaction = amzaStats.ongoingCompaction(AmzaStats.CompactionFamily.tombstone);
    int mergeCompaction = amzaStats.ongoingCompaction(AmzaStats.CompactionFamily.merge);
    int expungeCompaction = amzaStats.ongoingCompaction(AmzaStats.CompactionFamily.expunge);
    sb.append(progress("Tombstone Compactions (" + numberFormat.format(tombostoneCompaction) + ")",
        (int) ((tombostoneCompaction / 10d) * 100), " total:" + amzaStats.getTotalCompactions(CompactionFamily.tombstone),
        null, null));
    sb.append(progress("Merge Compactions (" + numberFormat.format(mergeCompaction) + ")",
        (int) ((mergeCompaction / 10d) * 100), " total:" + amzaStats.getTotalCompactions(CompactionFamily.merge),
        null, null));
    for (int i = 0; i < amzaStats.highwaterPendingLoad.length; i++) {
        sb.append(progress("High water flushed " + i + " (" + numberFormat.format(amzaStats.highwaterPending[i]) + ")",
            (int) (amzaStats.highwaterPendingLoad[i] * 100), " total:" + amzaStats.highwaterFlushed[i],
            null, null));
    }
    sb.append(progress("Expunge Compactions (" + numberFormat.format(expungeCompaction) + ")",
        (int) ((expungeCompaction / 10d) * 100), " total:" + amzaStats.getTotalCompactions(CompactionFamily.expunge),
        null, null));
}
/**
 * Appends one progress bar per counter in {@code grandTotal}, prefixed with
 * {@code name}. The "offers", "acks" and "quorums" bars get per-member sub-bars
 * when their key appears in {@code expandKeys}.
 */
private void addTotals(StringBuilder sb, String name, Set<String> expandKeys, Totals grandTotal) {
    // Each bar scales its lag against a hard-coded budget (1s or 10s) for the percentage.
    sb.append(progress(name + ".gets (" + numberFormat.format(grandTotal.gets.longValue()) + ")",
        (int) (((double) grandTotal.getsLatency / 1000d) * 100),
        getDurationBreakdown(grandTotal.getsLatency) + " lag",
        null, null));
    sb.append(progress(name + ".scans (" + numberFormat.format(grandTotal.scans.longValue()) + ")",
        (int) ((grandTotal.scansLatency / 1000d) * 100),
        getDurationBreakdown(grandTotal.scansLatency) + " lag",
        null, null));
    sb.append(progress(name + ".scanKeys (" + numberFormat.format(grandTotal.scanKeys.longValue()) + ")",
        (int) ((grandTotal.scanKeysLatency / 1000d) * 100),
        getDurationBreakdown(grandTotal.scanKeysLatency) + " lag",
        null, null));
    sb.append(progress(name + ".direct Applied (" + numberFormat.format(grandTotal.directApplies.longValue()) + ")",
        (int) ((grandTotal.directAppliesLag / 1000d) * 100),
        getDurationBreakdown(grandTotal.directAppliesLag) + " lag",
        null, null));
    sb.append(progress(name + ".updates (" + numberFormat.format(grandTotal.updates.longValue()) + ")",
        (int) ((grandTotal.updatesLag / 10000d) * 100),
        getDurationBreakdown(grandTotal.updatesLag) + " lag",
        null, null));
    // Optional per-member breakdown of offer lag.
    List<Map<String, Object>> subOffersLag = Lists.newArrayList();
    if (expandKeys.contains("offers")) {
        for (Entry<RingMember, AtomicLong> entry : grandTotal.memberOffersLag.entrySet()) {
            long latency = entry.getValue().get();
            subOffersLag.add(progressData("Offers " + entry.getKey().getMember(),
                (int) ((latency / 10000d) * 100),
                getDurationBreakdown(latency) + " lag"));
        }
    }
    sb.append(progress(name + ".offers (" + numberFormat.format(grandTotal.offers.longValue()) + ")",
        (int) ((grandTotal.offersLag / 10000d) * 100),
        getDurationBreakdown(grandTotal.offersLag) + " lag",
        "offers", subOffersLag));
    sb.append(progress(name + ".took (" + numberFormat.format(grandTotal.takes.longValue()) + ")",
        (int) ((grandTotal.takesLag / 10000d) * 100),
        getDurationBreakdown(grandTotal.takesLag) + " lag",
        null, null));
    sb.append(progress(name + ".took Applied (" + numberFormat.format(grandTotal.takeApplies.longValue()) + ")",
        (int) ((grandTotal.takeAppliesLag / 1000d) * 100),
        getDurationBreakdown(grandTotal.takeAppliesLag) + " lag",
        null, null));
    // Optional per-member breakdown of ack lag.
    List<Map<String, Object>> subAcksLag = Lists.newArrayList();
    if (expandKeys.contains("acks")) {
        for (Entry<RingMember, AtomicLong> entry : grandTotal.memberAcksLag.entrySet()) {
            long latency = entry.getValue().get();
            subAcksLag.add(progressData("Acks " + entry.getKey().getMember(),
                (int) ((latency / 10000d) * 100),
                getDurationBreakdown(latency) + " lag"));
        }
    }
    sb.append(progress(name + ".acks (" + numberFormat.format(grandTotal.acks.longValue()) + ")",
        (int) ((grandTotal.acksLag / 10000d) * 100),
        getDurationBreakdown(grandTotal.acksLag) + " lag",
        "acks", subAcksLag));
    // Optional per-member breakdown of quorum latency.
    List<Map<String, Object>> subQuorumsLatency = Lists.newArrayList();
    if (expandKeys.contains("quorums")) {
        for (Entry<RingMember, AtomicLong> entry : grandTotal.memberQuorumsLatency.entrySet()) {
            long latency = entry.getValue().get();
            subQuorumsLatency.add(progressData(name + ".quorums " + entry.getKey().getMember(),
                (int) ((latency / 10000d) * 100),
                getDurationBreakdown(latency) + " lag"));
        }
    }
    sb.append(progress(
        name + ".quorums (" + numberFormat.format(grandTotal.quorums.longValue()) + " / " + numberFormat.format(
            grandTotal.quorumTimeouts.longValue()) + ")",
        (int) ((grandTotal.quorumsLatency / 10000d) * 100),
        getDurationBreakdown(grandTotal.quorumsLatency) + " lag",
        "quorums", subQuorumsLatency));
}
/**
 * Appends two badge spans with the network bytes read and written,
 * prefixed with {@code name}.
 */
private void addNetStats(String name, NetStats netStats, StringBuilder sb) {
    sb.append(' ').append(name).append("netR<span class=\"badge\">")
        .append(humanReadableByteCount(netStats.read.longValue(), false))
        .append("</span>");
    sb.append(' ').append(name).append("netW<span class=\"badge\">")
        .append(humanReadableByteCount(netStats.wrote.longValue(), false))
        .append("</span>");
}
/**
 * Appends one badge span with the disk bytes read and written for the given
 * io-stats family, prefixed with {@code name}.
 */
private void addIoStats(String name, IoStats ioStats, StringBuilder sb) {
    sb.append(' ').append(name).append("disk <span class=\"badge\"> R:")
        .append(humanReadableByteCount(ioStats.read.longValue(), false));
    sb.append(" W:").append(humanReadableByteCount(ioStats.wrote.longValue(), false));
    sb.append("</span>");
}
/**
 * Renders one stacked progress bar via the shared soy template.
 *
 * @param progressKey when non-null, identifies the expandable sub-section
 * @param subProgress optional sub-bars; only attached when non-null and non-empty
 */
private String progress(String title, int progress, String value, String progressKey, List<Map<String, Object>> subProgress) {
    Map<String, Object> model = progressData(title, progress, value);
    boolean expandable = progressKey != null;
    if (expandable) {
        model.put("progressKey", progressKey);
    }
    boolean hasChildren = subProgress != null && !subProgress.isEmpty();
    if (hasChildren) {
        model.put("subProgress", subProgress);
    }
    return renderer.render("soy.page.amzaStackedProgress", model);
}
/**
 * Builds the mutable model for a single progress bar; callers may add
 * further keys (see {@link #progress}).
 */
private Map<String, Object> progressData(String title, int progress, String value) {
    Map<String, Object> fields = new HashMap<>();
    fields.put("value", value);
    fields.put("progress", progress);
    fields.put("title", title);
    return fields;
}
/**
 * Reads the JVM process CPU load from the platform OperatingSystem MBean.
 *
 * @return a percentage with one decimal of precision, NaN when the attribute is
 *         unavailable, or 0 while the JVM is still warming up (the bean reports -1.0
 *         for the first couple of seconds)
 */
public static double getProcessCpuLoad() throws MalformedObjectNameException, ReflectionException, InstanceNotFoundException {
    MBeanServer server = ManagementFactory.getPlatformMBeanServer();
    ObjectName osBean = ObjectName.getInstance("java.lang:type=OperatingSystem");
    AttributeList attributes = server.getAttributes(osBean, new String[] { "ProcessCpuLoad" });
    if (attributes.isEmpty()) {
        return Double.NaN;
    }
    Double load = (Double) ((Attribute) attributes.get(0)).getValue();
    if (load == -1.0) {
        // Usually takes a couple of seconds before we get real values.
        return 0;
    }
    // Truncate the 0..1 fraction to a percentage with one decimal point.
    return ((int) (load * 1000) / 10.0);
}
/** Page title shown by the UI chrome. */
@Override
public String getTitle() {
    return "Metrics";
}
/**
 * Formats a byte count for humans, e.g. 1536 -> "1.5 KiB" (binary) or "1.5 kB" (SI).
 *
 * @param si when true use SI units (powers of 1000, "kB"); otherwise binary
 *           units (powers of 1024, "KiB")
 */
public static String humanReadableByteCount(long bytes, boolean si) {
    int unit = si ? 1000 : 1024;
    if (bytes < unit) {
        return bytes + " B";
    }
    int exp = (int) (Math.log(bytes) / Math.log(unit));
    char scale = (si ? "kMGTPE" : "KMGTPE").charAt(exp - 1);
    String prefix = si ? String.valueOf(scale) : scale + "i";
    return String.format("%.1f %sB", bytes / Math.pow(unit, exp), prefix);
}
/**
 * Formats a millisecond duration as "HH:MM:SS.mmm" (hours unpadded beyond two
 * digits when >= 100). Negative inputs are returned verbatim as a decimal string.
 *
 * Cleanup: the original carried a {@code showRemaining} flag that was initialized
 * to {@code true} and never cleared, so every component was always emitted; the
 * dead guards and the incremental mutation of {@code millis} are removed while
 * preserving the exact output format.
 */
public static String getDurationBreakdown(long millis) {
    if (millis < 0) {
        return String.valueOf(millis);
    }
    long hours = TimeUnit.MILLISECONDS.toHours(millis);
    long minutes = TimeUnit.MILLISECONDS.toMinutes(millis) % 60;
    long seconds = TimeUnit.MILLISECONDS.toSeconds(millis) % 60;
    long remainder = millis % 1000;
    StringBuilder sb = new StringBuilder(16);
    if (hours < 10) {
        sb.append('0');
    }
    sb.append(hours).append(':');
    if (minutes < 10) {
        sb.append('0');
    }
    sb.append(minutes).append(':');
    if (seconds < 10) {
        sb.append('0');
    }
    sb.append(seconds).append('.');
    // Pad milliseconds to three digits.
    if (remainder < 100) {
        sb.append('0');
    }
    if (remainder < 10) {
        sb.append('0');
    }
    sb.append(remainder);
    return sb.toString();
}
}
| |
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.devkit.references;
import com.intellij.codeInsight.daemon.EmptyResolveMessageProvider;
import com.intellij.find.FindModel;
import com.intellij.find.impl.FindInProjectUtil;
import com.intellij.ide.presentation.Presentation;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.fileTypes.FileTypeManager;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.util.IconLoader;
import com.intellij.openapi.util.ProperTextRange;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.patterns.PsiJavaElementPattern;
import com.intellij.patterns.PsiMethodPattern;
import com.intellij.patterns.XmlPatterns;
import com.intellij.patterns.uast.UastPatterns;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReference;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReferenceSet;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.FileReferenceUtil;
import com.intellij.psi.impl.source.resolve.reference.impl.providers.PsiFileReference;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlAttribute;
import com.intellij.psi.xml.XmlAttributeValue;
import com.intellij.usages.FindUsagesProcessPresentation;
import com.intellij.usages.UsageViewPresentation;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.devkit.DevKitBundle;
import org.jetbrains.idea.devkit.util.PsiUtil;
import org.jetbrains.uast.UElement;
import org.jetbrains.uast.UastContextKt;
import org.jetbrains.uast.UastLiteralUtils;
import java.util.Collection;
import java.util.List;
import java.util.function.Function;
import static com.intellij.patterns.PsiJavaPatterns.*;
/**
 * DevKit support for icon references.
 *
 * <p>Registers {@link PsiReference}s for three kinds of icon usages:
 * <ul>
 *   <li>{@code icon} attributes in plugin.xml ({@link #registerForIconXmlAttribute}),</li>
 *   <li>string arguments of {@code IconLoader.findIcon}/{@code getIcon}
 *       ({@link #registerForIconLoaderMethods}),</li>
 *   <li>the {@code icon} parameter of {@link Presentation} annotations
 *       ({@link #registerForPresentationAnnotation}).</li>
 * </ul>
 *
 * <p>It also acts as a {@link QueryExecutor} so that "find usages" on an image file in an
 * icons module locates textual references to the image's path via a project-wide text search.
 */
public class IconsReferencesContributor extends PsiReferenceContributor
  implements QueryExecutor<PsiReference, ReferencesSearch.SearchParameters> {

  /**
   * Finds usages of an image file from an icons module by searching the project for the
   * image's source-root-relative path and feeding any file reference found at each match
   * to {@code consumer}.
   *
   * <p>Only runs for {@link PsiBinaryFile}s that are PNG images located in a module accepted
   * by {@link #isIconsModule}; for all other elements it returns {@code true} without doing work.
   */
  @Override
  public boolean execute(@NotNull ReferencesSearch.SearchParameters queryParameters, @NotNull final Processor<? super PsiReference> consumer) {
    final PsiElement file = queryParameters.getElementToSearch();
    if (file instanceof PsiBinaryFile) {
      // Module lookup touches the PSI/roots model, so it must happen under a read action.
      final Module module = ReadAction.compute(() -> ModuleUtilCore.findModuleForPsiElement(file));
      final VirtualFile image = ((PsiBinaryFile)file).getVirtualFile();
      if (isImage(image) && isIconsModule(module)) {
        final Project project = file.getProject();
        final FindModel model = new FindModel();
        final String path = getPathToImage(image, module);
        model.setStringToFind(path);
        model.setCaseSensitive(true);
        model.setFindAll(true);
        model.setWholeWordsOnly(true);
        FindInProjectUtil.findUsages(model, project, usage -> {
          ApplicationManager.getApplication().runReadAction(() -> {
            final PsiElement element = usage.getElement();
            final ProperTextRange textRange = usage.getRangeInElement();
            if (element != null && textRange != null) {
              // Locate the PSI element spanning the matched text.
              // NOTE(review): getEndOffset() is exclusive, so findElementAt(end) may land on the
              // element just after the match; findCommonParent below widens to a covering node.
              final PsiElement start = element.findElementAt(textRange.getStartOffset());
              final PsiElement end = element.findElementAt(textRange.getEndOffset());
              if (start != null && end != null) {
                PsiElement value = PsiTreeUtil.findCommonParent(start, end);
                if (value instanceof PsiJavaToken) {
                  // A bare token (e.g. a string literal token) carries no references;
                  // step up to the literal expression that owns it.
                  value = value.getParent();
                }
                if (value != null) {
                  final PsiFileReference reference = FileReferenceUtil.findFileReference(value);
                  if (reference != null) {
                    consumer.process(reference);
                  }
                }
              }
            }
          });
          // Keep processing further text occurrences.
          return true;
        }, new FindUsagesProcessPresentation(new UsageViewPresentation()));
      }
    }
    return true;
  }

  @Override
  public void registerReferenceProviders(@NotNull PsiReferenceRegistrar registrar) {
    registerForPresentationAnnotation(registrar);
    registerForIconLoaderMethods(registrar);
    registerForIconXmlAttribute(registrar);
  }

  /**
   * Registers a provider for {@code icon="..."} XML attribute values in plugin descriptors.
   * An attribute value starting with {@code /} is resolved as a file path; otherwise it is
   * treated as a dotted icon-field path (e.g. {@code AllIcons.Actions.Find}).
   */
  private static void registerForIconXmlAttribute(@NotNull PsiReferenceRegistrar registrar) {
    registrar.registerReferenceProvider(XmlPatterns.xmlAttributeValue().withLocalName("icon"), new PsiReferenceProvider() {
      @Override
      public PsiReference @NotNull [] getReferencesByElement(@NotNull final PsiElement element, @NotNull ProcessingContext context) {
        if (!PsiUtil.isPluginXmlPsiElement(element)) {
          return PsiReference.EMPTY_ARRAY;
        }
        return new PsiReference[]{
          new IconPsiReferenceBase(element) {
            @Override
            public PsiElement resolve() {
              String value = ((XmlAttributeValue)element).getValue();
              if (value.startsWith("/")) {
                // Leading slash: resolve as a (possibly multi-segment) file path;
                // only the last segment's reference points at the actual file.
                FileReference lastRef = new FileReferenceSet(element).getLastReference();
                return lastRef != null ? lastRef.resolve() : null;
              }
              return resolveIconPath(value, element);
            }

            @Override
            public PsiElement handleElementRename(@NotNull String newElementName) throws IncorrectOperationException {
              PsiElement element = resolve();
              PsiElement resultForFile = handleFile(element, lastRef -> lastRef.handleElementRename(newElementName));
              if (resultForFile != null) {
                return resultForFile;
              }
              PsiElement resultForField = handleField(element, newElementName);
              if (resultForField != null) {
                return resultForField;
              }
              return super.handleElementRename(newElementName);
            }

            @Override
            public PsiElement bindToElement(@NotNull PsiElement element) throws IncorrectOperationException {
              // NOTE: the parameter intentionally shadows the provider's captured `element`;
              // here it is the NEW target the reference should be bound to.
              PsiElement resultForFile = handleFile(element, lastRef -> lastRef.bindToElement(element));
              if (resultForFile != null) {
                return resultForFile;
              }
              PsiElement resultForField = handleField(element, null);
              if (resultForField != null) {
                return resultForField;
              }
              return super.bindToElement(element);
            }

            /** Applies {@code callback} to the last file reference when the target is a file; {@code null} otherwise. */
            private PsiElement handleFile(PsiElement element, Function<FileReference, PsiElement> callback) {
              if (element instanceof PsiFile) {
                FileReference lastRef = new FileReferenceSet(element).getLastReference();
                if (lastRef != null) {
                  return callback.apply(lastRef);
                }
              }
              return null;
            }

            /**
             * Rewrites the attribute value when the target is an icon-class field in the
             * {@code com.intellij.icons} or {@code icons} package. {@code newElementName == null}
             * means "keep the field's current name" (used by bindToElement).
             */
            @Nullable
            private PsiElement handleField(PsiElement element, @Nullable String newElementName) {
              if (element instanceof PsiField) {
                PsiClass containingClass = ((PsiField)element).getContainingClass();
                if (containingClass != null) {
                  String classQualifiedName = containingClass.getQualifiedName();
                  if (classQualifiedName != null) {
                    if (newElementName == null) {
                      newElementName = ((PsiField)element).getName();
                    }
                    if (classQualifiedName.startsWith("com.intellij.icons.")) {
                      return replace(classQualifiedName, newElementName, "com.intellij.icons.");
                    }
                    if (classQualifiedName.startsWith("icons.")) {
                      return replace(classQualifiedName, newElementName, "icons.");
                    }
                  }
                }
              }
              return null;
            }

            /** Sets the attribute to {@code <class FQN minus package>.<newName>} and returns the new value element. */
            private PsiElement replace(String fqn, String newName, String pckg) {
              XmlAttribute parent = (XmlAttribute)getElement().getParent();
              parent.setValue(fqn.substring(pckg.length()) + "." + newName);
              return parent.getValueElement();
            }
          }
        };
      }
    });
  }

  /**
   * Registers file references for the first (path) argument of
   * {@code IconLoader.findIcon(...)} / {@code IconLoader.getIcon(...)} string literals.
   * Active only inside the IntelliJ IDEA project itself; the icons module's source roots
   * are added as extra resolve contexts so root-relative paths resolve.
   */
  private static void registerForIconLoaderMethods(@NotNull PsiReferenceRegistrar registrar) {
    final PsiMethodPattern method = psiMethod().withName("findIcon", "getIcon").definedInClass(IconLoader.class.getName());
    final PsiJavaElementPattern.Capture<PsiLiteralExpression> findGetIconPattern
      = literalExpression().and(psiExpression().methodCallParameter(0, method));
    registrar.registerReferenceProvider(findGetIconPattern, new PsiReferenceProvider() {
      @Override
      public PsiReference @NotNull [] getReferencesByElement(@NotNull final PsiElement element, @NotNull ProcessingContext context) {
        if (!PsiUtil.isIdeaProject(element.getProject())) return PsiReference.EMPTY_ARRAY;
        return new FileReferenceSet(element) {
          @Override
          protected Collection<PsiFileSystemItem> getExtraContexts() {
            // Try the new module name first, then the historical one.
            Module iconsModule = ModuleManager.getInstance(element.getProject()).findModuleByName("intellij.platform.icons");
            if (iconsModule == null) {
              iconsModule = ModuleManager.getInstance(element.getProject()).findModuleByName("icons");
            }
            if (iconsModule == null) {
              return super.getExtraContexts();
            }
            final List<PsiFileSystemItem> result = new SmartList<>();
            final VirtualFile[] roots = ModuleRootManager.getInstance(iconsModule).getSourceRoots();
            final PsiManager psiManager = element.getManager();
            for (VirtualFile root : roots) {
              final PsiDirectory directory = psiManager.findDirectory(root);
              ContainerUtil.addIfNotNull(result, directory);
            }
            return result;
          }
        }.getAllReferences();
      }
    }, PsiReferenceRegistrar.HIGHER_PRIORITY);
  }

  /**
   * Registers a UAST-based provider for the {@code icon} parameter of {@link Presentation}
   * annotations in plugin projects. The string value is resolved as a dotted icon-field path,
   * and rename rewrites the literal content in place.
   */
  private static void registerForPresentationAnnotation(@NotNull PsiReferenceRegistrar registrar) {
    UastReferenceRegistrar.registerUastReferenceProvider(
      registrar,
      UastPatterns.injectionHostUExpression()
        .sourcePsiFilter(psi -> PsiUtil.isPluginProject(psi.getProject()))
        .annotationParam(Presentation.class.getName(), "icon"),
      UastReferenceRegistrar.uastInjectionHostReferenceProvider((uElement, referencePsiElement) -> new PsiReference[]{
        new IconPsiReferenceBase(referencePsiElement) {
          private UElement getUElement() {
            return UastContextKt.toUElement(getElement());
          }

          @Override
          public PsiElement resolve() {
            final UElement uElement = getUElement();
            if (uElement == null) return null;
            String value = UastLiteralUtils.getValueIfStringLiteral(uElement);
            return resolveIconPath(value, getElement());
          }

          @Override
          public PsiElement handleElementRename(@NotNull String newElementName) throws IncorrectOperationException {
            PsiElement field = resolve();
            PsiElement result = handleElement(field, newElementName);
            if (result != null) {
              return result;
            }
            return super.handleElementRename(newElementName);
          }

          /**
           * Rewrites the literal when the resolved target is a field of an icon class in the
           * {@code com.intellij.icons} or {@code icons} package; returns {@code null} otherwise.
           */
          @Nullable
          private PsiElement handleElement(PsiElement element, @Nullable String newElementName) {
            if (element instanceof PsiField) {
              PsiClass containingClass = ((PsiField)element).getContainingClass();
              if (containingClass != null) {
                String classQualifiedName = containingClass.getQualifiedName();
                if (classQualifiedName != null) {
                  if (newElementName == null) {
                    newElementName = ((PsiField)element).getName();
                  }
                  if (classQualifiedName.startsWith("com.intellij.icons.")) {
                    return replace(newElementName, classQualifiedName, "com.intellij.icons.");
                  }
                  if (classQualifiedName.startsWith("icons.")) {
                    return replace(newElementName, classQualifiedName, "icons.");
                  }
                }
              }
            }
            return null;
          }

          /** Replaces the literal's content with {@code <class FQN minus package>.<newElementName>}. */
          private PsiElement replace(String newElementName, String fqn, String packageName) {
            String newValue = fqn.substring(packageName.length()) + "." + newElementName;
            return ElementManipulators.handleContentChange(getElement(), newValue);
          }
        }
      }), PsiReferenceRegistrar.HIGHER_PRIORITY);
  }

  /**
   * Returns the image path relative to the module's single source root, with a leading slash.
   * Callers must have validated the module via {@link #isIconsModule} (exactly one source root).
   */
  @NotNull
  private static String getPathToImage(VirtualFile image, Module module) {
    final String path = ModuleRootManager.getInstance(module).getSourceRoots()[0].getPath();
    return "/" + FileUtil.getRelativePath(path, image.getPath(), '/');
  }

  /** True for the platform icons module ("icons" or "intellij.platform.icons") with exactly one source root. */
  private static boolean isIconsModule(Module module) {
    return module != null && ("icons".equals(module.getName()) || "intellij.platform.icons".equals(module.getName()))
           && ModuleRootManager.getInstance(module).getSourceRoots().length == 1;
  }

  /** True when the file's detected type equals the PNG file type. */
  private static boolean isImage(VirtualFile image) {
    final FileTypeManager mgr = FileTypeManager.getInstance();
    return image != null && mgr.getFileTypeByFile(image) == mgr.getFileTypeByExtension("png");
  }

  /**
   * Resolves a dotted icon path such as {@code AllIcons.Actions.Find} or
   * {@code MyIcons.Some.Field} to the corresponding {@link PsiField}.
   * The first segment must end with "Icons"; intermediate segments are inner classes.
   * Returns {@code null} when any segment fails to resolve.
   */
  @Nullable
  private static PsiField resolveIconPath(String pathStr, PsiElement element) {
    if (pathStr == null) {
      return null;
    }
    List<String> path = StringUtil.split(pathStr, ".");
    if (path.size() > 1 && path.get(0).endsWith("Icons")) {
      Project project = element.getProject();
      PsiClass cur = findIconClass(project, path.get(0));
      if (cur == null) {
        return null;
      }
      for (int i = 1; i < path.size() - 1; i++) {
        cur = cur.findInnerClassByName(path.get(i), false);
        if (cur == null) {
          return null;
        }
      }
      return cur.findFieldByName(path.get(path.size() - 1), false);
    }
    return null;
  }

  /**
   * Locates the icon holder class: {@code AllIcons} is looked up as
   * {@code com.intellij.icons.AllIcons} in the full scope, anything else as
   * {@code icons.<className>} in the project scope only.
   */
  @Nullable
  private static PsiClass findIconClass(Project project, String className) {
    final boolean isAllIcons = "AllIcons".equals(className);
    final String fqnClassName = isAllIcons ? "com.intellij.icons.AllIcons" : "icons." + className;
    return JavaPsiFacade.getInstance(project)
      .findClass(fqnClassName, isAllIcons ? GlobalSearchScope.allScope(project) : GlobalSearchScope.projectScope(project));
  }

  /**
   * Base class for the icon references above: soft=false (errors are reported) and a shared
   * "cannot resolve icon" message for unresolved references.
   */
  private static abstract class IconPsiReferenceBase extends PsiReferenceBase<PsiElement> implements EmptyResolveMessageProvider {
    IconPsiReferenceBase(@NotNull PsiElement element) {
      super(element, true);
    }

    @SuppressWarnings("UnresolvedPropertyKey")
    @NotNull
    @Override
    public String getUnresolvedMessagePattern() {
      return DevKitBundle.message("inspections.presentation.cannot.resolve.icon");
    }
  }
}
| |
/*
* Copyright 2014 Hippo B.V. (http://www.onehippo.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onehippo.cms7.essentials.components.rest;
import java.util.ArrayList;
import java.util.List;
import javax.jcr.Node;
import javax.jcr.RepositoryException;
import javax.servlet.http.HttpServletRequest;
import javax.xml.ws.WebServiceException;
import org.apache.commons.lang.StringUtils;
import org.hippoecm.hst.configuration.hosting.Mount;
import org.hippoecm.hst.content.beans.query.HstQuery;
import org.hippoecm.hst.content.beans.query.HstQueryResult;
import org.hippoecm.hst.content.beans.query.exceptions.QueryException;
import org.hippoecm.hst.content.beans.standard.HippoBean;
import org.hippoecm.hst.content.beans.standard.HippoBeanIterator;
import org.hippoecm.hst.content.beans.standard.HippoHtmlBean;
import org.hippoecm.hst.core.request.HstRequestContext;
import org.hippoecm.hst.jaxrs.services.AbstractResource;
import org.hippoecm.hst.util.PathUtils;
import org.onehippo.cms7.essentials.components.paging.DefaultPagination;
import org.onehippo.cms7.essentials.components.paging.IterablePagination;
import org.onehippo.cms7.essentials.components.paging.Pageable;
import org.onehippo.cms7.essentials.components.rest.ctx.RestContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @version "$Id$"
*/
/**
 * Base class for Hippo Essentials JAX-RS resources.
 *
 * <p>Provides helpers for building scoped {@link HstQuery} instances, executing them and
 * wrapping the results in {@link Pageable} collections, plus utilities to resolve the JCR
 * scope node from the current request/mount and to rewrite rich-text content.
 *
 * @version "$Id$"
 */
public abstract class BaseRestResource extends AbstractResource {

    public static final String INVALID_SCOPE = "Invalid scope";
    public static final String UNCHECKED = "unchecked";

    private static final Logger log = LoggerFactory.getLogger(BaseRestResource.class);

    /**
     * Finds beans of the given type (including subtypes), paged by the context's
     * page/pageSize settings.
     */
    protected <T extends HippoBean> Pageable<T> findBeans(final RestContext context, final Class<T> clazz) {
        return findBeans(context, clazz, Subtypes.INCLUDE);
    }

    /** Finds beans of exactly the given type (subtypes excluded), paged. */
    protected <T extends HippoBean> Pageable<T> findBeansNoSubtypes(final RestContext context, final Class<T> clazz) {
        return findBeans(context, clazz, Subtypes.EXCLUDE);
    }

    /**
     * Finds beans of the given type, paged.
     *
     * @param context  request context carrying scope and paging settings
     * @param clazz    bean type to query for
     * @param subtypes whether subtypes of {@code clazz} are included
     * @return a page of results, or an empty collection when the query fails
     */
    protected <T extends HippoBean> Pageable<T> findBeans(final RestContext context, final Class<T> clazz, final Subtypes subtypes) {
        try {
            final HstQuery query = createQuery(context, clazz, subtypes);
            final HstQueryResult execute = query.execute();
            return new IterablePagination<>(
                    execute.getHippoBeans(),
                    execute.getTotalSize(),
                    context.getPageSize(),
                    context.getPage());
        } catch (QueryException e) {
            log.error("Error finding beans", e);
        }
        // Best-effort contract: query failures yield an empty page rather than an exception.
        return DefaultPagination.emptyCollection();
    }

    /**
     * Executes a pre-built query and wraps the result in a page sized by the context.
     *
     * @throws QueryException when query execution fails
     */
    protected Pageable<? extends HippoBean> executeQuery(final RestContext context, final HstQuery query) throws QueryException {
        final HstQueryResult execute = query.execute();
        return new IterablePagination<>(
                execute.getHippoBeans(),
                execute.getTotalSize(),
                context.getPageSize(),
                context.getPage());
    }

    /**
     * Creates an {@link HstQuery} whose scope is derived from the context (or the site
     * content root when no scope is set), with limit/offset taken from the context's paging.
     *
     * @param context  context
     * @param clazz    zero or more scope classes
     * @param subtypes whether subtypes are included in the query
     * @return HstQuery instance, never {@code null}
     * @throws WebServiceException when the scope node cannot be resolved or the query
     *                             could not be created
     */
    @SuppressWarnings(UNCHECKED)
    public HstQuery createQuery(final RestContext context, final Class<? extends HippoBean> clazz, final Subtypes subtypes) {
        HstQuery query = null;
        try {
            Node scopeNode = getScopeForContext(context);
            query = getHstQueryManager(context.getRequestContext()).createQuery(scopeNode, clazz, subtypes.isIncludeSubtypes());
            final int pageSize = context.getPageSize();
            final int page = context.getPage();
            query.setLimit(pageSize);
            // Pages are 1-based; translate to a 0-based item offset.
            query.setOffset((page - 1) * pageSize);
        } catch (QueryException e) {
            log.error("Error creating HST query", e);
        } catch (RepositoryException e) {
            throw new WebServiceException(INVALID_SCOPE, e);
        }
        if (query == null) {
            throw new WebServiceException("Query was null (failed to create it)");
        }
        return query;
    }

    /**
     * Resolves the JCR scope node for the context: the site content root when no scope is
     * set, an absolute repository path when {@code context.isAbsolutePath()}, otherwise a
     * path relative to the site content root.
     */
    private Node getScopeForContext(final RestContext context) throws RepositoryException {
        Node scopeNode;
        final HttpServletRequest request = context.getRequest();
        if (context.getScope() == null) {
            scopeNode = getScope(request);
        } else {
            if (context.isAbsolutePath()) {
                final Node rootNode = context.getRequestContext().getSession().getRootNode();
                scopeNode = rootNode.getNode(StringUtils.removeStart(context.getScope(), "/"));
            } else {
                scopeNode = getScope(request, context.getScope());
            }
        }
        return scopeNode;
    }

    /**
     * Returns the content root node of the resolved site mount, or {@code null} when the
     * mount or its content path cannot be determined.
     */
    public Node getScope(final HttpServletRequest request) throws RepositoryException {
        HstRequestContext requestContext = getRequestContext(request);
        Mount siteMount = requestContext.getResolvedMount().getMount();
        if (siteMount == null) {
            log.error("Couldn't find site mount for rest service");
            return null;
        }
        String contentPath = siteMount.getContentPath();
        if (contentPath != null) {
            return requestContext.getSession().getRootNode().getNode(PathUtils.normalizePath(contentPath));
        }
        return null;
    }

    /**
     * Returns the node at {@code relativePath} below the site content root.
     *
     * <p>NOTE(review): {@link #getScope(HttpServletRequest)} may return {@code null}
     * (missing mount/content path), in which case this throws an NPE — confirm whether
     * callers guarantee a resolvable mount before hardening this.
     */
    public Node getScope(final HttpServletRequest request, String relativePath) throws RepositoryException {
        final Node root = getScope(request);
        return root.getNode(relativePath);
    }

    /** Executes the query and returns the first bean, or {@code null} when there are no results. */
    @SuppressWarnings(UNCHECKED)
    protected <T extends HippoBean> T getSingleBean(HstQuery query) throws QueryException {
        final HstQueryResult results = query.execute();
        final HippoBeanIterator beans = results.getHippoBeans();
        if (beans.hasNext()) {
            return (T) beans.nextHippoBean();
        }
        return null;
    }

    /**
     * Executes the query and returns ALL resulting beans (nulls skipped).
     *
     * @return list of beans; empty when the query has no results
     * @throws QueryException when query execution fails
     */
    protected <T extends HippoBean> List<T> populateBeans(HstQuery query) throws QueryException {
        final HstQueryResult results = query.execute();
        final HippoBeanIterator beans = results.getHippoBeans();
        final List<T> retval = new ArrayList<>();
        // BUG FIX: this was `if (beans.hasNext())`, which collected at most one bean even
        // though the method returns a List of all results (getSingleBean covers the
        // single-bean case). Iterate the full result set.
        while (beans.hasNext()) {
            @SuppressWarnings({UNCHECKED})
            final T bean = (T) beans.nextHippoBean();
            if (bean != null) {
                retval.add(bean);
            }
        }
        return retval;
    }

    /**
     * Rewrites the HTML body's content (links, images) for the current request context.
     *
     * @return rewritten HTML, or {@code null} when {@code body} is {@code null}
     */
    public String parseHtml(RestContext context, HippoHtmlBean body) {
        if (body == null) {
            return null;
        }
        final String content = body.getContent();
        return getContentRewriter().rewrite(content, body.getNode(), context.getRequestContext());
    }

    /** Whether a query includes subtypes of the requested bean class. */
    public enum Subtypes {
        INCLUDE(true), EXCLUDE(false);

        private final boolean includeSubtypes;

        Subtypes(final boolean includeSubtypes) {
            this.includeSubtypes = includeSubtypes;
        }

        public boolean isIncludeSubtypes() {
            return includeSubtypes;
        }
    }
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/services/ad_group_extension_setting_service.proto
package com.google.ads.googleads.v10.services;
/**
* <pre>
* Response message for an ad group extension setting mutate.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse}
*/
public final class MutateAdGroupExtensionSettingsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse)
MutateAdGroupExtensionSettingsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use MutateAdGroupExtensionSettingsResponse.newBuilder() to construct.
private MutateAdGroupExtensionSettingsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MutateAdGroupExtensionSettingsResponse() {
results_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new MutateAdGroupExtensionSettingsResponse();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor (protobuf-generated; do not hand-edit logic).
// Reads tags until EOF (tag 0): field 2 (tag 18) appends repeated `results_` messages,
// lazily allocating the list and tracking it via bit 0 of mutable_bitField0_;
// field 3 (tag 26) merges into the singular `partialFailureError_` Status.
// Unknown fields are preserved in `unknownFields`.
private MutateAdGroupExtensionSettingsResponse(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // End of stream.
          done = true;
          break;
        case 18: {
          // repeated MutateAdGroupExtensionSettingResult results = 2;
          if (!((mutable_bitField0_ & 0x00000001) != 0)) {
            results_ = new java.util.ArrayList<com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult>();
            mutable_bitField0_ |= 0x00000001;
          }
          results_.add(
              input.readMessage(com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.parser(), extensionRegistry));
          break;
        }
        case 26: {
          // .google.rpc.Status partial_failure_error = 3; merge if already set.
          com.google.rpc.Status.Builder subBuilder = null;
          if (partialFailureError_ != null) {
            subBuilder = partialFailureError_.toBuilder();
          }
          partialFailureError_ = input.readMessage(com.google.rpc.Status.parser(), extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(partialFailureError_);
            partialFailureError_ = subBuilder.buildPartial();
          }
          break;
        }
        default: {
          // Unrecognized tag: keep it in unknownFields; a false return means EOF mid-group.
          if (!parseUnknownField(
              input, unknownFields, extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e).setUnfinishedMessage(this);
  } finally {
    // Seal the repeated field and the unknown-field set even on failure paths.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      results_ = java.util.Collections.unmodifiableList(results_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.services.AdGroupExtensionSettingServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupExtensionSettingsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.services.AdGroupExtensionSettingServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupExtensionSettingsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse.class, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse.Builder.class);
}
public static final int PARTIAL_FAILURE_ERROR_FIELD_NUMBER = 3;
private com.google.rpc.Status partialFailureError_;
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
* @return Whether the partialFailureError field is set.
*/
@java.lang.Override
public boolean hasPartialFailureError() {
return partialFailureError_ != null;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
* @return The partialFailureError.
*/
@java.lang.Override
public com.google.rpc.Status getPartialFailureError() {
return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
}
/**
* <pre>
* Errors that pertain to operation failures in the partial failure mode.
* Returned only when partial_failure = true and all errors occur inside the
* operations. If any errors occur outside the operations (e.g. auth errors),
* we return an RPC level error.
* </pre>
*
* <code>.google.rpc.Status partial_failure_error = 3;</code>
*/
@java.lang.Override
public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() {
return getPartialFailureError();
}
public static final int RESULTS_FIELD_NUMBER = 2;
private java.util.List<com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult> results_;
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult> getResultsList() {
return results_;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResultOrBuilder>
getResultsOrBuilderList() {
return results_;
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
*/
@java.lang.Override
public int getResultsCount() {
return results_.size();
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult getResults(int index) {
return results_.get(index);
}
/**
* <pre>
* All results for the mutate.
* </pre>
*
* <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResultOrBuilder getResultsOrBuilder(
int index) {
return results_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes set fields in field-number order: repeated results (2), then
// partial_failure_error (3) if present, then any preserved unknown fields.
// (Protobuf-generated; do not hand-edit logic.)
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  for (int i = 0; i < results_.size(); i++) {
    output.writeMessage(2, results_.get(i));
  }
  if (partialFailureError_ != null) {
    output.writeMessage(3, getPartialFailureError());
  }
  unknownFields.writeTo(output);
}
// Computes and memoizes the serialized byte size (mirrors writeTo: results,
// partial_failure_error, unknown fields). -1 marks "not yet computed".
// (Protobuf-generated; do not hand-edit logic.)
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < results_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(2, results_.get(i));
  }
  if (partialFailureError_ != null) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(3, getPartialFailureError());
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
// Field-wise equality: partial_failure_error presence and value, results list,
// and unknown fields must all match. (Protobuf-generated; do not hand-edit logic.)
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse other = (com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse) obj;

  if (hasPartialFailureError() != other.hasPartialFailureError()) return false;
  if (hasPartialFailureError()) {
    if (!getPartialFailureError()
        .equals(other.getPartialFailureError())) return false;
  }
  if (!getResultsList()
      .equals(other.getResultsList())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
// Memoized hash consistent with equals(): seeded with the descriptor hash, then
// mixes each set field tagged by its field number, then the unknown fields.
// (Protobuf-generated; do not hand-edit logic.)
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasPartialFailureError()) {
    hash = (37 * hash) + PARTIAL_FAILURE_ERROR_FIELD_NUMBER;
    hash = (53 * hash) + getPartialFailureError().hashCode();
  }
  if (getResultsCount() > 0) {
    hash = (37 * hash) + RESULTS_FIELD_NUMBER;
    hash = (53 * hash) + getResultsList().hashCode();
  }
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Response message for an ad group extension setting mutate.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse}
*/
public static final class Builder extends
    com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
    // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse)
    com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponseOrBuilder {
  // Reflection descriptor for the enclosing message type.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v10.services.AdGroupExtensionSettingServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupExtensionSettingsResponse_descriptor;
  }
  // Field-accessor table binding descriptor fields to the generated getters/setters.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v10.services.AdGroupExtensionSettingServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupExtensionSettingsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse.class, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse.Builder.class);
  }
  // Construct using com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }
  private Builder(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // Eagerly creates nested field builders when the runtime requires it
  // (alwaysUseFieldBuilders is a protobuf-internal testing/diagnostic flag).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3
            .alwaysUseFieldBuilders) {
      getResultsFieldBuilder();
    }
  }
  // Resets both fields (partial_failure_error and results) to their defaults.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    if (partialFailureErrorBuilder_ == null) {
      partialFailureError_ = null;
    } else {
      partialFailureError_ = null;
      partialFailureErrorBuilder_ = null;
    }
    if (resultsBuilder_ == null) {
      results_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
    } else {
      resultsBuilder_.clear();
    }
    return this;
  }
  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return com.google.ads.googleads.v10.services.AdGroupExtensionSettingServiceProto.internal_static_google_ads_googleads_v10_services_MutateAdGroupExtensionSettingsResponse_descriptor;
  }
  @java.lang.Override
  public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse getDefaultInstanceForType() {
    return com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse.getDefaultInstance();
  }
  // Builds the message, throwing if required fields are unset (none here, so
  // isInitialized() is always true and this never throws in practice).
  @java.lang.Override
  public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse build() {
    com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }
  // Copies builder state into a new message without an initialization check.
  // The repeated results_ list is frozen (made unmodifiable) on first build
  // so the message and builder can share it safely.
  @java.lang.Override
  public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse buildPartial() {
    com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse result = new com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse(this);
    int from_bitField0_ = bitField0_;
    if (partialFailureErrorBuilder_ == null) {
      result.partialFailureError_ = partialFailureError_;
    } else {
      result.partialFailureError_ = partialFailureErrorBuilder_.build();
    }
    if (resultsBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)) {
        results_ = java.util.Collections.unmodifiableList(results_);
        bitField0_ = (bitField0_ & ~0x00000001);
      }
      result.results_ = results_;
    } else {
      result.results_ = resultsBuilder_.build();
    }
    onBuilt();
    return result;
  }
  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.setField(field, value);
  }
  @java.lang.Override
  public Builder clearField(
      com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }
  @java.lang.Override
  public Builder clearOneof(
      com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }
  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }
  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field,
      java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }
  // Dispatches to the typed mergeFrom when the other message is of this type;
  // otherwise falls back to the generic reflective merge.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse) {
      return mergeFrom((com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }
  // Typed merge: singular fields are overwritten/merged, the repeated results
  // field is concatenated. Merging the default instance is a no-op.
  public Builder mergeFrom(com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse other) {
    if (other == com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse.getDefaultInstance()) return this;
    if (other.hasPartialFailureError()) {
      mergePartialFailureError(other.getPartialFailureError());
    }
    if (resultsBuilder_ == null) {
      if (!other.results_.isEmpty()) {
        if (results_.isEmpty()) {
          // Adopt the other message's (immutable) list directly; it will be
          // copied on the next mutation via ensureResultsIsMutable().
          results_ = other.results_;
          bitField0_ = (bitField0_ & ~0x00000001);
        } else {
          ensureResultsIsMutable();
          results_.addAll(other.results_);
        }
        onChanged();
      }
    } else {
      if (!other.results_.isEmpty()) {
        if (resultsBuilder_.isEmpty()) {
          resultsBuilder_.dispose();
          resultsBuilder_ = null;
          results_ = other.results_;
          bitField0_ = (bitField0_ & ~0x00000001);
          resultsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
               getResultsFieldBuilder() : null;
        } else {
          resultsBuilder_.addAllMessages(other.results_);
        }
      }
    }
    this.mergeUnknownFields(other.unknownFields);
    onChanged();
    return this;
  }
  // This message has no required fields, so any state is initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }
  // Parse-then-merge: on a parse error, any partially-read message is still
  // merged in (finally block) before the exception is rethrown as IOException.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse) e.getUnfinishedMessage();
      throw e.unwrapIOException();
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  // Bit 0x00000001 tracks whether results_ is a private mutable copy.
  private int bitField0_;
  // partial_failure_error uses either the raw message field or, once a nested
  // builder has been requested, the SingleFieldBuilderV3 (never both).
  private com.google.rpc.Status partialFailureError_;
  private com.google.protobuf.SingleFieldBuilderV3<
      com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder> partialFailureErrorBuilder_;
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   * @return Whether the partialFailureError field is set.
   */
  public boolean hasPartialFailureError() {
    return partialFailureErrorBuilder_ != null || partialFailureError_ != null;
  }
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   * @return The partialFailureError.
   */
  public com.google.rpc.Status getPartialFailureError() {
    if (partialFailureErrorBuilder_ == null) {
      return partialFailureError_ == null ? com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
    } else {
      return partialFailureErrorBuilder_.getMessage();
    }
  }
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   */
  public Builder setPartialFailureError(com.google.rpc.Status value) {
    if (partialFailureErrorBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      partialFailureError_ = value;
      onChanged();
    } else {
      partialFailureErrorBuilder_.setMessage(value);
    }
    return this;
  }
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   */
  public Builder setPartialFailureError(
      com.google.rpc.Status.Builder builderForValue) {
    if (partialFailureErrorBuilder_ == null) {
      partialFailureError_ = builderForValue.build();
      onChanged();
    } else {
      partialFailureErrorBuilder_.setMessage(builderForValue.build());
    }
    return this;
  }
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   */
  public Builder mergePartialFailureError(com.google.rpc.Status value) {
    if (partialFailureErrorBuilder_ == null) {
      if (partialFailureError_ != null) {
        partialFailureError_ =
          com.google.rpc.Status.newBuilder(partialFailureError_).mergeFrom(value).buildPartial();
      } else {
        partialFailureError_ = value;
      }
      onChanged();
    } else {
      partialFailureErrorBuilder_.mergeFrom(value);
    }
    return this;
  }
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   */
  public Builder clearPartialFailureError() {
    if (partialFailureErrorBuilder_ == null) {
      partialFailureError_ = null;
      onChanged();
    } else {
      partialFailureError_ = null;
      partialFailureErrorBuilder_ = null;
    }
    return this;
  }
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   */
  public com.google.rpc.Status.Builder getPartialFailureErrorBuilder() {
    onChanged();
    return getPartialFailureErrorFieldBuilder().getBuilder();
  }
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   */
  public com.google.rpc.StatusOrBuilder getPartialFailureErrorOrBuilder() {
    if (partialFailureErrorBuilder_ != null) {
      return partialFailureErrorBuilder_.getMessageOrBuilder();
    } else {
      return partialFailureError_ == null ?
          com.google.rpc.Status.getDefaultInstance() : partialFailureError_;
    }
  }
  /**
   * <pre>
   * Errors that pertain to operation failures in the partial failure mode.
   * Returned only when partial_failure = true and all errors occur inside the
   * operations. If any errors occur outside the operations (e.g. auth errors),
   * we return an RPC level error.
   * </pre>
   *
   * <code>.google.rpc.Status partial_failure_error = 3;</code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
      com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
      getPartialFailureErrorFieldBuilder() {
    if (partialFailureErrorBuilder_ == null) {
      partialFailureErrorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
          com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>(
              getPartialFailureError(),
              getParentForChildren(),
              isClean());
      // Ownership moves to the field builder; the raw field is no longer read.
      partialFailureError_ = null;
    }
    return partialFailureErrorBuilder_;
  }
  // Repeated results field: raw list until a RepeatedFieldBuilderV3 is
  // requested, after which the builder owns the data exclusively.
  private java.util.List<com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult> results_ =
    java.util.Collections.emptyList();
  // Copy-on-write guard: replaces a shared/immutable list with a private
  // ArrayList before the first mutation (tracked via bitField0_).
  private void ensureResultsIsMutable() {
    if (!((bitField0_ & 0x00000001) != 0)) {
      results_ = new java.util.ArrayList<com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult>(results_);
      bitField0_ |= 0x00000001;
     }
  }
  private com.google.protobuf.RepeatedFieldBuilderV3<
      com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResultOrBuilder> resultsBuilder_;
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public java.util.List<com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult> getResultsList() {
    if (resultsBuilder_ == null) {
      return java.util.Collections.unmodifiableList(results_);
    } else {
      return resultsBuilder_.getMessageList();
    }
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public int getResultsCount() {
    if (resultsBuilder_ == null) {
      return results_.size();
    } else {
      return resultsBuilder_.getCount();
    }
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult getResults(int index) {
    if (resultsBuilder_ == null) {
      return results_.get(index);
    } else {
      return resultsBuilder_.getMessage(index);
    }
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder setResults(
      int index, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult value) {
    if (resultsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureResultsIsMutable();
      results_.set(index, value);
      onChanged();
    } else {
      resultsBuilder_.setMessage(index, value);
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder setResults(
      int index, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder builderForValue) {
    if (resultsBuilder_ == null) {
      ensureResultsIsMutable();
      results_.set(index, builderForValue.build());
      onChanged();
    } else {
      resultsBuilder_.setMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder addResults(com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult value) {
    if (resultsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureResultsIsMutable();
      results_.add(value);
      onChanged();
    } else {
      resultsBuilder_.addMessage(value);
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder addResults(
      int index, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult value) {
    if (resultsBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      ensureResultsIsMutable();
      results_.add(index, value);
      onChanged();
    } else {
      resultsBuilder_.addMessage(index, value);
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder addResults(
      com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder builderForValue) {
    if (resultsBuilder_ == null) {
      ensureResultsIsMutable();
      results_.add(builderForValue.build());
      onChanged();
    } else {
      resultsBuilder_.addMessage(builderForValue.build());
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder addResults(
      int index, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder builderForValue) {
    if (resultsBuilder_ == null) {
      ensureResultsIsMutable();
      results_.add(index, builderForValue.build());
      onChanged();
    } else {
      resultsBuilder_.addMessage(index, builderForValue.build());
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder addAllResults(
      java.lang.Iterable<? extends com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult> values) {
    if (resultsBuilder_ == null) {
      ensureResultsIsMutable();
      com.google.protobuf.AbstractMessageLite.Builder.addAll(
          values, results_);
      onChanged();
    } else {
      resultsBuilder_.addAllMessages(values);
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder clearResults() {
    if (resultsBuilder_ == null) {
      results_ = java.util.Collections.emptyList();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
    } else {
      resultsBuilder_.clear();
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public Builder removeResults(int index) {
    if (resultsBuilder_ == null) {
      ensureResultsIsMutable();
      results_.remove(index);
      onChanged();
    } else {
      resultsBuilder_.remove(index);
    }
    return this;
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder getResultsBuilder(
      int index) {
    return getResultsFieldBuilder().getBuilder(index);
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResultOrBuilder getResultsOrBuilder(
      int index) {
    if (resultsBuilder_ == null) {
      return results_.get(index); } else {
      return resultsBuilder_.getMessageOrBuilder(index);
    }
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public java.util.List<? extends com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResultOrBuilder>
       getResultsOrBuilderList() {
    if (resultsBuilder_ != null) {
      return resultsBuilder_.getMessageOrBuilderList();
    } else {
      return java.util.Collections.unmodifiableList(results_);
    }
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder addResultsBuilder() {
    return getResultsFieldBuilder().addBuilder(
        com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.getDefaultInstance());
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder addResultsBuilder(
      int index) {
    return getResultsFieldBuilder().addBuilder(
        index, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.getDefaultInstance());
  }
  /**
   * <pre>
   * All results for the mutate.
   * </pre>
   *
   * <code>repeated .google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult results = 2;</code>
   */
  public java.util.List<com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder>
       getResultsBuilderList() {
    return getResultsFieldBuilder().getBuilderList();
  }
  // Lazily switches the results field from the raw list to a field builder;
  // the list's ownership transfers to the builder and results_ is nulled.
  private com.google.protobuf.RepeatedFieldBuilderV3<
      com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResultOrBuilder>
      getResultsFieldBuilder() {
    if (resultsBuilder_ == null) {
      resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
          com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResult.Builder, com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingResultOrBuilder>(
              results_,
              ((bitField0_ & 0x00000001) != 0),
              getParentForChildren(),
              isClean());
      results_ = null;
    }
    return resultsBuilder_;
  }
  @java.lang.Override
  public final Builder setUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }
  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }
  // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse)
// Shared immutable default (empty) instance, created eagerly at class load.
private static final com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse();
}
// Returns the singleton default instance (all fields unset/empty).
public static com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser; each call constructs a new message from the stream.
private static final com.google.protobuf.Parser<MutateAdGroupExtensionSettingsResponse>
    PARSER = new com.google.protobuf.AbstractParser<MutateAdGroupExtensionSettingsResponse>() {
  @java.lang.Override
  public MutateAdGroupExtensionSettingsResponse parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new MutateAdGroupExtensionSettingsResponse(input, extensionRegistry);
  }
};
// Static accessor for the message parser.
public static com.google.protobuf.Parser<MutateAdGroupExtensionSettingsResponse> parser() {
  return PARSER;
}
// Instance-level accessor for the same shared parser (protobuf reflection hook).
@java.lang.Override
public com.google.protobuf.Parser<MutateAdGroupExtensionSettingsResponse> getParserForType() {
  return PARSER;
}
// Instance-level accessor for the shared default instance.
@java.lang.Override
public com.google.ads.googleads.v10.services.MutateAdGroupExtensionSettingsResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
| |
/*
* Workspace API
* Agent API
*
* OpenAPI spec version: 9.0.000.97.4639
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.genesys.internal.workspace.model;
import java.util.Objects;
import java.util.Arrays;
import com.genesys.internal.workspace.model.Kvpair;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
 * OpenMediaChannel
 *
 * Swagger model describing an agent's open-media channel: its name, current
 * state, do-not-disturb flag, reason codes, active interactions and the
 * capabilities available on the channel. All fields are optional and default
 * to {@code null}; list fields are lazily created by the {@code add*Item}
 * helpers.
 */
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2022-03-03T19:45:16.214Z")
public class OpenMediaChannel {
  @SerializedName("name")
  private String name = null;
  @SerializedName("state")
  private String state = null;
  @SerializedName("dnd")
  private Boolean dnd = null;
  @SerializedName("reasons")
  private List<Kvpair> reasons = null;
  @SerializedName("interactions")
  private List<Object> interactions = null;
  @SerializedName("capabilities")
  private List<String> capabilities = null;
  /** Fluent setter for {@code name}. */
  public OpenMediaChannel name(String name) {
    setName(name);
    return this;
  }
  /**
   * Get name
   * @return name
   **/
  @ApiModelProperty(value = "")
  public String getName() {
    return name;
  }
  public void setName(String name) {
    this.name = name;
  }
  /** Fluent setter for {@code state}. */
  public OpenMediaChannel state(String state) {
    setState(state);
    return this;
  }
  /**
   * Get state
   * @return state
   **/
  @ApiModelProperty(value = "")
  public String getState() {
    return state;
  }
  public void setState(String state) {
    this.state = state;
  }
  /** Fluent setter for {@code dnd}. */
  public OpenMediaChannel dnd(Boolean dnd) {
    setDnd(dnd);
    return this;
  }
  /**
   * Get dnd
   * @return dnd
   **/
  @ApiModelProperty(value = "")
  public Boolean isDnd() {
    return dnd;
  }
  public void setDnd(Boolean dnd) {
    this.dnd = dnd;
  }
  /** Fluent setter for {@code reasons}. */
  public OpenMediaChannel reasons(List<Kvpair> reasons) {
    setReasons(reasons);
    return this;
  }
  /** Appends one reason, lazily creating the backing list on first use. */
  public OpenMediaChannel addReasonsItem(Kvpair reasonsItem) {
    List<Kvpair> target = this.reasons;
    if (target == null) {
      target = new ArrayList<Kvpair>();
      this.reasons = target;
    }
    target.add(reasonsItem);
    return this;
  }
  /**
   * Information on causes for, and results of, actions taken by the user of the current DN. For details about reasons, refer to the [*Genesys Events and Models Reference Manual*](https://docs.genesys.com/Documentation/System/Current/GenEM/Reasons).
   * @return reasons
   **/
  @ApiModelProperty(value = "Information on causes for, and results of, actions taken by the user of the current DN. For details about reasons, refer to the [*Genesys Events and Models Reference Manual*](https://docs.genesys.com/Documentation/System/Current/GenEM/Reasons).")
  public List<Kvpair> getReasons() {
    return reasons;
  }
  public void setReasons(List<Kvpair> reasons) {
    this.reasons = reasons;
  }
  /** Fluent setter for {@code interactions}. */
  public OpenMediaChannel interactions(List<Object> interactions) {
    setInteractions(interactions);
    return this;
  }
  /** Appends one interaction, lazily creating the backing list on first use. */
  public OpenMediaChannel addInteractionsItem(Object interactionsItem) {
    List<Object> target = this.interactions;
    if (target == null) {
      target = new ArrayList<Object>();
      this.interactions = target;
    }
    target.add(interactionsItem);
    return this;
  }
  /**
   * Get interactions
   * @return interactions
   **/
  @ApiModelProperty(value = "")
  public List<Object> getInteractions() {
    return interactions;
  }
  public void setInteractions(List<Object> interactions) {
    this.interactions = interactions;
  }
  /** Fluent setter for {@code capabilities}. */
  public OpenMediaChannel capabilities(List<String> capabilities) {
    setCapabilities(capabilities);
    return this;
  }
  /** Appends one capability, lazily creating the backing list on first use. */
  public OpenMediaChannel addCapabilitiesItem(String capabilitiesItem) {
    List<String> target = this.capabilities;
    if (target == null) {
      target = new ArrayList<String>();
      this.capabilities = target;
    }
    target.add(capabilitiesItem);
    return this;
  }
  /**
   * Get capabilities
   * @return capabilities
   **/
  @ApiModelProperty(value = "")
  public List<String> getCapabilities() {
    return capabilities;
  }
  public void setCapabilities(List<String> capabilities) {
    this.capabilities = capabilities;
  }
  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    OpenMediaChannel that = (OpenMediaChannel) o;
    return Objects.equals(this.name, that.name)
        && Objects.equals(this.state, that.state)
        && Objects.equals(this.dnd, that.dnd)
        && Objects.equals(this.reasons, that.reasons)
        && Objects.equals(this.interactions, that.interactions)
        && Objects.equals(this.capabilities, that.capabilities);
  }
  @Override
  public int hashCode() {
    return Objects.hash(name, state, dnd, reasons, interactions, capabilities);
  }
  @Override
  public String toString() {
    return "class OpenMediaChannel {\n"
        + "    name: " + toIndentedString(name) + "\n"
        + "    state: " + toIndentedString(state) + "\n"
        + "    dnd: " + toIndentedString(dnd) + "\n"
        + "    reasons: " + toIndentedString(reasons) + "\n"
        + "    interactions: " + toIndentedString(interactions) + "\n"
        + "    capabilities: " + toIndentedString(capabilities) + "\n"
        + "}";
  }
  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    return o == null ? "null" : o.toString().replace("\n", "\n    ");
  }
}
| |
/*
* dex2jar - Tools to work with android .dex and java .class files
* Copyright (c) 2009-2012 Panxiaobo
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.googlecode.dex2jar.tools;
import java.io.File;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.nio.charset.StandardCharsets;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.spi.FileSystemProvider;
import java.util.*;
public abstract class BaseCmd {
public static String getBaseName(String fn) {
int x = fn.lastIndexOf('.');
return x >= 0 ? fn.substring(0, x) : fn;
}
public static String getBaseName(Path fn) {
return getBaseName(fn.getFileName().toString());
}
    /**
     * Callback invoked for each regular file visited by
     * {@code walkFileTreeX}/{@code walkJarOrDir}.
     */
    public interface FileVisitorX {
        // change the relative from Path to String
        // java.nio.file.ProviderMismatchException on jdk8
        void visitFile(Path file, String relative) throws IOException;
    }
    /**
     * Walks the tree rooted at {@code base}, invoking {@code fv} for every
     * regular file with its path relative to {@code base} (as a String, to
     * avoid cross-provider Path mixing — see {@link FileVisitorX}).
     *
     * @throws IOException if traversal or the visitor fails
     */
    public static void walkFileTreeX(final Path base, final FileVisitorX fv) throws IOException {
        Files.walkFileTree(base, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                fv.visitFile(file, base.relativize(file).toString());
                return super.visitFile(file, attrs);
            }
        });
    }
    /**
     * Visits every file under {@code in}: directly when it is a directory,
     * otherwise by mounting it as a zip/jar file system (closed afterwards)
     * and walking from its root.
     *
     * @throws IOException if {@code in} cannot be opened or traversed
     */
    public static void walkJarOrDir(final Path in, final FileVisitorX fv) throws IOException {
        if (Files.isDirectory(in)) {
            walkFileTreeX(in, fv);
        } else {
            try (FileSystem inputFileSystem = openZip(in)) {
                walkFileTreeX(inputFileSystem.getPath("/"), fv);
            }
        }
    }
public static void createParentDirectories(Path p) throws IOException {
// merge patch from t3stwhat, fix crash on save to windows path like 'C:\\abc.jar'
Path parent = p.getParent();
if (parent != null && !Files.exists(parent)) {
Files.createDirectories(parent);
}
}
public static FileSystem createZip(Path output) throws IOException {
Map<String, Object> env = new HashMap<>();
env.put("create", "true");
Files.deleteIfExists(output);
createParentDirectories(output);
for (FileSystemProvider p : FileSystemProvider.installedProviders()) {
String s = p.getScheme();
if ("jar".equals(s) || "zip".equalsIgnoreCase(s)) {
return p.newFileSystem(output, env);
}
}
throw new IOException("cant find zipfs support");
}
public static FileSystem openZip(Path in) throws IOException {
for (FileSystemProvider p : FileSystemProvider.installedProviders()) {
String s = p.getScheme();
if ("jar".equals(s) || "zip".equalsIgnoreCase(s)) {
return p.newFileSystem(in, new HashMap<String, Object>());
}
}
throw new IOException("cant find zipfs support");
}
@SuppressWarnings("serial")
protected static class HelpException extends RuntimeException {
public HelpException() {
super();
}
public HelpException(String message) {
super(message);
}
}
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = { ElementType.FIELD })
static public @interface Opt {
String argName() default "";
String description() default "";
boolean hasArg() default true;
String longOpt() default "";
String opt() default "";
boolean required() default false;
}
static protected class Option implements Comparable<Option> {
public String argName = "arg";
public String description;
public Field field;
public boolean hasArg = true;
public String longOpt;
public String opt;
public boolean required = false;
@Override
public int compareTo(Option o) {
int result = s(this.opt, o.opt);
if (result == 0) {
result = s(this.longOpt, o.longOpt);
if (result == 0) {
result = s(this.argName, o.argName);
if (result == 0) {
result = s(this.description, o.description);
}
}
}
return result;
}
private static int s(String a, String b) {
if (a != null && b != null) {
return a.compareTo(b);
} else if (a != null) {
return 1;
} else if (b != null) {
return -1;
} else {
return 0;
}
}
public String getOptAndLongOpt() {
StringBuilder sb = new StringBuilder();
boolean havePrev = false;
if (opt != null && opt.length() > 0) {
sb.append("-").append(opt);
havePrev = true;
}
if (longOpt != null && longOpt.length() > 0) {
if (havePrev) {
sb.append(",");
}
sb.append("--").append(longOpt);
}
return sb.toString();
}
}
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = { ElementType.TYPE })
static public @interface Syntax {
String cmd();
String desc() default "";
String onlineHelp() default "";
String syntax() default "";
}
private String cmdLineSyntax;
private String cmdName;
private String desc;
private String onlineHelp;
protected Map<String, Option> optMap = new HashMap<String, Option>();
@Opt(opt = "h", longOpt = "help", hasArg = false, description = "Print this help message")
private boolean printHelp = false;
protected String remainingArgs[];
protected String orginalArgs[];
public BaseCmd() {
}
public BaseCmd(String cmdLineSyntax, String header) {
super();
int i = cmdLineSyntax.indexOf(' ');
if (i > 0) {
this.cmdName = cmdLineSyntax.substring(0, i);
this.cmdLineSyntax = cmdLineSyntax.substring(i + 1);
}
this.desc = header;
}
public BaseCmd(String cmdName, String cmdSyntax, String header) {
super();
this.cmdName = cmdName;
this.cmdLineSyntax = cmdSyntax;
this.desc = header;
}
private Set<Option> collectRequriedOptions(Map<String, Option> optMap) {
Set<Option> options = new HashSet<Option>();
for (Map.Entry<String, Option> e : optMap.entrySet()) {
Option option = e.getValue();
if (option.required) {
options.add(option);
}
}
return options;
}
@SuppressWarnings({ "rawtypes", "unchecked" })
protected Object convert(String value, Class type) {
if (type.equals(String.class)) {
return value;
}
if (type.equals(int.class) || type.equals(Integer.class)) {
return Integer.parseInt(value);
}
if (type.equals(long.class) || type.equals(Long.class)) {
return Long.parseLong(value);
}
if (type.equals(float.class) || type.equals(Float.class)) {
return Float.parseFloat(value);
}
if (type.equals(double.class) || type.equals(Double.class)) {
return Double.parseDouble(value);
}
if (type.equals(boolean.class) || type.equals(Boolean.class)) {
return Boolean.parseBoolean(value);
}
if (type.equals(File.class)) {
return new File(value);
}
if (type.equals(Path.class)) {
return new File(value).toPath();
}
try {
type.asSubclass(Enum.class);
return Enum.valueOf(type, value);
} catch (Exception e) {
}
throw new RuntimeException("can't convert [" + value + "] to type " + type);
}
;
protected abstract void doCommandLine() throws Exception;
public void doMain(String... args) {
try {
initOptions();
parseSetArgs(args);
doCommandLine();
} catch (HelpException e) {
String msg = e.getMessage();
if (msg != null && msg.length() > 0) {
System.err.println("ERROR: " + msg);
}
usage();
} catch (Exception e) {
e.printStackTrace(System.err);
}
}
protected String getVersionString() {
return getClass().getPackage().getImplementationVersion();
}
protected void initOptionFromClass(Class<?> clz) {
if (clz == null) {
return;
} else {
initOptionFromClass(clz.getSuperclass());
}
Syntax syntax = clz.getAnnotation(Syntax.class);
if (syntax != null) {
this.cmdLineSyntax = syntax.syntax();
this.cmdName = syntax.cmd();
this.desc = syntax.desc();
this.onlineHelp = syntax.onlineHelp();
}
Field[] fs = clz.getDeclaredFields();
for (Field f : fs) {
Opt opt = f.getAnnotation(Opt.class);
if (opt != null) {
f.setAccessible(true);
if (!opt.hasArg()) {
Class<?> type = f.getType();
if (!type.equals(boolean.class)) {
throw new RuntimeException("the type of " + f
+ " must be boolean, as it is declared as no args");
}
boolean b;
try {
b = (Boolean) f.get(this);
} catch (Exception e) {
throw new RuntimeException(e);
}
if (b) {
throw new RuntimeException("the value of " + f + " must be false, as it is declared as no args");
}
}
Option option = new Option();
option.field = f;
option.description = opt.description();
option.hasArg = opt.hasArg();
option.required = opt.required();
boolean haveLongOpt = false;
if (!"".equals(opt.longOpt())) {
option.longOpt = opt.longOpt();
checkConflict(option, "--" + option.longOpt);
haveLongOpt = true;
}
if (!"".equals(opt.argName())) {
option.argName = opt.argName();
}
if (!"".equals(opt.opt())) {
option.opt = opt.opt();
checkConflict(option, "-" + option.opt);
} else {
if (!haveLongOpt) {
throw new RuntimeException("opt or longOpt is not set in @Opt(...) " + f);
}
}
}
}
}
private void checkConflict(Option option, String key) {
if (optMap.containsKey(key)) {
Option preOption = optMap.get(key);
throw new RuntimeException(String.format("[@Opt(...) %s] conflict with [@Opt(...) %s]",
preOption.field.toString(), option.field
));
}
optMap.put(key, option);
}
protected void initOptions() {
initOptionFromClass(this.getClass());
}
public static void main(String... args) throws Exception {
if (args.length < 1) {
System.err.println("d2j-run <class> [args]");
return;
}
Class<?> clz = Class.forName(args[0]);
String newArgs[] = new String[args.length - 1];
System.arraycopy(args, 1, newArgs, 0, newArgs.length);
if (BaseCmd.class.isAssignableFrom(clz)) {
BaseCmd baseCmd = (BaseCmd) clz.newInstance();
baseCmd.doMain(newArgs);
} else {
Method m = clz.getMethod("main",String[].class);
m.setAccessible(true);
m.invoke(null, newArgs);
}
}
protected void parseSetArgs(String... args) throws IllegalArgumentException, IllegalAccessException {
this.orginalArgs = args;
List<String> remainsOptions = new ArrayList<String>();
Set<Option> requiredOpts = collectRequriedOptions(optMap);
Option needArgOpt = null;
for (String s : args) {
if (needArgOpt != null) {
needArgOpt.field.set(this, convert(s, needArgOpt.field.getType()));
needArgOpt = null;
} else if (s.startsWith("-")) {// its a short or long option
Option opt = optMap.get(s);
requiredOpts.remove(opt);
if (opt == null) {
System.err.println("ERROR: Unrecognized option: " + s);
throw new HelpException();
} else {
if (opt.hasArg) {
needArgOpt = opt;
} else {
opt.field.set(this, true);
}
}
} else {
remainsOptions.add(s);
}
}
if (needArgOpt != null) {
System.err.println("ERROR: Option " + needArgOpt.getOptAndLongOpt() + " need an argument value");
throw new HelpException();
}
this.remainingArgs = remainsOptions.toArray(new String[remainsOptions.size()]);
if (this.printHelp) {
throw new HelpException();
}
if (!requiredOpts.isEmpty()) {
StringBuilder sb = new StringBuilder();
sb.append("ERROR: Options: ");
boolean first = true;
for (Option option : requiredOpts) {
if (first) {
first = false;
} else {
sb.append(" and ");
}
sb.append(option.getOptAndLongOpt());
}
sb.append(" is required");
System.err.println(sb.toString());
throw new HelpException();
}
}
protected void usage() {
PrintWriter out = new PrintWriter(new OutputStreamWriter(System.err, StandardCharsets.UTF_8), true);
final int maxLength = 80;
final int maxPaLength = 40;
out.println(this.cmdName + " -- " + desc);
out.println("usage: " + this.cmdName + " " + cmdLineSyntax);
if (this.optMap.size() > 0) {
out.println("options:");
}
// [PART.A.........][Part.B
// .-a,--aa.<arg>...desc1
// .................desc2
// .-b,--bb
TreeSet<Option> options = new TreeSet<Option>(this.optMap.values());
int palength = -1;
for (Option option : options) {
int pa = 4 + option.getOptAndLongOpt().length();
if (option.hasArg) {
pa += 3 + option.argName.length();
}
if (pa < maxPaLength) {
if (pa > palength) {
palength = pa;
}
}
}
int pblength = maxLength - palength;
StringBuilder sb = new StringBuilder();
for (Option option : options) {
sb.setLength(0);
sb.append(" ").append(option.getOptAndLongOpt());
if (option.hasArg) {
sb.append(" <").append(option.argName).append(">");
}
String desc = option.description;
if (desc == null || desc.length() == 0) {// no description
out.println(sb);
} else {
for (int i = palength - sb.length(); i > 0; i--) {
sb.append(' ');
}
if (sb.length() > maxPaLength) {// to huge part A
out.println(sb);
sb.setLength(0);
for (int i = 0; i < palength; i++) {
sb.append(' ');
}
}
int nextStart = 0;
while (nextStart < desc.length()) {
if (desc.length() - nextStart < pblength) {// can put in one line
sb.append(desc.substring(nextStart));
out.println(sb);
nextStart = desc.length();
sb.setLength(0);
} else {
sb.append(desc.substring(nextStart, nextStart + pblength));
out.println(sb);
nextStart += pblength;
sb.setLength(0);
if (nextStart < desc.length()) {
for (int i = 0; i < palength; i++) {
sb.append(' ');
}
}
}
}
if (sb.length() > 0) {
out.println(sb);
sb.setLength(0);
}
}
}
String ver = getVersionString();
if (ver != null && !"".equals(ver)) {
out.println("version: " + ver);
}
if (onlineHelp != null && !"".equals(onlineHelp)) {
if (onlineHelp.length() + "online help: ".length() > maxLength) {
out.println("online help: ");
out.println(onlineHelp);
} else {
out.println("online help: " + onlineHelp);
}
}
out.flush();
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.query.filter;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Range;
import com.google.common.collect.RangeSet;
import com.google.common.collect.TreeRangeSet;
import com.google.common.primitives.Doubles;
import com.google.common.primitives.Floats;
import io.druid.common.guava.GuavaUtils;
import io.druid.java.util.common.StringUtils;
import io.druid.query.extraction.ExtractionFn;
import io.druid.segment.filter.DimensionPredicateFilter;
import io.druid.segment.filter.SelectorFilter;
import java.nio.ByteBuffer;
import java.util.Objects;
/**
 * Exact-match filter: selects rows where {@code dimension} equals
 * {@code value}, optionally after applying {@code extractionFn}. A null
 * value is normalized to the empty string by the constructor.
 */
public class SelectorDimFilter implements DimFilter
{
  private final String dimension;
  private final String value;
  private final ExtractionFn extractionFn;
  // Guards lazy construction of the numeric predicates below.
  private final Object initLock = new Object();
  // Lazily built, cached numeric views of `value`.
  // NOTE(review): these fields are not volatile even though the init*Predicate
  // methods read them outside the lock first — confirm this is intended.
  private DruidLongPredicate longPredicate;
  private DruidFloatPredicate floatPredicate;
  private DruidDoublePredicate druidDoublePredicate;
  @JsonCreator
  public SelectorDimFilter(
      @JsonProperty("dimension") String dimension,
      @JsonProperty("value") String value,
      @JsonProperty("extractionFn") ExtractionFn extractionFn
  )
  {
    Preconditions.checkArgument(dimension != null, "dimension must not be null");
    this.dimension = dimension;
    this.value = Strings.nullToEmpty(value);
    this.extractionFn = extractionFn;
  }
  /**
   * Cache key layout: selector cache id byte, dimension bytes, separator,
   * value bytes, separator, extraction-fn key bytes.
   */
  @Override
  public byte[] getCacheKey()
  {
    byte[] dimensionBytes = StringUtils.toUtf8(dimension);
    // value cannot be null after the constructor; this check is defensive only
    byte[] valueBytes = (value == null) ? new byte[]{} : StringUtils.toUtf8(value);
    byte[] extractionFnBytes = extractionFn == null ? new byte[0] : extractionFn.getCacheKey();
    return ByteBuffer.allocate(3 + dimensionBytes.length + valueBytes.length + extractionFnBytes.length)
                     .put(DimFilterUtils.SELECTOR_CACHE_ID)
                     .put(dimensionBytes)
                     .put(DimFilterUtils.STRING_SEPARATOR)
                     .put(valueBytes)
                     .put(DimFilterUtils.STRING_SEPARATOR)
                     .put(extractionFnBytes)
                     .array();
  }
  // Delegates to InDimFilter with a single-element value list and returns
  // whatever that filter's optimize() produces.
  @Override
  public DimFilter optimize()
  {
    return new InDimFilter(dimension, ImmutableList.of(value), extractionFn).optimize();
  }
  @Override
  public Filter toFilter()
  {
    if (extractionFn == null) {
      // simple case: direct string equality on the raw dimension
      return new SelectorFilter(dimension, value);
    } else {
      // predicate-based match on the extracted value; the empty string is
      // compared as null here (Strings.emptyToNull)
      final String valueOrNull = Strings.emptyToNull(value);
      final DruidPredicateFactory predicateFactory = new DruidPredicateFactory()
      {
        @Override
        public Predicate<String> makeStringPredicate()
        {
          return Predicates.equalTo(valueOrNull);
        }
        @Override
        public DruidLongPredicate makeLongPredicate()
        {
          initLongPredicate();
          return longPredicate;
        }
        @Override
        public DruidFloatPredicate makeFloatPredicate()
        {
          initFloatPredicate();
          return floatPredicate;
        }
        @Override
        public DruidDoublePredicate makeDoublePredicate()
        {
          initDoublePredicate();
          return druidDoublePredicate;
        }
      };
      return new DimensionPredicateFilter(dimension, predicateFactory, extractionFn);
    }
  }
  @JsonProperty
  public String getDimension()
  {
    return dimension;
  }
  @JsonProperty
  public String getValue()
  {
    return value;
  }
  @JsonProperty
  public ExtractionFn getExtractionFn()
  {
    return extractionFn;
  }
  @Override
  public String toString()
  {
    if (extractionFn != null) {
      return StringUtils.format("%s(%s) = %s", extractionFn, dimension, value);
    } else {
      return StringUtils.format("%s = %s", dimension, value);
    }
  }
  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    SelectorDimFilter that = (SelectorDimFilter) o;
    if (!dimension.equals(that.dimension)) {
      return false;
    }
    if (value != null ? !value.equals(that.value) : that.value != null) {
      return false;
    }
    return extractionFn != null ? extractionFn.equals(that.extractionFn) : that.extractionFn == null;
  }
  // Only answers for this filter's own dimension and only when no extraction
  // function is set; otherwise returns null (range unknown to the caller).
  @Override
  public RangeSet<String> getDimensionRangeSet(String dimension)
  {
    if (!Objects.equals(getDimension(), dimension) || getExtractionFn() != null) {
      return null;
    }
    RangeSet<String> retSet = TreeRangeSet.create();
    retSet.add(Range.singleton(Strings.nullToEmpty(value)));
    return retSet;
  }
  @Override
  public int hashCode()
  {
    int result = dimension.hashCode();
    result = 31 * result + (value != null ? value.hashCode() : 0);
    result = 31 * result + (extractionFn != null ? extractionFn.hashCode() : 0);
    return result;
  }
  // Lazily parses `value` as a long; non-numeric values yield ALWAYS_FALSE.
  // Fast-path read outside the lock, re-checked under initLock before building.
  private void initLongPredicate()
  {
    if (longPredicate != null) {
      return;
    }
    synchronized (initLock) {
      if (longPredicate != null) {
        return;
      }
      final Long valueAsLong = GuavaUtils.tryParseLong(value);
      if (valueAsLong == null) {
        longPredicate = DruidLongPredicate.ALWAYS_FALSE;
      } else {
        // store the primitive, so we don't unbox for every comparison
        final long unboxedLong = valueAsLong.longValue();
        longPredicate = input -> input == unboxedLong;
      }
    }
  }
  // Same lazy-init pattern for floats; compares raw float bits so NaN and
  // signed-zero handling is exact rather than IEEE `==`.
  private void initFloatPredicate()
  {
    if (floatPredicate != null) {
      return;
    }
    synchronized (initLock) {
      if (floatPredicate != null) {
        return;
      }
      final Float valueAsFloat = Floats.tryParse(value);
      if (valueAsFloat == null) {
        floatPredicate = DruidFloatPredicate.ALWAYS_FALSE;
      } else {
        final int floatBits = Float.floatToIntBits(valueAsFloat);
        floatPredicate = input -> Float.floatToIntBits(input) == floatBits;
      }
    }
  }
  // Same lazy-init pattern for doubles; compares raw double bits.
  private void initDoublePredicate()
  {
    if (druidDoublePredicate != null) {
      return;
    }
    synchronized (initLock) {
      if (druidDoublePredicate != null) {
        return;
      }
      final Double aDouble = Doubles.tryParse(value);
      if (aDouble == null) {
        druidDoublePredicate = DruidDoublePredicate.ALWAYS_FALSE;
      } else {
        final long bits = Double.doubleToLongBits(aDouble);
        druidDoublePredicate = input -> Double.doubleToLongBits(input) == bits;
      }
    }
  }
}
| |
/*
* Copyright 2002-2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.context.support;
import java.util.Date;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.factory.support.PropertiesBeanDefinitionReader;
import org.springframework.context.ACATester;
import org.springframework.context.AbstractApplicationContextTests;
import org.springframework.context.BeanThatListens;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.MessageSourceResolvable;
import org.springframework.context.NoSuchMessageException;
import org.springframework.core.io.ClassPathResource;
/**
* @author Rod Johnson
* @author Juergen Hoeller
*/
public class StaticMessageSourceTests extends AbstractApplicationContextTests {
	// MessageFormat patterns / plain messages registered with the
	// StaticMessageSource in createContext() below.
	protected static final String MSG_TXT1_US =
			"At '{1,time}' on \"{1,date}\", there was \"{2}\" on planet {0,number,integer}.";
	protected static final String MSG_TXT1_UK =
			"At '{1,time}' on \"{1,date}\", there was \"{2}\" on station number {0,number,integer}.";
	protected static final String MSG_TXT2_US =
			"This is a test message in the message catalog with no args.";
	protected static final String MSG_TXT3_US =
			"This is another test message in the message catalog with no args.";
	// Context under test; rebuilt for each test by createContext().
	protected StaticApplicationContext sac;
	/** Overridden */
	@Override
	public void testCount() {
		// These are only checked for current Ctx (not parent ctx)
		assertCount(15);
	}
	@Override
	public void testMessageSource() throws NoSuchMessageException {
		// Do nothing here since super is looking for errorCodes we
		// do NOT have in the Context
	}
	// Default message supplied but the code exists: catalog value must win.
	public void testGetMessageWithDefaultPassedInAndFoundInMsgCatalog() {
		// Try with Locale.US
		assertTrue("valid msg from staticMsgSource with default msg passed in returned msg from msg catalog for Locale.US",
				sac.getMessage("message.format.example2", null, "This is a default msg if not found in MessageSource.", Locale.US)
			.equals("This is a test message in the message catalog with no args."));
	}
	// Unknown code: the passed-in default must be returned instead.
	public void testGetMessageWithDefaultPassedInAndNotFoundInMsgCatalog() {
		// Try with Locale.US
		assertTrue("bogus msg from staticMsgSource with default msg passed in returned default msg for Locale.US",
				sac.getMessage("bogus.message", null, "This is a default msg if not found in MessageSource.", Locale.US)
			.equals("This is a default msg if not found in MessageSource."));
	}
	/**
	 * We really are testing the AbstractMessageSource class here.
	 * The underlying implementation uses a hashMap to cache messageFormats
	 * once a message has been asked for.  This test is an attempt to
	 * make sure the cache is being used properly.
	 * @see org.springframework.context.support.AbstractMessageSource for more details.
	 */
	public void testGetMessageWithMessageAlreadyLookedFor() {
		Object[] arguments = {
			new Integer(7), new Date(System.currentTimeMillis()),
			"a disturbance in the Force"
		};
		// The first time searching, we don't care about for this test
		// Try with Locale.US
		sac.getMessage("message.format.example1", arguments, Locale.US);
		// Now msg better be as expected
		assertTrue("2nd search within MsgFormat cache returned expected message for Locale.US",
				sac.getMessage("message.format.example1", arguments, Locale.US).indexOf(
						"there was \"a disturbance in the Force\" on planet 7.") != -1);
		Object[] newArguments = {
			new Integer(8), new Date(System.currentTimeMillis()),
			"a disturbance in the Force"
		};
		// Now msg better be as expected even with different args
		assertTrue("2nd search within MsgFormat cache with different args returned expected message for Locale.US",
				sac.getMessage("message.format.example1", newArguments, Locale.US)
						.indexOf("there was \"a disturbance in the Force\" on planet 8.") != -1);
	}
	/**
	 * Example taken from the javadocs for the java.text.MessageFormat class
	 */
	public void testGetMessageWithNoDefaultPassedInAndFoundInMsgCatalog() {
		Object[] arguments = {
			new Integer(7), new Date(System.currentTimeMillis()),
			"a disturbance in the Force"
		};
		/*
		 Try with Locale.US
		 Since the msg has a time value in it, we will use String.indexOf(...)
		 to just look for a substring without the time.  This is because it is
		 possible that by the time we store a time variable in this method
		 and the time the ResourceBundleMessageSource resolves the msg the
		 minutes of the time might not be the same.
		 */
		assertTrue("msg from staticMsgSource for Locale.US substituting args for placeholders is as expected",
				sac.getMessage("message.format.example1", arguments, Locale.US)
						.indexOf("there was \"a disturbance in the Force\" on planet 7.") != -1);
		// Try with Locale.UK
		assertTrue("msg from staticMsgSource for Locale.UK substituting args for placeholders is as expected",
				sac.getMessage("message.format.example1", arguments, Locale.UK)
						.indexOf("there was \"a disturbance in the Force\" on station number 7.") != -1);
		// Try with Locale.US - Use a different test msg that requires no args
		assertTrue("msg from staticMsgSource for Locale.US that requires no args is as expected",
				sac.getMessage("message.format.example2", null, Locale.US)
						.equals("This is a test message in the message catalog with no args."));
	}
	public void testGetMessageWithNoDefaultPassedInAndNotFoundInMsgCatalog() {
		// Expecting an exception
		try {
			// Try with Locale.US
			sac.getMessage("bogus.message", null, Locale.US);
			fail("bogus msg from staticMsgSource for Locale.US without default msg should have thrown exception");
		}
		catch (NoSuchMessageException tExcept) {
			assertTrue("bogus msg from staticMsgSource for Locale.US without default msg threw expected exception", true);
		}
	}
	// Exercises code-array resolution: first code wins, fall through to later
	// codes, fall back to the default, and finally throw when nothing matches.
	public void testMessageSourceResolvable() {
		// first code valid
		String[] codes1 = new String[] {"message.format.example3", "message.format.example2"};
		MessageSourceResolvable resolvable1 = new DefaultMessageSourceResolvable(codes1, null, "default");
		try {
			assertTrue("correct message retrieved", MSG_TXT3_US.equals(sac.getMessage(resolvable1, Locale.US)));
		}
		catch (NoSuchMessageException ex) {
			fail("Should not throw NoSuchMessageException");
		}
		// only second code valid
		String[] codes2 = new String[] {"message.format.example99", "message.format.example2"};
		MessageSourceResolvable resolvable2 = new DefaultMessageSourceResolvable(codes2, null, "default");
		try {
			assertTrue("correct message retrieved", MSG_TXT2_US.equals(sac.getMessage(resolvable2, Locale.US)));
		}
		catch (NoSuchMessageException ex) {
			fail("Should not throw NoSuchMessageException");
		}
		// no code valid, but default given
		String[] codes3 = new String[] {"message.format.example99", "message.format.example98"};
		MessageSourceResolvable resolvable3 = new DefaultMessageSourceResolvable(codes3, null, "default");
		try {
			assertTrue("correct message retrieved", "default".equals(sac.getMessage(resolvable3, Locale.US)));
		}
		catch (NoSuchMessageException ex) {
			fail("Should not throw NoSuchMessageException");
		}
		// no code valid, no default
		String[] codes4 = new String[] {"message.format.example99", "message.format.example98"};
		MessageSourceResolvable resolvable4 = new DefaultMessageSourceResolvable(codes4);
		try {
			sac.getMessage(resolvable4, Locale.US);
			fail("Should have thrown NoSuchMessageException");
		}
		catch (NoSuchMessageException ex) {
			// expected
		}
	}
	/** Run for each test */
	@Override
	protected ConfigurableApplicationContext createContext() throws Exception {
		// parent context with two prototype beans and its own listener
		StaticApplicationContext parent = new StaticApplicationContext();
		Map<String, String> m = new HashMap<String, String>();
		m.put("name", "Roderick");
		parent.registerPrototype("rod", org.springframework.tests.sample.beans.TestBean.class, new MutablePropertyValues(m));
		m.put("name", "Albert");
		parent.registerPrototype("father", org.springframework.tests.sample.beans.TestBean.class, new MutablePropertyValues(m));
		parent.refresh();
		parent.addApplicationListener(parentListener);
		// child context under test, with beans from code and from properties file
		this.sac = new StaticApplicationContext(parent);
		sac.registerSingleton("beanThatListens", BeanThatListens.class, new MutablePropertyValues());
		sac.registerSingleton("aca", ACATester.class, new MutablePropertyValues());
		sac.registerPrototype("aca-prototype", ACATester.class, new MutablePropertyValues());
		PropertiesBeanDefinitionReader reader = new PropertiesBeanDefinitionReader(sac.getDefaultListableBeanFactory());
		reader.loadBeanDefinitions(new ClassPathResource("testBeans.properties", getClass()));
		sac.refresh();
		sac.addApplicationListener(listener);
		// register the messages the tests above resolve
		StaticMessageSource messageSource = sac.getStaticMessageSource();
		Map<String, String> usMessages = new HashMap<String, String>(3);
		usMessages.put("message.format.example1", MSG_TXT1_US);
		usMessages.put("message.format.example2", MSG_TXT2_US);
		usMessages.put("message.format.example3", MSG_TXT3_US);
		messageSource.addMessages(usMessages, Locale.US);
		messageSource.addMessage("message.format.example1", Locale.UK, MSG_TXT1_UK);
		return sac;
	}
	// Argument resolvable lives in the child, pattern in the parent.
	public void testNestedMessageSourceWithParamInChild() {
		StaticMessageSource source = new StaticMessageSource();
		StaticMessageSource parent = new StaticMessageSource();
		source.setParentMessageSource(parent);
		source.addMessage("param", Locale.ENGLISH, "value");
		parent.addMessage("with.param", Locale.ENGLISH, "put {0} here");
		MessageSourceResolvable resolvable = new DefaultMessageSourceResolvable(
				new String[] {"with.param"}, new Object[] {new DefaultMessageSourceResolvable("param")});
		assertEquals("put value here", source.getMessage(resolvable, Locale.ENGLISH));
	}
	// Mirror case: argument resolvable lives in the parent, pattern in the child.
	public void testNestedMessageSourceWithParamInParent() {
		StaticMessageSource source = new StaticMessageSource();
		StaticMessageSource parent = new StaticMessageSource();
		source.setParentMessageSource(parent);
		parent.addMessage("param", Locale.ENGLISH, "value");
		source.addMessage("with.param", Locale.ENGLISH, "put {0} here");
		MessageSourceResolvable resolvable = new DefaultMessageSourceResolvable(
				new String[] {"with.param"}, new Object[] {new DefaultMessageSourceResolvable("param")});
		assertEquals("put value here", source.getMessage(resolvable, Locale.ENGLISH));
	}
}
| |
/*
* JBoss, Home of Professional Open Source
* Copyright 2014, Red Hat, Inc. and/or its affiliates, and individual
* contributors by the @authors tag. See the copyright.txt in the
* distribution for a full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.as.quickstarts.kitchensink.rest;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import javax.ejb.Stateless;
import javax.inject.Inject;
import javax.persistence.NoResultException;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.validation.ValidationException;
import javax.validation.Validator;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.jboss.as.quickstarts.kitchensink.data.MemberRepository;
import org.jboss.as.quickstarts.kitchensink.model.Member;
import org.jboss.as.quickstarts.kitchensink.service.MemberRegistration;
/**
* JAX-RS Example
* <p/>
* This class produces a RESTful service to read/write the contents of the
* members table.
*/
@Path("/members")
@Stateless
public class MemberService {
@Inject
private Logger log;
@Inject
private Validator validator;
@Inject
private MemberRepository repository;
@Inject
MemberRegistration registration;
@GET
@Produces(MediaType.APPLICATION_JSON)
public List<Member> listAllMembers() {
return repository.findAllOrderedByName();
}
@GET
@Path("/{id:[0-9][0-9]*}")
@Produces(MediaType.APPLICATION_JSON)
public Response lookupMemberById(@PathParam("id") long id) {
Member member = repository.findById(id);
if (member == null) {
return Response.status(Response.Status.NOT_FOUND).build();
}
return Response.ok(member).build();
}
/**
* Creates a new member from the values provided. Performs validation, and
* will return a JAX-RS response with either 200 ok, or with a map of
* fields, and related errors.
*/
@POST
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createMember(Member member) {
Response.ResponseBuilder builder = null;
try {
// Validates member using bean validation
validateMember(member);
member = registration.register(member);
// Create an "ok" response
builder = Response.ok(member);
} catch (ConstraintViolationException ce) {
// Handle bean validation issues
builder = createViolationResponse(ce.getConstraintViolations());
} catch (ValidationException e) {
// Handle the unique constrain violation
Map<String, String> responseObj = new HashMap<String, String>();
responseObj.put("email", "Email taken");
builder = Response.status(Response.Status.CONFLICT).entity(
responseObj);
} catch (Exception e) {
// Handle generic exceptions
Map<String, String> responseObj = new HashMap<String, String>();
responseObj.put("error", e.getMessage());
builder = Response.status(Response.Status.BAD_REQUEST).entity(
responseObj);
}
return builder.build();
}
/**
 * <p>
 * Runs all validation rules against the given {@code Member}. Standard bean
 * validation failures are raised as a {@link ConstraintViolationException}
 * carrying the full set of violated constraints.
 * </p>
 * <p>
 * A duplicate e-mail address is raised as a plain {@link ValidationException}
 * instead, so the caller can distinguish it and map it to a conflict response.
 * </p>
 *
 * @param member
 *            the member to validate
 * @throws ConstraintViolationException
 *             if any bean-validation constraint is violated
 * @throws ValidationException
 *             if a member with the same e-mail address already exists
 */
private void validateMember(Member member)
        throws ConstraintViolationException, ValidationException {
    // Let the bean validator inspect every constrained field first.
    Set<ConstraintViolation<Member>> problems = validator.validate(member);
    if (!problems.isEmpty()) {
        throw new ConstraintViolationException(
                new HashSet<ConstraintViolation<?>>(problems));
    }
    // The unique-email rule cannot be expressed as a field constraint,
    // so it is checked explicitly against the repository.
    if (emailAlreadyExists(member.getEmail())) {
        throw new ValidationException("Unique Email Violation");
    }
}
/**
 * Builds a JAX-RS "Bad Request" response whose entity maps each violated
 * property path to its validation message, ready for clients to display.
 *
 * @param violations
 *            the constraint violations to report
 * @return a response builder carrying the violation map
 */
private Response.ResponseBuilder createViolationResponse(
        Set<ConstraintViolation<?>> violations) {
    log.fine("Validation completed. violations found: " + violations.size());
    Map<String, String> messages = new HashMap<String, String>();
    for (ConstraintViolation<?> v : violations) {
        messages.put(v.getPropertyPath().toString(), v.getMessage());
    }
    return Response.status(Response.Status.BAD_REQUEST).entity(messages);
}
/**
 * Checks whether a member with the given e-mail address is already
 * registered. This is the only easy way to surface the
 * "@UniqueConstraint(columnNames = "email")" constraint from the Member
 * class ahead of an insert.
 *
 * @param email
 *            the address to look up
 * @return true when the address is already taken, false otherwise
 */
public boolean emailAlreadyExists(String email) {
    try {
        // A non-null hit means the address is taken.
        return repository.findByEmail(email) != null;
    } catch (NoResultException e) {
        // No row for this address: it is free.
        return false;
    }
}
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.openvr;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryStack.*;
import static org.lwjgl.system.MemoryUtil.*;
public class VRRenderModels {
// Static utility holder for the IVRRenderModels function-table wrappers; never instantiated.
protected VRRenderModels() {
    throw new UnsupportedOperationException();
}
// --- [ VRRenderModels_LoadRenderModel_Async ] ---

/** Unsafe version of: {@link #VRRenderModels_LoadRenderModel_Async LoadRenderModel_Async} */
public static int nVRRenderModels_LoadRenderModel_Async(long pchRenderModelName, long ppRenderModel) {
    long __functionAddress = OpenVR.VRRenderModels.LoadRenderModel_Async;
    if (CHECKS) {
        // Fail fast if the native function pointer was not resolved.
        check(__functionAddress);
    }
    return callPPI(pchRenderModelName, ppRenderModel, __functionAddress);
}

/**
 * Loads and returns a render model for use in the application. {@code pchRenderModelName} should be a render model name from the
 * {@link VR#ETrackedDeviceProperty_Prop_RenderModelName_String} property or an absolute path name to a render model on disk.
 *
 * <p>The resulting render model is valid until {@link VR#VR_ShutdownInternal ShutdownInternal} is called or until {@link #VRRenderModels_FreeRenderModel FreeRenderModel} is called. When the application is finished with
 * the render model it should call {@link #VRRenderModels_FreeRenderModel FreeRenderModel} to free the memory associated with the model.</p>
 */
@NativeType("EVRRenderModelError")
public static int VRRenderModels_LoadRenderModel_Async(@NativeType("char const *") ByteBuffer pchRenderModelName, @NativeType("RenderModel_t **") PointerBuffer ppRenderModel) {
    if (CHECKS) {
        // The name must be NUL-terminated and ppRenderModel must have room for one pointer.
        checkNT1(pchRenderModelName);
        check(ppRenderModel, 1);
    }
    return nVRRenderModels_LoadRenderModel_Async(memAddress(pchRenderModelName), memAddress(ppRenderModel));
}

/**
 * Loads and returns a render model for use in the application. {@code pchRenderModelName} should be a render model name from the
 * {@link VR#ETrackedDeviceProperty_Prop_RenderModelName_String} property or an absolute path name to a render model on disk.
 *
 * <p>The resulting render model is valid until {@link VR#VR_ShutdownInternal ShutdownInternal} is called or until {@link #VRRenderModels_FreeRenderModel FreeRenderModel} is called. When the application is finished with
 * the render model it should call {@link #VRRenderModels_FreeRenderModel FreeRenderModel} to free the memory associated with the model.</p>
 */
@NativeType("EVRRenderModelError")
public static int VRRenderModels_LoadRenderModel_Async(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("RenderModel_t **") PointerBuffer ppRenderModel) {
    if (CHECKS) {
        check(ppRenderModel, 1);
    }
    // Encode the name as NUL-terminated ASCII on the thread-local stack; the saved
    // stack pointer is restored in the finally block, releasing the temporary copy.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_LoadRenderModel_Async(pchRenderModelNameEncoded, memAddress(ppRenderModel));
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_FreeRenderModel ] ---

/** Unsafe version of: {@link #VRRenderModels_FreeRenderModel FreeRenderModel} */
public static void nVRRenderModels_FreeRenderModel(long pRenderModel) {
    long __functionAddress = OpenVR.VRRenderModels.FreeRenderModel;
    if (CHECKS) {
        check(__functionAddress);
    }
    callPV(pRenderModel, __functionAddress);
}

/** Frees a previously returned render model It is safe to call this on a null ptr. */
public static void VRRenderModels_FreeRenderModel(@Nullable @NativeType("RenderModel_t *") RenderModel pRenderModel) {
    // memAddressSafe maps a null struct to address 0, which the native call accepts.
    nVRRenderModels_FreeRenderModel(memAddressSafe(pRenderModel));
}

// --- [ VRRenderModels_LoadTexture_Async ] ---

/** Unsafe version of: {@link #VRRenderModels_LoadTexture_Async LoadTexture_Async} */
public static int nVRRenderModels_LoadTexture_Async(int textureId, long ppTexture) {
    long __functionAddress = OpenVR.VRRenderModels.LoadTexture_Async;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPI(textureId, ppTexture, __functionAddress);
}

/** Loads and returns a texture for use in the application. */
@NativeType("EVRRenderModelError")
public static int VRRenderModels_LoadTexture_Async(@NativeType("TextureID_t") int textureId, @NativeType("RenderModel_TextureMap_t **") PointerBuffer ppTexture) {
    if (CHECKS) {
        // ppTexture must have room for one output pointer.
        check(ppTexture, 1);
    }
    return nVRRenderModels_LoadTexture_Async(textureId, memAddress(ppTexture));
}

// --- [ VRRenderModels_FreeTexture ] ---

/** Unsafe version of: {@link #VRRenderModels_FreeTexture FreeTexture} */
public static void nVRRenderModels_FreeTexture(long pTexture) {
    long __functionAddress = OpenVR.VRRenderModels.FreeTexture;
    if (CHECKS) {
        check(__functionAddress);
    }
    callPV(pTexture, __functionAddress);
}

/** Frees a previously returned texture. It is safe to call this on a null ptr. */
public static void VRRenderModels_FreeTexture(@Nullable @NativeType("RenderModel_TextureMap_t *") RenderModelTextureMap pTexture) {
    nVRRenderModels_FreeTexture(memAddressSafe(pTexture));
}
// --- [ VRRenderModels_LoadTextureD3D11_Async ] ---

/** Unsafe version of: {@link #VRRenderModels_LoadTextureD3D11_Async LoadTextureD3D11_Async} */
public static int nVRRenderModels_LoadTextureD3D11_Async(int textureId, long pD3D11Device, long ppD3D11Texture2D) {
    long __functionAddress = OpenVR.VRRenderModels.LoadTextureD3D11_Async;
    if (CHECKS) {
        check(__functionAddress);
        // The D3D11 device pointer must be non-null.
        check(pD3D11Device);
    }
    return callPPI(textureId, pD3D11Device, ppD3D11Texture2D, __functionAddress);
}

/** Creates a D3D11 texture and loads data into it. */
@NativeType("EVRRenderModelError")
public static int VRRenderModels_LoadTextureD3D11_Async(@NativeType("TextureID_t") int textureId, @NativeType("void *") long pD3D11Device, @NativeType("void **") PointerBuffer ppD3D11Texture2D) {
    if (CHECKS) {
        check(ppD3D11Texture2D, 1);
    }
    return nVRRenderModels_LoadTextureD3D11_Async(textureId, pD3D11Device, memAddress(ppD3D11Texture2D));
}

// --- [ VRRenderModels_LoadIntoTextureD3D11_Async ] ---

/** Helper function to copy the bits into an existing texture. */
@NativeType("EVRRenderModelError")
public static int VRRenderModels_LoadIntoTextureD3D11_Async(@NativeType("TextureID_t") int textureId, @NativeType("void *") long pDstTexture) {
    long __functionAddress = OpenVR.VRRenderModels.LoadIntoTextureD3D11_Async;
    if (CHECKS) {
        check(__functionAddress);
        check(pDstTexture);
    }
    return callPI(textureId, pDstTexture, __functionAddress);
}

// --- [ VRRenderModels_FreeTextureD3D11 ] ---

/** Use this to free textures created with LoadTextureD3D11_Async instead of calling Release on them. */
public static void VRRenderModels_FreeTextureD3D11(@NativeType("void *") long pD3D11Texture2D) {
    long __functionAddress = OpenVR.VRRenderModels.FreeTextureD3D11;
    if (CHECKS) {
        check(__functionAddress);
        check(pD3D11Texture2D);
    }
    callPV(pD3D11Texture2D, __functionAddress);
}
// --- [ VRRenderModels_GetRenderModelName ] ---

/** Unsafe version of: {@link #VRRenderModels_GetRenderModelName GetRenderModelName} */
public static int nVRRenderModels_GetRenderModelName(int unRenderModelIndex, long pchRenderModelName, int unRenderModelNameLen) {
    long __functionAddress = OpenVR.VRRenderModels.GetRenderModelName;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPI(unRenderModelIndex, pchRenderModelName, unRenderModelNameLen, __functionAddress);
}

/**
 * Use this to get the names of available render models. Index does not correlate to a tracked device index, but is only used for iterating over all
 * available render models. If the index is out of range, this function will return 0. Otherwise, it will return the size of the buffer required for the
 * name.
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetRenderModelName(@NativeType("uint32_t") int unRenderModelIndex, @Nullable @NativeType("char *") ByteBuffer pchRenderModelName) {
    // A null buffer is allowed: the call then just reports the required buffer size.
    return nVRRenderModels_GetRenderModelName(unRenderModelIndex, memAddressSafe(pchRenderModelName), remainingSafe(pchRenderModelName));
}

/**
 * Use this to get the names of available render models. Index does not correlate to a tracked device index, but is only used for iterating over all
 * available render models. If the index is out of range, this function will return 0. Otherwise, it will return the size of the buffer required for the
 * name.
 */
@NativeType("uint32_t")
public static String VRRenderModels_GetRenderModelName(@NativeType("uint32_t") int unRenderModelIndex, @NativeType("uint32_t") int unRenderModelNameLen) {
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        ByteBuffer pchRenderModelName = stack.malloc(unRenderModelNameLen);
        int __result = nVRRenderModels_GetRenderModelName(unRenderModelIndex, memAddress(pchRenderModelName), unRenderModelNameLen);
        // __result - 1 drops the NUL terminator. NOTE(review): per the doc above, an
        // out-of-range index makes the native call return 0, so __result - 1 would be
        // negative — callers are expected to pass a valid index.
        return memASCII(pchRenderModelName, __result - 1);
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetRenderModelCount ] ---

/** Returns the number of available render models. */
@NativeType("uint32_t")
public static int VRRenderModels_GetRenderModelCount() {
    long __functionAddress = OpenVR.VRRenderModels.GetRenderModelCount;
    if (CHECKS) {
        // Fail fast if the native function pointer was not resolved.
        check(__functionAddress);
    }
    return callI(__functionAddress);
}
// --- [ VRRenderModels_GetComponentCount ] ---

/** Unsafe version of: {@link #VRRenderModels_GetComponentCount GetComponentCount} */
public static int nVRRenderModels_GetComponentCount(long pchRenderModelName) {
    long __functionAddress = OpenVR.VRRenderModels.GetComponentCount;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPI(pchRenderModelName, __functionAddress);
}

/**
 * Returns the number of components of the specified render model.
 *
 * <p>Components are useful when client application wish to draw, label, or otherwise interact with components of tracked objects.</p>
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetComponentCount(@NativeType("char const *") ByteBuffer pchRenderModelName) {
    if (CHECKS) {
        // The render model name must be NUL-terminated.
        checkNT1(pchRenderModelName);
    }
    return nVRRenderModels_GetComponentCount(memAddress(pchRenderModelName));
}

/**
 * Returns the number of components of the specified render model.
 *
 * <p>Components are useful when client application wish to draw, label, or otherwise interact with components of tracked objects.</p>
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetComponentCount(@NativeType("char const *") CharSequence pchRenderModelName) {
    // Encode the name on the thread-local stack; restored in finally.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_GetComponentCount(pchRenderModelNameEncoded);
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetComponentName ] ---

/** Unsafe version of: {@link #VRRenderModels_GetComponentName GetComponentName} */
public static int nVRRenderModels_GetComponentName(long pchRenderModelName, int unComponentIndex, long pchComponentName, int unComponentNameLen) {
    long __functionAddress = OpenVR.VRRenderModels.GetComponentName;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPPI(pchRenderModelName, unComponentIndex, pchComponentName, unComponentNameLen, __functionAddress);
}

/**
 * Use this to get the names of available components. Index does not correlate to a tracked device index, but is only used for iterating over all
 * available components. If the index is out of range, this function will return 0. Otherwise, it will return the size of the buffer required for the
 * name.
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetComponentName(@NativeType("char const *") ByteBuffer pchRenderModelName, @NativeType("uint32_t") int unComponentIndex, @Nullable @NativeType("char *") ByteBuffer pchComponentName) {
    if (CHECKS) {
        checkNT1(pchRenderModelName);
    }
    // A null output buffer is allowed: the call then just reports the required size.
    return nVRRenderModels_GetComponentName(memAddress(pchRenderModelName), unComponentIndex, memAddressSafe(pchComponentName), remainingSafe(pchComponentName));
}

/**
 * Use this to get the names of available components. Index does not correlate to a tracked device index, but is only used for iterating over all
 * available components. If the index is out of range, this function will return 0. Otherwise, it will return the size of the buffer required for the
 * name.
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetComponentName(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("uint32_t") int unComponentIndex, @Nullable @NativeType("char *") ByteBuffer pchComponentName) {
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_GetComponentName(pchRenderModelNameEncoded, unComponentIndex, memAddressSafe(pchComponentName), remainingSafe(pchComponentName));
    } finally {
        stack.setPointer(stackPointer);
    }
}

/**
 * Use this to get the names of available components. Index does not correlate to a tracked device index, but is only used for iterating over all
 * available components. If the index is out of range, this function will return 0. Otherwise, it will return the size of the buffer required for the
 * name.
 */
@NativeType("uint32_t")
public static String VRRenderModels_GetComponentName(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("uint32_t") int unComponentIndex, @NativeType("uint32_t") int unComponentNameLen) {
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        ByteBuffer pchComponentName = stack.malloc(unComponentNameLen);
        int __result = nVRRenderModels_GetComponentName(pchRenderModelNameEncoded, unComponentIndex, memAddress(pchComponentName), unComponentNameLen);
        // NOTE(review): an out-of-range index makes the native call return 0, so
        // __result - 1 would be negative — callers must pass a valid index.
        return memASCII(pchComponentName, __result - 1);
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetComponentButtonMask ] ---

/** Unsafe version of: {@link #VRRenderModels_GetComponentButtonMask GetComponentButtonMask} */
public static long nVRRenderModels_GetComponentButtonMask(long pchRenderModelName, long pchComponentName) {
    long __functionAddress = OpenVR.VRRenderModels.GetComponentButtonMask;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPPJ(pchRenderModelName, pchComponentName, __functionAddress);
}

/**
 * Get the button mask for all buttons associated with this component.
 *
 * <p>If no buttons (or axes) are associated with this component, return 0</p>
 *
 * <div style="margin-left: 26px; border-left: 1px solid gray; padding-left: 14px;"><h5>Note</h5>
 *
 * <p>multiple components may be associated with the same button. Ex: two grip buttons on a single controller.</p></div>
 *
 * <div style="margin-left: 26px; border-left: 1px solid gray; padding-left: 14px;"><h5>Note</h5>
 *
 * <p>A single component may be associated with multiple buttons. Ex: A trackpad which also provides "D-pad" functionality</p></div>
 */
@NativeType("uint64_t")
public static long VRRenderModels_GetComponentButtonMask(@NativeType("char const *") ByteBuffer pchRenderModelName, @NativeType("char const *") ByteBuffer pchComponentName) {
    if (CHECKS) {
        // Both names must be NUL-terminated.
        checkNT1(pchRenderModelName);
        checkNT1(pchComponentName);
    }
    return nVRRenderModels_GetComponentButtonMask(memAddress(pchRenderModelName), memAddress(pchComponentName));
}

/**
 * Get the button mask for all buttons associated with this component.
 *
 * <p>If no buttons (or axes) are associated with this component, return 0</p>
 *
 * <div style="margin-left: 26px; border-left: 1px solid gray; padding-left: 14px;"><h5>Note</h5>
 *
 * <p>multiple components may be associated with the same button. Ex: two grip buttons on a single controller.</p></div>
 *
 * <div style="margin-left: 26px; border-left: 1px solid gray; padding-left: 14px;"><h5>Note</h5>
 *
 * <p>A single component may be associated with multiple buttons. Ex: A trackpad which also provides "D-pad" functionality</p></div>
 */
@NativeType("uint64_t")
public static long VRRenderModels_GetComponentButtonMask(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("char const *") CharSequence pchComponentName) {
    // Encode both names on the thread-local stack; restored in finally.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        stack.nASCII(pchComponentName, true);
        long pchComponentNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_GetComponentButtonMask(pchRenderModelNameEncoded, pchComponentNameEncoded);
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetComponentRenderModelName ] ---

/** Unsafe version of: {@link #VRRenderModels_GetComponentRenderModelName GetComponentRenderModelName} */
public static int nVRRenderModels_GetComponentRenderModelName(long pchRenderModelName, long pchComponentName, long pchComponentRenderModelName, int unComponentRenderModelNameLen) {
    long __functionAddress = OpenVR.VRRenderModels.GetComponentRenderModelName;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPPPI(pchRenderModelName, pchComponentName, pchComponentRenderModelName, unComponentRenderModelNameLen, __functionAddress);
}

/**
 * Use this to get the render model name for the specified rendermode/component combination, to be passed to {@link #VRRenderModels_LoadRenderModel_Async LoadRenderModel_Async}. If the component
 * name is out of range, this function will return 0. Otherwise, it will return the size of the buffer required for the name.
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetComponentRenderModelName(@NativeType("char const *") ByteBuffer pchRenderModelName, @NativeType("char const *") ByteBuffer pchComponentName, @Nullable @NativeType("char *") ByteBuffer pchComponentRenderModelName) {
    if (CHECKS) {
        checkNT1(pchRenderModelName);
        checkNT1(pchComponentName);
    }
    // A null output buffer is allowed: the call then just reports the required size.
    return nVRRenderModels_GetComponentRenderModelName(memAddress(pchRenderModelName), memAddress(pchComponentName), memAddressSafe(pchComponentRenderModelName), remainingSafe(pchComponentRenderModelName));
}

/**
 * Use this to get the render model name for the specified rendermode/component combination, to be passed to {@link #VRRenderModels_LoadRenderModel_Async LoadRenderModel_Async}. If the component
 * name is out of range, this function will return 0. Otherwise, it will return the size of the buffer required for the name.
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetComponentRenderModelName(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("char const *") CharSequence pchComponentName, @Nullable @NativeType("char *") ByteBuffer pchComponentRenderModelName) {
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        stack.nASCII(pchComponentName, true);
        long pchComponentNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_GetComponentRenderModelName(pchRenderModelNameEncoded, pchComponentNameEncoded, memAddressSafe(pchComponentRenderModelName), remainingSafe(pchComponentRenderModelName));
    } finally {
        stack.setPointer(stackPointer);
    }
}

/**
 * Use this to get the render model name for the specified rendermode/component combination, to be passed to {@link #VRRenderModels_LoadRenderModel_Async LoadRenderModel_Async}. If the component
 * name is out of range, this function will return 0. Otherwise, it will return the size of the buffer required for the name.
 */
@NativeType("uint32_t")
public static String VRRenderModels_GetComponentRenderModelName(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("char const *") CharSequence pchComponentName, @NativeType("uint32_t") int unComponentRenderModelNameLen) {
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        stack.nASCII(pchComponentName, true);
        long pchComponentNameEncoded = stack.getPointerAddress();
        ByteBuffer pchComponentRenderModelName = stack.malloc(unComponentRenderModelNameLen);
        int __result = nVRRenderModels_GetComponentRenderModelName(pchRenderModelNameEncoded, pchComponentNameEncoded, memAddress(pchComponentRenderModelName), unComponentRenderModelNameLen);
        // NOTE(review): an out-of-range component name makes the native call return 0,
        // so __result - 1 would be negative — callers must pass a valid combination.
        return memASCII(pchComponentRenderModelName, __result - 1);
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetComponentStateForDevicePath ] ---

/** Unsafe version of: {@link #VRRenderModels_GetComponentStateForDevicePath GetComponentStateForDevicePath} */
public static boolean nVRRenderModels_GetComponentStateForDevicePath(long pchRenderModelName, long pchComponentName, long devicePath, long pState, long pComponentState) {
    long __functionAddress = OpenVR.VRRenderModels.GetComponentStateForDevicePath;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPPJPPZ(pchRenderModelName, pchComponentName, devicePath, pState, pComponentState, __functionAddress);
}

/**
 * @return if the {@code pchRenderModelName} or {@code pchComponentName} is invalid, this will return false (and transforms will be set to identity). Otherwise,
 * return true Note: For dynamic objects, visibility may be dynamic. (I.e., true/false will be returned based on controller state and controller mode
 * state).
 */
@NativeType("bool")
public static boolean VRRenderModels_GetComponentStateForDevicePath(@NativeType("char const *") ByteBuffer pchRenderModelName, @NativeType("char const *") ByteBuffer pchComponentName, @NativeType("VRInputValueHandle_t") long devicePath, @NativeType("RenderModel_ControllerMode_State_t const *") RenderModelControllerModeState pState, @NativeType("RenderModel_ComponentState_t *") RenderModelComponentState pComponentState) {
    if (CHECKS) {
        // Both names must be NUL-terminated.
        checkNT1(pchRenderModelName);
        checkNT1(pchComponentName);
    }
    return nVRRenderModels_GetComponentStateForDevicePath(memAddress(pchRenderModelName), memAddress(pchComponentName), devicePath, pState.address(), pComponentState.address());
}

/**
 * @return if the {@code pchRenderModelName} or {@code pchComponentName} is invalid, this will return false (and transforms will be set to identity). Otherwise,
 * return true Note: For dynamic objects, visibility may be dynamic. (I.e., true/false will be returned based on controller state and controller mode
 * state).
 */
@NativeType("bool")
public static boolean VRRenderModels_GetComponentStateForDevicePath(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("char const *") CharSequence pchComponentName, @NativeType("VRInputValueHandle_t") long devicePath, @NativeType("RenderModel_ControllerMode_State_t const *") RenderModelControllerModeState pState, @NativeType("RenderModel_ComponentState_t *") RenderModelComponentState pComponentState) {
    // Encode both names on the thread-local stack; restored in finally.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        stack.nASCII(pchComponentName, true);
        long pchComponentNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_GetComponentStateForDevicePath(pchRenderModelNameEncoded, pchComponentNameEncoded, devicePath, pState.address(), pComponentState.address());
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetComponentState ] ---

/** Unsafe version of: {@link #VRRenderModels_GetComponentState GetComponentState} */
public static boolean nVRRenderModels_GetComponentState(long pchRenderModelName, long pchComponentName, long pControllerState, long pState, long pComponentState) {
    long __functionAddress = OpenVR.VRRenderModels.GetComponentState;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPPPPPZ(pchRenderModelName, pchComponentName, pControllerState, pState, pComponentState, __functionAddress);
}

/**
 * This version of {@code GetComponentState} takes a controller state block instead of an action origin. This function is deprecated. You should use the
 * new input system and {@link #VRRenderModels_GetComponentStateForDevicePath GetComponentStateForDevicePath} instead.
 */
@NativeType("bool")
public static boolean VRRenderModels_GetComponentState(@NativeType("char const *") ByteBuffer pchRenderModelName, @NativeType("char const *") ByteBuffer pchComponentName, @NativeType("VRControllerState_t const *") VRControllerState pControllerState, @NativeType("RenderModel_ControllerMode_State_t const *") RenderModelControllerModeState pState, @NativeType("RenderModel_ComponentState_t *") RenderModelComponentState pComponentState) {
    if (CHECKS) {
        checkNT1(pchRenderModelName);
        checkNT1(pchComponentName);
    }
    return nVRRenderModels_GetComponentState(memAddress(pchRenderModelName), memAddress(pchComponentName), pControllerState.address(), pState.address(), pComponentState.address());
}

/**
 * This version of {@code GetComponentState} takes a controller state block instead of an action origin. This function is deprecated. You should use the
 * new input system and {@link #VRRenderModels_GetComponentStateForDevicePath GetComponentStateForDevicePath} instead.
 */
@NativeType("bool")
public static boolean VRRenderModels_GetComponentState(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("char const *") CharSequence pchComponentName, @NativeType("VRControllerState_t const *") VRControllerState pControllerState, @NativeType("RenderModel_ControllerMode_State_t const *") RenderModelControllerModeState pState, @NativeType("RenderModel_ComponentState_t *") RenderModelComponentState pComponentState) {
    // Encode both names on the thread-local stack; restored in finally.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        stack.nASCII(pchComponentName, true);
        long pchComponentNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_GetComponentState(pchRenderModelNameEncoded, pchComponentNameEncoded, pControllerState.address(), pState.address(), pComponentState.address());
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_RenderModelHasComponent ] ---

/** Unsafe version of: {@link #VRRenderModels_RenderModelHasComponent RenderModelHasComponent} */
public static boolean nVRRenderModels_RenderModelHasComponent(long pchRenderModelName, long pchComponentName) {
    long __functionAddress = OpenVR.VRRenderModels.RenderModelHasComponent;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPPZ(pchRenderModelName, pchComponentName, __functionAddress);
}

/** Returns true if the render model has a component with the specified name. */
@NativeType("bool")
public static boolean VRRenderModels_RenderModelHasComponent(@NativeType("char const *") ByteBuffer pchRenderModelName, @NativeType("char const *") ByteBuffer pchComponentName) {
    if (CHECKS) {
        // Both names must be NUL-terminated.
        checkNT1(pchRenderModelName);
        checkNT1(pchComponentName);
    }
    return nVRRenderModels_RenderModelHasComponent(memAddress(pchRenderModelName), memAddress(pchComponentName));
}

/** Returns true if the render model has a component with the specified name. */
@NativeType("bool")
public static boolean VRRenderModels_RenderModelHasComponent(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("char const *") CharSequence pchComponentName) {
    // Encode both names on the thread-local stack; restored in finally.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        stack.nASCII(pchComponentName, true);
        long pchComponentNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_RenderModelHasComponent(pchRenderModelNameEncoded, pchComponentNameEncoded);
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetRenderModelThumbnailURL ] ---

/** Unsafe version of: {@link #VRRenderModels_GetRenderModelThumbnailURL GetRenderModelThumbnailURL} */
public static int nVRRenderModels_GetRenderModelThumbnailURL(long pchRenderModelName, long pchThumbnailURL, int unThumbnailURLLen, long peError) {
    long __functionAddress = OpenVR.VRRenderModels.GetRenderModelThumbnailURL;
    if (CHECKS) {
        check(__functionAddress);
    }
    return callPPPI(pchRenderModelName, pchThumbnailURL, unThumbnailURLLen, peError, __functionAddress);
}

/** Returns the URL of the thumbnail image for this rendermodel. */
@NativeType("uint32_t")
public static int VRRenderModels_GetRenderModelThumbnailURL(@NativeType("char const *") ByteBuffer pchRenderModelName, @Nullable @NativeType("char *") ByteBuffer pchThumbnailURL, @NativeType("EVRRenderModelError *") IntBuffer peError) {
    if (CHECKS) {
        checkNT1(pchRenderModelName);
        // peError must have room for one error-code output.
        check(peError, 1);
    }
    // A null URL buffer is allowed: the call then just reports the required size.
    return nVRRenderModels_GetRenderModelThumbnailURL(memAddress(pchRenderModelName), memAddressSafe(pchThumbnailURL), remainingSafe(pchThumbnailURL), memAddress(peError));
}

/** Returns the URL of the thumbnail image for this rendermodel. */
@NativeType("uint32_t")
public static int VRRenderModels_GetRenderModelThumbnailURL(@NativeType("char const *") CharSequence pchRenderModelName, @Nullable @NativeType("char *") ByteBuffer pchThumbnailURL, @NativeType("EVRRenderModelError *") IntBuffer peError) {
    if (CHECKS) {
        check(peError, 1);
    }
    // Encode the name on the thread-local stack; restored in finally.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_GetRenderModelThumbnailURL(pchRenderModelNameEncoded, memAddressSafe(pchThumbnailURL), remainingSafe(pchThumbnailURL), memAddress(peError));
    } finally {
        stack.setPointer(stackPointer);
    }
}

/** Returns the URL of the thumbnail image for this rendermodel. */
@NativeType("uint32_t")
public static String VRRenderModels_GetRenderModelThumbnailURL(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("uint32_t") int unThumbnailURLLen, @NativeType("EVRRenderModelError *") IntBuffer peError) {
    if (CHECKS) {
        check(peError, 1);
    }
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        ByteBuffer pchThumbnailURL = stack.malloc(unThumbnailURLLen);
        int __result = nVRRenderModels_GetRenderModelThumbnailURL(pchRenderModelNameEncoded, memAddress(pchThumbnailURL), unThumbnailURLLen, memAddress(peError));
        // __result - 1 drops the NUL terminator. NOTE(review): if the native call
        // returns 0 this is negative — check peError before trusting the result.
        return memASCII(pchThumbnailURL, __result - 1);
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetRenderModelOriginalPath ] ---
/** Unsafe version of: {@link #VRRenderModels_GetRenderModelOriginalPath GetRenderModelOriginalPath} */
public static int nVRRenderModels_GetRenderModelOriginalPath(long pchRenderModelName, long pchOriginalPath, int unOriginalPathLen, long peError) {
    // Look up the native entry point from the loaded interface table.
    long target = OpenVR.VRRenderModels.GetRenderModelOriginalPath;
    if (CHECKS) {
        check(target);
    }
    return callPPPI(pchRenderModelName, pchOriginalPath, unOriginalPathLen, peError, target);
}
/**
 * Provides a render model path that will load the unskinned model if the model name provided has been replaced by the user. If the model hasn't been
 * replaced the path value will still be a valid path to load the model. Pass this to LoadRenderModel_Async, etc. to load the model.
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetRenderModelOriginalPath(@NativeType("char const *") ByteBuffer pchRenderModelName, @Nullable @NativeType("char *") ByteBuffer pchOriginalPath, @NativeType("EVRRenderModelError *") IntBuffer peError) {
    if (CHECKS) {
        // Name must be NUL-terminated; error out-parameter must hold one int.
        checkNT1(pchRenderModelName);
        check(peError, 1);
    }
    // The path buffer may be null, hence the "Safe" address/capacity helpers.
    long nameAddr = memAddress(pchRenderModelName);
    long pathAddr = memAddressSafe(pchOriginalPath);
    int  pathCap  = remainingSafe(pchOriginalPath);
    return nVRRenderModels_GetRenderModelOriginalPath(nameAddr, pathAddr, pathCap, memAddress(peError));
}
/**
 * Provides a render model path that will load the unskinned model if the model name provided has been replaced by the user. If the model hasn't been
 * replaced the path value will still be a valid path to load the model. Pass this to LoadRenderModel_Async, etc. to load the model.
 */
@NativeType("uint32_t")
public static int VRRenderModels_GetRenderModelOriginalPath(@NativeType("char const *") CharSequence pchRenderModelName, @Nullable @NativeType("char *") ByteBuffer pchOriginalPath, @NativeType("EVRRenderModelError *") IntBuffer peError) {
    if (CHECKS) {
        check(peError, 1);
    }
    // Save the stack pointer so the temporary ASCII encoding below is
    // released in the finally block.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        // Encode the name as a NUL-terminated ASCII string on the stack;
        // getPointerAddress() returns the address of that last allocation.
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        return nVRRenderModels_GetRenderModelOriginalPath(pchRenderModelNameEncoded, memAddressSafe(pchOriginalPath), remainingSafe(pchOriginalPath), memAddress(peError));
    } finally {
        // Restore the stack pointer, freeing the temporary encoding.
        stack.setPointer(stackPointer);
    }
}
/**
 * Provides a render model path that will load the unskinned model if the model name provided has been replaced by the user. If the model hasn't been
 * replaced the path value will still be a valid path to load the model. Pass this to LoadRenderModel_Async, etc. to load the model.
 */
@NativeType("uint32_t")
public static String VRRenderModels_GetRenderModelOriginalPath(@NativeType("char const *") CharSequence pchRenderModelName, @NativeType("uint32_t") int unOriginalPathLen, @NativeType("EVRRenderModelError *") IntBuffer peError) {
    if (CHECKS) {
        check(peError, 1);
    }
    // Save the stack pointer; both the encoded name and the result buffer
    // are stack allocations released in the finally block.
    MemoryStack stack = stackGet(); int stackPointer = stack.getPointer();
    try {
        stack.nASCII(pchRenderModelName, true);
        long pchRenderModelNameEncoded = stack.getPointerAddress();
        // Caller-sized output buffer for the path, filled by the native call.
        ByteBuffer pchOriginalPath = stack.malloc(unOriginalPathLen);
        int __result = nVRRenderModels_GetRenderModelOriginalPath(pchRenderModelNameEncoded, memAddress(pchOriginalPath), unOriginalPathLen, memAddress(peError));
        // NOTE(review): __result - 1 assumes the returned length counts the
        // terminating NUL — consistent with the sibling bindings; confirm
        // against the OpenVR IVRRenderModels docs.
        return memASCII(pchOriginalPath, __result - 1);
    } finally {
        stack.setPointer(stackPointer);
    }
}
// --- [ VRRenderModels_GetRenderModelErrorNameFromEnum ] ---
/** Unsafe version of: {@link #VRRenderModels_GetRenderModelErrorNameFromEnum GetRenderModelErrorNameFromEnum} */
public static long nVRRenderModels_GetRenderModelErrorNameFromEnum(int error) {
    // Look up the native entry point from the loaded interface table.
    long target = OpenVR.VRRenderModels.GetRenderModelErrorNameFromEnum;
    if (CHECKS) {
        check(target);
    }
    return callP(error, target);
}
/** Returns a string for a render model error. */
@Nullable
@NativeType("char const *")
public static String VRRenderModels_GetRenderModelErrorNameFromEnum(@NativeType("EVRRenderModelError") int error) {
    // memASCIISafe maps a NULL native pointer to a null String.
    return memASCIISafe(nVRRenderModels_GetRenderModelErrorNameFromEnum(error));
}
}
| |
/*
* $Id$
*/
/*
Copyright (c) 2000-2007 Board of Trustees of Leland Stanford Jr. University,
all rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Except as contained in this notice, the name of Stanford University shall not
be used in advertising or otherwise to promote the sale, use or other dealings
in this Software without prior written authorization from Stanford University.
*/
package org.lockss.filter.html;
import java.io.*;
import java.util.*;
import org.lockss.test.*;
import org.lockss.util.*;
import org.htmlparser.*;
import org.htmlparser.util.*;
/**
 * Tests for {@code HtmlFilterInputStream}: transform plumbing, handling of
 * unclosed tags, and charset detection/adaptation behavior driven by the
 * PARAM_* configuration parameters.
 */
public class TestHtmlFilterInputStream extends LockssTestCase {
  // Charset names used throughout the charset-handling tests.
  static String ISO = "ISO-8859-1";
  static String UTF8 = "UTF-8";

  /** Check that the filtered string matches expected. */
  private void assertFilterString(String expected, String input,
                                  HtmlTransform xform)
      throws IOException {
    assertFilterString(expected, input, null, null, null, xform);
  }

  // Filters input through an HtmlFilterInputStream built with the given
  // charsets and transform, asserts the output matches expected, then checks
  // that the stream reports EOF and that reading after close() throws.
  private void assertFilterString(String expected, String input,
                                  String strCharset, String inCharset,
                                  String outCharset, HtmlTransform xform)
      throws IOException {
    // strCharset == null means raw bytes via StringInputStream; otherwise
    // encode the String with the named charset.
    InputStream in = (strCharset == null)
      ? new StringInputStream(input)
      : new ReaderInputStream(new StringReader(input), strCharset);
    InputStream filt =
      new HtmlFilterInputStream(in, inCharset, outCharset, xform);
    if (strCharset != null) {
      assertInputStreamMatchesString(expected, filt, strCharset);
    } else {
      assertInputStreamMatchesString(expected, filt);
    }
    assertEquals(-1, filt.read());
    filt.close();
    // NOTE(review): System.gc() presumably encourages cleanup of parser
    // resources before the closed-read check — TODO confirm it is needed.
    System.gc();
    try {
      filt.read();
      fail("closed InputStream should throw");
    } catch (IOException e) {}
  }

  // Shorthand: filter with the identity transform and compare.
  private void assertIdentityXform(String expected, String input)
      throws IOException {
    assertFilterString(expected, input, new IdentityXform());
  }

  // Identity-transform variant with explicit string/in/out charsets.
  private void assertIdentityXform(String expected,
                                   String input, String strCharset,
                                   String inCharset, String outCharset)
      throws IOException {
    assertFilterString(expected, input, strCharset, inCharset, outCharset,
                       new IdentityXform());
  }

  /** Constructor must reject null stream and null transform. */
  public void testIll() {
    try {
      new HtmlFilterInputStream(null, new IdentityXform ());
      fail("null InputStream should throw");
    } catch(IllegalArgumentException iae) {
    }
    try {
      new HtmlFilterInputStream(new StringInputStream("blah"), null);
      fail("null xform should throw");
    } catch(IllegalArgumentException iae) {
    }
  }

  /** Identity transform should pass simple HTML through unchanged. */
  public void testIdentityXform() throws IOException {
    assertFilterString("<html>foo</html>",
                       "<html>foo</html>",
                       new IdentityXform());
  }

  /** Empty input produces empty output; transform sees an empty NodeList. */
  public void testEmpty() throws IOException {
    assertFilterString("", "", new IdentityXform());
    MockHtmlTransform xform =
      new MockHtmlTransform(ListUtil.list(new NodeList()));
    assertFilterString("", "", xform);
    assertEquals(0, xform.getArg(0).size());
  }

  // Parses an HTML fragment into a NodeList using the htmlparser library.
  NodeList parse(String in) throws Exception {
    Parser p = ParserUtils.createParserParsingAnInputString(in);
    return p.parse(null);
  }

  /** Output comes from the transform's result, input from the parse. */
  public void testXform() throws Exception {
    String in = "<b>bold</b>";
    NodeList out = parse("<i>it</i>");
    MockHtmlTransform xform = new MockHtmlTransform(ListUtil.list(out));
    assertFilterString("<i>it</i>", in, xform);
    // The transform should have been handed the parsed input: tag, text, end tag.
    NodeList nl = xform.getArg(0);
    assertEquals(3, nl.size());
    assertEquals("<b>", nl.elementAt(0).toHtml());
    assertEquals("bold", nl.elementAt(1).toHtml());
    assertEquals("</b>", nl.elementAt(2).toHtml());
  }

  /**
   * Unclosed li/div tags: verbatim mode preserves them; with
   * PARAM_VERBATIM=false the parser inserts the missing close tags.
   */
  public void testUnclosed1() throws IOException {
    String in = "<HTML><BODY>" +
      "<ul><li>l1<li>l2<div>text1</ul>tween" +
      "<ul><li>l3<li>l4<div><script>text2</ul>" +
      "</body></html>";
    String exp = "<HTML><BODY>" +
      "<ul><li>l1</li><li>l2<div>text1</div></li></ul>tween" +
      "<ul><li>l3</li><li>l4<div><script>text2</script></div></li></ul>" +
      "</body></html>";
    assertIdentityXform(in, in);
    ConfigurationUtil.setFromArgs(HtmlFilterInputStream.PARAM_VERBATIM,
                                  "false");
    assertIdentityXform(exp, in);
  }

  /** Same as testUnclosed1 but for dt/dd definition-list tags. */
  public void testUnclosed2() throws IOException {
    String in = "<HTML><BODY>" +
      "<dl><dt>t1<dd>d1<div>text1</dl>" +
      "<dl><dt>t2<dd>d2<div><script>text2</dl>" +
      "</body></html>";
    String exp = "<HTML><BODY>" +
      "<dl><dt>t1</dt><dd>d1<div>text1</div></dd></dl>" +
      "<dl><dt>t2</dt><dd>d2<div><script>text2</script></div></dd></dl>" +
      "</body></html>";
    assertIdentityXform(in, in);
    ConfigurationUtil.setFromArgs(HtmlFilterInputStream.PARAM_VERBATIM,
                                  "false");
    assertIdentityXform(exp, in);
  }

  // Ensure that the script parser has been put into non-strict mode
  public void testScript() throws IOException {
    String in = "<HTML><BODY>" +
      "<script>document.write (\"<a>This is strictly illegal</A>\")" +
      " more('stu<a style=\"' + 'foo\">ff');" +
      " more('<h2');" +
      " more('stuff</h2>');" +
      " more('stuff</a>');" +
      "</script>" +
      "</body></html>";
    assertIdentityXform(in, in);
  }

  /**
   * Exercises in/out charset combinations with PARAM_ADAPT_ENCODING off and
   * on. With adaptation off, a mismatched outCharset mangles characters; with
   * adaptation on, outCharset is effectively ignored.
   */
  public void testCharset() throws Exception {
    String in1 = "<html><body>" +
      "abc\u00e91234" +
      "</body></html>";
    String exp1 = "<html><body>" +
      "abc\u00e91234" +
      "</body></html>";
    // é misread as UTF-8 becomes ý here (lossy decode).
    String exp1U = "<html><body>" +
      "abc\u00fd1234" +
      "</body></html>";
    String in2 = "<html><body>" +
      "abc\u2260" +
      "</body></html>";
    String exp2 = "<html><body>" +
      "abc\u2260" +
      "</body></html>";
    String exp28 = "<html><body>" +
      "abc\u0060" +
      "</body></html>";
    ConfigurationUtil.addFromArgs(HtmlFilterInputStream.PARAM_ADAPT_ENCODING,
                                  "false");
    assertIdentityXform(exp1, in1, "ISO-8859-1", "ISO-8859-1", "ISO-8859-1");
    assertIdentityXform(exp1, in1, "ISO-8859-1", "ISO-8859-1", null);
    assertIdentityXform(exp1U, in1, "ISO-8859-1", "UTF-8", null);
    assertIdentityXform(exp2, in2, "UTF-8", "UTF-8", "UTF-8");
    assertIdentityXform(exp28, in2, "UTF-8", "UTF-8", null);
    try {
      assertIdentityXform(exp1, in1, "ISO-8859-1", "ISO-8859-1", "UTF-8");
      fail("Shouldn't match String read with different encoding");
    } catch (junit.framework.ComparisonFailure e) {
    }
    ConfigurationUtil.addFromArgs(HtmlFilterInputStream.PARAM_ADAPT_ENCODING,
                                  "true");
    assertIdentityXform(exp1, in1, "ISO-8859-1", "ISO-8859-1", "ISO-8859-1");
    assertIdentityXform(exp1, in1, "ISO-8859-1", "ISO-8859-1", "UTF-16");
    assertIdentityXform(exp1, in1, "ISO-8859-1", "ISO-8859-1", null);
    assertIdentityXform(exp1U, in1, "ISO-8859-1", "UTF-8", null);
    assertIdentityXform(exp2, in2, "UTF-8", "UTF-8", "UTF-8");
    assertIdentityXform(exp28, in2, "UTF-8", "UTF-8", null);
    // With adaptEncoding true, outCharset is ignored if it's non-null
    assertIdentityXform(exp1, in1, "ISO-8859-1", "ISO-8859-1", "UTF-8");
  }

  /** The filtered stream reports its charset via the EncodedThing interface. */
  public void testKnowsEncoding() throws Exception {
    String in1 = "<html><body>" +
      "abc\u00e91234" +
      "</body></html>";
    String exp1 = "<html><body>" +
      "abc\u00e91234" +
      "</body></html>";
    InputStream in = new ReaderInputStream(new StringReader(in1), ISO);
    InputStream filt =
      new HtmlFilterInputStream(in, ISO, ISO, new IdentityXform());
    assertInputStreamMatchesString(exp1, filt, ISO);
    assertTrue(filt instanceof EncodedThing);
    EncodedThing et = (EncodedThing)filt;
    assertEquals(ISO, et.getCharset());
  }

  /**
   * A META http-equiv charset declaration inside the document overrides the
   * charset supplied to the constructor, and the change is visible before
   * any data is read.
   */
  public void testKnowsEncodingChange() throws Exception {
    String in1 = "<html><head>" +
      "<META http-equiv=Content-Type content=\"text/html; charset=utf-8\">" +
      "</head></body>" +
      "abc\u00e91234" +
      "</body></html>";
    String exp1 = "<html><head>" +
      "<META http-equiv=Content-Type content=\"text/html; charset=utf-8\">" +
      "</head></body>" +
      "abc\u00e91234" +
      "</body></html>";
    // With input encoded as UTF-8
    InputStream in = new ReaderInputStream(new StringReader(in1), UTF8);
    // And a file whose Content-Type is ISO-8859 and contains a charset
    // change to UTF-8
    InputStream filt =
      new HtmlFilterInputStream(in, ISO, ISO, new IdentityXform());
    // The filtered stream should know that its encoding is UTF-8 (*before*
    // anything is read from it)
    assertTrue(filt instanceof EncodedThing);
    EncodedThing et = (EncodedThing)filt;
    assertEquals(UTF8, et.getCharset());
    // It should match the UTF-8 encoding of the string
    assertInputStreamMatchesString(exp1, filt, UTF8);
    // And should still know that its encoding is UTF-8
    assertEquals(UTF8, et.getCharset());
  }

  /** Same as testKnowsEncodingChange but using the HTML5 META charset form. */
  public void testKnowsEncodingChangeCharsetMeta() throws Exception {
    String in1 = "<html><head>" +
      "<META charset=utf-8>" +
      "</head></body>" +
      "abc\u00e91234" +
      "</body></html>";
    String exp1 = "<html><head>" +
      "<META charset=utf-8>" +
      "</head></body>" +
      "abc\u00e91234" +
      "</body></html>";
    // With input encoded as UTF-8
    InputStream in = new ReaderInputStream(new StringReader(in1), UTF8);
    // And a file whose Content-Type is ISO-8859 and contains a charset
    // change to UTF-8
    InputStream filt =
      new HtmlFilterInputStream(in, ISO, ISO, new IdentityXform());
    // The filtered stream should know that its encoding is UTF-8 (*before*
    // anything is read from it)
    assertTrue(filt instanceof EncodedThing);
    EncodedThing et = (EncodedThing)filt;
    assertEquals(UTF8, et.getCharset());
    // It should match the UTF-8 encoding of the string
    assertInputStreamMatchesString(exp1, filt, UTF8);
    // And should still know that its encoding is UTF-8
    assertEquals(UTF8, et.getCharset());
  }

  /**
   * With the mark buffer disabled (PARAM_MARK_SIZE=0) the parser cannot
   * rewind to re-decode after a charset change, so parsing must fail.
   */
  public void testChangeCharsetFailsIfNoMark() throws Exception {
    ConfigurationUtil.setFromArgs(HtmlFilterInputStream.PARAM_MARK_SIZE, "0");
    log.info("read(): exception following is expected");
    try {
      doParseCharset();
      fail("parser should fail to reset() input stream if not mark()ed");
    } catch (IOException e) {
    }
  }

  /**
   * Charset change past the mark buffer: with PARAM_ENCODING_MATCH_RANGE set,
   * output should match the expected text modulo a small offset where the
   * re-decode resynchronized.
   */
  public void testChangeCharsetBadCharConfig() throws Exception {
    ConfigurationUtil.setFromArgs(HtmlFilterInputStream.PARAM_ENCODING_MATCH_RANGE,
                                  "1000");
    String file = "charset-change3.txt";
    java.net.URL url = getResource(file);
    InputStream in = null;
    InputStream expin = null;
    try {
      in = UrlUtil.openInputStream(url.toString());
      in = new BufferedInputStream(in);
      expin = UrlUtil.openInputStream(url.toString());
      Reader exprdr = new InputStreamReader(expin, "UTF-8");
      HtmlFilterInputStream actin = new HtmlFilterInputStream(in, "ISO-8859-1", "UTF-8", new IdentityXform());
      Reader actrdr = new InputStreamReader(actin, "UTF-8");
      String exp = StringUtil.fromReader(exprdr);
      String act = StringUtil.fromReader(actrdr);
      // Offsets skip the region before the resynchronization point.
      assertEquals(exp.substring(3227), act.substring(3234));
    } finally {
      IOUtil.safeClose(in);
      IOUtil.safeClose(expin);
    }
  }

  /** The setEncodingMatchRange() setter overrides the configured value. */
  public void testChangeCharsetBadCharSetter() throws Exception {
    // Ensure setter (below) overrides config
    ConfigurationUtil.setFromArgs(HtmlFilterInputStream.PARAM_ENCODING_MATCH_RANGE,
                                  "0");
    String file = "charset-change3.txt";
    java.net.URL url = getResource(file);
    InputStream in = null;
    InputStream expin = null;
    try {
      in = UrlUtil.openInputStream(url.toString());
      in = new BufferedInputStream(in);
      expin = UrlUtil.openInputStream(url.toString());
      Reader exprdr = new InputStreamReader(expin, "UTF-8");
      HtmlFilterInputStream actin = new HtmlFilterInputStream(in, "ISO-8859-1", "UTF-8", new IdentityXform());
      actin.setEncodingMatchRange(128);
      Reader actrdr = new InputStreamReader(actin, "UTF-8");
      String exp = StringUtil.fromReader(exprdr);
      String act = StringUtil.fromReader(actrdr);
      assertEquals(exp.substring(3227), act.substring(3234));
    } finally {
      IOUtil.safeClose(in);
      IOUtil.safeClose(expin);
    }
  }

  /** A very large match range also succeeds. */
  public void testChangeCharsetBadCharLargeRange() throws Exception {
    String file = "charset-change3.txt";
    java.net.URL url = getResource(file);
    InputStream in = null;
    InputStream expin = null;
    try {
      in = UrlUtil.openInputStream(url.toString());
      in = new BufferedInputStream(in);
      expin = UrlUtil.openInputStream(url.toString());
      Reader exprdr = new InputStreamReader(expin, "UTF-8");
      HtmlFilterInputStream actin = new HtmlFilterInputStream(in, "ISO-8859-1", "UTF-8", new IdentityXform());
      actin.setEncodingMatchRange(10000);
      Reader actrdr = new InputStreamReader(actin, "UTF-8");
      String exp = StringUtil.fromReader(exprdr);
      String act = StringUtil.fromReader(actrdr);
      assertEquals(exp.substring(3227), act.substring(3234));
    } finally {
      IOUtil.safeClose(in);
      IOUtil.safeClose(expin);
    }
  }

  /** Charset change occurring late in the file, moderate match range. */
  public void testChangeCharsetBadCharLateChange() throws Exception {
    String file = "charset-change3.txt";
    java.net.URL url = getResource(file);
    InputStream in = null;
    InputStream expin = null;
    try {
      in = UrlUtil.openInputStream(url.toString());
      in = new BufferedInputStream(in);
      expin = UrlUtil.openInputStream(url.toString());
      Reader exprdr = new InputStreamReader(expin, "UTF-8");
      HtmlFilterInputStream actin = new HtmlFilterInputStream(in, "ISO-8859-1", "UTF-8", new IdentityXform());
      actin.setEncodingMatchRange(1000);
      Reader actrdr = new InputStreamReader(actin, "UTF-8");
      String exp = StringUtil.fromReader(exprdr);
      String act = StringUtil.fromReader(actrdr);
      assertEquals(exp.substring(3227), act.substring(3234));
    } finally {
      IOUtil.safeClose(in);
      IOUtil.safeClose(expin);
    }
  }

  /** With matching disabled (range 0), a charset mismatch must throw. */
  public void testChangeCharsetMatchRangeDisabled() throws Exception {
    ConfigurationUtil.setFromArgs(HtmlFilterInputStream.PARAM_ENCODING_MATCH_RANGE,
                                  "0");
    String file = "charset-change3.txt";
    java.net.URL url = getResource(file);
    InputStream in = null;
    InputStream expin = null;
    try {
      in = UrlUtil.openInputStream(url.toString());
      in = new BufferedInputStream(in);
      HtmlFilterInputStream actin = new HtmlFilterInputStream(in, "ISO-8859-1", "UTF-8", new IdentityXform());
      Reader actrdr = new InputStreamReader(actin, "UTF-8");
      StringUtil.fromReader(actrdr);
      fail("encodingMatchRange set to zero, mismatch should throw");
    } catch (IOException e) {
      // ignored
    } finally {
      IOUtil.safeClose(in);
    }
  }

  // Test default mark size
  public void testChangeCharset() throws Exception {
    doParseCharset();
  }

  // Parses the rewind-test fixture and compares the identity-filtered output
  // against the raw file read as ISO-8859-1. Used both directly and by the
  // no-mark failure test above.
  void doParseCharset() throws Exception {
    String file = "rewind-test.txt";
    java.net.URL url = getResource(file);
    InputStream in = null;
    InputStream expin = null;
    try {
      in = UrlUtil.openInputStream(url.toString());
      assertNotNull(in);
      in = new BufferedInputStream(in);
      expin = UrlUtil.openInputStream(url.toString());
      Reader rdr = new InputStreamReader(expin, "iso-8859-1");
      String exp = StringUtil.fromReader(rdr);
      Reader filt = StringUtil.getLineReader(new HtmlFilterInputStream(in, new IdentityXform()));
      assertReaderMatchesString(exp, filt);
    } finally {
      IOUtil.safeClose(in);
      IOUtil.safeClose(expin);
    }
  }

  /** Transform that returns its input unchanged. */
  class IdentityXform implements HtmlTransform {
    public NodeList transform(NodeList nl) {
      return nl;
    }
  }

  /**
   * LinkTag subclass that records setStartPosition/setEndPosition calls
   * into a list, so tests can observe the positions the parser assigns.
   */
  static class MyLinkTag extends org.htmlparser.tags.LinkTag {
    private static final String[] mEnders = new String[] {"A", "P", "DIV", "TD", "TR", "FORM", "LI"};
    private static final String[] mEndTagEnders = new String[] {"P", "DIV", "TD", "TR", "FORM", "LI", "BODY", "HTML"};
    List lst;
    public MyLinkTag(List lst) {
      this.lst = lst;
    }
    public String[] getEnders () {
      return mEnders;
    }
    public String[] getEndTagEnders() {
      return mEndTagEnders;
    }
    // Position setters record the call instead of storing the position.
    public void setStartPosition(int start) {
      lst.add("s"+start);
    }
    public void setEndPosition(int end) {
      lst.add("e"+end);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.physical.impl.scan.v3.file;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.drill.common.exceptions.CustomErrorContext;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.map.CaseInsensitiveMap;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.common.types.TypeProtos.MajorType;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.physical.impl.scan.v3.file.ImplicitColumnMarker.FileImplicitMarker;
import org.apache.drill.exec.physical.impl.scan.v3.file.ImplicitColumnMarker.InternalColumnMarker;
import org.apache.drill.exec.physical.impl.scan.v3.file.ImplicitColumnMarker.PartitionColumnMarker;
import org.apache.drill.exec.physical.impl.scan.v3.schema.MutableTupleSchema;
import org.apache.drill.exec.physical.impl.scan.v3.schema.MutableTupleSchema.ColumnHandle;
import org.apache.drill.exec.physical.impl.scan.v3.schema.ProjectedColumn;
import org.apache.drill.exec.physical.impl.scan.v3.schema.ScanSchemaTracker;
import org.apache.drill.exec.physical.impl.scan.v3.schema.SchemaUtils;
import org.apache.drill.exec.record.metadata.ColumnMetadata;
import org.apache.drill.exec.record.metadata.MetadataUtils;
import org.apache.drill.exec.record.metadata.TupleMetadata;
import org.apache.drill.exec.server.options.OptionSet;
import org.apache.drill.exec.store.ColumnExplorer.ImplicitFileColumn;
import org.apache.drill.exec.store.ColumnExplorer.ImplicitFileColumns;
import org.apache.drill.exec.store.ColumnExplorer.ImplicitInternalFileColumns;
import org.apache.drill.exec.store.dfs.DrillFileSystem;
import org.apache.drill.shaded.guava.com.google.common.base.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Manages the resolution of implicit file metadata and partition columns.
* Parses the file metadata columns from the projection list. Creates a parse
* result which drives loading data into vectors. Supports renaming the columns
* via session options.
* <p>
* Lifecycle:
* <ul>
* <li>At the start of the scan, the parser looks for implicit and partition
* columns in the scan schema, resolving matching columns to be implicit
* columns, and building up a description of those columns for use later.</li>
* <li>If the projection list contains a wildcard, it can also contain implicit
* columns. If running in legacy mode, inserts partition columns when the query
* contains a wildcard.</li>
* <li>On each file (on each reader), the parse result allows generating actual
* values for each column, which are then written into the corresponding value
* vectors.</li>
* </ul>
* <p>
* Assumes that the partition count is fixed at runtime; that it is determined
* at plan time and provided in the plan. This ensures that the schema is stable
* across readers: even a reader at the top-most partition will produce columns
* for all partitions if using legacy mode wildcard expansion.
*/
public class ImplicitColumnResolver {
private static final Logger logger = LoggerFactory.getLogger(ImplicitColumnResolver.class);
/** Fluent options bundle consumed by {@link ImplicitColumnResolver}. */
public static class ImplicitColumnOptions {
  protected OptionSet optionSet;
  protected int maxPartitionDepth;
  /**
   * Historically Drill will expand partition columns (dir0, dir1, ...)
   * when the project list includes a wildcard.
   */
  protected boolean useLegacyWildcardExpansion = true;
  protected DrillFileSystem dfs;

  /**
   * The session/system option set, used to read the partition column
   * label (e.g. "dir") and related settings.
   */
  public ImplicitColumnOptions optionSet(OptionSet optionSet) {
    this.optionSet = optionSet;
    return this;
  }

  /**
   * The maximum partition depth for any file in this query. Specifies
   * the maximum number of {@code diri} columns that this parser will
   * recognize or generate.
   */
  public ImplicitColumnOptions maxPartitionDepth(int maxPartitionDepth) {
    this.maxPartitionDepth = maxPartitionDepth;
    return this;
  }

  /**
   * Indicates whether to expand partition columns when the query contains a wildcard.
   * Supports queries such as the following:<code><pre>
   * select * from dfs.`partitioned-dir`</pre></code>
   * In which the output columns will be (columns, dir0) if the partitioned directory
   * has one level of nesting.
   *
   * See {@link TestImplicitFileColumns#testImplicitColumns}
   */
  public ImplicitColumnOptions useLegacyWildcardExpansion(boolean flag) {
    useLegacyWildcardExpansion = flag;
    return this;
  }

  /**
   * The file system, required when a column (such as last-modified time)
   * must be resolved against actual file metadata.
   */
  public ImplicitColumnOptions dfs(DrillFileSystem dfs) {
    this.dfs = dfs;
    return this;
  }
}
/**
 * The result of scanning the scan output schema to identify implicit and
 * partition columns. Defines a sub-schema of just these columns, along with
 * column markers which resolve the columns for each file.
 */
public static class ParseResult {
  private final List<ImplicitColumnMarker> columns;
  private final TupleMetadata schema;
  private final boolean isMetadataScan;

  protected ParseResult(List<ImplicitColumnMarker> columns, TupleMetadata schema,
      boolean isMetadataScan) {
    this.columns = columns;
    this.schema = schema;
    this.isMetadataScan = isMetadataScan;
  }

  /** Schema containing only the implicit/partition columns. */
  public TupleMetadata schema() { return schema; }

  /** Markers, one per implicit column, in output-schema order. */
  public List<ImplicitColumnMarker> columns() { return columns; }

  /** Whether this scan only projects file metadata. */
  public boolean isMetadataScan() { return isMetadataScan; }

  /**
   * Computes the per-file value for each implicit column by asking each
   * marker to resolve itself against the given file description.
   */
  public Object[] resolve(FileDescrip fileInfo) {
    Object[] resolved = new Object[columns.size()];
    int slot = 0;
    for (ImplicitColumnMarker marker : columns) {
      resolved[slot++] = marker.resolve(fileInfo);
    }
    return resolved;
  }
}
private static class ImplicitColumnParser {
private final ImplicitColumnResolver parser;
private final ScanSchemaTracker tracker;
private final MutableTupleSchema scanSchema;
private final List<ImplicitColumnMarker> columns = new ArrayList<>();
private final Set<Integer> referencedPartitions = new HashSet<>();
private boolean isMetadataScan;
protected ImplicitColumnParser(ImplicitColumnResolver parser, ScanSchemaTracker tracker) {
this.parser = parser;
this.tracker = tracker;
this.scanSchema = tracker.internalSchema();
}
protected ParseResult parse() {
for (ColumnHandle col : tracker.internalSchema().columns()) {
matchColumn(col);
}
if (tracker.internalSchema().projectionType() == ScanSchemaTracker.ProjectionType.ALL) {
expandWildcard();
}
// Have the tracker gather the implicit columns so they appear
// in the same order as the output schema, even if a wildcard
// appears out-of-order:
// SELECT *, fileName
// SELECT fileName, *
return new ParseResult(columns, tracker.applyImplicitCols(), isMetadataScan);
}
private void expandWildcard() {
if (!parser.useLegacyWildcardExpansion) {
return;
}
// Legacy wildcard expansion: include the file partitions for this file.
// This is a disadvantage for a * query: files at different directory
// levels will have different numbers of columns. Would be better to
// return this data as an array at some point.
// Append this after the *, keeping the * for later expansion.
for (int i = 0; i < parser.maxPartitionDepth; i++) {
if (referencedPartitions.contains(i)) {
continue;
}
ImplicitColumnMarker marker = new PartitionColumnMarker(i);
ColumnMetadata resolved = MetadataUtils.newScalar(parser.partitionName(i), PARTITION_COL_TYPE);
columns.add(marker);
tracker.expandImplicitCol(resolved, marker);
referencedPartitions.add(i);
}
}
private void matchColumn(ColumnHandle col) {
String colType = SchemaUtils.implicitColType(col.column());
if (colType != null) {
resolveTaggedColumn(parser, col, colType);
return;
} else if (col.column().isDynamic()) {
matchByName(col);
}
}
private void resolveTaggedColumn(ImplicitColumnResolver parser,
ColumnHandle col, String colType) {
Matcher m = parser.partitionTypePattern.matcher(colType);
if (m.matches()) {
resolvePartitionColumn(m, parser, col);
return;
}
ImplicitFileColumn defn = parser.typeDefs.get(colType);
if (defn != null) {
resolveImplicitColumn((ImplicitFileColumns) defn, col, colType);
return;
}
resolveUnknownColumn(col, colType);
}
private void resolvePartitionColumn(Matcher m, ImplicitColumnResolver parser,
ColumnHandle col) {
// The provided schema column must be of the correct type and mode.
ColumnMetadata colSchema = col.column();
if (colSchema.type() != MinorType.VARCHAR ||
colSchema.mode() != DataMode.OPTIONAL) {
throw UserException.validationError()
.message("Provided column `%s` is marked as a parition column, but is of the wrong type",
colSchema.columnString())
.addContext("Expected type", MinorType.VARCHAR.name())
.addContext("Expected cardinality", DataMode.OPTIONAL.name())
.addContext(parser.errorContext)
.build(logger);
}
// Partition column
int partitionIndex = Integer.parseInt(m.group(1));
markImplicit(col, new PartitionColumnMarker(partitionIndex));
// Remember the partition for later wildcard expansion
referencedPartitions.add(partitionIndex);
}
private void resolveImplicitColumn(ImplicitFileColumns defn,
ColumnHandle col, String colType) {
// The provided schema column must be of the correct type and mode.
ColumnMetadata colSchema = col.column();
if (colSchema.type() != MinorType.VARCHAR ||
colSchema.mode() == DataMode.REPEATED) {
throw UserException.validationError()
.message("Provided column `%s` is marked as implicit '%s', but is of the wrong type",
colSchema.columnString(), defn.propertyValue())
.addContext("Expected type", MinorType.VARCHAR.name())
.addContext("Expected cardinality", String.format("%s or %s",
DataMode.REQUIRED.name(), DataMode.OPTIONAL.name()))
.addContext(parser.errorContext)
.build(logger);
}
markImplicit(col, new FileImplicitMarker(defn));
}
private void markImplicit(ColumnHandle col, ImplicitColumnMarker marker) {
columns.add(marker);
col.markImplicit(marker);
}
private void resolveUnknownColumn(ColumnHandle col, String colType) {
throw UserException.validationError()
.message("Provided column %s references an undefined implicit column type '%s'",
col.column().columnString(), colType)
.addContext("Expected type", MinorType.VARCHAR.name())
.addContext("Expected cardinality", String.format("%s or %s",
DataMode.REQUIRED.name(), DataMode.OPTIONAL.name()))
.addContext(parser.errorContext)
.build(logger);
}
private void matchByName(ColumnHandle col) {
Matcher m = parser.partitionPattern.matcher(col.name());
if (m.matches()) {
buildPartitionColumn(m, col);
return;
}
ImplicitFileColumn defn = parser.colDefs.get(col.name());
if (defn != null) {
buildImplicitColumn(defn, col);
}
}
private void buildPartitionColumn(Matcher m, ColumnHandle col) {
// If the projected column is a map or array, then it shadows the
// partition column. Example: dir0.x, dir0[2].
ProjectedColumn projCol = (ProjectedColumn) col.column();
if (!projCol.isSimple()) {
logger.warn("Projected column {} shadows partition column {}",
projCol.projectString(), col.name());
return;
}
// Partition column
int partitionIndex = Integer.parseInt(m.group(1));
resolve(col,
MetadataUtils.newScalar(col.name(), PARTITION_COL_TYPE),
new PartitionColumnMarker(partitionIndex));
// Remember the partition for later wildcard expansion
referencedPartitions.add(partitionIndex);
}
private void resolve(ColumnHandle col, ColumnMetadata resolved, ImplicitColumnMarker marker) {
columns.add(marker);
scanSchema.resolveImplicit(col, resolved, marker);
}
private void buildImplicitColumn(ImplicitFileColumn defn,
ColumnHandle col) {
// If the projected column is a map or array, then it shadows the
// metadata column. Example: filename.x, filename[2].
ProjectedColumn projCol = (ProjectedColumn) col.column();
if (!projCol.isSimple()) {
logger.warn("Projected column {} shadows implicit column {}",
projCol.projectString(), col.name());
} else if (defn instanceof ImplicitInternalFileColumns) {
resolveInternalColumn(col, (ImplicitInternalFileColumns) defn);
} else {
resolve(col,
MetadataUtils.newScalar(col.name(), IMPLICIT_COL_TYPE),
new FileImplicitMarker((ImplicitFileColumns) defn));
}
}
/**
 * Resolves one of Drill's internal implicit columns (such as last
 * modified time or the project-metadata marker).
 *
 * @throws IllegalStateException if the column requires a file system and
 *         none was provided (tests may omit the DFS; production must not)
 */
private void resolveInternalColumn(ColumnHandle col,
    ImplicitInternalFileColumns defn) {

  // Tests may not provide the DFS, real code must
  if (defn == ImplicitInternalFileColumns.LAST_MODIFIED_TIME &&
      parser.dfs == null) {
    throw new IllegalStateException(
        "Must provide a file system to use " + defn.name());
  }

  // Check if this is an implied metadata scan
  if (defn == ImplicitInternalFileColumns.PROJECT_METADATA) {
    isMetadataScan = true;
  }

  // TODO: Internal columns are VARCHAR for historical reasons.
  // Better to use a type that fits the column purposes.
  resolve(col,
      MetadataUtils.newScalar(col.name(),
          defn.isOptional() ? OPTIONAL_INTERNAL_COL_TYPE : IMPLICIT_COL_TYPE),
      new InternalColumnMarker(defn));
}
}
// Implicit columns are all VARCHAR: required for file metadata columns,
// optional for partition (dirN) and for optional internal columns.
public static final MajorType IMPLICIT_COL_TYPE = Types.required(MinorType.VARCHAR);
public static final MajorType PARTITION_COL_TYPE = Types.optional(MinorType.VARCHAR);
public static final MajorType OPTIONAL_INTERNAL_COL_TYPE = Types.optional(MinorType.VARCHAR);

// Deepest dirN level to expand for wildcard queries.
private final int maxPartitionDepth;
// Pre-Drill-1.x behavior: expand partitions even for wildcard projection.
private final boolean useLegacyWildcardExpansion;
// Session-configurable prefix for partition columns (default "dir").
private final String partitionDesignator;
// Matches projected names like "dir0", "dir12" (case-insensitive).
private final Pattern partitionPattern;
// Matches the column-property form of partition names; differs from
// partitionPattern only when the designator was renamed.
private final Pattern partitionTypePattern;
// Implicit columns keyed by their (possibly renamed) column name.
private final Map<String, ImplicitFileColumn> colDefs = CaseInsensitiveMap.newHashMap();
// Implicit columns keyed by their schema property value.
private final Map<String, ImplicitFileColumn> typeDefs = CaseInsensitiveMap.newHashMap();
private final CustomErrorContext errorContext;
// File system used to obtain file metadata (e.g. last modified time);
// may be null in tests.
private final DrillFileSystem dfs;
/**
 * Creates a resolver configured from session options: the partition
 * column prefix (e.g. "dir") and the renameable implicit file column
 * names such as "filename" and "fqn".
 */
public ImplicitColumnResolver(ImplicitColumnOptions options, CustomErrorContext errorContext) {
  this.errorContext = errorContext;
  this.maxPartitionDepth = options.maxPartitionDepth;
  this.useLegacyWildcardExpansion = options.useLegacyWildcardExpansion;
  this.dfs = options.dfs;
  this.partitionDesignator = options.optionSet.getString(ExecConstants.FILESYSTEM_PARTITION_COLUMN_LABEL);
  this.partitionPattern = Pattern.compile(partitionDesignator + "(\\d+)", Pattern.CASE_INSENSITIVE);
  if (partitionDesignator.equals(ColumnMetadata.IMPLICIT_PARTITION_PREFIX)) {
    this.partitionTypePattern = partitionPattern;
  } else {
    // The partition prefix was renamed: the type-property pattern still
    // uses the default "dirN" prefix.
    this.partitionTypePattern = Pattern.compile(ColumnMetadata.IMPLICIT_PARTITION_PREFIX + "(\\d+)",
        Pattern.CASE_INSENSITIVE);
  }

  // File implicit columns: can be defined in the provided schema
  for (ImplicitFileColumns defn : ImplicitFileColumns.values()) {
    String colName = options.optionSet.getString(defn.optionName());
    if (!Strings.isNullOrEmpty(colName)) {
      this.colDefs.put(colName, defn);
    }
    this.typeDefs.put(defn.propertyValue(), defn);
  }

  // Internal implicit cols: cannot be defined in the provided schema
  for (ImplicitInternalFileColumns defn : ImplicitInternalFileColumns.values()) {
    String colName = options.optionSet.getString(defn.optionName());
    if (!Strings.isNullOrEmpty(colName)) {
      this.colDefs.put(colName, defn);
    }
  }
}
/**
 * Parses the projection list held by the tracker, resolving implicit and
 * partition columns into the scan schema.
 *
 * @return the markers and partition references discovered during parsing
 */
public ParseResult parse(ScanSchemaTracker tracker) {
  return new ImplicitColumnParser(this, tracker).parse();
}
/**
 * Builds the column name for the given partition depth, e.g. "dir2"
 * for the default designator.
 */
public String partitionName(int partition) {
  return new StringBuilder(partitionDesignator).append(partition).toString();
}
}
| |
/*
* Copyright 2000-2012 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.vfs.impl.jar;
import com.intellij.openapi.vfs.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
/**
* @author yole
*/
public class CoreJarFileSystem extends DeprecatedVirtualFileSystem implements ArchiveFileSystem {

  // Cache of jar handlers keyed by the local path of the jar file.
  // NOTE(review): plain HashMap with no synchronization — assumes
  // single-threaded access; confirm against callers.
  private final Map<String, CoreJarHandler> myHandlers = new HashMap<String, CoreJarHandler>();

  @NotNull
  @Override
  public String getProtocol() {
    return StandardFileSystems.JAR_PROTOCOL;
  }

  /**
   * Finds a file by a combined path of the form
   * {@code /path/to/archive.jar!/entry/path}.
   *
   * @throws IllegalArgumentException if the path lacks the "!/" separator
   */
  @Override
  public VirtualFile findFileByPath(@NotNull @NonNls String path) {
    int separatorIndex = path.indexOf("!/");
    if (separatorIndex < 0) {
      throw new IllegalArgumentException("path in JarFileSystem must contain a separator");
    }
    String localPath = path.substring(0, separatorIndex);
    String pathInJar = path.substring(separatorIndex+2);
    CoreJarHandler handler = getHandler(localPath);
    // As written, getHandler never returns null; the check is defensive
    // in case a subclass overrides it.
    if (handler == null)
      return null;
    return handler.findFileByPath(pathInJar);
  }

  /**
   * Returns the cached handler for the jar at the given local path,
   * creating it on first use.
   */
  @Nullable
  protected CoreJarHandler getHandler(String localPath) {
    CoreJarHandler handler = myHandlers.get(localPath);
    if (handler == null) {
      handler = new CoreJarHandler(this, localPath);
      myHandlers.put(localPath, handler);
    }
    return handler;
  }

  // Refresh is a no-op: jar contents are treated as static in the core
  // environment.
  @Override
  public void refresh(boolean asynchronous) {
  }

  @Override
  public VirtualFile refreshAndFindFileByPath(@NotNull String path) {
    // No refresh needed; delegate to plain lookup.
    return findFileByPath(path);
  }

  // ---------------------------------------------------------------------
  // The remaining methods are inert stubs: this minimal "core" file system
  // is effectively read-only and does not implement archive wrapping,
  // file attributes, content access, or mutation. Stubs return neutral
  // values (null / false / 0 / empty) and perform no work.
  // NOTE(review): several @NotNull-annotated methods return null
  // (getInputStream, getOutputStream, createChild*); callers presumably
  // never reach them in this environment — confirm before relying on them.
  // ---------------------------------------------------------------------

  @Nullable
  @Override
  public VirtualFile getVirtualFileForArchive(@Nullable VirtualFile entryVFile) {
    return null;
  }

  @Nullable
  @Override
  public VirtualFile findByPathWithSeparator(@Nullable VirtualFile entryVFile) {
    return null;
  }

  @Nullable
  @Override
  public ArchiveFile getArchiveWrapperFile(@NotNull VirtualFile entryVFile) throws IOException {
    return null;
  }

  @Override
  public boolean isMakeCopyOfJar(File originalFile) {
    return false;
  }

  @Override
  public boolean isMakeCopyForArchive(@NotNull File originalFile) {
    return false;
  }

  @Override
  public void setNoCopyJarForPath(String pathInJar) {
  }

  @Override
  public void addNoCopyArchiveForPath(@NotNull String path) {
  }

  @Override
  public void refreshWithoutFileWatcher(boolean asynchronous) {
  }

  @Override
  public boolean exists(@NotNull VirtualFile file) {
    return false;
  }

  @NotNull
  @Override
  public String[] list(@NotNull VirtualFile file) {
    return new String[0];
  }

  @Override
  public boolean isDirectory(@NotNull VirtualFile file) {
    return false;
  }

  @Override
  public long getTimeStamp(@NotNull VirtualFile file) {
    return 0;
  }

  @Override
  public void setTimeStamp(@NotNull VirtualFile file, long timeStamp) throws IOException {
  }

  @Override
  public boolean isWritable(@NotNull VirtualFile file) {
    return false;
  }

  @Override
  public void setWritable(@NotNull VirtualFile file, boolean writableFlag) throws IOException {
  }

  @Override
  public boolean isSymLink(@NotNull VirtualFile file) {
    return false;
  }

  @Nullable
  @Override
  public String resolveSymLink(@NotNull VirtualFile file) {
    return null;
  }

  @NotNull
  @Override
  public byte[] contentsToByteArray(@NotNull VirtualFile file) throws IOException {
    return new byte[0];
  }

  @NotNull
  @Override
  public InputStream getInputStream(@NotNull VirtualFile file) throws IOException {
    return null;
  }

  @NotNull
  @Override
  public OutputStream getOutputStream(@NotNull VirtualFile file, Object requestor, long modStamp, long timeStamp) throws IOException {
    return null;
  }

  @Override
  public long getLength(@NotNull VirtualFile file) {
    return 0;
  }

  @Nullable
  @Override
  public VirtualFile getLocalVirtualFileFor(@Nullable VirtualFile entryVFile) {
    return null;
  }

  @Nullable
  @Override
  public VirtualFile findLocalVirtualFileByPath(@NotNull String path) {
    return null;
  }

  @Override
  public void deleteFile(Object requestor, @NotNull VirtualFile vFile) throws IOException {
  }

  @Override
  public void moveFile(Object requestor, @NotNull VirtualFile vFile, @NotNull VirtualFile newParent) throws IOException {
  }

  @Override
  public void renameFile(Object requestor, @NotNull VirtualFile vFile, @NotNull String newName) throws IOException {
  }

  @Override
  public VirtualFile createChildFile(Object requestor, @NotNull VirtualFile vDir, @NotNull String fileName) throws IOException {
    return null;
  }

  @NotNull
  @Override
  public VirtualFile createChildDirectory(Object requestor, @NotNull VirtualFile vDir, @NotNull String dirName) throws IOException {
    return null;
  }

  @Override
  public VirtualFile copyFile(Object requestor,
                              @NotNull VirtualFile virtualFile,
                              @NotNull VirtualFile newParent,
                              @NotNull String copyName) throws IOException {
    return null;
  }
}
| |
/*
* Copyright (c) 2010-2013 the original author or authors
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
package org.jmxtrans.agent;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.io.*;
import java.nio.channels.FileChannel;
import java.nio.charset.StandardCharsets;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Map;
import java.util.TimeZone;
import static org.jmxtrans.agent.util.ConfigurationUtils.getBoolean;
import static org.jmxtrans.agent.util.ConfigurationUtils.getString;
/**
* @author <a href="mailto:cleclerc@cloudbees.com">Cyrille Le Clerc</a>
*/
/**
 * Output writer that collects the results of one collection cycle into a
 * temporary file, then replaces the target file with the temporary file in
 * {@link #postCollect()} (delete-then-rename, falling back to a copy).
 */
public class FileOverwriterOutputWriter extends AbstractOutputWriter {

    public final static String SETTING_FILE_NAME = "fileName";
    public final static String SETTING_FILE_NAME_DEFAULT_VALUE = "jmxtrans-agent.data";
    public final static String SETTING_SHOW_TIMESTAMP = "showTimeStamp";
    public final static Boolean SETTING_SHOW_TIMESTAMP_DEFAULT = false;

    protected Writer temporaryFileWriter;
    protected File temporaryFile;
    // Target file; overwritten at the end of each collection cycle.
    protected File file = new File(SETTING_FILE_NAME_DEFAULT_VALUE);
    // Whether each line is prefixed with an ISO-8601 (GMT) timestamp.
    protected Boolean showTimeStamp;

    // NOTE(review): SimpleDateFormat is not thread-safe and this instance is
    // shared across all writer instances. Access here is confined to the
    // synchronized methods of one instance; a second concurrent instance
    // would share it unsafely — confirm only one writer is configured.
    // (Removed the unused static Calendar field that was declared here.)
    private static DateFormat dfISO8601 = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");

    @Override
    public synchronized void postConstruct(Map<String, String> settings) {
        super.postConstruct(settings);
        dfISO8601.setTimeZone(TimeZone.getTimeZone("GMT"));
        file = new File(getString(settings, SETTING_FILE_NAME, SETTING_FILE_NAME_DEFAULT_VALUE));
        showTimeStamp = getBoolean(settings, SETTING_SHOW_TIMESTAMP, SETTING_SHOW_TIMESTAMP_DEFAULT);
        logger.log(getInfoLevel(), "FileOverwriterOutputWriter configured with file " + file.getAbsolutePath());
    }

    /**
     * Lazily creates the temporary file and a buffered UTF-8 writer on it.
     * The writer is re-created (truncating the file) after {@link #postCollect()}
     * nulls it out at the end of each cycle.
     */
    protected Writer getTemporaryFileWriter() throws IOException {
        if (temporaryFile == null) {
            temporaryFile = File.createTempFile("jmxtrans-agent-", ".data");
            temporaryFile.deleteOnExit();
            if (logger.isLoggable(getDebugLevel()))
                logger.log(getDebugLevel(), "Created temporary file " + temporaryFile.getAbsolutePath());
            temporaryFileWriter = null;
        }
        if (temporaryFileWriter == null) {
            // false -> truncate: each cycle rewrites the file from scratch
            temporaryFileWriter = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(temporaryFile, false), StandardCharsets.UTF_8));
        }
        return temporaryFileWriter;
    }

    @Override
    public void writeInvocationResult(String invocationName, Object value) throws IOException {
        writeQueryResult(invocationName, null, value);
    }

    /**
     * Appends one "name value" line (optionally timestamped) to the
     * temporary file. On failure the temporary writer and file are
     * discarded before the exception is rethrown.
     *
     * @param type ignored by this writer
     */
    public synchronized void writeQueryResult(@Nonnull String name, @Nullable String type, @Nullable Object value) throws IOException {
        try {
            if (showTimeStamp) {
                getTemporaryFileWriter().write("[" + dfISO8601.format(Calendar.getInstance().getTime()) + "] " + name + " " + value + "\n");
            } else {
                getTemporaryFileWriter().write(name + " " + value + "\n");
            }
        } catch (IOException e) {
            releaseTemporaryWriter();
            throw e;
        }
    }

    /**
     * Best-effort cleanup: closes the temporary writer and deletes the
     * temporary file, warning if the delete fails.
     */
    protected void releaseTemporaryWriter() {
        try {
            IoUtils.closeQuietly(getTemporaryFileWriter());
        } catch (IOException e) {
            // best-effort close; nothing useful to do on failure
        }
        if (temporaryFile != null) {
            boolean deleted = temporaryFile.delete();
            // Bug fix: the warning previously fired when the delete
            // SUCCEEDED; warn only when the file could not be deleted.
            if (!deleted) {
                logger.warning("Failure to delete " + temporaryFile);
            }
        }
        temporaryFile = null;
    }

    @Override
    public synchronized void postCollect() throws IOException {
        try {
            getTemporaryFileWriter().close();
            if (logger.isLoggable(getDebugLevel()))
                logger.log(getDebugLevel(), "Overwrite " + file.getAbsolutePath() + " by " + temporaryFile.getAbsolutePath());
            IoUtils.replaceFile(temporaryFile, file);
        } finally {
            // Force a fresh (truncating) writer on the next cycle.
            temporaryFileWriter = null;
        }
    }

    public static class IoUtils {

        /**
         * Simple implementation without chunking if the source file is big.
         *
         * @param source      file to copy from
         * @param destination file to copy to (must not be a directory)
         * @throws java.io.IOException if the copy fails or is incomplete
         */
        private static void doCopySmallFile(File source, File destination) throws IOException {
            if (destination.exists() && destination.isDirectory()) {
                throw new IOException("Can not copy file, destination is a directory: " + destination.getAbsolutePath());
            }
            FileInputStream fis = null;
            FileOutputStream fos = null;
            FileChannel input = null;
            FileChannel output = null;
            try {
                fis = new FileInputStream(source);
                fos = new FileOutputStream(destination, false);
                input = fis.getChannel();
                output = fos.getChannel();
                output.transferFrom(input, 0, input.size());
            } finally {
                closeQuietly(output);
                closeQuietly(input);
                closeQuietly(fis);
                closeQuietly(fos);
            }
            // transferFrom may copy fewer bytes than requested; verify.
            if (destination.length() != source.length()) {
                throw new IOException("Failed to copy content from '" +
                        source + "' (" + source.length() + "bytes) to '" + destination + "' (" + destination.length() + ")");
            }
        }

        public static void closeQuietly(Closeable closeable) {
            if (closeable == null)
                return;
            try {
                closeable.close();
            } catch (Exception e) {
                // ignore silently
            }
        }

        public static void closeQuietly(Writer writer) {
            if (writer == null)
                return;
            try {
                writer.close();
            } catch (Exception e) {
                // ignore silently
            }
        }

        /**
         * Needed for old JVMs where {@link java.io.InputStream} does not implement {@link java.io.Closeable}.
         */
        public static void closeQuietly(InputStream inputStream) {
            if (inputStream == null)
                return;
            try {
                inputStream.close();
            } catch (Exception e) {
                // ignore silently
            }
        }

        /**
         * Replaces {@code destination} with {@code source}: if the old
         * destination can be deleted, try a rename first (cheap, atomic on
         * most platforms); otherwise fall back to copying over it.
         */
        private static void replaceFile(File source, File destination) throws IOException {
            // Equivalent to the original branching: the destination "still
            // exists" only if it existed and could not be deleted.
            boolean destinationExists = destination.exists() && !destination.delete();
            if (destinationExists) {
                doCopySmallFile(source, destination);
            } else {
                // renameTo is platform-dependent (may fail across file
                // systems); copy as a fallback.
                boolean renamed = source.renameTo(destination);
                if (!renamed) {
                    doCopySmallFile(source, destination);
                }
            }
        }
    }
}
| |
package com.team9889;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.ElapsedTime;
import com.team9889.subsystems.Drive;
import com.team9889.subsystems.GlyphLypht;
import com.team9889.subsystems.Relic;
/**
* Created by joshua9889 on 4/17/17.
* Our Teleop Code
*/
@TeleOp
public class Teleop extends Team9889Linear {

    // Counts up from the start of teleop; used for the end-game auto-brake.
    ElapsedTime matchTime = new ElapsedTime();
    // NOTE(review): these fields are written from the GlyphRunnable and
    // IntakeRunnable threads and read from the main loop without
    // synchronization — appears to rely on benign races; confirm.
    boolean intaking = false;
    private GlyphLypht.Mode currentMode = GlyphLypht.Mode.Teleop;
    // Manual fine-adjust offset for the relic arm position.
    private int modifier = 0;

    /**
     * Main teleop entry point: drives the robot from gamepad 1 while two
     * background threads run the glyph lift and intake/relic controls.
     */
    public void runOpMode() throws InterruptedException {
        waitForStart(false);
        Robot.getDrive().DriveControlState(Drive.DriveControlStates.OPERATOR_CONTROL);

        // Timer for Displaying time on DS
        matchTime.reset();

        // Start control Threads
        new Thread(new GlyphRunnable()).start();
        new Thread(new IntakeRunnable()).start();

        // Loop while the match is happening
        while (opModeIsActive() && !isStopRequested()){
            // Retract Jewel arm
            Robot.getJewel().stop();

            // Arcade drive: left stick y = speed, right stick x = turn.
            double turn = gamepad1.right_stick_x;
            double speed = -gamepad1.left_stick_y;
            double left = speed + turn;
            double right = speed - turn;
            // Half speed while scoring over the back for finer control.
            if(currentMode == GlyphLypht.Mode.OvertheBack){
                left /= 2;
                right /= 2;
            }
            Robot.getDrive().setLeftRightPower(left, right);

            // Auto brake to make it easier
            // to get up on balance stone
            if(matchTime.seconds()>110)
                Robot.getDrive().DriveZeroPowerState(Drive.DriveZeroPowerStates.BRAKE);
            else
                Robot.getDrive().DriveZeroPowerState(Drive.DriveZeroPowerStates.FLOAT);
            idle();

            //Push Telemetry to phone
            telemetry.addData("Match Time", 120-matchTime.seconds());
            updateTelemetry();
            idle();
            sleep(20);
        }

        // Stop everything
        finalAction();
    }

    // Control Arm and Wrist
    private class GlyphRunnable implements Runnable {
        @Override
        public void run(){
            while(opModeIsActive() && !isStopRequested()){
                // Outtake one glyph and deploy single
                // glyph to level 2
                if(driver_station.outtakeAndLevel2()){
                    intaking=false;
                    Robot.getIntake().outtake();
                    sleep(700);
                    Robot.getLift().setServoPosition(0.4);
                    Robot.getLift().clamp();
                    Robot.getIntake().stopIntake();
                    sleep(400);
                    Robot.getLift().goTo(GlyphLypht.Mode.Level2);
                    // Wait up to 1s for the lift to arrive before retracting.
                    ElapsedTime t = new ElapsedTime();
                    while(opModeIsActive() && !Robot.getLift().isAtLocation() && t.milliseconds()<1000){
                        Thread.yield();
                    }
                    sleep(100);
                    Robot.getIntake().retract();
                    currentMode = GlyphLypht.Mode.Level2;
                }

                // Go to level 2
                if (driver_station.level2()) {
                    intaking=false;
                    // Coming from intake: grab the glyph before lifting.
                    if(currentMode == GlyphLypht.Mode.Intake){
                        Robot.getLift().setServoPosition(0.4);
                        Robot.getLift().clamp();
                        Robot.getIntake().stopIntake();
                        sleep(400);
                    }
                    Robot.getIntake().stopIntake();
                    Robot.getIntake().clearArm();
                    Robot.getLift().goTo(GlyphLypht.Mode.Level2);
                    while(opModeIsActive() && !Robot.getLift().isAtLocation()){
                        Thread.yield();
                    }
                    sleep(100);
                    Robot.getIntake().retract();
                    currentMode = GlyphLypht.Mode.Level2;
                }

                // Go to top level
                else if (driver_station.level4()) {
                    intaking=false;
                    if(currentMode == GlyphLypht.Mode.Intake){
                        Robot.getLift().setServoPosition(0.4);
                        Robot.getLift().clamp();
                        Robot.getIntake().stopIntake();
                        sleep(400);
                    }
                    // Pass through level 2 first unless already there.
                    if(currentMode!= GlyphLypht.Mode.Level2){
                        Robot.getIntake().stopIntake();
                        Robot.getIntake().clearArm();
                        Robot.getLift().goTo(GlyphLypht.Mode.Level2);
                        while(opModeIsActive() && !Robot.getLift().isAtLocation()){
                            Thread.yield();
                        }
                    }
                    Robot.getLift().goTo(GlyphLypht.Mode.Level4);
                    sleep(100);
                    Robot.getIntake().retract();
                    currentMode = GlyphLypht.Mode.Level4;
                }

                // Go to Intaking
                else if (driver_station.intake()) {
                    Robot.getIntake().intake();
                    intaking = true;
                    Robot.getLift().goTo(GlyphLypht.Mode.Intake);
                    currentMode = GlyphLypht.Mode.Intake;
                }

                // Over-the-back scoring
                else if(driver_station.overTheBack()) {
                    intaking=false;
                    if(currentMode == GlyphLypht.Mode.Intake){
                        Robot.getLift().setServoPosition(0.4);
                        Robot.getLift().clamp();
                        Robot.getIntake().stopIntake();
                        sleep(400);
                    }
                    Robot.getIntake().stopIntake();
                    Robot.getIntake().clearArm();
                    Robot.getLift().goTo(GlyphLypht.Mode.OvertheBack);
                    currentMode = GlyphLypht.Mode.OvertheBack;
                }
                idle();
            }
        }
    }

    // Control Intake
    private class IntakeRunnable implements Runnable{
        // Desired relic-arm state; only acted on after the first dpad press.
        private Relic.RelicState wantedState = Relic.RelicState.STOWED;
        private boolean firstRun = true;

        @Override
        public void run(){
            while(opModeIsActive() && !isStopRequested()){
                // Control the fingers
                if(driver_station.release()){
                    Robot.getLift().release();
                }

                // A quick preset to make it easier
                // to get a glyph in.
                if(driver_station.swivel()){
                    Robot.getIntake().leftRetract();
                    sleep(500);
                    Robot.getIntake().rightRetract();
                    sleep(500);
                    Robot.getIntake().intake();
                }

                // Control the intake
                if(driver_station.retract()) {
                    Robot.getIntake().retract();
                    intaking = false;
                }
                if(driver_station.outtake()){
                    intaking=false;
                    Robot.getIntake().outtake();
                    Robot.getIntake().clearArm();
                }

                // Relic arm presets on gamepad 2 dpad; each press resets
                // the manual modifier and arms the state machine.
                if(gamepad2.dpad_down) {
                    wantedState = Relic.RelicState.DEPLOYTOINTAKE;
                    modifier=0;
                    firstRun = false;
                } else if(gamepad2.dpad_right) {
                    wantedState = Relic.RelicState.RETRACTED;
                    modifier=0;
                    firstRun = false;
                } else if(gamepad2.dpad_left) {
                    wantedState = Relic.RelicState.THRIRDZONE;
                    modifier=0;
                    firstRun = false;
                } else if(gamepad2.dpad_up){
                    wantedState = Relic.RelicState.CLOSE;
                    modifier=0;
                    firstRun = false;
                }

                if(!firstRun) {
                    Robot.getRelic().goTo(wantedState);
                    // Auto-release once the relic reaches the third zone.
                    if (wantedState== Relic.RelicState.THRIRDZONE && Robot.getRelic().isInPosition()){
                        Robot.getRelic().openFinger();
                        wantedState= Relic.RelicState.RETRACTED;
                    }
                    if(gamepad2.left_bumper)
                        Robot.getRelic().elbowRetract();
                    // Sticks nudge the arm: right stick = coarse, left = fine.
                    if(gamepad2.right_stick_y < -0.2)
                        modifier+=20;
                    else if(gamepad2.right_stick_y>0.2)
                        modifier-=20;
                    if(gamepad2.left_stick_y < -0.2)
                        modifier+=4;
                    else if(gamepad2.left_stick_y>0.2)
                        modifier-=4;
                    Robot.getRelic().setModifier(modifier);
                }
                if(gamepad2.left_stick_button)
                    Robot.getRelic().closeFinger();
                else if(gamepad2.right_stick_button)
                    Robot.getRelic().openFinger();
                idle();
            }
        }
    }
}
| |
/*
* Copyright 2005 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.decisiontable.parser.xls;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellValue;
import org.apache.poi.ss.usermodel.DataFormatter;
import org.apache.poi.ss.usermodel.FormulaEvaluator;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.apache.poi.ss.util.CellRangeAddress;
import org.drools.decisiontable.parser.DecisionTableParser;
import org.drools.template.parser.DataListener;
import org.drools.template.parser.DecisionTableParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.lang.String.format;
/**
* Parse an excel spreadsheet, pushing cell info into the SheetListener interface.
*/
public class ExcelParser
    implements
    DecisionTableParser {

    private static final Logger log = LoggerFactory.getLogger( ExcelParser.class );

    public static final String DEFAULT_RULESHEET_NAME = "Decision Tables";
    // Sheet name -> listeners to receive that sheet's cell events.
    private Map<String, List<DataListener>> _listeners = new HashMap<String, List<DataListener>>();
    // When true, only the first sheet is parsed (single-listener constructors).
    private boolean _useFirstSheet;

    /**
     * Define a map of sheet name to listener handlers.
     * @param sheetListeners map of String to SheetListener
     */
    public ExcelParser( final Map<String, List<DataListener>> sheetListeners ) {
        this._listeners = sheetListeners;
    }

    public ExcelParser( final List<DataListener> sheetListeners ) {
        this._listeners.put( ExcelParser.DEFAULT_RULESHEET_NAME,
                             sheetListeners );
        this._useFirstSheet = true;
    }

    public ExcelParser( final DataListener listener ) {
        List<DataListener> listeners = new ArrayList<DataListener>();
        listeners.add( listener );
        this._listeners.put( ExcelParser.DEFAULT_RULESHEET_NAME,
                             listeners );
        this._useFirstSheet = true;
    }

    /**
     * Parses the workbook from the stream, pushing cell events to the
     * registered listeners.
     *
     * @throws DecisionTableParseException on unreadable or badly formatted
     *         input, or (via processSheet) if a named sheet is missing
     */
    public void parseFile( InputStream inStream ) {
        try {
            Workbook workbook = WorkbookFactory.create( inStream );

            if ( _useFirstSheet ) {
                Sheet sheet = workbook.getSheetAt( 0 );
                processSheet( sheet, _listeners.get( DEFAULT_RULESHEET_NAME ) );
            } else {
                for ( String sheetName : _listeners.keySet() ) {
                    Sheet sheet = workbook.getSheet( sheetName );
                    if ( sheet == null ) {
                        throw new IllegalStateException( "Could not find the sheetName (" + sheetName
                                                                 + ") in the workbook sheetNames." );
                    }
                    processSheet( sheet,
                                  _listeners.get( sheetName ) );
                }
            }
        } catch ( InvalidFormatException e ) {
            throw new DecisionTableParseException( "An error occurred opening the workbook. It is possible that the encoding of the document did not match the encoding of the reader.",
                                                   e );
        } catch ( IOException e ) {
            throw new DecisionTableParseException( "Failed to open Excel stream, " + "please check that the content is xls97 format.",
                                                   e );
        }
    }

    // Snapshot of all merged regions in the sheet, checked per cell.
    private CellRangeAddress[] getMergedCells( Sheet sheet ) {
        CellRangeAddress[] ranges = new CellRangeAddress[ sheet.getNumMergedRegions() ];
        for ( int i = 0; i < ranges.length; i++ ) {
            ranges[ i ] = sheet.getMergedRegion( i );
        }
        return ranges;
    }

    /**
     * Walks every row/cell of the sheet, emitting newRow/newCell events.
     * Merged cells report the top-left cell's value and column; formula
     * cells are evaluated (falling back to the cached value when an
     * external link cannot be resolved); integral numeric values are
     * formatted, fractional ones emitted via String.valueOf.
     */
    private void processSheet( Sheet sheet,
                               List<? extends DataListener> listeners ) {
        int maxRows = sheet.getLastRowNum();

        CellRangeAddress[] mergedRanges = getMergedCells( sheet );
        DataFormatter formatter = new DataFormatter( Locale.ENGLISH );
        FormulaEvaluator formulaEvaluator = sheet.getWorkbook().getCreationHelper().createFormulaEvaluator();

        for ( int i = 0; i <= maxRows; i++ ) {
            Row row = sheet.getRow( i );
            // Empty (null) rows still produce a newRow event with 0 columns.
            int lastCellNum = row != null ? row.getLastCellNum() : 0;
            newRow( listeners, i, lastCellNum );

            for ( int cellNum = 0; cellNum < lastCellNum; cellNum++ ) {
                Cell cell = row.getCell( cellNum );
                if ( cell == null ) {
                    continue;
                }
                double num = 0;

                CellRangeAddress merged = getRangeIfMerged( cell,
                                                            mergedRanges );

                if ( merged != null ) {
                    Cell topLeft = sheet.getRow( merged.getFirstRow() ).getCell( merged.getFirstColumn() );
                    newCell( listeners,
                             i,
                             cellNum,
                             formatter.formatCellValue( topLeft ),
                             topLeft.getColumnIndex() );
                } else {
                    switch ( cell.getCellType() ) {
                        case Cell.CELL_TYPE_FORMULA:
                            String cellValue = null;
                            try {
                                CellValue cv = formulaEvaluator.evaluate( cell );
                                cellValue = getCellValue( cv );
                                newCell( listeners,
                                         i,
                                         cellNum,
                                         cellValue,
                                         DataListener.NON_MERGED );
                            } catch ( RuntimeException e ) {
                                // This is thrown if an external link cannot be resolved, so try the cached value
                                log.warn( "Cannot resolve externally linked value: " + formatter.formatCellValue( cell ) );
                                String cachedValue = tryToReadCachedValue( cell );
                                newCell( listeners,
                                         i,
                                         cellNum,
                                         cachedValue,
                                         DataListener.NON_MERGED );
                            }
                            break;
                        case Cell.CELL_TYPE_NUMERIC:
                            num = cell.getNumericCellValue();
                            // Deliberate fall-through: numeric cells read
                            // their value, then share the default
                            // formatting logic below.
                        default:
                            if ( num - Math.round( num ) != 0 ) {
                                // Fractional numeric value: emit raw.
                                newCell( listeners,
                                         i,
                                         cellNum,
                                         String.valueOf( num ),
                                         DataListener.NON_MERGED );
                            } else {
                                // Integral or non-numeric: use the
                                // locale-aware display format.
                                newCell( listeners,
                                         i,
                                         cellNum,
                                         formatter.formatCellValue( cell ),
                                         DataListener.NON_MERGED );
                            }
                    }
                }
            }
        }
        finishSheet( listeners );
    }

    /**
     * Reads the cached (last-computed) result of a formula cell, used when
     * live evaluation fails.
     *
     * @throws DecisionTableParseException if the cached type is unknown
     */
    private String tryToReadCachedValue( Cell cell ) {
        DataFormatter formatter = new DataFormatter( Locale.ENGLISH );
        String cachedValue;
        switch ( cell.getCachedFormulaResultType() ) {
            case Cell.CELL_TYPE_NUMERIC:
                double num = cell.getNumericCellValue();
                if ( num - Math.round( num ) != 0 ) {
                    cachedValue = String.valueOf( num );
                } else {
                    cachedValue = formatter.formatCellValue( cell );
                }
                break;

            case Cell.CELL_TYPE_STRING:
                cachedValue = cell.getStringCellValue();
                break;

            case Cell.CELL_TYPE_BOOLEAN:
                cachedValue = String.valueOf( cell.getBooleanCellValue() );
                break;

            case Cell.CELL_TYPE_ERROR:
                cachedValue = String.valueOf( cell.getErrorCellValue() );
                break;

            default:
                throw new DecisionTableParseException( format( "Can't read cached value for cell[row=%d, col=%d, value=%s]!",
                                                               cell.getRowIndex(), cell.getColumnIndex(), cell ) );
        }
        return cachedValue;
    }

    // Converts an evaluated CellValue to text; strings are the fallback.
    private String getCellValue( final CellValue cv ) {
        switch ( cv.getCellType() ) {
            case Cell.CELL_TYPE_BOOLEAN:
                return Boolean.toString( cv.getBooleanValue() );
            case Cell.CELL_TYPE_NUMERIC:
                return String.valueOf( cv.getNumberValue() );
        }
        return cv.getStringValue();
    }

    // Returns the merged region containing the cell, or null if unmerged.
    CellRangeAddress getRangeIfMerged( Cell cell,
                                       CellRangeAddress[] mergedRanges ) {
        for ( int i = 0; i < mergedRanges.length; i++ ) {
            CellRangeAddress r = mergedRanges[ i ];
            if ( r.isInRange( cell.getRowIndex(), cell.getColumnIndex() ) ) {
                return r;
            }
        }
        return null;
    }

    private void finishSheet( List<? extends DataListener> listeners ) {
        for ( DataListener listener : listeners ) {
            listener.finishSheet();
        }
    }

    private void newRow( List<? extends DataListener> listeners,
                         int row,
                         int cols ) {
        for ( DataListener listener : listeners ) {
            listener.newRow( row,
                             cols );
        }
    }

    public void newCell( List<? extends DataListener> listeners,
                         int row,
                         int column,
                         String value,
                         int mergedColStart ) {
        for ( DataListener listener : listeners ) {
            listener.newCell( row,
                              column,
                              value,
                              mergedColStart );
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.transport;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Executors;
import java.util.concurrent.SynchronousQueue;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.cql3.QueryOptions;
import org.apache.cassandra.db.ConsistencyLevel;
import org.apache.cassandra.security.SSLFactory;
import org.apache.cassandra.transport.messages.CredentialsMessage;
import org.apache.cassandra.transport.messages.ErrorMessage;
import org.apache.cassandra.transport.messages.ExecuteMessage;
import org.apache.cassandra.transport.messages.PrepareMessage;
import org.apache.cassandra.transport.messages.QueryMessage;
import org.apache.cassandra.transport.messages.ResultMessage;
import org.apache.cassandra.transport.messages.StartupMessage;
import org.apache.cassandra.utils.MD5Digest;
import org.jboss.netty.bootstrap.ClientBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory;
import org.jboss.netty.handler.ssl.SslHandler;
import org.jboss.netty.logging.InternalLoggerFactory;
import org.jboss.netty.logging.Slf4JLoggerFactory;
import static org.apache.cassandra.config.EncryptionOptions.ClientEncryptionOptions;
public class SimpleClient
{
static
{
    // Route Netty's internal logging through SLF4J.
    InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory());
}

private static final Logger logger = LoggerFactory.getLogger(SimpleClient.class);

public final String host;
public final int port;
private final ClientEncryptionOptions encryptionOptions;

protected final ResponseHandler responseHandler = new ResponseHandler();
protected final Connection.Tracker tracker = new ConnectionTracker();
// We don't track connection really, so we don't need one Connection per channel
protected final Connection connection = new Connection(null, Server.CURRENT_VERSION, tracker);
protected ClientBootstrap bootstrap;
protected Channel channel;
protected ChannelFuture lastWriteFuture;

// Always hands out the single shared connection above.
private final Connection.Factory connectionFactory = new Connection.Factory()
{
    public Connection newConnection(Channel channel, int version)
    {
        // This simple client speaks only the current protocol version.
        assert version == Server.CURRENT_VERSION;
        return connection;
    }
};
/**
 * Creates a client for the given endpoint with the given client
 * encryption (SSL) options. Does not connect; call {@code connect}.
 */
public SimpleClient(String host, int port, ClientEncryptionOptions encryptionOptions)
{
    this.host = host;
    this.port = port;
    this.encryptionOptions = encryptionOptions;
}

// Convenience: unencrypted connection with default options.
public SimpleClient(String host, int port)
{
    this(host, port, new ClientEncryptionOptions());
}
public void connect(boolean useCompression) throws IOException
{
establishConnection();
Map<String, String> options = new HashMap<String, String>();
options.put(StartupMessage.CQL_VERSION, "3.0.0");
if (useCompression)
{
options.put(StartupMessage.COMPRESSION, "snappy");
connection.setCompressor(FrameCompressor.SnappyCompressor.instance);
}
execute(new StartupMessage(options));
}
protected void establishConnection() throws IOException
{
// Configure the client.
bootstrap = new ClientBootstrap(
new NioClientSocketChannelFactory(
Executors.newCachedThreadPool(),
Executors.newCachedThreadPool()));
bootstrap.setOption("tcpNoDelay", true);
// Configure the pipeline factory.
if(encryptionOptions.enabled)
{
bootstrap.setPipelineFactory(new SecurePipelineFactory());
}
else
{
bootstrap.setPipelineFactory(new PipelineFactory());
}
ChannelFuture future = bootstrap.connect(new InetSocketAddress(host, port));
// Wait until the connection attempt succeeds or fails.
channel = future.awaitUninterruptibly().getChannel();
if (!future.isSuccess())
{
bootstrap.releaseExternalResources();
throw new IOException("Connection Error", future.getCause());
}
}
public void login(Map<String, String> credentials)
{
CredentialsMessage msg = new CredentialsMessage();
msg.credentials.putAll(credentials);
execute(msg);
}
public ResultMessage execute(String query, ConsistencyLevel consistency)
{
return execute(query, Collections.<ByteBuffer>emptyList(), consistency);
}
public ResultMessage execute(String query, List<ByteBuffer> values, ConsistencyLevel consistencyLevel)
{
Message.Response msg = execute(new QueryMessage(query, new QueryOptions(consistencyLevel, values)));
assert msg instanceof ResultMessage;
return (ResultMessage)msg;
}
public ResultMessage.Prepared prepare(String query)
{
Message.Response msg = execute(new PrepareMessage(query));
assert msg instanceof ResultMessage.Prepared;
return (ResultMessage.Prepared)msg;
}
public ResultMessage executePrepared(byte[] statementId, List<ByteBuffer> values, ConsistencyLevel consistency)
{
Message.Response msg = execute(new ExecuteMessage(MD5Digest.wrap(statementId), new QueryOptions(consistency, values)));
assert msg instanceof ResultMessage;
return (ResultMessage)msg;
}
public void close()
{
// Wait until all messages are flushed before closing the channel.
if (lastWriteFuture != null)
lastWriteFuture.awaitUninterruptibly();
// Close the connection. Make sure the close operation ends because
// all I/O operations are asynchronous in Netty.
channel.close().awaitUninterruptibly();
// Shut down all thread pools to exit.
bootstrap.releaseExternalResources();
}
protected Message.Response execute(Message.Request request)
{
try
{
request.attach(connection);
lastWriteFuture = channel.write(request);
Message.Response msg = responseHandler.responses.take();
if (msg instanceof ErrorMessage)
throw new RuntimeException((Throwable)((ErrorMessage)msg).error);
return msg;
}
catch (InterruptedException e)
{
throw new RuntimeException(e);
}
}
// Stateless handlers
private static final Message.ProtocolDecoder messageDecoder = new Message.ProtocolDecoder();
private static final Message.ProtocolEncoder messageEncoder = new Message.ProtocolEncoder();
private static final Frame.Decompressor frameDecompressor = new Frame.Decompressor();
private static final Frame.Compressor frameCompressor = new Frame.Compressor();
private static final Frame.Encoder frameEncoder = new Frame.Encoder();
private static class ConnectionTracker implements Connection.Tracker
{
public void addConnection(Channel ch, Connection connection) {}
public void closeAll() {}
}
private class PipelineFactory implements ChannelPipelineFactory
{
public ChannelPipeline getPipeline() throws Exception
{
ChannelPipeline pipeline = Channels.pipeline();
//pipeline.addLast("debug", new LoggingHandler());
pipeline.addLast("frameDecoder", new Frame.Decoder(connectionFactory));
pipeline.addLast("frameEncoder", frameEncoder);
pipeline.addLast("frameDecompressor", frameDecompressor);
pipeline.addLast("frameCompressor", frameCompressor);
pipeline.addLast("messageDecoder", messageDecoder);
pipeline.addLast("messageEncoder", messageEncoder);
pipeline.addLast("handler", responseHandler);
return pipeline;
}
}
private class SecurePipelineFactory extends PipelineFactory
{
private final SSLContext sslContext;
public SecurePipelineFactory() throws IOException
{
this.sslContext = SSLFactory.createSSLContext(encryptionOptions, true);
}
public ChannelPipeline getPipeline() throws Exception
{
SSLEngine sslEngine = sslContext.createSSLEngine();
sslEngine.setUseClientMode(true);
sslEngine.setEnabledCipherSuites(encryptionOptions.cipher_suites);
ChannelPipeline pipeline = super.getPipeline();
pipeline.addFirst("ssl", new SslHandler(sslEngine));
return pipeline;
}
}
private static class ResponseHandler extends SimpleChannelUpstreamHandler
{
public final BlockingQueue<Message.Response> responses = new SynchronousQueue<Message.Response>(true);
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
{
assert e.getMessage() instanceof Message.Response;
try
{
responses.put((Message.Response)e.getMessage());
}
catch (InterruptedException ie)
{
throw new RuntimeException(ie);
}
}
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception
{
if (this == ctx.getPipeline().getLast())
logger.error("Exception in response", e.getCause());
ctx.sendUpstream(e);
}
}
}
| |
package example.zxing;
import android.app.Activity;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.support.v7.app.ActionBar;
import android.support.v7.app.ActionBarActivity;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.CheckBox;
import android.widget.ListView;
import android.widget.Toast;
/**
* Fragment used for managing interactions for and presentation of a navigation drawer.
* See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
* design guidelines</a> for a complete explanation of the behaviors implemented here.
*/
public class NavigationDrawerFragment extends Fragment {

    /**
     * Remember the position of the selected item.
     */
    private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";

    /**
     * Per the design guidelines, you should show the drawer on launch until the user manually
     * expands it. This shared preference tracks this.
     */
    private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";

    /**
     * A pointer to the current callbacks instance (the Activity).
     */
    private NavigationDrawerCallbacks mCallbacks;

    /**
     * Helper component that ties the action bar to the navigation drawer.
     */
    private ActionBarDrawerToggle mDrawerToggle;

    private DrawerLayout mDrawerLayout;
    private ListView mDrawerListView;
    private View mFragmentContainerView;

    private int mCurrentSelectedPosition = 0;
    private boolean mFromSavedInstanceState;
    private boolean mUserLearnedDrawer;

    public NavigationDrawerFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Read in the flag indicating whether or not the user has demonstrated awareness of the
        // drawer. See PREF_USER_LEARNED_DRAWER for details.
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);

        if (savedInstanceState != null) {
            mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
            mFromSavedInstanceState = true;
        }

        // Select either the default item (0) or the last selected item.
        selectItem(mCurrentSelectedPosition);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(true);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        mDrawerListView = (ListView) inflater.inflate(
                R.layout.fragment_navigation_drawer, container, false);
        mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                selectItem(position);
            }
        });
        mDrawerListView.setAdapter(new ArrayAdapter<String>(
                getActionBar().getThemedContext(),
                android.R.layout.simple_list_item_activated_1,
                android.R.id.text1,
                new String[]{
                        getString(R.string.title_homepage),
                        getString(R.string.title_qrScanner),
                        "My Profile",
                        "Roaming Info",
                        "Number Info"
                }));
        mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
        return mDrawerListView;
    }

    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;

        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // set up the drawer's list view with items and click listener

        ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);

        // ActionBarDrawerToggle ties together the the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(),                    /* host Activity */
                mDrawerLayout,                    /* DrawerLayout object */
                R.drawable.ic_drawer,             /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open,  /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close  /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                if (!isAdded()) {
                    return;
                }

                getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                if (!isAdded()) {
                    return;
                }

                if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent auto-showing
                    // the navigation drawer automatically in the future.
                    mUserLearnedDrawer = true;
                    SharedPreferences sp = PreferenceManager
                            .getDefaultSharedPreferences(getActivity());
                    sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
                }

                getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }
        };

        // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
        // per the navigation drawer design guidelines.
        if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
            mDrawerLayout.openDrawer(mFragmentContainerView);
        }

        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });

        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    private void selectItem(int position) {
        mCurrentSelectedPosition = position;
        if (mDrawerListView != null) {
            mDrawerListView.setItemChecked(position, true);
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
        if (mCallbacks != null) {
            mCallbacks.onNavigationDrawerItemSelected(position);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            mCallbacks = (NavigationDrawerCallbacks) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mCallbacks = null;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // If the drawer is open, show the global app actions in the action bar. See also
        // showGlobalContextActionBar, which controls the top-left area of the action bar.
        if (mDrawerLayout != null && isDrawerOpen()) {
            inflater.inflate(R.menu.global, menu);
            showGlobalContextActionBar();
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        if (item.getItemId() == R.id.action_example) {
            return true;
        }
        else if (item.getItemId() == R.id.action_settings) {
            F1.newInstance().show(getFragmentManager(), null);
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the global app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    private ActionBar getActionBar() {
        return ((ActionBarActivity) getActivity()).getSupportActionBar();
    }

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public interface NavigationDrawerCallbacks {
        /**
         * Called when an item in the navigation drawer is selected.
         */
        void onNavigationDrawerItemSelected(int position);
    }

    /** Settings dialog toggling the "editable" and "nexmoValid" profile flags. */
    public static class F1 extends DialogFragment {
        SharedPreferences sharedPreferences;
        // Primitive booleans: the preference reads always supply a default, so
        // the previous Boolean wrappers could never usefully be null.
        boolean editable, nexmoValid;

        public static F1 newInstance() {
            F1 f1 = new F1();
            f1.setStyle(DialogFragment.STYLE_NO_FRAME, android.R.style.Theme_DeviceDefault_Dialog);
            return f1;
        }

        @Override
        public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
            // Remove the default background
            getDialog().getWindow().setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));
            // Inflate the new view with margins and background
            View v = inflater.inflate(R.layout.popup_layout, container, false);

            // MODE_PRIVATE is a static constant; access it statically rather than
            // through the Activity instance.
            sharedPreferences = getActivity().getSharedPreferences("Profile", Activity.MODE_PRIVATE);
            editable = sharedPreferences.getBoolean("editable", false);
            nexmoValid = sharedPreferences.getBoolean("nexmoValid", false);
            final SharedPreferences.Editor editor = sharedPreferences.edit();

            // Reflect the stored flags directly; no need for if/else branches.
            ((CheckBox) v.findViewById(R.id.cbEditing)).setChecked(editable);
            ((CheckBox) v.findViewById(R.id.cbNexmoValidation)).setChecked(nexmoValid);

            v.findViewById(R.id.cbEditing).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    Log.d("asd", ((CheckBox) v).isChecked() + "");
                    if (((CheckBox) v).isChecked()) {
                        editor.putBoolean("editable", true);
                        // Enabling editing clears the stored phone number.
                        editor.putString("phonenum", "");
                    }
                    else {
                        editor.putBoolean("editable", false);
                    }
                    // apply() persists asynchronously and avoids blocking the UI thread
                    // (lint-recommended over commit()).
                    editor.apply();
                }
            });
            v.findViewById(R.id.cbNexmoValidation).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    Log.d("asd", ((CheckBox) v).isChecked() + "");
                    editor.putBoolean("nexmoValid", ((CheckBox) v).isChecked());
                    editor.apply();
                }
            });
            v.findViewById(R.id.popup_root).setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    Log.d("asd", v.getId() + "root");
                    dismiss();
                }
            });
            return v;
        }
    }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.ui;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.ui.laf.darcula.DarculaInstaller;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ComponentsPackage;
import com.intellij.openapi.editor.colors.ex.DefaultColorSchemesManager;
import com.intellij.openapi.editor.ex.util.EditorUtil;
import com.intellij.openapi.options.BaseConfigurable;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.ui.ComboBox;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.wm.ex.WindowManagerEx;
import com.intellij.ui.components.JBCheckBox;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Dictionary;
import java.util.Hashtable;
/**
* @author Eugene Belyaev
*/
public class AppearanceConfigurable extends BaseConfigurable implements SearchableConfigurable {
private MyComponent myComponent;
  /** Returns the localized title of this settings page ("Appearance"). */
  public String getDisplayName() {
    return IdeBundle.message("title.appearance");
  }
  /** Eagerly creates the backing form; {@code initComponent()} re-creates it if later nulled. */
  public AppearanceConfigurable() {
    myComponent = new MyComponent();
  }
private void initComponent() {
if (myComponent == null) {
myComponent = new MyComponent();
}
}
  /**
   * Builds and wires the Swing UI for the Appearance page: font combos,
   * look-and-feel selector, antialiasing controls, tooltip-delay slider and
   * window-transparency controls. Returns the root panel of the form.
   */
  public JComponent createComponent() {
    initComponent();

    // Font family list honors the registry flag restricting it to family names only.
    DefaultComboBoxModel aModel = new DefaultComboBoxModel(UIUtil.getValidFontNames(Registry.is("ide.settings.appearance.font.family.only")));
    myComponent.myFontCombo.setModel(aModel);
    myComponent.myFontSizeCombo.setModel(new DefaultComboBoxModel(UIUtil.getStandardFontSizes()));
    myComponent.myPresentationModeFontSize.setModel(new DefaultComboBoxModel(UIUtil.getStandardFontSizes()));
    // Editable so the user may type a size not in the standard list; parsed by getIntValue().
    myComponent.myFontSizeCombo.setEditable(true);
    myComponent.myPresentationModeFontSize.setEditable(true);

    myComponent.myLafComboBox.setModel(new DefaultComboBoxModel(LafManager.getInstance().getInstalledLookAndFeels()));
    myComponent.myLafComboBox.setRenderer(new LafComboBoxRenderer());

    // LCD-rendering scope is only meaningful while antialiasing is on; keep them in sync.
    myComponent.myAntialiasingCheckBox.setSelected(UISettings.getInstance().ANTIALIASING_IN_IDE);
    myComponent.myLCDRenderingScopeCombo.setEnabled(UISettings.getInstance().ANTIALIASING_IN_IDE);

    myComponent.myAntialiasingCheckBox.addActionListener(new ActionListener() {
      @Override
      public void actionPerformed(ActionEvent e) {
        myComponent.myLCDRenderingScopeCombo.setEnabled(myComponent.myAntialiasingCheckBox.isSelected());
      }
    });

    myComponent.myLCDRenderingScopeCombo.setModel(new DefaultComboBoxModel(LCDRenderingScope.values()));
    myComponent.myLCDRenderingScopeCombo.setSelectedItem(UISettings.getInstance().LCD_RENDERING_SCOPE);

    // Tooltip-delay slider: labeled 0..1200 ms with 100 ms minor ticks.
    Dictionary<Integer, JComponent> delayDictionary = new Hashtable<Integer, JComponent>();
    delayDictionary.put(new Integer(0), new JLabel("0"));
    delayDictionary.put(new Integer(1200), new JLabel("1200"));
    //delayDictionary.put(new Integer(2400), new JLabel("2400"));
    myComponent.myInitialTooltipDelaySlider.setLabelTable(delayDictionary);
    UIUtil.setSliderIsFilled(myComponent.myInitialTooltipDelaySlider, Boolean.TRUE);
    myComponent.myInitialTooltipDelaySlider.setMinimum(0);
    myComponent.myInitialTooltipDelaySlider.setMaximum(1200);
    myComponent.myInitialTooltipDelaySlider.setPaintLabels(true);
    myComponent.myInitialTooltipDelaySlider.setPaintTicks(true);
    myComponent.myInitialTooltipDelaySlider.setPaintTrack(true);
    myComponent.myInitialTooltipDelaySlider.setMajorTickSpacing(1200);
    myComponent.myInitialTooltipDelaySlider.setMinorTickSpacing(100);

    // Alpha-mode delay/ratio controls follow the enable checkbox.
    myComponent.myEnableAlphaModeCheckBox.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        boolean state = myComponent.myEnableAlphaModeCheckBox.isSelected();
        myComponent.myAlphaModeDelayTextField.setEnabled(state);
        myComponent.myAlphaModeRatioSlider.setEnabled(state);
      }
    });
    myComponent.myAlphaModeRatioSlider.setSize(100, 50);
    @SuppressWarnings({"UseOfObsoleteCollectionType"})
    Dictionary<Integer, JComponent> dictionary = new Hashtable<Integer, JComponent>();
    dictionary.put(new Integer(0), new JLabel("0%"));
    dictionary.put(new Integer(50), new JLabel("50%"));
    dictionary.put(new Integer(100), new JLabel("100%"));
    myComponent.myAlphaModeRatioSlider.setLabelTable(dictionary);
    UIUtil.setSliderIsFilled(myComponent.myAlphaModeRatioSlider, Boolean.TRUE);
    myComponent.myAlphaModeRatioSlider.setPaintLabels(true);
    myComponent.myAlphaModeRatioSlider.setPaintTicks(true);
    myComponent.myAlphaModeRatioSlider.setPaintTrack(true);
    myComponent.myAlphaModeRatioSlider.setMajorTickSpacing(50);
    myComponent.myAlphaModeRatioSlider.setMinorTickSpacing(10);
    // Live tooltip showing the current ratio while dragging.
    myComponent.myAlphaModeRatioSlider.addChangeListener(new ChangeListener() {
      public void stateChanged(ChangeEvent e) {
        myComponent.myAlphaModeRatioSlider.setToolTipText(myComponent.myAlphaModeRatioSlider.getValue() + "%");
      }
    });

    // Hide the whole transparency section on platforms without alpha-mode support.
    myComponent.myTransparencyPanel.setVisible(WindowManagerEx.getInstanceEx().isAlphaModeSupported());
    return myComponent.myPanel;
  }
  /**
   * Flushes the form state into {@link UISettings}, the LafManager and the Registry.
   * Two dirty flags are tracked separately:
   * {@code update} — fires {@code fireUISettingsChanged()} at the end;
   * {@code shouldUpdateUI} — additionally triggers {@code lafManager.updateUI()}
   * (font / antialiasing / L&F changes that require re-skinning the whole UI).
   */
  public void apply() {
    initComponent();
    UISettings settings = UISettings.getInstance();
    int _fontSize = getIntValue(myComponent.myFontSizeCombo, settings.FONT_SIZE);
    int _presentationFontSize = getIntValue(myComponent.myPresentationModeFontSize, settings.PRESENTATION_MODE_FONT_SIZE);
    boolean shouldUpdateUI = false;
    String _fontFace = (String)myComponent.myFontCombo.getSelectedItem();
    LafManager lafManager = LafManager.getInstance();
    if (_fontSize != settings.FONT_SIZE || !settings.FONT_FACE.equals(_fontFace)) {
      settings.FONT_SIZE = _fontSize;
      settings.FONT_FACE = _fontFace;
      shouldUpdateUI = true;
    }
    if (_presentationFontSize != settings.PRESENTATION_MODE_FONT_SIZE) {
      settings.PRESENTATION_MODE_FONT_SIZE = _presentationFontSize;
      shouldUpdateUI = true;
    }
    if (myComponent.myAntialiasingCheckBox.isSelected() != settings.ANTIALIASING_IN_IDE) {
      settings.ANTIALIASING_IN_IDE = myComponent.myAntialiasingCheckBox.isSelected();
      shouldUpdateUI = true;
    }
    if (!myComponent.myLCDRenderingScopeCombo.getSelectedItem().equals(settings.LCD_RENDERING_SCOPE)) {
      settings.LCD_RENDERING_SCOPE = (LCDRenderingScope)myComponent.myLCDRenderingScopeCombo.getSelectedItem();
      shouldUpdateUI = true;
    }
    settings.ANIMATE_WINDOWS = myComponent.myAnimateWindowsCheckBox.isSelected();
    // From here on, each setting is copied unconditionally while the dirty flag
    // is OR-ed with "did the value actually change" beforehand.
    boolean update = settings.SHOW_TOOL_WINDOW_NUMBERS != myComponent.myWindowShortcutsCheckBox.isSelected();
    settings.SHOW_TOOL_WINDOW_NUMBERS = myComponent.myWindowShortcutsCheckBox.isSelected();
    // Note the inversion: the checkbox says "show", the setting says "hide".
    update |= settings.HIDE_TOOL_STRIPES != !myComponent.myShowToolStripesCheckBox.isSelected();
    settings.HIDE_TOOL_STRIPES = !myComponent.myShowToolStripesCheckBox.isSelected();
    update |= settings.SHOW_ICONS_IN_MENUS != myComponent.myCbDisplayIconsInMenu.isSelected();
    settings.SHOW_ICONS_IN_MENUS = myComponent.myCbDisplayIconsInMenu.isSelected();
    update |= settings.SHOW_MEMORY_INDICATOR != myComponent.myShowMemoryIndicatorCheckBox.isSelected();
    settings.SHOW_MEMORY_INDICATOR = myComponent.myShowMemoryIndicatorCheckBox.isSelected();
    update |= settings.ALLOW_MERGE_BUTTONS != myComponent.myAllowMergeButtons.isSelected();
    settings.ALLOW_MERGE_BUTTONS = myComponent.myAllowMergeButtons.isSelected();
    update |= settings.CYCLE_SCROLLING != myComponent.myCycleScrollingCheckBox.isSelected();
    settings.CYCLE_SCROLLING = myComponent.myCycleScrollingCheckBox.isSelected();
    if (settings.OVERRIDE_NONIDEA_LAF_FONTS != myComponent.myOverrideLAFFonts.isSelected()) {
      shouldUpdateUI = true;
    }
    settings.OVERRIDE_NONIDEA_LAF_FONTS = myComponent.myOverrideLAFFonts.isSelected();
    settings.MOVE_MOUSE_ON_DEFAULT_BUTTON = myComponent.myMoveMouseOnDefaultButtonCheckBox.isSelected();
    settings.HIDE_NAVIGATION_ON_FOCUS_LOSS = myComponent.myHideNavigationPopupsCheckBox.isSelected();
    settings.DND_WITH_PRESSED_ALT_ONLY = myComponent.myAltDNDCheckBox.isSelected();
    update |= settings.DISABLE_MNEMONICS != myComponent.myDisableMnemonics.isSelected();
    settings.DISABLE_MNEMONICS = myComponent.myDisableMnemonics.isSelected();
    update |= settings.USE_SMALL_LABELS_ON_TABS != myComponent.myUseSmallLabelsOnTabs.isSelected();
    settings.USE_SMALL_LABELS_ON_TABS = myComponent.myUseSmallLabelsOnTabs.isSelected();
    update |= settings.WIDESCREEN_SUPPORT != myComponent.myWidescreenLayoutCheckBox.isSelected();
    settings.WIDESCREEN_SUPPORT = myComponent.myWidescreenLayoutCheckBox.isSelected();
    update |= settings.LEFT_HORIZONTAL_SPLIT != myComponent.myLeftLayoutCheckBox.isSelected();
    settings.LEFT_HORIZONTAL_SPLIT = myComponent.myLeftLayoutCheckBox.isSelected();
    update |= settings.RIGHT_HORIZONTAL_SPLIT != myComponent.myRightLayoutCheckBox.isSelected();
    settings.RIGHT_HORIZONTAL_SPLIT = myComponent.myRightLayoutCheckBox.isSelected();
    // NOTE(review): the modified-check below factors in the checkbox's visibility,
    // but the assignment on the next line does not — confirm this asymmetry is intended
    // (the checkbox is hidden in reset()).
    update |= settings.NAVIGATE_TO_PREVIEW != (myComponent.myNavigateToPreviewCheckBox.isVisible() && myComponent.myNavigateToPreviewCheckBox.isSelected());
    settings.NAVIGATE_TO_PREVIEW = myComponent.myNavigateToPreviewCheckBox.isSelected();
    ColorBlindness blindness = myComponent.myColorBlindnessPanel.getColorBlindness();
    if (settings.COLOR_BLINDNESS != blindness) {
      settings.COLOR_BLINDNESS = blindness;
      update = true;
      // Color-scheme manager must be reloaded so editor schemes pick up the new filter.
      ComponentsPackage.getStateStore(ApplicationManager.getApplication()).reloadState(DefaultColorSchemesManager.class);
      shouldUpdateUI = true;
    }
    update |= settings.DISABLE_MNEMONICS_IN_CONTROLS != myComponent.myDisableMnemonicInControlsCheckBox.isSelected();
    settings.DISABLE_MNEMONICS_IN_CONTROLS = myComponent.myDisableMnemonicInControlsCheckBox.isSelected();
    update |= settings.SHOW_ICONS_IN_QUICK_NAVIGATION != myComponent.myHideIconsInQuickNavigation.isSelected();
    settings.SHOW_ICONS_IN_QUICK_NAVIGATION = myComponent.myHideIconsInQuickNavigation.isSelected();
    if (!Comparing.equal(myComponent.myLafComboBox.getSelectedItem(), lafManager.getCurrentLookAndFeel())) {
      final UIManager.LookAndFeelInfo lafInfo = (UIManager.LookAndFeelInfo)myComponent.myLafComboBox.getSelectedItem();
      if (lafManager.checkLookAndFeel(lafInfo)) {
        update = shouldUpdateUI = true;
        final boolean wasDarcula = UIUtil.isUnderDarcula();
        lafManager.setCurrentLookAndFeel(lafInfo);
        // Darcula (un)install must run after the L&F switch has settled, hence invokeLater.
        //noinspection SSBasedInspection
        SwingUtilities.invokeLater(new Runnable() {
          public void run() {
            if (UIUtil.isUnderDarcula()) {
              DarculaInstaller.install();
            } else if (wasDarcula) {
              DarculaInstaller.uninstall();
            }
          }
        });
      }
    }
    if (shouldUpdateUI) {
      lafManager.updateUI();
    }
    if (WindowManagerEx.getInstanceEx().isAlphaModeSupported()) {
      // delay == -1 means "unparseable text field"; it is then ignored by the
      // comparison below but still written into ALPHA_MODE_DELAY if another
      // alpha setting changed.
      int delay = -1;
      try {
        delay = Integer.parseInt(myComponent.myAlphaModeDelayTextField.getText());
      }
      catch (NumberFormatException ignored) {
      }
      float ratio = myComponent.myAlphaModeRatioSlider.getValue() / 100f;
      if (myComponent.myEnableAlphaModeCheckBox.isSelected() != settings.ENABLE_ALPHA_MODE ||
          delay != -1 && delay != settings.ALPHA_MODE_DELAY || ratio != settings.ALPHA_MODE_RATIO) {
        update = true;
        settings.ENABLE_ALPHA_MODE = myComponent.myEnableAlphaModeCheckBox.isSelected();
        settings.ALPHA_MODE_DELAY = delay;
        settings.ALPHA_MODE_RATIO = ratio;
      }
    }
    // Tooltip delay is stored in the Registry, capped at 5000 ms.
    int tooltipDelay = Math.min(myComponent.myInitialTooltipDelaySlider.getValue(), 5000);
    if (tooltipDelay != Registry.intValue("ide.tooltip.initialDelay")) {
      update = true;
      Registry.get("ide.tooltip.initialDelay").setValue(tooltipDelay);
    }
    if (update) {
      settings.fireUISettingsChanged();
    }
    myComponent.updateCombo();
    // Editors re-read fonts/antialiasing from the applied settings.
    EditorUtil.reinitSettings();
  }
private static int getIntValue(JComboBox combo, int defaultValue) {
String temp = (String)combo.getEditor().getItem();
int value = -1;
if (temp != null && temp.trim().length() > 0) {
try {
value = Integer.parseInt(temp);
}
catch (NumberFormatException ignore) {
}
if (value <= 0) {
value = defaultValue;
}
}
else {
value = defaultValue;
}
return value;
}
  /**
   * Re-populates every form control from the current {@link UISettings},
   * LafManager state and Registry values, discarding unapplied edits.
   */
  public void reset() {
    initComponent();
    UISettings settings = UISettings.getInstance();

    myComponent.myFontCombo.setSelectedItem(settings.FONT_FACE);

    myComponent.myAntialiasingCheckBox.setSelected(settings.ANTIALIASING_IN_IDE);
    myComponent.myLCDRenderingScopeCombo.setSelectedItem(settings.LCD_RENDERING_SCOPE);

    // Editable combos hold their sizes as strings.
    myComponent.myFontSizeCombo.setSelectedItem(Integer.toString(settings.FONT_SIZE));
    myComponent.myPresentationModeFontSize.setSelectedItem(Integer.toString(settings.PRESENTATION_MODE_FONT_SIZE));
    myComponent.myAnimateWindowsCheckBox.setSelected(settings.ANIMATE_WINDOWS);
    myComponent.myWindowShortcutsCheckBox.setSelected(settings.SHOW_TOOL_WINDOW_NUMBERS);
    // Inverted: setting stores "hide", checkbox shows "show".
    myComponent.myShowToolStripesCheckBox.setSelected(!settings.HIDE_TOOL_STRIPES);
    myComponent.myCbDisplayIconsInMenu.setSelected(settings.SHOW_ICONS_IN_MENUS);
    myComponent.myShowMemoryIndicatorCheckBox.setSelected(settings.SHOW_MEMORY_INDICATOR);
    myComponent.myAllowMergeButtons.setSelected(settings.ALLOW_MERGE_BUTTONS);
    myComponent.myCycleScrollingCheckBox.setSelected(settings.CYCLE_SCROLLING);
    myComponent.myHideIconsInQuickNavigation.setSelected(settings.SHOW_ICONS_IN_QUICK_NAVIGATION);
    myComponent.myMoveMouseOnDefaultButtonCheckBox.setSelected(settings.MOVE_MOUSE_ON_DEFAULT_BUTTON);
    myComponent.myHideNavigationPopupsCheckBox.setSelected(settings.HIDE_NAVIGATION_ON_FOCUS_LOSS);
    myComponent.myAltDNDCheckBox.setSelected(settings.DND_WITH_PRESSED_ALT_ONLY);
    myComponent.myLafComboBox.setSelectedItem(LafManager.getInstance().getCurrentLookAndFeel());
    myComponent.myOverrideLAFFonts.setSelected(settings.OVERRIDE_NONIDEA_LAF_FONTS);
    myComponent.myDisableMnemonics.setSelected(settings.DISABLE_MNEMONICS);
    myComponent.myUseSmallLabelsOnTabs.setSelected(settings.USE_SMALL_LABELS_ON_TABS);
    myComponent.myWidescreenLayoutCheckBox.setSelected(settings.WIDESCREEN_SUPPORT);
    myComponent.myLeftLayoutCheckBox.setSelected(settings.LEFT_HORIZONTAL_SPLIT);
    myComponent.myRightLayoutCheckBox.setSelected(settings.RIGHT_HORIZONTAL_SPLIT);
    myComponent.myNavigateToPreviewCheckBox.setSelected(settings.NAVIGATE_TO_PREVIEW);
    myComponent.myNavigateToPreviewCheckBox.setVisible(false);//disabled for a while
    myComponent.myColorBlindnessPanel.setColorBlindness(settings.COLOR_BLINDNESS);
    myComponent.myDisableMnemonicInControlsCheckBox.setSelected(settings.DISABLE_MNEMONICS_IN_CONTROLS);

    // Transparency controls only enabled where the window manager supports alpha mode.
    boolean alphaModeEnabled = WindowManagerEx.getInstanceEx().isAlphaModeSupported();
    if (alphaModeEnabled) {
      myComponent.myEnableAlphaModeCheckBox.setSelected(settings.ENABLE_ALPHA_MODE);
    }
    else {
      myComponent.myEnableAlphaModeCheckBox.setSelected(false);
    }
    myComponent.myEnableAlphaModeCheckBox.setEnabled(alphaModeEnabled);
    myComponent.myAlphaModeDelayTextField.setText(Integer.toString(settings.ALPHA_MODE_DELAY));
    myComponent.myAlphaModeDelayTextField.setEnabled(alphaModeEnabled && settings.ENABLE_ALPHA_MODE);
    // Ratio is stored as a float 0..1 but displayed as a 0..100 percentage.
    int ratio = (int)(settings.ALPHA_MODE_RATIO * 100f);
    myComponent.myAlphaModeRatioSlider.setValue(ratio);
    myComponent.myAlphaModeRatioSlider.setToolTipText(ratio + "%");
    myComponent.myAlphaModeRatioSlider.setEnabled(alphaModeEnabled && settings.ENABLE_ALPHA_MODE);
    myComponent.myInitialTooltipDelaySlider.setValue(Registry.intValue("ide.tooltip.initialDelay"));
    myComponent.updateCombo();
  }
/**
 * Reports whether any control on the Appearance panel differs from the
 * persisted {@link UISettings} (or from the current look-and-feel and the
 * "ide.tooltip.initialDelay" registry value), i.e. whether Apply should be enabled.
 *
 * @return {@code true} if at least one shown setting was changed by the user
 */
public boolean isModified() {
  initComponent();
  UISettings settings = UISettings.getInstance();
  boolean isModified = false;
  isModified |= !Comparing.equal(myComponent.myFontCombo.getSelectedItem(), settings.FONT_FACE);
  isModified |= !Comparing.equal(myComponent.myFontSizeCombo.getEditor().getItem(), Integer.toString(settings.FONT_SIZE));
  isModified |= myComponent.myAntialiasingCheckBox.isSelected() != settings.ANTIALIASING_IN_IDE;
  // Null-safe comparison (fix): the combo may have no selection, so never call
  // equals() on getSelectedItem() directly; mirrors the sibling comparisons above.
  isModified |= !Comparing.equal(myComponent.myLCDRenderingScopeCombo.getSelectedItem(), settings.LCD_RENDERING_SCOPE);
  isModified |= myComponent.myAnimateWindowsCheckBox.isSelected() != settings.ANIMATE_WINDOWS;
  isModified |= myComponent.myWindowShortcutsCheckBox.isSelected() != settings.SHOW_TOOL_WINDOW_NUMBERS;
  // Inverted sense is intentional: the checkbox says "show", the setting stores "hide".
  isModified |= myComponent.myShowToolStripesCheckBox.isSelected() == settings.HIDE_TOOL_STRIPES;
  isModified |= myComponent.myCbDisplayIconsInMenu.isSelected() != settings.SHOW_ICONS_IN_MENUS;
  isModified |= myComponent.myShowMemoryIndicatorCheckBox.isSelected() != settings.SHOW_MEMORY_INDICATOR;
  isModified |= myComponent.myAllowMergeButtons.isSelected() != settings.ALLOW_MERGE_BUTTONS;
  isModified |= myComponent.myCycleScrollingCheckBox.isSelected() != settings.CYCLE_SCROLLING;
  isModified |= myComponent.myOverrideLAFFonts.isSelected() != settings.OVERRIDE_NONIDEA_LAF_FONTS;
  isModified |= myComponent.myDisableMnemonics.isSelected() != settings.DISABLE_MNEMONICS;
  isModified |= myComponent.myDisableMnemonicInControlsCheckBox.isSelected() != settings.DISABLE_MNEMONICS_IN_CONTROLS;
  isModified |= myComponent.myUseSmallLabelsOnTabs.isSelected() != settings.USE_SMALL_LABELS_ON_TABS;
  isModified |= myComponent.myWidescreenLayoutCheckBox.isSelected() != settings.WIDESCREEN_SUPPORT;
  isModified |= myComponent.myLeftLayoutCheckBox.isSelected() != settings.LEFT_HORIZONTAL_SPLIT;
  isModified |= myComponent.myRightLayoutCheckBox.isSelected() != settings.RIGHT_HORIZONTAL_SPLIT;
  isModified |= myComponent.myNavigateToPreviewCheckBox.isSelected() != settings.NAVIGATE_TO_PREVIEW;
  isModified |= myComponent.myColorBlindnessPanel.getColorBlindness() != settings.COLOR_BLINDNESS;
  isModified |= myComponent.myHideIconsInQuickNavigation.isSelected() != settings.SHOW_ICONS_IN_QUICK_NAVIGATION;
  isModified |= !Comparing.equal(myComponent.myPresentationModeFontSize.getEditor().getItem(), Integer.toString(settings.PRESENTATION_MODE_FONT_SIZE));
  isModified |= myComponent.myMoveMouseOnDefaultButtonCheckBox.isSelected() != settings.MOVE_MOUSE_ON_DEFAULT_BUTTON;
  isModified |= myComponent.myHideNavigationPopupsCheckBox.isSelected() != settings.HIDE_NAVIGATION_ON_FOCUS_LOSS;
  isModified |= myComponent.myAltDNDCheckBox.isSelected() != settings.DND_WITH_PRESSED_ALT_ONLY;
  isModified |= !Comparing.equal(myComponent.myLafComboBox.getSelectedItem(), LafManager.getInstance().getCurrentLookAndFeel());
  if (WindowManagerEx.getInstanceEx().isAlphaModeSupported()) {
    isModified |= myComponent.myEnableAlphaModeCheckBox.isSelected() != settings.ENABLE_ALPHA_MODE;
    int delay = -1;
    try {
      delay = Integer.parseInt(myComponent.myAlphaModeDelayTextField.getText());
    }
    catch (NumberFormatException ignored) {
      // Non-numeric input is treated as "unchanged" rather than an error.
    }
    if (delay != -1) {
      isModified |= delay != settings.ALPHA_MODE_DELAY;
    }
    float ratio = myComponent.myAlphaModeRatioSlider.getValue() / 100f;
    isModified |= ratio != settings.ALPHA_MODE_RATIO;
  }
  // Dead "-1" initialization removed: the slider value is read unconditionally.
  int tooltipDelay = myComponent.myInitialTooltipDelaySlider.getValue();
  isModified |= tooltipDelay != Registry.intValue("ide.tooltip.initialDelay");
  return isModified;
}
/**
 * Drops the reference to the form component so its Swing widgets can be
 * garbage-collected once the settings dialog is closed.
 */
public void disposeUIResources() {
myComponent = null;
}
/** @return the help topic ID for this settings page. */
public String getHelpTopic() {
return "preferences.lookFeel";
}
/**
 * Holder for the Swing controls of the Appearance page.
 * NOTE(review): the fields appear to be bound by name to a GUI-designer form
 * (createUIComponents() is the designer's custom-creation hook) — do not
 * rename fields without updating the form; confirm against the .form file.
 */
private static class MyComponent {
private JPanel myPanel;
private JComboBox myFontCombo;
private JComboBox myFontSizeCombo;
private JCheckBox myAnimateWindowsCheckBox;
private JCheckBox myWindowShortcutsCheckBox;
private JCheckBox myShowToolStripesCheckBox;
private JCheckBox myShowMemoryIndicatorCheckBox;
private JComboBox myLafComboBox;
private JCheckBox myCycleScrollingCheckBox;
private JBCheckBox myAntialiasingCheckBox;
private ComboBox myLCDRenderingScopeCombo;
private JCheckBox myMoveMouseOnDefaultButtonCheckBox;
private JCheckBox myEnableAlphaModeCheckBox;
private JTextField myAlphaModeDelayTextField;
private JSlider myAlphaModeRatioSlider;
private JLabel myFontSizeLabel;
private JLabel myFontNameLabel;
private JPanel myTransparencyPanel;
private JCheckBox myOverrideLAFFonts;
private JCheckBox myHideIconsInQuickNavigation;
private JCheckBox myCbDisplayIconsInMenu;
private JCheckBox myDisableMnemonics;
private JCheckBox myDisableMnemonicInControlsCheckBox;
private JCheckBox myHideNavigationPopupsCheckBox;
private JCheckBox myAltDNDCheckBox;
private JCheckBox myAllowMergeButtons;
private JBCheckBox myUseSmallLabelsOnTabs;
private JBCheckBox myWidescreenLayoutCheckBox;
private JCheckBox myLeftLayoutCheckBox;
private JCheckBox myRightLayoutCheckBox;
private JSlider myInitialTooltipDelaySlider;
private ComboBox myPresentationModeFontSize;
private JCheckBox myNavigateToPreviewCheckBox;
private ColorBlindnessPanel myColorBlindnessPanel;
public MyComponent() {
// Keep the font chooser controls' enabled state in sync with the override toggle.
myOverrideLAFFonts.addActionListener( new ActionListener() {
public void actionPerformed(ActionEvent e) {
updateCombo();
}
});
// Transparency settings are hidden entirely when the registry flag disallows them.
if (!Registry.is("ide.transparency.mode.for.windows")) {
myTransparencyPanel.getParent().remove(myTransparencyPanel);
}
}
/** Enables the font name/size controls only while "override LAF fonts" is checked. */
public void updateCombo() {
boolean enableChooser = myOverrideLAFFonts.isSelected();
myFontCombo.setEnabled(enableChooser);
myFontSizeCombo.setEnabled(enableChooser);
myFontNameLabel.setEnabled(enableChooser);
myFontSizeLabel.setEnabled(enableChooser);
}
// Custom component creation — presumably invoked by the GUI designer; confirm.
private void createUIComponents() {
myFontSizeCombo = new ComboBox();
myPresentationModeFontSize = new ComboBox();
}
}
/** Unique configurable ID; reuses the help topic string. */
@NotNull
public String getId() {
//noinspection ConstantConditions
return getHelpTopic();
}
/**
 * No custom search/highlight behavior for this page.
 *
 * @return always {@code null}
 */
@Nullable
public Runnable enableSearch(String option) {
return null;
}
}
| |
/*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package spark.resource;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import spark.utils.Assert;
import spark.utils.ResourceUtils;
/**
* Convenience base class for {@link Resource} implementations,
* pre-implementing typical behavior.
* <p>The "exists" method will check whether a File or InputStream can
* be opened; "isOpen" will always return false; "getURL" and "getFile"
* throw an exception; and "toString" will return the description.
*
* @author Juergen Hoeller
* Code copied from Spring source. Modifications made (mostly removal of methods) by Per Wendel.
*/
public abstract class AbstractResource implements Resource {

    /**
     * Existence check: first asks the file system via {@link #getFile()},
     * then falls back to probing whether an InputStream can be opened.
     * This covers both directories and content resources.
     */
    @Override
    public boolean exists() {
        try {
            return getFile().exists();
        } catch (IOException fileProbeFailed) {
            // Not resolvable as a file - see whether a stream can be opened instead.
            try {
                getInputStream().close();
                return true;
            } catch (Throwable streamProbeFailed) {
                return false;
            }
        }
    }

    /** Always {@code true} for this base implementation. */
    @Override
    public boolean isReadable() {
        return true;
    }

    /** Always {@code false} for this base implementation. */
    @Override
    public boolean isOpen() {
        return false;
    }

    /**
     * Base behavior: resolution to a URL is unsupported.
     *
     * @throws FileNotFoundException always
     */
    @Override
    public URL getURL() throws IOException {
        throw new FileNotFoundException(getDescription() + " cannot be resolved to URL");
    }

    /**
     * Derives a URI from the URL returned by {@link #getURL()}.
     *
     * @throws IOException if the URL cannot be converted to a valid URI
     */
    @Override
    public URI getURI() throws IOException {
        URL url = getURL();
        try {
            return ResourceUtils.toURI(url);
        } catch (URISyntaxException ex) {
            throw new IOException("Invalid URI [" + url + "]", ex);
        }
    }

    /**
     * Base behavior: resolution to an absolute file path is unsupported.
     *
     * @throws FileNotFoundException always
     */
    @Override
    public File getFile() throws IOException {
        throw new FileNotFoundException(getDescription() + " cannot be resolved to absolute file path");
    }

    /**
     * Computes the content length by draining the entire InputStream.
     * Subclasses with cheaper metadata (e.g. a File length) should override this.
     *
     * @throws IllegalStateException if {@link #getInputStream()} returns null
     * @see #getInputStream()
     */
    @Override
    public long contentLength() throws IOException {
        InputStream stream = this.getInputStream();
        Assert.state(stream != null, "resource input stream must not be null");
        try {
            byte[] chunk = new byte[255];
            long total = 0;
            for (int bytesRead = stream.read(chunk); bytesRead != -1; bytesRead = stream.read(chunk)) {
                total += bytesRead;
            }
            return total;
        } finally {
            try {
                stream.close();
            } catch (IOException closeFailure) {
                // Deliberately swallowed: the length has already been computed.
            }
        }
    }

    /**
     * Uses the last-modified timestamp of the File returned by
     * {@link #getFileForLastModifiedCheck()}, if available.
     *
     * @throws FileNotFoundException if no timestamp can be determined
     */
    @Override
    public long lastModified() throws IOException {
        long lastModified = getFileForLastModifiedCheck().lastModified();
        if (lastModified == 0L) {
            throw new FileNotFoundException(getDescription() +
                    " cannot be resolved in the file system for resolving its last-modified timestamp");
        }
        return lastModified;
    }

    /**
     * Determines which File to use for timestamp checking; defaults to
     * {@link #getFile()}.
     *
     * @return the File to use for timestamp checking (never {@code null})
     * @throws IOException if the resource is not available in a file system
     */
    protected File getFileForLastModifiedCheck() throws IOException {
        return getFile();
    }

    /**
     * Base behavior: relative resources cannot be created for this resource.
     *
     * @throws FileNotFoundException always
     */
    @Override
    public Resource createRelative(String relativePath) throws IOException {
        throw new FileNotFoundException("Cannot create a relative resource for " + getDescription());
    }

    /**
     * Base behavior: this resource type has no filename.
     *
     * @return always {@code null}
     */
    @Override
    public String getFilename() {
        return null;
    }

    /** The description doubles as the string form of the resource. */
    @Override
    public String toString() {
        return getDescription();
    }

    /** Two resources are considered equal when their descriptions match. */
    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        return obj instanceof Resource
                && ((Resource) obj).getDescription().equals(getDescription());
    }

    /** Hash code derived from the description, consistent with equals. */
    @Override
    public int hashCode() {
        return getDescription().hashCode();
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.security.AccessControlException;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.lang.math.LongRange;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.ApplicationsRequestScope;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationAttemptsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainersResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.KillApplicationResponse;
import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.MoveApplicationAcrossQueuesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenRequest;
import org.apache.hadoop.yarn.api.protocolrecords.RenewDelegationTokenResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationDeleteResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationSubmissionResponse;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.ReservationUpdateResponse;
import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationPriorityRequest;
import org.apache.hadoop.yarn.api.protocolrecords.UpdateApplicationPriorityResponse;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.ReservationDefinition;
import org.apache.hadoop.yarn.api.records.ReservationId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.ApplicationAttemptNotFoundException;
import org.apache.hadoop.yarn.exceptions.ApplicationNotFoundException;
import org.apache.hadoop.yarn.exceptions.ContainerNotFoundException;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger.AuditConstants;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.Plan;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationInputValidator;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.ReservationSystem;
import org.apache.hadoop.yarn.server.resourcemanager.reservation.exceptions.PlanningException;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppMoveEvent;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmcontainer.RMContainer;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerAppReport;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.YarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.RMDelegationTokenSecretManager;
import org.apache.hadoop.yarn.server.resourcemanager.security.authorize.RMPolicyProvider;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Clock;
import org.apache.hadoop.yarn.util.Records;
import org.apache.hadoop.yarn.util.UTCClock;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.SettableFuture;
/**
* The client interface to the Resource Manager. This module handles all the rpc
* interfaces to the resource manager from the client.
*/
public class ClientRMService extends AbstractService implements
ApplicationClientProtocol {
// Shared empty result list for application queries that match nothing.
private static final ArrayList<ApplicationReport> EMPTY_APPS_REPORT = new ArrayList<ApplicationReport>();
private static final Log LOG = LogFactory.getLog(ClientRMService.class);
// Monotonic counter used by getNewApplicationId() to mint unique application IDs.
final private AtomicInteger applicationCounter = new AtomicInteger(0);
final private YarnScheduler scheduler;
final private RMContext rmContext;
private final RMAppManager rmAppManager;
// RPC server accepting ApplicationClientProtocol calls; created in serviceStart().
private Server server;
protected RMDelegationTokenSecretManager rmDTSecretManager;
private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
// Bind address resolved in serviceInit() and refined in serviceStart() once listening.
InetSocketAddress clientBindAddress;
private final ApplicationACLsManager applicationsACLsManager;
private final QueueACLsManager queueACLsManager;
// For Reservation APIs
private Clock clock;
private ReservationSystem reservationSystem;
private ReservationInputValidator rValidator;
/**
 * Convenience constructor that uses a UTC clock; delegates to the
 * full constructor below.
 */
public ClientRMService(RMContext rmContext, YarnScheduler scheduler,
RMAppManager rmAppManager, ApplicationACLsManager applicationACLsManager,
QueueACLsManager queueACLsManager,
RMDelegationTokenSecretManager rmDTSecretManager) {
this(rmContext, scheduler, rmAppManager, applicationACLsManager,
queueACLsManager, rmDTSecretManager, new UTCClock());
}
/**
 * Full constructor allowing the clock used by the reservation input
 * validator to be injected.
 */
public ClientRMService(RMContext rmContext, YarnScheduler scheduler,
RMAppManager rmAppManager, ApplicationACLsManager applicationACLsManager,
QueueACLsManager queueACLsManager,
RMDelegationTokenSecretManager rmDTSecretManager, Clock clock) {
super(ClientRMService.class.getName());
this.scheduler = scheduler;
this.rmContext = rmContext;
this.rmAppManager = rmAppManager;
this.applicationsACLsManager = applicationACLsManager;
this.queueACLsManager = queueACLsManager;
this.rmDTSecretManager = rmDTSecretManager;
// Reservation support: the system comes from the RM context, the validator
// is built here with the injected clock.
this.reservationSystem = rmContext.getReservationSystem();
this.clock = clock;
this.rValidator = new ReservationInputValidator(clock);
}
/** Resolves the client bind address from configuration before init. */
@Override
protected void serviceInit(Configuration conf) throws Exception {
clientBindAddress = getBindAddress(conf);
super.serviceInit(conf);
}
/**
 * Starts the client-facing RPC server, optionally refreshing service-level
 * ACLs when Hadoop security authorization is enabled, and records the
 * actual connect address once the server is listening.
 */
@Override
protected void serviceStart() throws Exception {
Configuration conf = getConfig();
YarnRPC rpc = YarnRPC.create(conf);
this.server =
rpc.getServer(ApplicationClientProtocol.class, this,
clientBindAddress,
conf, this.rmDTSecretManager,
conf.getInt(YarnConfiguration.RM_CLIENT_THREAD_COUNT,
YarnConfiguration.DEFAULT_RM_CLIENT_THREAD_COUNT));
// Enable service authorization?
if (conf.getBoolean(
CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
false)) {
// Merge the hadoop-policy file from the configuration provider (if any)
// before refreshing the service ACLs.
InputStream inputStream =
this.rmContext.getConfigurationProvider()
.getConfigurationInputStream(conf,
YarnConfiguration.HADOOP_POLICY_CONFIGURATION_FILE);
if (inputStream != null) {
conf.addResource(inputStream);
}
refreshServiceAcls(conf, RMPolicyProvider.getInstance());
}
this.server.start();
// Re-resolve the connect address now that the server knows its real listener port.
clientBindAddress = conf.updateConnectAddr(YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_ADDRESS,
server.getListenerAddress());
super.serviceStart();
}
/** Stops the RPC server (if it was ever started) before stopping the service. */
@Override
protected void serviceStop() throws Exception {
if (this.server != null) {
this.server.stop();
}
super.serviceStop();
}
/** Reads the RM client bind address (host/port with defaults) from configuration. */
InetSocketAddress getBindAddress(Configuration conf) {
return conf.getSocketAddr(
YarnConfiguration.RM_BIND_HOST,
YarnConfiguration.RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_ADDRESS,
YarnConfiguration.DEFAULT_RM_PORT);
}
/** @return the address the client RPC server is (or will be) bound to */
@Private
public InetSocketAddress getBindAddress() {
return clientBindAddress;
}
/**
 * Checks whether the calling user may perform the given operation on the
 * application. Access is granted either through the application's own ACLs
 * or through ADMINISTER_QUEUE rights on the application's queue.
 *
 * @param callerUGI          the user making the request
 * @param owner              the application owner
 * @param operationPerformed the access type being checked
 * @param application        the application being accessed
 * @return {@code true} if either ACL check grants access
 */
private boolean checkAccess(UserGroupInformation callerUGI, String owner,
    ApplicationAccessType operationPerformed,
    RMApp application) {
  // Application-level ACLs win first; the queue check is only consulted
  // when they deny (preserves the original short-circuit behavior).
  if (applicationsACLsManager.checkAccess(callerUGI, operationPerformed,
      owner, application.getApplicationId())) {
    return true;
  }
  return queueACLsManager.checkAccess(callerUGI, QueueACL.ADMINISTER_QUEUE,
      application.getQueue());
}
/**
 * Mints the next ApplicationId from the cluster timestamp and the
 * monotonically increasing per-RM counter.
 */
ApplicationId getNewApplicationId() {
  // BuilderUtils is already imported at the top of this file.
  ApplicationId newId = BuilderUtils.newApplicationId(recordFactory,
      ResourceManager.getClusterTimeStamp(),
      applicationCounter.incrementAndGet());
  LOG.info("Allocated new applicationId: " + newId.getId());
  return newId;
}
/**
 * Hands a fresh application ID to the client, together with the
 * scheduler's maximum resource capability so the client can size requests.
 */
@Override
public GetNewApplicationResponse getNewApplication(
    GetNewApplicationRequest request) throws YarnException {
  Resource maxCapability = scheduler.getMaximumResourceCapability();
  GetNewApplicationResponse response =
      recordFactory.newRecordInstance(GetNewApplicationResponse.class);
  response.setApplicationId(getNewApplicationId());
  response.setMaximumResourceCapability(maxCapability);
  return response;
}
/**
 * Returns the report for the requested application, or throws
 * ApplicationNotFoundException when the RM does not know the application.
 * <p>Unlike the other getters in this service, a failed access check does not
 * raise an exception here; the allowAccess flag is forwarded to
 * createAndGetApplicationReport (presumably to limit report detail —
 * confirm in RMApp).
 */
@Override
public GetApplicationReportResponse getApplicationReport(
GetApplicationReportRequest request) throws YarnException {
ApplicationId applicationId = request.getApplicationId();
UserGroupInformation callerUGI;
try {
callerUGI = UserGroupInformation.getCurrentUser();
} catch (IOException ie) {
LOG.info("Error getting UGI ", ie);
throw RPCUtil.getRemoteException(ie);
}
RMApp application = this.rmContext.getRMApps().get(applicationId);
if (application == null) {
// If the RM doesn't have the application, throw
// ApplicationNotFoundException and let client to handle.
throw new ApplicationNotFoundException("Application with id '"
+ applicationId + "' doesn't exist in RM.");
}
boolean allowAccess = checkAccess(callerUGI, application.getUser(),
ApplicationAccessType.VIEW_APP, application);
ApplicationReport report =
application.createAndGetApplicationReport(callerUGI.getUserName(),
allowAccess);
GetApplicationReportResponse response = recordFactory
.newRecordInstance(GetApplicationReportResponse.class);
response.setApplicationReport(report);
return response;
}
/**
 * Returns the report for a single application attempt.
 *
 * @throws ApplicationNotFoundException if the application is unknown to this RM
 * @throws ApplicationAttemptNotFoundException if the attempt does not exist
 * @throws YarnException if the caller lacks VIEW_APP access
 */
@Override
public GetApplicationAttemptReportResponse getApplicationAttemptReport(
    GetApplicationAttemptReportRequest request) throws YarnException,
    IOException {
  ApplicationAttemptId appAttemptId = request.getApplicationAttemptId();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps().get(
      appAttemptId.getApplicationId());
  if (application == null) {
    // If the RM doesn't have the application, throw
    // ApplicationNotFoundException and let client to handle.
    throw new ApplicationNotFoundException("Application with id '"
        + appAttemptId.getApplicationId() + "' doesn't exist in RM.");
  }
  boolean allowAccess = checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.VIEW_APP, application);
  if (!allowAccess) {
    // Typo fix in user-facing message: "privilage" -> "privilege".
    throw new YarnException("User " + callerUGI.getShortUserName()
        + " does not have privilege to see this attempt " + appAttemptId);
  }
  RMAppAttempt appAttempt = application.getAppAttempts().get(appAttemptId);
  if (appAttempt == null) {
    throw new ApplicationAttemptNotFoundException(
        "ApplicationAttempt with id '" + appAttemptId +
        "' doesn't exist in RM.");
  }
  return GetApplicationAttemptReportResponse.newInstance(
      appAttempt.createApplicationAttemptReport());
}
/**
 * Lists reports for every attempt of the given application.
 *
 * @throws ApplicationNotFoundException if the application is unknown to this RM
 * @throws YarnException if the caller lacks VIEW_APP access
 */
@Override
public GetApplicationAttemptsResponse getApplicationAttempts(
    GetApplicationAttemptsRequest request) throws YarnException, IOException {
  ApplicationId appId = request.getApplicationId();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps().get(appId);
  if (application == null) {
    // If the RM doesn't have the application, throw
    // ApplicationNotFoundException and let client to handle.
    throw new ApplicationNotFoundException("Application with id '" + appId
        + "' doesn't exist in RM.");
  }
  boolean allowAccess = checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.VIEW_APP, application);
  if (!allowAccess) {
    // Typo fixes in user-facing message: "privilage"/"aplication" ->
    // "privilege"/"application".
    throw new YarnException("User " + callerUGI.getShortUserName()
        + " does not have privilege to see this application " + appId);
  }
  // Enhanced-for over values() replaces the manual entry-set Iterator loop.
  List<ApplicationAttemptReport> listAttempts =
      new ArrayList<ApplicationAttemptReport>();
  for (RMAppAttempt attempt : application.getAppAttempts().values()) {
    listAttempts.add(attempt.createApplicationAttemptReport());
  }
  return GetApplicationAttemptsResponse.newInstance(listAttempts);
}
/*
 * (non-Javadoc)
 *
 * we're going to fix the issue of showing non-running containers of the
 * running application in YARN-1794
 */
/**
 * Returns the report for a single container, provided the caller has
 * VIEW_APP access to the owning application.
 *
 * @throws ApplicationNotFoundException if the application is unknown to this RM
 * @throws ApplicationAttemptNotFoundException if the attempt does not exist
 * @throws ContainerNotFoundException if the container is unknown to the scheduler
 * @throws YarnException if the caller lacks VIEW_APP access
 */
@Override
public GetContainerReportResponse getContainerReport(
    GetContainerReportRequest request) throws YarnException, IOException {
  ContainerId containerId = request.getContainerId();
  ApplicationAttemptId appAttemptId = containerId.getApplicationAttemptId();
  ApplicationId appId = appAttemptId.getApplicationId();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps().get(appId);
  if (application == null) {
    // If the RM doesn't have the application, throw
    // ApplicationNotFoundException and let client to handle.
    throw new ApplicationNotFoundException("Application with id '" + appId
        + "' doesn't exist in RM.");
  }
  boolean allowAccess = checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.VIEW_APP, application);
  if (!allowAccess) {
    // Typo fixes in user-facing message: "privilage"/"aplication" ->
    // "privilege"/"application".
    throw new YarnException("User " + callerUGI.getShortUserName()
        + " does not have privilege to see this application " + appId);
  }
  RMAppAttempt appAttempt = application.getAppAttempts().get(appAttemptId);
  if (appAttempt == null) {
    throw new ApplicationAttemptNotFoundException(
        "ApplicationAttempt with id '" + appAttemptId +
        "' doesn't exist in RM.");
  }
  // Local-variable spelling fix: rmConatiner -> rmContainer.
  RMContainer rmContainer = this.rmContext.getScheduler().getRMContainer(
      containerId);
  if (rmContainer == null) {
    throw new ContainerNotFoundException("Container with id '" + containerId
        + "' doesn't exist in RM.");
  }
  return GetContainerReportResponse.newInstance(
      rmContainer.createContainerReport());
}
/*
 * (non-Javadoc)
 *
 * we're going to fix the issue of showing non-running containers of the
 * running application in YARN-1794
 */
/**
 * Lists reports for the live containers of an application attempt, as
 * reported by the scheduler (an empty list if the scheduler has no info).
 *
 * @throws ApplicationNotFoundException if the application is unknown to this RM
 * @throws ApplicationAttemptNotFoundException if the attempt does not exist
 * @throws YarnException if the caller lacks VIEW_APP access
 */
@Override
public GetContainersResponse getContainers(GetContainersRequest request)
    throws YarnException, IOException {
  ApplicationAttemptId appAttemptId = request.getApplicationAttemptId();
  ApplicationId appId = appAttemptId.getApplicationId();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps().get(appId);
  if (application == null) {
    // If the RM doesn't have the application, throw
    // ApplicationNotFoundException and let client to handle.
    throw new ApplicationNotFoundException("Application with id '" + appId
        + "' doesn't exist in RM.");
  }
  boolean allowAccess = checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.VIEW_APP, application);
  if (!allowAccess) {
    // Typo fixes in user-facing message: "privilage"/"aplication" ->
    // "privilege"/"application".
    throw new YarnException("User " + callerUGI.getShortUserName()
        + " does not have privilege to see this application " + appId);
  }
  RMAppAttempt appAttempt = application.getAppAttempts().get(appAttemptId);
  if (appAttempt == null) {
    throw new ApplicationAttemptNotFoundException(
        "ApplicationAttempt with id '" + appAttemptId +
        "' doesn't exist in RM.");
  }
  Collection<RMContainer> rmContainers = Collections.emptyList();
  SchedulerAppReport schedulerAppReport =
      this.rmContext.getScheduler().getSchedulerAppInfo(appAttemptId);
  if (schedulerAppReport != null) {
    rmContainers = schedulerAppReport.getLiveContainers();
  }
  List<ContainerReport> listContainers = new ArrayList<ContainerReport>();
  for (RMContainer rmContainer : rmContainers) {
    listContainers.add(rmContainer.createContainerReport());
  }
  return GetContainersResponse.newInstance(listContainers);
}
/**
 * Accepts a new application submission: applies RM-side defaults, then
 * hands the submission context to RMAppManager. Idempotent for an
 * applicationId that was already submitted.
 */
@Override
public SubmitApplicationResponse submitApplication(
    SubmitApplicationRequest request) throws YarnException {
  ApplicationSubmissionContext submissionContext = request
      .getApplicationSubmissionContext();
  ApplicationId applicationId = submissionContext.getApplicationId();
  // ApplicationSubmissionContext needs to be validated for safety - only
  // those fields that are independent of the RM's configuration will be
  // checked here, those that are dependent on RM configuration are validated
  // in RMAppManager.
  String user = null;
  try {
    // Safety
    user = UserGroupInformation.getCurrentUser().getShortUserName();
  } catch (IOException ie) {
    // user is still null here, so the audit entry records a null user.
    LOG.warn("Unable to get the current user.", ie);
    RMAuditLogger.logFailure(user, AuditConstants.SUBMIT_APP_REQUEST,
        ie.getMessage(), "ClientRMService",
        "Exception in submitting application", applicationId);
    throw RPCUtil.getRemoteException(ie);
  }
  // Check whether app has already been put into rmContext,
  // If it is, simply return the response
  if (rmContext.getRMApps().get(applicationId) != null) {
    LOG.info("This is an earlier submitted application: " + applicationId);
    return SubmitApplicationResponse.newInstance();
  }
  // Fill RM-side defaults so downstream code can rely on non-null values.
  if (submissionContext.getQueue() == null) {
    submissionContext.setQueue(YarnConfiguration.DEFAULT_QUEUE_NAME);
  }
  if (submissionContext.getApplicationName() == null) {
    submissionContext.setApplicationName(
        YarnConfiguration.DEFAULT_APPLICATION_NAME);
  }
  if (submissionContext.getApplicationType() == null) {
    submissionContext
        .setApplicationType(YarnConfiguration.DEFAULT_APPLICATION_TYPE);
  } else {
    // Over-long application types are silently truncated, not rejected.
    if (submissionContext.getApplicationType().length() > YarnConfiguration.APPLICATION_TYPE_LENGTH) {
      submissionContext.setApplicationType(submissionContext
          .getApplicationType().substring(0,
              YarnConfiguration.APPLICATION_TYPE_LENGTH));
    }
  }
  try {
    // call RMAppManager to submit application directly
    rmAppManager.submitApplication(submissionContext,
        System.currentTimeMillis(), user);
    LOG.info("Application with id " + applicationId.getId() +
        " submitted by user " + user);
    RMAuditLogger.logSuccess(user, AuditConstants.SUBMIT_APP_REQUEST,
        "ClientRMService", applicationId);
  } catch (YarnException e) {
    LOG.info("Exception in submitting application with id " +
        applicationId.getId(), e);
    RMAuditLogger.logFailure(user, AuditConstants.SUBMIT_APP_REQUEST,
        e.getMessage(), "ClientRMService",
        "Exception in submitting application", applicationId);
    throw e;
  }
  SubmitApplicationResponse response = recordFactory
      .newRecordInstance(SubmitApplicationResponse.class);
  return response;
}
/**
 * Kills an application on behalf of the caller. Requires MODIFY_APP
 * access. The kill itself is asynchronous: a KILL event is dispatched
 * and the response only reports completion for unmanaged AMs or apps
 * whose final state is already stored.
 */
@SuppressWarnings("unchecked")
@Override
public KillApplicationResponse forceKillApplication(
    KillApplicationRequest request) throws YarnException {
  ApplicationId applicationId = request.getApplicationId();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    RMAuditLogger.logFailure("UNKNOWN", AuditConstants.KILL_APP_REQUEST,
        "UNKNOWN", "ClientRMService" , "Error getting UGI",
        applicationId);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps().get(applicationId);
  if (application == null) {
    RMAuditLogger.logFailure(callerUGI.getUserName(),
        AuditConstants.KILL_APP_REQUEST, "UNKNOWN", "ClientRMService",
        "Trying to kill an absent application", applicationId);
    throw new ApplicationNotFoundException("Trying to kill an absent"
        + " application " + applicationId);
  }
  if (!checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.MODIFY_APP, application)) {
    RMAuditLogger.logFailure(callerUGI.getShortUserName(),
        AuditConstants.KILL_APP_REQUEST,
        "User doesn't have permissions to "
            + ApplicationAccessType.MODIFY_APP.toString(), "ClientRMService",
        AuditConstants.UNAUTHORIZED_USER, applicationId);
    throw RPCUtil.getRemoteException(new AccessControlException("User "
        + callerUGI.getShortUserName() + " cannot perform operation "
        + ApplicationAccessType.MODIFY_APP.name() + " on " + applicationId));
  }
  // Already at a stored final state: treat the kill as complete.
  if (application.isAppFinalStateStored()) {
    RMAuditLogger.logSuccess(callerUGI.getShortUserName(),
        AuditConstants.KILL_APP_REQUEST, "ClientRMService", applicationId);
    return KillApplicationResponse.newInstance(true);
  }
  // Fire-and-forget: the dispatcher drives the app through KILL.
  this.rmContext.getDispatcher().getEventHandler()
      .handle(new RMAppEvent(applicationId, RMAppEventType.KILL));
  // For UnmanagedAMs, return true so they don't retry
  return KillApplicationResponse.newInstance(
      application.getApplicationSubmissionContext().getUnmanagedAM());
}
@Override
public GetClusterMetricsResponse getClusterMetrics(
    GetClusterMetricsRequest request) throws YarnException {
  // Assemble a snapshot of node-manager counts from the metrics singleton
  // plus the RM's live node map.
  ClusterMetrics metrics = ClusterMetrics.getMetrics();
  YarnClusterMetrics clusterSnapshot = recordFactory
      .newRecordInstance(YarnClusterMetrics.class);
  clusterSnapshot.setNumNodeManagers(this.rmContext.getRMNodes().size());
  clusterSnapshot.setNumDecommissionedNodeManagers(
      metrics.getNumDecommisionedNMs());
  clusterSnapshot.setNumActiveNodeManagers(metrics.getNumActiveNMs());
  clusterSnapshot.setNumLostNodeManagers(metrics.getNumLostNMs());
  clusterSnapshot.setNumUnhealthyNodeManagers(metrics.getUnhealthyNMs());
  clusterSnapshot.setNumRebootedNodeManagers(metrics.getNumRebootedNMs());
  GetClusterMetricsResponse response = recordFactory
      .newRecordInstance(GetClusterMetricsResponse.class);
  response.setClusterMetrics(clusterSnapshot);
  return response;
}
/**
 * Protocol entry point: delegates to the two-argument overload with
 * case-sensitive applicationType matching.
 */
@Override
public GetApplicationsResponse getApplications(
    GetApplicationsRequest request) throws YarnException {
  return getApplications(request, true);
}
/**
 * Get applications matching the {@link GetApplicationsRequest}. If
 * caseSensitive is set to false, applicationTypes in
 * GetApplicationRequest are expected to be in all-lowercase
 */
@Private
public GetApplicationsResponse getApplications(
    GetApplicationsRequest request, boolean caseSensitive)
    throws YarnException {
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    throw RPCUtil.getRemoteException(ie);
  }
  // Filter criteria; each may be null/empty, meaning "no filter on this
  // dimension".
  Set<String> applicationTypes = request.getApplicationTypes();
  EnumSet<YarnApplicationState> applicationStates =
      request.getApplicationStates();
  Set<String> users = request.getUsers();
  Set<String> queues = request.getQueues();
  Set<String> tags = request.getApplicationTags();
  long limit = request.getLimit();
  LongRange start = request.getStartRange();
  LongRange finish = request.getFinishRange();
  ApplicationsRequestScope scope = request.getScope();
  final Map<ApplicationId, RMApp> apps = rmContext.getRMApps();
  Iterator<RMApp> appsIter;
  // If the query filters by queues, we can avoid considering apps outside
  // of those queues by asking the scheduler for the apps in those queues.
  if (queues != null && !queues.isEmpty()) {
    // Construct an iterator over apps in given queues
    // Collect list of lists to avoid copying all apps
    final List<List<ApplicationAttemptId>> queueAppLists =
        new ArrayList<List<ApplicationAttemptId>>();
    for (String queue : queues) {
      List<ApplicationAttemptId> appsInQueue = scheduler.getAppsInQueue(queue);
      if (appsInQueue != null && !appsInQueue.isEmpty()) {
        queueAppLists.add(appsInQueue);
      }
    }
    // Lazily flattens queueAppLists into a single iterator of RMApps.
    appsIter = new Iterator<RMApp>() {
      Iterator<List<ApplicationAttemptId>> appListIter = queueAppLists.iterator();
      Iterator<ApplicationAttemptId> schedAppsIter;
      @Override
      public boolean hasNext() {
        // Because queueAppLists has no empty lists, hasNext is whether the
        // current list hasNext or whether there are any remaining lists
        return (schedAppsIter != null && schedAppsIter.hasNext())
            || appListIter.hasNext();
      }
      @Override
      public RMApp next() {
        // Advance to the next per-queue list when the current one is drained.
        if (schedAppsIter == null || !schedAppsIter.hasNext()) {
          schedAppsIter = appListIter.next().iterator();
        }
        return apps.get(schedAppsIter.next().getApplicationId());
      }
      @Override
      public void remove() {
        throw new UnsupportedOperationException("Remove not supported");
      }
    };
  } else {
    appsIter = apps.values().iterator();
  }
  List<ApplicationReport> reports = new ArrayList<ApplicationReport>();
  // Apply the cheap filters first and stop as soon as "limit" reports
  // have been collected.
  while (appsIter.hasNext() && reports.size() < limit) {
    RMApp application = appsIter.next();
    // Check if current application falls under the specified scope
    if (scope == ApplicationsRequestScope.OWN &&
        !callerUGI.getUserName().equals(application.getUser())) {
      continue;
    }
    if (applicationTypes != null && !applicationTypes.isEmpty()) {
      String appTypeToMatch = caseSensitive
          ? application.getApplicationType()
          : StringUtils.toLowerCase(application.getApplicationType());
      if (!applicationTypes.contains(appTypeToMatch)) {
        continue;
      }
    }
    if (applicationStates != null && !applicationStates.isEmpty()) {
      if (!applicationStates.contains(application
          .createApplicationState())) {
        continue;
      }
    }
    if (users != null && !users.isEmpty() &&
        !users.contains(application.getUser())) {
      continue;
    }
    if (start != null && !start.containsLong(application.getStartTime())) {
      continue;
    }
    if (finish != null && !finish.containsLong(application.getFinishTime())) {
      continue;
    }
    if (tags != null && !tags.isEmpty()) {
      // Tag filter passes when the app carries ANY of the requested tags.
      Set<String> appTags = application.getApplicationTags();
      if (appTags == null || appTags.isEmpty()) {
        continue;
      }
      boolean match = false;
      for (String tag : tags) {
        if (appTags.contains(tag)) {
          match = true;
          break;
        }
      }
      if (!match) {
        continue;
      }
    }
    // checkAccess can grab the scheduler lock so call it last
    boolean allowAccess = checkAccess(callerUGI, application.getUser(),
        ApplicationAccessType.VIEW_APP, application);
    if (scope == ApplicationsRequestScope.VIEWABLE && !allowAccess) {
      continue;
    }
    reports.add(application.createAndGetApplicationReport(
        callerUGI.getUserName(), allowAccess));
  }
  GetApplicationsResponse response =
      recordFactory.newRecordInstance(GetApplicationsResponse.class);
  response.setApplicationList(reports);
  return response;
}
@Override
public GetClusterNodesResponse getClusterNodes(GetClusterNodesRequest request)
    throws YarnException {
  // An absent or empty state filter means "all node states".
  EnumSet<NodeState> stateFilter = request.getNodeStates();
  if (stateFilter == null || stateFilter.isEmpty()) {
    stateFilter = EnumSet.allOf(NodeState.class);
  }
  Collection<RMNode> matchingNodes =
      RMServerUtils.queryRMNodes(rmContext, stateFilter);
  // Build one report per matching node.
  List<NodeReport> reports = new ArrayList<NodeReport>(matchingNodes.size());
  for (RMNode node : matchingNodes) {
    reports.add(createNodeReports(node));
  }
  GetClusterNodesResponse response =
      recordFactory.newRecordInstance(GetClusterNodesResponse.class);
  response.setNodeReports(reports);
  return response;
}
/**
 * Returns scheduler queue information, optionally including the reports of
 * applications in the queue that the caller may view. Best-effort: on an
 * IOException from the scheduler the error is logged and a response with
 * no queue info is returned instead of failing the RPC.
 */
@Override
public GetQueueInfoResponse getQueueInfo(GetQueueInfoRequest request)
    throws YarnException {
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    throw RPCUtil.getRemoteException(ie);
  }
  GetQueueInfoResponse response =
      recordFactory.newRecordInstance(GetQueueInfoResponse.class);
  try {
    QueueInfo queueInfo =
        scheduler.getQueueInfo(request.getQueueName(),
            request.getIncludeChildQueues(),
            request.getRecursive());
    List<ApplicationReport> appReports = EMPTY_APPS_REPORT;
    if (request.getIncludeApplications()) {
      List<ApplicationAttemptId> apps =
          scheduler.getAppsInQueue(request.getQueueName());
      appReports = new ArrayList<ApplicationReport>(apps.size());
      for (ApplicationAttemptId app : apps) {
        RMApp rmApp = rmContext.getRMApps().get(app.getApplicationId());
        if (rmApp != null) {
          // Check if user is allowed access to this app
          if (!checkAccess(callerUGI, rmApp.getUser(),
              ApplicationAccessType.VIEW_APP, rmApp)) {
            continue;
          }
          appReports.add(
              rmApp.createAndGetApplicationReport(
                  callerUGI.getUserName(), true));
        }
      }
    }
    queueInfo.setApplications(appReports);
    response.setQueueInfo(queueInfo);
  } catch (IOException ioe) {
    // Deliberately swallowed: the caller receives a response without
    // queue info rather than a remote exception.
    LOG.info("Failed to getQueueInfo for " + request.getQueueName(), ioe);
  }
  return response;
}
// Builds a NodeReport for one node. Usage numbers come from the scheduler;
// a node the scheduler doesn't know about reports zero usage.
private NodeReport createNodeReports(RMNode rmNode) {
  Resource usedResource = BuilderUtils.newResource(0, 0);
  int containerCount = 0;
  SchedulerNodeReport schedulerReport =
      scheduler.getNodeReport(rmNode.getNodeID());
  if (schedulerReport != null) {
    usedResource = schedulerReport.getUsedResource();
    containerCount = schedulerReport.getNumContainers();
  }
  return BuilderUtils.newNodeReport(rmNode.getNodeID(), rmNode.getState(),
      rmNode.getHttpAddress(), rmNode.getRackName(), usedResource,
      rmNode.getTotalCapability(), containerCount,
      rmNode.getHealthReport(), rmNode.getLastHealthReportTime(),
      rmNode.getNodeLabels());
}
@Override
public GetQueueUserAclsInfoResponse getQueueUserAcls(
    GetQueueUserAclsInfoRequest request) throws YarnException {
  // Straight delegation to the scheduler's per-user queue ACL view.
  GetQueueUserAclsInfoResponse aclResponse =
      recordFactory.newRecordInstance(GetQueueUserAclsInfoResponse.class);
  aclResponse.setUserAclsInfoList(scheduler.getQueueUserAclInfo());
  return aclResponse;
}
/**
 * Issues a new RM delegation token for the requested renewer. Requires a
 * kerberos-authenticated connection; the token owner is the caller, with
 * the real user recorded when the caller is proxied.
 */
@Override
public GetDelegationTokenResponse getDelegationToken(
    GetDelegationTokenRequest request) throws YarnException {
  try {
    // Verify that the connection is kerberos authenticated
    if (!isAllowedDelegationTokenOp()) {
      throw new IOException(
          "Delegation Token can be issued only with kerberos authentication");
    }
    GetDelegationTokenResponse response =
        recordFactory.newRecordInstance(GetDelegationTokenResponse.class);
    UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
    Text owner = new Text(ugi.getUserName());
    Text realUser = null;
    if (ugi.getRealUser() != null) {
      realUser = new Text(ugi.getRealUser().getUserName());
    }
    RMDelegationTokenIdentifier tokenIdentifier =
        new RMDelegationTokenIdentifier(owner, new Text(request.getRenewer()),
            realUser);
    Token<RMDelegationTokenIdentifier> realRMDTtoken =
        new Token<RMDelegationTokenIdentifier>(tokenIdentifier,
            this.rmDTSecretManager);
    // Wire format: identifier/kind/password/service as a protocol token.
    response.setRMDelegationToken(
        BuilderUtils.newDelegationToken(
            realRMDTtoken.getIdentifier(),
            realRMDTtoken.getKind().toString(),
            realRMDTtoken.getPassword(),
            realRMDTtoken.getService().toString()
            ));
    return response;
  } catch(IOException io) {
    throw RPCUtil.getRemoteException(io);
  }
}
@Override
public RenewDelegationTokenResponse renewDelegationToken(
    RenewDelegationTokenRequest request) throws YarnException {
  try {
    // Renewal is only permitted over a kerberos-authenticated connection.
    if (!isAllowedDelegationTokenOp()) {
      throw new IOException(
          "Delegation Token can be renewed only with kerberos authentication");
    }
    // Rebuild a typed token from the protocol record.
    org.apache.hadoop.yarn.api.records.Token tokenRecord =
        request.getDelegationToken();
    Token<RMDelegationTokenIdentifier> dtoken =
        new Token<RMDelegationTokenIdentifier>(
            tokenRecord.getIdentifier().array(),
            tokenRecord.getPassword().array(),
            new Text(tokenRecord.getKind()),
            new Text(tokenRecord.getService()));
    String renewer = getRenewerForToken(dtoken);
    long expirationTime = rmDTSecretManager.renewToken(dtoken, renewer);
    RenewDelegationTokenResponse response =
        Records.newRecord(RenewDelegationTokenResponse.class);
    response.setNextExpirationTime(expirationTime);
    return response;
  } catch (IOException e) {
    throw RPCUtil.getRemoteException(e);
  }
}
@Override
public CancelDelegationTokenResponse cancelDelegationToken(
    CancelDelegationTokenRequest request) throws YarnException {
  try {
    // Cancellation is only permitted over a kerberos-authenticated
    // connection.
    if (!isAllowedDelegationTokenOp()) {
      throw new IOException(
          "Delegation Token can be cancelled only with kerberos authentication");
    }
    // Rebuild a typed token from the protocol record.
    org.apache.hadoop.yarn.api.records.Token tokenRecord =
        request.getDelegationToken();
    Token<RMDelegationTokenIdentifier> dtoken =
        new Token<RMDelegationTokenIdentifier>(
            tokenRecord.getIdentifier().array(),
            tokenRecord.getPassword().array(),
            new Text(tokenRecord.getKind()),
            new Text(tokenRecord.getService()));
    String canceller = UserGroupInformation.getCurrentUser().getUserName();
    rmDTSecretManager.cancelToken(dtoken, canceller);
    return Records.newRecord(CancelDelegationTokenResponse.class);
  } catch (IOException e) {
    throw RPCUtil.getRemoteException(e);
  }
}
/**
 * Moves a running application to another queue. Requires MODIFY_APP
 * access; only allowed while the app is in a scheduler-tracked state.
 * Blocks until the scheduler acknowledges (or rejects) the move.
 */
@SuppressWarnings("unchecked")
@Override
public MoveApplicationAcrossQueuesResponse moveApplicationAcrossQueues(
    MoveApplicationAcrossQueuesRequest request) throws YarnException {
  ApplicationId applicationId = request.getApplicationId();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    RMAuditLogger.logFailure("UNKNOWN", AuditConstants.MOVE_APP_REQUEST,
        "UNKNOWN", "ClientRMService" , "Error getting UGI",
        applicationId);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps().get(applicationId);
  if (application == null) {
    RMAuditLogger.logFailure(callerUGI.getUserName(),
        AuditConstants.MOVE_APP_REQUEST, "UNKNOWN", "ClientRMService",
        "Trying to move an absent application", applicationId);
    throw new ApplicationNotFoundException("Trying to move an absent"
        + " application " + applicationId);
  }
  if (!checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.MODIFY_APP, application)) {
    RMAuditLogger.logFailure(callerUGI.getShortUserName(),
        AuditConstants.MOVE_APP_REQUEST,
        "User doesn't have permissions to "
            + ApplicationAccessType.MODIFY_APP.toString(), "ClientRMService",
        AuditConstants.UNAUTHORIZED_USER, applicationId);
    throw RPCUtil.getRemoteException(new AccessControlException("User "
        + callerUGI.getShortUserName() + " cannot perform operation "
        + ApplicationAccessType.MODIFY_APP.name() + " on " + applicationId));
  }
  // Moves only allowed when app is in a state that means it is tracked by
  // the scheduler. (Removed a duplicate FAILED entry from this set.)
  if (EnumSet.of(RMAppState.NEW, RMAppState.NEW_SAVING, RMAppState.FAILED,
      RMAppState.FINAL_SAVING, RMAppState.FINISHING, RMAppState.FINISHED,
      RMAppState.KILLED, RMAppState.KILLING)
      .contains(application.getState())) {
    String msg = "App in " + application.getState() + " state cannot be moved.";
    RMAuditLogger.logFailure(callerUGI.getShortUserName(),
        AuditConstants.MOVE_APP_REQUEST, "UNKNOWN", "ClientRMService", msg);
    throw new YarnException(msg);
  }
  // Dispatch the move and wait for the scheduler's verdict via the future.
  SettableFuture<Object> future = SettableFuture.create();
  this.rmContext.getDispatcher().getEventHandler().handle(
      new RMAppMoveEvent(applicationId, request.getTargetQueue(), future));
  try {
    Futures.get(future, YarnException.class);
  } catch (YarnException ex) {
    RMAuditLogger.logFailure(callerUGI.getShortUserName(),
        AuditConstants.MOVE_APP_REQUEST, "UNKNOWN", "ClientRMService",
        ex.getMessage());
    throw ex;
  }
  RMAuditLogger.logSuccess(callerUGI.getShortUserName(),
      AuditConstants.MOVE_APP_REQUEST, "ClientRMService" , applicationId);
  MoveApplicationAcrossQueuesResponse response = recordFactory
      .newRecordInstance(MoveApplicationAcrossQueuesResponse.class);
  return response;
}
// Determines who the renewal should be attributed to. The RM's own login
// user may renew any token, so in that case the renewer recorded inside
// the token identifier is used; any other caller renews as themselves.
private String getRenewerForToken(Token<RMDelegationTokenIdentifier> token)
    throws IOException {
  UserGroupInformation currentUser = UserGroupInformation.getCurrentUser();
  UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
  if (loginUser.getUserName().equals(currentUser.getUserName())) {
    // we can always renew our own tokens
    return token.decodeIdentifier().getRenewer().toString();
  }
  return currentUser.getShortUserName();
}
/**
 * Reloads service-level ACLs on the RPC server from the given, already
 * loaded configuration. Package-private; invoked on admin refresh.
 */
void refreshServiceAcls(Configuration configuration,
    PolicyProvider policyProvider) {
  this.server.refreshServiceAclWithLoadedConfiguration(configuration,
      policyProvider);
}
// Token operations are open to everyone when security is off; with
// security on, only kerberos-family or certificate authentication on the
// current connection qualifies.
private boolean isAllowedDelegationTokenOp() throws IOException {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return true;
  }
  AuthenticationMethod method = UserGroupInformation.getCurrentUser()
      .getRealAuthenticationMethod();
  return method == AuthenticationMethod.KERBEROS
      || method == AuthenticationMethod.KERBEROS_SSL
      || method == AuthenticationMethod.CERTIFICATE;
}
/** Exposes the underlying RPC server; intended for tests only. */
@VisibleForTesting
public Server getServer() {
  return this.server;
}
/**
 * Creates a new reservation: validates the request, checks queue ACLs,
 * asks the plan's reservation agent to place it, and registers the new
 * reservation id with the reservation system on success.
 */
@Override
public ReservationSubmissionResponse submitReservation(
    ReservationSubmissionRequest request) throws YarnException, IOException {
  // Check if reservation system is enabled
  checkReservationSytem(AuditConstants.SUBMIT_RESERVATION_REQUEST);
  ReservationSubmissionResponse response =
      recordFactory.newRecordInstance(ReservationSubmissionResponse.class);
  // Create a new Reservation Id
  ReservationId reservationId = reservationSystem.getNewReservationId();
  // Validate the input
  Plan plan =
      rValidator.validateReservationSubmissionRequest(reservationSystem,
          request, reservationId);
  // Check ACLs
  String queueName = request.getQueue();
  String user =
      checkReservationACLs(queueName,
          AuditConstants.SUBMIT_RESERVATION_REQUEST);
  try {
    // Try to place the reservation using the agent
    boolean result =
        plan.getReservationAgent().createReservation(reservationId, user,
            plan, request.getReservationDefinition());
    if (result) {
      // add the reservation id to valid ones maintained by reservation
      // system
      reservationSystem.setQueueForReservation(reservationId, queueName);
      // create the reservation synchronously if required
      refreshScheduler(queueName, request.getReservationDefinition(),
          reservationId.toString());
      // return the reservation id
      response.setReservationId(reservationId);
    }
    // NOTE(review): when the agent returns false the response is returned
    // without a reservation id and no failure is logged — confirm intended.
  } catch (PlanningException e) {
    RMAuditLogger.logFailure(user, AuditConstants.SUBMIT_RESERVATION_REQUEST,
        e.getMessage(), "ClientRMService",
        "Unable to create the reservation: " + reservationId);
    throw RPCUtil.getRemoteException(e);
  }
  RMAuditLogger.logSuccess(user, AuditConstants.SUBMIT_RESERVATION_REQUEST,
      "ClientRMService: " + reservationId);
  return response;
}
/**
 * Updates an existing reservation's definition: validates the request,
 * checks ACLs on the reservation's queue, and delegates to the plan's
 * reservation agent.
 */
@Override
public ReservationUpdateResponse updateReservation(
    ReservationUpdateRequest request) throws YarnException, IOException {
  // Check if reservation system is enabled
  checkReservationSytem(AuditConstants.UPDATE_RESERVATION_REQUEST);
  ReservationUpdateResponse response =
      recordFactory.newRecordInstance(ReservationUpdateResponse.class);
  // Validate the input
  Plan plan =
      rValidator.validateReservationUpdateRequest(reservationSystem, request);
  ReservationId reservationId = request.getReservationId();
  String queueName = reservationSystem.getQueueForReservation(reservationId);
  // Check ACLs
  String user =
      checkReservationACLs(queueName,
          AuditConstants.UPDATE_RESERVATION_REQUEST);
  // Try to update the reservation using default agent
  try {
    boolean result =
        plan.getReservationAgent().updateReservation(reservationId, user,
            plan, request.getReservationDefinition());
    if (!result) {
      // Agent declined the update: audit and surface a remote exception.
      String errMsg = "Unable to update reservation: " + reservationId;
      RMAuditLogger.logFailure(user,
          AuditConstants.UPDATE_RESERVATION_REQUEST, errMsg,
          "ClientRMService", errMsg);
      throw RPCUtil.getRemoteException(errMsg);
    }
  } catch (PlanningException e) {
    RMAuditLogger.logFailure(user, AuditConstants.UPDATE_RESERVATION_REQUEST,
        e.getMessage(), "ClientRMService",
        "Unable to update the reservation: " + reservationId);
    throw RPCUtil.getRemoteException(e);
  }
  RMAuditLogger.logSuccess(user, AuditConstants.UPDATE_RESERVATION_REQUEST,
      "ClientRMService: " + reservationId);
  return response;
}
/**
 * Deletes an existing reservation: validates the request, checks ACLs on
 * the reservation's queue, and delegates to the plan's reservation agent.
 */
@Override
public ReservationDeleteResponse deleteReservation(
    ReservationDeleteRequest request) throws YarnException, IOException {
  // Check if reservation system is enabled
  checkReservationSytem(AuditConstants.DELETE_RESERVATION_REQUEST);
  ReservationDeleteResponse response =
      recordFactory.newRecordInstance(ReservationDeleteResponse.class);
  // Validate the input
  Plan plan =
      rValidator.validateReservationDeleteRequest(reservationSystem, request);
  ReservationId reservationId = request.getReservationId();
  String queueName = reservationSystem.getQueueForReservation(reservationId);
  // Check ACLs
  String user =
      checkReservationACLs(queueName,
          AuditConstants.DELETE_RESERVATION_REQUEST);
  // Try to delete the reservation using the default agent
  try {
    boolean result =
        plan.getReservationAgent().deleteReservation(reservationId, user,
            plan);
    if (!result) {
      // Agent declined the delete: audit and surface a remote exception.
      String errMsg = "Could not delete reservation: " + reservationId;
      RMAuditLogger.logFailure(user,
          AuditConstants.DELETE_RESERVATION_REQUEST, errMsg,
          "ClientRMService", errMsg);
      throw RPCUtil.getRemoteException(errMsg);
    }
  } catch (PlanningException e) {
    RMAuditLogger.logFailure(user, AuditConstants.DELETE_RESERVATION_REQUEST,
        e.getMessage(), "ClientRMService",
        "Unable to delete the reservation: " + reservationId);
    throw RPCUtil.getRemoteException(e);
  }
  RMAuditLogger.logSuccess(user, AuditConstants.DELETE_RESERVATION_REQUEST,
      "ClientRMService: " + reservationId);
  return response;
}
@Override
public GetNodesToLabelsResponse getNodeToLabels(
    GetNodesToLabelsRequest request) throws YarnException, IOException {
  // Straight passthrough of the label manager's node-to-labels mapping.
  return GetNodesToLabelsResponse.newInstance(
      rmContext.getNodeLabelManager().getNodeLabelsInfo());
}
@Override
public GetLabelsToNodesResponse getLabelsToNodes(
    GetLabelsToNodesRequest request) throws YarnException, IOException {
  RMNodeLabelsManager labelManager = rmContext.getNodeLabelManager();
  // An absent or empty label filter means "report all labels".
  boolean unfiltered = request.getNodeLabels() == null
      || request.getNodeLabels().isEmpty();
  return GetLabelsToNodesResponse.newInstance(unfiltered
      ? labelManager.getLabelsInfoToNodes()
      : labelManager.getLabelsInfoToNodes(request.getNodeLabels()));
}
@Override
public GetClusterNodeLabelsResponse getClusterNodeLabels(
    GetClusterNodeLabelsRequest request) throws YarnException, IOException {
  // Report the full set of labels currently known to the cluster.
  return GetClusterNodeLabelsResponse.newInstance(
      rmContext.getNodeLabelManager().getClusterNodeLabels());
}
/**
 * Fails the RPC when the reservation system is disabled.
 *
 * NOTE(review): the method name is misspelled ("Sytem") but it is called
 * by every reservation RPC in this class, so renaming must be done in one
 * coordinated change. The auditConstant parameter is currently unused.
 */
private void checkReservationSytem(String auditConstant) throws YarnException {
  // Check if reservation is enabled
  if (reservationSystem == null) {
    throw RPCUtil.getRemoteException("Reservation is not enabled."
        + " Please enable & try again");
  }
}
/**
 * Synchronizes the plan immediately when the reservation's arrival is
 * closer than one plan-follower step; otherwise the periodic follower
 * will pick it up on its next run.
 */
private void refreshScheduler(String planName,
    ReservationDefinition contract, String reservationId) {
  if ((contract.getArrival() - clock.getTime()) < reservationSystem
      .getPlanFollowerTimeStep()) {
    // Guard the debug message so MessageFormat.format only runs when
    // debug logging is actually enabled.
    if (LOG.isDebugEnabled()) {
      LOG.debug(MessageFormat
          .format(
              "Reservation {0} is within threshold so attempting to create synchronously.",
              reservationId));
    }
    reservationSystem.synchronizePlan(planName);
    LOG.info(MessageFormat.format("Created reservation {0} synchronously.",
        reservationId));
  }
}
/**
 * Verifies the caller may submit applications (and hence reservations) to
 * the given queue; returns the caller's short user name on success.
 *
 * @throws YarnException wrapping an AccessControlException on denial
 */
private String checkReservationACLs(String queueName, String auditConstant)
    throws YarnException {
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    RMAuditLogger.logFailure("UNKNOWN", auditConstant, queueName,
        "ClientRMService", "Error getting UGI");
    throw RPCUtil.getRemoteException(ie);
  }
  // Check if user has access on the managed queue
  if (!queueACLsManager.checkAccess(callerUGI, QueueACL.SUBMIT_APPLICATIONS,
      queueName)) {
    RMAuditLogger.logFailure(
        callerUGI.getShortUserName(),
        auditConstant,
        "User doesn't have permissions to "
            + QueueACL.SUBMIT_APPLICATIONS.toString(), "ClientRMService",
        AuditConstants.UNAUTHORIZED_USER);
    // Fixed message: added the missing space after "queue" so the queue
    // name is no longer fused to the word (was "on queue" + queueName).
    throw RPCUtil.getRemoteException(new AccessControlException("User "
        + callerUGI.getShortUserName() + " cannot perform operation "
        + QueueACL.SUBMIT_APPLICATIONS.name() + " on queue " + queueName));
  }
  return callerUGI.getShortUserName();
}
/**
 * Updates the priority of a running application. Requires MODIFY_APP
 * access; only allowed while the app is ACCEPTED or RUNNING (i.e. tracked
 * by the scheduler).
 */
@Override
public UpdateApplicationPriorityResponse updateApplicationPriority(
    UpdateApplicationPriorityRequest request) throws YarnException,
    IOException {
  ApplicationId applicationId = request.getApplicationId();
  Priority newAppPriority = request.getApplicationPriority();
  UserGroupInformation callerUGI;
  try {
    callerUGI = UserGroupInformation.getCurrentUser();
  } catch (IOException ie) {
    LOG.info("Error getting UGI ", ie);
    RMAuditLogger.logFailure("UNKNOWN", AuditConstants.UPDATE_APP_PRIORITY,
        "UNKNOWN", "ClientRMService", "Error getting UGI", applicationId);
    throw RPCUtil.getRemoteException(ie);
  }
  RMApp application = this.rmContext.getRMApps().get(applicationId);
  if (application == null) {
    RMAuditLogger.logFailure(callerUGI.getUserName(),
        AuditConstants.UPDATE_APP_PRIORITY, "UNKNOWN", "ClientRMService",
        "Trying to update priority of an absent application", applicationId);
    // Fixed typo in the message (was "priority o an absent").
    throw new ApplicationNotFoundException(
        "Trying to update priority of an absent application " + applicationId);
  }
  if (!checkAccess(callerUGI, application.getUser(),
      ApplicationAccessType.MODIFY_APP, application)) {
    RMAuditLogger.logFailure(callerUGI.getShortUserName(),
        AuditConstants.UPDATE_APP_PRIORITY,
        "User doesn't have permissions to "
            + ApplicationAccessType.MODIFY_APP.toString(), "ClientRMService",
        AuditConstants.UNAUTHORIZED_USER, applicationId);
    throw RPCUtil.getRemoteException(new AccessControlException("User "
        + callerUGI.getShortUserName() + " cannot perform operation "
        + ApplicationAccessType.MODIFY_APP.name() + " on " + applicationId));
  }
  // Update priority only when app is tracked by the scheduler
  if (!EnumSet.of(RMAppState.ACCEPTED, RMAppState.RUNNING).contains(
      application.getState())) {
    // Fixed grammar in the message (was "cannot be update priority").
    String msg =
        "Application in " + application.getState()
            + " state cannot have its priority updated.";
    RMAuditLogger
        .logFailure(callerUGI.getShortUserName(),
            AuditConstants.UPDATE_APP_PRIORITY, "UNKNOWN", "ClientRMService",
            msg);
    throw new YarnException(msg);
  }
  try {
    rmContext.getScheduler().updateApplicationPriority(newAppPriority,
        applicationId);
  } catch (YarnException ex) {
    RMAuditLogger.logFailure(callerUGI.getShortUserName(),
        AuditConstants.UPDATE_APP_PRIORITY, "UNKNOWN", "ClientRMService",
        ex.getMessage());
    throw ex;
  }
  RMAuditLogger.logSuccess(callerUGI.getShortUserName(),
      AuditConstants.UPDATE_APP_PRIORITY, "ClientRMService", applicationId);
  UpdateApplicationPriorityResponse response =
      recordFactory
          .newRecordInstance(UpdateApplicationPriorityResponse.class);
  return response;
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.execution.testframework.sm.runner;
import com.intellij.execution.process.ProcessOutputTypes;
import com.intellij.execution.testframework.Printer;
import com.intellij.execution.testframework.sm.SMTestRunnerConnectionUtil;
import com.intellij.execution.testframework.sm.runner.events.*;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.util.containers.ContainerUtil;
import gnu.trove.TIntObjectHashMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
import java.util.Set;
/**
 * Converts a stream of id-based test-runner events (every node carries an integer id and refers
 * to its parent by id) into updates of the {@link SMTestProxy} tree plus listener notifications
 * (the {@code fireOn*} calls inherited from {@link GeneralTestEventsProcessor}).
 *
 * All public event methods schedule their work through {@code addToInvokeLater(...)}, so the
 * mutable state of this class is only touched from that processing context.
 */
public class GeneralIdBasedToSMTRunnerEventsConvertor extends GeneralTestEventsProcessor {
  private static final Logger LOG = Logger.getInstance(GeneralIdBasedToSMTRunnerEventsConvertor.class.getName());

  // Every registered node keyed by its event id; the root is stored under the reserved id 0.
  private final TIntObjectHashMap<Node> myNodeByIdMap = new TIntObjectHashMap<Node>();
  // Non-suite nodes currently in RUNNING state; uncaptured output/errors are attributed to one of these.
  private final Set<Node> myRunningTestNodes = ContainerUtil.newHashSet();
  private final SMTestProxy.SMRootTestProxy myTestsRootProxy;
  private final Node myTestsRootNode;
  // Used only to prefix log messages so problems can be traced back to a test framework.
  private final String myTestFrameworkName;
  private boolean myIsTestingFinished = false;
  private SMTestLocator myLocator = null;
  private TestProxyPrinterProvider myTestProxyPrinterProvider = null;

  public GeneralIdBasedToSMTRunnerEventsConvertor(Project project,
                                                  @NotNull SMTestProxy.SMRootTestProxy testsRootProxy,
                                                  @NotNull String testFrameworkName) {
    super(project);
    myTestsRootProxy = testsRootProxy;
    // Id 0 denotes the root node; started-node events reference it as their parent id.
    myTestsRootNode = new Node(0, null, testsRootProxy);
    myTestFrameworkName = testFrameworkName;
    myNodeByIdMap.put(myTestsRootNode.getId(), myTestsRootNode);
  }

  @Override
  public void setLocator(@NotNull SMTestLocator locator) {
    myLocator = locator;
  }

  /** Marks the root node as running and notifies listeners that testing has started. */
  public void onStartTesting() {
    addToInvokeLater(new Runnable() {
      public void run() {
        myTestsRootNode.setState(State.RUNNING, GeneralIdBasedToSMTRunnerEventsConvertor.this);
        myTestsRootProxy.setStarted();
        fireOnTestingStarted(myTestsRootProxy);
      }
    });
  }

  @Override
  public void onTestsReporterAttached() {
    addToInvokeLater(new Runnable() {
      public void run() {
        fireOnTestsReporterAttached(myTestsRootProxy);
      }
    });
  }

  /**
   * Finishes the run: decides between "finished" and "terminated by user" from the state of the
   * tree, clears node bookkeeping and stops event processing. Safe to invoke more than once; only
   * the first invocation has an effect.
   */
  public void onFinishTesting() {
    addToInvokeLater(new Runnable() {
      public void run() {
        if (myIsTestingFinished) {
          // has been already invoked!
          return;
        }
        myIsTestingFinished = true;
        // We don't know whether process was destroyed by user
        // or it finished after all tests have been run
        // Lets assume, if at finish all nodes except root suite have final state (passed, failed or ignored),
        // then all is ok otherwise process was terminated by user
        boolean completeTree = isTreeComplete(myRunningTestNodes, myTestsRootProxy);
        if (completeTree) {
          myTestsRootProxy.setFinished();
        } else {
          myTestsRootProxy.setTerminated();
        }
        if (!myRunningTestNodes.isEmpty()) {
          logProblem("Unexpected running nodes: " + myRunningTestNodes);
        }
        myNodeByIdMap.clear();
        myRunningTestNodes.clear();
        fireOnTestingFinished(myTestsRootProxy);
      }
    });
    stopEventProcessing();
  }

  @Override
  public void setPrinterProvider(@NotNull TestProxyPrinterProvider printerProvider) {
    myTestProxyPrinterProvider = printerProvider;
  }

  public void onTestStarted(@NotNull final TestStartedEvent testStartedEvent) {
    addToInvokeLater(new Runnable() {
      public void run() {
        doStartNode(testStartedEvent, false);
      }
    });
  }

  public void onSuiteStarted(@NotNull final TestSuiteStartedEvent suiteStartedEvent) {
    addToInvokeLater(new Runnable() {
      public void run() {
        doStartNode(suiteStartedEvent, true);
      }
    });
  }

  /**
   * Registers (or re-activates) the node described by a started event and attaches its proxy to
   * the parent's proxy.
   *
   * @param suite whether the new node represents a suite rather than a test
   */
  private void doStartNode(@NotNull BaseStartedNodeEvent startedNodeEvent, boolean suite) {
    Node node = findNode(startedNodeEvent);
    if (node != null) {
      // A repeated start for a known id is only legal as a NOT_RUNNING -> RUNNING transition.
      if (node.getState() == State.NOT_RUNNING && startedNodeEvent.isRunning()) {
        setNodeAndAncestorsRunning(node);
      }
      else {
        logProblem(startedNodeEvent + " has been already started: " + node + "!");
      }
      return;
    }
    Node parentNode = findValidParentNode(startedNodeEvent);
    if (parentNode == null) {
      return;
    }
    if (!validateNodeId(startedNodeEvent)) {
      return;
    }
    String nodeName = startedNodeEvent.getName();
    SMTestProxy childProxy = new SMTestProxy(nodeName, suite, startedNodeEvent.getLocationUrl(), true);
    TestProxyPrinterProvider printerProvider = myTestProxyPrinterProvider;
    String nodeType = startedNodeEvent.getNodeType();
    // Attach a dedicated printer when the event advertises a node type and a provider is set.
    if (printerProvider != null && nodeType != null && nodeName != null) {
      Printer printer = printerProvider.getPrinterByType(nodeType, nodeName, startedNodeEvent.getNodeArgs());
      if (printer != null) {
        childProxy.setPreferredPrinter(printer);
      }
    }
    node = new Node(startedNodeEvent.getId(), parentNode, childProxy);
    myNodeByIdMap.put(startedNodeEvent.getId(), node);
    if (myLocator != null) {
      childProxy.setLocator(myLocator);
    }
    parentNode.getProxy().addChild(childProxy);
    if (startedNodeEvent.isRunning()) {
      setNodeAndAncestorsRunning(node);
    }
  }

  /**
   * Resolves the parent node of a started event, or returns {@code null} (after logging) when the
   * parent id is negative, unknown, or the parent is already in a terminal state.
   */
  @Nullable
  private Node findValidParentNode(@NotNull BaseStartedNodeEvent startedNodeEvent) {
    int parentId = startedNodeEvent.getParentId();
    if (parentId < 0) {
      logProblem("Parent node id should be non-negative: " + startedNodeEvent + ".", true);
      return null;
    }
    Node parentNode = myNodeByIdMap.get(startedNodeEvent.getParentId());
    if (parentNode == null) {
      logProblem("Parent node is undefined for " + startedNodeEvent + ".", true);
      return null;
    }
    if (parentNode.getState() != State.NOT_RUNNING && parentNode.getState() != State.RUNNING) {
      logProblem("Parent node should be registered or running: " + parentNode + ", " + startedNodeEvent);
      return null;
    }
    return parentNode;
  }

  public void onTestFinished(@NotNull final TestFinishedEvent testFinishedEvent) {
    addToInvokeLater(new Runnable() {
      public void run() {
        Node node = findNodeToTerminate(testFinishedEvent);
        if (node != null) {
          SMTestProxy testProxy = node.getProxy();
          testProxy.setDuration(testFinishedEvent.getDuration());
          testProxy.setFinished();
          fireOnTestFinished(testProxy);
          terminateNode(node, State.FINISHED);
        }
      }
    });
  }

  public void onSuiteFinished(@NotNull final TestSuiteFinishedEvent suiteFinishedEvent) {
    addToInvokeLater(new Runnable() {
      public void run() {
        Node node = findNodeToTerminate(suiteFinishedEvent);
        if (node != null) {
          SMTestProxy suiteProxy = node.getProxy();
          suiteProxy.setFinished();
          fireOnSuiteFinished(suiteProxy);
          terminateNode(node, State.FINISHED);
        }
      }
    });
  }

  /** Looks up the node for a finish/failure/ignore event, logging when it was never started. */
  @Nullable
  private Node findNodeToTerminate(@NotNull TreeNodeEvent treeNodeEvent) {
    Node node = findNode(treeNodeEvent);
    if (node == null) {
      logProblem("Trying to finish not existent node: " + treeNodeEvent);
      return null;
    }
    return node;
  }

  /**
   * Routes output that was not attributed to a particular test to the currently active node
   * (an arbitrary running test, or the root when nothing is running).
   */
  public void onUncapturedOutput(@NotNull final String text, final Key outputType) {
    addToInvokeLater(new Runnable() {
      public void run() {
        Node activeNode = findActiveNode();
        SMTestProxy activeProxy = activeNode.getProxy();
        if (ProcessOutputTypes.STDERR.equals(outputType)) {
          activeProxy.addStdErr(text);
        } else if (ProcessOutputTypes.SYSTEM.equals(outputType)) {
          activeProxy.addSystemOutput(text);
        } else {
          activeProxy.addStdOutput(text, outputType);
        }
      }
    });
  }

  public void onError(@NotNull final String localizedMessage,
                      @Nullable final String stackTrace,
                      final boolean isCritical) {
    addToInvokeLater(new Runnable() {
      public void run() {
        Node activeNode = findActiveNode();
        SMTestProxy activeProxy = activeNode.getProxy();
        activeProxy.addError(localizedMessage, stackTrace, isCritical);
      }
    });
  }

  /**
   * Marks a test as failed. When both expected and actual comparison texts are present the
   * failure is recorded as a comparison failure; when both are absent it is a plain failure;
   * a mixed case is reported as a protocol problem.
   */
  public void onTestFailure(@NotNull final TestFailedEvent testFailedEvent) {
    addToInvokeLater(new Runnable() {
      public void run() {
        Node node = findNodeToTerminate(testFailedEvent);
        if (node == null) {
          return;
        }
        SMTestProxy testProxy = node.getProxy();
        String comparisonFailureActualText = testFailedEvent.getComparisonFailureActualText();
        String comparisonFailureExpectedText = testFailedEvent.getComparisonFailureExpectedText();
        String failureMessage = testFailedEvent.getLocalizedFailureMessage();
        String stackTrace = testFailedEvent.getStacktrace();
        if (comparisonFailureActualText != null && comparisonFailureExpectedText != null) {
          testProxy.setTestComparisonFailed(failureMessage, stackTrace,
                                            comparisonFailureActualText, comparisonFailureExpectedText, testFailedEvent.getFilePath());
        } else if (comparisonFailureActualText == null && comparisonFailureExpectedText == null) {
          testProxy.setTestFailed(failureMessage, stackTrace, testFailedEvent.isTestError());
        } else {
          logProblem("Comparison failure actual and expected texts should be both null or not null.\n"
                     + "Expected:\n"
                     + comparisonFailureExpectedText + "\n"
                     + "Actual:\n"
                     + comparisonFailureActualText);
        }
        // A negative duration means "not reported" and is ignored.
        long duration = testFailedEvent.getDurationMillis();
        if (duration >= 0) {
          testProxy.setDuration(duration);
        }
        // fire event
        fireOnTestFailed(testProxy);
        terminateNode(node, State.FAILED);
      }
    });
  }

  public void onTestIgnored(@NotNull final TestIgnoredEvent testIgnoredEvent) {
    addToInvokeLater(new Runnable() {
      public void run() {
        Node node = findNodeToTerminate(testIgnoredEvent);
        if (node != null) {
          SMTestProxy testProxy = node.getProxy();
          testProxy.setTestIgnored(testIgnoredEvent.getIgnoreComment(), testIgnoredEvent.getStacktrace());
          // fire event
          fireOnTestIgnored(testProxy);
          terminateNode(node, State.IGNORED);
        }
      }
    });
  }

  public void onTestOutput(@NotNull final TestOutputEvent testOutputEvent) {
    addToInvokeLater(new Runnable() {
      public void run() {
        Node node = findNode(testOutputEvent);
        if (node == null) {
          logProblem("Test wasn't started! But " + testOutputEvent + "!");
          return;
        }
        SMTestProxy testProxy = node.getProxy();
        if (testOutputEvent.isStdOut()) {
          testProxy.addStdOutput(testOutputEvent.getText(), ProcessOutputTypes.STDOUT);
        } else {
          testProxy.addStdErr(testOutputEvent.getText());
        }
      }
    });
  }

  public void onTestsCountInSuite(final int count) {
    addToInvokeLater(new Runnable() {
      public void run() {
        fireOnTestsCountInSuite(count);
      }
    });
  }

  /** Returns {@code true} for positive ids; logs (optionally as an error) and rejects otherwise. */
  private boolean validateNodeId(@NotNull TreeNodeEvent treeNodeEvent) {
    int nodeId = treeNodeEvent.getId();
    if (nodeId <= 0) {
      logProblem("Node id should be positive: " + treeNodeEvent + ".", true);
      return false;
    }
    return true;
  }

  /** Looks up the node carrying the event's id, or {@code null} for an invalid/unknown id. */
  @Nullable
  private Node findNode(@NotNull TreeNodeEvent treeNodeEvent) {
    if (!validateNodeId(treeNodeEvent)) {
      return null;
    }
    return myNodeByIdMap.get(treeNodeEvent.getId());
  }

  /** Returns the proxy registered under {@code id}, or {@code null} when the id is unknown. */
  @Nullable
  public SMTestProxy findProxyById(int id) {
    Node node = myNodeByIdMap.get(id);
    return node != null ? node.getProxy() : null;
  }

  /**
   * Removes listeners and clears node bookkeeping. In headless/unit-test environments leftover
   * running nodes are tolerated silently; otherwise they are logged as unprocessed events.
   */
  public void dispose() {
    super.dispose();
    addToInvokeLater(new Runnable() {
      public void run() {
        disconnectListeners();
        if (!myRunningTestNodes.isEmpty()) {
          Application application = ApplicationManager.getApplication();
          if (!application.isHeadlessEnvironment() && !application.isUnitTestMode()) {
            logProblem("Not all events were processed!");
          }
        }
        myRunningTestNodes.clear();
        myNodeByIdMap.clear();
      }
    });
  }

  /**
   * Moves {@code lowestNode} and each not-yet-running ancestor (up to, but excluding, the root)
   * into RUNNING state, firing the matching suite/test started events.
   */
  private void setNodeAndAncestorsRunning(@NotNull Node lowestNode) {
    Node node = lowestNode;
    while (node != null && node != myTestsRootNode && node.getState() == State.NOT_RUNNING) {
      node.setState(State.RUNNING, this);
      SMTestProxy proxy = node.getProxy();
      proxy.setStarted();
      if (proxy.isSuite()) {
        fireOnSuiteStarted(proxy);
      } else {
        // NOTE(review): adds lowestNode rather than the loop variable; equivalent only if a
        // non-suite node never has a non-suite ancestor -- TODO confirm against the event protocol.
        myRunningTestNodes.add(lowestNode);
        fireOnTestStarted(proxy);
      }
      node = node.getParentNode();
    }
  }

  /** Puts {@code node} into a terminal state and stops tracking it as a running test. */
  private void terminateNode(@NotNull Node node, @NotNull State terminateState) {
    node.setState(terminateState, this);
    myRunningTestNodes.remove(node);
  }

  /** Returns an arbitrary currently running test node, or the root when none are running. */
  @NotNull
  private Node findActiveNode() {
    if (myRunningTestNodes.isEmpty()) {
      return myTestsRootNode;
    }
    return myRunningTestNodes.iterator().next();
  }

  private void logProblem(@NotNull String msg) {
    logProblem(msg, SMTestRunnerConnectionUtil.isInDebugMode());
  }

  /** Logs a framework-prefixed message either as an error or as a warning. */
  private void logProblem(@NotNull String msg, boolean throwError) {
    final String text = "[" + myTestFrameworkName + "] " + msg;
    if (throwError) {
      LOG.error(text);
    }
    else {
      LOG.warn(text);
    }
  }

  // Declaration order matters: setState only accepts transitions with a strictly increasing ordinal.
  private enum State {
    NOT_RUNNING, RUNNING, FINISHED, FAILED, IGNORED
  }

  /** A tree node pairing an event id with its proxy; identity is defined by the id alone. */
  private static class Node {
    private final int myId;
    private final Node myParentNode;
    private final SMTestProxy myProxy;
    private State myState;

    Node(int id, @Nullable Node parentNode, @NotNull SMTestProxy proxy) {
      myId = id;
      myParentNode = parentNode;
      myProxy = proxy;
      myState = State.NOT_RUNNING;
    }

    public int getId() {
      return myId;
    }

    @Nullable
    public Node getParentNode() {
      return myParentNode;
    }

    @NotNull
    public SMTestProxy getProxy() {
      return myProxy;
    }

    @NotNull
    public State getState() {
      return myState;
    }

    /**
     * Transitions to {@code newState}; only forward transitions out of NOT_RUNNING or RUNNING are
     * accepted, anything else is logged as an illegal state change and ignored.
     */
    public void setState(@NotNull State newState, @NotNull GeneralIdBasedToSMTRunnerEventsConvertor convertor) {
      boolean accepted = false;
      if (myState == State.NOT_RUNNING || myState == State.RUNNING) {
        accepted = myState.ordinal() < newState.ordinal();
      }
      if (accepted) {
        myState = newState;
      }
      else {
        convertor.logProblem("Illegal state change [" + myState + " -> " + newState + "]: " + toString(), false);
      }
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (o == null || getClass() != o.getClass()) return false;
      Node node = (Node)o;
      return myId == node.myId;
    }

    @Override
    public int hashCode() {
      return myId;
    }

    @Override
    public String toString() {
      return "{" +
             "id=" + myId +
             ", parentId=" + (myParentNode != null ? String.valueOf(myParentNode.getId()) : "<undefined>") +
             ", name='" + myProxy.getName() +
             "', isSuite=" + myProxy.isSuite() +
             ", state=" + myState +
             '}';
    }
  }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.async.seq;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiConsumer;
import ghidra.async.*;
import ghidra.async.seq.AsyncSequenceWithTemp.*;
import ghidra.util.Msg;
/**
 * Part of the underlying implementation of {@link AsyncUtils#sequence(TypeSpec)}
 *
 * Represents a sequence whose most recently appended action does not yield a temporary value.
 * Appending an action never mutates this object: each {@code then} returns a new sequence whose
 * stage is chained (via {@code thenCompose}/{@code thenComposeAsync}) onto this sequence's
 * {@link #tmpResult}. An exception thrown synchronously by an action both completes
 * {@link #seqResult} exceptionally and is rethrown so the chained stage fails as well.
 *
 * @param <R> the type of result for the whole sequence
 */
public class AsyncSequenceWithoutTemp<R> {
	// The temporary "result" -- will be null, but notified upon completion
	private final CompletableFuture<Void> tmpResult;
	// The result for the whole sequence
	private final CompletableFuture<R> seqResult;

	/**
	 * Construct a new sequence without a temporary value
	 *
	 * Do not call this directly. Please use {@link AsyncUtils#sequence(TypeSpec)}.
	 *
	 * @param seqResult the result of the whole sequence, passed to each appended sequence
	 * @param tmpResult the result of the current final action
	 */
	public AsyncSequenceWithoutTemp(CompletableFuture<R> seqResult,
			CompletableFuture<Void> tmpResult) {
		this.seqResult = seqResult;
		this.tmpResult = tmpResult;
	}

	/**
	 * Append an action to this sequence that produces a temporary value
	 *
	 * @param <U> the type of the temporary value
	 * @param action the action
	 * @param type the type of temporary value that action will produce
	 * @return the new sequence with the appended action
	 */
	public <U> AsyncSequenceWithTemp<R, U> then(AsyncSequenceActionProduces<R, U> action,
			TypeSpec<U> type) {
		return new AsyncSequenceWithTemp<>(seqResult, tmpResult.thenCompose((result) -> {
			HandlerForProducer<R, U> handler = new HandlerForProducer<>(seqResult);
			try {
				action.accept(handler);
			}
			catch (Throwable e) {
				// Fail the whole sequence, then rethrow so this stage is also completed exceptionally.
				seqResult.completeExceptionally(e);
				throw e;
			}
			return handler.future;
		}));
	}

	/**
	 * Append an action to this sequence that produces a temporary value, launching it on the
	 * given executor
	 *
	 * @param <U> the type of the temporary value
	 * @param executor the executor on which to invoke the action
	 * @param action the action
	 * @param type the type of temporary value that action will produce
	 * @return the new sequence with the appended action
	 */
	public <U> AsyncSequenceWithTemp<R, U> then(Executor executor,
			AsyncSequenceActionProduces<R, U> action, TypeSpec<U> type) {
		return new AsyncSequenceWithTemp<>(seqResult, tmpResult.thenComposeAsync((result) -> {
			HandlerForProducer<R, U> handler = new HandlerForProducer<>(seqResult);
			try {
				action.accept(handler);
			}
			catch (Throwable e) {
				seqResult.completeExceptionally(e);
				throw e;
			}
			return handler.future;
		}, executor));
	}

	/**
	 * Append an action to this sequence that stores a value
	 *
	 * @param <U> the type of the stored value
	 * @param action the action
	 * @param storage a reference to receive the result upon completion
	 * @return the new sequence with the appended action
	 */
	public <U> AsyncSequenceWithoutTemp<R> then(AsyncSequenceActionProduces<R, U> action,
			AtomicReference<U> storage) {
		return new AsyncSequenceWithoutTemp<>(seqResult, tmpResult.thenCompose((result) -> {
			HandlerForStorer<R, U> handler = new HandlerForStorer<>(seqResult, storage);
			try {
				action.accept(handler);
			}
			catch (Throwable e) {
				seqResult.completeExceptionally(e);
				throw e;
			}
			return handler.future;
		}));
	}

	/**
	 * Append an action to this sequence that stores a value, launching it on the given executor
	 *
	 * @param <U> the type of the stored value
	 * @param executor the executor on which to invoke the action
	 * @param action the action
	 * @param storage a reference to receive the result upon completion
	 * @return the new sequence with the appended action
	 */
	public <U> AsyncSequenceWithoutTemp<R> then(Executor executor,
			AsyncSequenceActionProduces<R, U> action, AtomicReference<U> storage) {
		return new AsyncSequenceWithoutTemp<>(seqResult, tmpResult.thenComposeAsync((result) -> {
			HandlerForStorer<R, U> handler = new HandlerForStorer<>(seqResult, storage);
			try {
				action.accept(handler);
			}
			catch (Throwable e) {
				seqResult.completeExceptionally(e);
				throw e;
			}
			return handler.future;
		}, executor));
	}

	/**
	 * Append an action to this sequence
	 *
	 * @param action the action
	 * @return the new sequence with the appended action
	 */
	public AsyncSequenceWithoutTemp<R> then(AsyncSequenceActionRuns<R> action) {
		return new AsyncSequenceWithoutTemp<>(seqResult, tmpResult.thenCompose((result) -> {
			HandlerForRunner<R> handler = new HandlerForRunner<>(seqResult);
			try {
				action.accept(handler);
			}
			catch (Throwable e) {
				seqResult.completeExceptionally(e);
				throw e;
			}
			return handler.future;
		}));
	}

	/**
	 * Append an action to this sequence, launching it on the given executor
	 *
	 * @param executor the executor on which to invoke the action
	 * @param action the action
	 * @return the new sequence with the appended action
	 */
	public AsyncSequenceWithoutTemp<R> then(Executor executor, AsyncSequenceActionRuns<R> action) {
		return new AsyncSequenceWithoutTemp<>(seqResult, tmpResult.thenComposeAsync((result) -> {
			HandlerForRunner<R> handler = new HandlerForRunner<>(seqResult);
			try {
				action.accept(handler);
			}
			catch (Throwable e) {
				seqResult.completeExceptionally(e);
				throw e;
			}
			return handler.future;
		}, executor));
	}

	/**
	 * Finish defining this sequence of actions and obtain its future result
	 *
	 * When an action in the sequence calls {@link AsyncHandlerCanExit#exit(Object, Throwable)}, the
	 * returned {@link CompletableFuture} is completed. If any action completes exceptionally, the
	 * returned {@link CompletableFuture} is completed exceptionally. If the final action executes,
	 * {@link AsyncSequenceHandlerForRunner#next(Void, Throwable)}, the returned
	 * {@link CompletableFuture} is completed with {@code null}.
	 *
	 * @return the future result of the sequence
	 */
	public CompletableFuture<R> finish() {
		// Appends a terminal action that exits with null, so falling off the end completes seqResult.
		return then((seq) -> {
			seq.exit(null, null);
		}).seqResult;
	}

	/**
	 * Register an action to execute on sequence completion
	 *
	 * All registered actions are submitted for execution simultaneously when an action in the
	 * sequence calls {@link AsyncHandlerCanExit#exit(Object, Throwable)}. This is useful for
	 * methods that begin executing sequences "with a context". It is roughly equivalent to a
	 * {@code finally} block. On-exit actions can be registered before other actions are appended to
	 * the chain.
	 *
	 * An uncaught exception in an on-exit action will simply be logged and ignored.
	 *
	 * @param action the action to execute
	 * @return this same sequence, for chaining
	 */
	public AsyncSequenceWithoutTemp<R> onExit(BiConsumer<? super R, Throwable> action) {
		seqResult.handle((result, exc) -> {
			try {
				action.accept(result, exc);
			}
			catch (Throwable t) {
				Msg.error(this, "Uncaught exception in onExit", t);
			}
			return result;
		});
		return this;
	}
}
| |
/*
Android Asynchronous Http Client
Copyright (c) 2011 James Smith <james@loopj.com>
http://loopj.com
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cn.rongcloud.im.server.network.http;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpResponseException;
import org.apache.http.util.ByteArrayBuffer;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.lang.ref.WeakReference;
import java.net.URI;
/**
* Used to intercept and handle the responses from requests made using {@link AsyncHttpClient}. The
* {@link #onSuccess(int, Header[], byte[])} method is designed to be anonymously
* overridden with your own response handling code. <p> </p> Additionally, you can override the
* {@link #onFailure(int, Header[], byte[], Throwable)}, {@link #onStart()}, {@link
* #onFinish()}, {@link #onRetry()} and {@link #onProgress(int, int)} methods as required.
* <p> </p> For example: <p> </p>
* <pre>
* AsyncHttpClient client = new AsyncHttpClient();
* client.get("http://www.google.com", new AsyncHttpResponseHandler() {
* @Override
* public void onStart() {
* // Initiated the request
* }
*
* @Override
* public void onSuccess(int statusCode, Header[] headers, byte[] responseBody) {
* // Successfully got a response
* }
*
* @Override
* public void onFailure(int statusCode, Header[] headers, byte[] responseBody, Throwable error)
* {
* // Response failed :(
* }
*
* @Override
* public void onRetry() {
* // Request was retried
* }
*
* @Override
* public void onProgress(int bytesWritten, int totalSize) {
* // Progress notification
* }
*
* @Override
* public void onFinish() {
* // Completed the request (either success or failure)
* }
* });
* </pre>
*/
public class AsyncHttpResponseHandler implements ResponseHandlerInterface {
private static final String LOG_TAG = "AsyncHttpResponseHandler";
protected static final int SUCCESS_MESSAGE = 0;
protected static final int FAILURE_MESSAGE = 1;
protected static final int START_MESSAGE = 2;
protected static final int FINISH_MESSAGE = 3;
protected static final int PROGRESS_MESSAGE = 4;
protected static final int RETRY_MESSAGE = 5;
protected static final int BUFFER_SIZE = 4096;
private Handler handler;
public static final String DEFAULT_CHARSET = "UTF-8";
private String responseCharset = DEFAULT_CHARSET;
private Boolean useSynchronousMode = false;
private URI requestURI = null;
private Header[] requestHeaders = null;
@Override
public URI getRequestURI() {
return this.requestURI;
}
@Override
public Header[] getRequestHeaders() {
return this.requestHeaders;
}
@Override
public void setRequestURI(URI requestURI) {
this.requestURI = requestURI;
}
@Override
public void setRequestHeaders(Header[] requestHeaders) {
this.requestHeaders = requestHeaders;
}
// avoid leaks by using a non-anonymous handler class
// with a weak reference
static class ResponderHandler extends Handler {
private final WeakReference<AsyncHttpResponseHandler> mResponder;
ResponderHandler(AsyncHttpResponseHandler service) {
mResponder = new WeakReference<AsyncHttpResponseHandler>(service);
}
@Override
public void handleMessage(Message msg) {
AsyncHttpResponseHandler service = mResponder.get();
if (service != null) {
service.handleMessage(msg);
}
}
}
public boolean getUseSynchronousMode() {
return (useSynchronousMode);
}
@Override
public void setUseSynchronousMode(boolean value) {
useSynchronousMode = value;
}
/**
* Sets the charset for the response string. If not set, the default is UTF-8.
*
* @param charset to be used for the response string.
* @see <a href="http://docs.oracle.com/javase/7/docs/api/java/nio/charset/Charset.html">Charset</a>
*/
public void setCharset(final String charset) {
this.responseCharset = charset;
}
public String getCharset() {
return this.responseCharset == null ? DEFAULT_CHARSET : this.responseCharset;
}
/**
* Creates a new AsyncHttpResponseHandler
*/
public AsyncHttpResponseHandler() {
// Set up a handler to post events back to the correct thread if possible
if (Looper.myLooper() != null) {
handler = new ResponderHandler(this);
}
}
//
// Callbacks to be overridden, typically anonymously
//
/**
* Fired when the request progress, override to handle in your own code
*
* @param bytesWritten offset from start of file
* @param totalSize total size of file
*/
public void onProgress(int bytesWritten, int totalSize) {
}
/**
* Fired when the request is started, override to handle in your own code
*/
public void onStart() {
}
/**
* Fired in all cases when the request is finished, after both success and failure, override to
* handle in your own code
*/
public void onFinish() {
}
/**
* Fired when a request returns successfully, override to handle in your own code
*
* @param content the body of the HTTP response from the server
* @deprecated use {@link #onSuccess(int, Header[], byte[])}
*/
@Deprecated
public void onSuccess(String content) {
}
/**
* Fired when a request returns successfully, override to handle in your own code
*
* @param statusCode the status code of the response
* @param headers the headers of the HTTP response
* @param content the body of the HTTP response from the server
* @deprecated use {@link #onSuccess(int, Header[], byte[])}
*/
@Deprecated
public void onSuccess(int statusCode, Header[] headers, String content) {
onSuccess(statusCode, content);
}
/**
* Fired when a request returns successfully, override to handle in your own code
*
* @param statusCode the status code of the response
* @param content the body of the HTTP response from the server
* @deprecated use {@link #onSuccess(int, Header[], byte[])}
*/
@Deprecated
public void onSuccess(int statusCode, String content) {
onSuccess(content);
}
/**
* Fired when a request returns successfully, override to handle in your own code
*
* @param statusCode the status code of the response
* @param headers return headers, if any
* @param responseBody the body of the HTTP response from the server
*/
public void onSuccess(int statusCode, Header[] headers, byte[] responseBody) {
try {
String response = responseBody == null ? null : new String(responseBody, getCharset());
onSuccess(statusCode, headers, response);
} catch (UnsupportedEncodingException e) {
onFailure(statusCode, headers, e, null);
}
}
/**
* Fired when a request fails to complete, override to handle in your own code
*
* @param error the underlying cause of the failure
* @deprecated use {@link #onFailure(Throwable, String)}
*/
@Deprecated
public void onFailure(Throwable error) {
}
/**
* Fired when a request fails to complete, override to handle in your own code
*
* @param error the underlying cause of the failure
* @param content the response body, if any
* @deprecated use {@link #onFailure(int, Header[], byte[], Throwable)}
*/
@Deprecated
public void onFailure(Throwable error, String content) {
// By default, call the deprecated onFailure(Throwable) for compatibility
onFailure(error);
}
/**
* Fired when a request fails to complete, override to handle in your own code
*
* @param statusCode return HTTP status code
* @param error the underlying cause of the failure
* @param content the response body, if any
* @deprecated use {@link #onFailure(int, Header[], byte[], Throwable)}
*/
@Deprecated
public void onFailure(int statusCode, Throwable error, String content) {
// By default, call the chain method onFailure(Throwable,String)
onFailure(error, content);
}
/**
* Fired when a request fails to complete, override to handle in your own code
*
* @param statusCode return HTTP status code
* @param headers return headers, if any
* @param error the underlying cause of the failure
* @param content the response body, if any
* @deprecated use {@link #onFailure(int, Header[], byte[], Throwable)}
*/
@Deprecated
public void onFailure(int statusCode, Header[] headers, Throwable error, String content) {
// By default, call the chain method onFailure(int,Throwable,String)
onFailure(statusCode, error, content);
}
/**
* Fired when a request fails to complete, override to handle in your own code
*
* @param statusCode return HTTP status code
* @param headers return headers, if any
* @param responseBody the response body, if any
* @param error the underlying cause of the failure
*/
public void onFailure(int statusCode, Header[] headers, byte[] responseBody, Throwable error) {
try {
String response = responseBody == null ? null : new String(responseBody, getCharset());
onFailure(statusCode, headers, error, response);
} catch (UnsupportedEncodingException e) {
onFailure(statusCode, headers, e, null);
}
}
/**
* Fired when a retry occurs, override to handle in your own code
*/
public void onRetry() {
}
//
// Pre-processing of messages (executes in background threadpool thread)
//
final public void sendProgressMessage(int bytesWritten, int bytesTotal) {
sendMessage(obtainMessage(PROGRESS_MESSAGE, new Object[] {bytesWritten, bytesTotal}));
}
final public void sendSuccessMessage(int statusCode, Header[] headers, byte[] responseBody) {
sendMessage(obtainMessage(SUCCESS_MESSAGE, new Object[] {statusCode, headers, responseBody}));
}
final public void sendFailureMessage(int statusCode, Header[] headers, byte[] responseBody, Throwable error) {
sendMessage(obtainMessage(FAILURE_MESSAGE, new Object[] {statusCode, headers, responseBody, error}));
}
final public void sendStartMessage() {
sendMessage(obtainMessage(START_MESSAGE, null));
}
final public void sendFinishMessage() {
sendMessage(obtainMessage(FINISH_MESSAGE, null));
}
final public void sendRetryMessage() {
sendMessage(obtainMessage(RETRY_MESSAGE, null));
}
// Methods which emulate android's Handler and Message methods
protected void handleMessage(Message msg) {
Object[] response;
switch (msg.what) {
case SUCCESS_MESSAGE:
response = (Object[]) msg.obj;
if (response != null && response.length >= 3) {
onSuccess((Integer) response[0], (Header[]) response[1], (byte[]) response[2]);
} else {
}
break;
case FAILURE_MESSAGE:
response = (Object[]) msg.obj;
if (response != null && response.length >= 4) {
onFailure((Integer) response[0], (Header[]) response[1], (byte[]) response[2], (Throwable) response[3]);
} else {
}
break;
case START_MESSAGE:
onStart();
break;
case FINISH_MESSAGE:
onFinish();
break;
case PROGRESS_MESSAGE:
response = (Object[]) msg.obj;
if (response != null && response.length >= 2) {
try {
onProgress((Integer) response[0], (Integer) response[1]);
} catch (Throwable t) {
}
} else {
}
break;
case RETRY_MESSAGE:
onRetry();
break;
}
}
protected void sendMessage(Message msg) {
if (getUseSynchronousMode() || handler == null) {
handleMessage(msg);
} else if (!Thread.currentThread().isInterrupted()) { // do not send messages if request has been cancelled
handler.sendMessage(msg);
}
}
protected void postRunnable(Runnable r) {
if (r != null) {
handler.post(r);
}
}
protected Message obtainMessage(int responseMessage, Object response) {
Message msg;
if (handler != null) {
msg = handler.obtainMessage(responseMessage, response);
} else {
msg = Message.obtain();
if (msg != null) {
msg.what = responseMessage;
msg.obj = response;
}
}
return msg;
}
/**
 * Reads the full response body and reports either success or failure
 * (status >= 300) to the callback pipeline. Skips all work if the request
 * thread has been interrupted (i.e. the request was cancelled).
 */
@Override
public void sendResponseMessage(HttpResponse response) throws IOException {
    // Bail out immediately if the request has already been cancelled.
    if (Thread.currentThread().isInterrupted()) {
        return;
    }
    StatusLine status = response.getStatusLine();
    byte[] responseBody = getResponseData(response.getEntity());
    // Re-check cancellation: getResponseData() can take non-zero time to process.
    if (Thread.currentThread().isInterrupted()) {
        return;
    }
    if (status.getStatusCode() >= 300) {
        sendFailureMessage(status.getStatusCode(), response.getAllHeaders(), responseBody, new HttpResponseException(status.getStatusCode(), status.getReasonPhrase()));
    } else {
        sendSuccessMessage(status.getStatusCode(), response.getAllHeaders(), responseBody);
    }
}
/**
 * Buffers the HTTP entity's content fully into memory, emitting a progress
 * callback after each chunk read.
 *
 * <p>Returns null when the entity or its stream is null. Rejects bodies whose
 * declared length exceeds Integer.MAX_VALUE. When the Content-Length header is
 * absent (negative), the buffer starts at BUFFER_SIZE and grows as needed —
 * the totalSize passed to sendProgressMessage is then only an estimate.
 * Reading stops early if the request thread is interrupted (cancellation),
 * in which case the partial body read so far is returned.
 */
byte[] getResponseData(HttpEntity entity) throws IOException {
byte[] responseBody = null;
if (entity != null) {
InputStream instream = entity.getContent();
if (instream != null) {
long contentLength = entity.getContentLength();
// Bodies larger than 2 GiB cannot fit in a byte[].
if (contentLength > Integer.MAX_VALUE) {
throw new IllegalArgumentException("HTTP entity too large to be buffered in memory");
}
// Unknown length: start with a default-sized buffer; it grows on demand.
if (contentLength < 0) {
contentLength = BUFFER_SIZE;
}
try {
ByteArrayBuffer buffer = new ByteArrayBuffer((int) contentLength);
try {
byte[] tmp = new byte[BUFFER_SIZE];
int l, count = 0;
// do not send messages if request has been cancelled
while ((l = instream.read(tmp)) != -1 && !Thread.currentThread().isInterrupted()) {
count += l;
buffer.append(tmp, 0, l);
// Note: contentLength may be the BUFFER_SIZE placeholder here, not the true total.
sendProgressMessage(count, (int) contentLength);
}
} finally {
// Always release the connection's stream, even on read failure.
instream.close();
}
responseBody = buffer.toByteArray();
} catch (OutOfMemoryError e) {
// Best-effort recovery: hint a GC, then surface the condition as an IOException.
System.gc();
throw new IOException("File too large to fit into available memory");
}
}
}
return responseBody;
}
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.carbon.device.mgt.jaxrs.service.api;
import io.swagger.annotations.*;
import org.wso2.carbon.apimgt.annotations.api.API;
import org.wso2.carbon.apimgt.annotations.api.Permission;
import org.wso2.carbon.device.mgt.jaxrs.beans.*;
import javax.ws.rs.*;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.util.List;
/**
 * User-management REST API for WSO2 EMM device management: create, fetch,
 * update and delete users; list and search users; read a user's roles;
 * reset credentials; and send device-enrollment invitations.
 *
 * <p>All operations consume and produce JSON. Error payloads are modeled by
 * {@code ErrorResponse}.
 */
@API(name = "UserManagement", version = "1.0.0", context = "/api/device-mgt/v1.0/users", tags = {"device_management"})
@Path("/users")
@Api(value = "User Management", description = "User management related operations can be found here.")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public interface UserManagementService {

    /** Registers a new user in the EMM user store. */
    @POST
    @ApiOperation(
            consumes = MediaType.APPLICATION_JSON,
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "POST",
            value = "Adding a User",
            notes = "WSO2 EMM supports user management. Add a new user to the WSO2 EMM user management system via this REST API",
            tags = "User Management")
    @ApiResponses(
            value = {
                    @ApiResponse(
                            code = 201,
                            message = "Created. \n Successfully created the user.",
                            responseHeaders = {
                                    @ResponseHeader(
                                            name = "Content-Location",
                                            description = "The URL of the role added."),
                                    @ResponseHeader(
                                            name = "Content-Type",
                                            description = "The content type of the body"),
                                    @ResponseHeader(
                                            name = "ETag",
                                            description = "Entity Tag of the response resource.\n" +
                                                    "Used by caches, or in conditional requests."),
                                    @ResponseHeader(
                                            name = "Last-Modified",
                                            description = "Date and time the resource was last modified.\n" +
                                                    "Used by caches, or in conditional requests.")}),
                    @ApiResponse(
                            code = 400,
                            message = "Bad Request. \n Invalid request or validation error.",
                            response = ErrorResponse.class),
                    @ApiResponse(
                            code = 409,
                            message = "Conflict. \n User already exists.",
                            response = ErrorResponse.class),
                    @ApiResponse(
                            code = 415,
                            message = "Unsupported media type. \n The entity of the request was in a not " +
                                    "supported format.",
                            response = ErrorResponse.class),
                    @ApiResponse(
                            code = 500,
                            message = "Internal Server Error. \n Server error occurred while adding a new user.",
                            response = ErrorResponse.class)
            })
    @Permission(name = "Manage Users", permission = "/device-mgt/users/manage")
    Response addUser(
            @ApiParam(
                    name = "user",
                    value = "Provide the property details to add a new user.\n" +
                            "Double click the example value and click try out. ",
                    required = true) UserInfo user);

    /** Fetches the basic profile details of a single user, optionally scoped to a user-store domain. */
    @GET
    @Path("/{username}")
    @ApiOperation(
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "GET",
            value = "Getting Details of a User",
            notes = "Get the details of a user registered with WSO2 EMM using the REST API.",
            response = BasicUserInfo.class,
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully fetched the details of the specified user.",
                    response = BasicUserInfo.class,
                    responseHeaders = {
                            @ResponseHeader(
                                    name = "Content-Type",
                                    description = "The content type of the body"),
                            @ResponseHeader(
                                    name = "ETag",
                                    description = "Entity Tag of the response resource.\n" +
                                            "Used by caches, or in conditional requests."),
                            @ResponseHeader(
                                    name = "Last-Modified",
                                    description = "Date and time the resource was last modified.\n" +
                                            "Used by caches, or in conditional requests."),
                    }),
            @ApiResponse(
                    code = 304,
                    message = "Not Modified. \n Empty body because the client already has the latest version of the requested resource."),
            @ApiResponse(
                    code = 404,
                    message = "Not Found. \n The specified resource does not exist.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 406,
                    message = "Not Acceptable.\n The requested media type is not supported",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n Server error occurred while" +
                            " fetching the user details.",
                    response = ErrorResponse.class)
    })
    @Permission(name = "View Users", permission = "/device-mgt/users/view")
    Response getUser(
            @ApiParam(
                    name = "username",
                    value = "Provide the username of the user.",
                    required = true,
                    defaultValue = "admin")
            @PathParam("username") String username,
            @ApiParam(
                    name = "domain",
                    value = "The domain name of the user store.",
                    required = false)
            @QueryParam("domain") String domain,
            @ApiParam(
                    name = "If-Modified-Since",
                    value = "Checks if the requested variant was modified, since the specified date-time.\n" +
                            "Provide the value in the following format: EEE, d MMM yyyy HH:mm:ss Z.\n" +
                            "Example: Mon, 05 Jan 2014 15:10:00 +0200",
                    required = false)
            @HeaderParam("If-Modified-Since") String ifModifiedSince);

    /** Updates the stored details of an existing user. */
    @PUT
    @Path("/{username}")
    @ApiOperation(
            consumes = MediaType.APPLICATION_JSON,
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "PUT",
            value = "Updating Details of a User",
            notes = "There will be situations where you will want to update the user details. In such "
                    + "situation you can update the user details using this REST API.",
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully updated the details of the specified user.",
                    responseHeaders = {
                            @ResponseHeader(
                                    name = "Content-Type",
                                    description = "Content type of the body"),
                            @ResponseHeader(
                                    name = "ETag",
                                    description = "Entity Tag of the response resource.\n" +
                                            "Used by caches, or in conditional requests."),
                            @ResponseHeader(
                                    name = "Last-Modified",
                                    description = "Date and time the resource was last modified.\n" +
                                            "Used by caches, or in conditional requests.")}),
            @ApiResponse(
                    code = 400,
                    message = "Bad Request. \n Invalid request or validation error.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 404,
                    message = "Not Found. \n The specified resource does not exist.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 415,
                    message = "Unsupported media type. \n The format of the requested entity was not supported.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n " +
                            "Server error occurred while updating the user.",
                    response = ErrorResponse.class)
    })
    @Permission(name = "Manage Users", permission = "/device-mgt/users/manage")
    Response updateUser(
            @ApiParam(
                    name = "username",
                    value = "The username of the user.",
                    required = true,
                    defaultValue = "admin")
            @PathParam("username") String username,
            @ApiParam(
                    name = "domain",
                    value = "The domain name of the user store.",
                    required = false)
            @QueryParam("domain") String domain,
            @ApiParam(
                    name = "userData",
                    value = "Update the user details.\n" +
                            "NOTE: Do not change the admin username, password and roles when trying out this API.",
                    required = true) UserInfo userData);

    /** Removes a user from the EMM user store. */
    @DELETE
    @Path("/{username}")
    @ApiOperation(
            httpMethod = "DELETE",
            value = "Deleting a User",
            notes = "When an employee leaves the organization, you can remove the user details from WSO2 EMM using this REST API.",
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully removed the user from WSO2 EMM."),
            @ApiResponse(
                    code = 404,
                    message = "Not Found. \n The specified resource does not exist.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n " +
                            "Server error occurred while removing the user.",
                    response = ErrorResponse.class
            )
    })
    @Permission(name = "Manage Users", permission = "/device-mgt/users/manage")
    Response removeUser(
            @ApiParam(
                    name = "username",
                    value = "Username of the user to be deleted.\n" +
                            "INFO: If you want to try out this API, make sure to create a new user and then remove that user. Do not remove the admin user.",
                    required = true,
                    defaultValue = "[Create a new user named Jim, and then try out this API.]")
            @PathParam("username") String username,
            @ApiParam(
                    name = "domain",
                    value = "The domain name of the user store.",
                    required = false)
            @QueryParam("domain") String domain);

    /** Lists the roles assigned to a single user. */
    @GET
    @Path("/{username}/roles")
    @ApiOperation(
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "GET",
            value = "Getting the Role Details of a User",
            notes = "A user can be assigned to one or more role in EMM. Using this REST API you can get the role/roles a user is assigned to.",
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully fetched the list of roles the specified user is assigned to.",
                    response = RoleList.class,
                    responseHeaders = {
                            @ResponseHeader(
                                    name = "Content-Type",
                                    description = "The content type of the body"),
                            @ResponseHeader(
                                    name = "ETag",
                                    description = "Entity Tag of the response resource.\n" +
                                            "Used by caches, or in conditional requests."),
                            @ResponseHeader(
                                    name = "Last-Modified",
                                    description = "Date and time the resource was last modified.\n" +
                                            "Used by caches, or in conditional requests."),
                    }),
            @ApiResponse(
                    code = 304,
                    message = "Not Modified. \n Empty body because the client already has the latest version of the requested resource."),
            @ApiResponse(
                    code = 404,
                    message = "Not Found. \n The specified resource does not exist.\n",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 406,
                    message = "Not Acceptable.\n The requested media type is not supported",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n Server error occurred while fetching the list of roles" +
                            " assigned to the specified user.",
                    response = ErrorResponse.class)
    })
    @Permission(name = "View Users", permission = "/device-mgt/users/view")
    Response getRolesOfUser(
            @ApiParam(
                    name = "username",
                    value = "The username of the user.",
                    required = true,
                    defaultValue = "admin")
            @PathParam("username") String username,
            @ApiParam(
                    name = "domain",
                    value = "The domain name of the user store.",
                    required = false)
            @QueryParam("domain") String domain);

    /** Lists registered users, with optional filtering, conditional fetch and pagination. */
    @GET
    @ApiOperation(
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "GET",
            value = "Getting Details of Users",
            notes = "You are able to manage users in WSO2 EMM by adding, updating and removing users. If you wish to get the list of users registered with WSO2 EMM, you can do so "
                    + "using this REST API",
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully fetched the list of users registered with WSO2 EMM.",
                    response = BasicUserInfoList.class,
                    responseHeaders = {
                            @ResponseHeader(
                                    name = "Content-Type",
                                    description = "The content type of the body"),
                            @ResponseHeader(
                                    name = "ETag",
                                    description = "Entity Tag of the response resource.\n" +
                                            "Used by caches, or in conditional requests."),
                            @ResponseHeader(
                                    name = "Last-Modified",
                                    description = "Date and time the resource was last modified.\n" +
                                            "Used by caches, or in conditional requests."),
                    }),
            @ApiResponse(
                    code = 304,
                    message = "Not Modified. \n Empty body because the client already has the latest version of the requested resource.\n"),
            @ApiResponse(
                    code = 406,
                    message = "Not Acceptable.\n The requested media type is not supported",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n Server error occurred while fetching the list of WSO2 EMM users.",
                    response = ErrorResponse.class)
    })
    @Permission(name = "View Users", permission = "/device-mgt/users/view")
    Response getUsers(
            @ApiParam(
                    name = "filter",
                    value = "The username of the user.",
                    required = false)
            @QueryParam("filter") String filter,
            @ApiParam(
                    name = "If-Modified-Since",
                    value = "Checks if the requested variant was modified, since the specified date-time.\n" +
                            "Provide the value in the Java Date Format: EEE, d MMM yyyy HH:mm:ss Z.\n" +
                            "Example: Mon, 05 Jan 2014 15:10:00 +0200",
                    required = false)
            @HeaderParam("If-Modified-Since") String timestamp,
            @ApiParam(
                    name = "offset",
                    value = "The starting pagination index for the complete list of qualified items.",
                    required = false,
                    defaultValue = "0")
            @QueryParam("offset") int offset,
            @ApiParam(
                    name = "limit",
                    value = "Provide how many user details you require from the starting pagination index/offset.",
                    required = false,
                    defaultValue = "5")
            @QueryParam("limit") int limit);

    /** Returns the total number of users registered with EMM. */
    @GET
    @Path("/count")
    @ApiOperation(
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "GET",
            value = "Getting the User Count",
            notes = "Get the number of users in WSO2 EMM via this REST API.",
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully fetched the user count.",
                    response = BasicUserInfoList.class,
                    responseHeaders = {
                            @ResponseHeader(
                                    name = "Content-Type",
                                    description = "The content type of the body")
                    }),
            @ApiResponse(
                    code = 406,
                    message = "Not Acceptable.\n The requested media type is not supported",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n Server error occurred while fetching the total number of users in WSO2 EMM.",
                    response = ErrorResponse.class)
    })
    @Permission(name = "View Users", permission = "/device-mgt/users/view")
    Response getUserCount();

    /** Searches usernames by a partial-name filter, with pagination. */
    @GET
    @Path("/search/usernames")
    @ApiOperation(
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "GET",
            value = "Searching for a User Name",
            notes = "If you are unsure of the user name of a user and need to retrieve the details of a specific user, you can "
                    + "search for that user by giving a character or a few characters in the username. "
                    + "You will be given a list of users having the user name in the exact order of the "
                    + "characters you provided.",
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully fetched the list of users that matched the given filter.",
                    response = String.class,
                    responseContainer = "List",
                    responseHeaders = {
                            @ResponseHeader(
                                    name = "Content-Type",
                                    description = "The content type of the body"),
                            @ResponseHeader(
                                    name = "ETag",
                                    description = "Entity Tag of the response resource.\n" +
                                            "Used by caches, or in conditional requests."),
                            @ResponseHeader(
                                    name = "Last-Modified",
                                    description = "Date and time the resource was last modified.\n" +
                                            "Used by caches, or in conditional requests."),
                    }),
            @ApiResponse(
                    code = 304,
                    message = "Not Modified. \n Empty body because the client already has the latest version of the requested resource."),
            @ApiResponse(
                    code = 406,
                    message = "Not Acceptable.\n The requested media type is not supported",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n Server error occurred while fetching the list of users that matched the given filter.",
                    response = ErrorResponse.class)
    })
    @Permission(name = "View Users", permission = "/device-mgt/users/view")
    Response getUserNames(
            @ApiParam(
                    name = "filter",
                    value = "Provide a character or a few character in the user name",
                    required = true)
            @QueryParam("filter") String filter,
            @ApiParam(
                    name = "domain",
                    value = "The user store domain which the user names should be fetched from",
                    required = false)
            @QueryParam("domain") String domain,
            @ApiParam(
                    name = "If-Modified-Since",
                    value = "Checks if the requested variant was modified, since the specified date-time.\n" +
                            "Provide the value in the following format: EEE, d MMM yyyy HH:mm:ss Z.\n" +
                            "Example: Mon, 05 Jan 2014 15:10:00 +0200",
                    required = false)
            @HeaderParam("If-Modified-Since") String timestamp,
            @ApiParam(
                    name = "offset",
                    value = "The starting pagination index for the complete list of qualified items.",
                    required = false,
                    defaultValue = "0")
            @QueryParam("offset") int offset,
            @ApiParam(
                    name = "limit",
                    value = "Provide how many user details you require from the starting pagination index/offset.",
                    required = false,
                    defaultValue = "5")
            @QueryParam("limit") int limit);

    /** Changes the authenticated user's own password. */
    @PUT
    @Path("/credentials")
    @ApiOperation(
            consumes = MediaType.APPLICATION_JSON,
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "PUT",
            value = "Changing the User Password",
            notes = "A user is able to change the password to secure their WSO2 EMM profile via this REST API.",
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully updated the user credentials."),
            @ApiResponse(
                    code = 400,
                    message = "Bad Request. \n Invalid request or validation error.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 404,
                    message = "Not Found. \n The specified resource does not exist.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 415,
                    message = "Unsupported media type. \n The format of the requested entity was not supported.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n " +
                            "Server error occurred while updating the user credentials.",
                    response = ErrorResponse.class)
    })
    @Permission(name = "Reset user password", permission = "/login")
    Response resetPassword(
            @ApiParam(
                    name = "credentials",
                    value = "The property to change the password.\n" +
                            "The password should be within 5 to 30 characters",
                    required = true) OldPasswordResetWrapper credentials);

    /** Emails device-enrollment invitations to the given list of existing users. */
    @POST
    @Path("/send-invitation")
    @ApiOperation(
            consumes = MediaType.APPLICATION_JSON,
            produces = MediaType.APPLICATION_JSON,
            httpMethod = "POST",
            value = "Sending Enrollment Invitations to Users",
            notes = "Send the users a mail inviting them to download the EMM mobile application on their devices using the REST API given below.\n" +
                    "Before running the REST API command to send the enrollment invitations to users make sure to configure WSO2 EMM as explained in step 4, under the WSO2 EMM general server configurations documentation.",
            tags = "User Management")
    @ApiResponses(value = {
            @ApiResponse(
                    code = 200,
                    message = "OK. \n Successfully sent the invitation mail."),
            @ApiResponse(
                    code = 400,
                    message = "Bad Request. \n Invalid request or validation error.",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 404,
                    message = "Not Found. \n The specified resource does not exist.\n",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 415,
                    message = "Unsupported media type. \n The format of the requested entity was not supported.\n",
                    response = ErrorResponse.class),
            @ApiResponse(
                    code = 500,
                    message = "Internal Server Error. \n " +
                            "Server error occurred while updating the user credentials.",
                    response = ErrorResponse.class)
    })
    @Permission(name = "Manage Users", permission = "/device-mgt/users/manage")
    Response inviteExistingUsersToEnrollDevice(
            @ApiParam(
                    name = "users",
                    value = "List of users",
                    required = true) List<String> usernames);
}
| |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
package com.google.devtools.build.lib.rules.config;
import static com.google.common.collect.ImmutableSortedMap.toImmutableSortedMap;
import static com.google.common.truth.Truth.assertThat;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Ordering;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.BaseRuleClasses;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.actions.FileWriteAction;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildOptions;
import com.google.devtools.build.lib.analysis.config.BuildOptionsView;
import com.google.devtools.build.lib.analysis.config.transitions.PatchTransition;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.packages.Rule;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import java.util.Map;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for manual trimming of feature flags with the transitive_configs attribute. */
@RunWith(JUnit4.class)
public final class FeatureFlagManualTrimmingTest extends BuildViewTestCase {
/** Runs before each test: enforce manual feature-flag trimming with no extra flags. */
@Before
public void enableManualTrimming() throws Exception {
enableManualTrimmingAnd();
}
/**
 * Configures the build with manual feature-flag trimming enforced, plus any
 * additional command-line flags the caller supplies.
 */
private void enableManualTrimmingAnd(String... otherFlags) throws Exception {
    ImmutableList.Builder<String> flagBuilder = ImmutableList.builder();
    flagBuilder.add("--enforce_transitive_configs_for_config_feature_flag");
    flagBuilder.add(otherFlags);
    useConfiguration(flagBuilder.build().toArray(new String[0]));
}
/** Builds the standard rule-class provider, augmented with the feature_flag_setter rule. */
@Override
protected ConfiguredRuleClassProvider createRuleClassProvider() {
    ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder();
    builder.addRuleDefinition(new FeatureFlagSetterRule());
    TestRuleClassProvider.addStandardRules(builder);
    return builder.build();
}
/**
 * Defines the Starlark rule {@code read_flags}: for each label in its {@code flags}
 * attribute it writes one "label:::value" line (from FeatureFlagInfo) into the
 * output file {@code <name>.flags}. Tests read that file back via
 * {@code getFlagValuesFromOutputFile}.
 */
@Before
public void setUpFlagReadingRule() throws Exception {
scratch.file(
"test/read_flags.bzl",
"def _read_flags_impl(ctx):",
"  ctx.actions.write(",
"      ctx.outputs.flagdict,",
"      '\\n'.join(['%s:::%s' % (dep.label, dep[config_common.FeatureFlagInfo].value)",
"                  for dep in ctx.attr.flags]))",
"  return [DefaultInfo(files = depset([ctx.outputs.flagdict]))]",
"read_flags = rule(",
"  implementation = _read_flags_impl,",
"  attrs = {'flags': attr.label_list()},",
"  outputs = {'flagdict': '%{name}.flags'},",
")");
}
/**
 * Defines the Starlark rule {@code host_transition}, which forwards its srcs'
 * default outputs while transitioning the srcs to the host configuration
 * (cfg='host').
 */
@Before
public void setUpHostTransitionRule() throws Exception {
scratch.file(
"test/host_transition.bzl",
"def _host_transition_impl(ctx):",
"  files = depset(transitive = [src[DefaultInfo].files for src in ctx.attr.srcs])",
"  return [DefaultInfo(files = files)]",
"host_transition = rule(",
"  implementation = _host_transition_impl,",
"  attrs = {'srcs': attr.label_list(cfg='host')},",
")");
}
/** Returns the feature-flag label-to-value map recorded in the configuration's options. */
private ImmutableSortedMap<Label, String> getFlagMapFromConfiguration(BuildConfiguration config)
throws Exception {
return FeatureFlagValue.getFlagValues(config.getOptions());
}
/**
 * Parses a read_flags output artifact back into a sorted label-to-value map.
 * Each line of the file has the form "label:::value" (see read_flags.bzl).
 */
private ImmutableSortedMap<Label, String> getFlagValuesFromOutputFile(Artifact flagDict) {
    FileWriteAction generatingAction =
        (FileWriteAction) getActionGraph().getGeneratingAction(flagDict);
    Map<String, String> rawEntries =
        Splitter.on('\n').withKeyValueSeparator(":::").split(generatingAction.getFileContents());
    return rawEntries.entrySet().stream()
        .collect(
            toImmutableSortedMap(
                Ordering.natural(),
                entry -> Label.parseAbsoluteUnchecked(entry.getKey()),
                Map.Entry::getValue));
}
/**
 * With trimming disabled, the shared dep is analyzed once per distinct flag
 * configuration, so the two setters produce different artifacts for ':common'.
 */
@Test
public void duplicateTargetsCreatedWithTrimmingDisabled() throws Exception {
// Override the @Before configuration: trimming explicitly off.
useConfiguration("--noenforce_transitive_configs_for_config_feature_flag");
scratch.file(
"test/BUILD",
"load(':read_flags.bzl', 'read_flags')",
"feature_flag_setter(",
"    name = 'left',",
"    deps = [':common'],",
"    flag_values = {",
"        ':different_flag': 'left',",
"        ':common_flag': 'configured',",
"    },",
"    transitive_configs = [':common_flag'],",
")",
"feature_flag_setter(",
"    name = 'right',",
"    deps = [':common'],",
"    flag_values = {",
"        ':different_flag': 'right',",
"        ':common_flag': 'configured',",
"    },",
"    transitive_configs = [':common_flag'],",
")",
"read_flags(",
"    name = 'common',",
"    flags = [':common_flag'],",
"    transitive_configs = [':common_flag'],",
")",
"config_feature_flag(",
"    name = 'different_flag',",
"    allowed_values = ['default', 'left', 'right'],",
"    default_value = 'default',",
")",
"config_feature_flag(",
"    name = 'common_flag',",
"    allowed_values = ['default', 'configured', 'other'],",
"    default_value = 'default',",
")");
Artifact leftFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:left")).toList());
Artifact rightFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:right")).toList());
// Without trimming, differing untrimmed flags keep the configurations distinct.
assertThat(leftFlags).isNotEqualTo(rightFlags);
}
/**
 * A flag that is both set by the setter and listed in transitive_configs
 * reaches the dep with its configured value.
 */
@Test
public void featureFlagSetAndInTransitiveConfigs_GetsSetValue() throws Exception {
scratch.file(
"test/BUILD",
"load(':read_flags.bzl', 'read_flags')",
"feature_flag_setter(",
"    name = 'target',",
"    deps = [':reader'],",
"    flag_values = {",
"        ':trimmed_flag': 'left',",
"        ':used_flag': 'configured',",
"    },",
"    transitive_configs = [':used_flag'],",
")",
"read_flags(",
"    name = 'reader',",
"    flags = [':used_flag'],",
"    transitive_configs = [':used_flag'],",
")",
"config_feature_flag(",
"    name = 'trimmed_flag',",
"    allowed_values = ['default', 'left', 'right'],",
"    default_value = 'default',",
")",
"config_feature_flag(",
"    name = 'used_flag',",
"    allowed_values = ['default', 'configured', 'other'],",
"    default_value = 'default',",
")");
Artifact targetFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:target")).toList());
Label usedFlag = Label.parseAbsolute("//test:used_flag", ImmutableMap.of());
// The reader observed the configured (non-default) value.
assertThat(getFlagValuesFromOutputFile(targetFlags)).containsEntry(usedFlag, "configured");
}
/**
 * A flag absent from transitive_configs is trimmed out of the dep's
 * configuration, so deps that differ only in that flag share one configured
 * target (and one artifact).
 */
@Test
public void featureFlagSetButNotInTransitiveConfigs_IsTrimmedOutAndCollapsesDuplicates()
throws Exception {
scratch.file(
"test/BUILD",
"load(':read_flags.bzl', 'read_flags')",
"feature_flag_setter(",
"    name = 'left',",
"    deps = [':common'],",
"    flag_values = {",
"        ':different_flag': 'left',",
"        ':common_flag': 'configured',",
"    },",
"    transitive_configs = [':common_flag'],",
")",
"feature_flag_setter(",
"    name = 'right',",
"    deps = [':common'],",
"    flag_values = {" ,
"        ':different_flag': 'right',",
"        ':common_flag': 'configured',",
"    },",
"    transitive_configs = [':common_flag'],",
")",
"read_flags(",
"    name = 'common',",
"    flags = [':common_flag'],",
"    transitive_configs = [':common_flag'],",
")",
"config_feature_flag(",
"    name = 'different_flag',",
"    allowed_values = ['default', 'left', 'right'],",
"    default_value = 'default',",
")",
"config_feature_flag(",
"    name = 'common_flag',",
"    allowed_values = ['default', 'configured', 'other'],",
"    default_value = 'default',",
")");
Artifact leftFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:left")).toList());
Artifact rightFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:right")).toList());
// different_flag was trimmed away, so both paths converge on one artifact/owner.
assertThat(leftFlags).isEqualTo(rightFlags);
assertThat(leftFlags.getArtifactOwner()).isEqualTo(rightFlags.getArtifactOwner());
}
/**
 * A flag listed in transitive_configs but never assigned by the setter is
 * visible to the dep with its declared default value.
 */
@Test
public void featureFlagInTransitiveConfigsButNotSet_GetsDefaultValue() throws Exception {
scratch.file(
"test/BUILD",
"load(':read_flags.bzl', 'read_flags')",
"feature_flag_setter(",
"    name = 'target',",
"    deps = [':reader'],",
"    flag_values = {",
"        ':trimmed_flag': 'left',",
"    },",
"    transitive_configs = [':used_flag'],",
")",
"read_flags(",
"    name = 'reader',",
"    flags = [':used_flag'],",
"    transitive_configs = [':used_flag'],",
")",
"config_feature_flag(",
"    name = 'trimmed_flag',",
"    allowed_values = ['default', 'left', 'right'],",
"    default_value = 'default',",
")",
"config_feature_flag(",
"    name = 'used_flag',",
"    allowed_values = ['default', 'configured', 'other'],",
"    default_value = 'default',",
")");
Artifact targetFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:target")).toList());
Label usedFlag = Label.parseAbsolute("//test:used_flag", ImmutableMap.of());
assertThat(getFlagValuesFromOutputFile(targetFlags)).containsEntry(usedFlag, "default");
}
/**
 * Listing a flag in transitive_configs that no dep actually reads is
 * tolerated, but it keeps the configurations distinct — duplicate deps are
 * NOT collapsed, which is wasteful yet legal.
 */
@Test
public void featureFlagInTransitiveConfigsButNotInTransitiveClosure_IsWastefulButDoesNotError()
throws Exception {
scratch.file(
"test/BUILD",
"load(':read_flags.bzl', 'read_flags')",
"feature_flag_setter(",
"    name = 'left',",
"    deps = [':common'],",
"    flag_values = {",
"        ':different_flag': 'left',",
"        ':common_flag': 'configured',",
"    },",
"    transitive_configs = [':different_flag', ':common_flag'],",
")",
"feature_flag_setter(",
"    name = 'right',",
"    deps = [':common'],",
"    flag_values = {",
"        ':different_flag': 'right',",
"        ':common_flag': 'configured',",
"    },",
"    transitive_configs = [':different_flag', ':common_flag'],",
")",
"read_flags(",
"    name = 'common',",
"    flags = [':common_flag'],",
"    transitive_configs = [':different_flag', ':common_flag'],",
")",
"config_feature_flag(",
"    name = 'different_flag',",
"    allowed_values = ['default', 'left', 'right'],",
"    default_value = 'default',",
")",
"config_feature_flag(",
"    name = 'common_flag',",
"    allowed_values = ['default', 'configured', 'other'],",
"    default_value = 'default',",
")");
Artifact leftFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:left")).toList());
Artifact rightFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:right")).toList());
// different_flag stays in both configurations, so the dep is duplicated.
assertThat(leftFlags).isNotEqualTo(rightFlags);
assertThat(leftFlags.getArtifactOwner()).isNotEqualTo(rightFlags.getArtifactOwner());
}
/**
 * A dep with an explicitly empty transitive_configs list is trimmed to a
 * flag-free configuration, making it identical regardless of which flags its
 * parents set — and identical to analyzing it at the top level.
 */
@Test
public void emptyTransitiveConfigs_EquivalentRegardlessOfFeatureFlags() throws Exception {
scratch.file(
"test/BUILD",
"load(':read_flags.bzl', 'read_flags')",
"feature_flag_setter(",
"    name = 'left',",
"    deps = [':reader'],",
"    flag_values = {",
"        ':used_flag': 'left',",
"    },",
"    transitive_configs = [':used_flag'],",
")",
"feature_flag_setter(",
"    name = 'right',",
"    deps = [':reader'],",
"    flag_values = {",
"        ':used_flag': 'right',",
"    },",
"    transitive_configs = [':used_flag'],",
")",
"read_flags(",
"    name = 'reader',",
"    transitive_configs = [],",
")",
"config_feature_flag(",
"    name = 'used_flag',",
"    allowed_values = ['default', 'left', 'right'],",
"    default_value = 'default',",
")");
Artifact leftFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:left")).toList());
Artifact rightFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:right")).toList());
Artifact directFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:reader")).toList());
assertThat(leftFlags).isEqualTo(rightFlags);
assertThat(leftFlags).isEqualTo(directFlags);
}
/**
 * Omitting transitive_configs entirely behaves the same as declaring it
 * empty: the dep is fully trimmed and shared across all parents.
 */
@Test
public void absentTransitiveConfigs_EquivalentRegardlessOfFeatureFlags() throws Exception {
scratch.file(
"test/BUILD",
"load(':read_flags.bzl', 'read_flags')",
"feature_flag_setter(",
"    name = 'left',",
"    deps = [':reader'],",
"    flag_values = {",
"        ':used_flag': 'left',",
"    },",
"    transitive_configs = [':used_flag'],",
")",
"feature_flag_setter(",
"    name = 'right',",
"    deps = [':reader'],",
"    flag_values = {",
"        ':used_flag': 'right',",
"    },",
"    transitive_configs = [':used_flag'],",
")",
"read_flags(",
"    name = 'reader',",
// no transitive_configs = equivalent to []
")",
"config_feature_flag(",
"    name = 'used_flag',",
"    allowed_values = ['default', 'left', 'right'],",
"    default_value = 'default',",
")");
Artifact leftFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:left")).toList());
Artifact rightFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:right")).toList());
Artifact directFlags =
Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:reader")).toList());
assertThat(leftFlags).isEqualTo(rightFlags);
assertThat(leftFlags).isEqualTo(directFlags);
}
  @Test
  public void nonexistentLabelInTransitiveConfigs_DoesNotError() throws Exception {
    // transitive_configs names ':false_flag', which is not defined anywhere in the package.
    // Listing a nonexistent label there must be tolerated silently - the attribute is a trimming
    // hint, not a dependency edge - so analysis succeeds with no events.
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':reader'],",
        " flag_values = {",
        " ':trimmed_flag': 'left',",
        " },",
        " transitive_configs = [':false_flag'],",
        ")",
        "read_flags(",
        " name = 'reader',",
        " transitive_configs = [':false_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'trimmed_flag',",
        " allowed_values = ['default', 'left', 'right'],",
        " default_value = 'default',",
        ")");
    getConfiguredTarget("//test:target");
    assertNoEvents();
  }
  @Test
  public void flagSetBySetterButNotInTransitiveConfigs_CanBeUsedByDeps() throws Exception {
    // The setter assigns ':not_actually_trimmed_flag' but declares transitive_configs = [].
    // The dep that names the flag in its own transitive_configs can still read it - no error is
    // raised. (Presumably because the setter's flag-setting transition populates the flag before
    // trimming is applied to the dep; see the comment on
    // featureFlagSettingRules_OverrideFlagsFromReverseTransitiveClosure.)
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':reader'],",
        " flag_values = {",
        " ':not_actually_trimmed_flag': 'left',",
        " },",
        " transitive_configs = [],",
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [':not_actually_trimmed_flag'],",
        " transitive_configs = [':not_actually_trimmed_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'not_actually_trimmed_flag',",
        " allowed_values = ['default', 'left', 'right'],",
        " default_value = 'default',",
        ")");
    getConfiguredTarget("//test:target");
    assertNoEvents();
  }
  @Test
  public void featureFlagInUnusedSelectBranchButNotInTransitiveConfigs_DoesNotError()
      throws Exception {
    // ':trimmed_flag' only appears in a select() branch keyed on ':used_flag@other', which never
    // matches (the setter leaves ':used_flag' at 'configured', not 'other'). A flag referenced
    // solely from an unselected branch does not need to appear in transitive_configs.
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':reader'],",
        " flag_values = {",
        " ':trimmed_flag': 'left',",
        " },",
        " transitive_configs = [':used_flag'],",
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = select({':used_flag@other': [':trimmed_flag'], '//conditions:default': []}),",
        " transitive_configs = [':used_flag'],",
        ")",
        "config_setting(",
        " name = 'used_flag@other',",
        " flag_values = {':used_flag': 'other'},",
        ")",
        "config_feature_flag(",
        " name = 'trimmed_flag',",
        " allowed_values = ['default', 'left', 'right'],",
        " default_value = 'default',",
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    getConfiguredTarget("//test:target");
    assertNoEvents();
  }
  @Test
  public void featureFlagTarget_IsTrimmedToOnlyItself() throws Exception {
    // A config_feature_flag target itself should be maximally trimmed: although the setter carries
    // both ':trimmed_flag' and ':read_flag' in its transitive_configs, the configuration handed to
    // the exported ':read_flag' target must contain exactly one flag - its own label.
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " exports_flag = ':read_flag',",
        " flag_values = {",
        " ':trimmed_flag': 'left',",
        " ':read_flag': 'configured',",
        " },",
        " transitive_configs = [':trimmed_flag', ':read_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'trimmed_flag',",
        " allowed_values = ['default', 'left', 'right'],",
        " default_value = 'default',",
        ")",
        "config_feature_flag(",
        " name = 'read_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    ConfiguredTarget target = getConfiguredTarget("//test:target");
    RuleContext ruleContext = getRuleContext(target);
    // Inspect the configuration of the single prerequisite behind the 'exports_flag' attribute.
    BuildConfiguration childConfiguration =
        Iterables.getOnlyElement(ruleContext.getPrerequisiteConfiguredTargets("exports_flag"))
            .getConfiguration();
    Label childLabel = Label.parseAbsoluteUnchecked("//test:read_flag");
    assertThat(getFlagMapFromConfiguration(childConfiguration).keySet())
        .containsExactly(childLabel);
  }
  @Test
  public void featureFlagAccessedByPathWithMissingLabel_ProducesError() throws Exception {
    // ':broken' sits between the setter and the reader and declares transitive_configs = [],
    // trimming ':used_flag' out of the configuration before it reaches ':reader'. The reader then
    // accesses a flag its configuration no longer carries, which must fail analysis with the
    // canonical "accessed in a configuration it is not present in" error.
    reporter.removeHandler(failFastHandler); // expecting an error
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':broken'],",
        " flag_values = {",
        " ':used_flag': 'configured',",
        " },",
        " transitive_configs = [':used_flag'],",
        ")",
        "filegroup(",
        " name = 'broken',",
        " srcs = [':reader'],",
        " transitive_configs = [],",
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [':used_flag'],",
        " transitive_configs = [':used_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    assertThat(getConfiguredTarget("//test:target")).isNull();
    assertContainsEvent(
        "Feature flag //test:used_flag was accessed in a configuration it is not present in. All "
            + "targets which depend on //test:used_flag directly or indirectly must name it in "
            + "their transitive_configs attribute.");
  }
  @Test
  public void featureFlagAccessedByPathWithMissingTransitiveConfigs_ProducesError()
      throws Exception {
    // Same failure mode as featureFlagAccessedByPathWithMissingLabel_ProducesError, but ':broken'
    // omits transitive_configs entirely rather than declaring []. Absent must behave like empty:
    // the flag is trimmed on that path and the downstream access produces the same error.
    reporter.removeHandler(failFastHandler); // expecting an error
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':broken'],",
        " flag_values = {",
        " ':used_flag': 'configured',",
        " },",
        " transitive_configs = [':used_flag'],",
        ")",
        "filegroup(",
        " name = 'broken',",
        " srcs = [':reader'],",
        // no transitive_configs = equivalent to []
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [':used_flag'],",
        " transitive_configs = [':used_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    assertThat(getConfiguredTarget("//test:target")).isNull();
    assertContainsEvent(
        "Feature flag //test:used_flag was accessed in a configuration it is not present in. All "
            + "targets which depend on //test:used_flag directly or indirectly must name it in "
            + "their transitive_configs attribute.");
  }
  @Test
  public void featureFlagInHostConfiguration_HasDefaultValue() throws Exception {
    // ':reader' is reached through a host transition. Target-configuration flag assignments do not
    // propagate into the host configuration, so even though the setter assigns 'configured', the
    // flag file built under the host configuration must report the flag's default_value.
    scratch.file(
        "test/BUILD",
        "load(':host_transition.bzl', 'host_transition')",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':host'],",
        " flag_values = {",
        " ':used_flag': 'configured',",
        " },",
        " transitive_configs = [':used_flag'],",
        ")",
        "host_transition(",
        " name = 'host',",
        " srcs = [':reader'],",
        " transitive_configs = [':used_flag'],",
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [':used_flag'],",
        " transitive_configs = [':used_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    Artifact targetFlags =
        Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:target")).toList());
    Label usedFlag = Label.parseAbsolute("//test:used_flag", ImmutableMap.of());
    assertThat(getFlagValuesFromOutputFile(targetFlags)).containsEntry(usedFlag, "default");
  }
  @Test
  public void featureFlagInHostConfiguration_HasNoTransitiveConfigEnforcement() throws Exception {
    // No target in this chain declares transitive_configs, yet ':reader' (behind a host
    // transition) reads ':used_flag'. Trimming enforcement does not apply in the host
    // configuration, so analysis must succeed without events.
    scratch.file(
        "test/BUILD",
        "load(':host_transition.bzl', 'host_transition')",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':host'],",
        " flag_values = {",
        " ':used_flag': 'configured',",
        " },",
        // no transitive_configs
        ")",
        "host_transition(",
        " name = 'host',",
        " srcs = [':reader'],",
        // no transitive_configs
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [':used_flag'],",
        // no transitive_configs
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    getConfiguredTarget("//test:target");
    assertNoEvents();
  }
  @Test
  public void noDistinctHostConfiguration_DoesNotResultInActionConflicts() throws Exception {
    // Regression test for b/117932061: with --nodistinct_host_configuration, the host and target
    // configurations must be identical, so ':reader' (depended on in both) must produce a single
    // deduplicated artifact rather than two conflicting actions at the same output path.
    scratch.file(
        "test/BUILD",
        "load(':host_transition.bzl', 'host_transition')",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':host', ':reader'],",
        ")",
        "host_transition(",
        " name = 'host',",
        " srcs = [':reader'],",
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [],",
        ")");
    enableManualTrimmingAnd("--nodistinct_host_configuration");
    ConfiguredTarget target = getConfiguredTarget("//test:target");
    assertNoEvents();
    // Note that '//test:reader' is accessed (and creates actions) in both the host and target
    // configurations. If these are different but output to the same path (as was the case before
    // --nodistinct_host_configuration caused --enforce_transitive_configs_for_config_feature_flag
    // to become a no-op), then this causes action conflicts, as described in b/117932061 (for which
    // this test is a regression test).
    assertThat(getFilesToBuild(target).toList()).hasSize(1);
    // Action conflict detection is not enabled for these tests. However, the action conflict comes
    // from the outputs of the two configurations of //test:reader being unequal artifacts;
    // hence, this test checks that the nested set of artifacts reachable from //test:target only
    // contains one artifact, that is, they were deduplicated for being equal.
  }
  @Test
  public void noDistinctHostConfiguration_DisablesEnforcementForBothHostAndTargetConfigs()
      throws Exception {
    // With --nodistinct_host_configuration, transitive_configs enforcement must be a no-op in the
    // target configuration as well as the host one: nothing here declares transitive_configs, yet
    // ':reader' is reached both directly and through a host transition, and analysis must succeed.
    scratch.file(
        "test/BUILD",
        "load(':host_transition.bzl', 'host_transition')",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':host', ':reader'],",
        " flag_values = {",
        " ':used_flag': 'configured',",
        " },",
        // no transitive_configs
        ")",
        "host_transition(",
        " name = 'host',",
        " srcs = [':reader'],",
        // no transitive_configs
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [':used_flag'],",
        // no transitive_configs
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    enableManualTrimmingAnd("--nodistinct_host_configuration");
    getConfiguredTarget("//test:target");
    assertNoEvents();
  }
  @Test
  public void featureFlagAccessedDirectly_ReturnsDefaultValue() throws Exception {
    // Building a config_feature_flag as the top-level target, with no setter anywhere, must yield
    // its declared default_value through ConfigFeatureFlagProvider.
    scratch.file(
        "test/BUILD",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    assertThat(
            ConfigFeatureFlagProvider.fromTarget(getConfiguredTarget("//test:used_flag"))
                .getFlagValue())
        .isEqualTo("default");
  }
  @Test
  public void featureFlagAccessedViaTopLevelLibraryTarget_ReturnsDefaultValue() throws Exception {
    // A reader built at the top level (no feature_flag_setter above it) reads the flag's declared
    // default_value, which must appear in the flag file it writes.
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "read_flags(",
        " name = 'reader',",
        " flags = [':used_flag'],",
        " transitive_configs = [':used_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    Artifact targetFlags =
        Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:reader")).toList());
    Label usedFlag = Label.parseAbsolute("//test:used_flag", ImmutableMap.of());
    assertThat(getFlagValuesFromOutputFile(targetFlags)).containsEntry(usedFlag, "default");
  }
  @Test
  public void featureFlagSettingRules_OverrideFlagsFromReverseTransitiveClosure()
      throws Exception {
    // In other words: if you have a dependency which sets feature flags itself, you don't need to
    // name any of the feature flags used by that target or its transitive closure, as it sets
    // feature flags itself.
    // This is because the feature flag setting transition (which calls replaceFlagValues) runs
    // before the trimming transition and completely replaces the feature flag set. Thus, when
    // the trimming transition (which calls trimFlagValues) runs, its requests are always satisfied.
    // Here ':toplevel' omits transitive_configs entirely, yet the reader below ':target' still
    // sees ':used_flag' = 'configured'.
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "filegroup(",
        " name = 'toplevel',",
        " srcs = [':target'],",
        // no transitive_configs
        ")",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':reader'],",
        " flag_values = {",
        " ':trimmed_flag': 'left',",
        " ':used_flag': 'configured',",
        " },",
        " transitive_configs = [':used_flag'],",
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [':used_flag'],",
        " transitive_configs = [':used_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'trimmed_flag',",
        " allowed_values = ['default', 'left', 'right'],",
        " default_value = 'default',",
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    Artifact targetFlags =
        Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:toplevel")).toList());
    Label usedFlag = Label.parseAbsolute("//test:used_flag", ImmutableMap.of());
    assertThat(getFlagValuesFromOutputFile(targetFlags)).containsEntry(usedFlag, "configured");
  }
  @Test
  public void trimmingTransitionReturnsOriginalOptionsWhenNothingIsTrimmed() throws Exception {
    // This is a performance regression test. The trimming transition applies over every configured
    // target in a build. Since BuildOptions.hashCode is expensive, if the transition produced a
    // fresh BuildOptions instance for every configured target even when nothing was trimmed, it
    // would add significant CPU and memory overhead to every build. When the request is already
    // satisfied, the transition must return the input options object itself (checked with
    // isSameInstanceAs below), not an equal copy.
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'toplevel_target',",
        " deps = [':dep'],",
        " flag_values = {",
        " ':used_flag': 'configured',",
        " },",
        " transitive_configs = [':used_flag'],",
        ")",
        "read_flags(",
        " name = 'dep',",
        " flags = [':used_flag'],",
        " transitive_configs = [':used_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    BuildOptions topLevelOptions =
        getConfiguration(getConfiguredTarget("//test:toplevel_target")).getOptions();
    // Apply the trimming transition for ':dep' by hand so we can inspect the returned instance.
    PatchTransition transition =
        new ConfigFeatureFlagTaggedTrimmingTransitionFactory(BaseRuleClasses.TAGGED_TRIMMING_ATTR)
            .create((Rule) getTarget("//test:dep"));
    BuildOptions depOptions =
        transition.patch(
            new BuildOptionsView(topLevelOptions, transition.requiresOptionFragments()),
            eventCollector);
    assertThat(depOptions).isSameInstanceAs(topLevelOptions);
  }
  @Test
  public void featureFlagSetAndInTransitiveConfigs_GetsSetValueWhenTrimTest() throws Exception {
    // Same as the basic set-and-read scenario, but with --trim_test_configuration also enabled:
    // test-configuration trimming must not interfere with feature-flag trimming, so the reader
    // still sees the configured value of ':used_flag'.
    scratch.file(
        "test/BUILD",
        "load(':read_flags.bzl', 'read_flags')",
        "feature_flag_setter(",
        " name = 'target',",
        " deps = [':reader'],",
        " flag_values = {",
        " ':trimmed_flag': 'left',",
        " ':used_flag': 'configured',",
        " },",
        " transitive_configs = [':used_flag'],",
        ")",
        "read_flags(",
        " name = 'reader',",
        " flags = [':used_flag'],",
        " transitive_configs = [':used_flag'],",
        ")",
        "config_feature_flag(",
        " name = 'trimmed_flag',",
        " allowed_values = ['default', 'left', 'right'],",
        " default_value = 'default',",
        ")",
        "config_feature_flag(",
        " name = 'used_flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    enableManualTrimmingAnd("--trim_test_configuration");
    Artifact targetFlags =
        Iterables.getOnlyElement(getFilesToBuild(getConfiguredTarget("//test:target")).toList());
    Label usedFlag = Label.parseAbsolute("//test:used_flag", ImmutableMap.of());
    assertThat(getFlagValuesFromOutputFile(targetFlags)).containsEntry(usedFlag, "configured");
  }
}
| |
/*
* Copyright (c) 2020, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.apimgt.persistence.utils;
import javax.xml.namespace.QName;
import org.wso2.carbon.governance.api.exception.GovernanceException;
import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifact;
import org.wso2.carbon.governance.api.generic.dataobjects.GenericArtifactImpl;
/**
 * Test-fixture factory producing sample {@code PizzaShackAPI} registry artifacts for persistence
 * tests. The super-tenant and tenant variants share every attribute except the five
 * tenant-dependent values (context, provider, thumbnail path, context template, API owner), so
 * both public factories delegate to a single private builder.
 */
public class PersistenceHelper {

    /**
     * Builds the sample super-tenant {@code PizzaShackAPI} 1.0.0 artifact.
     *
     * @return a fully populated {@link GenericArtifact}
     * @throws GovernanceException if an attribute cannot be set
     */
    public static GenericArtifact getSampleAPIArtifact() throws GovernanceException {
        return buildPizzaShackArtifact(
                "/pizzashack/1.0.0",
                "admin",
                "/registry/resource/_system/governance/apimgt/applicationdata/provider/admin/PizzaShackAPI/1.0.0/icon",
                "/pizzashack/{version}",
                null);
    }

    /**
     * Builds the sample {@code PizzaShackAPI} 1.0.0 artifact for the {@code wso2.com} tenant.
     * (The trailing 'f' in the method name is a historical typo, kept so existing callers keep
     * compiling.)
     *
     * @return a fully populated {@link GenericArtifact} with tenant-qualified context/provider
     * @throws GovernanceException if an attribute cannot be set
     */
    public static GenericArtifact getSampleAPIArtifactForTenantf() throws GovernanceException {
        return buildPizzaShackArtifact(
                "/t/wso2.com/pizzashack/1.0.0",
                "admin-AT-wso2.com",
                "/t/wso2.com/t/wso2.com/registry/resource/_system/governance/apimgt/applicationdata/provider/admin-AT-wso2.com/PizzaShackAPI/1.0.0/icon",
                "/t/wso2.com/pizzashack/{version}",
                "admin@wso2.com");
    }

    /**
     * Populates a PizzaShack artifact with the shared attribute set, parameterized by the five
     * values that differ between the super-tenant and tenant variants.
     *
     * @param context         value for {@code overview_context}
     * @param provider        value for {@code overview_provider}
     * @param thumbnail       registry path for {@code overview_thumbnail}
     * @param contextTemplate value for {@code overview_contextTemplate}
     * @param apiOwner        value for {@code overview_apiOwner} (may be null)
     */
    private static GenericArtifact buildPizzaShackArtifact(String context, String provider, String thumbnail,
            String contextTemplate, String apiOwner) throws GovernanceException {
        GenericArtifact artifact = new GenericArtifactImpl(new QName("", "PizzaShackAPI", ""),
                "application/vnd.wso2-api+xml");
        artifact.setAttribute("overview_endpointSecured", "false");
        artifact.setAttribute("overview_transports", "http,https");
        artifact.setAttribute("URITemplate_authType3", "Application & Application User");
        artifact.setAttribute("overview_wadl", null);
        artifact.setAttribute("URITemplate_authType4", "Application & Application User");
        artifact.setAttribute("overview_authorizationHeader", "Authorization");
        artifact.setAttribute("URITemplate_authType1", "Application & Application User");
        artifact.setAttribute("overview_visibleTenants", null);
        artifact.setAttribute("URITemplate_authType2", "Application & Application User");
        artifact.setAttribute("overview_wsdl", null);
        artifact.setAttribute("overview_apiSecurity", "oauth2,oauth_basic_auth_api_key_mandatory");
        artifact.setAttribute("URITemplate_authType0", "Application & Application User");
        artifact.setAttribute("overview_keyManagers", "[\"all\"]");
        artifact.setAttribute("overview_environments", "Production and Sandbox");
        artifact.setAttribute("overview_context", context);
        artifact.setAttribute("overview_visibility", "restricted");
        artifact.setAttribute("overview_isLatest", "true");
        artifact.setAttribute("overview_outSequence", "log_out_message");
        artifact.setAttribute("overview_provider", provider);
        artifact.setAttribute("apiCategories_categoryName", "testcategory");
        artifact.setAttribute("overview_thumbnail", thumbnail);
        artifact.setAttribute("overview_contextTemplate", contextTemplate);
        artifact.setAttribute("overview_description", "This is a simple API for Pizza Shack online pizza delivery store.");
        artifact.setAttribute("overview_technicalOwner", "John Doe");
        artifact.setAttribute("overview_type", "HTTP");
        artifact.setAttribute("overview_technicalOwnerEmail", "architecture@pizzashack.com");
        artifact.setAttribute("URITemplate_httpVerb4", "DELETE");
        artifact.setAttribute("overview_inSequence", "log_in_message");
        artifact.setAttribute("URITemplate_httpVerb2", "GET");
        artifact.setAttribute("URITemplate_httpVerb3", "PUT");
        artifact.setAttribute("URITemplate_httpVerb0", "POST");
        artifact.setAttribute("URITemplate_httpVerb1", "GET");
        artifact.setAttribute("labels_labelName", "gwlable");
        artifact.setAttribute("overview_businessOwner", "Jane Roe");
        artifact.setAttribute("overview_version", "1.0.0");
        artifact.setAttribute("overview_endpointConfig",
                "{\"endpoint_type\":\"http\",\"sandbox_endpoints\":{\"url\":\"https://localhost:9443/am/sample/pizzashack/v1/api/\"},"
                + "\"endpoint_security\":{\"production\":{\"password\":\"admin\",\"tokenUrl\":null,\"clientId\":null,"
                + "\"clientSecret\":null,\"customParameters\":\"{}\",\"additionalProperties\":{},\"type\":\"BASIC\","
                + "\"grantType\":null,\"enabled\":true,\"uniqueIdentifier\":null,\"username\":\"admin\"},"
                + "\"sandbox\":{\"password\":null,\"tokenUrl\":null,\"clientId\":null,\"clientSecret\":null,"
                + "\"customParameters\":\"{}\",\"additionalProperties\":{},\"type\":null,\"grantType\":null,\"enabled\":false,"
                + "\"uniqueIdentifier\":null,\"username\":null}},\"production_endpoints\":"
                + "{\"url\":\"https://localhost:9443/am/sample/pizzashack/v1/api/\"}}");
        artifact.setAttribute("overview_tier", "Bronze||Silver||Gold||Unlimited");
        artifact.setAttribute("overview_sandboxTps", "1000");
        artifact.setAttribute("overview_apiOwner", apiOwner);
        artifact.setAttribute("overview_businessOwnerEmail", "marketing@pizzashack.com");
        artifact.setAttribute("isMonetizationEnabled", "false");
        artifact.setAttribute("overview_implementation", "ENDPOINT");
        // The string "null" (not a null reference) is intentional here.
        artifact.setAttribute("overview_deployments", "null");
        artifact.setAttribute("overview_redirectURL", null);
        artifact.setAttribute("monetizationProperties", "{}");
        artifact.setAttribute("overview_name", "PizzaShackAPI");
        artifact.setAttribute("overview_subscriptionAvailability", "current_tenant");
        artifact.setAttribute("overview_productionTps", "1000");
        artifact.setAttribute("overview_cacheTimeout", "300");
        artifact.setAttribute("overview_visibleRoles", "admin,internal/subscriber");
        artifact.setAttribute("overview_testKey", null);
        artifact.setAttribute("overview_corsConfiguration",
                "{\"corsConfigurationEnabled\":true,\"accessControlAllowOrigins\":[\"*\"],"
                + "\"accessControlAllowCredentials\":false,\"accessControlAllowHeaders\":[\"authorization\","
                + "\"Access-Control-Allow-Origin\",\"Content-Type\",\"SOAPAction\",\"apikey\",\"testKey\"],"
                + "\"accessControlAllowMethods\":[\"GET\",\"PUT\",\"POST\",\"DELETE\",\"PATCH\",\"OPTIONS\"]}");
        artifact.setAttribute("overview_advertiseOnly", "false");
        artifact.setAttribute("overview_versionType", "context");
        artifact.setAttribute("overview_status", "PUBLISHED");
        // Key spelling "endpointPpassword" is intentional; it mirrors the attribute key used by
        // the production registry schema.
        artifact.setAttribute("overview_endpointPpassword", null);
        artifact.setAttribute("overview_tenants", null);
        artifact.setAttribute("overview_endpointAuthDigest", "false");
        artifact.setAttribute("overview_faultSequence", "json_fault");
        artifact.setAttribute("overview_responseCaching", "Enabled");
        artifact.setAttribute("URITemplate_urlPattern4", "/order/{orderId}");
        artifact.setAttribute("overview_isDefaultVersion", "true");
        artifact.setAttribute("URITemplate_urlPattern2", "/order/{orderId}");
        artifact.setAttribute("URITemplate_urlPattern3", "/order/{orderId}");
        artifact.setAttribute("URITemplate_urlPattern0", "/order");
        artifact.setAttribute("URITemplate_urlPattern1", "/menu");
        artifact.setAttribute("overview_enableStore", "true");
        artifact.setAttribute("overview_enableSchemaValidation", "true");
        artifact.setAttribute("overview_endpointUsername", null);
        return artifact;
    }
}
| |
/*
* Copyright 2017 StreamSets Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.streamsets.datacollector.definition;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.streamsets.pipeline.api.ConfigDef;
import com.streamsets.pipeline.api.MultiValueChooserModel;
import com.streamsets.pipeline.api.ValueChooserModel;
import com.streamsets.pipeline.api.base.BaseEnumChooserValues;
import org.junit.Assert;
import org.junit.Test;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Map;
public class TestConfigValueExtractor {
public enum FooEnum { A, B, C }
  // ChooserValues implementation backing the @ValueChooserModel / @MultiValueChooserModel
  // annotations on the FooEnum-typed fields in Configs.
  public static class FooEnumValueChooser extends BaseEnumChooserValues<FooEnum> {
    public FooEnumValueChooser() {
      super(FooEnum.class);
    }
  }
  // Fixture class: one @ConfigDef-annotated field per configuration type/shape that
  // ConfigValueExtractor must handle (defaults present/absent, every primitive NUMBER width,
  // LIST/MAP/TEXT/MODEL, EL expressions, enum choosers, and RUNTIME values).
  public static class Configs {
    // STRING with an explicit default.
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.STRING,
        defaultValue = "X",
        required = true
    )
    public String withDefault;
    // NUMBER with no defaultValue set.
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.NUMBER,
        required = true
    )
    public int withoutDefault;
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.BOOLEAN,
        required = true,
        defaultValue = "true"
    )
    public boolean boolB;
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.CHARACTER,
        required = true,
        defaultValue = "c"
    )
    public char charC;
    // NOTE: despite the name, this field is declared as int, not byte.
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.NUMBER,
        required = true,
        defaultValue = "1"
    )
    public int byteN;
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.NUMBER,
        required = true,
        defaultValue = "1"
    )
    public short shortN;
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.NUMBER,
        required = true,
        defaultValue = "1"
    )
    public int intN;
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.NUMBER,
        required = true,
        defaultValue = "1"
    )
    public long longN;
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.NUMBER,
        required = true,
        defaultValue = "0.5"
    )
    public float floatN;
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.NUMBER,
        required = true,
        defaultValue = "0.5"
    )
    public double doubleN;
    // LIST default expressed as a JSON array literal.
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.LIST,
        required = true,
        defaultValue = "[ 1 ]"
    )
    public List<Integer> listL;
    // MAP default expressed as a JSON object literal.
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.MAP,
        required = true,
        defaultValue = "{ \"K\": \"V\"}"
    )
    public Map mapM;
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.TEXT,
        required = true,
        defaultValue = "Hello"
    )
    public String textT;
    // MODEL without a default.
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.MODEL,
        required = true
    )
    public String modelM;
    // Default is an EL expression rather than a literal number.
    @ConfigDef(
        label = "L",
        type = ConfigDef.Type.NUMBER,
        required = true,
        defaultValue = "${x}"
    )
    public int elEL;
    // Single-select enum model; default names one enum constant.
    @ConfigDef(
        label = "L",
        defaultValue = "A",
        type = ConfigDef.Type.MODEL,
        required = true
    )
    @ValueChooserModel(FooEnumValueChooser.class)
    public FooEnum enumS;
    // Multi-select enum model; default is a JSON array of constant names.
    @ConfigDef(
        label = "L",
        defaultValue = "[ \"A\" ]",
        type = ConfigDef.Type.MODEL,
        required = true
    )
    @MultiValueChooserModel(FooEnumValueChooser.class)
    public List<FooEnum> enumM;
    @ConfigDef(
        type = ConfigDef.Type.RUNTIME,
        required = true,
        label = "L",
        defaultValue = "stringValue"
    )
    public String runtimeString;
    @ConfigDef(
        type = ConfigDef.Type.RUNTIME,
        required = true,
        label = "L",
        defaultValue = "1987"
    )
    public int runtimeInteger;
  }
@Test
public void testExtractValue() throws Exception {
Field field = Configs.class.getField("withDefault");
ConfigDef configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals("X", ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("boolB");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(true, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("charC");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals('c', ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("byteN");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(1, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("shortN");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals((short)1, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("intN");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(1, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("longN");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals((long)1, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("floatN");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals((float)0.5, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("doubleN");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(0.5, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("listL");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(ImmutableList.of(1), ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("mapM");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(ImmutableList.of(ImmutableMap.of("key", "K", "value", "V")),
ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("textT");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals("Hello", ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("modelM");
configDef = field.getAnnotation(ConfigDef.class);
// we get null here but the bean creator will inject the type default
Assert.assertEquals(null, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("elEL");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals("${x}", ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("enumS");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(FooEnum.A, ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("enumM");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(ImmutableList.of(FooEnum.A), ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("runtimeString");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals("stringValue", ConfigValueExtractor.get().extract(field, configDef, "x"));
field = Configs.class.getField("runtimeInteger");
configDef = field.getAnnotation(ConfigDef.class);
Assert.assertEquals(1987, ConfigValueExtractor.get().extract(field, configDef, "x"));
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver;
import static org.apache.hadoop.hbase.KeyValueTestUtil.create;
import static org.apache.hadoop.hbase.regionserver.KeyValueScanFixture.scanFixture;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.NavigableSet;
import java.util.Random;
import java.util.TreeSet;
import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CellComparator;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.RandomKeyValueUtil;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests that concurrent {@link StoreScanner#close()} and
 * {@link StoreScanner#updateReaders(List, List)} calls work correctly, ensuring
 * that no references remain on the existing {@link StoreScanner} readers.
 */
@Category({ RegionServerTests.class, MediumTests.class })
public class TestStoreScannerClosure {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
      HBaseClassTestRule.forClass(TestStoreScannerClosure.class);
  private static final Logger LOG = LoggerFactory.getLogger(TestStoreScannerClosure.class);
  // Minimum and Maximum are marker types rejected by generateKeyType(), hence "- 2".
  private static final int NUM_VALID_KEY_TYPES = KeyValue.Type.values().length - 2;
  @Rule
  public TestName name = new TestName();
  private static final String CF_STR = "cf";
  private static HRegion region;
  private static final byte[] CF = Bytes.toBytes(CF_STR);
  static Configuration CONF = HBaseConfiguration.create();
  private static CacheConfig cacheConf;
  private static FileSystem fs;
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  // NOTE(review): ROOT_DIR appears unused within this class — candidate for removal.
  private static String ROOT_DIR = TEST_UTIL.getDataTestDir("TestHFile").toString();
  // Scan metadata used to construct the ExtendedStoreScanner under test.
  private ScanInfo scanInfo = new ScanInfo(CONF, CF, 0, Integer.MAX_VALUE, Long.MAX_VALUE,
      KeepDeletedCells.FALSE, HConstants.DEFAULT_BLOCKSIZE, 0, CellComparator.getInstance(), false);
  private final static byte[] fam = Bytes.toBytes("cf_1");
  // Fixture cells for the scan fixture: columns a..i in row R1 plus one cell in R2.
  private static final KeyValue[] kvs =
      new KeyValue[] { create("R1", "cf", "a", 11, KeyValue.Type.Put, "dont-care"),
        create("R1", "cf", "b", 11, KeyValue.Type.Put, "dont-care"),
        create("R1", "cf", "c", 11, KeyValue.Type.Put, "dont-care"),
        create("R1", "cf", "d", 11, KeyValue.Type.Put, "dont-care"),
        create("R1", "cf", "e", 11, KeyValue.Type.Put, "dont-care"),
        create("R1", "cf", "f", 11, KeyValue.Type.Put, "dont-care"),
        create("R1", "cf", "g", 11, KeyValue.Type.Put, "dont-care"),
        create("R1", "cf", "h", 11, KeyValue.Type.Put, "dont-care"),
        create("R1", "cf", "i", 11, KeyValue.Type.Put, "dont-care"),
        create("R2", "cf", "a", 11, KeyValue.Type.Put, "dont-care"), };
  /** Creates the test region (with WAL) that backs the store scanned by the tests. */
  @BeforeClass
  public static void setUp() throws Exception {
    CONF = TEST_UTIL.getConfiguration();
    cacheConf = new CacheConfig(CONF);
    fs = TEST_UTIL.getTestFileSystem();
    TableName tableName = TableName.valueOf("test");
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor(fam));
    HRegionInfo info = new HRegionInfo(tableName, null, null, false);
    Path path = TEST_UTIL.getDataTestDir("test");
    region = HBaseTestingUtility.createRegionAndWAL(info, path, TEST_UTIL.getConfiguration(), htd);
  }
  // Variant 1: updateReaders() waits on the latch, so close() is guaranteed to run first.
  @Test
  public void testScannerCloseAndUpdateReaders1() throws Exception {
    testScannerCloseAndUpdateReaderInternal(true, false);
  }
  // Variant 2: close() waits on the latch, so updateReaders() is guaranteed to run first.
  @Test
  public void testScannerCloseAndUpdateReaders2() throws Exception {
    testScannerCloseAndUpdateReaderInternal(false, true);
  }
  /**
   * Writes a store file of 1000 random KeyValues and returns its path.
   * NOTE(review): the Random is unseeded, so the file contents differ per run.
   */
  private Path writeStoreFile() throws IOException {
    Path storeFileParentDir = new Path(TEST_UTIL.getDataTestDir(), "TestHFile");
    HFileContext meta = new HFileContextBuilder().withBlockSize(64 * 1024).build();
    StoreFileWriter sfw = new StoreFileWriter.Builder(CONF, fs).withOutputDir(storeFileParentDir)
        .withComparator(CellComparatorImpl.COMPARATOR).withFileContext(meta).build();
    final int rowLen = 32;
    Random RNG = new Random();
    for (int i = 0; i < 1000; ++i) {
      // Row / family / qualifier are carved out of one random ordered key buffer.
      byte[] k = RandomKeyValueUtil.randomOrderedKey(RNG, i);
      byte[] v = RandomKeyValueUtil.randomValue(RNG);
      int cfLen = RNG.nextInt(k.length - rowLen + 1);
      KeyValue kv = new KeyValue(k, 0, rowLen, k, rowLen, cfLen, k, rowLen + cfLen,
          k.length - rowLen - cfLen, RNG.nextLong(), generateKeyType(RNG), v, 0, v.length);
      sfw.append(kv);
    }
    sfw.close();
    return sfw.getPath();
  }
  /** Picks a random valid key type: Put half the time, otherwise any non-marker type. */
  private static KeyValue.Type generateKeyType(Random rand) {
    if (rand.nextBoolean()) {
      // Let's make half of KVs puts.
      return KeyValue.Type.Put;
    } else {
      KeyValue.Type keyType = KeyValue.Type.values()[1 + rand.nextInt(NUM_VALID_KEY_TYPES)];
      if (keyType == KeyValue.Type.Minimum || keyType == KeyValue.Type.Maximum) {
        throw new RuntimeException("Generated an invalid key type: " + keyType + ". "
            + "Probably the layout of KeyValue.Type has changed.");
      }
      return keyType;
    }
  }
  private HStoreFile readStoreFile(Path storeFilePath, Configuration conf) throws Exception {
    // Open the file reader with block cache disabled.
    HStoreFile file = new HStoreFile(fs, storeFilePath, conf, cacheConf, BloomType.NONE, true);
    return file;
  }
  /**
   * Races close() against updateReaders() on one scanner; the latch inside
   * ExtendedStoreScanner forces whichever side has its "await" flag set to run second.
   * Exactly one of awaitUpdate/awaitClose is expected to be true per call.
   */
  private void testScannerCloseAndUpdateReaderInternal(boolean awaitUpdate, boolean awaitClose)
      throws IOException, InterruptedException {
    // start write to store file.
    Path path = writeStoreFile();
    HStoreFile file = null;
    List<HStoreFile> files = new ArrayList<HStoreFile>();
    try {
      file = readStoreFile(path, CONF);
      files.add(file);
    } catch (Exception e) {
      // fail test
      // NOTE(review): prefer Assert.fail(e.getMessage()) so the cause is reported.
      assertTrue(false);
    }
    scanFixture(kvs);
    // scanners.add(storeFileScanner);
    try (ExtendedStoreScanner scan = new ExtendedStoreScanner(region.getStore(fam), scanInfo,
        new Scan(), getCols("a", "d"), 100L)) {
      Thread closeThread = new Thread() {
        public void run() {
          scan.close(awaitClose, true);
        }
      };
      closeThread.start();
      Thread updateThread = new Thread() {
        public void run() {
          try {
            scan.updateReaders(awaitUpdate, files, Collections.emptyList());
          } catch (IOException e) {
            e.printStackTrace();
          }
        }
      };
      updateThread.start();
      // complete both the threads
      closeThread.join();
      // complete both the threads
      updateThread.join();
      if (file.getReader() != null) {
        // the fileReader is not null when the updateReaders has completed first.
        // in the other case the fileReader will be null.
        int refCount = file.getReader().getRefCount();
        LOG.info("the store scanner count is " + refCount);
        assertTrue("The store scanner count should be 0", refCount == 0);
      }
    }
  }
  /**
   * StoreScanner subclass whose close()/updateReaders() can each be told to block on a
   * shared latch; the non-blocking side counts the latch down after finishing, which
   * serializes the two operations in a chosen order.
   */
  private static class ExtendedStoreScanner extends StoreScanner {
    private CountDownLatch latch = new CountDownLatch(1);
    public ExtendedStoreScanner(HStore store, ScanInfo scanInfo, Scan scan,
        NavigableSet<byte[]> columns, long readPt) throws IOException {
      super(store, scanInfo, scan, columns, readPt);
    }
    public void updateReaders(boolean await, List<HStoreFile> sfs,
        List<KeyValueScanner> memStoreScanners) throws IOException {
      if (await) {
        try {
          latch.await();
        } catch (InterruptedException e) {
          // TODO Auto-generated catch block
          e.printStackTrace();
        }
      }
      super.updateReaders(sfs, memStoreScanners);
      // Release the peer operation only when this side ran first.
      if (!await) {
        latch.countDown();
      }
    }
    // creating a dummy close
    public void close(boolean await, boolean dummy) {
      if (await) {
        try {
          latch.await();
        } catch (InterruptedException e) {
          // TODO Auto-generated catch block
          e.printStackTrace();
        }
      }
      super.close();
      // Release the peer operation only when this side ran first.
      if (!await) {
        latch.countDown();
      }
    }
  }
  /** Builds a sorted column set from the given qualifier strings. */
  NavigableSet<byte[]> getCols(String... strCols) {
    NavigableSet<byte[]> cols = new TreeSet<>(Bytes.BYTES_COMPARATOR);
    for (String col : strCols) {
      byte[] bytes = Bytes.toBytes(col);
      cols.add(bytes);
    }
    return cols;
  }
}
| |
/**
*
*/
package com.vaderetrosecure.keystore.dao;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import javax.crypto.BadPaddingException;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import org.apache.log4j.Logger;
/**
* This class is used to protect secret and private keys of the key store.
* It contains an AES secret key with its initialization vector.<br>
* <b>Be aware</b> that objects from this class are used by {@link com.vaderetrosecure.keystore.VRKeyStoreSpi} to
* create protections but are never stored. Implementors must only store {@link LockedKeyProtection} objects that,
* internally, will be derived to create {@code KeyProtection} objects.
*
* @see com.vaderetrosecure.keystore.VRKeyStoreSpi
* @see com.vaderetrosecure.keystore.dao.LockedKeyProtection
*/
public class KeyProtection
{
    private static final Logger LOG = Logger.getLogger(KeyProtection.class);

    // AES secret key used to (de)cipher stored keys; may be null for an empty protection.
    private SecretKey key;
    // Initialization vector paired with the key.
    private byte[] iv;

    /**
     * Construct a new empty {@code KeyProtection}.
     */
    public KeyProtection()
    {
        this(null, new byte[]{});
    }

    /**
     * Construct a new {@code KeyProtection}, given a secret key and an
     * initialization vector.
     *
     * @param key the secret key.
     * @param iv the initialization vector.
     * @see javax.crypto.SecretKey
     */
    public KeyProtection(SecretKey key, byte[] iv)
    {
        this.key = key;
        this.iv = iv;
    }

    /**
     * Construct a new {@code KeyProtection}, from a {@code LockedKeyProtection} object.
     * If the {@code VRKeyStoreSpi} locked it with a public key, a private key must be
     * given to decipher it.
     *
     * @param lockedKeyProtection the LockedKeyProtection object that can be stored.
     * @param privateKey if the LockedKeyProtection object was ciphered by the VRKeyStoreSpi, it is mandatory. Otherwise, it is set to null.
     * @throws InvalidKeyException if the private key is wrong.
     * @throws NoSuchAlgorithmException if algorithm was not found.
     * @throws NoSuchPaddingException if key can not be deciphered.
     * @throws IllegalBlockSizeException if key can not be deciphered.
     * @throws BadPaddingException if key can not be deciphered.
     * @see com.vaderetrosecure.keystore.dao.LockedKeyProtection
     */
    public KeyProtection(LockedKeyProtection lockedKeyProtection, PrivateKey privateKey) throws InvalidKeyException, NoSuchAlgorithmException, NoSuchPaddingException, IllegalBlockSizeException, BadPaddingException
    {
        this.key = unlockCipheredKey(lockedKeyProtection.getCipheredKey(), privateKey);
        this.iv = lockedKeyProtection.getIV();
    }

    /**
     * Give the secret key used to decipher stored secret and private keys.
     *
     * @return the secret key.
     */
    public SecretKey getKey()
    {
        return key;
    }

    /**
     * Assign a secret key to this object.
     *
     * @param key the secret key.
     */
    public void setKey(SecretKey key)
    {
        this.key = key;
    }

    /**
     * Give the initialization vector used with the secret key.
     *
     * @return the initialization vector.
     */
    public byte[] getIV()
    {
        return iv;
    }

    /**
     * Assign an initialization vector to this object.
     *
     * @param iv the initialization vector.
     */
    public void setIV(byte[] iv)
    {
        this.iv = iv;
    }

    /**
     * Generate a {@code KeyProtection} object, given a password and salt.
     * Password and salt are used to generate the secret key. An initialization vector
     * is also generated using a strong random number generator.<b>This method is the
     * preferred way</b> to generate a new {@code KeyProtection} object.<br>
     * Implementors do not need to call this method.
     *
     * @param password the password to protect the stored key.
     * @param salt salt used with the password to generate the KeyProtection object.
     * @return a new KeyProtection object.
     * @throws NoSuchAlgorithmException if algorithm was not found.
     * @throws InvalidKeySpecException if the IV is malformed.
     */
    public static KeyProtection generateKeyProtection(char[] password, byte[] salt) throws NoSuchAlgorithmException, InvalidKeySpecException
    {
        byte[] iv = CryptoTools.generateIV();
        return generateKeyProtection(password, salt, iv);
    }

    /**
     * Generate a {@code KeyProtection} object, given a password, salt and an initialization vector.
     * Password and salt are used to generate the secret key; the provided
     * initialization vector is used as-is.<br>
     * Implementors do not need to call this method.
     *
     * @param password the password to protect the stored key.
     * @param salt salt used with the password to generate the KeyProtection object.
     * @param iv an initialization vector.
     * @return a new KeyProtection object.
     * @throws NoSuchAlgorithmException if algorithm was not found.
     * @throws InvalidKeySpecException if the IV is malformed.
     */
    public static KeyProtection generateKeyProtection(char[] password, byte[] salt, byte[] iv) throws NoSuchAlgorithmException, InvalidKeySpecException
    {
        SecretKey sk = CryptoTools.getAESSecretKey(password, salt);
        return new KeyProtection(sk, iv);
    }

    /**
     * Rebuild the AES secret key from its stored form: if no private key is supplied,
     * the stored bytes are assumed to be the raw (readable) key material; otherwise
     * they are first deciphered with the private key.
     */
    private SecretKey unlockCipheredKey(byte[] cipheredKey, PrivateKey privateKey) throws InvalidKeyException, NoSuchAlgorithmException, NoSuchPaddingException, IllegalBlockSizeException, BadPaddingException
    {
        SecretKey sk = null;
        if (privateKey == null)
        {
            // Fixed log message: it is the *private* key that is absent here.
            LOG.debug("No private key, now try to unlock a readable key protection");
            sk = new SecretKeySpec(cipheredKey, "AES");
        }
        else
        {
            sk = new SecretKeySpec(CryptoTools.decipherData(cipheredKey, privateKey), "AES");
        }
        return sk;
    }

    /**
     * Create a {@code LockedKeyProtection} object from this {@code KeyProtection} object.
     * A public key can be given in parameter to cipher this object. This method is called by
     * {@code VRKeyStoreSpi} to create a ciphered key protection that can be stored.<br>
     * <b>Be aware that not using a public key implies a security vulnerability</b>, because a third party program can potentially decipher
     * all stored keys, if the storage entity is attainable.
     * Implementors do not need to call this method.
     *
     * @param publicKey the public key used to cipher this object. May be null if not required, but implies a security vulnerability.
     * @return a new LockedKeyProtection object.
     * @throws InvalidKeyException if the public key is not valid.
     * @throws NoSuchAlgorithmException if algorithm is not found.
     * @throws NoSuchPaddingException if the public key is wrong or not large enough.
     * @throws IllegalBlockSizeException if the public key is wrong or not large enough.
     * @throws BadPaddingException if the public key is wrong or not large enough.
     * @see com.vaderetrosecure.keystore.dao.LockedKeyProtection
     * @see com.vaderetrosecure.keystore.VRKeyStoreSpi
     */
    public LockedKeyProtection getLockedKeyProtection(PublicKey publicKey) throws InvalidKeyException, NoSuchAlgorithmException, NoSuchPaddingException, IllegalBlockSizeException, BadPaddingException
    {
        LockedKeyProtection lkp = null;
        if (publicKey == null)
        {
            // Fixed log message: it is the *public* key that is absent here.
            LOG.debug("No public key, so key protection will be readable");
            lkp = new LockedKeyProtection(getKey().getEncoded(), getIV());
        }
        else
        {
            lkp = new LockedKeyProtection(CryptoTools.cipherData(getKey().getEncoded(), publicKey), getIV());
        }
        return lkp;
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.psi.impl.source.resolve.reference.impl;
import com.intellij.codeInsight.completion.InsertHandler;
import com.intellij.codeInsight.completion.InsertionContext;
import com.intellij.codeInsight.completion.PrioritizedLookupElement;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Iconable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.RecursionGuard;
import com.intellij.openapi.util.RecursionManager;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.impl.JavaConstantExpressionEvaluator;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.util.*;
import com.intellij.util.ArrayUtil;
import com.intellij.util.ArrayUtilRt;
import com.intellij.util.ObjectUtils;
import com.intellij.util.PlatformIcons;
import com.intellij.util.containers.ContainerUtil;
import com.siyeh.ig.psiutils.DeclarationSearchUtils;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.MethodCallUtils;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.*;
import java.util.function.Function;
/**
* @author Pavel.Dolgov
*/
public final class JavaReflectionReferenceUtil {
  // MethodHandle (Java 7) and VarHandle (Java 9) infrastructure
  public static final String JAVA_LANG_INVOKE_METHOD_HANDLES_LOOKUP = "java.lang.invoke.MethodHandles.Lookup";
  public static final String JAVA_LANG_INVOKE_METHOD_TYPE = "java.lang.invoke.MethodType";
  public static final String METHOD_TYPE = "methodType";
  public static final String GENERIC_METHOD_TYPE = "genericMethodType";
  public static final String FIND_VIRTUAL = "findVirtual";
  public static final String FIND_STATIC = "findStatic";
  public static final String FIND_SPECIAL = "findSpecial";
  public static final String FIND_GETTER = "findGetter";
  public static final String FIND_SETTER = "findSetter";
  public static final String FIND_STATIC_GETTER = "findStaticGetter";
  public static final String FIND_STATIC_SETTER = "findStaticSetter";
  public static final String FIND_VAR_HANDLE = "findVarHandle";
  public static final String FIND_STATIC_VAR_HANDLE = "findStaticVarHandle";
  public static final String FIND_CONSTRUCTOR = "findConstructor";
  public static final String FIND_CLASS = "findClass";
  // Lookup factory methods that take a (class, member-name, ...) argument shape.
  public static final String[] HANDLE_FACTORY_METHOD_NAMES = {
    FIND_VIRTUAL, FIND_STATIC, FIND_SPECIAL,
    FIND_GETTER, FIND_SETTER,
    FIND_STATIC_GETTER, FIND_STATIC_SETTER,
    FIND_VAR_HANDLE, FIND_STATIC_VAR_HANDLE};
  // Classic reflection infrastructure
  public static final String GET_FIELD = "getField";
  public static final String GET_DECLARED_FIELD = "getDeclaredField";
  public static final String GET_METHOD = "getMethod";
  public static final String GET_DECLARED_METHOD = "getDeclaredMethod";
  public static final String GET_CONSTRUCTOR = "getConstructor";
  public static final String GET_DECLARED_CONSTRUCTOR = "getDeclaredConstructor";
  public static final String JAVA_LANG_CLASS_LOADER = "java.lang.ClassLoader";
  public static final String FOR_NAME = "forName";
  public static final String LOAD_CLASS = "loadClass";
  public static final String GET_CLASS = "getClass";
  public static final String NEW_INSTANCE = "newInstance";
  public static final String TYPE = "TYPE";
  // Atomic field updaters
  public static final String NEW_UPDATER = "newUpdater";
  public static final String ATOMIC_LONG_FIELD_UPDATER = "java.util.concurrent.atomic.AtomicLongFieldUpdater";
  public static final String ATOMIC_INTEGER_FIELD_UPDATER = "java.util.concurrent.atomic.AtomicIntegerFieldUpdater";
  public static final String ATOMIC_REFERENCE_FIELD_UPDATER = "java.util.concurrent.atomic.AtomicReferenceFieldUpdater";
  // Breaks cycles when a variable's definition (transitively) refers back to itself.
  private static final RecursionGuard<PsiElement> ourGuard = RecursionManager.createGuard("JavaLangClassMemberReference");
  /**
   * Attempts to determine, from PSI alone, which class a {@code Class}-valued expression
   * denotes. Handles class literals ({@code Foo.class}), {@code Class.forName(constant)},
   * {@code obj.getClass()}, final variables holding a {@code Class}, wrapper {@code TYPE}
   * fields, and falls back to the expression's {@code Class<T>} type argument.
   * Returns {@code null} when the class cannot be determined.
   */
  @Contract("null -> null")
  public static ReflectiveType getReflectiveType(@Nullable PsiExpression context) {
    context = PsiUtil.skipParenthesizedExprDown(context);
    if (context == null) {
      return null;
    }
    if (context instanceof PsiClassObjectAccessExpression) {
      // Class literal: Foo.class — the type is known exactly.
      final PsiTypeElement operand = ((PsiClassObjectAccessExpression)context).getOperand();
      return ReflectiveType.create(operand.getType(), true);
    }
    if (context instanceof PsiMethodCallExpression) {
      final PsiMethodCallExpression methodCall = (PsiMethodCallExpression)context;
      final String methodReferenceName = methodCall.getMethodExpression().getReferenceName();
      if (FOR_NAME.equals(methodReferenceName)) {
        // Class.forName("..."): only resolvable when the argument folds to a String constant.
        final PsiMethod method = methodCall.resolveMethod();
        if (method != null && isJavaLangClass(method.getContainingClass())) {
          final PsiExpression[] expressions = methodCall.getArgumentList().getExpressions();
          if (expressions.length == 1) {
            final PsiExpression argument = findDefinition(PsiUtil.skipParenthesizedExprDown(expressions[0]));
            final String className = computeConstantExpression(argument, String.class);
            if (className != null) {
              return ReflectiveType.create(findClass(className, context), true);
            }
          }
        }
      }
      else if (GET_CLASS.equals(methodReferenceName) && methodCall.getArgumentList().isEmpty()) {
        // obj.getClass(): infer from the qualifier's definition when possible.
        final PsiMethod method = methodCall.resolveMethod();
        if (method != null && isJavaLangObject(method.getContainingClass())) {
          final PsiExpression qualifier = PsiUtil.skipParenthesizedExprDown(methodCall.getMethodExpression().getQualifierExpression());
          if (qualifier instanceof PsiReferenceExpression) {
            final PsiExpression definition = findVariableDefinition((PsiReferenceExpression)qualifier);
            if (definition != null) {
              return getClassInstanceType(definition);
            }
          }
          //TODO type of the qualifier may be a supertype of the actual value - need to compute the type of the actual value
          // otherwise getDeclaredField and getDeclaredMethod may work not reliably
          if (qualifier != null) {
            return getClassInstanceType(qualifier);
          }
        }
      }
    }
    if (context instanceof PsiReferenceExpression) {
      // A variable of type Class<...>: follow its definition, guarding against cycles.
      PsiReferenceExpression reference = (PsiReferenceExpression)context;
      final PsiElement resolved = reference.resolve();
      if (resolved instanceof PsiVariable) {
        PsiVariable variable = (PsiVariable)resolved;
        if (isJavaLangClass(PsiTypesUtil.getPsiClass(variable.getType()))) {
          final PsiExpression definition = findVariableDefinition(reference, variable);
          if (definition != null) {
            ReflectiveType result = ourGuard.doPreventingRecursion(variable, false, () -> getReflectiveType(definition));
            if (result != null) {
              return result;
            }
          }
        }
      }
    }
    final PsiType type = context.getType();
    if (type instanceof PsiClassType) {
      final PsiClassType.ClassResolveResult resolveResult = ((PsiClassType)type).resolveGenerics();
      final PsiClass resolvedElement = resolveResult.getElement();
      if (!isJavaLangClass(resolvedElement)) return null;
      if (context instanceof PsiReferenceExpression && TYPE.equals(((PsiReferenceExpression)context).getReferenceName())) {
        // Wrapper TYPE constants, e.g. Integer.TYPE: a static final Class<Integer> whose
        // value is the primitive type int.
        final PsiElement resolved = ((PsiReferenceExpression)context).resolve();
        if (resolved instanceof PsiField) {
          final PsiField field = (PsiField)resolved;
          if (field.hasModifierProperty(PsiModifier.FINAL) && field.hasModifierProperty(PsiModifier.STATIC)) {
            final PsiType[] classTypeArguments = ((PsiClassType)type).getParameters();
            final PsiPrimitiveType unboxedType = classTypeArguments.length == 1
              ? PsiPrimitiveType.getUnboxedType(classTypeArguments[0]) : null;
            if (unboxedType != null && field.getContainingClass() == PsiUtil.resolveClassInClassTypeOnly(classTypeArguments[0])) {
              return ReflectiveType.create(unboxedType, true);
            }
          }
        }
      }
      // Fallback: use the T of Class<T> — not exact, since the runtime class may be a subtype.
      final PsiTypeParameter[] parameters = resolvedElement.getTypeParameters();
      if (parameters.length == 1) {
        final PsiType typeArgument = resolveResult.getSubstitutor().substitute(parameters[0]);
        final PsiType erasure = TypeConversionUtil.erasure(typeArgument);
        final PsiClass argumentClass = PsiTypesUtil.getPsiClass(erasure);
        if (argumentClass != null && !isJavaLangObject(argumentClass)) {
          return ReflectiveType.create(argumentClass, false);
        }
      }
    }
    return null;
  }
@Nullable
private static ReflectiveType getClassInstanceType(@Nullable PsiExpression expression) {
expression = PsiUtil.skipParenthesizedExprDown(expression);
if (expression == null) {
return null;
}
if (expression instanceof PsiMethodCallExpression) {
final PsiMethodCallExpression methodCall = (PsiMethodCallExpression)expression;
final String methodReferenceName = methodCall.getMethodExpression().getReferenceName();
if (NEW_INSTANCE.equals(methodReferenceName)) {
final PsiMethod method = methodCall.resolveMethod();
if (method != null) {
final PsiExpression[] arguments = methodCall.getArgumentList().getExpressions();
if (arguments.length == 0 && isClassWithName(method.getContainingClass(), CommonClassNames.JAVA_LANG_CLASS)) {
final PsiExpression qualifier = methodCall.getMethodExpression().getQualifierExpression();
if (qualifier != null) {
return ourGuard.doPreventingRecursion(qualifier, false, () -> getReflectiveType(qualifier));
}
}
else if (arguments.length > 1 && isClassWithName(method.getContainingClass(), CommonClassNames.JAVA_LANG_REFLECT_ARRAY)) {
final PsiExpression typeExpression = arguments[0];
if (typeExpression != null) {
final ReflectiveType itemType =
ourGuard.doPreventingRecursion(typeExpression, false, () -> getReflectiveType(typeExpression));
return ReflectiveType.arrayOf(itemType);
}
}
}
}
}
return ReflectiveType.create(expression.getType(), false);
}
@Contract("null,_->null")
@Nullable
public static <T> T computeConstantExpression(@Nullable PsiExpression expression, @NotNull Class<T> expectedType) {
expression = PsiUtil.skipParenthesizedExprDown(expression);
final Object computed = JavaConstantExpressionEvaluator.computeConstantExpression(expression, false);
return ObjectUtils.tryCast(computed, expectedType);
}
@Nullable
public static ReflectiveClass getReflectiveClass(PsiExpression context) {
final ReflectiveType reflectiveType = getReflectiveType(context);
return reflectiveType != null ? reflectiveType.getReflectiveClass() : null;
}
@Nullable
public static PsiExpression findDefinition(@Nullable PsiExpression expression) {
int preventEndlessLoop = 5;
while (expression instanceof PsiReferenceExpression) {
if (--preventEndlessLoop == 0) return null;
expression = findVariableDefinition((PsiReferenceExpression)expression);
}
return expression;
}
@Nullable
private static PsiExpression findVariableDefinition(@NotNull PsiReferenceExpression referenceExpression) {
final PsiElement resolved = referenceExpression.resolve();
return resolved instanceof PsiVariable ? findVariableDefinition(referenceExpression, (PsiVariable)resolved) : null;
}
@Nullable
private static PsiExpression findVariableDefinition(@NotNull PsiReferenceExpression referenceExpression, @NotNull PsiVariable variable) {
if (variable.hasModifierProperty(PsiModifier.FINAL)) {
final PsiExpression initializer = variable.getInitializer();
if (initializer != null) {
return initializer;
}
if (variable instanceof PsiField) {
return findFinalFieldDefinition(referenceExpression, (PsiField)variable);
}
}
return DeclarationSearchUtils.findDefinition(referenceExpression, variable);
}
  /**
   * Finds the expression assigned to a blank final field: first in the matching
   * (static/instance) initializer blocks, then — for instance fields — in a constructor.
   * With a single constructor that one is used; with several, only the constructor
   * enclosing {@code referenceExpression} is consulted. Returns {@code null} if no
   * assignment is found.
   */
  @Nullable
  private static PsiExpression findFinalFieldDefinition(@NotNull PsiReferenceExpression referenceExpression, @NotNull PsiField field) {
    if (!field.hasModifierProperty(PsiModifier.FINAL)) return null;
    final PsiClass psiClass = ObjectUtils.tryCast(field.getParent(), PsiClass.class);
    if (psiClass != null) {
      final boolean isStatic = field.hasModifierProperty(PsiModifier.STATIC);
      // Only initializer blocks with the same staticness as the field can assign it.
      final List<PsiClassInitializer> initializers =
        ContainerUtil.filter(psiClass.getInitializers(), initializer -> initializer.hasModifierProperty(PsiModifier.STATIC) == isStatic);
      for (PsiClassInitializer initializer : initializers) {
        final PsiExpression assignedExpression = getAssignedExpression(initializer, field);
        if (assignedExpression != null) {
          return assignedExpression;
        }
      }
      if (!isStatic) {
        // Single constructor: unambiguous, use it even from outside the constructor.
        final PsiMethod[] constructors = psiClass.getConstructors();
        if (constructors.length == 1) {
          return getAssignedExpression(constructors[0], field);
        }
        // Multiple constructors: only trust the one the reference itself sits in.
        for (PsiMethod constructor : constructors) {
          if (PsiTreeUtil.isAncestor(constructor, referenceExpression, true)) {
            return getAssignedExpression(constructor, field);
          }
        }
      }
    }
    return null;
  }
@Nullable
private static PsiExpression getAssignedExpression(@NotNull PsiMember maybeContainsAssignment, @NotNull PsiField field) {
final PsiAssignmentExpression assignment = SyntaxTraverser.psiTraverser(maybeContainsAssignment)
.filter(PsiAssignmentExpression.class)
.find(expression -> ExpressionUtils.isReferenceTo(expression.getLExpression(), field));
return assignment != null ? assignment.getRExpression() : null;
}
private static PsiClass findClass(@NotNull String qualifiedName, @NotNull PsiElement context) {
final Project project = context.getProject();
return JavaPsiFacade.getInstance(project).findClass(qualifiedName, GlobalSearchScope.allScope(project));
}
  /** Returns true when the given class is exactly {@code java.lang.Class}. */
  @Contract("null -> false")
  static boolean isJavaLangClass(@Nullable PsiClass aClass) {
    return isClassWithName(aClass, CommonClassNames.JAVA_LANG_CLASS);
  }
  /** Returns true when the given class is exactly {@code java.lang.Object}. */
  @Contract("null -> false")
  static boolean isJavaLangObject(@Nullable PsiClass aClass) {
    return isClassWithName(aClass, CommonClassNames.JAVA_LANG_OBJECT);
  }
@Contract("null, _ -> false")
public static boolean isClassWithName(@Nullable PsiClass aClass, @NotNull String name) {
return aClass != null && name.equals(aClass.getQualifiedName());
}
@Contract("null -> false")
static boolean isRegularMethod(@Nullable PsiMethod method) {
return method != null && !method.isConstructor();
}
  /** Returns true when the member has the {@code public} modifier. */
  static boolean isPublic(@NotNull PsiMember member) {
    return member.hasModifierProperty(PsiModifier.PUBLIC);
  }
static boolean isAtomicallyUpdateable(@NotNull PsiField field) {
if (field.hasModifierProperty(PsiModifier.STATIC) || !field.hasModifierProperty(PsiModifier.VOLATILE)) {
return false;
}
final PsiType type = field.getType();
return !(type instanceof PsiPrimitiveType) || PsiType.INT.equals(type) || PsiType.LONG.equals(type);
}
@Nullable
static String getParameterTypesText(@NotNull PsiMethod method) {
final StringJoiner joiner = new StringJoiner(", ");
for (PsiParameter parameter : method.getParameterList().getParameters()) {
final String typeText = getTypeText(parameter.getType());
joiner.add(typeText + ".class");
}
return joiner.toString();
}
static void shortenArgumentsClassReferences(@NotNull InsertionContext context) {
final PsiElement parameter = PsiUtilCore.getElementAtOffset(context.getFile(), context.getStartOffset());
final PsiExpressionList parameterList = PsiTreeUtil.getParentOfType(parameter, PsiExpressionList.class);
if (parameterList != null && parameterList.getParent() instanceof PsiMethodCallExpression) {
JavaCodeStyleManager.getInstance(context.getProject()).shortenClassReferences(parameterList);
}
}
@NotNull
static LookupElement withPriority(@NotNull LookupElement lookupElement, boolean hasPriority) {
return hasPriority ? lookupElement : PrioritizedLookupElement.withPriority(lookupElement, -1);
}
@Nullable
static LookupElement withPriority(@Nullable LookupElement lookupElement, int priority) {
return priority == 0 || lookupElement == null ? lookupElement : PrioritizedLookupElement.withPriority(lookupElement, priority);
}
static int getMethodSortOrder(@NotNull PsiMethod method) {
return isJavaLangObject(method.getContainingClass()) ? 1 : isPublic(method) ? -1 : 0;
}
@Nullable
static String getMemberType(@Nullable PsiElement element) {
final PsiMethodCallExpression methodCall = PsiTreeUtil.getParentOfType(element, PsiMethodCallExpression.class);
return methodCall != null ? methodCall.getMethodExpression().getReferenceName() : null;
}
@Nullable
static LookupElement lookupMethod(@NotNull PsiMethod method, @Nullable InsertHandler<LookupElement> insertHandler) {
final ReflectiveSignature signature = getMethodSignature(method);
return signature != null
? LookupElementBuilder.create(signature, method.getName())
.withIcon(signature.getIcon())
.withTailText(signature.getShortArgumentTypes())
.withInsertHandler(insertHandler)
: null;
}
  /**
   * Replaces the document text between the just-inserted element and the closing
   * parenthesis of the enclosing argument list with {@code text}, then shortens
   * any class references that were inserted.
   */
  static void replaceText(@NotNull InsertionContext context, @NotNull String text) {
    final PsiElement newElement = PsiUtilCore.getElementAtOffset(context.getFile(), context.getStartOffset());
    final PsiElement params = newElement.getParent().getParent();
    // end: position just before the argument list's closing parenthesis
    final int end = params.getTextRange().getEndOffset() - 1;
    // start never exceeds end, even if the inserted element spills past it
    final int start = Math.min(newElement.getTextRange().getEndOffset(), end);
    context.getDocument().replaceString(start, end, text);
    context.commitDocument();
    shortenArgumentsClassReferences(context);
  }
@NotNull
public static String getTypeText(@NotNull PsiType type) {
final ReflectiveType reflectiveType = ReflectiveType.create(type, false);
return reflectiveType.getQualifiedName();
}
@Nullable
public static String getTypeText(@Nullable PsiExpression argument) {
final ReflectiveType reflectiveType = getReflectiveType(argument);
return reflectiveType != null ? reflectiveType.getQualifiedName() : null;
}
@Contract("null -> null")
@Nullable
public static ReflectiveSignature getMethodSignature(@Nullable PsiMethod method) {
if (method != null) {
final List<String> types = new ArrayList<>();
final PsiType returnType = method.getReturnType();
types.add(getTypeText(returnType != null ? returnType : PsiType.VOID)); // null return type means it's a constructor
for (PsiParameter parameter : method.getParameterList().getParameters()) {
types.add(getTypeText(parameter.getType()));
}
final Icon icon = method.getIcon(Iconable.ICON_FLAG_VISIBILITY);
return ReflectiveSignature.create(icon, types);
}
return null;
}
@NotNull
public static String getMethodTypeExpressionText(@NotNull ReflectiveSignature signature) {
final String types = signature.getText(true, type -> type + ".class");
return JAVA_LANG_INVOKE_METHOD_TYPE + "." + METHOD_TYPE + types;
}
  /** Returns true when the call targets {@code className.methodName}, ignoring the exact parameter types. */
  public static boolean isCallToMethod(@NotNull PsiMethodCallExpression methodCall, @NotNull String className, @NotNull String methodName) {
    return MethodCallUtils.isCallToMethod(methodCall, className, null, methodName, (PsiType[])null);
  }
/**
* Tries to unwrap array and find its components
* @param maybeArray an array to unwrap
* @return list of unwrapped array components, some or all of them could be null if unknown (but the length is known);
* returns null if nothing is known.
*/
@Nullable
public static List<PsiExpression> getVarargs(@Nullable PsiExpression maybeArray) {
if (ExpressionUtils.isNullLiteral(maybeArray)) {
return Collections.emptyList();
}
if (isVarargAsArray(maybeArray)) {
final PsiExpression argumentsDefinition = findDefinition(maybeArray);
if (argumentsDefinition instanceof PsiArrayInitializerExpression) {
return Arrays.asList(((PsiArrayInitializerExpression)argumentsDefinition).getInitializers());
}
if (argumentsDefinition instanceof PsiNewExpression) {
final PsiArrayInitializerExpression arrayInitializer = ((PsiNewExpression)argumentsDefinition).getArrayInitializer();
if (arrayInitializer != null) {
return Arrays.asList(arrayInitializer.getInitializers());
}
final PsiExpression[] dimensions = ((PsiNewExpression)argumentsDefinition).getArrayDimensions();
if (dimensions.length == 1) { // new Object[length] or new Class<?>[length]
final Integer itemCount = computeConstantExpression(findDefinition(dimensions[0]), Integer.class);
if (itemCount != null && itemCount >= 0 && itemCount < 256) {
return Collections.nCopies(itemCount, null);
}
}
}
}
return null;
}
@Contract("null -> false")
public static boolean isVarargAsArray(@Nullable PsiExpression maybeArray) {
final PsiType type = maybeArray != null ? maybeArray.getType() : null;
return type instanceof PsiArrayType &&
type.getArrayDimensions() == 1 &&
type.getDeepComponentType() instanceof PsiClassType;
}
/**
* Take method's return type and parameter types
* from arguments of MethodType.methodType(Class...) and MethodType.genericMethodType(int, boolean?)
*/
@Nullable
public static ReflectiveSignature composeMethodSignature(@Nullable PsiExpression methodTypeExpression) {
final PsiExpression typeDefinition = findDefinition(methodTypeExpression);
if (typeDefinition instanceof PsiMethodCallExpression) {
final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)typeDefinition;
final String referenceName = methodCallExpression.getMethodExpression().getReferenceName();
Function<PsiExpression[], ReflectiveSignature> composer = null;
if (METHOD_TYPE.equals(referenceName)) {
composer = JavaReflectionReferenceUtil::composeMethodSignatureFromTypes;
}
else if (GENERIC_METHOD_TYPE.equals(referenceName)) {
composer = JavaReflectionReferenceUtil::composeGenericMethodSignature;
}
if (composer != null) {
final PsiMethod method = methodCallExpression.resolveMethod();
if (method != null) {
final PsiClass psiClass = method.getContainingClass();
if (psiClass != null && JAVA_LANG_INVOKE_METHOD_TYPE.equals(psiClass.getQualifiedName())) {
final PsiExpression[] arguments = methodCallExpression.getArgumentList().getExpressions();
return composer.apply(arguments);
}
}
}
}
return null;
}
@Nullable
private static ReflectiveSignature composeMethodSignatureFromTypes(PsiExpression @NotNull [] returnAndParameterTypes) {
final List<String> typeTexts = ContainerUtil.map(returnAndParameterTypes, JavaReflectionReferenceUtil::getTypeText);
return ReflectiveSignature.create(typeTexts);
}
@Nullable
public static Pair.NonNull<Integer, Boolean> getGenericSignature(PsiExpression @NotNull [] genericSignatureShape) {
if (genericSignatureShape.length == 0 || genericSignatureShape.length > 2) {
return null;
}
final Integer objectArgCount = computeConstantExpression(genericSignatureShape[0], Integer.class);
final Boolean finalArray = // there's an additional parameter which is an ellipsis or an array
genericSignatureShape.length > 1 ? computeConstantExpression(genericSignatureShape[1], Boolean.class) : false;
if (objectArgCount == null || objectArgCount < 0 || objectArgCount > 255) {
return null;
}
if (finalArray == null || finalArray && objectArgCount > 254) {
return null;
}
return Pair.createNonNull(objectArgCount, finalArray);
}
/**
* All the types in the method signature are either unbounded type parameters or java.lang.Object (with possible vararg)
*/
@Nullable
private static ReflectiveSignature composeGenericMethodSignature(PsiExpression @NotNull [] genericSignatureShape) {
final Pair.NonNull<Integer, Boolean> signature = getGenericSignature(genericSignatureShape);
if (signature == null) return null;
final int objectArgCount = signature.getFirst();
final boolean finalArray = signature.getSecond();
final List<String> typeNames = new ArrayList<>();
typeNames.add(CommonClassNames.JAVA_LANG_OBJECT); // return type
for (int i = 0; i < objectArgCount; i++) {
typeNames.add(CommonClassNames.JAVA_LANG_OBJECT);
}
if (finalArray) {
typeNames.add(CommonClassNames.JAVA_LANG_OBJECT + "[]");
}
return ReflectiveSignature.create(typeNames);
}
  /**
   * An erased {@link PsiType} paired with an "exact" flag. The wrapped type is always
   * an erasure (see {@link #erasure}); "exact" means the type was determined precisely
   * at the use site, not merely as an upper bound.
   */
  public static final class ReflectiveType {
    final PsiType myType;
    final boolean myIsExact;
    private ReflectiveType(@NotNull PsiType erasedType, boolean isExact) {
      myType = erasedType;
      myIsExact = isExact;
    }
    /** Canonical text of the erased type, e.g. {@code java.util.List}. */
    @NotNull
    public String getQualifiedName() {
      return myType.getCanonicalText();
    }
    @Override
    public String toString() {
      return myType.getCanonicalText();
    }
    /** Compares this (already erased) type with the erasure of {@code otherType}. */
    public boolean isEqualTo(@Nullable PsiType otherType) {
      return otherType != null && myType.equals(erasure(otherType));
    }
    public boolean isAssignableFrom(@NotNull PsiType type) {
      return myType.isAssignableFrom(type);
    }
    public boolean isPrimitive() {
      return myType instanceof PsiPrimitiveType;
    }
    @NotNull
    public PsiType getType() {
      return myType;
    }
    public boolean isExact() {
      return myIsExact;
    }
    /** Wraps the underlying class, or returns null when the type has no class (e.g. primitives). */
    @Nullable
    public ReflectiveClass getReflectiveClass() {
      PsiClass psiClass = getPsiClass();
      if (psiClass != null) {
        return new ReflectiveClass(psiClass, myIsExact);
      }
      return null;
    }
    /** Component type for array types (exactness is inherited), null for non-arrays. */
    @Nullable
    public ReflectiveType getArrayComponentType() {
      if (myType instanceof PsiArrayType) {
        PsiType componentType = ((PsiArrayType)myType).getComponentType();
        return new ReflectiveType(componentType, myIsExact);
      }
      return null;
    }
    @Nullable
    public PsiClass getPsiClass() {
      return PsiTypesUtil.getPsiClass(myType);
    }
    /** Factory: erases {@code originalType}; returns null only for a null argument. */
    @Contract("!null,_ -> !null; null,_ -> null")
    @Nullable
    public static ReflectiveType create(@Nullable PsiType originalType, boolean isExact) {
      if (originalType != null) {
        return new ReflectiveType(erasure(originalType), isExact);
      }
      return null;
    }
    /** Factory: wraps the class as a class type; returns null only for a null argument. */
    @Contract("!null,_ -> !null; null,_ -> null")
    @Nullable
    public static ReflectiveType create(@Nullable PsiClass psiClass, boolean isExact) {
      if (psiClass != null) {
        final PsiElementFactory factory = JavaPsiFacade.getElementFactory(psiClass.getProject());
        return new ReflectiveType(factory.createType(psiClass), isExact);
      }
      return null;
    }
    /** Factory: array type with the given component type; exactness is inherited. */
    @Contract("!null -> !null; null -> null")
    @Nullable
    public static ReflectiveType arrayOf(@Nullable ReflectiveType itemType) {
      if (itemType != null) {
        return new ReflectiveType(itemType.myType.createArrayType(), itemType.myIsExact);
      }
      return null;
    }
    /** Erasure that additionally converts a vararg (ellipsis) type to the matching array type. */
    @NotNull
    private static PsiType erasure(@NotNull PsiType type) {
      final PsiType erasure = TypeConversionUtil.erasure(type);
      if (erasure instanceof PsiEllipsisType) {
        return ((PsiEllipsisType)erasure).toArrayType();
      }
      return erasure;
    }
  }
public static class ReflectiveClass {
final PsiClass myPsiClass;
final boolean myIsExact;
public ReflectiveClass(@NotNull PsiClass psiClass, boolean isExact) {
myPsiClass = psiClass;
myIsExact = isExact;
}
@NotNull
public PsiClass getPsiClass() {
return myPsiClass;
}
public boolean isExact() {
return myIsExact || myPsiClass.hasModifierProperty(PsiModifier.FINAL);
}
}
  /**
   * A method signature for reflective lookup: a return type plus argument types, stored as
   * qualified type names. Value object; ordering is by argument count, then argument types,
   * then return type. The icon does not participate in equality or ordering.
   */
  public static final class ReflectiveSignature implements Comparable<ReflectiveSignature> {
    /** Signature of a no-argument constructor: void return type and no arguments. */
    public static final ReflectiveSignature NO_ARGUMENT_CONSTRUCTOR_SIGNATURE =
      new ReflectiveSignature(null, PsiKeyword.VOID, ArrayUtilRt.EMPTY_STRING_ARRAY);
    private final Icon myIcon;
    @NotNull private final String myReturnType;
    private final String @NotNull [] myArgumentTypes;
    @Nullable
    public static ReflectiveSignature create(@NotNull List<String> typeTexts) {
      return create(null, typeTexts);
    }
    /**
     * Creates a signature from type names where the first item is the return type.
     * Returns null when the list is empty or contains an unknown (null) type name.
     */
    @Nullable
    public static ReflectiveSignature create(@Nullable Icon icon, @NotNull List<String> typeTexts) {
      if (!typeTexts.isEmpty() && !typeTexts.contains(null)) {
        final String[] argumentTypes = ArrayUtilRt.toStringArray(typeTexts.subList(1, typeTexts.size()));
        return new ReflectiveSignature(icon, typeTexts.get(0), argumentTypes);
      }
      return null;
    }
    private ReflectiveSignature(@Nullable Icon icon, @NotNull String returnType, String @NotNull [] argumentTypes) {
      myIcon = icon;
      myReturnType = returnType;
      myArgumentTypes = argumentTypes;
    }
    public String getText(boolean withReturnType, @NotNull Function<? super String, String> transformation) {
      return getText(withReturnType, true, transformation);
    }
    /**
     * Renders the signature as a comma-separated list, each type passed through
     * {@code transformation}, optionally prefixed with the return type and
     * optionally wrapped in parentheses.
     */
    public String getText(boolean withReturnType, boolean withParentheses, @NotNull Function<? super String, String> transformation) {
      final StringJoiner joiner = new StringJoiner(", ", withParentheses ? "(" : "", withParentheses ? ")" : "");
      if (withReturnType) {
        joiner.add(transformation.apply(myReturnType));
      }
      for (String argumentType : myArgumentTypes) {
        joiner.add(transformation.apply(argumentType));
      }
      return joiner.toString();
    }
    @NotNull
    public String getShortReturnType() {
      return PsiNameHelper.getShortClassName(myReturnType);
    }
    @NotNull
    public String getShortArgumentTypes() {
      return getText(false, PsiNameHelper::getShortClassName);
    }
    /** The method's icon, falling back to the generic method icon when none was provided. */
    public @NotNull Icon getIcon() {
      return myIcon != null ? myIcon : PlatformIcons.METHOD_ICON;
    }
    @Override
    public int compareTo(@NotNull ReflectiveSignature other) {
      // Arity first, then lexicographic comparison of argument types, then return type.
      int c = myArgumentTypes.length - other.myArgumentTypes.length;
      if (c != 0) return c;
      c = ArrayUtil.lexicographicCompare(myArgumentTypes, other.myArgumentTypes);
      if (c != 0) return c;
      return myReturnType.compareTo(other.myReturnType);
    }
    @Override
    public boolean equals(Object o) {
      if (this == o) return true;
      if (!(o instanceof ReflectiveSignature)) return false;
      final ReflectiveSignature other = (ReflectiveSignature)o;
      return Objects.equals(myReturnType, other.myReturnType) &&
             Arrays.equals(myArgumentTypes, other.myArgumentTypes);
    }
    @Override
    public int hashCode() {
      return Objects.hash(myReturnType, Arrays.hashCode(myArgumentTypes));
    }
    @Override
    public String toString() {
      return myReturnType + " " + Arrays.toString(myArgumentTypes);
    }
  }
}
| |
/*
* Copyright 2020-2022, Seqera Labs
* Copyright 2013-2019, Centre for Genomic Regulation (CRG)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nextflow.io;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.nio.ByteBuffer;
import java.nio.CharBuffer;
import java.nio.charset.Charset;
import java.nio.charset.CharsetDecoder;
import java.nio.charset.CoderResult;
import java.nio.charset.CodingErrorAction;
/**
* {@link OutputStream} implementation that transforms a byte stream to a
* character stream using a specified charset encoding and writes the resulting
* stream to a {@link Writer}. The stream is transformed using a
* {@link CharsetDecoder} object, guaranteeing that all charset
* encodings supported by the JRE are handled correctly.
* <p>
* The output of the {@link CharsetDecoder} is buffered using a fixed size buffer.
* This implies that the data is written to the underlying {@link Writer} in chunks
* that are no larger than the size of this buffer. By default, the buffer is
* flushed only when it overflows or when {@link #flush()} or {@link #close()}
* is called. In general there is therefore no need to wrap the underlying {@link Writer}
* in a {@link java.io.BufferedWriter}. {@link WriterOutputStream} can also
* be instructed to flush the buffer after each write operation. In this case, all
* available data is written immediately to the underlying {@link Writer}, implying that
* the current position of the {@link Writer} is correlated to the current position
* of the {@link WriterOutputStream}.
* <p>
* {@link WriterOutputStream} implements the inverse transformation of {@link java.io.OutputStreamWriter};
* in the following example, writing to <tt>out2</tt> would have the same result as writing to
* <tt>out</tt> directly (provided that the byte sequence is legal with respect to the
* charset encoding):
* <pre>
* OutputStream out = ...
* Charset cs = ...
* OutputStreamWriter writer = new OutputStreamWriter(out, cs);
* WriterOutputStream out2 = new WriterOutputStream(writer, cs);</pre>
* {@link WriterOutputStream} implements the same transformation as {@link java.io.InputStreamReader},
* except that the control flow is reversed: both classes transform a byte stream
* into a character stream, but {@link java.io.InputStreamReader} pulls data from the underlying stream,
* while {@link WriterOutputStream} pushes it to the underlying stream.
* <p>
* Note that while there are use cases where there is no alternative to using
* this class, very often the need to use this class is an indication of a flaw
* in the design of the code. This class is typically used in situations where an existing
* API only accepts an {@link OutputStream} object, but where the stream is known to represent
* character data that must be decoded for further use.
* <p>
* Instances of {@link WriterOutputStream} are not thread safe.
*
* @see ReaderInputStream
*
* @since 2.0
*/
public class WriterOutputStream extends OutputStream {
private static final int DEFAULT_BUFFER_SIZE = 1024;
private final Writer writer;
private final CharsetDecoder decoder;
private final boolean writeImmediately;
/**
* ByteBuffer used as input for the decoder. This buffer can be small
* as it is used only to transfer the received data to the
* decoder.
*/
private final ByteBuffer decoderIn = ByteBuffer.allocate(128);
/**
* CharBuffer used as output for the decoder. It should be
* somewhat larger as we write from this buffer to the
* underlying Writer.
*/
private final CharBuffer decoderOut;
/**
* Constructs a new {@link WriterOutputStream} with a default output buffer size of
* 1024 characters. The output buffer will only be flushed when it overflows or when
* {@link #flush()} or {@link #close()} is called.
*
* @param writer the target {@link Writer}
* @param decoder the charset decoder
* @since 2.1
*/
public WriterOutputStream(Writer writer, CharsetDecoder decoder) {
this(writer, decoder, DEFAULT_BUFFER_SIZE, false);
}
/**
* Constructs a new {@link WriterOutputStream}.
*
* @param writer the target {@link Writer}
* @param decoder the charset decoder
* @param bufferSize the size of the output buffer in number of characters
* @param writeImmediately If <tt>true</tt> the output buffer will be flushed after each
* write operation, i.e. all available data will be written to the
* underlying {@link Writer} immediately. If <tt>false</tt>, the
* output buffer will only be flushed when it overflows or when
* {@link #flush()} or {@link #close()} is called.
* @since 2.1
*/
public WriterOutputStream(Writer writer, CharsetDecoder decoder, int bufferSize, boolean writeImmediately) {
this.writer = writer;
this.decoder = decoder;
this.writeImmediately = writeImmediately;
decoderOut = CharBuffer.allocate(bufferSize);
}
/**
* Constructs a new {@link WriterOutputStream}.
*
* @param writer the target {@link Writer}
* @param charset the charset encoding
* @param bufferSize the size of the output buffer in number of characters
* @param writeImmediately If <tt>true</tt> the output buffer will be flushed after each
* write operation, i.e. all available data will be written to the
* underlying {@link Writer} immediately. If <tt>false</tt>, the
* output buffer will only be flushed when it overflows or when
* {@link #flush()} or {@link #close()} is called.
*/
public WriterOutputStream(Writer writer, Charset charset, int bufferSize, boolean writeImmediately) {
this(writer,
charset.newDecoder()
.onMalformedInput(CodingErrorAction.REPLACE)
.onUnmappableCharacter(CodingErrorAction.REPLACE)
.replaceWith("?"),
bufferSize,
writeImmediately);
}
/**
* Constructs a new {@link WriterOutputStream} with a default output buffer size of
* 1024 characters. The output buffer will only be flushed when it overflows or when
* {@link #flush()} or {@link #close()} is called.
*
* @param writer the target {@link Writer}
* @param charset the charset encoding
*/
public WriterOutputStream(Writer writer, Charset charset) {
this(writer, charset, DEFAULT_BUFFER_SIZE, false);
}
/**
* Constructs a new {@link WriterOutputStream}.
*
* @param writer the target {@link Writer}
* @param charsetName the name of the charset encoding
* @param bufferSize the size of the output buffer in number of characters
* @param writeImmediately If <tt>true</tt> the output buffer will be flushed after each
* write operation, i.e. all available data will be written to the
* underlying {@link Writer} immediately. If <tt>false</tt>, the
* output buffer will only be flushed when it overflows or when
* {@link #flush()} or {@link #close()} is called.
*/
public WriterOutputStream(Writer writer, String charsetName, int bufferSize, boolean writeImmediately) {
this(writer, Charset.forName(charsetName), bufferSize, writeImmediately);
}
/**
* Constructs a new {@link WriterOutputStream} with a default output buffer size of
* 1024 characters. The output buffer will only be flushed when it overflows or when
* {@link #flush()} or {@link #close()} is called.
*
* @param writer the target {@link Writer}
* @param charsetName the name of the charset encoding
*/
public WriterOutputStream(Writer writer, String charsetName) {
this(writer, charsetName, DEFAULT_BUFFER_SIZE, false);
}
/**
* Constructs a new {@link WriterOutputStream} that uses the default character encoding
* and with a default output buffer size of 1024 characters. The output buffer will only
* be flushed when it overflows or when {@link #flush()} or {@link #close()} is called.
*
* @param writer the target {@link Writer}
*/
public WriterOutputStream(Writer writer) {
this(writer, Charset.defaultCharset(), DEFAULT_BUFFER_SIZE, false);
}
/**
* Write bytes from the specified byte array to the stream.
*
* @param b the byte array containing the bytes to write
* @param off the start offset in the byte array
* @param len the number of bytes to write
* @throws IOException if an I/O error occurs
*/
@Override
public void write(byte[] b, int off, int len) throws IOException {
while (len > 0) {
int c = Math.min(len, decoderIn.remaining());
decoderIn.put(b, off, c);
processInput(false);
len -= c;
off += c;
}
if (writeImmediately) {
flushOutput();
}
}
/**
* Write bytes from the specified byte array to the stream.
*
* @param b the byte array containing the bytes to write
* @throws IOException if an I/O error occurs
*/
@Override
public void write(byte[] b) throws IOException {
write(b, 0, b.length);
}
/**
* Write a single byte to the stream.
*
* @param b the byte to write
* @throws IOException if an I/O error occurs
*/
@Override
public void write(int b) throws IOException {
write(new byte[] { (byte)b }, 0, 1);
}
/**
* Flush the stream. Any remaining content accumulated in the output buffer
* will be written to the underlying {@link Writer}. After that
* {@link Writer#flush()} will be called.
* @throws IOException if an I/O error occurs
*/
@Override
public void flush() throws IOException {
flushOutput();
writer.flush();
}
/**
* Close the stream. Any remaining content accumulated in the output buffer
* will be written to the underlying {@link Writer}. After that
* {@link Writer#close()} will be called.
* @throws IOException if an I/O error occurs
*/
@Override
public void close() throws IOException {
processInput(true);
flushOutput();
writer.close();
}
/**
* Decode the contents of the input ByteBuffer into a CharBuffer.
*
* @param endOfInput indicates end of input
* @throws IOException if an I/O error occurs
*/
private void processInput(boolean endOfInput) throws IOException {
// Prepare decoderIn for reading
decoderIn.flip();
CoderResult coderResult;
while (true) {
coderResult = decoder.decode(decoderIn, decoderOut, endOfInput);
if (coderResult.isOverflow()) {
flushOutput();
} else if (coderResult.isUnderflow()) {
break;
} else {
// The decoder is configured to replace malformed input and unmappable characters,
// so we should not get here.
throw new IOException("Unexpected coder result");
}
}
// Discard the bytes that have been read
decoderIn.compact();
}
/**
* Flush the output.
*
* @throws IOException if an I/O error occurs
*/
private void flushOutput() throws IOException {
if (decoderOut.position() > 0) {
writer.write(decoderOut.array(), 0, decoderOut.position());
decoderOut.rewind();
}
}
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package git4idea.util;
import com.intellij.notification.Notification;
import com.intellij.notification.NotificationAction;
import com.intellij.notification.NotificationListener;
import com.intellij.notification.NotificationType;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.MultiLineLabelUI;
import com.intellij.openapi.ui.VerticalFlowLayout;
import com.intellij.openapi.util.NlsContexts;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.VcsNotifier;
import com.intellij.openapi.vcs.changes.ui.SelectFilesDialog;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.components.JBLabel;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.xml.util.XmlStringUtil;
import git4idea.DialogManager;
import git4idea.GitUtil;
import git4idea.i18n.GitBundle;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.EmptyBorder;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static com.intellij.openapi.vcs.VcsNotifier.IMPORTANT_ERROR_NOTIFICATION;
public final class GitUntrackedFilesHelper {
  private static final Logger LOG = Logger.getInstance(GitUntrackedFilesHelper.class);

  // Static utility class: not to be instantiated.
  private GitUntrackedFilesHelper() {
  }
/**
* Displays notification about {@code untracked files would be overwritten by checkout} error.
* Clicking on the link in the notification opens a simple dialog with the list of these files.
* @param root
* @param relativePaths
* @param operation the name of the Git operation that caused the error: {@code rebase, merge, checkout}.
* @param description the content of the notification or null if the default content is to be used.
*/
public static void notifyUntrackedFilesOverwrittenBy(@NotNull Project project,
@NotNull VirtualFile root,
@NotNull Collection<String> relativePaths,
@NotNull @Nls String operation,
@Nullable @NlsContexts.DetailedDescription String description) {
notifyUntrackedFilesOverwrittenBy(project, root, relativePaths, operation, description, null);
}
public static void notifyUntrackedFilesOverwrittenBy(@NotNull Project project,
@NotNull VirtualFile root,
@NotNull Collection<String> relativePaths,
@NotNull @Nls String operation,
@Nullable @NlsContexts.DetailedDescription String description,
@Nullable NotificationListener listener,
NotificationAction @NotNull ... actions) {
Notification notification = getUntrackedFilesOverwrittenByNotification(project, root, relativePaths, operation, description, listener);
for (NotificationAction action : actions) {
notification.addAction(action);
}
VcsNotifier.getInstance(project).notify(notification);
}
/**
* Show dialog for the "Untracked Files Would be Overwritten by checkout/merge/rebase" error,
* with a proposal to rollback the action (checkout/merge/rebase) in successful repositories.
* <p/>
* The method receives the relative paths to some untracked files, returned by Git command,
* and tries to find corresponding VirtualFiles, based on the given root, to display in the standard dialog.
* If for some reason it doesn't find any VirtualFile, it shows the paths in a simple dialog.
*
* @return true if the user agrees to rollback, false if the user decides to keep things as is and simply close the dialog.
*/
public static boolean showUntrackedFilesDialogWithRollback(@NotNull final Project project,
@NotNull @Nls String operationName,
@NotNull @NlsContexts.Label String rollbackProposal,
@NotNull VirtualFile root,
@NotNull final Collection<String> relativePaths) {
Collection<String> absolutePaths = GitUtil.toAbsolute(root, relativePaths);
List<VirtualFile> untrackedFiles = ContainerUtil.mapNotNull(absolutePaths,
absolutePath -> GitUtil.findRefreshFileOrLog(absolutePath));
Ref<Boolean> rollback = Ref.create();
ApplicationManager.getApplication().invokeAndWait(() -> {
JComponent filesBrowser;
if (untrackedFiles.isEmpty()) {
LOG.debug("Couldn't find the untracked files, displaying simplified dialog.");
filesBrowser = new GitSimplePathsBrowser(project, absolutePaths);
}
else {
long validFiles = untrackedFiles.stream().filter(VirtualFile::isValid).count();
LOG.debug(String.format("Untracked files: [%s]. Valid: %d (of %d)", untrackedFiles, validFiles, untrackedFiles.size()));
filesBrowser = ScrollPaneFactory.createScrollPane(new SelectFilesDialog.VirtualFileList(project, false, true, untrackedFiles));
}
String title = GitBundle.message("dialog.title.could.not.operation", StringUtil.capitalize(operationName));
String description = GitBundle.message("dialog.message.untracked.files.will.be.overwritten.by.operation", operationName);
DialogWrapper dialog = new UntrackedFilesRollBackDialog(project, filesBrowser, description, rollbackProposal);
dialog.setTitle(title);
DialogManager.show(dialog);
rollback.set(dialog.isOK());
});
return rollback.get();
}
@NotNull
private static Notification getUntrackedFilesOverwrittenByNotification(@NotNull Project project,
@NotNull VirtualFile root,
@NotNull Collection<String> relativePaths,
@NotNull @Nls String operation,
@Nullable @NlsContexts.DetailedDescription String description,
@Nullable NotificationListener listener) {
if (description == null) description = "";
String notificationTitle = GitBundle.message("notification.title.untracked.files.prevent.operation", StringUtil.capitalize(operation));
String notificationDesc = GitBundle.message("notification.content.untracked.files.prevent.operation.move.or.commit",
operation, description);
final Collection<String> absolutePaths = GitUtil.toAbsolute(root, relativePaths);
final List<VirtualFile> untrackedFiles = ContainerUtil.mapNotNull(absolutePaths, absolutePath -> {
return GitUtil.findRefreshFileOrLog(absolutePath);
});
Notification notification = IMPORTANT_ERROR_NOTIFICATION.createNotification(notificationTitle, notificationDesc,
NotificationType.ERROR, listener, "untracked.files.overwritten");
notification.addAction(new NotificationAction(VcsBundle.messagePointer("action.NotificationAction.VFSListener.text.view.files")) {
@Override
public void actionPerformed(@NotNull AnActionEvent e, @NotNull Notification notification) {
String dialogDesc = GitBundle.message("dialog.message.untracked.files.will.be.overwritten.by.operation", operation);
String title = GitBundle.message("dialog.title.untracked.files.preventing.operation", StringUtil.capitalize(operation));
if (untrackedFiles.isEmpty()) {
GitUtil.showPathsInDialog(project, absolutePaths, title, dialogDesc);
}
else {
DialogWrapper dialog;
dialog = new UntrackedFilesDialog(project, untrackedFiles, dialogDesc);
dialog.setTitle(title);
dialog.show();
}
}
});
return notification;
}
private static class UntrackedFilesDialog extends SelectFilesDialog {
UntrackedFilesDialog(Project project,
@NotNull Collection<? extends VirtualFile> untrackedFiles,
@NotNull @NlsContexts.Label String dialogDesc) {
super(project, new ArrayList<>(untrackedFiles), StringUtil.stripHtml(dialogDesc, true), null, false, true);
init();
}
@Override
protected Action @NotNull [] createActions() {
return new Action[]{getOKAction()};
}
}
private static class UntrackedFilesRollBackDialog extends DialogWrapper {
@NotNull private final JComponent myFilesBrowser;
@NotNull private final @NlsContexts.Label String myPrompt;
@NotNull private final @NlsContexts.Label String myRollbackProposal;
UntrackedFilesRollBackDialog(@NotNull Project project,
@NotNull JComponent filesBrowser,
@NotNull @NlsContexts.Label String prompt,
@NotNull @NlsContexts.Label String rollbackProposal) {
super(project);
myFilesBrowser = filesBrowser;
myPrompt = prompt;
myRollbackProposal = rollbackProposal;
setOKButtonText(GitBundle.message("button.rollback"));
setCancelButtonText(GitBundle.message("button.don.t.rollback"));
init();
}
@Override
protected JComponent createSouthPanel() {
JComponent buttons = super.createSouthPanel();
JPanel panel = new JPanel(new VerticalFlowLayout());
panel.add(new JBLabel(XmlStringUtil.wrapInHtml(myRollbackProposal)));
if (buttons != null) {
panel.add(buttons);
}
return panel;
}
@Nullable
@Override
protected JComponent createCenterPanel() {
return myFilesBrowser;
}
@Nullable
@Override
protected JComponent createNorthPanel() {
JLabel label = new JLabel(myPrompt);
label.setUI(new MultiLineLabelUI());
label.setBorder(new EmptyBorder(5, 1, 5, 1));
return label;
}
}
}
| |
/**
* generated by Xtext 2.10.0
*/
package com.laegler.stubbr.lang.stubbrLang.impl;
import com.laegler.stubbr.lang.stubbrLang.Option;
import com.laegler.stubbr.lang.stubbrLang.OptionFlowNode;
import com.laegler.stubbr.lang.stubbrLang.StubbrLangPackage;
import java.util.Collection;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Option</b></em>'.
 * NOTE: EMF-generated code. Hand edits outside the user-doc regions are lost on
 * regeneration unless the member's tag is changed to {@code @generated NOT}.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link com.laegler.stubbr.lang.stubbrLang.impl.OptionImpl#isDefault <em>Default</em>}</li>
 *   <li>{@link com.laegler.stubbr.lang.stubbrLang.impl.OptionImpl#getName <em>Name</em>}</li>
 *   <li>{@link com.laegler.stubbr.lang.stubbrLang.impl.OptionImpl#getLabel <em>Label</em>}</li>
 *   <li>{@link com.laegler.stubbr.lang.stubbrLang.impl.OptionImpl#getFlowNodes <em>Flow Nodes</em>}</li>
 * </ul>
 *
 * @generated
 */
public class OptionImpl extends MinimalEObjectImpl.Container implements Option
{
  /**
   * The default value of the '{@link #isDefault() <em>Default</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isDefault()
   * @generated
   * @ordered
   */
  protected static final boolean DEFAULT_EDEFAULT = false;

  /**
   * The cached value of the '{@link #isDefault() <em>Default</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #isDefault()
   * @generated
   * @ordered
   */
  protected boolean default_ = DEFAULT_EDEFAULT;

  /**
   * The default value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected static final String NAME_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getName() <em>Name</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getName()
   * @generated
   * @ordered
   */
  protected String name = NAME_EDEFAULT;

  /**
   * The default value of the '{@link #getLabel() <em>Label</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getLabel()
   * @generated
   * @ordered
   */
  protected static final String LABEL_EDEFAULT = null;

  /**
   * The cached value of the '{@link #getLabel() <em>Label</em>}' attribute.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @see #getLabel()
   * @generated
   * @ordered
   */
  protected String label = LABEL_EDEFAULT;

  /**
   * The cached value of the '{@link #getFlowNodes() <em>Flow Nodes</em>}' containment reference list.
   * <!-- begin-user-doc -->
   * Lazily created on first access by {@link #getFlowNodes()}.
   * <!-- end-user-doc -->
   * @see #getFlowNodes()
   * @generated
   * @ordered
   */
  protected EList<OptionFlowNode> flowNodes;

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  protected OptionImpl()
  {
    super();
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  protected EClass eStaticClass()
  {
    return StubbrLangPackage.eINSTANCE.getOption();
  }

  /**
   * <!-- begin-user-doc -->
   * Returns the cached value of the '<em>Default</em>' attribute.
   * <!-- end-user-doc -->
   * @generated
   */
  public boolean isDefault()
  {
    return default_;
  }

  /**
   * <!-- begin-user-doc -->
   * Sets the '<em>Default</em>' attribute and notifies adapters when required.
   * <!-- end-user-doc -->
   * @generated
   */
  public void setDefault(boolean newDefault)
  {
    boolean oldDefault = default_;
    default_ = newDefault;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, StubbrLangPackage.OPTION__DEFAULT, oldDefault, default_));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getName()
  {
    return name;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setName(String newName)
  {
    String oldName = name;
    name = newName;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, StubbrLangPackage.OPTION__NAME, oldName, name));
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public String getLabel()
  {
    return label;
  }

  /**
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @generated
   */
  public void setLabel(String newLabel)
  {
    String oldLabel = label;
    label = newLabel;
    if (eNotificationRequired())
      eNotify(new ENotificationImpl(this, Notification.SET, StubbrLangPackage.OPTION__LABEL, oldLabel, label));
  }

  /**
   * <!-- begin-user-doc -->
   * Lazily creates the containment list on first access.
   * <!-- end-user-doc -->
   * @generated
   */
  public EList<OptionFlowNode> getFlowNodes()
  {
    if (flowNodes == null)
    {
      flowNodes = new EObjectContainmentEList<OptionFlowNode>(OptionFlowNode.class, this, StubbrLangPackage.OPTION__FLOW_NODES);
    }
    return flowNodes;
  }

  /**
   * <!-- begin-user-doc -->
   * Detaches a contained {@code OptionFlowNode} from the containment list.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs)
  {
    switch (featureID)
    {
      case StubbrLangPackage.OPTION__FLOW_NODES:
        return ((InternalEList<?>)getFlowNodes()).basicRemove(otherEnd, msgs);
    }
    return super.eInverseRemove(otherEnd, featureID, msgs);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective getter dispatching on the feature id.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public Object eGet(int featureID, boolean resolve, boolean coreType)
  {
    switch (featureID)
    {
      case StubbrLangPackage.OPTION__DEFAULT:
        return isDefault();
      case StubbrLangPackage.OPTION__NAME:
        return getName();
      case StubbrLangPackage.OPTION__LABEL:
        return getLabel();
      case StubbrLangPackage.OPTION__FLOW_NODES:
        return getFlowNodes();
    }
    return super.eGet(featureID, resolve, coreType);
  }

  /**
   * <!-- begin-user-doc -->
   * Reflective setter; for the Flow Nodes feature the whole list is replaced.
   * <!-- end-user-doc -->
   * @generated
   */
  @SuppressWarnings("unchecked")
  @Override
  public void eSet(int featureID, Object newValue)
  {
    switch (featureID)
    {
      case StubbrLangPackage.OPTION__DEFAULT:
        setDefault((Boolean)newValue);
        return;
      case StubbrLangPackage.OPTION__NAME:
        setName((String)newValue);
        return;
      case StubbrLangPackage.OPTION__LABEL:
        setLabel((String)newValue);
        return;
      case StubbrLangPackage.OPTION__FLOW_NODES:
        getFlowNodes().clear();
        getFlowNodes().addAll((Collection<? extends OptionFlowNode>)newValue);
        return;
    }
    super.eSet(featureID, newValue);
  }

  /**
   * <!-- begin-user-doc -->
   * Resets a feature to its default value.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public void eUnset(int featureID)
  {
    switch (featureID)
    {
      case StubbrLangPackage.OPTION__DEFAULT:
        setDefault(DEFAULT_EDEFAULT);
        return;
      case StubbrLangPackage.OPTION__NAME:
        setName(NAME_EDEFAULT);
        return;
      case StubbrLangPackage.OPTION__LABEL:
        setLabel(LABEL_EDEFAULT);
        return;
      case StubbrLangPackage.OPTION__FLOW_NODES:
        getFlowNodes().clear();
        return;
    }
    super.eUnset(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * Reports whether a feature differs from its default value.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public boolean eIsSet(int featureID)
  {
    switch (featureID)
    {
      case StubbrLangPackage.OPTION__DEFAULT:
        return default_ != DEFAULT_EDEFAULT;
      case StubbrLangPackage.OPTION__NAME:
        return NAME_EDEFAULT == null ? name != null : !NAME_EDEFAULT.equals(name);
      case StubbrLangPackage.OPTION__LABEL:
        return LABEL_EDEFAULT == null ? label != null : !LABEL_EDEFAULT.equals(label);
      case StubbrLangPackage.OPTION__FLOW_NODES:
        return flowNodes != null && !flowNodes.isEmpty();
    }
    return super.eIsSet(featureID);
  }

  /**
   * <!-- begin-user-doc -->
   * Appends the attribute values (not the containment list) to the default string form.
   * <!-- end-user-doc -->
   * @generated
   */
  @Override
  public String toString()
  {
    if (eIsProxy()) return super.toString();

    StringBuffer result = new StringBuffer(super.toString());
    result.append(" (default: ");
    result.append(default_);
    result.append(", name: ");
    result.append(name);
    result.append(", label: ");
    result.append(label);
    result.append(')');
    return result.toString();
  }

} //OptionImpl
| |
/*
* Copyright 2015 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.executor;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;
import org.junit.Assert;
import org.junit.Test;
import azkaban.alert.Alerter;
import azkaban.flow.Flow;
import azkaban.project.Project;
import azkaban.user.User;
import azkaban.utils.JSONUtils;
import azkaban.utils.Pair;
import azkaban.utils.Props;
import azkaban.utils.TestUtils;
/**
 * Test class for ExecutableFlowPriorityComparator.
 *
 * Expected ordering (first out of the queue first): higher explicit priority,
 * then older update time, then lower execution id. Flows created without an
 * explicit priority get the default priority, which outranks the explicit
 * values used here (see {@link #testMixedSpecifiedPriorities()}).
 */
public class ExecutableFlowPriorityComparatorTest {

  /**
   * Helper method to create an ExecutableFlow from serialized description.
   * A non-positive {@code priority} leaves the flow without an explicit
   * priority parameter, so the comparator falls back to the default priority.
   */
  private ExecutableFlow createExecutableFlow(String flowName, int priority,
    long updateTime, int executionId) throws IOException {
    ExecutableFlow execFlow =
      TestUtils.createExecutableFlow("exectest1", flowName);
    execFlow.setUpdateTime(updateTime);
    execFlow.setExecutionId(executionId);
    if (priority > 0) {
      execFlow.getExecutionOptions().getFlowParameters()
        .put(ExecutionOptions.FLOW_PRIORITY, String.valueOf(priority));
    }
    return execFlow;
  }

  /* priority queue order when all priorities are explicitly specified */
  @Test
  public void testExplicitlySpecifiedPriorities() throws IOException,
    InterruptedException {
    ExecutableFlow flow1 = createExecutableFlow("exec1", 5, 3, 1);
    ExecutableFlow flow2 = createExecutableFlow("exec2", 6, 3, 2);
    ExecutableFlow flow3 = createExecutableFlow("exec3", 2, 3, 3);
    ExecutionReference dummyRef = new ExecutionReference(0);

    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
        new ExecutableFlowPriorityComparator());
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));

    // Highest explicit priority first: 6, 5, 2.
    Assert.assertEquals(flow2, queue.take().getSecond());
    Assert.assertEquals(flow1, queue.take().getSecond());
    Assert.assertEquals(flow3, queue.take().getSecond());
  }

  /* priority queue order when some priorities are implicitly specified */
  @Test
  public void testMixedSpecifiedPriorities() throws IOException,
    InterruptedException {
    ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 3, 1);
    ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 3, 2);
    ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 3, 3);
    ExecutionReference dummyRef = new ExecutionReference(0);

    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
        new ExecutableFlowPriorityComparator());
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));

    // flow3 has no explicit priority and therefore gets the (higher) default.
    Assert.assertEquals(flow3, queue.take().getSecond());
    Assert.assertEquals(flow1, queue.take().getSecond());
    Assert.assertEquals(flow2, queue.take().getSecond());
  }

  /*
   * priority queue order when some priorities are equal, updatetime is used in
   * this case
   */
  @Test
  public void testEqualPriorities() throws IOException, InterruptedException {
    ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 1, 1);
    ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 2, 2);
    ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 3, 3);
    ExecutableFlow flow4 = createExecutableFlow("exec3", 3, 4, 4);
    ExecutionReference dummyRef = new ExecutionReference(0);

    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
        new ExecutableFlowPriorityComparator());
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow4));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));

    // Default priority (flow3) first; within priority 3 the older update time
    // wins (flow1 before flow4); lowest priority (flow2) comes out last.
    Assert.assertEquals(flow3, queue.take().getSecond());
    Assert.assertEquals(flow1, queue.take().getSecond());
    Assert.assertEquals(flow4, queue.take().getSecond());
    Assert.assertEquals(flow2, queue.take().getSecond());
  }

  /*
   * priority queue order when some priorities and updatetime are equal,
   * execution Id is used in this case
   */
  @Test
  public void testEqualUpdateTimeAndPriority() throws IOException,
    InterruptedException {
    ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 1, 1);
    ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 2, 2);
    ExecutableFlow flow3 = createExecutableFlow("exec3", -2, 2, 3);
    ExecutableFlow flow4 = createExecutableFlow("exec3", 3, 4, 4);
    ExecutionReference dummyRef = new ExecutionReference(0);

    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
        new ExecutableFlowPriorityComparator());
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow4));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));

    Assert.assertEquals(flow3, queue.take().getSecond());
    Assert.assertEquals(flow1, queue.take().getSecond());
    Assert.assertEquals(flow4, queue.take().getSecond());
    Assert.assertEquals(flow2, queue.take().getSecond());
  }

  /*
   * priority queue order when all priorities are equal and update times
   * differ: the oldest update time is dequeued first, the lower-priority
   * flow comes out last.
   *
   * NOTE: this test previously printed the queue contents, asserted nothing,
   * mutated the update time of elements while they were still queued
   * (PriorityBlockingQueue ordering is undefined if keys change in place) and
   * carried a block of commented-out code; it now asserts the expected order.
   */
  @Test
  public void testEqualUpdateTimeAndPriority1() throws IOException,
    InterruptedException {
    ExecutableFlow flow1 = createExecutableFlow("exec1", 3, 1, 1);
    ExecutableFlow flow2 = createExecutableFlow("exec2", 2, 2, 2);
    ExecutableFlow flow3 = createExecutableFlow("exec3", 3, 2, 3);
    ExecutableFlow flow4 = createExecutableFlow("exec3", 3, 4, 4);
    ExecutionReference dummyRef = new ExecutionReference(0);

    BlockingQueue<Pair<ExecutionReference, ExecutableFlow>> queue =
      new PriorityBlockingQueue<Pair<ExecutionReference, ExecutableFlow>>(10,
        new ExecutableFlowPriorityComparator());
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow4));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow1));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow2));
    queue.put(new Pair<ExecutionReference, ExecutableFlow>(dummyRef, flow3));

    // Equal priority (3): flow1 (updateTime 1) before flow3 (2) before
    // flow4 (4); flow2 has the lowest priority and comes out last.
    Assert.assertEquals(flow1, queue.take().getSecond());
    Assert.assertEquals(flow3, queue.take().getSecond());
    Assert.assertEquals(flow4, queue.take().getSecond());
    Assert.assertEquals(flow2, queue.take().getSecond());
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.StringReader;
import java.util.ArrayList;
import java.util.EnumSet;
import javax.ws.rs.core.MediaType;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.server.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.server.resourcemanager.MockNM;
import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeStatusEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebServicesTestUtils;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import com.google.common.base.Joiner;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.servlet.GuiceServletContextListener;
import com.google.inject.servlet.ServletModule;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.UniformInterfaceException;
import com.sun.jersey.api.client.WebResource;
import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
import com.sun.jersey.test.framework.JerseyTest;
import com.sun.jersey.test.framework.WebAppDescriptor;
public class TestRMWebServicesNodes extends JerseyTest {
  private static MockRM rm;
  // Guice wiring for the embedded Jersey test container: binds the RM web
  // services and backs them with a fresh MockRM whose container/NM token
  // master keys are rolled so that mock NMs can register in the tests.
  // NOTE(review): rm is static but re-assigned each time the module is
  // configured — presumably the tests never run concurrently; confirm.
  private Injector injector = Guice.createInjector(new ServletModule() {
    @Override
    protected void configureServlets() {
      bind(JAXBContextResolver.class);
      bind(RMWebServices.class);
      bind(GenericExceptionHandler.class);
      rm = new MockRM(new Configuration());
      rm.getRMContext().getContainerTokenSecretManager().rollMasterKey();
      rm.getRMContext().getNMTokenSecretManager().rollMasterKey();
      bind(ResourceManager.class).toInstance(rm);
      bind(RMContext.class).toInstance(rm.getRMContext());
      bind(ApplicationACLsManager.class).toInstance(
          rm.getApplicationACLsManager());
      bind(QueueACLsManager.class).toInstance(rm.getQueueACLsManager());
      serve("/*").with(GuiceContainer.class);
    }
  });
  // Servlet-context listener handing the test injector to the Guice filter;
  // referenced by class in the WebAppDescriptor built in the constructor.
  public class GuiceServletConfig extends GuiceServletContextListener {
    @Override
    protected Injector getInjector() {
      return injector;
    }
  }
  // JUnit 4 set-up: delegates to JerseyTest, which starts the test container.
  @Before
  @Override
  public void setUp() throws Exception {
    super.setUp();
  }
  // Configures the Jersey test web app: Guice filter + the RM webapp package,
  // served under the "jersey-guice-filter" context path.
  public TestRMWebServicesNodes() {
    super(new WebAppDescriptor.Builder(
        "org.apache.hadoop.yarn.server.resourcemanager.webapp")
        .contextListenerClass(GuiceServletConfig.class)
        .filterClass(com.google.inject.servlet.GuiceFilter.class)
        .contextPath("jersey-guice-filter").servletPath("/").build());
  }
  // GET .../nodes with an explicit JSON Accept header.
  @Test
  public void testNodes() throws JSONException, Exception {
    testNodesHelper("nodes", MediaType.APPLICATION_JSON);
  }
  // Same as testNodes, but with a trailing slash in the path.
  @Test
  public void testNodesSlash() throws JSONException, Exception {
    testNodesHelper("nodes/", MediaType.APPLICATION_JSON);
  }
  // No Accept header: the service's default media type is exercised.
  @Test
  public void testNodesDefault() throws JSONException, Exception {
    testNodesHelper("nodes/", "");
  }
@Test
public void testNodesDefaultWithUnHealthyNode() throws JSONException,
Exception {
WebResource r = resource();
MockNM nm1 = rm.registerNode("h1:1234", 5120);
MockNM nm2 = rm.registerNode("h2:1235", 5121);
rm.sendNodeStarted(nm1);
rm.NMwaitForState(nm1.getNodeId(), NodeState.RUNNING);
rm.NMwaitForState(nm2.getNodeId(), NodeState.NEW);
MockNM nm3 = rm.registerNode("h3:1236", 5122);
rm.NMwaitForState(nm3.getNodeId(), NodeState.NEW);
rm.sendNodeStarted(nm3);
rm.NMwaitForState(nm3.getNodeId(), NodeState.RUNNING);
RMNodeImpl node = (RMNodeImpl) rm.getRMContext().getRMNodes()
.get(nm3.getNodeId());
NodeHealthStatus nodeHealth = NodeHealthStatus.newInstance(false,
"test health report", System.currentTimeMillis());
node.handle(new RMNodeStatusEvent(nm3.getNodeId(), nodeHealth,
new ArrayList<ContainerStatus>(), null, null));
rm.NMwaitForState(nm3.getNodeId(), NodeState.UNHEALTHY);
ClientResponse response =
r.path("ws").path("v1").path("cluster").path("nodes")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject nodes = json.getJSONObject("nodes");
assertEquals("incorrect number of elements", 1, nodes.length());
JSONArray nodeArray = nodes.getJSONArray("node");
// 3 nodes, including the unhealthy node and the new node.
assertEquals("incorrect number of elements", 3, nodeArray.length());
}
@Test
public void testNodesQueryNew() throws JSONException, Exception {
WebResource r = resource();
MockNM nm1 = rm.registerNode("h1:1234", 5120);
MockNM nm2 = rm.registerNode("h2:1235", 5121);
rm.sendNodeStarted(nm1);
rm.NMwaitForState(nm1.getNodeId(), NodeState.RUNNING);
rm.NMwaitForState(nm2.getNodeId(), NodeState.NEW);
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("nodes").queryParam("states", NodeState.NEW.toString())
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject nodes = json.getJSONObject("nodes");
assertEquals("incorrect number of elements", 1, nodes.length());
JSONArray nodeArray = nodes.getJSONArray("node");
assertEquals("incorrect number of elements", 1, nodeArray.length());
JSONObject info = nodeArray.getJSONObject(0);
verifyNodeInfo(info, nm2);
}
@Test
public void testNodesQueryStateNone() throws JSONException, Exception {
WebResource r = resource();
rm.registerNode("h1:1234", 5120);
rm.registerNode("h2:1235", 5121);
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("nodes")
.queryParam("states", NodeState.DECOMMISSIONED.toString())
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
assertEquals("nodes is not null", JSONObject.NULL, json.get("nodes"));
}
@Test
public void testNodesQueryStateInvalid() throws JSONException, Exception {
WebResource r = resource();
rm.registerNode("h1:1234", 5120);
rm.registerNode("h2:1235", 5121);
try {
r.path("ws").path("v1").path("cluster").path("nodes")
.queryParam("states", "BOGUSSTATE").accept(MediaType.APPLICATION_JSON)
.get(JSONObject.class);
fail("should have thrown exception querying invalid state");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject msg = response.getEntity(JSONObject.class);
JSONObject exception = msg.getJSONObject("RemoteException");
assertEquals("incorrect number of elements", 3, exception.length());
String message = exception.getString("message");
String type = exception.getString("exception");
String classname = exception.getString("javaClassName");
WebServicesTestUtils
.checkStringContains(
"exception message",
"org.apache.hadoop.yarn.api.records.NodeState.BOGUSSTATE",
message);
WebServicesTestUtils.checkStringMatch("exception type",
"IllegalArgumentException", type);
WebServicesTestUtils.checkStringMatch("exception classname",
"java.lang.IllegalArgumentException", classname);
} finally {
rm.stop();
}
}
@Test
public void testNodesQueryStateLost() throws JSONException, Exception {
WebResource r = resource();
MockNM nm1 = rm.registerNode("h1:1234", 5120);
MockNM nm2 = rm.registerNode("h2:1234", 5120);
rm.sendNodeStarted(nm1);
rm.sendNodeStarted(nm2);
rm.NMwaitForState(nm1.getNodeId(), NodeState.RUNNING);
rm.NMwaitForState(nm2.getNodeId(), NodeState.RUNNING);
rm.sendNodeLost(nm1);
rm.sendNodeLost(nm2);
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("nodes").queryParam("states", NodeState.LOST.toString())
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
JSONObject nodes = json.getJSONObject("nodes");
assertEquals("incorrect number of elements", 1, nodes.length());
JSONArray nodeArray = nodes.getJSONArray("node");
assertEquals("incorrect number of elements", 2, nodeArray.length());
for (int i = 0; i < nodeArray.length(); ++i) {
JSONObject info = nodeArray.getJSONObject(i);
String host = info.get("id").toString().split(":")[0];
RMNode rmNode = rm.getRMContext().getInactiveRMNodes().get(host);
WebServicesTestUtils.checkStringMatch("nodeHTTPAddress", "",
info.getString("nodeHTTPAddress"));
WebServicesTestUtils.checkStringMatch("state", rmNode.getState()
.toString(), info.getString("state"));
}
}
@Test
public void testSingleNodeQueryStateLost() throws JSONException, Exception {
WebResource r = resource();
MockNM nm1 = rm.registerNode("h1:1234", 5120);
MockNM nm2 = rm.registerNode("h2:1234", 5120);
rm.sendNodeStarted(nm1);
rm.sendNodeStarted(nm2);
rm.NMwaitForState(nm1.getNodeId(), NodeState.RUNNING);
rm.NMwaitForState(nm2.getNodeId(), NodeState.RUNNING);
rm.sendNodeLost(nm1);
rm.sendNodeLost(nm2);
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("nodes").path("h2:1234").accept(MediaType.APPLICATION_JSON)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
JSONObject info = json.getJSONObject("node");
String id = info.get("id").toString();
assertEquals("Incorrect Node Information.", "h2:1234", id);
RMNode rmNode = rm.getRMContext().getInactiveRMNodes().get("h2");
WebServicesTestUtils.checkStringMatch("nodeHTTPAddress", "",
info.getString("nodeHTTPAddress"));
WebServicesTestUtils.checkStringMatch("state",
rmNode.getState().toString(), info.getString("state"));
}
@Test
public void testNodesQueryRunning() throws JSONException, Exception {
WebResource r = resource();
MockNM nm1 = rm.registerNode("h1:1234", 5120);
MockNM nm2 = rm.registerNode("h2:1235", 5121);
rm.sendNodeStarted(nm1);
rm.NMwaitForState(nm1.getNodeId(), NodeState.RUNNING);
rm.NMwaitForState(nm2.getNodeId(), NodeState.NEW);
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("nodes").queryParam("states", "running")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject nodes = json.getJSONObject("nodes");
assertEquals("incorrect number of elements", 1, nodes.length());
JSONArray nodeArray = nodes.getJSONArray("node");
assertEquals("incorrect number of elements", 1, nodeArray.length());
}
@Test
public void testNodesQueryHealthyFalse() throws JSONException, Exception {
WebResource r = resource();
MockNM nm1 = rm.registerNode("h1:1234", 5120);
MockNM nm2 = rm.registerNode("h2:1235", 5121);
rm.sendNodeStarted(nm1);
rm.NMwaitForState(nm1.getNodeId(), NodeState.RUNNING);
rm.NMwaitForState(nm2.getNodeId(), NodeState.NEW);
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("nodes").queryParam("states", "UNHEALTHY")
.accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
assertEquals("nodes is not null", JSONObject.NULL, json.get("nodes"));
}
public void testNodesHelper(String path, String media) throws JSONException,
Exception {
WebResource r = resource();
MockNM nm1 = rm.registerNode("h1:1234", 5120);
MockNM nm2 = rm.registerNode("h2:1235", 5121);
rm.sendNodeStarted(nm1);
rm.sendNodeStarted(nm2);
rm.NMwaitForState(nm1.getNodeId(), NodeState.RUNNING);
rm.NMwaitForState(nm2.getNodeId(), NodeState.RUNNING);
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path(path).accept(media).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject nodes = json.getJSONObject("nodes");
assertEquals("incorrect number of elements", 1, nodes.length());
JSONArray nodeArray = nodes.getJSONArray("node");
assertEquals("incorrect number of elements", 2, nodeArray.length());
JSONObject info = nodeArray.getJSONObject(0);
String id = info.get("id").toString();
if (id.matches("h1:1234")) {
verifyNodeInfo(info, nm1);
verifyNodeInfo(nodeArray.getJSONObject(1), nm2);
} else {
verifyNodeInfo(info, nm2);
verifyNodeInfo(nodeArray.getJSONObject(1), nm1);
}
}
@Test
public void testSingleNode() throws JSONException, Exception {
rm.registerNode("h1:1234", 5120);
MockNM nm2 = rm.registerNode("h2:1235", 5121);
testSingleNodeHelper("h2:1235", nm2, MediaType.APPLICATION_JSON);
}
@Test
public void testSingleNodeSlash() throws JSONException, Exception {
MockNM nm1 = rm.registerNode("h1:1234", 5120);
rm.registerNode("h2:1235", 5121);
testSingleNodeHelper("h1:1234/", nm1, MediaType.APPLICATION_JSON);
}
@Test
public void testSingleNodeDefault() throws JSONException, Exception {
MockNM nm1 = rm.registerNode("h1:1234", 5120);
rm.registerNode("h2:1235", 5121);
testSingleNodeHelper("h1:1234/", nm1, "");
}
public void testSingleNodeHelper(String nodeid, MockNM nm, String media)
throws JSONException, Exception {
WebResource r = resource();
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("nodes").path(nodeid).accept(media).get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
JSONObject json = response.getEntity(JSONObject.class);
assertEquals("incorrect number of elements", 1, json.length());
JSONObject info = json.getJSONObject("node");
verifyNodeInfo(info, nm);
}
  @Test
  public void testNonexistNode() throws JSONException, Exception {
    // Requesting a node id that was never registered must yield 404 with a
    // JSON RemoteException body describing the failure.
    rm.registerNode("h1:1234", 5120);
    rm.registerNode("h2:1235", 5121);
    WebResource r = resource();
    try {
      r.path("ws").path("v1").path("cluster").path("nodes")
          .path("node_invalid:99").accept(MediaType.APPLICATION_JSON)
          .get(JSONObject.class);
      fail("should have thrown exception on non-existent nodeid");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      // RemoteException carries exactly: message, exception, javaClassName.
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      verifyNonexistNodeException(message, type, classname);
    } finally {
      // stop the RM so later tests start from a clean state
      rm.stop();
    }
  }
// test that the exception output defaults to JSON
  @Test
  public void testNonexistNodeDefault() throws JSONException, Exception {
    // Same as testNonexistNode, but with no Accept header: the error body
    // must still default to JSON.
    rm.registerNode("h1:1234", 5120);
    rm.registerNode("h2:1235", 5121);
    WebResource r = resource();
    try {
      r.path("ws").path("v1").path("cluster").path("nodes")
          .path("node_invalid:99").get(JSONObject.class);
      fail("should have thrown exception on non-existent nodeid");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      verifyNonexistNodeException(message, type, classname);
    } finally {
      rm.stop();
    }
  }
// test that the exception output works in XML
@Test
public void testNonexistNodeXML() throws JSONException, Exception {
rm.registerNode("h1:1234", 5120);
rm.registerNode("h2:1235", 5121);
WebResource r = resource();
try {
r.path("ws").path("v1").path("cluster").path("nodes")
.path("node_invalid:99").accept(MediaType.APPLICATION_XML)
.get(JSONObject.class);
fail("should have thrown exception on non-existent nodeid");
} catch (UniformInterfaceException ue) {
ClientResponse response = ue.getResponse();
assertEquals(Status.NOT_FOUND, response.getClientResponseStatus());
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String msg = response.getEntity(String.class);
System.out.println(msg);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(msg));
Document dom = db.parse(is);
NodeList nodes = dom.getElementsByTagName("RemoteException");
Element element = (Element) nodes.item(0);
String message = WebServicesTestUtils.getXmlString(element, "message");
String type = WebServicesTestUtils.getXmlString(element, "exception");
String classname = WebServicesTestUtils.getXmlString(element,
"javaClassName");
verifyNonexistNodeException(message, type, classname);
} finally {
rm.stop();
}
}
private void verifyNonexistNodeException(String message, String type, String classname) {
assertTrue("exception message incorrect",
"java.lang.Exception: nodeId, node_invalid:99, is not found"
.matches(message));
assertTrue("exception type incorrect", "NotFoundException".matches(type));
assertTrue("exception className incorrect",
"org.apache.hadoop.yarn.webapp.NotFoundException".matches(classname));
}
  @Test
  public void testInvalidNode() throws JSONException, Exception {
    // A node id without the host:port form must be rejected with 400 and an
    // IllegalArgumentException carried in the JSON RemoteException body.
    rm.registerNode("h1:1234", 5120);
    rm.registerNode("h2:1235", 5121);
    WebResource r = resource();
    try {
      r.path("ws").path("v1").path("cluster").path("nodes")
          .path("node_invalid_foo").accept(MediaType.APPLICATION_JSON)
          .get(JSONObject.class);
      fail("should have thrown exception on non-existent nodeid");
    } catch (UniformInterfaceException ue) {
      ClientResponse response = ue.getResponse();
      assertEquals(Status.BAD_REQUEST, response.getClientResponseStatus());
      assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
      JSONObject msg = response.getEntity(JSONObject.class);
      JSONObject exception = msg.getJSONObject("RemoteException");
      assertEquals("incorrect number of elements", 3, exception.length());
      String message = exception.getString("message");
      String type = exception.getString("exception");
      String classname = exception.getString("javaClassName");
      // the brackets are escaped because checkStringMatch treats the
      // expected value as a regular expression
      WebServicesTestUtils.checkStringMatch("exception message",
          "Invalid NodeId \\[node_invalid_foo\\]. Expected host:port", message);
      WebServicesTestUtils.checkStringMatch("exception type",
          "IllegalArgumentException", type);
      WebServicesTestUtils.checkStringMatch("exception classname",
          "java.lang.IllegalArgumentException", classname);
    } finally {
      rm.stop();
    }
  }
  @Test
  public void testNodesXML() throws JSONException, Exception {
    // The nodes collection must also be available as XML: one <nodes>
    // wrapper containing one <node> for the single registered node.
    rm.start();
    WebResource r = resource();
    MockNM nm1 = rm.registerNode("h1:1234", 5120);
    // MockNM nm2 = rm.registerNode("h2:1235", 5121);
    ClientResponse response = r.path("ws").path("v1").path("cluster")
        .path("nodes").accept(MediaType.APPLICATION_XML)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList nodesApps = dom.getElementsByTagName("nodes");
    assertEquals("incorrect number of elements", 1, nodesApps.getLength());
    NodeList nodes = dom.getElementsByTagName("node");
    assertEquals("incorrect number of elements", 1, nodes.getLength());
    verifyNodesXML(nodes, nm1);
    rm.stop();
  }
  @Test
  public void testSingleNodesXML() throws JSONException, Exception {
    // Fetching a single node by id with an XML Accept header must return
    // exactly one <node> element matching the registered NodeManager.
    rm.start();
    WebResource r = resource();
    MockNM nm1 = rm.registerNode("h1:1234", 5120);
    // MockNM nm2 = rm.registerNode("h2:1235", 5121);
    ClientResponse response = r.path("ws").path("v1").path("cluster")
        .path("nodes").path("h1:1234").accept(MediaType.APPLICATION_XML)
        .get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
    String xml = response.getEntity(String.class);
    DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
    DocumentBuilder db = dbf.newDocumentBuilder();
    InputSource is = new InputSource();
    is.setCharacterStream(new StringReader(xml));
    Document dom = db.parse(is);
    NodeList nodes = dom.getElementsByTagName("node");
    assertEquals("incorrect number of elements", 1, nodes.getLength());
    verifyNodesXML(nodes, nm1);
    rm.stop();
  }
@Test
public void testNodes2XML() throws JSONException, Exception {
rm.start();
WebResource r = resource();
rm.registerNode("h1:1234", 5120);
rm.registerNode("h2:1235", 5121);
ClientResponse response = r.path("ws").path("v1").path("cluster")
.path("nodes").accept(MediaType.APPLICATION_XML)
.get(ClientResponse.class);
assertEquals(MediaType.APPLICATION_XML_TYPE, response.getType());
String xml = response.getEntity(String.class);
DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
DocumentBuilder db = dbf.newDocumentBuilder();
InputSource is = new InputSource();
is.setCharacterStream(new StringReader(xml));
Document dom = db.parse(is);
NodeList nodesApps = dom.getElementsByTagName("nodes");
assertEquals("incorrect number of elements", 1, nodesApps.getLength());
NodeList nodes = dom.getElementsByTagName("node");
assertEquals("incorrect number of elements", 2, nodes.getLength());
rm.stop();
}
  @Test
  public void testQueryAll() throws Exception {
    // Filtering on every NodeState joined with commas must return all
    // three nodes: RUNNING (nm1), NEW (nm2) and LOST (nm3).
    WebResource r = resource();
    MockNM nm1 = rm.registerNode("h1:1234", 5120);
    MockNM nm2 = rm.registerNode("h2:1235", 5121);
    MockNM nm3 = rm.registerNode("h3:1236", 5122);
    rm.sendNodeStarted(nm1);
    rm.sendNodeStarted(nm3);
    rm.NMwaitForState(nm1.getNodeId(), NodeState.RUNNING);
    rm.NMwaitForState(nm2.getNodeId(), NodeState.NEW);
    rm.sendNodeLost(nm3);
    ClientResponse response = r.path("ws").path("v1").path("cluster")
        .path("nodes")
        .queryParam("states", Joiner.on(',').join(EnumSet.allOf(NodeState.class)))
        .accept(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertEquals(MediaType.APPLICATION_JSON_TYPE, response.getType());
    JSONObject json = response.getEntity(JSONObject.class);
    JSONObject nodes = json.getJSONObject("nodes");
    assertEquals("incorrect number of elements", 1, nodes.length());
    JSONArray nodeArray = nodes.getJSONArray("node");
    assertEquals("incorrect number of elements", 3, nodeArray.length());
  }
  /**
   * Verifies every XML &lt;node&gt; element in the list against the given
   * NodeManager by extracting each field and delegating to
   * {@link #verifyNodeInfoGeneric}.
   */
  public void verifyNodesXML(NodeList nodes, MockNM nm) throws JSONException,
      Exception {
    for (int i = 0; i < nodes.getLength(); i++) {
      Element element = (Element) nodes.item(i);
      verifyNodeInfoGeneric(nm,
          WebServicesTestUtils.getXmlString(element, "state"),
          WebServicesTestUtils.getXmlString(element, "rack"),
          WebServicesTestUtils.getXmlString(element, "id"),
          WebServicesTestUtils.getXmlString(element, "nodeHostName"),
          WebServicesTestUtils.getXmlString(element, "nodeHTTPAddress"),
          WebServicesTestUtils.getXmlLong(element, "lastHealthUpdate"),
          WebServicesTestUtils.getXmlString(element, "healthReport"),
          WebServicesTestUtils.getXmlInt(element, "numContainers"),
          WebServicesTestUtils.getXmlLong(element, "usedMemoryMB"),
          WebServicesTestUtils.getXmlLong(element, "availMemoryMB"),
          WebServicesTestUtils.getXmlString(element, "version"));
    }
  }
  /**
   * Verifies a single JSON node record: the field count plus each field's
   * value against live RM state, via {@link #verifyNodeInfoGeneric}.
   */
  public void verifyNodeInfo(JSONObject nodeInfo, MockNM nm)
      throws JSONException, Exception {
    // 11 = number of fields the web service currently emits per node
    assertEquals("incorrect number of elements", 11, nodeInfo.length());
    verifyNodeInfoGeneric(nm, nodeInfo.getString("state"),
        nodeInfo.getString("rack"),
        nodeInfo.getString("id"), nodeInfo.getString("nodeHostName"),
        nodeInfo.getString("nodeHTTPAddress"),
        nodeInfo.getLong("lastHealthUpdate"),
        nodeInfo.getString("healthReport"), nodeInfo.getInt("numContainers"),
        nodeInfo.getLong("usedMemoryMB"), nodeInfo.getLong("availMemoryMB"),
        nodeInfo.getString("version"));
  }
  /**
   * Compares the extracted node fields (from either JSON or XML) against the
   * RM's in-memory state for the given NodeManager.
   */
  public void verifyNodeInfoGeneric(MockNM nm, String state, String rack,
      String id, String nodeHostName,
      String nodeHTTPAddress, long lastHealthUpdate, String healthReport,
      int numContainers, long usedMemoryMB, long availMemoryMB, String version)
      throws JSONException, Exception {
    RMNode node = rm.getRMContext().getRMNodes().get(nm.getNodeId());
    ResourceScheduler sched = rm.getResourceScheduler();
    SchedulerNodeReport report = sched.getNodeReport(nm.getNodeId());
    WebServicesTestUtils.checkStringMatch("state", node.getState().toString(),
        state);
    WebServicesTestUtils.checkStringMatch("rack", node.getRackName(), rack);
    WebServicesTestUtils.checkStringMatch("id", nm.getNodeId().toString(), id);
    WebServicesTestUtils.checkStringMatch("nodeHostName", nm.getNodeId()
        .getHost(), nodeHostName);
    WebServicesTestUtils.checkStringMatch("healthReport",
        String.valueOf(node.getHealthReport()), healthReport);
    String expectedHttpAddress = nm.getNodeId().getHost() + ":"
        + nm.getHttpPort();
    WebServicesTestUtils.checkStringMatch("nodeHTTPAddress",
        expectedHttpAddress, nodeHTTPAddress);
    WebServicesTestUtils.checkStringMatch("version",
        node.getNodeManagerVersion(), version);
    long expectedHealthUpdate = node.getLastHealthReportTime();
    assertEquals("lastHealthUpdate doesn't match, got: " + lastHealthUpdate
        + " expected: " + expectedHealthUpdate, expectedHealthUpdate,
        lastHealthUpdate);
    // report is null when the scheduler has no record for this node yet;
    // in that case the resource figures cannot be cross-checked
    if (report != null) {
      assertEquals("numContainers doesn't match: " + numContainers,
          report.getNumContainers(), numContainers);
      assertEquals("usedMemoryMB doesn't match: " + usedMemoryMB, report
          .getUsedResource().getMemory(), usedMemoryMB);
      assertEquals("availMemoryMB doesn't match: " + availMemoryMB, report
          .getAvailableResource().getMemory(), availMemoryMB);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.fortress.web.panel;
import com.inmethod.grid.IGridColumn;
import com.inmethod.grid.column.PropertyColumn;
import com.inmethod.grid.treegrid.TreeGrid;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.wicket.Component;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.attributes.AjaxCallListener;
import org.apache.wicket.ajax.attributes.AjaxRequestAttributes;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.event.IEvent;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.markup.html.form.FormComponentPanel;
import org.apache.wicket.markup.html.form.Radio;
import org.apache.wicket.markup.html.form.RadioGroup;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.PropertyModel;
import org.apache.directory.fortress.web.common.GlobalIds;
import org.apache.directory.fortress.web.control.SecUtils;
import org.apache.directory.fortress.web.model.ObjectListModel;
import org.apache.directory.fortress.web.event.SaveModelEvent;
import org.apache.directory.fortress.web.control.SecureIndicatingAjaxButton;
import org.apache.directory.fortress.web.control.SecureIndicatingAjaxLink;
import org.apache.directory.fortress.web.event.SelectModelEvent;
import org.apache.directory.fortress.core.model.FortEntity;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.model.Model;
import org.apache.directory.fortress.core.model.OrgUnit;
import org.apache.directory.fortress.core.model.PermObj;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
/**
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
* @version $Rev$
*/
public class ObjectListPanel extends FormComponentPanel
{
    /** Default serialVersionUID */
    private static final long serialVersionUID = 1L;
    private static final Logger LOG = LoggerFactory.getLogger( ObjectListPanel.class.getName() );
    // Form wrapping the tree grid plus the search controls.
    private Form listForm;
    // Tree model backing the grid; rootNode has one child per PermObj row.
    private DefaultTreeModel treeModel;
    // Most recently selected tree node (set in the grid's selectItem).
    private DefaultMutableTreeNode node;
    private TreeGrid<DefaultTreeModel, DefaultMutableTreeNode, String> grid;
    private DefaultMutableTreeNode rootNode;
    private TextField searchValFld;
    private RadioGroup radioGroup;
    // Current search string; bound to searchValFld through a PropertyModel.
    private String searchVal;
    // Active search mode: NAMES (by object name) or OUS (by org unit).
    private char selectedRadioButton;
    private static final char NAMES = 'N';
    private static final char OUS = 'O';
    /**
     * Builds the panel: a tree grid of permission objects plus a
     * radio-driven search form (by object name or by organizational unit).
     *
     * @param id wicket markup id
     * @param isAdmin passed through to the backing ObjectListModel
     */
    public ObjectListPanel( String id, final boolean isAdmin )
    {
        super( id );
        ObjectListModel objectListModel = new ObjectListModel( new PermObj( "" ), isAdmin,
            SecUtils.getSession( this ) );
        setDefaultModel( objectListModel );
        addGrid();
        radioGroup = new RadioGroup( "searchOptions", new PropertyModel( this, "selectedRadioButton" ) );
        add( radioGroup );
        Radio objectRb = new Radio( "objectRb", new Model( Character.valueOf( NAMES ) ) );
        radioGroup.add( objectRb );
        Radio ouRb = new Radio( "ouRb", new Model( Character.valueOf( OUS ) ) );
        radioGroup.add( ouRb );
        addOUSearchModal( ouRb );
        radioGroup.setOutputMarkupId( true );
        radioGroup.setRenderBodyOnly( false );
        searchValFld = new TextField( GlobalIds.SEARCH_VAL, new PropertyModel<String>( this, GlobalIds.SEARCH_VAL ) );
        searchValFld.setOutputMarkupId( true );
        // push the field value back to the model when focus leaves the field
        AjaxFormComponentUpdatingBehavior ajaxUpdater = new AjaxFormComponentUpdatingBehavior( GlobalIds.ONBLUR )
        {
            /** Default serialVersionUID */
            private static final long serialVersionUID = 1L;
            @Override
            protected void onUpdate( final AjaxRequestTarget target )
            {
                target.add( searchValFld );
            }
        };
        searchValFld.add( ajaxUpdater );
        radioGroup.add( searchValFld );
        this.listForm.add( radioGroup );
        selectedRadioButton = NAMES;
        // search button: queries permission objects via ReviewMgr.findPermObjs
        this.listForm.add( new SecureIndicatingAjaxButton( GlobalIds.SEARCH, GlobalIds.REVIEW_MGR, "findPermObjs" )
        {
            /** Default serialVersionUID */
            private static final long serialVersionUID = 1L;
            @Override
            protected void onSubmit( AjaxRequestTarget target )
            {
                LOG.debug( ".search.onSubmit selected radio button: " + selectedRadioButton );
                info( "Searching Permission Objects..." );
                // i.e. StringUtils.isEmpty( searchVal ): normalize a null
                // search value to the empty string before building the query
                if ( !StringUtils.isNotEmpty( searchVal ) )
                {
                    searchVal = "";
                }
                PermObj srchObject = new PermObj();
                switch ( selectedRadioButton )
                {
                    case NAMES:
                        LOG.debug( ".onSubmit OBJECT RB selected" );
                        srchObject.setObjName( searchVal );
                        break;
                    case OUS:
                        LOG.debug( ".onSubmit OUS RB selected" );
                        srchObject.setOu( searchVal );
                        break;
                }
                setDefaultModel( new ObjectListModel( srchObject, isAdmin,
                    SecUtils.getSession( this ) ) );
                // rebuild the tree under the (kept) root from the new results
                treeModel.reload();
                rootNode.removeAllChildren();
                List<PermObj> permObjs = ( List<PermObj> ) getDefaultModelObject();
                if ( CollectionUtils.isNotEmpty( permObjs ) )
                {
                    for ( PermObj permObj : permObjs )
                        rootNode.add( new DefaultMutableTreeNode( permObj ) );
                    info( "Search returned " + permObjs.size() + " matching objects" );
                }
                else
                {
                    info( "No matching objects found" );
                }
                target.add( grid );
            }
            @Override
            public void onError( AjaxRequestTarget target )
            {
                LOG.warn( ".search.onError" );
                // no-arg add(): nothing is queued for repaint on error
                target.add();
            }
            @Override
            protected void updateAjaxAttributes( AjaxRequestAttributes attributes )
            {
                super.updateAjaxAttributes( attributes );
                // on ajax failure, bounce the browser back to the home page
                AjaxCallListener ajaxCallListener = new AjaxCallListener()
                {
                    /** Default serialVersionUID */
                    private static final long serialVersionUID = 1L;
                    @Override
                    public CharSequence getFailureHandler( Component component )
                    {
                        return GlobalIds.WINDOW_LOCATION_REPLACE_COMMANDER_HOME_HTML;
                    }
                };
                attributes.getAjaxCallListeners().add( ajaxCallListener );
            }
        } );
    }
    /**
     * Reacts to SaveModelEvent broadcasts: mirrors add/update/delete
     * operations performed elsewhere onto this panel's tree.
     */
    @Override
    public void onEvent( IEvent event )
    {
        if ( event.getPayload() instanceof SaveModelEvent )
        {
            SaveModelEvent modelEvent = ( SaveModelEvent ) event.getPayload();
            switch ( modelEvent.getOperation() )
            {
                case ADD:
                    add( modelEvent.getEntity() );
                    break;
                case UPDATE:
                    modelChanged();
                    break;
                case DELETE:
                    prune();
                    break;
                default:
                    LOG.error( "onEvent caught invalid operation" );
                    break;
            }
            AjaxRequestTarget target = ( ( SaveModelEvent ) event.getPayload() ).getAjaxRequestTarget();
            LOG.debug( ".onEvent AJAX - ObjectListPanel - SaveModelEvent: " + target.toString() );
        }
    }
    /**
     * Removes every currently selected row from both the tree model and the
     * underlying PermObj list.
     */
    private void removeSelectedItems( TreeGrid<DefaultTreeModel, DefaultMutableTreeNode, String> grid )
    {
        Collection<IModel<DefaultMutableTreeNode>> selected = grid.getSelectedItems();
        for ( IModel<DefaultMutableTreeNode> model : selected )
        {
            DefaultMutableTreeNode node = model.getObject();
            treeModel.removeNodeFromParent( node );
            PermObj permObj = ( PermObj ) node.getUserObject();
            LOG.debug( ".removeSelectedItems user node: " + permObj.getObjName() );
            List<PermObj> permObjs = ( ( List<PermObj> ) getDefaultModel().getObject() );
            permObjs.remove( permObj );
        }
    }
    /**
     * Builds a one-level tree: an anonymous root with one child per PermObj.
     * Also (re)assigns this.rootNode as a side effect.
     */
    private DefaultTreeModel createTreeModel( List<PermObj> permObjs )
    {
        DefaultTreeModel model;
        rootNode = new DefaultMutableTreeNode( null );
        model = new DefaultTreeModel( rootNode );
        if ( permObjs == null )
            LOG.debug( "no Permission Objects found" );
        else
        {
            LOG.debug( ".createTreeModel Permission Objects found:" + permObjs.size() );
            for ( PermObj permObj : permObjs )
                rootNode.add( new DefaultMutableTreeNode( permObj ) );
        }
        return model;
    }
    /**
     * Creates the four-column tree grid, wires row selection to
     * SelectModelEvent broadcasts, and adds it to the list form.
     */
    private void addGrid()
    {
        List<IGridColumn<DefaultTreeModel, DefaultMutableTreeNode, String>> columns =
            new ArrayList<>();
        PropertyColumn objName = new PropertyColumn<>(
            Model.of( "Object Name" ), "userObject.ObjName" );
        objName.setInitialSize( 300 );
        columns.add( objName );
        PropertyColumn ou = new PropertyColumn<>(
            Model.of( "Perm Organization" ), "userObject.Ou" );
        ou.setInitialSize( 200 );
        columns.add( ou );
        PropertyColumn description = new PropertyColumn<>(
            Model.of( "Description" ), "userObject.Description" );
        description.setInitialSize( 500 );
        columns.add( description );
        PropertyColumn type = new PropertyColumn( new Model( "Type" ), "userObject.Type" );
        type.setInitialSize( 200 );
        columns.add( type );
        List<PermObj> permObjs = ( List<PermObj> ) getDefaultModel().getObject();
        treeModel = createTreeModel( permObjs );
        grid = new TreeGrid<DefaultTreeModel, DefaultMutableTreeNode, String>( "objecttreegrid", treeModel, columns )
        {
            /** Default serialVersionUID */
            private static final long serialVersionUID = 1L;
            @Override
            public void selectItem( IModel itemModel, boolean selected )
            {
                node = ( DefaultMutableTreeNode ) itemModel.getObject();
                if ( !node.isRoot() )
                {
                    PermObj permObj = ( PermObj ) node.getUserObject();
                    LOG.debug( "TreeGrid.addGrid.selectItem selected permission object =" + permObj.getObjName() );
                    // toggle: clicking a selected row deselects it, otherwise
                    // select it and broadcast the selection to the page
                    if ( super.isItemSelected( itemModel ) )
                    {
                        LOG.debug( "TreeGrid.addGrid.selectItem item is selected" );
                        super.selectItem( itemModel, false );
                    }
                    else
                    {
                        super.selectItem( itemModel, true );
                        SelectModelEvent.send( getPage(), this, permObj );
                    }
                }
            }
        };
        //grid.setContentHeight( 50, SizeUnit.EM );
        grid.setAllowSelectMultiple( false );
        grid.setClickRowToSelect( true );
        grid.setClickRowToDeselect( false );
        grid.setSelectToEdit( false );
        // expand the root node
        grid.getTreeState().expandAll();
        this.listForm = new Form( "objectlistform" );
        this.listForm.add( grid );
        add( this.listForm );
        grid.setOutputMarkupId( true );
    }
    /**
     * Attaches the org-unit search modal window and links it to the OUS
     * radio button; a selection in the modal becomes the new search value.
     */
    private void addOUSearchModal( Radio ouRb )
    {
        final ModalWindow ousModalWindow;
        listForm.add( ousModalWindow = new ModalWindow( "ousearchmodal" ) );
        final OUSearchModalPanel ouSearchModalPanel = new OUSearchModalPanel( ousModalWindow.getContentId(),
            ousModalWindow, false );
        ousModalWindow.setContent( ouSearchModalPanel );
        ousModalWindow.setWindowClosedCallback( new ModalWindow.WindowClosedCallback()
        {
            /** Default serialVersionUID */
            private static final long serialVersionUID = 1L;
            @Override
            public void onClose( AjaxRequestTarget target )
            {
                OrgUnit ou = ouSearchModalPanel.getSelection();
                if ( ou != null )
                {
                    // adopt the chosen OU and flip the radio to OU mode
                    searchVal = ou.getName();
                    selectedRadioButton = OUS;
                    target.add( radioGroup );
                }
            }
        } );
        ouRb.add( new SecureIndicatingAjaxLink( "ouAssignLinkLbl", GlobalIds.DEL_REVIEW_MGR, "searchOU" )
        {
            /** Default serialVersionUID */
            private static final long serialVersionUID = 1L;
            public void onClick( AjaxRequestTarget target )
            {
                String msg = "clicked on ou search";
                msg += "ouSelection: " + searchVal;
                ouSearchModalPanel.setSearchVal( searchVal );
                LOG.debug( msg );
                target.prependJavaScript( GlobalIds.WICKET_WINDOW_UNLOAD_CONFIRMATION_FALSE );
                ousModalWindow.show( target );
            }
            @Override
            protected void updateAjaxAttributes( AjaxRequestAttributes attributes )
            {
                super.updateAjaxAttributes( attributes );
                AjaxCallListener ajaxCallListener = new AjaxCallListener()
                {
                    /** Default serialVersionUID */
                    private static final long serialVersionUID = 1L;
                    @Override
                    public CharSequence getFailureHandler( Component component )
                    {
                        return GlobalIds.WINDOW_LOCATION_REPLACE_COMMANDER_HOME_HTML;
                    }
                };
                attributes.getAjaxCallListeners().add( ajaxCallListener );
            }
        } );
        ousModalWindow.setTitle( "Permission Object Organizational Unit Search Modal" );
        ousModalWindow.setInitialWidth( 450 );
        ousModalWindow.setInitialHeight( 450 );
        ousModalWindow.setCookieName( "permou-modal" );
    }
    /**
     * Inserts a newly saved PermObj at the top of the tree and into the
     * backing list.
     */
    public void add( FortEntity entity )
    {
        if ( getDefaultModelObject() != null )
        {
            List<PermObj> permObjs = ( ( List<PermObj> ) getDefaultModelObject() );
            permObjs.add( ( PermObj ) entity );
            treeModel.insertNodeInto( new DefaultMutableTreeNode( entity ), rootNode, 0 );
            //treeModel.insertNodeInto(new DefaultMutableTreeNode(entity), rootNode, permObjs.size());
        }
    }
    /** Removes the currently selected rows (delete mirror of {@link #add}). */
    public void prune()
    {
        removeSelectedItems( grid );
    }
}
| |
/*
* This file is part of choco-solver, http://choco-solver.org/
*
* Copyright (c) 2019, IMT Atlantique. All rights reserved.
*
* Licensed under the BSD 4-clause license.
*
* See LICENSE file in the project root for full license information.
*/
package org.chocosolver.solver;
import org.chocosolver.memory.Except_0;
import org.chocosolver.memory.ICondition;
import org.chocosolver.solver.search.strategy.Search;
import org.chocosolver.solver.search.strategy.strategy.AbstractStrategy;
import org.chocosolver.util.ESat;
import java.io.InputStream;
import java.util.function.Function;
import java.util.function.IntPredicate;
import java.util.function.Predicate;
/**
* A concrete implementation of Settings that enables to modify settings programmatically.
*
* <p> Project: choco-solver.
*
* @author Charles Prud'homme
* @since 14/12/2017.
*/
public class DefaultSettings implements Settings {
    /**
     * Default welcome message
     */
    private static final String DEFAULT_WELCOME_MESSAGE =
        "** Choco 4.10.2 (2019-10) : Constraint Programming Solver, Copyright (c) 2010-2019";
    private static final String DEFAULT_PREFIX = "TMP_";
    private String welcomeMessage = DEFAULT_WELCOME_MESSAGE;
    // Default checker accepts a solver state unless it is proven FALSE;
    // evaluated through checkModel(Solver).
    private Predicate<Solver> modelChecker = s -> !ESat.FALSE.equals(s.isSatisfied());
    private boolean enableViews = true;
    private int maxDomSizeForEnumerated = 32_768;
    private int minCardForSumDecomposition = 1024;
    private boolean enableTableSubstitution = true;
    private int maxTupleSizeForSubstitution = 10_000;
    private double MCRDecimalPrecision = 1e-4d;
    private boolean sortPropagatorActivationWRTPriority = true;
    // Strategy factory used by makeDefaultSearch(Model).
    private Function<Model, AbstractStrategy> defaultSearch = Search::defaultSearch;
    private ICondition environmentHistorySimulationCondition = new Except_0();
    private boolean warnUser = false;
    private boolean enableDecompositionOfBooleanSum = false;
    private IntPredicate enableIncrementalityOnBoolSum = i -> i > 10;
    private boolean cloneVariableArrayInPropagator = true;
    private boolean enableACOnTernarySum = false;
    private String defaultPrefix = DEFAULT_PREFIX;
    private boolean enableSAT = false;
    private boolean swapOnPassivate = false;
    private boolean checkDeclaredConstraints = true;
    private boolean printAllUndeclaredConstraints = false;
    private byte hybridEngine = 0b00;
    private int nbMaxLearnt = 100_000;
    // NOTE(review): field name has a typo ("Cardinlity"); renaming would
    // touch accessors outside this view, so it is kept as-is.
    private int maxLearntCardinlity = Integer.MAX_VALUE / 100;
    private float clauseReductionRatio = .5f;
    private int dominancePerimeter = 4;
    private boolean explainGlobalFailureInSum = true;
    // Factory invoked to build the Solver attached to a Model.
    private Function<Model, Solver> initSolver = Solver::new;
    /**
     * Builds default settings. When assertions are enabled (-ea), the
     * "Assert.properties" resource is additionally loaded; with assertions
     * disabled the assert — and therefore loadAssert() — never runs.
     */
    public DefaultSettings() {
        // Deliberate side effect inside assert: properties are only loaded
        // when assertions are on.
        assert loadAssert():"Cannot load Assert.properties";
    }
private boolean loadAssert(){
InputStream inStream = this.getClass().getClassLoader().getResourceAsStream("Assert.properties");
try {
this.load(inStream);
} catch (Exception e) {
e.printStackTrace();
return false;
}
return true;
}
    /** @return the currently configured welcome message. */
    @Override
    public String getWelcomeMessage() {
        return welcomeMessage;
    }
    /** Fluent setter for the welcome message. */
    @Override
    public DefaultSettings setWelcomeMessage(String welcomeMessage) {
        this.welcomeMessage = welcomeMessage;
        return this;
    }
    /** Runs the configured model checker predicate against the solver. */
    @Override
    public boolean checkModel(Solver solver) {
        return modelChecker.test(solver);
    }
    /** Fluent setter for the model-checker predicate used by checkModel. */
    @Override
    public DefaultSettings setModelChecker(Predicate<Solver> modelChecker) {
        this.modelChecker = modelChecker;
        return this;
    }
    /** @return whether views are enabled. */
    @Override
    public boolean enableViews() {
        return enableViews;
    }
    /** Fluent setter for the views flag. */
    @Override
    public DefaultSettings setEnableViews(boolean enableViews) {
        this.enableViews = enableViews;
        return this;
    }
    /** @return the max domain size threshold for enumerated domains. */
    @Override
    public int getMaxDomSizeForEnumerated() {
        return maxDomSizeForEnumerated;
    }
    /** Fluent setter for the enumerated-domain size threshold. */
    @Override
    public DefaultSettings setMaxDomSizeForEnumerated(int maxDomSizeForEnumerated) {
        this.maxDomSizeForEnumerated = maxDomSizeForEnumerated;
        return this;
    }
@Override
public int getMinCardForSumDecomposition() {
return minCardForSumDecomposition;
}
@Override
public Settings setMinCardinalityForSumDecomposition(int defaultMinCardinalityForSumDecomposition) {
this.minCardForSumDecomposition = defaultMinCardinalityForSumDecomposition;
return this;
}
@Override
public boolean enableTableSubstitution() {
return enableTableSubstitution;
}
@Override
public DefaultSettings setEnableTableSubstitution(boolean enableTableSubstitution) {
this.enableTableSubstitution = enableTableSubstitution;
return this;
}
@Override
public int getMaxTupleSizeForSubstitution() {
return maxTupleSizeForSubstitution;
}
@Override
public double getMCRDecimalPrecision() {
return MCRDecimalPrecision;
}
@Override
public Settings setMCRDecimalPrecision(double precision) {
this.MCRDecimalPrecision = precision;
return this;
}
@Override
public DefaultSettings setMaxTupleSizeForSubstitution(int maxTupleSizeForSubstitution) {
this.maxTupleSizeForSubstitution = maxTupleSizeForSubstitution;
return this;
}
@Override
public boolean sortPropagatorActivationWRTPriority() {
return sortPropagatorActivationWRTPriority;
}
@Override
public DefaultSettings setSortPropagatorActivationWRTPriority(boolean sortPropagatorActivationWRTPriority) {
this.sortPropagatorActivationWRTPriority = sortPropagatorActivationWRTPriority;
return this;
}
@Override
public AbstractStrategy makeDefaultSearch(Model model) {
return defaultSearch.apply(model);
}
@Override
public DefaultSettings setDefaultSearch(Function<Model, AbstractStrategy> defaultSearch) {
this.defaultSearch = defaultSearch;
return this;
}
    /** @return the condition under which environment history is simulated. */
    @Override
    public ICondition getEnvironmentHistorySimulationCondition() {
        return environmentHistorySimulationCondition;
    }

    /** Sets the environment-history simulation condition. @return this, for call chaining. */
    @Override
    public DefaultSettings setEnvironmentHistorySimulationCondition(ICondition environmentHistorySimulationCondition) {
        this.environmentHistorySimulationCondition = environmentHistorySimulationCondition;
        return this;
    }

    /** @return whether user warnings are enabled. */
    @Override
    public boolean warnUser() {
        return warnUser;
    }

    /** Enables/disables user warnings. @return this, for call chaining. */
    @Override
    public DefaultSettings setWarnUser(boolean warnUser) {
        this.warnUser = warnUser;
        return this;
    }

    /** @return whether boolean sums may be decomposed. */
    @Override
    public boolean enableDecompositionOfBooleanSum() {
        return enableDecompositionOfBooleanSum;
    }

    /** Enables/disables decomposition of boolean sums. @return this, for call chaining. */
    @Override
    public DefaultSettings setEnableDecompositionOfBooleanSum(boolean enableDecompositionOfBooleanSum) {
        this.enableDecompositionOfBooleanSum = enableDecompositionOfBooleanSum;
        return this;
    }

    /** @return whether a boolean sum over {@code nbvars} variables should be incremental. */
    @Override
    public boolean enableIncrementalityOnBoolSum(int nbvars) {
        return enableIncrementalityOnBoolSum.test(nbvars);
    }

    /** Replaces the boolean-sum incrementality predicate. @return this, for call chaining. */
    @Override
    public DefaultSettings setEnableIncrementalityOnBoolSum(IntPredicate enableIncrementalityOnBoolSum) {
        this.enableIncrementalityOnBoolSum = enableIncrementalityOnBoolSum;
        return this;
    }

    /** @return whether propagators clone the variable array they receive. */
    @Override
    public boolean cloneVariableArrayInPropagator() {
        return cloneVariableArrayInPropagator;
    }

    /** Enables/disables cloning of variable arrays in propagators. @return this, for call chaining. */
    @Override
    public DefaultSettings setCloneVariableArrayInPropagator(boolean cloneVariableArrayInPropagator) {
        this.cloneVariableArrayInPropagator = cloneVariableArrayInPropagator;
        return this;
    }

    /** @return whether AC is enforced on ternary sums. */
    @Override
    public boolean enableACOnTernarySum() {
        return enableACOnTernarySum;
    }

    /** Enables/disables AC on ternary sums. @return this, for call chaining. */
    @Override
    public Settings setEnableACOnTernarySum(boolean enable) {
        this.enableACOnTernarySum = enable;
        return this;
    }

    /** @return the prefix used for automatically generated names. */
    @Override
    public String defaultPrefix() {
        return defaultPrefix;
    }

    /** Sets the prefix for automatically generated names. @return this, for call chaining. */
    @Override
    public DefaultSettings setDefaultPrefix(String defaultPrefix) {
        this.defaultPrefix = defaultPrefix;
        return this;
    }

    /** @return whether the SAT extension is enabled. */
    @Override
    public boolean enableSAT() {
        return enableSAT;
    }

    /** Enables/disables the SAT extension. @return this, for call chaining. */
    @Override
    public DefaultSettings setEnableSAT(boolean enableSAT) {
        this.enableSAT = enableSAT;
        return this;
    }

    /** @return whether swap-on-passivate is enabled. */
    @Override
    public boolean swapOnPassivate() {
        return swapOnPassivate;
    }

    /** Enables/disables swap-on-passivate. @return this, for call chaining. */
    @Override
    public DefaultSettings setSwapOnPassivate(boolean swapOnPassivate) {
        this.swapOnPassivate = swapOnPassivate;
        return this;
    }

    /** @return whether declared constraints are checked. */
    @Override
    public boolean checkDeclaredConstraints() {
        return checkDeclaredConstraints;
    }

    /** Enables/disables checking of declared constraints. @return this, for call chaining. */
    @Override
    public DefaultSettings setCheckDeclaredConstraints(boolean checkDeclaredConstraints) {
        this.checkDeclaredConstraints = checkDeclaredConstraints;
        return this;
    }
    /** @return whether all undeclared constraints are printed. */
    @Override
    public boolean printAllUndeclaredConstraints() {
        return printAllUndeclaredConstraints;
    }

    /** Enables/disables printing of all undeclared constraints. @return this, for call chaining. */
    @Override
    public Settings setPrintAllUndeclaredConstraints(boolean printAllUndeclaredConstraints) {
        this.printAllUndeclaredConstraints = printAllUndeclaredConstraints;
        return this;
    }

    /** @return a Solver for {@code model}, built by the configured factory. */
    @Override
    public Solver initSolver(Model model) {
        return initSolver.apply(model);
    }

    /** Replaces the Solver factory. @return this, for call chaining. */
    @Override
    public DefaultSettings setInitSolver(Function<Model, Solver> initSolver) {
        this.initSolver = initSolver;
        return this;
    }

    /** @return the propagation-engine hybridization flags (2-bit value). */
    @Override
    public byte enableHybridizationOfPropagationEngine() {
        return hybridEngine;
    }

    /** Sets the propagation-engine hybridization flags. @return this, for call chaining. */
    @Override
    public Settings setHybridizationOfPropagationEngine(byte hybrid) {
        this.hybridEngine = hybrid;
        return this;
    }

    /** @return the maximum number of learnt clauses kept. */
    @Override
    public int getNbMaxLearntClauses() {
        return nbMaxLearnt;
    }

    /** Sets the maximum number of learnt clauses. @return this, for call chaining. */
    @Override
    public Settings setNbMaxLearntClauses(int n) {
        this.nbMaxLearnt = n;
        return this;
    }

    /** @return the ratio applied when reducing the learnt-clause store. */
    @Override
    public float getRatioForClauseStoreReduction() {
        return this.clauseReductionRatio;
    }

    /** Sets the clause-store reduction ratio. @return this, for call chaining. */
    @Override
    public Settings setRatioForClauseStoreReduction(float f) {
        this.clauseReductionRatio = f;
        return this;
    }

    /** @return the maximum cardinality allowed for a learnt clause. */
    @Override
    public int getMaxLearntClauseCardinality() {
        // NOTE(review): backing field name is misspelled ("Cardinlity");
        // private, so harmless — rename would touch the field declaration too.
        return maxLearntCardinlity;
    }

    /** Sets the maximum learnt-clause cardinality. @return this, for call chaining. */
    @Override
    public Settings setMaxLearntClauseCardinality(int n) {
        maxLearntCardinlity = n;
        return this;
    }

    /** @return the dominance perimeter used for learnt clauses. */
    @Override
    public int getLearntClausesDominancePerimeter() {
        return dominancePerimeter;
    }

    /** Sets the learnt-clause dominance perimeter. @return this, for call chaining. */
    @Override
    public Settings setLearntClausesDominancePerimeter(int n) {
        this.dominancePerimeter = n;
        return this;
    }

    /** @return whether global failures in sums are explained. */
    @Override
    public boolean explainGlobalFailureInSum() {
        return explainGlobalFailureInSum;
    }

    /** Enables/disables explanation of global failures in sums. @return this, for call chaining. */
    @Override
    public Settings explainGlobalFailureInSum(boolean b) {
        this.explainGlobalFailureInSum = b;
        return this;
    }
}
| |
package com.surfapi.javadoc;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.StringUtils;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import com.sun.javadoc.AnnotationDesc;
import com.sun.javadoc.AnnotationTypeDoc;
import com.sun.javadoc.AnnotationTypeElementDoc;
import com.sun.javadoc.AnnotationValue;
import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.ConstructorDoc;
import com.sun.javadoc.Doc;
import com.sun.javadoc.ExecutableMemberDoc;
import com.sun.javadoc.FieldDoc;
import com.sun.javadoc.LanguageVersion;
import com.sun.javadoc.MemberDoc;
import com.sun.javadoc.MethodDoc;
import com.sun.javadoc.PackageDoc;
import com.sun.javadoc.ParamTag;
import com.sun.javadoc.Parameter;
import com.sun.javadoc.ParameterizedType;
import com.sun.javadoc.ProgramElementDoc;
import com.sun.javadoc.RootDoc;
import com.sun.javadoc.Tag;
import com.sun.javadoc.ThrowsTag;
import com.sun.javadoc.Type;
import com.sun.javadoc.TypeVariable;
import com.sun.javadoc.WildcardType;
import com.surfapi.coll.Cawls;
import com.surfapi.coll.MapBuilder;
import com.surfapi.json.JSONTrace;
/**
* Prints javadoc in JSON format.
*
* The structure of the JSON objects closely matches that of the
* PackageDoc, ClassDoc, MethodDoc, etc. APIs in the com.sun.javadoc library.
*
*
* Explanation of types:
*
* An AnnotationDesc represents an annotation applied to a class/method/parm/whatever.
* It contains an AnnotationType and a list of ElementValuePairs.
*
* An AnnotationType represents an annotation class (type). It inherits from ClassDoc.
* It contains a list of AnnotationTypeElementDocs.
*
* An AnnotationTypeElementDoc represents a single element/field within an annotation.
* It inherits from MethodDoc. It contains an AnnotationValue defaultValue.
*
* An AnnotationValue is just a wrapper around an Object value (usually a String, could be an Object).
*
* An AnnotationDesc.ElementValuePair is a pairing of AnnotationTypeElementDoc and AnnotationValue.
* It describes an applied annotation element value.
*
* TypeVariable is a template type variable name, e.g. for java.util.List<E>, the TypeVariable is "E".
* A TypeVariable has "bounds", which is its "extends" clause.
*
* TypeArgument is a value for a TypeVariable. E.g, for java.util.List<String>, the TypeArgument is "String".
* TypeArguments are contained in ParameterizedTypes.
*
* A ParameterizedType is a type that contains a TypeArgument. E.g java.util.List<String> is a ParameterizedType.
* The Type is "java.util.List", the TypeArgument is "String".
*
* A WildcardType is a TypeArgument that contains the wildcard '?'. Often it's accompanied with an 'extends' or
* 'super' clause.
*
*
*/
public class JsonDoclet {
    /**
     * Literal tag searched for in a methodDoc's commentText and in
     * @return/@param/@throws comments; occurrences are replaced with the
     * inherited text — so long as the overridden class/method is available
     * to this javadoc process (see resolveInheritDoc).
     */
    public static final String InheritDocTag = "{@inheritDoc}";

    /**
     * Doclet entry point. Javadoc calls this method, passing in the
     * doc metadata.
     *
     * @return true on success (javadoc doclet convention)
     */
    public static boolean start(RootDoc root) {
        return new JsonDoclet(root).go();
    }

    /**
     * NOTE: Without this method present and returning LanguageVersion.JAVA_1_5,
     * Javadoc will not process generics because it assumes LanguageVersion.JAVA_1_1
     *
     * @return language version (hard coded to LanguageVersion.JAVA_1_5)
     */
    public static LanguageVersion languageVersion() {
        return LanguageVersion.JAVA_1_5;
    }

    /**
     * The rootDoc, passed in to the custom doclet by the javadoc framework.
     */
    protected RootDoc rootDoc;

    /**
     * Set of packages. This gets populated as we process classDocs. Then
     * at the end of the run (go()) all the packageDocs are processed.
     */
    private Set<PackageDoc> packageDocs = new HashSet<PackageDoc>();

    /**
     * CTOR.
     *
     * @param rootDoc the doc metadata supplied by the javadoc framework
     */
    public JsonDoclet(RootDoc rootDoc) {
        this.rootDoc = rootDoc;
    }
/**
*
*/
protected boolean go() {
// TODO: OPTIMIZATION: remove all null/empty entries from the map, to minimize storage use.
// process Maps and Lists recursively.
for (ClassDoc classDoc : rootDoc.classes()) {
for ( Object obj : processClass(classDoc)) {
System.out.println( JSONTrace.prettyPrint( (Map) obj ) );
}
}
for ( Object obj : processPackages( getPackageDocs() )) {
System.out.println( JSONTrace.prettyPrint( (Map) obj ) );
}
return true;
}
/**
* Process the given classDoc along with all its methods, constructors, fields, enumConstants, etc.
*
* @return a list of javadoc models.
*/
protected JSONArray processClass(ClassDoc classDoc) {
JSONArray retMe = new JSONArray();
retMe.add( classDoc.isAnnotationType() ? processAnnotationTypeDoc( (AnnotationTypeDoc) classDoc ) : processClassDoc(classDoc) );
retMe.addAll( processMethodDocs( classDoc.methods() ) );
retMe.addAll( processConstructorDocs( classDoc.constructors() ) );
retMe.addAll( processFieldDocs( classDoc.fields() ) );
retMe.addAll( processFieldDocs( classDoc.enumConstants() ) );
return retMe;
}
/**
* Process the given set of packageDocs.
*
* @return a list of package models.
*/
protected JSONArray processPackages( Collection<PackageDoc> packageDocs ) {
JSONArray retMe = new JSONArray();
for (PackageDoc packageDoc : packageDocs) {
retMe.add( processPackageDoc(packageDoc) );
}
return retMe;
}
/**
* @return the set of packageDocs that were accumulated while processing the classDocs.
*/
protected Set<PackageDoc> getPackageDocs() {
packageDocs.remove(null); // Remove null in case it was added
return packageDocs;
}
/**
* @return full JSON objects for the given ClassDoc[]
*/
protected JSONArray processClassDocs(ClassDoc[] classDocs) {
JSONArray retMe = new JSONArray();
for (ClassDoc classDoc : classDocs) {
retMe.add( processClassDoc(classDoc) );
}
return retMe;
}
    /**
     * @return the full JSON for the given ClassDoc: the common program-element
     *         fields plus type info, the full inheritance picture, and stubs
     *         for every member.
     */
    protected JSONObject processClassDoc(ClassDoc classDoc) {
        // Common fields shared by all program elements (name, modifiers, ...).
        JSONObject classJson = processProgramElementDoc(classDoc);
        classJson.putAll( processType(classDoc) ); // ??? maybe to get parameterized type info? -- TODO confirm
        // Direct and transitive superclass / interface information.
        classJson.put("superclass", processClassDocStub(classDoc.superclass()));
        classJson.put("superclassType", processType(classDoc.superclassType()));
        classJson.put("allSuperclassTypes", getAllUniqueSuperclassTypes(classDoc));
        classJson.put("interfaces", processClassDocStubs(classDoc.interfaces()));
        classJson.put("interfaceTypes", processTypes(classDoc.interfaceTypes()));
        classJson.put("allInterfaceTypes", getAllUniqueInterfaceTypes(classDoc));
        // Generics: type parameters and their @param tags.
        classJson.put("typeParameters", processTypeVariables( classDoc.typeParameters() ) );
        classJson.put("typeParamTags", processParamTags( classDoc.typeParamTags() ) );
        // Members are emitted as stubs only; their full models are produced
        // separately by processClass.
        classJson.put("methods", processMethodDocStubs(classDoc.methods()));
        classJson.put("allInheritedMethods", getAllInheritedMethods( classDoc ));
        classJson.put("constructors", processConstructorDocStubs( classDoc.constructors() ));
        classJson.put("fields", processFieldDocStubs(classDoc.fields()));
        classJson.put("enumConstants", processFieldDocStubs(classDoc.enumConstants()));
        classJson.put("innerClasses", processClassDocStubs(classDoc.innerClasses()));
        return classJson;
    }
/**
* @return JSON stubs for the given ClassDoc[].
*/
protected JSONArray processClassDocStubs(ClassDoc[] classDocs) {
JSONArray retMe = new JSONArray();
for (ClassDoc classDoc : classDocs) {
retMe.add( processClassDocStub(classDoc) );
}
return retMe;
}
/**
* @return a JSON stub for the given ClassDoc.
*/
protected JSONObject processClassDocStub(ClassDoc classDoc) {
return processProgramElementDocStub(classDoc);
}
    /**
     * @return the unique set of all superclass types extended by the given classDoc,
     *         de-duplicated by "qualifiedTypeName" (note: interfaces may extend
     *         more than one interface, so duplicates are possible).
     */
    protected List<Map> getAllUniqueSuperclassTypes(ClassDoc classDoc) {
        return Cawls.uniqueForField( getAllSuperclassTypes(classDoc), "qualifiedTypeName" );
    }
/**
* @return a list of all superclass types, from most derived to oldest grand parent
*/
protected JSONArray getAllSuperclassTypes(ClassDoc classDoc) {
JSONArray retMe = new JSONArray();
for (Type superclassType : getSuperclassTypes(classDoc)) {
retMe.add( processType( superclassType ) );
retMe.addAll( getAllSuperclassTypes( superclassType.asClassDoc() ) );
}
return retMe;
}
/**
* @return the unique set of all interface types implemented by the given classDoc
* including interfaces implemented by superclasses.
*/
protected List<Map> getAllUniqueInterfaceTypes(ClassDoc classDoc) {
return Cawls.uniqueForField( getAllInterfaceTypes(classDoc), "qualifiedTypeName" );
}
/**
* @return a list of all superclass types
*/
protected JSONArray getAllInterfaceTypes(ClassDoc classDoc) {
JSONArray retMe = new JSONArray();
if (classDoc != null) {
retMe.addAll( processTypes( classDoc.interfaceTypes() ) );
retMe.addAll( getAllInterfaceTypes( classDoc.superclass() ) );
}
return retMe;
}
/**
* @return a list of all interfaces implemented by the given classDoc and
* all superclasses of the classDoc.
*/
protected List<ClassDoc> getAllInterfaces(ClassDoc classDoc) {
List<ClassDoc> retMe = new ArrayList<ClassDoc>();
if (classDoc != null) {
retMe.addAll( Arrays.asList(classDoc.interfaces()) );
retMe.addAll( getAllInterfaces( classDoc.superclass() ) );
}
return retMe;
}
    /**
     * @return the full JSON for the given PackageDoc, or null if packageDoc is null.
     */
    protected JSONObject processPackageDoc(PackageDoc packageDoc) {
        if (packageDoc == null) {
            return null;
        }
        // Common Doc fields, then stubs for each category of contained type.
        JSONObject retMe = processDoc(packageDoc);
        retMe.put("annotations", processAnnotationDescs(packageDoc.annotations()));
        retMe.put("annotationTypes", processAnnotationTypeDocStubs(packageDoc.annotationTypes()));
        retMe.put("enums", processClassDocStubs(packageDoc.enums()));
        retMe.put("errors", processClassDocStubs(packageDoc.errors()));
        retMe.put("exceptions", processClassDocStubs(packageDoc.exceptions()));
        retMe.put("interfaces", processClassDocStubs(packageDoc.interfaces()));
        retMe.put("ordinaryClasses", processClassDocStubs(packageDoc.ordinaryClasses()));
        return retMe;
    }

    /**
     * @return a JSON stub for the given PackageDoc (common Doc fields only).
     */
    protected JSONObject processPackageDocStub(PackageDoc packageDoc) {
        return processDocStub(packageDoc);
    }
/**
* @return full JSON objects for the given ConstructorDoc[]
*/
protected JSONArray processConstructorDocs(ConstructorDoc[] constructorDocs) {
JSONArray retMe = new JSONArray();
for (ConstructorDoc constructorDoc: constructorDocs) {
retMe.add( processConstructorDoc( constructorDoc ) );
}
return retMe;
}
/**
* @return JSON stubs for the given ConstructorDoc[].
*/
protected JSONArray processConstructorDocStubs(ConstructorDoc[] constructorDocs) {
JSONArray retMe = new JSONArray();
for (ConstructorDoc constructorDoc: constructorDocs) {
retMe.add( processConstructorDocStub( constructorDoc ) );
}
return retMe;
}
/**
* @return the full JSON for the given ConstructorDoc
*/
protected JSONObject processConstructorDoc(ConstructorDoc constructorDoc) {
return processExecutableMemberDoc(constructorDoc);
}
/**
* @return a JSON stub for the given ConstructorDoc
*/
protected JSONObject processConstructorDocStub(ConstructorDoc constructorDoc) {
return processExecutableMemberDocStub(constructorDoc);
}
/**
* @return classDoc.superclass for classes, classDoc.interfaces[0] for interfaces
*/
protected ClassDoc getSuperclass(ClassDoc classDoc) {
if (classDoc.isInterface()){
return (Cawls.isEmpty(classDoc.interfaces())) ? null : classDoc.interfaces()[0] ;
} else {
return classDoc.superclass();
}
}
/**
* @return classDoc.superclass for classes, classDoc.interfaces[0] for interfaces
*/
protected List<Type> getSuperclassTypes(ClassDoc classDoc) {
if (classDoc == null) {
return new ArrayList<Type>();
} else if (classDoc.isInterface()){
return Arrays.asList( classDoc.interfaceTypes() );
} else if (classDoc.superclassType() != null) {
return Arrays.asList( classDoc.superclassType() );
} else {
return new ArrayList<Type>();
}
}
// /**
// * @return [ { "superclassType": {}, "inheritedMethods": [ {}, {}, ... ] },
// * { "superclassType": {}, "inheritedMethods": [ {}, {}, ... ] } ]
// */
// protected JSONArray getAllInheritedMethods(ClassDoc classDoc) {
// JSONArray retMe = new JSONArray();
//
// // Keep track of methods we've already inherited so as not to
// // inherit them again from another superclass.
// List<MethodDoc> alreadyInherited = new ArrayList<MethodDoc>();
//
// // TODO: use getSuperclassTypes to handle interfaces with more than 1 superclass.
// // for ( Type superclassType : getSuperclassTypes(classDoc) ) {
//
// for ( ClassDoc superclassDoc = getSuperclass(classDoc);
// superclassDoc != null;
// superclassDoc = getSuperclass(superclassDoc) ) {
//
// List<MethodDoc> inheritedMethods = new ArrayList<MethodDoc>();
//
// for (MethodDoc supermethodDoc : superclassDoc.methods()) {
// if ( !isMethodOverridden( supermethodDoc, classDoc.methods(), alreadyInherited ) ) {
// inheritedMethods.add( supermethodDoc );
// }
// }
//
// if (inheritedMethods.size() > 0) {
// retMe.add( processInheritedMethods( superclassDoc, inheritedMethods ) );
//
// // Keep track of inheritedMethods so as not to inherit them again from
// // another superclass.
// alreadyInherited.addAll( inheritedMethods );
// }
// }
// // }
//
// return retMe;
// }
    /**
     * @return [ { "superclassType": {}, "inheritedMethods": [ {}, {}, ... ] },
     *           { "superclassType": {}, "inheritedMethods": [ {}, {}, ... ] } ]
     *         — one entry per ancestor that contributes at least one method,
     *         de-duplicated by "superclassType".
     */
    protected List<Map> getAllInheritedMethods(ClassDoc classDoc) {
        return Cawls.uniqueForField( getAllInheritedMethodsHelper(classDoc, classDoc, new ArrayList<MethodDoc>()), "superclassType");
    }
    /**
     * Recursive worker for getAllInheritedMethods.
     *
     * @param childClassDoc - The class for which we are determining the inherited methods
     * @param parentClassDoc - will search for inherited methods from the *parent*
     *                         of this class.  So the first pass thru this recursive
     *                         algorithm should pass in the base/child class for this
     *                         argument.
     * @param alreadyInherited - keeps track of methods we've already inherited so
     *                           as not to inherit them again from another superclass.
     *
     * @return [ { "superclassType": {}, "inheritedMethods": [ {}, {}, ... ] },
     *           { "superclassType": {}, "inheritedMethods": [ {}, {}, ... ] } ]
     */
    protected List<Map> getAllInheritedMethodsHelper(ClassDoc childClassDoc,
                                                     ClassDoc parentClassDoc,
                                                     List<MethodDoc> alreadyInherited) {
        List<Map> retMe = new ArrayList<Map>();
        // use getSuperclassTypes to handle interfaces with more than 1 superclass.
        for ( Type superclassType : getSuperclassTypes(parentClassDoc) ) {
            ClassDoc superclassDoc = superclassType.asClassDoc();
            // Collect all methods inherited from this superclass,
            // ignoring methods that we've alreadyInherited from a
            // previous superclass.
            List<MethodDoc> inheritedMethods = new ArrayList<MethodDoc>();
            for (MethodDoc supermethodDoc : superclassDoc.methods()) {
                if ( !isMethodOverridden( supermethodDoc, childClassDoc.methods(), alreadyInherited ) ) {
                    inheritedMethods.add( supermethodDoc );
                }
            }
            if (inheritedMethods.size() > 0) {
                // Create an entry for the inherited methods from this superclass
                retMe.add( processInheritedMethods( superclassDoc, inheritedMethods ) );
                // Keep track of inheritedMethods so as not to inherit them again from
                // another superclass.
                alreadyInherited.addAll( inheritedMethods );
            }
            // Recurse to search parents of this superclass
            retMe.addAll( getAllInheritedMethodsHelper( childClassDoc, superclassDoc, alreadyInherited ) );
        }
        return retMe;
    }
/**
* @return true if the given supermethodDoc is overridden by one of the given methodDocs.
*/
protected boolean isMethodOverridden( MethodDoc supermethodDoc,
MethodDoc[] methodDocs,
List<MethodDoc> alreadyInherited) {
for (MethodDoc methodDoc : methodDocs) {
if (methodDoc.overrides(supermethodDoc)) {
return true;
}
}
for (MethodDoc methodDoc : alreadyInherited) {
if (methodDoc.overrides(supermethodDoc)) {
return true;
}
}
return false;
}
/**
* @return the method from the givne supermethodDocs list that is overridden by the given methodDoc.
*/
protected MethodDoc getOverriddenMethod(MethodDoc methodDoc, MethodDoc[] supermethodDocs) {
if (supermethodDocs == null) {
return null;
}
for (MethodDoc supermethodDoc : supermethodDocs) {
if (methodDoc.overrides(supermethodDoc)) {
return supermethodDoc;
}
}
return null;
}
/**
* @return { "superclassType": {}, "inheritedMethods": [ {}, {}, ... ] }
*/
protected JSONObject processInheritedMethods(ClassDoc superclassDoc, List<MethodDoc> inheritedMethods) {
JSONObject retMe = new JSONObject();
retMe.put("superclassType", processType(superclassDoc));
retMe.put("inheritedMethods", processMethodDocStubs( inheritedMethods ) );
return retMe;
}
/**
* @return JSON stubs for the given MethodDoc[].
*/
protected JSONArray processMethodDocStubs(MethodDoc[] methodDocs) {
JSONArray retMe = new JSONArray();
for (MethodDoc methodDoc: methodDocs) {
retMe.add( processMethodDocStub( methodDoc ) );
}
return retMe;
}
/**
* @return JSON stubs for the given MethodDoc[].
*/
protected JSONArray processMethodDocStubs(Collection<MethodDoc> methodDocs) {
JSONArray retMe = new JSONArray();
for (MethodDoc methodDoc: methodDocs) {
retMe.add( processMethodDocStub( methodDoc ) );
}
return retMe;
}
/**
* @return full JSON objects for the given MethodDoc[]
*/
protected JSONArray processMethodDocs(MethodDoc[] methodDocs) {
JSONArray retMe = new JSONArray();
for (MethodDoc methodDoc: methodDocs) {
retMe.add( processMethodDoc( methodDoc ) );
}
return retMe;
}
    /**
     * @return a JSON stub for the given MethodDoc (executable-member stub plus
     *         a return-type stub), or null if methodDoc is null.
     */
    protected JSONObject processMethodDocStub(MethodDoc methodDoc) {
        if (methodDoc == null) {
            return null;
        }
        JSONObject retMe = processExecutableMemberDocStub(methodDoc);
        retMe.put("returnType", processTypeStub(methodDoc.returnType()));
        return retMe;
    }
    /**
     * @return the full JSON for the given MethodDoc, or null if methodDoc is null.
     *         Includes the overridden method/type, the interface method it
     *         implements (if any), and doc inherited from either.
     */
    protected JSONObject processMethodDoc(MethodDoc methodDoc) {
        if (methodDoc == null) {
            return null;
        }
        JSONObject retMe = processExecutableMemberDoc(methodDoc);
        retMe.put("returnType", processType(methodDoc.returnType()));
        retMe.put("overriddenMethod", processMethodDocStub(methodDoc.overriddenMethod() ) );
        retMe.put("overriddenType", processTypeStub(methodDoc.overriddenType() ) );
        // The interface method this method implements ("specified by"), if any.
        MethodDoc specifiedByMethodDoc = getSpecifiedByMethod(methodDoc);
        retMe.put("specifiedByMethod", processMethodDocStub( specifiedByMethodDoc ) );
        // Only resolve {@inheritDoc} when there is actually something to inherit from.
        if (methodDoc.overriddenMethod() != null || specifiedByMethodDoc != null) {
            inheritDoc(retMe, methodDoc, specifiedByMethodDoc);
        }
        return retMe;
    }
    /**
     * Process any and all commentText/@return/@param/@throws tags that are
     * either missing or contain "{@inheritDoc}" by walking up the inheritance
     * tree looking for doc to inherit.  Mutates retMe in place.
     *
     * @return retMe
     */
    protected JSONObject inheritDoc(JSONObject retMe, MethodDoc methodDoc, MethodDoc specifiedByMethodDoc) {
        retMe.put("commentText", getInheritedCommentText(methodDoc, specifiedByMethodDoc));
        // void methods have no @return tag to inherit.
        if ( !methodDoc.returnType().typeName().equals("void")) {
            inheritReturnTag(retMe, methodDoc, specifiedByMethodDoc);
        }
        inheritParamTags(retMe, methodDoc, specifiedByMethodDoc);
        inheritThrowsTags(retMe, methodDoc, specifiedByMethodDoc);
        return retMe;
    }
    /**
     * Resolves the @return tag against the inheritance chain: updates the
     * existing @return tag in retMe's "tags" list in place, or appends a new
     * one when the method had none.
     *
     * @return retMe
     */
    protected JSONObject inheritReturnTag(JSONObject retMe, MethodDoc methodDoc, MethodDoc specifiedByMethodDoc) {
        String returnTagText = getInheritedReturnTagText(methodDoc, specifiedByMethodDoc);
        if (! StringUtils.isEmpty( returnTagText ) ) {
            // TODO: Cawls.replaceFirst would be a nice method to have....
            JSONObject returnTag = (JSONObject) Cawls.findFirst( (List<Map>)retMe.get("tags"), new MapBuilder().append("name","@return") );
            if (returnTag != null) {
                returnTag.put("text", returnTagText);   // updates the list in place.
            } else {
                // Add a new tag to the list.
                ((JSONArray)retMe.get("tags")).add( new MapBuilder().append("name", "@return")
                                                                    .append("kind", "@return")
                                                                    .append("text", returnTagText ) );
            }
        }
        return retMe;
    }
    /**
     * Either the paramTag exists or it doesn't.  If it doesn't, inherit.
     * If it does, and contains {@inheritDoc}, resolve inherited doc.
     * Stores the result under "paramTags" in retMe.
     *
     * @return retMe
     */
    protected JSONObject inheritParamTags(JSONObject retMe, MethodDoc methodDoc, MethodDoc specifiedByMethodDoc) {
        // First things first - compile a list of ParamTags.  If any are missing,
        // inherit from the parent class.
        List<ParamTag> paramTags = getInheritedParamTags(methodDoc, specifiedByMethodDoc);
        List<Map> paramTagModels = new ArrayList<Map>();
        for ( ParamTag paramTag : paramTags ) {
            Map paramTagModel = processParamTag(paramTag);
            // The comment itself may still contain {@inheritDoc}; resolve it.
            paramTagModel.put("parameterComment", getInheritedParamTagComment(methodDoc, paramTag.parameterName(), specifiedByMethodDoc) );
            paramTagModels.add(paramTagModel);
        }
        retMe.put("paramTags", paramTagModels);
        return retMe;
    }
    /**
     * Placeholder: @throws tag inheritance is not implemented yet.
     *
     * @return retMe
     */
    protected JSONObject inheritThrowsTags(JSONObject retMe, MethodDoc methodDoc, MethodDoc specifiedByMethodDoc) {
        // TODO
        return retMe;
    }
/**
* @return if currentText is empty, return inheritedText; otherwise replace all
* instances of {@inheritDoc} in currentText with inheritedText
*/
protected String resolveInheritDoc(String currentText, String inheritedText) {
if (StringUtils.isEmpty(currentText)) {
return inheritedText;
} else if ( !StringUtils.isEmpty(inheritedText) ) {
// Replace any occurence of {@inheritDoc} with the inheritedText.
return currentText.replace( JsonDoclet.InheritDocTag, inheritedText );
}
return currentText;
}
    /**
     * Resolve inherited comment text by scanning up the methodDoc's inheritance chain,
     * resolving any {@inheritDoc} encountered along the way.
     *
     * This method returns as soon as it finds a non-empty commentText with all {@inheritDoc}
     * tags resolved.
     *
     * @return the comment text for the given methodDoc, all inheritance resolved.
     */
    protected String getInheritedCommentText(MethodDoc methodDoc, MethodDoc specifiedByMethodDoc) {
        String retMe = null;
        // Walk up the override chain until the text is non-empty and fully resolved.
        for ( ;
              methodDoc != null && (StringUtils.isEmpty(retMe) || retMe.contains(JsonDoclet.InheritDocTag) );
              methodDoc = methodDoc.overriddenMethod() ) {
            retMe = resolveInheritDoc(retMe, methodDoc.commentText() );
        }
        // Inherit from the interface
        retMe = resolveInheritDoc(retMe, (specifiedByMethodDoc != null) ? specifiedByMethodDoc.commentText() : null);
        return retMe;
    }
    /**
     * Resolve inherited @return tag text by scanning up the methodDoc's inheritance chain,
     * resolving any {@inheritDoc} encountered along the way.
     *
     * This method returns as soon as it finds a non-empty @return tag text with all {@inheritDoc}
     * tags resolved.
     *
     * Note: the logic of this method is exactly the same as getInheritedCommentText.
     * The only difference is the value we're retrieving from the methodDoc (@return tag
     * text vs commentText).
     *
     * @return the @return tag text for the given methodDoc, all inheritance resolved.
     */
    protected String getInheritedReturnTagText(MethodDoc methodDoc, MethodDoc specifiedByMethodDoc) {
        String retMe = null;
        for ( ;
              methodDoc != null && (StringUtils.isEmpty(retMe) || retMe.contains(JsonDoclet.InheritDocTag) );
              methodDoc = methodDoc.overriddenMethod() ) {
            retMe = resolveInheritDoc(retMe, getReturnTagText( methodDoc ) );
        }
        // Inherit from the interface (getReturnTagText tolerates null).
        retMe = resolveInheritDoc(retMe, getReturnTagText( specifiedByMethodDoc) );
        return retMe;
    }
/**
* @return the @return tag text for the given methodDoc, or null if not found.
*/
protected String getReturnTagText(MethodDoc methodDoc) {
if (methodDoc == null) {
return null;
}
for (Tag tag : Cawls.safeIterable(methodDoc.tags())) {
if (tag.name().equals("@return")) {
return tag.text();
}
}
return null;
}
    /**
     * Resolve inherited @param tag text by scanning up the methodDoc's inheritance chain,
     * resolving any {@inheritDoc} encountered along the way.
     *
     * This method returns as soon as it finds a non-empty @param tag text with all {@inheritDoc}
     * tags resolved.
     *
     * Note: the logic of this method is exactly the same as getInheritedCommentText.
     * The only difference is the value we're retrieving from the methodDoc (@param tag
     * text vs commentText).
     *
     * @return the @param tag text for the given methodDoc, all inheritance resolved.
     */
    protected String getInheritedParamTagComment(MethodDoc methodDoc, String parameterName, MethodDoc specifiedByMethodDoc) {
        String retMe = null;
        // Walk up the override chain until the comment is non-empty and fully resolved.
        for ( ;
              methodDoc != null && (StringUtils.isEmpty(retMe) || retMe.contains(JsonDoclet.InheritDocTag) );
              methodDoc = methodDoc.overriddenMethod() ) {
            retMe = resolveInheritDoc(retMe, getParamTagComment( methodDoc, parameterName ));
        }
        // Inherit from the interface (getParamTagComment tolerates null).
        retMe = resolveInheritDoc(retMe, getParamTagComment( specifiedByMethodDoc, parameterName) );
        return retMe;
    }
/**
* @return the @param tag comment for the given methodDoc and parameter, or null if not found.
*/
protected String getParamTagComment(MethodDoc methodDoc, String parameterName) {
if (methodDoc == null) {
return null;
}
for (ParamTag paramTag : Cawls.safeIterable(methodDoc.paramTags())) {
if (paramTag.parameterName().equals( parameterName )) {
return paramTag.parameterComment();
}
}
return null;
}
/**
* @return the @param tag for the given parameterName
*/
protected ParamTag getParamTag(ParamTag[] paramTags, String parameterName) {
for (ParamTag paramTag : Cawls.safeIterable(paramTags)) {
if (paramTag.parameterName().equals( parameterName )) {
return paramTag;
}
}
return null;
}
/**
* Resolve inherited @param tags.
*
* This method compiles a list of param tags for each of the given methodDoc's parameters.
* If a paramTag is missing from the given methodDoc, it is searched for in the method's
* inheritance chain.
*
* @return a list of @param tags for the given methodDoc, some of which may be inherited.
*/
protected List<ParamTag> getInheritedParamTags(MethodDoc methodDoc, MethodDoc specifiedByMethodDoc) {
List<ParamTag> retMe = new ArrayList<ParamTag>();
for ( Parameter parameter : methodDoc.parameters() ) {
ParamTag paramTag = getInheritedParamTag( methodDoc, parameter.name(), specifiedByMethodDoc);
if (paramTag != null) {
retMe.add( paramTag );
}
}
return retMe;
}
/**
 * @return the first non-null ParamTag for parameterName found while walking up the
 *         override chain of the given methodDoc; falls back to the interface method
 *         (specifiedByMethodDoc) if the superclass hierarchy has none.
 */
protected ParamTag getInheritedParamTag(MethodDoc methodDoc, String parameterName, MethodDoc specifiedByMethodDoc) {
    MethodDoc current = methodDoc;
    while (current != null) {
        ParamTag tag = getParamTag(current.paramTags(), parameterName);
        if (tag != null) {
            return tag;
        }
        current = current.overriddenMethod();
    }
    // Not found anywhere in the superclass hierarchy; check the interface method.
    if (specifiedByMethodDoc == null) {
        return null;
    }
    return getParamTag(specifiedByMethodDoc.paramTags(), parameterName);
}
/**
 * @return the interface method that the given methodDoc implements ("specified by"),
 *         searching all interfaces of the method's containing class; null if none.
 */
protected MethodDoc getSpecifiedByMethod(MethodDoc methodDoc) {
    for (ClassDoc intf : getAllInterfaces(methodDoc.containingClass())) {
        MethodDoc specifiedBy = getSpecifiedByMethod(methodDoc, intf);
        if (specifiedBy != null) {
            return specifiedBy;
        }
    }
    return null;
}
/**
 * @return the method from the given intf (or its superclasses) that is overridden
 * (or implemented by) the given methodDoc.
 */
protected MethodDoc getSpecifiedByMethod(MethodDoc methodDoc, ClassDoc intf) {
MethodDoc retMe = null;
if (intf != null) {
retMe = getOverriddenMethod( methodDoc, intf.methods() );
if (retMe == null) {
// Try the super interface.
// NOTE(review): in the javadoc API, ClassDoc.superclass() returns null for
// interface types (super-interfaces are reported via interfaces()), so this
// recursion may never ascend. Confirm getAllInterfaces() already flattens the
// full super-interface closure, which would make this recursion harmless.
retMe = getSpecifiedByMethod( methodDoc, intf.superclass() );
}
}
return retMe;
}
/**
 * @return full JSON objects for the given FieldDoc[]; an empty array if fieldDocs is null.
 */
protected JSONArray processFieldDocs(FieldDoc[] fieldDocs) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (FieldDoc fieldDoc : Cawls.safeIterable(fieldDocs)) {
        retMe.add( processFieldDoc( fieldDoc ) );
    }
    return retMe;
}
/**
 * @return JSON stubs for the given FieldDoc[]; an empty array if fieldDocs is null.
 */
protected JSONArray processFieldDocStubs(FieldDoc[] fieldDocs) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (FieldDoc fieldDoc : Cawls.safeIterable(fieldDocs)) {
        retMe.add( processFieldDocStub( fieldDoc ) );
    }
    return retMe;
}
/**
 * @return the full JSON for the given FieldDoc: the MemberDoc fields plus type,
 *         constant-value information and serialField tags; null if fieldDoc is null.
 */
protected JSONObject processFieldDoc(FieldDoc fieldDoc) {
    if (fieldDoc == null) {
        return null;
    }
    JSONObject json = processMemberDoc(fieldDoc);
    json.put("type", processType(fieldDoc.type()));
    json.put("constantValueExpression", fieldDoc.constantValueExpression());
    // constantValue() may be null for non-constant fields.
    Object constantValue = fieldDoc.constantValue();
    json.put("constantValue", (constantValue == null) ? null : constantValue.toString());
    json.put("serialFieldTags", processTags(fieldDoc.serialFieldTags()));
    return json;
}
/**
 * @return a JSON stub for the given FieldDoc (member stub plus type stub and
 *         constant value expression); null if fieldDoc is null.
 */
protected JSONObject processFieldDocStub(FieldDoc fieldDoc) {
    if (fieldDoc == null) {
        return null;
    }
    JSONObject json = processMemberDocStub(fieldDoc);
    json.put("type", processTypeStub(fieldDoc.type()));
    json.put("constantValueExpression", fieldDoc.constantValueExpression());
    return json;
}
/**
 * @return the full JSON for the given MemberDoc (the ProgramElementDoc fields plus
 *         isSynthetic); null if memberDoc is null.
 */
protected JSONObject processMemberDoc(MemberDoc memberDoc) {
    if (memberDoc == null) {
        return null;
    }
    JSONObject json = processProgramElementDoc(memberDoc);
    json.put("isSynthetic", memberDoc.isSynthetic());
    return json;
}
/**
 * @return a JSON stub for the given MemberDoc; currently identical to the
 * ProgramElementDoc stub (name, metaType, firstSentenceTags, qualifiedName,
 * modifiers). Null-safe: returns null for null input.
 */
protected JSONObject processMemberDocStub(MemberDoc memberDoc) {
return processProgramElementDocStub(memberDoc);
}
/**
 * @return the full JSON for the given ExecutableMemberDoc (method or constructor):
 *         the MemberDoc fields plus signatures, parameters, tags, type parameters
 *         and thrown exceptions; null if emDoc is null.
 */
protected JSONObject processExecutableMemberDoc(ExecutableMemberDoc emDoc) {
    // Guard against null, consistent with processMemberDoc/processFieldDoc;
    // previously a null emDoc caused an NPE on the first put().
    if (emDoc == null) {
        return null;
    }
    JSONObject retMe = processMemberDoc(emDoc);
    retMe.put("flatSignature", emDoc.flatSignature());
    retMe.put("signature", emDoc.signature());
    retMe.put("parameters", processParameters(emDoc.parameters()));
    retMe.put("paramTags", processParamTags(emDoc.paramTags()));
    retMe.put("thrownExceptions", processClassDocStubs(emDoc.thrownExceptions()));
    retMe.put("thrownExceptionTypes", processTypes(emDoc.thrownExceptionTypes()));
    retMe.put("typeParameters", processTypeVariables(emDoc.typeParameters()));
    retMe.put("typeParamTags", processParamTags(emDoc.typeParamTags()));
    retMe.put("throwsTags", processThrowsTags(emDoc.throwsTags()));
    return retMe;
}
/**
 * @return a JSON stub for the given ExecutableMemberDoc (member stub plus parameter
 *         stubs, flat signature, thrown exception types and type parameters); null
 *         if emDoc is null.
 */
protected JSONObject processExecutableMemberDocStub(ExecutableMemberDoc emDoc) {
    // Guard against null, consistent with the other process*Stub methods;
    // previously a null emDoc caused an NPE on the first put().
    if (emDoc == null) {
        return null;
    }
    JSONObject retMe = processMemberDocStub(emDoc);
    retMe.put("parameters", processParameterStubs(emDoc.parameters()));
    retMe.put("flatSignature", emDoc.flatSignature());
    retMe.put("thrownExceptionTypes", processTypes(emDoc.thrownExceptionTypes()));
    retMe.put("typeParameters", processTypeVariables(emDoc.typeParameters()));
    return retMe;
}
/**
 * @return full JSON objects for the given Parameter[]; an empty array if parameters is null.
 */
protected JSONArray processParameters(Parameter[] parameters) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (Parameter parameter : Cawls.safeIterable(parameters)) {
        retMe.add( processParameter( parameter ) );
    }
    return retMe;
}
/**
 * @return JSON stubs for the given Parameter[]; an empty array if parameters is null.
 */
protected JSONArray processParameterStubs(Parameter[] parameters) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (Parameter parameter : Cawls.safeIterable(parameters)) {
        retMe.add( processParameterStub( parameter ) );
    }
    return retMe;
}
/**
 * @return the full JSON for the given Parameter (name, toString, type, typeName,
 *         annotations); null if parameter is null.
 */
protected JSONObject processParameter(Parameter parameter) {
    if (parameter == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("name", parameter.name());
    json.put("toString", parameter.toString());
    json.put("type", processType(parameter.type()));
    json.put("typeName", parameter.typeName());
    json.put("annotations", processAnnotationDescs(parameter.annotations()));
    return json;
}
/**
 * @return a JSON stub for the given Parameter (type stub and name); null if
 *         parameter is null.
 */
protected JSONObject processParameterStub(Parameter parameter) {
    if (parameter == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("type", processTypeStub(parameter.type()));
    json.put("name", parameter.name());
    return json;
}
/**
 * @return the full JSON for the given ProgramElementDoc: the Doc fields plus
 *         containing package/class stubs, qualified name, modifiers and
 *         annotations; null if programElementDoc is null.
 *         Side effect: records the containing package in packageDocs.
 */
protected JSONObject processProgramElementDoc(ProgramElementDoc programElementDoc) {
    if (programElementDoc == null) {
        return null;
    }
    JSONObject json = processDoc(programElementDoc);
    json.put("containingPackage", processPackageDocStub(programElementDoc.containingPackage()));
    // Remember the package so it can be processed later.
    packageDocs.add(programElementDoc.containingPackage());
    json.put("containingClass", processClassDocStub(programElementDoc.containingClass()));
    json.put("qualifiedName", programElementDoc.qualifiedName());
    json.put("modifiers", programElementDoc.modifiers());
    json.put("modifierSpecifier", programElementDoc.modifierSpecifier());
    json.put("annotations", processAnnotationDescs(programElementDoc.annotations()));
    return json;
}
/**
 * @return a JSON stub for the given ProgramElementDoc (Doc stub plus qualified
 *         name and modifiers); null if peDoc is null.
 */
protected JSONObject processProgramElementDocStub(ProgramElementDoc peDoc) {
    if (peDoc == null) {
        return null;
    }
    JSONObject json = processDocStub(peDoc);
    json.put("qualifiedName", peDoc.qualifiedName());
    json.put("modifiers", peDoc.modifiers());
    return json;
}
/**
 * @return full JSON objects for the given AnnotationDesc[]; an empty array if
 *         annotations is null.
 */
protected JSONArray processAnnotationDescs(AnnotationDesc[] annotations) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (AnnotationDesc annotation : Cawls.safeIterable(annotations)) {
        retMe.add( processAnnotationDesc(annotation) );
    }
    return retMe;
}
/**
 * @return the full JSON for the given AnnotationDesc: its annotation type (as a
 *         stub) and its element values; null if annotation is null.
 */
protected JSONObject processAnnotationDesc(AnnotationDesc annotation) {
    if (annotation == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("annotationType", processAnnotationTypeDocStub(annotation.annotationType()));
    json.put("elementValues", processAnnotationElementValues(annotation.elementValues()));
    return json;
}
/**
 * @return the full JSON for the given annotation type (the ClassDoc fields plus
 *         its elements); null if annoTypeDoc is null.
 */
protected JSONObject processAnnotationTypeDoc(AnnotationTypeDoc annoTypeDoc) {
    // Guard against null, consistent with the other process* methods;
    // previously a null annoTypeDoc caused an NPE below.
    if (annoTypeDoc == null) {
        return null;
    }
    JSONObject retMe = processClassDoc(annoTypeDoc);
    retMe.put( "elements", processAnnotationTypeElementDocs( annoTypeDoc.elements() ) );
    return retMe;
}
/**
 * @return JSON stubs for the given AnnotationTypeDoc[]; an empty array if
 *         annotationTypeDocs is null.
 */
protected JSONArray processAnnotationTypeDocStubs(AnnotationTypeDoc[] annotationTypeDocs) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (AnnotationTypeDoc annotationTypeDoc : Cawls.safeIterable(annotationTypeDocs)) {
        retMe.add( processAnnotationTypeDocStub(annotationTypeDoc) );
    }
    return retMe;
}
/**
 * @return a JSON stub for the given AnnotationTypeDoc; currently identical to the
 * ClassDoc stub. Null-safe: returns null for null input.
 */
protected JSONObject processAnnotationTypeDocStub(AnnotationTypeDoc annotationTypeDoc) {
return processClassDocStub(annotationTypeDoc);
}
/**
 * @return full JSON objects for the given AnnotationTypeElementDoc[]; an empty
 *         array if annoTypeElementDocs is null.
 *         (Javadoc previously copy-pasted from processAnnotationTypeDoc.)
 */
protected JSONArray processAnnotationTypeElementDocs(AnnotationTypeElementDoc[] annoTypeElementDocs) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (AnnotationTypeElementDoc annoTypeElementDoc : Cawls.safeIterable(annoTypeElementDocs)) {
        retMe.add( processAnnotationTypeElementDoc( annoTypeElementDoc ) );
    }
    return retMe;
}
/**
 * @return the full JSON for the given annotation type element: the MethodDoc
 *         fields plus the element's default value; null if annoTypeElementDoc is null.
 */
protected JSONObject processAnnotationTypeElementDoc(AnnotationTypeElementDoc annoTypeElementDoc) {
    if (annoTypeElementDoc == null) {
        return null;
    }
    // name/qualifiedName/returnType already come from processMethodDoc.
    JSONObject retMe = processMethodDoc( annoTypeElementDoc );
    retMe.put("defaultValue", processAnnotationValue( annoTypeElementDoc.defaultValue() ) );
    return retMe;
}
/**
 * @return a JSON stub for the given annotation type element (member stub plus the
 *         element's return type stub); null if annoTypeElementDoc is null.
 */
protected JSONObject processAnnotationTypeElementDocStub(AnnotationTypeElementDoc annoTypeElementDoc) {
    if (annoTypeElementDoc == null) {
        return null;
    }
    JSONObject retMe = processMemberDocStub( annoTypeElementDoc );
    retMe.put("returnType", processTypeStub(annoTypeElementDoc.returnType()));
    return retMe;
}
/**
 * @return a JSON wrapper holding the string form of the given annotation value,
 *         or null if annoValue is null.
 */
protected JSONObject processAnnotationValue(AnnotationValue annoValue) {
    if (annoValue == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("toString", annoValue.toString());
    return json;
}
/**
 * @return full JSON objects for the given AnnotationDesc.ElementValuePair[]; an
 *         empty array if elementValues is null.
 */
protected JSONArray processAnnotationElementValues( AnnotationDesc.ElementValuePair[] elementValues) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (AnnotationDesc.ElementValuePair elementValue : Cawls.safeIterable(elementValues)) {
        retMe.add( processAnnotationElementValue( elementValue ));
    }
    return retMe;
}
/**
 * @return the full JSON for the given element/value pair: the element (as a stub)
 *         and its value; null if elementValue is null.
 */
protected JSONObject processAnnotationElementValue(AnnotationDesc.ElementValuePair elementValue) {
    if (elementValue == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("element", processAnnotationTypeElementDocStub(elementValue.element()));
    json.put("value", processAnnotationValue(elementValue.value()));
    return json;
}
/**
 * @return full JSON objects for the given TypeVariable[]; an empty array if
 *         typeVariables is null.
 */
protected JSONArray processTypeVariables( TypeVariable[] typeVariables) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (TypeVariable typeVariable : Cawls.safeIterable(typeVariables)) {
        retMe.add( processTypeVariable(typeVariable) );
    }
    return retMe;
}
/**
 * @return the full JSON for the given TypeVariable (the Type fields plus its
 *         bounds); null if typeVariable is null.
 */
protected JSONObject processTypeVariable(TypeVariable typeVariable) {
    if (typeVariable == null) {
        return null;
    }
    JSONObject json = processType(typeVariable);
    json.put("bounds", processTypes(typeVariable.bounds()));
    return json;
}
/**
 * @return full JSON objects for the given Type[]; an empty array if types is null.
 */
protected JSONArray processTypes(Type[] types) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (Type type : Cawls.safeIterable(types)) {
        retMe.add( processType(type) );
    }
    return retMe;
}
/**
 * @return the full JSON for the given Type: its names, dimension, and (when the
 *         type is one) parameterized-type and wildcard-type details; null if type
 *         is null.
 */
protected JSONObject processType(Type type) {
    if (type == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("qualifiedTypeName", type.qualifiedTypeName());
    json.put("simpleTypeName", type.simpleTypeName());
    json.put("typeName", type.typeName());
    json.put("toString", type.toString());
    json.put("dimension", type.dimension());
    // These are null unless the type actually is parameterized / a wildcard.
    json.put("parameterizedType", processParameterizedType(type.asParameterizedType()));
    json.put("wildcardType", processWildcardType(type.asWildcardType()));
    // TODO: add metaType=type/parameterizedType/wildcardType
    return json;
}
/**
 * @return a JSON stub for the given Type. Currently delegates to processType(),
 *         so the "stub" carries the full type JSON; null if type is null.
 */
protected JSONObject processTypeStub(Type type) {
    // Removed the superseded hand-rolled stub implementation that was left here
    // commented out; version control preserves it if ever needed.
    return processType(type);
}
/**
 * @return JSON holding the type arguments of the given ParameterizedType, or null
 *         if parameterizedType is null.
 */
protected JSONObject processParameterizedType(ParameterizedType parameterizedType) {
    if (parameterizedType == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("typeArguments", processTypes(parameterizedType.typeArguments()));
    return json;
}
/**
 * @return JSON holding the extends- and super-bounds of the given WildcardType,
 *         or null if wildcardType is null.
 */
protected JSONObject processWildcardType(WildcardType wildcardType) {
    if (wildcardType == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("extendsBounds", processTypes(wildcardType.extendsBounds()));
    json.put("superBounds", processTypes(wildcardType.superBounds()));
    return json;
}
/**
 * The Doc element is a supertype to the others (ClassDoc, PackageDoc, etc).
 *
 * @return JSON-mapping of the doc's javadoc meta data, or null if doc is null.
 */
protected JSONObject processDoc(Doc doc) {
    if (doc == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("name", doc.name());
    // commentText: raw text plus in-line tags; block tags excluded.
    json.put("commentText", doc.commentText());
    // rawCommentText: the entire javadoc comment, unprocessed.
    json.put("rawCommentText", doc.getRawCommentText());
    // tags: block tags only, no inline tags.
    json.put("tags", processTags(doc.tags()));
    // inlineTags: raw text + inline tags, no block tags; raw text is wrapped
    // in a Tag with kind="Text" and name="Text".
    json.put("inlineTags", processTags(doc.inlineTags()));
    json.put("seeTags", processTags(doc.seeTags()));
    json.put("firstSentenceTags", processTags(doc.firstSentenceTags()));
    json.put("metaType", determineMetaType(doc));
    return json;
}
/**
 * @return a JSON stub for the given Doc: its name, metaType, and first-sentence
 *         tags; null if doc is null.
 */
protected JSONObject processDocStub(Doc doc) {
    if (doc == null) {
        return null;
    }
    JSONObject json = new JSONObject();
    json.put("name", doc.name());
    json.put("metaType", determineMetaType(doc));
    json.put("firstSentenceTags", processTags(doc.firstSentenceTags()));
    return json;
}
/**
 * Classify the given Doc into a simple string label for the JSON output.
 *
 * @return The type that the given Doc represents, e.g. "class", "enum", "method", etc..
 */
protected String determineMetaType(Doc doc) {
// Order matters: the more specific kinds (enum, annotation type) are tested before
// the general isClass()/isInterface() checks.
if (doc.isEnum()) {
return "enum";
} else if (doc.isAnnotationType()) {
return "annotationType";
} else if (doc.isClass()) {
return "class";
} else if (doc.isInterface()) {
return "interface";
} else if (doc.isConstructor()) {
return "constructor";
} else if (doc.isMethod()) {
return "method";
} else if (doc.isField()) {
return "field";
} else if (doc.isEnumConstant()) {
return "enumConstant";
} else if (doc.isAnnotationTypeElement()) {
return "annotationTypeElement";
} else if (doc instanceof PackageDoc) {
// Packages are detected structurally via instanceof.
return "package";
} else {
return "unknown";
}
}
/**
 * @return full JSON objects for the given Tag[]; an empty array if tags is null.
 */
protected JSONArray processTags(Tag[] tags) {
    JSONArray tagsJson = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (Tag tag : Cawls.safeIterable(tags)) {
        tagsJson.add( processTag(tag) );
    }
    return tagsJson;
}
/**
 * @return the full JSON for the given Tag (name, kind, text), or null if tag is null.
 */
protected JSONObject processTag(Tag tag) {
    // Guard against null, consistent with processParamTag/processThrowsTag
    // (which both null-check before delegating here).
    if (tag == null) {
        return null;
    }
    JSONObject tagJson = new JSONObject();
    tagJson.put("name", tag.name());
    tagJson.put("kind", tag.kind());
    tagJson.put("text", tag.text());
    return tagJson;
}
/**
 * @return full JSON objects for the given ParamTag[]; an empty array if paramTags is null.
 */
protected JSONArray processParamTags(ParamTag[] paramTags) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (ParamTag paramTag : Cawls.safeIterable(paramTags)) {
        retMe.add(processParamTag(paramTag));
    }
    return retMe;
}
/**
 * @return the full JSON for the given ParamTag (the Tag fields plus the parameter
 *         comment and name); null if paramTag is null.
 */
protected JSONObject processParamTag(ParamTag paramTag) {
    if (paramTag == null) {
        return null;
    }
    JSONObject json = processTag(paramTag);
    json.put("parameterComment", paramTag.parameterComment());
    json.put("parameterName", paramTag.parameterName());
    return json;
}
/**
 * @return full JSON objects for the given ThrowsTag[]; an empty array if throwsTags is null.
 */
protected JSONArray processThrowsTags(ThrowsTag[] throwsTags) {
    JSONArray retMe = new JSONArray();
    // Null-tolerant iteration, consistent with getParamTag/getParamTagComment.
    for (ThrowsTag throwsTag : Cawls.safeIterable(throwsTags)) {
        retMe.add(processThrowsTag(throwsTag));
    }
    return retMe;
}
/**
 * @return the full JSON for the given ThrowsTag (the Tag fields plus the exception
 *         comment, name and type stub); null if throwsTag is null.
 */
protected JSONObject processThrowsTag(ThrowsTag throwsTag) {
    if (throwsTag == null) {
        return null;
    }
    JSONObject json = processTag(throwsTag);
    json.put("exceptionComment", throwsTag.exceptionComment());
    json.put("exceptionName", throwsTag.exceptionName());
    json.put("exceptionType", processTypeStub(throwsTag.exceptionType()));
    return json;
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2018 Chris Magnussen and Elior Boukhobza
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.chrisrm.idea.ui;
import com.intellij.ide.navigationToolbar.NavBarItem;
import com.intellij.ide.navigationToolbar.NavBarPanel;
import com.intellij.ide.navigationToolbar.ui.CommonNavBarUI;
import com.intellij.ide.ui.UISettings;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashMap;
import javax.swing.*;
import java.awt.*;
import java.awt.geom.*;
import java.awt.image.*;
import java.util.HashMap;
import java.util.Map;
/**
* @author Konstantin Bulenkov
*/
public class MTNavBarUI extends CommonNavBarUI {
// Per-item cache of pre-rendered breadcrumb images, keyed by item and render state.
// Cleared in clearItems(); entries otherwise live as long as the NavBarItem key.
private static final Map<NavBarItem, Map<ImageType, BufferedImage>> CACHE = new THashMap<>();
// Render state of a breadcrumb: selected / previous-to-selected / plain, crossed
// with the floating / toolbar-visible / no-toolbar navbar modes.
private enum ImageType {
INACTIVE,
NEXT_ACTIVE,
ACTIVE,
INACTIVE_FLOATING,
NEXT_ACTIVE_FLOATING,
ACTIVE_FLOATING,
INACTIVE_NO_TOOLBAR,
NEXT_ACTIVE_NO_TOOLBAR,
ACTIVE_NO_TOOLBAR
}
@Override
public void clearItems() {
super.clearItems();
// Drop all cached breadcrumb images so they are redrawn for the new items.
CACHE.clear();
}
// Intentionally empty: no custom panel background painting.
@Override
public void doPaintNavBarPanel(final Graphics2D g, final Rectangle r, final boolean mainToolbarVisible, final boolean undocked) {
}
@Override
public JBInsets getElementPadding() {
// Extra right padding leaves room for the arrow decoration.
return JBUI.insets(5, 0, 5, 15);
}
/**
 * Paint one navbar item: pick (or lazily render and cache) the breadcrumb image
 * for the item's current state, then paint the item's icon and text over it.
 */
@Override
public void doPaintNavBarItem(final Graphics2D g, final NavBarItem item, final NavBarPanel navbar) {
final boolean floating = navbar.isInFloatingMode();
final boolean toolbarVisible = UISettings.getInstance().getShowMainToolbar();
final boolean selected = item.isSelected() && item.isFocused();
final boolean nextSelected = item.isNextSelected() && navbar.isFocused();
// Determine the type of breadcrumb to draw
final ImageType type;
if (floating) {
type = selected ? ImageType.ACTIVE_FLOATING : nextSelected ? ImageType.NEXT_ACTIVE_FLOATING : ImageType.INACTIVE_FLOATING;
} else {
if (toolbarVisible) {
type = selected ? ImageType.ACTIVE : nextSelected ? ImageType.NEXT_ACTIVE : ImageType.INACTIVE;
} else {
type = selected ? ImageType.ACTIVE_NO_TOOLBAR : nextSelected ? ImageType.NEXT_ACTIVE_NO_TOOLBAR : ImageType.INACTIVE_NO_TOOLBAR;
}
}
final Map<ImageType, BufferedImage> cached = CACHE.computeIfAbsent(item, k -> new HashMap<>());
// Draw or use cache
final BufferedImage image = cached.computeIfAbsent(type, k -> drawToBuffer(item, floating, selected, navbar));
UIUtil.drawImage(g, image, 0, 0, null);
// Paint the icon vertically centered, then the text to its right.
final Icon icon = item.getIcon();
final int offset = getFirstElementLeftOffset();
final int iconOffset = getElementPadding().left + offset;
icon.paintIcon(item, g, iconOffset, (item.getHeight() - icon.getIconHeight()) / 2);
final int textOffset = icon.getIconWidth() + iconOffset + offset;
item.doPaintText(g, textOffset);
}
/**
 * Render the breadcrumb background image for one item: an arrow-ended shape,
 * optionally highlighted (when this item or the next one is selected), plus the
 * separator chevron at the right edge.
 */
private static BufferedImage drawToBuffer(final NavBarItem item,
final boolean floating,
final boolean selected,
final NavBarPanel navbar) {
final int w = item.getWidth();
final int h = item.getHeight();
// Geometry of the arrow-shaped right edge of the breadcrumb.
final int offset = (w - getDecorationOffset());
final int arrowXBegin = (w - (getDecorationOffset() / 2));
final int arrowYBegin = getDecorationHOffset();
final int arrowHeight = (h - 2 * getDecorationHOffset());
final int h2 = h / 2;
final Color highlightColor = UIManager.getColor("Focus.color");
final Color arrowColor = UIManager.getColor("MenuBar.foreground");
// The image we will build
final BufferedImage result = UIUtil.createImage(w, h, BufferedImage.TYPE_INT_ARGB);
final Graphics2D g2 = result.createGraphics();
g2.setStroke(new BasicStroke(1f, BasicStroke.CAP_BUTT, BasicStroke.JOIN_ROUND));
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
// Create the inner and outer shapes for the navbar item
shape: the body of the breadcrumb ending in a ">" point; endShape: the notched
// right-edge piece that visually belongs to the previous item.
final Path2D.Double shape = new Path2D.Double();
shape.moveTo(0, 0);
shape.lineTo(offset, 0);
shape.lineTo(w, h2);
shape.lineTo(offset, h);
shape.lineTo(0, h);
shape.closePath();
final Path2D.Double endShape = new Path2D.Double();
endShape.moveTo(offset, 0);
endShape.lineTo(w, 0);
endShape.lineTo(w, h);
endShape.lineTo(offset, h);
endShape.lineTo(w, h2);
endShape.closePath();
// Colorify the shape with the panel background
g2.setPaint(UIUtil.getPanelBackground());
g2.fill(shape);
g2.fill(endShape);
// If navigation item is selected, colorify with list background color and draw arrow in halo color
if (selected) {
final Path2D.Double focusShape = new Path2D.Double();
focusShape.moveTo(0, 1);
focusShape.lineTo(offset, 1);
focusShape.lineTo(w - 1, h2);
focusShape.lineTo(offset, h - 1);
focusShape.lineTo(0, h - 1);
g2.setColor(highlightColor);
if (floating && item.isLastElement()) {
// Last floating element: a plain filled rectangle, no arrow outline.
g2.fillRect(0, 0, w, h);
} else {
g2.fill(shape);
g2.draw(focusShape);
}
}
// Now go to the previous item and paint the end part as if it was part of the current item
if (item.isNextSelected() && navbar.isFocused()) {
g2.setColor(highlightColor);
g2.fill(endShape);
final Path2D.Double endFocusShape = new Path2D.Double();
endFocusShape.moveTo(w, 1);
endFocusShape.lineTo(offset, 1);
endFocusShape.lineTo(w - 1, h2);
endFocusShape.lineTo(offset, h - 1);
endFocusShape.lineTo(w, h - 1);
g2.setColor(highlightColor);
g2.draw(endFocusShape);
}
// Now draw the arrow
g2.translate(arrowXBegin, arrowYBegin);
final int off = (getDecorationOffset() / 2) - 1;
if (!floating || !item.isLastElement()) {
drawArrow(g2, arrowColor, off, arrowHeight);
}
g2.dispose();
return result;
}
// Width reserved for the arrow decoration at the item's right edge (scaled).
private static int getDecorationOffset() {
return JBUI.scale(14);
}
// Vertical inset of the arrow decoration (scaled).
private static int getDecorationHOffset() {
return JBUI.scale(9);
}
// Left offset applied before the first element's icon (scaled).
private static int getFirstElementLeftOffset() {
return JBUI.scale(6);
}
// Draw the ">" separator chevron with the given color and size.
private static void drawArrow(final Graphics2D g2d,
final Color arrowColor,
final int arrowWidth,
final int arrowHeight) {
final int xEnd = arrowWidth - 1;
g2d.setColor(arrowColor);
g2d.drawLine(0, 0, xEnd, arrowHeight / 2);
g2d.drawLine(xEnd, arrowHeight / 2, 0, arrowHeight);
// Shift left and draw a second pair of lines to thicken the chevron.
g2d.translate(-1, 0);
g2d.drawLine(2, 0, xEnd, arrowHeight / 2);
g2d.drawLine(xEnd, arrowHeight / 2, 2, arrowHeight);
}
}
| |
package mcjty.rftools.blocks.spawner;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import mcjty.lib.container.InventoryHelper;
import mcjty.lib.entity.GenericEnergyReceiverTileEntity;
import mcjty.lib.network.Argument;
import mcjty.lib.network.PacketServerCommand;
import mcjty.lib.varia.BlockTools;
import mcjty.lib.varia.Coordinate;
import mcjty.lib.varia.Logging;
import mcjty.rftools.RFTools;
import mcjty.rftools.network.RFToolsMessages;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraft.inventory.ISidedInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.AxisAlignedBB;
import net.minecraftforge.common.util.Constants;
import net.minecraftforge.common.util.ForgeDirection;
import java.util.Map;
public class MatterBeamerTileEntity extends GenericEnergyReceiverTileEntity implements ISidedInventory {
public static final int TICKTIME = 20;
public static String CMD_SETDESTINATION = "setDest";
private InventoryHelper inventoryHelper = new InventoryHelper(this, MatterBeamerContainer.factory, 1);
// The location of the destination spawner..
private Coordinate destination = null;
private int ticker = TICKTIME;
public MatterBeamerTileEntity() {
super(SpawnerConfiguration.BEAMER_MAXENERGY, SpawnerConfiguration.BEAMER_RECEIVEPERTICK);
}
@Override
protected void checkStateServer() {
int meta = worldObj.getBlockMetadata(xCoord, yCoord, zCoord);
if (!BlockTools.getRedstoneSignal(meta)) {
disableBlockGlow();
return;
}
ticker--;
if (ticker > 0) {
return;
}
ticker = TICKTIME;
TileEntity te = null;
if (destination != null) {
te = worldObj.getTileEntity(destination.getX(), destination.getY(), destination.getZ());
if (!(te instanceof SpawnerTileEntity)) {
setDestination(null);
return;
}
} else {
return;
}
ItemStack itemStack = inventoryHelper.getStackInSlot(0);
if (itemStack == null || itemStack.stackSize == 0) {
disableBlockGlow();
return;
}
SpawnerTileEntity spawnerTileEntity = (SpawnerTileEntity) te;
int maxblocks = (int) (SpawnerConfiguration.beamBlocksPerSend * (1.01 + getInfusedFactor() * 2.0));
int numblocks = Math.min(maxblocks, itemStack.stackSize);
int rf = (int) (SpawnerConfiguration.beamRfPerObject * numblocks * (4.0f - getInfusedFactor()) / 4.0f);
if (getEnergyStored(ForgeDirection.DOWN) < rf) {
return;
}
consumeEnergy(rf);
spawnerTileEntity.addMatter(itemStack, numblocks);
inventoryHelper.decrStackSize(0, numblocks);
enableBlockGlow();
}
private void disableBlockGlow() {
// Bit 0 is active, bit 3 is redstone.
int meta = worldObj.getBlockMetadata(xCoord, yCoord, zCoord);
if ((meta & 1) != 0) {
worldObj.setBlockMetadataWithNotify(xCoord, yCoord, zCoord, (meta & ~1), 3);
}
}
private void enableBlockGlow() {
int meta = worldObj.getBlockMetadata(xCoord, yCoord, zCoord);
if ((meta & 1) == 0) {
worldObj.setBlockMetadataWithNotify(xCoord, yCoord, zCoord, meta | 1, 3);
}
}
@Override
public boolean shouldRenderInPass(int pass) {
return pass == 1;
}
@SideOnly(Side.CLIENT)
@Override
public AxisAlignedBB getRenderBoundingBox() {
return AxisAlignedBB.getBoundingBox(xCoord - 4, yCoord - 4, zCoord - 4, xCoord + 5, yCoord + 5, zCoord + 5);
}
// Called from client side when a wrench is used.
public void useWrench(EntityPlayer player) {
Coordinate thisCoord = new Coordinate(xCoord, yCoord, zCoord);
Coordinate coord = RFTools.instance.clientInfo.getSelectedTE();
TileEntity tileEntity = null;
if (coord != null) {
tileEntity = worldObj.getTileEntity(coord.getX(), coord.getY(), coord.getZ());
}
if (!(tileEntity instanceof MatterBeamerTileEntity)) {
// None selected. Just select this one.
RFTools.instance.clientInfo.setSelectedTE(thisCoord);
SpawnerTileEntity destinationTE = getDestinationTE();
if (destinationTE == null) {
RFTools.instance.clientInfo.setDestinationTE(null);
} else {
RFTools.instance.clientInfo.setDestinationTE(new Coordinate(destinationTE.xCoord, destinationTE.yCoord, destinationTE.zCoord));
}
Logging.message(player, "Select a spawner as destination");
} else if (coord.equals(thisCoord)) {
// Unselect this one.
RFTools.instance.clientInfo.setSelectedTE(null);
RFTools.instance.clientInfo.setDestinationTE(null);
setDestination(null);
Logging.message(player, "Destination cleared!");
}
}
public void setDestination(Coordinate destination) {
this.destination = destination;
disableBlockGlow();
markDirty();
if (worldObj.isRemote) {
// We're on the client. Send change to server.
RFToolsMessages.INSTANCE.sendToServer(new PacketServerCommand(xCoord, yCoord, zCoord,
MatterBeamerTileEntity.CMD_SETDESTINATION,
new Argument("dest", destination)));
} else {
worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
}
}
public Coordinate getDestination() {
return destination;
}
/**
* Get the current destination. This function checks first if that destination is
* still valid and if not it is reset to null (i.e. the destination was removed).
* @return the destination TE or null if there is no valid one
*/
private SpawnerTileEntity getDestinationTE() {
if (destination == null) {
return null;
}
TileEntity te = worldObj.getTileEntity(destination.getX(), destination.getY(), destination.getZ());
if (te instanceof SpawnerTileEntity) {
return (SpawnerTileEntity) te;
} else {
destination = null;
worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
return null;
}
}
@Override
public void readFromNBT(NBTTagCompound tagCompound) {
super.readFromNBT(tagCompound);
destination = Coordinate.readFromNBT(tagCompound, "dest");
}
@Override
public void readRestorableFromNBT(NBTTagCompound tagCompound) {
super.readRestorableFromNBT(tagCompound);
readBufferFromNBT(tagCompound);
}
private void readBufferFromNBT(NBTTagCompound tagCompound) {
NBTTagList bufferTagList = tagCompound.getTagList("Items", Constants.NBT.TAG_COMPOUND);
for (int i = 0 ; i < bufferTagList.tagCount() ; i++) {
NBTTagCompound nbtTagCompound = bufferTagList.getCompoundTagAt(i);
inventoryHelper.setStackInSlot(i, ItemStack.loadItemStackFromNBT(nbtTagCompound));
}
}
@Override
public void writeToNBT(NBTTagCompound tagCompound) {
super.writeToNBT(tagCompound);
Coordinate.writeToNBT(tagCompound, "dest", destination);
}
@Override
public void writeRestorableToNBT(NBTTagCompound tagCompound) {
super.writeRestorableToNBT(tagCompound);
writeBufferToNBT(tagCompound);
}
private void writeBufferToNBT(NBTTagCompound tagCompound) {
NBTTagList bufferTagList = new NBTTagList();
for (int i = 0 ; i < inventoryHelper.getCount() ; i++) {
ItemStack stack = inventoryHelper.getStackInSlot(i);
NBTTagCompound nbtTagCompound = new NBTTagCompound();
if (stack != null) {
stack.writeToNBT(nbtTagCompound);
}
bufferTagList.appendTag(nbtTagCompound);
}
tagCompound.setTag("Items", bufferTagList);
}
@Override
public int[] getAccessibleSlotsFromSide(int side) {
return MatterBeamerContainer.factory.getAccessibleSlots();
}
@Override
public boolean canInsertItem(int index, ItemStack item, int side) {
return MatterBeamerContainer.factory.isInputSlot(index);
}
@Override
public boolean canExtractItem(int index, ItemStack item, int side) {
return MatterBeamerContainer.factory.isOutputSlot(index);
}
@Override
public int getSizeInventory() {
return inventoryHelper.getCount();
}
@Override
public ItemStack getStackInSlot(int index) {
return inventoryHelper.getStackInSlot(index);
}
@Override
public ItemStack decrStackSize(int index, int amount) {
return inventoryHelper.decrStackSize(index, amount);
}
@Override
public ItemStack getStackInSlotOnClosing(int index) {
return null;
}
@Override
public void setInventorySlotContents(int index, ItemStack stack) {
inventoryHelper.setInventorySlotContents(getInventoryStackLimit(), index, stack);
}
@Override
public String getInventoryName() {
return "Beamer Inventory";
}
@Override
public boolean hasCustomInventoryName() {
    // No player-assigned name; the default inventory name is used.
    return false;
}
@Override
public int getInventoryStackLimit() {
    // Standard maximum stack size.
    return 64;
}
@Override
public boolean isUseableByPlayer(EntityPlayer player) {
    // No distance or ownership check — any player may use this inventory.
    return true;
}
@Override
public void openInventory() {
    // No action required when the inventory is opened.
}
@Override
public void closeInventory() {
    // No action required when the inventory is closed.
}
@Override
public boolean isItemValidForSlot(int index, ItemStack stack) {
    // Any item is accepted in any slot.
    return true;
}
/**
 * Dispatches a server-side command. The superclass gets the first chance;
 * this class only handles {@code CMD_SETDESTINATION}, which updates the
 * beamer's destination coordinate from the "dest" argument.
 *
 * @return true if the command was handled
 */
@Override
public boolean execute(EntityPlayerMP playerMP, String command, Map<String, Argument> args) {
    if (super.execute(playerMP, command, args)) {
        return true;
    }
    if (!CMD_SETDESTINATION.equals(command)) {
        return false;
    }
    setDestination(args.get("dest").getCoordinate());
    return true;
}
}
| |
/*
* IzPack - Copyright 2001-2012 Julien Ponge, All Rights Reserved.
*
* http://izpack.org/
* http://izpack.codehaus.org/
*
* Copyright 2012 Tim Anderson
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.izforge.izpack.compiler.packager.impl;
import com.izforge.izpack.api.data.Blockable;
import com.izforge.izpack.api.data.OverrideType;
import com.izforge.izpack.api.data.Pack;
import com.izforge.izpack.api.data.PackInfo;
import com.izforge.izpack.merge.MergeManager;
import org.apache.commons.io.IOUtils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarInputStream;
import java.util.jar.JarOutputStream;
import static org.junit.Assert.*;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
/**
 * Base test case for {@code PackagerBase} implementations, verifying manifest
 * writing and pack size calculation.
 *
 * @author Tim Anderson
 */
public abstract class AbstractPackagerTest
{
    /**
     * The merge manager.
     */
    private MergeManager mergeManager;

    /**
     * Creates a fresh mock merge manager before each test.
     */
    @Before
    public void setUp()
    {
        mergeManager = mock(MergeManager.class);
    }

    /**
     * Verifies that writing the manifest merges it into the installer jar,
     * even when no splash screen is configured.
     *
     * @throws IOException for any I/O error
     */
    @Test
    public void noSplash() throws IOException
    {
        PackagerBase packager = createPackager(Mockito.mock(JarOutputStream.class), mergeManager);
        packager.writeManifest();
        verify(mergeManager).addResourceToMerge(anyString(), eq("META-INF/MANIFEST.MF"));
    }

    /**
     * Verifies that writing the manifest merges a resource, even when no GUI
     * preferences are configured.
     *
     * @throws IOException for any I/O error
     */
    @Test
    public void noGuiPrefs() throws IOException
    {
        PackagerBase packager = createPackager(Mockito.mock(JarOutputStream.class), mergeManager);
        packager.writeManifest();
        verify(mergeManager).addResourceToMerge(anyString(), anyString());
    }

    /**
     * Verifies that the pack size can be specified.
     * <p/>
     * Given:
     * <ul>
     * <li>{@code size} = the specified pack size; and</li>
     * <li>{@code fileSize} = the total size of all files in the pack</li>
     * </ul>
     * The Pack.getSize() method will return:
     * <ul>
     * <li>{@code size} if {@code size > fileSize}</li>
     * <li>{@code fileSize} if {@code size < fileSize}</li>
     * </ul>
     *
     * @throws Exception for any error
     */
    @Test
    public void testSize() throws Exception
    {
        File file = createTextFile("This is a test");
        long size = 1000000;
        long fileSize = file.length();

        // pack size not specified - should be set to fileSize
        checkSize(fileSize, fileSize, 0, file);

        // pack size specified, with no files. Should be set to pack size
        checkSize(size, 0, size);

        // pack size specified, and > file size. Should be set to pack size
        checkSize(size, fileSize, size, file);

        // pack size specified, and < file size. Should be set to fileSize
        long tooSmall = fileSize - 1;
        checkSize(fileSize, fileSize, tooSmall, file);

        assertTrue(file.delete());
    }

    /**
     * Helper to create a packager that writes to the provided jar.
     *
     * @param jar          the jar stream
     * @param mergeManager the merge manager
     * @return a new packager
     */
    protected abstract PackagerBase createPackager(JarOutputStream jar, MergeManager mergeManager);

    /**
     * Verifies that the pack size is calculated correctly, by building an
     * installer jar and deserializing the pack metadata from it.
     *
     * @param expectedSize     the expected pack size
     * @param expectedFileSize the expected total file size
     * @param size             the pack size. May be {@code 0}
     * @param files            the pack files
     * @throws Exception for any error
     */
    private void checkSize(long expectedSize, long expectedFileSize, long size, File... files) throws Exception
    {
        File jar = File.createTempFile("installer", ".jar");
        JarOutputStream output = new JarOutputStream(new FileOutputStream(jar));
        PackagerBase packager = createPackager(output, mergeManager);
        PackInfo packInfo = new PackInfo("Core", "Core", null, true, false, null, true, size);
        long fileSize = 0;
        for (File file : files)
        {
            packInfo.addFile(file.getParentFile(), file, "$INSTALL_PATH/" + file.getName(), null,
                             OverrideType.OVERRIDE_TRUE, null, Blockable.BLOCKABLE_NONE, null, null, null);
            fileSize += file.length();
        }
        packager.addPack(packInfo);
        packager.createInstaller();
        InputStream jarEntry = getJarEntry("resources/packs.info", jar);
        ObjectInputStream packStream = new ObjectInputStream(jarEntry);
        try
        {
            @SuppressWarnings("unchecked")
            List<PackInfo> packsInfo = (List<PackInfo>) packStream.readObject();
            assertEquals(1, packsInfo.size());
            Pack pack = packsInfo.get(0).getPack();
            assertEquals(expectedSize, pack.getSize());
            assertEquals(expectedFileSize, fileSize);
        }
        finally
        {
            // close in a finally block so a failed assertion doesn't leak the streams
            IOUtils.closeQuietly(packStream);
            IOUtils.closeQuietly(jarEntry);
        }
        assertTrue(jar.delete());
    }

    /**
     * Helper to return a stream to the content of a jar entry.
     * <p/>
     * On success the returned stream is owned by the caller, who must close it.
     *
     * @param name the name of the entry
     * @param jar  the jar
     * @return a stream to the content
     * @throws IOException for any I/O error
     */
    private InputStream getJarEntry(String name, File jar) throws IOException
    {
        JarInputStream input = new JarInputStream(new FileInputStream(jar));
        try
        {
            JarEntry entry;
            while ((entry = input.getNextJarEntry()) != null)
            {
                if (entry.getName().equals(name))
                {
                    // transfer ownership of the stream to the caller
                    JarInputStream result = input;
                    input = null;
                    return result;
                }
            }
        }
        finally
        {
            // close on the not-found and error paths; the found path nulled the ref
            IOUtils.closeQuietly(input);
        }
        fail("Failed to find jar entry: " + name);
        return null;
    }

    /**
     * Helper to create a temporary text file containing the specified text.
     *
     * @param text the text
     * @return the new file
     * @throws IOException for any I/O error
     */
    private File createTextFile(String text) throws IOException
    {
        File file = File.createTempFile("data", ".txt");
        PrintStream printStream = new PrintStream(file);
        try
        {
            printStream.print(text);
        }
        finally
        {
            // always release the stream, even if the write fails
            printStream.close();
        }
        return file;
    }

    /**
     * Returns the project base directory, derived from the class-path root of
     * the compiled test classes ({@code <root>/target/test-classes}).
     *
     * @return the base directory, or {@code null} if an assertion fails first
     */
    public static File getBaseDir()
    {
        File path = null;
        try
        {
            URL url = AbstractPackagerTest.class.getClassLoader().getResource("");
            if (url != null)
            {
                URI uri = url.toURI();
                path = new File(uri);
                // path: <root>/target/test-classes
                path = path.getParentFile(); // <root>/target/
                path = path.getParentFile(); // <root>
            }
            else
            {
                Assert.fail("Resource not found");
            }
        }
        catch (URISyntaxException e)
        {
            Assert.fail(e.getMessage());
        }
        return path;
    }
}
| |
/*
* Copyright 2013-2022 Erudika. https://erudika.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For issues and patches go to: https://github.com/erudika
*/
package com.erudika.para.core;
import com.erudika.para.core.utils.CoreUtils;
import com.erudika.para.core.utils.ParaObjectUtils;
import com.erudika.para.core.annotations.Locked;
import com.erudika.para.core.annotations.Stored;
import com.erudika.para.core.utils.Pager;
import com.erudika.para.core.utils.Para;
import com.erudika.para.core.utils.Utils;
import com.fasterxml.jackson.annotation.JsonAnyGetter;
import com.fasterxml.jackson.annotation.JsonAnySetter;
import java.io.Serializable;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import org.apache.commons.lang3.StringUtils;
/**
* A generic system class for storing data.
* It is essentially a map of keys and values.
* @author Alex Bogdanovski [alex@erudika.com]
*/
public class Sysprop implements ParaObject, Serializable {
    private static final long serialVersionUID = 1L;

    // Identity and system fields — @Locked fields cannot be modified by clients.
    @Stored @Locked private String id;
    @Stored @Locked private Long timestamp;
    @Stored @Locked private String type;
    @Stored @Locked private String appid;
    @Stored @Locked private String parentid;
    @Stored @Locked private String creatorid;
    @Stored private Long updated;
    @Stored private String name;
    @Stored private List<String> tags;
    @Stored private Integer votes;
    @Stored private Long version;
    @Stored private Boolean stored;
    @Stored private Boolean indexed;
    @Stored private Boolean cached;
    // Free-form key/value payload, flattened into JSON via @JsonAnyGetter/@JsonAnySetter.
    @Stored private Map<String, Object> properties;

    /**
     * No-args constructor.
     */
    public Sysprop() {
        this(null);
    }

    /**
     * The default constructor.
     * @param id the object id
     */
    public Sysprop(String id) {
        setId(id);
        setName(getName());
    }

    /**
     * Adds a new key/value pair to the map. Blank keys and null values are ignored.
     * @param name a key
     * @param value a value
     * @return this
     */
    @JsonAnySetter
    public Sysprop addProperty(String name, Object value) {
        if (!StringUtils.isBlank(name) && value != null) {
            getProperties().put(name, value);
        }
        return this;
    }

    /**
     * Returns the value of a property for a given key.
     * @param name the key
     * @return the value, or null if the key is blank or absent
     */
    public Object getProperty(String name) {
        if (!StringUtils.isBlank(name)) {
            return getProperties().get(name);
        }
        return null;
    }

    /**
     * Removes a property from the map. Blank keys are ignored.
     * @param name the key
     * @return this
     */
    public Sysprop removeProperty(String name) {
        if (!StringUtils.isBlank(name)) {
            getProperties().remove(name);
        }
        return this;
    }

    /**
     * Checks for the existence of a property.
     * @param name the key
     * @return true if a property with this key exists
     */
    public boolean hasProperty(String name) {
        if (StringUtils.isBlank(name)) {
            return false;
        }
        return getProperties().containsKey(name);
    }

    /**
     * A map of all properties (key/values). Lazily initialized — never null.
     * @return a map
     */
    @JsonAnyGetter
    public Map<String, Object> getProperties() {
        if (properties == null) {
            properties = new HashMap<>();
        }
        return properties;
    }

    /**
     * Overwrites the map.
     * @param properties a new map
     */
    public void setProperties(Map<String, Object> properties) {
        this.properties = properties;
    }

    ////////////////////////////////////////////////////////

    @Override
    public final String getId() {
        return id;
    }

    @Override
    public final void setId(String id) {
        this.id = id;
    }

    @Override
    public final String getType() {
        // lazily default to the type derived from this class's name
        type = (type == null) ? Utils.type(this.getClass()) : type;
        return type;
    }

    @Override
    public final void setType(String type) {
        this.type = type;
    }

    @Override
    public String getAppid() {
        // lazily default to the root app identifier
        appid = (appid == null) ? Para.getConfig().getRootAppIdentifier() : appid;
        return appid;
    }

    @Override
    public void setAppid(String appid) {
        this.appid = appid;
    }

    @Override
    public String getObjectURI() {
        return CoreUtils.getInstance().getObjectURI(this);
    }

    @Override
    public List<String> getTags() {
        return tags;
    }

    @Override
    public void setTags(List<String> tags) {
        this.tags = tags;
    }

    @Override
    public Boolean getStored() {
        // defaults to true when unset
        if (stored == null) {
            stored = true;
        }
        return stored;
    }

    @Override
    public void setStored(Boolean stored) {
        this.stored = stored;
    }

    @Override
    public Boolean getIndexed() {
        // defaults to true when unset
        if (indexed == null) {
            indexed = true;
        }
        return indexed;
    }

    @Override
    public void setIndexed(Boolean indexed) {
        this.indexed = indexed;
    }

    @Override
    public Boolean getCached() {
        // defaults to true when unset
        if (cached == null) {
            cached = true;
        }
        return cached;
    }

    @Override
    public void setCached(Boolean cached) {
        this.cached = cached;
    }

    @Override
    public Long getTimestamp() {
        // a zero timestamp is treated as "not set"
        return (timestamp != null && timestamp != 0) ? timestamp : null;
    }

    @Override
    public void setTimestamp(Long timestamp) {
        this.timestamp = timestamp;
    }

    @Override
    public String getCreatorid() {
        return creatorid;
    }

    @Override
    public void setCreatorid(String creatorid) {
        this.creatorid = creatorid;
    }

    @Override
    public final String getName() {
        return CoreUtils.getInstance().getName(name, id);
    }

    @Override
    public final void setName(String name) {
        // null clears the name; an empty string keeps the previous value
        this.name = (name == null || !name.isEmpty()) ? name : this.name;
    }

    @Override
    public String getPlural() {
        return Utils.singularToPlural(getType());
    }

    @Override
    public String getParentid() {
        return parentid;
    }

    @Override
    public void setParentid(String parentid) {
        this.parentid = parentid;
    }

    @Override
    public Long getUpdated() {
        // a zero update time is treated as "not set"
        return (updated != null && updated != 0) ? updated : null;
    }

    @Override
    public void setUpdated(Long updated) {
        this.updated = updated;
    }

    @Override
    public String create() {
        return CoreUtils.getInstance().getDao().create(getAppid(), this);
    }

    @Override
    public void update() {
        CoreUtils.getInstance().getDao().update(getAppid(), this);
    }

    @Override
    public void delete() {
        CoreUtils.getInstance().getDao().delete(getAppid(), this);
    }

    @Override
    public boolean exists() {
        return CoreUtils.getInstance().getDao().read(getAppid(), getId()) != null;
    }

    @Override
    public boolean voteUp(String userid) {
        return CoreUtils.getInstance().vote(this, userid, VoteValue.UP);
    }

    @Override
    public boolean voteDown(String userid) {
        return CoreUtils.getInstance().vote(this, userid, VoteValue.DOWN);
    }

    @Override
    public Integer getVotes() {
        return (votes == null) ? 0 : votes;
    }

    @Override
    public void setVotes(Integer votes) {
        this.votes = votes;
    }

    @Override
    public Long getVersion() {
        return (version == null) ? 0 : version;
    }

    @Override
    public void setVersion(Long version) {
        this.version = version;
    }

    @Override
    public Long countLinks(String type2) {
        return CoreUtils.getInstance().countLinks(this, type2);
    }

    @Override
    public List<Linker> getLinks(String type2, Pager... pager) {
        return CoreUtils.getInstance().getLinks(this, type2, pager);
    }

    @Override
    public <P extends ParaObject> List<P> getLinkedObjects(String type, Pager... pager) {
        return CoreUtils.getInstance().getLinkedObjects(this, type, pager);
    }

    @Override
    public <P extends ParaObject> List<P> findLinkedObjects(String type, String field, String query, Pager... pager) {
        return CoreUtils.getInstance().findLinkedObjects(this, type, field, query, pager);
    }

    @Override
    public boolean isLinked(String type2, String id2) {
        return CoreUtils.getInstance().isLinked(this, type2, id2);
    }

    @Override
    public boolean isLinked(ParaObject toObj) {
        return CoreUtils.getInstance().isLinked(this, toObj);
    }

    @Override
    public String link(String id2) {
        return CoreUtils.getInstance().link(this, id2);
    }

    @Override
    public void unlink(String type, String id2) {
        CoreUtils.getInstance().unlink(this, type, id2);
    }

    @Override
    public void unlinkAll() {
        CoreUtils.getInstance().unlinkAll(this);
    }

    @Override
    public Long countChildren(String type) {
        return CoreUtils.getInstance().countChildren(this, type);
    }

    @Override
    public <P extends ParaObject> List<P> getChildren(String type, Pager... pager) {
        return CoreUtils.getInstance().getChildren(this, type, pager);
    }

    @Override
    public <P extends ParaObject> List<P> getChildren(String type, String field, String term, Pager... pager) {
        return CoreUtils.getInstance().getChildren(this, type, field, term, pager);
    }

    @Override
    public <P extends ParaObject> List<P> findChildren(String type, String query, Pager... pager) {
        return CoreUtils.getInstance().findChildren(this, type, query, pager);
    }

    @Override
    public void deleteChildren(String type) {
        CoreUtils.getInstance().deleteChildren(this, type);
    }

    @Override
    public int hashCode() {
        // Hash only the id: equals() compares ids alone, so including the name
        // here would give equal objects different hash codes and break the
        // equals/hashCode contract.
        int hash = 7;
        hash = 67 * hash + Objects.hashCode(this.id);
        return hash;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null) {
            return false;
        }
        if (getClass() != obj.getClass()) {
            return false;
        }
        // two objects of the same class are equal iff their ids are equal
        final ParaObject other = (ParaObject) obj;
        if (!Objects.equals(this.id, other.getId())) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return ParaObjectUtils.toJSON(this);
    }
}
| |
package com.dreamfighter.android.manager;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.ConnectivityManager;
import android.net.NetworkInfo.DetailedState;
import android.os.AsyncTask;
import android.os.AsyncTask.Status;
import android.os.Build;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
import com.dreamfighter.android.R;
import com.dreamfighter.android.entity.ProxyConfiguration;
import com.dreamfighter.android.enums.DownloadInfo;
import com.dreamfighter.android.enums.RequestInfo;
import com.dreamfighter.android.enums.ResponseType;
import com.dreamfighter.android.log.Logger;
import com.dreamfighter.android.utils.HttpUtils;
import org.apache.http.Header;
import org.apache.http.HttpHost;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.CookieStore;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.conn.ClientConnectionManager;
import org.apache.http.conn.params.ConnRouteParams;
import org.apache.http.conn.scheme.PlainSocketFactory;
import org.apache.http.conn.scheme.Scheme;
import org.apache.http.conn.scheme.SchemeRegistry;
import org.apache.http.cookie.Cookie;
import org.apache.http.entity.StringEntity;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.impl.conn.SingleClientConnManager;
import org.apache.http.message.BasicHeader;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.params.BasicHttpParams;
import org.apache.http.params.HttpConnectionParams;
import org.apache.http.params.HttpParams;
import org.apache.http.protocol.HTTP;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.net.UnknownHostException;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableKeyException;
import java.security.cert.CertificateException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* this class is for managing request to server
* can add header, raw post, or upload
* you can add listener too to callback after you get response from server
*
* @author fitra.bayu
* @version 1.0
*
*
*/
public class RequestManager {
// Supported POST body encodings.
public static final String POST_TYPE_RAW= "raw";
public static final String POST_TYPE_FORM= "form";
// Common Content-Type header values.
public static final String CONTENT_TYPE_JSON= "application/json";
public static final String CONTENT_TYPE_HTML= "text/html";
// Target URL and optional raw POST body.
private String urlString;
private String rawStringPost;
// Request state flags.
private boolean post = false;
private boolean finish = true;
private boolean success = false;
private CookieStore cookieStore;
// Form parameters sent with POST requests.
private List<NameValuePair> postParams = new ArrayList<NameValuePair>();
private Boolean connection = true;
// Result holders — which one is filled depends on responseType.
private Bitmap bitmap = null;
private ResponseType responseType = ResponseType.STRING;
private String postType = POST_TYPE_FORM;
private String contentType = CONTENT_TYPE_HTML;
private String resultString = null;
// Download bookkeeping (file name, total size, bytes received so far).
private String filename = null;
private Long filesize = 0l;
private Long currentDownload = 0l;
private DownloadInfo downloadInfo;
private RequestInfo requestInfo;
// Upload state: source stream or file, and the multipart field name.
private boolean upload;
private InputStream inputStreamUpload;
private File fileUpload;
// Globally enables the proxy-authentication dialog flow.
public static boolean ACTIVATED_PROXY_AUTH = true;
private boolean printResponseHeader = false;
// listener to specify callback
private RequestListeners requestListeners;
private Context context;
private String fileUploadName = "FILE";
private HttpUtils httpUtils;
// Extra HTTP headers applied to every request.
private Map<String,String> listHeader = new HashMap<String, String>();
// Background task executing the request; AsyncTasks are single-shot.
private RequestTask requestTask;
// Shared proxy-credentials dialog (one per process).
private static Dialog dialogProxyAuth = null;
//private Long intervalUpdateProgress = 0l;
private CustomRequest customRequest;
// HTTPS handling and timeout configuration.
private boolean secure = false;
private long timeout = 60000;
private boolean requestCancel = false;

// Callback that hands the raw response stream to the caller.
public interface CustomRequest{
    void onRequest(InputStream is);
}
/**
 * Creates a request manager and immediately probes the system proxy
 * configuration (may show a credentials dialog).
 * @param context Android context used for system services and dialogs
 */
public RequestManager(Context context){
    this.context = context;
    requestTask = new RequestTask();
    checkProxyConfiguration();
}
/**
 * Creates a request manager with a preset URL.
 * NOTE(review): unlike the single-arg constructor, this one does NOT call
 * checkProxyConfiguration() — confirm whether that is intentional.
 * @param context Android context used for system services and dialogs
 * @param url the URL to request
 */
public RequestManager(Context context, String url){
    this.urlString= url;
    this.context = context;
    requestTask = new RequestTask();
}
/**
 * Starts the request on a background task using the previously set URL.
 * A used AsyncTask cannot be re-executed, so a fresh task is created when
 * the current one is already running or finished.
 */
@SuppressLint("NewApi")
public void request(){
    Status status = requestTask.getStatus();
    if (status.equals(Status.RUNNING) || status.equals(Status.FINISHED)) {
        requestTask = new RequestTask();
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        // Honeycomb+ serializes AsyncTasks by default; run on the pool instead.
        requestTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } else {
        requestTask.execute();
    }
}
/**
 * Starts the request, asking for the response to be delivered as the given
 * type. See {@link #request()} for the task-recycling behavior.
 * @param responseType how the response body should be interpreted
 */
@SuppressLint("NewApi")
public void request(ResponseType responseType){
    this.responseType = responseType;
    Status status = requestTask.getStatus();
    if (status.equals(Status.RUNNING) || status.equals(Status.FINISHED)) {
        requestTask = new RequestTask();
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        // Honeycomb+ serializes AsyncTasks by default; run on the pool instead.
        requestTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } else {
        requestTask.execute();
    }
}
/**
 * Starts the request against the given URL. See {@link #request()} for the
 * task-recycling behavior.
 * @param url the URL to request
 */
@SuppressLint("NewApi")
public void request(String url){
    this.urlString = url;
    Status status = requestTask.getStatus();
    if (status.equals(Status.RUNNING) || status.equals(Status.FINISHED)) {
        requestTask = new RequestTask();
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        // Honeycomb+ serializes AsyncTasks by default; run on the pool instead.
        requestTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    } else {
        requestTask.execute();
    }
}
/**
 * Detects the system HTTP proxy and pushes it into the shared HttpUtils
 * configuration. Proxy detection is only attempted when proxy-auth support
 * is enabled AND the device is connected via Wi-Fi. If a proxy is found but
 * no stored credentials exist for it, a login dialog is shown.
 */
@SuppressWarnings("deprecation")
public void checkProxyConfiguration(){
    final ConnectivityManager connMgr = (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
    final android.net.NetworkInfo wifi = connMgr.getNetworkInfo(ConnectivityManager.TYPE_WIFI);
    boolean isUsingWifi = false;
    if( wifi.isAvailable() && wifi.getDetailedState() == DetailedState.CONNECTED){
        isUsingWifi = true;
    }
    if(!ACTIVATED_PROXY_AUTH || !isUsingWifi){
        return;
    }
    // ICS+ exposes the proxy via system properties; older releases use the
    // deprecated android.net.Proxy API.
    boolean IS_ICS_OR_LATER = Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH;
    String proxyAddress;
    int proxyPort;
    if( IS_ICS_OR_LATER ){
        proxyAddress = System.getProperty( "http.proxyHost" );
        String portStr = System.getProperty( "http.proxyPort" );
        // -1 means "no port configured"
        proxyPort = Integer.parseInt( ( portStr != null ? portStr : "-1" ) );
    }else{
        proxyAddress = android.net.Proxy.getHost( getContext() );
        proxyPort = android.net.Proxy.getPort( getContext() );
    }
    if(!"".equals(proxyAddress) && proxyAddress!=null){
        // A proxy is configured: enable it globally and look up stored credentials.
        HttpUtils.instance.setUseProxy(true);
        HttpUtils.instance.setHostname(proxyAddress);
        HttpUtils.instance.setPort(proxyPort);
        ProxyConfigurationManager proxyManager = new ProxyConfigurationManager(getContext());
        ProxyConfiguration proxyConfiguration = proxyManager.getProxyConfigurationByAddressAndPort(proxyAddress, proxyPort);
        //Logger.log("proxyConfiguration=>"+proxyConfiguration);
        if(proxyConfiguration==null){
            // No stored credentials for this proxy yet — prompt the user once,
            // reusing the process-wide dialog if it already exists.
            try{
                if(dialogProxyAuth==null){
                    dialogProxyAuth = createDialog(proxyAddress,proxyPort);
                    dialogProxyAuth.show();
                }else if(!dialogProxyAuth.isShowing()){
                    dialogProxyAuth.show();
                }
            }catch(RuntimeException e){
                // Showing a dialog can fail at runtime (e.g. bad window token);
                // treated as best-effort here.
                e.printStackTrace();
            }
        }else if(proxyConfiguration.getUseProxyAuth()){
            // Stored credentials found — apply them.
            HttpUtils.instance.setUseAuthProxy(proxyConfiguration.getUseProxyAuth());
            HttpUtils.instance.setUsername(proxyConfiguration.getProxyUserName());
            HttpUtils.instance.setPassword(proxyConfiguration.getProxyPassword());
        }else{
            HttpUtils.instance.setUseAuthProxy(proxyConfiguration.getUseProxyAuth());
        }
    }else{
        HttpUtils.instance.setUseProxy(false);
    }
    //Logger.log("proxy address=>"+proxyAddress);
    //Logger.log("proxy port=>"+proxyPort);
}
/**
 * Builds the proxy sign-in dialog. On "sign in" the entered username and
 * password are saved (or updated) for the given proxy address/port; an empty
 * username disables proxy authentication for that proxy.
 *
 * @param proxyAddress the proxy host the credentials belong to
 * @param proxyPort the proxy port
 * @return the dialog, not yet shown
 */
@SuppressLint("InflateParams")
public Dialog createDialog(final String proxyAddress,final int proxyPort) {
    AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
    // Get the layout inflater
    LayoutInflater inflater = LayoutInflater.from(getContext());
    View view = inflater.inflate(R.layout.df_proxy_auth_layout, null, false);
    final EditText username = (EditText)view.findViewById(R.id.df_username);
    final EditText password = (EditText)view.findViewById(R.id.df_password);
    TextView textViewLabel = (TextView) view.findViewById(R.id.label_textview);
    textViewLabel.setText(getContext().getString(R.string.df_proxy_auth,proxyAddress));
    // Inflate and set the layout for the dialog
    // Pass null as the parent view because its going in the dialog layout
    builder.setView(view)
    // Add action buttons
           .setPositiveButton(R.string.df_signin, new DialogInterface.OnClickListener() {
               @Override
               public void onClick(DialogInterface dialog, int id) {
                   ProxyConfigurationManager proxyManager = new ProxyConfigurationManager(getContext());
                   ProxyConfiguration proxyConf = proxyManager.getProxyConfigurationByAddressAndPort(proxyAddress, proxyPort);
                   if(proxyConf==null){
                       // First time for this proxy — create and persist a new entry.
                       proxyConf = new ProxyConfiguration();
                       proxyConf.setProxyAddress(proxyAddress);
                       proxyConf.setProxyPort(proxyPort);
                       if(!username.getText().toString().equals("") && !username.getText().toString().equals(null)){
                           proxyConf.setUseProxyAuth(true);
                           proxyConf.setProxyUserName(username.getText().toString());
                           proxyConf.setProxyPassword(password.getText().toString());
                       }else{
                           // Empty username means "no authentication required".
                           proxyConf.setUseProxyAuth(false);
                           proxyConf.setProxyUserName("");
                           proxyConf.setProxyPassword("");
                       }
                       proxyManager.saveEntity(proxyConf);
                   }else{
                       // Existing entry — overwrite the stored credentials.
                       if(!username.getText().toString().equals("") && !username.getText().toString().equals(null)){
                           proxyConf.setUseProxyAuth(true);
                           proxyConf.setProxyUserName(username.getText().toString());
                           proxyConf.setProxyPassword(password.getText().toString());
                       }else{
                           proxyConf.setUseProxyAuth(false);
                           proxyConf.setProxyUserName("");
                           proxyConf.setProxyPassword("");
                       }
                       proxyManager.updateEntity(proxyConf);
                   }
                   dialog.dismiss();
               }
           })
           .setNegativeButton(R.string.df_cancel, new DialogInterface.OnClickListener() {
               public void onClick(DialogInterface dialog, int id) {
                   // Cancel saves nothing; the dialog simply closes.
               }
           });
    return builder.create();
}
/**
 * Starts an upload to the given URL on a background task. A used AsyncTask
 * cannot be re-executed, so a fresh task is created when the current one is
 * already running or finished.
 *
 * @param url for upload
 */
public void upload(String url){
    this.upload = true;
    this.urlString = url;
    Status status = requestTask.getStatus();
    if (status.equals(Status.RUNNING) || status.equals(Status.FINISHED)) {
        requestTask = new RequestTask();
    }
    requestTask.execute();
}
/**
 * @return true while the underlying AsyncTask is in the RUNNING state
 */
public boolean isRunning(){
    return requestTask.getStatus().equals(Status.RUNNING);
}
/**
 * Performs a GET request to the configured URL and returns the raw response
 * body stream. Applies custom headers, cookies, the shared proxy settings
 * (honoring the bypass list), an optional trust-all HTTPS socket factory,
 * and the configured connection/socket timeouts. Also records the response
 * Content-Length into {@code filesize}.
 *
 * @return InputStream requestResponse, or null when no URL is set
 * @throws IllegalStateException
 * @throws IOException
 */
public InputStream getContent() throws IllegalStateException, IOException, NullPointerException, UnknownHostException{
    Logger.log(this, "DOWNLOAD CONTENT USING GET URL => " + getUrlString());
    if(getUrlString() == null){
        return null;
    }
    DefaultHttpClient httpClient = new DefaultHttpClient();
    /*
     * set htt params for timeout
     */
    HttpParams httpParams = httpClient.getParams();
    //HttpConnectionParams.setConnectionTimeout(httpParams, (int)timeout);
    //HttpConnectionParams.setSoTimeout(httpParams, (int)timeout);
    if(secure){
        // Replace the client with one whose SSL factory accepts all hostnames.
        // NOTE(review): this disables hostname verification — acceptable only
        // for trusted/internal endpoints.
        try {
            KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
            trustStore.load(null, null);
            DFSSLSocketFactory sf = new DFSSLSocketFactory(trustStore);
            sf.setHostnameVerifier(DFSSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
            SchemeRegistry schemeRegistry = new SchemeRegistry();
            schemeRegistry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));
            schemeRegistry.register(new Scheme("https", sf, 443));
            ClientConnectionManager cm = new SingleClientConnManager(null, schemeRegistry);
            httpClient = new DefaultHttpClient(cm,null);
        } catch (KeyStoreException e) {
            e.printStackTrace();
        } catch (NoSuchAlgorithmException e) {
            e.printStackTrace();
        } catch (CertificateException e) {
            e.printStackTrace();
        } catch (KeyManagementException e) {
            e.printStackTrace();
        } catch (UnrecoverableKeyException e) {
            e.printStackTrace();
        }
    }
    // Reuse any cookies previously captured for this context.
    if(HttpUtils.instance.getCookieStore(context)!=null){
        httpClient.setCookieStore(HttpUtils.instance.getCookieStore(context));
    }
    /*if(HttpUtils.instance.getCookieStore(context)!=null){
        httpClient.setCookieStore(HttpUtils.instance.getCookieStore(context));
    }else{
        HttpUtils.instance.setCookieStore(context,httpClient.getCookieStore());
    }*/
    HttpGet httpGet = new HttpGet(getUrlString());
    // Timeouts are set on the request, not the (unused) client-level params above.
    HttpParams httpParamsGet = httpGet.getParams();
    HttpConnectionParams.setConnectionTimeout(httpParamsGet, (int)timeout);
    HttpConnectionParams.setSoTimeout(httpParamsGet, (int)timeout);
    /*
    TimerTask task = new TimerTask() {
        @Override
        public void run() {
            if (httpGet != null) {
                httpGet.abort();
            }
        }
    };
    new TimerTask(true).schedule(task, timeout);
    */
    // Apply the caller-supplied headers.
    if(listHeader!=null){
        for(String key:listHeader.keySet()){
            httpGet.addHeader(key, listHeader.get(key));
        }
    }
    Logger.log("using proxy=>"+HttpUtils.instance.isUseProxy());
    String hostname = httpGet.getURI().getHost();
    Logger.log("host=>"+hostname);
    Logger.log("byPassProxies=>"+HttpUtils.instance.getByPassProxy());
    // Hosts in the comma-separated bypass list skip the proxy entirely.
    String[] byPassProxies = HttpUtils.instance.getByPassProxy().split(",");
    boolean isByPassProxy = false;
    for(String byPass:byPassProxies){
        if (hostname.equals(byPass)) {
            isByPassProxy = true;
            break;
        }
    }
    if(HttpUtils.instance.isUseProxy() && !isByPassProxy){
        HttpHost proxy = new HttpHost(HttpUtils.instance.getHostname(), HttpUtils.instance.getPort());
        if(HttpUtils.instance.isUseAuthProxy()){
            httpClient.getCredentialsProvider().setCredentials(AuthScope.ANY,new UsernamePasswordCredentials(HttpUtils.instance.getUsername(), HttpUtils.instance.getPassword()));
        }
        ConnRouteParams.setDefaultProxy(httpClient.getParams(), proxy);
    }
    HttpResponse httpResponse = httpClient.execute(httpGet);
    // Remember the advertised length so callers can report download progress.
    filesize = httpResponse.getEntity().getContentLength();
    return httpResponse.getEntity().getContent();
}
/**
 * Logs every pending POST parameter as "[name:value]" for debugging.
 */
public void printPostParam(){
    for (int i = 0; i < postParams.size(); i++) {
        NameValuePair pair = postParams.get(i);
        Logger.log("[" + pair.getName() + ":" + pair.getValue() + "]");
    }
}
/**
 * Executes the request as an HTTP POST and returns the response body stream.
 * <p>
 * Applies the configured connection/socket timeouts, any custom headers, the
 * global proxy settings from {@link HttpUtils} (skipping the proxy for hosts on
 * the comma-separated bypass list), and — when {@code secure} is set — a
 * trust-all SSL scheme. The request entity is either a url-encoded form built
 * from {@code postParams} ({@code POST_TYPE_FORM}) or a raw string body with the
 * configured content type ({@code POST_TYPE_RAW}). As a side effect,
 * {@code filesize} is set to the response Content-Length.
 *
 * @return InputStream of the response entity content
 * @throws ClientProtocolException on an HTTP protocol error
 * @throws IOException on a transport error (UnknownHostException is a subclass)
 */
public InputStream post() throws ClientProtocolException, IOException, UnknownHostException{
    Logger.log(this, "DOWNLOAD CONTENT USING POST URL => " + getUrlString());
    DefaultHttpClient httpClient = new DefaultHttpClient();
    // In a POST request the values are not passed in the URL, so only the page
    // URL itself parameterizes the HttpPost.
    HttpPost httpPost = new HttpPost(getUrlString());
    // Timeouts go on the request's own params. (An unused BasicHttpParams local
    // and blocks of commented-out dead code were removed here.)
    HttpParams httpParamsPost = httpPost.getParams();
    HttpConnectionParams.setConnectionTimeout(httpParamsPost, (int) timeout);
    HttpConnectionParams.setSoTimeout(httpParamsPost, (int) timeout);
    if (secure) {
        try {
            // Trust-all SSL setup: DFSSLSocketFactory accepts any certificate and
            // ALLOW_ALL_HOSTNAME_VERIFIER disables hostname verification.
            // NOTE(review): this defeats TLS server authentication — acceptable
            // only for self-signed test endpoints, never for production traffic.
            KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType());
            trustStore.load(null, null);
            DFSSLSocketFactory sf = new DFSSLSocketFactory(trustStore);
            sf.setHostnameVerifier(DFSSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
            SchemeRegistry schemeRegistry = new SchemeRegistry();
            schemeRegistry.register(new Scheme("http", PlainSocketFactory.getSocketFactory(), 80));
            schemeRegistry.register(new Scheme("https", sf, 443));
            ClientConnectionManager cm = new SingleClientConnManager(null, schemeRegistry);
            // Replace the client before cookies/credentials are applied below.
            httpClient = new DefaultHttpClient(cm, null);
        } catch (KeyStoreException e) {
            e.printStackTrace();
        } catch (NoSuchAlgorithmException e) {
            e.printStackTrace();
        } catch (CertificateException e) {
            e.printStackTrace();
        } catch (KeyManagementException e) {
            e.printStackTrace();
        } catch (UnrecoverableKeyException e) {
            e.printStackTrace();
        }
    }
    // Reuse a previously persisted cookie store, if one exists for this context.
    if (HttpUtils.instance.getCookieStore(context) != null) {
        httpClient.setCookieStore(HttpUtils.instance.getCookieStore(context));
    }
    if (listHeader != null) {
        for (String key : listHeader.keySet()) {
            httpPost.addHeader(key, listHeader.get(key));
        }
    }
    Logger.log("using proxy=>" + HttpUtils.instance.isUseProxy());
    String hostname = httpPost.getURI().getHost();
    Logger.log("host=>" + hostname);
    Logger.log("byPassProxies=>" + HttpUtils.instance.getByPassProxy());
    // The proxy is skipped for hosts named in the comma-separated bypass list.
    String[] byPassProxies = HttpUtils.instance.getByPassProxy().split(",");
    boolean isByPassProxy = false;
    for (String byPass : byPassProxies) {
        if (hostname.equals(byPass)) {
            isByPassProxy = true;
            break;
        }
    }
    if (HttpUtils.instance.isUseProxy() && !isByPassProxy) {
        HttpHost proxy = new HttpHost(HttpUtils.instance.getHostname(), HttpUtils.instance.getPort());
        if (HttpUtils.instance.isUseAuthProxy()) {
            httpClient.getCredentialsProvider().setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(HttpUtils.instance.getUsername(), HttpUtils.instance.getPassword()));
        }
        ConnRouteParams.setDefaultProxy(httpClient.getParams(), proxy);
    }
    // Attach the body: url-encoded form pairs, or a raw string entity with the
    // configured content type.
    if (postType.equals(POST_TYPE_FORM)) {
        UrlEncodedFormEntity urlEncodedFormEntity = new UrlEncodedFormEntity(postParams);
        httpPost.setEntity(urlEncodedFormEntity);
    } else if (postType.equals(POST_TYPE_RAW)) {
        StringEntity se = new StringEntity(rawStringPost);
        se.setContentType(new BasicHeader(HTTP.CONTENT_TYPE, contentType));
        httpPost.setEntity(se);
    }
    HttpResponse httpResponse = httpClient.execute(httpPost);
    filesize = httpResponse.getEntity().getContentLength();
    if (isPrintResponseHeader()) {
        // Diagnostic dump of cookies, request headers and response headers.
        CookieStore cookieStore = httpClient.getCookieStore();
        for (Cookie header : cookieStore.getCookies()) {
            Logger.log("[" + header.getName() + ":" + header.getValue() + "]");
        }
        Logger.log("Header Request");
        for (Header header : httpPost.getAllHeaders()) {
            Logger.log("[" + header.getName() + ":" + header.getValue() + "]");
        }
        Logger.log("Header Response");
        for (Header header : httpResponse.getAllHeaders()) {
            Logger.log("[" + header.getName() + ":" + header.getValue() + "]");
        }
    }
    return httpResponse.getEntity().getContent();
}
/**
 * Uploads a file using a multipart/form-data POST and returns the response body
 * stream. The file is sent under {@code fileUploadName}, together with a fixed
 * "user" part and every pair in {@code postParams} as string parts. As a side
 * effect, {@code filesize} is set to the response Content-Length.
 *
 * @param fileUpload the file to upload; when null no entity is attached and a
 *                   bare POST is sent
 * @return InputStream of the response entity content
 * @throws IOException on a transport or protocol error
 */
protected InputStream doFileUpload(File fileUpload) throws IOException{
    Logger.log(this, "UPLOAD CONTENT USING POST URL => " + getUrlString());
    DefaultHttpClient httpClient = new DefaultHttpClient();
    if (HttpUtils.instance.getCookieStore(context) != null) {
        httpClient.setCookieStore(HttpUtils.instance.getCookieStore(context));
    } else {
        HttpUtils.instance.setCookieStore(context, httpClient.getCookieStore());
    }
    // In a POST request the values are not passed in the URL, so only the page
    // URL itself parameterizes the HttpPost.
    HttpPost httpPost = new HttpPost(getUrlString());
    // Consistency fix: like post() and the GET path, skip the proxy for hosts on
    // the comma-separated bypass list (previously uploads always used the proxy).
    String hostname = httpPost.getURI().getHost();
    String[] byPassProxies = HttpUtils.instance.getByPassProxy().split(",");
    boolean isByPassProxy = false;
    for (String byPass : byPassProxies) {
        if (hostname.equals(byPass)) {
            isByPassProxy = true;
            break;
        }
    }
    if (HttpUtils.instance.isUseProxy() && !isByPassProxy) {
        HttpHost proxy = new HttpHost(HttpUtils.instance.getHostname(), HttpUtils.instance.getPort());
        if (HttpUtils.instance.isUseAuthProxy()) {
            httpClient.getCredentialsProvider().setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(HttpUtils.instance.getUsername(), HttpUtils.instance.getPassword()));
        }
        ConnRouteParams.setDefaultProxy(httpClient.getParams(), proxy);
    }
    if (fileUpload != null) {
        FileBody bin1 = new FileBody(fileUpload);
        MultipartEntity reqEntity = new MultipartEntity();
        reqEntity.addPart(fileUploadName, bin1);
        reqEntity.addPart("user", new StringBody("User"));
        for (NameValuePair valuePair : postParams) {
            reqEntity.addPart(valuePair.getName(), new StringBody(valuePair.getValue()));
        }
        httpPost.setEntity(reqEntity);
    }
    HttpResponse httpResponse = httpClient.execute(httpPost);
    filesize = httpResponse.getEntity().getContentLength();
    return httpResponse.getEntity().getContent();
}
/** Sets the request URL; alias of {@link #setUrlString(String)}. */
public void setPage(String url){
this.urlString = url;
}
/** @return the target URL of this request */
public String getUrlString() {
return urlString;
}
/** Sets the target URL of this request. */
public void setUrlString(String urlString) {
this.urlString = urlString;
}
/** @return true when the request is configured to use POST instead of GET */
public boolean isPost() {
return post;
}
/**
* set post action
* @param post true to send the request as POST, false for GET
*/
public void setPost(boolean post) {
this.post = post;
}
/** @return the cookie store associated with this request */
public CookieStore getCookieStore() {
return cookieStore;
}
/** Sets the cookie store used by this request. */
public void setCookieStore(CookieStore cookieStore) {
this.cookieStore = cookieStore;
}
/** @return the form parameters sent when the post type is FORM */
public List<NameValuePair> getPostParams() {
return postParams;
}
/**
* set post params for request type post and post type is FORM;
* a null argument is ignored and the existing list is kept
* @param postParams <code>List<NameValuePair></code>
*/
public void setPostParams(List<NameValuePair> postParams) {
if(postParams!=null){
this.postParams = postParams;
}
}
/**
* add a form parameter to the request; used when post is true and post type is FORM
* @param key <code>String</code> parameter name
* @param value <code>String</code> parameter value
*/
public void addPostData(String key,String value){
BasicNameValuePair newParams = new BasicNameValuePair(key, value);
this.postParams.add(newParams);
}
/**
* add header to request
* @param key <code>String</code> header key
* @param value <code>String</code> header value
*/
public void addHeaderData(String key,String value){
this.listHeader.put(key,value);
}
/** Adds all entries of the given map as request headers. */
public void addHeadersData(Map<String,String> listHeader){
this.listHeader.putAll(listHeader);
}
/**
 * Background worker that performs the actual network request off the UI thread.
 * Progress (characters or bytes read so far) is reported via publishProgress
 * and the final Boolean signals overall success.
 */
private class RequestTask extends AsyncTask<String, Integer, Boolean>{
@Override
protected void onPreExecute() {
super.onPreExecute();
// Mark the request as in-flight before doInBackground starts.
finish = false;
//intervalUpdateProgress = System.currentTimeMillis();
}
// TODO do download process
/**
* core process for DownloadManager: picks upload/POST/GET transport, then
* consumes the response according to responseType (CUSTOM, STRING, BITMAP, RAW)
*/
@Override
protected Boolean doInBackground(String... params) {
setDownloadInfo(DownloadInfo.INFO_ON_PROGRESS);
requestInfo = RequestInfo.INFO_ON_PROGRESS;
publishProgress(0);
if(connection){
try {
InputStream is = null;
// Choose the transport: multipart file upload, plain POST, or GET.
if(isUpload()){
//doFileUpload(getInputStreamUpload());
is = doFileUpload(getFileUpload());
}else if(isPost()){
is = post();
}else{
is = getContent();
}
if(responseType.equals(ResponseType.CUSTOM) && customRequest!=null){
// Hand the raw stream to the caller-supplied handler.
customRequest.onRequest(is);
}else if(responseType.equals(ResponseType.STRING)){
// Read the body line by line into a string, publishing the running length.
// NOTE(review): the charset is hard-coded to iso-8859-1 — confirm the
// server never sends UTF-8 payloads, or this will mangle them.
BufferedReader reader = new BufferedReader(new InputStreamReader(is, "iso-8859-1"), 8);
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = reader.readLine()) != null) {
sb.append(line + "\n");
publishProgress(sb.length());
}
is.close();
resultString = sb.toString();
}else if(responseType.equals(ResponseType.BITMAP)){
if(filename!=null){
try{
// Runtime permission check before writing the image to external storage.
if((Build.VERSION_CODES.M>=Build.VERSION.SDK_INT && ContextCompat.checkSelfPermission(getContext(),
Manifest.permission.WRITE_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED)){
if(getContext() instanceof Activity) {
ActivityCompat.requestPermissions((Activity) getContext(),
new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
0);
}
setDownloadInfo(DownloadInfo.INFO_PERMISSION_DENIED);
requestInfo = RequestInfo.INFO_PERMISSION_DENIED;
return false;
}
//ByteArrayOutputStream bytes = new ByteArrayOutputStream();
//bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bytes);
//you can create a new file name "test.jpg" in sdcard folder.
FileOutputStream f = new FileOutputStream(filename);
//InputStream isBitmap = new ByteArrayInputStream(bytes.toByteArray());
// Stream the body to disk in 1 KiB chunks, publishing the byte count.
int readlen = 0;
Long totalRead = 0l;
byte[] buf = new byte[1024];
while ((readlen = is.read(buf)) > 0){
f.write(buf, 0, readlen);
totalRead += readlen;
publishProgress(totalRead.intValue());
}
f.close();
is.close();
//isBitmap.close();
//bytes.close();
bitmap = BitmapFactory.decodeFile(filename);
// Discard the file when decoding failed or the download was truncated.
// NOTE(review): deleteOnExit() only removes the file at JVM shutdown;
// file.delete() was probably intended to discard it immediately.
if(bitmap==null || filesize!=totalRead){
File file = new File(filename);
file.deleteOnExit();
}
}catch (IOException e) {
e.printStackTrace();
}catch (OutOfMemoryError e) {
e.printStackTrace();
}
}else{
// No target file: decode the bitmap directly from the stream.
bitmap = BitmapFactory.decodeStream(is);
}
}else if(responseType.equals(ResponseType.RAW)){
if(filename!=null){
// Runtime permission check before writing the file to external storage.
if((Build.VERSION_CODES.M>=Build.VERSION.SDK_INT && ContextCompat.checkSelfPermission(getContext(),
Manifest.permission.WRITE_EXTERNAL_STORAGE)
!= PackageManager.PERMISSION_GRANTED)){
if(getContext() instanceof Activity) {
ActivityCompat.requestPermissions((Activity) getContext(),
new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE},
0);
}
setDownloadInfo(DownloadInfo.INFO_PERMISSION_DENIED);
requestInfo = RequestInfo.INFO_PERMISSION_DENIED;
return false;
}
FileOutputStream f = new FileOutputStream(filename);
int readlen = 0;
Long totalRead = 0l;
byte[] buf = new byte[1024];
while ((readlen = is.read(buf)) > 0){
f.write(buf, 0, readlen);
totalRead += readlen;
publishProgress(totalRead.intValue());
}
is.close();
f.close();
// NOTE(review): same as the BITMAP branch — deleteOnExit() defers removal
// of the truncated file to VM exit instead of deleting it now.
if(filesize!=totalRead){
File file = new File(filename);
file.deleteOnExit();
}
}else{
// RAW responses must be written to a file; without a filename the request fails.
setDownloadInfo(DownloadInfo.INFO_DOWNLOADED_FILEPATH_NOTFOUND);
requestInfo = RequestInfo.INFO_DOWNLOADED_FILEPATH_NOTFOUND;
return false;
}
}
setDownloadInfo(DownloadInfo.INFO_COMPLETE);
requestInfo = RequestInfo.INFO_COMPLETE;
return true;
} catch (ClientProtocolException e) {
setDownloadInfo(DownloadInfo.INFO_ERROR);
requestInfo = RequestInfo.INFO_ERROR;
e.printStackTrace();
} catch (IOException e) {
setDownloadInfo(DownloadInfo.INFO_URL_NOT_FOUND);
requestInfo = RequestInfo.INFO_URL_NOT_FOUND;
e.printStackTrace();
} catch (IllegalStateException e) {
setDownloadInfo(DownloadInfo.INFO_ERROR);
requestInfo = RequestInfo.INFO_ERROR;
e.printStackTrace();
} catch (IllegalArgumentException e) {
setDownloadInfo(DownloadInfo.INFO_ERROR);
requestInfo = RequestInfo.INFO_ERROR;
e.printStackTrace();
} catch (NullPointerException e) {
setDownloadInfo(DownloadInfo.INFO_URL_NOT_FOUND);
requestInfo = RequestInfo.INFO_URL_NOT_FOUND;
e.printStackTrace();
}
}else{
setDownloadInfo(DownloadInfo.INFO_CONNECTION_LOST);
requestInfo = RequestInfo.INFO_CONNECTION_LOST;
}
return false;
}
// Notifies the registered listener with the result matching the response type.
protected void onPostExecute(Boolean success){ // TODO: extract into a new method
Logger.log(this, "GET FROM WEB COMPLETE_STATUS => "+ success);
setSuccess(success);
finish = true;
if(requestListeners!=null){
if(responseType.equals(ResponseType.STRING)){
requestListeners.onRequestComplete(RequestManager.this,success,null,resultString,resultString);
}else if(responseType.equals(ResponseType.RAW)){
requestListeners.onRequestComplete(RequestManager.this,success,null,null,resultString);
}else if(responseType.equals(ResponseType.BITMAP)){
requestListeners.onRequestComplete(RequestManager.this,success,bitmap,null,bitmap);
}
}
}
@Override
protected void onProgressUpdate(Integer... values) {
super.onProgressUpdate(values);
currentDownload = (long)values[0];
// Honour a pending cancellation request from the caller.
if(requestCancel){
cancel(true);
}
//Long updateTime = System.currentTimeMillis();
if(requestListeners!=null){
//intervalUpdateProgress = System.currentTimeMillis();
requestListeners.onRequestProgress(downloadInfo, currentDownload);
}
}
}
/** @return true when network connectivity was reported as available */
public Boolean getConnection() {
return connection;
}
/** Sets the connectivity flag checked before the request runs. */
public void setConnection(Boolean connection) {
this.connection = connection;
}
/** @return the decoded bitmap when the response type is BITMAP, else null */
public Bitmap getBitmap() {
return bitmap;
}
public ResponseType getResponseType() {
return responseType;
}
/**
* set request type: STRING if you want the result as a string,
* BITMAP if you want an image, and RAW for any other type
* @see ResponseType
* @param responseType the expected response type
*/
public void setResponseType(ResponseType responseType) {
this.responseType = responseType;
}
/** @return the response body when the response type is STRING, else null */
public String getResultString() {
return resultString;
}
/** @return the local path the response is written to (BITMAP/RAW types) */
public String getFilename() {
return filename;
}
public void setFilename(String filename) {
this.filename = filename;
}
/**
* whether the background request has finished
* @return true when the task completed (successfully or not)
*/
public boolean isFinish() {
return finish;
}
/**
* get the response size that will be retrieved
* @return the Content-Length reported by the server
*/
public Long getFilesize() {
return filesize;
}
/** @return the number of bytes/characters read so far */
public Long getCurrentDownload() {
return currentDownload;
}
public boolean isSuccess() {
return success;
}
private void setSuccess(boolean success) {
this.success = success;
}
public DownloadInfo getDownloadInfo() {
return downloadInfo;
}
public void setDownloadInfo(DownloadInfo downloadInfo) {
this.downloadInfo = downloadInfo;
}
/** @return true when the request is configured as a multipart file upload */
public boolean isUpload() {
return upload;
}
public void setUpload(boolean upload) {
this.upload = upload;
}
public InputStream getInputStreamUpload() {
return inputStreamUpload;
}
public void setInputStreamUpload(InputStream inputStreamUpload) {
this.inputStreamUpload = inputStreamUpload;
}
public File getFileUpload() {
return fileUpload;
}
/**
* set the file to upload to the server
* @param fileUpload <code>File</code>
*/
public void setFileUpload(File fileUpload) {
this.fileUpload = fileUpload;
}
/**
* interface request listener for callbacks when the response has completed
* @author fitra.bayu
*
*/
public interface RequestListeners{
/**
* this method will be called while the request/download is in progress
* @param requestInfo <code>DownloadInfo</code>
* @param currentDownload <code>Long</code>
*/
public void onRequestProgress(DownloadInfo requestInfo, Long currentDownload);
/**
* this method will be called once the response has been completely retrieved
*
* @param requestManager
* @param success <code>Boolean</code> true if request completely retrieve and false if there is any problem in request
* @param bitmap <code>Bitmap</code> if you set ResponseType BITMAP result will be in the bitmap parameter
* @param resultString <code>String</code> if you set ResponseType STRING result will be in the resultString
* @param ressultRaw <code>Object</code> if you set ResponseType RAW result will be in the resultRaw parameter
*/
public void onRequestComplete(RequestManager requestManager, Boolean success, Bitmap bitmap, String resultString, Object ressultRaw);
}
public RequestListeners getRequestListeners() {
return requestListeners;
}
/**
* specify listeners for download
* @param requestListeners <code>RequestListeners</code>
*/
public void setRequestListeners(RequestListeners requestListeners) {
this.requestListeners = requestListeners;
}
@Deprecated
public String getFileUploadName() {
return fileUploadName;
}
/** Sets the multipart form field name under which the file is uploaded. */
public void setFileUploadName(String fileUploadName) {
this.fileUploadName = fileUploadName;
}
public Context getContext() {
return context;
}
public HttpUtils getHttpUtils() {
return httpUtils;
}
public String getPostType() {
return postType;
}
/** Sets the connection and socket timeout in milliseconds. */
public void setTimeout(int timeout){
this.timeout = timeout;
}
/**
* set post type: form or raw.
* if you set form you must add post data @see setPostData()
* if you set raw you must set a raw string @see setRawStringPost()
* @param postType
*/
public void setPostType(String postType) {
this.postType = postType;
}
public String getRawStringPost() {
return rawStringPost;
}
/**
* set the raw post body, e.g. for application/json payloads
* @param rawStringPost <code>String</code>
*/
public void setRawStringPost(String rawStringPost) {
this.rawStringPost = rawStringPost;
}
public String getContentType() {
return contentType;
}
/**
* set the content type (MIME) of the raw post body, e.g. text/html or application/json
* @see ${CONTENT_TYPE_JSON} ${CONTENT_TYPE_HTML}
* @param contentType
*/
public void setContentType(String contentType) {
this.contentType = contentType;
}
public CustomRequest getCustomRequest() {
return customRequest;
}
/** Sets the handler that consumes the raw stream when the response type is CUSTOM. */
public void setCustomRequest(CustomRequest customRequest) {
this.customRequest = customRequest;
}
/** Requests cancellation; checked on the next progress update. */
public void setRequestCancel(boolean requestCancel) {
this.requestCancel = requestCancel;
}
public boolean isSecure() {
return secure;
}
/** Enables the trust-all HTTPS scheme for this request. */
public void setSecure(boolean secure) {
this.secure = secure;
}
public RequestInfo getRequestInfo() {
return requestInfo;
}
public void setRequestInfo(RequestInfo requestInfo) {
this.requestInfo = requestInfo;
}
public boolean isPrintResponseHeader() {
return printResponseHeader;
}
/** Enables diagnostic logging of cookies and request/response headers. */
public void setPrintResponseHeader(boolean printResponseHeader) {
this.printResponseHeader = printResponseHeader;
}
}
| |
/*
* Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package java.util;
/**
* Private implementation class for EnumSet, for "jumbo" enum types
* (i.e., those with more than 64 elements).
*
* @author Josh Bloch
* @since 1.5
* @serial exclude
*/
class JumboEnumSet<E extends Enum<E>> extends EnumSet<E> {
private static final long serialVersionUID = 334349849919042784L;
/**
* Bit vector representation of this set. The ith bit of the jth
* element of this array represents the presence of universe[64*j +i]
* in this set.
*/
private long elements[];
// Redundant - maintained for performance
private int size = 0;
// One 64-bit word per 64 enum constants: (n + 63) >>> 6 is ceil(n / 64).
JumboEnumSet(Class<E>elementType, Enum[] universe) {
super(elementType, universe);
elements = new long[(universe.length + 63) >>> 6];
}
// Sets the bits for universe[from..to] inclusive. Only invoked on a freshly
// created (empty) set, so whole-word assignment (rather than OR) is safe.
void addRange(E from, E to) {
int fromIndex = from.ordinal() >>> 6;
int toIndex = to.ordinal() >>> 6;
if (fromIndex == toIndex) {
// Both endpoints in one word: the shift count (from - to - 1) is negative,
// and Java masks shift counts mod 64, so -1L >>> (from - to - 1) yields a
// mask of exactly (to - from + 1) low bits, then shifted into position.
elements[fromIndex] = (-1L >>> (from.ordinal() - to.ordinal() - 1))
<< from.ordinal();
} else {
// Partial first word, full middle words, partial last word.
elements[fromIndex] = (-1L << from.ordinal());
for (int i = fromIndex + 1; i < toIndex; i++)
elements[i] = -1;
elements[toIndex] = -1L >>> (63 - to.ordinal());
}
size = to.ordinal() - from.ordinal() + 1;
}
// Adds every universe element; the final word is trimmed by an unsigned shift
// of -universe.length, i.e. 64 - (universe.length mod 64), clearing unused bits.
void addAll() {
for (int i = 0; i < elements.length; i++)
elements[i] = -1;
elements[elements.length - 1] >>>= -universe.length;
size = universe.length;
}
// Inverts membership of every constant, masking off the unused high bits of
// the last word so they remain zero.
void complement() {
for (int i = 0; i < elements.length; i++)
elements[i] = ~elements[i];
elements[elements.length - 1] &= (-1L >>> -universe.length);
size = universe.length - size;
}
/**
* Returns an iterator over the elements contained in this set. The
* iterator traverses the elements in their <i>natural order</i> (which is
* the order in which the enum constants are declared). The returned
* Iterator is a "weakly consistent" iterator that will never throw {@link
* ConcurrentModificationException}.
*
* @return an iterator over the elements contained in this set
*/
public Iterator<E> iterator() {
return new EnumSetIterator<>();
}
// NOTE: this E shadows the enclosing class's type parameter; the two are only
// reconciled by the unchecked cast in next().
private class EnumSetIterator<E extends Enum<E>> implements Iterator<E> {
/**
* A bit vector representing the elements in the current "word"
* of the set not yet returned by this iterator.
*/
long unseen;
/**
* The index corresponding to unseen in the elements array.
*/
int unseenIndex = 0;
/**
* The bit representing the last element returned by this iterator
* but not removed, or zero if no such element exists.
*/
long lastReturned = 0;
/**
* The index corresponding to lastReturned in the elements array.
*/
int lastReturnedIndex = 0;
EnumSetIterator() {
unseen = elements[0];
}
// Skips over exhausted words; elements remain iff some word has unseen bits.
public boolean hasNext() {
while (unseen == 0 && unseenIndex < elements.length - 1)
unseen = elements[++unseenIndex];
return unseen != 0;
}
public E next() {
if (!hasNext())
throw new NoSuchElementException();
// unseen & -unseen isolates the lowest set bit: the next element in ordinal order.
lastReturned = unseen & -unseen;
lastReturnedIndex = unseenIndex;
unseen -= lastReturned;
return (E) universe[(lastReturnedIndex << 6)
+ Long.numberOfTrailingZeros(lastReturned)];
}
public void remove() {
if (lastReturned == 0)
throw new IllegalStateException();
final long oldElements = elements[lastReturnedIndex];
elements[lastReturnedIndex] &= ~lastReturned;
// Only decrement size when the bit was actually cleared; it may already
// have been removed through the set directly (weakly consistent iterator).
if (oldElements != elements[lastReturnedIndex]) {
size--;
}
lastReturned = 0;
}
}
/**
* Returns the number of elements in this set.
*
* @return the number of elements in this set
*/
public int size() {
return size;
}
/**
* Returns <tt>true</tt> if this set contains no elements.
*
* @return <tt>true</tt> if this set contains no elements
*/
public boolean isEmpty() {
return size == 0;
}
/**
* Returns <tt>true</tt> if this set contains the specified element.
*
* @param e element to be checked for containment in this collection
* @return <tt>true</tt> if this set contains the specified element
*/
public boolean contains(Object e) {
if (e == null)
return false;
Class eClass = e.getClass();
// getSuperclass() handles enum constants with bodies, whose runtime class is
// an anonymous subclass of the enum type.
if (eClass != elementType && eClass.getSuperclass() != elementType)
return false;
int eOrdinal = ((Enum)e).ordinal();
return (elements[eOrdinal >>> 6] & (1L << eOrdinal)) != 0;
}
// Modification Operations
/**
* Adds the specified element to this set if it is not already present.
*
* @param e element to be added to this set
* @return <tt>true</tt> if the set changed as a result of the call
*
* @throws NullPointerException if <tt>e</tt> is null
*/
public boolean add(E e) {
typeCheck(e);
int eOrdinal = e.ordinal();
int eWordNum = eOrdinal >>> 6;
// Compare words before/after to detect whether the element was new.
long oldElements = elements[eWordNum];
elements[eWordNum] |= (1L << eOrdinal);
boolean result = (elements[eWordNum] != oldElements);
if (result)
size++;
return result;
}
/**
* Removes the specified element from this set if it is present.
*
* @param e element to be removed from this set, if present
* @return <tt>true</tt> if the set contained the specified element
*/
public boolean remove(Object e) {
if (e == null)
return false;
Class eClass = e.getClass();
if (eClass != elementType && eClass.getSuperclass() != elementType)
return false;
int eOrdinal = ((Enum)e).ordinal();
int eWordNum = eOrdinal >>> 6;
long oldElements = elements[eWordNum];
elements[eWordNum] &= ~(1L << eOrdinal);
boolean result = (elements[eWordNum] != oldElements);
if (result)
size--;
return result;
}
// Bulk Operations
/**
* Returns <tt>true</tt> if this set contains all of the elements
* in the specified collection.
*
* @param c collection to be checked for containment in this set
* @return <tt>true</tt> if this set contains all of the elements
* in the specified collection
* @throws NullPointerException if the specified collection is null
*/
public boolean containsAll(Collection<?> c) {
if (!(c instanceof JumboEnumSet))
return super.containsAll(c);
JumboEnumSet es = (JumboEnumSet)c;
if (es.elementType != elementType)
return es.isEmpty();
// Subset test word by word: any bit set in es but not in this fails.
for (int i = 0; i < elements.length; i++)
if ((es.elements[i] & ~elements[i]) != 0)
return false;
return true;
}
/**
* Adds all of the elements in the specified collection to this set.
*
* @param c collection whose elements are to be added to this set
* @return <tt>true</tt> if this set changed as a result of the call
* @throws NullPointerException if the specified collection or any of
* its elements are null
*/
public boolean addAll(Collection<? extends E> c) {
if (!(c instanceof JumboEnumSet))
return super.addAll(c);
JumboEnumSet es = (JumboEnumSet)c;
if (es.elementType != elementType) {
if (es.isEmpty())
return false;
else
throw new ClassCastException(
es.elementType + " != " + elementType);
}
for (int i = 0; i < elements.length; i++)
elements[i] |= es.elements[i];
return recalculateSize();
}
/**
* Removes from this set all of its elements that are contained in
* the specified collection.
*
* @param c elements to be removed from this set
* @return <tt>true</tt> if this set changed as a result of the call
* @throws NullPointerException if the specified collection is null
*/
public boolean removeAll(Collection<?> c) {
if (!(c instanceof JumboEnumSet))
return super.removeAll(c);
JumboEnumSet es = (JumboEnumSet)c;
if (es.elementType != elementType)
return false;
for (int i = 0; i < elements.length; i++)
elements[i] &= ~es.elements[i];
return recalculateSize();
}
/**
* Retains only the elements in this set that are contained in the
* specified collection.
*
* @param c elements to be retained in this set
* @return <tt>true</tt> if this set changed as a result of the call
* @throws NullPointerException if the specified collection is null
*/
public boolean retainAll(Collection<?> c) {
if (!(c instanceof JumboEnumSet))
return super.retainAll(c);
JumboEnumSet<?> es = (JumboEnumSet<?>)c;
if (es.elementType != elementType) {
// Disjoint element types: the intersection is empty.
boolean changed = (size != 0);
clear();
return changed;
}
for (int i = 0; i < elements.length; i++)
elements[i] &= es.elements[i];
return recalculateSize();
}
/**
* Removes all of the elements from this set.
*/
public void clear() {
Arrays.fill(elements, 0);
size = 0;
}
/**
* Compares the specified object with this set for equality. Returns
* <tt>true</tt> if the given object is also a set, the two sets have
* the same size, and every member of the given set is contained in
* this set.
*
* @param e object to be compared for equality with this set
* @return <tt>true</tt> if the specified object is equal to this set
*/
public boolean equals(Object o) {
if (!(o instanceof JumboEnumSet))
return super.equals(o);
JumboEnumSet es = (JumboEnumSet)o;
if (es.elementType != elementType)
return size == 0 && es.size == 0;
return Arrays.equals(es.elements, elements);
}
/**
* Recalculates the size of the set. Returns true if it's changed.
*/
private boolean recalculateSize() {
int oldSize = size;
size = 0;
for (long elt : elements)
size += Long.bitCount(elt);
return size != oldSize;
}
// Clones the set, deep-copying the word array so the copies are independent.
public EnumSet<E> clone() {
JumboEnumSet<E> result = (JumboEnumSet<E>) super.clone();
result.elements = result.elements.clone();
return result;
}
}
| |
package com.fedevela.dms.services.impl;
/**
* Created by fvelazquez on 9/04/14.
*/
import com.fedevela.core.workflow.pojos.Workflow;
import com.fedevela.core.workflow.pojos.WorkflowConfig;
import com.fedevela.core.workflow.pojos.WorkflowState;
import com.fedevela.asic.daos.DmsDao;
import com.fedevela.dms.services.WorkflowService;
import java.util.List;
import javax.annotation.Resource;
import org.hibernate.criterion.DetachedCriteria;
import org.hibernate.criterion.Restrictions;
import org.springframework.stereotype.Service;
@Service
public class WorkflowServiceImpl implements WorkflowService {
@Resource
private DmsDao dao;
/**
 * Looks up the single workflow transition matching (workflow, current state,
 * event), or null when no such configuration exists.
 */
@Override
public WorkflowConfig getWorkflowConfig(
        final Long workflow,
        final String currentState,
        final String event) {
    final DetachedCriteria query = DetachedCriteria.forClass(WorkflowConfig.class)
            .add(Restrictions.eq("workflow.idWorkflow", workflow))
            .add(Restrictions.eq("currentState.idWorkflowState", currentState))
            .add(Restrictions.eq("event.idWorkflowEvent", event));
    final List<WorkflowConfig> matches = dao.find(query);
    return (matches == null || matches.isEmpty()) ? null : matches.get(0);
}
/**
 * Resolves the workflow transition for (workflow, current state, event) that
 * the given authorities are permitted to execute.
 *
 * @param idWorkflow workflow identifier
 * @param idWorkflowState current state identifier
 * @param idWorkflowEvent event identifier
 * @param authorities authority names granted to the caller
 * @param doNothingExclude when true, 'doNothing' transitions are filtered out
 * @return the first matching configuration, or null when none matches
 */
@Override
public WorkflowConfig getWorkflowConfig(
final Long idWorkflow,
final String idWorkflowState,
final String idWorkflowEvent,
final String[] authorities,
final boolean doNothingExclude) {
// Build the quoted, comma-separated IN(...) list of authorities.
StringBuilder strAuthorities = null;
for (String authority : authorities) {
if (strAuthorities == null) {
strAuthorities = new StringBuilder();
strAuthorities.append("'").append(authority).append("'");
} else {
strAuthorities.append(",'").append(authority).append("'");
}
}
// NOTE(review): the statement is assembled by string concatenation; the state,
// event and authority values are interpolated unescaped. If any of them can
// carry user input this is SQL-injectable — prefer bound parameters if the
// DAO supports them.
StringBuilder sql = new StringBuilder();
sql.append(" SELECT WC.* ");
sql.append(" FROM PROD.WORKFLOW_CONFIG WC,");
sql.append(" PROD.WORKFLOW_SECURITY WS");
sql.append(" WHERE WC.ID_WORKFLOW=").append(idWorkflow);
sql.append(" AND WC.ID_CURRENT_STATE='").append(idWorkflowState).append("'");
sql.append(" AND WC.ID_WORKFLOW_EVENT='").append(idWorkflowEvent).append("'");
if (doNothingExclude) {
sql.append(" AND WC.ID_WORKFLOW_EVENT<>'doNothing' ");
}
sql.append(" AND WS.ID_WORKFLOW_CONFIG=WC.ID_WORKFLOW_CONFIG");
sql.append(" AND WS.ID_AUTHORITY IN(").append(strAuthorities).append(")");
List<WorkflowConfig> config = dao.find(WorkflowConfig.class, sql);
// Bug fix: config.get(0) was previously called unconditionally and threw
// IndexOutOfBoundsException (or NPE) on an empty result; return null instead,
// matching the behaviour of the sibling overloads.
if (config == null || config.isEmpty()) {
return null;
}
return config.get(0);
}
/**
 * Resolves the workflow transition for (workflow, current state, event)
 * restricted to the given authorities and the ALLOW_STATE / ALLOW_EVENT flags,
 * or null when nothing matches.
 */
@Override
public WorkflowConfig getWorkflowConfig(
        final Long idWorkflow,
        String idWorkflowState,
        String idWorkflowEvent,
        String[] authorities,
        boolean doNothingExclude,
        boolean allowState,
        boolean allowEvent) {
    // Quoted, comma-separated IN(...) list; intentionally stays null (rendered
    // literally as "null") when no authorities are supplied, as before.
    String authorityList = null;
    for (String authority : authorities) {
        authorityList = (authorityList == null)
                ? "'" + authority + "'"
                : authorityList + ",'" + authority + "'";
    }
    StringBuilder sql = new StringBuilder()
            .append(" SELECT WC.* ")
            .append(" FROM PROD.WORKFLOW_CONFIG WC,")
            .append(" PROD.WORKFLOW_SECURITY WS")
            .append(" WHERE WC.ID_WORKFLOW=").append(idWorkflow)
            .append(" AND WC.ID_CURRENT_STATE='").append(idWorkflowState).append("'")
            .append(" AND WC.ID_WORKFLOW_EVENT='").append(idWorkflowEvent).append("'");
    if (doNothingExclude) {
        sql.append(" AND WC.ID_WORKFLOW_EVENT<>'doNothing' ");
    }
    sql.append(" AND WS.ID_WORKFLOW_CONFIG=WC.ID_WORKFLOW_CONFIG")
            .append(" AND WS.ID_AUTHORITY IN(").append(authorityList).append(")")
            .append(" AND WS.ALLOW_STATE='").append(allowState ? 't' : 'f').append("'")
            .append(" AND WS.ALLOW_EVENT='").append(allowEvent ? 't' : 'f').append("'");
    List<WorkflowConfig> results = dao.find(WorkflowConfig.class, sql);
    return (results == null || results.isEmpty()) ? null : results.get(0);
}
/**
 * Lists every transition configured from the given state of a workflow,
 * optionally excluding 'doNothing' transitions.
 */
@Override
public List<WorkflowConfig> getWorkflowConfig(
        final Long workflow,
        final String currentState,
        final boolean doNothingExclude) {
    final DetachedCriteria query = DetachedCriteria.forClass(WorkflowConfig.class)
            .add(Restrictions.eq("workflow.idWorkflow", workflow))
            .add(Restrictions.eq("currentState.idWorkflowState", currentState));
    if (doNothingExclude) {
        query.add(Restrictions.ne("workflowEvent.idWorkflowEvent", "doNothing"));
    }
    return dao.find(query);
}
/**
 * Lists every transition available from the given state that the supplied
 * authorities may execute, optionally excluding 'doNothing' transitions.
 */
@Override
public List<WorkflowConfig> getWorkflowConfig(
        final Long idWorkflow,
        final String idWorkflowState,
        final String[] authorities,
        final boolean doNothingExclude) {
    // Quoted, comma-separated IN(...) list; intentionally stays null (rendered
    // literally as "null") when no authorities are supplied, as before.
    String authorityList = null;
    for (String authority : authorities) {
        authorityList = (authorityList == null)
                ? "'" + authority + "'"
                : authorityList + ",'" + authority + "'";
    }
    StringBuilder sql = new StringBuilder()
            .append(" SELECT WC.* ")
            .append(" FROM PROD.WORKFLOW_CONFIG WC,")
            .append(" PROD.WORKFLOW_SECURITY WS")
            .append(" WHERE WC.ID_WORKFLOW=").append(idWorkflow)
            .append(" AND WC.ID_CURRENT_STATE='").append(idWorkflowState).append("'");
    if (doNothingExclude) {
        sql.append(" AND WC.ID_WORKFLOW_EVENT<>'doNothing' ");
    }
    sql.append(" AND WS.ID_WORKFLOW_CONFIG=WC.ID_WORKFLOW_CONFIG")
            .append(" AND WS.ID_AUTHORITY IN(").append(authorityList).append(")");
    return dao.find(WorkflowConfig.class, sql);
}
/**
 * Lists every transition available from the given state that the supplied
 * authorities may execute, additionally filtered by the ALLOW_STATE and
 * ALLOW_EVENT flags, optionally excluding 'doNothing' transitions.
 */
@Override
public List<WorkflowConfig> getWorkflowConfig(
        final Long idWorkflow,
        final String idWorkflowState,
        final String[] authorities,
        final boolean doNothingExclude,
        final boolean allowState,
        final boolean allowEvent) {
    // Quoted, comma-separated IN(...) list; intentionally stays null (rendered
    // literally as "null") when no authorities are supplied, as before.
    String authorityList = null;
    for (String authority : authorities) {
        authorityList = (authorityList == null)
                ? "'" + authority + "'"
                : authorityList + ",'" + authority + "'";
    }
    StringBuilder sql = new StringBuilder()
            .append(" SELECT WC.* ")
            .append(" FROM PROD.WORKFLOW_CONFIG WC,")
            .append(" PROD.WORKFLOW_SECURITY WS")
            .append(" WHERE WC.ID_WORKFLOW=").append(idWorkflow)
            .append(" AND WC.ID_CURRENT_STATE='").append(idWorkflowState).append("'");
    if (doNothingExclude) {
        sql.append(" AND WC.ID_WORKFLOW_EVENT<>'doNothing' ");
    }
    sql.append(" AND WS.ID_WORKFLOW_CONFIG=WC.ID_WORKFLOW_CONFIG")
            .append(" AND WS.ID_AUTHORITY IN(").append(authorityList).append(")")
            .append(" AND WS.ALLOW_STATE='").append(allowState ? 't' : 'f').append("'")
            .append(" AND WS.ALLOW_EVENT='").append(allowEvent ? 't' : 'f').append("'");
    return dao.find(WorkflowConfig.class, sql);
}
@Override
public List<WorkflowState> getWorkflowState(final Long idWorkflow, final String[] authorities, final boolean allowState) {
// String authoritiesString =
StringBuilder strAuthorities = null;
for (String authority : authorities) {
if (strAuthorities == null) {
strAuthorities = new StringBuilder();
strAuthorities.append("'").append(authority).append("'");
} else {
strAuthorities.append(",'").append(authority).append("'");
}
}
StringBuilder sql = new StringBuilder();
sql.append(" SELECT DISTINCT WST.*");
sql.append(" FROM PROD.WORKFLOW_STATE WST,");
sql.append(" PROD.WORKFLOW_CONFIG WC,");
sql.append(" PROD.WORKFLOW_SECURITY WS");
sql.append(" WHERE WC.ID_CURRENT_STATE=WST.ID_WORKFLOW_STATE");
sql.append(" AND WC.ID_WORKFLOW=").append(idWorkflow);
sql.append(" AND WS.ID_WORKFLOW_CONFIG=WC.ID_WORKFLOW_CONFIG");
sql.append(" AND WS.ALLOW_STATE='").append((allowState) ? "t" : "f").append("'");
sql.append(" AND WS.ID_AUTHORITY IN(").append(strAuthorities).append(")");
return dao.find(WorkflowState.class, sql);
}
    /** Loads a single workflow state by its id via the generic DAO. */
    @Override
    public WorkflowState getWorkflowState(String idWorkflowState) {
        return dao.get(WorkflowState.class, idWorkflowState);
    }
    /** Loads the workflow with the given id and deletes it through the DAO. */
    @Override
    public void deleteWorkflow(final Long idWorkflow) {
        dao.delete(dao.get(Workflow.class, idWorkflow));
    }
    /** Loads a single workflow by its id via the generic DAO. */
    @Override
    public Workflow getWorkflow(final Long idWorkflow) {
        return dao.get(Workflow.class, idWorkflow);
    }
    /** Saves the given workflow via {@code dao.persist} and returns the persisted instance. */
    @Override
    public Workflow saveWorkflow(Workflow workflow) {
        return dao.persist(workflow);
    }
    /** Loads the workflow configuration with the given id and deletes it through the DAO. */
    @Override
    public void deleteWorkflowConfig(final Long idWorkflowConfig) {
        dao.delete(dao.get(WorkflowConfig.class, idWorkflowConfig));
    }
    /** Saves the given workflow configuration via {@code dao.persist} and returns the persisted instance. */
    @Override
    public WorkflowConfig saveWorkflowConfig(WorkflowConfig workflowConfig) {
        return dao.persist(workflowConfig);
    }
    /** Saves the given workflow state via {@code dao.persist} and returns the persisted instance. */
    @Override
    public WorkflowState saveWorkflowState(WorkflowState workflowState) {
        return dao.persist(workflowState);
    }
    /** Loads the workflow state with the given id and deletes it through the DAO. */
    @Override
    public void deleteWorkflowState(String idWorkflowState) {
        dao.delete(dao.get(WorkflowState.class, idWorkflowState));
    }
    /** Loads all workflow states via the generic DAO. */
    @Override
    public List<WorkflowState> getState() {
        return dao.find(WorkflowState.class);
    }
@Override
public List<WorkflowState> getState(final Long... idWorkflow) {
StringBuilder sql = new StringBuilder();
sql.append(" SELECT DISTINCT WST.*");
sql.append(" FROM PROD.WORKFLOW_STATE WST,");
sql.append(" PROD.WORKFLOW_CONFIG WC");
sql.append(" WHERE (WC.ID_CURRENT_STATE=WST.ID_WORKFLOW_STATE OR WC.ID_NEXT_STATE=WST.ID_WORKFLOW_STATE)");
sql.append(" AND WC.ID_WORKFLOW IN(?");
if (idWorkflow.length > 1) {
for (int idx = 1; idx < idWorkflow.length; idx++) {
sql.append(", ?");
}
}
sql.append(')');
return dao.find(WorkflowState.class, sql, (Object[]) idWorkflow);
}
@Override
public List<WorkflowConfig> getWorkflowConfig(final Long... idWorkflow) {
StringBuilder sql = new StringBuilder();
sql.append(" SELECT WC.*");
sql.append(" FROM PROD.WORKFLOW_CONFIG WC");
sql.append(" WHERE WC.ID_WORKFLOW IN(?");
if (idWorkflow.length > 1) {
for (int idx = 1; idx < idWorkflow.length; idx++) {
sql.append(", ?");
}
}
sql.append(')');
return dao.find(WorkflowConfig.class, sql, (Object[]) idWorkflow);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.query.continuous;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableSet;
import java.util.Set;
import java.util.TreeMap;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import javax.cache.event.CacheEntryEvent;
import javax.cache.event.CacheEntryEventFilter;
import javax.cache.event.CacheEntryUpdatedListener;
import javax.cache.event.EventType;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cache.CacheEntryEventSerializableFilter;
import org.apache.ignite.cluster.ClusterNode;
import org.apache.ignite.events.CacheQueryExecutedEvent;
import org.apache.ignite.events.CacheQueryReadEvent;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteDeploymentCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.cluster.ClusterTopologyCheckedException;
import org.apache.ignite.internal.managers.communication.GridIoPolicy;
import org.apache.ignite.internal.managers.deployment.GridDeployment;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfo;
import org.apache.ignite.internal.managers.deployment.GridDeploymentInfoBean;
import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion;
import org.apache.ignite.internal.processors.cache.GridCacheAdapter;
import org.apache.ignite.internal.processors.cache.GridCacheAffinityManager;
import org.apache.ignite.internal.processors.cache.GridCacheContext;
import org.apache.ignite.internal.processors.cache.GridCacheDeploymentManager;
import org.apache.ignite.internal.processors.cache.distributed.dht.atomic.GridDhtAtomicAbstractUpdateFuture;
import org.apache.ignite.internal.processors.cache.query.CacheQueryType;
import org.apache.ignite.internal.processors.cache.query.continuous.CacheContinuousQueryManager.JCacheQueryLocalListener;
import org.apache.ignite.internal.processors.cache.query.continuous.CacheContinuousQueryManager.JCacheQueryRemoteFilter;
import org.apache.ignite.internal.processors.continuous.GridContinuousBatch;
import org.apache.ignite.internal.processors.continuous.GridContinuousHandler;
import org.apache.ignite.internal.processors.continuous.GridContinuousQueryBatch;
import org.apache.ignite.internal.processors.platform.cache.query.PlatformContinuousQueryFilter;
import org.apache.ignite.internal.util.GridConcurrentSkipListSet;
import org.apache.ignite.internal.util.GridLongList;
import org.apache.ignite.internal.util.tostring.GridToStringInclude;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.T2;
import org.apache.ignite.internal.util.typedef.internal.CU;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteAsyncCallback;
import org.apache.ignite.lang.IgniteBiTuple;
import org.apache.ignite.thread.IgniteStripedThreadPoolExecutor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jsr166.ConcurrentLinkedDeque8;
import static org.apache.ignite.events.EventType.EVT_CACHE_QUERY_EXECUTED;
import static org.apache.ignite.events.EventType.EVT_CACHE_QUERY_OBJECT_READ;
/**
* Continuous query handler.
*/
public class CacheContinuousQueryHandler<K, V> implements GridContinuousHandler {
    /** Serial version UID. */
    private static final long serialVersionUID = 0L;
    /** Backup acknowledgement threshold. */
    private static final int BACKUP_ACK_THRESHOLD = 100;
    /** Cache name. */
    private String cacheName;
    /** Topic for ordered messages. */
    private Object topic;
    /** Local listener. */
    private transient CacheEntryUpdatedListener<K, V> locLsnr;
    /** Remote filter. */
    private CacheEntryEventSerializableFilter<K, V> rmtFilter;
    /** Deployable object for filter. */
    private DeployableObject rmtFilterDep;
    /** Internal flag. */
    private boolean internal;
    /** Notify existing flag. */
    private boolean notifyExisting;
    /** Old value required flag. */
    private boolean oldValRequired;
    /** Synchronous flag. */
    private boolean sync;
    /** Ignore expired events flag. */
    private boolean ignoreExpired;
    /** Task name hash code. */
    private int taskHash;
    /** Whether to skip primary check for REPLICATED cache. */
    private transient boolean skipPrimaryCheck;
    /** Backup queue. */
    private transient volatile Collection<CacheContinuousQueryEntry> backupQueue;
    /** Local-cache flag; set via {@link #localCache(boolean)}. */
    private boolean locCache;
    /** Keep-binary flag; see {@link #keepBinary()}. */
    private boolean keepBinary;
    /** Per-partition recovery state, keyed by partition id. */
    private transient ConcurrentMap<Integer, PartitionRecovery> rcvs;
    /** Per-partition entry buffers, keyed by partition id. */
    private transient ConcurrentMap<Integer, EntryBuffer> entryBufs;
    /** Buffer accumulating backup acknowledgements; created in {@code register}. */
    private transient AcknowledgeBuffer ackBuf;
    /** Cache ID derived from {@link #cacheName} in the constructor. */
    private transient int cacheId;
    /** Initial per-partition update counters; set in {@code updateCounters}. */
    private transient volatile Map<Integer, T2<Long, Long>> initUpdCntrs;
    /** Initial per-node, per-partition update counters; set in {@code updateCounters}. */
    private transient volatile Map<UUID, Map<Integer, T2<Long, Long>>> initUpdCntrsPerNode;
    /** Topology version captured when the initial counters were set; see {@code updateCounters}. */
    private transient volatile AffinityTopologyVersion initTopVer;
    /** When set, unmarshal failures are tolerated (asserted internal) instead of logged. */
    private transient boolean ignoreClsNotFound;
    /** Whether callbacks run through the async callback pool; computed in {@code register}. */
    private transient boolean asyncCb;
    /** ID of the node that started the routine; set in {@code register}. */
    private transient UUID nodeId;
    /** Routine ID; set in {@code register}. */
    private transient UUID routineId;
    /** Kernal context; set in {@code register}. */
    private transient GridKernalContext ctx;
    /** Logger; set in {@code register}. */
    private transient IgniteLogger log;
    /**
     * Required by {@link Externalizable}.
     */
    public CacheContinuousQueryHandler() {
        // No-op.
    }
    /**
     * Constructor.
     *
     * @param cacheName Cache name.
     * @param topic Topic for ordered messages. Must not be {@code null}.
     * @param locLsnr Local listener. Must not be {@code null}.
     * @param rmtFilter Remote filter (may be {@code null}).
     * @param oldValRequired Old value required flag.
     * @param sync Synchronous flag.
     * @param ignoreExpired Ignore expired events flag.
     * @param ignoreClsNotFound When {@code true}, entry unmarshal failures are tolerated.
     */
    public CacheContinuousQueryHandler(
        String cacheName,
        Object topic,
        CacheEntryUpdatedListener<K, V> locLsnr,
        CacheEntryEventSerializableFilter<K, V> rmtFilter,
        boolean oldValRequired,
        boolean sync,
        boolean ignoreExpired,
        boolean ignoreClsNotFound) {
        assert topic != null;
        assert locLsnr != null;
        this.cacheName = cacheName;
        this.topic = topic;
        this.locLsnr = locLsnr;
        this.rmtFilter = rmtFilter;
        this.oldValRequired = oldValRequired;
        this.sync = sync;
        this.ignoreExpired = ignoreExpired;
        this.ignoreClsNotFound = ignoreClsNotFound;
        // Cache ID is derived, not serialized.
        cacheId = CU.cacheId(cacheName);
    }
    /**
     * @param internal Internal query flag.
     */
    public void internal(boolean internal) {
        this.internal = internal;
    }
    /**
     * @param notifyExisting Notify existing entries flag.
     */
    public void notifyExisting(boolean notifyExisting) {
        this.notifyExisting = notifyExisting;
    }
    /**
     * @param locCache Local cache flag.
     */
    public void localCache(boolean locCache) {
        this.locCache = locCache;
    }
    /**
     * @param taskHash Task name hash code.
     */
    public void taskNameHash(int taskHash) {
        this.taskHash = taskHash;
    }
    /**
     * @param skipPrimaryCheck Whether to skip primary check for REPLICATED cache.
     */
    public void skipPrimaryCheck(boolean skipPrimaryCheck) {
        this.skipPrimaryCheck = skipPrimaryCheck;
    }
    /** {@inheritDoc} */
    @Override public boolean isEvents() {
        return false;
    }
    /** {@inheritDoc} */
    @Override public boolean isMessaging() {
        return false;
    }
    /** {@inheritDoc} */
    @Override public boolean isQuery() {
        return true;
    }
    /** {@inheritDoc} */
    @Override public boolean keepBinary() {
        return keepBinary;
    }
    /**
     * @param keepBinary Keep binary flag.
     */
    public void keepBinary(boolean keepBinary) {
        this.keepBinary = keepBinary;
    }
    /** {@inheritDoc} */
    @Override public String cacheName() {
        return cacheName;
    }
    /** {@inheritDoc} */
    @Override public void updateCounters(AffinityTopologyVersion topVer, Map<UUID, Map<Integer, T2<Long, Long>>> cntrsPerNode,
        Map<Integer, T2<Long, Long>> cntrs) {
        // Captures the initial update-counter snapshot used to seed PartitionRecovery instances.
        this.initUpdCntrsPerNode = cntrsPerNode;
        this.initUpdCntrs = cntrs;
        this.initTopVer = topVer;
    }
    /** {@inheritDoc} */
    @Override public RegisterStatus register(final UUID nodeId, final UUID routineId, final GridKernalContext ctx)
        throws IgniteCheckedException {
        assert nodeId != null;
        assert routineId != null;
        assert ctx != null;
        // Inject resources into the local listener and read its async-callback flag.
        if (locLsnr != null) {
            if (locLsnr instanceof JCacheQueryLocalListener) {
                ctx.resource().injectGeneric(((JCacheQueryLocalListener)locLsnr).impl);
                asyncCb = ((JCacheQueryLocalListener)locLsnr).async();
            }
            else {
                ctx.resource().injectGeneric(locLsnr);
                asyncCb = U.hasAnnotation(locLsnr, IgniteAsyncCallback.class);
            }
        }
        final CacheEntryEventFilter filter = getEventFilter();
        // Same for the remote filter: inject resources and OR-in its async flag.
        if (filter != null) {
            if (filter instanceof JCacheQueryRemoteFilter) {
                if (((JCacheQueryRemoteFilter)filter).impl != null)
                    ctx.resource().injectGeneric(((JCacheQueryRemoteFilter)filter).impl);
                if (!asyncCb)
                    asyncCb = ((JCacheQueryRemoteFilter)filter).async();
            }
            else {
                ctx.resource().injectGeneric(filter);
                if (!asyncCb)
                    asyncCb = U.hasAnnotation(filter, IgniteAsyncCallback.class);
            }
        }
        // (Re-)initialize all transient per-registration state.
        entryBufs = new ConcurrentHashMap<>();
        backupQueue = new ConcurrentLinkedDeque8<>();
        ackBuf = new AcknowledgeBuffer();
        rcvs = new ConcurrentHashMap<>();
        this.nodeId = nodeId;
        this.routineId = routineId;
        this.ctx = ctx;
        // True when the routine was started by this very node.
        final boolean loc = nodeId.equals(ctx.localNodeId());
        assert !skipPrimaryCheck || loc;
        log = ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY);
        CacheContinuousQueryListener<K, V> lsnr = new CacheContinuousQueryListener<K, V>() {
            @Override public void onExecution() {
                // Record a query-executed event if that event type is enabled.
                if (ctx.event().isRecordable(EVT_CACHE_QUERY_EXECUTED)) {
                    ctx.event().record(new CacheQueryExecutedEvent<>(
                        ctx.discovery().localNode(),
                        "Continuous query executed.",
                        EVT_CACHE_QUERY_EXECUTED,
                        CacheQueryType.CONTINUOUS.name(),
                        cacheName,
                        null,
                        null,
                        null,
                        filter instanceof CacheEntryEventSerializableFilter ?
                            (CacheEntryEventSerializableFilter)filter : null,
                        null,
                        nodeId,
                        taskName()
                    ));
                }
            }
            @Override public boolean keepBinary() {
                return keepBinary;
            }
            @Override public void onEntryUpdated(final CacheContinuousQueryEvent<K, V> evt,
                boolean primary,
                final boolean recordIgniteEvt,
                GridDhtAtomicAbstractUpdateFuture fut) {
                if (ignoreExpired && evt.getEventType() == EventType.EXPIRED)
                    return;
                if (log.isDebugEnabled())
                    log.debug("Entry updated on affinity node [evt=" + evt + ", primary=" + primary + ']');
                final GridCacheContext<K, V> cctx = cacheContext(ctx);
                // Check that cache stopped.
                if (cctx == null)
                    return;
                // skipPrimaryCheck is set only when listen locally for replicated cache events.
                assert !skipPrimaryCheck || (cctx.isReplicated() && ctx.localNodeId().equals(nodeId));
                if (asyncCb) {
                    // Hand off filtering + notification to the striped async callback pool.
                    ContinuousQueryAsyncClosure clsr = new ContinuousQueryAsyncClosure(
                        primary,
                        evt,
                        recordIgniteEvt,
                        fut);
                    ctx.asyncCallbackPool().execute(clsr, evt.partitionId());
                }
                else {
                    final boolean notify = filter(evt, primary);
                    if (log.isDebugEnabled())
                        log.debug("Filter invoked for event [evt=" + evt + ", primary=" + primary
                            + ", notify=" + notify + ']');
                    if (primary || skipPrimaryCheck) {
                        if (fut == null)
                            onEntryUpdate(evt, notify, loc, recordIgniteEvt);
                        else {
                            // Defer notification until the DHT atomic update future completes.
                            fut.addContinuousQueryClosure(new CI1<Boolean>() {
                                @Override public void apply(Boolean suc) {
                                    if (!suc)
                                        evt.entry().markFiltered();
                                    onEntryUpdate(evt, notify, loc, recordIgniteEvt);
                                }
                            }, sync);
                        }
                    }
                }
            }
            @Override public void onUnregister() {
                if (filter instanceof PlatformContinuousQueryFilter)
                    ((PlatformContinuousQueryFilter)filter).onQueryUnregister();
            }
            @Override public void cleanupBackupQueue(Map<Integer, Long> updateCntrs) {
                Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;
                if (backupQueue0 != null) {
                    Iterator<CacheContinuousQueryEntry> it = backupQueue0.iterator();
                    while (it.hasNext()) {
                        CacheContinuousQueryEntry backupEntry = it.next();
                        // Drop entries already acknowledged up to the given counter.
                        Long updateCntr = updateCntrs.get(backupEntry.partition());
                        if (updateCntr != null && backupEntry.updateCounter() <= updateCntr)
                            it.remove();
                    }
                }
            }
            @Override public void flushBackupQueue(GridKernalContext ctx, AffinityTopologyVersion topVer) {
                assert topVer != null;
                Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;
                if (backupQueue0 == null)
                    return;
                try {
                    ClusterNode nodeId0 = ctx.discovery().node(nodeId);
                    if (nodeId0 != null) {
                        GridCacheContext<K, V> cctx = cacheContext(ctx);
                        for (CacheContinuousQueryEntry e : backupQueue0) {
                            if (!e.isFiltered())
                                prepareEntry(cctx, nodeId, e);
                            e.topologyVersion(topVer);
                        }
                        ctx.continuous().addBackupNotification(nodeId, routineId, backupQueue0, topic);
                    }
                    else
                        // Node which start CQ leave topology. Not needed to put data to backup queue.
                        backupQueue = null;
                    backupQueue0.clear();
                }
                catch (IgniteCheckedException e) {
                    U.error(ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY),
                        "Failed to send backup event notification to node: " + nodeId, e);
                }
            }
            @Override public void acknowledgeBackupOnTimeout(GridKernalContext ctx) {
                sendBackupAcknowledge(ackBuf.acknowledgeOnTimeout(), routineId, ctx);
            }
            @Override public void skipUpdateEvent(CacheContinuousQueryEvent<K, V> evt,
                AffinityTopologyVersion topVer, boolean primary) {
                assert evt != null;
                // Mark filtered so the counter sequence stays gap-free for listeners.
                CacheContinuousQueryEntry e = evt.entry();
                e.markFiltered();
                onEntryUpdated(evt, primary, false, null);
            }
            @Override public void onPartitionEvicted(int part) {
                Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;
                if (backupQueue0 != null) {
                    for (Iterator<CacheContinuousQueryEntry> it = backupQueue0.iterator(); it.hasNext(); ) {
                        if (it.next().partition() == part)
                            it.remove();
                    }
                }
            }
            @Override public boolean oldValueRequired() {
                return oldValRequired;
            }
            @Override public boolean notifyExisting() {
                return notifyExisting;
            }
            private String taskName() {
                return ctx.security().enabled() ? ctx.task().resolveTaskName(taskHash) : null;
            }
        };
        CacheContinuousQueryManager mgr = manager(ctx);
        // Cache context unavailable (e.g. not started yet) - delay registration.
        if (mgr == null)
            return RegisterStatus.DELAYED;
        return mgr.registerListener(routineId, lsnr, internal);
    }
    /**
     * Returns the remote filter. Internal code accesses the filter through this
     * method (not the field), so subclasses can substitute it.
     *
     * @return Cache entry event filter.
     */
    public CacheEntryEventFilter getEventFilter() {
        return rmtFilter;
    }
/**
* @param cctx Context.
* @param nodeId ID of the node that started routine.
* @param entry Entry.
* @throws IgniteCheckedException In case of error.
*/
private void prepareEntry(GridCacheContext cctx, UUID nodeId, CacheContinuousQueryEntry entry)
throws IgniteCheckedException {
if (cctx.kernalContext().config().isPeerClassLoadingEnabled() && cctx.discovery().node(nodeId) != null) {
entry.prepareMarshal(cctx);
cctx.deploy().prepare(entry);
}
else
entry.prepareMarshal(cctx);
}
/**
* Wait topology.
*/
public void waitTopologyFuture(GridKernalContext ctx) throws IgniteCheckedException {
GridCacheContext<K, V> cctx = cacheContext(ctx);
if (!cctx.isLocal()) {
cacheContext(ctx).affinity().affinityReadyFuture(initTopVer).get();
for (int partId = 0; partId < cacheContext(ctx).affinity().partitions(); partId++)
getOrCreatePartitionRecovery(ctx, partId);
}
}
    /** {@inheritDoc} */
    @Override public void unregister(UUID routineId, GridKernalContext ctx) {
        assert routineId != null;
        assert ctx != null;
        GridCacheAdapter<K, V> cache = ctx.cache().internalCache(cacheName);
        // Cache may already be stopped - nothing to unregister then.
        if (cache != null)
            cache.context().continuousQueries().unregisterListener(internal, routineId);
    }
    /**
     * Resolves the continuous query manager for this handler's cache.
     *
     * @param ctx Kernal context.
     * @return Continuous query manager, or {@code null} if the cache context is unavailable.
     */
    private CacheContinuousQueryManager manager(GridKernalContext ctx) {
        GridCacheContext<K, V> cacheCtx = cacheContext(ctx);
        return cacheCtx == null ? null : cacheCtx.continuousQueries();
    }
    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public void notifyCallback(final UUID nodeId,
        final UUID routineId,
        Collection<?> objs,
        final GridKernalContext ctx) {
        assert nodeId != null;
        assert routineId != null;
        assert objs != null;
        assert ctx != null;
        if (objs.isEmpty())
            return;
        if (asyncCb) {
            // Split the batch into runs of consecutive entries that map to the same
            // stripe of the async pool, and submit each run to its stripe.
            final List<CacheContinuousQueryEntry> entries = objs instanceof List ? (List)objs : new ArrayList(objs);
            IgniteStripedThreadPoolExecutor asyncPool = ctx.asyncCallbackPool();
            int threadId = asyncPool.threadId(entries.get(0).partition());
            int startIdx = 0;
            if (entries.size() != 1) {
                for (int i = 1; i < entries.size(); i++) {
                    int curThreadId = asyncPool.threadId(entries.get(i).partition());
                    // Consecutive entries on the same stripe are batched to avoid creating sub-lists.
                    if (curThreadId == threadId)
                        continue;
                    final int i0 = i;
                    final int startIdx0 = startIdx;
                    asyncPool.execute(new Runnable() {
                        @Override public void run() {
                            notifyCallback0(nodeId, ctx, entries.subList(startIdx0, i0));
                        }
                    }, threadId);
                    startIdx = i0;
                    threadId = curThreadId;
                }
            }
            // Submit the trailing run (or the whole list if it was a single stripe).
            final int startIdx0 = startIdx;
            asyncPool.execute(new Runnable() {
                @Override public void run() {
                    notifyCallback0(nodeId, ctx,
                        startIdx0 == 0 ? entries : entries.subList(startIdx0, entries.size()));
                }
            }, threadId);
        }
        else
            notifyCallback0(nodeId, ctx, (Collection)objs);
    }
    /**
     * Unmarshals the received entries, runs them through per-partition ordering
     * and invokes the local listener with the resulting events.
     *
     * @param nodeId Node id.
     * @param ctx Kernal context.
     * @param entries Entries.
     */
    private void notifyCallback0(UUID nodeId,
        final GridKernalContext ctx,
        Collection<CacheContinuousQueryEntry> entries) {
        final GridCacheContext cctx = cacheContext(ctx);
        if (cctx == null) {
            IgniteLogger log = ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY);
            if (log.isDebugEnabled())
                log.debug("Failed to notify callback, cache is not found: " + cacheId);
            return;
        }
        final Collection<CacheEntryEvent<? extends K, ? extends V>> entries0 = new ArrayList<>(entries.size());
        for (CacheContinuousQueryEntry e : entries) {
            GridCacheDeploymentManager depMgr = cctx.deploy();
            ClassLoader ldr = depMgr.globalLoader();
            // Register peer class-loading context if the entry carries deployment info.
            if (ctx.config().isPeerClassLoadingEnabled()) {
                GridDeploymentInfo depInfo = e.deployInfo();
                if (depInfo != null) {
                    depMgr.p2pContext(nodeId, depInfo.classLoaderId(), depInfo.userVersion(), depInfo.deployMode(),
                        depInfo.participants(), depInfo.localDeploymentOwner());
                }
            }
            try {
                e.unmarshal(cctx, ldr);
                Collection<CacheEntryEvent<? extends K, ? extends V>> evts = handleEvent(ctx, e);
                if (evts != null && !evts.isEmpty())
                    entries0.addAll(evts);
            }
            catch (IgniteCheckedException ex) {
                // For internal queries unmarshal failures are tolerated; otherwise log.
                if (ignoreClsNotFound)
                    assert internal;
                else
                    U.error(ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY), "Failed to unmarshal entry.", ex);
            }
        }
        if (!entries0.isEmpty())
            locLsnr.onUpdated(entries0);
    }
    /**
     * Converts a wire entry into zero or more events for the local listener,
     * routing regular entries through per-partition ordering.
     *
     * @param ctx Context.
     * @param e Entry.
     * @return Entry collection (empty when the entry was filtered out).
     */
    private Collection<CacheEntryEvent<? extends K, ? extends V>> handleEvent(GridKernalContext ctx,
        CacheContinuousQueryEntry e) {
        assert e != null;
        GridCacheContext<K, V> cctx = cacheContext(ctx);
        final IgniteCache cache = cctx.kernalContext().cache().jcache(cctx.name());
        // Internal queries bypass per-partition ordering entirely.
        if (internal) {
            if (e.isFiltered())
                return Collections.emptyList();
            else
                return F.<CacheEntryEvent<? extends K, ? extends V>>
                    asList(new CacheContinuousQueryEvent<K, V>(cache, cctx, e));
        }
        // Initial query entry or evicted entry. These events should be fired immediately.
        if (e.updateCounter() == -1L) {
            return !e.isFiltered() ? F.<CacheEntryEvent<? extends K, ? extends V>>asList(
                new CacheContinuousQueryEvent<K, V>(cache, cctx, e)) :
                Collections.<CacheEntryEvent<? extends K, ? extends V>>emptyList();
        }
        PartitionRecovery rec = getOrCreatePartitionRecovery(ctx, e.partition());
        return rec.collectEntries(e, cctx, cache);
    }
    /**
     * @param evt Query event.
     * @param primary Primary flag.
     * @return {@code True} if event passed filter, otherwise {@code false}.
     */
    public boolean filter(CacheContinuousQueryEvent evt, boolean primary) {
        CacheContinuousQueryEntry entry = evt.entry();
        boolean notify = !entry.isFiltered();
        try {
            if (notify && getEventFilter() != null)
                notify = getEventFilter().evaluate(evt);
        }
        catch (Exception e) {
            // If the filter throws, the error is logged and 'notify' keeps its
            // pre-filter value, i.e. the event is still delivered.
            U.error(log, "CacheEntryEventFilter failed: " + e);
        }
        if (!notify)
            entry.markFiltered();
        // On backups (non-primary), queue the entry for possible failover delivery.
        if (!primary && !internal && entry.updateCounter() != -1L /* Skip init query and expire entries */) {
            entry.markBackup();
            Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;
            if (backupQueue0 != null)
                backupQueue0.add(entry.forBackupQueue());
        }
        return notify;
    }
    /**
     * Delivers a single update either to the local listener (local routine) or to
     * the remote node that started the routine, then optionally records an
     * {@code EVT_CACHE_QUERY_OBJECT_READ} event.
     *
     * @param evt Continuous query event.
     * @param notify Notify flag.
     * @param loc Listener deployed on this node.
     * @param recordIgniteEvt Record ignite event.
     */
    private void onEntryUpdate(CacheContinuousQueryEvent evt, boolean notify, boolean loc, boolean recordIgniteEvt) {
        try {
            GridCacheContext<K, V> cctx = cacheContext(ctx);
            // Cache stopped - nothing to deliver.
            if (cctx == null)
                return;
            final CacheContinuousQueryEntry entry = evt.entry();
            if (loc) {
                if (!locCache) {
                    Collection<CacheEntryEvent<? extends K, ? extends V>> evts = handleEvent(ctx, entry);
                    if (!evts.isEmpty())
                        locLsnr.onUpdated(evts);
                    if (!internal && !skipPrimaryCheck)
                        sendBackupAcknowledge(ackBuf.onAcknowledged(entry), routineId, ctx);
                }
                else {
                    // LOCAL cache: no ordering/recovery needed, deliver directly.
                    if (!entry.isFiltered())
                        locLsnr.onUpdated(F.<CacheEntryEvent<? extends K, ? extends V>>asList(evt));
                }
            }
            else {
                if (!entry.isFiltered())
                    prepareEntry(cctx, nodeId, entry);
                CacheContinuousQueryEntry e = handleEntry(entry);
                if (e != null) {
                    if (log.isDebugEnabled())
                        log.debug("Send the following event to listener: " + e);
                    // NOTE(review): 'entry' (not the buffered 'e') is what gets sent - confirm intentional.
                    ctx.continuous().addNotification(nodeId, routineId, entry, topic, sync, true);
                }
            }
        }
        catch (ClusterTopologyCheckedException ex) {
            if (log.isDebugEnabled())
                log.debug("Failed to send event notification to node, node left cluster " +
                    "[node=" + nodeId + ", err=" + ex + ']');
        }
        catch (IgniteCheckedException ex) {
            U.error(ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY), "Failed to send event notification to node: " + nodeId, ex);
        }
        if (recordIgniteEvt && notify) {
            ctx.event().record(new CacheQueryReadEvent<>(
                ctx.discovery().localNode(),
                "Continuous query executed.",
                EVT_CACHE_QUERY_OBJECT_READ,
                CacheQueryType.CONTINUOUS.name(),
                cacheName,
                null,
                null,
                null,
                getEventFilter() instanceof CacheEntryEventSerializableFilter ?
                    (CacheEntryEventSerializableFilter)getEventFilter() : null,
                null,
                nodeId,
                taskName(),
                evt.getKey(),
                evt.getValue(),
                evt.getOldValue(),
                null
            ));
        }
    }
/**
* @return Task name.
*/
private String taskName() {
return ctx.security().enabled() ? ctx.task().resolveTaskName(taskHash) : null;
}
    /** {@inheritDoc} */
    @Override public void onClientDisconnected() {
        if (internal)
            return;
        // Drop cached topology so each partition's recovery state re-initializes on reconnect.
        for (PartitionRecovery rec : rcvs.values())
            rec.resetTopologyCache();
    }
/**
* @param ctx Context.
* @param partId Partition id.
* @return Partition recovery.
*/
@NotNull private PartitionRecovery getOrCreatePartitionRecovery(GridKernalContext ctx, int partId) {
PartitionRecovery rec = rcvs.get(partId);
if (rec == null) {
T2<Long, Long> partCntrs = null;
AffinityTopologyVersion initTopVer0 = initTopVer;
if (initTopVer0 != null) {
GridCacheContext<K, V> cctx = cacheContext(ctx);
GridCacheAffinityManager aff = cctx.affinity();
if (initUpdCntrsPerNode != null) {
for (ClusterNode node : aff.nodesByPartition(partId, initTopVer)) {
Map<Integer, T2<Long, Long>> map = initUpdCntrsPerNode.get(node.id());
if (map != null) {
partCntrs = map.get(partId);
break;
}
}
}
else if (initUpdCntrs != null)
partCntrs = initUpdCntrs.get(partId);
}
rec = new PartitionRecovery(ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY), initTopVer0,
partCntrs != null ? partCntrs.get2() : null);
PartitionRecovery oldRec = rcvs.putIfAbsent(partId, rec);
if (oldRec != null)
rec = oldRec;
}
return rec;
}
    /**
     * Decides whether an entry should be forwarded to the remote listener,
     * possibly buffering it per partition.
     *
     * @param e Entry.
     * @return Entry to send, or {@code null} if nothing should be sent.
     */
    private CacheContinuousQueryEntry handleEntry(CacheContinuousQueryEntry e) {
        assert e != null;
        assert entryBufs != null;
        // Internal queries are not buffered: filtered entries are dropped, others pass through.
        if (internal) {
            if (e.isFiltered())
                return null;
            else
                return e;
        }
        // Initial query entry.
        // These events should be fired immediately.
        if (e.updateCounter() == -1)
            return e;
        // Route through the per-partition buffer (created lazily, race-safe via putIfAbsent).
        EntryBuffer buf = entryBufs.get(e.partition());
        if (buf == null) {
            buf = new EntryBuffer();
            EntryBuffer oldRec = entryBufs.putIfAbsent(e.partition(), buf);
            if (oldRec != null)
                buf = oldRec;
        }
        return buf.handle(e);
    }
    /**
     * Per-partition recovery state: buffers events that arrive out of update-counter
     * order and releases them to the listener in counter order.
     */
    private static class PartitionRecovery {
        /** Event which means hole in sequence. */
        private static final CacheContinuousQueryEntry HOLE = new CacheContinuousQueryEntry();
        /** Maximum number of buffered pending events before a forced flush. */
        private final static int MAX_BUFF_SIZE = 100;
        /** Logger. */
        private IgniteLogger log;
        /** Update counter of the last event handed to the listener. Guarded by {@link #pendingEvts}. */
        private long lastFiredEvt;
        /** Topology version of the current event stream. Guarded by {@link #pendingEvts}. */
        private AffinityTopologyVersion curTop = AffinityTopologyVersion.NONE;
        /** Pending events keyed and ordered by update counter; also serves as the lock. */
        private final Map<Long, CacheContinuousQueryEntry> pendingEvts = new TreeMap<>();
        /**
         * @param log Logger.
         * @param topVer Topology version.
         * @param initCntr Initial update counter (seed for {@link #lastFiredEvt}), or {@code null}.
         */
        PartitionRecovery(IgniteLogger log, AffinityTopologyVersion topVer, @Nullable Long initCntr) {
            this.log = log;
            if (initCntr != null) {
                assert topVer.topologyVersion() > 0 : topVer;
                this.lastFiredEvt = initCntr;
                curTop = topVer;
            }
        }
        /**
         * Resets cached topology.
         */
        void resetTopologyCache() {
            curTop = AffinityTopologyVersion.NONE;
        }
        /**
         * Add continuous entry.
         *
         * @param entry Cache continuous query entry.
         * @param cctx Cache context.
         * @param cache Cache.
         * @return Collection of entries which will be fired. This collection should contain only non-filtered events.
         */
        <K, V> Collection<CacheEntryEvent<? extends K, ? extends V>> collectEntries(
            CacheContinuousQueryEntry entry,
            GridCacheContext cctx,
            IgniteCache cache
        ) {
            assert entry != null;
            if (entry.topologyVersion() == null) { // Possible if entry is sent from old node.
                assert entry.updateCounter() == 0L : entry;
                return F.<CacheEntryEvent<? extends K, ? extends V>>
                    asList(new CacheContinuousQueryEvent<K, V>(cache, cctx, entry));
            }
            List<CacheEntryEvent<? extends K, ? extends V>> entries;
            synchronized (pendingEvts) {
                if (log.isDebugEnabled()) {
                    log.debug("Handling event [lastFiredEvt=" + lastFiredEvt +
                        ", curTop=" + curTop +
                        ", entUpdCnt=" + entry.updateCounter() +
                        ", partId=" + entry.partition() +
                        ", pendingEvts=" + pendingEvts + ']');
                }
                // Received first event.
                if (curTop == AffinityTopologyVersion.NONE) {
                    lastFiredEvt = entry.updateCounter();
                    curTop = entry.topologyVersion();
                    if (log.isDebugEnabled()) {
                        log.debug("First event [lastFiredEvt=" + lastFiredEvt +
                            ", curTop=" + curTop +
                            ", entUpdCnt=" + entry.updateCounter() +
                            ", partId=" + entry.partition() + ']');
                    }
                    return !entry.isFiltered() ?
                        F.<CacheEntryEvent<? extends K, ? extends V>>
                            asList(new CacheContinuousQueryEvent<K, V>(cache, cctx, entry)) :
                        Collections.<CacheEntryEvent<? extends K, ? extends V>>emptyList();
                }
                if (curTop.compareTo(entry.topologyVersion()) < 0) {
                    // Topology changed. A counter restarting from 1 on a primary
                    // indicates the partition state was reset (e.g. partition loss).
                    if (entry.updateCounter() == 1L && !entry.isBackup()) {
                        entries = new ArrayList<>(pendingEvts.size());
                        // Flush whatever was pending before switching to the new counter sequence.
                        for (CacheContinuousQueryEntry evt : pendingEvts.values()) {
                            if (evt != HOLE && !evt.isFiltered())
                                entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, evt));
                        }
                        pendingEvts.clear();
                        curTop = entry.topologyVersion();
                        lastFiredEvt = entry.updateCounter();
                        if (!entry.isFiltered())
                            entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, entry));
                        if (log.isDebugEnabled())
                            log.debug("Partition was lost [lastFiredEvt=" + lastFiredEvt +
                                ", curTop=" + curTop +
                                ", entUpdCnt=" + entry.updateCounter() +
                                ", partId=" + entry.partition() +
                                ", pendingEvts=" + pendingEvts + ']');
                        return entries;
                    }
                    curTop = entry.topologyVersion();
                }
                // Check duplicate.
                if (entry.updateCounter() > lastFiredEvt) {
                    pendingEvts.put(entry.updateCounter(), entry);
                    // Put filtered events.
                    if (entry.filteredEvents() != null) {
                        for (long cnrt : entry.filteredEvents()) {
                            if (cnrt > lastFiredEvt)
                                pendingEvts.put(cnrt, HOLE);
                        }
                    }
                }
                else {
                    if (log.isDebugEnabled())
                        log.debug("Skip duplicate continuous query message: " + entry);
                    return Collections.emptyList();
                }
                if (pendingEvts.isEmpty()) {
                    if (log.isDebugEnabled()) {
                        log.debug("Nothing sent to listener [lastFiredEvt=" + lastFiredEvt +
                            ", curTop=" + curTop +
                            ", entUpdCnt=" + entry.updateCounter() +
                            ", partId=" + entry.partition() + ']');
                    }
                    return Collections.emptyList();
                }
                Iterator<Map.Entry<Long, CacheContinuousQueryEntry>> iter = pendingEvts.entrySet().iterator();
                entries = new ArrayList<>();
                if (pendingEvts.size() >= MAX_BUFF_SIZE) {
                    // Buffer overflow: force-flush 90% of the buffer even though the
                    // sequence may contain gaps.
                    for (int i = 0; i < MAX_BUFF_SIZE - (MAX_BUFF_SIZE / 10); i++) {
                        Map.Entry<Long, CacheContinuousQueryEntry> e = iter.next();
                        if (e.getValue() != HOLE && !e.getValue().isFiltered())
                            entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, e.getValue()));
                        lastFiredEvt = e.getKey();
                        iter.remove();
                    }
                    if (log.isDebugEnabled()) {
                        log.debug("Pending events reached max of buffer size [lastFiredEvt=" + lastFiredEvt +
                            ", curTop=" + curTop +
                            ", entUpdCnt=" + entry.updateCounter() +
                            ", partId=" + entry.partition() +
                            ", pendingEvts=" + pendingEvts + ']');
                    }
                }
                else {
                    // Fire events while the counters stay contiguous; stop at the first gap.
                    while (iter.hasNext()) {
                        Map.Entry<Long, CacheContinuousQueryEntry> e = iter.next();
                        if (e.getKey() == lastFiredEvt + 1) {
                            ++lastFiredEvt;
                            if (e.getValue() != HOLE && !e.getValue().isFiltered())
                                entries.add(new CacheContinuousQueryEvent<K, V>(cache, cctx, e.getValue()));
                            iter.remove();
                        }
                        else
                            break;
                    }
                }
            }
            if (log.isDebugEnabled()) {
                log.debug("Will send to listener the following events [entries=" + entries +
                    ", lastFiredEvt=" + lastFiredEvt +
                    ", curTop=" + curTop +
                    ", entUpdCnt=" + entry.updateCounter() +
                    ", partId=" + entry.partition() +
                    ", pendingEvts=" + pendingEvts + ']');
            }
            return entries;
        }
    }
/**
*
*/
/**
 * Per-partition buffer that restores update-counter order for continuous query
 * entries. Filtered (not delivered) entries are remembered as "hole" counters in
 * {@link #buf} and attached to the next entry that is actually fired, so the
 * listener side can account for every counter value.
 */
private static class EntryBuffer {
    /** Maximum span of buffered hole counters before the buffer is force-drained. */
    private final static int MAX_BUFF_SIZE = 100;

    /** Counters of filtered events awaiting attachment to a fired entry. */
    private final GridConcurrentSkipListSet<Long> buf = new GridConcurrentSkipListSet<>();

    /** Highest update counter that has been fired so far. */
    private AtomicLong lastFiredCntr = new AtomicLong();

    /**
     * Atomically advances {@link #lastFiredCntr} to {@code newVal} if it is greater.
     *
     * @param newVal New value.
     * @return Old value if previous value less than new value otherwise {@code -1}.
     */
    private long updateFiredCounter(long newVal) {
        long prevVal = lastFiredCntr.get();

        while (prevVal < newVal) {
            // CAS loop: another thread may advance the counter concurrently.
            if (lastFiredCntr.compareAndSet(prevVal, newVal))
                return prevVal;
            else
                prevVal = lastFiredCntr.get();
        }

        // Counter was already at or beyond newVal - report "no advance".
        return prevVal >= newVal ? -1 : prevVal;
    }

    /**
     * Add continuous entry.
     *
     * @param e Cache continuous query entry.
     * @return Entry to fire (possibly carrying accumulated filtered-event counters),
     *     or {@code null} if the filtered entry was merely buffered.
     */
    public CacheContinuousQueryEntry handle(CacheContinuousQueryEntry e) {
        assert e != null;

        if (e.isFiltered()) {
            Long last = buf.lastx();
            Long first = buf.firstx();

            // Buffer span exceeded the limit: drain all buffered holes into this
            // entry and emit it, to bound the buffer size.
            if (last != null && first != null && last - first >= MAX_BUFF_SIZE) {
                NavigableSet<Long> prevHoles = buf.subSet(first, true, last, true);

                GridLongList filteredEvts = new GridLongList((int)(last - first));

                int size = 0;

                Long cntr;

                while ((cntr = prevHoles.pollFirst()) != null) {
                    filteredEvts.add(cntr);

                    ++size;
                }

                filteredEvts.truncate(size, true);

                e.filteredEvents(filteredEvts);

                return e;
            }

            // Entry is already behind the fired counter (or is the very first
            // update) - pass it through without buffering.
            if (lastFiredCntr.get() > e.updateCounter() || e.updateCounter() == 1)
                return e;
            else {
                buf.add(e.updateCounter());

                // Double check. If another thread sent a event with counter higher than this event.
                if (lastFiredCntr.get() > e.updateCounter() && buf.contains(e.updateCounter())) {
                    buf.remove(e.updateCounter());

                    return e;
                }
                else
                    return null;
            }
        }
        else {
            long prevVal = updateFiredCounter(e.updateCounter());

            if (prevVal == -1)
                return e;
            else {
                // Attach all buffered holes between the previous fired counter
                // and this entry's counter to the entry being fired.
                NavigableSet<Long> prevHoles = buf.subSet(prevVal, true, e.updateCounter(), true);

                GridLongList filteredEvts = new GridLongList((int)(e.updateCounter() - prevVal));

                int size = 0;

                Long cntr;

                while ((cntr = prevHoles.pollFirst()) != null) {
                    filteredEvts.add(cntr);

                    ++size;
                }

                filteredEvts.truncate(size, true);

                e.filteredEvents(filteredEvts);

                return e;
            }
        }
    }
}
/** {@inheritDoc} */
@Override public void onNodeLeft() {
Collection<CacheContinuousQueryEntry> backupQueue0 = backupQueue;
if (backupQueue0 != null)
backupQueue = null;
}
/** {@inheritDoc} */
@Override public void p2pMarshal(GridKernalContext ctx) throws IgniteCheckedException {
assert ctx != null;
assert ctx.config().isPeerClassLoadingEnabled();
if (rmtFilter != null && !U.isGrid(rmtFilter.getClass()))
rmtFilterDep = new DeployableObject(rmtFilter, ctx);
}
/** {@inheritDoc} */
@Override public void p2pUnmarshal(UUID nodeId, GridKernalContext ctx) throws IgniteCheckedException {
assert nodeId != null;
assert ctx != null;
assert ctx.config().isPeerClassLoadingEnabled();
if (rmtFilterDep != null)
rmtFilter = rmtFilterDep.unmarshal(nodeId, ctx);
}
/** {@inheritDoc} */
@Override public GridContinuousBatch createBatch() {
return new GridContinuousQueryBatch();
}
/** {@inheritDoc} */
@Override public void onBatchAcknowledged(final UUID routineId,
GridContinuousBatch batch,
final GridKernalContext ctx) {
sendBackupAcknowledge(ackBuf.onAcknowledged(batch), routineId, ctx);
}
/**
* @param t Acknowledge information.
* @param routineId Routine ID.
* @param ctx Context.
*/
private void sendBackupAcknowledge(final IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>> t,
final UUID routineId,
final GridKernalContext ctx) {
if (t != null) {
ctx.closure().runLocalSafe(new Runnable() {
@Override public void run() {
GridCacheContext<K, V> cctx = cacheContext(ctx);
CacheContinuousQueryBatchAck msg = new CacheContinuousQueryBatchAck(cctx.cacheId(),
routineId,
t.get1());
for (AffinityTopologyVersion topVer : t.get2()) {
for (ClusterNode node : ctx.discovery().cacheAffinityNodes(cctx.name(), topVer)) {
if (!node.isLocal()) {
try {
cctx.io().send(node, msg, GridIoPolicy.SYSTEM_POOL);
}
catch (ClusterTopologyCheckedException ignored) {
IgniteLogger log = ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY);
if (log.isDebugEnabled())
log.debug("Failed to send acknowledge message, node left " +
"[msg=" + msg + ", node=" + node + ']');
}
catch (IgniteCheckedException e) {
IgniteLogger log = ctx.log(CU.CONTINUOUS_QRY_LOG_CATEGORY);
U.error(log, "Failed to send acknowledge message " +
"[msg=" + msg + ", node=" + node + ']', e);
}
}
}
}
}
});
}
}
/** {@inheritDoc} */
@Nullable @Override public Object orderedTopic() {
return topic;
}
/** {@inheritDoc} */
@Override public GridContinuousHandler clone() {
try {
return (GridContinuousHandler)super.clone();
}
catch (CloneNotSupportedException e) {
throw new IllegalStateException(e);
}
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(CacheContinuousQueryHandler.class, this);
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
U.writeString(out, cacheName);
out.writeObject(topic);
boolean b = rmtFilterDep != null;
out.writeBoolean(b);
if (b)
out.writeObject(rmtFilterDep);
else
out.writeObject(rmtFilter);
out.writeBoolean(internal);
out.writeBoolean(notifyExisting);
out.writeBoolean(oldValRequired);
out.writeBoolean(sync);
out.writeBoolean(ignoreExpired);
out.writeInt(taskHash);
out.writeBoolean(keepBinary);
}
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
cacheName = U.readString(in);
topic = in.readObject();
boolean b = in.readBoolean();
if (b)
rmtFilterDep = (DeployableObject)in.readObject();
else
rmtFilter = (CacheEntryEventSerializableFilter<K, V>)in.readObject();
internal = in.readBoolean();
notifyExisting = in.readBoolean();
oldValRequired = in.readBoolean();
sync = in.readBoolean();
ignoreExpired = in.readBoolean();
taskHash = in.readInt();
keepBinary = in.readBoolean();
cacheId = CU.cacheId(cacheName);
}
/**
* @param ctx Kernal context.
* @return Cache context.
*/
private GridCacheContext<K, V> cacheContext(GridKernalContext ctx) {
assert ctx != null;
return ctx.cache().<K, V>context().cacheContext(cacheId);
}
/** */
private static class AcknowledgeBuffer {
/** */
private int size;
/** */
@GridToStringInclude
private Map<Integer, Long> updateCntrs = new HashMap<>();
/** */
@GridToStringInclude
private Set<AffinityTopologyVersion> topVers = U.newHashSet(1);
/**
* @param batch Batch.
* @return Non-null tuple if acknowledge should be sent to backups.
*/
@SuppressWarnings("unchecked")
@Nullable synchronized IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>>
onAcknowledged(GridContinuousBatch batch) {
assert batch instanceof GridContinuousQueryBatch;
size += ((GridContinuousQueryBatch)batch).entriesCount();
Collection<CacheContinuousQueryEntry> entries = (Collection)batch.collect();
for (CacheContinuousQueryEntry e : entries)
addEntry(e);
return size >= BACKUP_ACK_THRESHOLD ? acknowledgeData() : null;
}
/**
* @param e Entry.
* @return Non-null tuple if acknowledge should be sent to backups.
*/
@Nullable synchronized IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>>
onAcknowledged(CacheContinuousQueryEntry e) {
size++;
addEntry(e);
return size >= BACKUP_ACK_THRESHOLD ? acknowledgeData() : null;
}
/**
* @param e Entry.
*/
private void addEntry(CacheContinuousQueryEntry e) {
topVers.add(e.topologyVersion());
Long cntr0 = updateCntrs.get(e.partition());
if (cntr0 == null || e.updateCounter() > cntr0)
updateCntrs.put(e.partition(), e.updateCounter());
}
/**
* @return Non-null tuple if acknowledge should be sent to backups.
*/
@Nullable synchronized IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>>
acknowledgeOnTimeout() {
return size > 0 ? acknowledgeData() : null;
}
/**
* @return Tuple with acknowledge information.
*/
private IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>> acknowledgeData() {
assert size > 0;
Map<Integer, Long> cntrs = new HashMap<>(updateCntrs);
IgniteBiTuple<Map<Integer, Long>, Set<AffinityTopologyVersion>> res =
new IgniteBiTuple<>(cntrs, topVers);
topVers = U.newHashSet(1);
size = 0;
return res;
}
/** {@inheritDoc} */
@Override public String toString() {
return S.toString(AcknowledgeBuffer.class, this);
}
}
/**
*
*/
private class ContinuousQueryAsyncClosure implements Runnable {
/** */
private final CacheContinuousQueryEvent<K, V> evt;
/** */
private final boolean primary;
/** */
private final boolean recordIgniteEvt;
/** */
private final IgniteInternalFuture<?> fut;
/**
* @param primary Primary flag.
* @param evt Event.
* @param recordIgniteEvt Fired event.
* @param fut Dht future.
*/
ContinuousQueryAsyncClosure(
boolean primary,
CacheContinuousQueryEvent<K, V> evt,
boolean recordIgniteEvt,
IgniteInternalFuture<?> fut) {
this.primary = primary;
this.evt = evt;
this.recordIgniteEvt = recordIgniteEvt;
this.fut = fut;
}
/** {@inheritDoc} */
@Override public void run() {
final boolean notify = filter(evt, primary);
if (!primary())
return;
if (fut == null) {
onEntryUpdate(evt, notify, nodeId.equals(ctx.localNodeId()), recordIgniteEvt);
return;
}
if (fut.isDone()) {
if (fut.error() != null)
evt.entry().markFiltered();
onEntryUpdate(evt, notify, nodeId.equals(ctx.localNodeId()), recordIgniteEvt);
}
else {
fut.listen(new CI1<IgniteInternalFuture<?>>() {
@Override public void apply(IgniteInternalFuture<?> f) {
if (f.error() != null)
evt.entry().markFiltered();
ctx.asyncCallbackPool().execute(new Runnable() {
@Override public void run() {
onEntryUpdate(evt, notify, nodeId.equals(ctx.localNodeId()), recordIgniteEvt);
}
}, evt.entry().partition());
}
});
}
}
/**
* @return {@code True} if event fired on this node.
*/
private boolean primary() {
return primary || skipPrimaryCheck;
}
/** {@inheritDoc} */
public String toString() {
return S.toString(ContinuousQueryAsyncClosure.class, this);
}
}
/**
* Deployable object.
*/
protected static class DeployableObject implements Externalizable {
/** */
private static final long serialVersionUID = 0L;
/** Serialized object. */
private byte[] bytes;
/** Deployment class name. */
private String clsName;
/** Deployment info. */
private GridDeploymentInfo depInfo;
/**
* Required by {@link Externalizable}.
*/
public DeployableObject() {
// No-op.
}
/**
* @param obj Object.
* @param ctx Kernal context.
* @throws IgniteCheckedException In case of error.
*/
protected DeployableObject(Object obj, GridKernalContext ctx) throws IgniteCheckedException {
assert obj != null;
assert ctx != null;
Class cls = U.detectClass(obj);
clsName = cls.getName();
GridDeployment dep = ctx.deploy().deploy(cls, U.detectClassLoader(cls));
if (dep == null)
throw new IgniteDeploymentCheckedException("Failed to deploy object: " + obj);
depInfo = new GridDeploymentInfoBean(dep);
bytes = U.marshal(ctx, obj);
}
/**
* @param nodeId Node ID.
* @param ctx Kernal context.
* @return Deserialized object.
* @throws IgniteCheckedException In case of error.
*/
<T> T unmarshal(UUID nodeId, GridKernalContext ctx) throws IgniteCheckedException {
assert ctx != null;
GridDeployment dep = ctx.deploy().getGlobalDeployment(depInfo.deployMode(), clsName, clsName,
depInfo.userVersion(), nodeId, depInfo.classLoaderId(), depInfo.participants(), null);
if (dep == null)
throw new IgniteDeploymentCheckedException("Failed to obtain deployment for class: " + clsName);
return U.unmarshal(ctx, bytes, U.resolveClassLoader(dep.classLoader(), ctx.config()));
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
U.writeByteArray(out, bytes);
U.writeString(out, clsName);
out.writeObject(depInfo);
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
bytes = U.readByteArray(in);
clsName = U.readString(in);
depInfo = (GridDeploymentInfo)in.readObject();
}
}
}
| |
package com.planet_ink.coffee_mud.Behaviors;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * Behavior that makes a MOB wander around on its own. Supports a "leash"
 * radius from the start room, locale (room domain) restrictions, optional
 * door opening, and optional area wandering. Movement is delegated to
 * the tracking library on each eligible tick.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class Mobile extends ActiveTicker implements MobileBehavior
{
	@Override public String ID(){return "Mobile";}
	@Override protected int canImproveCode(){return Behavior.CAN_MOBS;}
	@Override public long flags(){return Behavior.FLAG_MOBILITY;}

	/** Whether the mob may leave its home area. */
	protected boolean wander=false;
	/** Whether the mob may open closed doors while moving. */
	protected boolean dooropen=false;
	/** True until the one-time emergency (breathability) move check has run. */
	protected boolean firstRun=false;
	/** Maximum number of rooms the mob may stray from its start room; 0 = unlimited. */
	protected int leash=0;
	/** Room -> distance-from-start map used to enforce the leash. */
	protected Map<Room,Integer> leashHash=null;
	/** Room domain types the mob may not enter, or null for no restriction. */
	protected List<Integer> restrictedLocales=null;
	/** Alternate tick-status holder filled in by the tracking library. */
	protected int[] altStatusTaker=null;
	protected int tickStatus=Tickable.STATUS_NOT;
	/** Remaining number of ticks for which mobility is suspended. */
	protected int ticksSuspended=0;

	@Override
	public String accountForYourself()
	{
		return "wandering";
	}

	@Override
	public int getTickStatus()
	{
		// Prefer the status reported by the tracking library, when active.
		final int[] o=altStatusTaker;
		if((o!=null)&&(o[0]!=Tickable.STATUS_NOT))
			return o[0];
		return tickStatus;
	}

	public Mobile()
	{
		super();
		minTicks=20; maxTicks=60; chance=100;
		leash=0;
		wander=false;
		dooropen=false;
		restrictedLocales=null;
		tickReset();
	}

	/**
	 * Returns true if the mob is permitted to move from currentRoom into
	 * newRoom, honoring the leash distance, breathability, and any locale
	 * restrictions configured in the parms.
	 *
	 * @param mob the wandering mob
	 * @param currentRoom the room the mob is in now
	 * @param newRoom the candidate destination room
	 * @return true if the move is allowed
	 */
	public boolean okRoomForMe(MOB mob, Room currentRoom, Room newRoom)
	{
		if(newRoom==null) return false;
		if(leash>0)
		{
			if(currentRoom==null) return false;
			if(leashHash==null) leashHash=new Hashtable<Room,Integer>();
			Integer DISTNOW=leashHash.get(currentRoom);
			Integer DISTLATER=leashHash.get(newRoom);
			if(DISTNOW==null)
			{
				// First sighting of the current room: treat it as the leash origin.
				DISTNOW=Integer.valueOf(0);
				leashHash.put(currentRoom,DISTNOW);
			}
			if(DISTLATER==null)
			{
				DISTLATER=Integer.valueOf(DISTNOW.intValue()+1);
				leashHash.put(newRoom,DISTLATER);
			}
			if(DISTLATER.intValue()>(DISTNOW.intValue()+1))
			{
				// Found a shorter path to newRoom; record the smaller distance.
				DISTLATER=Integer.valueOf(DISTNOW.intValue()+1);
				leashHash.put(newRoom,DISTLATER);
			}
			if(DISTLATER.intValue()>leash)
				return false;
		}
		// Respect the mob's breathable-atmosphere list, if it has one.
		if((mob.charStats().getBreathables().length>0) && (Arrays.binarySearch(mob.charStats().getBreathables(), newRoom.getAtmosphere())<0))
			return false;
		if(restrictedLocales==null) return true;
		return !restrictedLocales.contains(Integer.valueOf(newRoom.domainType()));
	}

	@Override
	public void setParms(String newParms)
	{
		super.setParms(newParms);
		wander=false;
		dooropen=false;
		leash=0;
		firstRun=true;
		leashHash=null;
		restrictedLocales=null;
		leash=CMParms.getParmInt(newParms,"LEASH",0);
		final Vector<String> V=CMParms.parse(newParms);
		for(int v=0;v<V.size();v++)
		{
			String s=V.elementAt(v);
			if(s.equalsIgnoreCase("WANDER"))
				wander=true;
			else
			if(s.equalsIgnoreCase("OPENDOORS"))
				dooropen=true;
			else
			if((s.startsWith("+")||(s.startsWith("-")))&&(s.length()>1))
			{
				// +/- entries add or remove locale (room domain) restrictions.
				if(restrictedLocales==null)
					restrictedLocales=new Vector<Integer>();
				if(s.equalsIgnoreCase("+ALL"))
					restrictedLocales.clear();
				else
				if(s.equalsIgnoreCase("-ALL"))
				{
					// Restrict everything: every indoor and outdoor domain code.
					restrictedLocales.clear();
					for(int i=0;i<Room.indoorDomainDescs.length;i++)
						restrictedLocales.add(Integer.valueOf(Room.INDOORS+i));
					for(int i=0;i<Room.outdoorDomainDescs.length;i++)
						restrictedLocales.add(Integer.valueOf(i));
				}
				else
				{
					final char c=s.charAt(0);
					s=s.substring(1).toUpperCase().trim();
					int code=-1;
					for(int i=0;i<Room.indoorDomainDescs.length;i++)
						if(Room.indoorDomainDescs[i].startsWith(s))
							code=Room.INDOORS+i;
					if(code>=0)
					{
						if((c=='+')&&(restrictedLocales.contains(Integer.valueOf(code))))
							restrictedLocales.remove(Integer.valueOf(code));
						else
						if((c=='-')&&(!restrictedLocales.contains(Integer.valueOf(code))))
							restrictedLocales.add(Integer.valueOf(code));
					}
					code=-1;
					for(int i=0;i<Room.outdoorDomainDescs.length;i++)
						if(Room.outdoorDomainDescs[i].startsWith(s))
							code=i;
					if(code>=0)
					{
						if((c=='+')&&(restrictedLocales.contains(Integer.valueOf(code))))
							restrictedLocales.remove(Integer.valueOf(code));
						else
						if((c=='-')&&(!restrictedLocales.contains(Integer.valueOf(code))))
							restrictedLocales.add(Integer.valueOf(code));
					}
				}
			}
		}
		if((restrictedLocales!=null)&&(restrictedLocales.size()==0))
			restrictedLocales=null;
	}

	/**
	 * If the mob cannot breathe in the given room (the fish exception),
	 * attempts to walk it into an adjacent permissible room.
	 *
	 * @param mob the mob to move
	 * @param room the room the mob is suffocating in
	 * @return true if the mob was moved out of the room
	 */
	public boolean emergencyMove(MOB mob, Room room)
	{
		if(!CMLib.flags().canBreatheHere(mob, room)) // the fish exception
		{
			int dir=-1;
			for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--)
			{
				final Room R=room.getRoomInDir(d);
				if((R!=null)&&(okRoomForMe(mob,room,R)))
				{
					dir=d;
					CMLib.tracking().walk(mob, dir, true, true);
					if(mob.location()!=room)
					{
						return true;
					}
				}
			}
		}
		return false;
	}

	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		tickStatus=Tickable.STATUS_MISC2+0;
		super.tick(ticking,tickID);
		if(ticksSuspended>0)
		{
			ticksSuspended--;
			return true;
		}
		if((ticking instanceof MOB)
		&&(!((MOB)ticking).isInCombat())
		&&(!CMSecurity.isDisabled(CMSecurity.DisFlag.MOBILITY)))
		{
			final MOB mob=(MOB)ticking;
			final Room room=mob.location();
			if(firstRun)
			{
				// BUGFIX: was "firstRun=true", which left the flag stuck and
				// re-ran the emergency move on every single tick.
				firstRun=false;
				emergencyMove(mob,room);
			}
			if(canAct(ticking,tickID))
			{
				Vector<Room> objections=null;
				if(room==null) return true;
				if((room.getArea()!=null)
				&&(room.getArea().getAreaState()!=Area.State.ACTIVE))
					return true;
				// Busy mobs almost never move (1% chance).
				if((!CMLib.flags().canWorkOnSomething(mob)) && (CMLib.dice().roll(1,100,0)>1))
				{
					tickDown=0;
					return true;
				}
				// Collect adjacent rooms the mob must not enter.
				for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--)
				{
					final Room R=room.getRoomInDir(d);
					if((R!=null)&&(!okRoomForMe(mob,room,R)))
					{
						if(objections==null) objections=new Vector<Room>();
						objections.addElement(R);
					}
				}
				tickStatus=Tickable.STATUS_MISC2+16;
				altStatusTaker=new int[1];
				CMLib.tracking().beMobile((MOB)ticking,dooropen,wander,false,objections!=null,altStatusTaker,objections);
				// If we didn't actually move, try again on the next tick.
				if(mob.location()==room)
					tickDown=0;
			}
		}
		tickStatus=Tickable.STATUS_NOT;
		return true;
	}

	@Override public void suspendMobility(int numTicks) { ticksSuspended=numTicks;}
}
| |
/*
* Copyright (C) 2019 Authlete, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the
* License.
*/
package com.authlete.jaxrs.server.api.backchannel;
import static com.authlete.jaxrs.server.util.ExceptionUtil.internalServerErrorException;
import java.net.URI;
import java.util.Date;
import javax.net.ssl.SSLContext;
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.Status;
import org.glassfish.jersey.client.ClientProperties;
import com.authlete.common.dto.BackchannelAuthenticationCompleteRequest.Result;
import com.authlete.common.dto.BackchannelAuthenticationCompleteResponse;
import com.authlete.common.types.User;
import com.authlete.jaxrs.spi.BackchannelAuthenticationCompleteRequestHandlerSpiAdapter;
/**
* Implementation of {@link com.authlete.jaxrs.spi.BackchannelAuthenticationCompleteRequestHandlerSpi
* BackchannelAuthenticationCompleteRequestHandlerSpi} interface which needs to
* be given to the constructor of {@link com.authlete.jaxrs.BackchannelAuthenticationCompleteRequestHandler
* BackchannelAuthenticationCompleteRequestHandler}.
*
* @author Hideki Ikeda
*/
public class BackchannelAuthenticationCompleteHandlerSpiImpl extends BackchannelAuthenticationCompleteRequestHandlerSpiAdapter
{
    /**
     * The result of end-user authentication and authorization.
     */
    private final Result mResult;


    /**
     * The authenticated user.
     */
    private final User mUser;


    /**
     * The time when the user was authenticated in seconds since Unix epoch.
     */
    private long mUserAuthenticatedAt;


    /**
     * Requested ACRs.
     */
    private String[] mAcrs;


    /**
     * The description of the error.
     */
    private String mErrorDescription;


    /**
     * The URI of a document which describes the error in detail.
     */
    private URI mErrorUri;


    public BackchannelAuthenticationCompleteHandlerSpiImpl(
            Result result, User user, Date userAuthenticatedAt, String[] acrs,
            String errorDescription, URI errorUri)
    {
        // The result of end-user authentication and authorization.
        mResult = result;

        // The end-user.
        mUser = user;

        if (result != Result.AUTHORIZED)
        {
            // The description of the error.
            mErrorDescription = errorDescription;

            // The URI of a document which describes the error in detail.
            mErrorUri = errorUri;

            // The end-user has not authorized the client.
            return;
        }

        // The time at which end-user has been authenticated.
        mUserAuthenticatedAt = (userAuthenticatedAt == null) ? 0 : userAuthenticatedAt.getTime() / 1000L;

        // The requested ACRs.
        mAcrs = acrs;
    }


    @Override
    public Result getResult()
    {
        return mResult;
    }


    @Override
    public String getUserSubject()
    {
        return mUser.getSubject();
    }


    @Override
    public long getUserAuthenticatedAt()
    {
        return mUserAuthenticatedAt;
    }


    @Override
    public String getAcr()
    {
        // Note that this is a dummy implementation. Regardless of whatever
        // the actual authentication was, this implementation returns the
        // first element of the requested ACRs if it is available.
        //
        // Of course, this implementation is not suitable for commercial use.

        if (mAcrs == null || mAcrs.length == 0)
        {
            return null;
        }

        // The first element of the requested ACRs.
        String acr = mAcrs[0];

        if (acr == null || acr.length() == 0)
        {
            return null;
        }

        // Return the first element of the requested ACRs. Again,
        // this implementation is not suitable for commercial use.
        return acr;
    }


    @Override
    public Object getUserClaim(String claimName)
    {
        return mUser.getClaim(claimName, null);
    }


    @Override
    public void sendNotification(BackchannelAuthenticationCompleteResponse info)
    {
        // The URL of the consumption device's notification endpoint.
        URI clientNotificationEndpointUri = info.getClientNotificationEndpoint();

        // The token that is needed for client authentication at the consumption
        // device's notification endpoint.
        String notificationToken = info.getClientNotificationToken();

        // The notification content (JSON) to send to the consumption device.
        String notificationContent = info.getResponseContent();

        // Send the notification to the consumption device's notification endpoint.
        Response response =
                doSendNotification(clientNotificationEndpointUri, notificationToken, notificationContent);

        try
        {
            // The status of the response from the consumption device.
            // BUGFIX: fromStatusCode() returns null for non-standard status
            // codes, which previously caused an NPE at status.getFamily().
            Status status = Status.fromStatusCode(response.getStatusInfo().getStatusCode());

            // TODO: CIBA specification does not specify how to deal with responses
            // returned from the consumption device in case of error push notification.
            // Then, even in case of error push notification, the current implementation
            // treats the responses as in the case of successful push notification.

            if (status == null)
            {
                // A non-standard HTTP status code; treated like any other
                // unexpected response (ignored).
                return;
            }

            // Check if the "HTTP 200 OK" or "HTTP 204 No Content".
            if (status == Status.OK || status == Status.NO_CONTENT)
            {
                // In this case, the request was successfully processed by the consumption
                // device since the specification says as follows.
                //
                //   CIBA Core spec, 10.2. Ping Callback and 10.3. Push Callback
                //     For valid requests, the Client Notification Endpoint SHOULD
                //     respond with an HTTP 204 No Content. The OP SHOULD also accept
                //     HTTP 200 OK and any body in the response SHOULD be ignored.
                //
                return;
            }

            if (status.getFamily() == Status.Family.REDIRECTION)
            {
                // HTTP 3xx code. This case must be ignored since the specification
                // says as follows.
                //
                //   CIBA Core spec, 10.2. Ping Callback, 10.3. Push Callback
                //     The Client MUST NOT return an HTTP 3xx code. The OP MUST
                //     NOT follow redirects.
                //
                return;
            }
        }
        finally
        {
            // BUGFIX: release the connection held by the JAX-RS response
            // (previously never closed).
            response.close();
        }
    }


    private Response doSendNotification(URI clientNotificationEndpointUri,
            String notificationToken, String notificationContent)
    {
        // A web client to send a notification to the consumption device's notification
        // endpoint.
        Client webClient = createClient();

        try
        {
            // Send the notification to the consumption device.
            return webClient.target(clientNotificationEndpointUri).request()
                    // CIBA Core says "The OP MUST NOT follow redirects."
                    .property(ClientProperties.FOLLOW_REDIRECTS, Boolean.FALSE)
                    .header(HttpHeaders.AUTHORIZATION, "Bearer " + notificationToken)
                    .post(Entity.json(notificationContent));
        }
        catch (Throwable t)
        {
            // Failed to send the notification to the consumption device.
            throw internalServerErrorException(
                    t.getMessage() + ": Failed to send the notification to the consumption device");
        }
        finally
        {
            // Close the web client.
            webClient.close();
        }
    }


    @Override
    public String getErrorDescription()
    {
        return mErrorDescription;
    }


    @Override
    public URI getErrorUri()
    {
        return mErrorUri;
    }


    private Client createClient()
    {
        // SSLContext's for older TLS versions ("TLSv1" and "TLSv1.1") may not
        // include any FAPI cipher suites. Here we create an SSLContext with
        // "TLSv1.2" whose getDefaultSSLParameters().getCipherSuites() probably
        // includes FAPI cipher suites.
        SSLContext sc = createSslContext("TLSv1.2");

        return ClientBuilder.newBuilder().sslContext(sc).build();
    }


    private SSLContext createSslContext(String protocol)
    {
        try
        {
            // Get an SSL context for the protocol.
            SSLContext sc = SSLContext.getInstance(protocol);

            // Initialize the SSL context.
            sc.init(null, null, null);

            return sc;
        }
        catch (Exception e)
        {
            throw internalServerErrorException(
                    "Failed to get an SSLContext for " + protocol + ": " + e.getMessage());
        }
    }
}
| |
package it.unibz.krdb.obda.reformulation.tests;
/*
* #%L
* ontop-quest-owlapi3
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.krdb.obda.exception.InvalidMappingException;
import it.unibz.krdb.obda.exception.InvalidPredicateDeclarationException;
import it.unibz.krdb.obda.io.ModelIOManager;
import it.unibz.krdb.obda.model.OBDADataFactory;
import it.unibz.krdb.obda.model.OBDAModel;
import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl;
import it.unibz.krdb.obda.owlrefplatform.core.QuestConstants;
import it.unibz.krdb.obda.owlrefplatform.core.QuestPreferences;
import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWL;
import it.unibz.krdb.obda.owlrefplatform.owlapi3.QuestOWLFactory;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;
import junit.framework.TestCase;
import org.semanticweb.owlapi.apibinding.OWLManager;
import org.semanticweb.owlapi.model.OWLOntology;
import org.semanticweb.owlapi.model.OWLOntologyManager;
import org.semanticweb.owlapi.reasoner.SimpleConfiguration;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/***
* A simple test that check if the system is able to handle mapping variants
* to construct the proper datalog program.
*/
public class OBDA2DatalogTest extends TestCase {
private OBDADataFactory fac;
private Connection conn;
Logger log = LoggerFactory.getLogger(this.getClass());
private OBDAModel obdaModel;
private OWLOntology ontology;
final String owlfile = "src/test/resources/test/mappinganalyzer/ontology.owl";
@Override
public void setUp() throws Exception {
	// Initializing an H2 in-memory database with the stock exchange data.
	String url = "jdbc:h2:mem:questjunitdb";
	String username = "sa";
	String password = "";

	fac = OBDADataFactoryImpl.getInstance();

	conn = DriverManager.getConnection(url, username, password);

	// Read the table-creation script. BUGFIX: the reader and statement were
	// previously never closed (resource leak).
	StringBuilder bf = new StringBuilder();
	BufferedReader in = new BufferedReader(
			new FileReader("src/test/resources/test/mappinganalyzer/create-tables.sql"));
	try {
		String line = in.readLine();
		while (line != null) {
			bf.append(line);
			line = in.readLine();
		}
	} finally {
		in.close();
	}

	Statement st = conn.createStatement();
	try {
		st.executeUpdate(bf.toString());
	} finally {
		st.close();
	}
	conn.commit();

	// Loading the OWL file.
	OWLOntologyManager manager = OWLManager.createOWLOntologyManager();
	ontology = manager.loadOntologyFromOntologyDocument(new File(owlfile));

	obdaModel = fac.getOBDAModel();
}
@Override
public void tearDown() throws Exception {
	// Drop all test tables, then release the in-memory H2 connection.
	dropTables();
	conn.close();
}
/**
 * Executes the drop-tables script against the test database.
 * BUGFIX: the file reader was never closed and the statement leaked on the
 * exception path; both are now closed in finally blocks.
 */
private void dropTables() throws SQLException, IOException {
	StringBuilder bf = new StringBuilder();
	BufferedReader in = new BufferedReader(
			new FileReader("src/test/resources/test/mappinganalyzer/drop-tables.sql"));
	try {
		String line = in.readLine();
		while (line != null) {
			bf.append(line);
			line = in.readLine();
		}
	} finally {
		in.close();
	}

	Statement st = conn.createStatement();
	try {
		st.executeUpdate(bf.toString());
	} finally {
		st.close();
	}
	conn.commit();
}
/**
 * Builds a Quest reasoner in virtual-ABox mode over the loaded ontology and
 * OBDA model, then obtains a statement. An exception anywhere here signals
 * that the mapping under test could not be analyzed/compiled.
 */
private void runTests() throws Exception {
	QuestPreferences p = new QuestPreferences();
	p.setCurrentValueOf(QuestPreferences.ABOX_MODE, QuestConstants.VIRTUAL);
	p.setCurrentValueOf(QuestPreferences.OPTIMIZE_EQUIVALENCES, "true");
	p.setCurrentValueOf(QuestPreferences.OPTIMIZE_TBOX_SIGMA, "true");

	// Creating a new instance of the reasoner
	QuestOWLFactory factory = new QuestOWLFactory();
	factory.setOBDAController(obdaModel);

	factory.setPreferenceHolder(p);

	QuestOWL reasoner = (QuestOWL) factory.createReasoner(ontology, new SimpleConfiguration());

	// Get ready for querying
	reasoner.getStatement();
}
public void testMapping_1() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_1.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_2() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_2.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_3() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_3.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_4() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_4.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_5() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_5.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), true); // FAIL
}
}
public void testMapping_6() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_6.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_7() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_7.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_8() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_8.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), true); // FAIL we cannot handle the case in the look up table were id map to two different values
}
}
public void testMapping_9() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_9.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), true); // FAIL we cannot handle the case in the look up table were id map to two different values
}
}
public void testMapping_10() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_10.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), true); // FAIL we cannot handle the case in the look up table were alias map to two different values
}
}
public void testMapping_11() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_11.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_12() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_12.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), true); // FAIL we cannot handle the case in the look up table were name map to two different values
}
}
public void testMapping_13() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_13.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_14() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_14.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), true); // FAIL
}
}
public void testMapping_15() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_15.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_16() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_16.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
public void testMapping_17() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_17.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), true); // FAIL
}
}
public void testMapping_18() throws IOException, InvalidPredicateDeclarationException, InvalidMappingException {
ModelIOManager ioManager = new ModelIOManager(obdaModel);
ioManager.load("src/test/resources/test/mappinganalyzer/case_18.obda");
try {
runTests();
} catch (Exception e) {
assertTrue(e.toString(), false);
}
}
}
| |
//
// This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.4-2
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2014.08.09 at 09:22:32 PM IST
//
package com.pacificmetrics.ims.apip.qti.item;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlElementRefs;
import javax.xml.bind.annotation.XmlID;
import javax.xml.bind.annotation.XmlMixed;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
import javax.xml.bind.annotation.adapters.CollapsedStringAdapter;
import javax.xml.bind.annotation.adapters.NormalizedStringAdapter;
import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter;
import org.w3.math.mathml.Math;
import org.w3.xinclude.Include;
/**
*
* The GapText complexType is the container for text assigned to a gap-based interaction. A simple run of text to be inserted into a gap by the user, may be subject to variable value substitution with printedVariable.
*
*
* <p>Java class for GapText.Type complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="GapText.Type">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <choice maxOccurs="unbounded" minOccurs="0">
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}printedVariable"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}feedbackInline"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}templateInline"/>
* <element ref="{http://www.w3.org/1998/Math/MathML}math"/>
* <element ref="{http://www.w3.org/2001/XInclude}include"/>
* <choice>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}img"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}br"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}object"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}em"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}a"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}code"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}span"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}sub"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}acronym"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}big"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}tt"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}kbd"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}q"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}i"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}dfn"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}abbr"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}strong"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}sup"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}var"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}small"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}samp"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}b"/>
* <element ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}cite"/>
* </choice>
* </choice>
* </sequence>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}xmllang.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}templateIdentifier.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}matchGroup.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}label.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}matchMin.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}showHide.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}id.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}matchMax.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}class.GapText.Attr"/>
* <attGroup ref="{http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2}identifier.GapText.Attr"/>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "GapText.Type", propOrder = {
    "content"
})
@XmlRootElement(name = "gapText")
public class GapText {

    // NOTE: this class is JAXB-generated; the only manual change below is in
    // getMatchMin() (shared BigInteger.ZERO instead of a fresh allocation).
    // Re-apply it if the class is ever regenerated from the schema.
    @XmlElementRefs({
        @XmlElementRef(name = "samp", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "i", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "big", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "em", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "templateInline", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = TemplateInline.class, required = false),
        @XmlElementRef(name = "tt", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "code", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "sub", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "printedVariable", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = PrintedVariable.class, required = false),
        @XmlElementRef(name = "math", namespace = "http://www.w3.org/1998/Math/MathML", type = Math.class, required = false),
        @XmlElementRef(name = "small", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "feedbackInline", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = FeedbackInline.class, required = false),
        @XmlElementRef(name = "b", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "cite", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "kbd", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "abbr", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "dfn", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "q", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Q.class, required = false),
        @XmlElementRef(name = "include", namespace = "http://www.w3.org/2001/XInclude", type = Include.class, required = false),
        @XmlElementRef(name = "br", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Br.class, required = false),
        @XmlElementRef(name = "a", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = A.class, required = false),
        @XmlElementRef(name = "object", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = com.pacificmetrics.ims.apip.qti.item.Object.class, required = false),
        @XmlElementRef(name = "span", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "img", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = Img.class, required = false),
        @XmlElementRef(name = "var", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "sup", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "acronym", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false),
        @XmlElementRef(name = "strong", namespace = "http://www.imsglobal.org/xsd/apip/apipv1p0/qtiitem/imsqti_v2p2", type = JAXBElement.class, required = false)
    })
    @XmlMixed
    protected List<java.lang.Object> content;
    @XmlAttribute(name = "lang", namespace = "http://www.w3.org/XML/1998/namespace")
    protected String lang;
    @XmlAttribute(name = "templateIdentifier")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String templateIdentifier;
    @XmlAttribute(name = "matchGroup")
    protected List<String> matchGroups;
    @XmlAttribute(name = "label")
    @XmlJavaTypeAdapter(NormalizedStringAdapter.class)
    @XmlSchemaType(name = "normalizedString")
    protected String label;
    @XmlAttribute(name = "matchMin")
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger matchMin;
    @XmlAttribute(name = "showHide")
    protected String showHide;
    @XmlAttribute(name = "id")
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    @XmlID
    protected String id;
    @XmlAttribute(name = "matchMax", required = true)
    @XmlSchemaType(name = "nonNegativeInteger")
    protected BigInteger matchMax;
    @XmlAttribute(name = "class")
    protected List<String> clazzs;
    @XmlAttribute(name = "identifier", required = true)
    @XmlJavaTypeAdapter(CollapsedStringAdapter.class)
    protected String identifier;

    /**
     * The GapText complexType is the container for text assigned to a gap-based
     * interaction. A simple run of text to be inserted into a gap by the user,
     * may be subject to variable value substitution with printedVariable.
     * Gets the value of the content property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the content property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getContent().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list:
     * {@link JAXBElement }{@code <}{@link HTMLTextType }{@code >},
     * {@link String },
     * {@link TemplateInline },
     * {@link PrintedVariable },
     * {@link Math },
     * {@link FeedbackInline },
     * {@link Q },
     * {@link Include },
     * {@link Br },
     * {@link A },
     * {@link com.pacificmetrics.ims.apip.qti.item.Object },
     * {@link Img }
     *
     */
    public List<java.lang.Object> getContent() {
        // Lazy initialization: JAXB leaves the field null when the element is absent.
        if (content == null) {
            content = new ArrayList<java.lang.Object>();
        }
        return this.content;
    }

    /**
     * Gets the value of the lang property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getLang() {
        return lang;
    }

    /**
     * Sets the value of the lang property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setLang(String value) {
        this.lang = value;
    }

    /**
     * Gets the value of the templateIdentifier property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getTemplateIdentifier() {
        return templateIdentifier;
    }

    /**
     * Sets the value of the templateIdentifier property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setTemplateIdentifier(String value) {
        this.templateIdentifier = value;
    }

    /**
     * Gets the value of the matchGroups property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the matchGroups property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getMatchGroups().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     *
     */
    public List<String> getMatchGroups() {
        // Lazy initialization, same live-list contract as getContent().
        if (matchGroups == null) {
            matchGroups = new ArrayList<String>();
        }
        return this.matchGroups;
    }

    /**
     * Gets the value of the label property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getLabel() {
        return label;
    }

    /**
     * Sets the value of the label property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setLabel(String value) {
        this.label = value;
    }

    /**
     * Gets the value of the matchMin property, defaulting to zero (the schema
     * default) when the attribute is absent.
     *
     * @return
     *     possible object is
     *     {@link BigInteger }
     *
     */
    public BigInteger getMatchMin() {
        if (matchMin == null) {
            // Use the shared BigInteger.ZERO constant rather than allocating
            // a new BigInteger("0") on every call.
            return BigInteger.ZERO;
        } else {
            return matchMin;
        }
    }

    /**
     * Sets the value of the matchMin property.
     *
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *
     */
    public void setMatchMin(BigInteger value) {
        this.matchMin = value;
    }

    /**
     * Gets the value of the showHide property, defaulting to {@code "show"}
     * (the schema default) when the attribute is absent.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getShowHide() {
        if (showHide == null) {
            return "show";
        } else {
            return showHide;
        }
    }

    /**
     * Sets the value of the showHide property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setShowHide(String value) {
        this.showHide = value;
    }

    /**
     * Gets the value of the id property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getId() {
        return id;
    }

    /**
     * Sets the value of the id property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setId(String value) {
        this.id = value;
    }

    /**
     * Gets the value of the matchMax property.
     *
     * @return
     *     possible object is
     *     {@link BigInteger }
     *
     */
    public BigInteger getMatchMax() {
        return matchMax;
    }

    /**
     * Sets the value of the matchMax property.
     *
     * @param value
     *     allowed object is
     *     {@link BigInteger }
     *
     */
    public void setMatchMax(BigInteger value) {
        this.matchMax = value;
    }

    /**
     * Gets the value of the clazzs property.
     *
     * <p>
     * This accessor method returns a reference to the live list,
     * not a snapshot. Therefore any modification you make to the
     * returned list will be present inside the JAXB object.
     * This is why there is not a <CODE>set</CODE> method for the clazzs property.
     *
     * <p>
     * For example, to add a new item, do as follows:
     * <pre>
     *    getClazzs().add(newItem);
     * </pre>
     *
     * <p>
     * Objects of the following type(s) are allowed in the list
     * {@link String }
     *
     */
    public List<String> getClazzs() {
        // Lazy initialization, same live-list contract as getContent().
        if (clazzs == null) {
            clazzs = new ArrayList<String>();
        }
        return this.clazzs;
    }

    /**
     * Gets the value of the identifier property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getIdentifier() {
        return identifier;
    }

    /**
     * Sets the value of the identifier property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setIdentifier(String value) {
        this.identifier = value;
    }

}
| |
/*
* EnvelopeBacktracker.java
*
* Created on February 4, 2009
* Modified:
*
*/
package xal.model.alg;
import xal.model.IElement;
import xal.model.IProbe;
import xal.model.ModelException;
import xal.model.elem.ChargeExchangeFoil;
import xal.model.elem.IdealRfGap;
import xal.model.probe.EnvelopeProbe;
import xal.model.probe.traj.EnvelopeProbeState;
import xal.tools.beam.CovarianceMatrix;
import xal.tools.beam.PhaseMap;
import xal.tools.beam.PhaseMatrix;
/**
*
* <h1>Tracking algorithm for <code>EnvelopeProbe</code> objects.</h1>
*
* <p>
 * This tracker object is based on the <code>EnvelopeTracker</code> algorithm,
 * but propagates probes backwards. The <code>EnvelopeProbe</code>'s
* state, which is a <code>CovarianceMatrix</code> object, is advanced using the linear
* dynamics portion of any beamline element (<code>IElement</code> exposing object) transfer
* map. The linear portion is represented as a matrix, thus, the state advance is accomplished
* with a transpose conjugation with this matrix.
* </p>
* <p>
* The effects of space charge are also included in the dynamics calculations. Space charge
* effects are also represented with a matrix transpose conjugation, however, the matrix is
* computed using the values of the probe's correlation matrix. The result is a nonlinear
* effect. The space charge forces are computed using a linear fit to the fields generated by
* an ellipsoidal charge distribution with the same statistics described in the probe's
* correlation matrix. The linear fit is weighted by the beam distribution itself, so it is
* more accurate in regions of higher charged density. For a complete description see the reference
* below.
* </p>
* <p>
* <strong>NOTES</strong>: (CKA)
* <br>
 * &middot; This class has been un-deprecated. The hierarchy structure of the
 * Algorithm base classes is currently being refactored to simplify
 * implementations such as this one; that refactoring is not complete yet.
 * A modification of the <code>Tracker</code> class is being considered.
* </p>
*
* @see xal.tools.beam.em.BeamEllipsoid
* @see xal.model.alg.EnvelopeTracker
* @see <a href="http://lib-www.lanl.gov/cgi-bin/getfile?00796950.pdf">Theory and Technique
* of Beam Envelope Simulation</a>
*
* @version 1.0
*
* @author Christopher K. Allen
* @since Feb 4, 2009
*
*/
public class EnvelopeBacktracker extends EnvelopeTrackerBase {
/*
* Global Constants
*/
    /** string type identifier for algorithm */
    // NOTE(review): this reuses EnvelopeTracker's class name as the type id
    // rather than EnvelopeBacktracker's own. Possibly intentional (shared
    // configuration/type id with the forward tracker), possibly a copy-paste
    // slip — confirm whether any persisted configuration keys on this string
    // before changing it.
    public static final String s_strTypeId = EnvelopeTracker.class.getName();

    /** current algorithm version */
    public static final int s_intVersion = 4;

    /** probe type recognized by this algorithm */
    public static final Class<EnvelopeProbe> s_clsProbeType = EnvelopeProbe.class;
/*
* Initialization
*/
/**
* <p>
* Create a new, uninitialized <code>EnvelopeBacktracker()</code>
* algorithm object. This is the default constructor to be used
* when creating objects of this type.
* </p>
*/
    public EnvelopeBacktracker() {
        // Register this algorithm's type id, version, and supported probe
        // class with the Tracker base class.
        super(s_strTypeId, s_intVersion, s_clsProbeType);
    }
/**
* <p>
* This method is a protected constructor meant only for building
* child classes.
* </p>
*
* @param strType string type identifier of algorithm
* @param intVersion version of algorithm
* @param clsProbeType class object for probe handled by this algorithm.
*/
    protected EnvelopeBacktracker(String strType, int intVersion,
            Class<? extends IProbe> clsProbeType) {
        // Pass the subclass's identity straight through to the Tracker base.
        super(strType, intVersion, clsProbeType);
    }
/**
* Copy constructor for EnvelopeBackTracker
*
* @param sourceTracker Tracker that is being copied
*/
    public EnvelopeBacktracker(EnvelopeBacktracker sourceTracker) {
        // Copy semantics are provided entirely by the base-class copy constructor.
        super( sourceTracker );
    }
/**
* Create a deep copy of EnvelopeBackTracker
*/
    @Override
    public EnvelopeBacktracker copy() {
        // Covariant return type: callers get an EnvelopeBacktracker, not just
        // the base Tracker type.
        return new EnvelopeBacktracker( this );
    }
/**
* <h2>Implementation of Abstract Tracker#doPropagation(IProbe, IElement)</h2>
*
* <p>
* This method is essentially the same implementation as the method
* <code>EnvelopeTracker#doPropagation()</code>, only here the probe object
* is back-propagated. The method calls <code>Tracker.retractProbe()</code>
* rather than <code>Tracker.advanceProbe()</code>, and the implemented
* method <code>EnvelopeBacktracker.retractState()</code> rather than
* <code>EnvelopeTracker.advanceState</code>.
* </p>
*
* @author Christopher K. Allen
* @since Feb 9, 2009
*
* @see xal.model.alg.Tracker#propagate(xal.model.IProbe, xal.model.IElement)
* @see xal.model.alg.EnvelopeTracker#doPropagation(IProbe, IElement)
*/
    @Override
    public void doPropagation(IProbe probe, IElement elem)
        throws ModelException
    {
        //sako
        // Length of the element remaining from the probe's current
        // in-element position.
        double elemPos = this.getElemPosition();
        double elemLen = elem.getLength();
        double propLen = elemLen - elemPos;
        if (propLen < 0) {
            // Probe position is past the element end — nothing to propagate.
            System.err.println("doPropagation, elemPos, elemLen = "+elemPos+" "+elemLen);
            return;
        }

        // Determine the number of integration steps and the step size
        int cntSteps;       // number of steps through element
        double dblStep;     // step size through element

        if(this.getUseSpacecharge())
            // With space charge, subdivide so each step is at most the
            // configured step size (at least one step).
            cntSteps = (int) Math.max(Math.ceil(propLen / getStepSize()), 1);
        else
            cntSteps = 1;
        // NOTE(review): the step size is derived from the FULL element length
        // while the step count is derived from the remaining length (propLen);
        // when elemPos > 0 the loop therefore traverses elemLen, not propLen.
        // The commented-out alternative below suggests this was a deliberate
        // choice — confirm before changing.
        dblStep = elem.getLength() / cntSteps;
        //  dblStep = propLen / cntSteps;

        // Alternate state retraction and probe bookkeeping for each sub-step.
        for (int i=0 ; i<cntSteps ; i++) {
            this.retractState(probe, elem, dblStep);
            this.retractProbe(probe, elem, dblStep);
        }
    }
/**
* <h2>Back-propagates the Defining State of the Probe Object</h2>
*
* <p>
* This method uses the same basic algorithm as in
* <code>EnvelopeTracker#advanceState()</code>, only the probe object is
* back-propagated. The method utilizes all the space charge mechanisms
* of the base class <code>EnvelopeTracker</code>.
* </p>
*
* @param ifcElem interface to the beam element
* @param ifcProbe interface to the probe
* @param dblLen length of element subsection to retract
*
* @throws ModelException bad element transfer matrix/corrupt probe state
*
* @author Christopher K. Allen
* @since Feb 9, 2009
*
* @see xal.model.alg.EnvelopeTracker#advanceState(xal.model.IProbe, xal.model.IElement, double)
*
*/
    protected void retractState(IProbe ifcProbe, IElement ifcElem, double dblLen)
        throws ModelException {

        // Identify probe
        EnvelopeProbe probe = (EnvelopeProbe)ifcProbe;

        // Get initial conditions of probe
        //        R3      vecPhs0  = probe.getBetatronPhase();
        //        Twiss[] twiss0   = probe.getCovariance().computeTwiss();
        PhaseMatrix matResp0 = probe.getResponseMatrix();
        PhaseMatrix matTau0 = probe.getCovariance();

        // Remove the emittance growth
        // (undo the growth a forward pass would have added over this slice)
        if (this.getEmittanceGrowth())
            matTau0 = this.removeEmittanceGrowth(probe, ifcElem, matTau0);

        // Compute the (back-propagating) transfer matrix for this slice,
        // including space-charge effects where enabled.
        //def PhaseMatrix matPhi  = compTransferMatrix(dblLen, probe, ifcElem);
        PhaseMatrix matPhi = compTransferMatrix(dblLen, probe, ifcElem);

        // Advance the probe states
        // presumably conjugateTrans computes Phi*tau*Phi^T (transpose
        // conjugation of the covariance) — see PhaseMatrix; confirm.
        PhaseMatrix matResp1 = matPhi.times( matResp0 );
        PhaseMatrix matTau1 = matTau0.conjugateTrans( matPhi );

        // Save the new state variables in the probe
        probe.setResponseMatrix(matResp1);
        probe.setCurrentResponseMatrix(matPhi);
        probe.setCovariance(new CovarianceMatrix(matTau1));
        //            probe.advanceTwiss(matPhi,  ifcElem.energyGain(probe, dblLen) );

        //  phase update:
        //        Twiss [] twiss1 = probe.getCovariance().computeTwiss();
        //        R3 vecPhs1 = vecPhs0.plus( matPhi.compPhaseAdvance(twiss0, twiss1) );
        //        probe.setBetatronPhase(vecPhs1);

        /** sako
         *  treatment of ChargeExchangeFoil
         **/
        treatChargeExchange(probe, ifcElem);
    }
/**
* <h2>Compute Transfer Matrix Including Space Charge</h2>
*
* <p>
* Computes the back-propagating transfer matrix over the incremental
* distance <code>dblLen</code>
* for the beamline modeling element <code>ifcElem</code>, and for the given
* <code>probe</code>. We include space charge and emittance growth effects
* if specified.
* </p>
*
* <p>
* <strong>NOTE</strong>: (CKA)
* <br>
 * &middot; If space charge is included, the space charge matrix is computed for length
* <code>dblLen</code>, but at a half-step location behind the current probe
* position. This method is the same technique used by Trace3D. The space charge
* matrix is then pre- and post- multiplied by the element transfer matrix for
* a half-step before and after the mid-step position, respectively.
* <br>
 * &middot; I do not
* know if this (leap-frog) technique buys us much more accuracy then full
* stepping.
* </p>
*
* @param dblLen incremental path length
* @param probe beam probe under simulation
* @param ifcElem beamline element propagating probe
*
* @return transfer matrix for given element
*
* @throws ModelException bubbles up from IElement#transferMap()
*
* @see EnvelopeTracker#compScheffMatrix(double, EnvelopeProbe, PhaseMatrix)
* @see EnvelopeTracker#transferEmitGrowth(EnvelopeProbe, IElement, PhaseMatrix)
* @see EnvelopeTracker#modTransferMatrixForDisplError(double, double, double, PhaseMatrix)
*/
private PhaseMatrix compTransferMatrix(double dblLen, EnvelopeProbe probe, IElement ifcElem)
    throws ModelException
{
    // Transfer matrix to be returned, including all modeled effects
    PhaseMatrix matPhi;

    // Special case: RF gap.  The gap is modeled as a thin lens, so the
    // back-propagating matrix is the inverse of the forward gap matrix,
    // evaluated with the probe temporarily rolled back to its pre-gap
    // kinetic energy (W - dW).
    if (ifcElem instanceof IdealRfGap) {
        IdealRfGap elemRfGap = (IdealRfGap)ifcElem;
        double dW = elemRfGap.energyGain(probe, dblLen);
        double W = probe.getKineticEnergy();

        // Roll the probe energy back before the gap for the map evaluation
        probe.setKineticEnergy(W-dW);
        PhaseMatrix matPhiI = elemRfGap.transferMap(probe, dblLen).getFirstOrder();
        if (this.getEmittanceGrowth()) {
            // Scale the thin-lens focusing terms for phase-spread emittance growth
            double dphi = this.effPhaseSpread(probe, elemRfGap);
            matPhiI = super.modTransferMatrixForEmitGrowth(dphi, matPhiI);
        }
        matPhi = matPhiI.inverse();

        // Restore the probe energy before returning
        probe.setKineticEnergy(W);
        return matPhi;
    }

    // Zero-length step: no space-charge treatment is needed
    if (dblLen==0.0) {
        matPhi = ifcElem.transferMap(probe, dblLen).getFirstOrder();
        return matPhi;
    }

    // Check for easy case of no space charge
    if (!this.getUseSpacecharge()) {
        matPhi = ifcElem.transferMap(probe, dblLen).getFirstOrder();

    // We must treat space charge: leap-frog a full-length space-charge kick
    // between two half-length element maps (Trace3D technique), all computed
    // with NEGATIVE lengths since we are back-propagating.
    } else {
        // Store the current probe state (for rollback)
        EnvelopeProbeState state0 = probe.cloneCurrentProbeState();

        // Half-step element matrix at the current probe location
        // NOTE: inverted by computing for negative propagation length
        PhaseMap mapElem0 = ifcElem.transferMap(probe, -dblLen/2.0);
        PhaseMatrix matPhi0 = mapElem0.getFirstOrder();

        // RMS-envelope (covariance) matrix at the probe entrance
        CovarianceMatrix covTau0 = probe.getCovariance();

        // Move the probe back a half step (position AND covariance) so that
        // position-dependent transfer maps are evaluated at the mid-step location
        double pos = probe.getPosition() - dblLen/2.0;
        PhaseMatrix matTau1 = covTau0.conjugateTrans(matPhi0);
        CovarianceMatrix covTau1 = new CovarianceMatrix(matTau1);
        probe.setPosition(pos);
        probe.setCovariance(covTau1);

        // Full-length space-charge matrix at the mid-step location
        // NOTE: inverted by computing for negative propagation length
        PhaseMatrix matPhiSc = this.compScheffMatrix(-dblLen, probe, ifcElem);

        // Half-step element matrix at the (moved) mid-step probe location
        PhaseMap mapElem1 = ifcElem.transferMap(probe, -dblLen/2.0);
        PhaseMatrix matPhi1 = mapElem1.getFirstOrder();

        // Restore original probe state
        probe.applyState(state0);

        // Compose the full back-propagating matrix for the distance dblLen
        matPhi = matPhi1.times( matPhiSc.times(matPhi0) );
    }
    return matPhi;
}
/**
* <h2>Remove Emittance Growth Through an RF Gap</h2>
* <p>
* Method to modify the covariance matrix when simulating emittance
* growth through RF accelerating gaps. (The method only
* considers the case of propagation
* through an <code>IdealRfGap</code> element). If the <code>IElement</code>
* argument is any other type of element, nothing is done.
* The argument <code>matTau</code> is the covariance matrix after the
* usual propagation through the <code>elem</code> element.
* </p>
* <p>
* Note that this method is essentially the complement of the method
* {@link EnvelopeTracker#addEmittanceGrowth(EnvelopeProbe, IElement, PhaseMatrix)}.
* Whereas <code>addEmittanceGrowth()</code> augments the momentum
* elements of <b>σ</b>, this method reduces them by the same amount.
* Specifically, let <i>x</i> be either transverse phase space variable.
* The emittance growth effect is achieved
* by first multiplying the element <x'|x> of the RF gap transfer
* matrix <b>Φ</b> by the factor
* <i>F<sub>t</sub></i>(Δ<i>φ</i>)
* returned by method
* {@link EnvelopeTrackerBase#compTransFourierTransform(double)}
* (see {@link EnvelopeTrackerBase#modTransferMatrixForEmitGrowth(double, PhaseMatrix)}).
* Currently this action is done in
* {@link #compTransferMatrix(double, EnvelopeProbe, IElement)}.
* Once the covariance matrix <b>τ</b> is back-propagated by the
* modified transfer
* matrix <b>Φ</b>, the moment <<i>x'</i><sup>2</sup>> is
* reduced by the result of this function.
* </p>
* <p>
* The discussion below is taken directly from
* {@link EnvelopeTracker#addEmittanceGrowth(EnvelopeProbe, IElement, PhaseMatrix)}.
* It is applicable here if the emittance is reduced by
* Δ<<i>x'<sub>f</sub></i><sup>2</sup>> rather than increased
* by it.
* </p>
* <p>
* The before gap and after gap transverse RMS divergence angles,
* <i>x'<sub>i</sub></i> and
* <i>x'<sub>f</sub></i>, respectively,
* are related by the following formula:
* <br>
* <br>
* <<i>x'<sub>f</sub></i><sup>2</sup>> =
* Δ<<i>x'<sub>f</sub></i><sup>2</sup>> +
* <<i>x'<sub>i</sub></i><sup>2</sup>>
* <br>
* <br>
* where Δ<<i>x'<sub>f</sub></i><sup>2</sup>>
* is the emittance growth factor given by
* <br>
* <br>
* Δ<<i>x'<sub>f</sub></i><sup>2</sup>> ≡
* <i>k<sub>t</sub></i><sup>2</sup>
* <i>G<sub>t</sub></i>(<i>φ<sub>s</sub></i>,Δ<i>φ</i>)
* <<i>x<sub>i</sub></i><sup>2</sup>>.
* <br>
* <br>
* where
* <i>G<sub>t</sub></i>(<i>φ<sub>s</sub></i>,Δ<i>φ</i>)
* is the transverse 3-dimensional emittance growth function,
* and <i>x<sub>i</sub></i> represents the
* before-gap position for <em>either</em>
* transverse phase plane. The action of this method is described
* by the original equation.
* </p>
* <p>
* The resulting action on the before gap and after gap transverse RMS emittances,
* <i>ε<sub>t,i</sub></i> and
* <i>ε<sub>t,f</sub></i>, respectively,
* is now described by the following formula:
* <br>
* <br>
* <i>ε<sub>t,f</sub></i><sup>2</sup> =
* <i>η</i><sup>2</sup><i>ε<sub>t,i</sub></i><sup>2</sup> +
* Δ<i>ε<sub>t,f</sub></i><sup>2</sup>
* <br>
* <br>
* where <i>η</i> is the momentum compaction due to acceleration
* <br>
* <br>
* <i>η</i> ≡
* <i>β<sub>i</sub>γ<sub>i</sub></i>/<i>β<sub>f</sub>γ<sub>f</sub></i>
* <br>
* <br>
* and Δ<i>ε<sub>t,f</sub></i> is the emittance increase term
* <br>
* <br>
* Δ<i>ε<sub>t,f</sub></i><sup>2</sup> ≡
* Δ<<i>x'<sub>f</sub></i><sup>2</sup>>
* <<i>x<sub>f</sub></i><sup>2</sup>><sup>2</sup>.
* <br>
* <br>
* There are analogous formulas for the before and after gap
* longitudinal plane emittances
* <i>ε<sub>z,i</sub></i> and
* <i>ε<sub>z,f</sub></i>, respectively, with
* <i>G<sub>t</sub></i>(<i>φ<sub>s</sub></i>,Δ<i>φ</i>)
* replaced by
* <i>G<sub>z</sub></i>(<i>φ<sub>s</sub></i>,Δ<i>φ</i>)
* and <i>x</i><sub>(<i>f,i</i>)</sub> replaced by
* <i>z</i><sub>(<i>f,i</i>)</sub>.
* </p>
* <p>
* <strong>NOTES</strong>: CKA
* <br>
* · Since we are modeling the RF gap as a thin lens, only the
* momentum (divergence angle) is modified, <<i>x</i><sup>2</sup>>,
* <<i>y</i><sup>2</sup>>, and <<i>z</i><sup>2</sup>> remain
* unaffected. Thus, <<i>x<sub>f</sub></i><sup>2</sup>>
* = <<i>x<sub>i</sub></i><sup>2</sup>> and
* <<i>z<sub>f</sub></i><sup>2</sup>>
* = <<i>z<sub>i</sub></i><sup>2</sup>> and may be computed
* as such in the above.
* <br>
* · The <<i>x'</i><sup>2</sup>> element is modified by the formula
* <br>
* <br>
* <<i>x'</i><sup>2</sup>> = <<i>x'</i><sup>2</sup>> + <i>c<sub>eg</sub></i><<i>x</i><sup>2</sup>>
* <br>
* <br>
* where <i>c<sub>eg</sub></i> is the emittance growth coefficient. There are similar
* equations for the other phase planes. The emittance growth coefficients are computed
* in the base class <code>EnvelopeTrackerBase</code> by the methods
* <code>emitGrowthCoefTrans(EnvelopeProbe, IdealRfGap)</code> and
* <code>emitGrowthCoefLong(EnvelopeProbe, IdealRfGap)</code>.
* </p>
* <p>
* <strong>NOTES</strong>: (H. SAKO)
* <br>
* · Increase emittance using same (nonlinear) procedure on the second
* moments as in Trace3D.
* </p>
*
* @param iElem <code>IElement</code> element for exceptional processing
* @param probe <code>IProbe</code> object associated with correlation matrix
* @param matTau correlation matrix after (normal) propagation thru <code>elem</code>
*
* @return covariance matrix of <code>probe</code> after adjusting for emittance growth
*
* @throws ModelException unknown/unsupported emittance growth model, or
* unknown/unsupported phase plane
*
* @see #compTransferMatrix(double, EnvelopeProbe, IElement)
* @see EnvelopeTrackerBase#compTransFourierTransform(double)
* @see EnvelopeTrackerBase#compLongFourierTransform(double)
* @see EnvelopeTracker#addEmittanceGrowth(EnvelopeProbe, IElement, PhaseMatrix)
*
* @author Hiroyuki Sako
* @author Christopher K. Allen
*/
private PhaseMatrix removeEmittanceGrowth(EnvelopeProbe probe, IElement iElem, PhaseMatrix matTau)
    throws ModelException
{
    // Only RF gaps produce the emittance growth that is being undone here
    if (!(iElem instanceof IdealRfGap))
        return matTau;

    // Nothing to undo when emittance-growth simulation is disabled
    if (!this.getEmittanceGrowth())
        return matTau;

    // Evaluate the gap parameters at the pre-gap energy: roll the probe's
    // kinetic energy back by the gap energy gain before computing the
    // synchronous phase, phase spread, and focusing constants
    IdealRfGap elemRfGap = (IdealRfGap)iElem;
    double W = probe.getKineticEnergy();
    double dW = elemRfGap.energyGain(probe);
    probe.setKineticEnergy(W - dW);
    double phi_s = elemRfGap.getPhase();
    double dphi = this.effPhaseSpread(probe, elemRfGap);

    // Divergence-angle increment (emittance growth) coefficients
    double dxp_2; // transverse divergence angle augmentation factor
    double dzp_2; // longitudinal divergence angle augmentation factor
    // (An alternative Trace3D-style computation via emitGrowthCoefTrans/Long
    // existed here previously; see revision history.)
    double Gt = this.compEmitGrowthFunction(PhasePlane.TRANSVERSE, phi_s, dphi);
    double kt = elemRfGap.compTransFocusing(probe);
    dxp_2 = kt*kt*Gt;
    double Gz = this.compEmitGrowthFunction(PhasePlane.LONGITUDINAL, phi_s, dphi);
    double kz = elemRfGap.compLongFocusing(probe);
    dzp_2 = kz*kz*Gz;

    // Restore the probe energy before modifying the covariance matrix
    probe.setKineticEnergy(W);

    // Reduce the divergence second moments IN PLACE (matTau is mutated):
    //   <x'^2> -= dxp_2 * <x^2>   (same coefficient for both transverse planes)
    //   <z'^2> -= dzp_2 * <z^2>
    // Transverse planes
    double x_2 = matTau.getElem(0,0);
    double xp_2 = matTau.getElem(1,1);
    double xp_2eg = xp_2 - dxp_2*x_2;
    matTau.setElem(1,1,xp_2eg);
    double y_2 = matTau.getElem(2,2);
    double yp_2 = matTau.getElem(3,3);
    double yp_2eg = yp_2 - dxp_2*y_2;
    matTau.setElem(3,3,yp_2eg);
    // Longitudinal plane
    double z_2 = matTau.getElem(4,4);
    double zp_2 = matTau.getElem(5,5);
    double zp_2eg = zp_2 - dzp_2*z_2;
    matTau.setElem(5,5,zp_2eg);
    return matTau;
}
// protected PhaseMatrix modTransferMatrixForEmitReduction(double dphi, PhaseMatrix matPhi)
// throws ModelException
//{
//
// if (!this.getEmittanceGrowthFlag())
// return matPhi;
//
// // Compute auxiliary parameters
// double Ft; // transverse plane Fourier transform
// double Fz; // longitudinal plane Fourier transform
//
// Ft = this.compTransFourierTransform(dphi);
// Fz = this.compLongFourierTransform(dphi);
//
// // Modify the transfer matrix
// double fl; // thin-lens focal-length element of tranfer matrix
//
// fl = matPhi.getElem(PhaseIndexHom.Xp, PhaseIndexHom.X);
// matPhi.setElem(PhaseIndexHom.Xp, PhaseIndexHom.X, fl/Ft);
//
// fl = matPhi.getElem(PhaseIndexHom.Yp, PhaseIndexHom.Y);
// matPhi.setElem(PhaseIndexHom.Yp, PhaseIndexHom.Y, fl/Ft);
//
// fl = matPhi.getElem(PhaseIndexHom.Zp, PhaseIndexHom.Z);
// matPhi.setElem(PhaseIndexHom.Zp, PhaseIndexHom.Z, fl/Fz);
//
// return matPhi;
//}
/**
 * <p>
 * Checks the given element for a <code>ChargeExchangeFoil</code>.
 * When one is found and the probe carries a positive species charge
 * (an H<sup>+</sup> beam), the charge sign is flipped so the beam
 * becomes H<sup>-</sup>.
 * </p>
 * <p>
 * The opposite of
 * {@link EnvelopeTracker#treatChargeExchange(EnvelopeProbe, IElement)}
 * </p>
 *
 * @param probe   propagating beam probe
 * @param ifcElem element to be tested for <code>ChargeExchangeFoil</code> type
 *
 * @author Hiroyuki Sako
 *
 * @see xal.model.elem.ChargeExchangeFoil
 * @see EnvelopeTracker#treatChargeExchange(EnvelopeProbe, IElement)
 */
private void treatChargeExchange(EnvelopeProbe probe, IElement ifcElem) {
    if (!(ifcElem instanceof ChargeExchangeFoil)) {
        return;
    }
    final double charge = probe.getSpeciesCharge();
    if (charge > 0) {
        System.out.println("charge exchanged at " + ifcElem.getId() + " from " + charge + " to " + (-charge));
        probe.setSpeciesCharge(-charge);
    }
}
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* User: anna
* Date: 23-Jun-2009
*/
package com.intellij.refactoring.extractMethod;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.openapi.util.Pair;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.SuggestedNameInfo;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.search.LocalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.util.VariableData;
import com.intellij.refactoring.util.duplicates.DuplicatesFinder;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.*;
public class ParametersFolder {
  /** Expression chosen to replace each folded variable in the parameter list. */
  private final Map<PsiVariable, PsiExpression> myExpressions = new HashMap<PsiVariable, PsiExpression>();
  /** Cache: expressions each candidate variable is mentioned in, within the extracted fragment. */
  private final Map<PsiVariable, List<PsiExpression>> myMentionedInExpressions = new HashMap<PsiVariable, List<PsiExpression>>();
  /** Parameter names already taken; guards generated-name uniqueness. */
  private final Set<String> myUsedNames = new HashSet<String>();
  /** Variables whose parameters were found redundant and dropped. */
  private final Set<PsiVariable> myDeleted = new HashSet<PsiVariable>();
  /** Set when folding found a non-conditional array access, so folding should be on by default. */
  private boolean myFoldingSelectedByDefault = false;

  /** Resets all cached folding state. */
  public void clear() {
    myExpressions.clear();
    myMentionedInExpressions.clear();
    myUsedNames.clear();
    myDeleted.clear();
  }

  /**
   * Returns true if the parameter for {@code data.variable} can be dropped:
   * every reference to the variable inside {@code scope} is covered by one of
   * the folded expressions, and (when the variable itself is folded) another
   * live parameter folds an equivalent expression.
   */
  public boolean isParameterSafeToDelete(@Nonnull VariableData data, @Nonnull LocalSearchScope scope) {
    Next:
    for (PsiReference reference : ReferencesSearch.search(data.variable, scope)) {
      PsiElement expression = reference.getElement();
      while (expression != null) {
        for (PsiExpression psiExpression : myExpressions.values()) {
          if (PsiEquivalenceUtil.areElementsEquivalent(expression, psiExpression)) {
            continue Next;
          }
        }
        expression = PsiTreeUtil.getParentOfType(expression, PsiExpression.class);
      }
      // this reference is not inside any folded expression
      return false;
    }
    final PsiExpression psiExpression = myExpressions.get(data.variable);
    if (psiExpression == null) return true;
    // Safe to delete only if some other live parameter folds an equivalent expression
    for (PsiVariable variable : myExpressions.keySet()) {
      if (variable != data.variable && !myDeleted.contains(variable)) {
        final PsiExpression expr = myExpressions.get(variable);
        if (expr != null && PsiEquivalenceUtil.areElementsEquivalent(expr, psiExpression)) {
          myDeleted.add(data.variable);
          return true;
        }
      }
    }
    return false;
  }

  /**
   * Replaces every occurrence of the folded expression for {@code data.variable}
   * inside the extracted body with a reference to the new parameter, keeping the
   * {@code elements} array in sync when a top-level element is replaced.
   */
  public void foldParameterUsagesInBody(@Nonnull VariableData data, PsiElement[] elements, SearchScope scope) {
    if (myDeleted.contains(data.variable)) return;
    final PsiExpression psiExpression = myExpressions.get(data.variable);
    if (psiExpression == null) return;
    final Set<PsiExpression> eqExpressions = new HashSet<PsiExpression>();
    for (PsiReference reference : ReferencesSearch.search(data.variable, scope)) {
      final PsiExpression expression = findEquivalent(psiExpression, reference.getElement());
      if (expression != null && expression.isValid()) {
        eqExpressions.add(expression);
      }
    }
    for (PsiExpression expression : eqExpressions) {
      final PsiExpression refExpression =
        JavaPsiFacade.getElementFactory(expression.getProject()).createExpressionFromText(data.variable.getName(), expression);
      final PsiElement replaced = expression.replace(refExpression);
      for (int i = 0, psiElementsLength = elements.length; i < psiElementsLength; i++) {
        PsiElement psiElement = elements[i];
        if (expression == psiElement) {
          elements[i] = replaced;
          break;
        }
      }
    }
  }

  /**
   * Decides whether {@code data.variable} can be folded into one of the
   * expressions it is mentioned in; when it can, records the chosen expression
   * and updates the suggested parameter type/name on {@code data}.
   */
  public boolean isParameterFoldable(@Nonnull VariableData data,
                                     @Nonnull LocalSearchScope scope,
                                     @Nonnull final List<? extends PsiVariable> inputVariables) {
    final List<PsiExpression> mentionedInExpressions = getMentionedExpressions(data.variable, scope, inputVariables);
    if (mentionedInExpressions == null) return false;
    int currentRank = 0;
    PsiExpression mostRanked = null;
    for (int i = mentionedInExpressions.size() - 1; i >= 0; i--) {
      PsiExpression expression = mentionedInExpressions.get(i);
      // Array accesses are preferred outright; a non-conditional one also
      // turns folding on by default in the dialog
      if (expression instanceof PsiArrayAccessExpression) {
        mostRanked = expression;
        if (!isConditional(expression, scope)) {
          myFoldingSelectedByDefault = true;
          break;
        }
      }
      // Otherwise rank by how many input variables the expression consumes
      final int r = findUsedVariables(data, inputVariables, expression).size();
      if (currentRank < r) {
        currentRank = r;
        mostRanked = expression;
      }
    }
    if (mostRanked != null) {
      myExpressions.put(data.variable, mostRanked);
      data.type = mostRanked.getType();
      final JavaCodeStyleManager codeStyleManager = JavaCodeStyleManager.getInstance(mostRanked.getProject());
      final SuggestedNameInfo nameInfo = codeStyleManager.suggestVariableName(VariableKind.PARAMETER, null, mostRanked, data.type);
      if (nameInfo.names.length > 0) {
        data.name = nameInfo.names[0];
      }
      setUniqueName(data);
    }
    return mostRanked != null;
  }

  /**
   * Returns true when {@code expr} is only reached conditionally within
   * {@code scope} (branch of an if, arm of a conditional expression, or a
   * switch body) rather than unconditionally evaluated.
   */
  private static boolean isConditional(PsiElement expr, LocalSearchScope scope) {
    while (expr != null) {
      final PsiElement parent = expr.getParent();
      if (parent != null && scope.containsRange(parent.getContainingFile(), parent.getTextRange())) {
        if (parent instanceof PsiIfStatement) {
          if (((PsiIfStatement)parent).getCondition() != expr) return true;
        } else if (parent instanceof PsiConditionalExpression) {
          if (((PsiConditionalExpression)parent).getCondition() != expr) return true;
        } else if (parent instanceof PsiSwitchStatement) {
          if (((PsiSwitchStatement)parent).getExpression() != expr) return true;
        }
      } else {
        return false;
      }
      expr = parent;
    }
    return false;
  }

  /**
   * Makes {@code data.name} unique among already-used parameter names by
   * appending an increasing numeric suffix to the original name
   * (name1, name2, ...), then records the final name.
   */
  private void setUniqueName(VariableData data) {
    // FIX: the counter was never incremented and the suffix was appended to the
    // already-suffixed name, yielding name1, name11, name111, ... instead of
    // name1, name2, name3, ...
    final String baseName = data.name;
    int idx = 1;
    while (myUsedNames.contains(data.name)) {
      data.name = baseName + idx++;
    }
    myUsedNames.add(data.name);
  }

  /**
   * Collects the input variables (other than {@code data.variable}) that are
   * referenced inside {@code expression}.
   */
  private static Set<PsiVariable> findUsedVariables(VariableData data, final List<? extends PsiVariable> inputVariables,
                                                    PsiExpression expression) {
    final Set<PsiVariable> found = new HashSet<PsiVariable>();
    expression.accept(new JavaRecursiveElementVisitor() {
      @Override
      public void visitReferenceExpression(PsiReferenceExpression referenceExpression) {
        super.visitReferenceExpression(referenceExpression);
        PsiElement resolved = referenceExpression.resolve();
        if (resolved instanceof PsiVariable && inputVariables.contains(resolved)) {
          found.add((PsiVariable)resolved);
        }
      }
    });
    found.remove(data.variable);
    return found;
  }

  /** Returns true when at least one parameter has a folded expression. */
  public boolean isFoldable() {
    return !myExpressions.isEmpty();
  }

  /**
   * Computes (and caches) the expressions inside {@code scope} that mention
   * {@code var} and are candidates for folding. Returns null when the variable
   * is written to (folding would change semantics).
   */
  @Nullable
  private List<PsiExpression> getMentionedExpressions(PsiVariable var, LocalSearchScope scope, final List<? extends PsiVariable> inputVariables) {
    if (myMentionedInExpressions.containsKey(var)) return myMentionedInExpressions.get(var);
    final PsiElement[] scopeElements = scope.getScope();
    List<PsiExpression> expressions = null;
    for (PsiReference reference : ReferencesSearch.search(var, scope)) {
      PsiElement expression = reference.getElement();
      if (expressions == null) {
        // First reference: collect every enclosing expression that stays
        // inside the scope, has a non-void type, and does not depend on locals
        expressions = new ArrayList<PsiExpression>();
        while (expression != null) {
          if (isAccessedForWriting((PsiExpression)expression)) return null;
          for (PsiElement scopeElement : scopeElements) {
            if (PsiTreeUtil.isAncestor(expression, scopeElement, true)) {
              expression = null;
              break;
            }
          }
          if (expression == null) break;
          final PsiType expressionType = ((PsiExpression)expression).getType();
          if (expressionType != null && !PsiType.VOID.equals(expressionType) && !(expression.getParent() instanceof PsiExpressionStatement)) {
            if (dependsOnLocals(expression, inputVariables)) {
              break;
            }
            expressions.add((PsiExpression)expression);
          }
          expression = PsiTreeUtil.getParentOfType(expression, PsiExpression.class);
        }
      }
      else {
        // Subsequent references: keep only expressions equivalent at every usage
        for (Iterator<PsiExpression> iterator = expressions.iterator(); iterator.hasNext();) {
          if (findEquivalent(iterator.next(), expression) == null) {
            iterator.remove();
          }
        }
      }
    }
    myMentionedInExpressions.put(var, expressions);
    return expressions;
  }

  /** Returns true when any sub-expression of {@code expression} is written to. */
  private static boolean isAccessedForWriting(PsiExpression expression) {
    final PsiExpression[] exprWithWriteAccessInside = new PsiExpression[1];
    expression.accept(new JavaRecursiveElementWalkingVisitor() {
      @Override
      public void visitElement(PsiElement element) {
        // stop descending once a write access has been found
        if (exprWithWriteAccessInside[0] != null) return;
        super.visitElement(element);
      }

      @Override
      public void visitExpression(PsiExpression expression) {
        if (PsiUtil.isAccessedForWriting(expression)) {
          exprWithWriteAccessInside[0] = expression;
        }
        super.visitExpression(expression);
      }
    });
    return exprWithWriteAccessInside[0] != null;
  }

  /**
   * Returns true when {@code expression} references any variable that is not
   * one of the extraction's input variables (i.e. a local of the fragment).
   */
  private static boolean dependsOnLocals(final PsiElement expression, final List<? extends PsiVariable> inputVariables) {
    final boolean[] localVarsUsed = new boolean[]{false};
    expression.accept(new JavaRecursiveElementWalkingVisitor(){
      @Override
      public void visitReferenceExpression(PsiReferenceExpression expression) {
        final PsiElement resolved = expression.resolve();
        if (resolved instanceof PsiVariable) {
          final PsiVariable variable = (PsiVariable)resolved;
          if (!inputVariables.contains(variable)) {
            localVarsUsed[0] = true;
            return;
          }
        }
        super.visitReferenceExpression(expression);
      }
    });
    return localVarsUsed[0];
  }

  /**
   * Returns the argument text to pass at the call site for {@code data}:
   * the folded expression's text when folded, otherwise the variable name.
   */
  @Nonnull
  public String getGeneratedCallArgument(@Nonnull VariableData data) {
    return myExpressions.containsKey(data.variable) ? myExpressions.get(data.variable).getText() : data.variable.getName();
  }

  /**
   * Marks the expression inside {@code element} that is equivalent to the
   * folded expression of {@code data.variable} for duplicate detection.
   * Returns true when such an expression was found and annotated.
   */
  public boolean annotateWithParameter(@Nonnull VariableData data, @Nonnull PsiElement element) {
    final PsiExpression psiExpression = myExpressions.get(data.variable);
    if (psiExpression != null) {
      final PsiExpression expression = findEquivalent(psiExpression, element);
      if (expression != null) {
        expression.putUserData(DuplicatesFinder.PARAMETER, Pair.create(data.variable, expression.getType()));
        return true;
      }
    }
    return false;
  }

  /**
   * Walks up from {@code element} through enclosing expressions and returns
   * the first one equivalent to {@code expr}, or null when none matches.
   */
  @javax.annotation.Nullable
  private static PsiExpression findEquivalent(PsiExpression expr, PsiElement element) {
    PsiElement expression = element;
    while (expression != null) {
      if (PsiEquivalenceUtil.areElementsEquivalent(expression, expr)) {
        return (PsiExpression)expression;
      }
      expression = PsiTreeUtil.getParentOfType(expression, PsiExpression.class);
    }
    return null;
  }

  /** Returns true when the variable's parameter was removed or folding failed for it. */
  public boolean wasExcluded(PsiVariable variable) {
    return myDeleted.contains(variable) || (myMentionedInExpressions.containsKey(variable) && myExpressions.get(variable) == null);
  }

  /** Returns true when folding should be pre-selected in the extract-method dialog. */
  public boolean isFoldingSelectedByDefault() {
    return myFoldingSelectedByDefault;
  }
}
| |
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.util.time;
import static org.threeten.bp.temporal.ChronoField.DAY_OF_MONTH;
import static org.threeten.bp.temporal.ChronoField.MONTH_OF_YEAR;
import static org.threeten.bp.temporal.ChronoField.YEAR;
import static org.threeten.bp.temporal.ChronoUnit.DAYS;
import static org.threeten.bp.temporal.ChronoUnit.MONTHS;
import java.util.GregorianCalendar;
import java.util.TimeZone;
import org.fudgemsg.FudgeMsg;
import org.fudgemsg.types.FudgeDate;
import org.threeten.bp.Clock;
import org.threeten.bp.DayOfWeek;
import org.threeten.bp.Duration;
import org.threeten.bp.Instant;
import org.threeten.bp.LocalDate;
import org.threeten.bp.LocalDateTime;
import org.threeten.bp.Period;
import org.threeten.bp.ZoneId;
import org.threeten.bp.ZoneOffset;
import org.threeten.bp.ZonedDateTime;
import org.threeten.bp.format.DateTimeFormatter;
import org.threeten.bp.format.DateTimeFormatterBuilder;
import org.threeten.bp.format.SignStyle;
import org.threeten.bp.temporal.Temporal;
import com.opengamma.OpenGammaRuntimeException;
import com.opengamma.util.ArgumentChecker;
/**
* Utility class for dates.
* <p>
* This is a thread-safe static utility class.
*/
public final class DateUtils {
/**
 * The original JVM default time-zone, captured BEFORE the static initializer
 * below forces the default to UTC (field initialization order matters here).
 */
public static final ZoneId ORIGINAL_TIME_ZONE = Clock.systemDefaultZone().getZone();
static {
    // Essential that OpenGamma runs in a default time-zone that has no
    // Daylight Savings; UTC is desirable for many other reasons, so use it here.
    TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
}
/**
 * The number of seconds in one day.
 */
public static final long SECONDS_PER_DAY = 86400L;
/**
 * The number of days in one year (estimated as 365.25, commonly used in financial calculations).
 */
public static final double DAYS_PER_YEAR = 365.25;
/**
 * The exact number of days per year.
 * NOTE(review): the Gregorian mean year is 365.2425 days and the tropical year
 * is ~365.2422; the value 365.2435 is unusual — confirm it is intentional.
 */
public static final double EXACT_DAYS_PER_YEAR = 365.2435;
/**
 * The number of milliseconds in one day.
 */
public static final long MILLISECONDS_PER_DAY = SECONDS_PER_DAY * 1000;
/**
 * The number of seconds in one year (a year being {@link #DAYS_PER_YEAR} days,
 * truncated to a whole number of seconds).
 */
public static final long SECONDS_PER_YEAR = (long) (SECONDS_PER_DAY * DAYS_PER_YEAR);
/**
 * The number of milliseconds in one year.
 */
public static final long MILLISECONDS_PER_YEAR = SECONDS_PER_YEAR * 1000;
/**
 * A formatter for yyyyMMdd (year padded to at least 4 digits, with an explicit
 * sign when it exceeds 4 digits).
 */
private static final DateTimeFormatter YYYYMMDD_LOCAL_DATE;
static {
    YYYYMMDD_LOCAL_DATE = new DateTimeFormatterBuilder()
        .appendValue(YEAR, 4, 10, SignStyle.EXCEEDS_PAD)
        .appendValue(MONTH_OF_YEAR, 2)
        .appendValue(DAY_OF_MONTH, 2)
        .toFormatter();
}
/**
 * A formatter for MM-dd.
 */
private static final DateTimeFormatter MM_DD_LOCAL_DATE;
static {
    MM_DD_LOCAL_DATE = new DateTimeFormatterBuilder()
        .appendValue(MONTH_OF_YEAR, 2)
        .appendLiteral("-")
        .appendValue(DAY_OF_MONTH, 2)
        .toFormatter();
}
/**
 * Restricted constructor (static utility class).
 */
private DateUtils() {
}
//-------------------------------------------------------------------------
/**
 * Initializes the default time-zone to UTC.
 * <p>
 * This method intentionally has an empty body: merely loading this class runs
 * the static initializer that sets the JVM default time-zone to UTC. Calling
 * this method is simply a readable way to force that class load.
 */
public static void initTimeZone() {
}
/**
 * Gets the original JVM default time-zone, as it was before this class
 * replaced it with UTC.
 *
 * @return the original time-zone, not null
 */
public static TimeZone originalTimeZone() {
    final String originalId = ORIGINAL_TIME_ZONE.getId();
    return TimeZone.getTimeZone(originalId);
}
//-------------------------------------------------------------------------
/**
 * Returns endDate - startDate in years, where a year is defined as 365.25 days.
 *
 * @param startDate the start instant, not null
 * @param endDate the end instant, not null
 * @return the difference in years (negative when endDate precedes startDate)
 * @throws IllegalArgumentException if either date is null
 */
public static double getDifferenceInYears(final Instant startDate, final Instant endDate) {
    ArgumentChecker.notNull(startDate, "startDate");
    ArgumentChecker.notNull(endDate, "endDate");
    final long diffMillis = endDate.toEpochMilli() - startDate.toEpochMilli();
    return ((double) diffMillis) / MILLISECONDS_PER_YEAR;
}
/**
 * Returns endDate - startDate in years, where a year is defined as 365.25 days.
 *
 * @param startDate the start date-time, not null
 * @param endDate the end date-time, not null
 * @return the difference in years (negative when endDate precedes startDate)
 * @throws IllegalArgumentException if either date is null
 */
public static double getDifferenceInYears(final ZonedDateTime startDate, final ZonedDateTime endDate) {
    ArgumentChecker.notNull(startDate, "startDate");
    ArgumentChecker.notNull(endDate, "endDate");
    final long startMillis = startDate.toInstant().toEpochMilli();
    final long endMillis = endDate.toInstant().toEpochMilli();
    return ((double) (endMillis - startMillis)) / MILLISECONDS_PER_YEAR;
}
/**
 * Returns endDate - startDate in years, where a year is defined as 365.25 days.
 *
 * @param startDate the start date, not null
 * @param endDate the end date, not null
 * @return the difference in years (negative when endDate precedes startDate)
 * @throws IllegalArgumentException if either date is null
 */
public static double getDifferenceInYears(final LocalDate startDate, final LocalDate endDate) {
    ArgumentChecker.notNull(startDate, "startDate");
    ArgumentChecker.notNull(endDate, "endDate");
    final double daysBetween = endDate.toEpochDay() - startDate.toEpochDay();
    return daysBetween / DAYS_PER_YEAR;
}
/**
 * Returns endDate - startDate in years, where a year-length is specified.
 * <p>
 * NOTE(review): the millisecond difference is divided by
 * {@code MILLISECONDS_PER_DAY} with long (truncating) division before the
 * year conversion, so sub-day precision is discarded. This matches the
 * original behavior and is preserved as-is — confirm it is intended.
 *
 * @param startDate the start instant, not null
 * @param endDate the end instant, not null
 * @param daysPerYear the number of days in the year for calculation
 * @return the difference in years
 * @throws IllegalArgumentException if either date is null
 */
public static double getDifferenceInYears(final Instant startDate, final Instant endDate, final double daysPerYear) {
    ArgumentChecker.notNull(startDate, "startDate");
    ArgumentChecker.notNull(endDate, "endDate");
    final long wholeDays = (endDate.toEpochMilli() - startDate.toEpochMilli()) / MILLISECONDS_PER_DAY;
    return wholeDays / daysPerYear;
}
//-------------------------------------------------------------------------
/**
 * Method that allows a fraction of a year to be added to an instant. If the
 * yearFraction does not give an integer number of seconds, it is rounded to
 * the nearest nanosecond. A year is defined to be 365.25 days.
 *
 * @param startDate the start instant, not null
 * @param yearFraction the fraction of a year to add (may be negative)
 * @return the calculated instant, not null
 * @throws IllegalArgumentException if the date is null
 */
public static Instant getDateOffsetWithYearFraction(final Instant startDate, final double yearFraction) {
    ArgumentChecker.notNull(startDate, "startDate");
    final long offsetNanos = Math.round(1e9 * SECONDS_PER_YEAR * yearFraction);
    return startDate.plusNanos(offsetNanos);
}
/**
 * Method that allows a fraction of a year to be added to a date-time. If the
 * yearFraction does not give an integer number of seconds, it is rounded to
 * the nearest nanosecond. A year is defined to be 365.25 days. The original
 * zone is retained.
 *
 * @param startDate the start date-time, not null
 * @param yearFraction the fraction of a year to add (may be negative)
 * @return the calculated date-time, not null
 * @throws IllegalArgumentException if the date is null
 */
public static ZonedDateTime getDateOffsetWithYearFraction(final ZonedDateTime startDate, final double yearFraction) {
    ArgumentChecker.notNull(startDate, "startDate");
    final Instant shifted = getDateOffsetWithYearFraction(startDate.toInstant(), yearFraction);
    return ZonedDateTime.ofInstant(shifted, startDate.getZone());
}
/**
 * Method that allows a fraction of a year to be added to an instant, with the
 * year length specified in days. If the yearFraction does not give an integer
 * number of seconds, it is rounded to the nearest nanosecond.
 *
 * @param startDate the start instant, not null
 * @param yearFraction the fraction of a year to add (may be negative)
 * @param daysPerYear the number of days in the year for calculation
 * @return the calculated instant, not null
 * @throws IllegalArgumentException if the date is null
 */
public static Instant getDateOffsetWithYearFraction(final Instant startDate, final double yearFraction, final double daysPerYear) {
    ArgumentChecker.notNull(startDate, "startDate");
    final long offsetNanos = Math.round(1e9 * SECONDS_PER_DAY * daysPerYear * yearFraction);
    return startDate.plusNanos(offsetNanos);
}
/**
 * Method that allows a fraction of a year to be added to a date-time, with the
 * year length specified in days. If the yearFraction does not give an integer
 * number of seconds, it is rounded to the nearest nanosecond. The original
 * zone is retained.
 *
 * @param startDate the start date-time, not null
 * @param yearFraction the fraction of a year to add (may be negative)
 * @param daysPerYear the number of days in the year for calculation
 * @return the calculated date-time, not null
 * @throws IllegalArgumentException if the date is null
 */
public static ZonedDateTime getDateOffsetWithYearFraction(final ZonedDateTime startDate, final double yearFraction, final double daysPerYear) {
    ArgumentChecker.notNull(startDate, "startDate");
    final Instant shifted = getDateOffsetWithYearFraction(startDate.toInstant(), yearFraction, daysPerYear);
    return ZonedDateTime.ofInstant(shifted, startDate.getZone());
}
//-------------------------------------------------------------------------
/**
 * Returns a UTC date given year, month, day with the time set to midnight (UTC).
 *
 * @param year the year
 * @param month the month
 * @param day the day of month
 * @return the date-time at start of day in UTC, not null
 */
public static ZonedDateTime getUTCDate(final int year, final int month, final int day) {
    final LocalDate date = LocalDate.of(year, month, day);
    return date.atStartOfDay(ZoneOffset.UTC);
}
/**
 * Returns a UTC date given year, month, day, hour and minutes (seconds and
 * nanoseconds are zero).
 *
 * @param year the year
 * @param month the month
 * @param day the day of month
 * @param hour the hour
 * @param minute the minute
 * @return the date-time in UTC, not null
 */
public static ZonedDateTime getUTCDate(final int year, final int month, final int day, final int hour, final int minute) {
    final LocalDateTime dateTime = LocalDateTime.of(year, month, day, hour, minute);
    return ZonedDateTime.of(dateTime, ZoneOffset.UTC);
}
//-------------------------------------------------------------------------
/**
* Calculates the exact number of 24 hour days in between two dates. Accounts for dates being in different time zones.
*
* @param startDate the start date, not null
* @param endDate the end date, not null
* @return the exact fraction of days between two dates
* @throws IllegalArgumentException if the date is null
*/
public static double getExactDaysBetween(final ZonedDateTime startDate, final ZonedDateTime endDate) {
ArgumentChecker.notNull(startDate, "startDate");
ArgumentChecker.notNull(endDate, "endDate");
return (endDate.toInstant().getEpochSecond() - startDate.toInstant().getEpochSecond()) / (double) SECONDS_PER_DAY;
}
/**
* Calculates the number of days in between two dates.
*
* @param startDate the start date, not null
* @param endDate the end date, not null
* @return the number of days between two dates
* @throws IllegalArgumentException if the date is null
*/
public static int getDaysBetween(final Temporal startDate, final Temporal endDate) {
return getDaysBetween(startDate, true, endDate, false);
}
/**
* Calculates the number of days in between two dates.
*
* @param startDate the start date, not null
* @param includeStart whether to include the start
* @param endDate the end date, not null
* @param includeEnd whether to include the end
* @return the number of days between two dates
* @throws IllegalArgumentException if the date is null
*/
public static int getDaysBetween(final Temporal startDate, final boolean includeStart, final Temporal endDate, final boolean includeEnd) {
ArgumentChecker.notNull(startDate, "startDate");
ArgumentChecker.notNull(endDate, "endDate");
int daysBetween = (int) Math.abs(DAYS.between(startDate, endDate));
if (includeStart && includeEnd) {
daysBetween++;
} else if (!includeStart && !includeEnd) {
daysBetween--;
}
return daysBetween;
}
/**
* Prints the date in yyyyMMdd format.
*
* @param date the date, not null
* @return the date as a string, not null
* @throws IllegalArgumentException if the date is null
*/
public static String printYYYYMMDD(final Temporal date) {
ArgumentChecker.notNull(date, "date");
return YYYYMMDD_LOCAL_DATE.format(date);
}
/**
* Prints the date in MM-dd format.
*
* @param date the date, not null
* @return the date as a string, not null
* @throws IllegalArgumentException if the date is null
*/
public static String printMMDD(final Temporal date) {
ArgumentChecker.notNull(date, "date");
return MM_DD_LOCAL_DATE.format(date);
}
/**
* Gets the previous Monday to Friday week-day before now.
*
* @return the date, not null
*/
public static LocalDate previousWeekDay() {
final Clock clock = Clock.systemUTC();
return previousWeekDay(LocalDate.now(clock));
}
/**
* Gets the next Monday to Friday week-day after now.
*
* @return the date, not null
*/
public static LocalDate nextWeekDay() {
final Clock clock = Clock.systemUTC();
return nextWeekDay(LocalDate.now(clock));
}
/**
* Gets the next Monday to Friday week-day after now.
*
* @param startDate the date to start from
* @return the date, not null
*/
public static LocalDate nextWeekDay(final LocalDate startDate) {
ArgumentChecker.notNull(startDate, "startDate");
LocalDate next = null;
final DayOfWeek dayOfWeek = startDate.getDayOfWeek();
switch (dayOfWeek) {
case FRIDAY:
next = startDate.plusDays(3);
break;
case SATURDAY:
next = startDate.plusDays(2);
break;
case MONDAY:
case TUESDAY:
case WEDNESDAY:
case THURSDAY:
case SUNDAY:
next = startDate.plusDays(1);
break;
default:
throw new OpenGammaRuntimeException("Unrecognised day of the week");
}
return next;
}
/**
* Gets the previous Monday to Friday week-day before now.
*
* @param startDate the date to start from
* @return the date, not null
*/
public static LocalDate previousWeekDay(final LocalDate startDate) {
ArgumentChecker.notNull(startDate, "startDate");
LocalDate previous = null;
final DayOfWeek dayOfWeek = startDate.getDayOfWeek();
switch (dayOfWeek) {
case MONDAY:
previous = startDate.minusDays(3);
break;
case TUESDAY:
case WEDNESDAY:
case THURSDAY:
case FRIDAY:
case SATURDAY:
previous = startDate.minusDays(1);
break;
case SUNDAY:
previous = startDate.minusDays(2);
break;
default:
throw new OpenGammaRuntimeException("Unrecognised day of the week");
}
return previous;
}
/**
* Converts a date in integer YYYYMMDD representation to epoch millis.
*
* @param date in integer YYYYMMDD representation
* @return the epoch millis
*/
public static long getUTCEpochMilis(final int date) {
final LocalDate localDate = LocalDate.parse(String.valueOf(date), YYYYMMDD_LOCAL_DATE);
return localDate.toEpochDay() * 24 * 60 * 60 * 1000;
}
/**
* Converts a date in integer YYYYMMDD representation to a UTC date-time.
*
* @param date in integer YYYYMMDD representation
* @return the date-time, not null
*/
public static ZonedDateTime toZonedDateTimeUTC(final int date) {
final LocalDate localDate = LocalDate.parse(String.valueOf(date), YYYYMMDD_LOCAL_DATE);
final ZonedDateTime zonedDateTime = getUTCDate(localDate.getYear(), localDate.getMonthValue(), localDate.getDayOfMonth());
return zonedDateTime;
}
/**
* Converts a date in integer YYYYMMDD representation to a date.
*
* @param date in integer YYYYMMDD representation
* @return the date, not null
*/
public static LocalDate toLocalDate(final int date) {
return toLocalDate(String.valueOf(date));
}
  /**
   * Converts a date in string YYYYMMDD representation to a {@code LocalDate}.
   * <p>
   * Note: this returns the parsed date itself, not epoch millis (the previous javadoc
   * summary was incorrect).
   *
   * @param date in YYYYMMDD representation, not null
   * @return the parsed date, not null
   * @throws IllegalArgumentException if the date is null
   */
  public static LocalDate toLocalDate(final String date) {
    ArgumentChecker.notNull(date, "date");
    return LocalDate.parse(date, YYYYMMDD_LOCAL_DATE);
  }
/**
* Constructs a LocalDate from a <code>java.util.Date</code> using exactly the same field values.
* <p>
* Each field is queried from the Date and assigned to the LocalDate. This is useful if you have been using the Date as a local date, ignoring the zone.
*
* @param date the Date to extract fields from
* @return the created LocalDate
* @throws IllegalArgumentException if the calendar is null
* @throws IllegalArgumentException if the date is invalid for the ISO chronology
*/
@SuppressWarnings("deprecation")
public static LocalDate fromDateFields(final java.util.Date date) {
ArgumentChecker.notNull(date, "date");
return LocalDate.of(date.getYear() + 1900, date.getMonth() + 1, date.getDate());
}
/**
* Constructs a LocalDate from a date or passed over the wire via {@link FudgeMsg}.
*
* @param date an Object
* @return the created LocalDate
* @throws IllegalArgumentException if the date is not a recognized type
*/
public static LocalDate toLocalDate(final Object date) {
if (date instanceof LocalDate) {
return (LocalDate) date;
}
if (date instanceof FudgeDate) {
return ((FudgeDate) date).toLocalDate();
}
throw new IllegalArgumentException(date.toString() + " is not a date");
}
//-------------------------------------------------------------------------
/**
* Creates a clock with a fixed time-source and UTC time-zone.
*
* @param instant the instant to be provided by the clock, not null
* @return the clock, not null
*/
public static Clock fixedClockUTC(final Instant instant) {
return Clock.fixed(instant, ZoneOffset.UTC);
}
//-------------------------------------------------------------------------
/**
* Gets the estimated duration of the period.
*
* @param period the period to estimate the duration of, not null
* @return the estimated duration, not null
*/
public static Duration estimatedDuration(final Period period) {
final Duration monthsDuration = MONTHS.getDuration().multipliedBy(period.toTotalMonths());
final Duration daysDuration = DAYS.getDuration().multipliedBy(period.getDays());
return monthsDuration.plus(daysDuration);
}
/**
* Converts GregorianCalendar to ZonedDateTime.
*
* @param calendar the calendar, not null
* @return the zoned-date-time, not null
*/
public static ZonedDateTime toZonedDateTime(final GregorianCalendar calendar) {
final ZoneId zone = ZoneId.of(calendar.getTimeZone().getID());
final Instant instant = Instant.ofEpochMilli(calendar.getTimeInMillis());
return ZonedDateTime.ofInstant(instant, zone);
}
/**
* Converts a string to a period, allowing the old format of {@code PT0S} for {@code P0D}.
*
* @param period the period to parse, not null
* @return the parsed period, not null
* @deprecated Don't rely on this, fix the source of data where the PT0S values are coming from
*/
@Deprecated
public static Period toPeriod(final String period) {
if ("PT0S".equals(period)) {
return Period.ZERO;
}
return Period.parse(period);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.query.topn;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.PostAggregator;
import org.apache.druid.query.dimension.DimensionSpec;
import org.joda.time.DateTime;
import java.nio.ByteBuffer;
import java.util.Comparator;
import java.util.List;
/**
 * TopN metric spec that orders results by the numeric value of a named aggregator
 * or post-aggregator.
 */
public class NumericTopNMetricSpec implements TopNMetricSpec
{
  private static final byte CACHE_TYPE_ID = 0x0;

  private final String metric;

  @JsonCreator
  public NumericTopNMetricSpec(
      @JsonProperty("metric") String metric
  )
  {
    this.metric = metric;
  }

  @Override
  public void verifyPreconditions(List<AggregatorFactory> aggregatorSpecs, List<PostAggregator> postAggregatorSpecs)
  {
    Preconditions.checkNotNull(metric, "metric can't be null");
    Preconditions.checkNotNull(aggregatorSpecs, "aggregations cannot be null");
    Preconditions.checkArgument(
        aggregatorSpecs.size() > 0 || postAggregatorSpecs.size() > 0,
        "Must have at least one AggregatorFactory or PostAggregator"
    );
    // The metric must resolve to either an aggregator or a post-aggregator by name.
    final AggregatorFactory aggregator = Iterables.tryFind(
        aggregatorSpecs,
        input -> input.getName().equals(metric)
    ).orNull();
    final PostAggregator postAggregator = Iterables.tryFind(
        postAggregatorSpecs,
        input -> input.getName().equals(metric)
    ).orNull();
    Preconditions.checkArgument(
        aggregator != null || postAggregator != null,
        "Must have an AggregatorFactory or PostAggregator for metric[%s], gave[%s] and [%s]",
        metric,
        aggregatorSpecs,
        postAggregatorSpecs
    );
  }

  @JsonProperty
  public String getMetric()
  {
    return metric;
  }

  @Override
  public Comparator getComparator(List<AggregatorFactory> aggregatorSpecs, List<PostAggregator> postAggregatorSpecs)
  {
    Comparator found = null;
    for (AggregatorFactory factory : aggregatorSpecs) {
      if (metric.equals(factory.getName())) {
        found = factory.getComparator();
        break;
      }
    }
    // A matching post-aggregator takes precedence over an aggregator of the same name.
    for (PostAggregator postAgg : postAggregatorSpecs) {
      if (metric.equals(postAgg.getName())) {
        found = postAgg.getComparator();
        break;
      }
    }
    return found;
  }

  @Override
  public TopNResultBuilder getResultBuilder(
      DateTime timestamp,
      DimensionSpec dimSpec,
      int threshold,
      Comparator comparator,
      List<AggregatorFactory> aggFactories,
      List<PostAggregator> postAggs
  )
  {
    return new TopNNumericResultBuilder(timestamp, dimSpec, metric, threshold, comparator, aggFactories, postAggs);
  }

  @Override
  public byte[] getCacheKey()
  {
    // Cache key layout: [type id byte][UTF-8 metric name].
    final byte[] metricBytes = StringUtils.toUtf8(metric);
    final ByteBuffer key = ByteBuffer.allocate(1 + metricBytes.length);
    key.put(CACHE_TYPE_ID);
    key.put(metricBytes);
    return key.array();
  }

  @Override
  public <T> TopNMetricSpecBuilder<T> configureOptimizer(TopNMetricSpecBuilder<T> builder)
  {
    return builder;
  }

  @Override
  public void initTopNAlgorithmSelector(TopNAlgorithmSelector selector)
  {
    selector.setAggregateTopNMetricFirst(true);
  }

  @Override
  public String getMetricName(DimensionSpec dimSpec)
  {
    return metric;
  }

  @Override
  public boolean canBeOptimizedUnordered()
  {
    return true;
  }

  @Override
  public String toString()
  {
    return "NumericTopNMetricSpec{" +
           "metric='" + metric + '\'' +
           '}';
  }

  @Override
  public boolean equals(Object o)
  {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    final NumericTopNMetricSpec that = (NumericTopNMetricSpec) o;
    return metric == null ? that.metric == null : metric.equals(that.metric);
  }

  @Override
  public int hashCode()
  {
    return metric == null ? 0 : metric.hashCode();
  }
}
| |
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.document.json;
import com.fasterxml.jackson.core.JsonFactory;
import com.yahoo.document.ArrayDataType;
import com.yahoo.document.DataType;
import com.yahoo.document.DocumentType;
import com.yahoo.document.DocumentTypeManager;
import com.yahoo.document.DocumentUpdate;
import com.yahoo.document.Field;
import com.yahoo.document.MapDataType;
import com.yahoo.document.PositionDataType;
import com.yahoo.document.ReferenceDataType;
import com.yahoo.document.StructDataType;
import com.yahoo.document.TensorDataType;
import com.yahoo.document.WeightedSetDataType;
import com.yahoo.document.serialization.DocumentDeserializerFactory;
import com.yahoo.document.serialization.DocumentSerializer;
import com.yahoo.document.serialization.DocumentSerializerFactory;
import com.yahoo.io.GrowableByteBuffer;
import com.yahoo.tensor.TensorType;
import com.yahoo.text.Utf8;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import static com.yahoo.test.json.JsonTestHelper.assertJsonEquals;
import static com.yahoo.test.json.JsonTestHelper.inputJson;
/**
 * Tests roundtrip serialization (JSON -> DocumentUpdate -> Buffer -> DocumentUpdate -> JSON) of document updates.
 *
 * @author Vegard Sjonfjell
 */
public class DocumentUpdateJsonSerializerTest {
    // Tensor types covering the three dimension layouts: fully mapped (sparse),
    // fully indexed (dense) and mixed.
    final static TensorType sparseTensorType = new TensorType.Builder().mapped("x").mapped("y").build();
    final static TensorType denseTensorType = new TensorType.Builder().indexed("x", 2).indexed("y", 3).build();
    final static TensorType mixedTensorType = new TensorType.Builder().mapped("x").indexed("y", 3).build();
    final static DocumentTypeManager types = new DocumentTypeManager();
    final static JsonFactory parserFactory = new JsonFactory();
    final static DocumentType docType = new DocumentType("doctype");
    final static DocumentType refTargetDocType = new DocumentType("target_doctype");
    final static String DEFAULT_DOCUMENT_ID = "id:test:doctype::1";
    // Builds the shared "doctype" schema with one field per data type exercised by the tests below.
    static {
        StructDataType myStruct = new StructDataType("my_struct");
        myStruct.addField(new Field("my_string_field", DataType.STRING));
        myStruct.addField(new Field("my_int_field", DataType.INT));
        types.registerDocumentType(refTargetDocType);
        docType.addField(new Field("string_field", DataType.STRING));
        docType.addField(new Field("int_field", DataType.INT));
        docType.addField(new Field("float_field", DataType.FLOAT));
        docType.addField(new Field("double_field", DataType.DOUBLE));
        docType.addField(new Field("byte_field", DataType.BYTE));
        docType.addField(new Field("sparse_tensor", new TensorDataType(sparseTensorType)));
        docType.addField(new Field("dense_tensor", new TensorDataType(denseTensorType)));
        docType.addField(new Field("mixed_tensor", new TensorDataType(mixedTensorType)));
        docType.addField(new Field("reference_field", new ReferenceDataType(refTargetDocType, 777)));
        docType.addField(new Field("predicate_field", DataType.PREDICATE));
        docType.addField(new Field("raw_field", DataType.RAW));
        docType.addField(new Field("int_array", new ArrayDataType(DataType.INT)));
        docType.addField(new Field("string_array", new ArrayDataType(DataType.STRING)));
        docType.addField(new Field("int_set", new WeightedSetDataType(DataType.INT, true, true)));
        docType.addField(new Field("string_set", new WeightedSetDataType(DataType.STRING, true, true)));
        docType.addField(new Field("string_map", new MapDataType(DataType.STRING, DataType.STRING)));
        docType.addField(new Field("deep_map", new MapDataType(DataType.STRING, new MapDataType(DataType.STRING, DataType.STRING))));
        docType.addField(new Field("map_array", new MapDataType(DataType.STRING, new ArrayDataType(DataType.STRING))));
        docType.addField(new Field("map_struct", new MapDataType(DataType.STRING, myStruct)));
        docType.addField(new Field("singlepos_field", PositionDataType.INSTANCE));
        docType.addField(new Field("multipos_field", new ArrayDataType(PositionDataType.INSTANCE)));
        types.registerDocumentType(docType);
    }
    // Serializes the update to the "head" binary format and rewinds the buffer ready for reading.
    private static GrowableByteBuffer serializeDocumentUpdate(DocumentUpdate update) {
        DocumentSerializer serializer = DocumentSerializerFactory.createHead(new GrowableByteBuffer());
        update.serialize(serializer);
        serializer.getBuf().rewind();
        return serializer.getBuf();
    }
    // Deserializes a document update from the buffer using the shared type manager.
    private static DocumentUpdate deserializeDocumentUpdate(GrowableByteBuffer buffer) {
        return new DocumentUpdate(DocumentDeserializerFactory.createHead(types, buffer));
    }
    // Round-trips the update through the binary serialization format.
    private static DocumentUpdate roundtripSerialize(DocumentUpdate update) {
        GrowableByteBuffer buffer = serializeDocumentUpdate(update);
        return deserializeDocumentUpdate(buffer);
    }
    // Parses a JSON update operation into a DocumentUpdate for the given document id.
    private static DocumentUpdate jsonToDocumentUpdate(String jsonDoc, String docId) {
        final InputStream rawDoc = new ByteArrayInputStream(Utf8.toBytes(jsonDoc));
        JsonReader reader = new JsonReader(types, rawDoc, parserFactory);
        return (DocumentUpdate) reader.readSingleDocument(DocumentOperationType.UPDATE, docId);
    }
    // Renders the update back to its JSON representation.
    private static String documentUpdateToJson(DocumentUpdate update) {
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        DocumentUpdateJsonSerializer serializer = new DocumentUpdateJsonSerializer(outputStream);
        serializer.serialize(update);
        try {
            return new String(outputStream.toByteArray(), "UTF-8");
        } catch (UnsupportedEncodingException e) {
            throw new RuntimeException(e);
        }
    }
    // Full pipeline check: JSON -> DocumentUpdate -> binary -> DocumentUpdate -> JSON,
    // asserting the final JSON matches the expected document.
    private static void roundtripSerializeJsonAndMatch(String jsonDoc, String expectedJsonDoc) {
        jsonDoc = jsonDoc.replaceFirst("DOCUMENT_ID", DEFAULT_DOCUMENT_ID);
        expectedJsonDoc = expectedJsonDoc.replaceFirst("DOCUMENT_ID", DEFAULT_DOCUMENT_ID);
        DocumentUpdate update = jsonToDocumentUpdate(jsonDoc, DEFAULT_DOCUMENT_ID);
        DocumentUpdate roundtripUpdate = roundtripSerialize(update);
        assertJsonEquals(expectedJsonDoc, documentUpdateToJson(roundtripUpdate));
    }
    // Convenience overload for the common case where the output should equal the input.
    private static void roundtripSerializeJsonAndMatch(String jsonDoc) {
        roundtripSerializeJsonAndMatch(jsonDoc, jsonDoc);
    }
    @Test
    public void testArithmeticUpdate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_field': {",
                "            'increment': 3.0",
                "        },",
                "        'float_field': {",
                "            'decrement': 1.5",
                "        },",
                "        'double_field': {",
                "            'divide': 3.2",
                "        },",
                "        'byte_field': {",
                "            'multiply': 2.0",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignSimpleTypes() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_field': {",
                "            'assign': 42",
                "        },",
                "        'float_field': {",
                "            'assign': 32.45",
                "        },",
                "        'double_field': {",
                "            'assign': 45.93",
                "        },",
                "        'string_field': {",
                "            'assign': \"My favorite string\"",
                "        },",
                "        'byte_field': {",
                "            'assign': 127",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignWeightedSet() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_set': {",
                "            'assign': {",
                "                '123': 456,",
                "                '789': 101112",
                "            }",
                "        },",
                "        'string_set': {",
                "            'assign': {",
                "                'meow': 218478,",
                "                'slurp': 2123",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAddUpdate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_array': {",
                "            'add': [",
                "                123,",
                "                456,",
                "                789",
                "            ]",
                "        },",
                "        'string_array': {",
                "            'add': [",
                "                'bjarne',",
                "                'andrei',",
                "                'rich'",
                "            ]",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testRemoveUpdate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_array': {",
                "            'remove': [",
                "                123,",
                "                789",
                "            ]",
                "        },",
                "        'string_array': {",
                "            'remove': [",
                "                'bjarne',",
                "                'rich'",
                "            ]",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testMatchUpdateArithmetic() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_array': {",
                "            'match': {",
                "                'element': 456,",
                "                'multiply': 8.0",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testMatchUpdateAssign() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'string_array': {",
                "            'match': {",
                "                'element': 3,",
                "                'assign': 'kjeks'",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignTensor() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'sparse_tensor': {",
                "            'assign': {",
                "                'cells': [",
                "                    { 'address': { 'x': 'a', 'y': 'b' }, 'value': 2.0 },",
                "                    { 'address': { 'x': 'c', 'y': 'b' }, 'value': 3.0 }",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void test_tensor_modify_update_on_dense_tensor() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'dense_tensor': {",
                "            'modify': {",
                "                'operation': 'replace',",
                "                'cells': [",
                "                    { 'address': { 'x': '0', 'y': '0' }, 'value': 2.0 },",
                "                    { 'address': { 'x': '1', 'y': '2' }, 'value': 3.0 }",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void test_tensor_modify_update_on_sparse_tensor() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'sparse_tensor': {",
                "            'modify': {",
                "                'operation': 'add',",
                "                'cells': [",
                "                    { 'address': { 'x': 'a', 'y': 'b' }, 'value': 2.0 },",
                "                    { 'address': { 'x': 'c', 'y': 'd' }, 'value': 3.0 }",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void test_tensor_modify_update_on_mixed_tensor() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'mixed_tensor': {",
                "            'modify': {",
                "                'operation': 'multiply',",
                "                'cells': [",
                "                    { 'address': { 'x': 'a', 'y': '0' }, 'value': 2.0 },",
                "                    { 'address': { 'x': 'c', 'y': '1' }, 'value': 3.0 }",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void test_tensor_add_update() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'sparse_tensor': {",
                "            'add': {",
                "                'cells': [",
                "                    { 'address': { 'x': '0', 'y': '0' }, 'value': 2.0 },",
                "                    { 'address': { 'x': '1', 'y': '2' }, 'value': 3.0 }",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void test_tensor_add_update_mixed() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'mixed_tensor': {",
                "            'add': {",
                "                'cells': [",
                "                    { 'address': { 'x': '1', 'y': '0' }, 'value': 2.0 },",
                "                    { 'address': { 'x': '1', 'y': '1' }, 'value': 0.0 },",
                "                    { 'address': { 'x': '1', 'y': '2' }, 'value': 0.0 },",
                "                    { 'address': { 'x': '0', 'y': '0' }, 'value': 0.0 },",
                "                    { 'address': { 'x': '0', 'y': '1' }, 'value': 0.0 },",
                "                    { 'address': { 'x': '0', 'y': '2' }, 'value': 3.0 }",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void test_tensor_remove_update() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'sparse_tensor': {",
                "            'remove': {",
                "                'addresses': [",
                "                    {'x':'0','y':'0'},",
                "                    {'x':'1','y':'2'}",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void test_tensor_remove_update_mixed() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'mixed_tensor': {",
                "            'remove': {",
                "                'addresses': [",
                "                    {'x':'0' }",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void test_tensor_remove_update_with_not_fully_specified_address() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'sparse_tensor': {",
                "            'remove': {",
                "                'addresses': [",
                "                    {'y':'0'},",
                "                    {'y':'2'}",
                "                ]",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void reference_field_id_can_be_update_assigned_non_empty_id() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'reference_field': {",
                "            'assign': 'id:ns:target_doctype::foo'",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void reference_field_id_can_be_update_assigned_empty_id() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'reference_field': {",
                "            'assign': ''",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignPredicate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'predicate_field': {",
                "            'assign': 'foo in [bar]'",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignRaw() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'raw_field': {",
                "            'assign': 'RG9uJ3QgYmVsaWV2ZSBoaXMgbGllcw=='",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignMap() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'string_map': {",
                "            'assign': { ",
                "                'conversion gel': 'deadly',",
                "                'repulsion gel': 'safe',",
                "                'propulsion gel': 'insufficient data'",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testSimultaneousFieldsAndFieldPathsUpdate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'singlepos_field': {",
                "            'assign': 'N60.222333;E10.12'",
                "        },",
                "        'deep_map{my_field}': {",
                "            'assign': {",
                "                'my_key': 'my value',",
                "                'new_key': 'new value'",
                "            }",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignFieldPathUpdate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'deep_map{my_field}': {",
                "            'assign': {",
                "                'my_key': 'my value',",
                "                'new_key': 'new value'",
                "            }",
                "        },",
                "        'map_struct{my_key}': {",
                "            'assign': {",
                "                'my_string_field': 'Some string',",
                "                'my_int_field': 5",
                "            }",
                "        },",
                "        'map_struct{my_key}.my_int_field': {",
                "            'assign': 10",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testRemoveFieldPathUpdate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_array[5]': {",
                "            'remove': 0",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAddFieldPathUpdate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'map_array{my_value}': {",
                "            'add': ['some', 'fancy', 'strings']",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testArithmeticFieldPathUpdate() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'map_struct{my_key}.my_int_field': {",
                "            'increment': 5.0",
                "        },",
                "        'int_array[10]': {",
                "            'divide': 3.0",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testMultipleOperationsOnSingleFieldPath() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'map_struct{my_key}': {",
                "            'assign': {",
                "                'my_string_field': 'Some string'",
                "            },",
                "            'remove': 0",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignSinglePos() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'singlepos_field': {",
                "            'assign': 'N60.222333;E10.12'",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testAssignMultiPos() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'multipos_field': {",
                "            'assign': [ 'N0.0;E0.0', 'S1.1;W1.1', 'N10.2;W122.2' ]",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testClearField() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_field': {",
                "            'assign': null",
                "        },",
                "        'string_field': {",
                "            'assign': null",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testCreateIfNotExistTrue() {
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'create': true,",
                "    'fields': {",
                "        'int_field': {",
                "            'assign': 42",
                "        }",
                "    }",
                "}"
        ));
    }
    @Test
    public void testCreateIfNotExistFalse() {
        // NOTE: DocumentUpdateJsonSerializer only writes 'create' when true.
        roundtripSerializeJsonAndMatch(inputJson(
                "{",
                "    'update': 'DOCUMENT_ID',",
                "    'create': false,",
                "    'fields': {",
                "        'int_field': {",
                "            'assign': 42",
                "        }",
                "    }",
                "}"
        ), inputJson("{",
                "    'update': 'DOCUMENT_ID',",
                "    'fields': {",
                "        'int_field': {",
                "            'assign': 42",
                "        }",
                "    }",
                "}"));
    }
}
| |
package com.forsrc.utils;
import org.apache.commons.codec.binary.Hex;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.io.IOUtils;
import java.io.*;
import java.security.MessageDigest;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ExecutorService;
/**
* The type Md 5 utils.
*/
public class MD5Utils {
    /**
     * The constant MAX_BUFFER_SIZE (1 MiB).
     * NOTE(review): no usage is visible in this part of the file — presumably a
     * read-buffer limit; confirm against the rest of the class.
     */
    public static final int MAX_BUFFER_SIZE = 1024 * 1024 * 1;
    /**
     * Extension appended to a file's path to locate its cached MD5 metadata file
     * (see {@code getFileMd5}).
     */
    public final static String FILE_DEF_EXT = ".json.md5"; //$NON-NLS-1$
    /**
     * Buffer size in bytes (1 MiB) used for buffered input streams when hashing.
     */
    public final static int MAX_BUFFER = 1024 * 1024;
    /**
     * Whether MD5 info should be saved; assumed to control writing the companion
     * .md5 metadata file — TODO confirm, usage not visible here.
     */
    public static boolean isSaveMd5Info = true;
    /**
     * Whether hashing progress is printed to the console while digesting large files.
     */
    public static boolean isShowProgress = false;
/**
* Md 5 string.
*
* @param string the string
* @return String string
* @throws
* @Title: md5
* @Description:
*/
public static String md5(String string) {
if (string == null) {
return null;
}
return DigestUtils.md5Hex(string);
}
/**
* Md 5 dir.
*
* @param dir the dir
* @return void
* @throws IOException the io exception
* @Title: md5dir
* @Description:
*/
public static void md5dir(File dir) throws IOException {
if (dir == null || !dir.exists()) {
// LogUtils.logger.debug(dir + "(Not exists)");
return;
}
if (dir.isFile()) {
md5(dir);
return;
}
Set<File> list = new HashSet<File>();
File[] files = dir.listFiles();
for (File f : files) {
String fileName = f.getAbsolutePath();
if (f.isDirectory()) {
list.add(f);
continue;
}
if (fileName.endsWith(".md5")) {
continue;
}
md5(f);
}
Iterator<File> it = list.iterator();
while (it.hasNext()) {
md5dir(it.next());
}
}
/**
* Md 5 thread.
*
* @param file the file
* @param pool the pool
* @throws IOException the io exception
*/
public static void md5Thread(File file, ExecutorService pool)
throws IOException {
if (file == null || !file.exists()) {
return;
}
if (file.isFile()) {
md5(file);
return;
}
try {
md5DirThread(file, pool);
} catch (IOException e) {
e.printStackTrace();
throw new IOException(e);
}
}
private static void md5DirThread(File dir, ExecutorService pool)
throws IOException {
if (dir.isFile()) {
return;
}
File[] files = dir.listFiles();
for (File f : files) {
String fileName = f.getPath();
if (f.isDirectory()) {
md5DirThread(new File(fileName), pool);
continue;
}
if (fileName.endsWith(".md5")) {
continue;
}
if (pool != null) {
pool.execute(new Md5Thread(f));
}
}
}
/**
* Gets file md 5.
*
* @param file the file
* @param isBigfile the is bigfile
* @return String file md 5
* @throws IOException the io exception
* @Title: getFileMd5
* @Description:
*/
public static String getFileMd5_(File file, boolean isBigfile)
throws IOException {
if (isBigfile) {
return getFileMd5(file);
}
String md5 = "";
InputStream in = null;
try {
in = new BufferedInputStream(new FileInputStream(file), MAX_BUFFER);
try {
md5 = DigestUtils.md5Hex(in);
} catch (IOException e) {
throw new IOException(e);
}
} catch (FileNotFoundException e) {
throw new IOException(e);
} finally {
IOUtils.closeQuietly(in);
}
return md5;
}
    /**
     * Alias for {@link #getFileMd5(File)}; kept for backwards compatibility.
     *
     * @param file the file to digest
     * @return the hex-encoded MD5 of the file contents
     * @throws IOException if the file cannot be read
     */
    public static String getFileMd5X(File file) throws IOException {
        return getFileMd5(file);
    }
/**
* Gets file md 5.
*
* @param file the file
* @return the file md 5
* @throws IOException the io exception
*/
public static String getFileMd5(File file) throws IOException {
if (file == null || !file.exists() || file.isDirectory()) {
return "";
}
File md5File = new File(file.getPath() + FILE_DEF_EXT);
String json = FileUtils.getFileTxt(md5File);
String md5 = null;
if (md5File.exists() && json != null && checkMd5(file)) {
md5 = (String) JsonUtils.getValue("md5", json);
if (md5 != null && md5.length() > 0) {
return md5;
}
}
InputStream in = null;
long start = System.currentTimeMillis();
try {
MessageDigest md = DigestUtils.getMd5Digest();
in = new BufferedInputStream(new FileInputStream(file), MAX_BUFFER);
byte[] buffer = new byte[1024 * 1024];
int length = -1;
long index = 1;
long read = 0;
while ((length = in.read(buffer)) != -1) {
md.update(buffer, 0, length);
if (isShowProgress
&& (read = read + length) >= MAX_BUFFER * index) {
double rate = read * 100.00 / file.length();
double speed = 1.000
* read
* 1000
/ (1024 * 1024 * 1.000 * (System
.currentTimeMillis() - start));
CmdUtils.printMark(new StringBuilder()
.append(DateTimeUtils.getDateTime())
.append(" [INFO] md5 ")
.append(String.format("%5.2f", rate))
.append("% ")
.append(String.format(
"%" + (file.length() + "").length() + "d",
read)).append("/").append(file.length())
.append(" ")
.append(String.format("%.3f", speed) + "m/s (")
.append(file.getName()).append(")").toString());
}
}
return new String(Hex.encodeHex(md.digest()));
} catch (IOException e) {
e.printStackTrace();
throw new IOException("GetFileMd5():" + file.getPath(), e);
} finally {
try {
if (in != null) {
in.close();
}
} catch (IOException e) {
e.printStackTrace();
throw new IOException(e);
}
}
}
    /**
     * Digests a single file (or recurses for a directory) and, when
     * {@link #isSaveMd5Info} is set, records the digest plus file metadata
     * into a companion {@code .md5} JSON file next to it. Files whose
     * companion record still matches (same length, same first/last-1k
     * digests — see {@link #checkMd5(File)}) are logged and skipped.
     *
     * @param file the file or directory to digest; ignored when {@code null}
     *             or missing
     * @throws IOException if reading the file or writing the record fails
     */
    public static void md5(File file) throws IOException {
        if (file == null || !file.exists()) {
            return;
        }
        long start = System.currentTimeMillis();
        if (file.isDirectory()) {
            md5dir(file);
            return;
        }
        if (checkMd5(file)) {
            // Companion record still matches; log the cached digest and skip
            // re-hashing the file.
            LogUtils.LOGGER.info(new StringBuilder()
                    .append("md5: ")
                    .append(file.getPath())
                    .append(" [Already md5] ")
                    .append(file.length())
                    .append(" md5: ")
                    .append((String) JsonUtils.getValue(
                            "md5",
                            FileUtils.getFileTxt(new File(file.getPath()
                                    + FILE_DEF_EXT)))).toString());
            return;
        }
        String md5 = getFileMd5(file);
        if (isSaveMd5Info && md5.length() > 0) {
            // Persist the digest plus metadata (length, mtime, first/last-1k
            // digests) so checkMd5() can cheaply validate the cache later.
            File jsonFile = new File(file.getPath() + FILE_DEF_EXT);
            String json = JsonUtils.setValue("fileName", file.getName(),
                    FileUtils.getFileTxt(jsonFile));
            json = JsonUtils.setValue("md5", md5, json);
            json = JsonUtils.setValue("md5Time", DateTimeUtils.getDateTime(),
                    json);
            json = JsonUtils.setValue("filePath", file.getPath(), json);
            json = JsonUtils.setValue("fileLength", file.length() + "", json);
            json = JsonUtils.setValue("fileLastModifiedTime", DateTimeUtils
                    .getDateTime(file.lastModified(), "yyyy-MM-dd HH:mm:ss"),
                    json);
            json = JsonUtils.setValue("fileLastModified", file.lastModified()
                    + "", json);
            json = JsonUtils.setValue("md5Start1k", getFileMd5(file, 0, 1024L)
                    + "", json);
            json = JsonUtils.setValue("md5End1k",
                    getFileMd5(file, file.length() - 1024, 1024L) + "", json);
            FileUtils.setFileTxt(jsonFile, JsonUtils.jsonToPrintln(json));
        }
        LogUtils.LOGGER.info(new StringBuilder().append("md5: ")
                .append(file.getPath()).append(" [")
                .append((System.currentTimeMillis() - start)).append("ms] ")
                .append(file.length()).append(" md5: ").append(md5).toString());
    }
/**
* Is md 5 file boolean.
*
* @param file the file
* @return the boolean
*/
public static boolean isMd5File(File file) {
if (file == null || !file.exists() || file.isDirectory()
|| file.length() <= 0) {
return false;
}
if (isMd5FileType(file.getAbsolutePath()) || checkMd5(file)) {
return true;
}
return false;
}
/**
* Is md 5 file type boolean.
*
* @param fileName the file name
* @return the boolean
*/
public static boolean isMd5FileType(String fileName) {
if (fileName == null) {
return false;
}
int length = fileName.length();
if (length > FILE_DEF_EXT.length() && fileName.endsWith(FILE_DEF_EXT)
&& checkMd5(new File(fileName.replace(FILE_DEF_EXT, "")))) {
return true;
}
return false;
}
/**
* Gets file md 5.
*
* @param file the file
* @param start the start
* @return the file md 5
* @throws IOException the io exception
*/
public static String getFileMd5(File file, long start) throws IOException {
RandomAccessFile raf = null;
long size = 0;
try {
raf = new RandomAccessFile(file, "r");
size = raf.length();
start = start >= size ? size - MAX_BUFFER_SIZE : start;
start = start < 0 ? 0 : start;
MessageDigest md = DigestUtils.getMd5Digest();
byte[] buf = new byte[MAX_BUFFER_SIZE];
raf.seek(start);
int length = 1024 * 1024;
// System.out.println("L start " + start);
int read = 0;
while (read < length) {
int len = raf.read(buf);
if (len == -1) {
break;
}
md.update(buf, 0, (int) (read + len > length ? length : len));
read += len;
}
return new String(Hex.encodeHex(md.digest()));
} catch (IOException e) {
e.printStackTrace();
throw e;
} finally {
if (raf != null) {
raf.close();
}
}
}
/**
* Gets file md 5.
*
* @param file the file
* @param start the start
* @param length the length
* @return the file md 5
* @throws IOException the io exception
*/
public static String getFileMd5(File file, long start, long length)
throws IOException {
// RandomAccessFile in = null;
InputStream in = null;
long size = 0;
try {
// in = new RandomAccessFile(file, "r");
// size = in.length();
in = new BufferedInputStream(new FileInputStream(file), 1024 * 2);
size = file.length();
start = start >= size ? size - length : start;
start = start < 0 ? 0 : start;
MessageDigest md = DigestUtils.getMd5Digest();
byte[] buf = new byte[1024 * 2];
// in.seek(start);
in.skip(start);
// System.out.println("L start " + start);
int read = 0;
while (read < length) {
int len = in.read(buf);
if (len == -1) {
break;
}
md.update(buf, 0, (int) (read + len > length ? length : len));
read += len;
}
// System.out.println("L read " + read);
return new String(Hex.encodeHex(md.digest()));
} catch (IOException e) {
e.printStackTrace();
throw e;
} finally {
if (in != null) {
in.close();
}
}
}
    /**
     * Legacy range-digest: hashes roughly {@code length} bytes of
     * {@code file} starting at offset {@code start} (clamped into range)
     * using a chain of early-exit branches instead of a running minimum.
     * Superseded by {@link #getFileMd5(File, long, long)}; kept for
     * compatibility.
     *
     * @param file   the file to read
     * @param start  the byte offset to start hashing from
     * @param length the number of bytes to hash
     * @return the hex-encoded MD5 of the sampled region
     * @throws IOException if the file cannot be read
     */
    public static String getFileMd5Old(File file, long start, long length)
            throws IOException {
        RandomAccessFile raf = null;
        long size = 0;
        try {
            raf = new RandomAccessFile(file, "r");
            size = raf.length();
            // Clamp the starting offset into [0, size - length].
            start = start >= size ? size - length : start;
            start = start < 0 ? 0 : start;
            MessageDigest md = DigestUtils.getMd5Digest();
            byte[] buf = new byte[MAX_BUFFER_SIZE];
            raf.seek(start);
            int len = 0;
            long read = 0;
            long index = 0;
            while ((len = raf.read(buf)) != -1) {
                index++;
                // First chunk already covers the whole requested range.
                if (len >= length && index == 1) {
                    read = length;
                    md.update(buf, 0, (int) length);
                    break;
                }
                read += len;
                // NOTE(review): when read >= length * 2, (length - read) is
                // negative, so this update would throw
                // IndexOutOfBoundsException; with MAX_BUFFER_SIZE-sized
                // chunks this branch may be unreachable in practice —
                // confirm before relying on it.
                if (read >= length * 2) {
                    int rd = (int) (length - read + len);
                    md.update(buf, 0, (int) (length - read));
                    read = read - rd;
                    break;
                }
                if (read == length) {
                    md.update(buf, 0, len);
                    break;
                }
                // Overshot the range: hash only the remaining wanted bytes.
                if (read > length) {
                    int rd = (int) (length - read + len);
                    md.update(buf, 0, rd);
                    read = read - len + rd;
                    break;
                }
                md.update(buf, 0, len);
            }
            return new String(Hex.encodeHex(md.digest()));
        } catch (IOException e) {
            e.printStackTrace();
            throw e;
        } finally {
            if (raf != null) {
                raf.close();
            }
        }
    }
    /**
     * Convenience overload of {@link #checkMd5(File, String)} that validates
     * the companion record without comparing against an expected digest.
     *
     * @param file the data file whose companion record is checked
     * @return {@code true} when the record is present and consistent
     */
    public static boolean checkMd5(File file) {
        return checkMd5(file, null);
    }
/**
* Check md 5 boolean.
*
* @param file the file
* @param md5 the md 5
* @return the boolean
*/
public static boolean checkMd5(File file, String md5) {
if (file == null || !file.exists()) {
return false;
}
File jsonFile = new File(file.getPath() + FILE_DEF_EXT);
if (!jsonFile.exists()) {
return false;
}
try {
String jsonString = FileUtils.getFileTxt(jsonFile);
if (md5 != null
&& !md5.equals(JsonUtils.getValue("md5", jsonString))) {
return false;
}
String length = (String) JsonUtils.getValue("fileLength",
jsonString);
if (length == null || !length.equals(file.length() + "")) {
return false;
}
String md5Start1k = (String) JsonUtils.getValue("md5Start1k",
jsonString);
if (md5Start1k == null
|| !md5Start1k.equals(getFileMd5(file, 0, 1024))) {
return false;
}
String md5End1k = (String) JsonUtils.getValue("md5End1k",
jsonString);
if (md5End1k == null
|| !md5End1k.equals(getFileMd5(file, file.length() - 1024,
1024))) {
return false;
}
} catch (IOException e) {
return false;
}
return true;
}
private static class Md5Thread extends Thread {
private File file;
private boolean isShow = false;
/**
* Instantiates a new Md 5 thread.
*/
public Md5Thread() {
}
/**
* Instantiates a new Md 5 thread.
*
* @param file the file
*/
public Md5Thread(File file) {
this.file = file;
this.isShow = false;
}
@Override
public void run() {
if (this.file == null) {
return;
}
try {
md5(this.file);
} catch (IOException e) {
e.printStackTrace();
}
}
/**
* Gets file.
*
* @return the file
*/
public File getFile() {
return this.file;
}
/**
* Sets file.
*
* @param file the file
*/
public void setFile(File file) {
this.file = file;
}
}
}
| |
/*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.zylib.gui.CodeDisplay;
import com.google.security.zynamics.zylib.gui.JCaret.ICaretListener;
import com.google.security.zynamics.zylib.gui.JCaret.JCaret;
import java.awt.BorderLayout;
import java.awt.Font;
import java.awt.FontMetrics;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.RenderingHints;
import java.awt.event.AdjustmentEvent;
import java.awt.event.AdjustmentListener;
import java.awt.event.ComponentEvent;
import java.awt.event.ComponentListener;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.awt.event.KeyEvent;
import java.awt.event.KeyListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import javax.swing.JComponent;
import javax.swing.JScrollBar;
/**
* A general-purpose JComponent to allow the synchronized side-by-side rendering of multiple
* "columns" of highlighted text. This is useful for all sorts of things:
* * Write a code editor that displays line numbers at the left-hand side but allows wrapping of
* lines
* * An IDA-style display with addresses at the left, opcode bytes to the right of it,
* instructions, and finally comments to the far right
* * ... etc.
*
* <p> Originally intended to replace some tables in BinNavi, but more widely applicable for all
* sorts of (monospace) text rendering.
*/
public class CodeDisplay extends JComponent {
  /** The metrics of the font to be used. */
  private FontMetrics fontMetrics;

  /** The font to be used in this component. */
  private Font textFont;

  /** Vertical scroll bar to scroll through the different rows in the display. */
  private final JScrollBar verticalScrollbar =
      new JScrollBar(JScrollBar.VERTICAL, 0/* value */, 1/* extent */, 0/* min */, 1/* max */);

  /** Horizontal scroll bar that is used to scroll sideways. */
  private final JScrollBar horizontalScrollbar =
      new JScrollBar(JScrollBar.HORIZONTAL, 0/* value */, 1/* extent */, 0/* min */, 1/* max */);

  /** Default internal listener that is used to handle various events. */
  private final InternalListener listener = new InternalListener();

  /** The reference to the underlying data model. */
  private final ICodeDisplayModel codeModel;

  /** Internal double-buffer for drawing. */
  private BufferedImage bufferedImage;
  private Graphics2D bufferedGraphics;

  /** The dimensions of individual characters in the selected font and the height of a text row. */
  private int fontCharWidth = 0;
  private int fontLineHeight = 0;

  /** The caret inside the component. */
  private final JCaret caret = new JCaret();
  private CodeDisplayCoordinate caretPosition = new CodeDisplayCoordinate(0, 0, 0, 0);

  /** Coordinates for the caret in the local FormattedCharacterBuffer. */
  private int caretX = 0;
  private int caretY = 0;

  /** The number of currently visible rows. */
  private int currentlyVisibleLines = 0;

  /** The number of currently visible character columns. */
  private int currentlyVisibleColumns = 0;

  /** The first row to draw onto the screen. */
  private int currentFirstRow = 0;

  /** The first line of the first row to be drawn onto the screen. */
  private int currentFirstLine = 0;

  /**
   * The first column of characters (do not confuse with the columns of the data model) that is
   * displayed on screen.
   */
  private int currentFirstCharColumn = 0;

  /** The formatted character buffer that is used for drawing the component. */
  private FormattedCharacterBuffer charBuffer = new FormattedCharacterBuffer(0, 0);

  /** Used for keeping track of objects listening for events from this code display */
  private List<CodeDisplayEventListener> eventListeners = new ArrayList<>();

  /**
   * The code makes use of the well-orderedness of TreeMap, this map *cannot* simply be replaced
   * with a different map.
   */
  private TreeMap<Integer, CodeDisplayCoordinate> yCoordinateToRowAndLine = new TreeMap<>();

  // Utility function for people using this class. Note: pads but never
  // truncates; strings longer than n are returned at full length.
  public static String padRight(String s, int n) {
    if (n == 0) {
      return "";
    }
    return String.format("%1$-" + n + "s", s);
  }

  public CodeDisplay(ICodeDisplayModel codeDisplayModel) {
    codeModel = codeDisplayModel;
    textFont = new Font(Font.MONOSPACED, Font.PLAIN, 12);
    // Necessary to receive input
    setFocusable(true);
    setLayout(new BorderLayout());
    initializeListeners();
    initializeScrollbars();
    initializeFontMetrics(textFont);
    // Calculates how many lines and columns will be actually visible at the
    // moment.
    currentlyVisibleLines = getNumberOfVisibleLines();
    currentlyVisibleColumns = getNumberOfVisibleColumns();
    // Initialize the internal graphics buffer.
    initializeGraphicsBuffer();
    setScrollBarMaximum();
    // By default, this component is disabled.
    setEnabled(true);
    // Set the initial caret to the first editable column.
    // NOTE(review): xPosition is advanced *before* the canHaveCaret check,
    // so caretX ends up at the end (not the start) of the chosen column, and
    // the loop keeps going so the *last* caret-capable column wins — confirm
    // whether that is intended.
    int xPosition = 0;
    CodeDisplayCoordinate testCoordinate = new CodeDisplayCoordinate(0, 0, 0, 0);
    for (int columnIndex = 0; columnIndex < codeModel.getNumberOfColumns(); columnIndex++) {
      xPosition += codeModel.getColumnWidthInCharacters(columnIndex);
      testCoordinate.setColumn(columnIndex);
      if (codeModel.canHaveCaret(testCoordinate)) {
        setCaretPosition(testCoordinate);
        caretX = xPosition;
        caretY = 1;
      }
    }
  }

  private void initializeGraphicsBuffer() {
    bufferedImage = new BufferedImage(((codeModel.getTotalWidthInCharacters() + 1) * fontCharWidth),
        (currentlyVisibleLines + 10) * fontLineHeight, BufferedImage.TYPE_INT_RGB);
    bufferedGraphics = (Graphics2D) bufferedImage.getGraphics();
  }

  private void notifyCaretListeners() {
    for (CodeDisplayEventListener listener : eventListeners) {
      listener.caretChanged(caretPosition);
    }
  }

  /**
   * In order to get the font metrics, a graphics context is required. This function temporarily
   * creates one.
   */
  private void initializeFontMetrics(Font font) {
    final BufferedImage temporaryImage = new BufferedImage(10, 10, BufferedImage.TYPE_INT_RGB);
    Graphics2D temporaryGraphics2D = (Graphics2D) temporaryImage.getGraphics();
    temporaryGraphics2D.setFont(font);
    fontMetrics = temporaryGraphics2D.getFontMetrics();
    fontCharWidth = fontMetrics.getMaxAdvance();
    fontLineHeight = fontMetrics.getHeight();
  }

  private void initializeListeners() {
    // Add the input listeners
    addMouseListener(listener);
    addMouseMotionListener(listener);
    addMouseWheelListener(listener);
    addFocusListener(listener);
    addComponentListener(listener);
    addKeyListener(listener);
    caret.addCaretListener(listener);
  }

  /**
   * Creates and initializes the scroll bars that are used to scroll through the data.
   **/
  private void initializeScrollbars() {
    verticalScrollbar.addAdjustmentListener(listener);
    add(verticalScrollbar, BorderLayout.EAST);
    horizontalScrollbar.addAdjustmentListener(listener);
    add(horizontalScrollbar, BorderLayout.SOUTH);
  }

  private int getNumberOfVisibleColumns() {
    final int rawWidth = getWidth() - verticalScrollbar.getWidth();
    return (rawWidth / fontCharWidth) + ((rawWidth % fontCharWidth) == 0 ? 0 : 1);
  }

  private int getNumberOfVisibleLines() {
    final int rawHeight = getHeight() - horizontalScrollbar.getHeight();
    return (rawHeight / fontLineHeight) + ((rawHeight % fontLineHeight) == 0 ? 0 : 1);
  }

  /**
   * Updates the maximum scroll range of the scroll bar depending on the number
   **/
  private void setScrollBarMaximum() {
    final int totalRows = codeModel.getNumberOfRows();
    int scrollRange = totalRows;
    // Disables the vertical scroll bar if all rows are visible.
    if (scrollRange < 0) {
      scrollRange = 0;
      verticalScrollbar.setValue(0);
      verticalScrollbar.setEnabled(false);
    } else {
      verticalScrollbar.setEnabled(true);
    }
    verticalScrollbar.setMaximum(scrollRange);
    final int totalWidth = codeModel.getTotalWidthInCharacters();
    int realWidth = getWidth();
    realWidth -= verticalScrollbar.getWidth();
    // Disables the horizontal scroll bar if everything fits into the component.
    if (realWidth >= (totalWidth * fontCharWidth)) {
      horizontalScrollbar.setValue(0);
      horizontalScrollbar.setEnabled(false);
    } else {
      horizontalScrollbar.setMaximum(totalWidth + 1);
      horizontalScrollbar.setEnabled(true);
    }
  }

  /**
   * Updates/rewrites the internal representation of the character buffer (that will be drawn) from
   * the data model.
   * Updating the character buffer from the data model requires a bit of care: The model provides a
   * sequence of rows, each divided into a fixed number of columns, and each cell (identified by a
   * row/column combination) can have multiple lines of text.
   * To properly copy this into a FormattedCharacterBuffer, the code iterates over all rows first.
   * For each row, it determines what the maximum number of text lines is (over all columns) - this
   * is needed to determine the overall height of the row. The individual cells are then filled into
   * the right row, and everything advances to the next row.
   **/
  private void updateCharacterBufferFromModel() {
    charBuffer.clear();
    charBuffer.setBackgroundColor(java.awt.Color.LIGHT_GRAY.brighter());
    currentFirstRow = verticalScrollbar.getValue();
    currentFirstCharColumn = horizontalScrollbar.getValue();
    // Draw the header, if requested.
    int totalCopiedLines = 0;
    if (codeModel.hasHeaderRow()) {
      totalCopiedLines = 1;
      int currentColumnIndex = 0;
      for (int fieldIndex = 0; fieldIndex < codeModel.getNumberOfColumns();
          currentColumnIndex += codeModel.getColumnWidthInCharacters(fieldIndex), fieldIndex++) {
        charBuffer.copyInto(0, currentColumnIndex, codeModel.getHeader(fieldIndex));
      }
    }
    // Iterates over the rows. Since each row has a height of at least one line,
    // the for() loop iterates over more than what is strictly required and
    // aborts early.
    for (int rowIndex = currentFirstRow;
        rowIndex < Math.min(currentFirstRow + currentlyVisibleLines, codeModel.getNumberOfRows());
        rowIndex++) {
      for (int lineIndex = (rowIndex == currentFirstRow) ? currentFirstLine : 0;
          lineIndex < codeModel.getMaximumLinesForRow(rowIndex); lineIndex++) {
        // Iterate over all the columns that need to be drawn.
        int currentColumnIndex = 0;
        for (int fieldIndex = 0; fieldIndex < codeModel.getNumberOfColumns();
            currentColumnIndex += codeModel.getColumnWidthInCharacters(fieldIndex), fieldIndex++) {
          // Update the current X/Y position of the caret in terms of this buffer.
          if ((caretPosition.getRow() == rowIndex) && (caretPosition.getLine() == lineIndex)
              && (caretPosition.getColumn() == fieldIndex)) {
            caretX = currentColumnIndex + caretPosition.getFieldIndex();
            caretY = totalCopiedLines;
          }
          FormattedCharacterBuffer line =
              codeModel.getLineFormatted(rowIndex, fieldIndex, lineIndex);
          if (line != null) {
            charBuffer.copyInto(totalCopiedLines, currentColumnIndex, line);
          }
          // Update the map that allows quick mapping of pixel positions to rows
          // and lines within rows.
          int linestart = totalCopiedLines * fontLineHeight + (fontLineHeight / 2);
          if (!yCoordinateToRowAndLine.containsKey(linestart)) {
            yCoordinateToRowAndLine.put(
                linestart, new CodeDisplayCoordinate(rowIndex, lineIndex, 0, 0));
          } else {
            CodeDisplayCoordinate coordinate = yCoordinateToRowAndLine.get(linestart);
            coordinate.setRow(rowIndex);
            coordinate.setLine(lineIndex);
          }
        }
        totalCopiedLines++;
      }
    }
    setScrollBarMaximum();
  }

  @Override
  protected void paintComponent(final Graphics gx) {
    super.paintComponent(gx);
    bufferedGraphics.setRenderingHint(
        RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
    updateVisibleLinesAndColumns();
    updateCharacterBufferFromModel();
    currentFirstRow = verticalScrollbar.getValue();
    currentFirstCharColumn = horizontalScrollbar.getValue();
    charBuffer.paintBuffer(bufferedGraphics, 0, 0, currentFirstCharColumn);
    gx.drawImage(bufferedImage, 2, 2, this);
    caret.draw(gx, 2 + (caretX * fontCharWidth), 6 + (caretY * fontLineHeight), fontLineHeight - 1);
  }

  void updateVisibleLinesAndColumns() {
    // Calculates how many lines and columns will be actually visible at the
    // moment.
    int currentVisibleLines = getNumberOfVisibleLines();
    int currentVisibleColumns = getNumberOfVisibleColumns();
    if ((currentlyVisibleLines != currentVisibleLines)
        || (currentlyVisibleColumns != currentVisibleColumns)) {
      currentlyVisibleLines = currentVisibleLines;
      currentlyVisibleColumns = currentVisibleColumns;
      // Include room for the newline characters.
      int properWidth = codeModel.getTotalWidthInCharacters();
      int properLines = currentlyVisibleLines + 1;
      charBuffer = new FormattedCharacterBuffer(properLines, properWidth);
      bufferedImage = new BufferedImage(
          properWidth * fontCharWidth, properLines * fontLineHeight, BufferedImage.TYPE_INT_RGB);
      bufferedGraphics = (Graphics2D) bufferedImage.getGraphics();
    }
  }

  void setSelectionStart() {}

  private boolean fillColumnAndFieldIndexFromX(int x, CodeDisplayCoordinate coordinate) {
    x = x + currentFirstCharColumn * fontCharWidth;
    int columnstart = 0;
    int characterIndex = 0;
    for (int index = 0; index < codeModel.getNumberOfColumns(); index++) {
      int columnend = columnstart + codeModel.getColumnWidthInCharacters(index) * fontCharWidth;
      if ((x >= columnstart) && (x < columnend)) {
        x = x - columnstart;
        characterIndex = x / fontCharWidth;
        coordinate.setColumn(index);
        coordinate.setFieldIndex(characterIndex);
        return true;
      }
      columnstart = columnend;
    }
    return false;
  }

  /**
   * Given a position on the screen, calculate what row / cell / line a given coordinate obtained
   * from an event falls into.
   */
  private boolean fillCoordinateFromXY(int x, int y, CodeDisplayCoordinate newCoordinate) {
    Map.Entry<Integer, CodeDisplayCoordinate> coordinate = yCoordinateToRowAndLine.floorEntry(y);
    if (coordinate == null) {
      // If the click went outside the last row or behind the last column, return an appropriate
      // coordinate.
      // NOTE(review): row 2 / column 1 look like arbitrary fallback values —
      // confirm they are the intended "outside" coordinate.
      newCoordinate.setRow(2);
      newCoordinate.setColumn(1);
      return true;
    }
    newCoordinate.setRow(coordinate.getValue().getRow());
    newCoordinate.setLine(coordinate.getValue().getLine());
    return fillColumnAndFieldIndexFromX(x, newCoordinate);
  }

  public CodeDisplayCoordinate getCaretPosition() {
    return caretPosition;
  }

  // Sets the position of the caret, and then figures out what the X and Y coordinates of the new
  // caret in the buffer need to be.
  public void setCaretPosition(CodeDisplayCoordinate coordinate) {
    caretPosition = coordinate;
  }

  /**
   * Code that is used to interacting with JTables often requires a method like this to function -
   * mapping a given point (the site of an event etc.) to a proper table row.
   */
  public int rowAtPoint(Point point) {
    CodeDisplayCoordinate coordinate = new CodeDisplayCoordinate(0, 0, 0, 0);
    fillCoordinateFromXY(point.x, point.y, coordinate);
    return coordinate.getRow();
  }

  public int columnAtPoint(Point point) {
    CodeDisplayCoordinate coordinate = new CodeDisplayCoordinate(0, 0, 0, 0);
    fillCoordinateFromXY(point.x, point.y, coordinate);
    return coordinate.getColumn();
  }

  public int lineAtPoint(Point point) {
    CodeDisplayCoordinate coordinate = new CodeDisplayCoordinate(0, 0, 0, 0);
    fillCoordinateFromXY(point.x, point.y, coordinate);
    return coordinate.getLine();
  }

  public void addCaretChangedListener(CodeDisplayEventListener e) {
    eventListeners.add(e);
  }

  public void removeCaretChangedListener(CodeDisplayEventListener e) {
    eventListeners.remove(e);
  }

  /**
   * Internal event listener.
   */
  private class InternalListener extends MouseAdapter
      implements AdjustmentListener, FocusListener, ICaretListener, ComponentListener, KeyListener {
    @Override
    public void adjustmentValueChanged(final AdjustmentEvent event) {
      repaint();
    }

    @Override
    public void caretStatusChanged(final JCaret source) {
      repaint();
    }

    @Override
    public void componentHidden(final ComponentEvent event) {}

    @Override
    public void componentMoved(final ComponentEvent event) {}

    @Override
    public void componentResized(final ComponentEvent event) {
      updateVisibleLinesAndColumns();
      updateCharacterBufferFromModel();
      setScrollBarMaximum();
    }

    @Override
    public void componentShown(final ComponentEvent event) {}

    @Override
    public void focusGained(final FocusEvent event) {
      repaint();
    }

    @Override
    public void focusLost(final FocusEvent event) {
      repaint();
    }

    @Override
    public void keyPressed(final KeyEvent event) {
      if (!event.isActionKey()) {
        return;
      }
      CodeDisplayCoordinate newCoordinate = new CodeDisplayCoordinate(caretPosition);
      int line = newCoordinate.getLine();
      int row = newCoordinate.getRow();
      int fieldIndex = newCoordinate.getFieldIndex();
      int column = newCoordinate.getColumn();
      switch (event.getKeyCode()) {
        case KeyEvent.VK_UP:
          if (line == 0) {
            int newRow = row - 1;
            newCoordinate.setRow(Math.max(newRow, 0));
            int maximumLines = codeModel.getMaximumLinesForRow(newRow);
            if (maximumLines > 0) {
              newCoordinate.setLine(codeModel.getMaximumLinesForRow(newRow) - 1);
            }
          } else {
            newCoordinate.setLine(line - 1);
          }
          break;
        case KeyEvent.VK_DOWN:
          if (line == codeModel.getMaximumLinesForRow(row) - 1) {
            newCoordinate.setRow(Math.min(row + 1, codeModel.getNumberOfRows()));
            newCoordinate.setLine(0);
          } else {
            newCoordinate.setLine(line + 1);
          }
          break;
        case KeyEvent.VK_LEFT:
          if (fieldIndex == 0) {
            // Skip one column to the left if it is editable.
            newCoordinate.setColumn(column - 1);
            newCoordinate.setFieldIndex(codeModel.getColumnWidthInCharacters(column - 1) - 1);
          } else {
            newCoordinate.setFieldIndex(fieldIndex - 1);
          }
          break;
        case KeyEvent.VK_RIGHT:
          fieldIndex = newCoordinate.getFieldIndex();
          if (fieldIndex == codeModel.getColumnWidthInCharacters(column) - 1) {
            // Skip one column to the right if it is editable.
            newCoordinate.setColumn(column + 1);
            newCoordinate.setFieldIndex(0);
          } else {
            newCoordinate.setFieldIndex(fieldIndex + 1);
          }
          break;
        case KeyEvent.VK_PAGE_DOWN:
          // Shift the caret down by as many lines as are currently displayed.
          for (int count = 0; count < currentlyVisibleLines - 2; count++) {
            if (newCoordinate.getLine() >= codeModel.getMaximumLinesForRow(row) - 1) {
              newCoordinate.setRow(
                  Math.min(newCoordinate.getRow() + 1, codeModel.getNumberOfRows()));
              newCoordinate.setLine(0);
            } else {
              newCoordinate.setLine(newCoordinate.getLine() + 1);
            }
          }
          verticalScrollbar.setValue(newCoordinate.getRow());
          break;
        case KeyEvent.VK_PAGE_UP:
          // Shift the caret up by as many lines as are currently displayed.
          for (int count = 0; count < currentlyVisibleLines - 2; count++) {
            if (newCoordinate.getLine() == 0) {
              int newRow = Math.max(newCoordinate.getRow() - 1, 0);
              newCoordinate.setRow(Math.max(newRow, 0));
              newCoordinate.setLine(codeModel.getMaximumLinesForRow(newRow) - 1);
            } else {
              newCoordinate.setLine(Math.max(newCoordinate.getLine() - 1, 0));
            }
          }
          verticalScrollbar.setValue(newCoordinate.getRow());
          break;
        case KeyEvent.VK_HOME:
          // Delegate the handling of this to the data model.
          codeModel.keyPressedOrTyped(newCoordinate, event);
          break;
        case KeyEvent.VK_END:
          // Delegate the handling of this to the data model.
          codeModel.keyPressedOrTyped(newCoordinate, event);
          break;
        case KeyEvent.VK_BACK_SPACE:
          codeModel.keyPressedOrTyped(newCoordinate, event);
          break;
        default:
          // Many other keys (e.g. the function keys F1-F12) also satisfy
          // isActionKey(). The previous code threw IllegalArgumentException
          // here, crashing on the event dispatch thread whenever such a key
          // was pressed; simply ignore keys this component does not handle.
          return;
      }
      if (codeModel.canHaveCaret(newCoordinate)) {
        setCaretPosition(newCoordinate);
        updateCharacterBufferFromModel();
        notifyCaretListeners();
        repaint();
      }
    }

    @Override
    public void keyReleased(final KeyEvent event) {}

    @Override
    public void keyTyped(final KeyEvent event) {
      if (codeModel.isEditable(caretPosition)) {
        CodeDisplayCoordinate before = new CodeDisplayCoordinate(caretPosition);
        codeModel.keyPressedOrTyped(caretPosition, event);
        if (!before.equals(caretPosition)) {
          notifyCaretListeners();
        }
        updateCharacterBufferFromModel();
        repaint();
      }
    }

    @Override
    public void mouseClicked(final MouseEvent event) {
      // Set the caret
      requestFocusInWindow();
    }

    @Override
    public void mouseDragged(final MouseEvent event) {}

    @Override
    public void mouseEntered(final MouseEvent event) {}

    @Override
    public void mouseExited(final MouseEvent event) {}

    @Override
    public void mouseMoved(final MouseEvent event) {}

    @Override
    public void mousePressed(final MouseEvent event) {
      CodeDisplayCoordinate coordinate = new CodeDisplayCoordinate(0, 0, 0, 0);
      if (!fillCoordinateFromXY(event.getX(), event.getY(), coordinate)) {
        return;
      }
      if (codeModel.canHaveCaret(coordinate)) {
        // Set the position of the caret accordingly.
        setCaretPosition(coordinate);
        updateCharacterBufferFromModel();
        notifyCaretListeners();
        repaint();
      }
    }

    @Override
    public void mouseReleased(final MouseEvent event) {}

    @Override
    public void mouseWheelMoved(final MouseWheelEvent e) {
      final int notches = e.getWheelRotation();
      verticalScrollbar.setValue(verticalScrollbar.getValue() + notches);
    }
  }

  @Override
  public java.awt.Dimension getPreferredSize() {
    // The preferred size is as wide as the columns dictate, and 40 (arbitrary number) of rows.
    // Override if different dimensions are needed.
    return new java.awt.Dimension(
        fontCharWidth * codeModel.getTotalWidthInCharacters(), fontLineHeight * 40);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.orm.entities;
import java.util.ArrayList;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.QueryHint;
import javax.persistence.Table;
import javax.persistence.TableGenerator;
import org.apache.ambari.server.actionmanager.HostRoleStatus;
import org.apache.ambari.server.state.RepositoryType;
import org.apache.ambari.server.state.stack.upgrade.Direction;
import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
import org.apache.commons.lang.builder.EqualsBuilder;
import com.google.common.base.Objects;
/**
 * Models the data representation of an upgrade.
 */
@Entity
@Table(name = "upgrade")
@TableGenerator(
    name = "upgrade_id_generator",
    table = "ambari_sequences",
    pkColumnName = "sequence_name",
    valueColumnName = "sequence_value",
    pkColumnValue = "upgrade_id_seq",
    initialValue = 0)
@NamedQueries({
  @NamedQuery(name = "UpgradeEntity.findAll", query = "SELECT u FROM UpgradeEntity u"),
  @NamedQuery(
      name = "UpgradeEntity.findAllForCluster",
      query = "SELECT u FROM UpgradeEntity u WHERE u.clusterId = :clusterId"),
  @NamedQuery(
      name = "UpgradeEntity.findUpgrade",
      query = "SELECT u FROM UpgradeEntity u WHERE u.upgradeId = :upgradeId"),
  @NamedQuery(
      name = "UpgradeEntity.findUpgradeByRequestId",
      query = "SELECT u FROM UpgradeEntity u WHERE u.requestId = :requestId"),
  @NamedQuery(
      name = "UpgradeEntity.findLatestForClusterInDirection",
      query = "SELECT u FROM UpgradeEntity u JOIN RequestEntity r ON u.requestId = r.requestId WHERE u.clusterId = :clusterId AND u.direction = :direction ORDER BY r.startTime DESC, u.upgradeId DESC"),
  @NamedQuery(
      name = "UpgradeEntity.findLatestForCluster",
      query = "SELECT u FROM UpgradeEntity u JOIN RequestEntity r ON u.requestId = r.requestId WHERE u.clusterId = :clusterId ORDER BY r.startTime DESC"),
  @NamedQuery(
      name = "UpgradeEntity.findAllRequestIds",
      query = "SELECT upgrade.requestId FROM UpgradeEntity upgrade"),
  @NamedQuery(
      name = "UpgradeEntity.findRevertable",
      query = "SELECT upgrade FROM UpgradeEntity upgrade WHERE upgrade.revertAllowed = 1 AND upgrade.clusterId = :clusterId ORDER BY upgrade.upgradeId DESC",
      hints = {
        @QueryHint(name = "eclipselink.query-results-cache", value = "true"),
        @QueryHint(name = "eclipselink.query-results-cache.ignore-null", value = "false"),
        @QueryHint(name = "eclipselink.query-results-cache.size", value = "1")
      }),
  @NamedQuery(
      name = "UpgradeEntity.findRevertableUsingJPQL",
      query = "SELECT upgrade FROM UpgradeEntity upgrade WHERE upgrade.repoVersionId IN (SELECT upgrade.repoVersionId FROM UpgradeEntity upgrade WHERE upgrade.clusterId = :clusterId AND upgrade.orchestration IN :revertableTypes GROUP BY upgrade.repoVersionId HAVING MOD(COUNT(upgrade.repoVersionId), 2) != 0) ORDER BY upgrade.upgradeId DESC",
      hints = {
        @QueryHint(name = "eclipselink.query-results-cache", value = "true"),
        @QueryHint(name = "eclipselink.query-results-cache.ignore-null", value = "false"),
        @QueryHint(name = "eclipselink.query-results-cache.size", value = "1")
      })
})
public class UpgradeEntity {

  /** Surrogate primary key, generated from the {@code ambari_sequences} table. */
  @Id
  @Column(name = "upgrade_id", nullable = false, insertable = true, updatable = false)
  @GeneratedValue(strategy = GenerationType.TABLE, generator = "upgrade_id_generator")
  private Long upgradeId;

  /** The cluster this upgrade belongs to; immutable once written. */
  @Column(name = "cluster_id", nullable = false, insertable = true, updatable = false)
  private Long clusterId;

  /** Read-only view of the FK column; maintained via {@link #setRequestEntity}. */
  @Column(name = "request_id", nullable = false, insertable = false, updatable = false)
  private Long requestId;

  /**
   * The request entity associated with this upgrade. This relationship allows
   * JPA to correctly order non-flushed commits during the transaction which
   * creates the upgrade. Without it, JPA would not know the correct order and
   * may try to create the upgrade before the request.
   */
  @OneToOne(optional = false, fetch = FetchType.LAZY)
  @JoinColumn(name = "request_id", nullable = false, insertable = true, updatable = false)
  private RequestEntity requestEntity = null;

  @Column(name = "direction", nullable = false)
  @Enumerated(value = EnumType.STRING)
  private Direction direction = Direction.UPGRADE;

  @Column(name = "upgrade_package", nullable = false)
  private String upgradePackage;

  @Column(name = "upgrade_type", nullable = false)
  @Enumerated(value = EnumType.STRING)
  private UpgradeType upgradeType;

  /** Read-only view of the FK column backing {@link #repositoryVersion}. */
  @Column(name = "repo_version_id", insertable = false, updatable = false)
  private Long repoVersionId;

  /**
   * The repository version being upgraded to (or downgraded from). Many
   * upgrades may reference the same repository version, hence the
   * {@code @ManyToOne} relationship annotation (required for the
   * {@code @JoinColumn} mapping to be valid).
   */
  @ManyToOne
  @JoinColumn(name = "repo_version_id", referencedColumnName = "repo_version_id", nullable = false)
  private RepositoryVersionEntity repositoryVersion;

  @Column(name = "skip_failures", nullable = false)
  private Integer skipFailures = 0;

  @Column(name = "skip_sc_failures", nullable = false)
  private Integer skipServiceCheckFailures = 0;

  @Column(name = "downgrade_allowed", nullable = false)
  private Short downgradeAllowed = 1;

  /**
   * Whether this upgrade is a candidate to be reverted. The current restriction
   * on this behavior is that only the most recent
   * {@link RepositoryType#PATCH}/{@link RepositoryType#MAINT} for a given
   * cluster can be reverted at a time.
   * <p/>
   * All upgrades are created with this value defaulted to {@code false}. Upon
   * successful finalization of the upgrade, if the upgrade was the correct type
   * and direction, then it becomes a candidate for reversion and this value is
   * set to {@code true}. If an upgrade is reverted after being finalized, then
   * this value should be set to {@code false} explicitly.
   * <p/>
   * There can exist <i>n</i> number of upgrades with this value set to
   * {@code true}. The idea is that only the most recent upgrade with this value
   * set to {@code true} will be able to be reverted.
   */
  @Column(name = "revert_allowed", nullable = false)
  private Short revertAllowed = 0;

  @Column(name = "orchestration", nullable = false)
  @Enumerated(value = EnumType.STRING)
  private RepositoryType orchestration = RepositoryType.STANDARD;

  /**
   * {@code true} if the upgrade has been marked as suspended.
   */
  @Column(name = "suspended", nullable = false, length = 1)
  private Short suspended = 0;

  @OneToMany(mappedBy = "upgradeEntity", cascade = { CascadeType.ALL })
  private List<UpgradeGroupEntity> upgradeGroupEntities;

  /**
   * Uni-directional relationship between an upgrade and all of the components
   * in that upgrade.
   */
  @OneToMany(orphanRemoval = true, cascade = { CascadeType.ALL })
  @JoinColumn(name = "upgrade_id")
  private List<UpgradeHistoryEntity> upgradeHistory;

  /**
   * @return the id
   */
  public Long getId() {
    return upgradeId;
  }

  /**
   * @param id the id
   */
  public void setId(Long id) {
    upgradeId = id;
  }

  /**
   * @return the cluster id
   */
  public Long getClusterId() {
    return clusterId;
  }

  /**
   * @param id the cluster id
   */
  public void setClusterId(Long id) {
    clusterId = id;
  }

  /**
   * @return the upgrade items
   */
  public List<UpgradeGroupEntity> getUpgradeGroups() {
    return upgradeGroupEntities;
  }

  /**
   * Sets the upgrade groups, wiring the inverse side of the bi-directional
   * relationship on each group.
   *
   * @param items the upgrade items
   */
  public void setUpgradeGroups(List<UpgradeGroupEntity> items) {
    for (UpgradeGroupEntity entity : items) {
      entity.setUpgradeEntity(this);
    }
    upgradeGroupEntities = items;
  }

  /**
   * @return the request id
   */
  public Long getRequestId() {
    return requestId;
  }

  /**
   * Associates this upgrade with its request, keeping the read-only
   * {@link #requestId} column in sync.
   *
   * @param requestEntity the request which created this upgrade (not {@code null})
   */
  public void setRequestEntity(RequestEntity requestEntity) {
    this.requestEntity = requestEntity;
    requestId = requestEntity.getRequestId();
  }

  /**
   * @return the direction of the upgrade
   */
  public Direction getDirection() {
    return direction;
  }

  /**
   * @param direction the direction of the upgrade
   */
  public void setDirection(Direction direction) {
    this.direction = direction;
  }

  /**
   * @return the upgrade type, such as rolling or non-rolling
   */
  public UpgradeType getUpgradeType() {
    return upgradeType;
  }

  /**
   * @return possibility to process downgrade
   */
  public Boolean isDowngradeAllowed() {
    return downgradeAllowed != null ? (downgradeAllowed != 0) : null;
  }

  /**
   * @param canDowngrade {@code true} to allow downgrade, {@code false} to disallow downgrade
   */
  public void setDowngradeAllowed(boolean canDowngrade) {
    downgradeAllowed = (!canDowngrade ? (short) 0 : (short) 1);
  }

  /**
   * Gets whether this upgrade supports being reverted. Upgrades can be reverted
   * (downgraded after finalization) if they are either
   * {@link RepositoryType#MAINT} or {@link RepositoryType#PATCH} and have never
   * been previously downgraded.
   *
   * @return {@code true} if this upgrade can potentially be reverted.
   */
  public Boolean isRevertAllowed() {
    return revertAllowed != null ? (revertAllowed != 0) : null;
  }

  /**
   * Sets whether this upgrade supports being reverted. This should only ever be
   * called from the finalization of an upgrade. {@link RepositoryType#MAINT} or
   * {@link RepositoryType#PATCH} upgrades can be reverted only if they have
   * not previously been downgraded.
   *
   * @param revertable
   *          {@code true} to mark this as being revertable, {@code false}
   *          otherwise.
   */
  public void setRevertAllowed(boolean revertable) {
    revertAllowed = (!revertable ? (short) 0 : (short) 1);
  }

  /**
   * @param upgradeType
   *          the upgrade type to set
   */
  public void setUpgradeType(UpgradeType upgradeType) {
    this.upgradeType = upgradeType;
  }

  /**
   * @return the upgrade package name, without the extension.
   */
  public String getUpgradePackage() {
    return upgradePackage;
  }

  /**
   * @param upgradePackage the upgrade pack to set
   */
  public void setUpgradePackage(String upgradePackage) {
    this.upgradePackage = upgradePackage;
  }

  /**
   * Gets whether skippable components that failed are automatically skipped.
   * They will be placed into the {@link HostRoleStatus#SKIPPED_FAILED} state.
   *
   * @return {@code true} if skippable failed components are automatically
   *         skipped when they fail.
   */
  public boolean isComponentFailureAutoSkipped() {
    return skipFailures != 0;
  }

  /**
   * Sets whether skippable components that failed are automatically skipped.
   *
   * @param autoSkipComponentFailures
   *          {@code true} to automatically skip component failures which are
   *          marked as skippable.
   */
  public void setAutoSkipComponentFailures(boolean autoSkipComponentFailures) {
    skipFailures = autoSkipComponentFailures ? 1 : 0;
  }

  /**
   * Gets whether skippable service checks that failed are automatically
   * skipped. They will be placed into the {@link HostRoleStatus#SKIPPED_FAILED}
   * state.
   *
   * @return {@code true} if service checks are automatically skipped when they
   *         fail.
   */
  public boolean isServiceCheckFailureAutoSkipped() {
    return skipServiceCheckFailures != 0;
  }

  /**
   * Sets whether skippable service checks that failed are automatically
   * skipped.
   *
   * @param autoSkipServiceCheckFailures
   *          {@code true} to automatically skip service check failures which
   *          are marked as being skippable.
   */
  public void setAutoSkipServiceCheckFailures(boolean autoSkipServiceCheckFailures) {
    skipServiceCheckFailures = autoSkipServiceCheckFailures ? 1 : 0;
  }

  /**
   * Gets whether the upgrade is suspended. A suspended upgrade will appear to
   * have its request aborted, but the intent is to resume it at a later point.
   *
   * @return {@code true} if the upgrade is suspended.
   */
  public boolean isSuspended() {
    return suspended != 0;
  }

  /**
   * Sets whether the upgrade is suspended.
   *
   * @param suspended
   *          {@code true} to mark the upgrade as suspended.
   */
  public void setSuspended(boolean suspended) {
    this.suspended = suspended ? (short) 1 : (short) 0;
  }

  /**
   * Adds a historical entry for a service component in this upgrade.
   *
   * @param historicalEntry
   *          the entry to add.
   */
  public void addHistory(UpgradeHistoryEntity historicalEntry) {
    if (null == upgradeHistory) {
      upgradeHistory = new ArrayList<>();
    }
    upgradeHistory.add(historicalEntry);
  }

  /**
   * Gets the history of this component's upgrades and downgrades.
   *
   * @return the component history, or {@code null} if none.
   */
  public List<UpgradeHistoryEntity> getHistory() {
    return upgradeHistory;
  }

  /**
   * Upgrades will always have a single version being upgraded to and downgrades
   * will have a single version being downgraded from. This repository
   * represents that version.
   * <p/>
   * When the direction is {@link Direction#UPGRADE}, this represents the target
   * repository. <br/>
   * When the direction is {@link Direction#DOWNGRADE}, this represents the
   * repository being downgraded from.
   *
   * @return the repository version being upgraded to or downgraded from (never
   *         {@code null}).
   */
  public RepositoryVersionEntity getRepositoryVersion() {
    return repositoryVersion;
  }

  /**
   * Sets the repository version for this upgrade. This value will change
   * depending on the direction of the upgrade.
   * <p/>
   * When the direction is {@link Direction#UPGRADE}, this represents the target
   * repository. <br/>
   * When the direction is {@link Direction#DOWNGRADE}, this represents the
   * repository being downgraded from.
   *
   * @param repositoryVersion
   *          the repository version being upgraded to or downgraded from (not
   *          {@code null}).
   */
  public void setRepositoryVersion(RepositoryVersionEntity repositoryVersion) {
    this.repositoryVersion = repositoryVersion;
  }

  /**
   * Sets the orchestration for the upgrade. Only different when an upgrade is a revert of a patch.
   * In that case, the orchestration is set to PATCH even if the target repository is type STANDARD.
   *
   * @param type the orchestration
   */
  public void setOrchestration(RepositoryType type) {
    orchestration = type;
  }

  /**
   * @return the orchestration type
   */
  public RepositoryType getOrchestration() {
    return orchestration;
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    UpgradeEntity that = (UpgradeEntity) o;
    return new EqualsBuilder()
        .append(upgradeId, that.upgradeId)
        .append(clusterId, that.clusterId)
        .append(requestId, that.requestId)
        .append(direction, that.direction)
        .append(suspended, that.suspended)
        .append(upgradeType, that.upgradeType)
        .append(upgradePackage, that.upgradePackage)
        .isEquals();
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public int hashCode() {
    // Must use the same fields as equals() to honor the equals/hashCode contract.
    return Objects.hashCode(upgradeId, clusterId, requestId, direction, suspended, upgradeType,
        upgradePackage);
  }
}
| |
/**
* Copyright (C) 2015 nshmura
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nshmura.recyclertablayout;
import android.animation.ValueAnimator;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.os.Build;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPager;
import android.support.v7.internal.widget.TintManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
/**
 * A {@link RecyclerView}-backed tab strip that can be driven by a
 * {@link ViewPager}. Draws a sliding indicator under the current tab and
 * keeps the selected tab centered while pages scroll.
 */
public class RecyclerTabLayout extends RecyclerView {

    protected static final long DEFAULT_SCROLL_DURATION = 200;
    protected static final float DEFAULT_POSITION_THRESHOLD = 0.6f;
    protected static final float POSITION_THRESHOLD_ALLOWABLE = 0.001f;

    protected Paint mIndicatorPaint;
    protected int mTabBackgroundResId;
    protected int mTabMinWidth;
    protected int mTabMaxWidth;
    protected int mTabTextAppearance;
    protected int mTabSelectedTextColor;
    protected boolean mTabSelectedTextColorSet;
    protected int mTabPaddingStart;
    protected int mTabPaddingTop;
    protected int mTabPaddingEnd;
    protected int mTabPaddingBottom;
    protected int mIndicatorHeight;

    protected LinearLayoutManager mLinearLayoutManager;
    protected RecyclerOnScrollListener mRecyclerOnScrollListener;

    protected ViewPager mViewPager;
    protected Adapter<?> mAdapter;

    protected int mIndicatorPosition;
    protected int mIndicatorOffset;
    protected int mScrollOffset;
    protected float mOldPositionOffset;
    protected float mPositionThreshold;
    protected boolean mRequestScrollToTab;

    public RecyclerTabLayout(Context context) {
        this(context, null);
    }

    public RecyclerTabLayout(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public RecyclerTabLayout(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        // We draw the indicator ourselves in onDraw().
        setWillNotDraw(false);
        mIndicatorPaint = new Paint();
        mLinearLayoutManager = new LinearLayoutManager(getContext());
        mLinearLayoutManager.setOrientation(LinearLayoutManager.HORIZONTAL);
        setLayoutManager(mLinearLayoutManager);
        // Item animations would fight the indicator drawing; disable them.
        setItemAnimator(null);

        TypedArray a = context.obtainStyledAttributes(attrs, R.styleable.rtl_RecyclerTabLayout,
                defStyle, R.style.rtl_RecyclerTabLayout);
        setIndicatorColor(a.getColor(R.styleable
                .rtl_RecyclerTabLayout_rtl_tabIndicatorColor, 0));
        setIndicatorHeight(a.getDimensionPixelSize(R.styleable
                .rtl_RecyclerTabLayout_rtl_tabIndicatorHeight, 0));
        mTabTextAppearance = a.getResourceId(R.styleable.rtl_RecyclerTabLayout_rtl_tabTextAppearance,
                R.style.rtl_RecyclerTabLayout_Tab);
        // rtl_tabPadding seeds all four sides; the per-side attributes override it.
        mTabPaddingStart = mTabPaddingTop = mTabPaddingEnd = mTabPaddingBottom = a
                .getDimensionPixelSize(R.styleable.rtl_RecyclerTabLayout_rtl_tabPadding, 0);
        mTabPaddingStart = a.getDimensionPixelSize(
                R.styleable.rtl_RecyclerTabLayout_rtl_tabPaddingStart, mTabPaddingStart);
        mTabPaddingTop = a.getDimensionPixelSize(
                R.styleable.rtl_RecyclerTabLayout_rtl_tabPaddingTop, mTabPaddingTop);
        mTabPaddingEnd = a.getDimensionPixelSize(
                R.styleable.rtl_RecyclerTabLayout_rtl_tabPaddingEnd, mTabPaddingEnd);
        mTabPaddingBottom = a.getDimensionPixelSize(
                R.styleable.rtl_RecyclerTabLayout_rtl_tabPaddingBottom, mTabPaddingBottom);
        if (a.hasValue(R.styleable.rtl_RecyclerTabLayout_rtl_tabSelectedTextColor)) {
            mTabSelectedTextColor = a
                    .getColor(R.styleable.rtl_RecyclerTabLayout_rtl_tabSelectedTextColor, 0);
            mTabSelectedTextColorSet = true;
        }
        mTabMinWidth = a.getDimensionPixelSize(
                R.styleable.rtl_RecyclerTabLayout_rtl_tabMinWidth, 0);
        mTabMaxWidth = a.getDimensionPixelSize(
                R.styleable.rtl_RecyclerTabLayout_rtl_tabMaxWidth, 0);
        mTabBackgroundResId = a
                .getResourceId(R.styleable.rtl_RecyclerTabLayout_rtl_tabBackground, 0);
        a.recycle();
        mPositionThreshold = DEFAULT_POSITION_THRESHOLD;
    }

    @Override
    protected void onDetachedFromWindow() {
        // Drop the auto-selection listener to avoid leaking it past detach.
        if (mRecyclerOnScrollListener != null) {
            removeOnScrollListener(mRecyclerOnScrollListener);
            mRecyclerOnScrollListener = null;
        }
        super.onDetachedFromWindow();
    }

    public void setIndicatorColor(int color) {
        mIndicatorPaint.setColor(color);
    }

    public void setIndicatorHeight(int indicatorHeight) {
        mIndicatorHeight = indicatorHeight;
    }

    /**
     * Enables or disables automatically selecting the tab nearest the center
     * when the user fling-scrolls the tab strip itself.
     */
    public void setAutoSelectionMode(boolean autoSelect) {
        if (mRecyclerOnScrollListener != null) {
            removeOnScrollListener(mRecyclerOnScrollListener);
            mRecyclerOnScrollListener = null;
        }
        if (autoSelect) {
            mRecyclerOnScrollListener = new RecyclerOnScrollListener(this, mLinearLayoutManager);
            addOnScrollListener(mRecyclerOnScrollListener);
        }
    }

    public void setPositionThreshold(float positionThreshold) {
        mPositionThreshold = positionThreshold;
    }

    /**
     * Binds this tab layout to a {@link ViewPager} using the default text-tab
     * adapter configured from this view's XML attributes.
     */
    public void setUpWithViewPager(ViewPager viewPager) {
        DefaultAdapter adapter = new DefaultAdapter(viewPager);
        adapter.setTabPadding(mTabPaddingStart, mTabPaddingTop, mTabPaddingEnd, mTabPaddingBottom);
        adapter.setTabTextAppearance(mTabTextAppearance);
        adapter.setTabSelectedTextColor(mTabSelectedTextColorSet, mTabSelectedTextColor);
        adapter.setTabMaxWidth(mTabMaxWidth);
        adapter.setTabMinWidth(mTabMinWidth);
        adapter.setTabBackgroundResId(mTabBackgroundResId);
        setUpWithAdapter(adapter);
    }

    /**
     * Binds this tab layout to the given adapter's {@link ViewPager}.
     *
     * @throws IllegalArgumentException if the ViewPager has no PagerAdapter set
     */
    public void setUpWithAdapter(RecyclerTabLayout.Adapter<?> adapter) {
        mAdapter = adapter;
        mViewPager = adapter.getViewPager();
        if (mViewPager.getAdapter() == null) {
            throw new IllegalArgumentException("ViewPager does not have a PagerAdapter set");
        }
        mViewPager.addOnPageChangeListener(new ViewPagerOnPageChangeListener(this));
        setAdapter(adapter);
        scrollToTab(mViewPager.getCurrentItem());
    }

    public void setCurrentItem(int position, boolean smoothScroll) {
        if (mViewPager != null) {
            mViewPager.setCurrentItem(position, smoothScroll);
            scrollToTab(mViewPager.getCurrentItem());
            return;
        }
        if (smoothScroll && position != mIndicatorPosition) {
            if (Build.VERSION.SDK_INT > Build.VERSION_CODES.HONEYCOMB) {
                startAnimation(position);
            } else {
                scrollToTab(position); //FIXME add animation
            }
        } else {
            scrollToTab(position);
        }
    }

    @TargetApi(Build.VERSION_CODES.HONEYCOMB)
    protected void startAnimation(final int position) {
        // Animate from the target tab's normalized distance-from-center to 0.
        float distance = 1;
        View view = mLinearLayoutManager.findViewByPosition(position);
        if (view != null) {
            float currentX = view.getX() + view.getMeasuredWidth() / 2.f;
            float centerX = getMeasuredWidth() / 2.f;
            distance = Math.abs(centerX - currentX) / view.getMeasuredWidth();
        }

        ValueAnimator animator;
        if (position < mIndicatorPosition) {
            animator = ValueAnimator.ofFloat(distance, 0);
        } else {
            animator = ValueAnimator.ofFloat(-distance, 0);
        }
        animator.setDuration(DEFAULT_SCROLL_DURATION);
        animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                scrollToTab(position, (float) animation.getAnimatedValue(), true);
            }
        });
        animator.start();
    }

    protected void scrollToTab(int position) {
        scrollToTab(position, 0, false);
        mAdapter.setCurrentIndicatorPosition(position);
        mAdapter.notifyDataSetChanged();
    }

    /**
     * Scrolls the strip so the tab at {@code position} (interpolated by
     * {@code positionOffset}) is centered, and records the indicator offsets
     * consumed by {@link #onDraw(Canvas)}.
     */
    protected void scrollToTab(int position, float positionOffset, boolean fitIndicator) {
        int scrollOffset = 0;

        View selectedView = mLinearLayoutManager.findViewByPosition(position);
        View nextView = mLinearLayoutManager.findViewByPosition(position + 1);
        if (selectedView != null) {
            int width = getMeasuredWidth();
            float scroll1 = width / 2.f - selectedView.getMeasuredWidth() / 2.f;
            if (nextView != null) {
                float scroll2 = width / 2.f - nextView.getMeasuredWidth() / 2.f;
                float scroll = scroll1 + (selectedView.getMeasuredWidth() - scroll2);
                float dx = scroll * positionOffset;
                scrollOffset = (int) (scroll1 - dx);

                mScrollOffset = (int) dx;
                mIndicatorOffset = (int) ((scroll1 - scroll2) * positionOffset);
            } else {
                scrollOffset = (int) scroll1;
                mScrollOffset = 0;
                mIndicatorOffset = 0;
            }
            if (fitIndicator) {
                // During the smooth-select animation the indicator tracks the
                // tab exactly, so no partial offsets are applied.
                mScrollOffset = 0;
                mIndicatorOffset = 0;
            }
            if (mAdapter != null && mIndicatorPosition == position) {
                updateCurrentIndicatorPosition(position, positionOffset - mOldPositionOffset,
                        positionOffset);
            }
            mIndicatorPosition = position;
        } else {
            if (getMeasuredWidth() > 0 && mTabMinWidth == mTabMaxWidth) { //fixed size
                int width = mTabMinWidth;
                int offset = (int) (positionOffset * -width);
                int leftOffset = (int) ((getMeasuredWidth() - width) / 2.f);
                scrollOffset = offset + leftOffset;
            }
            // Tab view not laid out yet; retry once a draw pass happens.
            mRequestScrollToTab = true;
        }

        mLinearLayoutManager.scrollToPositionWithOffset(position, scrollOffset);

        if (mIndicatorHeight > 0) {
            invalidate();
        }
        mOldPositionOffset = positionOffset;
    }

    /**
     * Flips the highlighted tab once the drag crosses the configured
     * position threshold in either direction.
     */
    protected void updateCurrentIndicatorPosition(int position, float dx, float positionOffset) {
        int indicatorPosition = -1;
        if (dx > 0 && positionOffset >= mPositionThreshold - POSITION_THRESHOLD_ALLOWABLE) {
            indicatorPosition = position + 1;
        } else if (dx < 0 && positionOffset <= 1 - mPositionThreshold + POSITION_THRESHOLD_ALLOWABLE) {
            indicatorPosition = position;
        }
        if (indicatorPosition >= 0 && indicatorPosition != mAdapter.getCurrentIndicatorPosition()) {
            mAdapter.setCurrentIndicatorPosition(indicatorPosition);
            mAdapter.notifyDataSetChanged();
        }
    }

    @Override
    public void onDraw(Canvas canvas) {
        View view = mLinearLayoutManager.findViewByPosition(mIndicatorPosition);
        if (view == null) {
            if (mRequestScrollToTab) {
                // A scroll was requested before the tab view existed; the view
                // hierarchy is laid out now, so retry the deferred scroll.
                mRequestScrollToTab = false;
                scrollToTab(mViewPager.getCurrentItem());
            }
            return;
        }
        mRequestScrollToTab = false;

        int left = view.getLeft() + mScrollOffset - mIndicatorOffset;
        int right = view.getRight() + mScrollOffset + mIndicatorOffset;
        int top = getHeight() - mIndicatorHeight;
        int bottom = getHeight();

        canvas.drawRect(left, top, right, bottom, mIndicatorPaint);
    }

    /**
     * Snaps the selection to the tab nearest the center when a fling on the
     * strip settles.
     */
    protected static class RecyclerOnScrollListener extends OnScrollListener {

        protected RecyclerTabLayout mRecyclerTabLayout;
        protected LinearLayoutManager mLinearLayoutManager;

        public RecyclerOnScrollListener(RecyclerTabLayout recyclerTabLayout,
                LinearLayoutManager linearLayoutManager) {
            mRecyclerTabLayout = recyclerTabLayout;
            mLinearLayoutManager = linearLayoutManager;
        }

        // Accumulated horizontal delta since the last idle state; its sign
        // tells us which direction the user scrolled.
        public int mDx;

        @Override
        public void onScrolled(RecyclerView recyclerView, int dx, int dy) {
            mDx += dx;
        }

        @Override
        public void onScrollStateChanged(RecyclerView recyclerView, int newState) {
            switch (newState) {
                case SCROLL_STATE_IDLE:
                    if (mDx > 0) {
                        selectCenterTabForRightScroll();
                    } else {
                        selectCenterTabForLeftScroll();
                    }
                    mDx = 0;
                    break;
                case SCROLL_STATE_DRAGGING:
                case SCROLL_STATE_SETTLING:
            }
        }

        protected void selectCenterTabForRightScroll() {
            int first = mLinearLayoutManager.findFirstVisibleItemPosition();
            int last = mLinearLayoutManager.findLastVisibleItemPosition();
            int center = mRecyclerTabLayout.getWidth() / 2;
            for (int position = first; position <= last; position++) {
                View view = mLinearLayoutManager.findViewByPosition(position);
                if (view.getLeft() + view.getWidth() >= center) {
                    mRecyclerTabLayout.setCurrentItem(position, false);
                    break;
                }
            }
        }

        protected void selectCenterTabForLeftScroll() {
            int first = mLinearLayoutManager.findFirstVisibleItemPosition();
            int last = mLinearLayoutManager.findLastVisibleItemPosition();
            int center = mRecyclerTabLayout.getWidth() / 2;
            for (int position = last; position >= first; position--) {
                View view = mLinearLayoutManager.findViewByPosition(position);
                if (view.getLeft() <= center) {
                    mRecyclerTabLayout.setCurrentItem(position, false);
                    break;
                }
            }
        }
    }

    /**
     * Mirrors ViewPager page scrolling into the tab strip.
     */
    protected static class ViewPagerOnPageChangeListener implements ViewPager.OnPageChangeListener {

        private final RecyclerTabLayout mRecyclerTabLayout;
        private int mScrollState;

        public ViewPagerOnPageChangeListener(RecyclerTabLayout recyclerTabLayout) {
            mRecyclerTabLayout = recyclerTabLayout;
        }

        @Override
        public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {
            mRecyclerTabLayout.scrollToTab(position, positionOffset, false);
        }

        @Override
        public void onPageScrollStateChanged(int state) {
            mScrollState = state;
        }

        @Override
        public void onPageSelected(int position) {
            // When the page was changed programmatically (no scroll in flight),
            // onPageScrolled never fires, so sync the strip here.
            if (mScrollState == ViewPager.SCROLL_STATE_IDLE) {
                if (mRecyclerTabLayout.mIndicatorPosition != position) {
                    mRecyclerTabLayout.scrollToTab(position);
                }
            }
        }
    }

    /**
     * Base adapter contract: tab adapters must expose their ViewPager and
     * track which position currently carries the indicator.
     */
    public static abstract class Adapter<T extends RecyclerView.ViewHolder>
            extends RecyclerView.Adapter<T> {

        protected ViewPager mViewPager;
        protected int mIndicatorPosition;

        public Adapter(ViewPager viewPager) {
            mViewPager = viewPager;
        }

        public ViewPager getViewPager() {
            return mViewPager;
        }

        public void setCurrentIndicatorPosition(int indicatorPosition) {
            mIndicatorPosition = indicatorPosition;
        }

        public int getCurrentIndicatorPosition() {
            return mIndicatorPosition;
        }
    }

    /**
     * Default adapter that renders each page title as a styled {@link TextView}.
     */
    public static class DefaultAdapter
            extends RecyclerTabLayout.Adapter<DefaultAdapter.ViewHolder> {

        protected static final int MAX_TAB_TEXT_LINES = 2;

        protected int mTabPaddingStart;
        protected int mTabPaddingTop;
        protected int mTabPaddingEnd;
        protected int mTabPaddingBottom;
        protected int mTabTextAppearance;
        protected boolean mTabSelectedTextColorSet;
        protected int mTabSelectedTextColor;
        private int mTabMaxWidth;
        private int mTabMinWidth;
        private int mTabBackgroundResId;

        public DefaultAdapter(ViewPager viewPager) {
            super(viewPager);
        }

        @SuppressWarnings("deprecation")
        @Override
        public DefaultAdapter.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
            TabTextView tabTextView = new TabTextView(parent.getContext());
            ViewCompat.setPaddingRelative(tabTextView, mTabPaddingStart, mTabPaddingTop,
                    mTabPaddingEnd, mTabPaddingBottom);
            tabTextView.setTextAppearance(parent.getContext(), mTabTextAppearance);
            tabTextView.setGravity(Gravity.CENTER);
            tabTextView.setMaxLines(MAX_TAB_TEXT_LINES);
            tabTextView.setEllipsize(TextUtils.TruncateAt.END);
            tabTextView.setMaxWidth(mTabMaxWidth);
            tabTextView.setMinWidth(mTabMinWidth);
            // Apply the selected-state color list AFTER the text appearance so
            // the appearance's default color cannot clobber it.
            if (mTabSelectedTextColorSet) {
                tabTextView.setTextColor(tabTextView.createColorStateList(
                        tabTextView.getCurrentTextColor(), mTabSelectedTextColor));
            }
            if (mTabBackgroundResId != 0) {
                tabTextView.setBackgroundDrawable(
                        TintManager.getDrawable(tabTextView.getContext(), mTabBackgroundResId));
            }
            tabTextView.setLayoutParams(createLayoutParamsForTabs());
            return new ViewHolder(tabTextView);
        }

        @Override
        public void onBindViewHolder(DefaultAdapter.ViewHolder holder, int position) {
            CharSequence title = getViewPager().getAdapter().getPageTitle(position);
            holder.title.setText(title);
            holder.title.setSelected(getCurrentIndicatorPosition() == position);
        }

        @Override
        public int getItemCount() {
            return getViewPager().getAdapter().getCount();
        }

        public void setTabPadding(int tabPaddingStart, int tabPaddingTop, int tabPaddingEnd,
                int tabPaddingBottom) {
            mTabPaddingStart = tabPaddingStart;
            mTabPaddingTop = tabPaddingTop;
            mTabPaddingEnd = tabPaddingEnd;
            mTabPaddingBottom = tabPaddingBottom;
        }

        public void setTabTextAppearance(int tabTextAppearance) {
            mTabTextAppearance = tabTextAppearance;
        }

        public void setTabSelectedTextColor(boolean tabSelectedTextColorSet,
                int tabSelectedTextColor) {
            mTabSelectedTextColorSet = tabSelectedTextColorSet;
            mTabSelectedTextColor = tabSelectedTextColor;
        }

        public void setTabMaxWidth(int tabMaxWidth) {
            mTabMaxWidth = tabMaxWidth;
        }

        public void setTabMinWidth(int tabMinWidth) {
            mTabMinWidth = tabMinWidth;
        }

        public void setTabBackgroundResId(int tabBackgroundResId) {
            mTabBackgroundResId = tabBackgroundResId;
        }

        protected RecyclerView.LayoutParams createLayoutParamsForTabs() {
            return new RecyclerView.LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
        }

        public class ViewHolder extends RecyclerView.ViewHolder {

            public TextView title;

            public ViewHolder(View itemView) {
                super(itemView);
                title = (TextView) itemView;
                itemView.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        getViewPager().setCurrentItem(getAdapterPosition(), true);
                    }
                });
            }
        }
    }

    /**
     * TextView that can build a selected/default {@link ColorStateList}.
     */
    public static class TabTextView extends TextView {

        public TabTextView(Context context) {
            super(context);
        }

        public ColorStateList createColorStateList(int defaultColor, int selectedColor) {
            final int[][] states = new int[2][];
            final int[] colors = new int[2];
            states[0] = SELECTED_STATE_SET;
            colors[0] = selectedColor;

            // Default enabled state
            states[1] = EMPTY_STATE_SET;
            colors[1] = defaultColor;

            return new ColorStateList(states, colors);
        }
    }
}
| |
/*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.util.newProjectWizard;
import com.intellij.CommonBundle;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.util.BrowseFilesListener;
import com.intellij.ide.util.ElementsChooser;
import com.intellij.ide.util.JavaUtil;
import com.intellij.ide.util.projectWizard.AbstractStepWithProgress;
import com.intellij.ide.util.projectWizard.SourcePathsBuilder;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ApplicationNamesInfo;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.fileChooser.FileChooserDescriptor;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.MultiLineLabelUI;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.ui.DocumentAdapter;
import com.intellij.ui.FieldPanel;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import javax.swing.event.DocumentEvent;
import java.awt.*;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* @author Eugene Zhuravlev
* Date: Jan 6, 2004
*/
public class SourcePathsStep extends AbstractStepWithProgress<List<Pair<String, String>>> {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.util.newProjectWizard.SourcePathsStep");
  // Card id currently displayed in myResultPanel; one of the two panel constants below.
  private String myCurrentMode;
  @NonNls private static final String CREATE_SOURCE_PANEL = "create_source";
  @NonNls private static final String CHOOSE_SOURCE_PANEL = "choose_source";
  private static final List<Pair<String,String>> EMPTY_STRING_STRING_ARRAY = Collections.emptyList();
  private final SourcePathsBuilder myBuilder;
  private final Icon myIcon;
  private final String myHelpId;
  // Chooser of auto-detected (source path, package prefix) pairs.
  private ElementsChooser<Pair<String,String>> mySourcePathsChooser;
  // Content root for which the current UI state was computed; used to detect content-root changes.
  private String myCurrentContentEntryPath = null;
  private JRadioButton myRbCreateSource;
  private JRadioButton myRbNoSource;
  // Path of the new source directory, relative to the content root.
  private JTextField myTfSourceDirectoryName;
  // Read-only preview of the resulting absolute source directory path.
  private JTextField myTfFullPath;
  // Card-layout container switching between the "create" and "choose" panels.
  private JPanel myResultPanel;
  /**
   * @param builder receives the chosen content entry and source paths in {@link #updateDataModel()}
   * @param icon    step icon returned from {@link #getIcon()}
   * @param helpId  help topic id returned from {@link #getHelpId()}
   */
  public SourcePathsStep(SourcePathsBuilder builder, Icon icon, @NonNls String helpId) {
    super(IdeBundle.message("prompt.stop.searching.for.sources", ApplicationNamesInfo.getInstance().getProductName()));
    myBuilder = builder;
    myIcon = icon;
    myHelpId = helpId;
  }
  /** Builds the card panel holding both UI variants; the active card is picked in {@link #onFinished}. */
  protected JComponent createResultsPanel() {
    myResultPanel = new JPanel(new CardLayout());
    myResultPanel.add(createComponentForEmptyRootCase(), CREATE_SOURCE_PANEL);
    myResultPanel.add(createComponentForChooseSources(), CHOOSE_SOURCE_PANEL);
    return myResultPanel;
  }
  /**
   * Builds the panel shown when no sources were found under the content root:
   * radio buttons to create a source directory (with a relative-path field and a
   * full-path preview) or to create none.
   */
  private JComponent createComponentForEmptyRootCase() {
    final JPanel panel = new JPanel(new GridBagLayout());
    final String text = IdeBundle.message("prompt.please.specify.java.sources.directory");
    final JLabel label = new JLabel(text);
    label.setUI(new MultiLineLabelUI());
    panel.add(label, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(8, 10, 0, 10), 0, 0));
    myRbCreateSource = new JRadioButton(IdeBundle.message("radio.create.source.directory"), true);
    panel.add(myRbCreateSource, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(8, 10, 0, 10), 0, 0));
    myTfSourceDirectoryName = new JTextField(suggestSourceDirectoryName());
    final JLabel srcPathLabel = new JLabel(IdeBundle.message("prompt.enter.relative.path.to.module.content.root", File.separator));
    panel.add(srcPathLabel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(8, 30, 0, 0), 0, 0));
    final FileChooserDescriptor chooserDescriptor = new FileChooserDescriptor(false, true, false, false, false, false);
    chooserDescriptor.setIsTreeRootVisible(true);
    final FieldPanel fieldPanel = createFieldPanel(myTfSourceDirectoryName, null, new BrowsePathListener(myTfSourceDirectoryName, chooserDescriptor));
    panel.add(fieldPanel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(8, 30, 0, 10), 0, 0));
    // NOTE(review): this button is also constructed selected, like myRbCreateSource above.
    // ButtonGroup.add() deselects a selected button added to a group that already has a
    // selection, so "create source" effectively wins — consider passing false here for clarity.
    myRbNoSource = new JRadioButton(IdeBundle.message("radio.do.not.create.source.directory"), true);
    panel.add(myRbNoSource, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(8, 10, 0, 10), 0, 0));
    final JLabel fullPathLabel = new JLabel(IdeBundle.message("label.source.directory"));
    panel.add(fullPathLabel, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.SOUTHWEST, GridBagConstraints.NONE, new Insets(8, 10, 0, 10), 0, 0));
    myTfFullPath = new JTextField();
    myTfFullPath.setEditable(false);
    // Replace the text field border with an empty border of equal insets so the
    // read-only preview looks like a label while keeping the field's metrics.
    final Insets borderInsets = myTfFullPath.getBorder().getBorderInsets(myTfFullPath);
    myTfFullPath.setBorder(BorderFactory.createEmptyBorder(borderInsets.top, borderInsets.left, borderInsets.bottom, borderInsets.right));
    panel.add(myTfFullPath, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.SOUTHWEST, GridBagConstraints.HORIZONTAL, new Insets(8, 10, 8, 10), 0, 0));
    ButtonGroup group = new ButtonGroup();
    group.add(myRbCreateSource);
    group.add(myRbNoSource);
    // Keep the full-path preview in sync with the relative path as the user types.
    myTfSourceDirectoryName.getDocument().addDocumentListener(new DocumentAdapter() {
      public void textChanged(DocumentEvent event) {
        updateFullPathField();
      }
    });
    // Enable/disable the path controls as the "create source" choice toggles.
    myRbCreateSource.addItemListener(new ItemListener() {
      public void itemStateChanged(ItemEvent e) {
        final boolean enabled = e.getStateChange() == ItemEvent.SELECTED;
        srcPathLabel.setEnabled(enabled);
        fieldPanel.setEnabled(enabled);
        fullPathLabel.setVisible(enabled);
        myTfFullPath.setVisible(enabled);
        if (enabled) {
          myTfSourceDirectoryName.requestFocus();
        }
      }
    });
    return panel;
  }
  /** Default relative name proposed for a new source directory; overridable by subclasses. */
  @NonNls protected String suggestSourceDirectoryName() {
    return "src";
  }
  /** Refreshes the read-only absolute-path preview from the current content root and relative name. */
  private void updateFullPathField() {
    final String sourceDirectoryPath = getSourceDirectoryPath();
    if (sourceDirectoryPath != null) {
      myTfFullPath.setText(sourceDirectoryPath.replace('/', File.separatorChar));
    }
    else {
      myTfFullPath.setText("");
    }
  }
  /**
   * Builds the panel shown when source directories were auto-detected: a checkable
   * list of (path, package prefix) pairs plus mark-all / unmark-all buttons.
   */
  private JComponent createComponentForChooseSources() {
    final JPanel panel = new JPanel(new GridBagLayout());
    mySourcePathsChooser = new ElementsChooser<Pair<String, String>>(true) {
      public String getItemText(@NotNull Pair<String, String> pair) {
        // Show "path (package.prefix)"; omit the parentheses when there is no prefix.
        if ("".equals(pair.second)) return pair.first;
        return pair.first + " (" + pair.second + ")";
      }
    };
    final String text = IdeBundle.message("label.java.source.files.have.been.found");
    final JLabel label = new JLabel(text);
    label.setUI(new MultiLineLabelUI());
    panel.add(label, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 2, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.HORIZONTAL, new Insets(8, 10, 0, 10), 0, 0));
    panel.add(mySourcePathsChooser, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 2, 1, 1.0, 1.0, GridBagConstraints.NORTHWEST, GridBagConstraints.BOTH, new Insets(8, 10, 8, 10), 0, 0));
    final JButton markAllButton = new JButton(IdeBundle.message("button.mark.all"));
    panel.add(markAllButton, new GridBagConstraints(0, GridBagConstraints.RELATIVE, 1, 1, 0.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0, 10, 8, 2), 0, 0));
    final JButton unmarkAllButton = new JButton(IdeBundle.message("button.unmark.all"));
    panel.add(unmarkAllButton, new GridBagConstraints(1, GridBagConstraints.RELATIVE, 1, 1, 1.0, 0.0, GridBagConstraints.NORTHWEST, GridBagConstraints.NONE, new Insets(0, 0, 8, 10), 0, 0));
    markAllButton.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        mySourcePathsChooser.setAllElementsMarked(true);
      }
    });
    unmarkAllButton.addActionListener(new ActionListener() {
      public void actionPerformed(ActionEvent e) {
        mySourcePathsChooser.setAllElementsMarked(false);
      }
    });
    return panel;
  }
  /** Focuses the relative-path field in "create" mode, otherwise the detected-sources chooser. */
  public JComponent getPreferredFocusedComponent() {
    return myRbCreateSource.isSelected()? myTfSourceDirectoryName : mySourcePathsChooser.getComponent();
  }
  /**
   * Pushes the user's choices into the builder: the content entry path plus either the
   * marked detected source roots (with package prefixes, normalized to '/' separators)
   * or the single to-be-created directory, or an empty list when none apply.
   */
  public void updateDataModel() {
    List<Pair<String,String>> paths = null;
    if (CHOOSE_SOURCE_PANEL.equals(myCurrentMode)) {
      final List<Pair<String, String>> selectedElements = mySourcePathsChooser.getMarkedElements();
      if (selectedElements.size() > 0) {
        paths = new ArrayList<Pair<String, String>>(selectedElements.size());
        for (final Pair<String, String> path : selectedElements) {
          paths.add(Pair.create(path.first.replace(File.separatorChar, '/'), path.second));
        }
      }
    }
    else {
      if (myRbCreateSource.isSelected()) {
        final String sourceDirectoryPath = getSourceDirectoryPath();
        if (sourceDirectoryPath != null) {
          paths = Collections.singletonList(Pair.create(sourceDirectoryPath, ""));
        }
      }
    }
    myBuilder.setContentEntryPath(getContentRootPath());
    if (paths != null) {
      myBuilder.setSourcePaths(paths);
    }
    else {
      myBuilder.setSourcePaths(new ArrayList<Pair<String, String>>());
    }
  }
  /**
   * In "create" mode, confirms an empty relative path with the user (offering to mark the
   * content root itself, skip marking, or cancel), verifies the target lies under the
   * content root, and creates the directory. Returns false to keep the wizard on this step.
   */
  public boolean validate() {
    if (!super.validate()) {
      return false;
    }
    if (CREATE_SOURCE_PANEL.equals(myCurrentMode) && myRbCreateSource.isSelected()) {
      final String sourceDirectoryPath = getSourceDirectoryPath();
      final String relativePath = myTfSourceDirectoryName.getText().trim();
      if (relativePath.length() == 0) {
        String text = IdeBundle.message("prompt.relative.path.to.sources.empty", sourceDirectoryPath);
        final int answer = Messages.showDialog(myTfSourceDirectoryName, text, IdeBundle.message("title.mark.source.directory"),
                                               new String[]{IdeBundle.message("action.mark"), IdeBundle.message("action.do.not.mark"),
                                                 CommonBundle.getCancelButtonText()}, 0, Messages.getQuestionIcon());
        if (answer == 2) {
          return false; // cancel
        }
        if (answer == 1) { // don't mark
          myRbNoSource.doClick();
        }
      }
      if (sourceDirectoryPath != null) {
        final File rootDir = new File(getContentRootPath());
        final File srcDir = new File(sourceDirectoryPath);
        try {
          if (!FileUtil.isAncestor(rootDir, srcDir, false)) {
            Messages.showErrorDialog(myTfSourceDirectoryName,
                                     IdeBundle.message("error.source.directory.should.be.under.module.content.root.directory"),
                                     CommonBundle.getErrorTitle());
            return false;
          }
        }
        catch (IOException e) {
          Messages.showErrorDialog(myTfSourceDirectoryName, e.getMessage(), CommonBundle.getErrorTitle());
          return false;
        }
        // NOTE(review): the mkdirs() result is ignored; a failure to create the directory
        // is silently accepted here — presumably handled later by the builder. Verify.
        srcDir.mkdirs();
      }
    }
    return true;
  }
  /**
   * Returns the absolute path of the source directory to create: content root plus the
   * trimmed relative name (separators normalized to '/'), the content root itself when
   * the name is empty, or null when there is no content root.
   */
  @Nullable
  private String getSourceDirectoryPath() {
    final String contentEntryPath = getContentRootPath();
    final String dirName = myTfSourceDirectoryName.getText().trim().replace(File.separatorChar, '/');
    if (contentEntryPath != null) {
      return dirName.length() > 0? contentEntryPath + "/" + dirName : contentEntryPath;
    }
    return null;
  }
  /** Re-runs the background source search only when the content root changed. */
  protected boolean shouldRunProgress() {
    return isContentEntryChanged();
  }
  /**
   * Invoked on the EDT after {@link #calculate()}: switches to the "choose" card when
   * roots were found (preselecting the first), otherwise to the "create" card.
   *
   * @param foundPaths detected (source path, package prefix) pairs
   * @param canceled   whether the search was canceled; if so the cached content root is reset
   */
  protected void onFinished(final List<Pair<String, String>> foundPaths, final boolean canceled) {
    if (foundPaths.size() > 0) {
      myCurrentMode = CHOOSE_SOURCE_PANEL;
      mySourcePathsChooser.setElements(foundPaths, true);
    }
    else {
      myCurrentMode = CREATE_SOURCE_PANEL;
      updateFullPathField();
    }
    updateStepUI(canceled ? null : getContentRootPath());
    if (CHOOSE_SOURCE_PANEL.equals(myCurrentMode)) {
      mySourcePathsChooser.selectElements(foundPaths.subList(0, 1));
    }
    else if (CREATE_SOURCE_PANEL.equals(myCurrentMode)) {
      myTfSourceDirectoryName.selectAll();
    }
  }
  /** Records the content root the UI now reflects and shows the card for myCurrentMode. */
  private void updateStepUI(final String contentEntryPath) {
    myCurrentContentEntryPath = contentEntryPath;
    ((CardLayout)myResultPanel.getLayout()).show(myResultPanel, myCurrentMode);
    myResultPanel.revalidate();
  }
  /** True when the builder's content root differs from the one the UI was last built for. */
  protected boolean isContentEntryChanged() {
    final String contentEntryPath = getContentRootPath();
    return myCurrentContentEntryPath == null? contentEntryPath != null : !myCurrentContentEntryPath.equals(contentEntryPath);
  }
  /**
   * Background task: scans the content root for Java source directories and returns
   * (path, package prefix) pairs for every suggested root located under the content root.
   * Returns an empty list when the root is unset, missing, or empty.
   */
  protected List<Pair<String,String>> calculate() {
    String contentEntryPath = getContentRootPath();
    if (contentEntryPath == null) {
      return EMPTY_STRING_STRING_ARRAY;
    }
    final File entryFile = new File(contentEntryPath);
    if (!entryFile.exists()) {
      return EMPTY_STRING_STRING_ARRAY;
    }
    final File[] children = entryFile.listFiles();
    if (children == null || children.length == 0) {
      return EMPTY_STRING_STRING_ARRAY;
    }
    final List<Pair<File,String>> suggestedRoots = JavaUtil.suggestRoots(entryFile);
    final List<Pair<String,String>> paths = new ArrayList<Pair<String, String>>();
    for (final Pair<File, String> suggestedRoot : suggestedRoots) {
      try {
        if (FileUtil.isAncestor(entryFile, suggestedRoot.first, false)) {
          // Resolve 8.3-style Windows names to full paths before reporting.
          final String path = FileUtil.resolveShortWindowsName(suggestedRoot.first.getPath());
          paths.add(Pair.create(path, suggestedRoot.second));
        }
      }
      catch (IOException e) {
        // A root we cannot resolve is skipped; log and continue with the rest.
        LOG.info(e);
      }
    }
    return paths;
  }
  /** Content root currently configured on the builder, or null if none. */
  @Nullable
  private String getContentRootPath() {
    return myBuilder.getContentEntryPath();
  }
  /** Presets the relative source-directory name (null is treated as empty). */
  protected void setSourceDirectoryName(String name) {
    name = name == null? "" : name.trim();
    myTfSourceDirectoryName.setText(name);
  }
  /** Progress text for the search, showing the content root with native separators. */
  protected String getProgressText() {
    final String root = getContentRootPath();
    return IdeBundle.message("progress.searching.for.sources", root != null? root.replace('/', File.separatorChar) : "") ;
  }
  /**
   * Browse listener for the relative-path field: roots the file chooser at the content
   * entry directory and converts the chosen absolute path back to a relative one.
   */
  private class BrowsePathListener extends BrowseFilesListener {
    private final FileChooserDescriptor myChooserDescriptor;
    private final JTextField myField;
    public BrowsePathListener(JTextField textField, final FileChooserDescriptor chooserDescriptor) {
      super(textField, IdeBundle.message("prompt.select.source.directory"), "", chooserDescriptor);
      myChooserDescriptor = chooserDescriptor;
      myField = textField;
    }
    /** Refreshes and returns the content root as a VirtualFile, or null when unset. */
    @Nullable
    private VirtualFile getContentEntryDir() {
      final String contentEntryPath = getContentRootPath();
      if (contentEntryPath != null) {
        // refreshAndFindFileByPath may modify the VFS, hence the write action.
        return ApplicationManager.getApplication().runWriteAction(new Computable<VirtualFile>() {
          public VirtualFile compute() {
            return LocalFileSystem.getInstance().refreshAndFindFileByPath(contentEntryPath);
          }
        });
      }
      return null;
    }
    public void actionPerformed(ActionEvent e) {
      final VirtualFile contentEntryDir = getContentEntryDir();
      if (contentEntryDir != null) {
        myChooserDescriptor.setRoot(contentEntryDir);
        final String textBefore = myField.getText().trim();
        super.actionPerformed(e);
        // If the user picked something, rewrite the field as a path relative to the root.
        if (!textBefore.equals(myField.getText().trim())) {
          final String fullPath = myField.getText().trim().replace(File.separatorChar, '/');
          final VirtualFile fileByPath = LocalFileSystem.getInstance().findFileByPath(fullPath);
          LOG.assertTrue(fileByPath != null);
          myField.setText(VfsUtil.getRelativePath(fileByPath, contentEntryDir, File.separatorChar));
        }
      }
    }
  }
  public Icon getIcon() {
    return myIcon;
  }
  public String getHelpId() {
    return myHelpId;
  }
}
| |
/*******************************************************************************
* Copyright 2015 Ivan Shubin http://mindengine.net
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.mindengine.galen.validation;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import net.mindengine.galen.specs.page.*;
import net.mindengine.galen.specs.reader.page.TaggedPageSection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import net.mindengine.galen.specs.Spec;
import static net.mindengine.galen.validation.ValidationResult.doesNotHaveErrors;
/**
 * Runs Galen spec validation over a list of tagged page sections, including nested
 * sub-sections, conditional blocks and spec groups, and forwards progress events to an
 * optional {@link ValidationListener}. Listener callbacks are best-effort: exceptions
 * thrown by the listener are trapped and traced, never propagated.
 */
public class SectionValidation {
    private final static Logger LOG = LoggerFactory.getLogger(SectionValidation.class);
    // Readability aliases for the boolean "shouldReport" / "tellForItSelf" parameters.
    private static final boolean SHOULD_REPORT = true;
    private static final boolean SHOULD_NOT_REPORT = false;
    private static final List<ValidationResult> EMPTY_RESULTS = new LinkedList<ValidationResult>();
    private static final boolean DONT_TELL_FOR_ITSELF = false;
    private static final boolean TELL_FOR_ITSELF = true;
    private List<TaggedPageSection> pageSections;
    private PageValidation pageValidation;
    // May be null; all tell* helpers are null-safe.
    private ValidationListener validationListener;
    public SectionValidation(List<TaggedPageSection> pageSections, PageValidation pageValidation, ValidationListener validationListener) {
        this.pageSections = pageSections;
        this.pageValidation = pageValidation;
        this.validationListener = validationListener;
    }
    /**
     * Validates all configured sections and returns the accumulated results
     * (only results carrying errors are collected, see checkObject).
     */
    public List<ValidationResult> check() {
        //Fetching all multi objects from page before validation
        pageValidation.getPageSpec().updateMultiObjects(pageValidation.getPage());
        List<ValidationResult> validationResults = new LinkedList<ValidationResult>();
        for (TaggedPageSection section : pageSections) {
            validationResults.addAll(checkTaggedSection(section));
        }
        return validationResults;
    }
    /**
     * Validates one top-level tagged section: announces it to the listener itself,
     * then checks its body (without re-announcing) and any conditional blocks.
     */
    private List<ValidationResult> checkTaggedSection(TaggedPageSection section) {
        tellBeforeSection(section);
        List<ValidationResult> validationResult= new LinkedList<ValidationResult>();
        validationResult.addAll(checkSection(section, DONT_TELL_FOR_ITSELF));
        List<ConditionalBlock> conditionalBlocks = section.getConditionalBlocks();
        if (conditionalBlocks != null) {
            for (ConditionalBlock block : conditionalBlocks) {
                validationResult.addAll(checkConditionalBlock(block));
            }
        }
        tellAfterSection(section);
        return validationResult;
    }
    // Listener notification: section finished. No-op when no listener is set.
    private void tellAfterSection(PageSection section) {
        if (validationListener != null) {
            validationListener.onAfterSection(pageValidation, section);
        }
    }
    // Listener notification: section starting. No-op when no listener is set.
    private void tellBeforeSection(PageSection section) {
        if (validationListener != null) {
            validationListener.onBeforeSection(pageValidation, section);
        }
    }
    /**
     * Validates every object definition in the list. An object definition may expand to
     * several concrete object names (comma-separated list and/or simple wildcards).
     *
     * @param shouldReport when false, runs silently: no per-object listener events
     *                     (used to evaluate conditional-block statements)
     */
    private List<ValidationResult> checkObjects(List<ObjectSpecs> objects, boolean shouldReport) {
        List<ValidationResult> validationResults = new LinkedList<ValidationResult>();
        for (ObjectSpecs object : objects) {
            List<String> allObjectNames = findAllObjectNames(object.getObjectName());
            for (String objectName : allObjectNames) {
                if (shouldReport) {
                    tellOnObject(objectName);
                }
                validationResults.addAll(checkObject(objectName, object.getSpecs(), shouldReport));
                checkSpecGroups(objectName, object.getSpecGroups(), shouldReport);
                if (shouldReport) {
                    tellOnAfterObject(objectName);
                }
            }
        }
        return validationResults;
    }
    /**
     * Validates the specs of each spec group for the given object.
     * NOTE(review): the results returned by checkObject are discarded here, so spec-group
     * errors never reach the caller's result list — confirm whether this is intentional.
     * NOTE(review): tellOnSpecGroup/tellOnAfterSpecGroup are invoked even when
     * shouldReport is false, unlike the per-object events — verify.
     */
    private void checkSpecGroups(String objectName, List<SpecGroup> specGroups, boolean shouldReport) {
        if (specGroups != null) {
            for (SpecGroup specGroup : specGroups) {
                tellOnSpecGroup(specGroup);
                checkObject(objectName, specGroup.getSpecs(), shouldReport);
                tellOnAfterSpecGroup(specGroup);
            }
        }
    }
    /**
     * Evaluates a conditional block: if any of its condition statements passes, validates
     * the body; otherwise validates the "otherwise" part when present, or returns no results.
     */
    private List<ValidationResult> checkConditionalBlock(ConditionalBlock block) {
        if (oneOfConditionsApplies(block.getStatements())) {
            return checkSection(block.getBodyObjects(), DONT_TELL_FOR_ITSELF);
        }
        else if (block.getOtherwiseObjects() != null) {
            return checkSection(block.getOtherwiseObjects(), DONT_TELL_FOR_ITSELF);
        }
        else return EMPTY_RESULTS;
    }
    /**
     * Recursively validates a section and its sub-sections with listener reporting.
     *
     * @param tellForItSelf when true, emits before/after-section events for this section
     *                      (sub-sections always announce themselves)
     */
    private List<ValidationResult> checkSection(PageSection section, boolean tellForItSelf) {
        if (tellForItSelf) {
            tellBeforeSection(section);
        }
        List<ValidationResult> result = new LinkedList<ValidationResult>();
        if (section.getSections() != null) {
            for (PageSection subSection : section.getSections()) {
                result.addAll(checkSection(subSection, TELL_FOR_ITSELF));
            }
        }
        result.addAll(checkObjects(section.getObjects(), SHOULD_REPORT));
        if (tellForItSelf) {
            tellAfterSection(section);
        }
        return result;
    }
    /**
     * True if at least one statement's section validates according to the statement's
     * polarity (an inverted statement passes when the section has errors).
     */
    private boolean oneOfConditionsApplies(List<ConditionalBlockStatement> statements) {
        for (ConditionalBlockStatement statement : statements) {
            List<ValidationResult> validationResults = checkSectionSilently(statement.getSection());
            boolean statementStatus = doesNotHaveErrors(validationResults);
            if (statement.isInverted()) {
                statementStatus = !statementStatus;
            }
            if (statementStatus) {
                return true;
            }
        }
        return false;
    }
    /** Same recursion as checkSection but without listener reporting (condition evaluation). */
    private List<ValidationResult> checkSectionSilently(PageSection section) {
        List<ValidationResult> result = new LinkedList<ValidationResult>();
        if (section.getSections() != null) {
            for (PageSection subSection : section.getSections()) {
                result.addAll(checkSectionSilently(subSection));
            }
        }
        result.addAll(checkObjectsSilently(section.getObjects()));
        return result;
    }
    private List<ValidationResult> checkObjectsSilently(List<ObjectSpecs> objects) {
        return checkObjects(objects, SHOULD_NOT_REPORT);
    }
    /**
     * Expands a comma-separated object definition into concrete object names, resolving
     * simple wildcard patterns ('*' and '#') against the page spec's known objects.
     */
    private List<String> findAllObjectNames(String objectsDefinition) {
        List<String> objectNames = new LinkedList<String>();
        String names[] = objectsDefinition.split(",");
        for (String name : names) {
            name = name.trim();
            if (!name.isEmpty()) {
                if (isRegularExpression(name)) {
                    objectNames.addAll(fetchUsingRegex(name));
                }
                else {
                    objectNames.add(name);
                }
            }
        }
        return objectNames;
    }
    /**
     * Translates the simple wildcard syntax into a regex ('#' matches digits,
     * '*' matches word characters) and returns all matching page-spec object names.
     */
    private List<String> fetchUsingRegex(String simpleRegex) {
        String regex = simpleRegex.replace("#", "[0-9]+").replace("*", "[a-zA-Z0-9_]+");
        Pattern pattern = Pattern.compile(regex);
        List<String> objectNames = new LinkedList<String>();
        for (String objectName : pageValidation.getPageSpec().getObjects().keySet()) {
            if (pattern.matcher(objectName).matches()) {
                objectNames.add(objectName);
            }
        }
        return objectNames;
    }
    // A name containing '*' or '#' is treated as a wildcard pattern.
    private boolean isRegularExpression(String name) {
        if (name.contains("*") || name.contains("#")) {
            return true;
        }
        else return false;
    }
    // Listener notification helpers: each is null-safe and swallows listener exceptions
    // (traced only) so a misbehaving listener cannot abort validation.
    private void tellOnAfterObject(String objectName) {
        if (validationListener != null) {
            try {
                validationListener.onAfterObject(pageValidation, objectName);
            }
            catch (Exception e) {
                LOG.trace("Unknown error during validation after object", e);
            }
        }
    }
    private void tellOnObject(String objectName) {
        if (validationListener != null) {
            try {
                validationListener.onObject(pageValidation, objectName);
            }
            catch (Exception e) {
                LOG.trace("Unknown error during validation on object", e);
            }
        }
    }
    private void tellOnSpecGroup(SpecGroup specGroup) {
        if (validationListener != null) {
            try {
                validationListener.onSpecGroup(pageValidation, specGroup.getName());
            }
            catch (Exception e) {
                LOG.trace("Unknown error during validation of spec group", e);
            }
        }
    }
    private void tellOnAfterSpecGroup(SpecGroup specGroup) {
        if (validationListener != null) {
            try {
                validationListener.onAfterSpecGroup(pageValidation, specGroup.getName());
            }
            catch (Exception e) {
                LOG.trace("Unknown error during validation of spec group", e);
            }
        }
    }
    /**
     * Runs each spec against the object and collects only failing results (those with a
     * non-null error). Success/error/before-spec events are reported when shouldReport is set.
     */
    private List<ValidationResult> checkObject(String objectName, List<Spec> specs, boolean shouldReport) {
        List<ValidationResult> validationResults = new LinkedList<ValidationResult>();
        for (Spec spec : specs) {
            if (shouldReport) {
                tellBeforeSpec(pageValidation, objectName, spec);
            }
            ValidationResult result = pageValidation.check(objectName, spec);
            if (result.getError()!= null) {
                validationResults.add(result);
                if (shouldReport) {
                    tellOnSpecError(pageValidation, objectName, spec, result);
                }
            }
            else if (shouldReport) {
                tellOnSpecSuccess(pageValidation, objectName, spec, result);
            }
        }
        return validationResults;
    }
    private void tellBeforeSpec(PageValidation pageValidation, String objectName, Spec spec) {
        try {
            if (validationListener != null) {
                validationListener.onBeforeSpec(pageValidation, objectName, spec);
            }
        } catch (Exception e) {
            LOG.trace("Unknown error during before spec event", e);
        }
    }
    private void tellOnSpecError(PageValidation pageValidation, String objectName, Spec spec, ValidationResult result) {
        try {
            if (validationListener != null) {
                validationListener.onSpecError(pageValidation, objectName, spec, result);
            }
        }
        catch (Exception e) {
            LOG.trace("Unknown error during tell spec error", e);
        }
    }
    private void tellOnSpecSuccess(PageValidation pageValidation, String objectName, Spec spec, ValidationResult result) {
        try {
            if (validationListener != null) {
                validationListener.onSpecSuccess(pageValidation, objectName, spec, result);
            }
        }
        catch (Exception e) {
            LOG.trace("Unknown error during tell spec success", e);
        }
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.geospatial;
import io.trino.Session;
import io.trino.plugin.hive.TestingHivePlugin;
import io.trino.plugin.hive.authentication.HiveIdentity;
import io.trino.plugin.hive.metastore.Database;
import io.trino.plugin.hive.metastore.HiveMetastore;
import io.trino.spi.security.PrincipalType;
import io.trino.testing.AbstractTestQueryFramework;
import io.trino.testing.DistributedQueryRunner;
import org.testng.annotations.Test;
import java.io.File;
import java.util.Optional;
import static io.trino.SystemSessionProperties.SPATIAL_PARTITIONING_TABLE_NAME;
import static io.trino.plugin.hive.metastore.file.FileHiveMetastore.createTestingFileHiveMetastore;
import static io.trino.testing.TestingConnectorSession.SESSION;
import static io.trino.testing.TestingSession.testSessionBuilder;
import static java.lang.String.format;
public class TestSpatialJoins
extends AbstractTestQueryFramework
{
// A set of polygons such that:
// - a and c intersect;
// - c covers b;
private static final String POLYGONS_SQL = "VALUES " +
"('POLYGON ((-0.5 -0.6, 1.5 0, 1 1, 0 1, -0.5 -0.6))', 'a', 1), " +
"('POLYGON ((2 2, 3 2, 2.5 3, 2 2))', 'b', 2), " +
"('POLYGON ((0.8 0.7, 0.8 4, 5 4, 4.5 0.8, 0.8 0.7))', 'c', 3), " +
"('POLYGON ((7 7, 11 7, 11 11, 7 7))', 'd', 4), " +
"('POLYGON EMPTY', 'empty', 5), " +
"(null, 'null', 6)";
// A set of points such that:
// - a contains x
// - b and c contain y
// - d contains z
private static final String POINTS_SQL = "VALUES " +
"(-0.1, -0.1, 'x', 1), " +
"(2.1, 2.1, 'y', 2), " +
"(7.1, 7.2, 'z', 3), " +
"(null, 1.2, 'null', 4)";
private static final String MULTI_POINTS_SQL = "VALUES " +
"(-0.1, -0.1, 5.1, 5.1, 'x', 1), " +
"(7.1, 7.1, 2.1, 2.1, 'y', 2), " +
"(7.1, 7.2, 8, 9, 'z', 3), " +
"(null, 1.2, 4, null, 'null', 4)";
@Override
protected DistributedQueryRunner createQueryRunner()
throws Exception
{
Session session = testSessionBuilder()
.setSource(TestSpatialJoins.class.getSimpleName())
.setCatalog("hive")
.setSchema("default")
.build();
DistributedQueryRunner queryRunner = DistributedQueryRunner.builder(session).build();
queryRunner.installPlugin(new GeoPlugin());
File baseDir = queryRunner.getCoordinator().getBaseDataDir().resolve("hive_data").toFile();
HiveMetastore metastore = createTestingFileHiveMetastore(baseDir);
metastore.createDatabase(
new HiveIdentity(SESSION),
Database.builder()
.setDatabaseName("default")
.setOwnerName(Optional.of("public"))
.setOwnerType(Optional.of(PrincipalType.ROLE))
.build());
queryRunner.installPlugin(new TestingHivePlugin(metastore));
queryRunner.createCatalog("hive", "hive");
return queryRunner;
}
@Test
public void testBroadcastSpatialJoinContains()
{
testSpatialJoinContains(getSession());
}
@Test
public void testDistributedSpatialJoinContains()
{
assertUpdate(format("CREATE TABLE contains_partitioning AS " +
"SELECT spatial_partitioning(ST_GeometryFromText(wkt)) as v " +
"FROM (%s) as a (wkt, name, id)", POLYGONS_SQL), 1);
Session session = Session.builder(getSession())
.setSystemProperty(SPATIAL_PARTITIONING_TABLE_NAME, "contains_partitioning")
.build();
testSpatialJoinContains(session);
}
private void testSpatialJoinContains(Session session)
{
// Test ST_Contains(build, probe)
assertQuery(session, "SELECT b.name, a.name " +
"FROM (" + POINTS_SQL + ") AS a (latitude, longitude, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"WHERE ST_Contains(ST_GeometryFromText(wkt), ST_Point(longitude, latitude))",
"VALUES ('a', 'x'), ('b', 'y'), ('c', 'y'), ('d', 'z')");
assertQuery(session, "SELECT b.name, a.name " +
"FROM (" + POINTS_SQL + ") AS a (latitude, longitude, name, id) JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"ON ST_Contains(ST_GeometryFromText(wkt), ST_Point(longitude, latitude))",
"VALUES ('a', 'x'), ('b', 'y'), ('c', 'y'), ('d', 'z')");
assertQuery(session, "SELECT b.name, a.name " +
"FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"WHERE ST_Contains(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
"VALUES ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), ('c', 'b')");
assertQuery(session, "SELECT b.name, a.name " +
"FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"ON ST_Contains(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
"VALUES ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), ('c', 'b')");
// Test ST_Contains(probe, build)
assertQuery(session, "SELECT b.name, a.name " +
"FROM (" + POLYGONS_SQL + ") AS b (wkt, name, id), (" + POINTS_SQL + ") AS a (latitude, longitude, name, id) " +
"WHERE ST_Contains(ST_GeometryFromText(wkt), ST_Point(longitude, latitude))",
"VALUES ('a', 'x'), ('b', 'y'), ('c', 'y'), ('d', 'z')");
assertQuery(session, "SELECT b.name, a.name " +
"FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"WHERE ST_Contains(ST_GeometryFromText(a.wkt), ST_GeometryFromText(b.wkt))",
"VALUES ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), ('b', 'c')");
}
@Test
public void testBroadcastSpatialJoinContainsWithExtraConditions()
{
assertQuery("SELECT b.name, a.name " +
"FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"WHERE ST_Contains(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt)) AND a.name != b.name",
"VALUES ('c', 'b')");
assertQuery("SELECT b.name, a.name " +
"FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"ON ST_Contains(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt)) AND a.name != b.name",
"VALUES ('c', 'b')");
}
@Test
public void testBroadcastSpatialJoinContainsWithStatefulExtraCondition()
{
// Generate multi-page probe input: 10K points in polygon 'a' and 10K points in polygon 'b'
String pointsX = generatePointsSql(0, 0, 1, 1, 10_000, "x");
String pointsY = generatePointsSql(2, 2, 2.5, 2.5, 10_000, "y");
// Run spatial join with additional stateful filter
assertQuery("SELECT b.name, a.name " +
"FROM (" + pointsX + " UNION ALL " + pointsY + ") AS a (latitude, longitude, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"WHERE ST_Contains(ST_GeometryFromText(wkt), ST_Point(longitude, latitude)) AND stateful_sleeping_sum(0.001, 100, a.id, b.id) <= 3",
"VALUES ('a', 'x1'), ('a', 'x2'), ('b', 'y1')");
}
private static String generatePointsSql(double minX, double minY, double maxX, double maxY, int pointCount, String prefix)
{
return format(
"SELECT %s + n * %f, %s + n * %f, '%s' || CAST (n AS VARCHAR), n " +
"FROM (SELECT sequence(1, %s) as numbers) " +
"CROSS JOIN UNNEST (numbers) AS t(n)",
minX,
(maxX - minX) / pointCount,
minY,
(maxY - minY) / pointCount,
prefix,
pointCount);
}
@Test
public void testBroadcastSpatialJoinContainsWithEmptyBuildSide()
{
assertQueryReturnsEmptyResult("SELECT b.name, a.name " +
"FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"WHERE b.name = 'invalid' AND ST_Contains(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))");
}
@Test
public void testBroadcastSpatialJoinContainsWithEmptyProbeSide()
{
assertQueryReturnsEmptyResult("SELECT b.name, a.name " +
"FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
"WHERE a.name = 'invalid' AND ST_Contains(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))");
}
@Test
public void testBroadcastSpatialJoinIntersects()
{
testSpatialJoinIntersects(getSession());
}
@Test
public void tesDistributedSpatialJoinIntersects()
{
assertUpdate(format("CREATE TABLE intersects_partitioning AS " +
"SELECT spatial_partitioning(ST_GeometryFromText(wkt)) as v " +
"FROM (%s) as a (wkt, name, id)", POLYGONS_SQL), 1);
Session session = Session.builder(getSession())
.setSystemProperty(SPATIAL_PARTITIONING_TABLE_NAME, "intersects_partitioning")
.build();
testSpatialJoinIntersects(session);
}
    // Shared assertions for ST_Intersects joins. ST_Intersects is symmetric, so the
    // argument order (build first vs. probe first) must not change the result set.
    private void testSpatialJoinIntersects(Session session)
    {
        // Test ST_Intersects(build, probe)
        assertQuery(session, "SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "WHERE ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
                "SELECT * FROM VALUES ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), " +
                        "('a', 'c'), ('c', 'a'), ('c', 'b'), ('b', 'c')");
        // Same predicate expressed as an explicit JOIN ... ON clause.
        assertQuery(session, "SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
                "SELECT * FROM VALUES ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), " +
                        "('a', 'c'), ('c', 'a'), ('c', 'b'), ('b', 'c')");
        // Test ST_Intersects(probe, build)
        assertQuery(session, "SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "WHERE ST_Intersects(ST_GeometryFromText(a.wkt), ST_GeometryFromText(b.wkt))",
                "SELECT * FROM VALUES ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), " +
                        "('a', 'c'), ('c', 'a'), ('c', 'b'), ('b', 'c')");
    }
    @Test
    public void testBroadcastSpatialJoinIntersectsWithExtraConditions()
    {
        // Spatial predicate combined with a non-spatial extra condition: the extra
        // condition filters matched pairs after the spatial lookup.
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "WHERE ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt)) " +
                "   AND a.name != b.name",
                "VALUES ('a', 'c'), ('c', 'a'), ('c', 'b'), ('b', 'c')");
        // Same as above, expressed as an explicit JOIN ... ON clause.
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt)) " +
                "   AND a.name != b.name",
                "VALUES ('a', 'c'), ('c', 'a'), ('c', 'b'), ('b', 'c')");
        // Asymmetric extra condition (a.name < b.name) halves the symmetric matches.
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id), (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "WHERE ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt)) " +
                "   AND a.name < b.name",
                "VALUES ('a', 'c'), ('b', 'c')");
    }
@Test
public void testBroadcastDistanceQuery()
{
testDistanceQuery(getSession());
}
    @Test
    public void testDistributedDistanceQuery()
    {
        // Build a spatial partitioning table from the probe points, then rerun the
        // shared ST_Distance assertions with partitioned (distributed) execution enabled.
        assertUpdate("CREATE TABLE distance_partitioning AS SELECT spatial_partitioning(ST_Point(x, y)) as v " +
                "FROM (VALUES (0, 0, '0_0'), (1, 0, '1_0'), (3, 0, '3_0'), (10, 0, '10_0')) as a (x, y, name)", 1);
        Session session = Session.builder(getSession())
                .setSystemProperty(SPATIAL_PARTITIONING_TABLE_NAME, "distance_partitioning")
                .build();
        testDistanceQuery(session);
    }
    // Shared assertions for ST_Distance range joins. ST_Distance is symmetric, so
    // argument order must not matter; the last case uses a per-row radius expression.
    private void testDistanceQuery(Session session)
    {
        // ST_Distance(probe, build)
        assertQuery(session, "SELECT a.name, b.name " +
                "FROM (VALUES (0, 0, '0_0'), (1, 0, '1_0'), (3, 0, '3_0'), (10, 0, '10_0')) as a (x, y, name), " +
                "(VALUES (0, 1, '0_1'), (1, 1, '1_1'), (3, 1, '3_1'), (10, 1, '10_1')) as b (x, y, name) " +
                "WHERE ST_Distance(ST_Point(a.x, a.y), ST_Point(b.x, b.y)) <= 1.5",
                "VALUES ('0_0', '0_1'), ('0_0', '1_1'), ('1_0', '0_1'), ('1_0', '1_1'), ('3_0', '3_1'), ('10_0', '10_1')");
        // ST_Distance(build, probe)
        assertQuery(session, "SELECT a.name, b.name " +
                "FROM (VALUES (0, 0, '0_0'), (1, 0, '1_0'), (3, 0, '3_0'), (10, 0, '10_0')) as a (x, y, name), " +
                "(VALUES (0, 1, '0_1'), (1, 1, '1_1'), (3, 1, '3_1'), (10, 1, '10_1')) as b (x, y, name) " +
                "WHERE ST_Distance(ST_Point(b.x, b.y), ST_Point(a.x, a.y)) <= 1.5",
                "VALUES ('0_0', '0_1'), ('0_0', '1_1'), ('1_0', '0_1'), ('1_0', '1_1'), ('3_0', '3_1'), ('10_0', '10_1')");
        // radius expression
        assertQuery(session, "SELECT a.name, b.name FROM (VALUES (0, 0, '0_0'), (1, 0, '1_0'), (3, 0, '3_0'), (10, 0, '10_0')) as a (x, y, name), " +
                "(VALUES (0, 1, '0_1'), (1, 1, '1_1'), (3, 1, '3_1'), (10, 1, '10_1')) as b (x, y, name) " +
                "WHERE ST_Distance(ST_Point(a.x, a.y), ST_Point(b.x, b.y)) <= sqrt(b.x * b.x + b.y * b.y)",
                "VALUES ('0_0', '0_1'), ('0_0', '1_1'), ('0_0', '3_1'), ('0_0', '10_1'), ('1_0', '1_1'), ('1_0', '3_1'), ('1_0', '10_1'), ('3_0', '3_1'), ('3_0', '10_1'), ('10_0', '10_1')");
    }
    @Test
    public void testBroadcastSpatialLeftJoin()
    {
        // LEFT JOIN semantics: unmatched probe rows ('empty', 'null') survive with NULL build columns.
        // Test ST_Intersects(build, probe)
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) LEFT JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
                "SELECT * FROM VALUES ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), " +
                        "('a', 'c'), ('c', 'a'), ('c', 'b'), ('b', 'c'), ('empty', null), ('null', null)");
        // Empty build side
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) LEFT JOIN (VALUES (null, 'null', 1)) AS b (wkt, name, id) " +
                "ON ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
                "VALUES ('a', null), ('b', null), ('c', null), ('d', null), ('empty', null), ('null', null)");
        // Extra condition
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) LEFT JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON a.name > b.name AND ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
                "VALUES ('a', null), ('b', null), ('c', 'a'), ('c', 'b'), ('d', null), ('empty', null), ('null', null)");
    }
    @Test
    public void testBroadcastSpatialRightJoin()
    {
        // RIGHT JOIN semantics: unmatched build rows ('empty', 'null') survive with NULL probe columns.
        // Test ST_Intersects(build, probe)
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) RIGHT JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
                "VALUES ('a', 'a'), ('b', 'b'), ('c', 'c'), ('d', 'd'), ('a', 'c'), ('c', 'a'), ('c', 'b'), ('b', 'c'), (null, 'empty'), (null, 'null')");
        // Empty build side
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) RIGHT JOIN (VALUES (null, 'null', 1)) AS b (wkt, name, id) " +
                "ON ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
                "VALUES (null, 'null')");
        // Extra condition
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + POLYGONS_SQL + ") AS a (wkt, name, id) RIGHT JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON a.name > b.name AND ST_Intersects(ST_GeometryFromText(b.wkt), ST_GeometryFromText(a.wkt))",
                "VALUES (null, 'c'), (null, 'd'), ('c', 'a'), ('c', 'b'), (null, 'empty'), (null, 'null')");
    }
    @Test
    public void testSpatialJoinOverRightJoin()
    {
        // A spatial join whose probe input is itself the result of a RIGHT JOIN;
        // a.latitude may be NULL for unmatched rows, which must not break the ST_Point call.
        assertQuery("SELECT a.name, b.name, c.name " +
                "FROM (" + POINTS_SQL + ") AS a (latitude, longitude, name, id) " +
                "RIGHT JOIN (" + POINTS_SQL + ") AS b (latitude, longitude, name, id) ON a.latitude = b.latitude AND a.longitude = b.longitude AND a.latitude > 0 " +
                "JOIN (" + POINTS_SQL + ") AS c (latitude, longitude, name, id) ON ST_Distance(ST_Point(a.latitude, b.longitude), ST_Point(c.latitude, c.longitude)) < 1 ",
                "VALUES ('y', 'y', 'y'), ('z', 'z', 'z')");
    }
    @Test
    public void testSpatialJoinOverLeftJoinWithOrPredicate()
    {
        // OR of two ST_Contains predicates on a LEFT JOIN: a match on either point keeps the pair;
        // probe rows matching nothing ('z', 'null') are retained with NULL build columns.
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + MULTI_POINTS_SQL + ") AS a (latitude1, longitude1, latitude2, longitude2, name, id) " +
                "LEFT JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON ST_Contains(ST_GeometryFromText(b.wkt), ST_Point(a.latitude1, a.longitude1)) OR ST_Contains(ST_GeometryFromText(b.wkt), ST_Point(a.latitude2, a.longitude2))",
                "VALUES ('x', 'a'), ('y', 'b') , ('y', 'c'), ('z', NULL), ('null', NULL)");
    }
    @Test
    public void testSpatialJoinOverRightJoinWithOrPredicate()
    {
        // OR of two ST_Contains predicates on a RIGHT JOIN: unmatched polygons
        // ('d', 'empty', 'null') are retained with NULL probe columns.
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + MULTI_POINTS_SQL + ") AS a (latitude1, longitude1, latitude2, longitude2, name, id) " +
                "RIGHT JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON ST_Contains(ST_GeometryFromText(b.wkt), ST_Point(a.latitude1, a.longitude1)) OR ST_Contains(ST_GeometryFromText(b.wkt), ST_Point(a.latitude2, a.longitude2))",
                "VALUES ('x', 'a'), ('y', 'b') , ('y', 'c'), (NULL, 'd'), (NULL, 'empty'), (NULL, 'null')");
    }
    @Test
    public void testSpatialJoinOverInnerJoinWithOrPredicate()
    {
        // OR of two ST_Contains predicates on an INNER JOIN: only matching pairs survive.
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + MULTI_POINTS_SQL + ") AS a (latitude1, longitude1, latitude2, longitude2, name, id) " +
                "INNER JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON ST_Contains(ST_GeometryFromText(b.wkt), ST_Point(a.latitude1, a.longitude1)) OR ST_Contains(ST_GeometryFromText(b.wkt), ST_Point(a.latitude2, a.longitude2))",
                "VALUES ('x', 'a'), ('y', 'b') , ('y', 'c')");
    }
    @Test
    public void testSpatialJoinOverFullJoinWithOrPredicate()
    {
        // OR of two ST_Contains predicates on a FULL JOIN: unmatched rows from BOTH sides
        // are retained with NULLs on the opposite side.
        assertQuery("SELECT a.name, b.name " +
                "FROM (" + MULTI_POINTS_SQL + ") AS a (latitude1, longitude1, latitude2, longitude2, name, id) " +
                "FULL JOIN (" + POLYGONS_SQL + ") AS b (wkt, name, id) " +
                "ON ST_Contains(ST_GeometryFromText(b.wkt), ST_Point(a.latitude1, a.longitude1)) OR ST_Contains(ST_GeometryFromText(b.wkt), ST_Point(a.latitude2, a.longitude2))",
                "VALUES ('x', 'a'), ('y', 'b'), ('y', 'c'), (NULL, 'd'), (NULL, 'empty'), ('z', NULL), (NULL, 'null'), ('null', NULL)");
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2007-2015 Broad Institute
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.broad.igv;
import org.apache.log4j.Logger;
import org.broad.igv.renderer.SequenceRenderer;
import java.io.IOException;
import java.text.DecimalFormat;
import java.util.*;
import java.util.List;
import java.util.regex.Pattern;
/**
* User: jrobinso
* Date: Feb 3, 2010
*/
/**
 * Application-wide constants and runtime flags for IGV.
 * <p>
 * Version/build metadata is loaded once from {@code /resources/about.properties}
 * in the static initializer. Mutable flags (headless, batch, testing, ...) are
 * only written through the setters below.
 */
public class Globals {

    private static final Logger log = Logger.getLogger(Globals.class);

    public static final int DESIGN_DPI = 96;

    public static final DecimalFormat DECIMAL_FORMAT = new DecimalFormat();

    public static final String HISTORY_DELIMITER = ";";
    public static final String DEFAULT_GENOME = "hg19";

    // External resource URLs
    public static final String DEFAULT_GENOME_URL = "http://igv.broadinstitute.org/genomes/genomes.txt";
    public static final String DEFAULT_DATA_URL = "https://data.broadinstitute.org/igvdata/$$_dataServerRegistry.txt";

    public static final String CHR_ALL = "All";
    public static final String TRACK_NAME_ATTRIBUTE = "NAME";
    public static final String TRACK_DATA_FILE_ATTRIBUTE = "DATA FILE";
    public static final String TRACK_DATA_TYPE_ATTRIBUTE = "DATA TYPE";

    // Runtime mode flags; mutated only via the setters below.
    private static boolean headless = false;
    private static boolean suppressMessages = false;
    private static boolean batch = false;
    private static boolean testing = false;

    public static int CONNECT_TIMEOUT = 20000;        // 20 seconds
    public static int READ_TIMEOUT = 1000 * 3 * 60;   // 3 minutes

    /**
     * Session file extension.
     */
    public static final String SESSION_FILE_EXTENSION = ".xml";

    /**
     * GENOME ARCHIVE CONSTANTS
     */
    public static final String GENOME_FILE_EXTENSION = ".genome";
    public static final String ZIP_EXTENSION = ".zip";
    public static final String GZIP_FILE_EXTENSION = ".gz";
    public static final String GENOME_ARCHIVE_PROPERTY_FILE_NAME = "property.txt";
    public static final String GENOME_ARCHIVE_ID_KEY = "id";
    public static final String GENOME_ARCHIVE_NAME_KEY = "name";
    public static final String GENOME_ARCHIVE_VERSION_KEY = "version";
    public static final String GENOME_ORDERED_KEY = "ordered";
    public static final String GENOME_GENETRACK_NAME = "geneTrackName";
    public static final String GENOME_URL_KEY = "url";
    public static final String GENOME_ARCHIVE_CYTOBAND_FILE_KEY = "cytobandFile";
    public static final String GENOME_ARCHIVE_GENE_FILE_KEY = "geneFile";
    public static final String GENOME_ARCHIVE_SEQUENCE_FILE_LOCATION_KEY = "sequenceLocation";

    /**
     * Whether the sequenceLocation has been modified from the version of the .genome
     * file on the server
     */
    public static final String GENOME_ARCHIVE_CUSTOM_SEQUENCE_LOCATION_KEY = "customSequenceLocation";
    public static final String GENOME_CHR_ALIAS_FILE_KEY = "chrAliasFile";

    // Pre-compiled patterns for common delimiters (compile once, reuse everywhere).
    public static final Pattern commaPattern = Pattern.compile(",");
    public static final Pattern tabPattern = Pattern.compile("\t");
    public static final Pattern colonPattern = Pattern.compile(":");
    public static final Pattern dashPattern = Pattern.compile("-");
    public static final Pattern equalPattern = Pattern.compile("=");
    public static final Pattern semicolonPattern = Pattern.compile(";");
    public static final Pattern singleTabMultiSpacePattern = Pattern.compile("\t|( +)");
    public static final Pattern forwardSlashPattern = Pattern.compile("/");
    public static final Pattern whitespacePattern = Pattern.compile("\\s+");

    // Raw type retained for source compatibility with existing callers.
    public static List emptyList = new ArrayList();

    // Populated by the static initializer from about.properties; "???" if missing.
    public static String VERSION;
    public static String BUILD;
    public static String TIMESTAMP;
    public static double log2 = Math.log(2);

    // OS / launch-environment detection, evaluated once at class load.
    public static final boolean IS_WINDOWS =
            System.getProperty("os.name").toLowerCase().startsWith("windows");
    public static final boolean IS_MAC =
            System.getProperty("os.name").toLowerCase().startsWith("mac");
    public static final boolean IS_LINUX =
            System.getProperty("os.name").toLowerCase().startsWith("linux");
    public static final boolean IS_JWS =
            System.getProperty("webstart.version", null) != null || System.getProperty("javawebstart.version", null) != null;

    public static final String JAVA_VERSION_STRING = "java.version";

    //Location of bedtools executable
    //Note: It is recommended you use an absolute path here.
    //System paths can be finnicky and vary depending on how IGV is launched
    //However, the path environment variable will be checked if the executable
    //is named rather than the full path given
    public static String BEDtoolsPath = "/usr/local/bin/bedtools"; //"bedtools"
    public static boolean toolsMenuEnabled = false;
    public static boolean development = false;

    public static String versionURL = "http://www.broadinstitute.org/igv/projects/current/version.txt";
    public static String downloadURL = "http://www.broadinstitute.org/igv/download";

    static {
        Properties properties = new Properties();
        // Guard against a missing resource: getResourceAsStream returns null in that
        // case and the previous code would have thrown an uncaught NPE from
        // Properties.load, failing class initialization. Also close the stream.
        java.io.InputStream aboutStream = Globals.class.getResourceAsStream("/resources/about.properties");
        if (aboutStream == null) {
            log.error("*** Error retrieving version and build information! about.properties not found ***");
        } else {
            try {
                properties.load(aboutStream);
            } catch (IOException e) {
                log.error("*** Error retrieving version and build information! ***", e);
            } finally {
                try {
                    aboutStream.close();
                } catch (IOException e) {
                    // best-effort close; nothing more to do
                }
            }
        }
        VERSION = properties.getProperty("version", "???");
        BUILD = properties.getProperty("build", "???");
        TIMESTAMP = properties.getProperty("timestamp", "???");
        BEDtoolsPath = System.getProperty("BEDtoolsPath", BEDtoolsPath);

        //Runtime property overrides compile-time property, if both exist.
        //If neither exist we default to false
        final String developmentProperty = System.getProperty("development", properties.getProperty("development", "false"));
        development = Boolean.parseBoolean(developmentProperty);
        if (development) {
            log.warn("Development mode is enabled");
        }
    }

    public static void setHeadless(boolean bool) {
        headless = bool;
    }

    public static boolean isHeadless() {
        return headless;
    }

    public static void setTesting(boolean testing) {
        Globals.testing = testing;
    }

    public static boolean isTesting() {
        return testing;
    }

    public static void setSuppressMessages(boolean bool) {
        suppressMessages = bool;
    }

    public static boolean isSuppressMessages() {
        return suppressMessages;
    }

    /** One-line application identifier, e.g. "IGV Version 2.x (123)date". */
    public static String applicationString() {
        return "IGV Version " + VERSION + " (" + BUILD + ")" + TIMESTAMP;
    }

    /** HTML-formatted version string for display in dialogs. */
    public static String versionString() {
        return "<html>Version " + VERSION + " (" + BUILD + ")<br>" + TIMESTAMP;
    }

    public static boolean isDevelopment() {
        return development;
    }

    public static boolean isBatch() {
        return batch;
    }

    public static void setBatch(boolean batch) {
        Globals.batch = batch;
    }

    /**
     * Checks whether the current JVM is the minimum specified version
     * or higher. Only compares up to as many characters as
     * in {@code minVersion}.
     * <p>
     * NOTE(review): the comparison is lexicographic, which is correct for
     * same-length version prefixes like "1.6"/"1.8" but not for multi-digit
     * components (e.g. "1.10" vs "1.9").
     *
     * @param minVersion minimum acceptable version prefix, e.g. "1.8"
     * @return true if the running JVM reports a version &gt;= minVersion
     */
    public static boolean checkJavaVersion(String minVersion) {
        String curVersion = System.getProperty(JAVA_VERSION_STRING);
        if (curVersion == null) {
            // Should never happen on a standard JVM; previously this would NPE.
            // Be permissive rather than fail the caller.
            return true;
        }
        if (curVersion.length() >= minVersion.length()) {
            curVersion = curVersion.substring(0, minVersion.length());
        }
        return curVersion.compareTo(minVersion) >= 0;
    }

    /**
     * Return the URL to fetch the current IGV version (note: not Java version)
     *
     * @return version-check URL
     */
    public static String getVersionURL() {
        return versionURL;
    }
}
| |
package monasca.statsd;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
*
* A simple StatsD client implementation facilitating event reporting.
*
* <p>Upon instantiation, this client will establish a socket connection to a StatsD instance
* running on the specified host and port. Events are then sent over this connection as they are
* received by the client.
* </p>
*
* <p>Six key methods are provided for the submission of events for the application under
* scrutiny:
* <ul>
* <li>{@link #event(String, String)}</li>
* <li>{@link #event(String, String, AlertType)}</li>
* <li>{@link #event(String, String, long)}</li>
* <li>{@link #event(String, String, Priority)}</li>
* <li>{@link #event(String, String, String...)}</li>
 * <li>{@link #event(String, String, long, String, Priority, String, AlertType, Map)}</li>
* </ul>
* From the perspective of the application, these methods are blocking.
* </p>
*
* <p>As part of a clean system shutdown, the {@link #stop()} method should be invoked
* on any StatsD clients.</p>
*
*/
public class BlockingStatsDEventClient extends StatsDClientBase {

    /** Default error handler: silently discards exceptions. */
    private static final StatsDClientErrorHandler NO_OP_HANDLER = new StatsDClientErrorHandler() {
        @Override
        public void handle(Exception e) { /* No-op */ }
    };

    /**
     * Create a new StatsD client communicating with a StatsD instance on the
     * specified host and port. All messages sent via this client will have
     * their keys prefixed with the specified string. The new client will
     * attempt to open a connection to the StatsD server immediately upon
     * instantiation, and may throw an exception if that connection cannot
     * be established. Once a client has been instantiated in this way, all
     * exceptions thrown during subsequent usage are consumed, guaranteeing
     * that failures in metrics will not affect normal code execution.
     *
     * @param prefix
     *     the prefix to apply to keys sent via this client
     * @param hostname
     *     the host name of the targeted StatsD server
     * @param port
     *     the port of the targeted StatsD server
     * @throws StatsDClientException
     *     if the client could not be started
     */
    public BlockingStatsDEventClient(String prefix, String hostname, int port) throws StatsDClientException {
        this(prefix, hostname, port, null, NO_OP_HANDLER);
    }

    /**
     * Create a new StatsD client, as {@link #BlockingStatsDEventClient(String, String, int)},
     * additionally attaching a set of default dimensions to every event sent.
     *
     * @param prefix
     *     the prefix to apply to keys sent via this client
     * @param hostname
     *     the host name of the targeted StatsD server
     * @param port
     *     the port of the targeted StatsD server
     * @param defaultDimensions
     *     dimensions to be added to all content sent (each of them should be in the format key:value)
     * @throws StatsDClientException
     *     if the client could not be started
     */
    public BlockingStatsDEventClient(String prefix, String hostname, int port, Map<String, String> defaultDimensions) throws StatsDClientException {
        this(prefix, hostname, port, defaultDimensions, NO_OP_HANDLER);
    }

    /**
     * Create a new StatsD client, as {@link #BlockingStatsDEventClient(String, String, int, Map)},
     * additionally routing any exception raised during usage to the given handler
     * before it is consumed.
     *
     * @param prefix
     *     the prefix to apply to keys sent via this client
     * @param hostname
     *     the host name of the targeted StatsD server
     * @param port
     *     the port of the targeted StatsD server
     * @param defaultDimensions
     *     dimensions to be added to all content sent (each of them should be in the format key:value)
     * @param errorHandler
     *     handler to use when an exception occurs during usage
     * @throws StatsDClientException
     *     if the client could not be started
     */
    public BlockingStatsDEventClient(String prefix, String hostname, int port, Map<String, String> defaultDimensions,
                                     StatsDClientErrorHandler errorHandler) throws StatsDClientException {
        super(prefix, hostname, port, defaultDimensions, errorHandler);
    }

    /**
     * Cleanly shut down this StatsD client. This method may throw an exception if
     * the socket cannot be closed.
     */
    @Override
    public void stop() {
        super.stop();
    }

    /**
     * Submit event with title and message.
     *
     * @param title event title
     * @param message event body
     * @return true if the event was sent (or skipped because title/message was null)
     */
    public boolean event(String title, String message) {
        return event(title, message, 0L, null, null, null, null, null);
    }

    /**
     * Submit event with title, message and a time.
     *
     * @param title event title
     * @param message event body
     * @param dateHappened epoch timestamp, in seconds; ignored if not positive
     * @return true if the event was sent
     */
    public boolean event(String title, String message, long dateHappened) {
        return event(title, message, dateHappened, null, null, null, null, null);
    }

    /**
     * Submit event with title, message and priority for the message.
     *
     * @param title event title
     * @param message event body
     * @param priority event priority
     * @return true if the event was sent
     */
    public boolean event(String title, String message, Priority priority) {
        return event(title, message, 0L, null, priority, null, null, null);
    }

    /**
     * Submit event with title, message and alert type.
     *
     * @param title event title
     * @param message event body
     * @param alertType event alert type
     * @return true if the event was sent
     */
    public boolean event(String title, String message, AlertType alertType) {
        return event(title, message, 0L, null, null, null, alertType, null);
    }

    /**
     * Submit event with title, message and dimensions.
     *
     * @param title event title
     * @param message event body
     * @param dimensions extra dimensions attached to this event
     * @return true if the event was sent
     */
    public boolean event(String title, String message, Map<String, String> dimensions) {
        return event(title, message, 0L, null, null, null, null, dimensions);
    }

    /**
     * Reports an event to the monasca agent. Blocks until the datagram is handed
     * to the socket. A null title or message causes the event to be silently skipped.
     *
     * @param title event title (required)
     * @param message event body (required)
     * @param dateHappened epoch seconds; ignored if not positive
     * @param aggregationKey key used by the agent to aggregate related events
     * @param priority event priority
     * @param sourceTypeName source type label
     * @param alterType alert type label
     * @param dimensions extra dimensions attached to this event
     * @return false only if an I/O error occurred while sending
     */
    public boolean event(String title, String message, long dateHappened, String aggregationKey,
                         Priority priority, String sourceTypeName, AlertType alterType, Map<String, String> dimensions) {
        boolean success = true;
        if (title != null && message != null) {
            try {
                blockingSend(prepareMessage(title, message, dateHappened, aggregationKey, priority, sourceTypeName, alterType, dimensions));
            } catch (IOException e) {
                success = false;
            }
        }
        return success;
    }

    /**
     * Builds the wire-format event datagram:
     * {@code _e{titleLen,msgLen}:title|message|d:...|h:...|k:...|p:...|s:...|t:...}.
     */
    protected String prepareMessage(String title, String message, long dateHappened, String aggregationKey,
                                    Priority priority, String sourceTypeName, AlertType alterType, Map<String, String> dimensions) {
        StringBuilder sb = new StringBuilder();
        // The _e{N,M} header carries the UTF-8 encoded byte lengths of title and
        // message (dogstatsd datagram format). Using String.length() (UTF-16 code
        // units, as the previous code did) mis-declares lengths for non-ASCII text.
        int titleLength = title.getBytes(StandardCharsets.UTF_8).length;
        int messageLength = message.getBytes(StandardCharsets.UTF_8).length;
        sb.append(String.format("_e{%d,%d}:%s|%s", titleLength, messageLength, title, message));
        if (dateHappened > 0) {
            sb.append(String.format("|d:%d", dateHappened));
        }
        if (super.address.getHostName() != null) {
            sb.append(String.format("|h:%s", super.address.getHostName()));
        }
        if (aggregationKey != null) {
            sb.append(String.format("|k:%s", aggregationKey));
        }
        if (priority != null) {
            sb.append(String.format("|p:%s", priority.name()));
        }
        if (sourceTypeName != null) {
            sb.append(String.format("|s:%s", sourceTypeName));
        }
        if (alterType != null) {
            sb.append(String.format("|t:%s", alterType.name()));
        }
        sb.append(dimensionString(dimensions));
        return sb.toString();
    }

    /**
     * Encodes the message as UTF-8 (previously the platform default charset,
     * which is non-deterministic across hosts) and hands it to the base class.
     * NOTE(review): a message longer than PACKET_SIZE_BYTES would still raise a
     * BufferOverflowException here — confirm whether upstream enforces a limit.
     */
    protected void blockingSend(String message) throws IOException {
        final ByteBuffer sendBuffer = ByteBuffer.allocate(PACKET_SIZE_BYTES);
        sendBuffer.put(message.getBytes(StandardCharsets.UTF_8));
        super.blockingSend(sendBuffer);
    }
}
| |
package com.tommytony.war.spout;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.bukkit.ChatColor;
import org.bukkit.entity.Player;
import org.getspout.spoutapi.SpoutManager;
import org.getspout.spoutapi.gui.Color;
import org.getspout.spoutapi.gui.GenericGradient;
import org.getspout.spoutapi.gui.GenericLabel;
import org.getspout.spoutapi.gui.WidgetAnchor;
import org.getspout.spoutapi.player.SpoutPlayer;
import com.tommytony.war.Team;
import com.tommytony.war.War;
import com.tommytony.war.Warzone;
import com.tommytony.war.config.TeamConfig;
import com.tommytony.war.config.TeamKind;
public class SpoutDisplayer {
private static int LINE_HEIGHT = 5;
private static int LINE_HEIGHT_WITH_MARGIN = 8;
Map<String, List<PlayerMessage>> playerMessages = new HashMap<String, List<PlayerMessage>>();
public void msg(SpoutPlayer sp, String message) {
if (!playerMessages.containsKey(sp.getName())) {
playerMessages.put(sp.getName(), new ArrayList<PlayerMessage>());
}
List<PlayerMessage> messages = playerMessages.get(sp.getName());
messages.add(new PlayerMessage(message));
// prevent huge stack of messages, 5 max
if (messages.size() > 5) {
// remove first
messages.remove(0);
}
List<Integer> statsOffset = new ArrayList<Integer>();
List<GenericLabel> lines = getStatsLines(Warzone.getZoneByPlayerName(sp.getName()), statsOffset);
drawMessages(sp.getName(), lines, statsOffset);
}
public void fadeOutOldMessages() {
for (String playerName : playerMessages.keySet()) {
List<PlayerMessage> messages = playerMessages.get(playerName);
List<PlayerMessage> toRemove = new ArrayList<PlayerMessage>();
for (PlayerMessage message : messages) {
if (System.currentTimeMillis() - message.getSendTime() > 15000) {
toRemove.add(message);
}
}
for (PlayerMessage removing : toRemove) {
messages.remove(removing);
}
if (toRemove.size() > 0) {
List<Integer> statsOffset = new ArrayList<Integer>();
List<GenericLabel> lines = getStatsLines(Warzone.getZoneByPlayerName(playerName), statsOffset);
drawMessages(playerName, lines, statsOffset);
}
}
}
public void remove(String playerName) {
Player player = War.war.getServer().getPlayer(playerName);
if (player != null && playerMessages.containsKey(playerName)) {
clear(SpoutManager.getPlayer(player));
playerMessages.remove(playerName);
}
}
	// Removes every widget this plugin has attached to the player's main screen.
	private void clear(SpoutPlayer player) {
		player.getMainScreen().removeWidgets(War.war);
	}
public void clearAll() {
List<String> namesToRemove = new ArrayList<String>();
for (String name : playerMessages.keySet()) {
Player player = War.war.getServer().getPlayer(name);
if (player != null && playerMessages.containsKey(name)) {
clear(SpoutManager.getPlayer(player));
}
namesToRemove.add(name);
}
for (String toRemove : namesToRemove) {
playerMessages.remove(toRemove);
}
}
public static String cleanForNotification(String toNotify) {
if (toNotify.length() > 26) {
return toNotify.substring(0, 25);
}
return toNotify;
}
public void updateStats(Warzone zone) {
List<Integer> statsOffset = new ArrayList<Integer>();
List<GenericLabel> statsLines = getStatsLines(zone, statsOffset);
for (Team t : zone.getTeams()) {
for (Player p : t.getPlayers()) {
SpoutPlayer sp = SpoutManager.getPlayer(p);
if (sp.isSpoutCraftEnabled()) {
drawMessages(sp.getName(), statsLines, statsOffset);
}
}
}
}
	// Recomputes and redraws the scoreboard for a single player, but only if they
	// run the Spoutcraft client (vanilla clients can't render Spout widgets).
	public void updateStats(Player player) {
		SpoutPlayer sp = SpoutManager.getPlayer(player);
		if (sp.isSpoutCraftEnabled()) {
			List<Integer> statsOffset = new ArrayList<Integer>();
			Warzone zone = Warzone.getZoneByPlayerName(player.getName());
			List<GenericLabel> statsLines = getStatsLines(zone, statsOffset);
			drawMessages(sp.getName(), statsLines, statsOffset);
		}
	}
	// Builds the scoreboard widgets for a zone as a flat list of labels laid out in
	// three pixel-aligned columns (team, score, lives). Also fills 'offset' with the
	// {x, y} pixel extents of the stats block, so callers know where to start drawing
	// chat messages below/next to it. When zone is null, returns an empty list with
	// a {0, 0} offset.
	private static List<GenericLabel> getStatsLines(Warzone zone, List<Integer> offset) {
		List<GenericLabel> lines = new ArrayList<GenericLabel>();
		offset.add(0);
		offset.add(0);
		if (zone != null) {
			// Replace the default {0, 0} offset with the real extents computed below.
			offset.clear();
			List<GenericLabel> teamlines = new ArrayList<GenericLabel>();
			// NOTE(review): playerlines is never populated in this method — appears vestigial.
			List<GenericLabel> playerlines = new ArrayList<GenericLabel>();
			List<GenericLabel> scorelines = new ArrayList<GenericLabel>();
			List<GenericLabel> lifelines = new ArrayList<GenericLabel>();
			// Widest label (in pixels) per column; used to position the next column.
			int teamMax = 0, scoreMax = 0, lifeMax = 0;
			GenericLabel line;
			GenericLabel teamsHeader = new GenericLabel(ChatColor.GRAY + "War> " + ChatColor.WHITE + zone.getName());
			int teamsHeaderWidth = GenericLabel.getStringWidth(teamsHeader.getText()) + 1;
			teamsHeader.setAnchor(WidgetAnchor.TOP_LEFT)
					.setX(3)
					.setY(2)
					.setWidth(teamsHeaderWidth)
					.setHeight(LINE_HEIGHT);
			lines.add(teamsHeader);
			// First, we collect all the team names
			int lineCounter = 1;
			for (Team t : zone.getTeams()) {
				// team name, e.g. "diamond (2/4)"
				String teamStr = t.getName() + " (" + t.getPlayers().size() + "/" + t.getTeamConfig().resolveInt(TeamConfig.TEAMSIZE) + ")";
				line = new GenericLabel(teamStr);
				if (t.getPlayers().size() == 0) {
					// Empty teams are rendered dimmed (gray).
					line.setTextColor(new Color(100,100,100));
				}
				else {
					line.setText(t.getKind().getColor() + teamStr.replace("(", ChatColor.GRAY + "(" + ChatColor.WHITE).replace(")", ChatColor.GRAY + ")" + ChatColor.WHITE));
				}
				line.setAnchor(WidgetAnchor.TOP_LEFT)
						.setX(3)
						.setY(4 + lineCounter * LINE_HEIGHT_WITH_MARGIN)
						.setWidth(GenericLabel.getStringWidth(line.getText()))
						.setHeight(LINE_HEIGHT);
				teamlines.add(line);
				lineCounter++;
			}
			// We need to find the longest name
			for (GenericLabel l : teamlines) {
				if (GenericLabel.getStringWidth(l.getText()) > teamMax) {
					teamMax = GenericLabel.getStringWidth(l.getText());
				}
				// NOTE(review): this header-width check only needs to run once, but
				// keeping it inside the loop is harmless (idempotent max update).
				if (teamsHeaderWidth > teamMax) {
					teamMax = teamsHeaderWidth;
				}
			}
			// points header
			GenericLabel pointsHeader = new GenericLabel(ChatColor.GRAY + "score");
			int pointsHeaderWidth = GenericLabel.getStringWidth(pointsHeader.getText());
			pointsHeader.setAnchor(WidgetAnchor.TOP_LEFT)
					.setX(3 + teamMax + 2)
					.setY(2)
					.setWidth(pointsHeaderWidth)
					.setHeight(LINE_HEIGHT);
			lines.add(pointsHeader);
			lineCounter = 1;
			for (Team t : zone.getTeams()) {
				// scores, e.g. "1/3"
				line = new GenericLabel(t.getPoints() + "/" + t.getTeamConfig().resolveInt(TeamConfig.MAXSCORE));
				if (t.getPlayers().size() == 0) line.setTextColor(new Color(100, 100, 100));
				line.setAnchor(WidgetAnchor.TOP_LEFT)
						.setX(3 + teamMax + 4)
						.setY(4 + lineCounter * LINE_HEIGHT_WITH_MARGIN)
						.setWidth(GenericLabel.getStringWidth(line.getText()))
						.setHeight(LINE_HEIGHT);
				scorelines.add(line);
				lineCounter++;
			}
			for (GenericLabel l : scorelines) {
				if (GenericLabel.getStringWidth(l.getText()) > scoreMax) {
					scoreMax = GenericLabel.getStringWidth(l.getText());
				}
			}
			if (pointsHeaderWidth > scoreMax) {
				scoreMax = pointsHeaderWidth;
			}
			// lifepool header
			GenericLabel livesHeader = new GenericLabel(ChatColor.GRAY + "lives");
			int livesHeaderWidth = GenericLabel.getStringWidth(livesHeader.getText());
			livesHeader.setAnchor(WidgetAnchor.TOP_LEFT)
					.setX(3 + teamMax + 4 + scoreMax + 2)
					.setY(2)
					.setWidth(livesHeaderWidth)
					.setHeight(LINE_HEIGHT);
			lines.add(livesHeader);
			// and finally, lives.
			lineCounter = 1;
			for (Team t : zone.getTeams()) {
				line = new GenericLabel(t.getRemainingLifes() + "/" + t.getTeamConfig().resolveInt(TeamConfig.LIFEPOOL));
				if (t.getPlayers().size() == 0) line.setTextColor(new Color(100, 100, 100));
				line.setAnchor(WidgetAnchor.TOP_LEFT)
						.setX(3 + teamMax + 4 + scoreMax + 4)
						.setY(4 + lineCounter * LINE_HEIGHT_WITH_MARGIN)
						.setWidth(GenericLabel.getStringWidth(line.getText()))
						.setHeight(LINE_HEIGHT);
				lifelines.add(line);
				lineCounter++;
			}
			for (GenericLabel l : lifelines) {
				if (GenericLabel.getStringWidth(l.getText()) > lifeMax) {
					lifeMax = GenericLabel.getStringWidth(l.getText());
				}
			}
			if (livesHeaderWidth > lifeMax) {
				lifeMax = livesHeaderWidth;
			}
			// Flatten all columns into the output list, then record the block extents.
			for (GenericLabel l : teamlines) { lines.add(l); }
			for (GenericLabel l : playerlines) { lines.add(l); }
			for (GenericLabel l : scorelines) { lines.add(l); }
			for (GenericLabel l : lifelines) { lines.add(l); }
			offset.add(3 + teamMax + 1 + scoreMax + 1 + lifeMax + 5);
			offset.add(4 + lineCounter * LINE_HEIGHT_WITH_MARGIN);
		}
		return lines;
	}
/**
 * Attaches the pre-built stats labels to the given player's main screen.
 *
 * @param sp    Spout player whose HUD receives the widgets
 * @param lines shared stats labels; each is copied so no widget instance
 *              is attached to more than one screen
 */
private static void drawStats(SpoutPlayer sp, List<GenericLabel> lines) {
	for (int i = 0; i < lines.size(); i++) {
		// Attach a per-player copy, never the shared label itself.
		sp.getMainScreen().attachWidget(War.war, lines.get(i).copy());
	}
}
/**
 * Redraws the full HUD for one player: a translucent background gradient,
 * a thin divider in the player's team color, the shared stats panel, and
 * finally the player's queued chat messages word-wrapped below the panel.
 *
 * Does nothing if the player is not currently online.
 *
 * @param playerName  name of the player whose screen is redrawn
 * @param statsLines  pre-built stats labels (attached via {@link #drawStats})
 * @param statsOffset two-element list produced by the stats builder;
 *                    index 0 is the panel width, index 1 the panel height
 */
private void drawMessages(String playerName, List<GenericLabel> statsLines, List<Integer> statsOffset) {
	Player bukkitPlayer = War.war.getServer().getPlayer(playerName);
	if (bukkitPlayer != null) {
		SpoutPlayer player = SpoutManager.getPlayer(bukkitPlayer);
		List<PlayerMessage> messages = playerMessages.get(playerName);
		// remove old widgets before attaching the new set
		clear(player);
		// add bg: fades from 40% black at the top to fully transparent
		GenericGradient gradient = new GenericGradient();
		gradient.setAnchor(WidgetAnchor.TOP_LEFT);
		gradient.setTopColor(new Color(0.0F, 0.0F, 0.0F, 0.4F)); // (order is Red, Green, Blue, Alpha)
		gradient.setBottomColor(new Color(0.0F, 0.0F, 0.0F, 0.0F));
		gradient.setHeight(statsOffset.get(1) + 4).setWidth((int)(statsOffset.get(0)));
		player.getMainScreen().attachWidget(War.war, gradient);
		// border in color of team
		GenericGradient teamGradient = new GenericGradient();
		teamGradient.setAnchor(WidgetAnchor.TOP_LEFT);
		Team team = Team.getTeamByPlayerName(playerName);
		// NOTE(review): the float Color constructor is presumably 0.0-1.0 per
		// component; 250.0F looks out of range -- confirm against the Spout API.
		Color spoutColor = new Color(250.0F, 250.0F, 250.0F, 1.0F);
		if (team != null) {
			spoutColor = team.getKind().getSpoutColor();
		}
		spoutColor.setAlpha(0.5F);
		// divider sits just under the first HUD line
		teamGradient.setY(2 + LINE_HEIGHT_WITH_MARGIN);
		teamGradient.setTopColor(spoutColor);
		// NOTE(review): 256 exceeds the usual 0-255 int component range -- confirm intended.
		teamGradient.setBottomColor(new Color(256, 256, 256, 1.0F));
		teamGradient.setHeight(2).setWidth((int)(statsOffset.get(0)));
		player.getMainScreen().attachWidget(War.war, teamGradient);
		// update stats panel
		drawStats(player, statsLines);
		// finally messages
		if (messages != null && messages.size() > 0) {
			Warzone zone = Warzone.getZoneByPlayerName(playerName);
			// start drawing just below the stats panel
			int verticalOffset = statsOffset.get(1) + 4;
			for (PlayerMessage message : messages) {
				int horizontalOffset = 2;
				String messageStr = ChatColor.GRAY + ">" + ChatColor.WHITE + " " + message.getMessage();
				// each word becomes its own label so lines can wrap at word boundaries
				String[] words = messageStr.split(" ");
				for (String word : words) {
					// wrap to the next line once past 160px (checked before placing the word)
					if (horizontalOffset > 160) {
						horizontalOffset = 2;
						verticalOffset += LINE_HEIGHT_WITH_MARGIN;
					}
					// colorize player names, team kinds and the "War>" prefix
					word = addMissingColor(word, zone);
					GenericLabel label = new GenericLabel(word);
					int width = GenericLabel.getStringWidth(word);
					label.setAnchor(WidgetAnchor.TOP_LEFT);
					label.setWidth(width);
					label.setHeight(LINE_HEIGHT);
					label.setX(horizontalOffset);
					label.setY(verticalOffset);
					player.getMainScreen().attachWidget(War.war, label);
					// advance past the word plus a 2px gap
					horizontalOffset += width + 2;
				}
				// extra pixel of spacing between consecutive messages
				verticalOffset += LINE_HEIGHT_WITH_MARGIN + 1;
			}
		}
	}
}
/**
 * Prefixes a chat word with the matching color code when it starts with a
 * known player name, a team-kind name, or the "War>" server prefix, and
 * appends a reset to white. Any other word is returned unchanged
 * (i.e. rendered white by default).
 *
 * @param word single whitespace-delimited token from a chat message
 * @param zone warzone whose team rosters are consulted; may be null
 * @return the word, possibly wrapped in color codes
 */
public static String addMissingColor(String word, Warzone zone) {
	// Player names take precedence: tint the word with the player's team color.
	if (zone != null) {
		for (Team team : zone.getTeams()) {
			for (Player teamPlayer : team.getPlayers()) {
				if (word.startsWith(teamPlayer.getName())) {
					return team.getKind().getColor() + word + ChatColor.WHITE;
				}
			}
		}
	}
	// Next, team-kind names get their kind's color.
	for (TeamKind kind : TeamKind.values()) {
		if (word.startsWith(kind.toString())) {
			return kind.getColor() + word + ChatColor.WHITE;
		}
	}
	// The server prefix renders gray; everything else is left as-is.
	return word.equals("War>") ? ChatColor.GRAY + word + ChatColor.WHITE : word;
}
// private Color getWordColor(String word, Warzone zone) {
// if (zone != null) {
// for (Team team : zone.getTeams()) {
// for (Player player : team.getPlayers()) {
// if (word.startsWith(player.getName())) {
// return team.getKind().getSpoutColor();
// }
// }
// }
// }
//
// for (TeamKind kind : TeamKind.values()) {
// if (word.startsWith(kind.toString())) {
// return kind.getSpoutColor();
// }
// }
//
// if (word.equals("War>")) {
// return new Color(200,200,200);
// }
//
// // white by default
// return new Color(255,255,255);
// }
}
| |
// Generated from BigDataScript.g4 by ANTLR 4.2.2 -- do not edit by hand; regenerate from the grammar instead.
package org.bigDataScript.antlr;
import org.antlr.v4.runtime.ParserRuleContext;
import org.antlr.v4.runtime.misc.NotNull;
import org.antlr.v4.runtime.tree.ErrorNode;
import org.antlr.v4.runtime.tree.TerminalNode;
/**
* This class provides an empty implementation of {@link BigDataScriptListener},
* which can be extended to create a listener which only needs to handle a subset
* of the available methods.
*/
public class BigDataScriptBaseListener implements BigDataScriptListener {
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterCheckpoint(@NotNull BigDataScriptParser.CheckpointContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitCheckpoint(@NotNull BigDataScriptParser.CheckpointContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionLogicAnd(@NotNull BigDataScriptParser.ExpressionLogicAndContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionLogicAnd(@NotNull BigDataScriptParser.ExpressionLogicAndContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypePrimitiveString(@NotNull BigDataScriptParser.TypePrimitiveStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypePrimitiveString(@NotNull BigDataScriptParser.TypePrimitiveStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEol(@NotNull BigDataScriptParser.EolContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEol(@NotNull BigDataScriptParser.EolContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStatementInclude(@NotNull BigDataScriptParser.StatementIncludeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStatementInclude(@NotNull BigDataScriptParser.StatementIncludeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionAssignmentList(@NotNull BigDataScriptParser.ExpressionAssignmentListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionAssignmentList(@NotNull BigDataScriptParser.ExpressionAssignmentListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionEq(@NotNull BigDataScriptParser.ExpressionEqContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionEq(@NotNull BigDataScriptParser.ExpressionEqContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionMinus(@NotNull BigDataScriptParser.ExpressionMinusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionMinus(@NotNull BigDataScriptParser.ExpressionMinusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionDepOperator(@NotNull BigDataScriptParser.ExpressionDepOperatorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionDepOperator(@NotNull BigDataScriptParser.ExpressionDepOperatorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterWhile(@NotNull BigDataScriptParser.WhileContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitWhile(@NotNull BigDataScriptParser.WhileContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForEnd(@NotNull BigDataScriptParser.ForEndContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForEnd(@NotNull BigDataScriptParser.ForEndContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionNe(@NotNull BigDataScriptParser.ExpressionNeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionNe(@NotNull BigDataScriptParser.ExpressionNeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionBitXor(@NotNull BigDataScriptParser.ExpressionBitXorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionBitXor(@NotNull BigDataScriptParser.ExpressionBitXorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionBitNegation(@NotNull BigDataScriptParser.ExpressionBitNegationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionBitNegation(@NotNull BigDataScriptParser.ExpressionBitNegationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForCondition(@NotNull BigDataScriptParser.ForConditionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForCondition(@NotNull BigDataScriptParser.ForConditionContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterProgramUnit(@NotNull BigDataScriptParser.ProgramUnitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitProgramUnit(@NotNull BigDataScriptParser.ProgramUnitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionBitAnd(@NotNull BigDataScriptParser.ExpressionBitAndContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionBitAnd(@NotNull BigDataScriptParser.ExpressionBitAndContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPost(@NotNull BigDataScriptParser.PostContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPost(@NotNull BigDataScriptParser.PostContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterContinue(@NotNull BigDataScriptParser.ContinueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitContinue(@NotNull BigDataScriptParser.ContinueContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterWarning(@NotNull BigDataScriptParser.WarningContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitWarning(@NotNull BigDataScriptParser.WarningContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBlock(@NotNull BigDataScriptParser.BlockContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBlock(@NotNull BigDataScriptParser.BlockContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterReferenceMap(@NotNull BigDataScriptParser.ReferenceMapContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitReferenceMap(@NotNull BigDataScriptParser.ReferenceMapContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionLogicNot(@NotNull BigDataScriptParser.ExpressionLogicNotContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionLogicNot(@NotNull BigDataScriptParser.ExpressionLogicNotContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForLoop(@NotNull BigDataScriptParser.ForLoopContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForLoop(@NotNull BigDataScriptParser.ForLoopContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForLoopList(@NotNull BigDataScriptParser.ForLoopListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForLoopList(@NotNull BigDataScriptParser.ForLoopListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIf(@NotNull BigDataScriptParser.IfContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIf(@NotNull BigDataScriptParser.IfContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionVariableInitImplicit(@NotNull BigDataScriptParser.ExpressionVariableInitImplicitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionVariableInitImplicit(@NotNull BigDataScriptParser.ExpressionVariableInitImplicitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionAssignmentMult(@NotNull BigDataScriptParser.ExpressionAssignmentMultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionAssignmentMult(@NotNull BigDataScriptParser.ExpressionAssignmentMultContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionDep(@NotNull BigDataScriptParser.ExpressionDepContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionDep(@NotNull BigDataScriptParser.ExpressionDepContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionLt(@NotNull BigDataScriptParser.ExpressionLtContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionLt(@NotNull BigDataScriptParser.ExpressionLtContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionAssignmentDiv(@NotNull BigDataScriptParser.ExpressionAssignmentDivContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionAssignmentDiv(@NotNull BigDataScriptParser.ExpressionAssignmentDivContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPre(@NotNull BigDataScriptParser.PreContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPre(@NotNull BigDataScriptParser.PreContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionUnaryPlus(@NotNull BigDataScriptParser.ExpressionUnaryPlusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionUnaryPlus(@NotNull BigDataScriptParser.ExpressionUnaryPlusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypePrimitiveVoid(@NotNull BigDataScriptParser.TypePrimitiveVoidContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypePrimitiveVoid(@NotNull BigDataScriptParser.TypePrimitiveVoidContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterIncludeFile(@NotNull BigDataScriptParser.IncludeFileContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitIncludeFile(@NotNull BigDataScriptParser.IncludeFileContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionLogicOr(@NotNull BigDataScriptParser.ExpressionLogicOrContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionLogicOr(@NotNull BigDataScriptParser.ExpressionLogicOrContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionParallel(@NotNull BigDataScriptParser.ExpressionParallelContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionParallel(@NotNull BigDataScriptParser.ExpressionParallelContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypePrimitiveReal(@NotNull BigDataScriptParser.TypePrimitiveRealContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypePrimitiveReal(@NotNull BigDataScriptParser.TypePrimitiveRealContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLiteralBool(@NotNull BigDataScriptParser.LiteralBoolContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLiteralBool(@NotNull BigDataScriptParser.LiteralBoolContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionGoal(@NotNull BigDataScriptParser.ExpressionGoalContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionGoal(@NotNull BigDataScriptParser.ExpressionGoalContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterVarDeclaration(@NotNull BigDataScriptParser.VarDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitVarDeclaration(@NotNull BigDataScriptParser.VarDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionTimes(@NotNull BigDataScriptParser.ExpressionTimesContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionTimes(@NotNull BigDataScriptParser.ExpressionTimesContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExit(@NotNull BigDataScriptParser.ExitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExit(@NotNull BigDataScriptParser.ExitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionList(@NotNull BigDataScriptParser.ExpressionListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionList(@NotNull BigDataScriptParser.ExpressionListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionPlus(@NotNull BigDataScriptParser.ExpressionPlusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionPlus(@NotNull BigDataScriptParser.ExpressionPlusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionCall(@NotNull BigDataScriptParser.FunctionCallContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionCall(@NotNull BigDataScriptParser.FunctionCallContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionParen(@NotNull BigDataScriptParser.ExpressionParenContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionParen(@NotNull BigDataScriptParser.ExpressionParenContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionCond(@NotNull BigDataScriptParser.ExpressionCondContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionCond(@NotNull BigDataScriptParser.ExpressionCondContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionUnaryMinus(@NotNull BigDataScriptParser.ExpressionUnaryMinusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionUnaryMinus(@NotNull BigDataScriptParser.ExpressionUnaryMinusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionBitOr(@NotNull BigDataScriptParser.ExpressionBitOrContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionBitOr(@NotNull BigDataScriptParser.ExpressionBitOrContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStatementExpr(@NotNull BigDataScriptParser.StatementExprContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStatementExpr(@NotNull BigDataScriptParser.StatementExprContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLiteralInt(@NotNull BigDataScriptParser.LiteralIntContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLiteralInt(@NotNull BigDataScriptParser.LiteralIntContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLiteralMapEmpty(@NotNull BigDataScriptParser.LiteralMapEmptyContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLiteralMapEmpty(@NotNull BigDataScriptParser.LiteralMapEmptyContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterMethodCall(@NotNull BigDataScriptParser.MethodCallContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitMethodCall(@NotNull BigDataScriptParser.MethodCallContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStatementVarDeclaration(@NotNull BigDataScriptParser.StatementVarDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStatementVarDeclaration(@NotNull BigDataScriptParser.StatementVarDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterWait(@NotNull BigDataScriptParser.WaitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitWait(@NotNull BigDataScriptParser.WaitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLiteralString(@NotNull BigDataScriptParser.LiteralStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLiteralString(@NotNull BigDataScriptParser.LiteralStringContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionGt(@NotNull BigDataScriptParser.ExpressionGtContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionGt(@NotNull BigDataScriptParser.ExpressionGtContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionModulo(@NotNull BigDataScriptParser.ExpressionModuloContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionModulo(@NotNull BigDataScriptParser.ExpressionModuloContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypePrimitiveBool(@NotNull BigDataScriptParser.TypePrimitiveBoolContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypePrimitiveBool(@NotNull BigDataScriptParser.TypePrimitiveBoolContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypePrimitiveInt(@NotNull BigDataScriptParser.TypePrimitiveIntContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypePrimitiveInt(@NotNull BigDataScriptParser.TypePrimitiveIntContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterError(@NotNull BigDataScriptParser.ErrorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitError(@NotNull BigDataScriptParser.ErrorContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionAssignmentBitAnd(@NotNull BigDataScriptParser.ExpressionAssignmentBitAndContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionAssignmentBitAnd(@NotNull BigDataScriptParser.ExpressionAssignmentBitAndContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionLe(@NotNull BigDataScriptParser.ExpressionLeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionLe(@NotNull BigDataScriptParser.ExpressionLeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLiteralMap(@NotNull BigDataScriptParser.LiteralMapContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLiteralMap(@NotNull BigDataScriptParser.LiteralMapContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPrintln(@NotNull BigDataScriptParser.PrintlnContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPrintln(@NotNull BigDataScriptParser.PrintlnContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionAssignmentBitOr(@NotNull BigDataScriptParser.ExpressionAssignmentBitOrContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionAssignmentBitOr(@NotNull BigDataScriptParser.ExpressionAssignmentBitOrContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypeList(@NotNull BigDataScriptParser.TypeListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypeList(@NotNull BigDataScriptParser.TypeListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionTask(@NotNull BigDataScriptParser.ExpressionTaskContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionTask(@NotNull BigDataScriptParser.ExpressionTaskContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterReferenceVar(@NotNull BigDataScriptParser.ReferenceVarContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitReferenceVar(@NotNull BigDataScriptParser.ReferenceVarContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
// NOTE(review): ANTLR-generated BaseListener no-op callbacks. Do not hand-edit
// these methods — change the grammar and regenerate; subclasses override only
// the callbacks they need.
@Override public void enterExpressionSys(@NotNull BigDataScriptParser.ExpressionSysContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionSys(@NotNull BigDataScriptParser.ExpressionSysContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionAssignmentMinus(@NotNull BigDataScriptParser.ExpressionAssignmentMinusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionAssignmentMinus(@NotNull BigDataScriptParser.ExpressionAssignmentMinusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterReferenceList(@NotNull BigDataScriptParser.ReferenceListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitReferenceList(@NotNull BigDataScriptParser.ReferenceListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLiteralListEmpty(@NotNull BigDataScriptParser.LiteralListEmptyContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLiteralListEmpty(@NotNull BigDataScriptParser.LiteralListEmptyContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterDebug(@NotNull BigDataScriptParser.DebugContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitDebug(@NotNull BigDataScriptParser.DebugContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypeMap(@NotNull BigDataScriptParser.TypeMapContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypeMap(@NotNull BigDataScriptParser.TypeMapContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBreak(@NotNull BigDataScriptParser.BreakContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBreak(@NotNull BigDataScriptParser.BreakContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterKill(@NotNull BigDataScriptParser.KillContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitKill(@NotNull BigDataScriptParser.KillContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterVariableInit(@NotNull BigDataScriptParser.VariableInitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitVariableInit(@NotNull BigDataScriptParser.VariableInitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterStatmentEol(@NotNull BigDataScriptParser.StatmentEolContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitStatmentEol(@NotNull BigDataScriptParser.StatmentEolContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterVariableInitImplicit(@NotNull BigDataScriptParser.VariableInitImplicitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitVariableInitImplicit(@NotNull BigDataScriptParser.VariableInitImplicitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterBreakpoint(@NotNull BigDataScriptParser.BreakpointContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitBreakpoint(@NotNull BigDataScriptParser.BreakpointContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionTaskLiteral(@NotNull BigDataScriptParser.ExpressionTaskLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionTaskLiteral(@NotNull BigDataScriptParser.ExpressionTaskLiteralContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionDivide(@NotNull BigDataScriptParser.ExpressionDivideContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionDivide(@NotNull BigDataScriptParser.ExpressionDivideContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterHelp(@NotNull BigDataScriptParser.HelpContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitHelp(@NotNull BigDataScriptParser.HelpContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionAssignment(@NotNull BigDataScriptParser.ExpressionAssignmentContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionAssignment(@NotNull BigDataScriptParser.ExpressionAssignmentContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterTypeArray(@NotNull BigDataScriptParser.TypeArrayContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitTypeArray(@NotNull BigDataScriptParser.TypeArrayContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterPrint(@NotNull BigDataScriptParser.PrintContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitPrint(@NotNull BigDataScriptParser.PrintContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLiteralReal(@NotNull BigDataScriptParser.LiteralRealContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLiteralReal(@NotNull BigDataScriptParser.LiteralRealContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionAssignmentPlus(@NotNull BigDataScriptParser.ExpressionAssignmentPlusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionAssignmentPlus(@NotNull BigDataScriptParser.ExpressionAssignmentPlusContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterForInit(@NotNull BigDataScriptParser.ForInitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitForInit(@NotNull BigDataScriptParser.ForInitContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterExpressionGe(@NotNull BigDataScriptParser.ExpressionGeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitExpressionGe(@NotNull BigDataScriptParser.ExpressionGeContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterLiteralList(@NotNull BigDataScriptParser.LiteralListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitLiteralList(@NotNull BigDataScriptParser.LiteralListContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterFunctionDeclaration(@NotNull BigDataScriptParser.FunctionDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitFunctionDeclaration(@NotNull BigDataScriptParser.FunctionDeclarationContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterReturn(@NotNull BigDataScriptParser.ReturnContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitReturn(@NotNull BigDataScriptParser.ReturnContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void enterEveryRule(@NotNull ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void exitEveryRule(@NotNull ParserRuleContext ctx) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitTerminal(@NotNull TerminalNode node) { }
/**
* {@inheritDoc}
*
* <p>The default implementation does nothing.</p>
*/
@Override public void visitErrorNode(@NotNull ErrorNode node) { }
}
| |
/**
*
*/
package com.zimbra.qa.selenium.projects.ajax.ui.preferences;
import com.zimbra.qa.selenium.framework.ui.AbsApplication;
import com.zimbra.qa.selenium.framework.ui.AbsDialog;
import com.zimbra.qa.selenium.framework.ui.AbsPage;
import com.zimbra.qa.selenium.framework.ui.AbsTab;
import com.zimbra.qa.selenium.framework.ui.Button;
import com.zimbra.qa.selenium.framework.util.HarnessException;
/**
* Represents the "Edit Filter" dialog box (ZmFilterRuleDialog)
* <p>
* @author Matt Rhoades
*
*/
public class DialogEditFilter extends AbsDialog {

    public static class Locators {
        public static final String MainDivID = "ZmFilterRuleDialog";
    }

    // It is difficult to determine if the first criteria/action row is already
    // filled out.  If not, then the user needs to click "+" to add a new one.
    //
    // Use these booleans to keep track of whether we are still working with
    // the pre-filled first row.  (Field names kept as-is — including the
    // non-conventional capital 'I' — for backward compatibility with
    // subclasses that may reference them.)
    protected boolean IsFirstCriteria = true;
    protected boolean isFirstAction = true;

    public DialogEditFilter(AbsApplication application, AbsTab tab) {
        super(application, tab);
    }

    /**
     * Click a button in this dialog.
     *
     * @param button either {@code Button.B_OK} or {@code Button.B_CANCEL}
     * @return the page that becomes active after the click, or null if none
     * @throws HarnessException if the button is not implemented for this dialog
     */
    @Override
    public AbsPage zClickButton(Button button) throws HarnessException {
        logger.info(myPageName() + " zClickButton("+ button +")");

        String locator;
        AbsPage page = null;

        if ( button == Button.B_OK ) {
            locator = "css=div[id='ZmFilterRuleDialog_buttons'] td[id^='OK_'] td[id$='_title']";
        } else if ( button == Button.B_CANCEL ) {
            locator = "css=div[id='ZmFilterRuleDialog_buttons'] td[id^='Cancel_'] td[id$='_title']";
        } else {
            throw new HarnessException("Button "+ button +" not implemented");
        }

        // Default behavior: click the locator and wait for the client to settle.
        // (The previous null-re-check on the locator was unreachable, since the
        // else branch above always throws; it has been removed.)
        zClick(locator);
        zWaitForBusyOverlay();

        // No button currently navigates to a new page, but keep the hook so
        // subclasses/future buttons can set 'page' above.
        if ( page != null ) {
            page.zWaitForActive();
        }

        return (page);
    }

    @Override
    public String zGetDisplayedText(String locator) throws HarnessException {
        throw new HarnessException("implement me");
    }

    /* (non-Javadoc)
     * @see framework.ui.AbsDialog#myPageName()
     */
    @Override
    public String myPageName() {
        return (this.getClass().getName());
    }

    /**
     * Determine whether this dialog is present and visible on screen.
     */
    @Override
    public boolean zIsActive() throws HarnessException {
        logger.info(myPageName() + " zIsActive()");

        String locator = "css=div[id='"+ Locators.MainDivID +"']";

        boolean present = this.sIsElementPresent(locator);
        if ( !present ) {
            logger.info("Locator was not present: " + locator);
            return (false);
        }

        boolean visible = this.zIsVisiblePerPosition(locator, 0, 0);
        if ( !visible ) {
            logger.info("Locator was not visible: " + locator);
            return (false);
        }

        return (true);
    }

    public enum Condition {
        Any,
        All,
    }

    public enum ConditionType {
        From,
        To,
        Cc,
        ToOrCc,
        Subject,
        HeaderNamed,
        Size,
        Date,
        Body,
        Attachment,
        ReadReceipt,
        AddressIn,
        Calendar,
        Social,
        Message,
        Address,
    }

    public enum ConditionConstraint {
        MatchesExactly,
        DoesNotMatchExcactly, // (sic) — misspelling kept for backward compatibility with existing callers
        Contains,
        DoesNotContain,
        MatchesWildcard,
        DoesNotMatchWildCard,
    }

    /**
     * Set the "any/all" condition selector.
     *
     * @throws HarnessException always — blocked on bugzilla 63823
     */
    public void zSetConditionAnyOrAll(Condition type) throws HarnessException {

        // Click the pulldown to activate the menu
        String locator = "css=div[id='ZmFilterRuleDialog_condition'] td[id$='_select_container'] td[id$='_dropdown'] div[class='ImgSelectPullDownArrow']";
        this.zClick(locator);
        this.zWaitForBusyOverlay();

        throw new HarnessException("see https://bugzilla.zimbra.com/show_bug.cgi?id=63823");
    }

    /**
     * Type the filter name into the name input box.
     */
    public void zSetFilterName(String name) throws HarnessException {

        String locator = "css=input[id='ZmFilterRuleDialog_name']";

        if ( !this.sIsElementPresent(locator) )
            throw new HarnessException("Unable to locate filter name input box");

        this.sType(locator, name);
        this.zWaitForBusyOverlay();
    }

    /**
     * Add a filter criteria row and fill in its value.
     * <p>
     * The first criteria row is pre-filled by the dialog; subsequent rows
     * must be created by clicking the "+" button on the last existing row.
     *
     * @param type only {@link ConditionType#Subject} is currently implemented
     * @param constraint only {@link ConditionConstraint#Contains} is currently implemented
     * @param value the text to type into the criteria value field
     */
    public void zAddFilterCriteria(ConditionType type, ConditionConstraint constraint, String value) throws HarnessException {

        String rowLocator = "css=table[id='ZmFilterRuleDialog_conditions']>tbody>tr";
        String locator;

        if ( !this.IsFirstCriteria ) {

            int i = this.sGetCssCount(rowLocator);
            if ( i < 1)
                throw new HarnessException("couldn't find any filter condition rows!");

            // Click the "+" on the last row to add a new one
            locator = rowLocator + ":nth-child("+i+") div[class='ImgPlus']";
            this.zClick(locator);
            this.zWaitForBusyOverlay();
        }

        // BUG FIX: this flag was previously cleared only inside the branch
        // above (i.e. when it was already false), so it could never transition
        // from true to false and the "+" button was never clicked for the
        // second and subsequent criteria.  Clear it unconditionally once the
        // pre-filled first row has been consumed.
        this.IsFirstCriteria = false;

        int count = this.sGetCssCount(rowLocator);
        if ( count < 1)
            throw new HarnessException("couldn't find any filter condition rows!");

        rowLocator = "css=table[id='ZmFilterRuleDialog_conditions']>tbody>tr:nth-child(" + count + ")"; // Use the last row

        if ( !type.equals(ConditionType.Subject) ) {
            // TODO!
            throw new HarnessException("implement me!");
        }

        if ( !constraint.equals(ConditionConstraint.Contains) ) {
            // TODO!
            throw new HarnessException("implement me!");
        }

        locator = rowLocator + " div[id^='FilterRuleDialog_INPUTFIELD_'] input[id^='FilterRuleDialog_INPUT_']";
        this.sType(locator, value);
        this.zWaitForBusyOverlay();
    }

    public void zAddFilterAction() throws HarnessException {
        throw new HarnessException("implement me!");
    }

}
| |
/**
* (c) Copyright 2013 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema.filter;
import static org.junit.Assert.assertEquals;
import static org.kiji.schema.avro.ComponentType.INTEGER;
import static org.kiji.schema.avro.ComponentType.LONG;
import static org.kiji.schema.avro.ComponentType.STRING;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.lang.reflect.Field;
import java.util.List;
import java.util.Random;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.WritableByteArrayComparable;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.kiji.schema.EntityId;
import org.kiji.schema.EntityIdFactory;
import org.kiji.schema.avro.ComponentType;
import org.kiji.schema.avro.HashSpec;
import org.kiji.schema.avro.HashType;
import org.kiji.schema.avro.RowKeyComponent;
import org.kiji.schema.avro.RowKeyEncoding;
import org.kiji.schema.avro.RowKeyFormat2;
/** Tests the FormattedEntityIdRowFilter. */
public class TestFormattedEntityIdRowFilter {
  // filter is a backwards operation, so false means the row will be included
  private static final boolean INCLUDE = false;
  private static final boolean EXCLUDE = true;

  // Fixed seed so that failures are reproducible from run to run.
  private static final Random RANDOM = new Random(1001L);

  // Row key formats covering 1- and 5-byte hashes over each component arity/type mix.
  private static final RowKeyFormat2[] TEST_CASES = new RowKeyFormat2[] {
    createRowKeyFormat(1, INTEGER),
    createRowKeyFormat(5, INTEGER),
    createRowKeyFormat(1, LONG),
    createRowKeyFormat(5, LONG),
    createRowKeyFormat(1, STRING),
    createRowKeyFormat(5, STRING),
    createRowKeyFormat(1, STRING, STRING),
    createRowKeyFormat(5, STRING, STRING),
    createRowKeyFormat(1, INTEGER, INTEGER),
    createRowKeyFormat(5, INTEGER, INTEGER),
    createRowKeyFormat(1, LONG, LONG),
    createRowKeyFormat(5, LONG, LONG),
    createRowKeyFormat(1, INTEGER, LONG, STRING),
    createRowKeyFormat(5, INTEGER, LONG, STRING),
    createRowKeyFormat(1, STRING, INTEGER, LONG, STRING),
    createRowKeyFormat(5, STRING, INTEGER, LONG, STRING),
  };

  /** Builds a FORMATTED row key format with components named 'a', 'b', ... of the given types. */
  private static RowKeyFormat2 createRowKeyFormat(int hashLength, ComponentType... componentTypes) {
    RowKeyFormat2.Builder builder = RowKeyFormat2.newBuilder()
        .setEncoding(RowKeyEncoding.FORMATTED);
    if (hashLength > 0) {
      builder.setSalt(new HashSpec(HashType.MD5, hashLength, false));
    }
    List<RowKeyComponent> components = Lists.newArrayList();
    char field = 'a';
    for (ComponentType componentType : componentTypes) {
      components.add(new RowKeyComponent(String.valueOf(field), componentType));
      field = (char) (field + 1);
    }
    builder.setComponents(components);
    return builder.build();
  }

  private static FormattedEntityIdRowFilter createFilter(RowKeyFormat2 format, Object... components)
      throws Exception {
    return new FormattedEntityIdRowFilter(format, components);
  }

  /** Returns a deterministic value for the given component type. */
  private static Object createStableValue(ComponentType type) {
    switch (type) {
      case INTEGER:
        return 42;
      case LONG:
        return 349203L;
      case STRING:
        return "value";
      default:
        throw new IllegalArgumentException("Unknown ComponentType: " + type);
    }
  }

  /** Returns a (seeded-)random value for the given component type. */
  private static Object createRandomValue(ComponentType type) {
    switch (type) {
      case INTEGER:
        return RANDOM.nextInt();
      case LONG:
        return RANDOM.nextLong();
      case STRING:
        byte[] bytes = new byte[16];
        RANDOM.nextBytes(bytes);
        return Bytes.toStringBinary(bytes);
      default:
        throw new IllegalArgumentException("Unknown ComponentType: " + type);
    }
  }

  /** Returns the minimum representable value for the given component type. */
  private static Object createMinValue(ComponentType type) {
    switch (type) {
      case INTEGER:
        return Integer.MIN_VALUE;
      case LONG:
        return Long.MIN_VALUE;
      case STRING:
        return "";
      default:
        throw new IllegalArgumentException("Unknown ComponentType: " + type);
    }
  }

  /** A filter definition plus entity-id component lists it should include and exclude. */
  private static class FilterAndTestValues {
    private List<Object> mFilterValues = Lists.newArrayList();
    private List<List<Object>> mIncludedTestValues = Lists.newArrayList();
    private List<List<Object>> mExcludedTestValues = Lists.newArrayList();
  }

  private static List<FilterAndTestValues> createFilterAndTestValues(
      List<RowKeyComponent> components) {
    List<FilterAndTestValues> filterAndTestValues = Lists.newArrayList();
    List<List<Object>> filterCombinations = createFilterCombinations(components);
    // skip over the last all-null combination, which does not make much sense
    // for a filter
    for (List<Object> filterValues : filterCombinations.subList(0, filterCombinations.size() - 1)) {
      FilterAndTestValues fatv = new FilterAndTestValues();
      fatv.mFilterValues = filterValues;

      fatv.mIncludedTestValues.add(correctEntityComponents(components, filterValues));

      List<List<Object>> excludedCombinations =
          createExcludedCombinations(components, filterValues);
      for (List<Object> excludedCombination : excludedCombinations) {
        fatv.mExcludedTestValues.add(excludedCombination);
      }

      filterAndTestValues.add(fatv);
    }
    return filterAndTestValues;
  }

  // corrects values so that the EntityId constructed from this set can be
  // constructed. returns a new list with the corrected values
  private static List<Object> correctEntityComponents(
      List<RowKeyComponent> components, List<Object> values) {
    List<Object> correctedValues = Lists.newArrayList(values);
    for (int i = 0; i < correctedValues.size(); i++) {
      if (null == correctedValues.get(i)) {
        correctedValues.set(i, createRandomValue(components.get(i).getType()));
      }
    }
    return correctedValues;
  }

  /** Recursively builds every stable-value/null combination over the components. */
  private static List<List<Object>> createFilterCombinations(List<RowKeyComponent> components) {
    List<List<Object>> combinations = Lists.newArrayList();
    ComponentType type = components.get(0).getType();
    if (components.size() == 1) {
      combinations.add(Lists.newArrayList(createStableValue(type)));
      combinations.add(Lists.newArrayList((Object)null));
    } else {
      List<List<Object>> subCombinations =
          createFilterCombinations(components.subList(1, components.size()));
      for (List<Object> subCombination : subCombinations) {
        List<Object> newCombination = Lists.newArrayList(createStableValue(type));
        newCombination.addAll(subCombination);
        combinations.add(newCombination);
        newCombination = Lists.newArrayList((Object)null);
        newCombination.addAll(subCombination);
        combinations.add(newCombination);
      }
    }
    return combinations;
  }

  /** Recursively builds combinations of random/min values that must NOT match the filter. */
  private static List<List<Object>> createExcludedCombinations(
      List<RowKeyComponent> components, List<Object> filterValues) {
    List<List<Object>> combinations = Lists.newArrayList();
    ComponentType type = components.get(0).getType();
    if (filterValues.size() == 1) {
      combinations.add(Lists.newArrayList(createRandomValue(type)));
      combinations.add(Lists.newArrayList(createMinValue(type)));
    } else {
      List<List<Object>> subCombinations = createExcludedCombinations(
          components.subList(1, components.size()), filterValues.subList(1, filterValues.size()));
      for (List<Object> subCombination : subCombinations) {
        List<Object> newCombination = Lists.newArrayList(createRandomValue(type));
        newCombination.addAll(subCombination);
        combinations.add(newCombination);
        newCombination = Lists.newArrayList(createMinValue(type));
        newCombination.addAll(subCombination);
        combinations.add(newCombination);
      }
    }
    return combinations;
  }

  @Test
  public void testAllCases() throws Exception {
    for (RowKeyFormat2 rowKeyFormat : TEST_CASES) {
      EntityIdFactory factory = EntityIdFactory.getFactory(rowKeyFormat);
      List<FilterAndTestValues> filterAndTestValues =
          createFilterAndTestValues(rowKeyFormat.getComponents());
      for (FilterAndTestValues filterAndTest : filterAndTestValues) {
        FormattedEntityIdRowFilter filter =
            createFilter(rowKeyFormat, filterAndTest.mFilterValues.toArray());
        for (List<Object> includedValues : filterAndTest.mIncludedTestValues) {
          runTest(rowKeyFormat, filter, factory, INCLUDE, includedValues.toArray());
        }
        for (List<Object> excludedValues : filterAndTest.mExcludedTestValues) {
          runTest(rowKeyFormat, filter, factory, EXCLUDE, excludedValues.toArray());
        }
      }
    }
  }

  private final RowKeyFormat2 mRowKeyFormat = createRowKeyFormat(1, INTEGER, LONG, STRING);

  private final EntityIdFactory mFactory = EntityIdFactory.getFactory(mRowKeyFormat);

  @Test
  public void testFormattedEntityIdRowFilter() throws Exception {
    FormattedEntityIdRowFilter filter = createFilter(mRowKeyFormat, 100, null, "value");
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 100, 2000L, "value");
    runTest(mRowKeyFormat, filter, mFactory, EXCLUDE, 100, null, null);
    runTest(mRowKeyFormat, filter, mFactory, EXCLUDE, 0, null, null);
  }

  @Test
  public void testPrefixMatching() throws Exception {
    FormattedEntityIdRowFilter filter = createFilter(mRowKeyFormat, 42, null, null);
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 42, 4200L, "name");
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 42, 4200L, null);
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 42, null, null);
    runTest(mRowKeyFormat, filter, mFactory, EXCLUDE, 43, 4200L, "name");
  }

  @Test
  public void testMidComponentMatching() throws Exception {
    FormattedEntityIdRowFilter filter = createFilter(mRowKeyFormat, null, 6000L, null);
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 50, 6000L, "anything");
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 50, 6000L, null);
    runTest(mRowKeyFormat, filter, mFactory, EXCLUDE, 50, 5999L, "anything");
  }

  @Test
  public void testSuffixComponentMatching() throws Exception {
    FormattedEntityIdRowFilter filter = createFilter(mRowKeyFormat, null, null, "value");
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 50, 6000L, "value");
    runTest(mRowKeyFormat, filter, mFactory, EXCLUDE, 50, 6000L, null);
    runTest(mRowKeyFormat, filter, mFactory, EXCLUDE, 50, 5999L, "anything");
  }

  @Test
  public void testPrefixNumberMatching() throws Exception {
    RowKeyFormat2 rowKeyFormat = createRowKeyFormat(1, LONG, LONG);
    EntityIdFactory factory = EntityIdFactory.getFactory(rowKeyFormat);
    FormattedEntityIdRowFilter filter = createFilter(rowKeyFormat, 4224L, null);
    runTest(rowKeyFormat, filter, factory, INCLUDE, 4224L, 5005L);
    runTest(rowKeyFormat, filter, factory, INCLUDE, 4224L, null);
    runTest(rowKeyFormat, filter, factory, INCLUDE, 4224L, Long.MAX_VALUE);
    runTest(rowKeyFormat, filter, factory, INCLUDE, 4224L, Long.MIN_VALUE);
    runTest(rowKeyFormat, filter, factory, EXCLUDE, Long.MIN_VALUE, 5005L);
    runTest(rowKeyFormat, filter, factory, EXCLUDE, Long.MIN_VALUE, null);
    runTest(rowKeyFormat, filter, factory, EXCLUDE, Long.MIN_VALUE, Long.MAX_VALUE);
    runTest(rowKeyFormat, filter, factory, EXCLUDE, Long.MIN_VALUE, Long.MIN_VALUE);
  }

  @Test
  public void testUnicodeStringInFilterMatching() throws Exception {
    RowKeyFormat2 rowKeyFormat = createRowKeyFormat(1, STRING);
    EntityIdFactory factory = EntityIdFactory.getFactory(rowKeyFormat);
    String match = "This is a star: \u2605";
    String noMatch = "This is not a star";
    FormattedEntityIdRowFilter filter = createFilter(rowKeyFormat, match);
    runTest(rowKeyFormat, filter, factory, INCLUDE, match);
    runTest(rowKeyFormat, filter, factory, EXCLUDE, noMatch);
  }

  @Test
  public void testUnicodeStringInEntityIdMatching() throws Exception {
    RowKeyFormat2 rowKeyFormat = createRowKeyFormat(1, STRING);
    EntityIdFactory factory = EntityIdFactory.getFactory(rowKeyFormat);
    String match = "This is not a star";
    String noMatch = "This is a star: \u2605";
    FormattedEntityIdRowFilter filter = createFilter(rowKeyFormat, match);
    runTest(rowKeyFormat, filter, factory, INCLUDE, match);
    runTest(rowKeyFormat, filter, factory, EXCLUDE, noMatch);
  }

  @Test
  public void testPrefixDefinedByFewerThanFormatComponents() throws Exception {
    // this is the same as a filter defined with (100, null, null)
    FormattedEntityIdRowFilter filter = createFilter(mRowKeyFormat, 100);
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 100, 2000L, "value");
    runTest(mRowKeyFormat, filter, mFactory, INCLUDE, 100, null, null);
    runTest(mRowKeyFormat, filter, mFactory, EXCLUDE, 0, 2000L, "value");
    runTest(mRowKeyFormat, filter, mFactory, EXCLUDE, 0, null, null);
  }

  // NOTE(review): method name is historical/misleading — this test actually
  // verifies that the filter survives a Writable serialization round-trip.
  @Test
  public void testLatinNewlineCharacterInclusion() throws Exception {
    RowKeyFormat2 rowKeyFormat = createRowKeyFormat(1, INTEGER, LONG);
    EntityIdFactory factory = EntityIdFactory.getFactory(rowKeyFormat);

    // Create and serialize a filter
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    DataOutputStream dos = new DataOutputStream(baos);
    FormattedEntityIdRowFilter filter = createFilter(rowKeyFormat, 10);
    filter.toHBaseFilter(null).write(dos);

    // Deserialize the filter
    ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    DataInputStream dis = new DataInputStream(bais);
    Filter deserializedFilter = new FilterList();
    deserializedFilter.readFields(dis);

    // Filter an entity with the deserialized filter
    EntityId entityId = factory.getEntityId(10, 10L);
    byte[] hbaseKey = entityId.getHBaseRowKey();
    boolean filtered = deserializedFilter.filterRowKey(hbaseKey, 0, hbaseKey.length);
    assertEquals(INCLUDE, filtered);
  }

  @Test
  public void testHashIsCalculatedWhenAllHashComponentsAreSpecified() throws Exception {
    final int hashLength = 2;
    RowKeyFormat2.Builder builder = RowKeyFormat2.newBuilder()
        .setEncoding(RowKeyEncoding.FORMATTED)
        .setSalt(new HashSpec(HashType.MD5, hashLength, false))
        .setRangeScanStartIndex(1);
    List<RowKeyComponent> components = ImmutableList.of(
            new RowKeyComponent("id", INTEGER), // this one is included in the hash
            new RowKeyComponent("ts", LONG));   // this one is not
    builder.setComponents(components);
    RowKeyFormat2 rowKeyFormat = builder.build();
    EntityIdFactory factory = EntityIdFactory.getFactory(rowKeyFormat);
    FormattedEntityIdRowFilter filter = createFilter(rowKeyFormat, 100);
    Object[] componentValues = new Object[] { Integer.valueOf(100), Long.valueOf(900000L) };
    runTest(rowKeyFormat, filter, factory, INCLUDE, componentValues);
    EntityId entityId = factory.getEntityId(componentValues);
    byte[] hbaseKey = entityId.getHBaseRowKey();
    Filter hbaseFilter = filter.toHBaseFilter(null);
    // A row key with a different hash but the same first component should be
    // excluded by the filter. The hash is 0x9f0f
    hbaseKey[0] = (byte) 0x7F;
    hbaseKey[1] = (byte) 0xFF;
    // BUG FIX: previously filterRowKey was called here with its result stored
    // in an unused local, and then called AGAIN inside doInclusionAssert.
    // HBase filters are stateful, so the row key must be evaluated exactly
    // once; the redundant call has been removed.
    doInclusionAssert(rowKeyFormat, filter, entityId, hbaseFilter, hbaseKey, EXCLUDE);
  }

  @Test
  public void testHashWildcardIsUsedForMissingHashComponents() throws Exception {
    RowKeyFormat2 rowKeyFormat = createRowKeyFormat(1, INTEGER, LONG, STRING);
    rowKeyFormat.setRangeScanStartIndex(2);
    EntityIdFactory factory = EntityIdFactory.getFactory(rowKeyFormat);
    FormattedEntityIdRowFilter filter = createFilter(rowKeyFormat, 100, null, "value");
    runTest(rowKeyFormat, filter, factory, INCLUDE, 100, 2000L, "value");
    runTest(rowKeyFormat, filter, factory, EXCLUDE, 100, null, null);
    runTest(rowKeyFormat, filter, factory, EXCLUDE, 0, null, null);
  }

  @Test
  public void testPrefixFilterHaltsFiltering() throws Exception {
    RowKeyFormat2 rowKeyFormat = createRowKeyFormat(1, INTEGER, LONG, LONG);
    EntityIdFactory factory = EntityIdFactory.getFactory(rowKeyFormat);
    FormattedEntityIdRowFilter filter = createFilter(rowKeyFormat, 100, null, 9000L);
    Filter hbaseFilter = filter.toHBaseFilter(null);

    EntityId passingEntityId = factory.getEntityId(100, 100L, 9000L);
    byte[] passingHbaseKey = passingEntityId.getHBaseRowKey();
    doInclusionAssert(rowKeyFormat, filter, passingEntityId, hbaseFilter, passingHbaseKey, INCLUDE);
    boolean filterAllRemaining = hbaseFilter.filterAllRemaining();
    String message = createFailureMessage(rowKeyFormat, filter, passingEntityId, hbaseFilter,
        passingHbaseKey, filterAllRemaining);
    assertEquals(message, false, filterAllRemaining);

    EntityId failingEntityId = factory.getEntityId(101, 100L, 9000L);
    byte[] failingHbaseKey = failingEntityId.getHBaseRowKey();
    doInclusionAssert(rowKeyFormat, filter, failingEntityId, hbaseFilter, failingHbaseKey, EXCLUDE);
    filterAllRemaining = hbaseFilter.filterAllRemaining();
    message = createFailureMessage(rowKeyFormat, filter, failingEntityId, hbaseFilter,
        failingHbaseKey, filterAllRemaining);
    assertEquals(message, true, filterAllRemaining);
  }

  /** Builds an entity id from the components and asserts the filter's inclusion decision. */
  private void runTest(RowKeyFormat2 rowKeyFormat, FormattedEntityIdRowFilter filter,
      EntityIdFactory factory, boolean expectedFilter, Object... components) throws Exception {
    EntityId entityId = factory.getEntityId(components);
    byte[] hbaseKey = entityId.getHBaseRowKey();
    Filter hbaseFilter = filter.toHBaseFilter(null);
    doInclusionAssert(rowKeyFormat, filter, entityId, hbaseFilter, hbaseKey, expectedFilter);
  }

  private void doInclusionAssert(RowKeyFormat2 rowKeyFormat, FormattedEntityIdRowFilter filter,
      EntityId entityId, Filter hbaseFilter, byte[] hbaseKey, boolean expectedFilter)
      throws Exception {
    boolean filtered = hbaseFilter.filterRowKey(hbaseKey, 0, hbaseKey.length);
    String message = createFailureMessage(rowKeyFormat, filter, entityId, hbaseFilter,
        hbaseKey, filtered);
    assertEquals(message, expectedFilter, filtered);
  }

  /** Formats a diagnostic message describing the filter, entity id, and decision. */
  private String createFailureMessage(RowKeyFormat2 rowKeyFormat, FormattedEntityIdRowFilter filter,
      EntityId entityId, Filter hbaseFilter, byte[] hbaseKey, boolean filtered)
      throws Exception {
    return String.format(
        "RowKeyFormat: %s%nComponents: %s%nEntityId: %s%nFilter: %s%nHBase key: %s%nIncluded: %s%n",
        rowKeyFormat, fetchComponents(filter), entityId.toShellString(),
        filterToString(hbaseFilter), toBinaryString(hbaseKey), !filtered);
  }

  /** Renders a byte array as \xNN escapes for failure messages. */
  private String toBinaryString(byte[] bytes) {
    StringBuilder buf = new StringBuilder();
    for (byte b : bytes) {
      buf.append(String.format("\\x%02x", b & 0xFF));
    }
    return buf.toString();
  }

  // Reflection is used below because the filter does not expose its internals;
  // these helpers are for diagnostics only.
  private String fetchComponents(FormattedEntityIdRowFilter filter) throws Exception {
    Field componentField = filter.getClass().getDeclaredField("mComponents");
    componentField.setAccessible(true);
    return Lists.newArrayList((Object[])componentField.get(filter)).toString();
  }

  private String filterToString(Filter filter) throws Exception {
    if (filter instanceof FilterList) {
      List<Filter> filters = ((FilterList) filter).getFilters();
      return String.format("[%s] AND [%s]",
          prefixFilterToString((PrefixFilter) filters.get(0)),
          rowFilterToString((RowFilter) filters.get(1)));
    } else {
      return rowFilterToString((RowFilter) filter);
    }
  }

  private String prefixFilterToString(PrefixFilter prefixFilter) throws Exception {
    return toBinaryString(prefixFilter.getPrefix());
  }

  private String rowFilterToString(RowFilter rowFilter) throws Exception {
    WritableByteArrayComparable comparator = rowFilter.getComparator();
    Field patternField = comparator.getClass().getDeclaredField("pattern");
    patternField.setAccessible(true);
    return String.format("Regex: %s", patternField.get(comparator));
  }
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.NodeUtil.Visitor;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.JSTypeExpression;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
* This pass walks the AST to create a Collection of 'new' nodes and
* 'goog.require' nodes. It reconciles these Collections, creating a
* warning for each discrepancy.
*
* <p>The rules on when a warning is reported are: <ul>
* <li>Type is referenced in code -> goog.require is required
* (missingRequires check fails if it's not there)
* <li>Type is referenced in an @extends or @implements -> goog.require is required
* (missingRequires check fails if it's not there)
* <li>Type is referenced in other JsDoc (@type etc) -> goog.require is optional
* (don't warn, regardless of if it is there)
* <li>Type is not referenced at all -> goog.require is forbidden
* (extraRequires check fails if it is there)
* </ul>
*
*/
class CheckRequiresForConstructors implements HotSwapCompilerPass, NodeTraversal.Callback {
  private final AbstractCompiler compiler;
  private final CodingConvention codingConvention;

  // Names for which a constructor/interface/class declaration has been seen
  // in the traversed sources; these never need a goog.require.
  private final Set<String> constructors = new HashSet<>();

  // Maps each goog.require'd namespace to its goog.require call node.
  private final Map<String, Node> requires = new HashMap<>();

  // Adding an entry to usages indicates that the name is used and should be required.
  private final Map<String, Node> usages = new HashMap<>();

  // Adding an entry to weakUsages indicates that the name is used, but in a way which may not
  // require a goog.require, such as in a @type annotation. If the only usages of a name are
  // in weakUsages, don't give a missingRequire warning, nor an extraRequire warning.
  private final Map<String, Node> weakUsages = new HashMap<>();

  // Warnings
  static final DiagnosticType MISSING_REQUIRE_WARNING =
      DiagnosticType.disabled(
          "JSC_MISSING_REQUIRE_WARNING", "''{0}'' used but not goog.require''d");

  static final DiagnosticType EXTRA_REQUIRE_WARNING = DiagnosticType.disabled(
      "JSC_EXTRA_REQUIRE_WARNING",
      "''{0}'' goog.require''d but not used");

  static final DiagnosticType DUPLICATE_REQUIRE_WARNING = DiagnosticType.disabled(
      "JSC_DUPLICATE_REQUIRE_WARNING",
      "''{0}'' goog.require''d more than once.");

  // Namespaces that are never reported as extra requires; presumably these
  // are required for side effects only (e.g. in tests).
  private static final Set<String> DEFAULT_EXTRA_NAMESPACES = ImmutableSet.of(
      "goog.testing.asserts", "goog.testing.jsunit");

  CheckRequiresForConstructors(AbstractCompiler compiler) {
    this.compiler = compiler;
    this.codingConvention = compiler.getCodingConvention();
  }

  /**
   * Uses Collections of new and goog.require nodes to create a compiler warning
   * for each new class name without a corresponding goog.require().
   */
  @Override
  public void process(Node externs, Node root) {
    NodeTraversal.traverseRoots(compiler, this, externs, root);
  }

  @Override
  public void hotSwapScript(Node scriptRoot, Node originalRoot) {
    NodeTraversal.traverseEs6(compiler, scriptRoot, this);
  }

  // Return true if the name is a class name (starts with an uppercase
  // character, but is not in all caps).
  private static boolean isClassName(String name) {
    return (name != null && name.length() > 1
        && Character.isUpperCase(name.charAt(0))
        && !name.equals(name.toUpperCase()));
  }

  // Return the shortest prefix of the className that refers to a class,
  // or null if no part refers to a class.
  private static String getOutermostClassName(String className) {
    // Track how many characters of the name have been consumed instead of
    // using className.indexOf(part): indexOf can match an earlier, unrelated
    // occurrence of the part (e.g. "xAbc.Abc" would be truncated to "xAbc"
    // rather than returning "xAbc.Abc").
    int prefixLength = 0;
    for (String part : Splitter.on('.').split(className)) {
      if (isClassName(part)) {
        return className.substring(0, prefixLength + part.length());
      }
      prefixLength += part.length() + 1; // +1 for the '.' separator
    }
    return null;
  }

  @Override
  public boolean shouldTraverse(NodeTraversal t, Node n, Node parent) {
    // Skip extern scripts entirely: externs never need goog.require.
    return parent == null || !parent.isScript() || !t.getInput().isExtern();
  }

  @Override
  public void visit(NodeTraversal t, Node n, Node parent) {
    maybeAddJsDocUsages(t, n);
    switch (n.getType()) {
      case Token.ASSIGN:
      case Token.VAR:
      case Token.LET:
      case Token.CONST:
        maybeAddConstructor(n);
        break;
      case Token.FUNCTION:
        // Exclude function expressions.
        if (NodeUtil.isStatement(n)) {
          maybeAddConstructor(n);
        }
        break;
      case Token.GETPROP:
        visitGetProp(n);
        break;
      case Token.CALL:
        visitCallNode(n, parent);
        break;
      case Token.SCRIPT:
        visitScriptNode(t);
        break;
      case Token.NEW:
        visitNewNode(t, n);
        break;
      case Token.CLASS:
        visitClassNode(n);
        break;
    }
  }

  /**
   * Called at the end of each SCRIPT: reconciles the collected usages against
   * the collected requires, reporting missing and extra requires, then clears
   * all per-script state so it doesn't spill into the next script.
   */
  private void visitScriptNode(NodeTraversal t) {
    Set<String> classNames = new HashSet<>();

    // For every usage, check that there is a goog.require, and warn if not.
    for (Map.Entry<String, Node> entry : usages.entrySet()) {
      String className = entry.getKey();
      Node node = entry.getValue();

      String outermostClassName = getOutermostClassName(className);
      // The parent namespace is also checked as part of the requires so that classes
      // used by goog.module are still checked properly. This may cause missing requires
      // to be missed but in practice that should happen rarely.
      String nonNullClassName = outermostClassName != null ? outermostClassName : className;
      String parentNamespace = null;
      int separatorIndex = nonNullClassName.lastIndexOf('.');
      if (separatorIndex > 0) {
        parentNamespace = nonNullClassName.substring(0, separatorIndex);
      }
      boolean notProvidedByConstructors =
          (constructors == null
              || (!constructors.contains(className) && !constructors.contains(outermostClassName)));
      boolean notProvidedByRequires =
          (requires == null || (!requires.containsKey(className)
                                && !requires.containsKey(outermostClassName)
                                && !requires.containsKey(parentNamespace)));
      if (notProvidedByConstructors && notProvidedByRequires
          && !classNames.contains(className)) {
        // TODO(mknichel): If the symbol is not explicitly provided, find the next best
        // symbol from the provides in the same file.
        compiler.report(t.makeError(node, MISSING_REQUIRE_WARNING, className));
        classNames.add(className);
      }
    }

    // For every goog.require, check that there is a usage (in either usages or weakUsages)
    // and warn if there is not.
    for (Map.Entry<String, Node> entry : requires.entrySet()) {
      String require = entry.getKey();
      Node call = entry.getValue();
      Node parent = call.getParent();
      if (parent.isAssign()) {
        // var baz = goog.require('foo.bar.baz');
        // Assume that the var 'baz' is used somewhere, and don't warn.
        continue;
      }
      if (!usages.containsKey(require) && !weakUsages.containsKey(require)) {
        reportExtraRequireWarning(call, require);
      }
    }

    // for the next script, if there is one, we don't want the new, ctor, and
    // require nodes to spill over.
    this.usages.clear();
    this.weakUsages.clear();
    this.requires.clear();
    this.constructors.clear();
  }

  /** Reports an extra-require warning unless it is whitelisted or suppressed. */
  private void reportExtraRequireWarning(Node call, String require) {
    if (DEFAULT_EXTRA_NAMESPACES.contains(require)) {
      return;
    }
    JSDocInfo jsDoc = call.getJSDocInfo();
    if (jsDoc != null && jsDoc.getSuppressions().contains("extraRequire")) {
      // There is a @suppress {extraRequire} on the call node. Even though the compiler generally
      // doesn't understand @suppress in that position, respect it in this case,
      // since lots of people put it there to suppress the closure-linter's extraRequire check.
      return;
    }
    compiler.report(JSError.make(call, EXTRA_REQUIRE_WARNING, require));
  }

  private void reportDuplicateRequireWarning(Node call, String require) {
    compiler.report(JSError.make(call, DUPLICATE_REQUIRE_WARNING, require));
  }

  /**
   * Records a goog.require call (warning on duplicates), and records a weak
   * usage of the callee name so that aliased call targets count as used.
   */
  private void visitCallNode(Node call, Node parent) {
    String required = codingConvention.extractClassNameIfRequire(call, parent);
    if (required != null) {
      if (requires.containsKey(required)) {
        reportDuplicateRequireWarning(call, required);
      } else {
        requires.put(required, call);
      }
    }
    Node callee = call.getFirstChild();
    if (callee.isName()) {
      weakUsages.put(callee.getString(), callee);
    }
  }

  private void visitGetProp(Node getprop) {
    // For "foo.bar.baz.qux" add weak usages for "foo.bar.baz.qux", foo.bar.baz",
    // "foo.bar", and "foo" because those might all be goog.provide'd in different files,
    // so it doesn't make sense to require the user to goog.require all of them.
    for (; getprop != null; getprop = getprop.getFirstChild()) {
      weakUsages.put(getprop.getQualifiedName(), getprop);
    }
  }

  /**
   * Records a (strong) usage for "new Foo.Bar()" expressions whose callee is
   * a global qualified name, plus weak usages for each prefix of the name.
   */
  private void visitNewNode(NodeTraversal t, Node newNode) {
    Node qNameNode = newNode.getFirstChild();

    // If the ctor is something other than a qualified name, ignore it.
    if (!qNameNode.isQualifiedName()) {
      return;
    }

    // Grab the root ctor namespace.
    Node root = NodeUtil.getRootOfQualifiedName(qNameNode);

    // We only consider programmer-defined constructors that are
    // global variables, or are defined on global variables.
    if (!root.isName()) {
      return;
    }

    String name = root.getString();
    Var var = t.getScope().getVar(name);
    if (var != null && (var.isLocal() || var.isExtern())) {
      return;
    }
    usages.put(qNameNode.getQualifiedName(), newNode);

    // for "new foo.bar.Baz.Qux" add weak usages for "foo.bar.Baz", "foo.bar", and "foo"
    // because those might be goog.provide'd from a different file than foo.bar.Baz.Qux,
    // so it doesn't make sense to require the user to goog.require all of them.
    for (; qNameNode != null; qNameNode = qNameNode.getFirstChild()) {
      weakUsages.put(qNameNode.getQualifiedName(), qNameNode);
    }
  }

  /**
   * Records the declared class name as provided, and records a usage of the
   * extends clause (if any) so the superclass must be goog.require'd.
   */
  private void visitClassNode(Node classNode) {
    String name = NodeUtil.getClassName(classNode);
    if (name != null) {
      constructors.add(name);
    }

    // Second child of a CLASS node is the extends clause (or an empty node).
    Node extendClass = classNode.getFirstChild().getNext();
    if (extendClass.isQualifiedName()) {
      usages.put(extendClass.getQualifiedName(), extendClass);
    }
  }

  /**
   * If the node carries JSDoc marking it as a constructor/interface (or a
   * function-returning-new type annotation), records its qualified name as
   * a provided constructor.
   */
  private void maybeAddConstructor(Node n) {
    JSDocInfo info = n.getJSDocInfo();
    if (info != null) {
      String ctorName = n.getFirstChild().getQualifiedName();
      if (info.isConstructorOrInterface()) {
        constructors.add(ctorName);
      } else {
        JSTypeExpression typeExpr = info.getType();
        if (typeExpr != null) {
          Node typeExprRoot = typeExpr.getRoot();
          if (typeExprRoot.isFunction() && typeExprRoot.getFirstChild().isNew()) {
            constructors.add(ctorName);
          }
        }
      }
    }
  }

  /**
   * If this returns true, check for @extends and @implements annotations on this node.
   * Otherwise, it's probably an alias for an existing class, so skip those annotations.
   *
   * @return Whether the given node declares a function. True for the following forms:
   *     <li><pre>function foo() {}</pre>
   *     <li><pre>var foo = function() {};</pre>
   *     <li><pre>foo.bar = function() {};</pre>
   */
  private boolean declaresFunction(Node n) {
    if (n.isFunction()) {
      return true;
    }

    if (n.isAssign() && n.getLastChild().isFunction()) {
      return true;
    }

    if (NodeUtil.isNameDeclaration(n) && n.getFirstChild().hasChildren()
        && n.getFirstChild().getFirstChild().isFunction()) {
      return true;
    }

    return false;
  }

  /**
   * Records usages from the node's JSDoc: strong usages for @extends,
   * @implements and @constructor base types (on function declarations only),
   * weak usages for every other type annotation.
   */
  private void maybeAddJsDocUsages(NodeTraversal t, Node n) {
    JSDocInfo info = n.getJSDocInfo();
    if (info == null) {
      return;
    }

    if (declaresFunction(n)) {
      for (JSTypeExpression expr : info.getImplementedInterfaces()) {
        maybeAddUsage(t, n, expr);
      }
      if (info.getBaseType() != null) {
        maybeAddUsage(t, n, info.getBaseType());
      }
      for (JSTypeExpression extendedInterface : info.getExtendedInterfaces()) {
        maybeAddUsage(t, n, extendedInterface);
      }
    }

    for (Node typeNode : info.getTypeNodes()) {
      maybeAddWeakUsage(t, n, typeNode);
    }
  }

  /**
   * Adds a weak usage for the given type expression (unless it references a variable that is
   * defined in the externs, in which case no goog.require() is needed). When a "weak usage"
   * is added, it means that a goog.require for that type is optional: No
   * warning is given whether the require is there or not.
   */
  private void maybeAddWeakUsage(NodeTraversal t, Node n, Node typeNode) {
    maybeAddUsage(t, n, typeNode, this.weakUsages, Predicates.<Node>alwaysTrue());
  }

  /**
   * Adds a usage for the given type expression (unless it references a variable that is
   * defined in the externs, in which case no goog.require() is needed). When a usage is
   * added, it means that there should be a goog.require for that type.
   */
  private void maybeAddUsage(NodeTraversal t, Node n, final JSTypeExpression expr) {
    // Just look at the root node, don't traverse.
    Predicate<Node> pred = new Predicate<Node>() {
      @Override
      public boolean apply(Node n) {
        return n == expr.getRoot();
      }
    };
    maybeAddUsage(t, n, expr.getRoot(), this.usages, pred);
  }

  /**
   * Walks the type expression (filtered by {@code pred}) and records each
   * string type name into {@code usagesMap}, skipping names whose root is an
   * extern variable.  Also records weak usages for every prefix of the name.
   */
  private void maybeAddUsage(final NodeTraversal t, final Node n, Node rootTypeNode,
      final Map<String, Node> usagesMap, Predicate<Node> pred) {
    Visitor visitor = new Visitor() {
      @Override
      public void visit(Node typeNode) {
        if (typeNode.isString()) {
          String typeString = typeNode.getString();
          String rootName = Splitter.on('.').split(typeString).iterator().next();
          Var var = t.getScope().getVar(rootName);
          if (var == null || !var.isExtern()) {
            usagesMap.put(typeString, n);

            // Regardless of whether we're adding a weak or strong usage here, add weak usages for
            // the prefixes of the namespace, like we do for GETPROP nodes. Otherwise we get an
            // extra require warning for cases like:
            //
            //     goog.require('foo.bar.SomeService');
            //
            //     /** @constructor @extends {foo.bar.SomeService.Handler} */
            //     var MyHandler = function() {};
            Node getprop = NodeUtil.newQName(compiler, typeString);
            getprop.useSourceInfoIfMissingFromForTree(typeNode);
            visitGetProp(getprop);
          }
        }
      }
    };

    NodeUtil.visitPreOrder(rootTypeNode, visitor, pred);
  }
}
| |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.google.protobuf;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.EnumValueDescriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
/**
* All generated protocol message classes extend this class. This class
* implements most of the Message and Builder interfaces using Java reflection.
* Users can ignore this class and pretend that generated messages implement
* the Message interface directly.
*
* @author kenton@google.com Kenton Varda
*/
public abstract class GeneratedMessage extends AbstractMessage {
  /** For use by generated subclasses only. */
  protected GeneratedMessage() {}

  // Fields seen on the wire that are not defined by the descriptor; replaced
  // wholesale via Builder.setUnknownFields()/mergeUnknownFields().
  private UnknownFieldSet unknownFields = UnknownFieldSet.getDefaultInstance();

  /**
   * Get the FieldAccessorTable for this type. We can't have the message
   * class pass this in to the constructor because of bootstrapping trouble
   * with DescriptorProtos.
   */
  protected abstract FieldAccessorTable internalGetFieldAccessorTable();
  /** Returns this message type's descriptor, via the generated accessor table. */
  public Descriptor getDescriptorForType() {
    return internalGetFieldAccessorTable().descriptor;
  }
/** Internal helper which returns a mutable map. */
private final Map<FieldDescriptor, Object> getAllFieldsMutable() {
TreeMap<FieldDescriptor, Object> result =
new TreeMap<FieldDescriptor, Object>();
Descriptor descriptor = internalGetFieldAccessorTable().descriptor;
for (FieldDescriptor field : descriptor.getFields()) {
if (field.isRepeated()) {
List value = (List) getField(field);
if (!value.isEmpty()) {
result.put(field, value);
}
} else {
if (hasField(field)) {
result.put(field, getField(field));
}
}
}
return result;
}
public boolean isInitialized() {
for (FieldDescriptor field : getDescriptorForType().getFields()) {
// Check that all required fields are present.
if (field.isRequired()) {
if (!hasField(field)) {
return false;
}
}
// Check that embedded messages are initialized.
if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
if (field.isRepeated()) {
@SuppressWarnings("unchecked")
List<Message> messageList = (List<Message>) getField(field);
for (Message element : messageList) {
if (!element.isInitialized()) {
return false;
}
}
} else {
if (hasField(field) && !((Message) getField(field)).isInitialized()) {
return false;
}
}
}
}
return true;
}
  /**
   * Returns an unmodifiable view over a freshly built map of all set fields
   * (see getAllFieldsMutable()).
   */
  public Map<FieldDescriptor, Object> getAllFields() {
    return Collections.unmodifiableMap(getAllFieldsMutable());
  }
  /** Returns whether the given singular field is set, via the accessor table. */
  public boolean hasField(Descriptors.FieldDescriptor field) {
    return internalGetFieldAccessorTable().getField(field).has(this);
  }
  /** Returns the value of the given field, via the accessor table. */
  public Object getField(FieldDescriptor field) {
    return internalGetFieldAccessorTable().getField(field).get(this);
  }
  /** Returns the number of elements of the given repeated field. */
  public int getRepeatedFieldCount(FieldDescriptor field) {
    return internalGetFieldAccessorTable().getField(field)
      .getRepeatedCount(this);
  }
  /** Returns one element of the given repeated field. */
  public Object getRepeatedField(FieldDescriptor field, int index) {
    return internalGetFieldAccessorTable().getField(field)
      .getRepeated(this, index);
  }
  /** Returns the fields that were present on the wire but unknown to the descriptor. */
  public final UnknownFieldSet getUnknownFields() {
    return unknownFields;
  }
@SuppressWarnings("unchecked")
public abstract static class Builder <BuilderType extends Builder>
extends AbstractMessage.Builder<BuilderType> {
protected Builder() {}
/**
* Get the message being built. We don't just pass this to the
* constructor because it becomes null when build() is called.
*/
protected abstract GeneratedMessage internalGetResult();
/**
* Get the FieldAccessorTable for this type. We can't have the message
* class pass this in to the constructor because of bootstrapping trouble
* with DescriptorProtos.
*/
private FieldAccessorTable internalGetFieldAccessorTable() {
return internalGetResult().internalGetFieldAccessorTable();
}
public BuilderType mergeFrom(Message other) {
if (other.getDescriptorForType() !=
internalGetFieldAccessorTable().descriptor) {
throw new IllegalArgumentException("Message type mismatch.");
}
for (Map.Entry<FieldDescriptor, Object> entry :
other.getAllFields().entrySet()) {
FieldDescriptor field = entry.getKey();
if (field.isRepeated()) {
// Concatenate repeated fields.
for (Object element : (List) entry.getValue()) {
addRepeatedField(field, element);
}
} else if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE &&
hasField(field)) {
// Merge singular embedded messages.
Message oldValue = (Message) getField(field);
setField(field,
oldValue.newBuilderForType()
.mergeFrom(oldValue)
.mergeFrom((Message) entry.getValue())
.buildPartial());
} else {
// Just overwrite.
setField(field, entry.getValue());
}
}
return (BuilderType) this;
}
public Descriptor getDescriptorForType() {
return internalGetFieldAccessorTable().descriptor;
}
public Map<Descriptors.FieldDescriptor, Object> getAllFields() {
return internalGetResult().getAllFields();
}
public Message.Builder newBuilderForField(
Descriptors.FieldDescriptor field) {
return internalGetFieldAccessorTable().getField(field).newBuilder();
}
public boolean hasField(Descriptors.FieldDescriptor field) {
return internalGetResult().hasField(field);
}
public Object getField(Descriptors.FieldDescriptor field) {
if (field.isRepeated()) {
// The underlying list object is still modifiable at this point.
// Make sure not to expose the modifiable list to the caller.
return Collections.unmodifiableList(
(List) internalGetResult().getField(field));
} else {
return internalGetResult().getField(field);
}
}
public BuilderType setField(Descriptors.FieldDescriptor field,
Object value) {
internalGetFieldAccessorTable().getField(field).set(this, value);
return (BuilderType) this;
}
public BuilderType clearField(Descriptors.FieldDescriptor field) {
internalGetFieldAccessorTable().getField(field).clear(this);
return (BuilderType) this;
}
public int getRepeatedFieldCount(Descriptors.FieldDescriptor field) {
return internalGetResult().getRepeatedFieldCount(field);
}
public Object getRepeatedField(Descriptors.FieldDescriptor field,
int index) {
return internalGetResult().getRepeatedField(field, index);
}
public BuilderType setRepeatedField(Descriptors.FieldDescriptor field,
int index, Object value) {
internalGetFieldAccessorTable().getField(field)
.setRepeated(this, index, value);
return (BuilderType) this;
}
public BuilderType addRepeatedField(Descriptors.FieldDescriptor field,
Object value) {
internalGetFieldAccessorTable().getField(field).addRepeated(this, value);
return (BuilderType) this;
}
public final UnknownFieldSet getUnknownFields() {
return internalGetResult().unknownFields;
}
public final BuilderType setUnknownFields(UnknownFieldSet unknownFields) {
internalGetResult().unknownFields = unknownFields;
return (BuilderType) this;
}
public final BuilderType mergeUnknownFields(UnknownFieldSet unknownFields) {
GeneratedMessage result = internalGetResult();
result.unknownFields =
UnknownFieldSet.newBuilder(result.unknownFields)
.mergeFrom(unknownFields)
.build();
return (BuilderType) this;
}
public boolean isInitialized() {
return internalGetResult().isInitialized();
}
/**
* Called by subclasses to parse an unknown field.
* @return {@code true} unless the tag is an end-group tag.
*/
protected boolean parseUnknownField(CodedInputStream input,
UnknownFieldSet.Builder unknownFields,
ExtensionRegistry extensionRegistry,
int tag)
throws IOException {
return unknownFields.mergeFieldFrom(tag, input);
}
/**
* Adds the {@code values} to the {@code list}.
*
* @throws NullPointerException if any of the elements of {@code values} is
* null.
*/
protected <T> void addAll(Iterable<T> values, Collection<? super T> list) {
for (T value : values) {
if (value == null) {
throw new NullPointerException();
}
}
if (values instanceof Collection) {
@SuppressWarnings("unsafe")
Collection<T> collection = (Collection<T>) values;
list.addAll(collection);
} else {
for (T value : values) {
list.add(value);
}
}
}
}
// =================================================================
// Extensions-related stuff
/**
* Generated message classes for message types that contain extension ranges
* subclass this.
*
* <p>This class implements type-safe accessors for extensions. They
* implement all the same operations that you can do with normal fields --
* e.g. "has", "get", and "getCount" -- but for extensions. The extensions
* are identified using instances of the class {@link GeneratedExtension};
* the protocol compiler generates a static instance of this class for every
* extension in its input. Through the magic of generics, all is made
* type-safe.
*
* <p>For example, imagine you have the {@code .proto} file:
*
* <pre>
* option java_class = "MyProto";
*
* message Foo {
* extensions 1000 to max;
* }
*
* extend Foo {
* optional int32 bar;
* }
* </pre>
*
* <p>Then you might write code like:
*
* <pre>
* MyProto.Foo foo = getFoo();
* int i = foo.getExtension(MyProto.bar);
* </pre>
*
* <p>See also {@link ExtendableBuilder}.
*/
public abstract static class ExtendableMessage<
MessageType extends ExtendableMessage>
extends GeneratedMessage {
protected ExtendableMessage() {}
private final FieldSet extensions = FieldSet.newFieldSet();
private final void verifyExtensionContainingType(
GeneratedExtension<MessageType, ?> extension) {
if (extension.getDescriptor().getContainingType() !=
getDescriptorForType()) {
// This can only happen if someone uses unchecked operations.
throw new IllegalArgumentException(
"Extension is for type \"" +
extension.getDescriptor().getContainingType().getFullName() +
"\" which does not match message type \"" +
getDescriptorForType().getFullName() + "\".");
}
}
/** Check if a singular extension is present. */
public final boolean hasExtension(
GeneratedExtension<MessageType, ?> extension) {
verifyExtensionContainingType(extension);
return extensions.hasField(extension.getDescriptor());
}
/** Get the number of elements in a repeated extension. */
public final <Type> int getExtensionCount(
GeneratedExtension<MessageType, List<Type>> extension) {
verifyExtensionContainingType(extension);
return extensions.getRepeatedFieldCount(extension.getDescriptor());
}
/** Get the value of an extension. */
@SuppressWarnings("unchecked")
public final <Type> Type getExtension(
GeneratedExtension<MessageType, Type> extension) {
verifyExtensionContainingType(extension);
Object value = extensions.getField(extension.getDescriptor());
if (value == null) {
return (Type) extension.getMessageDefaultInstance();
} else {
return (Type) extension.fromReflectionType(value);
}
}
/** Get one element of a repeated extension. */
@SuppressWarnings("unchecked")
public final <Type> Type getExtension(
GeneratedExtension<MessageType, List<Type>> extension, int index) {
verifyExtensionContainingType(extension);
return (Type) extension.singularFromReflectionType(
extensions.getRepeatedField(extension.getDescriptor(), index));
}
/** Called by subclasses to check if all extensions are initialized. */
protected boolean extensionsAreInitialized() {
return extensions.isInitialized();
}
public boolean isInitialized() {
return super.isInitialized() && extensionsAreInitialized();
}
/**
* Used by subclasses to serialize extensions. Extension ranges may be
* interleaved with field numbers, but we must write them in canonical
* (sorted by field number) order. ExtensionWriter helps us write
* individual ranges of extensions at once.
*/
protected class ExtensionWriter {
// Imagine how much simpler this code would be if Java iterators had
// a way to get the next element without advancing the iterator.
final Iterator<Map.Entry<FieldDescriptor, Object>> iter =
extensions.iterator();
Map.Entry<FieldDescriptor, Object> next = null;
private ExtensionWriter() {
if (iter.hasNext()) {
next = iter.next();
}
}
public void writeUntil(int end, CodedOutputStream output)
throws IOException {
while (next != null && next.getKey().getNumber() < end) {
extensions.writeField(next.getKey(), next.getValue(), output);
if (iter.hasNext()) {
next = iter.next();
} else {
next = null;
}
}
}
}
protected ExtensionWriter newExtensionWriter() {
return new ExtensionWriter();
}
/** Called by subclasses to compute the size of extensions. */
protected int extensionsSerializedSize() {
return extensions.getSerializedSize();
}
// ---------------------------------------------------------------
// Reflection
public Map<Descriptors.FieldDescriptor, Object> getAllFields() {
Map<FieldDescriptor, Object> result = super.getAllFieldsMutable();
result.putAll(extensions.getAllFields());
return Collections.unmodifiableMap(result);
}
public boolean hasField(FieldDescriptor field) {
if (field.isExtension()) {
verifyContainingType(field);
return extensions.hasField(field);
} else {
return super.hasField(field);
}
}
public Object getField(FieldDescriptor field) {
if (field.isExtension()) {
verifyContainingType(field);
Object value = extensions.getField(field);
if (value == null) {
// Lacking an ExtensionRegistry, we have no way to determine the
// extension's real type, so we return a DynamicMessage.
return DynamicMessage.getDefaultInstance(field.getMessageType());
} else {
return value;
}
} else {
return super.getField(field);
}
}
public int getRepeatedFieldCount(FieldDescriptor field) {
if (field.isExtension()) {
verifyContainingType(field);
return extensions.getRepeatedFieldCount(field);
} else {
return super.getRepeatedFieldCount(field);
}
}
public Object getRepeatedField(FieldDescriptor field, int index) {
if (field.isExtension()) {
verifyContainingType(field);
return extensions.getRepeatedField(field, index);
} else {
return super.getRepeatedField(field, index);
}
}
private void verifyContainingType(FieldDescriptor field) {
if (field.getContainingType() != getDescriptorForType()) {
throw new IllegalArgumentException(
"FieldDescriptor does not match message type.");
}
}
}
/**
* Generated message builders for message types that contain extension ranges
* subclass this.
*
* <p>This class implements type-safe accessors for extensions. They
* implement all the same operations that you can do with normal fields --
* e.g. "get", "set", and "add" -- but for extensions. The extensions are
* identified using instances of the class {@link GeneratedExtension}; the
* protocol compiler generates a static instance of this class for every
* extension in its input. Through the magic of generics, all is made
* type-safe.
*
* <p>For example, imagine you have the {@code .proto} file:
*
* <pre>
* option java_class = "MyProto";
*
* message Foo {
* extensions 1000 to max;
* }
*
* extend Foo {
* optional int32 bar;
* }
* </pre>
*
* <p>Then you might write code like:
*
* <pre>
* MyProto.Foo foo =
* MyProto.Foo.newBuilder()
* .setExtension(MyProto.bar, 123)
* .build();
* </pre>
*
* <p>See also {@link ExtendableMessage}.
*/
@SuppressWarnings("unchecked")
public abstract static class ExtendableBuilder<
MessageType extends ExtendableMessage,
BuilderType extends ExtendableBuilder>
extends GeneratedMessage.Builder<BuilderType> {
    /** For use by generated subclasses only. */
    protected ExtendableBuilder() {}

    // Covariant override of Builder.internalGetResult(): the result of an
    // extendable builder is an extendable message.
    protected abstract ExtendableMessage<MessageType> internalGetResult();
    /** Check if a singular extension is present (delegates to the message under construction). */
    public final boolean hasExtension(
        GeneratedExtension<MessageType, ?> extension) {
      return internalGetResult().hasExtension(extension);
    }
    /** Get the number of elements in a repeated extension (delegates to the message). */
    public final <Type> int getExtensionCount(
        GeneratedExtension<MessageType, List<Type>> extension) {
      return internalGetResult().getExtensionCount(extension);
    }
    /** Get the value of an extension (delegates to the message under construction). */
    public final <Type> Type getExtension(
        GeneratedExtension<MessageType, Type> extension) {
      return internalGetResult().getExtension(extension);
    }
    /** Get one element of a repeated extension (delegates to the message under construction). */
    public final <Type> Type getExtension(
        GeneratedExtension<MessageType, List<Type>> extension, int index) {
      return internalGetResult().getExtension(extension, index);
    }
    /**
     * Set the value of an extension.
     *
     * @throws IllegalArgumentException if the extension does not belong to
     *     this message type (via verifyExtensionContainingType).
     */
    public final <Type> BuilderType setExtension(
        GeneratedExtension<MessageType, Type> extension, Type value) {
      ExtendableMessage<MessageType> message = internalGetResult();
      message.verifyExtensionContainingType(extension);
      // Convert from the caller-facing type to the reflection representation
      // before storing.
      message.extensions.setField(extension.getDescriptor(),
        extension.toReflectionType(value));
      return (BuilderType) this;
    }
    /**
     * Set the value of one element of a repeated extension.
     *
     * @throws IllegalArgumentException if the extension does not belong to
     *     this message type (via verifyExtensionContainingType).
     */
    public final <Type> BuilderType setExtension(
        GeneratedExtension<MessageType, List<Type>> extension,
        int index, Type value) {
      ExtendableMessage<MessageType> message = internalGetResult();
      message.verifyExtensionContainingType(extension);
      // Single elements are converted with the singular conversion, not the
      // list-level one.
      message.extensions.setRepeatedField(
        extension.getDescriptor(), index,
        extension.singularToReflectionType(value));
      return (BuilderType) this;
    }
    /**
     * Append a value to a repeated extension.
     *
     * @throws IllegalArgumentException if the extension does not belong to
     *     this message type (via verifyExtensionContainingType).
     */
    public final <Type> BuilderType addExtension(
        GeneratedExtension<MessageType, List<Type>> extension, Type value) {
      ExtendableMessage<MessageType> message = internalGetResult();
      message.verifyExtensionContainingType(extension);
      message.extensions.addRepeatedField(
        extension.getDescriptor(), extension.singularToReflectionType(value));
      return (BuilderType) this;
    }
    /**
     * Clear an extension.
     *
     * @throws IllegalArgumentException if the extension does not belong to
     *     this message type (via verifyExtensionContainingType).
     */
    public final <Type> BuilderType clearExtension(
        GeneratedExtension<MessageType, ?> extension) {
      ExtendableMessage<MessageType> message = internalGetResult();
      message.verifyExtensionContainingType(extension);
      message.extensions.clearField(extension.getDescriptor());
      return (BuilderType) this;
    }
/**
* Called by subclasses to parse an unknown field or an extension.
* @return {@code true} unless the tag is an end-group tag.
*/
protected boolean parseUnknownField(CodedInputStream input,
UnknownFieldSet.Builder unknownFields,
ExtensionRegistry extensionRegistry,
int tag)
throws IOException {
ExtendableMessage<MessageType> message = internalGetResult();
return message.extensions.mergeFieldFrom(
input, unknownFields, extensionRegistry, this, tag);
}
// ---------------------------------------------------------------
// Reflection
// We don't have to override the get*() methods here because they already
// just forward to the underlying message.
public BuilderType setField(FieldDescriptor field, Object value) {
if (field.isExtension()) {
ExtendableMessage<MessageType> message = internalGetResult();
message.verifyContainingType(field);
message.extensions.setField(field, value);
return (BuilderType) this;
} else {
return super.setField(field, value);
}
}
public BuilderType clearField(Descriptors.FieldDescriptor field) {
if (field.isExtension()) {
ExtendableMessage<MessageType> message = internalGetResult();
message.verifyContainingType(field);
message.extensions.clearField(field);
return (BuilderType) this;
} else {
return super.clearField(field);
}
}
public BuilderType setRepeatedField(Descriptors.FieldDescriptor field,
int index, Object value) {
if (field.isExtension()) {
ExtendableMessage<MessageType> message = internalGetResult();
message.verifyContainingType(field);
message.extensions.setRepeatedField(field, index, value);
return (BuilderType) this;
} else {
return super.setRepeatedField(field, index, value);
}
}
public BuilderType addRepeatedField(Descriptors.FieldDescriptor field,
Object value) {
if (field.isExtension()) {
ExtendableMessage<MessageType> message = internalGetResult();
message.verifyContainingType(field);
message.extensions.addRepeatedField(field, value);
return (BuilderType) this;
} else {
return super.addRepeatedField(field, value);
}
}
protected final void mergeExtensionFields(ExtendableMessage other) {
internalGetResult().extensions.mergeFrom(other.extensions);
}
}
// -----------------------------------------------------------------
/** For use by generated code only. */
public static <ContainingType extends Message, Type>
GeneratedExtension<ContainingType, Type>
newGeneratedExtension(FieldDescriptor descriptor, Class<Type> type) {
if (descriptor.isRepeated()) {
throw new IllegalArgumentException(
"Must call newRepeatedGeneratedExtension() for repeated types.");
}
return new GeneratedExtension<ContainingType, Type>(descriptor, type);
}
/** For use by generated code only. */
public static <ContainingType extends Message, Type>
GeneratedExtension<ContainingType, List<Type>>
newRepeatedGeneratedExtension(
FieldDescriptor descriptor, Class<Type> type) {
if (!descriptor.isRepeated()) {
throw new IllegalArgumentException(
"Must call newGeneratedExtension() for non-repeated types.");
}
return new GeneratedExtension<ContainingType, List<Type>>(descriptor, type);
}
  /**
   * Type used to represent generated extensions.  The protocol compiler
   * generates a static singleton instance of this class for each extension.
   *
   * <p>For example, imagine you have the {@code .proto} file:
   *
   * <pre>
   * option java_class = "MyProto";
   *
   * message Foo {
   *   extensions 1000 to max;
   * }
   *
   * extend Foo {
   *   optional int32 bar;
   * }
   * </pre>
   *
   * <p>Then, {@code MyProto.Foo.bar} has type
   * {@code GeneratedExtension<MyProto.Foo, Integer>}.
   *
   * <p>In general, users should ignore the details of this type, and simply use
   * these static singletons as parameters to the extension accessors defined
   * in {@link ExtendableMessage} and {@link ExtendableBuilder}.
   */
  public static final class GeneratedExtension<
      ContainingType extends Message, Type> {
    // TODO(kenton): Find ways to avoid using Java reflection within this
    // class. Also try to avoid suppressing unchecked warnings.

    // Constructed only through newGeneratedExtension() /
    // newRepeatedGeneratedExtension(), which validate cardinality.
    private GeneratedExtension(FieldDescriptor descriptor, Class type) {
      if (!descriptor.isExtension()) {
        throw new IllegalArgumentException(
          "GeneratedExtension given a regular (non-extension) field.");
      }

      this.descriptor = descriptor;
      this.type = type;

      // Pre-resolve the reflective helpers this extension will need for
      // value conversion; which ones exist depends on the field's Java type.
      switch (descriptor.getJavaType()) {
        case MESSAGE:
          enumValueOf = null;
          enumGetValueDescriptor = null;
          messageDefaultInstance =
            (Message) invokeOrDie(getMethodOrDie(type, "getDefaultInstance"),
                                  null);
          break;
        case ENUM:
          enumValueOf = getMethodOrDie(type, "valueOf",
                                       EnumValueDescriptor.class);
          enumGetValueDescriptor = getMethodOrDie(type, "getValueDescriptor");
          messageDefaultInstance = null;
          break;
        default:
          enumValueOf = null;
          enumGetValueDescriptor = null;
          messageDefaultInstance = null;
          break;
      }
    }

    // Descriptor of the extension field itself.
    private final FieldDescriptor descriptor;
    // Generated Java class of the extension's value type.
    private final Class type;
    // Non-null only for ENUM-typed extensions: static EnumType.valueOf(EnumValueDescriptor).
    private final Method enumValueOf;
    // Non-null only for ENUM-typed extensions: instance getValueDescriptor().
    private final Method enumGetValueDescriptor;
    // Non-null only for MESSAGE-typed extensions.
    private final Message messageDefaultInstance;

    /** Returns the descriptor of the extension field. */
    public FieldDescriptor getDescriptor() { return descriptor; }

    /**
     * If the extension is an embedded message or group, returns the default
     * instance of the message.
     */
    @SuppressWarnings("unchecked")
    public Message getMessageDefaultInstance() {
      return messageDefaultInstance;
    }

    /**
     * Convert from the type used by the reflection accessors to the type used
     * by native accessors.  E.g., for enums, the reflection accessors use
     * EnumValueDescriptors but the native accessors use the generated enum
     * type.
     */
    @SuppressWarnings("unchecked")
    private Object fromReflectionType(Object value) {
      if (descriptor.isRepeated()) {
        if (descriptor.getJavaType() == FieldDescriptor.JavaType.MESSAGE ||
            descriptor.getJavaType() == FieldDescriptor.JavaType.ENUM) {
          // Must convert the whole list.
          List result = new ArrayList();
          for (Object element : (List) value) {
            result.add(singularFromReflectionType(element));
          }
          return result;
        } else {
          // Scalar element types are represented identically on both sides.
          return value;
        }
      } else {
        return singularFromReflectionType(value);
      }
    }

    /**
     * Like {@link #fromReflectionType(Object)}, but if the type is a repeated
     * type, this converts a single element.
     */
    private Object singularFromReflectionType(Object value) {
      switch (descriptor.getJavaType()) {
        case MESSAGE:
          if (type.isInstance(value)) {
            return value;
          } else {
            // It seems the copy of the embedded message stored inside the
            // extended message is not of the exact type the user was
            // expecting. This can happen if a user defines a
            // GeneratedExtension manually and gives it a different type.
            // This should not happen in normal use. But, to be nice, we'll
            // copy the message to whatever type the caller was expecting.
            return messageDefaultInstance.newBuilderForType()
                     .mergeFrom((Message) value).build();
          }
        case ENUM:
          // EnumValueDescriptor -> generated enum constant.
          return invokeOrDie(enumValueOf, null, (EnumValueDescriptor) value);
        default:
          return value;
      }
    }

    /**
     * Convert from the type used by the native accessors to the type used
     * by reflection accessors.  E.g., for enums, the reflection accessors use
     * EnumValueDescriptors but the native accessors use the generated enum
     * type.
     */
    @SuppressWarnings("unchecked")
    private Object toReflectionType(Object value) {
      if (descriptor.isRepeated()) {
        if (descriptor.getJavaType() == FieldDescriptor.JavaType.ENUM) {
          // Must convert the whole list.
          List result = new ArrayList();
          for (Object element : (List) value) {
            result.add(singularToReflectionType(element));
          }
          return result;
        } else {
          // Messages and scalars already match the reflection representation.
          return value;
        }
      } else {
        return singularToReflectionType(value);
      }
    }

    /**
     * Like {@link #toReflectionType(Object)}, but if the type is a repeated
     * type, this converts a single element.
     */
    private Object singularToReflectionType(Object value) {
      switch (descriptor.getJavaType()) {
        case ENUM:
          // Generated enum constant -> EnumValueDescriptor.
          return invokeOrDie(enumGetValueDescriptor, value);
        default:
          return value;
      }
    }
  }
// =================================================================
/** Calls Class.getMethod and throws a RuntimeException if it fails. */
@SuppressWarnings("unchecked")
private static Method getMethodOrDie(
Class clazz, String name, Class... params) {
try {
return clazz.getMethod(name, params);
} catch (NoSuchMethodException e) {
throw new RuntimeException(
"Generated message class \"" + clazz.getName() +
"\" missing method \"" + name + "\".", e);
}
}
/** Calls invoke and throws a RuntimeException if it fails. */
private static Object invokeOrDie(
Method method, Object object, Object... params) {
try {
return method.invoke(object, params);
} catch (IllegalAccessException e) {
throw new RuntimeException(
"Couldn't use Java reflection to implement protocol message " +
"reflection.", e);
} catch (java.lang.reflect.InvocationTargetException e) {
Throwable cause = e.getCause();
if (cause instanceof RuntimeException) {
throw (RuntimeException) cause;
} else if (cause instanceof Error) {
throw (Error) cause;
} else {
throw new RuntimeException(
"Unexpected exception thrown by generated accessor method.", cause);
}
}
}
  /**
   * Users should ignore this class.  This class provides the implementation
   * with access to the fields of a message object using Java reflection.
   */
  public static final class FieldAccessorTable {
    /**
     * Construct a FieldAccessorTable for a particular message class.  Only
     * one FieldAccessorTable should ever be constructed per class.
     *
     * @param descriptor The type's descriptor.
     * @param camelCaseNames The camelcase names of all fields in the message.
     *   These are used to derive the accessor method names.
     * @param messageClass The message type.
     * @param builderClass The builder type.
     */
    public FieldAccessorTable(
        Descriptor descriptor,
        String[] camelCaseNames,
        Class<? extends GeneratedMessage> messageClass,
        Class<? extends GeneratedMessage.Builder> builderClass) {
      this.descriptor = descriptor;
      fields = new FieldAccessor[descriptor.getFields().size()];

      // Choose a specialized accessor for each field based on cardinality
      // (singular vs. repeated) and Java type (message / enum / other).
      for (int i = 0; i < fields.length; i++) {
        FieldDescriptor field = descriptor.getFields().get(i);
        if (field.isRepeated()) {
          if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
            fields[i] = new RepeatedMessageFieldAccessor(
              field, camelCaseNames[i], messageClass, builderClass);
          } else if (field.getJavaType() == FieldDescriptor.JavaType.ENUM) {
            fields[i] = new RepeatedEnumFieldAccessor(
              field, camelCaseNames[i], messageClass, builderClass);
          } else {
            fields[i] = new RepeatedFieldAccessor(
              field, camelCaseNames[i], messageClass, builderClass);
          }
        } else {
          if (field.getJavaType() == FieldDescriptor.JavaType.MESSAGE) {
            fields[i] = new SingularMessageFieldAccessor(
              field, camelCaseNames[i], messageClass, builderClass);
          } else if (field.getJavaType() == FieldDescriptor.JavaType.ENUM) {
            fields[i] = new SingularEnumFieldAccessor(
              field, camelCaseNames[i], messageClass, builderClass);
          } else {
            fields[i] = new SingularFieldAccessor(
              field, camelCaseNames[i], messageClass, builderClass);
          }
        }
      }
    }

    // The message type this table describes.
    private final Descriptor descriptor;
    // One accessor per field, indexed by FieldDescriptor.getIndex().
    private final FieldAccessor[] fields;

    /** Get the FieldAccessor for a particular field. */
    private FieldAccessor getField(FieldDescriptor field) {
      if (field.getContainingType() != descriptor) {
        throw new IllegalArgumentException(
          "FieldDescriptor does not match message type.");
      } else if (field.isExtension()) {
        // If this type had extensions, it would subclass ExtendableMessage,
        // which overrides the reflection interface to handle extensions.
        throw new IllegalArgumentException(
          "This type does not have extensions.");
      }
      return fields[field.getIndex()];
    }

    /**
     * Abstract interface that provides access to a single field.  This is
     * implemented differently depending on the field type and cardinality.
     * Singular implementations throw UnsupportedOperationException from the
     * repeated-field operations and vice versa.
     */
    private static interface FieldAccessor {
      Object get(GeneratedMessage message);
      void set(GeneratedMessage.Builder builder, Object value);
      Object getRepeated(GeneratedMessage message, int index);
      void setRepeated(GeneratedMessage.Builder builder,
                       int index, Object value);
      void addRepeated(GeneratedMessage.Builder builder, Object value);
      boolean has(GeneratedMessage message);
      int getRepeatedCount(GeneratedMessage message);
      void clear(GeneratedMessage.Builder builder);
      Message.Builder newBuilder();
    }

    // ---------------------------------------------------------------

    /** Accessor for singular fields: forwards to get/set/has/clear methods. */
    private static class SingularFieldAccessor implements FieldAccessor {
      SingularFieldAccessor(
          FieldDescriptor descriptor, String camelCaseName,
          Class<? extends GeneratedMessage> messageClass,
          Class<? extends GeneratedMessage.Builder> builderClass) {
        getMethod = getMethodOrDie(messageClass, "get" + camelCaseName);
        // The getter's return type determines the setter's parameter type.
        type = getMethod.getReturnType();
        setMethod = getMethodOrDie(builderClass, "set" + camelCaseName, type);
        hasMethod =
          getMethodOrDie(messageClass, "has" + camelCaseName);
        clearMethod = getMethodOrDie(builderClass, "clear" + camelCaseName);
      }

      // Note: We use Java reflection to call public methods rather than
      // access private fields directly as this avoids runtime security
      // checks.
      Class type;
      Method getMethod;
      Method setMethod;
      Method hasMethod;
      Method clearMethod;

      public Object get(GeneratedMessage message) {
        return invokeOrDie(getMethod, message);
      }
      public void set(GeneratedMessage.Builder builder, Object value) {
        invokeOrDie(setMethod, builder, value);
      }
      public Object getRepeated(GeneratedMessage message, int index) {
        throw new UnsupportedOperationException(
          "getRepeatedField() called on a singular field.");
      }
      public void setRepeated(GeneratedMessage.Builder builder,
                              int index, Object value) {
        throw new UnsupportedOperationException(
          "setRepeatedField() called on a singular field.");
      }
      public void addRepeated(GeneratedMessage.Builder builder, Object value) {
        throw new UnsupportedOperationException(
          "addRepeatedField() called on a singular field.");
      }
      public boolean has(GeneratedMessage message) {
        return (Boolean) invokeOrDie(hasMethod, message);
      }
      public int getRepeatedCount(GeneratedMessage message) {
        throw new UnsupportedOperationException(
          "getRepeatedFieldSize() called on a singular field.");
      }
      public void clear(GeneratedMessage.Builder builder) {
        invokeOrDie(clearMethod, builder);
      }
      public Message.Builder newBuilder() {
        throw new UnsupportedOperationException(
          "newBuilderForField() called on a non-Message type.");
      }
    }

    /** Accessor for repeated fields: forwards to the indexed/list accessors. */
    private static class RepeatedFieldAccessor implements FieldAccessor {
      RepeatedFieldAccessor(
          FieldDescriptor descriptor, String camelCaseName,
          Class<? extends GeneratedMessage> messageClass,
          Class<? extends GeneratedMessage.Builder> builderClass) {
        getMethod = getMethodOrDie(messageClass, "get" + camelCaseName + "List");
        getRepeatedMethod =
          getMethodOrDie(messageClass, "get" + camelCaseName, Integer.TYPE);
        // The element type comes from the indexed getter's return type.
        type = getRepeatedMethod.getReturnType();
        setRepeatedMethod =
          getMethodOrDie(builderClass, "set" + camelCaseName,
                         Integer.TYPE, type);
        addRepeatedMethod =
          getMethodOrDie(builderClass, "add" + camelCaseName, type);
        getCountMethod =
          getMethodOrDie(messageClass, "get" + camelCaseName + "Count");
        clearMethod = getMethodOrDie(builderClass, "clear" + camelCaseName);
      }

      Class type;
      Method getMethod;
      Method getRepeatedMethod;
      Method setRepeatedMethod;
      Method addRepeatedMethod;
      Method getCountMethod;
      Method clearMethod;

      public Object get(GeneratedMessage message) {
        return invokeOrDie(getMethod, message);
      }
      public void set(GeneratedMessage.Builder builder, Object value) {
        // Add all the elements individually. This serves two purposes:
        // 1) Verifies that each element has the correct type.
        // 2) Insures that the caller cannot modify the list later on and
        // have the modifications be reflected in the message.
        clear(builder);
        for (Object element : (List) value) {
          addRepeated(builder, element);
        }
      }
      public Object getRepeated(GeneratedMessage message, int index) {
        return invokeOrDie(getRepeatedMethod, message, index);
      }
      public void setRepeated(GeneratedMessage.Builder builder,
                              int index, Object value) {
        invokeOrDie(setRepeatedMethod, builder, index, value);
      }
      public void addRepeated(GeneratedMessage.Builder builder, Object value) {
        invokeOrDie(addRepeatedMethod, builder, value);
      }
      public boolean has(GeneratedMessage message) {
        throw new UnsupportedOperationException(
          "hasField() called on a singular field.");
      }
      public int getRepeatedCount(GeneratedMessage message) {
        return (Integer) invokeOrDie(getCountMethod, message);
      }
      public void clear(GeneratedMessage.Builder builder) {
        invokeOrDie(clearMethod, builder);
      }
      public Message.Builder newBuilder() {
        throw new UnsupportedOperationException(
          "newBuilderForField() called on a non-Message type.");
      }
    }

    // ---------------------------------------------------------------

    /**
     * Accessor for singular enum fields.  Converts between the generated enum
     * type used by the native accessors and the EnumValueDescriptor used by
     * the reflection interface.
     */
    private static final class SingularEnumFieldAccessor
        extends SingularFieldAccessor {
      SingularEnumFieldAccessor(
          FieldDescriptor descriptor, String camelCaseName,
          Class<? extends GeneratedMessage> messageClass,
          Class<? extends GeneratedMessage.Builder> builderClass) {
        super(descriptor, camelCaseName, messageClass, builderClass);

        valueOfMethod = getMethodOrDie(type, "valueOf",
                                       EnumValueDescriptor.class);
        getValueDescriptorMethod =
          getMethodOrDie(type, "getValueDescriptor");
      }

      private Method valueOfMethod;
      private Method getValueDescriptorMethod;

      public Object get(GeneratedMessage message) {
        return invokeOrDie(getValueDescriptorMethod, super.get(message));
      }
      public void set(GeneratedMessage.Builder builder, Object value) {
        super.set(builder, invokeOrDie(valueOfMethod, null, value));
      }
    }

    /**
     * Accessor for repeated enum fields.  Converts between the generated enum
     * type and EnumValueDescriptor, element by element.
     */
    private static final class RepeatedEnumFieldAccessor
        extends RepeatedFieldAccessor {
      RepeatedEnumFieldAccessor(
          FieldDescriptor descriptor, String camelCaseName,
          Class<? extends GeneratedMessage> messageClass,
          Class<? extends GeneratedMessage.Builder> builderClass) {
        super(descriptor, camelCaseName, messageClass, builderClass);

        valueOfMethod = getMethodOrDie(type, "valueOf",
                                       EnumValueDescriptor.class);
        getValueDescriptorMethod =
          getMethodOrDie(type, "getValueDescriptor");
      }

      private Method valueOfMethod;
      private Method getValueDescriptorMethod;

      @SuppressWarnings("unchecked")
      public Object get(GeneratedMessage message) {
        // Convert each generated-enum element to its EnumValueDescriptor.
        List newList = new ArrayList();
        for (Object element : (List) super.get(message)) {
          newList.add(invokeOrDie(getValueDescriptorMethod, element));
        }
        return Collections.unmodifiableList(newList);
      }
      public Object getRepeated(GeneratedMessage message, int index) {
        return invokeOrDie(getValueDescriptorMethod,
                           super.getRepeated(message, index));
      }
      public void setRepeated(GeneratedMessage.Builder builder,
                              int index, Object value) {
        super.setRepeated(builder, index, invokeOrDie(valueOfMethod, null, value));
      }
      public void addRepeated(GeneratedMessage.Builder builder, Object value) {
        super.addRepeated(builder, invokeOrDie(valueOfMethod, null, value));
      }
    }

    // ---------------------------------------------------------------

    /**
     * Accessor for singular message fields.  Coerces alternative Message
     * implementations (e.g. DynamicMessage) to the generated type on set.
     */
    private static final class SingularMessageFieldAccessor
        extends SingularFieldAccessor {
      SingularMessageFieldAccessor(
          FieldDescriptor descriptor, String camelCaseName,
          Class<? extends GeneratedMessage> messageClass,
          Class<? extends GeneratedMessage.Builder> builderClass) {
        super(descriptor, camelCaseName, messageClass, builderClass);

        newBuilderMethod = getMethodOrDie(type, "newBuilder");
      }

      private Method newBuilderMethod;

      private Object coerceType(Object value) {
        if (type.isInstance(value)) {
          return value;
        } else {
          // The value is not the exact right message type. However, if it
          // is an alternative implementation of the same type -- e.g. a
          // DynamicMessage -- we should accept it. In this case we can make
          // a copy of the message.
          return ((Message.Builder) invokeOrDie(newBuilderMethod, null))
                   .mergeFrom((Message) value).build();
        }
      }

      public void set(GeneratedMessage.Builder builder, Object value) {
        super.set(builder, coerceType(value));
      }
      public Message.Builder newBuilder() {
        return (Message.Builder) invokeOrDie(newBuilderMethod, null);
      }
    }

    /**
     * Accessor for repeated message fields.  Coerces alternative Message
     * implementations to the generated type on set/add.
     */
    private static final class RepeatedMessageFieldAccessor
        extends RepeatedFieldAccessor {
      RepeatedMessageFieldAccessor(
          FieldDescriptor descriptor, String camelCaseName,
          Class<? extends GeneratedMessage> messageClass,
          Class<? extends GeneratedMessage.Builder> builderClass) {
        super(descriptor, camelCaseName, messageClass, builderClass);

        newBuilderMethod = getMethodOrDie(type, "newBuilder");
      }

      private Method newBuilderMethod;

      private Object coerceType(Object value) {
        if (type.isInstance(value)) {
          return value;
        } else {
          // The value is not the exact right message type. However, if it
          // is an alternative implementation of the same type -- e.g. a
          // DynamicMessage -- we should accept it. In this case we can make
          // a copy of the message.
          return ((Message.Builder) invokeOrDie(newBuilderMethod, null))
                   .mergeFrom((Message) value).build();
        }
      }

      public void setRepeated(GeneratedMessage.Builder builder,
                              int index, Object value) {
        super.setRepeated(builder, index, coerceType(value));
      }
      public void addRepeated(GeneratedMessage.Builder builder, Object value) {
        super.addRepeated(builder, coerceType(value));
      }
      public Message.Builder newBuilder() {
        return (Message.Builder) invokeOrDie(newBuilderMethod, null);
      }
    }
  }
}
| |
/**
* Copyright Pravega Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pravega.common;
import com.google.common.base.Preconditions;
import java.util.Collection;
import java.util.Map;
import java.util.concurrent.CompletionException;
import java.util.concurrent.ExecutionException;
import lombok.SneakyThrows;
/**
* Helper methods that perform various checks and throw exceptions if certain conditions are met.
*/
public final class Exceptions {
/**
* Throws any throwable 'sneakily' - you don't need to catch it, nor declare that you throw it onwards.
* The exception is still thrown - javac will just stop whining about it.
* <p>
* Example usage:
* <pre>public void run() {
* throw sneakyThrow(new IOException("You don't need to catch me!"));
* }</pre>
* <p>
* NB: The exception is not wrapped, ignored, swallowed, or redefined. The JVM actually does not know or care
* about the concept of a 'checked exception'. All this method does is hide the act of throwing a checked exception
* from the java compiler.
* <p>
* Note that this method has a return type of {@code RuntimeException}; it is advised you always call this
* method as argument to the {@code throw} statement to avoid compiler errors regarding no return
* statement and similar problems. This method won't of course return an actual {@code RuntimeException} -
* it never returns, it always throws the provided exception.
*
* @param t The throwable to throw without requiring you to catch its type.
* @return A dummy RuntimeException; this method never returns normally, it <em>always</em> throws an exception!
*/
@SneakyThrows
public static RuntimeException sneakyThrow(Throwable t) {
throw t;
}
/**
* Determines if the given Throwable represents a fatal exception and cannot be handled.
*
* @param ex The Throwable to inspect.
* @return True if a fatal error which must be rethrown, false otherwise (it can be handled in a catch block).
*/
public static boolean mustRethrow(Throwable ex) {
return ex instanceof VirtualMachineError;
}
/**
* If the provided exception is a CompletionException or ExecutionException which need be unwrapped.
*
* @param ex The exception to be unwrapped.
* @return The cause or the exception provided.
*/
public static Throwable unwrap(Throwable ex) {
if (canInspectCause(ex)) {
Throwable cause = ex.getCause();
if (cause != null) {
return unwrap(cause);
}
}
return ex;
}
/**
* Returns true if the provided class is CompletionException or ExecutionException which need to be unwrapped.
* @param c The class to be tested
* @return True if {@link #unwrap(Throwable)} should be called on exceptions of this type
*/
public static boolean shouldUnwrap(Class<? extends Exception> c) {
return c.equals(CompletionException.class) || c.equals(ExecutionException.class);
}
private static boolean canInspectCause(Throwable ex) {
return ex instanceof CompletionException
|| ex instanceof ExecutionException;
}
@FunctionalInterface
public interface InterruptibleRun<ExceptionT extends Exception> {
void run() throws InterruptedException, ExceptionT;
}
@FunctionalInterface
public interface InterruptibleCall<ExceptionT extends Exception, ResultT> {
ResultT call() throws InterruptedException, ExceptionT;
}
/**
* Eliminates boilerplate code of catching and re-interrupting the thread.
* <p>
* NOTE: This method currently has the limitation that it can only handle functions that throw up to one additional
* exception besides {@link InterruptedException}. This is a limitation of the Compiler.
*
* @param run A method that should be run handling interrupts automatically
* @param <ExceptionT> The type of exception.
* @throws ExceptionT If thrown by run.
*/
@SneakyThrows(InterruptedException.class)
public static <ExceptionT extends Exception> void handleInterrupted(InterruptibleRun<ExceptionT> run)
throws ExceptionT {
try {
run.run();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw e;
}
}
/**
* Eliminates boilerplate code of catching and re-interrupting the thread.
* <p>
* NOTE: This method currently has the limitation that it can only handle functions that throw up to one additional
* exception besides {@link InterruptedException}. This is a limitation of the Compiler.
*
* @param call A method that should be run handling interrupts automatically
* @param <ExceptionT> The type of exception.
* @param <ResultT> The type of the result.
* @throws ExceptionT If thrown by call.
* @return The result of the call.
*/
@SneakyThrows(InterruptedException.class)
public static <ExceptionT extends Exception, ResultT> ResultT handleInterruptedCall(InterruptibleCall<ExceptionT, ResultT> call)
throws ExceptionT {
try {
return call.call();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw e;
}
}
/**
* Throws a NullPointerException if the arg argument is null. Throws an IllegalArgumentException if the String arg
* argument has a length of zero.
*
* @param arg The argument to check.
* @param argName The name of the argument (to be included in the exception message).
* @return The arg.
* @throws NullPointerException If arg is null.
* @throws IllegalArgumentException If arg is not null, but has a length of zero.
*/
public static String checkNotNullOrEmpty(String arg, String argName) throws NullPointerException, IllegalArgumentException {
Preconditions.checkNotNull(arg, argName);
checkArgument(arg.length() > 0, argName, "Cannot be an empty string.");
return arg;
}
/**
* Throws a NullPointerException if the arg argument is null. Throws an IllegalArgumentException if the Collections arg
* argument has a size of zero.
*
* @param <T> The type of elements in the provided collection.
* @param <V> The actual type of the collection.
* @param arg The argument to check.
* @param argName The name of the argument (to be included in the exception message).
* @return The arg.
* @throws NullPointerException If arg is null.
* @throws IllegalArgumentException If arg is not null, but has a length of zero.
*/
public static <T, V extends Collection<T>> V checkNotNullOrEmpty(V arg, String argName) throws NullPointerException, IllegalArgumentException {
Preconditions.checkNotNull(arg, argName);
checkArgument(!arg.isEmpty(), argName, "Cannot be an empty collection.");
return arg;
}
/**
* Throws a NullPointerException if the arg argument is null. Throws an IllegalArgumentException
* if the Map arg argument has a size of zero.
*
* @param <K> The type of keys in the provided map.
* @param <V> The type of keys in the provided map.
* @param arg The argument to check.
* @param argName The name of the argument (to be included in the exception message).
* @return The arg.
* @throws NullPointerException If arg is null.
* @throws IllegalArgumentException If arg is not null, but has a length of zero.
*/
public static <K, V> Map<K, V> checkNotNullOrEmpty(Map<K, V> arg, String argName) throws NullPointerException,
IllegalArgumentException {
Preconditions.checkNotNull(arg, argName);
checkArgument(!arg.isEmpty(), argName, "Cannot be an empty map.");
return arg;
}
/**
* Throws an IllegalArgumentException if the validCondition argument is false.
*
* @param validCondition The result of the condition to validate.
* @param argName The name of the argument (to be included in the exception message).
* @param message The message to include in the exception. This should not include the name of the argument,
* as that is already prefixed.
* @param args Format args for message. These must correspond to String.format() args.
* @throws IllegalArgumentException If validCondition is false.
*/
public static void checkArgument(boolean validCondition, String argName, String message, Object... args) throws IllegalArgumentException {
if (!validCondition) {
throw new IllegalArgumentException(badArgumentMessage(argName, message, args));
}
}
/**
* Throws an appropriate exception if the given range is not included in the given array interval.
*
* @param startIndex The First index in the range.
* @param length The number of items in the range.
* @param arrayLength The length of the array.
* @param startIndexArgName The name of the start index argument.
* @param lengthArgName The name of the length argument.
* @throws ArrayIndexOutOfBoundsException If startIndex is less than lowBoundInclusive or if startIndex+length is
* greater than upBoundExclusive.
* @throws IllegalArgumentException If length is a negative number.
*/
public static void checkArrayRange(long startIndex, int length, long arrayLength, String startIndexArgName, String lengthArgName) throws ArrayIndexOutOfBoundsException, IllegalArgumentException {
// Check for non-negative length.
if (length < 0) {
throw new IllegalArgumentException(badArgumentMessage(lengthArgName, "length must be a non-negative integer."));
}
// Check for valid start index.
if (startIndex < 0 || startIndex >= arrayLength) {
// The only valid case here is if the range has zero elements and the array bounds also has zero elements.
if (!(startIndex == 0 && length == 0 && arrayLength == 0)) {
throw new ArrayIndexOutOfBoundsException(badStartOffsetMessage(startIndex, arrayLength, startIndexArgName));
}
}
// Check for valid end offset. Note that end offset can be equal to upBoundExclusive, because this is a range.
if (startIndex + length > arrayLength) {
throw new ArrayIndexOutOfBoundsException(badLengthMessage(startIndex, length, arrayLength, startIndexArgName, lengthArgName));
}
}
/**
* Throws an ObjectClosedException if the closed argument is true.
*
* @param closed The result of the condition to check. True if object is closed, false otherwise.
* @param targetObject The object itself.
* @throws ObjectClosedException If closed is true.
*/
public static void checkNotClosed(boolean closed, Object targetObject) throws ObjectClosedException {
if (closed) {
throw new ObjectClosedException(targetObject);
}
}
private static String badArgumentMessage(String argName, String message, Object... args) {
return argName + ": " + String.format(message, args);
}
private static String badStartOffsetMessage(long startIndex, long arrayLength, String startIndexArgName) {
return String.format("%s: value must be in interval [0, %d), given %d.", startIndexArgName, arrayLength, startIndex);
}
// Diagnostic for a range whose exclusive end (startIndex + length) exceeds arrayLength.
private static String badLengthMessage(long startIndex, int length, long arrayLength, String startIndexArgName, String lengthArgName) {
    long rangeEnd = startIndex + length;
    return String.format("%s + %s: value must be in interval [0, %d], actual %d.", startIndexArgName, lengthArgName, arrayLength, rangeEnd);
}
}
| |
/*
* Copyright 2015 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigquery;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import com.google.api.services.bigquery.model.ExplainQueryStep;
import com.google.cloud.bigquery.QueryStage.QueryStep;
import com.google.common.collect.ImmutableList;
import java.util.List;
import org.junit.Test;
/**
 * Unit tests for {@link QueryStage} and its nested {@link QueryStage.QueryStep}:
 * builder population, pb round-tripping, and equality.
 */
public class QueryStageTest {

  private static final List<String> SUBSTEPS1 = ImmutableList.of("substep1", "substep2");
  private static final List<String> SUBSTEPS2 = ImmutableList.of("substep3", "substep4");
  private static final QueryStep QUERY_STEP1 = new QueryStep("KIND", SUBSTEPS1);
  private static final QueryStep QUERY_STEP2 = new QueryStep("KIND", SUBSTEPS2);
  private static final long COMPLETED_PARALLEL_INPUTS = 3;
  private static final long COMPUTE_MS_AVG = 1234;
  private static final long COMPUTE_MS_MAX = 2345;
  private static final double COMPUTE_RATIO_AVG = 1.1;
  private static final double COMPUTE_RATIO_MAX = 2.2;
  private static final long END_MS = 1522540860000L;
  private static final long ID = 42L;
  private static final List<Long> INPUT_STAGES = ImmutableList.of(Long.valueOf(7), Long.valueOf(9));
  private static final String NAME = "StageName";
  private static final long PARALLEL_INPUTS = 4;
  private static final long READ_MS_AVG = 3456;
  private static final long READ_MS_MAX = 4567;
  private static final double READ_RATIO_AVG = 3.3;
  private static final double READ_RATIO_MAX = 4.4;
  private static final long RECORDS_READ = 5L;
  private static final long RECORDS_WRITTEN = 6L;
  private static final long SHUFFLE_OUTPUT_BYTES = 4096;
  private static final long SHUFFLE_OUTPUT_BYTES_SPILLED = 0;
  private static final long START_MS = 1522540800000L;
  private static final String STATUS = "COMPLETE";
  private static final List<QueryStep> STEPS = ImmutableList.of(QUERY_STEP1, QUERY_STEP2);
  private static final long WAIT_MS_AVG = 3333;
  private static final long WAIT_MS_MAX = 3344;
  private static final double WAIT_RATIO_AVG = 7.7;
  private static final double WAIT_RATIO_MAX = 8.8;
  private static final long WRITE_MS_AVG = 44;
  private static final long WRITE_MS_MAX = 50;
  private static final double WRITE_RATIO_AVG = 9.9;
  private static final double WRITE_RATIO_MAX = 10.10;
  private static final long SLOTMS = 1522540800000L;

  /** A fully-populated stage; every getter below is checked against its constant. */
  private static final QueryStage QUERY_STAGE =
      QueryStage.newBuilder()
          .setCompletedParallelInputs(COMPLETED_PARALLEL_INPUTS)
          .setComputeMsAvg(COMPUTE_MS_AVG)
          .setComputeMsMax(COMPUTE_MS_MAX)
          .setComputeRatioAvg(COMPUTE_RATIO_AVG)
          .setComputeRatioMax(COMPUTE_RATIO_MAX)
          .setEndMs(END_MS)
          .setGeneratedId(ID)
          .setInputStages(INPUT_STAGES)
          .setName(NAME)
          .setParallelInputs(PARALLEL_INPUTS)
          .setReadMsAvg(READ_MS_AVG)
          .setReadMsMax(READ_MS_MAX)
          .setReadRatioAvg(READ_RATIO_AVG)
          .setReadRatioMax(READ_RATIO_MAX)
          .setRecordsRead(RECORDS_READ)
          .setRecordsWritten(RECORDS_WRITTEN)
          .setShuffleOutputBytes(SHUFFLE_OUTPUT_BYTES)
          .setShuffleOutputBytesSpilled(SHUFFLE_OUTPUT_BYTES_SPILLED)
          .setStartMs(START_MS)
          .setStatus(STATUS)
          .setSteps(STEPS)
          .setWaitMsAvg(WAIT_MS_AVG)
          .setWaitMsMax(WAIT_MS_MAX)
          .setWaitRatioAvg(WAIT_RATIO_AVG)
          .setWaitRatioMax(WAIT_RATIO_MAX)
          .setWriteMsAvg(WRITE_MS_AVG)
          .setWriteMsMax(WRITE_MS_MAX)
          .setWriteRatioAvg(WRITE_RATIO_AVG)
          .setWriteRatioMax(WRITE_RATIO_MAX)
          .setSlotMs(SLOTMS)
          .build();

  @Test
  public void testQueryStepConstructor() {
    assertEquals("KIND", QUERY_STEP1.getName());
    assertEquals("KIND", QUERY_STEP2.getName());
    assertEquals(SUBSTEPS1, QUERY_STEP1.getSubsteps());
    assertEquals(SUBSTEPS2, QUERY_STEP2.getSubsteps());
  }

  @Test
  public void testBuilder() {
    assertEquals(COMPLETED_PARALLEL_INPUTS, QUERY_STAGE.getCompletedParallelInputs());
    assertEquals(COMPUTE_MS_AVG, QUERY_STAGE.getComputeMsAvg());
    assertEquals(COMPUTE_MS_MAX, QUERY_STAGE.getComputeMsMax());
    assertEquals(COMPUTE_RATIO_AVG, QUERY_STAGE.getComputeRatioAvg(), 0);
    assertEquals(COMPUTE_RATIO_MAX, QUERY_STAGE.getComputeRatioMax(), 0);
    assertEquals(END_MS, QUERY_STAGE.getEndMs());
    assertEquals(ID, QUERY_STAGE.getGeneratedId());
    assertEquals(INPUT_STAGES, QUERY_STAGE.getInputStages());
    assertEquals(PARALLEL_INPUTS, QUERY_STAGE.getParallelInputs());
    assertEquals(NAME, QUERY_STAGE.getName());
    assertEquals(READ_MS_AVG, QUERY_STAGE.getReadMsAvg());
    assertEquals(READ_MS_MAX, QUERY_STAGE.getReadMsMax());
    assertEquals(READ_RATIO_AVG, QUERY_STAGE.getReadRatioAvg(), 0);
    assertEquals(READ_RATIO_MAX, QUERY_STAGE.getReadRatioMax(), 0);
    assertEquals(RECORDS_READ, QUERY_STAGE.getRecordsRead());
    assertEquals(RECORDS_WRITTEN, QUERY_STAGE.getRecordsWritten());
    assertEquals(SHUFFLE_OUTPUT_BYTES, QUERY_STAGE.getShuffleOutputBytes());
    assertEquals(SHUFFLE_OUTPUT_BYTES_SPILLED, QUERY_STAGE.getShuffleOutputBytesSpilled());
    assertEquals(START_MS, QUERY_STAGE.getStartMs());
    assertEquals(STATUS, QUERY_STAGE.getStatus());
    assertEquals(STEPS, QUERY_STAGE.getSteps());
    assertEquals(WAIT_MS_AVG, QUERY_STAGE.getWaitMsAvg());
    assertEquals(WAIT_MS_MAX, QUERY_STAGE.getWaitMsMax());
    assertEquals(WAIT_RATIO_AVG, QUERY_STAGE.getWaitRatioAvg(), 0);
    assertEquals(WAIT_RATIO_MAX, QUERY_STAGE.getWaitRatioMax(), 0);
    assertEquals(WRITE_MS_AVG, QUERY_STAGE.getWriteMsAvg());
    assertEquals(WRITE_MS_MAX, QUERY_STAGE.getWriteMsMax());
    assertEquals(WRITE_RATIO_AVG, QUERY_STAGE.getWriteRatioAvg(), 0);
    assertEquals(WRITE_RATIO_MAX, QUERY_STAGE.getWriteRatioMax(), 0);
    assertEquals(SLOTMS, QUERY_STAGE.getSlotMs());
  }

  @Test
  public void testToAndFromPb() {
    compareQueryStep(QUERY_STEP1, QueryStep.fromPb(QUERY_STEP1.toPb()));
    compareQueryStep(QUERY_STEP2, QueryStep.fromPb(QUERY_STEP2.toPb()));
    compareQueryStage(QUERY_STAGE, QueryStage.fromPb(QUERY_STAGE.toPb()));
    ExplainQueryStep stepPb = new ExplainQueryStep();
    stepPb.setKind("KIND");
    stepPb.setSubsteps(null);
    // Null substeps in the pb should surface as an empty list, not null.
    compareQueryStep(new QueryStep("KIND", ImmutableList.<String>of()), QueryStep.fromPb(stepPb));
  }

  @Test
  public void testEquals() {
    compareQueryStep(QUERY_STEP1, QUERY_STEP1);
    compareQueryStep(QUERY_STEP2, QUERY_STEP2);
    compareQueryStage(QUERY_STAGE, QUERY_STAGE);
  }

  @Test
  public void testNotEquals() {
    assertNotEquals(QUERY_STAGE, QUERY_STEP1);
    assertNotEquals(QUERY_STEP1, QUERY_STAGE);
  }

  /** Field-by-field comparison of two stages (plus equals/hashCode/toString contracts). */
  private void compareQueryStage(QueryStage expected, QueryStage value) {
    assertEquals(expected, value);
    assertEquals(expected.getCompletedParallelInputs(), value.getCompletedParallelInputs());
    assertEquals(expected.getComputeMsAvg(), value.getComputeMsAvg());
    assertEquals(expected.getComputeMsMax(), value.getComputeMsMax());
    assertEquals(expected.getComputeRatioAvg(), value.getComputeRatioAvg(), 0);
    assertEquals(expected.getComputeRatioMax(), value.getComputeRatioMax(), 0);
    assertEquals(expected.getEndMs(), value.getEndMs());
    assertEquals(expected.getGeneratedId(), value.getGeneratedId());
    assertEquals(expected.getInputStages(), value.getInputStages());
    assertEquals(expected.getName(), value.getName());
    assertEquals(expected.getParallelInputs(), value.getParallelInputs());
    // FIX: read-time millisecond metrics were previously not compared at all.
    assertEquals(expected.getReadMsAvg(), value.getReadMsAvg());
    assertEquals(expected.getReadMsMax(), value.getReadMsMax());
    assertEquals(expected.getReadRatioAvg(), value.getReadRatioAvg(), 0);
    assertEquals(expected.getReadRatioMax(), value.getReadRatioMax(), 0);
    assertEquals(expected.getRecordsRead(), value.getRecordsRead());
    assertEquals(expected.getRecordsWritten(), value.getRecordsWritten());
    assertEquals(expected.getShuffleOutputBytes(), value.getShuffleOutputBytes());
    assertEquals(expected.getShuffleOutputBytesSpilled(), value.getShuffleOutputBytesSpilled());
    assertEquals(expected.getStartMs(), value.getStartMs());
    assertEquals(expected.getStatus(), value.getStatus());
    assertEquals(expected.getSteps(), value.getSteps());
    assertEquals(expected.getWaitMsAvg(), value.getWaitMsAvg());
    assertEquals(expected.getWaitMsMax(), value.getWaitMsMax());
    assertEquals(expected.getWaitRatioAvg(), value.getWaitRatioAvg(), 0);
    assertEquals(expected.getWaitRatioMax(), value.getWaitRatioMax(), 0);
    // FIX: these two previously compared `expected` against itself, so they could never fail.
    assertEquals(expected.getWriteMsAvg(), value.getWriteMsAvg());
    assertEquals(expected.getWriteMsMax(), value.getWriteMsMax());
    assertEquals(expected.getWriteRatioAvg(), value.getWriteRatioAvg(), 0);
    assertEquals(expected.getWriteRatioMax(), value.getWriteRatioMax(), 0);
    assertEquals(expected.getSlotMs(), value.getSlotMs());
    assertEquals(expected.hashCode(), value.hashCode());
    assertEquals(expected.toString(), value.toString());
  }

  /** Field-by-field comparison of two steps (plus equals/hashCode contracts). */
  private void compareQueryStep(QueryStep expected, QueryStep value) {
    assertEquals(expected, value);
    assertEquals(expected.getName(), value.getName());
    assertEquals(expected.getSubsteps(), value.getSubsteps());
    assertEquals(expected.hashCode(), value.hashCode());
  }
}
| |
/*
* Copyright (c) 2007, 2015, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*/
/*
* Copyright 2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.jaxp.validation;
import java.io.IOException;
import javax.xml.XMLConstants;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stax.StAXResult;
import javax.xml.transform.stax.StAXSource;
import javax.xml.transform.stream.StreamSource;
import javax.xml.validation.Validator;
import com.sun.org.apache.xerces.internal.impl.Constants;
import com.sun.org.apache.xerces.internal.util.SAXMessageFormatter;
import com.sun.org.apache.xerces.internal.util.Status;
import com.sun.org.apache.xerces.internal.xni.parser.XMLConfigurationException;
import com.sun.org.apache.xerces.internal.xs.AttributePSVI;
import com.sun.org.apache.xerces.internal.xs.ElementPSVI;
import com.sun.org.apache.xerces.internal.xs.PSVIProvider;
import org.w3c.dom.ls.LSResourceResolver;
import org.xml.sax.ErrorHandler;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
/**
* <p>Implementation of Validator for W3C XML Schemas.</p>
*
* @author <a href="mailto:Kohsuke.Kawaguchi@Sun.com">Kohsuke Kawaguchi</a>
* @author Michael Glavassevich, IBM
* @author <a href="mailto:Sunitha.Reddy@Sun.com">Sunitha Reddy</a>
* @version $Id: ValidatorImpl.java,v 1.10 2010-11-01 04:40:08 joehw Exp $
*/
final class ValidatorImpl extends Validator implements PSVIProvider {

    //
    // Data
    //

    /** Component manager. Owns all features, properties, the error handler and resolver. **/
    private XMLSchemaValidatorComponentManager fComponentManager;

    /** SAX validator helper. Created lazily on the first SAXSource validation. **/
    private ValidatorHandlerImpl fSAXValidatorHelper;

    /** DOM validator helper. Created lazily on the first DOMSource validation. **/
    private DOMValidatorHelper fDOMValidatorHelper;

    /** Stream validator helper. Created lazily on the first StreamSource validation. **/
    private StreamValidatorHelper fStreamValidatorHelper;

    /** StAX validator helper. Created lazily on the first StAXSource validation. **/
    private StAXValidatorHelper fStaxValidatorHelper;

    /** Flag for tracking whether features/properties changed since last reset. */
    private boolean fConfigurationChanged = false;

    /** Flag for tracking whether the error handler changed since last reset. */
    private boolean fErrorHandlerChanged = false;

    /** Flag for tracking whether the resource resolver changed since last reset. */
    private boolean fResourceResolverChanged = false;

    /** Support current-element-node property */
    private static final String CURRENT_ELEMENT_NODE = Constants.XERCES_PROPERTY_PREFIX + Constants.CURRENT_ELEMENT_NODE_PROPERTY;

    /**
     * Constructs a validator over the given grammar pool container. The null
     * hand-offs install the component manager's default error handler and
     * resource resolver (see setErrorHandler/setResourceResolver below).
     */
    public ValidatorImpl(XSGrammarPoolContainer grammarContainer) {
        fComponentManager = new XMLSchemaValidatorComponentManager(grammarContainer);
        setErrorHandler(null);
        setResourceResolver(null);
    }

    /**
     * Validates the source, dispatching on its concrete type (SAX, DOM, stream
     * or StAX) to a lazily-created, cached helper. A null source is rejected
     * with NullPointerException; any other Source subtype is rejected with
     * IllegalArgumentException. Note the instanceof checks run first, which is
     * safe because instanceof is false for null.
     */
    public void validate(Source source, Result result)
        throws SAXException, IOException {
        if (source instanceof SAXSource) {
            // Hand off to SAX validator helper.
            if (fSAXValidatorHelper == null) {
                fSAXValidatorHelper = new ValidatorHandlerImpl(fComponentManager);
            }
            fSAXValidatorHelper.validate(source, result);
        }
        else if (source instanceof DOMSource) {
            // Hand off to DOM validator helper.
            if (fDOMValidatorHelper == null) {
                fDOMValidatorHelper = new DOMValidatorHelper(fComponentManager);
            }
            fDOMValidatorHelper.validate(source, result);
        }
        else if (source instanceof StreamSource) {
            // Hand off to stream validator helper.
            if (fStreamValidatorHelper == null) {
                fStreamValidatorHelper = new StreamValidatorHelper(fComponentManager);
            }
            fStreamValidatorHelper.validate(source, result);
        }
        else if (source instanceof StAXSource) {
            // Hand off to stax validator helper.
            if (fStaxValidatorHelper == null) {
                fStaxValidatorHelper = new StAXValidatorHelper(fComponentManager);
            }
            fStaxValidatorHelper.validate(source, result);
        }
        // Source parameter cannot be null.
        else if (source == null) {
            throw new NullPointerException(JAXPValidationMessageFormatter.formatMessage(fComponentManager.getLocale(),
                    "SourceParameterNull", null));
        }
        // Source parameter must be a SAXSource, DOMSource or StreamSource
        else {
            throw new IllegalArgumentException(JAXPValidationMessageFormatter.formatMessage(fComponentManager.getLocale(),
                    "SourceNotAccepted", new Object [] {source.getClass().getName()}));
        }
    }

    /**
     * Installs the error handler on the component manager. A non-null handler
     * marks the handler as changed so reset() knows to restore the default.
     */
    public void setErrorHandler(ErrorHandler errorHandler) {
        fErrorHandlerChanged = (errorHandler != null);
        fComponentManager.setErrorHandler(errorHandler);
    }

    /** Returns the error handler currently installed on the component manager. */
    public ErrorHandler getErrorHandler() {
        return fComponentManager.getErrorHandler();
    }

    /**
     * Installs the resource resolver on the component manager. A non-null
     * resolver marks the resolver as changed so reset() knows to restore the default.
     */
    public void setResourceResolver(LSResourceResolver resourceResolver) {
        fResourceResolverChanged = (resourceResolver != null);
        fComponentManager.setResourceResolver(resourceResolver);
    }

    /** Returns the resource resolver currently installed on the component manager. */
    public LSResourceResolver getResourceResolver() {
        return fComponentManager.getResourceResolver();
    }

    /**
     * Looks up a feature on the component manager, translating configuration
     * errors into the SAX exception types required by the Validator contract.
     */
    public boolean getFeature(String name)
        throws SAXNotRecognizedException, SAXNotSupportedException {
        if (name == null) {
            throw new NullPointerException();
        }
        try {
            return fComponentManager.getFeature(name);
        }
        catch (XMLConfigurationException e) {
            final String identifier = e.getIdentifier();
            final String key = e.getType() == Status.NOT_RECOGNIZED ?
                    "feature-not-recognized" : "feature-not-supported";
            throw new SAXNotRecognizedException(
                    SAXMessageFormatter.formatMessage(fComponentManager.getLocale(),
                    key, new Object [] {identifier}));
        }
    }

    /**
     * Sets a feature on the component manager. NOT_ALLOWED (currently only the
     * secure-processing feature) raises SAXNotSupportedException; other
     * configuration errors raise SAXNotRecognizedException. On success the
     * configuration is marked dirty so reset() restores the initial state.
     */
    public void setFeature(String name, boolean value)
        throws SAXNotRecognizedException, SAXNotSupportedException {
        if (name == null) {
            throw new NullPointerException();
        }
        try {
            fComponentManager.setFeature(name, value);
        }
        catch (XMLConfigurationException e) {
            final String identifier = e.getIdentifier();
            final String key;
            if (e.getType() == Status.NOT_ALLOWED) {
                //for now, the identifier can only be (XMLConstants.FEATURE_SECURE_PROCESSING)
                throw new SAXNotSupportedException(
                        SAXMessageFormatter.formatMessage(fComponentManager.getLocale(),
                        "jaxp-secureprocessing-feature", null));
            } else if (e.getType() == Status.NOT_RECOGNIZED) {
                key = "feature-not-recognized";
            } else {
                key = "feature-not-supported";
            }
            throw new SAXNotRecognizedException(
                    SAXMessageFormatter.formatMessage(fComponentManager.getLocale(),
                    key, new Object [] {identifier}));
        }
        fConfigurationChanged = true;
    }

    /**
     * Looks up a property on the component manager. The current-element-node
     * property is answered directly from the DOM helper (null when no DOM
     * validation has run); other configuration errors become SAX exceptions.
     */
    public Object getProperty(String name)
        throws SAXNotRecognizedException, SAXNotSupportedException {
        if (name == null) {
            throw new NullPointerException();
        }
        //Support current-element-node; return current node if DOMSource is used.
        if (CURRENT_ELEMENT_NODE.equals(name)) {
            return (fDOMValidatorHelper != null) ? fDOMValidatorHelper.getCurrentElement() : null;
        }
        try {
            return fComponentManager.getProperty(name);
        }
        catch (XMLConfigurationException e) {
            final String identifier = e.getIdentifier();
            final String key = e.getType() == Status.NOT_RECOGNIZED ?
                    "property-not-recognized" : "property-not-supported";
            throw new SAXNotRecognizedException(
                    SAXMessageFormatter.formatMessage(fComponentManager.getLocale(),
                    key, new Object [] {identifier}));
        }
    }

    /**
     * Sets a property on the component manager, translating configuration
     * errors into SAX exceptions. On success the configuration is marked
     * dirty so reset() restores the initial state.
     */
    public void setProperty(String name, Object object)
        throws SAXNotRecognizedException, SAXNotSupportedException {
        if (name == null) {
            throw new NullPointerException();
        }
        try {
            fComponentManager.setProperty(name, object);
        }
        catch (XMLConfigurationException e) {
            final String identifier = e.getIdentifier();
            final String key = e.getType() == Status.NOT_RECOGNIZED ?
                    "property-not-recognized" : "property-not-supported";
            throw new SAXNotRecognizedException(
                    SAXMessageFormatter.formatMessage(fComponentManager.getLocale(),
                    key, new Object [] {identifier}));
        }
        fConfigurationChanged = true;
    }

    /**
     * Restores the validator to its initial state. Uses the three dirty flags
     * to do the minimum work: a full restore only when features/properties
     * changed, otherwise just the handler and/or resolver that changed.
     */
    public void reset() {
        // avoid resetting features and properties if the state the validator
        // is currently in, is the same as it will be after reset.
        if (fConfigurationChanged) {
            fComponentManager.restoreInitialState();
            setErrorHandler(null);
            setResourceResolver(null);
            fConfigurationChanged = false;
            fErrorHandlerChanged = false;
            fResourceResolverChanged = false;
        }
        else {
            if (fErrorHandlerChanged) {
                setErrorHandler(null);
                fErrorHandlerChanged = false;
            }
            if (fResourceResolverChanged) {
                setResourceResolver(null);
                fResourceResolverChanged = false;
            }
        }
    }

    /*
     * PSVIProvider methods
     */

    /** PSVI for the current element; available only during SAX-based validation. */
    public ElementPSVI getElementPSVI() {
        return (fSAXValidatorHelper != null) ? fSAXValidatorHelper.getElementPSVI() : null;
    }

    /** PSVI for the attribute at the given index; available only during SAX-based validation. */
    public AttributePSVI getAttributePSVI(int index) {
        return (fSAXValidatorHelper != null) ? fSAXValidatorHelper.getAttributePSVI(index) : null;
    }

    /** PSVI for the named attribute; available only during SAX-based validation. */
    public AttributePSVI getAttributePSVIByName(String uri, String localname) {
        return (fSAXValidatorHelper != null) ? fSAXValidatorHelper.getAttributePSVIByName(uri, localname) : null;
    }

} // ValidatorImpl
| |
/*
* SmoothingSplineFitFunctionView.java
*
*
* Copyright 2006-2015 James F. Bowring and www.Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.Tripoli.dataViews.fitFunctionViews;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.DecimalFormat;
import java.util.SortedMap;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JRadioButton;
import javax.swing.JTextField;
import org.earthtime.Tripoli.dataModels.sessionModels.AbstractSessionForStandardDataModel;
import org.earthtime.Tripoli.dataViews.AbstractRawDataView;
import org.earthtime.Tripoli.dataViews.fitFunctionPresentationViews.AbstractFitFunctionPresentationView;
import org.earthtime.Tripoli.dataViews.fitFunctionPresentationViews.SplineOverDispersionChooserPanel;
import org.earthtime.Tripoli.dataViews.overlayViews.DataViewsOverlay;
import org.earthtime.Tripoli.dataViews.simpleViews.FitFunctionDataInterface;
import org.earthtime.Tripoli.dataViews.simpleViews.SessionOfStandardView;
import org.earthtime.Tripoli.fitFunctions.AbstractFunctionOfX;
import org.earthtime.dialogs.DialogEditor;
import org.earthtime.beans.ET_JButton;
import org.earthtime.dataDictionaries.FitFunctionTypeEnum;
/**
*
* @author James F. Bowring
*/
public class SmoothingSplineFitFunctionView extends AbstractFitFunctionView {
private AbstractRawDataView splineOverDispersionChooserPanel;
private final FitFunctionDataInterface targetDataModelView;
private JLabel xiLabel;
private final AbstractFitFunctionPresentationView presentationView;
private JTextField startText;
private JTextField stepText;
private JTextField stopText;
/**
*
* @param smoothingSplineFitFofX
* @param presentationView
* @param functionChoiceRadioButton
* @param bounds
* @param targetDataModelView
*/
public SmoothingSplineFitFunctionView(//
AbstractFunctionOfX smoothingSplineFitFofX, //
AbstractFitFunctionPresentationView presentationView,//
final JRadioButton functionChoiceRadioButton, //
Rectangle bounds, //
final FitFunctionDataInterface targetDataModelView) {
super(smoothingSplineFitFofX, functionChoiceRadioButton, bounds);
this.presentationView = presentationView;
this.targetDataModelView = targetDataModelView;
if (smoothingSplineFitFofX.isOverDispersionSelected()) {
JButton ODChoiceButton = new ET_JButton("Generate OD");
ODChoiceButton.setFont(new Font("SansSerif", Font.PLAIN, 10));
ODChoiceButton.setBounds(5, 20, 81, 15);
ODChoiceButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent ae) {
// TODO: Broke Demeter
SortedMap<Double, AbstractFunctionOfX> sessionOfStandardsSplinesWithOD = null;
if (targetDataModelView instanceof SessionOfStandardView) {
sessionOfStandardsSplinesWithOD = //
((AbstractSessionForStandardDataModel) ((SessionOfStandardView) targetDataModelView)//
.getSessionForStandardDataModel()).generateSetOfSplineWithODfitFunctions(//
Double.valueOf(startText.getText()),//
Double.valueOf(stepText.getText()),//
Double.valueOf(stopText.getText()));
} else if (targetDataModelView instanceof DataViewsOverlay) {
sessionOfStandardsSplinesWithOD = //
((DataViewsOverlay) targetDataModelView)//
.getDownholeFractionationDataModel().generateSetOfSplineWithODfitFunctions(//
Double.valueOf(startText.getText()),//
Double.valueOf(stepText.getText()),//
Double.valueOf(stopText.getText()));
}
if (splineOverDispersionChooserPanel != null) {
remove(splineOverDispersionChooserPanel);
}
generateSplineOverDispersionChooserPanel(sessionOfStandardsSplinesWithOD);
splineOverDispersionChooserPanel.preparePanel();
add(splineOverDispersionChooserPanel);
if (targetDataModelView instanceof SessionOfStandardView) {
((SessionOfStandardView) targetDataModelView).refreshPanel();
} else if (targetDataModelView instanceof SessionOfStandardView) {
((DataViewsOverlay) targetDataModelView).refreshPanel();
}
functionChoiceRadioButton.doClick();
}
});
add(ODChoiceButton);
Font odFont = new Font(
"SansSerif",
Font.PLAIN,
9);
// text boxes for od
DecimalFormat sss = new DecimalFormat("0.00000");
startText = new JTextField();
startText.setDocument(new DialogEditor.DoubleDocument(startText, true));
AbstractSessionForStandardDataModel sessionForStandardDataModel =//
((AbstractSessionForStandardDataModel) ((SessionOfStandardView) targetDataModelView)//
.getSessionForStandardDataModel());
double startVal = Math.sqrt(sessionForStandardDataModel.getSessionOfStandardsFitFunctionsWithOD().get(FitFunctionTypeEnum.LINE.getName()).getOverDispersion());
startText.setText(sss.format(0.8 * startVal));// ("0.001");
startText.setBounds(new Rectangle(35, 38, 50, 15));
startText.setFont(odFont);
add(startText);
JLabel startTextLabel = new JLabel("start:");
startTextLabel.setBounds(new Rectangle(6, 38, 30, 15));
startTextLabel.setFont(odFont);
add(startTextLabel);
stepText = new JTextField();
stepText.setDocument(new DialogEditor.DoubleDocument(stepText, true));
stepText.setText(sss.format(0.2 * startVal / 15.0));// "0.01");
stepText.setBounds(new Rectangle(35, 54, 50, 15));
stepText.setFont(odFont);
add(stepText);
JLabel stepTextLabel = new JLabel("step:");
stepTextLabel.setBounds(new Rectangle(6, 54, 30, 15));
stepTextLabel.setFont(odFont);
add(stepTextLabel);
stopText = new JTextField();
stopText.setDocument(new DialogEditor.DoubleDocument(stopText, true));
stopText.setText(sss.format(startVal));// "0.05");
stopText.setBounds(new Rectangle(35, 70, 50, 15));
stopText.setFont(odFont);
add(stopText);
JLabel stopTextLabel = new JLabel("stop:");
stopTextLabel.setBounds(new Rectangle(6, 70, 30, 15));
stopTextLabel.setFont(odFont);
add(stopTextLabel);
// show overdispersion
DecimalFormat f = new DecimalFormat("\u03BE = 0.0000");
xiLabel = new JLabel(f.format(Math.sqrt(fittedFofX.getOverDispersion())));
xiLabel.setFont(new Font(
"SansSerif",
Font.BOLD,
10));
xiLabel.setBounds(new Rectangle(6, 85, 80, 18));
add(xiLabel);
}
/*
NOTES FROM Noah 27 Oct 2014
Default for Spline-OD =
floor(0.95*0.15*1000)/1000
floor(0.95*LinearOverdispersion*1000)/1000
Start/Step/Stop:
Start: floor(0.8*LinearOverdispersion*1000)/1000
Stop: LinearOverdispersion
Step = difference / 15
Multiply all Overdispersions by 100 for display
Where Overdispersion is the Greek ziggety-zag
For display means in start, step, stop, and value display (ziggety equals)
Show the graph always when Spline-OD is selected.
*/
}
/**
*
*/
public void refreshXiLabel() {
DecimalFormat f = new DecimalFormat("\u03BE = 0.00000");
xiLabel.setText(f.format(Math.sqrt(fittedFofX.getOverDispersion())));
}
private void generateSplineOverDispersionChooserPanel(SortedMap<Double, AbstractFunctionOfX> sessionOfStandardsSplinesWithOD) {
Rectangle bounds = new Rectangle(90, 18, getWidth() - 90, getHeight() - 18);
if (targetDataModelView instanceof SessionOfStandardView) {
splineOverDispersionChooserPanel = //
new SplineOverDispersionChooserPanel(((SessionOfStandardView) targetDataModelView), this, sessionOfStandardsSplinesWithOD, bounds);
} else if (targetDataModelView instanceof DataViewsOverlay) {
splineOverDispersionChooserPanel = //
new SplineOverDispersionChooserPanel(((DataViewsOverlay) targetDataModelView), this, sessionOfStandardsSplinesWithOD, bounds);
}
}
/**
*
*/
@Override
public void resetValueModelSliders() {
//parameterAValueSlider.resetSliderBox();
}
/**
*
* @param g2d
*/
@Override
public void paint(Graphics2D g2d) {
super.paint(g2d);
}
/**
*
*/
@Override
public void preparePanel() {
super.preparePanel();
}
/**
* @return the presentationView
*/
public AbstractFitFunctionPresentationView getPresentationView() {
return presentationView;
}
}
| |
package com.qualixium.playnb.unit.scalatemplate;
import com.qualixium.playnb.filetype.scalatemplate.helper.ScalaTemplateLanguageHelper;
import com.qualixium.playnb.filetype.scalatemplate.helper.TemplateParameter;
import com.qualixium.playnb.util.MiscUtil.Language;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.BeforeClass;
import org.junit.Test;
import org.openide.util.Utilities;
public class ScalaTemplateLanguageHelperTest {
private static String scalaTemplateFileContent;
private static ClassLoader classLoaderWithScalaLibrary;
@BeforeClass
public static void setUpClass() throws URISyntaxException, IOException {
    // Load the fixture template bundled with the test resources.
    URI templateUri = ScalaTemplateLanguageHelperTest.class.getClassLoader()
            .getResource("ScalaTemplateTemplate.scala.html").toURI();
    scalaTemplateFileContent = new String(Files.readAllBytes(Paths.get(templateUri)));
    // Build a class loader able to resolve scala.* types from the local ivy cache.
    // NOTE(review): the scala-library version (2.11.7) is hard-coded — confirm it
    // matches the version the build actually caches.
    String scalaLibraryAbsolutePath = System.getProperty("user.home")
            + "/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.7.jar";
    classLoaderWithScalaLibrary = URLClassLoader.newInstance(
            new URL[]{Utilities.toURI(new File(scalaLibraryAbsolutePath)).toURL()},
            ScalaTemplateLanguageHelperTest.class.getClassLoader()
    );
}
@Test
public void testGetConstructorParametersOneParameter() {
    // The bundled template declares a constructor parameter named "notification".
    final String expectedName = "notification";
    boolean found = false;
    for (TemplateParameter parameter : ScalaTemplateLanguageHelper.getConstructorParameters(scalaTemplateFileContent)) {
        if (parameter.variableName.equals(expectedName)) {
            found = true;
            break;
        }
    }
    assertTrue(found);
}
@Test
public void testGetConstructorParametersTwoParameters() {
    // Inline template declaring two constructor parameters.
    final String fileContent
            = "@(notification: Notification, anotherPar: String)\n"
            + "\n"
            + "@import helper._\n"
            + "\n"
            + "@main() {\n"
            + "\n"
            + "<div id=\"first_div_row\" class=\"row\">\n"
            + "    <div class=\"col-lg-12\">\n"
            + "        <h1 class=\"page-header\">\n"
            + "            @notification.title\n"
            + "            <small>   \n"
            + "            </small>\n"
            + "        </h1>\n"
            + "    </div>\n"
            + "</div>"
            + "}";
    // Both declared names must be extracted.
    List<String> parameterNames = ScalaTemplateLanguageHelper
            .getConstructorParameters(fileContent).stream()
            .map(parameter -> parameter.variableName)
            .collect(Collectors.toList());
    assertTrue(parameterNames.contains("notification"));
    assertTrue(parameterNames.contains("anotherPar"));
}
@Test
public void testGetConstructorParametersWithGenerics() {
    // Inline template whose third parameter carries a generic (bracketed) type.
    final String fileContent
            = "@(notification: Notification, anotherPar: String, numbers: List[Int])\n"
            + "\n"
            + "@import helper._\n"
            + "\n"
            + "@main() {\n"
            + "\n"
            + "<div id=\"first_div_row\" class=\"row\">\n"
            + "    <div class=\"col-lg-12\">\n"
            + "        <h1 class=\"page-header\">\n"
            + "            @notification.title\n"
            + "            <small>   \n"
            + "            </small>\n"
            + "        </h1>\n"
            + "    </div>\n"
            + "</div>"
            + "}";
    // The "numbers" parameter must retain its full generic type, not just "List".
    Optional<TemplateParameter> numbersParam = ScalaTemplateLanguageHelper
            .getConstructorParameters(fileContent).stream()
            .filter(parameter -> parameter.variableName.equals("numbers"))
            .findFirst();
    assertEquals("List[Int]", numbersParam.get().variableType);
}
@Test
public void testMethodNameTranslator() {
    // Scala encodes operator characters with $-escapes; each pair is {encoded, decoded}.
    String[][] cases = {
        {"$plus", "+"},
        {"$plus$plus", "++"},
        {"$less", "<"},
        {"$greater", ">"},
        {"$less$eq", "<="},
        {"$greater$eq", ">="},
        {"$bar", "|"},
        {"$minus", "-"},
        {"$less$less", "<<"},
        {"$amp", "&"},
        {"toChar", "toChar"},
        {"$div", "/"},
        {"$percent", "%"},
        {"$greater$greater", ">>"},
        {"$up", "^"},
        {"unary_$minus", "unary_-"},
        {"$eq$eq", "=="},
        {"$bang$eq", "!="},
        {"$times", "*"},
        {"unary_$tilde", "unary_~"},
    };
    for (String[] pair : cases) {
        assertEquals(pair[1], ScalaTemplateLanguageHelper.getTranslatedMemberName(pair[0]));
    }
}
@Test
public void testTypeNameTranslation() {
    // Each row is {expected Scala type, Java type name}: primitives map to
    // their Scala counterparts, Object to Any, and void to Unit.
    final String[][] cases = {
        {"Boolean", "boolean"},
        {"Double", "double"},
        {"Any", "java.lang.Object"},
        {"Unit", "void"},
    };
    for (String[] c : cases) {
        assertEquals(c[0], ScalaTemplateLanguageHelper.getTranslatedType(c[1]));
    }
}
@Test
public void testArgumentNameTranslation() {
    // Synthetic reflection names argN become Scala-style x$(N+1);
    // real parameter names pass through unchanged.
    final String[][] cases = {
        {"x$1", "arg0"},
        {"x$2", "arg1"},
        {"firstName", "firstName"},
    };
    for (String[] c : cases) {
        assertEquals(c[0], ScalaTemplateLanguageHelper.getTranslatedArgument(c[1]));
    }
}
@Test
public void testGetCompletionItemMethodText() throws ClassNotFoundException {
    // Render the completion text for the scala.Int "==(double)" overload and
    // verify both the decoded operator name and the Scala-style signature.
    Class<?> clazz = classLoaderWithScalaLibrary.loadClass("scala.Int");
    Method[] methods = clazz.getMethods();
    boolean matched = false;
    for (Method method : methods) {
        // Guard on parameter count so getParameters()[0] can never throw.
        if (method.getName().equals("$eq$eq")
                && method.getParameterCount() > 0
                && method.getParameters()[0].getType().getSimpleName().equals("double")) {
            final String expected = "==(x$1: Double): Boolean";
            String completionItemText = ScalaTemplateLanguageHelper.getCompletionItemMethodText(method);
            assertEquals(expected, completionItemText);
            matched = true;
        }
    }
    // Bug fix: previously the test passed vacuously when no overload matched
    // (the assertion inside the loop simply never ran).
    assertTrue("no $eq$eq(double) overload found on scala.Int", matched);
}
@Test
public void testGetCompletionItemMemberText() throws ClassNotFoundException {
    // Render the completion text for the java.lang.Integer.SIZE field and
    // verify the Scala-style "name: Type" formatting.
    Class<?> clazz = Class.forName("java.lang.Integer");
    Field[] fields = clazz.getFields();
    boolean matched = false;
    for (Field field : fields) {
        if (field.getName().equals("SIZE")) {
            final String expected = "SIZE: Int";
            String completionItemText = ScalaTemplateLanguageHelper.getCompletionItemMemberText(field);
            assertEquals(expected, completionItemText);
            matched = true;
        }
    }
    // Bug fix: previously the test passed vacuously when the field lookup
    // found nothing (the assertion inside the loop never ran).
    assertTrue("SIZE field not found on java.lang.Integer", matched);
}
@Test
public void testGetFirstClassFromLine() {
    // The leading fully-qualified class in a chained expression is extracted.
    final String codeLine = "java.lang.String.valueOf(3).";
    assertEquals("java.lang.String",
            ScalaTemplateLanguageHelper.getFirstClassFromLine(codeLine).get());
}
@Test
public void testGetClassToProvideCompletion() throws ClassNotFoundException {
    // Chained calls are resolved left-to-right: Integer.toString(5) yields a
    // String, so completion after ".toLowerCase" is provided on java.lang.String.
    final String codeLine = "java.lang.Integer.toString(5).toLowerCase";
    Optional<Class> resolved = ScalaTemplateLanguageHelper.getClassToProvideCompletion(
            "", codeLine, getClass().getClassLoader(), Language.SCALA);
    assertEquals("java.lang.String", resolved.get().getName());
}
@Test
public void testGetClassToProvideCompletionWithDate() throws ClassNotFoundException {
    // Date.toInstant returns an Instant; adjustInto is declared to return
    // java.time.temporal.Temporal, which is the class completion is based on.
    final String codeLine = "java.util.Date.toInstant.adjustInto";
    Optional<Class> resolved = ScalaTemplateLanguageHelper.getClassToProvideCompletion(
            "", codeLine, getClass().getClassLoader(), Language.JAVA);
    assertEquals("java.time.temporal.Temporal", resolved.get().getName());
}
@Test
public void testGetTemplateParameterInBlock() {
    // Caret position 177 falls inside the @for block, where "v" iterates
    // over listValues (declared List[String]), so the element type is String.
    final String fileContent = "@(emp: Employee, myVar: String, listValues: List[String])\n"
            + "\n"
            + "this is the @myVar.toLowerCase @emp.age\n"
            + "\n"
            + "<input type=\"text\" value=\"@emp.name\" />\n"
            + "\n"
            + "@for(v <- listValues) {\n"
            + "\n"
            + "<label class=\"label-warning\"></label>\n"
            + "\n"
            + "}";
    Optional<TemplateParameter> blockParam =
            ScalaTemplateLanguageHelper.getBlockParameter(fileContent, 177);
    assertTrue("tpOptional is not present", blockParam.isPresent());
    TemplateParameter parameter = blockParam.get();
    assertEquals("v", parameter.variableName);
    assertEquals("String", parameter.variableType);
}
@Test
public void testGetTemplateParameterInMapBlock() {
    // Caret position 264 falls inside the "@listValues map { valMap => ... }"
    // block; listValues is declared List[Int], so the bound type is Int.
    final String fileContent =
            "@(emp: Employee, myVar: String, listValues: List[Int])\n"
            + "\n"
            + "this is the @myVar.toLowerCase @emp.age\n"
            + "\n"
            + "<input type=\"text\" value=\"@emp.name\" />\n"
            + "\n"
            + "@for(v -> listValues) {\n"
            + "\n"
            + "<label class=\"label-warning\"></label>\n"
            + "\n"
            + "}\n"
            + "\n"
            + "<b>another text</b>\n"
            + "\n"
            + "@listValues map { valMap =>\n"
            + " <b></b>\n"
            + "}";
    Optional<TemplateParameter> blockParam =
            ScalaTemplateLanguageHelper.getBlockParameter(fileContent, 264);
    assertTrue("tpOptional is not present", blockParam.isPresent());
    TemplateParameter parameter = blockParam.get();
    assertEquals("valMap", parameter.variableName);
    assertEquals("Int", parameter.variableType);
}
@Test
public void testGetTemplateParameterInForEachBlock() {
    // Caret position 306 falls inside the trailing
    // "@listValues.foreach { fe => ... }" block; the element type of
    // listValues (List[String]) is String.
    final String fileContent =
            "@(emp: Employee, myVar: String, listValues: List[String])\n"
            + "\n"
            + "this is the @myVar.toLowerCase @emp.age\n"
            + "\n"
            + "<input type=\"text\" value=\"@emp.name\" />\n"
            + "\n"
            + "@for(v -> listValues) {\n"
            + "\n"
            + "<label class=\"label-warning\"></label>\n"
            + "\n"
            + "}\n"
            + "\n"
            + "<b>another text</b>\n"
            + "\n"
            + "@listValues.map { valMap =>\n"
            + " <b></b>\n"
            + "}\n"
            + "\n"
            + "@listValues.foreach { fe =>\n"
            + " <i></i>\n"
            + "}";
    Optional<TemplateParameter> blockParam =
            ScalaTemplateLanguageHelper.getBlockParameter(fileContent, 306);
    assertTrue("tpOptional is not present", blockParam.isPresent());
    TemplateParameter parameter = blockParam.get();
    assertEquals("fe", parameter.variableName);
    assertEquals("String", parameter.variableType);
}
@Test
public void testIsTemplateParameter() {
    // At caret 177 (inside the @for block) both the loop variable "v" and the
    // declared parameter "emp" are in scope; an unknown name is not.
    final String fileContent = "@(emp: Employee, myVar: String, listValues: List[String])\n"
            + "\n"
            + "this is the @myVar.toLowerCase @emp.age\n"
            + "\n"
            + "<input type=\"text\" value=\"@emp.name\" />\n"
            + "\n"
            + "@for(v <- listValues) {\n"
            + "\n"
            + "<label class=\"label-warning\"></label>\n"
            + "\n"
            + "}";
    final int caret = 177;
    assertTrue(ScalaTemplateLanguageHelper.isTemplateParameter(fileContent, caret, "v"));
    assertTrue(ScalaTemplateLanguageHelper.isTemplateParameter(fileContent, caret, "emp"));
    assertFalse(ScalaTemplateLanguageHelper.isTemplateParameter(fileContent, caret, "anotherVariable"));
}
@Test
public void getClassFullNameTest() {
    // In Scala mode the short name "List" resolves to the immutable Scala list.
    assertEquals("scala.collection.immutable.List",
            ScalaTemplateLanguageHelper.getClassFullName("List", Language.SCALA));
}
@Test
public void getClassFullNameTestMoreTypes() {
    // Each row is {expected fully-qualified name, short name} under Scala rules:
    // value types live in the scala package, String stays java.lang.String.
    final String[][] cases = {
        {"scala.Int", "Int"},
        {"scala.Boolean", "Boolean"},
        {"scala.Double", "Double"},
        {"java.lang.String", "String"},
    };
    for (String[] c : cases) {
        assertEquals(c[0], ScalaTemplateLanguageHelper.getClassFullName(c[1], Language.SCALA));
    }
}
@Test
public void getClassFullNameJavaTypes() {
    // Each row is {expected fully-qualified name, short name} under Java rules:
    // wrappers resolve to java.lang, collections to java.util.
    final String[][] cases = {
        {"java.lang.Integer", "Integer"},
        {"java.lang.Boolean", "Boolean"},
        {"java.lang.String", "String"},
        {"java.util.Set", "Set"},
        {"java.util.List", "List"},
        {"java.util.Map", "Map"},
    };
    for (String[] c : cases) {
        assertEquals(c[0], ScalaTemplateLanguageHelper.getClassFullName(c[1], Language.JAVA));
    }
}
@Test
public void testGetImports() {
    // Three "@import <module>._" directives appear in the template; the helper
    // must report all three module names.
    final String fileContent
            = "@(notification: Notification, anotherPar: String)\n"
            + "\n"
            + "@import helper._\n"
            + "@import modu._\n"
            + "@import misc._\n"
            + "\n"
            + "@main() {\n"
            + "\n"
            + "<div id=\"first_div_row\" class=\"row\">\n"
            + " <div class=\"col-lg-12\">\n"
            + " <h1 class=\"page-header\">\n"
            + " @notification.title\n"
            + " <small> \n"
            + " </small>\n"
            + " </h1>\n"
            + " </div>\n"
            + "</div>"
            + "}";
    final List<String> expectedImports = new ArrayList<>();
    expectedImports.add("helper");
    expectedImports.add("modu");
    expectedImports.add("misc");
    List<String> actualImports = ScalaTemplateLanguageHelper.getImports(fileContent);
    assertTrue(actualImports.containsAll(expectedImports));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec.vector;
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;
import java.lang.ref.SoftReference;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.ArrayUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.type.DataTypePhysicalVariation;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.llap.LlapUtil;
import org.apache.hadoop.hive.llap.io.api.LlapProxy;
import org.apache.hadoop.hive.ql.CompilationOpContext;
import org.apache.hadoop.hive.ql.exec.GroupByOperator;
import org.apache.hadoop.hive.ql.exec.IConfigureJobConf;
import org.apache.hadoop.hive.ql.exec.KeyWrapper;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.vector.expressions.ConstantVectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriterFactory;
import org.apache.hadoop.hive.ql.exec.vector.expressions.aggregates.VectorAggregateExpression;
import org.apache.hadoop.hive.ql.exec.vector.wrapper.VectorHashKeyWrapperBase;
import org.apache.hadoop.hive.ql.exec.vector.wrapper.VectorHashKeyWrapperBatch;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.GroupByDesc;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.VectorDesc;
import org.apache.hadoop.hive.ql.plan.VectorGroupByDesc;
import org.apache.hadoop.hive.ql.plan.api.OperatorType;
import org.apache.hadoop.hive.ql.util.JavaDataModel;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.mapred.JobConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javolution.util.FastBitSet;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
/**
* Vectorized GROUP BY operator implementation. Consumes the vectorized input and
* stores the aggregate operators' intermediate states. Emits row mode output.
*
*/
public class VectorGroupByOperator extends Operator<GroupByDesc>
implements VectorizationOperator, VectorizationContextRegion, IConfigureJobConf {
private static final Logger LOG = LoggerFactory.getLogger(
    VectorGroupByOperator.class.getName());

// Vectorization context describing this operator's input columns.
private VectorizationContext vContext;
// Vector-specific portion of the GROUP BY descriptor (processing mode etc.).
private VectorGroupByDesc vectorDesc;

/**
 * This is the vector of aggregators. They are stateless and only implement
 * the algorithm of how to compute the aggregation. state is kept in the
 * aggregation buffers and is our responsibility to match the proper state for each key.
 */
private VectorAggregationDesc[] vecAggrDescs;

/**
 * Key vector expressions.
 */
private VectorExpression[] keyExpressions;
// Number of key columns in the output; presumably set during operator setup — TODO confirm.
private int outputKeyLength;

// Per-output-column type info (parallel arrays).
private TypeInfo[] outputTypeInfos;
private DataTypePhysicalVariation[] outputDataTypePhysicalVariations;

// Create a new outgoing vectorization context because column name map will change.
private VectorizationContext vOutContext = null;

// The above members are initialized by the constructor and must not be
// transient.
//---------------------------------------------------------------------------

// One aggregate expression instance per vecAggrDescs entry; evaluated against
// per-key aggregation buffers by the processing modes below.
private transient VectorAggregateExpression[] aggregators;

/**
 * The aggregation buffers to use for the current batch.
 */
private transient VectorAggregationBufferBatch aggregationBatchInfo;

/**
 * The current batch key wrappers.
 * The very same instance gets reused for all batches.
 */
private transient VectorHashKeyWrapperBatch keyWrappersBatch;

// Scratch row used when forwarding output; exact layout set elsewhere — TODO confirm.
private transient Object[] forwardCache;

// Output batch in which result rows are built one at a time (see the sorted
// group-batch mode notes at the end of this class).
private transient VectorizedRowBatch outputBatch;
private transient VectorizedRowBatchCtx vrbCtx;

/*
 * Grouping sets members.
 */
private transient boolean groupingSetsPresent;

// The field bits (i.e. which fields to include) or "id" for each grouping set.
private transient long[] groupingSets;

// The position in the column keys of the dummy grouping set id column.
private transient int groupingSetsPosition;

// The planner puts a constant field in for the dummy grouping set id. We will overwrite it
// as we process the grouping sets.
private transient ConstantVectorExpression groupingSetsDummyVectorExpression;

// We translate the grouping set bit field into a boolean arrays.
private transient boolean[][] allGroupingSetsOverrideIsNulls;

// Current number of entries in the hash-mode aggregation map.
private transient int numEntriesHashTable;
// Memory budget for the hash table (bytes), computed in computeMemoryLimits().
private transient long maxHashTblMemory;
// Max heap (or LLAP executor memory) used as the basis for the budget.
private transient long maxMemory;
// Fraction of maxMemory the hash table may use; from conf.getMemoryThreshold().
private float memoryThreshold;

// Whether we run inside LLAP; selects the memory source in computeMemoryLimits().
private boolean isLlap = false;
/**
* Interface for processing mode: global, hash, unsorted streaming, or group batch
*/
private static interface IProcessingMode {
public void initialize(Configuration hconf) throws HiveException;
public void setNextVectorBatchGroupStatus(boolean isLastGroupBatch) throws HiveException;
public void processBatch(VectorizedRowBatch batch) throws HiveException;
public void close(boolean aborted) throws HiveException;
}
/**
 * Shared behavior for all processing modes: grouping-set fan-out in
 * processBatch, aggregator evaluation over a prepared batch, and
 * aggregation-buffer allocation.
 */
private abstract class ProcessingModeBase implements IProcessingMode {

  // Overridden and used in ProcessingModeReduceMergePartial mode.
  @Override
  public void setNextVectorBatchGroupStatus(boolean isLastGroupBatch) throws HiveException {
    // Some Spark plans cause Hash and other modes to get this. So, ignore it.
  }

  protected abstract void doProcessBatch(VectorizedRowBatch batch, boolean isFirstGroupingSet,
      boolean[] currentGroupingSetsOverrideIsNulls) throws HiveException;

  @Override
  public void processBatch(VectorizedRowBatch batch) throws HiveException {
    if (!groupingSetsPresent) {
      doProcessBatch(batch, false, null);
      return;
    }

    // Replay the same batch once per grouping set, each time overwriting the
    // dummy grouping-set-id constant and supplying that set's null overrides.
    // PERFORMANCE NOTE: columns are not reused; KeyWrappers are generated anew.
    for (int setIndex = 0; setIndex < groupingSets.length; setIndex++) {
      groupingSetsDummyVectorExpression.setLongValue(groupingSets[setIndex]);
      groupingSetsDummyVectorExpression.evaluate(batch);
      doProcessBatch(batch, setIndex == 0, allGroupingSetsOverrideIsNulls[setIndex]);
    }
  }

  /**
   * Evaluates the aggregators on the current batch.
   * The aggregationBatchInfo must have been prepared
   * by calling {@link #prepareBatchAggregationBufferSets} first.
   */
  protected void processAggregators(VectorizedRowBatch batch) throws HiveException {
    VectorAggregationBufferRow[] bufferSets = aggregationBatchInfo.getAggregationBuffers();
    if (aggregationBatchInfo.getDistinctBufferSetCount() == 1) {
      // Only one distinct key in the batch: take the optimized
      // whole-batch aggregateInput path.
      VectorAggregateExpression.AggregationBuffer[] buffers =
          bufferSets[0].getAggregationBuffers();
      for (int a = 0; a < aggregators.length; a++) {
        aggregators[a].aggregateInput(buffers[a], batch);
      }
      return;
    }
    // Multiple keys: each aggregator picks its buffer per row via the mapping.
    for (int a = 0; a < aggregators.length; a++) {
      aggregators[a].aggregateInputSelection(bufferSets, a, batch);
    }
  }

  /**
   * Allocates and resets a fresh aggregation buffer set (one buffer per aggregator).
   */
  protected VectorAggregationBufferRow allocateAggregationBuffer() throws HiveException {
    VectorAggregateExpression.AggregationBuffer[] buffers =
        new VectorAggregateExpression.AggregationBuffer[aggregators.length];
    for (int a = 0; a < aggregators.length; a++) {
      buffers[a] = aggregators[a].getNewAggregationBuffer();
      aggregators[a].reset(buffers[a]);
    }
    return new VectorAggregationBufferRow(buffers);
  }
}
/**
 * Global aggregation (no GROUP BY clause, no keys).
 * A single aggregation buffer set is kept for the whole query, and exactly
 * one row is emitted on close — even when no input was seen (NULL results).
 */
private class ProcessingModeGlobalAggregate extends ProcessingModeBase {

  // The one and only aggregation buffer set used in global mode.
  private VectorAggregationBufferRow aggregationBuffers;

  @Override
  public void initialize(Configuration hconf) throws HiveException {
    aggregationBuffers = allocateAggregationBuffer();
    LOG.info("using global aggregation processing mode");
  }

  @Override
  public void setNextVectorBatchGroupStatus(boolean isLastGroupBatch) throws HiveException {
    // Do nothing.
  }

  @Override
  public void doProcessBatch(VectorizedRowBatch batch, boolean isFirstGroupingSet,
      boolean[] currentGroupingSetsOverrideIsNulls) throws HiveException {
    // No keys: every aggregator folds the whole batch into its single buffer.
    for (int a = 0; a < aggregators.length; a++) {
      aggregators[a].aggregateInput(aggregationBuffers.getAggregationBuffer(a), batch);
    }
  }

  @Override
  public void close(boolean aborted) throws HiveException {
    if (aborted) {
      return;
    }
    // Emit the single result row; a null key wrapper means "no keys".
    writeSingleRow(null, aggregationBuffers);
  }
}
/**
 * Hash Aggregate mode processing.
 *
 * Maintains a key-to-aggregation-buffers hash map, flushing (emitting rows)
 * when memory limits are hit, and switching to streaming mode when the hash
 * table does not reduce the row count enough (see checkHashModeEfficiency).
 */
private class ProcessingModeHashAggregate extends ProcessingModeBase {

  /**
   * The global key-aggregation hash map.
   */
  private Map<KeyWrapper, VectorAggregationBufferRow> mapKeysAggregationBuffers;

  /**
   * Total per hashtable entry fixed memory (does not depend on key/agg values).
   */
  private long fixedHashEntrySize;

  /**
   * Average per hashtable entry variable size memory (depends on key/agg value).
   */
  private int avgVariableSize;

  /**
   * Number of entries added to the hashtable since the last check if it should flush.
   */
  private int numEntriesSinceCheck;

  /**
   * Sum of batch size processed (ie. rows).
   */
  private long sumBatchSize;

  /**
   * Max number of entries in the vector group by aggregation hashtables.
   * Exceeding this will trigger a flush irrelevant of memory pressure condition.
   */
  private int maxHtEntries = 1000000;

  /**
   * The number of new entries that must be added to the hashtable before a memory size check.
   */
  private int checkInterval = 10000;

  /**
   * Percent of entries to flush when memory threshold exceeded.
   */
  private float percentEntriesToFlush = 0.1f;

  /**
   * A soft reference used to detect memory pressure; it is cleared by the GC
   * before an OutOfMemoryError would be thrown, so its death triggers a flush.
   */
  private SoftReference<Object> gcCanary = new SoftReference<Object>(new Object());

  /**
   * Counts the number of time the gcCanary died and was resurrected
   */
  private long gcCanaryFlushes = 0L;

  /**
   * Count of rows since the last check for changing from aggregate to streaming mode
   */
  private long lastModeCheckRowCount = 0;

  /**
   * Minimum factor for hash table to reduce number of entries
   * If this is not met, the processing switches to streaming mode
   */
  private float minReductionHashAggr;

  /**
   * Number of rows processed between checks for minReductionHashAggr factor
   * TODO: there is overlap between numRowsCompareHashAggr and checkInterval
   */
  private long numRowsCompareHashAggr;

  @Override
  public void initialize(Configuration hconf) throws HiveException {
    // hconf is null in unit testing
    if (null != hconf) {
      this.percentEntriesToFlush = HiveConf.getFloatVar(hconf,
          HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_FLUSH_PERCENT);
      this.checkInterval = HiveConf.getIntVar(hconf,
          HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_CHECKINTERVAL);
      this.maxHtEntries = HiveConf.getIntVar(hconf,
          HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_MAXENTRIES);
      this.numRowsCompareHashAggr = HiveConf.getIntVar(hconf,
          HiveConf.ConfVars.HIVEGROUPBYMAPINTERVAL);
    } else {
      this.percentEntriesToFlush =
          HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_FLUSH_PERCENT.defaultFloatVal;
      this.checkInterval =
          HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_CHECKINTERVAL.defaultIntVal;
      this.maxHtEntries =
          HiveConf.ConfVars.HIVE_VECTORIZATION_GROUPBY_MAXENTRIES.defaultIntVal;
      this.numRowsCompareHashAggr =
          HiveConf.ConfVars.HIVEGROUPBYMAPINTERVAL.defaultIntVal;
    }

    minReductionHashAggr = getConf().getMinReductionHashAggr();
    sumBatchSize = 0;

    mapKeysAggregationBuffers = new HashMap<KeyWrapper, VectorAggregationBufferRow>();
    computeMemoryLimits();
    LOG.debug("using hash aggregation processing mode");
  }

  @Override
  public void doProcessBatch(VectorizedRowBatch batch, boolean isFirstGroupingSet,
      boolean[] currentGroupingSetsOverrideIsNulls) throws HiveException {

    if (!groupingSetsPresent || isFirstGroupingSet) {
      // Evaluate the key expressions once.
      for (int i = 0; i < keyExpressions.length; ++i) {
        keyExpressions[i].evaluate(batch);
      }
    }

    // First we traverse the batch to evaluate and prepare the KeyWrappers
    // After this the KeyWrappers are properly set and hash code is computed
    if (!groupingSetsPresent) {
      keyWrappersBatch.evaluateBatch(batch);
    } else {
      keyWrappersBatch.evaluateBatchGroupingSets(batch, currentGroupingSetsOverrideIsNulls);
    }

    // Next we locate the aggregation buffer set for each key
    prepareBatchAggregationBufferSets(batch);

    // Finally, evaluate the aggregators
    processAggregators(batch);

    // Flush if memory limits were reached
    // We keep flushing until the memory is under threshold
    int preFlushEntriesCount = numEntriesHashTable;
    while (shouldFlush(batch)) {
      flush(false);

      if (gcCanary.get() == null) {
        gcCanaryFlushes++;
        gcCanary = new SoftReference<Object>(new Object());
      }

      // Validate that some progress is being made
      if (!(numEntriesHashTable < preFlushEntriesCount)) {
        if (LOG.isDebugEnabled()) {
          LOG.debug(String.format("Flush did not progress: %d entries before, %d entries after",
              preFlushEntriesCount,
              numEntriesHashTable));
        }
        break;
      }
      preFlushEntriesCount = numEntriesHashTable;
    }

    if (sumBatchSize == 0 && 0 != batch.size) {
      // Sample the first batch processed for variable sizes.
      updateAvgVariableSize(batch);
    }

    sumBatchSize += batch.size;
    lastModeCheckRowCount += batch.size;

    // Check if we should turn into streaming mode
    checkHashModeEfficiency();
  }

  @Override
  public void close(boolean aborted) throws HiveException {
    if (!aborted) {
      flush(true);
    }
    if (!aborted && sumBatchSize == 0 && GroupByOperator.shouldEmitSummaryRow(conf)) {
      // in case the empty grouping set is preset; but no output has done
      // the "summary row" still needs to be emitted
      VectorHashKeyWrapperBase kw = keyWrappersBatch.getVectorHashKeyWrappers()[0];
      kw.setNull();
      int pos = conf.getGroupingSetPosition();
      if (pos >= 0) {
        long val = (1L << pos) - 1;
        keyWrappersBatch.setLongValue(kw, pos, val);
      }
      VectorAggregationBufferRow groupAggregators = allocateAggregationBuffer();
      writeSingleRow(kw, groupAggregators);
    }
  }

  /**
   * Locates the aggregation buffer sets to use for each key in the current batch.
   * The keyWrappersBatch must have evaluated the current batch first.
   */
  private void prepareBatchAggregationBufferSets(VectorizedRowBatch batch) throws HiveException {
    // The aggregation batch vector needs to know when we start a new batch
    // to bump its internal version.
    aggregationBatchInfo.startBatch();

    if (batch.size == 0) {
      return;
    }

    // We now have to probe the global hash and find-or-allocate
    // the aggregation buffers to use for each key present in the batch
    VectorHashKeyWrapperBase[] keyWrappers = keyWrappersBatch.getVectorHashKeyWrappers();

    final int n = keyExpressions.length == 0 ? 1 : batch.size;
    // note - the row mapping is not relevant when aggregationBatchInfo::getDistinctBufferSetCount() == 1

    for (int i = 0; i < n; ++i) {
      VectorHashKeyWrapperBase kw = keyWrappers[i];
      VectorAggregationBufferRow aggregationBuffer = mapKeysAggregationBuffers.get(kw);
      if (null == aggregationBuffer) {
        // the probe failed, we must allocate a set of aggregation buffers
        // and push the (keywrapper,buffers) pair into the hash.
        // is very important to clone the keywrapper, the one we have from our
        // keyWrappersBatch is going to be reset/reused on next batch.
        aggregationBuffer = allocateAggregationBuffer();
        mapKeysAggregationBuffers.put(kw.copyKey(), aggregationBuffer);
        numEntriesHashTable++;
        numEntriesSinceCheck++;
      }
      aggregationBatchInfo.mapAggregationBufferSet(aggregationBuffer, i);
    }
  }

  /**
   * Computes the memory limits for hash table flush (spill).
   */
  private void computeMemoryLimits() {
    JavaDataModel model = JavaDataModel.get();

    fixedHashEntrySize =
        model.hashMapEntry() +
        keyWrappersBatch.getKeysFixedSize() +
        aggregationBatchInfo.getAggregatorsFixedSize();

    MemoryMXBean memoryMXBean = ManagementFactory.getMemoryMXBean();
    maxMemory = isLlap ? getConf().getMaxMemoryAvailable() : memoryMXBean.getHeapMemoryUsage().getMax();
    memoryThreshold = conf.getMemoryThreshold();
    // Tests may leave this unitialized, so better set it to 1
    if (memoryThreshold == 0.0f) {
      memoryThreshold = 1.0f;
    }

    // BUG FIX: cast to long, not int. maxHashTblMemory is a long and heaps
    // commonly exceed 2GB; the previous (int) cast saturated the float
    // product at Integer.MAX_VALUE, silently capping the budget at ~2GB.
    maxHashTblMemory = (long)(maxMemory * memoryThreshold);

    if (LOG.isDebugEnabled()) {
      LOG.debug("GBY memory limits - isLlap: {} maxMemory: {} ({} * {}) fixSize:{} (key:{} agg:{})",
          isLlap,
          LlapUtil.humanReadableByteCount(maxHashTblMemory),
          LlapUtil.humanReadableByteCount(maxMemory),
          memoryThreshold,
          fixedHashEntrySize,
          keyWrappersBatch.getKeysFixedSize(),
          aggregationBatchInfo.getAggregatorsFixedSize());
    }
  }

  /**
   * Flushes the entries in the hash table by emiting output (forward).
   * When parameter 'all' is true all the entries are flushed.
   * @param all
   * @throws HiveException
   */
  private void flush(boolean all) throws HiveException {

    int entriesToFlush = all ? numEntriesHashTable :
        (int)(numEntriesHashTable * this.percentEntriesToFlush);
    int entriesFlushed = 0;

    if (LOG.isDebugEnabled()) {
      LOG.debug(String.format(
          "Flush %d %s entries:%d fixed:%d variable:%d (used:%dMb max:%dMb) gcCanary:%s",
          entriesToFlush, all ? "(all)" : "",
          numEntriesHashTable, fixedHashEntrySize, avgVariableSize,
          numEntriesHashTable * (fixedHashEntrySize + avgVariableSize)/1024/1024,
          maxHashTblMemory/1024/1024,
          gcCanary.get() == null ? "dead" : "alive"));
    }

    /* Iterate the global (keywrapper,aggregationbuffers) map and emit
     a row for each key */
    Iterator<Map.Entry<KeyWrapper, VectorAggregationBufferRow>> iter =
        mapKeysAggregationBuffers.entrySet().iterator();
    while (iter.hasNext()) {
      Map.Entry<KeyWrapper, VectorAggregationBufferRow> pair = iter.next();

      writeSingleRow((VectorHashKeyWrapperBase) pair.getKey(), pair.getValue());

      if (!all) {
        iter.remove();
        --numEntriesHashTable;
        if (++entriesFlushed >= entriesToFlush) {
          break;
        }
      }
    }

    if (all) {
      mapKeysAggregationBuffers.clear();
      numEntriesHashTable = 0;
    }

    if (all && LOG.isDebugEnabled()) {
      LOG.debug(String.format("GC canary caused %d flushes", gcCanaryFlushes));
    }
  }

  /**
   * Returns true if the memory threshold for the hash table was reached.
   */
  private boolean shouldFlush(VectorizedRowBatch batch) {
    if (batch.size == 0) {
      return false;
    }
    //numEntriesSinceCheck is the number of entries added to the hash table
    // since the last time we checked the average variable size
    if (numEntriesSinceCheck >= this.checkInterval) {
      // Were going to update the average variable row size by sampling the current batch
      updateAvgVariableSize(batch);
      numEntriesSinceCheck = 0;
    }
    if (numEntriesHashTable > this.maxHtEntries ||
        numEntriesHashTable * (fixedHashEntrySize + avgVariableSize) > maxHashTblMemory) {
      return true;
    }
    if (gcCanary.get() == null) {
      return true;
    }

    return false;
  }

  /**
   * Updates the average variable size of the hash table entries.
   * The average is only updates by probing the batch that added the entry in the hash table
   * that caused the check threshold to be reached.
   */
  private void updateAvgVariableSize(VectorizedRowBatch batch) {
    int keyVariableSize = keyWrappersBatch.getVariableSize(batch.size);
    int aggVariableSize = aggregationBatchInfo.getVariableSize(batch.size);

    // This assumes the distribution of variable size keys/aggregates in the input
    // is the same as the distribution of variable sizes in the hash entries
    avgVariableSize = (int)((avgVariableSize * sumBatchSize + keyVariableSize +aggVariableSize) /
        (sumBatchSize + batch.size));
  }

  /**
   * Checks if the HT reduces the number of entries by at least minReductionHashAggr factor
   * @throws HiveException
   */
  private void checkHashModeEfficiency() throws HiveException {
    if (lastModeCheckRowCount > numRowsCompareHashAggr) {
      lastModeCheckRowCount = 0;
      if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("checkHashModeEfficiency: HT:%d RC:%d MIN:%d",
            numEntriesHashTable, sumBatchSize, (long)(sumBatchSize * minReductionHashAggr)));
      }
      if (numEntriesHashTable > sumBatchSize * minReductionHashAggr) {
        flush(true);

        changeToStreamingMode();
      }
    }
  }
}
/**
* Streaming processing mode on ALREADY GROUPED data. Each input VectorizedRowBatch may
* have a mix of different keys. Intermediate values are flushed each time key changes.
*/
private class ProcessingModeStreaming extends ProcessingModeBase {
/**
* The aggregation buffers used in streaming mode
*/
private VectorAggregationBufferRow currentStreamingAggregators;
/**
* The current key, used in streaming mode
*/
private VectorHashKeyWrapperBase streamingKey;
/**
* The keys that needs to be flushed at the end of the current batch
*/
private final VectorHashKeyWrapperBase[] keysToFlush =
new VectorHashKeyWrapperBase[VectorizedRowBatch.DEFAULT_SIZE];
/**
* The aggregates that needs to be flushed at the end of the current batch
*/
private final VectorAggregationBufferRow[] rowsToFlush =
new VectorAggregationBufferRow[VectorizedRowBatch.DEFAULT_SIZE];
/**
* A pool of VectorAggregationBufferRow to avoid repeated allocations
*/
private VectorUtilBatchObjectPool<VectorAggregationBufferRow>
streamAggregationBufferRowPool;
@Override
public void initialize(Configuration hconf) throws HiveException {
streamAggregationBufferRowPool = new VectorUtilBatchObjectPool<VectorAggregationBufferRow>(
VectorizedRowBatch.DEFAULT_SIZE,
new VectorUtilBatchObjectPool.IAllocator<VectorAggregationBufferRow>() {
@Override
public VectorAggregationBufferRow alloc() throws HiveException {
return allocateAggregationBuffer();
}
@Override
public void free(VectorAggregationBufferRow t) {
// Nothing to do
}
});
LOG.info("using unsorted streaming aggregation processing mode");
}
@Override
public void setNextVectorBatchGroupStatus(boolean isLastGroupBatch) throws HiveException {
// Do nothing.
}
/**
 * Processes one batch in streaming mode: rows are assumed to arrive (mostly) clustered by
 * key, so each key change flushes the previous key's aggregates. The current key/aggregates
 * are carried across batches in streamingKey/currentStreamingAggregators.
 */
@Override
public void doProcessBatch(VectorizedRowBatch batch, boolean isFirstGroupingSet,
    boolean[] currentGroupingSetsOverrideIsNulls) throws HiveException {

  if (!groupingSetsPresent || isFirstGroupingSet) {
    // Evaluate the key expressions once.
    for(int i = 0; i < keyExpressions.length; ++i) {
      keyExpressions[i].evaluate(batch);
    }
  }

  // First we traverse the batch to evaluate and prepare the KeyWrappers
  // After this the KeyWrappers are properly set and hash code is computed
  if (!groupingSetsPresent) {
    keyWrappersBatch.evaluateBatch(batch);
  } else {
    keyWrappersBatch.evaluateBatchGroupingSets(batch, currentGroupingSetsOverrideIsNulls);
  }

  VectorHashKeyWrapperBase[] batchKeys = keyWrappersBatch.getVectorHashKeyWrappers();

  // Remember the key we entered the batch with so we know whether to deep-copy at the end.
  final VectorHashKeyWrapperBase prevKey = streamingKey;
  if (streamingKey == null) {
    // This is the first batch we process after switching from hash mode
    currentStreamingAggregators = streamAggregationBufferRowPool.getFromPool();
    streamingKey = batchKeys[0];
  }

  aggregationBatchInfo.startBatch();
  int flushMark = 0;

  for(int i = 0; i < batch.size; ++i) {
    if (!batchKeys[i].equals(streamingKey)) {
      // We've encountered a new key, must save current one
      // We can't forward yet, the aggregators have not been evaluated
      rowsToFlush[flushMark] = currentStreamingAggregators;
      keysToFlush[flushMark] = streamingKey;
      currentStreamingAggregators = streamAggregationBufferRowPool.getFromPool();
      streamingKey = batchKeys[i];
      ++flushMark;
    }
    // Map row i to the buffer set accumulating its key.
    aggregationBatchInfo.mapAggregationBufferSet(currentStreamingAggregators, i);
  }

  // evaluate the aggregators
  processAggregators(batch);

  // Now flush/forward all keys/rows, except the last (current) one
  for (int i = 0; i < flushMark; ++i) {
    writeSingleRow(keysToFlush[i], rowsToFlush[i]);
    rowsToFlush[i].reset();
    keysToFlush[i] = null;
    // Reset buffers go back to the pool for reuse by subsequent keys.
    streamAggregationBufferRowPool.putInPool(rowsToFlush[i]);
  }

  if (streamingKey != prevKey) {
    // The current key still points into this batch's key wrappers, which will be overwritten
    // by the next batch; deep-copy it so it survives across batches.
    streamingKey = (VectorHashKeyWrapperBase) streamingKey.copyKey();
  }
}
/**
 * On a clean close, emits the in-flight key and its aggregates. If no batch was ever
 * processed (streamingKey still null) or the operator aborted, nothing is written.
 */
@Override
public void close(boolean aborted) throws HiveException {
  if (aborted) {
    return;
  }
  if (streamingKey != null) {
    writeSingleRow(streamingKey, currentStreamingAggregators);
  }
}
}
/**
* Sorted reduce group batch processing mode. Each input VectorizedRowBatch will have the
* same key. On endGroup (or close), the intermediate values are flushed.
*
* We build the output rows one-at-a-time in the output vectorized row batch (outputBatch)
* in 2 steps:
*
* 1) Just after startGroup, we copy the group key to the next position in the output batch,
* but don't increment the size in the batch (yet). This is done with the copyGroupKey
* method of VectorGroupKeyHelper. The next position is outputBatch.size
*
* We know the same key is used for the whole batch (i.e. repeating) since that is how
* vectorized reduce-shuffle feeds the batches to us.
*
* 2) Later at endGroup after reduce-shuffle has fed us all the input batches for the group,
* we fill in the aggregation columns in outputBatch at outputBatch.size. Our method
* writeGroupRow does this and finally increments outputBatch.size.
*
*/
private class ProcessingModeReduceMergePartial extends ProcessingModeBase {

  // True when the next batch to arrive is the first batch of a new group of batches.
  private boolean first;

  // Whether the most recently announced batch is the last of its group. Starts true so the
  // very first batch received is treated as the start of a group.
  private boolean isLastGroupBatch;

  /**
   * The group vector key helper.
   */
  VectorGroupKeyHelper groupKeyHelper;

  /**
   * The group vector aggregation buffers.
   */
  private VectorAggregationBufferRow groupAggregators;

  /**
   * Buffer to hold string values.
   */
  private DataOutputBuffer buffer;

  @Override
  public void initialize(Configuration hconf) throws HiveException {
    isLastGroupBatch = true;

    // We do not include the dummy grouping set column in the output. So we pass outputKeyLength
    // instead of keyExpressions.length
    groupKeyHelper = new VectorGroupKeyHelper(outputKeyLength);
    groupKeyHelper.init(keyExpressions);
    groupAggregators = allocateAggregationBuffer();
    buffer = new DataOutputBuffer();
    LOG.info("using sorted group batch aggregation processing mode");
  }

  @Override
  public void setNextVectorBatchGroupStatus(boolean isLastGroupBatch) throws HiveException {
    if (this.isLastGroupBatch) {
      // Previous batch was the last of a group of batches. Remember the next is the first batch
      // of a new group of batches.
      first = true;
    }
    this.isLastGroupBatch = isLastGroupBatch;
  }

  @Override
  public void doProcessBatch(VectorizedRowBatch batch, boolean isFirstGroupingSet,
      boolean[] currentGroupingSetsOverrideIsNulls) throws HiveException {
    if (first) {
      // Copy the group key to output batch now. We'll copy in the aggregates at the end of the group.
      first = false;

      // Evaluate the key expressions of just this first batch to get the correct key.
      for (int i = 0; i < outputKeyLength; i++) {
        keyExpressions[i].evaluate(batch);
      }
      groupKeyHelper.copyGroupKey(batch, outputBatch, buffer);
    }

    // Aggregate this batch.
    for (int i = 0; i < aggregators.length; ++i) {
      aggregators[i].aggregateInput(groupAggregators.getAggregationBuffer(i), batch);
    }

    if (isLastGroupBatch) {
      // Group complete: fill in the aggregate columns and recycle the buffers for the next group.
      writeGroupRow(groupAggregators, buffer);
      groupAggregators.reset();
    }
  }

  @Override
  public void close(boolean aborted) throws HiveException {
    // Flush a group that was started (!first) but whose final batch was never announced.
    if (!aborted && !first && !isLastGroupBatch) {
      writeGroupRow(groupAggregators, buffer);
    }
  }
}
/**
 * Current processing mode. Processing mode can change (eg. hash -> streaming);
 * see changeToStreamingMode(). Transient: rebuilt in initializeOp after deserialization.
 */
private transient IProcessingMode processingMode;

private static final long serialVersionUID = 1L;
/**
 * Builds a vectorized GROUP BY operator from its plan descriptor.
 *
 * Derives the output column layout: key columns first (minus the pruned grouping-set id
 * column, if any), followed by one column per aggregation, and creates the output
 * vectorization context accordingly.
 *
 * @param ctx compilation context passed to the base operator
 * @param conf must be a {@link GroupByDesc}
 * @param vContext the input vectorization context
 * @param vectorDesc must be a {@link VectorGroupByDesc}
 * @throws HiveException
 */
public VectorGroupByOperator(CompilationOpContext ctx, OperatorDesc conf,
    VectorizationContext vContext, VectorDesc vectorDesc) throws HiveException {
  this(ctx);
  GroupByDesc desc = (GroupByDesc) conf;
  this.conf = desc;
  this.vContext = vContext;
  this.vectorDesc = (VectorGroupByDesc) vectorDesc;
  keyExpressions = this.vectorDesc.getKeyExpressions();
  vecAggrDescs = this.vectorDesc.getVecAggrDescs();

  // Grouping id should be pruned, which is the last of key columns
  // see ColumnPrunerGroupByProc
  outputKeyLength =
      this.conf.pruneGroupingSetId() ? keyExpressions.length - 1 : keyExpressions.length;

  final int aggregationCount = vecAggrDescs.length;
  final int outputCount = outputKeyLength + aggregationCount;

  // Collect output type info: keys first, then aggregates.
  outputTypeInfos = new TypeInfo[outputCount];
  outputDataTypePhysicalVariations = new DataTypePhysicalVariation[outputCount];
  for (int i = 0; i < outputKeyLength; i++) {
    VectorExpression keyExpression = keyExpressions[i];
    outputTypeInfos[i] = keyExpression.getOutputTypeInfo();
    outputDataTypePhysicalVariations[i] = keyExpression.getOutputDataTypePhysicalVariation();
  }
  for (int i = 0; i < aggregationCount; i++) {
    VectorAggregationDesc vecAggrDesc = vecAggrDescs[i];
    outputTypeInfos[i + outputKeyLength] = vecAggrDesc.getOutputTypeInfo();
    outputDataTypePhysicalVariations[i + outputKeyLength] =
        vecAggrDesc.getOutputDataTypePhysicalVariation();
  }

  vOutContext = new VectorizationContext(getName(), desc.getOutputColumnNames(),
      /* vContextEnvironment */ vContext);
  vOutContext.setInitialTypeInfos(Arrays.asList(outputTypeInfos));
  vOutContext.setInitialDataTypePhysicalVariations(Arrays.asList(outputDataTypePhysicalVariations));
}
/** Kryo ctor. Creates an uninitialized instance for deserialization; not for direct use. */
@VisibleForTesting
public VectorGroupByOperator() {
  super();
}
/** Creates the operator with only the compilation context; fields are set by the main ctor. */
public VectorGroupByOperator(CompilationOpContext ctx) {
  super(ctx);
}
/** Returns the vectorization context describing this operator's input columns. */
@Override
public VectorizationContext getInputVectorizationContext() {
  return vContext;
}
/**
 * Precomputes, for each grouping set, which key columns must be forced to null, plus the
 * constant expression used to overwrite the dummy grouping-set id column. Resets all
 * grouping-set state when grouping sets are absent.
 */
private void setupGroupingSets() {

  groupingSetsPresent = conf.isGroupingSetsPresent();
  if (!groupingSetsPresent) {
    groupingSets = null;
    groupingSetsPosition = -1;
    groupingSetsDummyVectorExpression = null;
    allGroupingSetsOverrideIsNulls = null;
    return;
  }

  groupingSets = ArrayUtils.toPrimitive(conf.getListGroupingSets().toArray(new Long[0]));
  groupingSetsPosition = conf.getGroupingSetPosition();

  allGroupingSetsOverrideIsNulls = new boolean[groupingSets.length][];

  int pos = 0;
  for (long groupingSet: groupingSets) {

    // Create the mapping corresponding to the grouping set

    // Assume all columns are null, except the dummy column is always non-null.
    boolean[] groupingSetsOverrideIsNull = new boolean[keyExpressions.length];
    Arrays.fill(groupingSetsOverrideIsNull, true);
    groupingSetsOverrideIsNull[groupingSetsPosition] = false;

    // Add keys of this grouping set.
    // A clear bit in the grouping-set bitset means the key participates, i.e. keeps its value.
    FastBitSet bitset = GroupByOperator.groupingSet2BitSet(groupingSet, groupingSetsPosition);
    for (int keyPos = bitset.nextClearBit(0); keyPos < groupingSetsPosition;
        keyPos = bitset.nextClearBit(keyPos+1)) {
      groupingSetsOverrideIsNull[keyPos] = false;
    }

    allGroupingSetsOverrideIsNulls[pos] = groupingSetsOverrideIsNull;
    pos++;
  }

  // The last key column is the dummy grouping set id.
  //
  // Figure out which (scratch) column was used so we can overwrite the dummy id.

  groupingSetsDummyVectorExpression = (ConstantVectorExpression) keyExpressions[groupingSetsPosition];
}
/**
 * Initializes the operator: builds the output object inspector, instantiates the vectorized
 * aggregation expressions reflectively, compiles the key-wrapper batch, creates the output
 * row batch, and selects/initializes the processing mode from the vector descriptor.
 */
@Override
protected void initializeOp(Configuration hconf) throws HiveException {
  super.initializeOp(hconf);

  isLlap = LlapProxy.isDaemon();
  VectorExpression.doTransientInit(keyExpressions);

  List<ObjectInspector> objectInspectors = new ArrayList<ObjectInspector>();

  List<ExprNodeDesc> keysDesc = conf.getKeys();
  try {
    List<String> outputFieldNames = conf.getOutputColumnNames();
    // NOTE(review): outputCount appears unused below — candidate for removal; left as-is here.
    final int outputCount = outputFieldNames.size();

    // Object inspectors for the key columns, derived from the key expression descriptors.
    for(int i = 0; i < outputKeyLength; ++i) {
      VectorExpressionWriter vew = VectorExpressionWriterFactory.
          genVectorExpressionWritable(keysDesc.get(i));
      ObjectInspector oi = vew.getObjectInspector();
      objectInspectors.add(oi);
    }

    final int aggregateCount = vecAggrDescs.length;
    aggregators = new VectorAggregateExpression[aggregateCount];
    for (int i = 0; i < aggregateCount; ++i) {
      VectorAggregationDesc vecAggrDesc = vecAggrDescs[i];

      // Each aggregation class must expose a (VectorAggregationDesc) constructor;
      // instantiate it reflectively.
      Class<? extends VectorAggregateExpression> vecAggrClass = vecAggrDesc.getVecAggrClass();

      Constructor<? extends VectorAggregateExpression> ctor = null;
      try {
        ctor = vecAggrClass.getConstructor(VectorAggregationDesc.class);
      } catch (Exception e) {
        throw new HiveException("Constructor " + vecAggrClass.getSimpleName() +
            "(VectorAggregationDesc) not available");
      }
      VectorAggregateExpression vecAggrExpr = null;
      try {
        vecAggrExpr = ctor.newInstance(vecAggrDesc);
      } catch (Exception e) {
        throw new HiveException("Failed to create " + vecAggrClass.getSimpleName() +
            "(VectorAggregationDesc) object ", e);
      }
      VectorExpression.doTransientInit(vecAggrExpr.getInputExpression());
      aggregators[i] = vecAggrExpr;

      ObjectInspector objInsp =
          TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
              vecAggrDesc.getOutputTypeInfo());
      Preconditions.checkState(objInsp != null);
      objectInspectors.add(objInsp);
    }

    keyWrappersBatch = VectorHashKeyWrapperBatch.compileKeyWrapperBatch(keyExpressions);
    aggregationBatchInfo = new VectorAggregationBufferBatch();
    aggregationBatchInfo.compileAggregationBatchInfo(aggregators);

    outputObjInspector = ObjectInspectorFactory.getStandardStructObjectInspector(
        outputFieldNames, objectInspectors);

    vrbCtx = new VectorizedRowBatchCtx(
        outputFieldNames.toArray(new String[0]),
        outputTypeInfos,
        outputDataTypePhysicalVariations,
        /* dataColumnNums */ null,
        /* partitionColumnCount */ 0,
        /* virtualColumnCount */ 0,
        /* neededVirtualColumns */ null,
        vOutContext.getScratchColumnTypeNames(),
        vOutContext.getScratchDataTypePhysicalVariations());

    outputBatch = vrbCtx.createVectorizedRowBatch();

  } catch (HiveException he) {
    throw he;
  } catch (Throwable e) {
    throw new HiveException(e);
  }

  forwardCache = new Object[outputKeyLength + aggregators.length];

  setupGroupingSets();

  // Pick the processing mode requested by the compiler; GLOBAL and MERGE_PARTIAL are
  // incompatible with grouping sets, and GLOBAL implies no key columns.
  switch (vectorDesc.getProcessingMode()) {
  case GLOBAL:
    Preconditions.checkState(outputKeyLength == 0);
    Preconditions.checkState(!groupingSetsPresent);
    processingMode = this.new ProcessingModeGlobalAggregate();
    break;
  case HASH:
    processingMode = this.new ProcessingModeHashAggregate();
    break;
  case MERGE_PARTIAL:
    Preconditions.checkState(!groupingSetsPresent);
    processingMode = this.new ProcessingModeReduceMergePartial();
    break;
  case STREAMING:
    processingMode = this.new ProcessingModeStreaming();
    break;
  default:
    throw new RuntimeException("Unsupported vector GROUP BY processing mode " +
        vectorDesc.getProcessingMode().name());
  }
  processingMode.initialize(hconf);
}
/**
 * Changes the processing mode to streaming.
 *
 * This is done at the request of the hash agg mode, if the number of keys
 * exceeds the minReductionHashAggr factor.
 * @throws HiveException
 */
private void changeToStreamingMode() throws HiveException {
  processingMode = this.new ProcessingModeStreaming();
  // null is safe here: ProcessingModeStreaming.initialize ignores the Configuration argument.
  processingMode.initialize(null);
  LOG.trace("switched to streaming mode");
}
/** Forwards the group-batch boundary notification to the active processing mode. */
@Override
public void setNextVectorBatchGroupStatus(boolean isLastGroupBatch) throws HiveException {
  processingMode.setNextVectorBatchGroupStatus(isLastGroupBatch);
}
/** Group boundaries are handled via setNextVectorBatchGroupStatus; this must never be called. */
@Override
public void startGroup() throws HiveException {

  // We do not call startGroup on operators below because we are batching rows in
  // an output batch and the semantics will not work.
  // super.startGroup();
  throw new HiveException("Unexpected startGroup");
}
/** Group boundaries are handled via setNextVectorBatchGroupStatus; this must never be called. */
@Override
public void endGroup() throws HiveException {

  // We do not call endGroup on operators below because we are batching rows in
  // an output batch and the semantics will not work.
  // super.endGroup();

  // Message previously said "Unexpected startGroup" (copy-paste from startGroup), which
  // misattributed the failure; report the actual method.
  throw new HiveException("Unexpected endGroup");
}
/**
 * Receives one vectorized row batch and hands it to the active processing mode.
 * Empty batches carry no rows and are dropped here.
 */
@Override
public void process(Object row, int tag) throws HiveException {
  final VectorizedRowBatch inputBatch = (VectorizedRowBatch) row;
  if (inputBatch.size <= 0) {
    return;
  }
  processingMode.processBatch(inputBatch);
}
/**
 * Emits a single output row built from a key wrapper and its row aggregation buffers.
 * Key columns are written first, then one column per aggregator; the output batch is
 * forwarded automatically once it fills up.
 *
 * @param kw the row's key (null if keyExpressions.length is 0)
 * @param agg the row's aggregation buffers
 * @throws HiveException
 */
private void writeSingleRow(VectorHashKeyWrapperBase kw, VectorAggregationBufferRow agg)
    throws HiveException {

  final int batchIndex = outputBatch.size;
  int colNum = 0;

  for (int keyIndex = 0; keyIndex < outputKeyLength; keyIndex++) {
    keyWrappersBatch.assignRowColumn(outputBatch, batchIndex, colNum, kw);
    colNum++;
  }

  for (int aggIndex = 0; aggIndex < aggregators.length; aggIndex++) {
    aggregators[aggIndex].assignRowColumn(outputBatch, batchIndex, colNum,
        agg.getAggregationBuffer(aggIndex));
    colNum++;
  }

  if (++outputBatch.size == VectorizedRowBatch.DEFAULT_SIZE) {
    flushOutput();
  }
}
/**
 * Emits a (reduce) group row. The key columns were already copied into the output batch
 * at group start, so only the aggregate columns are filled in here before bumping the
 * batch row count; a full batch is forwarded and the string buffer recycled.
 *
 * @param agg the group's aggregation buffers
 * @param buffer scratch buffer holding string values, reset when the batch is flushed
 * @throws HiveException
 */
private void writeGroupRow(VectorAggregationBufferRow agg, DataOutputBuffer buffer)
    throws HiveException {

  final int batchIndex = outputBatch.size;
  // Aggregate columns start right after the group key columns.
  int colNum = outputKeyLength;

  for (int aggIndex = 0; aggIndex < aggregators.length; aggIndex++) {
    aggregators[aggIndex].assignRowColumn(outputBatch, batchIndex, colNum,
        agg.getAggregationBuffer(aggIndex));
    colNum++;
  }

  if (++outputBatch.size == VectorizedRowBatch.DEFAULT_SIZE) {
    flushOutput();
    buffer.reset();
  }
}
/** Forwards the buffered output batch downstream and resets it for reuse. */
private void flushOutput() throws HiveException {
  vectorForward(outputBatch);
  outputBatch.reset();
}
/**
 * Closes the operator: lets the active processing mode emit any pending rows, then, on a
 * clean close, forwards whatever remains buffered in the output batch.
 */
@Override
public void closeOp(boolean aborted) throws HiveException {
  processingMode.close(aborted);
  if (aborted) {
    return;
  }
  if (outputBatch.size > 0) {
    flushOutput();
  }
}
/** Returns the GROUP BY key expressions. */
public VectorExpression[] getKeyExpressions() {
  return keyExpressions;
}
/** Replaces the GROUP BY key expressions. */
public void setKeyExpressions(VectorExpression[] keyExpressions) {
  this.keyExpressions = keyExpressions;
}
/** Returns the vectorized aggregation expressions (populated in initializeOp). */
public VectorAggregateExpression[] getAggregators() {
  return aggregators;
}
/** Replaces the vectorized aggregation expressions. */
public void setAggregators(VectorAggregateExpression[] aggregators) {
  this.aggregators = aggregators;
}
/** Returns the vectorization context describing this operator's output columns. */
@Override
public VectorizationContext getOutputVectorizationContext() {
  return vOutContext;
}
/** Reports this operator as a GROUPBY, same as the row-mode GroupByOperator. */
@Override
public OperatorType getType() {
  return OperatorType.GROUPBY;
}
/** Returns the short operator name (delegates to the static variant). */
@Override
public String getName() {
  return getOperatorName();
}
/** Returns the short operator name used in plans and logs. */
public static String getOperatorName() {
  return "GBY";
}
/** Returns the vectorization descriptor for this operator. */
@Override
public VectorDesc getVectorDesc() {
  return vectorDesc;
}
@Override
public void configureJobConf(JobConf job) {
  // only needed when grouping sets are present
  if (conf.getGroupingSetPosition() > 0 && GroupByOperator.shouldEmitSummaryRow(conf)) {
    // NOTE(review): presumably forces operators to run even with zero input rows so the
    // grouping-sets summary row is produced — confirm against GroupByOperator.shouldEmitSummaryRow.
    job.setBoolean(Utilities.ENSURE_OPERATORS_EXECUTED, true);
  }
}
/** Returns the maximum memory available to this operator, in bytes. */
public long getMaxMemory() {
  return maxMemory;
}
}