text
stringlengths 7
1.01M
|
|---|
package test;
import static org.junit.Assert.*;
import java.io.Reader;
import java.sql.Connection;
import org.apache.ibatis.cursor.Cursor;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.jdbc.ScriptRunner;
import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;
import org.apache.ibatis.session.SqlSessionFactoryBuilder;
import org.junit.BeforeClass;
import org.junit.Test;
public class SimpleTest {

    private static SqlSessionFactory sqlSessionFactory;

    /** Builds the session factory and loads the in-memory database schema once per test run. */
    @BeforeClass
    public static void setUp() throws Exception {
        // Build an SqlSessionFactory from the XML configuration.
        try (Reader configReader = Resources.getResourceAsReader("test/mybatis-config.xml")) {
            sqlSessionFactory = new SqlSessionFactoryBuilder().build(configReader);
        }
        // Populate the in-memory database from the DDL/DML script.
        try (SqlSession session = sqlSessionFactory.openSession();
                Connection connection = session.getConnection();
                Reader scriptReader = Resources.getResourceAsReader("test/CreateDB.sql")) {
            ScriptRunner scriptRunner = new ScriptRunner(connection);
            scriptRunner.setLogWriter(null);
            scriptRunner.runScript(scriptReader);
        }
    }

    @Test
    public void shouldGetAUser() {
        // Fetching an existing row by id returns the mapped entity.
        try (SqlSession session = sqlSessionFactory.openSession()) {
            User found = session.getMapper(Mapper.class).getUser(1);
            assertEquals("User1", found.getName());
        }
    }

    @Test
    public void shouldInsertAUser() {
        // Insert in one session and commit...
        try (SqlSession session = sqlSessionFactory.openSession()) {
            User newUser = new User();
            newUser.setId(2);
            newUser.setName("User2");
            session.getMapper(Mapper.class).insertUser(newUser);
            session.commit();
        }
        // ...then verify the row is visible from a fresh session.
        try (SqlSession session = sqlSessionFactory.openSession()) {
            User found = session.getMapper(Mapper.class).getUser(2);
            assertEquals("User2", found.getName());
        }
    }

    @Test
    public void shouldListUsers() {
        // The cursor-returning mapper method must yield a non-null cursor.
        try (SqlSession session = sqlSessionFactory.openSession()) {
            Cursor<User> cursor = session.getMapper(Mapper.class).listUsers();
            assertNotNull(cursor);
        }
    }

    @Test
    public void shouldListUsersWithFetchSize() {
        // Same as above, but through the fetchSize-configured statement.
        try (SqlSession session = sqlSessionFactory.openSession()) {
            Cursor<User> cursor = session.getMapper(Mapper.class).listUsersWithFetchSize();
            assertNotNull(cursor);
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.guice;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.InjectableValues;
import com.google.inject.Injector;
import com.google.inject.Key;
import org.apache.druid.java.util.common.IAE;
/**
 * Jackson {@link InjectableValues} implementation that resolves injectable values from a Guice
 * {@link Injector}. Only Guice {@link Key} value ids are supported; any other id type is rejected.
 */
public class GuiceInjectableValues extends InjectableValues
{
  private final Injector injector;

  public GuiceInjectableValues(Injector injector)
  {
    this.injector = injector;
  }

  /**
   * Resolves {@code valueId} against the injector.
   *
   * @throws IAE if {@code valueId} is not a Guice {@link Key}
   */
  @Override
  public Object findInjectableValue(
      Object valueId,
      DeserializationContext ctxt,
      BeanProperty forProperty,
      Object beanInstance
  )
  {
    // From the docs: "Object that identifies value to inject; may be a simple name or more complex identifier object,
    // whatever provider needs"
    // Currently we should only be dealing with `Key` instances, and anything more advanced should be handled with
    // great care
    if (valueId instanceof Key) {
      // Wildcard cast instead of the raw `Key` to avoid an unchecked raw-type usage.
      return injector.getInstance((Key<?>) valueId);
    }
    throw new IAE(
        "Unknown class type [%s] for valueId [%s]",
        valueId.getClass().getCanonicalName(),
        valueId
    );
  }
}
|
package io.stargate.web.docsapi.exception;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.jsfr.json.ErrorHandlingStrategy;
import org.jsfr.json.ParsingContext;
/**
 * {@link ErrorHandlingStrategy} that propagates every parsing error as an unchecked exception:
 * a {@link RuntimeException} is re-thrown as-is, Jackson's {@link JsonProcessingException} is
 * wrapped in {@link UncheckedJacksonException}, and anything else is wrapped in a plain
 * {@link RuntimeException} carrying the original as its cause.
 */
public class RuntimeExceptionPassHandlingStrategy implements ErrorHandlingStrategy {

  @Override
  public void handleParsingException(Exception e) {
    throw translate(e);
  }

  @Override
  public void handleExceptionFromListener(Exception e, ParsingContext context) {
    throw translate(e);
  }

  /**
   * Maps any exception to the {@link RuntimeException} that should be propagated.
   *
   * <p>Returns (rather than throws) so that the {@code throw translate(e)} statements at the
   * call sites are the single, truthful throw points; previously this method always threw and
   * its declared return type was unreachable.
   */
  private RuntimeException translate(Exception e) {
    if (e instanceof RuntimeException) {
      return (RuntimeException) e;
    }
    if (e instanceof JsonProcessingException) { // from Jackson
      return new UncheckedJacksonException((JsonProcessingException) e);
    }
    return new RuntimeException(e.getLocalizedMessage(), e);
  }
}
|
// Copyright (C) 2019 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.googlesource.gerrit.plugins.replication;
/**
 * Immutable outcome of a runway check for a replication push.
 *
 * <p>The state is the pair (allowed, inFlightPushId): an allowed push carries id 0; a canceled
 * push carries id 0 with {@code allowed == false}; an external in-flight push is flagged with
 * id -1; any other id identifies the conflicting in-flight push.
 */
public class RunwayStatus {
  // Sentinel ids reused by the factory methods below.
  private static final int NO_PUSH = 0;
  private static final int EXTERNAL_PUSH = -1;

  private final boolean allowed;
  private final int inFlightPushId;

  private RunwayStatus(boolean allowed, int inFlightPushId) {
    this.allowed = allowed;
    this.inFlightPushId = inFlightPushId;
  }

  /** The push may proceed. */
  public static RunwayStatus allowed() {
    return new RunwayStatus(true, NO_PUSH);
  }

  /** The push was canceled. */
  public static RunwayStatus canceled() {
    return new RunwayStatus(false, NO_PUSH);
  }

  /** The push is denied because the given push is already in flight. */
  public static RunwayStatus denied(int inFlightPushId) {
    return new RunwayStatus(false, inFlightPushId);
  }

  /** The push is denied because an external process holds the runway. */
  public static RunwayStatus deniedExternal() {
    return new RunwayStatus(false, EXTERNAL_PUSH);
  }

  public boolean isAllowed() {
    return allowed;
  }

  public boolean isCanceled() {
    return inFlightPushId == NO_PUSH && !allowed;
  }

  public boolean isExternalInflight() {
    return inFlightPushId == EXTERNAL_PUSH && !allowed;
  }

  public int getInFlightPushId() {
    return inFlightPushId;
  }
}
|
package cz.mlcit.customers.exceptions;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
/**
 * JAX-RS {@link ExceptionMapper} that converts every {@link NotFoundException} into a bare
 * HTTP 404 response with no entity body.
 *
 * <p>Created by Mlcit on 07.02.2017.
 */
@Provider
public class DefaultNotFoundExceptionMapper implements ExceptionMapper<NotFoundException> {

    @Override
    public Response toResponse(NotFoundException exception) {
        // The exception carries no payload worth exposing; answer with the status alone.
        final Response.Status notFound = Response.Status.NOT_FOUND;
        return Response.status(notFound).build();
    }
}
|
/*
Copyright (c) 2021-2022 by Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package ml.dmlc.xgboost4j.gpu.java;
import java.util.ArrayList;
/**
 * Cudf utilities to build cuda array interface against {@link CudfColumn}
 */
class CudfUtils {

  /**
   * Build the cuda array interface based on CudfColumn(s)
   * @param cudfColumns the CudfColumn(s) to be built
   * @return the json format of cuda array interface
   */
  public static String buildArrayInterface(CudfColumn... cudfColumns) {
    return new Builder().add(cudfColumns).build();
  }

  /** Accumulates one JSON fragment per column and joins them into a JSON array literal. */
  private static class Builder {
    private final ArrayList<String> colArrayInterfaces = new ArrayList<String>();

    /** Appends one JSON object per column; at least one column must be supplied. */
    private Builder add(CudfColumn... columns) {
      if (columns == null || columns.length <= 0) {
        throw new IllegalArgumentException("At least one ColumnData is required.");
      }
      for (CudfColumn column : columns) {
        colArrayInterfaces.add(buildColumnObject(column));
      }
      return this;
    }

    /** Joins the accumulated column objects into the final JSON array string. */
    private String build() {
      return "[" + String.join(",", colArrayInterfaces) + "]";
    }

    /** build the whole column information including data and valid info */
    private String buildColumnObject(CudfColumn column) {
      if (column.getDataPtr() == 0) {
        throw new IllegalArgumentException("Empty column data is NOT accepted!");
      }
      if (column.getTypeStr() == null || column.getTypeStr().isEmpty()) {
        throw new IllegalArgumentException("Empty type string is NOT accepted!");
      }
      String dataMeta = buildMetaObject(column.getDataPtr(), column.getShape(),
          column.getTypeStr());
      StringBuilder json = new StringBuilder("{").append(dataMeta);
      // The validity mask is emitted only when a mask buffer exists and there are nulls to mask.
      if (column.getValidPtr() != 0 && column.getNullCount() != 0) {
        String maskMeta = buildMetaObject(column.getValidPtr(), column.getShape(), "<t1");
        json.append(",\"mask\":{").append(maskMeta).append("}");
      }
      return json.append("}").toString();
    }

    /** build the base information of a column */
    private String buildMetaObject(long ptr, long shape, final String typeStr) {
      return "\"shape\":[" + shape + "],"
          + "\"data\":[" + ptr + ",false],"
          + "\"typestr\":\"" + typeStr + "\","
          + "\"version\":1";
    }
  }
}
|
package me.vilsol.nmswrapper.wraps.unparsed;
import me.vilsol.nmswrapper.*;
import me.vilsol.nmswrapper.reflections.*;
import me.vilsol.nmswrapper.wraps.*;
/** Reflective wrapper around the NMS {@code EnumProtocol} class. */
@ReflectiveClass(name = "EnumProtocol")
public class NMSEnumProtocol extends NMSWrap {

    public NMSEnumProtocol(Object nmsObject){
        super(nmsObject);
    }

    /**
     * TODO Find correct name
     * @see net.minecraft.server.v1_9_R1.EnumProtocol#a(net.minecraft.server.v1_9_R1.Packet)
     */
    @ReflectiveMethod(name = "a", types = {NMSPacket.class})
    public NMSEnumProtocol a(NMSPacket packet){
        // Invoke the reflected method and re-wrap its raw result.
        Object raw = NMSWrapper.getInstance().exec(nmsObject, packet);
        return new NMSEnumProtocol(raw);
    }

    /**
     * @see net.minecraft.server.v1_9_R1.EnumProtocol#valueOf(java.lang.String)
     */
    @ReflectiveMethod(name = "valueOf", types = {String.class})
    public NMSEnumProtocol valueOf(String s){
        // Invoke the reflected method and re-wrap its raw result.
        Object raw = NMSWrapper.getInstance().exec(nmsObject, s);
        return new NMSEnumProtocol(raw);
    }
}
|
/*
* Copyright 2019-2020 NASTEL TECHNOLOGIES, INC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jkoolcloud.remora.advices;
import static net.bytebuddy.matcher.ElementMatchers.*;
import java.lang.reflect.Method;
import com.jkoolcloud.remora.RemoraConfig;
import com.jkoolcloud.remora.core.EntryDefinition;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.asm.Advice;
import net.bytebuddy.description.method.MethodDescription;
import net.bytebuddy.description.type.TypeDescription;
import net.bytebuddy.matcher.ElementMatcher;
/**
 * Remora advice that instruments the no-argument {@code initPropertySources()} method on any
 * subtype of {@code org.springframework.web.context.WebApplicationContext}, collecting entry and
 * exit data into an {@link EntryDefinition}.
 *
 * <p>NOTE: the {@code before}/{@code after} bodies are inlined by Byte Buddy into the
 * instrumented method; keep them self-contained and exception-safe.
 */
public class SpringServiceAdvice extends BaseTransformers implements RemoraAdvice {

	public static final String ADVICE_NAME = "SpringServiceAdvice";
	public static final String[] INTERCEPTING_CLASS = { "org.springframework.web.context.WebApplicationContext" };
	public static final String INTERCEPTING_METHOD = "initPropertySources";

	/**
	 * Method matcher intended to match intercepted class method/s to instrument. See (@ElementMatcher) for available
	 * method matches.
	 */
	private static ElementMatcher<? super MethodDescription> methodMatcher() {
		// Only the zero-argument overload is instrumented.
		return named(INTERCEPTING_METHOD).and(takesArguments(0));
	}

	/**
	 * Type matcher should find the class intended for instrumentation See (@ElementMatcher) for available matches.
	 */
	@Override
	public ElementMatcher<TypeDescription> getTypeMatcher() {
		return hasSuperType(named(INTERCEPTING_CLASS[0]));
	}

	@Override
	public AgentBuilder.Transformer getAdvice() {
		return advice;
	}

	// Transformer weaving before()/after() around the matched method. Both this plugin's class
	// loader and the configured Remora class loader are included so the advice classes resolve.
	static AgentBuilder.Transformer.ForAdvice advice = new AgentBuilder.Transformer.ForAdvice()
			.include(SpringServiceAdvice.class.getClassLoader()).include(RemoraConfig.INSTANCE.classLoader)//
			.advice(methodMatcher(), SpringServiceAdvice.class.getName());

	/**
	 * Advices before method is called before instrumented method code
	 *
	 * @param thiz
	 *            reference to method object
	 * @param arguments
	 *            arguments provided for method
	 * @param method
	 *            instrumented method description
	 * @param ed
	 *            {@link EntryDefinition} for collecting ant passing values to
	 *            {@link com.jkoolcloud.remora.core.output.OutputManager}
	 * @param startTime
	 *            method startTime
	 *
	 */
	@Advice.OnMethodEnter
	public static void before(@Advice.This Object thiz, //
			@Advice.AllArguments Object[] arguments, //
			@Advice.Origin Method method, //
			@Advice.Local("ed") EntryDefinition ed, @Advice.Local("context") InterceptionContext ctx, //
			@Advice.Local("startTime") long startTime) {
		try {
			ctx = prepareIntercept(SpringServiceAdvice.class, thiz, method, arguments);
			if (!ctx.intercept) {
				// interception disabled for this call; leave ed/startTime untouched
				return;
			}
			ed = getEntryDefinition(ed, SpringServiceAdvice.class, ctx);
			startTime = fillDefaultValuesBefore(ed, stackThreadLocal, thiz, method, ctx);
		} catch (Throwable t) {
			// advice must never break the instrumented application
			handleAdviceException(t, ctx);
		}
	}

	/**
	 * Method called on instrumented method finished.
	 *
	 * @param obj
	 *            reference to method object
	 * @param method
	 *            instrumented method description
	 * @param arguments
	 *            arguments provided for method
	 * @param exception
	 *            exception thrown in method exit (not caught)
	 * @param ed
	 *            {@link EntryDefinition} passed along the method (from before method)
	 * @param startTime
	 *            startTime passed along the method
	 */
	@Advice.OnMethodExit(onThrowable = Throwable.class)
	public static void after(@Advice.This Object obj, //
			@Advice.Origin Method method, //
			@Advice.AllArguments Object[] arguments, //
			// @Advice.Return Object returnValue, // //TODO needs separate Advice capture for void type
			@Advice.Thrown Throwable exception, @Advice.Local("ed") EntryDefinition ed,
			@Advice.Local("context") InterceptionContext ctx, //
			@Advice.Local("startTime") long startTime) {
		boolean doFinally = true;
		try {
			ctx = prepareIntercept(SpringServiceAdvice.class, obj, method, arguments);
			if (!ctx.intercept) {
				return;
			}
			// Guard against a missing EntryDefinition (e.g. enter advice bailed out early).
			doFinally = checkEntryDefinition(ed, ctx);
			fillDefaultValuesAfter(ed, startTime, exception, ctx);
		} catch (Throwable t) {
			handleAdviceException(t, ctx);
		} finally {
			if (doFinally) {
				doFinally(ctx, obj.getClass());
			}
		}
	}

	@Override
	public String getName() {
		return ADVICE_NAME;
	}
}
|
package com.envisioniot.enos.iot_mqtt_sdk.core.profile;
import com.envisioniot.enos.iot_mqtt_sdk.core.login.LoginInput;
import com.envisioniot.enos.iot_mqtt_sdk.core.login.NormalDeviceLoginInput;
import com.envisioniot.enos.iot_mqtt_sdk.util.GsonUtil;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
/**
 * {@link BaseProfile} backed by a JSON file on disk. The file is read on construction (and on
 * {@link #reload()}); credentials from an optional {@link LoginInput} override the loaded values.
 */
public class FileProfile extends BaseProfile {

    // One logger per class, not per instance.
    private static final Logger logger = LoggerFactory.getLogger(FileProfile.class);

    // Backing file; assigned once in the constructor.
    private final File configFile;

    public FileProfile() {
        this(".config");
    }

    public FileProfile(String filePath) {
        this(filePath, null);
    }

    /**
     * Loads the profile from {@code filePath}; when {@code input} is non-null its connection
     * fields (server URL, product/device key and secret) override the loaded configuration.
     */
    public FileProfile(String filePath, LoginInput input) {
        this.configFile = new File(filePath);
        reload();
        if (input != null) {
            this.config.setServerUrl(input.getServerUrl());
            this.config.setProductKey(input.getProductKey());
            this.config.setProductSecret(input.getProductSecret());
            this.config.setDeviceKey(input.getDeviceKey());
            this.config.setDeviceSecret(input.getDeviceSecret());
        }
    }

    /** Re-reads the backing file, replacing the in-memory config only on a successful parse. */
    @Override
    public void reload() {
        if (!configFile.exists()) {
            return;
        }
        try {
            // Files.toString(File, Charset) is deprecated in Guava; asCharSource is the
            // supported replacement with identical behavior.
            String json = Files.asCharSource(configFile, Charsets.UTF_8).read();
            Config conf = GsonUtil.fromJson(json, Config.class);
            if (conf != null) {
                this.config = conf;
            }
        } catch (IOException e) {
            // Best effort: keep the previous in-memory config if the file cannot be read.
            logger.error("failed to load file [{}] profile", configFile.getAbsolutePath(), e);
        }
    }

    /** Writes the current config to the given path. */
    public void persist(String filePath) throws IOException {
        config.store(filePath, "store config by mqtt sdk");
    }

    /** Writes the current config back to the file this profile was loaded from. */
    public void persist() throws IOException {
        persist(configFile.getPath());
    }

    // NOTE(review): developer smoke test with a hard-coded local Windows path; consider removing
    // it from production sources or taking the path from args.
    public static void main(String[] args) {
        LoginInput input = new NormalDeviceLoginInput("url", "pk", "dk", "secret");
        FileProfile profile = new FileProfile("C:\\Users\\jian.zhang4\\projects\\.config", input);
        try {
            profile.persist();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
/**
* Copyright 2011 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.neo4j.model;
import org.springframework.data.neo4j.annotation.NodeEntity;
/**
 * Marker node entity: a {@link Group} subtype that adds no members of its own.
 */
@NodeEntity
public class SubGroup extends Group {
}
|
package kg.apc.jmeter.vizualizers;
// TODO: rows in settings should have color markers for better experience
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.Frame;
import java.awt.GraphicsEnvironment;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import javax.swing.BorderFactory;
import javax.swing.DefaultCellEditor;
import javax.swing.JButton;
import javax.swing.JComboBox;
import javax.swing.JDialog;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextArea;
import javax.swing.JTextField;
import javax.swing.ListSelectionModel;
import kg.apc.charting.AbstractGraphRow;
import kg.apc.jmeter.JMeterPluginsUtils;
import kg.apc.jmeter.graphs.AbstractOverTimeVisualizer;
import kg.apc.jmeter.gui.ButtonPanelAddCopyRemove;
import kg.apc.jmeter.gui.ComponentBorder;
import kg.apc.jmeter.gui.DialogFactory;
import kg.apc.jmeter.gui.GuiBuilderHelper;
import kg.apc.jmeter.perfmon.PerfMonCollector;
import kg.apc.jmeter.perfmon.PerfMonSampleResult;
import org.apache.jmeter.gui.GuiPackage;
import org.apache.jmeter.gui.util.PowerTableModel;
import org.apache.jmeter.samplers.SampleResult;
import org.apache.jmeter.testelement.TestElement;
import org.apache.jmeter.testelement.property.CollectionProperty;
import org.apache.jmeter.testelement.property.JMeterProperty;
import org.apache.jmeter.testelement.property.NullProperty;
import org.apache.jorphan.logging.LoggingManager;
import org.apache.log.Logger;
/**
 * JMeter visualizer GUI for the PerfMon Metrics Collector: shows an over-time chart of server
 * metrics plus an editable table of agent connections (host, port, metric, parameters) to poll.
 */
public class PerfMonGui
        extends AbstractOverTimeVisualizer {

    // Metric types of the classic agent; newer agent types (TCP/JMX/EXEC/TAIL) are appended in
    // createConnectionsPanel().
    public static final List<String> metrics = Arrays.asList("CPU", "Memory", "Swap", "Disks I/O", "Network I/O");
    private static final Logger log = LoggingManager.getLoggerForClass();
    // Backing model for the connections grid; serialized into the test element on save.
    private PowerTableModel tableModel;
    private JTable grid;
    // Error console, shown only after the first failed sample arrives.
    private JTextArea errorTextArea;
    private JScrollPane errorPane;
    public static final String[] columnIdentifiers = new String[]{
        "Host / IP", "Port", "Metric to collect", "Metric parameter (see help)"
    };
    public static final Class[] columnClasses = new Class[]{
        String.class, String.class, String.class, String.class
    };
    // Row template used by the grid's add/copy buttons.
    private static String[] defaultValues = new String[]{
        "localhost", "4444", "CPU", ""
    };

    public PerfMonGui() {
        super();
        setGranulation(1000); // aggregate samples into 1-second buckets
        graphPanel.getGraphObject().setYAxisLabel("Performance Metrics");
        graphPanel.getGraphObject().getChartSettings().setExpendRows(true);
    }

    @Override
    protected JSettingsPanel createSettingsPanel() {
        // Flags select which setting rows the shared settings panel exposes for this chart.
        return new JSettingsPanel(this,
                JSettingsPanel.GRADIENT_OPTION
                | JSettingsPanel.LIMIT_POINT_OPTION
                | JSettingsPanel.MAXY_OPTION
                | JSettingsPanel.RELATIVE_TIME_OPTION
                | JSettingsPanel.AUTO_EXPAND_OPTION
                | JSettingsPanel.MARKERS_OPTION_DISABLED);
    }

    @Override
    public String getWikiPage() {
        return "PerfMon";
    }

    @Override
    public String getLabelResource() {
        return getClass().getSimpleName();
    }

    @Override
    public String getStaticLabel() {
        return JMeterPluginsUtils.prefixLabel("PerfMon Metrics Collector");
    }

    /** Lays out the top area: connections table, (initially hidden) error pane, file panel. */
    @Override
    protected JPanel getGraphPanelContainer() {
        JPanel panel = new JPanel(new BorderLayout());
        JPanel innerTopPanel = new JPanel(new BorderLayout());
        errorPane = new JScrollPane();
        errorPane.setMinimumSize(new Dimension(100, 50));
        errorPane.setPreferredSize(new Dimension(100, 50));
        errorTextArea = new JTextArea();
        errorTextArea.setForeground(Color.red);
        errorTextArea.setBackground(new Color(255, 255, 153));
        errorTextArea.setEditable(false);
        errorPane.setViewportView(errorTextArea);
        registerPopup();
        innerTopPanel.add(createConnectionsPanel(), BorderLayout.NORTH);
        innerTopPanel.add(errorPane, BorderLayout.SOUTH);
        innerTopPanel.add(getFilePanel(), BorderLayout.CENTER);
        panel.add(innerTopPanel, BorderLayout.NORTH);
        errorPane.setVisible(false); // revealed by addErrorMessage()
        return panel;
    }

    /** Appends a timestamped line to the error console and makes it visible. */
    private void addErrorMessage(String msg, long time) {
        errorPane.setVisible(true);
        SimpleDateFormat formatter = new SimpleDateFormat("HH:mm:ss");
        String newLine = "";
        if (errorTextArea.getText().length() != 0) {
            newLine = "\n";
        }
        errorTextArea.setText(errorTextArea.getText() + newLine + formatter.format(time) + " - ERROR: " + msg);
        errorTextArea.setCaretPosition(errorTextArea.getDocument().getLength());
        updateGui();
    }

    /** Empties and hides the error console. */
    public void clearErrorMessage() {
        errorTextArea.setText("");
        errorPane.setVisible(false);
    }

    /** Installs a right-click menu on the error console that lets the user hide it. */
    private void registerPopup() {
        JPopupMenu popup = new JPopupMenu();
        JMenuItem hideMessagesMenu = new JMenuItem("Hide Error Panel");
        hideMessagesMenu.addActionListener(new HideAction());
        popup.add(hideMessagesMenu);
        errorTextArea.setComponentPopupMenu(popup);
    }

    @Override
    public void clearData() {
        clearErrorMessage();
        super.clearData();
    }

    /** Builds the "Servers to Monitor" panel: editable grid, buttons, metric combo, params wizard. */
    private Component createConnectionsPanel() {
        JPanel panel = new JPanel(new BorderLayout(5, 5));
        panel.setBorder(BorderFactory.createTitledBorder("Servers to Monitor (ServerAgent must be started, see help)"));
        panel.setPreferredSize(new Dimension(150, 150));
        JScrollPane scroll = new JScrollPane(createGrid());
        scroll.setPreferredSize(scroll.getMinimumSize());
        panel.add(scroll, BorderLayout.CENTER);
        panel.add(new ButtonPanelAddCopyRemove(grid, tableModel, defaultValues), BorderLayout.SOUTH);
        List<String> items = new LinkedList<String>(metrics);
        // add metrics from new agent
        items.add("TCP");
        items.add("JMX");
        items.add("EXEC");
        items.add("TAIL");
        JComboBox metricTypesBox = new JComboBox(items.toArray());
        grid.getColumnModel().getColumn(2).setCellEditor(new DefaultCellEditor(metricTypesBox));
        final JTextField wizEditor = new JTextField();
        wizEditor.setBorder(null);
        JButton wiz = new JButton("...");
        // The "..." wizard needs a parent frame, so it is wired up only in a graphical session.
        if (!GraphicsEnvironment.isHeadless()) {
            wiz.addActionListener(new java.awt.event.ActionListener() {
                @Override
                public void actionPerformed(java.awt.event.ActionEvent evt) {
                    Frame parent = GuiPackage.getInstance().getMainFrame();
                    // NOTE(review): assumes a row is selected; getSelectedRow() can return -1 —
                    // confirm the cell editor guarantees a selection when this fires.
                    String type = grid.getValueAt(grid.getSelectedRow(), 2).toString();
                    JPerfmonParamsPanel dlgContent = new JPerfmonParamsPanel(type, wizEditor);
                    dlgContent.setMinWidth(400);
                    JDialog dlg = DialogFactory.getJDialogInstance(parent, "PerfMon [" + type + "] Parameters Helper",
                            true, dlgContent, "/kg/apc/jmeter/vizualizers/wand.png");
                    DialogFactory.centerDialog(parent, dlg);
                    dlg.setVisible(true);
                }
            });
        }
        wiz.setMargin(new Insets(0, 6, 5, 6));
        GuiBuilderHelper.strechItemToComponent(wizEditor, wiz);
        ComponentBorder bd = new ComponentBorder(wiz);
        bd.install(wizEditor);
        grid.getColumnModel().getColumn(3).setCellEditor(new DefaultCellEditor(wizEditor));
        grid.getTableHeader().setReorderingAllowed(false);
        return panel;
    }

    /** Creates the connections table and its column layout; stores it in the grid field. */
    private JTable createGrid() {
        grid = new JTable();
        createTableModel();
        grid.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
        grid.setMinimumSize(new Dimension(200, 100));
        grid.getColumnModel().getColumn(0).setPreferredWidth(170);
        grid.getColumnModel().getColumn(1).setPreferredWidth(80);
        grid.getColumnModel().getColumn(2).setPreferredWidth(120);
        grid.getColumnModel().getColumn(3).setPreferredWidth(500);
        return grid;
    }

    private void createTableModel() {
        tableModel = new PowerTableModel(columnIdentifiers, columnClasses);
        grid.setModel(tableModel);
    }

    @Override
    public TestElement createTestElement() {
        TestElement te = new PerfMonCollector();
        modifyTestElement(te);
        te.setComment(JMeterPluginsUtils.getWikiLinkText(getWikiPage()));
        return te;
    }

    /** Pushes the GUI state (grid rows) into the test element. */
    @Override
    public void modifyTestElement(TestElement te) {
        super.modifyTestElement(te);
        if (grid.isEditing()) {
            // commit any in-progress cell edit so it is not lost on save
            grid.getCellEditor().stopCellEditing();
        }
        if (te instanceof PerfMonCollector) {
            PerfMonCollector pmte = (PerfMonCollector) te;
            CollectionProperty rows = JMeterPluginsUtils.tableModelRowsToCollectionProperty(tableModel, PerfMonCollector.DATA_PROPERTY);
            pmte.setData(rows);
        }
        super.configureTestElement(te);
    }

    /** Restores GUI state (grid rows) from the test element. */
    @Override
    public void configure(TestElement te) {
        super.configure(te);
        // NOTE(review): unlike modifyTestElement, this casts without an instanceof guard —
        // confirm te is always a PerfMonCollector when configure() is called.
        PerfMonCollector pmte = (PerfMonCollector) te;
        JMeterProperty perfmonValues = pmte.getMetricSettings();
        if (!(perfmonValues instanceof NullProperty)) {
            JMeterPluginsUtils.collectionPropertyToTableModelRows((CollectionProperty) perfmonValues, tableModel);
        } else {
            log.warn("Received null property instead of collection");
        }
    }

    /** Successful samples become chart points; failed samples go to the error console. */
    @Override
    public void add(SampleResult res) {
        if (res.isSuccessful()) {
            if (isSampleIncluded(res)) {
                super.add(res);
                addPerfMonRecord(res.getSampleLabel(), normalizeTime(res.getStartTime()), PerfMonSampleResult.getValue(res));
                updateGui(null);
            }
        } else {
            addErrorMessage(res.getResponseMessage(), res.getStartTime());
        }
    }

    /** Adds one (time, value) point to the row named rowName, creating the row on first use. */
    private void addPerfMonRecord(String rowName, long time, double value) {
        AbstractGraphRow row = model.get(rowName);
        if (row == null) {
            row = getNewRow(model, AbstractGraphRow.ROW_AVERAGES, rowName,
                    AbstractGraphRow.MARKER_SIZE_NONE, false, false, false, true, true);
        }
        row.add(time, value);
    }

    /** Popup action that hides the error console. */
    private class HideAction
            implements ActionListener {

        @Override
        public void actionPerformed(ActionEvent e) {
            errorPane.setVisible(false);
            updateGui();
        }
    }
}
|
/*
* Copyright (c) 2020. Red Hat, Inc. and/or its affiliates.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.mvel;
import org.drools.core.base.ClassObjectType;
import org.drools.core.rule.Declaration;
import org.drools.core.rule.Pattern;
import org.drools.model.Index;
import org.drools.model.constraints.SingleConstraint1;
import org.drools.model.constraints.SingleConstraint2;
import org.drools.model.functions.Predicate1;
import org.drools.model.functions.Predicate2;
import org.drools.model.impl.DeclarationImpl;
import org.drools.model.index.AlphaIndexImpl;
import org.drools.modelcompiler.constraints.ConstraintEvaluator;
import org.drools.modelcompiler.constraints.LambdaConstraint;
import org.drools.modelcompiler.util.EvaluationUtil;
import org.drools.mvel.model.Cheese;
public class LambdaConstraintTestUtil {
private LambdaConstraintTestUtil() {
// don't instantiate a util class
}
public static <T> LambdaConstraint createLambdaConstraint1(Class<T> patternClass, Predicate1<T> predicate) {
return createLambdaConstraint1(patternClass, null, predicate, null);
}
public static <T> LambdaConstraint createLambdaConstraint1(Class<T> patternClass, Pattern pattern, Predicate1<T> predicate, Index<T, ?> index) {
String patternName = "GENERATED_$pattern_" + patternClass.getSimpleName();
DeclarationImpl<T> decl = new DeclarationImpl<T>(patternClass, patternName);
SingleConstraint1<T> singleConstraint = new SingleConstraint1<T>(decl, predicate);
singleConstraint.setIndex(index);
Declaration coreDecl = new Declaration(patternName, null, pattern, false);
ConstraintEvaluator constraintEvaluator = new ConstraintEvaluator(new Declaration[]{coreDecl}, singleConstraint);
return new LambdaConstraint(constraintEvaluator);
}
public static <T, A, B> LambdaConstraint createLambdaConstraint2(Class<A> patternClass, Class<B> rightClass, Pattern pattern, Pattern varPattern, String varName, Predicate2<A, B> predicate, Index<T, ?> index) {
String patternName = "GENERATED_$pattern_" + patternClass.getSimpleName();
DeclarationImpl<A> declA = new DeclarationImpl<A>(patternClass, patternName);
DeclarationImpl<B> declB = new DeclarationImpl<B>(rightClass, varName);
SingleConstraint2<A, B> singleConstraint = new SingleConstraint2<A, B>(declA, declB, predicate);
singleConstraint.setIndex(index);
Declaration patternDecl = new Declaration(patternName, Pattern.getReadAcessor(new ClassObjectType(patternClass, false)), pattern, false);
Declaration varDecl = new Declaration(varName, Pattern.getReadAcessor(new ClassObjectType(rightClass, false)), varPattern, false);
ConstraintEvaluator constraintEvaluator = new ConstraintEvaluator(new Declaration[]{patternDecl, varDecl}, singleConstraint);
return new LambdaConstraint(constraintEvaluator);
}
public static LambdaConstraint createCheeseTypeEqualsConstraint(final String rightValue) {
// Typical LambdaConstraint used in drools-test-coverage. (type == "xxx")
Pattern pattern = new Pattern( 0, new ClassObjectType( Cheese.class ) );
Predicate1<Cheese> predicate = new Predicate1.Impl<Cheese>(_this -> EvaluationUtil.areNullSafeEquals(_this.getType(), rightValue));
AlphaIndexImpl<Cheese, String> index = new AlphaIndexImpl<Cheese, String>(String.class, org.drools.model.Index.ConstraintType.EQUAL, 1, _this -> _this.getType(), rightValue);
return createLambdaConstraint1(Cheese.class, pattern, predicate, index);
}
public static LambdaConstraint createCheesePriceEqualsConstraint(final int rightValue) {
// Typical LambdaConstraint used in drools-test-coverage. (price == xxx)
Pattern pattern = new Pattern(0, new ClassObjectType(Cheese.class));
Predicate1<Cheese> predicate = new Predicate1.Impl<Cheese>(_this -> EvaluationUtil.areNullSafeEquals(_this.getPrice(), rightValue));
AlphaIndexImpl<Cheese, Integer> index = new AlphaIndexImpl<Cheese, Integer>(Integer.class, org.drools.model.Index.ConstraintType.EQUAL, 1, _this -> _this.getPrice(), rightValue);
return createLambdaConstraint1(Cheese.class, pattern, predicate, index);
}
public static LambdaConstraint createCheeseCharTypeEqualsConstraint(final char rightValue, int indexId) {
// Typical LambdaConstraint used in drools-test-coverage. indexId is required when the test uses hashKey
Pattern pattern = new Pattern(0, new ClassObjectType(Cheese.class));
Predicate1<Cheese> predicate = new Predicate1.Impl<Cheese>(_this -> EvaluationUtil.areNullSafeEquals(_this.getCharType(), rightValue));
AlphaIndexImpl<Cheese, Character> index = new AlphaIndexImpl<Cheese, Character>(Character.class, org.drools.model.Index.ConstraintType.EQUAL, indexId, _this -> _this.getCharType(), (char) rightValue);
return LambdaConstraintTestUtil.createLambdaConstraint1(Cheese.class, pattern, predicate, index);
}
/**
 * Builds the typical drools-test-coverage alpha constraint: charObjectType == 'x'
 * (boxed {@link Character} getter variant).
 *
 * @param rightValue the char literal to compare against (autoboxed for the index key)
 * @param indexId    required when the test uses hashKey; distinguishes alpha indexes
 */
public static LambdaConstraint createCheeseCharObjectTypeEqualsConstraint(final char rightValue, int indexId) {
    Pattern pattern = new Pattern(0, new ClassObjectType(Cheese.class));
    Predicate1<Cheese> predicate = new Predicate1.Impl<Cheese>(_this -> EvaluationUtil.areNullSafeEquals(_this.getCharObjectType(), rightValue));
    // rightValue is already a char, so the previous (char) cast was redundant.
    AlphaIndexImpl<Cheese, Character> index = new AlphaIndexImpl<Cheese, Character>(Character.class, org.drools.model.Index.ConstraintType.EQUAL, indexId, _this -> _this.getCharObjectType(), rightValue);
    // Call the factory unqualified, consistent with the sibling create* methods above.
    return createLambdaConstraint1(Cheese.class, pattern, predicate, index);
}
}
|
// Copyright 2013 The Flutter Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package io.flutter.plugin.editing;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Rect;
import android.os.Build;
import android.provider.Settings;
import android.text.Editable;
import android.text.InputType;
import android.text.Selection;
import android.util.SparseArray;
import android.view.View;
import android.view.ViewStructure;
import android.view.autofill.AutofillId;
import android.view.autofill.AutofillManager;
import android.view.autofill.AutofillValue;
import android.view.inputmethod.BaseInputConnection;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;
import android.view.inputmethod.InputMethodManager;
import android.view.inputmethod.InputMethodSubtype;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import io.flutter.embedding.engine.systemchannels.TextInputChannel;
import io.flutter.plugin.platform.PlatformViewsController;
import java.util.HashMap;
/** Android implementation of the text input plugin. */
public class TextInputPlugin {
@NonNull private final View mView;
@NonNull private final InputMethodManager mImm;
@NonNull private final AutofillManager afm;
@NonNull private final TextInputChannel textInputChannel;
@NonNull private InputTarget inputTarget = new InputTarget(InputTarget.Type.NO_TARGET, 0);
@Nullable private TextInputChannel.Configuration configuration;
@Nullable private SparseArray<TextInputChannel.Configuration> mAutofillConfigurations;
@Nullable private Editable mEditable;
private boolean mRestartInputPending;
@Nullable private InputConnection lastInputConnection;
@NonNull private PlatformViewsController platformViewsController;
@Nullable private Rect lastClientRect;
private final boolean restartAlwaysRequired;
// When true following calls to createInputConnection will return the cached lastInputConnection
// if the input
// target is a platform view. See the comments on lockPlatformViewInputConnection for more
// details.
private boolean isInputConnectionLocked;
public TextInputPlugin(
View view,
@NonNull TextInputChannel textInputChannel,
@NonNull PlatformViewsController platformViewsController) {
mView = view;
mImm = (InputMethodManager) view.getContext().getSystemService(Context.INPUT_METHOD_SERVICE);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
afm = view.getContext().getSystemService(AutofillManager.class);
} else {
afm = null;
}
this.textInputChannel = textInputChannel;
textInputChannel.setTextInputMethodHandler(
new TextInputChannel.TextInputMethodHandler() {
@Override
public void show() {
showTextInput(mView);
}
@Override
public void hide() {
hideTextInput(mView);
}
@Override
public void requestAutofill() {
notifyViewEntered();
}
@Override
public void setClient(
int textInputClientId, TextInputChannel.Configuration configuration) {
setTextInputClient(textInputClientId, configuration);
}
@Override
public void setPlatformViewClient(int platformViewId) {
setPlatformViewTextInputClient(platformViewId);
}
@Override
public void setEditingState(TextInputChannel.TextEditState editingState) {
setTextInputEditingState(mView, editingState);
}
@Override
public void setEditableSizeAndTransform(double width, double height, double[] transform) {
saveEditableSizeAndTransform(width, height, transform);
}
@Override
public void clearClient() {
clearTextInputClient();
}
});
textInputChannel.requestExistingInputState();
this.platformViewsController = platformViewsController;
this.platformViewsController.attachTextInputPlugin(this);
restartAlwaysRequired = isRestartAlwaysRequired();
}
@NonNull
public InputMethodManager getInputMethodManager() {
return mImm;
}
@VisibleForTesting
Editable getEditable() {
return mEditable;
}
/**
* * Use the current platform view input connection until unlockPlatformViewInputConnection is
* called.
*
* <p>The current input connection instance is cached and any following call to @{link
* createInputConnection} returns the cached connection until unlockPlatformViewInputConnection is
* called.
*
* <p>This is a no-op if the current input target isn't a platform view.
*
* <p>This is used to preserve an input connection when moving a platform view from one virtual
* display to another.
*/
public void lockPlatformViewInputConnection() {
if (inputTarget.type == InputTarget.Type.PLATFORM_VIEW) {
isInputConnectionLocked = true;
}
}
/**
* Unlocks the input connection.
*
* <p>See also: @{link lockPlatformViewInputConnection}.
*/
public void unlockPlatformViewInputConnection() {
isInputConnectionLocked = false;
}
/**
* Detaches the text input plugin from the platform views controller.
*
* <p>The TextInputPlugin instance should not be used after calling this.
*/
public void destroy() {
platformViewsController.detachTextInputPlugin();
textInputChannel.setTextInputMethodHandler(null);
}
private static int inputTypeFromTextInputType(
TextInputChannel.InputType type,
boolean obscureText,
boolean autocorrect,
boolean enableSuggestions,
TextInputChannel.TextCapitalization textCapitalization) {
if (type.type == TextInputChannel.TextInputType.DATETIME) {
return InputType.TYPE_CLASS_DATETIME;
} else if (type.type == TextInputChannel.TextInputType.NUMBER) {
int textType = InputType.TYPE_CLASS_NUMBER;
if (type.isSigned) {
textType |= InputType.TYPE_NUMBER_FLAG_SIGNED;
}
if (type.isDecimal) {
textType |= InputType.TYPE_NUMBER_FLAG_DECIMAL;
}
return textType;
} else if (type.type == TextInputChannel.TextInputType.PHONE) {
return InputType.TYPE_CLASS_PHONE;
}
int textType = InputType.TYPE_CLASS_TEXT;
if (type.type == TextInputChannel.TextInputType.MULTILINE) {
textType |= InputType.TYPE_TEXT_FLAG_MULTI_LINE;
} else if (type.type == TextInputChannel.TextInputType.EMAIL_ADDRESS) {
textType |= InputType.TYPE_TEXT_VARIATION_EMAIL_ADDRESS;
} else if (type.type == TextInputChannel.TextInputType.URL) {
textType |= InputType.TYPE_TEXT_VARIATION_URI;
} else if (type.type == TextInputChannel.TextInputType.VISIBLE_PASSWORD) {
textType |= InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD;
} else if (type.type == TextInputChannel.TextInputType.NAME) {
textType |= InputType.TYPE_TEXT_VARIATION_PERSON_NAME;
} else if (type.type == TextInputChannel.TextInputType.POSTAL_ADDRESS) {
textType |= InputType.TYPE_TEXT_VARIATION_POSTAL_ADDRESS;
}
if (obscureText) {
// Note: both required. Some devices ignore TYPE_TEXT_FLAG_NO_SUGGESTIONS.
textType |= InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS;
textType |= InputType.TYPE_TEXT_VARIATION_PASSWORD;
} else {
if (autocorrect) textType |= InputType.TYPE_TEXT_FLAG_AUTO_CORRECT;
if (!enableSuggestions) textType |= InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS;
}
if (textCapitalization == TextInputChannel.TextCapitalization.CHARACTERS) {
textType |= InputType.TYPE_TEXT_FLAG_CAP_CHARACTERS;
} else if (textCapitalization == TextInputChannel.TextCapitalization.WORDS) {
textType |= InputType.TYPE_TEXT_FLAG_CAP_WORDS;
} else if (textCapitalization == TextInputChannel.TextCapitalization.SENTENCES) {
textType |= InputType.TYPE_TEXT_FLAG_CAP_SENTENCES;
}
return textType;
}
public InputConnection createInputConnection(View view, EditorInfo outAttrs) {
if (inputTarget.type == InputTarget.Type.NO_TARGET) {
lastInputConnection = null;
return null;
}
if (inputTarget.type == InputTarget.Type.PLATFORM_VIEW) {
if (isInputConnectionLocked) {
return lastInputConnection;
}
lastInputConnection =
platformViewsController
.getPlatformViewById(inputTarget.id)
.onCreateInputConnection(outAttrs);
return lastInputConnection;
}
outAttrs.inputType =
inputTypeFromTextInputType(
configuration.inputType,
configuration.obscureText,
configuration.autocorrect,
configuration.enableSuggestions,
configuration.textCapitalization);
outAttrs.imeOptions = EditorInfo.IME_FLAG_NO_FULLSCREEN;
int enterAction;
if (configuration.inputAction == null) {
// If an explicit input action isn't set, then default to none for multi-line fields
// and done for single line fields.
enterAction =
(InputType.TYPE_TEXT_FLAG_MULTI_LINE & outAttrs.inputType) != 0
? EditorInfo.IME_ACTION_NONE
: EditorInfo.IME_ACTION_DONE;
} else {
enterAction = configuration.inputAction;
}
if (configuration.actionLabel != null) {
outAttrs.actionLabel = configuration.actionLabel;
outAttrs.actionId = enterAction;
}
outAttrs.imeOptions |= enterAction;
InputConnectionAdaptor connection =
new InputConnectionAdaptor(view, inputTarget.id, textInputChannel, mEditable, outAttrs);
outAttrs.initialSelStart = Selection.getSelectionStart(mEditable);
outAttrs.initialSelEnd = Selection.getSelectionEnd(mEditable);
lastInputConnection = connection;
return lastInputConnection;
}
@Nullable
public InputConnection getLastInputConnection() {
return lastInputConnection;
}
/**
* Clears a platform view text input client if it is the current input target.
*
* <p>This is called when a platform view is disposed to make sure we're not hanging to a stale
* input connection.
*/
public void clearPlatformViewClient(int platformViewId) {
if (inputTarget.type == InputTarget.Type.PLATFORM_VIEW && inputTarget.id == platformViewId) {
inputTarget = new InputTarget(InputTarget.Type.NO_TARGET, 0);
hideTextInput(mView);
mImm.restartInput(mView);
mRestartInputPending = false;
}
}
private void showTextInput(View view) {
view.requestFocus();
mImm.showSoftInput(view, 0);
}
private void hideTextInput(View view) {
notifyViewExited();
// Note: a race condition may lead to us hiding the keyboard here just after a platform view has
// shown it.
// This can only potentially happen when switching focus from a Flutter text field to a platform
// view's text
// field(by text field here I mean anything that keeps the keyboard open).
// See: https://github.com/flutter/flutter/issues/34169
mImm.hideSoftInputFromWindow(view.getApplicationWindowToken(), 0);
}
private void notifyViewEntered() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O || afm == null || !needsAutofill()) {
return;
}
final String triggerIdentifier = configuration.autofill.uniqueIdentifier;
final int[] offset = new int[2];
mView.getLocationOnScreen(offset);
Rect rect = new Rect(lastClientRect);
rect.offset(offset[0], offset[1]);
afm.notifyViewEntered(mView, triggerIdentifier.hashCode(), rect);
}
private void notifyViewExited() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O
|| afm == null
|| configuration == null
|| configuration.autofill == null) {
return;
}
final String triggerIdentifier = configuration.autofill.uniqueIdentifier;
afm.notifyViewExited(mView, triggerIdentifier.hashCode());
}
private void notifyValueChanged(String newValue) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O || afm == null || !needsAutofill()) {
return;
}
final String triggerIdentifier = configuration.autofill.uniqueIdentifier;
afm.notifyValueChanged(mView, triggerIdentifier.hashCode(), AutofillValue.forText(newValue));
}
@VisibleForTesting
void setTextInputClient(int client, TextInputChannel.Configuration configuration) {
inputTarget = new InputTarget(InputTarget.Type.FRAMEWORK_CLIENT, client);
updateAutofillConfigurationIfNeeded(configuration);
mEditable = Editable.Factory.getInstance().newEditable("");
// setTextInputClient will be followed by a call to setTextInputEditingState.
// Do a restartInput at that time.
mRestartInputPending = true;
unlockPlatformViewInputConnection();
lastClientRect = null;
}
private void setPlatformViewTextInputClient(int platformViewId) {
// We need to make sure that the Flutter view is focused so that no imm operations get short
// circuited.
// Not asking for focus here specifically manifested in a but on API 28 devices where the
// platform view's
// request to show a keyboard was ignored.
mView.requestFocus();
inputTarget = new InputTarget(InputTarget.Type.PLATFORM_VIEW, platformViewId);
mImm.restartInput(mView);
mRestartInputPending = false;
}
private void applyStateToSelection(TextInputChannel.TextEditState state) {
int selStart = state.selectionStart;
int selEnd = state.selectionEnd;
if (selStart >= 0
&& selStart <= mEditable.length()
&& selEnd >= 0
&& selEnd <= mEditable.length()) {
Selection.setSelection(mEditable, selStart, selEnd);
} else {
Selection.removeSelection(mEditable);
}
}
@VisibleForTesting
void setTextInputEditingState(View view, TextInputChannel.TextEditState state) {
// Always replace the contents of mEditable if the text differs
if (!state.text.equals(mEditable.toString())) {
mEditable.replace(0, mEditable.length(), state.text);
}
notifyValueChanged(mEditable.toString());
// Always apply state to selection which handles updating the selection if needed.
applyStateToSelection(state);
InputConnection connection = getLastInputConnection();
if (connection != null && connection instanceof InputConnectionAdaptor) {
((InputConnectionAdaptor) connection).markDirty();
}
// Use updateSelection to update imm on selection if it is not neccessary to restart.
if (!restartAlwaysRequired && !mRestartInputPending) {
mImm.updateSelection(
mView,
Math.max(Selection.getSelectionStart(mEditable), 0),
Math.max(Selection.getSelectionEnd(mEditable), 0),
BaseInputConnection.getComposingSpanStart(mEditable),
BaseInputConnection.getComposingSpanEnd(mEditable));
// Restart if there is a pending restart or the device requires a force restart
// (see isRestartAlwaysRequired). Restarting will also update the selection.
} else {
mImm.restartInput(view);
mRestartInputPending = false;
}
}
private interface MinMax {
void inspect(double x, double y);
}
private void saveEditableSizeAndTransform(double width, double height, double[] matrix) {
final double[] minMax = new double[4]; // minX, maxX, minY, maxY.
final boolean isAffine = matrix[3] == 0 && matrix[7] == 0 && matrix[15] == 1;
minMax[0] = minMax[1] = matrix[12] / matrix[15]; // minX and maxX.
minMax[2] = minMax[3] = matrix[13] / matrix[15]; // minY and maxY.
final MinMax finder =
new MinMax() {
@Override
public void inspect(double x, double y) {
final double w = isAffine ? 1 : 1 / (matrix[3] * x + matrix[7] * y + matrix[15]);
final double tx = (matrix[0] * x + matrix[4] * y + matrix[12]) * w;
final double ty = (matrix[1] * x + matrix[5] * y + matrix[13]) * w;
if (tx < minMax[0]) {
minMax[0] = tx;
} else if (tx > minMax[1]) {
minMax[1] = tx;
}
if (ty < minMax[2]) {
minMax[2] = ty;
} else if (ty > minMax[3]) {
minMax[3] = ty;
}
}
};
finder.inspect(width, 0);
finder.inspect(width, height);
finder.inspect(0, height);
final Float density = mView.getContext().getResources().getDisplayMetrics().density;
lastClientRect =
new Rect(
(int) (minMax[0] * density),
(int) (minMax[2] * density),
(int) Math.ceil(minMax[1] * density),
(int) Math.ceil(minMax[3] * density));
}
private void updateAutofillConfigurationIfNeeded(TextInputChannel.Configuration configuration) {
notifyViewExited();
this.configuration = configuration;
final TextInputChannel.Configuration[] configurations = configuration.fields;
if (configuration.autofill == null) {
// Disables autofill if the configuration doesn't have an autofill field.
mAutofillConfigurations = null;
return;
}
mAutofillConfigurations = new SparseArray<>();
if (configurations == null) {
mAutofillConfigurations.put(
configuration.autofill.uniqueIdentifier.hashCode(), configuration);
} else {
for (TextInputChannel.Configuration config : configurations) {
TextInputChannel.Configuration.Autofill autofill = config.autofill;
if (autofill == null) {
continue;
}
mAutofillConfigurations.put(autofill.uniqueIdentifier.hashCode(), config);
}
}
}
private boolean needsAutofill() {
return mAutofillConfigurations != null;
}
public void onProvideAutofillVirtualStructure(ViewStructure structure, int flags) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O || !needsAutofill()) {
return;
}
final String triggerIdentifier = configuration.autofill.uniqueIdentifier;
final AutofillId parentId = structure.getAutofillId();
for (int i = 0; i < mAutofillConfigurations.size(); i++) {
final int autofillId = mAutofillConfigurations.keyAt(i);
final TextInputChannel.Configuration config = mAutofillConfigurations.valueAt(i);
final TextInputChannel.Configuration.Autofill autofill = config.autofill;
if (autofill == null) {
continue;
}
structure.addChildCount(1);
final ViewStructure child = structure.newChild(i);
child.setAutofillId(parentId, autofillId);
child.setAutofillValue(AutofillValue.forText(autofill.editState.text));
child.setAutofillHints(autofill.hints);
child.setAutofillType(View.AUTOFILL_TYPE_TEXT);
child.setVisibility(View.VISIBLE);
}
}
public void autofill(SparseArray<AutofillValue> values) {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.O) {
return;
}
final TextInputChannel.Configuration.Autofill currentAutofill = configuration.autofill;
if (currentAutofill == null) {
return;
}
final HashMap<String, TextInputChannel.TextEditState> editingValues = new HashMap<>();
for (int i = 0; i < values.size(); i++) {
int virtualId = values.keyAt(i);
final TextInputChannel.Configuration config = mAutofillConfigurations.get(virtualId);
if (config == null || config.autofill == null) {
continue;
}
final TextInputChannel.Configuration.Autofill autofill = config.autofill;
final String value = values.valueAt(i).getTextValue().toString();
final TextInputChannel.TextEditState newState =
new TextInputChannel.TextEditState(value, value.length(), value.length());
// The value of the currently focused text field needs to be updated.
if (autofill.uniqueIdentifier.equals(currentAutofill.uniqueIdentifier)) {
setTextInputEditingState(mView, newState);
}
editingValues.put(autofill.uniqueIdentifier, newState);
}
textInputChannel.updateEditingStateWithTag(inputTarget.id, editingValues);
}
// Samsung's Korean keyboard has a bug where it always attempts to combine characters based on
// its internal state, ignoring if and when the cursor is moved programmatically. The same bug
// also causes non-korean keyboards to occasionally duplicate text when tapping in the middle
// of existing text to edit it.
//
// Fully restarting the IMM works around this because it flushes the keyboard's internal state
// and stops it from trying to incorrectly combine characters. However this also has some
// negative performance implications, so we don't want to apply this workaround in every case.
@SuppressLint("NewApi") // New API guard is inline, the linter can't see it.
@SuppressWarnings("deprecation")
private boolean isRestartAlwaysRequired() {
InputMethodSubtype subtype = mImm.getCurrentInputMethodSubtype();
// Impacted devices all shipped with Android Lollipop or newer.
if (subtype == null
|| Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP
|| !Build.MANUFACTURER.equals("samsung")) {
return false;
}
String keyboardName =
Settings.Secure.getString(
mView.getContext().getContentResolver(), Settings.Secure.DEFAULT_INPUT_METHOD);
// The Samsung keyboard is called "com.sec.android.inputmethod/.SamsungKeypad" but look
// for "Samsung" just in case Samsung changes the name of the keyboard.
return keyboardName.contains("Samsung");
}
private void clearTextInputClient() {
if (inputTarget.type == InputTarget.Type.PLATFORM_VIEW) {
// Focus changes in the framework tree have no guarantees on the order focus nodes are
// notified. A node
// that lost focus may be notified before or after a node that gained focus.
// When moving the focus from a Flutter text field to an AndroidView, it is possible that the
// Flutter text
// field's focus node will be notified that it lost focus after the AndroidView was notified
// that it gained
// focus. When this happens the text field will send a clearTextInput command which we ignore.
// By doing this we prevent the framework from clearing a platform view input client(the only
// way to do so
// is to set a new framework text client). I don't see an obvious use case for "clearing" a
// platform views
// text input client, and it may be error prone as we don't know how the platform view manages
// the input
// connection and we probably shouldn't interfere.
// If we ever want to allow the framework to clear a platform view text client we should
// probably consider
// changing the focus manager such that focus nodes that lost focus are notified before focus
// nodes that
// gained focus as part of the same focus event.
return;
}
inputTarget = new InputTarget(InputTarget.Type.NO_TARGET, 0);
unlockPlatformViewInputConnection();
notifyViewExited();
lastClientRect = null;
}
private static class InputTarget {
enum Type {
NO_TARGET,
// InputConnection is managed by the TextInputPlugin, and events are forwarded to the Flutter
// framework.
FRAMEWORK_CLIENT,
// InputConnection is managed by an embedded platform view.
PLATFORM_VIEW
}
public InputTarget(@NonNull Type type, int id) {
this.type = type;
this.id = id;
}
@NonNull Type type;
// The ID of the input target.
//
// For framework clients this is the framework input connection client ID.
// For platform views this is the platform view's ID.
int id;
}
}
|
package in.clouthink.daas.sbb.attachment.rest.controller;
import in.clouthink.daas.sbb.account.domain.model.User;
import in.clouthink.daas.sbb.attachment.rest.dto.*;
import in.clouthink.daas.sbb.attachment.rest.support.AttachmentRestSupport;
import in.clouthink.daas.sbb.security.SecurityContexts;
import in.clouthink.daas.sbb.shared.domain.request.impl.PageQueryParameter;
import in.clouthink.daas.fss.core.FileObject;
import in.clouthink.daas.fss.rest.UploadFileRequest;
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.web.bind.annotation.*;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
@Api("附件发布管理")
@RestController
@RequestMapping("/api")
public class AttachmentRestController {

    @Autowired
    private AttachmentRestSupport attachmentsRestSupport;

    /**
     * Avatar uploads get special handling: after the upload, the avatar is cropped and zoomed.
     *
     * @param uploadFileRequest the file-upload payload; its uploadedBy field is stamped with the
     *                          current user's username before delegating
     * @param request           the raw servlet request carrying the multipart body
     * @param response          the servlet response
     * @return the id of the stored file object
     * @throws IOException if reading or storing the uploaded file fails
     */
    @ApiOperation(value = "上传头像(上传后对头像进行crop和zoom操作)")
    @RequestMapping(value = "/attachments/avatar", method = RequestMethod.POST)
    @ResponseBody
    public String uploadAvatar(UploadFileRequest uploadFileRequest,
                               HttpServletRequest request,
                               HttpServletResponse response) throws IOException {
        User user = (User) SecurityContexts.getContext().requireUser();
        uploadFileRequest.setUploadedBy(user.getUsername());
        FileObject fileObject = attachmentsRestSupport.uploadAvatar(uploadFileRequest, request, response);
        return fileObject.getId();
    }

    /** Lists attachments with paging and dynamic filtering (by name and category). */
    @ApiOperation(value = "附件列表,支持分页,支持动态查询(按名称,分类查询)")
    @RequestMapping(value = "/attachments", method = RequestMethod.GET)
    public Page<AttachmentSummary> listAttachmentSummaryPage(AttachmentQueryParameter queryRequest) {
        return attachmentsRestSupport.listAttachment(queryRequest);
    }

    /** Returns the detail view of a single attachment. */
    @ApiOperation(value = "查看附件详情")
    @RequestMapping(value = "/attachments/{id}", method = RequestMethod.GET)
    public AttachmentDetail getAttachmentDetail(@PathVariable String id) {
        return attachmentsRestSupport.getAttachmentDetail(id);
    }

    /**
     * Creates an attachment record. The file itself must already have been uploaded via daas-fss;
     * this endpoint links the resulting file metadata with the business data.
     */
    @ApiOperation(value = "创建附件(前提已经调用daas-fss上传文件得到文件的metadata,然后和业务数据放到一起)")
    @RequestMapping(value = "/attachments", method = RequestMethod.POST)
    public String createAttachment(@RequestBody SaveAttachmentParameter request) {
        User user = (User) SecurityContexts.getContext().requireUser();
        return attachmentsRestSupport.createAttachment(request, user);
    }

    /** Updates attachment metadata (name, category, …); published attachments cannot be modified. */
    @ApiOperation(value = "修改附件信息(名称,分类等),已发布的附件不能修改")
    @RequestMapping(value = "/attachments/{id}", method = RequestMethod.POST)
    public void updateNew(@PathVariable String id, @RequestBody SaveAttachmentParameter request) {
        User user = (User) SecurityContexts.getContext().requireUser();
        attachmentsRestSupport.updateAttachment(id, request, user);
    }

    /** Deletes an attachment; published attachments cannot be deleted. */
    @ApiOperation(value = "删除附件(已发布的附件不能删除)")
    @RequestMapping(value = "/attachments/{id}", method = RequestMethod.DELETE)
    public void deleteAttachment(@PathVariable String id) {
        User user = (User) SecurityContexts.getContext().requireUser();
        attachmentsRestSupport.deleteAttachment(id, user);
    }

    /** Publishes an attachment; re-publishing an already published attachment is a no-op. */
    @ApiOperation(value = "发布附件(重复发布自动忽略)")
    @RequestMapping(value = "/attachments/{id}/publish", method = RequestMethod.POST)
    public void publishAttachment(@PathVariable String id) {
        User user = (User) SecurityContexts.getContext().requireUser();
        attachmentsRestSupport.publishAttachment(id, user);
    }

    /** Unpublishes an attachment; repeating the operation is a no-op. */
    @ApiOperation(value = "取消发布附件(重复取消自动忽略)")
    @RequestMapping(value = "/attachments/{id}/unpublish", method = RequestMethod.POST)
    public void unpublishAttachment(@PathVariable String id) {
        User user = (User) SecurityContexts.getContext().requireUser();
        attachmentsRestSupport.unpublishAttachment(id, user);
    }

    /** Streams the attachment content to the response and records the download. */
    @ApiOperation(value = "下载附件")
    @RequestMapping(value = {"/attachments/{id}/download"}, method = RequestMethod.GET)
    public void downloadAttachment(@PathVariable String id, HttpServletResponse response) throws IOException {
        User user = (User) SecurityContexts.getContext().requireUser();
        attachmentsRestSupport.downloadAttachment(id, user, response);
    }

    /** Lists the download history of an attachment, paged. */
    @ApiOperation(value = "查看附件的下载历史记录")
    @RequestMapping(value = "/attachments/{id}/downloadHistory", method = RequestMethod.GET)
    public Page<DownloadSummary> listDownloadHistory(@PathVariable String id, PageQueryParameter queryParameter) {
        return attachmentsRestSupport.listDownloadHistory(id, queryParameter);
    }
}
|
import java.sql.SQLException;
import java.util.List;
import com.itclass.dao.AbstractDAO;
import com.itclass.dao.PostDAO;
import com.itclass.model.Post;
public class TestConnection {
public static void main(String[] args) throws SQLException {
/*AbstractDAO<Integer, Post> dao = new PostDAO();
List<Post> posts = dao.getAll();
for(Post post: posts) {
System.out.println(post);
}
*/
}
}
|
package me.andrewpeng.cadence.core;
import android.os.Handler;
/**
 * Fixed-timestep game loop driven by an Android {@link Handler}.
 *
 * <p>Each iteration ticks and renders the {@link MainView}, then reschedules itself so that
 * iterations occur roughly every {@code 1000 / FPS} milliseconds. When an iteration overruns
 * its frame budget, up to {@code MAX_FRAME_SKIPS} extra ticks (without renders) are run to
 * let the simulation catch up.
 */
public class Loop {

    /** Target frames per second. Public and mutable, matching the original API. */
    public static int FPS = 60;

    private Handler handler;
    private Runnable runnable;
    private final MainView mainView;
    private long beginTime;
    private long timeDiff;
    private int sleepTime;
    private int framesSkipped;
    // Frame budget in milliseconds. The previous initializer used integer division with an
    // (int) cast — (int) (1000 / FPS) — truncating 16.67ms to 16ms and silently running the
    // loop slightly fast. Using floating-point division preserves the fractional budget the
    // double type was clearly intended to hold.
    private final double framePeriod = 1000.0 / FPS;
    private final int maxFrameSkips = 5;

    /** When true, tick() and render() are skipped but the loop keeps rescheduling itself. */
    public boolean paused = false;

    public Loop(MainView mainView) {
        this.mainView = mainView;
        init();
    }

    private void init() {
        sleepTime = 0;
        handler = new Handler();
        runnable = new Runnable() {
            @Override
            public void run() {
                // Get the time this iteration starts at for reference.
                beginTime = System.currentTimeMillis();
                framesSkipped = 0;
                // Only tick and render if the game isn't paused.
                if (!paused) {
                    mainView.tick();
                    mainView.render();
                }
                // Get the time it took to tick and render once.
                timeDiff = System.currentTimeMillis() - beginTime;
                // Get the amount of time missed (if needed to catch up).
                sleepTime = (int) (framePeriod - timeDiff);
                // If the time missed is negative, and it is possible to skip frames, tick
                // additional times (without rendering) to catch the simulation up.
                while (sleepTime < 0 && framesSkipped < maxFrameSkips && !paused) {
                    mainView.tick();
                    sleepTime += framePeriod;
                    framesSkipped++;
                }
                if (framesSkipped > 0) {
                    System.out.println("Can't keep up! Skipped " + framesSkipped + " frames");
                }
                // Recall this runnable for the next iteration; clamp the delay so a still-
                // negative sleepTime (budget exhausted even after skipping) posts immediately.
                handler.postDelayed(runnable, Math.max(sleepTime, 0));
            }
        };
        // Run for the first time.
        handler.post(runnable);
    }
}
|
/*
* Copyright 2015-2018 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package zipkin2.server.internal.brave;
import brave.Span;
import brave.http.HttpServerAdapter;
import brave.http.HttpServerHandler;
import brave.http.HttpTracing;
import brave.propagation.CurrentTraceContext;
import brave.propagation.CurrentTraceContext.Scope;
import brave.propagation.Propagation;
import brave.propagation.TraceContext;
import io.undertow.server.HandlerWrapper;
import io.undertow.server.HttpHandler;
import io.undertow.server.HttpServerExchange;
import io.undertow.server.handlers.ExceptionHandler;
import io.undertow.util.HeaderMap;
import java.net.InetSocketAddress;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.web.embedded.undertow.UndertowDeploymentInfoCustomizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import zipkin2.Endpoint;
import zipkin2.server.internal.ConditionalOnSelfTracing;
/** TODO: when brave gets undertow tracing by default, switch to that */
@ConditionalOnSelfTracing
@Configuration
public class TracingHttpHandlerConfiguration {
  // Wraps Undertow's initial handler chain so every request is traced by Brave.
  @Bean @Qualifier("httpTracingCustomizer") UndertowDeploymentInfoCustomizer httpTracingCustomizer(
      HttpTracing httpTracing) {
    TracingHttpHandler.Wrapper result = new TracingHttpHandler.Wrapper(httpTracing);
    return info -> info.addInitialHandlerChainWrapper(result);
  }

  // Undertow HttpHandler that starts a server span per request and finishes it when the
  // exchange completes.
  static final class TracingHttpHandler implements HttpHandler {
    // Reads propagation headers (e.g. B3) out of Undertow's HeaderMap.
    static final Propagation.Getter<HeaderMap, String>
        GETTER = new Propagation.Getter<HeaderMap, String>() {
      @Override public String get(HeaderMap carrier, String key) {
        return carrier.getFirst(key);
      }

      @Override public String toString() {
        return "HttpServerRequest::getHeader";
      }
    };

    // HandlerWrapper that inserts a TracingHttpHandler in front of the existing chain.
    static final class Wrapper implements HandlerWrapper {
      final HttpTracing httpTracing;

      Wrapper(HttpTracing httpTracing) {
        this.httpTracing = httpTracing;
      }

      @Override public HttpHandler wrap(HttpHandler next) {
        return new TracingHttpHandler(httpTracing, next);
      }
    }

    final CurrentTraceContext currentTraceContext;
    final HttpServerHandler<HttpServerExchange, HttpServerExchange> serverHandler;
    final TraceContext.Extractor<HeaderMap> extractor;
    final HttpHandler next;

    TracingHttpHandler(HttpTracing httpTracing, HttpHandler next) {
      this.currentTraceContext = httpTracing.tracing().currentTraceContext();
      this.serverHandler = HttpServerHandler.create(httpTracing, new Adapter());
      this.extractor = httpTracing.tracing().propagation().extractor(GETTER);
      this.next = next;
    }

    @Override public void handleRequest(HttpServerExchange exchange) throws Exception {
      if (!exchange.isComplete()) {
        // Extract the incoming context (if any) and start the server span.
        Span span = serverHandler.handleReceive(extractor, exchange.getRequestHeaders(), exchange);
        // Finish the span only when the exchange completes, so async handlers are covered too.
        exchange.addExchangeCompleteListener((exch, nextListener) -> {
          try {
            nextListener.proceed();
          } finally {
            serverHandler.handleSend(exch, exch.getAttachment(ExceptionHandler.THROWABLE), span);
          }
        });
        // Scope the trace context for the duration of the downstream handler call.
        try (Scope scope = currentTraceContext.newScope(span.context())) {
          next.handleRequest(exchange);
        } catch (Exception | Error e) { // move the error to where the complete listener can see it
          exchange.putAttachment(ExceptionHandler.THROWABLE, e);
          throw e;
        }
      } else {
        // Exchange already completed (e.g. dispatched elsewhere); nothing to trace here.
        next.handleRequest(exchange);
      }
    }
  }

  // Tells Brave how to read method/path/url/status and client address from an Undertow exchange.
  static final class Adapter extends HttpServerAdapter<HttpServerExchange, HttpServerExchange> {
    @Override public String method(HttpServerExchange request) {
      return request.getRequestMethod().toString();
    }

    @Override public String path(HttpServerExchange request) {
      return request.getRequestPath();
    }

    @Override public String url(HttpServerExchange request) {
      return request.getRequestURL();
    }

    @Override public String requestHeader(HttpServerExchange request, String name) {
      return request.getRequestHeaders().getFirst(name);
    }

    @Override public Integer statusCode(HttpServerExchange response) {
      return response.getStatusCode();
    }

    @Override
    public boolean parseClientAddress(HttpServerExchange req, Endpoint.Builder builder) {
      // Prefer standard header-based parsing; fall back to the socket peer address.
      if (super.parseClientAddress(req, builder)) return true;
      InetSocketAddress addr = (InetSocketAddress) req.getConnection().getPeerAddress();
      if (builder.parseIp(addr.getAddress())) {
        builder.port(addr.getPort());
        return true;
      }
      return false;
    }
  }
}
|
/*
* Copyright © 2018 Apple Inc. and the ServiceTalk project authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.servicetalk.http.router.jersey;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;
// JUnit 4 suite aggregating the Jersey router test classes listed below.
@RunWith(Suite.class)
@SuiteClasses({
    // Support
    CharSequenceUtilTest.class,
    // Core JAX-RS features
    SynchronousResourceTest.class,
    AsynchronousResourceTest.class,
    ExceptionMapperTest.class,
    GlobalFiltersTest.class,
    InputConsumingGlobalFiltersTest.class,
    InterceptorsTest.class,
    SecurityFilterTest.class,
    // RS features
    CancellationTest.class,
    // Execution strategy tests
    ExecutionStrategyConfigurationFailuresTest.class,
    ExecutionStrategyTest.class,
    MixedModeResourceTest.class
})
// NOTE(review): declared abstract — presumably executed only through concrete subclasses; confirm.
public abstract class BaseJerseyRouterTestSuite {
    // NOOP
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.testing.mock.osgi;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import org.apache.sling.testing.mock.osgi.OsgiServiceUtilTest.Service3StaticGreedy;
import org.apache.sling.testing.mock.osgi.OsgiServiceUtilTest.ServiceInterface1;
import org.apache.sling.testing.mock.osgi.OsgiServiceUtilTest.ServiceInterface1Optional;
import org.apache.sling.testing.mock.osgi.OsgiServiceUtilTest.ServiceInterface2;
import org.apache.sling.testing.mock.osgi.OsgiServiceUtilTest.ServiceInterface3;
import org.apache.sling.testing.mock.osgi.OsgiServiceUtilTest.ServiceSuperInterface3;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
import com.google.common.collect.ImmutableSet;
/**
 * Tests static + greedy OSGi reference binding behavior of the mock bundle context,
 * using {@code Service3StaticGreedy} as the component under test.
 */
@RunWith(MockitoJUnitRunner.class)
public class MockBundleContextStaticGreedyReferencesTest {

    private BundleContext bundleContext;
    private ServiceRegistration reg1a;
    private ServiceRegistration reg2a;

    @Mock
    private ServiceInterface1 dependency1a;
    @Mock
    private ServiceInterface1 dependency1b;
    @Mock
    private ServiceInterface1Optional dependency1aOptional;
    @Mock
    private ServiceInterface1Optional dependency1bOptional;
    @Mock
    private ServiceInterface2 dependency2a;
    @Mock
    private ServiceInterface2 dependency2b;
    @Mock
    private ServiceSuperInterface3 dependency3a;
    @Mock
    private ServiceSuperInterface3 dependency3b;

    @Before
    public void setUp() {
        bundleContext = MockOsgi.newBundleContext();

        // setup service instance with only minimum mandatory references
        reg1a = bundleContext.registerService(ServiceInterface1.class.getName(), dependency1a, null);
        reg2a = bundleContext.registerService(ServiceInterface2.class.getName(), dependency2a, null);

        Service3StaticGreedy service = new Service3StaticGreedy();
        MockOsgi.injectServices(service, bundleContext);
        MockOsgi.activate(service, bundleContext);
        bundleContext.registerService(Service3StaticGreedy.class.getName(), service, null);

        // sanity-check the initial binding state before each test runs
        assertDependency1(dependency1a);
        assertDependency1Optional(null);
        assertDependencies2(dependency2a);
        assertDependencies3();
    }

    @Test
    public void testAddRemoveOptionalUnaryService() {
        ServiceRegistration reg1aOptional = bundleContext.registerService(ServiceInterface1Optional.class.getName(), dependency1aOptional, null);
        assertDependency1Optional(dependency1aOptional);
        reg1aOptional.unregister();
        assertDependency1Optional(null);
    }

    // Bug fix: this method was missing its @Test annotation, so JUnit silently never ran it.
    @Test
    public void testAddOptionalUnaryService_TooMany() {
        bundleContext.registerService(ServiceInterface1Optional.class.getName(), dependency1aOptional, null);
        assertDependency1Optional(dependency1aOptional);

        // in real OSGi this should fail - but this is not covered by the current implementation. so test the real implementation here.
        bundleContext.registerService(ServiceInterface1Optional.class.getName(), dependency1bOptional, null);
        assertDependency1Optional(dependency1bOptional);
    }

    @Test(expected = ReferenceViolationException.class)
    public void testAddMandatoryUnaryService_TooMany() {
        bundleContext.registerService(ServiceInterface1.class.getName(), dependency1b, null);
    }

    @Test(expected = ReferenceViolationException.class)
    public void testRemoveMandatoryUnaryService_TooMany() {
        reg1a.unregister();
    }

    @Test
    public void testAddRemoveOptionalMultipleService() {
        ServiceRegistration reg3a = bundleContext.registerService(ServiceInterface3.class.getName(), dependency3a, null);
        assertDependencies3(dependency3a);
        ServiceRegistration reg3b = bundleContext.registerService(ServiceInterface3.class.getName(), dependency3b, null);
        assertDependencies3(dependency3a, dependency3b);
        reg3a.unregister();
        assertDependencies3(dependency3b);
        reg3b.unregister();
        assertDependencies3();
    }

    @Test
    public void testAddRemoveMandatoryMultipleService() {
        ServiceRegistration reg2b = bundleContext.registerService(ServiceInterface2.class.getName(), dependency2b, null);
        assertDependencies2(dependency2a, dependency2b);
        reg2b.unregister();
        assertDependencies2(dependency2a);
    }

    @Test(expected = ReferenceViolationException.class)
    public void testAddRemoveMandatoryMultipleService_FailReg2aUnregister() {
        ServiceRegistration reg2b = bundleContext.registerService(ServiceInterface2.class.getName(), dependency2b, null);
        assertDependencies2(dependency2a, dependency2b);
        reg2b.unregister();
        assertDependencies2(dependency2a);
        // this should fail
        reg2a.unregister();
    }

    /** Asserts reference1 is bound to exactly {@code instance}, or unbound when {@code instance} is null. */
    private void assertDependency1(ServiceInterface1 instance) {
        Service3StaticGreedy service = getService();
        if (instance == null) {
            assertNull(service.getReference1());
        }
        else {
            assertSame(instance, service.getReference1());
        }
    }

    /** Asserts the optional unary reference is bound to exactly {@code instance}, or unbound when null. */
    private void assertDependency1Optional(ServiceInterface1Optional instance) {
        Service3StaticGreedy service = getService();
        if (instance == null) {
            assertNull(service.getReference1Optional());
        }
        else {
            assertSame(instance, service.getReference1Optional());
        }
    }

    /** Asserts the multiple-cardinality references2 set equals exactly the given instances (order-insensitive). */
    private void assertDependencies2(ServiceInterface2... instances) {
        Service3StaticGreedy service = getService();
        assertEquals(ImmutableSet.<ServiceInterface2>copyOf(instances),
                ImmutableSet.<ServiceInterface2>copyOf(service.getReferences2()));
    }

    /** Asserts the multiple-cardinality references3 set equals exactly the given instances (order-insensitive). */
    private void assertDependencies3(ServiceSuperInterface3... instances) {
        Service3StaticGreedy service = getService();
        assertEquals(ImmutableSet.<ServiceSuperInterface3>copyOf(instances),
                ImmutableSet.<ServiceSuperInterface3>copyOf(service.getReferences3()));
    }

    /** Looks up the currently registered service instance under test from the bundle context. */
    private Service3StaticGreedy getService() {
        ServiceReference<?> serviceRef = bundleContext.getServiceReference(Service3StaticGreedy.class.getName());
        return (Service3StaticGreedy) bundleContext.getService(serviceRef);
    }
}
|
/*
* Copyright 2010-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.awssdk.extensions.dynamodb.mappingclient.operations;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyMap;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static software.amazon.awssdk.extensions.dynamodb.mappingclient.AttributeValues.numberValue;
import static software.amazon.awssdk.extensions.dynamodb.mappingclient.AttributeValues.stringValue;
import static software.amazon.awssdk.extensions.dynamodb.mappingclient.functionaltests.models.FakeItem.createUniqueFakeItem;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import software.amazon.awssdk.extensions.dynamodb.mappingclient.Expression;
import software.amazon.awssdk.extensions.dynamodb.mappingclient.MapperExtension;
import software.amazon.awssdk.extensions.dynamodb.mappingclient.OperationContext;
import software.amazon.awssdk.extensions.dynamodb.mappingclient.TableMetadata;
import software.amazon.awssdk.extensions.dynamodb.mappingclient.extensions.WriteModification;
import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.Put;
import software.amazon.awssdk.services.dynamodb.model.PutItemRequest;
import software.amazon.awssdk.services.dynamodb.model.PutItemResponse;
import software.amazon.awssdk.services.dynamodb.model.TransactWriteItem;
import software.amazon.awssdk.extensions.dynamodb.mappingclient.functionaltests.models.FakeItem;
import software.amazon.awssdk.extensions.dynamodb.mappingclient.functionaltests.models.FakeItemComposedClass;
/**
 * Unit tests for the {@code PutItem} mapper operation: request generation (with and
 * without condition expressions and mapper extensions), the service call wiring, and
 * transact-write-item generation.
 */
@RunWith(MockitoJUnitRunner.class)
public class PutItemTest {
    private static final String TABLE_NAME = "table-name";
    private static final OperationContext PRIMARY_CONTEXT =
        OperationContext.of(TABLE_NAME, TableMetadata.primaryIndexName());
    private static final OperationContext GSI_1_CONTEXT =
        OperationContext.of(TABLE_NAME, "gsi_1");
    private static final Expression CONDITION_EXPRESSION;
    private static final Expression CONDITION_EXPRESSION_2;

    // Single initializer builds both fixture expressions; the two name/value map
    // pairs are kept under distinct locals so they cannot be mixed up (previously
    // this was split across two separate static blocks).
    static {
        Map<String, String> expressionNames = new HashMap<>();
        expressionNames.put("#test_field_1", "test_field_1");
        expressionNames.put("#test_field_2", "test_field_2");
        Map<String, AttributeValue> expressionValues = new HashMap<>();
        expressionValues.put(":test_value_1", numberValue(1));
        expressionValues.put(":test_value_2", numberValue(2));
        CONDITION_EXPRESSION = Expression.builder()
            .expression("#test_field_1 = :test_value_1 OR #test_field_2 = :test_value_2")
            .expressionNames(Collections.unmodifiableMap(expressionNames))
            .expressionValues(Collections.unmodifiableMap(expressionValues))
            .build();

        Map<String, String> expressionNames2 = new HashMap<>();
        expressionNames2.put("#test_field_3", "test_field_3");
        expressionNames2.put("#test_field_4", "test_field_4");
        Map<String, AttributeValue> expressionValues2 = new HashMap<>();
        expressionValues2.put(":test_value_3", numberValue(3));
        expressionValues2.put(":test_value_4", numberValue(4));
        CONDITION_EXPRESSION_2 = Expression.builder()
            .expression("#test_field_3 = :test_value_3 OR #test_field_4 = :test_value_4")
            .expressionNames(Collections.unmodifiableMap(expressionNames2))
            .expressionValues(Collections.unmodifiableMap(expressionValues2))
            .build();
    }

    @Mock
    private DynamoDbClient mockDynamoDbClient;
    @Mock
    private MapperExtension mockMapperExtension;

    @Test
    public void getServiceCall_makesTheRightCallAndReturnsResponse() {
        FakeItem keyItem = createUniqueFakeItem();
        PutItem<FakeItem> putItemOperation = PutItem.of(keyItem);
        // Renamed from the misleading 'getItemRequest' — this is a PutItemRequest.
        PutItemRequest putItemRequest = PutItemRequest.builder().tableName(TABLE_NAME).build();
        PutItemResponse expectedResponse = PutItemResponse.builder().build();
        when(mockDynamoDbClient.putItem(any(PutItemRequest.class))).thenReturn(expectedResponse);

        PutItemResponse response = putItemOperation.serviceCall(mockDynamoDbClient).apply(putItemRequest);

        assertThat(response, sameInstance(expectedResponse));
        verify(mockDynamoDbClient).putItem(putItemRequest);
    }

    @Test(expected = IllegalArgumentException.class)
    public void generateRequest_withIndex_throwsIllegalArgumentException() {
        FakeItem item = createUniqueFakeItem();
        PutItem<FakeItem> putItemOperation = PutItem.of(item);
        // PutItem is only valid against the primary index, so a GSI context must throw.
        putItemOperation.generateRequest(FakeItem.getTableSchema(), GSI_1_CONTEXT, null);
    }

    @Test
    public void generateRequest_generatesCorrectRequest() {
        FakeItem fakeItem = createUniqueFakeItem();
        fakeItem.setSubclassAttribute("subclass-value");
        PutItem<FakeItem> putItemOperation = PutItem.of(fakeItem);

        PutItemRequest request = putItemOperation.generateRequest(FakeItem.getTableSchema(),
                                                                  PRIMARY_CONTEXT,
                                                                  null);

        Map<String, AttributeValue> expectedItemMap = new HashMap<>();
        expectedItemMap.put("id", AttributeValue.builder().s(fakeItem.getId()).build());
        expectedItemMap.put("subclass_attribute", AttributeValue.builder().s("subclass-value").build());
        PutItemRequest expectedRequest = PutItemRequest.builder()
            .tableName(TABLE_NAME)
            .item(expectedItemMap)
            .build();
        assertThat(request, is(expectedRequest));
    }

    @Test
    public void generateRequest_withConditionExpression_generatesCorrectRequest() {
        FakeItem fakeItem = createUniqueFakeItem();
        fakeItem.setSubclassAttribute("subclass-value");
        PutItem<FakeItem> putItemOperation = PutItem.builder()
            .conditionExpression(CONDITION_EXPRESSION)
            .item(fakeItem)
            .build();

        PutItemRequest request = putItemOperation.generateRequest(FakeItem.getTableSchema(),
                                                                  PRIMARY_CONTEXT,
                                                                  null);

        Map<String, AttributeValue> expectedItemMap = new HashMap<>();
        expectedItemMap.put("id", AttributeValue.builder().s(fakeItem.getId()).build());
        expectedItemMap.put("subclass_attribute", AttributeValue.builder().s("subclass-value").build());
        PutItemRequest expectedRequest =
            PutItemRequest.builder()
                .tableName(TABLE_NAME)
                .item(expectedItemMap)
                .conditionExpression(CONDITION_EXPRESSION.expression())
                .expressionAttributeNames(CONDITION_EXPRESSION.expressionNames())
                .expressionAttributeValues(CONDITION_EXPRESSION.expressionValues())
                .build();
        assertThat(request, is(expectedRequest));
    }

    @Test
    public void generateRequest_withConditionExpression_andExtensionWithSingleCondition() {
        FakeItem baseFakeItem = createUniqueFakeItem();
        when(mockMapperExtension.beforeWrite(anyMap(), any(), any()))
            .thenReturn(WriteModification.builder().additionalConditionalExpression(CONDITION_EXPRESSION_2).build());
        PutItem<FakeItem> putItemOperation =
            PutItem.builder().item(baseFakeItem).conditionExpression(CONDITION_EXPRESSION).build();

        PutItemRequest request = putItemOperation.generateRequest(FakeItem.getTableSchema(),
                                                                  PRIMARY_CONTEXT,
                                                                  mockMapperExtension);

        // Operation-level and extension-provided conditions must be AND-combined.
        Expression expectedCondition = Expression.coalesce(CONDITION_EXPRESSION, CONDITION_EXPRESSION_2, " AND ");
        assertThat(request.conditionExpression(), is(expectedCondition.expression()));
        assertThat(request.expressionAttributeNames(), is(expectedCondition.expressionNames()));
        assertThat(request.expressionAttributeValues(), is(expectedCondition.expressionValues()));
    }

    @Test(expected = IllegalArgumentException.class)
    public void generateRequest_noPartitionKey_throwsIllegalArgumentException() {
        FakeItemComposedClass keyItem = FakeItemComposedClass.builder().composedAttribute("whatever").build();
        PutItem<FakeItemComposedClass> putItemOperation = PutItem.of(keyItem);
        putItemOperation.generateRequest(FakeItemComposedClass.getTableSchema(), PRIMARY_CONTEXT, null);
    }

    @Test
    public void transformResponse_doesNotBlowUp() {
        FakeItem fakeItem = createUniqueFakeItem();
        PutItem<FakeItem> putItemOperation = PutItem.of(fakeItem);
        PutItemResponse response = PutItemResponse.builder()
            .build();

        // Smoke test only: transformResponse must tolerate an empty response.
        putItemOperation.transformResponse(response, FakeItem.getTableSchema(), PRIMARY_CONTEXT, null);
    }

    @Test
    public void generateRequest_withExtension_modifiesItemToPut() {
        FakeItem baseFakeItem = createUniqueFakeItem();
        FakeItem fakeItem = createUniqueFakeItem();
        Map<String, AttributeValue> baseMap = FakeItem.getTableSchema().itemToMap(baseFakeItem, true);
        Map<String, AttributeValue> fakeMap = FakeItem.getTableSchema().itemToMap(fakeItem, true);
        when(mockMapperExtension.beforeWrite(anyMap(), any(), any()))
            .thenReturn(WriteModification.builder().transformedItem(fakeMap).build());
        PutItem<FakeItem> putItemOperation = PutItem.of(baseFakeItem);

        PutItemRequest request = putItemOperation.generateRequest(FakeItem.getTableSchema(),
                                                                  PRIMARY_CONTEXT,
                                                                  mockMapperExtension);

        // The extension's transformed item replaces the original item in the request.
        assertThat(request.item(), is(fakeMap));
        verify(mockMapperExtension).beforeWrite(baseMap, PRIMARY_CONTEXT, FakeItem.getTableMetadata());
    }

    @Test
    public void generateRequest_withExtension_singleCondition() {
        FakeItem baseFakeItem = createUniqueFakeItem();
        FakeItem fakeItem = createUniqueFakeItem();
        Map<String, AttributeValue> fakeMap = FakeItem.getTableSchema().itemToMap(fakeItem, true);
        Expression condition = Expression.builder().expression("condition").expressionValues(fakeMap).build();
        when(mockMapperExtension.beforeWrite(anyMap(), any(), any()))
            .thenReturn(WriteModification.builder().additionalConditionalExpression(condition).build());
        PutItem<FakeItem> putItemOperation = PutItem.of(baseFakeItem);

        PutItemRequest request = putItemOperation.generateRequest(FakeItem.getTableSchema(),
                                                                  PRIMARY_CONTEXT,
                                                                  mockMapperExtension);

        assertThat(request.conditionExpression(), is("condition"));
        assertThat(request.expressionAttributeValues(), is(fakeMap));
    }

    @Test
    public void generateRequest_withExtension_noModifications() {
        FakeItem baseFakeItem = createUniqueFakeItem();
        when(mockMapperExtension.beforeWrite(anyMap(), any(), any()))
            .thenReturn(WriteModification.builder().build());
        PutItem<FakeItem> putItemOperation = PutItem.of(baseFakeItem);

        PutItemRequest request = putItemOperation.generateRequest(FakeItem.getTableSchema(),
                                                                  PRIMARY_CONTEXT,
                                                                  mockMapperExtension);

        // An empty WriteModification must leave the request's condition untouched.
        assertThat(request.conditionExpression(), is(nullValue()));
        assertThat(request.expressionAttributeValues().size(), is(0));
    }

    @Test
    public void generateTransactWriteItem_basicRequest() {
        FakeItem fakeItem = createUniqueFakeItem();
        Map<String, AttributeValue> fakeItemMap = FakeItem.getTableSchema().itemToMap(fakeItem, true);
        // Spy so that generateRequest can be stubbed with doReturn (avoids invoking the real method).
        PutItem<FakeItem> putItemOperation = spy(PutItem.of(fakeItem));
        OperationContext context = OperationContext.of(TABLE_NAME, TableMetadata.primaryIndexName());
        PutItemRequest putItemRequest = PutItemRequest.builder()
            .tableName(TABLE_NAME)
            .item(fakeItemMap)
            .build();
        doReturn(putItemRequest).when(putItemOperation).generateRequest(any(), any(), any());

        TransactWriteItem actualResult = putItemOperation.generateTransactWriteItem(FakeItem.getTableSchema(),
                                                                                    context,
                                                                                    mockMapperExtension);

        TransactWriteItem expectedResult = TransactWriteItem.builder()
            .put(Put.builder()
                     .item(fakeItemMap)
                     .tableName(TABLE_NAME)
                     .build())
            .build();
        assertThat(actualResult, is(expectedResult));
        verify(putItemOperation).generateRequest(FakeItem.getTableSchema(), context, mockMapperExtension);
    }

    @Test
    public void generateTransactWriteItem_conditionalRequest() {
        FakeItem fakeItem = createUniqueFakeItem();
        Map<String, AttributeValue> fakeItemMap = FakeItem.getTableSchema().itemToMap(fakeItem, true);
        PutItem<FakeItem> putItemOperation = spy(PutItem.of(fakeItem));
        OperationContext context = OperationContext.of(TABLE_NAME, TableMetadata.primaryIndexName());
        String conditionExpression = "condition-expression";
        Map<String, AttributeValue> attributeValues = Collections.singletonMap("key", stringValue("value1"));
        Map<String, String> attributeNames = Collections.singletonMap("key", "value2");
        PutItemRequest putItemRequest = PutItemRequest.builder()
            .tableName(TABLE_NAME)
            .item(fakeItemMap)
            .conditionExpression(conditionExpression)
            .expressionAttributeValues(attributeValues)
            .expressionAttributeNames(attributeNames)
            .build();
        doReturn(putItemRequest).when(putItemOperation).generateRequest(any(), any(), any());

        TransactWriteItem actualResult = putItemOperation.generateTransactWriteItem(FakeItem.getTableSchema(),
                                                                                    context,
                                                                                    mockMapperExtension);

        // Condition expression, names and values must all be copied into the Put.
        TransactWriteItem expectedResult = TransactWriteItem.builder()
            .put(Put.builder()
                     .item(fakeItemMap)
                     .tableName(TABLE_NAME)
                     .conditionExpression(conditionExpression)
                     .expressionAttributeNames(attributeNames)
                     .expressionAttributeValues(attributeValues)
                     .build())
            .build();
        assertThat(actualResult, is(expectedResult));
        verify(putItemOperation).generateRequest(FakeItem.getTableSchema(), context, mockMapperExtension);
    }
}
|
package com.huyq.evealarm.model;
/**
 * A single pixel: integer (x, y) coordinates plus an RGB color value.
 *
 * @author huyiqi
 * @date 2020/7/3
 */
public class Point {

    /** Shared origin point (0, 0); its rgb defaults to 0. */
    public static final Point ZERO = new Point(0, 0);

    private int x;
    private int y;
    private int rgb;

    /** Creates a point at (x, y) with a default rgb of 0. */
    public Point(int x, int y) {
        this(x, y, 0);
    }

    /** Creates a point at (x, y) carrying the given rgb value. */
    public Point(int x, int y, int rgb) {
        this.x = x;
        this.y = y;
        this.rgb = rgb;
    }

    public int getX() {
        return x;
    }

    public void setX(int x) {
        this.x = x;
    }

    public int getY() {
        return y;
    }

    public void setY(int y) {
        this.y = y;
    }

    public int getRgb() {
        return rgb;
    }

    public void setRgb(int rgb) {
        this.rgb = rgb;
    }

    /** Reads this point's rgb out of a pixel block, indexed as block[x][y]. */
    public void setRgb(int[][] block) {
        setRgb(block[x][y]);
    }
}
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.glue.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/glue-2017-03-31/StopCrawler" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class StopCrawlerResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // This result type carries no fields, so the rendering is always empty braces.
        return "{}";
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null, preserving the original null check.
        if (!(obj instanceof StopCrawlerResult)) {
            return false;
        }
        // No fields to compare: any two instances of this type are equal.
        return true;
    }

    @Override
    public int hashCode() {
        // No fields participate, so every instance hashes to the same constant
        // (consistent with equals above).
        return 1;
    }

    @Override
    public StopCrawlerResult clone() {
        try {
            return (StopCrawlerResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.exec;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.ddl.DDLTask;
import org.apache.hadoop.hive.ql.ddl.DDLWork;
import org.apache.hadoop.hive.ql.exec.mr.MapRedTask;
import org.apache.hadoop.hive.ql.exec.mr.MapredLocalTask;
import org.apache.hadoop.hive.ql.exec.repl.ReplDumpTask;
import org.apache.hadoop.hive.ql.exec.repl.ReplDumpWork;
import org.apache.hadoop.hive.ql.exec.repl.ReplStateLogTask;
import org.apache.hadoop.hive.ql.exec.repl.ReplStateLogWork;
import org.apache.hadoop.hive.ql.exec.repl.ReplLoadTask;
import org.apache.hadoop.hive.ql.exec.repl.ReplLoadWork;
import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
import org.apache.hadoop.hive.ql.exec.tez.TezTask;
import org.apache.hadoop.hive.ql.io.merge.MergeFileTask;
import org.apache.hadoop.hive.ql.io.merge.MergeFileWork;
import org.apache.hadoop.hive.ql.plan.ColumnStatsUpdateWork;
import org.apache.hadoop.hive.ql.plan.StatsWork;
import org.apache.hadoop.hive.ql.plan.ConditionalWork;
import org.apache.hadoop.hive.ql.plan.CopyWork;
import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork;
import org.apache.hadoop.hive.ql.plan.ExplainWork;
import org.apache.hadoop.hive.ql.plan.ExportWork;
import org.apache.hadoop.hive.ql.plan.FetchWork;
import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
import org.apache.hadoop.hive.ql.plan.MapredWork;
import org.apache.hadoop.hive.ql.plan.MoveWork;
import org.apache.hadoop.hive.ql.plan.ReplCopyWork;
import org.apache.hadoop.hive.ql.plan.ReplTxnWork;
import org.apache.hadoop.hive.ql.plan.SparkWork;
import org.apache.hadoop.hive.ql.plan.TezWork;
import com.google.common.annotations.VisibleForTesting;
import static org.apache.hadoop.hive.ql.exec.repl.ExternalTableCopyTaskBuilder.DirCopyWork;
import static org.apache.hadoop.hive.ql.exec.repl.ExternalTableCopyTaskBuilder.DirCopyTask;
/**
 * TaskFactory implementation: maps each work class to the Task implementation that
 * executes it, and instantiates tasks with thread-locally unique stage ids.
 **/
public final class TaskFactory {

    /**
     * taskTuple: pairs a work class with the task class that executes that work.
     *
     * @param <T> the work type
     */
    public static final class TaskTuple<T extends Serializable> {
        public Class<T> workClass;
        public Class<? extends Task<T>> taskClass;

        public TaskTuple(Class<T> workClass, Class<? extends Task<T>> taskClass) {
            this.workClass = workClass;
            this.taskClass = taskClass;
        }
    }

    /** Registry of all known work-class to task-class mappings, populated once below. */
    public static ArrayList<TaskTuple<? extends Serializable>> taskvec;

    static {
        taskvec = new ArrayList<TaskTuple<? extends Serializable>>();
        taskvec.add(new TaskTuple<MoveWork>(MoveWork.class, MoveTask.class));
        taskvec.add(new TaskTuple<FetchWork>(FetchWork.class, FetchTask.class));
        taskvec.add(new TaskTuple<CopyWork>(CopyWork.class, CopyTask.class));
        taskvec.add(new TaskTuple<ReplCopyWork>(ReplCopyWork.class, ReplCopyTask.class));
        taskvec.add(new TaskTuple<DDLWork>(DDLWork.class, DDLTask.class));
        taskvec.add(new TaskTuple<ExplainWork>(ExplainWork.class, ExplainTask.class));
        taskvec.add(new TaskTuple<ExplainSQRewriteWork>(ExplainSQRewriteWork.class, ExplainSQRewriteTask.class));
        taskvec.add(new TaskTuple<ConditionalWork>(ConditionalWork.class, ConditionalTask.class));
        taskvec.add(new TaskTuple<MapredWork>(MapredWork.class, MapRedTask.class));
        taskvec.add(new TaskTuple<MapredLocalWork>(MapredLocalWork.class, MapredLocalTask.class));
        taskvec.add(new TaskTuple<StatsWork>(StatsWork.class, StatsTask.class));
        taskvec.add(new TaskTuple<ColumnStatsUpdateWork>(ColumnStatsUpdateWork.class, ColumnStatsUpdateTask.class));
        taskvec.add(new TaskTuple<MergeFileWork>(MergeFileWork.class, MergeFileTask.class));
        taskvec.add(new TaskTuple<DependencyCollectionWork>(DependencyCollectionWork.class,
            DependencyCollectionTask.class));
        taskvec.add(new TaskTuple<TezWork>(TezWork.class, TezTask.class));
        taskvec.add(new TaskTuple<SparkWork>(SparkWork.class, SparkTask.class));
        taskvec.add(new TaskTuple<>(ReplDumpWork.class, ReplDumpTask.class));
        taskvec.add(new TaskTuple<>(ReplLoadWork.class, ReplLoadTask.class));
        taskvec.add(new TaskTuple<>(ReplStateLogWork.class, ReplStateLogTask.class));
        taskvec.add(new TaskTuple<ExportWork>(ExportWork.class, ExportTask.class));
        taskvec.add(new TaskTuple<ReplTxnWork>(ReplTxnWork.class, ReplTxnTask.class));
        taskvec.add(new TaskTuple<DirCopyWork>(DirCopyWork.class, DirCopyTask.class));
    }

    // Per-thread counter used to hand out unique stage ids within one compilation thread.
    private static ThreadLocal<Integer> tid = new ThreadLocal<Integer>() {
        @Override
        protected Integer initialValue() {
            return Integer.valueOf(0);
        }
    };

    /** Returns the current thread's stage counter and advances it by one. */
    public static int getAndIncrementId() {
        int curValue = tid.get().intValue();
        tid.set(Integer.valueOf(curValue + 1));
        return curValue;
    }

    /** Resets the current thread's stage counter to zero. */
    public static void resetId() {
        tid.set(Integer.valueOf(0));
    }

    /**
     * Instantiates the task registered for the given work class and assigns it a
     * fresh "Stage-N" id.
     *
     * @throws RuntimeException if no task is registered for the work class, or
     *         instantiation fails
     */
    @SuppressWarnings("unchecked")
    @VisibleForTesting
    static <T extends Serializable> Task<T> get(Class<T> workClass) {
        for (TaskTuple<? extends Serializable> t : taskvec) {
            if (t.workClass == workClass) {
                try {
                    // Fix: Class.newInstance() is deprecated and propagates undeclared
                    // checked exceptions; go through the no-arg Constructor instead.
                    // Any reflective failure is still caught by the Exception handler below.
                    Task<T> ret = (Task<T>) t.taskClass.getDeclaredConstructor().newInstance();
                    ret.setId("Stage-" + Integer.toString(getAndIncrementId()));
                    return ret;
                } catch (Exception e) {
                    throw new RuntimeException(e);
                }
            }
        }
        throw new RuntimeException("No task for work class " + workClass.getName());
    }

    /**
     * Creates the task for the given work object and attaches the work (and
     * optionally the configuration) to it.
     */
    public static <T extends Serializable> Task<T> get(T work, HiveConf conf) {
        @SuppressWarnings("unchecked")
        Task<T> ret = get((Class<T>) work.getClass());
        ret.setWork(work);
        if (null != conf) {
            ret.setConf(conf);
        }
        return ret;
    }

    /** Convenience overload of {@link #get(Serializable, HiveConf)} without a configuration. */
    public static <T extends Serializable> Task<T> get(T work) {
        return get(work, null);
    }

    /**
     * Creates the task for the given work and registers it as a child of every
     * task in {@code tasklist}.
     */
    @SafeVarargs
    public static <T extends Serializable> Task<T> getAndMakeChild(T work,
        HiveConf conf, Task<? extends Serializable>... tasklist) {
        Task<T> ret = get(work);
        if (tasklist.length == 0) {
            return (ret);
        }
        makeChild(ret, tasklist);
        return (ret);
    }

    /** Adds {@code ret} as a child of each task in {@code tasklist}, creating child lists as needed. */
    @SafeVarargs
    public static void makeChild(Task<?> ret,
        Task<? extends Serializable>... tasklist) {
        // Add the new task as child of each of the passed in tasks
        for (Task<? extends Serializable> tsk : tasklist) {
            List<Task<? extends Serializable>> children = tsk.getChildTasks();
            if (children == null) {
                children = new ArrayList<Task<? extends Serializable>>();
            }
            children.add(ret);
            tsk.setChildTasks(children);
        }
    }

    private TaskFactory() {
        // prevent instantiation
    }
}
|
begin_unit|revision:0.9.5;language:Java;cregit-version:0.0.1
begin_comment
comment|/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */
end_comment
begin_package
package|package
name|org
operator|.
name|apache
operator|.
name|cxf
operator|.
name|aegis
operator|.
name|type
operator|.
name|java5
package|;
end_package
begin_import
import|import
name|javax
operator|.
name|xml
operator|.
name|XMLConstants
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|cxf
operator|.
name|aegis
operator|.
name|type
operator|.
name|basic
operator|.
name|StringType
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|ws
operator|.
name|commons
operator|.
name|schema
operator|.
name|XmlSchema
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|ws
operator|.
name|commons
operator|.
name|schema
operator|.
name|XmlSchemaSimpleContentExtension
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|ws
operator|.
name|commons
operator|.
name|schema
operator|.
name|XmlSchemaSimpleType
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|ws
operator|.
name|commons
operator|.
name|schema
operator|.
name|XmlSchemaSimpleTypeRestriction
import|;
end_import
begin_import
import|import
name|org
operator|.
name|apache
operator|.
name|ws
operator|.
name|commons
operator|.
name|schema
operator|.
name|constants
operator|.
name|Constants
import|;
end_import
begin_class
specifier|public
class|class
name|CustomStringType
extends|extends
name|StringType
block|{
annotation|@
name|Override
specifier|public
name|void
name|writeSchema
parameter_list|(
name|XmlSchema
name|root
parameter_list|)
block|{
comment|// this mapping gets used with xs:string, and we might get called.
if|if
condition|(
name|root
operator|.
name|getTargetNamespace
argument_list|()
operator|.
name|equals
argument_list|(
name|XMLConstants
operator|.
name|W3C_XML_SCHEMA_NS_URI
argument_list|)
condition|)
block|{
return|return;
block|}
name|XmlSchemaSimpleType
name|type
init|=
operator|new
name|XmlSchemaSimpleType
argument_list|(
name|root
argument_list|,
literal|true
argument_list|)
decl_stmt|;
name|type
operator|.
name|setName
argument_list|(
name|getSchemaType
argument_list|()
operator|.
name|getLocalPart
argument_list|()
argument_list|)
expr_stmt|;
name|XmlSchemaSimpleContentExtension
name|ext
init|=
operator|new
name|XmlSchemaSimpleContentExtension
argument_list|()
decl_stmt|;
name|ext
operator|.
name|setBaseTypeName
argument_list|(
name|Constants
operator|.
name|XSD_STRING
argument_list|)
expr_stmt|;
name|XmlSchemaSimpleTypeRestriction
name|content
init|=
operator|new
name|XmlSchemaSimpleTypeRestriction
argument_list|()
decl_stmt|;
name|content
operator|.
name|setBaseTypeName
argument_list|(
name|Constants
operator|.
name|XSD_STRING
argument_list|)
expr_stmt|;
name|type
operator|.
name|setContent
argument_list|(
name|content
argument_list|)
expr_stmt|;
block|}
block|}
end_class
end_unit
|
package jsortie.quicksort.selector.grubby;
/**
 * Remedian-style pivot selector whose sampling window is biased to the left
 * of the partition ("left-handed" variant). Concrete subclasses decide how a
 * small leaf group of candidates is reduced to a single index.
 */
public abstract class LeftHandedSelectorBase
    extends RemedianSelectorBase {

  /** Reduces a small contiguous run of {@code count} elements to one candidate index. */
  protected abstract int selectLeafCandidate(int[] vArray, int start, int count);

  /** Leaf-level positional selection over {@code elementCount} elements. */
  protected abstract int selectLeafCandidatePositionally(
      int[] vArray, int start, int elementCount, int candidateCount);

  public LeftHandedSelectorBase(boolean isUniform) {
    super(isUniform);
  }

  @Override
  public int getRangeStart(int start, int stop, int c) {
    // Center the c-wide window one sixth of the way in, clamped so it never
    // begins before the partition itself.
    int windowStart = start + (stop - start) / 6 - c / 2;
    return Math.max(start, windowStart);
  }

  @Override
  public int selectPivotIndex(int[] vArray, int start, int stop) {
    int count = stop - start;
    if (count < 32) {
      // Tiny partition: just take the middle element.
      return start + count / 2;
    }
    // Pick the candidate-set size m (4 times a power of 3); for very large
    // partitions grow m until 9*m*m covers count.
    int m;
    if (count < 108) {
      m = 4;
    } else if (count < 972) {
      m = 12;
    } else if (count < 8748) {
      m = 36;
    } else if (count < 78732) {
      m = 108;
    } else {
      m = 324;
      int mSquaredTimes9 = 708588; // == 9 * 324 * 324
      while (mSquaredTimes9 < count) {
        m *= 3;
        mSquaredTimes9 *= 9;
      }
    }
    return selectCandidateFromRange(vArray, start + (count - m) / 2, m);
  }

  @Override
  public int selectCandidateFromRange(int[] vArray, int start, int count) {
    if (count <= 4) {
      return selectLeafCandidate(vArray, start, count);
    }
    // Recurse on three equal thirds and keep the median of their winners.
    int third = count / 3;
    int a = selectCandidateFromRange(vArray, start, third);
    int b = selectCandidateFromRange(vArray, start + third, third);
    int c = selectCandidateFromRange(vArray, start + count - third, third);
    return medianOf3Candidates(vArray, a, b, c);
  }

  @Override
  public int selectCandidatePositionally(
      int[] vArray, int start, int elementCount, int candidateCount) {
    if (candidateCount <= 4) {
      return selectLeafCandidatePositionally(
          vArray, start, elementCount, candidateCount);
    }
    // Median of three recursive picks over the three thirds of the range.
    // Note: 2 * elementCount / 3 is (2*elementCount)/3 by precedence, which
    // differs from 2*(elementCount/3) when elementCount % 3 != 0.
    int a = selectCandidatePositionally(
        vArray, start, elementCount / 3, candidateCount / 3);
    int b = selectCandidatePositionally(
        vArray, start + elementCount / 3, elementCount / 3, candidateCount / 3);
    int c = selectCandidatePositionally(
        vArray, start + 2 * elementCount / 3, elementCount / 3, candidateCount / 3);
    return medianOf3Candidates(vArray, a, b, c);
  }
}
|
package chao.app.debugtools.widgets;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import android.support.annotation.Nullable;
import android.support.v7.widget.RecyclerView;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import chao.app.ami.Ami;
import java.util.ArrayList;
/**
* @author qinchao
* @since 2018/8/15
*/
/**
 * RecyclerView variant supporting pull-down gestures with two display modes:
 * a normal list mode and a "card" mode. Pulling past successive thresholds
 * drives the {@link State} machine (HIDDEN -> PULL -> READY_REFRESH ->
 * REFRESHING, or READY_CARD -> CARD); mode and state transitions are
 * broadcast to registered listeners.
 */
public class PullRecycleView extends RecyclerView {
// Main-thread handler (currently not referenced by the visible methods).
private Handler mHandler = new Handler(Looper.getMainLooper());
public static final int NORMAL_MODEL = 1;
public static final int CARD_MODE = 2;
// Current pull state; the header starts fully hidden.
private State mState = State.HIDDEN;
// Current display mode; card mode by default.
private int mMode = CARD_MODE;
// While false, touch events are consumed until the next ACTION_DOWN.
private boolean mTouchable = true;
private PullHeaderView2 mHeaderView;
private LayoutManagerHelper mLayoutManagerHelper;
private ArrayList<OnPullStateChangedListener> mPullStateListeners = new ArrayList<>();
private ArrayList<OnModeChangedListener> mModeListeners = new ArrayList<>();
private NormalModeController normalModeController;
private CardModeController cardModeController;
// Controller for the active mode; always one of the two fields above.
private AbstractModeController controller;
private PullStaggeredGridLayoutManager layoutManager;
// Collapses all cards shortly after being called (300 ms delay).
public void resetCardMode() {
Ami.log("resetCardMode");
postDelayed(new Runnable() {
@Override
public void run() {
controller.allShrink();
}
}, 300);
}
// Ends a refresh: return to the PULL state and collapse the header.
public void finishRefreshing() {
setState(State.PULL);
controller.allShrink();
}
public enum State {
HIDDEN, PULL, READY_REFRESH, REFRESHING, READY_CARD, CARD
}
public PullRecycleView(Context context) {
this(context, null);
}
public PullRecycleView(Context context, @Nullable AttributeSet attrs) {
this(context, attrs, 0);
}
// Full constructor: builds both mode controllers (card mode active first)
// and wires mode/state logging listeners.
public PullRecycleView(final Context context, @Nullable AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
normalModeController = new NormalModeController(this);
cardModeController = new CardModeController(this);
controller = cardModeController;
addOnModeChangedListener(new OnModeChangedListener() {
@Override
public void onModeChangedListener(int mode) {
Ami.log("mode changed: " + mode);
// Carry the current vertical offset over to the new controller so the
// switch is visually seamless.
int y = controller.getY();
if (mode == CARD_MODE) {
controller = cardModeController;
} else {
controller = normalModeController;
}
controller.setY(y);
layoutManager.setController(controller);
}
});
addOnPullStateChangedListener(new OnPullStateChangedListener() {
@Override
public void onStateChanged(State state) {
Ami.log("state chagned=" + state.name() + ", mode=" + mMode);
}
});
}
// Installs the layout manager and shares it with both mode controllers.
public void setLayoutManager(PullStaggeredGridLayoutManager layout) {
super.setLayoutManager(layout);
layoutManager = layout;
mLayoutManagerHelper = new LayoutManagerHelper(layout);
normalModeController.setLayoutManager(mLayoutManagerHelper);
cardModeController.setLayoutManager(mLayoutManagerHelper);
layoutManager.setController(controller);
}
@Override
public void onScrolled(int dx, int dy) {
super.onScrolled(dx, dy);
// Ami.log("onScrolled: dx=" + dx + ", dy="+dy + ", mOffset=" + controller.offset());
shrink();
}
@Override
protected void onScrollChanged(int l, int t, int oldl, int oldt) {
super.onScrollChanged(l, t, oldl, oldt);
// shrink();
}
// Last scroll state reported by the RecyclerView (dragging/settling/idle).
private int scrollState;
@Override
public void onScrollStateChanged(int state) {
super.onScrollStateChanged(state);
scrollState = state;
controller.setScrollState(state);
// Ami.log("onScrollStateChanged: scroll state: " + scrollState + ", mode="+mMode + ", state=" + mState.name());
shrink();
}
// Core state machine: re-evaluates the pull state from the controller's
// current offsets whenever scrolling happens or the scroll state changes.
private void shrink() {
if (controller.headerShow()) {
setMode(NORMAL_MODEL);
return;
}
// Pull handling only applies to vertical layouts.
if (mLayoutManagerHelper.getOrientation() != RecyclerView.VERTICAL) {
return;
}
if (scrollState == SCROLL_STATE_DRAGGING) {
if (isState(State.REFRESHING)) {
// While refreshing, dragging the finger must not change the state.
return;
}
if (mState == State.READY_CARD) {
// Commit to card mode and scroll the pulled-out offset back in;
// touches are swallowed until the next ACTION_DOWN.
setState(PullRecycleView.State.CARD);
setMode(CARD_MODE);
smoothScrollBy(0, -controller.offset());
mTouchable = false;
return;
}
// Map the pull distance onto the threshold ladder.
if (controller.overFull()) {
setState(State.READY_CARD);
} else if(controller.overRefresh()) {
setState(State.READY_REFRESH);
} else if(controller.overPull()){
setState(State.PULL);
} else {
setState(State.HIDDEN);
}
} else if (scrollState == SCROLL_STATE_IDLE) {
// Finger lifted: settle whatever "ready" state was reached.
if (mState == State.READY_CARD) {
// controller.showCard();
setState(PullRecycleView.State.CARD);
setMode(CARD_MODE);
smoothScrollBy(0, -controller.offset());
} else if (mState == State.READY_REFRESH) {
controller.showRefresh();
setState(PullRecycleView.State.REFRESHING);
} else if (mState == State.PULL || mState == State.HIDDEN) {
controller.allShrink();
setState(PullRecycleView.State.HIDDEN);
}
}
}
@Override
public boolean onInterceptTouchEvent(MotionEvent e) {
// Ami.log("onInterceptTouchEvent: " + e.toString().substring(15, 40));
// A new gesture always re-enables touch handling.
if(e.getAction() == MotionEvent.ACTION_DOWN) {
if (!mTouchable) {
mTouchable = true;
}
}
return super.onInterceptTouchEvent(e);
}
@Override
public boolean onTouchEvent(MotionEvent e) {
// Ami.log("onTouchEvent: " + e.toString().substring(15, 40));
switch (e.getAction()) {
case MotionEvent.ACTION_DOWN:
if (!mTouchable) {
mTouchable = true;
return true;
}
break;
case MotionEvent.ACTION_MOVE:
case MotionEvent.ACTION_UP:
case MotionEvent.ACTION_CANCEL:
// While untouchable, snap the view back and swallow the event.
if (!mTouchable) {
scrollBy(0, -controller.offset());
stopScroll();
return true;
}
break;
}
return super.onTouchEvent(e);
}
private boolean isState(State state) {
return mState == state;
}
// Updates the state and notifies listeners; no-op when unchanged.
private void setState(State state) {
if (mState == state) {
return;
}
mState = state;
onStateChanged(state);
for (OnPullStateChangedListener listener: mPullStateListeners) {
listener.onStateChanged(state);
}
}
// Hook for subclasses; intentionally empty here.
private void onStateChanged(State state) {
}
private boolean isMode(int mode) {
return mMode == mode;
}
// Updates the mode and notifies listeners; no-op when unchanged.
private void setMode(int mode) {
if (mMode == mode) {
return;
}
mMode = mode;
for (OnModeChangedListener listener: mModeListeners) {
listener.onModeChangedListener(mode);
}
}
@Override
public void onNestedScrollAccepted(View child, View target, int axes) {
super.onNestedScrollAccepted(child, target, axes);
// Ami.log("onNestedScrollAccepted: child=" + child + ", target="+target + ", axes=" + axes);
}
@Override
protected void onOverScrolled(int scrollX, int scrollY, boolean clampedX, boolean clampedY) {
super.onOverScrolled(scrollX, scrollY, clampedX, clampedY);
// Ami.log("onOverScrolled: scrollX=" + scrollX + ", scrollY="+scrollY + ", clampedX=" + clampedX + ", clampedY=" + clampedY);
}
// Installs the pull header and subscribes it to state/mode changes.
public void setHeaderView(PullHeaderView2 headerView) {
this.mHeaderView = headerView;
mHeaderView.setLayoutParams(new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT));
normalModeController.setHeaderView(mHeaderView);
cardModeController.setHeaderView(mHeaderView);
addOnPullStateChangedListener(mHeaderView);
addOnModeChangedListener(mHeaderView);
}
public PullHeaderView2 getHeaderView() {
return mHeaderView;
}
public void addOnPullStateChangedListener(OnPullStateChangedListener onPullStateChangedListener) {
mPullStateListeners.add(onPullStateChangedListener);
}
public void addOnModeChangedListener(OnModeChangedListener listener) {
mModeListeners.add(listener);
}
/** Observer of pull-state transitions. */
public interface OnPullStateChangedListener {
void onStateChanged(State state);
}
/** Observer of display-mode changes (NORMAL_MODEL / CARD_MODE). */
public interface OnModeChangedListener {
void onModeChangedListener(int mode);
}
}
|
package org.extreme.model.managers;
/**
 * Marker interface for report-settings managers. No operations are declared
 * yet; implementations currently serve only as a type-level tag.
 * NOTE(review): intended responsibilities are not visible here — confirm
 * before adding methods.
 */
public interface ReportSettingsManager {
}
|
/*
* Copyright (c) 2014, 2018, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*
*/
package jdk.jshell;
import com.sun.source.tree.CompilationUnitTree;
import com.sun.source.tree.Tree;
import com.sun.source.util.Trees;
import com.sun.tools.javac.api.JavacTaskImpl;
import com.sun.tools.javac.util.Context;
import java.util.ArrayList;
import java.util.List;
import javax.tools.Diagnostic;
import javax.tools.DiagnosticCollector;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileManager;
import javax.tools.JavaFileObject;
import javax.tools.ToolProvider;
import static jdk.jshell.Util.*;
import com.sun.source.tree.ImportTree;
import com.sun.tools.javac.code.Types;
import com.sun.tools.javac.util.JavacMessages;
import jdk.jshell.MemoryFileManager.OutputMemoryJavaFileObject;
import java.util.Collections;
import java.util.Locale;
import static javax.tools.StandardLocation.CLASS_OUTPUT;
import static jdk.internal.jshell.debug.InternalDebugControl.DBG_GEN;
import java.io.File;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;
import static java.util.stream.Collectors.toList;
import java.util.stream.Stream;
import javax.lang.model.util.Elements;
import javax.tools.FileObject;
import jdk.jshell.MemoryFileManager.SourceMemoryJavaFileObject;
import java.lang.Runtime.Version;
import java.nio.CharBuffer;
import java.util.function.BiFunction;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.Tree.Kind;
import com.sun.source.util.TaskEvent;
import com.sun.source.util.TaskListener;
import com.sun.tools.javac.api.JavacTaskPool;
import com.sun.tools.javac.code.ClassFinder;
import com.sun.tools.javac.code.Kinds;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.code.Symbol.PackageSymbol;
import com.sun.tools.javac.code.Symbol.TypeSymbol;
import com.sun.tools.javac.code.Symbol.VarSymbol;
import com.sun.tools.javac.code.Symtab;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.comp.Attr;
import com.sun.tools.javac.comp.AttrContext;
import com.sun.tools.javac.comp.Enter;
import com.sun.tools.javac.comp.Env;
import com.sun.tools.javac.comp.Resolve;
import com.sun.tools.javac.parser.Parser;
import com.sun.tools.javac.parser.ParserFactory;
import com.sun.tools.javac.tree.JCTree.JCExpression;
import com.sun.tools.javac.tree.JCTree.JCTypeCast;
import com.sun.tools.javac.tree.JCTree.JCVariableDecl;
import com.sun.tools.javac.tree.JCTree.Tag;
import com.sun.tools.javac.util.Context.Factory;
import com.sun.tools.javac.util.Log;
import com.sun.tools.javac.util.Log.DiscardDiagnosticHandler;
import com.sun.tools.javac.util.Names;
import static jdk.internal.jshell.debug.InternalDebugControl.DBG_FMGR;
import jdk.jshell.Snippet.Status;
/**
* The primary interface to the compiler API. Parsing, analysis, and
* compilation to class files (in memory).
* @author Robert Field
*/
class TaskFactory {
// System Java compiler used for every parse/analyze/compile task.
private final JavaCompiler compiler;
// In-memory file manager holding generated sources and class files.
private final MemoryFileManager fileManager;
// Owning JShell instance; supplies options, debug logging, class tracking.
private final JShell state;
// Effective classpath; extended via addToClasspath().
private String classpath = System.getProperty("java.class.path");
// Oldest platform version this factory supports running on.
private final static Version INITIAL_SUPPORTED_VER = Version.parse("9");
/**
 * Creates the factory backing one JShell instance. Fails fast when no system
 * compiler is available (running on a JRE) or the platform predates JDK 9.
 */
TaskFactory(JShell state) {
this.state = state;
this.compiler = ToolProvider.getSystemJavaCompiler();
if (compiler == null) {
throw new UnsupportedOperationException("Compiler not available, must be run with full JDK 9.");
}
Version current = Version.parse(System.getProperty("java.specification.version"));
if (INITIAL_SUPPORTED_VER.compareToIgnoreOptional(current) > 0) {
throw new UnsupportedOperationException("Wrong compiler, must be run with full JDK 9.");
}
this.fileManager = new MemoryFileManager(
compiler.getStandardFileManager(null, null, null), state);
initTaskPool();
}
/**
 * Appends an entry to the effective classpath, pushes the updated path to the
 * file manager, and rebuilds the task pool (pooled contexts cache the old
 * classpath).
 */
void addToClasspath(String path) {
    classpath = classpath + File.pathSeparator + path;
    List<String> remaining = new ArrayList<>(List.of(classpath));
    fileManager().handleOption("-classpath", remaining.iterator());
    initTaskPool();
}
/** The in-memory file manager shared by all tasks produced by this factory. */
MemoryFileManager fileManager() {
return fileManager;
}
/**
 * Parses a raw source snippet.
 *
 * @param forceExpression when true the REPL parser is told to interpret the
 *        snippet as an expression rather than a statement
 * @param worker receives the resulting ParseTask; its return value is passed through
 */
public <Z> Z parse(String source,
boolean forceExpression,
Worker<ParseTask, Z> worker) {
StringSourceHandler sh = new StringSourceHandler();
return runTask(Stream.of(source),
sh,
List.of("-XDallowStringFolding=false", "-proc:none",
"-XDneedsReplParserFactory=" + forceExpression),
(jti, diagnostics) -> new ParseTask(sh, jti, diagnostics, forceExpression),
worker);
}
/** Analyzes a single wrap with no extra compiler options. */
public <Z> Z analyze(OuterWrap wrap,
Worker<AnalyzeTask, Z> worker) {
return analyze(Collections.singletonList(wrap), worker);
}
/** Analyzes a single wrap with additional compiler options. */
public <Z> Z analyze(OuterWrap wrap,
List<String> extraArgs,
Worker<AnalyzeTask, Z> worker) {
return analyze(Collections.singletonList(wrap), extraArgs, worker);
}
/** Analyzes a batch of wraps with no extra compiler options. */
public <Z> Z analyze(Collection<OuterWrap> wraps,
Worker<AnalyzeTask, Z> worker) {
return analyze(wraps, Collections.emptyList(), worker);
}
/**
 * Analyzes a batch of wraps, running the compiler through its FLOW phase
 * (errors reported, no code generation).
 *
 * @param wraps     the wrapped snippets to analyze
 * @param extraArgs additional compiler options appended after the defaults
 * @param worker    receives the resulting AnalyzeTask; its result is returned
 */
public <Z> Z analyze(Collection<OuterWrap> wraps,
                     List<String> extraArgs,
                     Worker<AnalyzeTask, Z> worker) {
    WrapSourceHandler sh = new WrapSourceHandler();
    List<String> options = new ArrayList<>(
            List.of("--should-stop=at=FLOW", "-Xlint:unchecked", "-proc:none"));
    options.addAll(extraArgs);
    return runTask(wraps.stream(),
            sh,
            options,
            (jti, diagnostics) -> new AnalyzeTask(sh, jti, diagnostics),
            worker);
}
/** Compiles a batch of wraps to in-memory class files. */
public <Z> Z compile(Collection<OuterWrap> wraps,
Worker<CompileTask, Z> worker) {
WrapSourceHandler sh = new WrapSourceHandler();
return runTask(wraps.stream(),
sh,
List.of("-Xlint:unchecked", "-proc:none", "-parameters"),
(jti, diagnostics) -> new CompileTask(sh, jti, diagnostics),
worker);
}
/**
 * Common driver for parse/analyze/compile: converts inputs to file objects,
 * borrows a javac task from the pool, runs the worker on the wrapped task,
 * and afterwards purges all REPL-package classes from the shared symbol
 * table so the pooled context can be safely reused.
 */
private <S, T extends BaseTask, Z> Z runTask(Stream<S> inputs,
SourceHandler<S> sh,
List<String> options,
BiFunction<JavacTaskImpl, DiagnosticCollector<JavaFileObject>, T> creator,
Worker<T, Z> worker) {
// Combine per-call options with session-wide extra compiler options.
List<String> allOptions = new ArrayList<>(options.size() + state.extraCompilerOptions.size());
allOptions.addAll(options);
allOptions.addAll(state.extraCompilerOptions);
Iterable<? extends JavaFileObject> compilationUnits = inputs
.map(in -> sh.sourceToFileObject(fileManager, in))
.collect(Collectors.toList());
DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<>();
state.debug(DBG_FMGR, "Task (%s %s) Options: %s\n", this, compilationUnits, allOptions);
return javacTaskPool.getTask(null, fileManager, diagnostics, allOptions, null,
compilationUnits, task -> {
JavacTaskImpl jti = (JavacTaskImpl) task;
Context context = jti.getContext();
DisableAccessibilityResolve.preRegister(context);
jti.addTaskListener(new TaskListenerImpl(context, state));
try {
return worker.withTask(creator.apply(jti, diagnostics));
} finally {
//additional cleanup: purge the REPL package:
Symtab syms = Symtab.instance(context);
Names names = Names.instance(context);
PackageSymbol repl = syms.getPackage(syms.unnamedModule, names.fromString(Util.REPL_PACKAGE));
if (repl != null) {
for (ClassSymbol clazz : syms.getAllClasses()) {
if (clazz.packge() == repl) {
syms.removeClass(syms.unnamedModule, clazz.flatName());
}
}
// Drop cached members and force re-completion on next use.
repl.members_field = null;
repl.completer = ClassFinder.instance(context).getCompleter();
}
}
});
}
/** Callback receiving a configured task; its return value is passed through to the caller. */
interface Worker<T extends BaseTask, Z> {
public Z withTask(T task);
}
// Parse a snippet and return our parse task handler
/**
 * Parses a snippet, retrying with forced-expression mode when the first
 * attempt looks like a mis-parsed expression (e.g. {@code a < b} read as a
 * declaration with a leading type variable).
 */
<Z> Z parse(final String source, Worker<ParseTask, Z> worker) {
return parse(source, false, pt -> {
if (!pt.units().isEmpty()
&& pt.units().get(0).getKind() == Kind.EXPRESSION_STATEMENT
&& pt.getDiagnostics().hasOtherThanNotStatementErrors()) {
// It failed, it may be an expression being incorrectly
// parsed as having a leading type variable, example: a < b
// Try forcing interpretation as an expression
return parse(source, true, ept -> {
if (!ept.getDiagnostics().hasOtherThanNotStatementErrors()) {
return worker.withTask(ept);
} else {
// Retry was no better; report the original attempt.
return worker.withTask(pt);
}
});
}
return worker.withTask(pt);
});
}
/** Strategy converting inputs of type T into file objects and mapping their diagnostics. */
private interface SourceHandler<T> {
JavaFileObject sourceToFileObject(MemoryFileManager fm, T t);
Diag diag(Diagnostic<? extends JavaFileObject> d);
}
/** SourceHandler for raw String snippets: positions map 1:1 to the source text. */
private class StringSourceHandler implements SourceHandler<String> {
@Override
public JavaFileObject sourceToFileObject(MemoryFileManager fm, String src) {
// The file name is irrelevant for plain-string parsing.
return fm.createSourceFileObject(src, "$NeverUsedName$", src);
}
@Override
public Diag diag(final Diagnostic<? extends JavaFileObject> d) {
// Thin adapter: delegate positions/codes, expunge wrapper artifacts from messages.
return new Diag() {
@Override
public boolean isError() {
return d.getKind() == Diagnostic.Kind.ERROR;
}
@Override
public long getPosition() {
return d.getPosition();
}
@Override
public long getStartPosition() {
return d.getStartPosition();
}
@Override
public long getEndPosition() {
return d.getEndPosition();
}
@Override
public String getCode() {
return d.getCode();
}
@Override
public String getMessage(Locale locale) {
return expunge(d.getMessage(locale));
}
};
}
}
/** SourceHandler for OuterWrap inputs: diagnostics are re-mapped into wrap coordinates. */
private class WrapSourceHandler implements SourceHandler<OuterWrap> {
@Override
public JavaFileObject sourceToFileObject(MemoryFileManager fm, OuterWrap w) {
return fm.createSourceFileObject(w, w.classFullName(), w.wrapped());
}
/**
 * Get the source information from the wrap. If this is external, or
 * otherwise does not have wrap info, just use source code.
 * @param d the Diagnostic from the compiler
 * @return the corresponding Diag
 */
@Override
public Diag diag(Diagnostic<? extends JavaFileObject> d) {
JavaFileObject jfo = d.getSource();
return jfo instanceof SourceMemoryJavaFileObject
? ((OuterWrap) ((SourceMemoryJavaFileObject) jfo).getOrigin()).wrapDiag(d)
: new StringSourceHandler().diag(d);
}
}
/**
* Parse a snippet of code (as a String) using the parser subclass. Return
* the parse tree (and errors).
*/
/**
 * Parse a snippet of code (as a String) using the parser subclass. Return
 * the parse tree (and errors).
 */
class ParseTask extends BaseTask {
// Parsed compilation units (one per input source).
private final Iterable<? extends CompilationUnitTree> cuts;
// Flattened top-level trees: imports when present, otherwise type declarations.
private final List<? extends Tree> units;
private ParseTask(SourceHandler<String> sh,
JavacTaskImpl task,
DiagnosticCollector<JavaFileObject> diagnostics,
boolean forceExpression) {
super(sh, task, diagnostics);
// Install the REPL parser (optionally forcing expression interpretation)
// before parsing.
ReplParserFactory.preRegister(context, forceExpression);
cuts = parse();
units = Util.stream(cuts)
.flatMap(cut -> {
List<? extends ImportTree> imps = cut.getImports();
return (!imps.isEmpty() ? imps : cut.getTypeDecls()).stream();
})
.collect(toList());
}
private Iterable<? extends CompilationUnitTree> parse() {
try {
return task.parse();
} catch (Exception ex) {
throw new InternalError("Exception during parse - " + ex.getMessage(), ex);
}
}
List<? extends Tree> units() {
return units;
}
@Override
Iterable<? extends CompilationUnitTree> cuTrees() {
return cuts;
}
}
/**
* Run the normal "analyze()" pass of the compiler over the wrapped snippet.
*/
/**
 * Run the normal "analyze()" pass of the compiler over the wrapped snippet.
 */
class AnalyzeTask extends BaseTask {
// Compilation units produced by parse(); analyze() attributes them in place.
private final Iterable<? extends CompilationUnitTree> cuts;
private AnalyzeTask(SourceHandler<OuterWrap> sh,
JavacTaskImpl task,
DiagnosticCollector<JavaFileObject> diagnostics) {
super(sh, task, diagnostics);
cuts = analyze();
}
private Iterable<? extends CompilationUnitTree> analyze() {
try {
Iterable<? extends CompilationUnitTree> cuts = task.parse();
task.analyze();
return cuts;
} catch (Exception ex) {
throw new InternalError("Exception during analyze - " + ex.getMessage(), ex);
}
}
@Override
Iterable<? extends CompilationUnitTree> cuTrees() {
return cuts;
}
Elements getElements() {
return task.getElements();
}
javax.lang.model.util.Types getTypes() {
return task.getTypes();
}
}
/**
* Unit the wrapped snippet to class files.
*/
/**
 * Compile the wrapped snippets to class files (in memory).
 */
class CompileTask extends BaseTask {
    // Class files generated per wrap during this compilation.
    private final Map<OuterWrap, List<OutputMemoryJavaFileObject>> classObjs = new HashMap<>();
    CompileTask(SourceHandler<OuterWrap> sh,
            JavacTaskImpl jti,
            DiagnosticCollector<JavaFileObject> diagnostics) {
        super(sh, jti, diagnostics);
    }
    /**
     * Runs the compilation, capturing every class file the compiler emits.
     * The listener is always de-registered, even when the compiler throws
     * (the original leaked the listener on an exception from call()).
     */
    boolean compile() {
        fileManager.registerClassFileCreationListener(this::listenForNewClassFile);
        try {
            return task.call();
        } finally {
            fileManager.registerClassFileCreationListener(null);
        }
    }
    // Returns the list of classes generated during this compile.
    // Stores the mapping between class name and current compiled bytes.
    List<String> classList(OuterWrap w) {
        List<OutputMemoryJavaFileObject> l = classObjs.get(w);
        if (l == null) {
            return Collections.emptyList();
        }
        List<String> list = new ArrayList<>();
        for (OutputMemoryJavaFileObject fo : l) {
            state.classTracker.setCurrentBytes(fo.getName(), fo.getBytes());
            list.add(fo.getName());
        }
        return list;
    }
    /** Records each CLASS_OUTPUT file against the wrap it originated from (or null). */
    private void listenForNewClassFile(OutputMemoryJavaFileObject jfo, JavaFileManager.Location location,
            String className, JavaFileObject.Kind kind, FileObject sibling) {
        //debug("listenForNewClassFile %s loc=%s kind=%s\n", className, location, kind);
        if (location == CLASS_OUTPUT) {
            state.debug(DBG_GEN, "Compiler generating class %s\n", className);
            OuterWrap w = ((sibling instanceof SourceMemoryJavaFileObject)
                    && (((SourceMemoryJavaFileObject) sibling).getOrigin() instanceof OuterWrap))
                    ? (OuterWrap) ((SourceMemoryJavaFileObject) sibling).getOrigin()
                    : null;
            // computeIfAbsent replaces the original compute(), which needlessly
            // re-stored the existing list on every hit.
            classObjs.computeIfAbsent(w, k -> new ArrayList<>()).add(jfo);
        }
    }
    @Override
    Iterable<? extends CompilationUnitTree> cuTrees() {
        throw new UnsupportedOperationException("Not supported.");
    }
}
// Pool of reusable javac task contexts; rebuilt whenever the classpath changes.
private JavacTaskPool javacTaskPool;
private void initTaskPool() {
// Capacity of 5 pooled contexts.
javacTaskPool = new JavacTaskPool(5);
}
/**
 * Common state and diagnostics plumbing shared by Parse/Analyze/Compile
 * tasks: lazy access to compiler services and de-duplicated, wrap-mapped
 * diagnostics.
 */
abstract class BaseTask {
final DiagnosticCollector<JavaFileObject> diagnostics;
final JavacTaskImpl task;
// Lazily built, de-duplicated diagnostics; see getDiagnostics().
private DiagList diags = null;
// Maps raw compiler diagnostics into snippet/wrap coordinates.
private final SourceHandler<?> sourceHandler;
final Context context;
// Lazily initialized compiler services (see accessors below).
private Types types;
private JavacMessages messages;
private Trees trees;
private <T>BaseTask(SourceHandler<T> sh,
JavacTaskImpl task,
DiagnosticCollector<JavaFileObject> diagnostics) {
this.sourceHandler = sh;
this.task = task;
context = task.getContext();
this.diagnostics = diagnostics;
}
abstract Iterable<? extends CompilationUnitTree> cuTrees();
CompilationUnitTree firstCuTree() {
return cuTrees().iterator().next();
}
Diag diag(Diagnostic<? extends JavaFileObject> diag) {
return sourceHandler.diag(diag);
}
Context getContext() {
return context;
}
Types types() {
if (types == null) {
types = Types.instance(context);
}
return types;
}
JavacMessages messages() {
if (messages == null) {
messages = JavacMessages.instance(context);
}
return messages;
}
Trees trees() {
if (trees == null) {
trees = Trees.instance(task);
}
return trees;
}
// ------------------ diags functionality
/**
 * Collected diagnostics, de-duplicated by (code, position, message) while
 * preserving first-seen order.
 */
DiagList getDiagnostics() {
if (diags == null) {
LinkedHashMap<String, Diag> diagMap = new LinkedHashMap<>();
for (Diagnostic<? extends JavaFileObject> in : diagnostics.getDiagnostics()) {
Diag d = diag(in);
String uniqueKey = d.getCode() + ":" + d.getPosition() + ":" + d.getMessage(PARSED_LOCALE);
diagMap.put(uniqueKey, d);
}
diags = new DiagList(diagMap.values());
}
return diags;
}
boolean hasErrors() {
return getDiagnostics().hasErrors();
}
// Concatenates diagnostic messages, dropping "location:" detail lines.
String shortErrorMessage() {
StringBuilder sb = new StringBuilder();
for (Diag diag : getDiagnostics()) {
for (String line : diag.getMessage(PARSED_LOCALE).split("\\r?\\n")) {
if (!line.trim().startsWith("location:")) {
sb.append(line);
}
}
}
return sb.toString();
}
// Debug dump: each diagnostic plus a caret/underline marker into src.
void debugPrintDiagnostics(String src) {
for (Diag diag : getDiagnostics()) {
state.debug(DBG_GEN, "ERROR --\n");
for (String line : diag.getMessage(PARSED_LOCALE).split("\\r?\\n")) {
if (!line.trim().startsWith("location:")) {
state.debug(DBG_GEN, "%s\n", line);
}
}
int start = (int) diag.getStartPosition();
int end = (int) diag.getEndPosition();
if (src != null) {
String[] srcLines = src.split("\\r?\\n");
for (String line : srcLines) {
state.debug(DBG_GEN, "%s\n", line);
}
// Build a "   ^----^" marker spanning [start, end).
StringBuilder sb = new StringBuilder();
for (int i = 0; i < start; ++i) {
sb.append(' ');
}
sb.append('^');
if (end > start) {
for (int i = start + 1; i < end; ++i) {
sb.append('-');
}
sb.append('^');
}
state.debug(DBG_GEN, "%s\n", sb.toString());
}
state.debug(DBG_GEN, "printDiagnostics start-pos = %d ==> %d -- wrap = %s\n",
diag.getStartPosition(), start, this);
state.debug(DBG_GEN, "Code: %s\n", diag.getCode());
state.debug(DBG_GEN, "Pos: %d (%d - %d) -- %s\n", diag.getPosition(),
diag.getStartPosition(), diag.getEndPosition(), diag.getMessage(null));
}
}
}
/**The variable types inferred for "var"s may be non-denotable.
* jshell desugars these variables into fields, and fields must have
* a denotable type. So these fields are declared with some simpler denotable
* type, and the listener here enhances the types of the fields to be the full
* inferred types. This is mainly when the inferred type contains:
* -intersection types (e.g. <Z extends Runnable&CharSequence> Z get() {...} var z = get();)
* -types that are inaccessible at the given place
*
* This type enhancement does not need to do anything about anonymous classes, as these
* are desugared into member classes.
*/
/** Enhances the declared types of "var" fields to their full inferred types
 * at ENTER time and restores the original (denotable) declaration types just
 * before GENERATE writes the class files. See the class comment above. */
private static final class TaskListenerImpl implements TaskListener {
private final Context context;
private final JShell state;
/* Keep the original (declaration) types of the fields that were enhanced.
 * The declaration types need to be put back before writing the fields
 * into classfiles.*/
private final Map<VarSymbol, Type> var2OriginalType = new HashMap<>();
public TaskListenerImpl(Context context, JShell state) {
this.context = context;
this.state = state;
}
@Override
public void started(TaskEvent e) {
if (e.getKind() != TaskEvent.Kind.GENERATE)
return ;
//clear enhanced types in fields we are about to write to the classfiles:
for (Tree clazz : e.getCompilationUnit().getTypeDecls()) {
ClassTree ct = (ClassTree) clazz;
for (Tree member : ct.getMembers()) {
if (member.getKind() != Tree.Kind.VARIABLE)
continue;
VarSymbol vsym = ((JCVariableDecl) member).sym;
Type original = var2OriginalType.remove(vsym);
if (original != null) {
vsym.type = original;
}
}
}
}
// Guards against re-enhancing on subsequent ENTER events.
private boolean variablesSet = false;
@Override
public void finished(TaskEvent e) {
if (e.getKind() != TaskEvent.Kind.ENTER || variablesSet)
return ;
// Enhance every valid var snippet that has an initializer-inferred type.
state.maps
.snippetList()
.stream()
.filter(s -> s.status() == Status.VALID)
.filter(s -> s.kind() == Snippet.Kind.VAR)
.filter(s -> s.subKind() == Snippet.SubKind.VAR_DECLARATION_WITH_INITIALIZER_SUBKIND ||
s.subKind() == Snippet.SubKind.TEMP_VAR_EXPRESSION_SUBKIND)
.forEach(s -> setVariableType((VarSnippet) s));
variablesSet = true;
}
/* If the snippet contain enhanced types, enhance the type of
 * the variable from snippet s to be the enhanced type.
 */
private void setVariableType(VarSnippet s) {
String typeName = s.fullTypeName;
if (typeName == null)
return ;
Symtab syms = Symtab.instance(context);
Names names = Names.instance(context);
Log log = Log.instance(context);
ParserFactory parserFactory = ParserFactory.instance(context);
Attr attr = Attr.instance(context);
Enter enter = Enter.instance(context);
DisableAccessibilityResolve rs = (DisableAccessibilityResolve) Resolve.instance(context);
//find the variable:
ClassSymbol clazz = syms.getClass(syms.unnamedModule, names.fromString(s.classFullName()));
if (clazz == null || !clazz.isCompleted())
return;
VarSymbol field = (VarSymbol) clazz.members().findFirst(names.fromString(s.name()), sym -> sym.kind == Kinds.Kind.VAR);
if (field != null && !var2OriginalType.containsKey(field)) {
//if it was not enhanced yet:
//ignore any errors:
JavaFileObject prev = log.useSource(null);
DiscardDiagnosticHandler h = new DiscardDiagnosticHandler(log);
try {
//parse the type as a cast, i.e. "(<typeName>) x". This is to support
//intersection types:
CharBuffer buf = CharBuffer.wrap(("(" + typeName +")x\u0000").toCharArray(), 0, typeName.length() + 3);
Parser parser = parserFactory.newParser(buf, false, false, false);
JCExpression expr = parser.parseExpression();
if (expr.hasTag(Tag.TYPECAST)) {
//if parsed OK, attribute and set the type:
var2OriginalType.put(field, field.type);
JCTypeCast tree = (JCTypeCast) expr;
rs.runWithoutAccessChecks(() -> {
field.type = attr.attribType(tree.clazz,
enter.getEnvs().iterator().next().enclClass.sym);
});
}
} finally {
log.popDiagnosticHandler(h);
log.useSource(prev);
}
}
}
}
/**
 * A {@link Resolve} replacement that can temporarily report every symbol and
 * type as accessible, used while attributing enhanced variable types.
 */
private static final class DisableAccessibilityResolve extends Resolve {
    /** Placed into the context so the factory is only registered once. */
    private static final class Marker {}

    /**
     * Installs this Resolve implementation into the given context unless a
     * previous call already did so.
     */
    public static void preRegister(Context context) {
        if (context.get(Marker.class) != null) {
            return;
        }
        context.put(resolveKey, ((Factory<Resolve>) c -> new DisableAccessibilityResolve(c)));
        context.put(Marker.class, new Marker());
    }

    /** While true, both isAccessible overloads answer true unconditionally. */
    private boolean noAccessChecks;

    public DisableAccessibilityResolve(Context context) {
        super(context);
    }

    /**
     * Runs the given action with all access checks disabled, restoring the
     * previous setting afterwards (safe to nest).
     *
     * @param action the action to run
     */
    public void runWithoutAccessChecks(Runnable action) {
        boolean saved = noAccessChecks;
        noAccessChecks = true;
        try {
            action.run();
        } finally {
            noAccessChecks = saved;
        }
    }

    @Override
    public boolean isAccessible(Env<AttrContext> env, TypeSymbol c, boolean checkInner) {
        return noAccessChecks || super.isAccessible(env, c, checkInner);
    }

    @Override
    public boolean isAccessible(Env<AttrContext> env, Type site, Symbol sym, boolean checkInner) {
        return noAccessChecks || super.isAccessible(env, site, sym, checkInner);
    }
}
}
|
package com.centurylink.pctl.mod.address.domain.utils;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import java.io.IOException;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeFormatterBuilder;
/**
 * Custom Jackson deserializer that turns either an ISO 8601 date string (with
 * an optional offset-time part) or a {@code [year, month, day]} JSON array
 * into a JSR-310 {@link LocalDate}.
 */
public class JSR310LocalDateDeserializer extends JsonDeserializer<LocalDate> {

    /** Shared stateless instance; the formatter below is thread-safe. */
    public static final JSR310LocalDateDeserializer INSTANCE = new JSR310LocalDateDeserializer();

    /** ISO local date, optionally followed by {@code 'T'} and an ISO offset time. */
    private static final DateTimeFormatter ISO_DATE_OPTIONAL_TIME =
            new DateTimeFormatterBuilder()
                    .append(DateTimeFormatter.ISO_LOCAL_DATE)
                    .optionalStart()
                    .appendLiteral('T')
                    .append(DateTimeFormatter.ISO_OFFSET_TIME)
                    .toFormatter();

    private JSR310LocalDateDeserializer() {}

    @Override
    public LocalDate deserialize(JsonParser parser, DeserializationContext context) throws IOException {
        JsonToken token = parser.getCurrentToken();
        if (token == JsonToken.START_ARRAY) {
            // An empty array deserializes to null.
            if (parser.nextToken() == JsonToken.END_ARRAY) {
                return null;
            }
            int year = parser.getIntValue();
            parser.nextToken();
            int month = parser.getIntValue();
            parser.nextToken();
            int day = parser.getIntValue();
            if (parser.nextToken() != JsonToken.END_ARRAY) {
                throw context.wrongTokenException(parser, JsonToken.END_ARRAY, "Expected array to end.");
            }
            return LocalDate.of(year, month, day);
        }
        if (token == JsonToken.VALUE_STRING) {
            String text = parser.getText().trim();
            // A blank string deserializes to null.
            if (text.isEmpty()) {
                return null;
            }
            return LocalDate.parse(text, ISO_DATE_OPTIONAL_TIME);
        }
        throw context.wrongTokenException(parser, JsonToken.START_ARRAY, "Expected array or string.");
    }
}
|
/*___Generated_by_IDEA___*/
package com.twilio.example;
/**
 * IDE-only stub of the Android resource class. It is NOT the R class actually
 * packed into the APK; the real one is generated by the build.
 */
public final class R {
}
|
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.screens.datamodeller.backend.server;
import javax.persistence.Entity;
import org.guvnor.common.services.project.model.Module;
import org.guvnor.common.services.shared.metadata.model.Metadata;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.screens.datamodeller.events.DataObjectCreatedEvent;
import org.kie.workbench.common.screens.datamodeller.events.DataObjectDeletedEvent;
import org.kie.workbench.common.screens.datamodeller.model.persistence.PersistableDataObject;
import org.kie.workbench.common.screens.datamodeller.model.persistence.PersistenceDescriptorModel;
import org.kie.workbench.common.screens.datamodeller.model.persistence.PersistenceUnitModel;
import org.kie.workbench.common.screens.datamodeller.service.PersistenceDescriptorService;
import org.kie.workbench.common.services.datamodeller.core.DataObject;
import org.kie.workbench.common.services.datamodeller.core.impl.AnnotationImpl;
import org.kie.workbench.common.services.datamodeller.core.impl.DataObjectImpl;
import org.kie.workbench.common.services.datamodeller.util.DriverUtils;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;
import org.uberfire.backend.vfs.Path;
import org.uberfire.io.IOService;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@code DataModelerEventObserver}: the persistence descriptor
 * must be updated when persistable (JPA {@code @Entity}-annotated) data objects
 * are created or deleted, and left untouched for non-persistable ones.
 */
@RunWith(MockitoJUnitRunner.class)
public class DataModelerEventObserverTest {
    @Mock
    PersistenceDescriptorService descriptorService;
    @Mock
    IOService ioService;
    @Mock
    public Module module;
    @Mock
    private Path descriptorPath;
    private static final String DESCRIPTOR_PATH = "file://dummy-repo/dummy-project/src/main/resources/META-INF/persistence.xml";
    private DataModelerEventObserver eventObserver;
    private PersistenceDescriptorModel descriptorModel;
    private DataObject dataObject;
    @Before
    public void init() {
        eventObserver = createObserver();
        descriptorModel = createModel();
        // A persistable fixture: a data object carrying the JPA @Entity annotation.
        dataObject = new DataObjectImpl("package1", "PersistableObject");
        dataObject.addAnnotation(new AnnotationImpl(DriverUtils.buildAnnotationDefinition(Entity.class)));
        when(descriptorPath.toURI()).thenReturn(DESCRIPTOR_PATH);
        when(descriptorService.calculatePersistenceDescriptorPath(any(Module.class))).thenReturn(descriptorPath);
        when(descriptorService.load(descriptorPath)).thenReturn(descriptorModel);
        when(ioService.exists(any(org.uberfire.java.nio.file.Path.class))).thenReturn(true);
    }
    @Test
    public void onPersistableDataObjectCreatedTest() {
        DataObjectCreatedEvent createdEvent = new DataObjectCreatedEvent(module, dataObject);
        eventObserver.onDataObjectCreated(createdEvent);
        // Creating a persistable object must add it to the descriptor and save it.
        verify(descriptorService, times(1)).save(eq(descriptorPath), eq(descriptorModel), Mockito.<Metadata> any(), anyString());
        assertTrue(descriptorModel.getPersistenceUnit().getClasses().contains(new PersistableDataObject(dataObject.getClassName())));
    }
    @Test
    public void onPersistableDataObjectDeletedTest() {
        DataObjectDeletedEvent deletedEvent = new DataObjectDeletedEvent(module, dataObject);
        descriptorModel.getPersistenceUnit().getClasses().add(new PersistableDataObject(deletedEvent.getCurrentDataObject().getClassName()));
        eventObserver.onDataObjectDeleted(deletedEvent);
        // Deleting a persistable object must remove it from the descriptor and save it.
        verify(descriptorService, times(1)).save(eq(descriptorPath), eq(descriptorModel), Mockito.<Metadata> any(), anyString());
        assertFalse(descriptorModel.getPersistenceUnit().getClasses().contains(new PersistableDataObject(dataObject.getClassName())));
    }
    @Test
    public void onNonPersistableDataObjectCreatedTest() {
        descriptorModel.getPersistenceUnit().getClasses().add(new PersistableDataObject("package1.PersistableObject"));
        DataObjectCreatedEvent createdEvent = new DataObjectCreatedEvent(module, dataObject);
        eventObserver.onDataObjectCreated(createdEvent);
        // An object already present in the descriptor must not trigger another save.
        verify(descriptorService, times(0)).save(eq(descriptorPath), eq(descriptorModel), Mockito.<Metadata> any(), anyString());
        assertEquals(1, descriptorModel.getPersistenceUnit().getClasses().size());
    }
    @Test
    public void onNonPersistableDataObjectDeletedTest() {
        // FIX: this test previously fired a *created* event, duplicating
        // onNonPersistableDataObjectCreatedTest; it now exercises the delete path.
        DataObject nonPersistable = new DataObjectImpl("package1", "NonPersistableObject");
        descriptorModel.getPersistenceUnit().getClasses().add(new PersistableDataObject("package1.PersistableObject"));
        DataObjectDeletedEvent deletedEvent = new DataObjectDeletedEvent(module, nonPersistable);
        eventObserver.onDataObjectDeleted(deletedEvent);
        // Deleting a non-persistable object must neither save the descriptor nor
        // disturb unrelated entries in the class list.
        verify(descriptorService, times(0)).save(eq(descriptorPath), eq(descriptorModel), Mockito.<Metadata> any(), anyString());
        assertEquals(1, descriptorModel.getPersistenceUnit().getClasses().size());
    }
    /** Builds a fresh descriptor model with an empty persistence unit. */
    private PersistenceDescriptorModel createModel() {
        PersistenceDescriptorModel descriptorModel = new PersistenceDescriptorModel();
        descriptorModel.setPersistenceUnit(new PersistenceUnitModel());
        return descriptorModel;
    }
    /** Builds the observer under test wired to the mocked services. */
    private DataModelerEventObserver createObserver() {
        return new DataModelerEventObserver(descriptorService, ioService);
    }
}
|
/*
* Copyright 1999-2101 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gxl.kratos.sql.ast.statement;
/**
 * Marker interface for SQL AST nodes that act both as a constraint
 * ({@link SQLConstraint}) and as an element of a table definition
 * ({@link SQLTableElement}).
 */
public interface SQLTableConstraint extends SQLConstraint, SQLTableElement {
}
|
package com.example.hello;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
/** Spring MVC controller serving the user home page view. */
@Controller
@RequestMapping("/hello/myUserHomePage")
public class MyUserHomePageController {
/**
 * Handles requests to {@code /hello/myUserHomePage/showa}.
 *
 * @return the logical view name "userHomePage", resolved by the configured view resolver
 */
@RequestMapping("/showa")
public String showMyUserHomePage(){
return "userHomePage";
}
}
|
package com.java.util.function;
import com.java.Sneaky;
import java.util.function.Supplier;
/**
 * A {@link Supplier} variant whose {@link #get()} may throw a checked exception.
 *
 * @param <T> type of the supplied value
 */
@FunctionalInterface
public interface UncheckedSupplier<T> {

    /**
     * Supplies a value, possibly throwing a checked exception.
     *
     * @return the supplied value
     * @throws Exception if the underlying computation fails
     */
    T get() throws Exception;

    /**
     * Adapts the given throwing supplier to a plain {@link Supplier}; any
     * checked exception is rethrown unchecked via {@code Sneaky.sneakyThrow}.
     *
     * @param supplier the throwing supplier to adapt
     * @param <T>      type of the supplied value
     * @return a {@link Supplier} delegating to {@code supplier}
     */
    static <T> Supplier<T> unchecked(UncheckedSupplier<T> supplier) {
        return new Supplier<T>() {
            @Override
            public T get() {
                try {
                    return supplier.get();
                } catch (Exception cause) {
                    return Sneaky.sneakyThrow(cause);
                }
            }
        };
    }
}
|
/* Copyright 2008, 2009, 2010 by the Oxford University Computing Laboratory
This file is part of HermiT.
HermiT is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
HermiT is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with HermiT. If not, see <http://www.gnu.org/licenses/>.
*/
package org.semanticweb.HermiT.model;
import org.semanticweb.HermiT.Prefixes;
/**
 * Represents an internal datatype. Such objects are used in DL-clauses (e.g., in structural transformation of complex data ranges), but are ignored by the datatype manager.
 * Instances are interned: two InternalDatatype objects with the same IRI are
 * the same object, so reference comparison (==) is valid.
 */
public class InternalDatatype extends AtomicDataRange implements DLPredicate {
private static final long serialVersionUID=-1078274072706143620L;
// IRI identifying this datatype.
protected final String m_iri;
protected InternalDatatype(String iri) {
m_iri=iri;
}
public String getIRI() {
return m_iri;
}
// Internal datatypes are unary predicates.
public int getArity() {
return 1;
}
public LiteralDataRange getNegation() {
return AtomicNegationDataRange.create(this);
}
// Only the rdfs:Literal instance is always true; identity comparison is
// safe because instances are interned.
public boolean isAlwaysTrue() {
return this==RDFS_LITERAL;
}
public boolean isAlwaysFalse() {
return false;
}
public boolean isInternalDatatype() {
return true;
}
public String toString(Prefixes prefixes) {
return prefixes.abbreviateIRI(m_iri);
}
// Funnel deserialized instances through the interning manager so that
// reference equality keeps working after deserialization.
protected Object readResolve() {
return s_interningManager.intern(this);
}
protected static InterningManager<InternalDatatype> s_interningManager=new InterningManager<InternalDatatype>() {
protected boolean equal(InternalDatatype object1,InternalDatatype object2) {
return object1.m_iri.equals(object2.m_iri);
}
protected int getHashCode(InternalDatatype object) {
return object.m_iri.hashCode();
}
};
/**
 * Returns the canonical (interned) InternalDatatype for the given IRI.
 *
 * @param uri the datatype IRI
 */
public static InternalDatatype create(String uri) {
return s_interningManager.intern(new InternalDatatype(uri));
}
// NOTE: declared after s_interningManager on purpose — create() needs the
// interning manager during static initialization.
public static final InternalDatatype RDFS_LITERAL=create("http://www.w3.org/2000/01/rdf-schema#Literal");
}
|
package com.in28minutes.concurrency;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.LongAdder;
/**
 * Demonstrates thread-safe per-character counting with {@link ConcurrentHashMap}
 * and {@link LongAdder}.
 *
 * <p>Fix: the original loop incremented each character twice — once via
 * {@code computeIfAbsent(...).increment()} and once more via a redundant
 * get/increment/put sequence (whose null check was dead code, since
 * computeIfAbsent guarantees presence). Each occurrence is now counted once.
 */
public class ConcurrentMapRunner {

    /**
     * Counts how often each character occurs in {@code input}.
     *
     * @param input the string to analyze (may be empty)
     * @return a concurrent map from character to its occurrence counter
     */
    static ConcurrentMap<Character, LongAdder> countOccurrences(String input) {
        ConcurrentMap<Character, LongAdder> occurrences = new ConcurrentHashMap<>();
        for (char character : input.toCharArray()) {
            // computeIfAbsent guarantees a LongAdder is present; increment once.
            occurrences.computeIfAbsent(character, ch -> new LongAdder()).increment();
        }
        return occurrences;
    }

    public static void main(String[] args) {
        System.out.println(countOccurrences("ABCD ABCD ABCD"));
    }
}
|
package fr.diabhelp.diabhelp.Connexion_inscription;
import android.app.Activity;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import fr.diabhelp.diabhelp.Utils.MyToast;
import fr.diabhelp.diabhelp.R;
/**
 * Registration step 2: collects the user's first name, last name and role,
 * validates them, and forwards them to the hosting activity through
 * {@link FragmentSecondStepListener}.
 */
public class RegisterPersonalInfosFragment extends Fragment {

    /** Minimum number of characters required for first and last name. */
    private static final int MIN_NAME_LENGTH = 3;

    private TextView firstnameView;
    private TextView lastnameView;
    private Spinner roleSpinner;
    private Button ValidateButton;
    private Activity _context;
    private FragmentSecondStepListener mListener;

    public RegisterPersonalInfosFragment() {
        // Required empty public constructor
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_register_personal_infos, container, false);
        firstnameView = (TextView) view.findViewById(R.id.firstname_input);
        lastnameView = (TextView) view.findViewById(R.id.lastname_input);
        roleSpinner = (Spinner) view.findViewById(R.id.role_spinner);
        ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(_context, R.array.roles, android.R.layout.simple_spinner_item);
        adapter.setDropDownViewResource(R.layout.support_simple_spinner_dropdown_item);
        roleSpinner.setAdapter(adapter);
        ValidateButton = (Button) view.findViewById(R.id.validate_button);
        ValidateButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                final String firstname = firstnameView.getText().toString();
                final String lastname = lastnameView.getText().toString();
                String role = (String) roleSpinner.getSelectedItem();
                FieldError fieldError = checkFields(firstname, lastname);
                if (fieldError != FieldError.NONE) {
                    manage_fieldError(fieldError);
                } else {
                    // Map the display label to the server-side role constant
                    // before handing the data to the activity.
                    role = formatRole(role);
                    mListener.saveDatasSecondStep(firstname, lastname, role);
                }
            }
        });
        return (view);
    }

    /**
     * Maps the localized role label selected in the spinner to the
     * corresponding server-side role constant.
     */
    private String formatRole(String role) {
        String[] roles = getResources().getStringArray(R.array.roles);
        if (role.equals(roles[0])) {
            role = "ROLE_PATIENT";
        } else if (role.equals(roles[1])) {
            role = "ROLE_PROCHE";
        }
        return (role);
    }

    /** Shows a user-facing warning for the given validation error. */
    private void manage_fieldError(FieldError fieldError) {
        switch (fieldError) {
            case FIELD_INCOMPLETE: {
                MyToast.getInstance().displayWarningMessage(getString(R.string.error_incomplete_fields), Toast.LENGTH_LONG, _context);
                Log.i("RegisterPersonalInfos", "Les champs sont incomplets");
                break;
            }
        }
    }

    /**
     * Validates the name fields.
     *
     * @return {@link FieldError#NONE} when both names are long enough,
     *         {@link FieldError#FIELD_INCOMPLETE} otherwise
     */
    private FieldError checkFields(String firstname, String lastname) {
        if (!isStringValid(firstname, MIN_NAME_LENGTH) || !isStringValid(lastname, MIN_NAME_LENGTH)) {
            return FieldError.FIELD_INCOMPLETE;
        }
        return FieldError.NONE;
    }

    /**
     * @return true when {@code string} is non-null, non-empty and at least
     *         {@code lengthMinRequired} characters long
     */
    private boolean isStringValid(String string, int lengthMinRequired) {
        return string != null && !string.isEmpty() && string.length() >= lengthMinRequired;
    }

    @Override
    public void onAttach(Activity context) {
        super.onAttach(context);
        if (context instanceof FragmentSecondStepListener) {
            mListener = (FragmentSecondStepListener) context;
            _context = context;
        } else {
            throw new RuntimeException(context.toString()
                    + " must implement FragmentSecondStepListener");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        // Drop the activity reference to avoid leaking it after detach.
        mListener = null;
    }

    /**
     * This interface must be implemented by activities that contain this
     * fragment to allow an interaction in this fragment to be communicated
     * to the activity and potentially other fragments contained in that
     * activity.
     * <p>
     * See the Android Training lesson <a href=
     * "http://developer.android.com/training/basics/fragments/communicating.html"
     * >Communicating with Other Fragments</a> for more information.
     */
    public interface FragmentSecondStepListener {
        void saveDatasSecondStep(String firstname, String lastName, String role);
    }

    /** Validation outcome for the name fields. */
    public enum FieldError {
        NONE,
        FIELD_INCOMPLETE,
    }
}
|
package ru.job4j.illustratingthread;
/**
 * Demonstrates problems that can arise with multithreading: two threads run
 * the same task against shared {@code Count}/{@code Count2} instances.
 */
public class IllustratingThread implements Runnable {

    private final Count count;
    private final Count2 count2;

    private IllustratingThread(Count count, Count2 count2) {
        this.count = count;
        this.count2 = count2;
    }

    public static void main(String[] args) {
        Count sharedCount = new Count();
        Count2 sharedCount2 = new Count2();
        // Both threads operate on the same counter objects.
        Runnable task = new IllustratingThread(sharedCount, sharedCount2);
        new Thread(task).start();
        new Thread(task).start();
    }

    @Override
    public void run() {
        System.out.println(count.illustratingThread());
        System.out.println(count2.illustratingThread());
    }
}
|
package com.example.restservice;
/**
 * Simple mutable DTO pairing a service name with a log message.
 */
public class Logging {

    private String service;
    private String message;

    /**
     * @param service name of the originating service
     * @param message log message payload
     */
    public Logging(String service, String message) {
        this.service = service;
        this.message = message;
    }

    /** @return the originating service name */
    public String getService() {
        return service;
    }

    /** @return the log message payload */
    public String getMessage() {
        return message;
    }

    /** @param service new originating service name */
    public void setService(String service) {
        this.service = service;
    }

    /** @param message new log message payload */
    public void setMessage(String message) {
        this.message = message;
    }

    @Override
    public String toString() {
        return "Log{service='" + service + "', message='" + message + "'}";
    }
}
|
package com.mysql.cj.protocol;
import com.mysql.cj.Messages;
import java.io.EOFException;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
/**
 * An {@link InputStream} decorator that guarantees complete reads and skips:
 * {@link #readFully(byte[], int, int)} and {@link #skipFully(long)} loop until
 * the requested number of bytes is consumed, throwing {@link EOFException} if
 * the stream ends first.
 */
public class FullReadInputStream extends FilterInputStream {

    public FullReadInputStream(InputStream underlyingStream) {
        super(underlyingStream);
    }

    /** @return the wrapped stream */
    public InputStream getUnderlyingStream() {
        return this.in;
    }

    /** Reads exactly {@code b.length} bytes into {@code b}. */
    public int readFully(byte[] b) throws IOException {
        return readFully(b, 0, b.length);
    }

    /**
     * Reads exactly {@code len} bytes into {@code b} starting at {@code off}.
     *
     * @return the number of bytes read (always {@code len})
     * @throws EOFException if the stream ends before {@code len} bytes are read
     */
    public int readFully(byte[] b, int off, int len) throws IOException {
        if (len < 0) {
            throw new IndexOutOfBoundsException();
        }
        int total = 0;
        while (total < len) {
            int chunk = read(b, off + total, len - total);
            if (chunk < 0) {
                throw new EOFException(Messages.getString("MysqlIO.EOF", new Object[] { Integer.valueOf(len), Integer.valueOf(total) }));
            }
            total += chunk;
        }
        return total;
    }

    /**
     * Skips exactly {@code len} bytes.
     *
     * @return the number of bytes skipped (always {@code len})
     * @throws EOFException if the stream ends before {@code len} bytes are skipped
     */
    public long skipFully(long len) throws IOException {
        if (len < 0L) {
            throw new IOException(Messages.getString("MysqlIO.105"));
        }
        long skipped = 0L;
        while (skipped < len) {
            long chunk = skip(len - skipped);
            if (chunk < 0L) {
                throw new EOFException(Messages.getString("MysqlIO.EOF", new Object[] { Long.valueOf(len), Long.valueOf(skipped) }));
            }
            skipped += chunk;
        }
        return skipped;
    }

    /**
     * Skips over a length-encoded integer, dispatching on its first byte.
     *
     * @return the number of bytes consumed (1, 3, 4 or 9)
     */
    public int skipLengthEncodedInteger() throws IOException {
        int marker = read() & 0xFF;
        if (marker == 252) {
            return (int) skipFully(2L) + 1;
        }
        if (marker == 253) {
            return (int) skipFully(3L) + 1;
        }
        if (marker == 254) {
            return (int) skipFully(8L) + 1;
        }
        return 1;
    }
}
/* Location: C:\Users\BSV\AppData\Local\Temp\Rar$DRa6216.20396\Preview\Preview.jar!\com\mysql\cj\protocol\FullReadInputStream.class
* Java compiler version: 8 (52.0)
* JD-Core Version: 1.1.3
*/
|
package com.alibaba.rocketmq.remoting;
import org.junit.Test;
import com.alibaba.rocketmq.remoting.exception.RemotingConnectException;
import com.alibaba.rocketmq.remoting.exception.RemotingSendRequestException;
import com.alibaba.rocketmq.remoting.exception.RemotingTimeoutException;
import com.alibaba.rocketmq.remoting.netty.NettyClientConfig;
import com.alibaba.rocketmq.remoting.netty.NettyRemotingClient;
import com.alibaba.rocketmq.remoting.protocol.RemotingCommand;
/**
 * Connection timeout test: repeatedly sends a synchronous request to an
 * address with no listening server and expects every attempt to fail.
 *
 * @author shijia.wxr<vintage.wang@gmail.com>
 * @since 2013-7-6
 */
public class NettyConnectionTest {
    /**
     * Builds and starts a Netty remoting client with a 15-second idle limit.
     *
     * @return a started client; the caller is responsible for shutting it down
     */
    public static RemotingClient createRemotingClient() {
        NettyClientConfig config = new NettyClientConfig();
        config.setClientChannelMaxIdleTimeSeconds(15);
        RemotingClient client = new NettyRemotingClient(config);
        client.start();
        return client;
    }
    @Test
    public void test_connect_timeout() throws InterruptedException, RemotingConnectException,
            RemotingSendRequestException, RemotingTimeoutException {
        RemotingClient client = createRemotingClient();
        for (int i = 0; i < 100; i++) {
            try {
                RemotingCommand request = RemotingCommand.createRequestCommand(0, null);
                // Expected to fail: nothing listens on localhost:8888. The 3s
                // timeout bounds each attempt; the response is intentionally unused.
                RemotingCommand response = client.invokeSync("localhost:8888", request, 1000 * 3);
            }
            catch (Exception e) {
                e.printStackTrace();
            }
        }
        client.shutdown();
        System.out.println("-----------------------------------------------------------------");
    }
}
|
/*-
* ============LICENSE_START=======================================================
* SDC
* ================================================================================
* Copyright (C) 2017 AT&T Intellectual Property. All rights reserved.
* ================================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ============LICENSE_END=========================================================
*/
package org.openecomp.sdc.be.model.operations.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.tinkerpop.gremlin.structure.io.IoCore;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.openecomp.sdc.be.dao.jsongraph.GraphVertex;
import org.openecomp.sdc.be.dao.jsongraph.TitanDao;
import org.openecomp.sdc.be.dao.jsongraph.types.EdgeLabelEnum;
import org.openecomp.sdc.be.dao.jsongraph.types.VertexTypeEnum;
import org.openecomp.sdc.be.dao.titan.TitanOperationStatus;
import org.openecomp.sdc.be.datatypes.elements.MapPropertiesDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.CapabilityDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.MapDataDefinition;
import org.openecomp.sdc.be.datatypes.elements.PropertyDataDefinition;
import org.openecomp.sdc.be.datatypes.enums.ComponentTypeEnum;
import org.openecomp.sdc.be.datatypes.enums.GraphPropertyEnum;
import org.openecomp.sdc.be.datatypes.enums.JsonPresentationFields;
import org.openecomp.sdc.be.datatypes.enums.ResourceTypeEnum;
import org.openecomp.sdc.be.datatypes.tosca.ToscaDataDefinition;
import org.openecomp.sdc.be.model.LifecycleStateEnum;
import org.openecomp.sdc.be.model.ModelTestBase;
import org.openecomp.sdc.be.model.category.CategoryDefinition;
import org.openecomp.sdc.be.model.category.SubCategoryDefinition;
import org.openecomp.sdc.be.model.jsontitan.datamodel.NodeType;
import org.openecomp.sdc.be.model.jsontitan.datamodel.TopologyTemplate;
import org.openecomp.sdc.be.model.jsontitan.datamodel.ToscaElement;
import org.openecomp.sdc.be.model.jsontitan.operations.NodeTypeOperation;
import org.openecomp.sdc.be.model.jsontitan.operations.TopologyTemplateOperation;
import org.openecomp.sdc.be.model.jsontitan.operations.ToscaElementLifecycleOperation;
import org.openecomp.sdc.be.model.operations.api.StorageOperationStatus;
import org.openecomp.sdc.common.util.ValidationUtils;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import com.thinkaurelius.titan.core.TitanGraph;
import com.thinkaurelius.titan.core.TitanVertex;
import fj.data.Either;
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration("classpath:application-context-test.xml")
public class ToscaElementLifecycleOperationTest extends ModelTestBase {
@javax.annotation.Resource
protected TitanDao titanDao;
@javax.annotation.Resource
private NodeTypeOperation nodeTypeOperation;
@javax.annotation.Resource
private TopologyTemplateOperation topologyTemplateOperation;
@javax.annotation.Resource
private ToscaElementLifecycleOperation lifecycleOperation;
String categoryName = "category";
String subcategory = "mycategory";
String outputDirectory = "C:\\Output";
@Rule
public TestName name = new TestName();
@BeforeClass
public static void initLifecycleOperation() {
// One-time model/test infrastructure initialization for the whole class.
ModelTestBase.init();
}
private GraphVertex ownerVertex;
private GraphVertex modifierVertex;
private GraphVertex vfVertex;
private GraphVertex serviceVertex;
@Before
public void setupBefore() {
// Start every test from an empty graph and rebuild the minimal fixture:
// users, categories, a root node type, one VF resource and one service.
clearGraph();
createUsers();
createResourceCategory();
createServiceCategory();
createRootNodeType();
createNodeType("firstVf");
createTopologyTemplate("firstService");
}
@Test
public void lifecycleTest() {
// Drives a VF resource through repeated checkin/checkout/certification
// cycles, mutating its tosca data along the way, and finally exercises the
// cancel-certification and fail-certification transitions.
Either<ToscaElement, StorageOperationStatus> res = lifecycleOperation
.checkinToscaELement(LifecycleStateEnum.findState((String) vfVertex.getMetadataProperty(GraphPropertyEnum.STATE)),
vfVertex.getUniqueId(), modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
StorageOperationStatus status;
assertTrue(res.isLeft());
String id = res.left().value().getUniqueId();
// Each lifecycle transition may produce a new element version, so the id
// is refreshed after every successful operation.
res = lifecycleOperation.checkoutToscaElement(id, ownerVertex.getUniqueId(), modifierVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
// Add a property and a capability on the checked-out version:
PropertyDataDefinition prop55 = new PropertyDataDefinition();
prop55.setName("prop55");
prop55.setDefaultValue("def55");
status = nodeTypeOperation.addToscaDataToToscaElement(id, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, prop55, JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
CapabilityDataDefinition cap1 = new CapabilityDataDefinition();
cap1.setName("cap1");
cap1.setDescription("create");
cap1.setUniqueId(UniqueIdBuilder.buildCapabilityUid(id, "cap1"));
status = nodeTypeOperation.addToscaDataToToscaElement(id, EdgeLabelEnum.CAPABILITIES, VertexTypeEnum.CAPABILTIES, cap1, JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
res = lifecycleOperation.checkinToscaELement(LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT, id, ownerVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.checkoutToscaElement(id, ownerVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
// Update existing tosca data and add another property:
prop55.setDefaultValue("AAAAAAAA");
status = nodeTypeOperation.updateToscaDataOfToscaElement(id, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, prop55, JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
cap1.setDescription("update");
status = nodeTypeOperation.updateToscaDataOfToscaElement(id, EdgeLabelEnum.CAPABILITIES, VertexTypeEnum.CAPABILTIES, cap1, JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
PropertyDataDefinition prop66 = new PropertyDataDefinition();
prop66.setName("prop66");
prop66.setDefaultValue("def66");
status = nodeTypeOperation.addToscaDataToToscaElement(id, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, prop66, JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
// First full certification cycle: request -> start -> certify.
res = lifecycleOperation.requestCertificationToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.startCertificationToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.certifyToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
// Check out the certified version and keep editing:
res = lifecycleOperation.checkoutToscaElement(id, ownerVertex.getUniqueId(), modifierVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
PropertyDataDefinition prop77 = new PropertyDataDefinition();
prop77.setName("prop77");
prop77.setDefaultValue("def77");
status = nodeTypeOperation.addToscaDataToToscaElement(id, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, prop77, JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
res = lifecycleOperation.checkinToscaELement(LifecycleStateEnum.NOT_CERTIFIED_CHECKOUT, id, ownerVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.checkoutToscaElement(id, ownerVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
PropertyDataDefinition prop88 = new PropertyDataDefinition();
prop88.setName("prop88");
prop88.setDefaultValue("def88");
status = nodeTypeOperation.addToscaDataToToscaElement(id, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, prop88, JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
// Second full certification cycle:
res = lifecycleOperation.requestCertificationToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.startCertificationToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.certifyToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.checkoutToscaElement(id, ownerVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
PropertyDataDefinition prop99 = new PropertyDataDefinition();
prop99.setName("prop99");
prop99.setDefaultValue("def99");
status = nodeTypeOperation.addToscaDataToToscaElement(id, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, prop99, JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
res = lifecycleOperation.requestCertificationToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.startCertificationToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
// Delete tosca data while certification is in progress:
status = nodeTypeOperation.deleteToscaDataElement(id, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, "prop99", JsonPresentationFields.NAME);
assertTrue(status == StorageOperationStatus.OK);
//cancel certification
res = lifecycleOperation.cancelOrFailCertification(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId(), LifecycleStateEnum.READY_FOR_CERTIFICATION);
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
res = lifecycleOperation.startCertificationToscaElement(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
//fail certification
res = lifecycleOperation.cancelOrFailCertification(id, modifierVertex.getUniqueId(), ownerVertex.getUniqueId(), LifecycleStateEnum.NOT_CERTIFIED_CHECKIN);
assertTrue(res.isLeft());
id = res.left().value().getUniqueId();
//exportGraphMl(titanDao.getGraph().left().value());
}
/**
 * Verifies that checking a service out stamps it with the conformance level
 * configured in {@link ModelTestBase#configurationManager}.
 */
@Test
public void serviceConformanceLevelTest() {
    // Check the service in, then out again, so a fresh working copy is created.
    Either<ToscaElement, StorageOperationStatus> res = lifecycleOperation
            .checkinToscaELement(LifecycleStateEnum.findState((String) serviceVertex.getMetadataProperty(GraphPropertyEnum.STATE)),
                    serviceVertex.getUniqueId(), modifierVertex.getUniqueId(), ownerVertex.getUniqueId());
    assertTrue(res.isLeft());
    String id = res.left().value().getUniqueId();
    res = lifecycleOperation.checkoutToscaElement(id, ownerVertex.getUniqueId(), modifierVertex.getUniqueId());
    assertTrue(res.isLeft());
    String conformanceLevel = res.left().value().getMetadataValue(JsonPresentationFields.CONFORMANCE_LEVEL).toString();
    // JUnit's assertEquals takes (expected, actual); the original call had the
    // arguments swapped, which yields a misleading failure message.
    assertEquals(ModelTestBase.configurationManager.getConfiguration().getToscaConformanceLevel(), conformanceLevel);
}
/**
 * Persists a resource-category vertex plus one sub-category vertex and links
 * them with a SUB_CATEGORY edge. Fails the test early if any graph write fails.
 */
private void createResourceCategory() {
    GraphVertex cat = new GraphVertex(VertexTypeEnum.RESOURCE_CATEGORY);
    Map<GraphPropertyEnum, Object> metadataProperties = new HashMap<>();
    String catId = UniqueIdBuilder.buildComponentCategoryUid(categoryName, VertexTypeEnum.RESOURCE_CATEGORY);
    cat.setUniqueId(catId);
    metadataProperties.put(GraphPropertyEnum.UNIQUE_ID, catId);
    metadataProperties.put(GraphPropertyEnum.LABEL, VertexTypeEnum.RESOURCE_CATEGORY.getName());
    metadataProperties.put(GraphPropertyEnum.NAME, categoryName);
    metadataProperties.put(GraphPropertyEnum.NORMALIZED_NAME, ValidationUtils.normalizeCategoryName4Uniqueness(categoryName));
    cat.setMetadataProperties(metadataProperties);
    cat.updateMetadataJsonWithCurrentMetadataProperties();
    GraphVertex subCat = new GraphVertex(VertexTypeEnum.RESOURCE_SUBCATEGORY);
    metadataProperties = new HashMap<>();
    String subCatId = UniqueIdBuilder.buildSubCategoryUid(cat.getUniqueId(), subcategory);
    subCat.setUniqueId(subCatId);
    metadataProperties.put(GraphPropertyEnum.UNIQUE_ID, subCatId);
    metadataProperties.put(GraphPropertyEnum.LABEL, VertexTypeEnum.RESOURCE_SUBCATEGORY.getName());
    // NOTE(review): unlike the category above, no NORMALIZED_NAME is set on the
    // sub-category — presumably intentional for this fixture; confirm.
    metadataProperties.put(GraphPropertyEnum.NAME, subcategory);
    subCat.setMetadataProperties(metadataProperties);
    subCat.updateMetadataJsonWithCurrentMetadataProperties();
    Either<GraphVertex, TitanOperationStatus> catRes = titanDao.createVertex(cat);
    // Fail fast instead of dereferencing a Right (error) value below.
    assertTrue(catRes.isLeft());
    Either<GraphVertex, TitanOperationStatus> subCatRes = titanDao.createVertex(subCat);
    assertTrue(subCatRes.isLeft());
    TitanOperationStatus status = titanDao.createEdge(catRes.left().value().getVertex(), subCatRes.left().value().getVertex(), EdgeLabelEnum.SUB_CATEGORY, new HashMap<>());
    assertEquals(TitanOperationStatus.OK, status);
}
/**
 * Stores a single SERVICE_CATEGORY vertex in the graph and asserts the write
 * succeeded.
 */
private void createServiceCategory() {
    GraphVertex categoryVertex = new GraphVertex(VertexTypeEnum.SERVICE_CATEGORY);
    String categoryId = UniqueIdBuilder.buildComponentCategoryUid(categoryName, VertexTypeEnum.SERVICE_CATEGORY);
    categoryVertex.setUniqueId(categoryId);
    // Mirror the identifying data into the vertex metadata properties.
    Map<GraphPropertyEnum, Object> props = new HashMap<>();
    props.put(GraphPropertyEnum.UNIQUE_ID, categoryId);
    props.put(GraphPropertyEnum.LABEL, VertexTypeEnum.SERVICE_CATEGORY.getName());
    props.put(GraphPropertyEnum.NAME, categoryName);
    props.put(GraphPropertyEnum.NORMALIZED_NAME, ValidationUtils.normalizeCategoryName4Uniqueness(categoryName));
    categoryVertex.setMetadataProperties(props);
    categoryVertex.updateMetadataJsonWithCurrentMetadataProperties();
    Either<GraphVertex, TitanOperationStatus> createResult = titanDao.createVertex(categoryVertex);
    assertTrue(createResult.isLeft());
}
/**
 * Builds and persists a TopologyTemplate (service) fixture with the given name,
 * captures its stored graph vertex into {@code serviceVertex}, and returns the
 * template.
 *
 * @param name display name for the service
 * @return the in-memory template that was persisted
 */
private TopologyTemplate createTopologyTemplate(String name) {
TopologyTemplate service = new TopologyTemplate();
String uniqueId = UniqueIdBuilder.buildResourceUniqueId();
service.setUniqueId(uniqueId);
service.setCreatorUserId((String) ownerVertex.getMetadataProperty(GraphPropertyEnum.USERID));
service.getMetadata().put(JsonPresentationFields.NAME.getPresentation(), name);
service.getMetadata().put(JsonPresentationFields.UNIQUE_ID.getPresentation(), uniqueId);
service.getMetadata().put(JsonPresentationFields.VERSION.getPresentation(), "0.1");
// NOTE(review): TYPE=VF and COMPONENT_TYPE=RESOURCE look copy/pasted from the
// node-type fixture even though setComponentType(SERVICE) is called below —
// confirm whether the metadata values are intentional before changing.
service.getMetadata().put(JsonPresentationFields.TYPE.getPresentation(),ResourceTypeEnum.VF.name());
service.getMetadata().put(JsonPresentationFields.COMPONENT_TYPE.getPresentation(),ComponentTypeEnum.RESOURCE);
List<CategoryDefinition> categories = new ArrayList<>();
CategoryDefinition cat = new CategoryDefinition();
categories.add(cat);
cat.setName(categoryName);
service.setCategories(categories);
service.setComponentType(ComponentTypeEnum.SERVICE);
Either<TopologyTemplate, StorageOperationStatus> createRes = topologyTemplateOperation.createTopologyTemplate(service);
assertTrue(createRes.isLeft());
// Re-read the stored vertex so later tests can operate on the graph object.
Either<GraphVertex, TitanOperationStatus> getNodeTyeRes= titanDao.getVertexById(createRes.left().value().getUniqueId());
assertTrue(getNodeTyeRes.isLeft());
serviceVertex = getNodeTyeRes.left().value();
return service;
}
/**
 * Builds and persists a VF node-type fixture with the given name, captures its
 * stored graph vertex into {@code vfVertex}, attaches several properties (via
 * list, single-element and id-based overloads) plus one capability property,
 * and returns the node type.
 *
 * @param nodeTypeName display name for the node type
 * @return the in-memory node type that was persisted
 */
private <T extends ToscaDataDefinition> NodeType createNodeType(String nodeTypeName) {
    NodeType vf = new NodeType();
    String uniqueId = UniqueIdBuilder.buildResourceUniqueId();
    vf.setUniqueId(uniqueId);
    vf.setCreatorUserId((String) ownerVertex.getMetadataProperty(GraphPropertyEnum.USERID));
    vf.getMetadata().put(JsonPresentationFields.NAME.getPresentation(), nodeTypeName);
    vf.getMetadata().put(JsonPresentationFields.UNIQUE_ID.getPresentation(), uniqueId);
    vf.getMetadata().put(JsonPresentationFields.VERSION.getPresentation(), "0.1");
    vf.getMetadata().put(JsonPresentationFields.TYPE.getPresentation(), ResourceTypeEnum.VF.name());
    vf.getMetadata().put(JsonPresentationFields.COMPONENT_TYPE.getPresentation(), ComponentTypeEnum.RESOURCE);
    // Category + sub-category must match the ones created by createResourceCategory().
    List<CategoryDefinition> categories = new ArrayList<>();
    CategoryDefinition cat = new CategoryDefinition();
    categories.add(cat);
    cat.setName(categoryName);
    List<SubCategoryDefinition> subCategories = new ArrayList<>();
    SubCategoryDefinition subCat = new SubCategoryDefinition();
    subCat.setName(subcategory);
    subCategories.add(subCat);
    cat.setSubcategories(subCategories);
    vf.setCategories(categories);
    List<String> derivedFrom = new ArrayList<>();
    derivedFrom.add("root");
    vf.setDerivedFrom(derivedFrom);
    vf.setComponentType(ComponentTypeEnum.RESOURCE);
    Either<NodeType, StorageOperationStatus> createVFRes = nodeTypeOperation.createNodeType(vf);
    assertTrue(createVFRes.isLeft());
    // Re-read the stored vertex so later tests can operate on the graph object.
    Either<GraphVertex, TitanOperationStatus> getNodeTyeRes = titanDao.getVertexById(createVFRes.left().value().getUniqueId());
    assertTrue(getNodeTyeRes.isLeft());
    vfVertex = getNodeTyeRes.left().value();
    // Attach prop11/prop22 in one call (list overload).
    List<PropertyDataDefinition> addProperties = new ArrayList<>();
    PropertyDataDefinition prop11 = new PropertyDataDefinition();
    prop11.setName("prop11");
    prop11.setDefaultValue("def11");
    addProperties.add(prop11);
    PropertyDataDefinition prop22 = new PropertyDataDefinition();
    prop22.setName("prop22");
    prop22.setDefaultValue("def22");
    addProperties.add(prop22);
    StorageOperationStatus status = nodeTypeOperation.addToscaDataToToscaElement(vfVertex, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, addProperties, JsonPresentationFields.NAME);
    assertTrue(status == StorageOperationStatus.OK);
    // Attach prop33 via the single-element overload taking a vertex.
    PropertyDataDefinition prop33 = new PropertyDataDefinition();
    prop33.setName("prop33");
    prop33.setDefaultValue("def33");
    status = nodeTypeOperation.addToscaDataToToscaElement(vfVertex, EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, prop33, JsonPresentationFields.NAME);
    assertTrue(status == StorageOperationStatus.OK);
    // Attach prop44 via the overload taking the element id instead of the vertex.
    PropertyDataDefinition prop44 = new PropertyDataDefinition();
    prop44.setName("prop44");
    prop44.setDefaultValue("def44");
    status = nodeTypeOperation.addToscaDataToToscaElement(vfVertex.getUniqueId(), EdgeLabelEnum.PROPERTIES, VertexTypeEnum.PROPERTIES, prop44, JsonPresentationFields.NAME);
    assertTrue(status == StorageOperationStatus.OK);
    // Create one capability property under the "capName" capability...
    PropertyDataDefinition capProp = new PropertyDataDefinition();
    capProp.setName("capProp");
    capProp.setDefaultValue("capPropDef");
    MapDataDefinition dataToCreate = new MapPropertiesDataDefinition();
    dataToCreate.put("capProp", capProp);
    Map<String, MapDataDefinition> capProps = new HashMap<>();
    capProps.put("capName", dataToCreate);
    nodeTypeOperation.assosiateElementToData(vfVertex, VertexTypeEnum.CAPABILITIES_PROPERTIES, EdgeLabelEnum.CAPABILITIES_PROPERTIES, capProps);
    // ...then update its default value through the deep-element API.
    List<String> pathKeys = new ArrayList<>();
    pathKeys.add("capName");
    capProp.setDefaultValue("BBBB");
    status = nodeTypeOperation.updateToscaDataDeepElementOfToscaElement(vfVertex, EdgeLabelEnum.CAPABILITIES_PROPERTIES, VertexTypeEnum.CAPABILITIES_PROPERTIES,
            capProp, pathKeys, JsonPresentationFields.NAME);
    return vf;
}
/**
 * Persists the certified "root" node type that other fixtures derive from
 * (see createNodeType's derivedFrom list). It carries three "derivedN"
 * properties so derived elements can be checked for property inheritance.
 */
private void createRootNodeType() {
NodeType vf = new NodeType();
String uniqueId = UniqueIdBuilder.buildResourceUniqueId();
vf.setUniqueId(uniqueId);
vf.setComponentType(ComponentTypeEnum.RESOURCE);
vf.setCreatorUserId((String) ownerVertex.getMetadataProperty(GraphPropertyEnum.USERID));
vf.getMetadata().put(JsonPresentationFields.NAME.getPresentation(), "root");
vf.getMetadata().put(JsonPresentationFields.UNIQUE_ID.getPresentation(), uniqueId);
vf.getMetadata().put(JsonPresentationFields.VERSION.getPresentation(), "1.0");
vf.getMetadata().put(JsonPresentationFields.TYPE.getPresentation(),ResourceTypeEnum.VFC.name());
// The root is created already certified so it can serve as a derive-from base.
vf.getMetadata().put(JsonPresentationFields.LIFECYCLE_STATE.getPresentation(), LifecycleStateEnum.CERTIFIED.name());
vf.getMetadata().put(JsonPresentationFields.TOSCA_RESOURCE_NAME.getPresentation(), "root");
vf.getMetadata().put(JsonPresentationFields.HIGHEST_VERSION.getPresentation(), true);
List<CategoryDefinition> categories = new ArrayList<>();
CategoryDefinition cat = new CategoryDefinition();
categories.add(cat);
cat.setName(categoryName);
List<SubCategoryDefinition> subCategories = new ArrayList<>();
SubCategoryDefinition subCat = new SubCategoryDefinition();
subCat.setName(subcategory);
subCategories.add(subCat);
cat.setSubcategories(subCategories);
vf.setCategories(categories);
// Root derives from nothing.
List<String> derivedFrom = new ArrayList<>();
vf.setDerivedFrom(derivedFrom);
Map<String, PropertyDataDefinition> properties = new HashMap<>();
PropertyDataDefinition prop1 = new PropertyDataDefinition();
prop1.setName("derived1");
prop1.setDefaultValue("deriveddef1");
properties.put("derived1", prop1);
PropertyDataDefinition prop2 = new PropertyDataDefinition();
// NOTE(review): prop2 sets uniqueId+name where prop1/prop3 set name+defaultValue —
// looks like a copy/paste slip, but dependent assertions may rely on it; confirm
// before changing.
prop2.setUniqueId("derived2");
prop2.setName("deriveddef2");
properties.put("derived2", prop2);
PropertyDataDefinition prop3 = new PropertyDataDefinition();
prop3.setName("derived3");
prop3.setDefaultValue("deriveddef3");
properties.put("derived3", prop3);
vf.setProperties(properties);
vf.setComponentType(ComponentTypeEnum.RESOURCE);
Either<NodeType, StorageOperationStatus> createVFRes = nodeTypeOperation.createNodeType(vf);
assertTrue(createVFRes.isLeft());
Either<GraphVertex, TitanOperationStatus> getNodeTyeRes= titanDao.getVertexById(createVFRes.left().value().getUniqueId());
assertTrue(getNodeTyeRes.isLeft());
}
/**
 * Creates the owner ("user1") and modifier ("user2") USER vertices used by the
 * lifecycle tests, storing them in {@code ownerVertex} / {@code modifierVertex},
 * and verifies the owner can be looked up through the lifecycle operation.
 */
private void createUsers() {
    ownerVertex = createUserVertex("user1");
    modifierVertex = createUserVertex("user2");
    Either<GraphVertex, TitanOperationStatus> getOwnerRes = lifecycleOperation.findUser(ownerVertex.getUniqueId());
    assertTrue(getOwnerRes.isLeft());
}

/**
 * Creates and persists one USER vertex whose unique id, USERID and NAME all
 * equal {@code userId}; returns the vertex as stored in the graph.
 */
private GraphVertex createUserVertex(String userId) {
    GraphVertex userV = new GraphVertex(VertexTypeEnum.USER);
    userV.setUniqueId(userId);
    Map<GraphPropertyEnum, Object> metadataProperties = new HashMap<>();
    metadataProperties.put(GraphPropertyEnum.USERID, userV.getUniqueId());
    metadataProperties.put(GraphPropertyEnum.LABEL, VertexTypeEnum.USER.getName());
    metadataProperties.put(GraphPropertyEnum.NAME, userId);
    userV.setMetadataProperties(metadataProperties);
    userV.updateMetadataJsonWithCurrentMetadataProperties();
    // Users carry no JSON payload, but the field must not be null.
    userV.setJson(new HashMap<>());
    Either<GraphVertex, TitanOperationStatus> createUserRes = titanDao.createVertex(userV);
    assertTrue(createUserRes.isLeft());
    return createUserRes.left().value();
}
/** Drops all graph data after each test so test cases stay isolated. */
@After
public void teardown() {
clearGraph();
}
/**
 * Removes every vertex from the Titan graph and commits, leaving an empty
 * database for the next test.
 */
private void clearGraph() {
    Either<TitanGraph, TitanOperationStatus> graphResult = titanDao.getGraph();
    // Fail fast if the graph is unavailable instead of dereferencing an error value.
    assertTrue(graphResult.isLeft());
    TitanGraph graph = graphResult.left().value();
    Iterable<TitanVertex> vertices = graph.query().vertices();
    if (vertices != null) {
        // Enhanced for-loop replaces the manual Iterator while-loop.
        for (TitanVertex vertex : vertices) {
            vertex.remove();
        }
    }
    titanDao.commit();
}
/**
 * Debug helper: dumps the whole graph to a timestamped GraphML file under
 * {@code outputDirectory}.
 *
 * @param graph the graph to export
 * @return the output file path on success, or {@code null} if the export failed
 */
private String exportGraphMl(TitanGraph graph) {
String result = null;
String outputFile = outputDirectory + File.separator + "exportGraph." + System.currentTimeMillis() + ".graphml";
try {
try (final OutputStream os = new BufferedOutputStream(new FileOutputStream(outputFile))) {
graph.io(IoCore.graphml()).writer().normalize(true).create().writeGraph(os, graph);
}
result = outputFile;
// The export opens a read transaction; commit it so the graph stays clean.
graph.tx().commit();
} catch (Exception e) {
graph.tx().rollback();
// NOTE(review): printStackTrace is tolerated here because this is a
// best-effort debugging aid; a logger would be preferable.
e.printStackTrace();
}
return result;
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.soroushbot.component;
import org.apache.camel.RoutesBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.component.soroushbot.models.SoroushAction;
import org.apache.camel.component.soroushbot.support.SoroushBotTestSupport;
import org.junit.jupiter.api.Test;
/**
 * Checks that the Soroush consumer route processes messages concurrently:
 * five messages, each blocked for one second in its processor, must all reach
 * the mock endpoint within the 1.5s assert period — impossible on one thread.
 */
public class ConsumerMultiThreadTest extends SoroushBotTestSupport {
    @Test
    public void supportForConcurrentThreadTest() throws InterruptedException {
        MockEndpoint mock = getMockEndpoint("mock:supportForConcurrentThreadTest");
        mock.setExpectedMessageCount(5);
        mock.setAssertPeriod(1500);
        mock.assertIsSatisfied();
    }

    @Override
    protected RoutesBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Five-message feed, handled by a five-thread pool; each message
                // is artificially delayed to prove parallel processing.
                from("soroush://" + SoroushAction.getMessage + "/5")
                        .threads(5).process(exchange -> {
                            Thread.sleep(1000);
                        }).to("mock:supportForConcurrentThreadTest");
            }
        };
    }
}
|
package com.sendbird.uikit.widgets;
import android.content.Context;
import android.content.res.TypedArray;
import android.util.AttributeSet;
import android.view.LayoutInflater;
import android.view.View;
import androidx.databinding.DataBindingUtil;
import com.sendbird.android.BaseMessage;
import com.sendbird.android.GroupChannel;
import com.sendbird.uikit.R;
import com.sendbird.uikit.consts.MessageGroupType;
import com.sendbird.uikit.databinding.SbViewMyUserMessageComponentBinding;
import com.sendbird.uikit.utils.DateUtils;
import com.sendbird.uikit.utils.DrawableUtils;
import com.sendbird.uikit.utils.ViewUtils;
/**
 * Message view for text ("user") messages sent by the current user in a group
 * channel. Inflates the "my user message" binding, applies themed styling from
 * {@code MessageView_User} attributes, and renders message text, OG tag,
 * reactions, sent-at time and delivery status per message grouping.
 */
public class MyUserMessageView extends GroupChannelMessageView {
    private SbViewMyUserMessageComponentBinding binding;
    // Text appearance applied to the "edited" mark appended to updated messages.
    private int editedAppearance;

    @Override
    public SbViewMyUserMessageComponentBinding getBinding() {
        return binding;
    }

    public MyUserMessageView(Context context) {
        this(context, null);
    }

    public MyUserMessageView(Context context, AttributeSet attrs) {
        this(context, attrs, R.attr.sb_message_user_style);
    }

    public MyUserMessageView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init(context, attrs, defStyle);
    }

    /** Inflates the binding and wires all themed attributes and click relays. */
    private void init(Context context, AttributeSet attrs, int defStyle) {
        TypedArray a = context.getTheme().obtainStyledAttributes(attrs, R.styleable.MessageView_User, defStyle, 0);
        try {
            this.binding = DataBindingUtil.inflate(LayoutInflater.from(getContext()), R.layout.sb_view_my_user_message_component, this, true);
            int timeAppearance = a.getResourceId(R.styleable.MessageView_User_sb_message_time_text_appearance, R.style.SendbirdCaption4OnLight03);
            int messageAppearance = a.getResourceId(R.styleable.MessageView_User_sb_message_me_text_appearance, R.style.SendbirdBody3OnDark01);
            int messageBackground = a.getResourceId(R.styleable.MessageView_User_sb_message_me_background, R.drawable.sb_shape_chat_bubble);
            int messageBackgroundTint = a.getResourceId(R.styleable.MessageView_User_sb_message_me_background_tint, R.color.sb_message_me_tint_light);
            int emojiReactionListBackground = a.getResourceId(R.styleable.MessageView_User_sb_message_emoji_reaction_list_background, R.drawable.sb_shape_chat_bubble_reactions_light);
            int ogtagBackground = a.getResourceId(R.styleable.MessageView_User_sb_message_me_ogtag_background, R.drawable.sb_message_og_background);
            int ogtagBackgroundTint = a.getResourceId(R.styleable.MessageView_User_sb_message_me_ogtag_background_tint, R.color.sb_message_other_tint_light);
            int linkTextColor = a.getResourceId(R.styleable.MessageView_User_sb_message_me_link_text_color, R.color.ondark_01);
            int clickedLinkBackgroundColor = a.getResourceId(R.styleable.MessageView_User_sb_message_me_clicked_link_background_color, R.color.primary_400);
            editedAppearance = a.getResourceId(R.styleable.MessageView_User_sb_message_my_edited_mark_text_appearance, R.style.SendbirdBody3OnDark02);
            this.highlightBackgroundColor = a.getResourceId(R.styleable.MessageView_User_sb_message_highlight_background_color, R.color.highlight);
            this.highlightForegroundColor = a.getResourceId(R.styleable.MessageView_User_sb_message_highlight_foreground_color, R.color.background_600);
            binding.tvMessage.setTextAppearance(context, messageAppearance);
            binding.tvMessage.setLinkTextColor(context.getResources().getColor(linkTextColor));
            binding.tvSentAt.setTextAppearance(context, timeAppearance);
            binding.contentPanel.setBackground(DrawableUtils.setTintList(getContext(), messageBackground, messageBackgroundTint));
            binding.emojiReactionListBackground.setBackgroundResource(emojiReactionListBackground);
            binding.ogtagBackground.setBackground(DrawableUtils.setTintList(getContext(), ogtagBackground, ogtagBackgroundTint));
            binding.ovOgtag.setBackground(DrawableUtils.setTintList(getContext(), ogtagBackground, ogtagBackgroundTint));
            // Forward taps/long-presses on the text and OG tag to the bubble so
            // the whole bubble behaves as one touch target.
            binding.tvMessage.setOnClickListener(v -> binding.contentPanel.performClick());
            binding.tvMessage.setOnLongClickListener(v -> binding.contentPanel.performLongClick());
            binding.tvMessage.setOnLinkLongClickListener((v, link) -> binding.contentPanel.performLongClick());
            binding.tvMessage.setClickedLinkBackgroundColor(context.getResources().getColor(clickedLinkBackgroundColor));
            binding.ovOgtag.setOnLongClickListener(v -> binding.contentPanel.performLongClick());
        } finally {
            a.recycle();
        }
    }

    @Override
    public View getLayout() {
        return binding.getRoot();
    }

    /** Renders one message: toggles optional sections and applies grouping paddings. */
    @Override
    public void drawMessage(GroupChannel channel, BaseMessage message, MessageGroupType messageGroupType) {
        boolean isSent = message.getSendingStatus() == BaseMessage.SendingStatus.SUCCEEDED;
        boolean hasOgTag = message.getOgMetaData() != null;
        // isEmpty() reads better than size() > 0 and behaves identically here.
        boolean hasReaction = message.getReactions() != null && !message.getReactions().isEmpty();
        binding.emojiReactionListBackground.setVisibility(hasReaction ? View.VISIBLE : View.GONE);
        binding.rvEmojiReactionList.setVisibility(hasReaction ? View.VISIBLE : View.GONE);
        binding.ogtagBackground.setVisibility(hasOgTag ? View.VISIBLE : View.GONE);
        binding.ovOgtag.setVisibility(hasOgTag ? View.VISIBLE : View.GONE);
        // Sent-at is only shown on the tail of a group (or a standalone message).
        binding.tvSentAt.setVisibility((isSent && (messageGroupType == MessageGroupType.GROUPING_TYPE_TAIL || messageGroupType == MessageGroupType.GROUPING_TYPE_SINGLE)) ? View.VISIBLE : View.GONE);
        binding.tvSentAt.setText(DateUtils.formatTime(getContext(), message.getCreatedAt()));
        binding.ivStatus.drawStatus(message, channel);
        ViewUtils.drawTextMessage(binding.tvMessage, message, editedAppearance, highlightMessageInfo, highlightBackgroundColor, highlightForegroundColor);
        ViewUtils.drawOgtag(binding.ovOgtag, message.getOgMetaData());
        ViewUtils.drawReactionEnabled(binding.rvEmojiReactionList, channel);
        // Messages inside a group sit closer together (1dp) than group edges (8dp).
        int paddingTop = getResources().getDimensionPixelSize((messageGroupType == MessageGroupType.GROUPING_TYPE_TAIL || messageGroupType == MessageGroupType.GROUPING_TYPE_BODY) ? R.dimen.sb_size_1 : R.dimen.sb_size_8);
        int paddingBottom = getResources().getDimensionPixelSize((messageGroupType == MessageGroupType.GROUPING_TYPE_HEAD || messageGroupType == MessageGroupType.GROUPING_TYPE_BODY) ? R.dimen.sb_size_1 : R.dimen.sb_size_8);
        binding.root.setPadding(binding.root.getPaddingLeft(), paddingTop, binding.root.getPaddingRight(), paddingBottom);
        ViewUtils.drawQuotedMessage(binding.quoteReplyPanel, message);
    }
}
|
/**
* Copyright (C) 2011 Brian Ferris <bdferris@onebusaway.org>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onebusaway.transit_data_federation.bundle.tasks.transfer_pattern.graph;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import org.onebusaway.transit_data_federation.impl.otp.GraphContext;
import org.onebusaway.transit_data_federation.services.tripplanner.StopTimeInstance;
import org.opentripplanner.routing.core.Edge;
import org.opentripplanner.routing.core.HasEdges;
import org.opentripplanner.routing.core.Vertex;
/**
 * Graph vertex marking a block departure at a particular stop-time instance.
 * Its only outgoing edge is the hop to the next stop-time of the block, if any.
 */
public class TPOfflineBlockDepartureVertex extends AbstractTPOfflineBlockVertex {

  public TPOfflineBlockDepartureVertex(GraphContext context,
      StopTimeInstance instance) {
    super(context, instance);
  }

  /****
   * {@link Vertex} Interface
   ****/

  @Override
  public String getLabel() {
    return "block_departure: " + _instance.toString();
  }

  /****
   * {@link HasEdges} Interface
   ****/

  @Override
  public Collection<Edge> getOutgoing() {
    StopTimeInstance next = _instance.getNextStopTimeInstance();
    // No next stop-time means the block ends here: no outgoing edges.
    if (next == null)
      return Collections.emptyList();
    // singletonList avoids the varargs array and cast of Arrays.asList.
    return Collections.<Edge>singletonList(new TPOfflineBlockHopEdge(_context,
        _instance, next));
  }

  /****
   * {@link Object} Interface
   ****/

  @Override
  public String toString() {
    // Delegate so the label and string form can never drift apart.
    return getLabel();
  }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.cube;
import java.io.Serializable;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.persistence.ResourceStore;
import org.apache.kylin.common.util.Dictionary;
import org.apache.kylin.common.util.Pair;
import org.apache.kylin.common.util.ShardingHash;
import org.apache.kylin.cube.cuboid.CuboidScheduler;
import org.apache.kylin.cube.kv.CubeDimEncMap;
import org.apache.kylin.cube.kv.RowConstants;
import org.apache.kylin.cube.model.CubeDesc;
import org.apache.kylin.metadata.model.DataModelDesc;
import org.apache.kylin.metadata.model.IBuildable;
import org.apache.kylin.metadata.model.ISegment;
import org.apache.kylin.metadata.model.ISegmentAdvisor;
import org.apache.kylin.metadata.model.SegmentRange;
import org.apache.kylin.metadata.model.SegmentRange.TSRange;
import org.apache.kylin.metadata.model.SegmentStatusEnum;
import org.apache.kylin.metadata.model.Segments;
import org.apache.kylin.metadata.model.TblColRef;
import org.apache.kylin.metadata.realization.IRealization;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonAutoDetect.Visibility;
import com.fasterxml.jackson.annotation.JsonBackReference;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@SuppressWarnings("serial")
@JsonAutoDetect(fieldVisibility = Visibility.NONE, getterVisibility = Visibility.NONE, isGetterVisibility = Visibility.NONE, setterVisibility = Visibility.NONE)
public class CubeSegment implements IBuildable, ISegment, Serializable {
@JsonBackReference
private CubeInstance cubeInstance;
@JsonProperty("uuid")
private String uuid;
@JsonProperty("name")
private String name;
@JsonProperty("storage_location_identifier")
private String storageLocationIdentifier;
@JsonProperty("date_range_start")
private long dateRangeStart;
@JsonProperty("date_range_end")
private long dateRangeEnd;
@JsonProperty("source_offset_start")
private long sourceOffsetStart;
@JsonProperty("source_offset_end")
private long sourceOffsetEnd;
@JsonProperty("status")
private SegmentStatusEnum status;
@JsonProperty("size_kb")
private long sizeKB;
@JsonProperty("is_merged")
private boolean isMerged;
@JsonProperty("estimate_ratio")
private List<Double> estimateRatio;
@JsonProperty("input_records")
private long inputRecords;
@JsonProperty("input_records_size")
private long inputRecordsSize;
@JsonProperty("last_build_time")
private long lastBuildTime;
@JsonProperty("last_build_job_id")
private String lastBuildJobID;
@JsonProperty("create_time_utc")
private long createTimeUTC;
@JsonProperty("cuboid_shard_nums")
private Map<Long, Short> cuboidShardNums = Maps.newHashMap();
@JsonProperty("total_shards") //it is only valid when all cuboids are squshed into some shards. like the HBASE_STORAGE case, otherwise it'll stay 0
private int totalShards = 0;
@JsonProperty("blackout_cuboids")
private List<Long> blackoutCuboids = Lists.newArrayList();
@JsonProperty("binary_signature")
private String binarySignature; // a hash of cube schema and dictionary ID, used for sanity check
@JsonProperty("dictionaries")
private ConcurrentHashMap<String, String> dictionaries; // table/column ==> dictionary resource path
@JsonProperty("snapshots")
private ConcurrentHashMap<String, String> snapshots; // table name ==> snapshot resource path
@JsonProperty("rowkey_stats")
private List<Object[]> rowkeyStats = Lists.newArrayList();
@JsonProperty("source_partition_offset_start")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private Map<Integer, Long> sourcePartitionOffsetStart = Maps.newHashMap();
@JsonProperty("source_partition_offset_end")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private Map<Integer, Long> sourcePartitionOffsetEnd = Maps.newHashMap();
@JsonProperty("stream_source_checkpoint")
private String streamSourceCheckpoint;
@JsonProperty("additionalInfo")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private Map<String, String> additionalInfo = new LinkedHashMap<String, String>();
@JsonProperty("dimension_range_info_map")
@JsonInclude(JsonInclude.Include.NON_EMPTY)
private Map<String, DimensionRangeInfo> dimensionRangeInfoMap = Maps.newHashMap();
private Map<Long, Short> cuboidBaseShards = Maps.newConcurrentMap(); // cuboid id ==> base(starting) shard for this cuboid
// lazy init
transient volatile ISegmentAdvisor advisor = null;
/** Returns the cube descriptor of the owning cube instance. */
public CubeDesc getCubeDesc() {
return getCubeInstance().getDescriptor();
}
/** Returns the cuboid scheduler of the owning cube instance. */
public CuboidScheduler getCuboidScheduler() {
return getCubeInstance().getCuboidScheduler();
}
/**
 * Derives a segment name from its ranges: "FULL_BUILD" when there is no range,
 * the raw start_end values for a generic segment range or a non-standard
 * partition column, otherwise both ends rendered as GMT timestamps.
 */
public static String makeSegmentName(TSRange tsRange, SegmentRange segRange, DataModelDesc modelDesc) {
    // No range at all: the segment covers the whole data set.
    if (tsRange == null && segRange == null) {
        return "FULL_BUILD";
    }
    // A generic segment range takes precedence over the time range.
    if (segRange != null) {
        return segRange.start.v + "_" + segRange.end.v;
    }
    // Non-standard partition column: use the raw values verbatim.
    if (!modelDesc.isStandardPartitionedDateColumn()) {
        return tsRange.start.v + "_" + tsRange.end.v;
    }
    // Standard date partitioning: format both ends as GMT timestamps.
    SimpleDateFormat fmt = new SimpleDateFormat("yyyyMMddHHmmss", Locale.ROOT);
    fmt.setTimeZone(TimeZone.getTimeZone("GMT"));
    return fmt.format(tsRange.start.v) + "_" + fmt.format(tsRange.end.v);
}
/**
 * Parses a segment name of the form "yyyyMMddHHmmss_yyyyMMddHHmmss" (GMT) into
 * its (start, end) epoch-millisecond pair. Full-coverage segments yield (0, 0).
 *
 * @param segmentName the segment name to parse
 * @return start/end epoch millis, or (0, 0) for a full-coverage segment
 * @throws IllegalArgumentException if the name has a different shape or
 *         unparseable timestamps
 */
public static Pair<Long, Long> parseSegmentName(String segmentName) {
    // Accept both the legacy "FULL" marker and the "FULL_BUILD" name that
    // makeSegmentName() actually produces; previously "FULL_BUILD" was rejected.
    if ("FULL".equals(segmentName) || "FULL_BUILD".equals(segmentName)) {
        return new Pair<>(0L, 0L);
    }
    String[] startEnd = segmentName.split("_");
    if (startEnd.length != 2) {
        throw new IllegalArgumentException("the segmentName is illegal: " + segmentName);
    }
    SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.ROOT);
    dateFormat.setTimeZone(TimeZone.getTimeZone("GMT"));
    try {
        long dateRangeStart = dateFormat.parse(startEnd[0]).getTime();
        long dateRangeEnd = dateFormat.parse(startEnd[1]).getTime();
        return new Pair<>(dateRangeStart, dateRangeEnd);
    } catch (ParseException e) {
        throw new IllegalArgumentException("Invalid segmentName for CubeSegment, segmentName = " + segmentName);
    }
}
// ============================================================================
/** Returns the Kylin config of the owning cube instance. */
public KylinConfig getConfig() {
return cubeInstance.getConfig();
}
public String getUuid() {
return uuid;
}
public void setUuid(String id) {
this.uuid = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public SegmentStatusEnum getStatus() {
return status;
}
/** Returns the data model via the cube descriptor. */
@Override
public DataModelDesc getModel() {
return this.getCubeDesc().getModel();
}
public void setStatus(SegmentStatusEnum status) {
this.status = status;
}
public long getSizeKB() {
return sizeKB;
}
public void setSizeKB(long sizeKB) {
this.sizeKB = sizeKB;
}
public boolean isMerged() {
return isMerged;
}
public void setMerged(boolean isMerged) {
this.isMerged = isMerged;
}
public List<Double> getEstimateRatio() {
return estimateRatio;
}
public void setEstimateRatio(List<Double> estimateRatio) {
this.estimateRatio = estimateRatio;
}
public long getInputRecords() {
return inputRecords;
}
public void setInputRecords(long inputRecords) {
this.inputRecords = inputRecords;
}
public long getInputRecordsSize() {
return inputRecordsSize;
}
public void setInputRecordsSize(long inputRecordsSize) {
this.inputRecordsSize = inputRecordsSize;
}
@Override
public long getLastBuildTime() {
return lastBuildTime;
}
public void setLastBuildTime(long lastBuildTime) {
this.lastBuildTime = lastBuildTime;
}
public String getLastBuildJobID() {
return lastBuildJobID;
}
public void setLastBuildJobID(String lastBuildJobID) {
this.lastBuildJobID = lastBuildJobID;
}
public long getCreateTimeUTC() {
return createTimeUTC;
}
public void setCreateTimeUTC(long createTimeUTC) {
this.createTimeUTC = createTimeUTC;
}
public String getBinarySignature() {
return binarySignature;
}
public void setBinarySignature(String binarySignature) {
this.binarySignature = binarySignature;
}
public CubeInstance getCubeInstance() {
return cubeInstance;
}
public void setCubeInstance(CubeInstance cubeInstance) {
this.cubeInstance = cubeInstance;
}
public String getStorageLocationIdentifier() {
return storageLocationIdentifier;
}
public List<Object[]> getRowkeyStats() {
return rowkeyStats;
}
/**
 * Lazily initializes and returns the table/column -> dictionary resource path map.
 * NOTE(review): the null-check-then-assign is not synchronized; two threads could
 * each create a map and one assignment be lost — confirm single-threaded use.
 */
public Map<String, String> getDictionaries() {
if (dictionaries == null)
dictionaries = new ConcurrentHashMap<String, String>();
return dictionaries;
}
/** Lazily initializes and returns the table -> snapshot resource path map (same race caveat as getDictionaries). */
public Map<String, String> getSnapshots() {
if (snapshots == null)
snapshots = new ConcurrentHashMap<String, String>();
return snapshots;
}
/** Returns the snapshot resource path for the given table, or null if none. */
public String getSnapshotResPath(String table) {
return getSnapshots().get(table);
}
public void putSnapshotResPath(String table, String snapshotResPath) {
getSnapshots().put(table, snapshotResPath);
}
public Collection<String> getDictionaryPaths() {
return getDictionaries().values();
}
public Collection<String> getSnapshotPaths() {
return getSnapshots().values();
}
/**
 * Resolves the dictionary resource path for a column.
 *
 * Looks the column up by its modern identity key first and, when that has
 * no entry, falls back to the legacy Kylin v1.x key format "TABLE/COLUMN".
 *
 * @param col the column whose dictionary path is requested
 * @return the resource path, or null when the column has no dictionary entry
 */
public String getDictResPath(TblColRef col) {
    Map<String, String> dicts = getDictionaries();
    String path = dicts.get(col.getIdentity());
    if (path != null) {
        return path;
    }
    // try Kylin v1.x dict key as well
    return dicts.get(col.getTable() + "/" + col.getName());
}
/** Registers the dictionary resource path for a column under its identity key. */
public void putDictResPath(TblColRef col, String dictResPath) {
    String dictKey = col.getIdentity();
    getDictionaries().put(dictKey, dictResPath);
}

public void setStorageLocationIdentifier(String storageLocationIdentifier) {
    this.storageLocationIdentifier = storageLocationIdentifier;
}
/**
 * Builds a column -> dictionary map covering every column of this segment
 * that has a dictionary, resolving each one via {@link #getDictionary(TblColRef)}.
 *
 * @return a new mutable map; never null
 */
public Map<TblColRef, Dictionary<String>> buildDictionaryMap() {
    Map<TblColRef, Dictionary<String>> result = Maps.newHashMap();
    for (TblColRef col : getCubeDesc().getAllColumnsHaveDictionary()) {
        // getDictionary(col) already returns Dictionary<String>; the previous
        // explicit cast was redundant and generated an unchecked warning.
        result.put(col, getDictionary(col));
    }
    return result;
}
/**
 * Builds a column -> dictionary map for the global-dictionary columns only.
 *
 * @param globalColumnsSize expected number of global-dict columns, used to presize the map
 */
public Map<TblColRef, Dictionary<String>> buildGlobalDictionaryMap(int globalColumnsSize) {
    Map<TblColRef, Dictionary<String>> result = Maps.newHashMapWithExpectedSize(globalColumnsSize);
    for (TblColRef col : getCubeDesc().getAllGlobalDictColumns()) {
        result.put(col, getDictionary(col));
    }
    return result;
}

/**
 * Loads the dictionary for a column, first redirecting to the column whose
 * dictionary this one is configured to reuse.
 */
public Dictionary<String> getDictionary(TblColRef col) {
    TblColRef reuseCol = getCubeDesc().getDictionaryReuseColumn(col);
    CubeManager cubeMgr = CubeManager.getInstance(this.getCubeInstance().getConfig());
    return cubeMgr.getDictionary(this, reuseCol);
}

/** Returns a fresh dimension-encoding map bound to this segment. */
public CubeDimEncMap getDimensionEncodingMap() {
    return new CubeDimEncMap(this);
}
// Hide the 4 confusing fields: dateRangeStart, dateRangeEnd, sourceOffsetStart, sourceOffsetEnd.
// They are now managed via SegmentRange and TSRange.
// The package-private _get/_set accessors below exist only for the advisor/serialization layer.

long _getDateRangeStart() {
    return dateRangeStart;
}

void _setDateRangeStart(long dateRangeStart) {
    this.dateRangeStart = dateRangeStart;
}

long _getDateRangeEnd() {
    return dateRangeEnd;
}

void _setDateRangeEnd(long dateRangeEnd) {
    this.dateRangeEnd = dateRangeEnd;
}

long _getSourceOffsetStart() {
    return sourceOffsetStart;
}

void _setSourceOffsetStart(long sourceOffsetStart) {
    this.sourceOffsetStart = sourceOffsetStart;
}

long _getSourceOffsetEnd() {
    return sourceOffsetEnd;
}

void _setSourceOffsetEnd(long sourceOffsetEnd) {
    this.sourceOffsetEnd = sourceOffsetEnd;
}

/** Segment range, delegated to the lazily-created advisor. */
@Override
public SegmentRange getSegRange() {
    return getAdvisor().getSegRange();
}

public void setSegRange(SegmentRange range) {
    getAdvisor().setSegRange(range);
}

/** Time-stamp range, delegated to the lazily-created advisor. */
@Override
public TSRange getTSRange() {
    return getAdvisor().getTSRange();
}

public void setTSRange(TSRange range) {
    getAdvisor().setTSRange(range);
}

/** True when the segment is bounded by source offsets rather than time. */
public boolean isOffsetCube() {
    return getAdvisor().isOffsetCube();
}

/**
 * Lazily creates the segment advisor, double-checked under this segment's lock.
 * NOTE(review): the fast path reads 'advisor' unsynchronized — this is only
 * safe if the field is declared volatile; confirm at the field declaration.
 */
private ISegmentAdvisor getAdvisor() {
    if (advisor != null)
        return advisor;
    synchronized (this) {
        if (advisor == null) {
            advisor = Segments.newSegmentAdvisor(this);
        }
        return advisor;
    }
}

/**
 * Validates range sanity for partitioned cubes: the appropriate range
 * (offsets for offset cubes, dates otherwise) must be non-empty.
 *
 * @throws IllegalStateException when start >= end for the active range kind
 */
@Override
public void validate() throws IllegalStateException {
    if (cubeInstance.getDescriptor().getModel().getPartitionDesc().isPartitioned()) {
        if (!isOffsetCube() && dateRangeStart >= dateRangeEnd)
            throw new IllegalStateException("Invalid segment, dateRangeStart(" + dateRangeStart + ") must be smaller than dateRangeEnd(" + dateRangeEnd + ") in segment " + this);
        if (isOffsetCube() && sourceOffsetStart >= sourceOffsetEnd)
            throw new IllegalStateException("Invalid segment, sourceOffsetStart(" + sourceOffsetStart + ") must be smaller than sourceOffsetEnd(" + sourceOffsetEnd + ") in segment " + this);
    }
}

/** Project name, resolved through the cube descriptor. */
public String getProject() {
    return getCubeDesc().getProject();
}
/**
 * Orders segments by range start, breaking ties by range end.
 */
@Override
public int compareTo(ISegment other) {
    SegmentRange thisRange = this.getSegRange();
    SegmentRange thatRange = other.getSegRange();
    int byStart = thisRange.start.compareTo(thatRange.start);
    return byStart != 0 ? byStart : thisRange.end.compareTo(thatRange.end);
}
/**
 * Hash over owning cube, name and status.
 * Note: uuid participates in equals() but not here — that is still
 * contract-consistent (equal objects hash equal), merely a coarser hash.
 */
@Override
public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + ((cubeInstance == null) ? 0 : cubeInstance.hashCode());
    result = prime * result + ((name == null) ? 0 : name.hashCode());
    result = prime * result + ((status == null) ? 0 : status.hashCode());
    return result;
}
/**
 * Equality over owning cube, uuid, name and status. Uses an exact
 * getClass() comparison (not instanceof), so subclasses never compare equal.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj)
        return true;
    if (obj == null || getClass() != obj.getClass())
        return false;
    CubeSegment other = (CubeSegment) obj;
    // Null-safe field-by-field comparison; status is an enum, so == suffices.
    return (cubeInstance == null ? other.cubeInstance == null : cubeInstance.equals(other.cubeInstance))
            && (uuid == null ? other.uuid == null : uuid.equals(other.uuid))
            && (name == null ? other.name == null : name.equals(other.name))
            && status == other.status;
}
/** Renders as "cubeName[segmentName]". */
@Override
public String toString() {
    return cubeInstance.getName() + "[" + name + "]";
}

public void setDictionaries(ConcurrentHashMap<String, String> dictionaries) {
    this.dictionaries = dictionaries;
}

public void setSnapshots(ConcurrentHashMap<String, String> snapshots) {
    this.snapshots = snapshots;
}

/** Resource-store path of this segment's statistics file. */
public String getStatisticsResourcePath() {
    return getStatisticsResourcePath(this.getCubeInstance().getName(), this.getUuid());
}

public static String getStatisticsResourcePath(String cubeName, String cubeSegmentId) {
    return ResourceStore.CUBE_STATISTICS_ROOT + "/" + cubeName + "/" + cubeSegmentId + ".seq";
}

/** Source / engine / storage types are inherited from the owning cube. */
@Override
public int getSourceType() {
    return cubeInstance.getSourceType();
}

@Override
public int getEngineType() {
    return cubeInstance.getEngineType();
}

@Override
public int getStorageType() {
    return cubeInstance.getStorageType();
}

/** Whether row keys carry a shard prefix (per the cube descriptor). */
public boolean isEnableSharding() {
    return getCubeDesc().isEnableSharding();
}

public Set<TblColRef> getShardByColumns() {
    return getCubeDesc().getShardByColumns();
}

/** Row-key prefix length: shard id + cuboid id when sharding, cuboid id only otherwise. */
public int getRowKeyPreambleSize() {
    return isEnableSharding() ? RowConstants.ROWKEY_SHARD_AND_CUBOID_LEN : RowConstants.ROWKEY_CUBOIDID_LEN;
}
/**
 * Number of shards a given cuboid is distributed across.
 *
 * @param cuboidId the cuboid to look up
 * @return the recorded shard count, or 1 when no entry exists
 */
public Short getCuboidShardNum(Long cuboidId) {
    Short configured = this.cuboidShardNums.get(cuboidId);
    return configured != null ? configured : (short) 1;
}
public void setCuboidShardNums(Map<Long, Short> newCuboidShards) {
    this.cuboidShardNums = newCuboidShards;
}

/**
 * Total shard count for a cuboid: the segment-wide override when set (> 0),
 * otherwise the per-cuboid number.
 */
public int getTotalShards(long cuboidId) {
    if (totalShards > 0) {
        return totalShards;
    } else {
        int ret = getCuboidShardNum(cuboidId);
        return ret;
    }
}

public void setTotalShards(int totalShards) {
    this.totalShards = totalShards;
}
/**
 * Base shard for a cuboid; 0 when sharding is disabled (totalShards == 0).
 * The hash result is memoized in cuboidBaseShards.
 * NOTE(review): the get/put below is check-then-act; harmless if the hash is
 * deterministic, but confirm the map type tolerates concurrent callers.
 */
public short getCuboidBaseShard(Long cuboidId) {
    if (totalShards == 0)
        return 0;
    Short ret = cuboidBaseShards.get(cuboidId);
    if (ret == null) {
        ret = ShardingHash.getShard(cuboidId, totalShards);
        cuboidBaseShards.put(cuboidId, ret);
    }
    return ret;
}
/** Cuboids excluded from this segment. */
public List<Long> getBlackoutCuboids() {
    return this.blackoutCuboids;
}

/** The realization backing this segment is simply its cube. */
public IRealization getRealization() {
    return cubeInstance;
}

public Map<String, String> getAdditionalInfo() {
    return additionalInfo;
}

public void setAdditionalInfo(Map<String, String> additionalInfo) {
    this.additionalInfo = additionalInfo;
}

/** Per-partition source offsets (end side), for offset-based (streaming) cubes. */
public Map<Integer, Long> getSourcePartitionOffsetEnd() {
    return sourcePartitionOffsetEnd;
}

public void setSourcePartitionOffsetEnd(Map<Integer, Long> sourcePartitionOffsetEnd) {
    this.sourcePartitionOffsetEnd = sourcePartitionOffsetEnd;
}

/** Per-partition source offsets (start side), for offset-based (streaming) cubes. */
public Map<Integer, Long> getSourcePartitionOffsetStart() {
    return sourcePartitionOffsetStart;
}

public void setSourcePartitionOffsetStart(Map<Integer, Long> sourcePartitionOffsetStart) {
    this.sourcePartitionOffsetStart = sourcePartitionOffsetStart;
}

public Map<String, DimensionRangeInfo> getDimensionRangeInfoMap() {
    return dimensionRangeInfoMap;
}

public void setDimensionRangeInfoMap(Map<String, DimensionRangeInfo> dimensionRangeInfoMap) {
    this.dimensionRangeInfoMap = dimensionRangeInfoMap;
}

public String getStreamSourceCheckpoint() {
    return streamSourceCheckpoint;
}

public void setStreamSourceCheckpoint(String streamSourceCheckpoint) {
    this.streamSourceCheckpoint = streamSourceCheckpoint;
}
}
|
package mage.cards.l;
import java.util.UUID;
import mage.MageInt;
import mage.abilities.common.SimpleStaticAbility;
import mage.abilities.effects.common.continuous.GainAbilityPairedEffect;
import mage.abilities.keyword.HasteAbility;
import mage.abilities.keyword.SoulbondAbility;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
import mage.constants.Zone;
/**
*
* @author noxx
*/
public final class LightningMauler extends CardImpl {

    // Rule text shown for the paired-haste static ability.
    private static final String ruleText = "As long as {this} is paired with another creature, both creatures have haste";

    public LightningMauler(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId,setInfo,new CardType[]{CardType.CREATURE},"{1}{R}");
        this.subtype.add(SubType.HUMAN);
        this.subtype.add(SubType.BERSERKER);
        this.power = new MageInt(2);
        this.toughness = new MageInt(1);
        // Soulbond
        this.addAbility(new SoulbondAbility());
        // As long as Lightning Mauler is paired with another creature, both creatures have haste.
        this.addAbility(new SimpleStaticAbility(Zone.BATTLEFIELD, new GainAbilityPairedEffect(HasteAbility.getInstance(), ruleText)));
    }

    // Copy constructor used by copy(); must copy all state via super.
    public LightningMauler(final LightningMauler card) {
        super(card);
    }

    @Override
    public LightningMauler copy() {
        return new LightningMauler(this);
    }
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.verifier.resolver;
import com.facebook.presto.common.type.ArrayType;
import com.facebook.presto.common.type.DoubleType;
import com.facebook.presto.common.type.MapType;
import com.facebook.presto.common.type.RealType;
import com.facebook.presto.common.type.RowType;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.verifier.checksum.ArrayColumnChecksum;
import com.facebook.presto.verifier.checksum.ColumnMatchResult;
import com.facebook.presto.verifier.checksum.MapColumnChecksum;
import com.facebook.presto.verifier.checksum.StructureColumnChecksum;
import com.facebook.presto.verifier.framework.DataMatchResult;
import com.facebook.presto.verifier.framework.QueryBundle;
import java.util.Objects;
import java.util.Optional;
import static com.facebook.presto.common.type.RowType.Field;
import static com.facebook.presto.verifier.framework.DataMatchResult.MatchType.COLUMN_MISMATCH;
import static com.google.common.base.Preconditions.checkArgument;
/**
 * Auto-resolves result mismatches that are confined to structured columns
 * (arrays/maps) whose differing content can be attributed to floating-point
 * values, provided cardinalities (and any non-floating side of a map) agree.
 */
public class StructuredColumnMismatchResolver
        implements FailureResolver
{
    public static final String NAME = "structured-column";

    @Override
    public Optional<String> resolveResultMismatch(DataMatchResult matchResult, QueryBundle control)
    {
        checkArgument(!matchResult.isMatched(), "Expect not matched");
        if (matchResult.getMatchType() != COLUMN_MISMATCH) {
            return Optional.empty();
        }
        // Resolve only when *every* mismatched column is a structured type whose
        // mismatch is explainable by floating-point content.
        for (ColumnMatchResult<?> mismatchedColumn : matchResult.getMismatchedColumns()) {
            Type columnType = mismatchedColumn.getColumn().getType();
            if (columnType instanceof ArrayType) {
                if (!isResolvableArrayMismatch((ArrayType) columnType, mismatchedColumn)) {
                    return Optional.empty();
                }
            }
            else if (columnType instanceof MapType) {
                if (!isResolvableMapMismatch((MapType) columnType, mismatchedColumn)) {
                    return Optional.empty();
                }
            }
            else {
                // Non-structured columns are never auto-resolved here.
                return Optional.empty();
            }
        }
        return Optional.of("Structured columns auto-resolved");
    }

    // An array mismatch is resolvable only if elements contain floating point
    // and the cardinality checksums still agree.
    private static boolean isResolvableArrayMismatch(ArrayType type, ColumnMatchResult<?> mismatch)
    {
        ArrayColumnChecksum controlChecksum = (ArrayColumnChecksum) mismatch.getControlChecksum();
        ArrayColumnChecksum testChecksum = (ArrayColumnChecksum) mismatch.getTestChecksum();
        return containsFloatingPointType(type.getElementType())
                && isCardinalityMatched(controlChecksum, testChecksum);
    }

    // A map mismatch is resolvable only if cardinalities agree, at least one of
    // keys/values contains floating point, and whichever side is floating-point
    // free still has matching checksums.
    private static boolean isResolvableMapMismatch(MapType type, ColumnMatchResult<?> mismatch)
    {
        MapColumnChecksum controlChecksum = (MapColumnChecksum) mismatch.getControlChecksum();
        MapColumnChecksum testChecksum = (MapColumnChecksum) mismatch.getTestChecksum();
        if (!isCardinalityMatched(controlChecksum, testChecksum)) {
            return false;
        }
        boolean keyFloating = containsFloatingPointType(type.getKeyType());
        boolean valueFloating = containsFloatingPointType(type.getValueType());
        if (!keyFloating && !valueFloating) {
            return false;
        }
        if (!keyFloating && !Objects.equals(controlChecksum.getKeysChecksum(), testChecksum.getKeysChecksum())) {
            return false;
        }
        return valueFloating
                || Objects.equals(controlChecksum.getValuesChecksum(), testChecksum.getValuesChecksum());
    }

    // Recursively detects DOUBLE/REAL anywhere inside a (possibly nested) type.
    private static boolean containsFloatingPointType(Type type)
    {
        if (type instanceof DoubleType || type instanceof RealType) {
            return true;
        }
        if (type instanceof ArrayType) {
            return containsFloatingPointType(((ArrayType) type).getElementType());
        }
        if (type instanceof MapType) {
            return containsFloatingPointType(((MapType) type).getKeyType())
                    || containsFloatingPointType(((MapType) type).getValueType());
        }
        if (type instanceof RowType) {
            return ((RowType) type).getFields().stream()
                    .map(Field::getType)
                    .anyMatch(StructuredColumnMismatchResolver::containsFloatingPointType);
        }
        return false;
    }

    private static <T extends StructureColumnChecksum> boolean isCardinalityMatched(T controlChecksum, T testChecksum)
    {
        return Objects.equals(controlChecksum.getCardinalityChecksum(), testChecksum.getCardinalityChecksum())
                && Objects.equals(controlChecksum.getCardinalitySum(), testChecksum.getCardinalitySum());
    }
}
|
package com.alipay.api.domain;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
/**
* 动态相关的地理位置
*
* @author auto create
* @since 1.0, 2016-10-26 17:43:38
*/
public class NewsfeedLocationInfo extends AlipayObject {

    private static final long serialVersionUID = 2681387767573635276L;

    /**
     * Administrative area code of the location.
     */
    @ApiField("ad_code")
    private String adCode;

    /**
     * Latitude (must not be 0 or 1).
     */
    @ApiField("lat")
    private String lat;

    /**
     * Longitude (must not be 0 or 1).
     */
    @ApiField("lon")
    private String lon;

    public String getAdCode() {
        return this.adCode;
    }

    public void setAdCode(String adCode) {
        this.adCode = adCode;
    }

    public String getLat() {
        return this.lat;
    }

    public void setLat(String lat) {
        this.lat = lat;
    }

    public String getLon() {
        return this.lon;
    }

    public void setLon(String lon) {
        this.lon = lon;
    }
}
|
package com.ctrlcollege.ctrlcollege;
import android.content.Intent;
import android.os.AsyncTask;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
/** Activity that lets the logged-in user change their password. */
public class AlteraSenha extends AppCompatActivity {

    // User id received from the launching activity's extras.
    private String user;
    // Endpoint and POST body, filled in just before the AsyncTask runs.
    private String url = "";
    private String parametros = "";

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_altera_senha);
        // Recover the user id passed by the previous screen.
        Intent i = getIntent();
        if(i != null){
            Bundle parametros = i.getExtras();
            if(parametros != null){
                user = parametros.getString("user");
            }
        }
        Button salvar = (Button) findViewById(R.id.bt_salvaSenha);
        salvar.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                EditText senhaAtual = (EditText) findViewById(R.id.tx_senhaAtual);
                EditText senhaNova = (EditText) findViewById(R.id.tx_senhaNova);
                EditText confirmSenhaNova = (EditText) findViewById(R.id.tx_confirmSenha);
                // Only submit when the new password and its confirmation match.
                if (senhaNova.getText().toString().equals(confirmSenhaNova.getText().toString())){
                    // SECURITY(review): passwords are sent as plain-text request
                    // parameters over unencrypted HTTP to a hard-coded LAN IP —
                    // this must move to HTTPS (and ideally a hashed exchange).
                    url = "http://192.168.1.100/nodemcu/alteraSenha.php";
                    parametros = "id="+ user +"&sa="+senhaAtual.getText().toString()+"&sn="+senhaNova.getText().toString();
                    //fetch data
                    new Alterar().execute(url);
                }else{
                    // Mismatch: warn, clear both new-password fields, refocus.
                    Toast.makeText(AlteraSenha.this, "As senhas digitadas não são iguais", Toast.LENGTH_SHORT).show();
                    senhaNova.setText(null);
                    confirmSenhaNova.setText(null);
                    senhaNova.requestFocus();
                }
            }
        });
    }

    /** Back always returns to the parents' menu screen, forwarding the user id. */
    @Override
    public void onBackPressed() {
        Intent i = new Intent(this, TelaMenuPais.class);
        Bundle parametros = new Bundle();
        parametros.putString("user", user);
        i.putExtras(parametros);
        startActivity(i);
        finish();
    }

    /**
     * Posts the password change off the UI thread.
     * NOTE(review): a non-static AsyncTask holds a reference to the Activity
     * and can leak it across rotation — consider a lifecycle-aware approach.
     */
    private class Alterar extends AsyncTask<String, Void, String> {
        @Override
        protected String doInBackground(String... urls){
            return Conexao.postDados(urls[0], parametros);
        }

        // onPostExecute displays the results of the AsyncTask
        @Override
        protected void onPostExecute(String result){
            Toast.makeText(AlteraSenha.this, result, Toast.LENGTH_SHORT).show();
            Intent i=new Intent(AlteraSenha.this, TelaMenuPais.class);
            Bundle parametros = new Bundle();
            parametros.putString("user", user);
            i.putExtras(parametros);
            startActivity(i);
            finish();
        }
    }
}
|
package org.sculptor.dddsample.routing.serviceapi;
import static org.junit.Assert.assertFalse;
import java.util.List;
import org.junit.Test;
import org.sculptor.dddsample.routing.domain.TransitPath;
import org.sculptor.framework.test.AbstractDbUnitJpaTests;
import org.springframework.beans.factory.annotation.Autowired;
/**
* Spring based transactional test with DbUnit support.
*/
public class GraphTraversalServiceTest extends AbstractDbUnitJpaTests implements GraphTraversalServiceTestBase {

    private GraphTraversalService graphTraversalService;

    @Autowired
    public void setGraphTraversalService(GraphTraversalService graphTraversalService) {
        this.graphTraversalService = graphTraversalService;
    }

    /** DbUnit dataset loaded before each test. */
    @Override
    protected String getDataSetFile() {
        return "dbunit/TestData.xml";
    }

    /** Smoke test: at least one transit path exists between SESTO and FIHEL. */
    @Test
    public void testFindShortestPath() throws Exception {
        List<TransitPath> paths = graphTraversalService.findShortestPath(getServiceContext(), "SESTO", "FIHEL");
        assertFalse(paths.isEmpty());
    }
}
|
package SWEA;
/**
* @author steve.jh.kang@gmail.com
* @time 2020. 2. 28. 오후 6:09:08
* @category
* @problem_description
* 초기에 배정된 나라별 색 지도를 칠할려고 보니 인접된 나라의 색이 동일한 경우가 있어
* 나라별 구별을 위해 다른 색으로 변경해야한다. 인접된 나라의 동일한 색을 최소 변경을 통해
* 지도에 모든 나라의 색을 지정해보자. 최소 3개국가 최대 8개 색은 4개
*
* 입력: 나라수, 각 나라에 배정된 색상 값이 숫자 1번부터 4번까지 주어진다.
* 다섯번째 줄부터 국가간 인접 정보가 인접행렬 정보로 주어진다.
* @solving_description
*/
public class SWEA_7208 {
    // TODO: solution not implemented yet — main is an empty stub.
    public static void main(String[] args) {
    }
}
|
/*
This file is part of the iText (R) project.
Copyright (c) 1998-2020 iText Group NV
Authors: Bruno Lowagie, Paulo Soares, et al.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License version 3
as published by the Free Software Foundation with the addition of the
following permission added to Section 15 as permitted in Section 7(a):
FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED BY
ITEXT GROUP. ITEXT GROUP DISCLAIMS THE WARRANTY OF NON INFRINGEMENT
OF THIRD PARTY RIGHTS
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses or write to
the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA, 02110-1301 USA, or download the license from the following URL:
http://itextpdf.com/terms-of-use/
The interactive user interfaces in modified source and object code versions
of this program must display Appropriate Legal Notices, as required under
Section 5 of the GNU Affero General Public License.
In accordance with Section 7(b) of the GNU Affero General Public License,
a covered work must retain the producer line in every PDF that is created
or manipulated using iText.
You can be released from the requirements of the license by purchasing
a commercial license. Buying such a license is mandatory as soon as you
develop commercial activities involving the iText software without
disclosing the source code of your own applications.
These activities include: offering paid services to customers as an ASP,
serving PDFs on the fly in a web application, shipping iText with a closed
source product.
For more information, please contact iText Software Corp. at this
address: sales@itextpdf.com
*/
package com.itextpdf.signatures;
import java.security.cert.X509Certificate;
/**
* Class that informs you that the verification of a Certificate
* succeeded using a specific CertificateVerifier and for a specific
* reason.
*/
/**
 * Class that informs you that the verification of a Certificate
 * succeeded using a specific CertificateVerifier and for a specific
 * reason.
 */
public class VerificationOK {

    /** The certificate that was verified successfully. */
    protected X509Certificate certificate;

    /** The CertificateVerifier that was used for verifying. */
    protected Class<? extends CertificateVerifier> verifierClass;

    /** The reason why the certificate verified successfully. */
    protected String message;

    /**
     * Creates a VerificationOK object
     * @param certificate the certificate that was successfully verified
     * @param verifierClass the class that was used for verification
     * @param message the reason why the certificate could be verified
     */
    public VerificationOK(X509Certificate certificate,
            Class<? extends CertificateVerifier> verifierClass, String message) {
        this.certificate = certificate;
        this.verifierClass = verifierClass;
        this.message = message;
    }

    /**
     * Return a single String explaining which certificate was verified, how and why.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder result = new StringBuilder();
        if (certificate != null) {
            result.append(certificate.getSubjectDN().getName()).append(" verified with ");
        }
        return result.append(verifierClass.getName())
                .append(": ")
                .append(message)
                .toString();
    }
}
|
/*
* The MIT License
*
* Copyright (c) 2015 Misakura.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package jp.gr.java_conf.kgd.example.blog.collections.iterable.bidirectional;
import jp.gr.java_conf.kgd.example.blog.collections.iterable.forward.ForwardInputIterable;
import jp.gr.java_conf.kgd.example.blog.collections.iterable.reverse.ReverseInputIterable;
import jp.gr.java_conf.kgd.example.blog.collections.iterator.variation.bidirectional.BidirectionalInputIterator;
/**
 * An input iterable traversable from either end, combining forward and
 * reverse input iteration; both entry points return bidirectional iterators.
 */
public interface BidirectionalInputIterable<T> extends BidirectionalIterable, ForwardInputIterable<T>, ReverseInputIterable<T> {

    /** Iterator positioned at the first element. */
    @Override
    BidirectionalInputIterator<T> firstIterator();

    /** Iterator positioned at the last element. */
    @Override
    BidirectionalInputIterator<T> lastIterator();
}
|
/*
* Copyright 2017 Alexey Andreev.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.teavm.runtime;
import org.teavm.interop.Structure;
import org.teavm.interop.c.Name;
import org.teavm.interop.c.Native;
@Native
@Name("TeaVM_CallSiteLocation")
/**
 * Native structure mirroring the C struct TeaVM_CallSiteLocation:
 * a (method, line number) pair identifying one call-site position.
 */
@Native
@Name("TeaVM_CallSiteLocation")
public class CallSiteLocation extends Structure {
    public MethodLocation method;
    public int lineNumber;
}
|
package org.w3c.dom.html;
/**
 * DOM Level 1 HTML interface for a table row (&lt;tr&gt;): index lookups,
 * presentation attributes, and cell insertion/deletion.
 */
public interface HTMLTableRowElement extends HTMLElement {
    /** Row index in the whole table, in document order. */
    int getRowIndex();

    void setRowIndex(int var1);

    /** Row index within the enclosing section (thead/tbody/tfoot). */
    int getSectionRowIndex();

    void setSectionRowIndex(int var1);

    /** The collection of cells in this row. */
    HTMLCollection getCells();

    void setCells(HTMLCollection var1);

    String getAlign();

    void setAlign(String var1);

    String getBgColor();

    void setBgColor(String var1);

    String getCh();

    void setCh(String var1);

    String getChOff();

    void setChOff(String var1);

    String getVAlign();

    void setVAlign(String var1);

    /** Inserts an empty cell at the given position and returns it. */
    HTMLElement insertCell(int var1);

    /** Deletes the cell at the given position. */
    void deleteCell(int var1);
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tajo.plan;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.tajo.BuiltinStorages;
import org.apache.tajo.OverridableConf;
import org.apache.tajo.QueryVars;
import org.apache.tajo.SessionVars;
import org.apache.tajo.algebra.*;
import org.apache.tajo.algebra.WindowSpec;
import org.apache.tajo.catalog.*;
import org.apache.tajo.exception.UndefinedColumnException;
import org.apache.tajo.exception.UndefinedTableException;
import org.apache.tajo.catalog.partition.PartitionMethodDesc;
import org.apache.tajo.catalog.proto.CatalogProtos;
import org.apache.tajo.catalog.proto.CatalogProtos.IndexMethod;
import org.apache.tajo.common.TajoDataTypes;
import org.apache.tajo.conf.TajoConf;
import org.apache.tajo.datum.NullDatum;
import org.apache.tajo.exception.ExceptionUtil;
import org.apache.tajo.exception.TajoException;
import org.apache.tajo.exception.TajoInternalError;
import org.apache.tajo.exception.NotImplementedException;
import org.apache.tajo.plan.LogicalPlan.QueryBlock;
import org.apache.tajo.plan.algebra.BaseAlgebraVisitor;
import org.apache.tajo.plan.expr.*;
import org.apache.tajo.plan.exprrewrite.EvalTreeOptimizer;
import org.apache.tajo.plan.logical.*;
import org.apache.tajo.plan.nameresolver.NameResolvingMode;
import org.apache.tajo.plan.rewrite.rules.ProjectionPushDownRule;
import org.apache.tajo.plan.util.ExprFinder;
import org.apache.tajo.plan.util.PlannerUtil;
import org.apache.tajo.storage.StorageService;
import org.apache.tajo.util.KeyValueSet;
import org.apache.tajo.util.Pair;
import org.apache.tajo.util.StringUtils;
import org.apache.tajo.util.TUtil;
import java.net.URI;
import java.util.*;
import static org.apache.tajo.algebra.CreateTable.PartitionType;
import static org.apache.tajo.plan.ExprNormalizer.ExprNormalizedResult;
import static org.apache.tajo.plan.LogicalPlan.BlockType;
import static org.apache.tajo.plan.verifier.SyntaxErrorUtil.makeSyntaxError;
/**
* This class creates a logical plan from a nested tajo algebra expression ({@link org.apache.tajo.algebra})
*/
public class LogicalPlanner extends BaseAlgebraVisitor<LogicalPlanner.PlanContext, LogicalNode> {
private static Log LOG = LogFactory.getLog(LogicalPlanner.class);
private final CatalogService catalog;
private final StorageService storage;
private final LogicalPlanPreprocessor preprocessor;
private final EvalTreeOptimizer evalOptimizer;
private final ExprAnnotator exprAnnotator;
private final ExprNormalizer normalizer;
/**
 * Creates a planner bound to a catalog and a storage service, wiring up the
 * preprocessing, annotation, normalization and eval-optimization helpers.
 */
public LogicalPlanner(CatalogService catalog, StorageService storage) {
    this.catalog = catalog;
    this.storage = storage;
    this.exprAnnotator = new ExprAnnotator(catalog);
    this.preprocessor = new LogicalPlanPreprocessor(catalog, exprAnnotator);
    this.normalizer = new ExprNormalizer();
    this.evalOptimizer = new EvalTreeOptimizer();
}
/**
 * Mutable per-planning state threaded through the visitor: the query config,
 * the plan under construction, the current query block, and helpers.
 */
public static class PlanContext {
    OverridableConf queryContext;
    LogicalPlan plan;
    QueryBlock queryBlock;
    EvalTreeOptimizer evalOptimizer;
    // Session time zone; remains null when SessionVars.TIMEZONE is unset.
    TimeZone timeZone;
    List<Expr> unplannedExprs = TUtil.newList();
    boolean debugOrUnitTests;
    // Counter for generating unique no-name subquery ids.
    // NOTE(review): boxed Integer with ++ re-boxes on every increment — confirm
    // whether external code relies on the boxed type before changing to int.
    Integer noNameSubqueryId = 0;

    public PlanContext(OverridableConf context, LogicalPlan plan, QueryBlock block, EvalTreeOptimizer evalOptimizer,
                       boolean debugOrUnitTests) {
        this.queryContext = context;
        this.plan = plan;
        this.queryBlock = block;
        this.evalOptimizer = evalOptimizer;
        // session's time zone
        if (context.containsKey(SessionVars.TIMEZONE)) {
            String timezoneId = context.get(SessionVars.TIMEZONE);
            timeZone = TimeZone.getTimeZone(timezoneId);
        }
        this.debugOrUnitTests = debugOrUnitTests;
    }

    /** Copy constructor that switches to a different query block, sharing everything else. */
    public PlanContext(PlanContext context, QueryBlock block) {
        this.queryContext = context.queryContext;
        this.plan = context.plan;
        this.queryBlock = block;
        this.evalOptimizer = context.evalOptimizer;
        this.debugOrUnitTests = context.debugOrUnitTests;
    }

    public QueryBlock getQueryBlock() {
        return queryBlock;
    }

    public OverridableConf getQueryContext() {
        return queryContext;
    }

    public String toString() {
        return "block=" + queryBlock.getName() + ", relNum=" + queryBlock.getRelations().size() + ", "+
            queryBlock.namedExprsMgr.toString();
    }

    /**
     * It generates a unique table subquery name
     */
    public String generateUniqueSubQueryName() {
        return LogicalPlan.NONAME_SUBQUERY_PREFIX + noNameSubqueryId++;
    }
}
/**
 * This generates a logical plan.
 *
 * @param expr A relational algebraic expression for a query.
 * @return A logical plan
 */
public LogicalPlan createPlan(OverridableConf context, Expr expr) throws TajoException {
  // Delegate to the three-argument variant with debugging disabled.
  final boolean debug = false;
  return createPlan(context, expr, debug);
}
/**
 * Generates a logical plan, optionally recording extra debugging state
 * (raw targets) for unit tests.
 *
 * @param queryContext session configuration for this query
 * @param expr algebra expression to plan
 * @param debug true to keep debugging/unit-test information on the blocks
 */
@VisibleForTesting
public LogicalPlan createPlan(OverridableConf queryContext, Expr expr, boolean debug) throws TajoException {
  LogicalPlan plan = new LogicalPlan();

  QueryBlock rootQueryBlock = plan.newAndGetBlock(LogicalPlan.ROOT_BLOCK);
  PlanContext planContext = new PlanContext(queryContext, plan, rootQueryBlock, evalOptimizer, debug);

  // Preprocessing pass, then reset generated ids before the main planning visit.
  preprocessor.visit(planContext, new Stack<Expr>(), expr);
  plan.resetGeneratedId();
  LogicalNode topNode = this.visit(planContext, new Stack<Expr>(), expr);

  // Attach the root node on top of the produced plan tree.
  LogicalRootNode rootNode = plan.createNode(LogicalRootNode.class);
  rootNode.setChild(topNode);
  rootNode.setInSchema(topNode.getOutSchema());
  rootNode.setOutSchema(topNode.getOutSchema());
  plan.getRootBlock().setRoot(rootNode);

  return plan;
}
/** Returns the expression annotator used by this planner. */
public ExprAnnotator getExprAnnotator() {
  return exprAnnotator;
}
/** Visitor pre-hook: records the expression about to be planned as the block's current node. */
public void preHook(PlanContext context, Stack<Expr> stack, Expr expr) throws TajoException {
  context.getQueryBlock().updateCurrentNode(expr);
}
/**
 * Visitor post-hook: registers the produced node as the block's latest node and,
 * when the visit stack is exhausted, as the block's root.
 *
 * <p>Some generated logical nodes (e.g., implicit aggregation) are passed with a
 * null expr parameter; the RelationList/ScanNode short-circuit therefore checks
 * expr for null first.
 */
public LogicalNode postHook(PlanContext context, Stack<Expr> stack, Expr expr, LogicalNode current)
    throws TajoException {

  // A relation list including a single ScanNode will return a ScanNode instance
  // that already passed postHook, so skip the already-visited instance.
  if (expr != null && expr.getType() == OpType.RelationList && current.getType() == NodeType.SCAN) {
    return current;
  }

  QueryBlock queryBlock = context.queryBlock;
  queryBlock.updateLatestNode(current);

  if (stack.isEmpty()) {
    // This node is the topmost one of the block.
    queryBlock.setRoot(current);
  } else {
    queryBlock.updateCurrentNode(stack.peek());
  }

  return current;
}
/** Plans a SET SESSION statement into its pre-created {@code SetSessionNode}. */
@Override
public LogicalNode visitSetSession(PlanContext context, Stack<Expr> stack, SetSession expr) throws TajoException {
  QueryBlock queryBlock = context.queryBlock;
  SetSessionNode node = queryBlock.getNodeFromExpr(expr);
  node.init(expr.getName(), expr.getValue());
  return node;
}
/**
 * Marks the plan as an EXPLAIN (global or local) and plans the explained child query.
 */
@Override
public LogicalNode visitExplain(PlanContext ctx, Stack<Expr> stack, Explain expr) throws TajoException {
  ctx.plan.setExplain(expr.isGlobal());
  return visit(ctx, stack, expr.getChild());
}
/*===============================================================================================
Data Manupulation Language (DML) SECTION
===============================================================================================*/
/*===============================================================================================
PROJECTION SECTION
===============================================================================================*/
/**
 * Plans a SELECT list. It normalizes and registers every projection expression,
 * builds the child plan, inserts implicit group-by / window-aggregation nodes
 * when needed, and finally constructs the ProjectionNode.
 */
@Override
public LogicalNode visitProjection(PlanContext context, Stack<Expr> stack, Projection projection)
    throws TajoException {
  QueryBlock block = context.queryBlock;

  // If a non-from statement is given
  if (!projection.hasChild()) {
    return buildPlanForNoneFromStatement(context, stack, projection);
  }

  String [] referenceNames;
  // in prephase, insert all target list into NamedExprManagers.
  // Then it gets reference names, each of which points an expression in target list.
  Pair<String [], ExprNormalizer.WindowSpecReferences []> referencesPair = doProjectionPrephase(context, projection);
  referenceNames = referencesPair.getFirst();

  ////////////////////////////////////////////////////////
  // Visit and Build Child Plan
  ////////////////////////////////////////////////////////
  stack.push(projection);
  LogicalNode child = visit(context, stack, projection.getChild());

  // check if it is implicit aggregation. If so, it inserts group-by node to its child.
  if (block.isAggregationRequired()) {
    child = insertGroupbyNode(context, child, stack);
  }

  // Window functions require a WindowAggNode. insertWindowAggNode may splice the
  // node below an existing sort/limit child, in which case it returns null and
  // the original child is kept.
  if (block.hasWindowSpecs()) {
    LogicalNode windowAggNode =
        insertWindowAggNode(context, child, stack, referenceNames, referencesPair.getSecond());
    if (windowAggNode != null) {
      child = windowAggNode;
    }
  }
  stack.pop();
  ////////////////////////////////////////////////////////

  ProjectionNode projectionNode;
  Target[] targets;
  targets = buildTargets(context, referenceNames);

  // Set ProjectionNode
  projectionNode = context.queryBlock.getNodeFromExpr(projection);
  projectionNode.init(projection.isDistinct(), targets);
  projectionNode.setChild(child);
  projectionNode.setInSchema(child.getOutSchema());
  projectionNode.setOutSchema(PlannerUtil.targetToSchema(targets));

  if (projection.isDistinct() && block.hasNode(NodeType.GROUP_BY)) {
    throw makeSyntaxError("Cannot support grouping and distinct at the same time yet");
  } else {
    if (projection.isDistinct()) {
      // DISTINCT is implemented as duplicate removal via an extra group-by node.
      insertDistinctOperator(context, projectionNode, child, stack);
    }
  }

  // It's for debugging and unit tests purpose.
  // It sets raw targets, all of them are raw expressions instead of references.
  if (context.debugOrUnitTests) {
    setRawTargets(context, targets, referenceNames, projection);
  }

  verifyProjectedFields(block, projectionNode);
  return projectionNode;
}
/**
 * Builds and stores "raw" targets — full annotated expressions rather than
 * references — on the query block. Used only for debugging and unit tests
 * (guarded by {@code context.debugOrUnitTests} at the call site).
 *
 * <p>The {@code targets} parameter is unused but retained for signature stability.
 */
private void setRawTargets(PlanContext context, Target[] targets, String[] referenceNames,
                           Projection projection) throws TajoException {
  QueryBlock block = context.queryBlock;

  Target [] rawTargets = new Target[projection.getNamedExprs().length];
  for (int i = 0; i < projection.getNamedExprs().length; i++) {
    NamedExpr namedExpr = projection.getNamedExprs()[i];
    // Annotate the original expression directly instead of going through references.
    EvalNode evalNode = exprAnnotator.createEvalNode(context, namedExpr.getExpr(),
        NameResolvingMode.RELS_AND_SUBEXPRS);
    rawTargets[i] = new Target(evalNode, referenceNames[i]);
  }
  // it's for debugging or unit testing
  block.setRawTargets(rawTargets);
}
/**
 * Implements SELECT DISTINCT by inserting a duplicate-removal group-by whose
 * grouping keys are all projected columns, then re-parenting the projection on
 * top of the new node.
 */
private void insertDistinctOperator(PlanContext context, ProjectionNode projectionNode, LogicalNode child,
                                    Stack<Expr> stack) throws TajoException {
  QueryBlock block = context.queryBlock;

  Schema outSchema = projectionNode.getOutSchema();
  GroupbyNode dupRemoval = context.plan.createNode(GroupbyNode.class);
  dupRemoval.setChild(child);
  dupRemoval.setInSchema(projectionNode.getInSchema());
  dupRemoval.setTargets(PlannerUtil.schemaToTargets(outSchema));
  // Grouping on every output column removes duplicate rows.
  dupRemoval.setGroupingColumns(outSchema.toArray());

  block.registerNode(dupRemoval);
  postHook(context, stack, null, dupRemoval);

  projectionNode.setChild(dupRemoval);
  projectionNode.setInSchema(dupRemoval.getOutSchema());
}
/**
 * Pre-phase of projection planning: normalizes every target expression and
 * registers the results in the block's NamedExprsManager. Targets without window
 * functions are processed first, then targets containing window functions.
 *
 * @return a pair of (reference name per target, collected window-spec references)
 */
private Pair<String [], ExprNormalizer.WindowSpecReferences []> doProjectionPrephase(PlanContext context,
                                                                                    Projection projection)
    throws TajoException {
  QueryBlock block = context.queryBlock;

  int finalTargetNum = projection.size();
  String [] referenceNames = new String[finalTargetNum];
  ExprNormalizedResult[] normalizedExprList = new ExprNormalizedResult[finalTargetNum];

  List<ExprNormalizer.WindowSpecReferences> windowSpecReferencesList = TUtil.newList();

  // First pass: targets that contain no window function.
  List<Integer> targetsIds = normalize(context, projection, normalizedExprList, new Matcher() {
    @Override
    public boolean isMatch(Expr expr) {
      return ExprFinder.finds(expr, OpType.WindowFunction).size() == 0;
    }
  });

  // Note: Why separate normalization and add(Named)Expr?
  //
  // ExprNormalizer internally makes use of the named exprs in NamedExprsManager.
  // If we don't separate normalization work and addExprWithName, addExprWithName will find named exprs evaluated
  // the same logical node. It will cause impossible evaluation in physical executors.
  addNamedExprs(block, referenceNames, normalizedExprList, windowSpecReferencesList, projection, targetsIds);

  // Second pass: targets that do contain window functions.
  targetsIds = normalize(context, projection, normalizedExprList, new Matcher() {
    @Override
    public boolean isMatch(Expr expr) {
      return ExprFinder.finds(expr, OpType.WindowFunction).size() > 0;
    }
  });
  addNamedExprs(block, referenceNames, normalizedExprList, windowSpecReferencesList, projection, targetsIds);

  return new Pair<String[], ExprNormalizer.WindowSpecReferences []>(referenceNames,
      windowSpecReferencesList.toArray(new ExprNormalizer.WindowSpecReferences[windowSpecReferencesList.size()]));
}
/** Predicate selecting which projection expressions a normalization pass handles. */
private interface Matcher {
  boolean isMatch(Expr expr);
}
/**
 * Normalizes each projection expression accepted by {@code matcher} into
 * {@code normalizedExprList} (in place, keyed by target index) and returns the
 * indexes of the processed targets. Also flags the block as requiring
 * aggregation when any target contains an aggregation function.
 */
public List<Integer> normalize(PlanContext context, Projection projection, ExprNormalizedResult [] normalizedExprList,
                               Matcher matcher) throws TajoException {
  List<Integer> targetIds = new ArrayList<Integer>();
  for (int i = 0; i < projection.size(); i++) {
    NamedExpr namedExpr = projection.getNamedExprs()[i];

    if (PlannerUtil.existsAggregationFunction(namedExpr)) {
      context.queryBlock.setAggregationRequire();
    }

    if (matcher.isMatch(namedExpr.getExpr())) {
      // If a value is constant value, it adds the constant value with a proper name to the constant map
      // of the current block
      if (!namedExpr.hasAlias() && OpType.isLiteralType(namedExpr.getExpr().getType())) {
        String generatedName = context.plan.generateUniqueColumnName(namedExpr.getExpr());
        ConstEval constEval = (ConstEval) exprAnnotator.createEvalNode(context, namedExpr.getExpr(),
            NameResolvingMode.RELS_ONLY);
        context.getQueryBlock().addConstReference(generatedName, namedExpr.getExpr(), constEval);
        // The target is replaced by a reference to the registered constant.
        normalizedExprList[i] = new ExprNormalizedResult(context, false);
        normalizedExprList[i].baseExpr = new ColumnReferenceExpr(generatedName);
      } else {
        // dissect an expression into multiple parts (at most dissected into three parts)
        normalizedExprList[i] = normalizer.normalize(context, namedExpr.getExpr());
      }
      targetIds.add(i);
    }
  }

  return targetIds;
}
/**
 * Registers the normalized expressions of the given targets in the block's
 * NamedExprsManager and records each target's reference name in
 * {@code referenceNames}. Window-spec references are accumulated into
 * {@code windowSpecReferencesList}.
 */
private void addNamedExprs(QueryBlock block, String [] referenceNames, ExprNormalizedResult [] normalizedExprList,
                           List<ExprNormalizer.WindowSpecReferences> windowSpecReferencesList, Projection projection,
                           List<Integer> targetIds) throws TajoException {
  for (int i : targetIds) {
    NamedExpr namedExpr = projection.getNamedExprs()[i];
    // Get all projecting references
    if (namedExpr.hasAlias()) {
      NamedExpr aliasedExpr = new NamedExpr(normalizedExprList[i].baseExpr, namedExpr.getAlias());
      referenceNames[i] = block.namedExprsMgr.addNamedExpr(aliasedExpr);
    } else {
      referenceNames[i] = block.namedExprsMgr.addExpr(normalizedExprList[i].baseExpr);
    }

    // Add sub-expressions (i.e., aggregation part and scalar part) from dissected parts.
    block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].aggExprs);
    block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].scalarExprs);
    block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].windowAggExprs);
    windowSpecReferencesList.addAll(normalizedExprList[i].windowSpecs);
  }
}
/**
 * It builds non-from statement (only expressions) like '<code>SELECT 1+3 as plus</code>'.
 * Every expression is annotated eagerly and collected into an {@code EvalExprNode}.
 */
private EvalExprNode buildPlanForNoneFromStatement(PlanContext context, Stack<Expr> stack, Projection projection)
    throws TajoException {
  QueryBlock block = context.queryBlock;

  int finalTargetNum = projection.getNamedExprs().length;
  Target [] targets = new Target[finalTargetNum];

  for (int i = 0; i < targets.length; i++) {
    NamedExpr namedExpr = projection.getNamedExprs()[i];
    EvalNode evalNode = exprAnnotator.createEvalNode(context, namedExpr.getExpr(), NameResolvingMode.RELS_ONLY);
    if (namedExpr.hasAlias()) {
      targets[i] = new Target(evalNode, namedExpr.getAlias());
    } else {
      // No alias given; derive a unique column name from the expression itself.
      targets[i] = new Target(evalNode, context.plan.generateUniqueColumnName(namedExpr.getExpr()));
    }
  }
  EvalExprNode evalExprNode = context.queryBlock.getNodeFromExpr(projection);
  evalExprNode.setTargets(targets);
  evalExprNode.setOutSchema(PlannerUtil.targetToSchema(targets));

  // it's for debugging or unit testing
  block.setRawTargets(targets);
  return evalExprNode;
}
/**
 * Resolves each reference name into a {@code Target}: constants come from the
 * block's constant map, already-evaluated expressions from the NamedExprsManager
 * cache, and anything else is annotated into an EvalNode on demand and then
 * marked as evaluated.
 */
private Target [] buildTargets(PlanContext context, String[] referenceNames)
    throws TajoException {
  QueryBlock block = context.queryBlock;

  Target [] built = new Target[referenceNames.length];
  for (int idx = 0; idx < referenceNames.length; idx++) {
    String reference = referenceNames[idx];
    if (block.isConstReference(reference)) {
      built[idx] = new Target(block.getConstByReference(reference), reference);
    } else if (block.namedExprsMgr.isEvaluated(reference)) {
      built[idx] = block.namedExprsMgr.getTarget(reference);
    } else {
      NamedExpr pending = block.namedExprsMgr.getNamedExpr(reference);
      EvalNode annotated = exprAnnotator.createEvalNode(context, pending.getExpr(),
          NameResolvingMode.RELS_AND_SUBEXPRS);
      block.namedExprsMgr.markAsEvaluated(reference, annotated);
      built[idx] = new Target(annotated, reference);
    }
  }
  return built;
}
/**
 * It checks if all targets of Projectable plan node can be evaluated from the child node.
 * It can avoid potential errors which possibly occur in physical operators.
 *
 * @param block QueryBlock which includes the Projectable node (currently unused; kept for API stability)
 * @param projectable Projectable node to be valid
 * @throws TajoException if any referenced column cannot be resolved
 */
public static void verifyProjectedFields(QueryBlock block, Projectable projectable) throws TajoException {
  if (projectable instanceof GroupbyNode) {
    GroupbyNode groupbyNode = (GroupbyNode) projectable;

    // The leading targets correspond to grouping keys; each FIELD key must be
    // resolvable from the input schema.
    if (!groupbyNode.isEmptyGrouping()) { // it should be targets instead of
      int groupingKeyNum = groupbyNode.getGroupingColumns().length;
      for (int i = 0; i < groupingKeyNum; i++) {
        Target target = groupbyNode.getTargets()[i];
        // Use the local 'target' consistently (was re-indexing getTargets()[i]).
        if (target.getEvalTree().getType() == EvalType.FIELD) {
          FieldEval grpKeyEvalNode = target.getEvalTree();
          if (!groupbyNode.getInSchema().contains(grpKeyEvalNode.getColumnRef())) {
            throwCannotEvaluateException(projectable, grpKeyEvalNode.getName());
          }
        }
      }
    }

    if (groupbyNode.hasAggFunctions()) {
      verifyIfEvalNodesCanBeEvaluated(projectable, groupbyNode.getAggFunctions());
    }

  } else if (projectable instanceof WindowAggNode) {
    WindowAggNode windowAggNode = (WindowAggNode) projectable;
    if (windowAggNode.hasPartitionKeys()) {
      verifyIfColumnCanBeEvaluated(projectable.getInSchema(), projectable, windowAggNode.getPartitionKeys());
    }

    if (windowAggNode.hasAggFunctions()) {
      verifyIfEvalNodesCanBeEvaluated(projectable, windowAggNode.getWindowFunctions());
    }

    if (windowAggNode.hasSortSpecs()) {
      Column [] sortKeys = PlannerUtil.sortSpecsToSchema(windowAggNode.getSortSpecs()).toArray();
      verifyIfColumnCanBeEvaluated(projectable.getInSchema(), projectable, sortKeys);
    }

    // verify targets except for function slots
    for (int i = 0; i < windowAggNode.getTargets().length - windowAggNode.getWindowFunctions().length; i++) {
      Target target = windowAggNode.getTargets()[i];
      Set<Column> columns = EvalTreeUtil.findUniqueColumns(target.getEvalTree());
      for (Column c : columns) {
        if (!windowAggNode.getInSchema().contains(c)) {
          throwCannotEvaluateException(projectable, c.getQualifiedName());
        }
      }
    }

  } else if (projectable instanceof RelationNode) {
    RelationNode relationNode = (RelationNode) projectable;
    prohibitNestedRecordProjection((Projectable) relationNode);
    // Relations are checked against their logical schema rather than the input schema.
    verifyIfTargetsCanBeEvaluated(relationNode.getLogicalSchema(), (Projectable) relationNode);

  } else {
    prohibitNestedRecordProjection(projectable);
    verifyIfTargetsCanBeEvaluated(projectable.getInSchema(), projectable);
  }
}
/** Rejects any target whose value type is RECORD; record-field projection is not implemented yet. */
public static void prohibitNestedRecordProjection(Projectable projectable)
    throws TajoException {
  for (Target target : projectable.getTargets()) {
    boolean isRecordType = target.getEvalTree().getValueType().getType() == TajoDataTypes.Type.RECORD;
    if (isRecordType) {
      throw new NotImplementedException("record field projection");
    }
  }
}
/** Verifies that every column referenced by the given eval nodes exists in the node's input schema. */
public static void verifyIfEvalNodesCanBeEvaluated(Projectable projectable, EvalNode[] evalNodes)
    throws TajoException {
  for (EvalNode evalNode : evalNodes) {
    for (Column column : EvalTreeUtil.findUniqueColumns(evalNode)) {
      if (!projectable.getInSchema().contains(column)) {
        throwCannotEvaluateException(projectable, column.getQualifiedName());
      }
    }
  }
}
/** Verifies that every column referenced by the node's targets is present in {@code baseSchema}. */
public static void verifyIfTargetsCanBeEvaluated(Schema baseSchema, Projectable projectable)
    throws TajoException {
  for (Target target : projectable.getTargets()) {
    for (Column column : EvalTreeUtil.findUniqueColumns(target.getEvalTree())) {
      if (!baseSchema.contains(column)) {
        throwCannotEvaluateException(projectable, column.getQualifiedName());
      }
    }
  }
}
/** Verifies that each of the given columns is present in {@code baseSchema}. */
public static void verifyIfColumnCanBeEvaluated(Schema baseSchema, Projectable projectable, Column [] columns)
    throws TajoException {
  for (Column column : columns) {
    boolean resolvable = baseSchema.contains(column);
    if (!resolvable) {
      throwCannotEvaluateException(projectable, column.getQualifiedName());
    }
  }
}
/**
 * Throws a syntax error explaining why {@code columnName} cannot be evaluated at
 * the given node. When the node sits directly above a GROUP BY, the message
 * points the user at the GROUP BY clause; otherwise a generic message is used.
 * Both branches now build their messages via String.format for consistency
 * (output text is unchanged).
 */
public static void throwCannotEvaluateException(Projectable projectable, String columnName) throws TajoException {
  if (projectable instanceof UnaryNode && ((UnaryNode) projectable).getChild().getType() == NodeType.GROUP_BY) {
    throw makeSyntaxError(String.format(
        "%s must appear in the GROUP BY clause or be used in an aggregate function at node (%d)",
        columnName, projectable.getPID()));
  } else {
    throw makeSyntaxError(String.format("Cannot evaluate the field \"%s\" at node (%d)",
        columnName, projectable.getPID()));
  }
}
/**
 * Builds a WindowAggNode for the block's window functions and wires it into the
 * plan. If the current child is a LIMIT or SORT node, the window node is spliced
 * beneath it and this method returns null (the caller then keeps its original
 * child); otherwise the new WindowAggNode itself is returned.
 */
private LogicalNode insertWindowAggNode(PlanContext context, LogicalNode child, Stack<Expr> stack,
                                        String [] referenceNames,
                                        ExprNormalizer.WindowSpecReferences [] windowSpecReferenceses)
    throws TajoException {
  // NOTE(review): the local 'plan' below is unused in this method.
  LogicalPlan plan = context.plan;
  QueryBlock block = context.queryBlock;
  WindowAggNode windowAggNode = context.plan.createNode(WindowAggNode.class);
  if (child.getType() == NodeType.LIMIT) {
    // Splice the window aggregation below the existing limit node.
    LimitNode limitNode = (LimitNode) child;
    windowAggNode.setChild(limitNode.getChild());
    windowAggNode.setInSchema(limitNode.getChild().getOutSchema());
    limitNode.setChild(windowAggNode);
  } else if (child.getType() == NodeType.SORT) {
    // Splice the window aggregation below the existing sort node.
    SortNode sortNode = (SortNode) child;
    windowAggNode.setChild(sortNode.getChild());
    windowAggNode.setInSchema(sortNode.getChild().getOutSchema());
    sortNode.setChild(windowAggNode);
  } else {
    windowAggNode.setChild(child);
    windowAggNode.setInSchema(child.getOutSchema());
  }

  // Collect unevaluated expressions that turn out to be window functions.
  List<String> winFuncRefs = new ArrayList<String>();
  List<WindowFunctionEval> winFuncs = new ArrayList<WindowFunctionEval>();
  List<WindowSpec> rawWindowSpecs = Lists.newArrayList();
  for (Iterator<NamedExpr> it = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); it.hasNext();) {
    NamedExpr rawTarget = it.next();
    try {
      EvalNode evalNode = exprAnnotator.createEvalNode(context, rawTarget.getExpr(),
          NameResolvingMode.SUBEXPRS_AND_RELS);
      if (evalNode.getType() == EvalType.WINDOW_FUNCTION) {
        winFuncRefs.add(rawTarget.getAlias());
        winFuncs.add((WindowFunctionEval) evalNode);
        block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);

        // TODO - Later, we also consider the possibility that a window function contains only a window name.
        rawWindowSpecs.add(((WindowFunctionExpr) (rawTarget.getExpr())).getWindowSpec());
      }
    } catch (UndefinedColumnException uc) {
      // Intentionally ignored: expressions that cannot be resolved yet are left
      // unevaluated for a later planning stage.
    }
  }

  // we only consider one window definition.
  if (windowSpecReferenceses[0].hasPartitionKeys()) {
    Column [] partitionKeyColumns = new Column[windowSpecReferenceses[0].getPartitionKeys().length];
    int i = 0;
    for (String partitionKey : windowSpecReferenceses[0].getPartitionKeys()) {
      if (block.namedExprsMgr.isEvaluated(partitionKey)) {
        partitionKeyColumns[i++] = block.namedExprsMgr.getTarget(partitionKey).getNamedColumn();
      } else {
        throw makeSyntaxError("Each grouping column expression must be a scalar expression.");
      }
    }
    windowAggNode.setPartitionKeys(partitionKeyColumns);
  }

  // Resolve the ORDER BY keys of each window spec into annotated sort specs.
  SortSpec [][] sortGroups = new SortSpec[rawWindowSpecs.size()][];

  for (int winSpecIdx = 0; winSpecIdx < rawWindowSpecs.size(); winSpecIdx++) {
    WindowSpec spec = rawWindowSpecs.get(winSpecIdx);
    if (spec.hasOrderBy()) {
      Sort.SortSpec [] sortSpecs = spec.getSortSpecs();
      int sortNum = sortSpecs.length;
      String [] sortKeyRefNames = windowSpecReferenceses[winSpecIdx].getOrderKeys();
      SortSpec [] annotatedSortSpecs = new SortSpec[sortNum];

      Column column;
      for (int i = 0; i < sortNum; i++) {
        if (block.namedExprsMgr.isEvaluated(sortKeyRefNames[i])) {
          column = block.namedExprsMgr.getTarget(sortKeyRefNames[i]).getNamedColumn();
        } else {
          throw new IllegalStateException("Unexpected State: " + StringUtils.join(sortSpecs));
        }
        annotatedSortSpecs[i] = new SortSpec(column, sortSpecs[i].isAscending(), sortSpecs[i].isNullFirst());
      }

      sortGroups[winSpecIdx] = annotatedSortSpecs;
    } else {
      sortGroups[winSpecIdx] = null;
    }
  }

  // Attach the resolved sort specs to each window function.
  for (int i = 0; i < winFuncRefs.size(); i++) {
    WindowFunctionEval winFunc = winFuncs.get(i);
    if (sortGroups[i] != null) {
      winFunc.setSortSpecs(sortGroups[i]);
    }
  }

  Target [] targets = new Target[referenceNames.length];
  List<Integer> windowFuncIndices = Lists.newArrayList();
  Projection projection = (Projection) stack.peek();
  // Record which projection slots are window functions.
  int windowFuncIdx = 0;
  for (NamedExpr expr : projection.getNamedExprs()) {
    if (expr.getExpr().getType() == OpType.WindowFunction) {
      windowFuncIndices.add(windowFuncIdx);
    }
    windowFuncIdx++;
  }
  windowAggNode.setWindowFunctions(winFuncs.toArray(new WindowFunctionEval[winFuncs.size()]));

  // Non-window targets come first, followed by the window-function targets.
  int targetIdx = 0;
  for (int i = 0; i < referenceNames.length ; i++) {
    if (!windowFuncIndices.contains(i)) {
      if (block.isConstReference(referenceNames[i])) {
        targets[targetIdx++] = new Target(block.getConstByReference(referenceNames[i]), referenceNames[i]);
      } else {
        targets[targetIdx++] = block.namedExprsMgr.getTarget(referenceNames[i]);
      }
    }
  }
  for (int i = 0; i < winFuncRefs.size(); i++) {
    targets[targetIdx++] = block.namedExprsMgr.getTarget(winFuncRefs.get(i));
  }
  windowAggNode.setTargets(targets);
  verifyProjectedFields(block, windowAggNode);

  block.registerNode(windowAggNode);
  postHook(context, stack, null, windowAggNode);

  // When the window node was spliced under a limit/sort node, fix up that node's
  // schemas and return null so the caller keeps its original child reference.
  if (child.getType() == NodeType.LIMIT) {
    LimitNode limitNode = (LimitNode) child;
    limitNode.setInSchema(windowAggNode.getOutSchema());
    limitNode.setOutSchema(windowAggNode.getOutSchema());
    return null;
  } else if (child.getType() == NodeType.SORT) {
    SortNode sortNode = (SortNode) child;
    sortNode.setInSchema(windowAggNode.getOutSchema());
    sortNode.setOutSchema(windowAggNode.getOutSchema());
    return null;
  } else {
    return windowAggNode;
  }
}
/**
 * Insert a group-by operator before a sort or a projection operator.
 * It is used only when a group-by clause is not given (implicit aggregation,
 * e.g. {@code SELECT sum(x) FROM t}).
 */
private LogicalNode insertGroupbyNode(PlanContext context, LogicalNode child, Stack<Expr> stack)
    throws TajoException {

  // NOTE(review): the local 'plan' below is unused in this method.
  LogicalPlan plan = context.plan;
  QueryBlock block = context.queryBlock;

  // The limit operation must affect to the number of results, not the number of input records.
  // Thus, the aggregation must be carried out before the limit operation.
  if (child.getType() == NodeType.LIMIT) {
    child = ((LimitNode)child).getChild();
  }

  GroupbyNode groupbyNode = context.plan.createNode(GroupbyNode.class);
  groupbyNode.setChild(child);
  groupbyNode.setInSchema(child.getOutSchema());

  // Implicit aggregation has no grouping keys.
  groupbyNode.setGroupingColumns(new Column[] {});

  Set<String> aggEvalNames = new LinkedHashSet<String>();
  Set<AggregationFunctionCallEval> aggEvals = new LinkedHashSet<AggregationFunctionCallEval>();
  boolean includeDistinctFunction = false;
  for (Iterator<NamedExpr> it = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); it.hasNext();) {
    NamedExpr rawTarget = it.next();
    try {
      // check if at least distinct aggregation function
      includeDistinctFunction |= PlannerUtil.existsDistinctAggregationFunction(rawTarget.getExpr());
      EvalNode evalNode = exprAnnotator.createEvalNode(context, rawTarget.getExpr(),
          NameResolvingMode.SUBEXPRS_AND_RELS);
      if (evalNode.getType() == EvalType.AGG_FUNCTION) {
        aggEvalNames.add(rawTarget.getAlias());
        aggEvals.add((AggregationFunctionCallEval) evalNode);
        block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);
      }
    } catch (UndefinedColumnException ve) {
      // Intentionally ignored: unresolved expressions stay unevaluated for a later stage.
    }
  }

  groupbyNode.setDistinct(includeDistinctFunction);
  groupbyNode.setAggFunctions(aggEvals.toArray(new AggregationFunctionCallEval[aggEvals.size()]));
  Target [] targets = ProjectionPushDownRule.buildGroupByTarget(groupbyNode, null,
      aggEvalNames.toArray(new String[aggEvalNames.size()]));
  groupbyNode.setTargets(targets);

  // this inserted group-by node doesn't pass through preprocessor. So manually added.
  block.registerNode(groupbyNode);
  postHook(context, stack, null, groupbyNode);

  verifyProjectedFields(block, groupbyNode);
  return groupbyNode;
}
/*===============================================================================================
SORT SECTION
===============================================================================================*/
/**
 * Plans a LIMIT clause. The fetch-count expression is either a literal,
 * annotated before the child is visited, or a general expression that must be
 * normalized, registered, and resolved after the child plan is built. In both
 * cases the count is evaluated eagerly into the LimitNode.
 */
@Override
public LimitNode visitLimit(PlanContext context, Stack<Expr> stack, Limit limit) throws TajoException {
  QueryBlock block = context.queryBlock;

  EvalNode firstFetNum;
  LogicalNode child;
  if (limit.getFetchFirstNum().getType() == OpType.Literal) {
    firstFetNum = exprAnnotator.createEvalNode(context, limit.getFetchFirstNum(),
        NameResolvingMode.RELS_ONLY);

    ////////////////////////////////////////////////////////
    // Visit and Build Child Plan
    ////////////////////////////////////////////////////////
    stack.push(limit);
    child = visit(context, stack, limit.getChild());
    stack.pop();
    ////////////////////////////////////////////////////////
  } else {
    // Non-literal count: normalize and register it so it can be resolved from
    // the child plan's named expressions.
    ExprNormalizedResult normalizedResult = normalizer.normalize(context, limit.getFetchFirstNum());
    String referName = block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
    block.namedExprsMgr.addNamedExprArray(normalizedResult.aggExprs);
    block.namedExprsMgr.addNamedExprArray(normalizedResult.scalarExprs);

    ////////////////////////////////////////////////////////
    // Visit and Build Child Plan
    ////////////////////////////////////////////////////////
    stack.push(limit);
    child = visit(context, stack, limit.getChild());
    stack.pop();
    ////////////////////////////////////////////////////////

    if (block.namedExprsMgr.isEvaluated(referName)) {
      firstFetNum = block.namedExprsMgr.getTarget(referName).getEvalTree();
    } else {
      NamedExpr namedExpr = block.namedExprsMgr.getNamedExpr(referName);
      firstFetNum = exprAnnotator.createEvalNode(context, namedExpr.getExpr(), NameResolvingMode.SUBEXPRS_AND_RELS);
      block.namedExprsMgr.markAsEvaluated(referName, firstFetNum);
    }
  }
  LimitNode limitNode = block.getNodeFromExpr(limit);
  limitNode.setChild(child);
  limitNode.setInSchema(child.getOutSchema());
  limitNode.setOutSchema(child.getOutSchema());

  // Evaluate the fetch count now; bind(null, null) indicates a tuple-independent eval.
  firstFetNum.bind(null, null);
  limitNode.setFetchFirst(firstFetNum.eval(null).asInt8());

  return limitNode;
}
/**
 * Plans an ORDER BY clause. Sort keys are normalized and registered before the
 * child is visited so they can be resolved from the child plan's output. If all
 * sort keys are constants, the sort is a no-op and the child is returned directly.
 */
@Override
public LogicalNode visitSort(PlanContext context, Stack<Expr> stack, Sort sort) throws TajoException {
  QueryBlock block = context.queryBlock;

  int sortKeyNum = sort.getSortSpecs().length;
  Sort.SortSpec[] sortSpecs = sort.getSortSpecs();
  String [] referNames = new String[sortKeyNum];

  ExprNormalizedResult [] normalizedExprList = new ExprNormalizedResult[sortKeyNum];
  for (int i = 0; i < sortKeyNum; i++) {
    normalizedExprList[i] = normalizer.normalize(context, sortSpecs[i].getKey());
  }
  for (int i = 0; i < sortKeyNum; i++) {
    referNames[i] = block.namedExprsMgr.addExpr(normalizedExprList[i].baseExpr);
    block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].aggExprs);
    block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].scalarExprs);
  }

  ////////////////////////////////////////////////////////
  // Visit and Build Child Plan
  ////////////////////////////////////////////////////////
  stack.push(sort);
  LogicalNode child = visit(context, stack, sort.getChild());
  if (block.isAggregationRequired()) {
    child = insertGroupbyNode(context, child, stack);
  }
  stack.pop();
  ////////////////////////////////////////////////////////

  SortNode sortNode = block.getNodeFromExpr(sort);
  sortNode.setChild(child);
  sortNode.setInSchema(child.getOutSchema());
  sortNode.setOutSchema(child.getOutSchema());

  // Building sort keys
  SortSpec[] annotatedSortSpecs = annotateSortSpecs(block, referNames, sortSpecs);
  if (annotatedSortSpecs.length == 0) {
    // All sort keys were constant references; sorting changes nothing, drop the node.
    return child;
  } else {
    sortNode.setSortSpecs(annotatedSortSpecs);
    return sortNode;
  }
}
/**
 * Resolves raw sort specs into annotated {@code SortSpec}s using the evaluated
 * named expressions of the block. Constant sort keys are skipped since they do
 * not affect ordering.
 */
private static SortSpec[] annotateSortSpecs(QueryBlock block, String [] referNames, Sort.SortSpec[] rawSortSpecs) {
  List<SortSpec> results = Lists.newArrayList();
  for (int idx = 0; idx < rawSortSpecs.length; idx++) {
    String refName = referNames[idx];
    if (block.isConstReference(refName)) {
      continue; // constant keys never change row order
    }
    if (!block.namedExprsMgr.isEvaluated(refName)) {
      throw new IllegalStateException("Unexpected State: " + StringUtils.join(rawSortSpecs));
    }
    Column sortColumn = block.namedExprsMgr.getTarget(refName).getNamedColumn();
    results.add(new SortSpec(sortColumn, rawSortSpecs[idx].isAscending(), rawSortSpecs[idx].isNullFirst()));
  }
  return results.toArray(new SortSpec[results.size()]);
}
/*===============================================================================================
GROUP BY SECTION
===============================================================================================*/
/**
 * Plans a HAVING clause. The qualification expression is normalized and
 * registered before the child is visited, then resolved (or annotated on
 * demand) once the child plan exists.
 */
@Override
public LogicalNode visitHaving(PlanContext context, Stack<Expr> stack, Having expr) throws TajoException {
  QueryBlock block = context.queryBlock;

  ExprNormalizedResult normalizedResult = normalizer.normalize(context, expr.getQual());
  String referName = block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
  block.namedExprsMgr.addNamedExprArray(normalizedResult.aggExprs);
  block.namedExprsMgr.addNamedExprArray(normalizedResult.scalarExprs);

  ////////////////////////////////////////////////////////
  // Visit and Build Child Plan
  ////////////////////////////////////////////////////////
  stack.push(expr);
  LogicalNode child = visit(context, stack, expr.getChild());
  stack.pop();
  ////////////////////////////////////////////////////////

  HavingNode having = context.queryBlock.getNodeFromExpr(expr);
  having.setChild(child);
  having.setInSchema(child.getOutSchema());
  having.setOutSchema(child.getOutSchema());

  EvalNode havingCondition;
  if (block.namedExprsMgr.isEvaluated(referName)) {
    havingCondition = block.namedExprsMgr.getTarget(referName).getEvalTree();
  } else {
    NamedExpr namedExpr = block.namedExprsMgr.getNamedExpr(referName);
    havingCondition = exprAnnotator.createEvalNode(context, namedExpr.getExpr(),
        NameResolvingMode.SUBEXPRS_AND_RELS);
    block.namedExprsMgr.markAsEvaluated(referName, havingCondition);
  }

  // set having condition
  having.setQual(havingCondition);

  return having;
}
@Override
public LogicalNode visitGroupBy(PlanContext context, Stack<Expr> stack, Aggregation aggregation)
throws TajoException {
// Initialization Phase:
LogicalPlan plan = context.plan;
QueryBlock block = context.queryBlock;
// Normalize grouping keys and add normalized grouping keys to NamedExprManager
int groupingKeyNum = aggregation.getGroupSet()[0].getGroupingSets().length;
ExprNormalizedResult [] normalizedResults = new ExprNormalizedResult[groupingKeyNum];
for (int i = 0; i < groupingKeyNum; i++) {
Expr groupingKey = aggregation.getGroupSet()[0].getGroupingSets()[i];
normalizedResults[i] = normalizer.normalize(context, groupingKey);
}
String [] groupingKeyRefNames = new String[groupingKeyNum];
for (int i = 0; i < groupingKeyNum; i++) {
groupingKeyRefNames[i] = block.namedExprsMgr.addExpr(normalizedResults[i].baseExpr);
block.namedExprsMgr.addNamedExprArray(normalizedResults[i].aggExprs);
block.namedExprsMgr.addNamedExprArray(normalizedResults[i].scalarExprs);
}
////////////////////////////////////////////////////////
// Visit and Build Child Plan
////////////////////////////////////////////////////////
stack.push(aggregation);
LogicalNode child = visit(context, stack, aggregation.getChild());
stack.pop();
////////////////////////////////////////////////////////
GroupbyNode groupingNode = context.queryBlock.getNodeFromExpr(aggregation);
groupingNode.setChild(child);
groupingNode.setInSchema(child.getOutSchema());
// Set grouping sets
List<Column> groupingColumns = Lists.newArrayList();
for (int i = 0; i < groupingKeyRefNames.length; i++) {
String refName = groupingKeyRefNames[i];
if (context.getQueryBlock().isConstReference(refName)) {
continue;
} else if (block.namedExprsMgr.isEvaluated(groupingKeyRefNames[i])) {
groupingColumns.add(block.namedExprsMgr.getTarget(groupingKeyRefNames[i]).getNamedColumn());
} else {
throw makeSyntaxError("Each grouping column expression must be a scalar expression.");
}
}
int effectiveGroupingKeyNum = groupingColumns.size();
groupingNode.setGroupingColumns(groupingColumns.toArray(new Column[effectiveGroupingKeyNum]));
////////////////////////////////////////////////////////
// Visit and Build Child Plan
////////////////////////////////////////////////////////
// create EvalNodes and check if each EvalNode can be evaluated here.
List<String> aggEvalNames = TUtil.newList();
List<AggregationFunctionCallEval> aggEvalNodes = TUtil.newList();
boolean includeDistinctFunction = false;
for (Iterator<NamedExpr> iterator = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); iterator.hasNext();) {
NamedExpr namedExpr = iterator.next();
try {
includeDistinctFunction |= PlannerUtil.existsDistinctAggregationFunction(namedExpr.getExpr());
EvalNode evalNode = exprAnnotator.createEvalNode(context, namedExpr.getExpr(),
NameResolvingMode.SUBEXPRS_AND_RELS);
if (evalNode.getType() == EvalType.AGG_FUNCTION) {
block.namedExprsMgr.markAsEvaluated(namedExpr.getAlias(), evalNode);
aggEvalNames.add(namedExpr.getAlias());
aggEvalNodes.add((AggregationFunctionCallEval) evalNode);
}
} catch (UndefinedColumnException ve) {
}
}
// if there is at least one distinct aggregation function
groupingNode.setDistinct(includeDistinctFunction);
groupingNode.setAggFunctions(aggEvalNodes.toArray(new AggregationFunctionCallEval[aggEvalNodes.size()]));
Target [] targets = new Target[effectiveGroupingKeyNum + aggEvalNames.size()];
// In target, grouping columns will be followed by aggregation evals.
//
// col1, col2, col3, sum(..), agv(..)
// ^^^^^^^^^^^^^^^ ^^^^^^^^^^^^^^^^^^
// grouping keys aggregation evals
// Build grouping keys
for (int i = 0; i < effectiveGroupingKeyNum; i++) {
Target target = block.namedExprsMgr.getTarget(groupingNode.getGroupingColumns()[i].getQualifiedName());
targets[i] = target;
}
for (int i = 0, targetIdx = effectiveGroupingKeyNum; i < aggEvalNodes.size(); i++, targetIdx++) {
targets[targetIdx] = block.namedExprsMgr.getTarget(aggEvalNames.get(i));
}
groupingNode.setTargets(targets);
block.unsetAggregationRequire();
verifyProjectedFields(block, groupingNode);
return groupingNode;
}
private static final Column[] ALL= Lists.newArrayList().toArray(new Column[0]);
public static List<Column[]> generateCuboids(Column[] columns) {
int numCuboids = (int) Math.pow(2, columns.length);
int maxBits = columns.length;
List<Column[]> cube = Lists.newArrayList();
List<Column> cuboidCols;
cube.add(ALL);
for (int cuboidId = 1; cuboidId < numCuboids; cuboidId++) {
cuboidCols = Lists.newArrayList();
for (int j = 0; j < maxBits; j++) {
int bit = 1 << j;
if ((cuboidId & bit) == bit) {
cuboidCols.add(columns[j]);
}
}
cube.add(cuboidCols.toArray(new Column[cuboidCols.size()]));
}
return cube;
}
@Override
public SelectionNode visitFilter(PlanContext context, Stack<Expr> stack, Selection selection)
throws TajoException {
QueryBlock block = context.queryBlock;
ExprNormalizedResult normalizedResult = normalizer.normalize(context, selection.getQual());
block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
if (normalizedResult.aggExprs.size() > 0 || normalizedResult.scalarExprs.size() > 0) {
throw makeSyntaxError("Filter condition cannot include aggregation function");
}
////////////////////////////////////////////////////////
// Visit and Build Child Plan
////////////////////////////////////////////////////////
stack.push(selection);
// Since filter push down will be done later, it is guaranteed that in-subqueries are found at only selection.
for (Expr eachQual : PlannerUtil.extractInSubquery(selection.getQual())) {
InPredicate inPredicate = (InPredicate) eachQual;
visit(context, stack, inPredicate.getInValue());
context.unplannedExprs.add(inPredicate.getInValue());
}
LogicalNode child = visit(context, stack, selection.getChild());
stack.pop();
////////////////////////////////////////////////////////
SelectionNode selectionNode = context.queryBlock.getNodeFromExpr(selection);
selectionNode.setChild(child);
selectionNode.setInSchema(child.getOutSchema());
selectionNode.setOutSchema(child.getOutSchema());
// Create EvalNode for a search condition.
EvalNode searchCondition = exprAnnotator.createEvalNode(context, selection.getQual(),
NameResolvingMode.RELS_AND_SUBEXPRS);
EvalNode simplified = context.evalOptimizer.optimize(context, searchCondition);
// set selection condition
selectionNode.setQual(simplified);
return selectionNode;
}
/*===============================================================================================
JOIN SECTION
===============================================================================================*/
@Override
public LogicalNode visitJoin(PlanContext context, Stack<Expr> stack, Join join)
throws TajoException {
// Phase 1: Init
LogicalPlan plan = context.plan;
QueryBlock block = context.queryBlock;
if (join.hasQual()) {
ExprNormalizedResult normalizedResult = normalizer.normalize(context, join.getQual(), true);
block.namedExprsMgr.addExpr(normalizedResult.baseExpr);
if (normalizedResult.aggExprs.size() > 0 || normalizedResult.scalarExprs.size() > 0) {
throw makeSyntaxError("Filter condition cannot include aggregation function");
}
}
////////////////////////////////////////////////////////
// Visit and Build Child Plan
////////////////////////////////////////////////////////
stack.push(join);
LogicalNode left = visit(context, stack, join.getLeft());
LogicalNode right = visit(context, stack, join.getRight());
stack.pop();
////////////////////////////////////////////////////////
JoinNode joinNode = context.queryBlock.getNodeFromExpr(join);
joinNode.setJoinType(join.getJoinType());
joinNode.setLeftChild(left);
joinNode.setRightChild(right);
// Set A merged input schema
Schema merged;
if (join.isNatural()) {
merged = getNaturalJoinSchema(left, right);
} else {
merged = SchemaUtil.merge(left.getOutSchema(), right.getOutSchema());
}
joinNode.setInSchema(merged);
// Create EvalNode for a search condition.
EvalNode joinCondition = null;
if (join.hasQual()) {
EvalNode evalNode = exprAnnotator.createEvalNode(context, join.getQual(), NameResolvingMode.LEGACY);
joinCondition = context.evalOptimizer.optimize(context, evalNode);
}
// If the query involves a subquery, the stack can be empty.
// In this case, this join is the top most one within a query block.
boolean isTopMostJoin = stack.isEmpty() ? true : stack.peek().getType() != OpType.Join;
List<String> newlyEvaluatedExprs = getNewlyEvaluatedExprsForJoin(context, joinNode, isTopMostJoin);
List<Target> targets = TUtil.newList(PlannerUtil.schemaToTargets(merged));
for (String newAddedExpr : newlyEvaluatedExprs) {
targets.add(block.namedExprsMgr.getTarget(newAddedExpr, true));
}
joinNode.setTargets(targets.toArray(new Target[targets.size()]));
// Determine join conditions
if (join.isNatural()) { // if natural join, it should have the equi-join conditions by common column names
EvalNode njCond = getNaturalJoinCondition(joinNode);
joinNode.setJoinQual(njCond);
} else if (join.hasQual()) { // otherwise, the given join conditions are set
joinNode.setJoinQual(joinCondition);
}
return joinNode;
}
private List<String> getNewlyEvaluatedExprsForJoin(PlanContext context, JoinNode joinNode, boolean isTopMostJoin)
throws TajoException {
QueryBlock block = context.queryBlock;
EvalNode evalNode;
List<String> newlyEvaluatedExprs = TUtil.newList();
for (Iterator<NamedExpr> it = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); it.hasNext();) {
NamedExpr namedExpr = it.next();
try {
evalNode = exprAnnotator.createEvalNode(context, namedExpr.getExpr(), NameResolvingMode.LEGACY);
// the predicates specified in the on clause are already processed in visitJoin()
if (LogicalPlanner.checkIfBeEvaluatedAtJoin(context.queryBlock, evalNode, joinNode, isTopMostJoin)) {
block.namedExprsMgr.markAsEvaluated(namedExpr.getAlias(), evalNode);
newlyEvaluatedExprs.add(namedExpr.getAlias());
}
} catch (UndefinedColumnException ve) {
}
}
return newlyEvaluatedExprs;
}
private static Schema getNaturalJoinSchema(LogicalNode left, LogicalNode right) {
Schema joinSchema = new Schema();
Schema commons = SchemaUtil.getNaturalJoinColumns(left.getOutSchema(), right.getOutSchema());
joinSchema.addColumns(commons);
for (Column c : left.getOutSchema().getRootColumns()) {
if (!joinSchema.contains(c.getQualifiedName())) {
joinSchema.addColumn(c);
}
}
for (Column c : right.getOutSchema().getRootColumns()) {
if (!joinSchema.contains(c.getQualifiedName())) {
joinSchema.addColumn(c);
}
}
return joinSchema;
}
private static EvalNode getNaturalJoinCondition(JoinNode joinNode) {
Schema leftSchema = joinNode.getLeftChild().getInSchema();
Schema rightSchema = joinNode.getRightChild().getInSchema();
Schema commons = SchemaUtil.getNaturalJoinColumns(leftSchema, rightSchema);
EvalNode njQual = null;
EvalNode equiQual;
Column leftJoinKey;
Column rightJoinKey;
for (Column common : commons.getRootColumns()) {
leftJoinKey = leftSchema.getColumn(common.getQualifiedName());
rightJoinKey = rightSchema.getColumn(common.getQualifiedName());
equiQual = new BinaryEval(EvalType.EQUAL,
new FieldEval(leftJoinKey), new FieldEval(rightJoinKey));
if (njQual == null) {
njQual = equiQual;
} else {
njQual = new BinaryEval(EvalType.AND, njQual, equiQual);
}
}
return njQual;
}
  /**
   * Creates a CROSS join node over the two given children. Any still-unevaluated
   * expression that contains neither a distinct aggregation nor a window function
   * is marked evaluated here and added to the join's targets.
   *
   * @param left  the left child plan
   * @param right the right child plan
   * @return the new cross-join node
   */
  private LogicalNode createCartesianProduct(PlanContext context, LogicalNode left, LogicalNode right)
      throws TajoException {
    LogicalPlan plan = context.plan;
    QueryBlock block = context.queryBlock;
    Schema merged = SchemaUtil.merge(left.getOutSchema(), right.getOutSchema());
    JoinNode join = plan.createNode(JoinNode.class);
    join.init(JoinType.CROSS, left, right);
    join.setInSchema(merged);
    block.addJoinType(join.getJoinType());
    EvalNode evalNode;
    List<String> newlyEvaluatedExprs = TUtil.newList();
    // Try to evaluate each pending expression at this node; expressions using
    // distinct aggregations or window functions must be evaluated elsewhere.
    for (Iterator<NamedExpr> it = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); it.hasNext();) {
      NamedExpr namedExpr = it.next();
      try {
        evalNode = exprAnnotator.createEvalNode(context, namedExpr.getExpr(), NameResolvingMode.LEGACY);
        if (EvalTreeUtil.findDistinctAggFunction(evalNode).size() == 0
            && EvalTreeUtil.findWindowFunction(evalNode).size() == 0) {
          block.namedExprsMgr.markAsEvaluated(namedExpr.getAlias(), evalNode);
          newlyEvaluatedExprs.add(namedExpr.getAlias());
        }
      } catch (UndefinedColumnException ve) {
        // Deliberately ignored: expression is not resolvable here and stays unevaluated.
      }
    }
    // Targets are the merged child columns plus the expressions evaluated above.
    List<Target> targets = TUtil.newList(PlannerUtil.schemaToTargets(merged));
    for (String newAddedExpr : newlyEvaluatedExprs) {
      targets.add(block.namedExprsMgr.getTarget(newAddedExpr, true));
    }
    join.setTargets(targets.toArray(new Target[targets.size()]));
    return join;
  }
@Override
public LogicalNode visitRelationList(PlanContext context, Stack<Expr> stack, RelationList relations)
throws TajoException {
LogicalNode current = visit(context, stack, relations.getRelations()[0]);
LogicalNode left;
LogicalNode right;
if (relations.size() > 1) {
for (int i = 1; i < relations.size(); i++) {
left = current;
right = visit(context, stack, relations.getRelations()[i]);
current = createCartesianProduct(context, left, right);
}
}
context.queryBlock.registerNode(current);
return current;
}
  /**
   * Plans a base relation (table scan). Besides plain column references, any
   * additional expression from the select list, where clause, or order-by that
   * can be computed from this relation alone is evaluated here, and the scan
   * node's targets are set accordingly.
   */
  @Override
  public ScanNode visitRelation(PlanContext context, Stack<Expr> stack, Relation expr)
      throws TajoException {
    QueryBlock block = context.queryBlock;

    ScanNode scanNode = block.getNodeFromExpr(expr);
    updatePhysicalInfo(context, scanNode.getTableDesc());

    // Find expression which can be evaluated at this relation node.
    // Except for column references, additional expressions used in select list, where clause, order-by clauses
    // can be evaluated here. Their reference names are kept in newlyEvaluatedExprsRef.
    Set<String> newlyEvaluatedExprsReferences = new LinkedHashSet<String>();
    for (Iterator<NamedExpr> iterator = block.namedExprsMgr.getIteratorForUnevaluatedExprs(); iterator.hasNext();) {
      NamedExpr rawTarget = iterator.next();
      try {
        EvalNode evalNode = exprAnnotator.createEvalNode(context, rawTarget.getExpr(),
            NameResolvingMode.RELS_ONLY);
        if (checkIfBeEvaluatedAtRelation(block, evalNode, scanNode)) {
          block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);
          newlyEvaluatedExprsReferences.add(rawTarget.getAlias()); // newly added expr
        }
      } catch (UndefinedColumnException ve) {
        // Deliberately ignored: expression needs columns from other relations.
      }
    }

    // Assume that each unique expr is evaluated once.
    LinkedHashSet<Target> targets = createFieldTargetsFromRelation(block, scanNode, newlyEvaluatedExprsReferences);

    // The fact the some expr is included in newlyEvaluatedExprsReferences means that it is already evaluated.
    // So, we get a raw expression and then creates a target.
    for (String reference : newlyEvaluatedExprsReferences) {
      NamedExpr refrer = block.namedExprsMgr.getNamedExpr(reference);
      EvalNode evalNode = exprAnnotator.createEvalNode(context, refrer.getExpr(), NameResolvingMode.RELS_ONLY);
      targets.add(new Target(evalNode, reference));
    }

    scanNode.setTargets(targets.toArray(new Target[targets.size()]));

    verifyProjectedFields(block, scanNode);
    return scanNode;
  }
private static LinkedHashSet<Target> createFieldTargetsFromRelation(QueryBlock block, RelationNode relationNode,
Set<String> newlyEvaluatedRefNames) {
LinkedHashSet<Target> targets = Sets.newLinkedHashSet();
for (Column column : relationNode.getLogicalSchema().getAllColumns()) {
// TODO - Currently, EvalNode has DataType as a return type. So, RECORD cannot be used for any target.
// The following line is a kind of hack, preventing RECORD to be used for target in the logical planning phase.
// This problem should be resolved after TAJO-1402.
if (column.getTypeDesc().getDataType().getType() == TajoDataTypes.Type.RECORD) {
continue;
}
String aliasName = block.namedExprsMgr.checkAndGetIfAliasedColumn(column.getQualifiedName());
if (aliasName != null) {
targets.add(new Target(new FieldEval(column), aliasName));
newlyEvaluatedRefNames.remove(aliasName);
} else {
targets.add(new Target(new FieldEval(column)));
}
}
return targets;
}
  /**
   * Refreshes the byte-size statistic of a file-backed table by asking the file
   * system for its content summary. SYSTEM and FAKEFILE stores are skipped, as are
   * non-file storage types. Failures are logged and swallowed so that planning can
   * proceed with possibly stale statistics.
   */
  private void updatePhysicalInfo(PlanContext planContext, TableDesc desc) {
    if (desc.getUri() != null &&
        !desc.getMeta().getStoreType().equals("SYSTEM") &&
        !desc.getMeta().getStoreType().equals("FAKEFILE") && // FAKEFILE is used for test
        PlannerUtil.isFileStorageType(desc.getMeta().getStoreType())) {
      try {
        Path path = new Path(desc.getUri());
        FileSystem fs = path.getFileSystem(planContext.queryContext.getConf());
        FileStatus status = fs.getFileStatus(path);
        if (desc.getStats() != null && (status.isDirectory() || status.isFile())) {
          ContentSummary summary = fs.getContentSummary(path);
          if (summary != null) {
            long volume = summary.getLength();
            desc.getStats().setNumBytes(volume);
          }
        }
      } catch (Throwable t) {
        // best-effort: a stats refresh failure must never fail query planning
        LOG.warn(t, t);
      }
    }
  }
  /** Plans a table subquery appearing as a FROM-clause primary; delegates to the common routine. */
  @Override
  public TableSubQueryNode visitTableSubQuery(PlanContext context, Stack<Expr> stack, TablePrimarySubQuery expr)
      throws TajoException {
    return visitCommonTableSubquery(context, stack, expr);
  }
  /** Plans a simple (scalar/derived) table subquery; delegates to the common routine. */
  @Override
  public TableSubQueryNode visitSimpleTableSubquery(PlanContext context, Stack<Expr> stack, SimpleTableSubquery expr)
      throws TajoException {
    return visitCommonTableSubquery(context, stack, expr);
  }
private TableSubQueryNode visitCommonTableSubquery(PlanContext context, Stack<Expr> stack, CommonSubquery expr)
throws TajoException {
QueryBlock currentBlock = context.queryBlock;
QueryBlock childBlock = context.plan.getBlock(context.plan.getBlockNameByExpr(expr.getSubQuery()));
context.plan.connectBlocks(childBlock, currentBlock, BlockType.TableSubQuery);
PlanContext newContext = new PlanContext(context, childBlock);
context.plan.connectBlocks(childBlock, context.queryBlock, BlockType.TableSubQuery);
LogicalNode child = visit(newContext, new Stack<Expr>(), expr.getSubQuery());
TableSubQueryNode subQueryNode = currentBlock.getNodeFromExpr(expr);
subQueryNode.setSubQuery(child);
setTargetOfTableSubQuery(context, currentBlock, subQueryNode);
return subQueryNode;
}
  /**
   * Determines the targets of a table subquery node: one field target per subquery
   * column, plus any expression required by upper nodes that can already be
   * evaluated against this subquery's columns.
   */
  private void setTargetOfTableSubQuery (PlanContext context, QueryBlock block, TableSubQueryNode subQueryNode)
      throws TajoException {
    // Add additional expressions required in upper nodes.
    Set<String> newlyEvaluatedExprs = TUtil.newHashSet();
    for (NamedExpr rawTarget : block.namedExprsMgr.getAllNamedExprs()) {
      try {
        EvalNode evalNode = exprAnnotator.createEvalNode(context, rawTarget.getExpr(),
            NameResolvingMode.RELS_ONLY);
        if (checkIfBeEvaluatedAtRelation(block, evalNode, subQueryNode)) {
          block.namedExprsMgr.markAsEvaluated(rawTarget.getAlias(), evalNode);
          newlyEvaluatedExprs.add(rawTarget.getAlias()); // newly added expr
        }
      } catch (UndefinedColumnException ve) {
        // Deliberately ignored: expression cannot be resolved against this subquery.
      }
    }

    // Assume that each unique expr is evaluated once.
    LinkedHashSet<Target> targets = createFieldTargetsFromRelation(block, subQueryNode, newlyEvaluatedExprs);
    for (String newAddedExpr : newlyEvaluatedExprs) {
      targets.add(block.namedExprsMgr.getTarget(newAddedExpr, true));
    }
    subQueryNode.setTargets(targets.toArray(new Target[targets.size()]));
  }
/*===============================================================================================
SET OPERATION SECTION
===============================================================================================*/
@Override
public LogicalNode visitUnion(PlanContext context, Stack<Expr> stack, SetOperation setOperation)
throws TajoException {
UnionNode unionNode = (UnionNode)buildSetPlan(context, stack, setOperation);
LogicalNode resultingNode = unionNode;
/**
* if the given node is Union (Distinct), it adds group by node
* change
* from
* union
*
* to
* projection
* |
* group by
* |
* table subquery
* |
* union
*/
if (unionNode.isDistinct()) {
return insertProjectionGroupbyBeforeSetOperation(context, unionNode);
}
return resultingNode;
}
  /**
   * Wraps a distinct set operation in (projection - group by - table subquery) to
   * remove duplicate rows, and rewires the query-block graph so the set operation
   * lives in its own block.
   *
   * @return the projection node that replaces the set operation as the parent's child
   */
  private ProjectionNode insertProjectionGroupbyBeforeSetOperation(PlanContext context,
                                                                   SetOperationNode setOperationNode)
      throws TajoException {
    QueryBlock currentBlock = context.queryBlock;

    // make table subquery node which has set operation as its subquery
    TableSubQueryNode setOpTableSubQueryNode = context.plan.createNode(TableSubQueryNode.class);
    setOpTableSubQueryNode.init(CatalogUtil.buildFQName(context.queryContext.get(SessionVars.CURRENT_DATABASE),
        context.generateUniqueSubQueryName()), setOperationNode);
    setTargetOfTableSubQuery(context, currentBlock, setOpTableSubQueryNode);
    currentBlock.registerNode(setOpTableSubQueryNode);
    currentBlock.addRelation(setOpTableSubQueryNode);

    Schema setOpSchema = setOpTableSubQueryNode.getOutSchema();
    Target[] setOpTarget = setOpTableSubQueryNode.getTargets();

    // make group by node whose grouping keys are all columns of set operation
    GroupbyNode setOpGroupbyNode = context.plan.createNode(GroupbyNode.class);
    setOpGroupbyNode.setInSchema(setOpSchema);
    setOpGroupbyNode.setGroupingColumns(setOpSchema.toArray());
    setOpGroupbyNode.setTargets(setOpTarget);
    setOpGroupbyNode.setChild(setOpTableSubQueryNode);
    currentBlock.registerNode(setOpGroupbyNode);

    // make projection node which projects all the union columns
    ProjectionNode setOpProjectionNode = context.plan.createNode(ProjectionNode.class);
    setOpProjectionNode.setInSchema(setOpSchema);
    setOpProjectionNode.setTargets(setOpTarget);
    setOpProjectionNode.setChild(setOpGroupbyNode);
    currentBlock.registerNode(setOpProjectionNode);

    // changing query block chain: at below, ( ) indicates query block
    // (... + set operation ) - (...) ==> (... + projection + group by + table subquery) - (set operation) - (...)
    QueryBlock setOpBlock = context.plan.newQueryBlock();
    setOpBlock.registerNode(setOperationNode);
    setOpBlock.setRoot(setOperationNode);

    QueryBlock leftBlock = context.plan.getBlock(setOperationNode.getLeftChild());
    QueryBlock rightBlock = context.plan.getBlock(setOperationNode.getRightChild());
    context.plan.disconnectBlocks(leftBlock, context.queryBlock);
    context.plan.disconnectBlocks(rightBlock, context.queryBlock);
    context.plan.connectBlocks(setOpBlock, context.queryBlock, BlockType.TableSubQuery);
    context.plan.connectBlocks(leftBlock, setOpBlock, BlockType.TableSubQuery);
    context.plan.connectBlocks(rightBlock, setOpBlock, BlockType.TableSubQuery);

    // projection node (not original set operation node) will be a new child of parent node
    return setOpProjectionNode;
  }
  /** Plans an EXCEPT set operation; delegates to the common set-plan routine. */
  @Override
  public LogicalNode visitExcept(PlanContext context, Stack<Expr> stack, SetOperation setOperation)
      throws TajoException {
    return buildSetPlan(context, stack, setOperation);
  }
  /** Plans an INTERSECT set operation; delegates to the common set-plan routine. */
  @Override
  public LogicalNode visitIntersect(PlanContext context, Stack<Expr> stack, SetOperation setOperation)
      throws TajoException {
    return buildSetPlan(context, stack, setOperation);
  }
private LogicalNode buildSetPlan(PlanContext context, Stack<Expr> stack, SetOperation setOperation)
throws TajoException {
// 1. Init Phase
LogicalPlan plan = context.plan;
QueryBlock block = context.queryBlock;
////////////////////////////////////////////////////////
// Visit and Build Left Child Plan
////////////////////////////////////////////////////////
QueryBlock leftBlock = context.plan.getBlockByExpr(setOperation.getLeft());
PlanContext leftContext = new PlanContext(context, leftBlock);
stack.push(setOperation);
LogicalNode leftChild = visit(leftContext, new Stack<Expr>(), setOperation.getLeft());
stack.pop();
// Connect left child and current blocks
context.plan.connectBlocks(leftContext.queryBlock, context.queryBlock, BlockType.TableSubQuery);
////////////////////////////////////////////////////////
// Visit and Build Right Child Plan
////////////////////////////////////////////////////////
QueryBlock rightBlock = context.plan.getBlockByExpr(setOperation.getRight());
PlanContext rightContext = new PlanContext(context, rightBlock);
stack.push(setOperation);
LogicalNode rightChild = visit(rightContext, new Stack<Expr>(), setOperation.getRight());
stack.pop();
// Connect right child and current blocks
context.plan.connectBlocks(rightContext.queryBlock, context.queryBlock, BlockType.TableSubQuery);
BinaryNode setOp;
if (setOperation.getType() == OpType.Union) {
setOp = block.getNodeFromExpr(setOperation);
} else if (setOperation.getType() == OpType.Except) {
setOp = block.getNodeFromExpr(setOperation);
} else if (setOperation.getType() == OpType.Intersect) {
setOp = block.getNodeFromExpr(setOperation);
} else {
throw new TajoInternalError("Unknown set type: " + setOperation.getType());
}
setOp.setLeftChild(leftChild);
setOp.setRightChild(rightChild);
// An union statement can be derived from two query blocks.
// For one union statement between both relations, we can ensure that each corresponding data domain of both
// relations are the same. However, if necessary, the schema of left query block will be used as a base schema.
Target [] leftStrippedTargets = PlannerUtil.stripTarget(
PlannerUtil.schemaToTargets(leftBlock.getRoot().getOutSchema()));
setOp.setInSchema(leftChild.getOutSchema());
Schema outSchema = PlannerUtil.targetToSchema(leftStrippedTargets);
setOp.setOutSchema(outSchema);
return setOp;
}
/*===============================================================================================
INSERT SECTION
===============================================================================================*/
public LogicalNode visitInsert(PlanContext context, Stack<Expr> stack, Insert expr) throws TajoException {
stack.push(expr);
LogicalNode subQuery = super.visitInsert(context, stack, expr);
stack.pop();
InsertNode insertNode = context.queryBlock.getNodeFromExpr(expr);
insertNode.setOverwrite(expr.isOverwrite());
insertNode.setSubQuery(subQuery);
if (expr.hasTableName()) { // INSERT (OVERWRITE) INTO TABLE ...
return buildInsertIntoTablePlan(context, insertNode, expr);
} else if (expr.hasLocation()) { // INSERT (OVERWRITE) INTO LOCATION ...
return buildInsertIntoLocationPlan(context, insertNode, expr);
} else {
throw new IllegalStateException("Invalid Query");
}
}
  /**
   * Builds an InsertNode with a target table.
   *
   * ex) INSERT OVERWRITE INTO TABLE ...
   * <br />
   *
   * We use the following terms, such as target database, target table, and target columns:
   * <pre>
   *  INSERT INTO    [DATABASE_NAME.]TB_NAME          (col1, col2)          SELECT    c1, c2        FROM ...
   *                  ^^^^^^^^^^^^^^ ^^^^^^^          ^^^^^^^^^^^^                  ^^^^^^^^^^^^
   *               target database target table  target columns (or schema)  projected columns (or schema)
   * </pre>
   */
  private InsertNode buildInsertIntoTablePlan(PlanContext context, InsertNode insertNode, Insert expr)
      throws TajoException {
    // Get and set a target table
    String databaseName;
    String tableName;
    if (CatalogUtil.isFQTableName(expr.getTableName())) {
      databaseName = CatalogUtil.extractQualifier(expr.getTableName());
      tableName = CatalogUtil.extractSimpleName(expr.getTableName());
    } else {
      // unqualified names are resolved against the session's current database
      databaseName = context.queryContext.get(SessionVars.CURRENT_DATABASE);
      tableName = expr.getTableName();
    }
    TableDesc desc = catalog.getTableDesc(databaseName, tableName);
    insertNode.setTargetTable(desc);

    //
    // When we use 'INSERT (OVERWRITE) INTO TABLE' statements, there are two cases.
    //
    // First, when a user specified target columns
    // INSERT (OVERWRITE)? INTO table_name (col1 type, col2 type) SELECT ...
    //
    // Second, when a user did not specify target columns
    // INSERT (OVERWRITE)? INTO table_name SELECT ...
    //
    // In the former case, target columns' schema and corresponding projected columns' schema
    // must be equivalent or be available to cast implicitly.
    //
    // In the latter case, the target table's schema and projected column's
    // schema of select clause can be different to each other. In this case,
    // we use only a sequence of preceding columns of target table's schema
    // as target columns.
    //
    // For example, consider a target table and an 'insert into' query are given as follows:
    //
    // CREATE TABLE TB1                  (col1 int,  col2 int, col3 long);
    //                                      ||          ||
    // INSERT OVERWRITE INTO TB1 SELECT  order_key,  part_num               FROM ...
    //
    // In this example, only col1 and col2 are used as target columns.

    if (expr.hasTargetColumns()) { // when a user specified target columns
      if (expr.getTargetColumns().length > insertNode.getChild().getOutSchema().size()) {
        throw makeSyntaxError("Target columns and projected columns are mismatched to each other");
      }
      // See PreLogicalPlanVerifier.visitInsert.
      // It guarantees that the equivalence between the numbers of target and projected columns.
      ColumnReferenceExpr [] targets = expr.getTargetColumns();
      Schema targetColumns = new Schema();
      for (int i = 0; i < targets.length; i++) {
        // nested-field references use '/' as the canonical separator in the catalog
        Column targetColumn = desc.getLogicalSchema().getColumn(targets[i].getCanonicalName().replace(".", "/"));
        if (targetColumn == null) {
          throw makeSyntaxError("column '" + targets[i] + "' of relation '" + desc.getName() + "' does not exist");
        }
        targetColumns.addColumn(targetColumn);
      }
      insertNode.setTargetSchema(targetColumns);
      insertNode.setOutSchema(targetColumns);
      buildProjectedInsert(context, insertNode);

    } else { // when a user did not specify target columns
      // The output schema of select clause determines the target columns.
      Schema tableSchema = desc.getLogicalSchema();
      Schema projectedSchema = insertNode.getChild().getOutSchema();

      Schema targetColumns = new Schema();
      for (int i = 0; i < projectedSchema.size(); i++) {
        targetColumns.addColumn(tableSchema.getColumn(i));
      }
      insertNode.setTargetSchema(targetColumns);
      buildProjectedInsert(context, insertNode);
    }

    if (desc.hasPartition()) {
      insertNode.setPartitionMethod(desc.getPartitionMethod());
    }
    return insertNode;
  }
private void buildProjectedInsert(PlanContext context, InsertNode insertNode) {
Schema tableSchema = insertNode.getTableSchema();
Schema targetColumns = insertNode.getTargetSchema();
LogicalNode child = insertNode.getChild();
if (child.getType() == NodeType.UNION) {
child = makeProjectionForInsertUnion(context, insertNode);
}
if (child instanceof Projectable) {
Projectable projectionNode = insertNode.getChild();
// Modifying projected columns by adding NULL constants
// It is because that table appender does not support target columns to be written.
List<Target> targets = TUtil.newList();
for (Column column : tableSchema.getAllColumns()) {
int idxInProjectionNode = targetColumns.getIndex(column);
// record type itself cannot be projected yet.
if (column.getDataType().getType() == TajoDataTypes.Type.RECORD) {
continue;
}
if (idxInProjectionNode >= 0 && idxInProjectionNode < projectionNode.getTargets().length) {
targets.add(projectionNode.getTargets()[idxInProjectionNode]);
} else {
targets.add(new Target(new ConstEval(NullDatum.get()), column.getSimpleName()));
}
}
projectionNode.setTargets(targets.toArray(new Target[targets.size()]));
insertNode.setInSchema(projectionNode.getOutSchema());
insertNode.setOutSchema(projectionNode.getOutSchema());
insertNode.setProjectedSchema(PlannerUtil.targetToSchema(targets));
} else {
throw new RuntimeException("Wrong child node type: " + child.getType() + " for insert");
}
}
  /**
   * Wraps a UNION child of an InsertNode in (projection - table subquery) and
   * moves the union into its own query block, rewiring the block graph. The new
   * projection node becomes the insert node's child and is returned.
   */
  private ProjectionNode makeProjectionForInsertUnion(PlanContext context, InsertNode insertNode) {
    LogicalNode child = insertNode.getChild();
    // add (projection - subquery) to RootBlock and create new QueryBlock for UnionNode
    TableSubQueryNode subQueryNode = context.plan.createNode(TableSubQueryNode.class);
    subQueryNode.init(context.queryBlock.getName(), child);
    subQueryNode.setTargets(PlannerUtil.schemaToTargets(subQueryNode.getOutSchema()));

    ProjectionNode projectionNode = context.plan.createNode(ProjectionNode.class);
    projectionNode.setChild(subQueryNode);
    projectionNode.setInSchema(subQueryNode.getInSchema());
    projectionNode.setTargets(subQueryNode.getTargets());

    context.queryBlock.registerNode(projectionNode);
    context.queryBlock.registerNode(subQueryNode);

    // add child QueryBlock to the UnionNode's QueryBlock
    UnionNode unionNode = (UnionNode)child;
    context.queryBlock.unregisterNode(unionNode);

    QueryBlock unionBlock = context.plan.newQueryBlock();
    unionBlock.registerNode(unionNode);
    unionBlock.setRoot(unionNode);

    QueryBlock leftBlock = context.plan.getBlock(unionNode.getLeftChild());
    QueryBlock rightBlock = context.plan.getBlock(unionNode.getRightChild());

    // reconnect: both union children hang off the new union block, which in turn
    // hangs off the current block as a table subquery
    context.plan.disconnectBlocks(leftBlock, context.queryBlock);
    context.plan.disconnectBlocks(rightBlock, context.queryBlock);
    context.plan.connectBlocks(unionBlock, context.queryBlock, BlockType.TableSubQuery);
    context.plan.connectBlocks(leftBlock, unionBlock, BlockType.TableSubQuery);
    context.plan.connectBlocks(rightBlock, unionBlock, BlockType.TableSubQuery);

    // set InsertNode's child with ProjectionNode which is created.
    insertNode.setChild(projectionNode);
    return projectionNode;
  }
  /**
   * Builds an InsertNode with a location.
   *
   * ex) INSERT OVERWRITE INTO LOCATION 'hdfs://....' ..
   */
  private InsertNode buildInsertIntoLocationPlan(PlanContext context, InsertNode insertNode, Insert expr) {
    // INSERT (OVERWRITE)? INTO LOCATION path (USING file_type (param_clause)?)? query_expression
    LogicalNode child = insertNode.getChild();

    if (child.getType() == NodeType.UNION) {
      // a union must first be wrapped in (projection - table subquery)
      child = makeProjectionForInsertUnion(context, insertNode);
    }

    // the written schema is exactly the child's output schema
    Schema childSchema = child.getOutSchema();
    insertNode.setInSchema(childSchema);
    insertNode.setOutSchema(childSchema);
    insertNode.setTableSchema(childSchema);

    // Rewrite: a scheme-less location is resolved under the default tablespace root.
    URI targetUri = URI.create(expr.getLocation());
    if (targetUri.getScheme() == null) {
      targetUri = URI.create(context.getQueryContext().get(QueryVars.DEFAULT_SPACE_ROOT_URI) + "/" + targetUri);
    }
    insertNode.setUri(targetUri);

    if (expr.hasStorageType()) {
      insertNode.setStorageType(CatalogUtil.getBackwardCompitablityStoreType(expr.getStorageType()));
    }
    if (expr.hasParams()) {
      KeyValueSet options = new KeyValueSet();
      options.putAll(expr.getParams());
      insertNode.setOptions(options);
    }
    return insertNode;
  }
/*===============================================================================================
Data Definition Language (DDL) SECTION
===============================================================================================*/
@Override
public LogicalNode visitCreateDatabase(PlanContext context, Stack<Expr> stack, CreateDatabase expr)
throws TajoException {
CreateDatabaseNode createDatabaseNode = context.queryBlock.getNodeFromExpr(expr);
createDatabaseNode.init(expr.getDatabaseName(), expr.isIfNotExists());
return createDatabaseNode;
}
/**
 * Creates the logical node for {@code DROP DATABASE [IF EXISTS] <name>}.
 */
@Override
public LogicalNode visitDropDatabase(PlanContext context, Stack<Expr> stack, DropDatabase expr)
    throws TajoException {
  final DropDatabaseNode node = context.queryBlock.getNodeFromExpr(expr);
  node.init(expr.getDatabaseName(), expr.isIfExists());
  return node;
}
/**
 * Fills in a {@link CreateTableNode} for {@code CREATE TABLE <name> LIKE <parent>}:
 * the schema, partition method, storage type, options, and external flag are all
 * copied from the parent table's catalog entry.
 *
 * @throws UndefinedTableException if the parent table does not exist in the catalog
 */
public LogicalNode handleCreateTableLike(PlanContext context, CreateTable expr, CreateTableNode createTableNode)
    throws TajoException {
  String parentTableName = expr.getLikeParentTableName();
  // Qualify an unqualified parent name with the session's current database.
  if (!CatalogUtil.isFQTableName(parentTableName)) {
    parentTableName = CatalogUtil.buildFQName(context.queryContext.get(SessionVars.CURRENT_DATABASE),
        parentTableName);
  }
  TableDesc baseTable = catalog.getTableDesc(parentTableName);
  if (baseTable == null) {
    throw new UndefinedTableException(parentTableName);
  }
  PartitionMethodDesc partitionDesc = baseTable.getPartitionMethod();
  createTableNode.setTableSchema(baseTable.getSchema());
  createTableNode.setPartitionMethod(partitionDesc);
  createTableNode.setStorageType(CatalogUtil.getBackwardCompitablityStoreType(baseTable.getMeta().getStoreType()));
  createTableNode.setOptions(baseTable.getMeta().getOptions());
  createTableNode.setExternal(baseTable.isExternal());
  // Only an external parent carries its own location worth copying; managed tables
  // get a fresh URI when the new table is materialized.
  if (baseTable.isExternal()) {
    createTableNode.setUri(baseTable.getUri());
  }
  return createTableNode;
}
/**
 * Builds the logical node for {@code CREATE TABLE}, covering four variants:
 * {@code CREATE TABLE ... LIKE}, {@code CREATE TABLE ... AS SELECT} (CTAS),
 * partitioned tables, and empty (possibly external) tables.
 */
@Override
public LogicalNode visitCreateTable(PlanContext context, Stack<Expr> stack, CreateTable expr)
    throws TajoException {
  CreateTableNode createTableNode = context.queryBlock.getNodeFromExpr(expr);
  createTableNode.setIfNotExists(expr.isIfNotExists());
  // Set a table name to be created.
  if (CatalogUtil.isFQTableName(expr.getTableName())) {
    createTableNode.setTableName(expr.getTableName());
  } else {
    createTableNode.setTableName(
        CatalogUtil.buildFQName(context.queryContext.get(SessionVars.CURRENT_DATABASE), expr.getTableName()));
  }
  // This is CREATE TABLE <tablename> LIKE <parentTable>
  if (expr.getLikeParentTableName() != null) {
    return handleCreateTableLike(context, expr, createTableNode);
  }
  if (expr.hasTableSpaceName()) {
    createTableNode.setTableSpaceName(expr.getTableSpaceName());
  }
  createTableNode.setUri(getCreatedTableURI(context, expr));
  if (expr.hasStorageType()) { // If storage type (using clause) is specified
    createTableNode.setStorageType(CatalogUtil.getBackwardCompitablityStoreType(expr.getStorageType()));
  } else { // otherwise, default type
    createTableNode.setStorageType(BuiltinStorages.TEXT);
  }
  // Set default storage properties to table
  createTableNode.setOptions(CatalogUtil.newDefaultProperty(createTableNode.getStorageType()));
  // Priority to apply table properties
  // 1. Explicit table properties specified in WITH clause
  // 2. Session variables
  // Set session variables to properties
  TablePropertyUtil.setTableProperty(context.queryContext, createTableNode);
  // Set table property specified in WITH clause and it will override all others
  if (expr.hasParams()) {
    createTableNode.getOptions().putAll(expr.getParams());
  }
  if (expr.hasPartition()) {
    if (expr.getPartitionMethod().getPartitionType().equals(PartitionType.COLUMN)) {
      createTableNode.setPartitionMethod(getPartitionMethod(context, expr.getTableName(), expr.getPartitionMethod()));
    } else {
      // BUG FIX: the message was built as String.format("PartitonType " + type) — the
      // value was concatenated into the *format string*, so a '%' in the type name would
      // throw an IllegalFormatException, and "Partiton" was misspelled. Use a proper
      // format specifier instead.
      throw ExceptionUtil.makeNotSupported(
          String.format("PartitionType %s", expr.getPartitionMethod().getPartitionType()));
    }
  }
  if (expr.hasSubQuery()) { // CREATE TABLE .. AS SELECT
    stack.add(expr);
    LogicalNode subQuery = visit(context, stack, expr.getSubQuery());
    stack.pop();
    createTableNode.setChild(subQuery);
    createTableNode.setInSchema(subQuery.getOutSchema());
    // If the table schema is defined
    // ex) CREATE TABLE tbl(col1 type, col2 type) AS SELECT ...
    if (expr.hasTableElements()) {
      createTableNode.setOutSchema(convertTableElementsSchema(expr.getTableElements()));
      createTableNode.setTableSchema(convertTableElementsSchema(expr.getTableElements()));
    } else {
      // if no table definition, the select clause's output schema will be used.
      // ex) CREATE TABLE tbl AS SELECT ...
      if (expr.hasPartition()) {
        PartitionMethodDesc partitionMethod = createTableNode.getPartitionMethod();
        Schema queryOutputSchema = subQuery.getOutSchema();
        Schema partitionExpressionSchema = partitionMethod.getExpressionSchema();
        if (partitionMethod.getPartitionType() == CatalogProtos.PartitionType.COLUMN &&
            queryOutputSchema.size() < partitionExpressionSchema.size()) {
          throw makeSyntaxError("Partition columns cannot be more than table columns.");
        }
        // Partition columns occupy the tail of the query output; the table schema keeps
        // only the leading non-partition columns.
        Schema tableSchema = new Schema();
        for (int i = 0; i < queryOutputSchema.size() - partitionExpressionSchema.size(); i++) {
          tableSchema.addColumn(queryOutputSchema.getColumn(i));
        }
        createTableNode.setOutSchema(tableSchema);
        createTableNode.setTableSchema(tableSchema);
      } else {
        // Convert the schema of subquery into the target table's one.
        Schema schema = new Schema(subQuery.getOutSchema());
        schema.setQualifier(createTableNode.getTableName());
        createTableNode.setOutSchema(schema);
        createTableNode.setTableSchema(schema);
      }
    }
    return createTableNode;
  } else { // if CREATE AN EMPTY TABLE
    Schema tableSchema = convertColumnsToSchema(expr.getTableElements());
    createTableNode.setTableSchema(tableSchema);
    if (expr.isExternal()) {
      createTableNode.setExternal(true);
    }
    return createTableNode;
  }
}
/**
 * Return a table uri to be created
 *
 * <p>NOTE(review): when an explicit location lacks a scheme it is resolved as
 * {@code DEFAULT_SPACE_ROOT_URI + location} with no "/" separator, whereas the
 * INSERT-INTO-LOCATION path inserts one — presumably locations always start with
 * "/"; confirm against the parser.</p>
 *
 * @param context PlanContext
 * @param createTable An algebraic expression for create table
 * @return a Table uri to be created on a given table space
 */
private URI getCreatedTableURI(PlanContext context, CreateTable createTable) {
  if (createTable.hasLocation()) {
    URI tableUri = URI.create(createTable.getLocation());
    if (tableUri.getScheme() == null) { // if a given table URI is a just path, the default tablespace will be added.
      tableUri = URI.create(context.queryContext.get(QueryVars.DEFAULT_SPACE_ROOT_URI) + createTable.getLocation());
    }
    return tableUri;
  } else {
    // No explicit location: derive the URI from the tablespace, database, and simple table name.
    String tableName = createTable.getTableName();
    String databaseName = CatalogUtil.isFQTableName(tableName) ?
        CatalogUtil.extractQualifier(tableName) : context.queryContext.get(SessionVars.CURRENT_DATABASE);
    return storage.getTableURI(
        createTable.getTableSpaceName(), databaseName, CatalogUtil.extractSimpleName(tableName));
  }
}
/**
 * Converts a parsed partition description into a catalog {@link PartitionMethodDesc}.
 * Only column partitioning is supported.
 *
 * @throws NotImplementedException for any non-column partition type
 */
private PartitionMethodDesc getPartitionMethod(PlanContext context,
                                               String tableName,
                                               CreateTable.PartitionMethodDescExpr expr) throws TajoException {
  if (expr.getPartitionType() != PartitionType.COLUMN) {
    throw new NotImplementedException("partition type '" + expr.getPartitionType() + "'");
  }
  final CreateTable.ColumnPartition partition = (CreateTable.ColumnPartition) expr;
  final String columnList = Joiner.on(',').join(partition.getColumns());
  return new PartitionMethodDesc(context.queryContext.get(SessionVars.CURRENT_DATABASE), tableName,
      CatalogProtos.PartitionType.COLUMN, columnList, convertColumnsToSchema(partition.getColumns()));
}
/**
 * It transforms table definition elements to schema.
 *
 * <p>Delegates to {@link #convertTableElementsSchema(ColumnDefinition[])}, which had the
 * byte-for-byte identical loop; keeping a single implementation prevents the two copies
 * from drifting apart.</p>
 *
 * @param elements to be transformed
 * @return schema transformed from table definition elements
 */
private Schema convertColumnsToSchema(ColumnDefinition[] elements) {
  return convertTableElementsSchema(elements);
}
/**
 * Transforms table definition elements into a {@link Schema}, one column per
 * definition, preserving input order.
 *
 * @param elements column definitions to convert
 * @return the resulting schema
 */
private static Schema convertTableElementsSchema(ColumnDefinition[] elements) {
  final Schema result = new Schema();
  for (int i = 0; i < elements.length; i++) {
    result.addColumn(convertColumn(elements[i]));
  }
  return result;
}
/**
 * Extracts the referenced column names from an array of column references.
 *
 * @param columnReferenceExprs column references to read names from
 * @return the column names, in the same order as the input
 */
private static String[] convertColumnsToStrings(ColumnReferenceExpr[] columnReferenceExprs) {
  final String[] names = new String[columnReferenceExprs.length];
  int idx = 0;
  for (ColumnReferenceExpr ref : columnReferenceExprs) {
    names[idx++] = ref.getName();
  }
  return names;
}
/**
 * It transforms Expr array to String array.
 *
 * <p>NOTE(review): each element is cast to {@link LiteralValue} unchecked, so a
 * non-literal expression here raises {@link ClassCastException} — presumably the
 * grammar only allows literals in this position (ALTER TABLE partition values);
 * confirm against the callers.</p>
 *
 * @param exprs literal expressions to read values from
 * @return the literal values, in input order
 */
private static String[] convertExprsToStrings(Expr[] exprs) {
  int exprCount = exprs.length;
  String[] values = new String[exprCount];
  for(int i = 0; i < exprCount; i++) {
    LiteralValue expr = (LiteralValue)exprs[i];
    values[i] = expr.getValue();
  }
  return values;
}
/** Builds a catalog {@link Column} from a parsed column definition (name + converted type). */
private static Column convertColumn(ColumnDefinition columnDefinition) {
  final TypeDesc typeDesc = convertDataType(columnDefinition);
  return new Column(columnDefinition.getColumnName(), typeDesc);
}
/**
 * Converts a parsed data type expression into a catalog {@link TypeDesc}.
 *
 * <p>CHAR without an explicit length defaults to length 1; RECORD types are converted
 * recursively into a nested schema instead of a scalar proto type.</p>
 *
 * @param dataType parsed type expression
 * @return the corresponding type descriptor
 */
public static TypeDesc convertDataType(DataTypeExpr dataType) {
  TajoDataTypes.Type type = TajoDataTypes.Type.valueOf(dataType.getTypeName());
  TajoDataTypes.DataType.Builder builder = TajoDataTypes.DataType.newBuilder();
  builder.setType(type);
  if (dataType.hasLengthOrPrecision()) {
    builder.setLength(dataType.getLengthOrPrecision());
  } else {
    // CHAR with no explicit length means CHAR(1).
    if (type == TajoDataTypes.Type.CHAR) {
      builder.setLength(1);
    }
  }
  TypeDesc typeDesc;
  if (type == TajoDataTypes.Type.RECORD) {
    // A RECORD is represented by its nested schema, not by the proto builder above.
    Schema nestedRecordSchema = convertTableElementsSchema(dataType.getNestedRecordTypes());
    typeDesc = new TypeDesc(nestedRecordSchema);
  } else {
    typeDesc = new TypeDesc(builder.build());
  }
  return typeDesc;
}
/**
 * Creates the logical node for {@code DROP TABLE}, qualifying the table name with the
 * session's current database when it is not already fully qualified.
 */
@Override
public LogicalNode visitDropTable(PlanContext context, Stack<Expr> stack, DropTable dropTable) {
  final DropTableNode node = context.queryBlock.getNodeFromExpr(dropTable);
  final String tableName = dropTable.getTableName();
  final String qualifiedName = CatalogUtil.isFQTableName(tableName)
      ? tableName
      : CatalogUtil.buildFQName(context.queryContext.get(SessionVars.CURRENT_DATABASE), tableName);
  node.init(qualifiedName, dropTable.isIfExists(), dropTable.isPurge());
  return node;
}
/**
 * Creates the logical node for {@code ALTER TABLESPACE ... LOCATION ...}.
 */
@Override // added for consistency: every sibling visit* method in this section overrides the base visitor
public LogicalNode visitAlterTablespace(PlanContext context, Stack<Expr> stack, AlterTablespace alterTablespace) {
  AlterTablespaceNode alter = context.queryBlock.getNodeFromExpr(alterTablespace);
  alter.setTablespaceName(alterTablespace.getTablespaceName());
  alter.setLocation(alterTablespace.getLocation());
  return alter;
}
/**
 * Creates the logical node for {@code ALTER TABLE}. The parsed expression carries only
 * the fields relevant to its specific sub-command (rename table/column, add column,
 * partition operations, set properties); all of them are copied onto the node and the
 * op type decides which ones are consumed downstream.
 */
@Override
public LogicalNode visitAlterTable(PlanContext context, Stack<Expr> stack, AlterTable alterTable) {
  AlterTableNode alterTableNode = context.queryBlock.getNodeFromExpr(alterTable);
  alterTableNode.setTableName(alterTable.getTableName());
  alterTableNode.setNewTableName(alterTable.getNewTableName());
  alterTableNode.setColumnName(alterTable.getColumnName());
  alterTableNode.setNewColumnName(alterTable.getNewColumnName());
  alterTableNode.setProperties(new KeyValueSet(alterTable.getParams()));
  if (null != alterTable.getAddNewColumn()) {
    alterTableNode.setAddNewColumn(convertColumn(alterTable.getAddNewColumn()));
  }
  // Partition sub-commands (ADD/DROP PARTITION) carry columns, values, and a location.
  if (alterTable.getColumns() != null) {
    alterTableNode.setPartitionColumns(convertColumnsToStrings(alterTable.getColumns()));
  }
  if (alterTable.getValues() != null) {
    alterTableNode.setPartitionValues(convertExprsToStrings(alterTable.getValues()));
  }
  if (alterTable.getLocation() != null) {
    alterTableNode.setLocation(alterTable.getLocation());
  }
  alterTableNode.setPurge(alterTable.isPurge());
  alterTableNode.setIfNotExists(alterTable.isIfNotExists());
  alterTableNode.setIfExists(alterTable.isIfExists());
  alterTableNode.setAlterTableOpType(alterTable.getAlterTableOpType());
  return alterTableNode;
}
/** Returns the default location of an index: {@code <warehouse-dir>/<database>/<indexName>/}. */
private static URI getIndexPath(PlanContext context, String databaseName, String indexName) {
  return new Path(TajoConf.getWarehouseDir(context.queryContext.getConf()),
      databaseName + "/" + indexName + "/").toUri();
}
/**
 * Creates the logical plan node for {@code CREATE INDEX}.
 *
 * <p>Visits the child plan (the indexed relation) first, qualifies the index name with
 * the current database when necessary, normalizes and registers every sort-key
 * expression so that sort specs can reference them by name, and finally fills in the
 * index method, path, and options.</p>
 */
@Override
public LogicalNode visitCreateIndex(PlanContext context, Stack<Expr> stack, CreateIndex createIndex)
    throws TajoException {
  stack.push(createIndex);
  LogicalNode child = visit(context, stack, createIndex.getChild());
  stack.pop();
  QueryBlock block = context.queryBlock;
  CreateIndexNode createIndexNode = block.getNodeFromExpr(createIndex);
  if (CatalogUtil.isFQTableName(createIndex.getIndexName())) {
    createIndexNode.setIndexName(createIndex.getIndexName());
  } else {
    createIndexNode.setIndexName(
        CatalogUtil.buildFQName(context.queryContext.get(SessionVars.CURRENT_DATABASE), createIndex.getIndexName()));
  }
  createIndexNode.setUnique(createIndex.isUnique());
  Sort.SortSpec[] sortSpecs = createIndex.getSortSpecs();
  int sortKeyNum = sortSpecs.length;
  String[] referNames = new String[sortKeyNum];
  ExprNormalizedResult[] normalizedExprList = new ExprNormalizedResult[sortKeyNum];
  // All keys are normalized first, then registered in a second pass — presumably the
  // named-expression manager must see fully-normalized results; confirm before merging.
  for (int i = 0; i < sortKeyNum; i++) {
    normalizedExprList[i] = normalizer.normalize(context, sortSpecs[i].getKey());
  }
  for (int i = 0; i < sortKeyNum; i++) {
    // even if base expressions don't have their name,
    // reference names should be identifiable for the later sort spec creation.
    referNames[i] = block.namedExprsMgr.addExpr(normalizedExprList[i].baseExpr);
    block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].aggExprs);
    block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].scalarExprs);
  }
  createIndexNode.setExternal(createIndex.isExternal());
  // An index is defined over exactly one relation.
  Collection<RelationNode> relations = block.getRelations();
  assert relations.size() == 1;
  createIndexNode.setKeySortSpecs(relations.iterator().next().getLogicalSchema(),
      annotateSortSpecs(block, referNames, sortSpecs));
  createIndexNode.setIndexMethod(IndexMethod.valueOf(createIndex.getMethodSpec().getName().toUpperCase()));
  // External indexes live at a user-supplied path; internal ones under the warehouse dir.
  if (createIndex.isExternal()) {
    createIndexNode.setIndexPath(new Path(createIndex.getIndexPath()).toUri());
  } else {
    createIndexNode.setIndexPath(
        getIndexPath(context, context.queryContext.get(SessionVars.CURRENT_DATABASE), createIndex.getIndexName()));
  }
  if (createIndex.getParams() != null) {
    KeyValueSet keyValueSet = new KeyValueSet();
    keyValueSet.putAll(createIndex.getParams());
    createIndexNode.setOptions(keyValueSet);
  }
  createIndexNode.setChild(child);
  return createIndexNode;
}
/** Creates the logical node for {@code DROP INDEX <name>}. */
@Override
public LogicalNode visitDropIndex(PlanContext context, Stack<Expr> stack, DropIndex dropIndex) {
  final DropIndexNode node = context.queryBlock.getNodeFromExpr(dropIndex);
  node.setIndexName(dropIndex.getIndexName());
  return node;
}
/** Creates the logical node for {@code TRUNCATE TABLE <name> [, <name> ...]}. */
@Override
public LogicalNode visitTruncateTable(PlanContext context, Stack<Expr> stack, TruncateTable truncateTable)
    throws TajoException {
  final TruncateTableNode node = context.queryBlock.getNodeFromExpr(truncateTable);
  node.setTableNames(truncateTable.getTableNames());
  return node;
}
/*===============================================================================================
Util SECTION
===============================================================================================*/
/**
 * Checks whether {@code evalNode} can be evaluated at the given window-aggregation node:
 * every column it references must be present in the node's input schema, and it must not
 * contain a DISTINCT aggregation function.
 */
public static boolean checkIfBeEvaluatedAtWindowAgg(EvalNode evalNode, WindowAggNode node) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);
  // All referenced columns must be produced by the window agg's input.
  if (!columnRefs.isEmpty() && !node.getInSchema().containsAll(columnRefs)) {
    return false;
  }
  // DISTINCT aggregations require a dedicated plan shape and cannot run here.
  return EvalTreeUtil.findDistinctAggFunction(evalNode).isEmpty();
}
/**
 * Checks whether {@code evalNode} can be evaluated at the given group-by node:
 * every column it references must be present in the node's input schema, and it must
 * not contain a window function (which only a window-agg node can evaluate).
 */
public static boolean checkIfBeEvaluatedAtGroupBy(EvalNode evalNode, GroupbyNode node) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);
  // All referenced columns must be produced by the group-by's input.
  if (!columnRefs.isEmpty() && !node.getInSchema().containsAll(columnRefs)) {
    return false;
  }
  // Window functions must be evaluated by a window-agg node, not a group-by.
  return EvalTreeUtil.findEvalsByType(evalNode, EvalType.WINDOW_FUNCTION).isEmpty();
}
/**
 * Decides whether a predicate can be evaluated while processing the given join.
 * Non-equi theta-join quals are rejected; for outer joins only ON-clause predicates
 * qualify; for inner/cross joins only genuine join predicates qualify.
 *
 * @param isOnPredicate true if the predicate comes from the ON clause (vs. WHERE)
 * @param isTopMostJoin true if no further join sits above this one in the plan
 */
public static boolean isEvaluatableJoinQual(QueryBlock block, EvalNode evalNode, JoinNode node,
                                            boolean isOnPredicate, boolean isTopMostJoin) {
  if (checkIfBeEvaluatedAtJoin(block, evalNode, node, isTopMostJoin)) {
    // Non-equi theta-join predicates cannot be handled by the join executors here.
    if (isNonEquiThetaJoinQual(block, node, evalNode)) {
      return false;
    }
    if (PlannerUtil.isOuterJoinType(node.getJoinType())) {
      /*
       * For outer joins, only predicates which are specified at the on clause can be evaluated during processing join.
       * Other predicates from the where clause must be evaluated after the join.
       * The below code will be modified after improving join operators to keep join filters by themselves (TAJO-1310).
       */
      if (!isOnPredicate) {
        return false;
      }
    } else {
      /*
       * Only join predicates should be evaluated at join if the join type is inner or cross. (TAJO-1445)
       */
      if (!EvalTreeUtil.isJoinQual(block, node.getLeftChild().getOutSchema(), node.getRightChild().getOutSchema(),
          evalNode, false)) {
        return false;
      }
    }
    return true;
  }
  return false;
}
/**
 * Returns true if {@code evalNode} is a join predicate between the given join's inputs
 * but is not a simple equality — i.e., a non-equi theta-join qual.
 */
public static boolean isNonEquiThetaJoinQual(final LogicalPlan.QueryBlock block,
                                             final JoinNode joinNode,
                                             final EvalNode evalNode) {
  // The redundant if/return-true/else/return-false was collapsed into a direct return.
  return EvalTreeUtil.isJoinQual(block, joinNode.getLeftChild().getOutSchema(),
      joinNode.getRightChild().getOutSchema(), evalNode, true)
      && evalNode.getType() != EvalType.EQUAL;
}
/**
 * Checks whether {@code evalNode} can be evaluated at the given join node: it must not
 * contain DISTINCT aggregations or window functions, all referenced columns must be in
 * the join's input schema, and outer-join-sensitive expressions (e.g. case-when) may
 * only run at the topmost join when the block contains an outer join.
 */
public static boolean checkIfBeEvaluatedAtJoin(QueryBlock block, EvalNode evalNode, JoinNode node,
                                               boolean isTopMostJoin) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);
  // DISTINCT aggregations cannot run inside a join.
  if (!EvalTreeUtil.findDistinctAggFunction(evalNode).isEmpty()) {
    return false;
  }
  // Window functions must be evaluated by a window-agg node, not a join.
  if (!EvalTreeUtil.findEvalsByType(evalNode, EvalType.WINDOW_FUNCTION).isEmpty()) {
    return false;
  }
  // Every referenced column must be available in the join's input schema.
  if (!columnRefs.isEmpty() && !node.getInSchema().containsAll(columnRefs)) {
    return false;
  }
  // When a 'case-when' is used with outer join, the case-when expression must be evaluated
  // at the topmost join operator.
  // TODO - It's also valid that case-when is evaluated at the topmost outer operator.
  // But, how can we know there is no further outer join operator after this node?
  if (containsOuterJoin(block) && !isTopMostJoin
      && !EvalTreeUtil.findOuterJoinSensitiveEvals(evalNode).isEmpty()) {
    return false;
  }
  return true;
}
/** Returns true if the query block contains any outer join (left, right, or full). */
public static boolean containsOuterJoin(QueryBlock block) {
  return block.containsJoinType(JoinType.LEFT_OUTER) || block.containsJoinType(JoinType.RIGHT_OUTER) ||
      block.containsJoinType(JoinType.FULL_OUTER);
}
/**
 * It checks if evalNode can be evaluated at this {@link RelationNode}: no DISTINCT
 * aggregations, no window functions, all referenced columns present in the relation's
 * logical schema, and no outer-join-sensitive expressions when the block has an outer
 * join (those must wait for the topmost join).
 */
public static boolean checkIfBeEvaluatedAtRelation(QueryBlock block, EvalNode evalNode, RelationNode node) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);
  // aggregation functions cannot be evaluated in scan node
  if (!EvalTreeUtil.findDistinctAggFunction(evalNode).isEmpty()) {
    return false;
  }
  // window functions cannot be evaluated in a scan node either
  // (the original comment here was a copy-paste of the aggregation one)
  if (!EvalTreeUtil.findEvalsByType(evalNode, EvalType.WINDOW_FUNCTION).isEmpty()) {
    return false;
  }
  if (!columnRefs.isEmpty() && !node.getLogicalSchema().containsAll(columnRefs)) {
    return false;
  }
  // Why? - When a {case when} is used with outer join, case when must be evaluated at topmost outer join.
  if (containsOuterJoin(block)
      && !EvalTreeUtil.findOuterJoinSensitiveEvals(evalNode).isEmpty()) {
    return false;
  }
  return true;
}
/**
 * Checks whether every column referenced by {@code evalNode} is available in
 * {@code node}'s input schema. An expression with no column references is always
 * evaluable.
 */
public static boolean checkIfBeEvaluatedAtThis(EvalNode evalNode, LogicalNode node) {
  Set<Column> columnRefs = EvalTreeUtil.findUniqueColumns(evalNode);
  return columnRefs.isEmpty() || node.getInSchema().containsAll(columnRefs);
}
}
|
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.eq;
import static org.mockito.Matchers.isNull;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.Mockito.spy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.cloud.dc.DataCenter;
import com.cloud.dc.DataCenterVO;
import com.cloud.dc.dao.DataCenterDao;
import com.cloud.network.dao.PhysicalNetworkDao;
import com.cloud.network.dao.PhysicalNetworkServiceProviderDao;
import com.cloud.network.dao.PhysicalNetworkServiceProviderVO;
import com.cloud.network.dao.PhysicalNetworkVO;
import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import com.cloud.dc.VlanVO;
import com.cloud.dc.dao.VlanDao;
import com.cloud.exception.UnsupportedServiceException;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.network.dao.IPAddressVO;
import com.cloud.network.element.NetworkElement;
import com.cloud.user.Account;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.net.Ip;
import com.cloud.network.Network.Provider;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
/**
 * Unit tests for {@link NetworkModelImpl}: source-NAT IP selection for guest networks,
 * provider capability checks, and creation of disabled ConfigDrive provider entries on
 * enabled zones.
 */
public class NetworkModelTest {
    @Mock
    private DataCenterDao dataCenterDao;
    @Mock
    private PhysicalNetworkDao physicalNetworkDao;
    @Mock
    private PhysicalNetworkServiceProviderDao physicalNetworkServiceProviderDao;
    @Mock
    private NetworkService networkService;
    @InjectMocks
    @Spy
    private NetworkModelImpl networkModel = new NetworkModelImpl();
    @Mock
    private DataCenterVO zone1;
    @Mock
    private DataCenterVO zone2;
    @Mock
    private PhysicalNetworkVO physicalNetworkZone1;
    @Mock
    private PhysicalNetworkVO physicalNetworkZone2;
    @Mock
    private PhysicalNetworkServiceProviderVO providerVO;

    private static final long ZONE_1_ID = 1L;
    private static final long ZONE_2_ID = 2L;
    private static final long PHYSICAL_NETWORK_1_ID = 1L;
    private static final long PHYSICAL_NETWORK_2_ID = 2L;

    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
        // Default fixture: two enabled advanced zones, each with one guest physical
        // network, and no ConfigDrive provider registered yet.
        when(dataCenterDao.listEnabledZones()).thenReturn(Arrays.asList(zone1, zone2));
        when(physicalNetworkDao.listByZoneAndTrafficType(ZONE_1_ID, Networks.TrafficType.Guest)).
                thenReturn(Collections.singletonList(physicalNetworkZone1));
        when(physicalNetworkDao.listByZoneAndTrafficType(ZONE_2_ID, Networks.TrafficType.Guest)).
                thenReturn(Collections.singletonList(physicalNetworkZone2));
        when(physicalNetworkServiceProviderDao.findByServiceProvider(
                PHYSICAL_NETWORK_1_ID, Network.Provider.ConfigDrive.getName())).thenReturn(null);
        when(physicalNetworkServiceProviderDao.findByServiceProvider(
                PHYSICAL_NETWORK_2_ID, Network.Provider.ConfigDrive.getName())).thenReturn(null);
        when(zone1.getNetworkType()).thenReturn(DataCenter.NetworkType.Advanced);
        when(zone1.getId()).thenReturn(ZONE_1_ID);
        when(zone2.getNetworkType()).thenReturn(DataCenter.NetworkType.Advanced);
        when(zone2.getId()).thenReturn(ZONE_2_ID);
        when(physicalNetworkZone1.getId()).thenReturn(PHYSICAL_NETWORK_1_ID);
        when(physicalNetworkZone2.getId()).thenReturn(PHYSICAL_NETWORK_2_ID);
    }

    @Test
    public void testGetSourceNatIpAddressForGuestNetwork() {
        NetworkModelImpl modelImpl = new NetworkModelImpl();
        IPAddressDao ipAddressDao = mock(IPAddressDao.class);
        modelImpl._ipAddressDao = ipAddressDao;
        List<IPAddressVO> fakeList = new ArrayList<IPAddressVO>();
        IPAddressVO fakeIp = new IPAddressVO(new Ip("75.75.75.75"), 1, 0xaabbccddeeffL, 10, false);
        fakeList.add(fakeIp);
        SearchBuilder<IPAddressVO> fakeSearch = mock(SearchBuilder.class);
        modelImpl.IpAddressSearch = fakeSearch;
        VlanDao fakeVlanDao = mock(VlanDao.class);
        when(fakeVlanDao.findById(anyLong())).thenReturn(mock(VlanVO.class));
        modelImpl._vlanDao = fakeVlanDao;
        when(fakeSearch.create()).thenReturn(mock(SearchCriteria.class));
        when(ipAddressDao.search(any(SearchCriteria.class), (Filter) isNull())).thenReturn(fakeList);
        when(ipAddressDao.findById(anyLong())).thenReturn(fakeIp);
        Account fakeAccount = mock(Account.class);
        when(fakeAccount.getId()).thenReturn(1L);
        Network fakeNetwork = mock(Network.class);
        when(fakeNetwork.getId()).thenReturn(1L);
        // No source-NAT IP present yet, so null is expected.
        PublicIpAddress answer = modelImpl.getSourceNatIpAddressForGuestNetwork(fakeAccount, fakeNetwork);
        Assert.assertNull(answer);
        // After adding a source-NAT IP, it must be the one returned.
        IPAddressVO fakeIp2 = new IPAddressVO(new Ip("76.75.75.75"), 1, 0xaabb10ddeeffL, 10, true);
        fakeList.add(fakeIp2);
        when(ipAddressDao.findById(anyLong())).thenReturn(fakeIp2);
        answer = modelImpl.getSourceNatIpAddressForGuestNetwork(fakeAccount, fakeNetwork);
        Assert.assertNotNull(answer);
        Assert.assertEquals(answer.getAddress().addr(), "76.75.75.75");
    }

    @Test
    public void testCapabilityForProvider() {
        NetworkModelImpl modelImpl = spy(NetworkModelImpl.class);
        Set<Provider> providers = new HashSet<>();
        providers.add(Provider.NuageVsp);
        NetworkElement nuageVspElement = mock(NetworkElement.class);
        HashMap<Network.Service, Map<Network.Capability, String>> nuageVspCap = new HashMap<Network.Service, Map<Network.Capability, String>>();
        HashMap<Network.Capability, String> nuageVspConnectivity = new HashMap<Network.Capability, String>();
        nuageVspConnectivity.put(Network.Capability.NoVlan, "FindMe");
        nuageVspConnectivity.put(Network.Capability.PublicAccess, "");
        nuageVspCap.put(Network.Service.Connectivity, nuageVspConnectivity);
        when(nuageVspElement.getName()).thenReturn("NuageVsp");
        doReturn(nuageVspCap).when(nuageVspElement).getCapabilities();
        doReturn(nuageVspElement).when(modelImpl).getElementImplementingProvider("NuageVsp");
        // Service not offered by the element at all.
        try {
            modelImpl.checkCapabilityForProvider(providers, Network.Service.UserData, null, null);
            Assert.fail();
        } catch (UnsupportedServiceException e) {
            Assert.assertEquals(e.getMessage(), "Service " + Network.Service.UserData.getName() + " is not supported by the element=NuageVsp implementing Provider=" + Provider.NuageVsp.getName());
        }
        // Capability missing from the offered service.
        try {
            modelImpl.checkCapabilityForProvider(providers, Network.Service.Connectivity, Network.Capability.ElasticIp, null);
            Assert.fail();
        } catch (UnsupportedServiceException e) {
            Assert.assertEquals(e.getMessage(), "Service " + Network.Service.Connectivity.getName() + " doesn't have capability " + Network.Capability.ElasticIp.getName() + " for element=NuageVsp implementing Provider=" + Provider.NuageVsp.getName());
        }
        // Capability present but with a non-matching value.
        try {
            modelImpl.checkCapabilityForProvider(providers, Network.Service.Connectivity, Network.Capability.PublicAccess, "NonExistingVal");
            Assert.fail();
        } catch (UnsupportedServiceException e){
            Assert.assertEquals(e.getMessage(),"Service Connectivity doesn't have capability PublicAccess for element=NuageVsp implementing Provider=NuageVsp");
        }
        // Matching capability value: must not throw.
        modelImpl.checkCapabilityForProvider(providers, Network.Service.Connectivity, Network.Capability.NoVlan, "FindMe");
        // Element reporting no capabilities at all. (A second, unused mock element that
        // was created here has been removed; re-stubbing the existing element is what
        // the assertion actually exercises.)
        doReturn(null).when(nuageVspElement).getCapabilities();
        try {
            modelImpl.checkCapabilityForProvider(providers, Network.Service.Connectivity, Network.Capability.PublicAccess, "");
            Assert.fail();
        } catch (UnsupportedServiceException e) {
            Assert.assertEquals(e.getMessage(), "Service Connectivity is not supported by the element=NuageVsp implementing Provider=NuageVsp");
        }
    }

    @Test
    public void testVerifyDisabledConfigDriveEntriesOnZonesBothEnabledZones() {
        networkModel.verifyDisabledConfigDriveEntriesOnEnabledZones();
        verify(networkModel, times(2)).addDisabledConfigDriveEntriesOnZone(any(DataCenterVO.class));
    }

    @Test
    public void testVerifyDisabledConfigDriveEntriesOnZonesOneEnabledZone() {
        when(dataCenterDao.listEnabledZones()).thenReturn(Collections.singletonList(zone1));
        networkModel.verifyDisabledConfigDriveEntriesOnEnabledZones();
        verify(networkModel).addDisabledConfigDriveEntriesOnZone(any(DataCenterVO.class));
    }

    @Test
    public void testVerifyDisabledConfigDriveEntriesOnZonesNoEnabledZones() {
        when(dataCenterDao.listEnabledZones()).thenReturn(null);
        networkModel.verifyDisabledConfigDriveEntriesOnEnabledZones();
        verify(networkModel, never()).addDisabledConfigDriveEntriesOnZone(any(DataCenterVO.class));
    }

    @Test
    public void testAddDisabledConfigDriveEntriesOnZoneBasicZone() {
        when(zone1.getNetworkType()).thenReturn(DataCenter.NetworkType.Basic);
        networkModel.addDisabledConfigDriveEntriesOnZone(zone1);
        verify(physicalNetworkDao, never()).listByZoneAndTrafficType(ZONE_1_ID, Networks.TrafficType.Guest);
        verify(networkService, never()).
                addProviderToPhysicalNetwork(anyLong(), eq(Provider.ConfigDrive.getName()), isNull(Long.class), isNull(List.class));
    }

    @Test
    public void testAddDisabledConfigDriveEntriesOnZoneAdvancedZoneExistingConfigDrive() {
        when(physicalNetworkServiceProviderDao.findByServiceProvider(
                PHYSICAL_NETWORK_1_ID, Network.Provider.ConfigDrive.getName())).thenReturn(providerVO);
        networkModel.addDisabledConfigDriveEntriesOnZone(zone1);
        verify(networkService, never()).
                addProviderToPhysicalNetwork(anyLong(), eq(Provider.ConfigDrive.getName()), isNull(Long.class), isNull(List.class));
    }

    @Test
    public void testAddDisabledConfigDriveEntriesOnZoneAdvancedZoneNonExistingConfigDrive() {
        networkModel.addDisabledConfigDriveEntriesOnZone(zone1);
        verify(networkService).
                addProviderToPhysicalNetwork(anyLong(), eq(Provider.ConfigDrive.getName()), isNull(Long.class), isNull(List.class));
    }
}
|
package bftsmart.microbenchmark.tpcc.table;
import java.util.Objects;
import java.util.Set;
import java.util.StringJoiner;
import org.javatuples.Quartet;
import org.javatuples.Quintet;
import org.javatuples.Tuple;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonPOJOBuilder;
import com.google.common.collect.ImmutableSet;
import bftsmart.microbenchmark.tpcc.probject.ModelType;
import bftsmart.microbenchmark.tpcc.probject.PRObject;
/**
* <ol>
* <li>Primary Key: (O_W_ID, O_D_ID, O_ID)</li>
* <li>(O_W_ID, O_D_ID, O_C_ID) Foreign Key, references (C_W_ID, C_D_ID,
* C_ID)</li>
* </ol>
*/
@JsonDeserialize(builder = Order.Builder.class)
public class Order implements PRObject {
private static final long serialVersionUID = -7877754976067698904L;
// Type tag placed in the first component of every key tuple produced by this class.
private static final ModelType MODEL_TYPE = ModelType.ORDER;
// Primary key (MODEL_TYPE, O_W_ID, O_D_ID, O_ID); derived once in the constructor.
private final Tuple key;
// Secondary keys: the order-by-customer key and the customer key (see the static factories).
private final Set<Tuple> secondaryKeys;
/**
 * o_id - 10,000,000 unique IDs
 */
@JsonProperty("o_id")
private final Integer orderId;
/**
 * o_d_id - 20 unique IDs
 */
@JsonProperty("o_d_id")
private final Integer districtId;
/**
 * o_w_id - 2*W unique IDs
 */
@JsonProperty("o_w_id")
private final Integer warehouseId;
/**
 * o_c_id - 96,000 unique IDs
 */
@JsonProperty("o_c_id")
private final Integer customerId;
/**
 * o_entry_d - date and time
 */
@JsonProperty("o_entry_d")
private final Long entryDate;
/**
 * o_carrier_id - 10 unique IDs, or null
 */
@JsonProperty("o_carrier_id")
private final Integer carrierId;
/**
 * o_ol_cnt - numeric(2) - Count of Order-Lines
 */
@JsonProperty("o_ol_cnt")
private final Integer orderLineCounter;
/**
 * o_all_local - numeric(1)
 */
@JsonProperty("o_all_local")
private final Integer allLocal;
/**
 * Builds an immutable Order from its builder and derives the primary key
 * (warehouse, district, order) plus both secondary keys up front.
 */
private Order(Builder builder) {
    this.orderId = builder.orderId;
    this.districtId = builder.districtId;
    this.warehouseId = builder.warehouseId;
    this.customerId = builder.customerId;
    this.entryDate = builder.entryDate;
    this.carrierId = builder.carrierId;
    this.orderLineCounter = builder.orderLineCounter;
    this.allLocal = builder.allLocal;
    this.key = key(warehouseId, districtId, orderId);
    this.secondaryKeys = ImmutableSet.of(orderKey(warehouseId, districtId, customerId, orderId),
            customerKey(warehouseId, districtId, customerId));
}
/** Primary key: (MODEL_TYPE, warehouseId, districtId, orderId). */
@Override
public Tuple getKey() {
    return key;
}
/** Secondary keys derived in the constructor (order-by-customer key and customer key). */
@Override
public Set<Tuple> getSecondaryKeys() {
    return secondaryKeys;
}
// Plain accessors for the TPC-C ORDER columns; see the field javadocs for value ranges.
public Integer getOrderId() {
    return orderId;
}
public Integer getDistrictId() {
    return districtId;
}
public Integer getWarehouseId() {
    return warehouseId;
}
public Integer getCustomerId() {
    return customerId;
}
public Long getEntryDate() {
    return entryDate;
}
public Integer getCarrierId() {
    return carrierId;
}
public Integer getOrderLineCounter() {
    return orderLineCounter;
}
public Integer getAllLocal() {
    return allLocal;
}
public static Tuple orderKey(Integer warehouseId, Integer districtId, Integer customerId, Integer orderId) {
return Quintet.with(MODEL_TYPE, warehouseId, districtId, customerId, orderId);
}
public static Tuple customerKey(Integer warehouseId, Integer districtId, Integer customerId) {
return Quartet.with(MODEL_TYPE, warehouseId, districtId, customerId);
}
public static Tuple key(Integer warehouseId, Integer districtId, Integer orderId) {
return Quartet.with(MODEL_TYPE, warehouseId, districtId, orderId);
}
public static Builder builder() {
return new Builder();
}
public static Builder from(Order order) {
return new Builder(order);
}
@Override
public int hashCode() {
return Objects.hash(key);
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Order other = (Order) obj;
return Objects.equals(key, other.key);
}
@Override
public String toString() {
return new StringJoiner(", ", Order.class.getSimpleName() + "[", "]").add("key=" + key)
.add("secondaryKeys=" + secondaryKeys)
.add("orderId=" + orderId)
.add("districtId=" + districtId)
.add("warehouseId=" + warehouseId)
.add("customerId=" + customerId)
.add("entryDate=" + entryDate)
.add("carrierId=" + carrierId)
.add("orderLineCounter=" + orderLineCounter)
.add("allLocal=" + allLocal)
.toString();
}
@JsonPOJOBuilder
public static class Builder {
@JsonProperty("o_id")
private Integer orderId;
@JsonProperty("o_d_id")
private Integer districtId;
@JsonProperty("o_w_id")
private Integer warehouseId;
@JsonProperty("o_c_id")
private Integer customerId;
@JsonProperty("o_entry_d")
private Long entryDate;
@JsonProperty("o_carrier_id")
private Integer carrierId;
@JsonProperty("o_ol_cnt")
private Integer orderLineCounter;
@JsonProperty("o_all_local")
private Integer allLocal;
private Builder() {
super();
}
private Builder(Order order) {
this.orderId = order.orderId;
this.districtId = order.districtId;
this.warehouseId = order.warehouseId;
this.customerId = order.customerId;
this.entryDate = order.entryDate;
this.carrierId = order.carrierId;
this.orderLineCounter = order.orderLineCounter;
this.allLocal = order.allLocal;
}
public Builder orderId(Integer orderId) {
this.orderId = orderId;
return this;
}
public Builder districtId(Integer districtId) {
this.districtId = districtId;
return this;
}
public Builder warehouseId(Integer warehouseId) {
this.warehouseId = warehouseId;
return this;
}
public Builder customerId(Integer customerId) {
this.customerId = customerId;
return this;
}
public Builder entryDate(Long entryDate) {
this.entryDate = entryDate;
return this;
}
public Builder carrierId(Integer carrierId) {
this.carrierId = carrierId;
return this;
}
public Builder orderLineCounter(Integer orderLineCounter) {
this.orderLineCounter = orderLineCounter;
return this;
}
public Builder allLocal(Integer allLocal) {
this.allLocal = allLocal;
return this;
}
public Order build() {
return new Order(this);
}
}
}
|
package com.atguigu.gulimall.coupon.controller;
import java.util.Arrays;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import com.atguigu.gulimall.coupon.entity.SkuLadderEntity;
import com.atguigu.gulimall.coupon.service.SkuLadderService;
import com.atguigu.common.utils.PageUtils;
import com.atguigu.common.utils.R;
/**
 * REST endpoints for 商品阶梯价格 (sku ladder prices).
 *
 * @author
 * @email
 * @date 2022-01-11 20:28:38
 */
@RestController
@RequestMapping("coupon/skuladder")
public class SkuLadderController {

    @Autowired
    private SkuLadderService skuLadderService;

    /** Paged list query; paging and filter options arrive in {@code params}. */
    @RequestMapping("/list")
    //@RequiresPermissions("coupon:skuladder:list")
    public R list(@RequestParam Map<String, Object> params){
        final PageUtils queryResult = skuLadderService.queryPage(params);
        return R.ok().put("page", queryResult);
    }

    /** Fetches a single ladder-price row by primary key. */
    @RequestMapping("/info/{id}")
    //@RequiresPermissions("coupon:skuladder:info")
    public R info(@PathVariable("id") Long id){
        final SkuLadderEntity entity = skuLadderService.getById(id);
        return R.ok().put("skuLadder", entity);
    }

    /** Persists a new ladder-price row. */
    @RequestMapping("/save")
    //@RequiresPermissions("coupon:skuladder:save")
    public R save(@RequestBody SkuLadderEntity skuLadder){
        skuLadderService.save(skuLadder);
        return R.ok();
    }

    /** Updates an existing row, matched by its id field. */
    @RequestMapping("/update")
    //@RequiresPermissions("coupon:skuladder:update")
    public R update(@RequestBody SkuLadderEntity skuLadder){
        skuLadderService.updateById(skuLadder);
        return R.ok();
    }

    /** Batch delete by primary keys. */
    @RequestMapping("/delete")
    //@RequiresPermissions("coupon:skuladder:delete")
    public R delete(@RequestBody Long[] ids){
        skuLadderService.removeByIds(Arrays.asList(ids));
        return R.ok();
    }
}
|
package org.osgi.service.indexer.impl;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.zip.GZIPInputStream;
import org.junit.Ignore;
import org.osgi.service.indexer.Capability;
import org.osgi.service.indexer.Requirement;
@Ignore
public class Utils {
    /** The platform-specific end-of-line separator (was a mutable field; now final). */
    private static final String EOL = String.format("%n");

    /**
     * Reads the entire stream as UTF-8 text, joining lines with the platform
     * EOL (the stream's own line terminators are normalized away).
     *
     * @param stream input to consume; closed on return
     * @return the stream content without a trailing EOL
     * @throws IOException if reading fails
     */
    public static String readStream(InputStream stream) throws IOException {
        // try-with-resources replaces the manual try/finally close.
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(stream, "UTF-8"))) {
            StringBuilder result = new StringBuilder();
            String line;
            while ((line = reader.readLine()) != null) {
                if (result.length() > 0) {
                    result.append(EOL);
                }
                result.append(line);
            }
            return result.toString();
        }
    }

    /**
     * Decompresses a GZIP stream into a UTF-8 string; the stream is closed
     * even if decompression fails part-way.
     */
    public static String decompress(InputStream compressedStream) throws IOException {
        try (GZIPInputStream decompressedStream = new GZIPInputStream(compressedStream)) {
            return readStream(decompressedStream);
        }
    }

    /**
     * Decompresses a GZIP payload held in a string. The string is converted
     * to bytes as UTF-8 (the original used the platform default charset,
     * which is not portable).
     */
    public static String decompress(String string) throws IOException {
        return decompress(new ByteArrayInputStream(string.getBytes("UTF-8")));
    }

    /** Decompresses a GZIP payload held in a byte array. */
    public static String decompress(byte[] byteArray) throws IOException {
        return decompress(new ByteArrayInputStream(byteArray));
    }

    /** Returns the capabilities whose namespace equals {@code namespace}. */
    public static List<Capability> findCaps(String namespace, Collection<Capability> caps) {
        List<Capability> result = new ArrayList<>();
        for (Capability cap : caps) {
            if (namespace.equals(cap.getNamespace()))
                result.add(cap);
        }
        return result;
    }

    /** Returns the requirements whose namespace equals {@code namespace}. */
    public static List<Requirement> findReqs(String namespace, Collection<Requirement> reqs) {
        List<Requirement> result = new ArrayList<>();
        for (Requirement req : reqs) {
            if (namespace.equals(req.getNamespace()))
                result.add(req);
        }
        return result;
    }
}
|
/* All Contributors (C) 2020 */
package io.github.dreamylost.practice;
import java.util.Arrays;
import java.util.EmptyStackException;
import java.util.Stack;
/**
 * A stack that also reports its minimum in O(1), implemented by decorating
 * {@link Stack}: for every element pushed onto the backing array, the running
 * minimum at that depth is pushed onto an auxiliary {@code minStack}.
 */
public class AddMinFunctionForStack3 {
    /** Number of elements currently stored. */
    private int size;
    /** Current minimum; Integer.MAX_VALUE when the stack is empty. */
    private int min = Integer.MAX_VALUE;
    /** Auxiliary stack: entry i holds the minimum of the first i+1 elements. */
    private Stack<Integer> minStack = new Stack<Integer>();
    /** Backing array of the custom stack; grown on demand. */
    private Integer[] elements = new Integer[10];

    /** Pushes a value and records the running minimum at this depth. */
    public void push(int node) {
        ensureCapacity(size + 1);
        elements[size++] = node;
        if (node <= min) {
            minStack.push(node);
            min = minStack.peek();
        } else {
            minStack.push(min);
        }
    }

    /** Grows the backing array by ~1.5x when {@code size} exceeds capacity. */
    private void ensureCapacity(int size) {
        int len = elements.length;
        if (size > len) {
            int newLen = (len * 3) / 2 + 1; // growth policy: 1.5x + 1
            elements = Arrays.copyOf(elements, newLen);
        }
    }

    /**
     * Removes the top element and restores the previous minimum.
     *
     * @throws EmptyStackException if the stack is empty. (The original
     *         decremented {@code size} below zero and then let
     *         {@code minStack.pop()} throw, leaving the stack corrupted.)
     */
    public void pop() {
        if (empty()) {
            throw new EmptyStackException(); // fail fast, do not corrupt size
        }
        elements[size - 1] = null; // clear the slot so the value can be GC'd
        size--;
        minStack.pop();
        // When the last element is removed, reset min to its initial value;
        // the original unconditionally called minStack.peek() and threw here.
        min = minStack.isEmpty() ? Integer.MAX_VALUE : minStack.peek();
    }

    /** Returns the top element without removing it, or null when empty. */
    public Integer top() {
        if (!empty()) {
            if (size - 1 >= 0) return elements[size - 1];
        }
        return null;
    }

    /** @return true when the stack holds no elements */
    public boolean empty() {
        return size == 0;
    }

    /**
     * @return the minimum of the stored elements in O(1), or
     *         Integer.MAX_VALUE when the stack is empty
     */
    public int min() {
        return min;
    }
}
|
package com.micro.base.delegate;
import android.app.Activity;
import android.os.Bundle;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.fragment.app.FragmentManager;
import com.micro.base.BaseActivity;
import com.micro.base.BaseFragment;
import com.micro.integration.ActivityLifecycle;
import com.micro.integration.cache.Cache;
import com.micro.integration.cache.LruCache;
/**
 * ================================================
 * The framework requires every {@link Activity} to implement this interface,
 * so that all activities follow the same conventions.
 *
 * @see BaseActivity
 * ================================================
 */
public interface IActivity {
/**
 * Provides a cache container bound to this {@link Activity}'s lifecycle, for
 * storing any data the {@link Activity} needs. Because the cache is tied to
 * the lifecycle, its contents are cleared when the {@link Activity} is
 * recreated (e.g. on screen rotation or a configuration change); to keep data
 * across recreation use <a href="https://github.com/JessYanCoding/LifecycleModel">LifecycleModel</a>
 *
 * @return like {@link LruCache}
 */
@NonNull
Cache<String, Object> provideCache();
// /**
// * Provides the AppComponent (which exposes all singletons) to the
// * implementing class for Component dependency injection
// *
// * @param appComponent
// */
// void setupActivityComponent(@NonNull ApplicationComponent appComponent);
/**
 * Whether to use an EventBus.
 * The Arms core library does not depend on any particular EventBus; to use
 * one, add it to your project yourself. Two buses are supported: greenrobot's
 * EventBus and AndroidEventBus (by 何红辉, author of a popular Android
 * source-code design-patterns book). Once the dependency is in place, return
 * {@code true} and Arms will detect the bus you depend on and register it
 * automatically. This leaves the choice of third-party library to the user
 * and keeps Arms itself small.
 *
 * @return return {@code true} and Arms will register the EventBus automatically
 */
boolean useEventBus();
/**
 * Initializes the view. If {@link #initView(Bundle)} returns 0, the framework
 * will not call {@link Activity#setContentView(int)}.
 *
 * @param savedInstanceState
 * @return the layout resource id, or 0 to skip setContentView
 */
int initView(@Nullable Bundle savedInstanceState);
/**
 * Initializes data.
 *
 * @param savedInstanceState
 */
void initData(@Nullable Bundle savedInstanceState);
/**
 * Whether this Activity uses Fragments; the framework uses this flag to decide
 * whether to register {@link FragmentManager.FragmentLifecycleCallbacks}.
 * If it returns {@code false}, this Activity is not expected to host Fragments,
 * and binding a Fragment extending {@link BaseFragment} to it will have no effect.
 * @see ActivityLifecycle #registerFragmentCallbacks (Fragment registration)
 *
 * @return {@code true} if this Activity hosts Fragments
 */
boolean useFragment();
}
|
package com.leisurexi.data.structures.leetcode;
import lombok.extern.slf4j.Slf4j;
import java.util.Arrays;
/**
 * LeetCode interview problem 40: the k smallest numbers.
 *
 * <p>Given an integer array {@code arr}, return its {@code k} smallest
 * elements, in any order. E.g. for 4,5,1,6,2,7,3,8 and k=4 the answer is
 * 1,2,3,4. Constraints: {@code 0 <= k <= arr.length <= 10000},
 * {@code 0 <= arr[i] <= 10000}.
 *
 * <p>Source: https://leetcode-cn.com/problems/zui-xiao-de-kge-shu-lcof/submissions/
 *
 * @author: leisurexi
 * @date: 2020-03-20 22:26
 * @since JDK 1.8
 */
@Slf4j
public class MinKNumbers {
    /**
     * Bubble-sort version. NOTE: sorts the caller's array in place, then
     * copies out the first k elements.
     *
     * @param arr input numbers (mutated: sorted ascending)
     * @param k   how many smallest elements to return; k <= 0 yields an empty array
     * @return the k smallest elements in ascending order
     */
    public static int[] getLeastNumbers(int[] arr, int k) {
        if (k <= 0) {
            // k == 0 is allowed by the constraints: the answer is empty.
            return new int[0];
        }
        if (arr.length == 1 && k == 1) {
            return arr;
        }
        for (int i = 0; i < arr.length - 1; i++) {
            for (int j = 0; j < arr.length - i - 1; j++) {
                if (arr[j] > arr[j + 1]) {
                    int temp = arr[j];
                    arr[j] = arr[j + 1];
                    arr[j + 1] = temp;
                }
            }
        }
        log.info(Arrays.toString(arr));
        // Arrays.copyOf replaces the original manual copy loop.
        return Arrays.copyOf(arr, k);
    }

    /**
     * Quick-sort version: same contract as {@link #getLeastNumbers(int[], int)}
     * but O(n log n) on average instead of O(n^2).
     */
    public static int[] getLeastNumbers_2(int[] arr, int k) {
        if (k <= 0) {
            return new int[0];
        }
        if (arr.length == 1 && k == 1) {
            return arr;
        }
        quickSort(arr, 0, arr.length - 1);
        log.info("排序好后的数组: {}", Arrays.toString(arr));
        return Arrays.copyOf(arr, k);
    }

    // Recursive quick sort over array[startIndex..endIndex], inclusive.
    private static void quickSort(int[] array, int startIndex, int endIndex) {
        if (startIndex >= endIndex) {
            return;
        }
        int pivotIndex = partition(array, startIndex, endIndex);
        quickSort(array, startIndex, pivotIndex - 1);
        quickSort(array, pivotIndex + 1, endIndex);
    }

    /**
     * Single-pointer (Lomuto-style) partition: the first element of the range
     * is the pivot; smaller elements are swapped behind {@code mark}, then the
     * pivot is placed at its final position.
     *
     * @param array      array to partition (mutated in place)
     * @param startIndex first index of the range
     * @param endIndex   last index of the range
     * @return the pivot's final index
     */
    private static int partition(int[] array, int startIndex, int endIndex) {
        int pivot = array[startIndex];
        int mark = startIndex;
        for (int i = startIndex + 1; i <= endIndex; i++) {
            if (array[i] < pivot) {
                mark++;
                int p = array[mark];
                array[mark] = array[i];
                array[i] = p;
            }
        }
        array[startIndex] = array[mark];
        array[mark] = pivot;
        return mark;
    }

    public static void main(String[] args) {
        int[] arr = {0, 0, 1, 2, 4, 2, 2, 3, 1, 4};
        int k = 8;
        log.info(Arrays.toString(getLeastNumbers_2(arr, k)));
    }
}
|
package com.ashokvarma.gander.internal.ui.details;
import android.app.Application;
import android.arch.lifecycle.AndroidViewModel;
import android.arch.lifecycle.LiveData;
import com.ashokvarma.gander.internal.data.GanderDatabase;
import com.ashokvarma.gander.internal.data.HttpTransaction;
import com.ashokvarma.gander.internal.data.TransactionDao;
/**
 * ViewModel for the transaction-detail screen: exposes a single
 * {@link HttpTransaction}, looked up by id, as {@link LiveData}.
 *
 * @author ashok
 * @version 1.0
 * @since 03/06/18
 */
public class TransactionDetailViewModel extends AndroidViewModel {
// DAO obtained once from the singleton Gander database.
private final TransactionDao mTransactionDao;
public TransactionDetailViewModel(Application application) {
super(application);
mTransactionDao = GanderDatabase.getInstance(application).httpTransactionDao();
}
/**
 * Returns the transaction with the given id wrapped in LiveData, so the UI
 * observes updates from the database.
 *
 * @param id primary key of the transaction row
 */
public LiveData<HttpTransaction> getTransactionWithId(long id) {
return mTransactionDao.getTransactionsWithId(id);
}
}
|
package de.metas.vertical.pharma.model.interceptor;
import org.adempiere.ad.modelvalidator.annotations.Interceptor;
import org.adempiere.ad.modelvalidator.annotations.ModelChange;
import org.adempiere.exceptions.AdempiereException;
import org.compiere.model.ModelValidator;
import org.springframework.stereotype.Component;
import de.metas.i18n.AdMessageKey;
import de.metas.i18n.IMsgBL;
import de.metas.i18n.ITranslatableString;
import de.metas.util.Services;
import de.metas.vertical.pharma.PharmaModulo11Validator;
import de.metas.vertical.pharma.model.I_M_Product;
/*
* #%L
* metasfresh-pharma
* %%
* Copyright (C) 2019 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
@Interceptor(I_M_Product.class)
@Component
public class M_Product
{
	private static final AdMessageKey ERR_Invalid_PZN = AdMessageKey.of("de.metas.vertical.pharma.model.interceptor.M_Product.Invalid_PZN");

	/**
	 * Rejects saving a pharma product whose value (PZN) fails the
	 * modulo-11 check; fires before insert/update when the value or the
	 * pharma flag changed.
	 */
	@ModelChange(timings = { ModelValidator.TYPE_BEFORE_NEW, ModelValidator.TYPE_BEFORE_CHANGE }, ifColumnsChanged = {
			I_M_Product.COLUMNNAME_Value,
			I_M_Product.COLUMNNAME_IsPharmaProduct
	})
	public void ValidatePZN(final I_M_Product product)
	{
		if (!product.isPharmaProduct())
		{
			return; // the PZN check applies to pharma products only
		}

		final String pzn = product.getValue();
		if (PharmaModulo11Validator.isValid(pzn))
		{
			return;
		}

		final IMsgBL msgBL = Services.get(IMsgBL.class);
		final ITranslatableString message = msgBL.getTranslatableMsgText(ERR_Invalid_PZN, pzn);
		throw new AdempiereException(message).markAsUserValidationError();
	}
}
|
package com.bertrand.android10.sample.presentation.view.fragment;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.text.Editable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.EditText;
import android.widget.TextView;
import com.bertrand.android10.sample.domain.DomainPinballMatchModel;
import com.bertrand.android10.sample.presentation.R;
import com.bertrand.android10.sample.presentation.internal.di.components.CreatePinballComponent;
import com.bertrand.android10.sample.presentation.presenter.CreatePinballMatchPresenter;
import com.bertrand.android10.sample.presentation.view.CreatePinballView;
import javax.inject.Inject;
import butterknife.BindView;
import butterknife.ButterKnife;
import butterknife.Unbinder;
// Fragment for creating a pinball match: reads the match name from an
// EditText, asks the presenter to create the match when the activity's FAB
// is tapped, and shows the resulting points total.
public class CreatePinBallMatchFragment extends BaseFragment implements CreatePinballView {
@BindView(R.id.create_pinball_match_et)
EditText createPinballMatchEditText;
@BindView(R.id.create_pinball_match_tv)
TextView pointsTotal;
@Inject
CreatePinballMatchPresenter presenter;
// ButterKnife unbinder created in onCreateView, released in onDestroyView.
private Unbinder unbinder;
public CreatePinBallMatchFragment() {
// Keep the fragment instance (and injected presenter) across config changes.
setRetainInstance(true);
}
@Override public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Dagger injection: populates `presenter`.
this.getComponent(CreatePinballComponent.class).inject(this);
}
@Override public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
final View fragmentView = inflater.inflate(R.layout.fragment_create_pinball_match, container, false);
unbinder = ButterKnife.bind(this, fragmentView);
return fragmentView;
}
@Override public void onViewCreated(View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
this.presenter.setView(this);
}
// Reads the entered match name and forwards it to the presenter.
private void createPinBallMatch() {
Editable text = createPinballMatchEditText.getText();
if(text != null) {
this.presenter.createPinballMatch(text.toString());
}
}
@Override public void onResume() {
super.onResume();
this.presenter.resume();
// The FAB lives in the host activity's layout, so it is wired here once
// the activity is guaranteed to exist.
if(getActivity() != null ) {
FloatingActionButton fab = getActivity().findViewById(R.id.fab);
fab.setOnClickListener(v -> createPinBallMatch());
}
}
@Override public void onPause() {
super.onPause();
this.presenter.pause();
}
@Override public void onDestroyView() {
super.onDestroyView();
unbinder.unbind();
}
@Override public void onDestroy() {
super.onDestroy();
this.presenter.destroy();
}
// Presenter callback: displays the created match's points total.
@Override
public void onPinballMatchCreated(DomainPinballMatchModel domainPinballMatchModel) {
if(domainPinballMatchModel != null) {
pointsTotal.setText(Integer.toString(domainPinballMatchModel.getPinballMatchPointsTotal()));
}
}
// The presenter uses this to avoid touching views while the fragment is detached.
@Override
public boolean isReady() {
return isAdded();
}
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.bean.validator.springboot;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
 * The Validator component performs bean validation of the message body using
 * the Java Bean Validation API.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@ConfigurationProperties(prefix = "camel.component.bean-validator")
public class BeanValidatorComponentConfiguration {
    /**
     * Whether the component should resolve property placeholders on itself when
     * starting. Only properties which are of String type can use property
     * placeholders.
     */
    // Bound from the `camel.component.bean-validator.resolve-property-placeholders`
    // configuration key; defaults to true.
    private Boolean resolvePropertyPlaceholders = true;
    public Boolean getResolvePropertyPlaceholders() {
        return resolvePropertyPlaceholders;
    }
    public void setResolvePropertyPlaceholders(
            Boolean resolvePropertyPlaceholders) {
        this.resolvePropertyPlaceholders = resolvePropertyPlaceholders;
    }
}
|
package com.andersenlab.crm.rest.response;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.validation.constraints.NotNull;
/**
 * Transport object for a country. Lombok generates the accessors, the
 * no-arg/all-args constructors, and equals/hashCode/toString.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class CountryDto {
// Country identifier; validated as mandatory on incoming payloads.
@NotNull
private Long id;
// Country name; not validated here.
private String name;
}
|
package com.jinhx.blog.common.config;
import com.google.code.kaptcha.impl.DefaultKaptcha;
import com.google.code.kaptcha.util.Config;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.Properties;
/**
 * Captcha (kaptcha) producer configuration.
 *
 * @author jinhx
 * @since 2019-10-07
 */
@Configuration
public class KaptchaConfig {

    /**
     * Builds the captcha producer: no border, black text, 5px character spacing.
     */
    @Bean
    public DefaultKaptcha producer() {
        Properties props = new Properties();
        props.setProperty("kaptcha.border", "no");
        props.setProperty("kaptcha.textproducer.font.color", "black");
        props.setProperty("kaptcha.textproducer.char.space", "5");

        DefaultKaptcha kaptcha = new DefaultKaptcha();
        kaptcha.setConfig(new Config(props));
        return kaptcha;
    }
}
|
/*******************************************************************************
* Gorlok AO, an implementation of Argentum Online using Java.
* Copyright (C) 2019 Pablo Fernando Lillia «gorlok»
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*******************************************************************************/
package com.argentumjk.server.protocol;
import com.argentumjk.server.net.*;
import io.netty.buffer.ByteBuf;
public class UserHitNPCResponse extends ServerPacket {
	// Wire format: UserHitNPC, l:damage
	public int damage;

	public UserHitNPCResponse(int damage) {
		this.damage = damage;
	}

	@Override
	public ServerPacketID id() {
		return ServerPacketID.UserHitNPC;
	}

	/**
	 * Reads one packet payload from the buffer; returns null when the buffer
	 * does not hold enough readable bytes (presumably the caller retries once
	 * more data arrives — TODO confirm caller semantics).
	 */
	public static UserHitNPCResponse decode(ByteBuf in) {
		try {
			return new UserHitNPCResponse(readInt(in));
		} catch (IndexOutOfBoundsException e) {
			return null;
		}
	}

	@Override
	public void encode(ByteBuf out) {
		writeByte(out, this.id().id());
		writeInt(out, damage);
	}
}
|
package io.kubernetes.client.openapi.models;
import io.kubernetes.client.fluent.Nested;
import java.lang.Deprecated;
import io.kubernetes.client.fluent.BaseFluent;
import java.lang.Object;
import java.lang.Boolean;
/**
 * Generated fluent-builder implementation for V1beta1JobTemplateSpec.
 * Holds the two buildable sub-objects (metadata, spec) as builders so that
 * nested {@code withNew*}/{@code edit*} chains stay editable until build time.
 */
public class V1beta1JobTemplateSpecFluentImpl<A extends io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent<A>> extends io.kubernetes.client.fluent.BaseFluent<A> implements io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent<A>{
public V1beta1JobTemplateSpecFluentImpl() {
}
// Copy constructor: seeds this fluent from an existing instance.
public V1beta1JobTemplateSpecFluentImpl(io.kubernetes.client.openapi.models.V1beta1JobTemplateSpec instance) {
this.withMetadata(instance.getMetadata());
this.withSpec(instance.getSpec());
}
// Stored as builders (not built objects); each is also registered in
// _visitables under its field name for traversal.
private io.kubernetes.client.openapi.models.V1ObjectMetaBuilder metadata;
private io.kubernetes.client.openapi.models.V1JobSpecBuilder spec;
/**
 * This method has been deprecated, please use method buildMetadata instead.
 * @return The buildable object.
 */
@java.lang.Deprecated
public io.kubernetes.client.openapi.models.V1ObjectMeta getMetadata() {
return this.metadata!=null?this.metadata.build():null;
}
public io.kubernetes.client.openapi.models.V1ObjectMeta buildMetadata() {
return this.metadata!=null?this.metadata.build():null;
}
// Replaces the metadata builder, keeping _visitables in sync.
public A withMetadata(io.kubernetes.client.openapi.models.V1ObjectMeta metadata) {
_visitables.get("metadata").remove(this.metadata);
if (metadata!=null){ this.metadata= new io.kubernetes.client.openapi.models.V1ObjectMetaBuilder(metadata); _visitables.get("metadata").add(this.metadata);} return (A) this;
}
public java.lang.Boolean hasMetadata() {
return this.metadata != null;
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.MetadataNested<A> withNewMetadata() {
return new io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluentImpl.MetadataNestedImpl();
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.MetadataNested<A> withNewMetadataLike(io.kubernetes.client.openapi.models.V1ObjectMeta item) {
return new io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluentImpl.MetadataNestedImpl(item);
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.MetadataNested<A> editMetadata() {
return withNewMetadataLike(getMetadata());
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.MetadataNested<A> editOrNewMetadata() {
return withNewMetadataLike(getMetadata() != null ? getMetadata(): new io.kubernetes.client.openapi.models.V1ObjectMetaBuilder().build());
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.MetadataNested<A> editOrNewMetadataLike(io.kubernetes.client.openapi.models.V1ObjectMeta item) {
return withNewMetadataLike(getMetadata() != null ? getMetadata(): item);
}
/**
 * This method has been deprecated, please use method buildSpec instead.
 * @return The buildable object.
 */
@java.lang.Deprecated
public io.kubernetes.client.openapi.models.V1JobSpec getSpec() {
return this.spec!=null?this.spec.build():null;
}
public io.kubernetes.client.openapi.models.V1JobSpec buildSpec() {
return this.spec!=null?this.spec.build():null;
}
// Replaces the spec builder, keeping _visitables in sync.
public A withSpec(io.kubernetes.client.openapi.models.V1JobSpec spec) {
_visitables.get("spec").remove(this.spec);
if (spec!=null){ this.spec= new io.kubernetes.client.openapi.models.V1JobSpecBuilder(spec); _visitables.get("spec").add(this.spec);} return (A) this;
}
public java.lang.Boolean hasSpec() {
return this.spec != null;
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.SpecNested<A> withNewSpec() {
return new io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluentImpl.SpecNestedImpl();
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.SpecNested<A> withNewSpecLike(io.kubernetes.client.openapi.models.V1JobSpec item) {
return new io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluentImpl.SpecNestedImpl(item);
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.SpecNested<A> editSpec() {
return withNewSpecLike(getSpec());
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.SpecNested<A> editOrNewSpec() {
return withNewSpecLike(getSpec() != null ? getSpec(): new io.kubernetes.client.openapi.models.V1JobSpecBuilder().build());
}
public io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.SpecNested<A> editOrNewSpecLike(io.kubernetes.client.openapi.models.V1JobSpec item) {
return withNewSpecLike(getSpec() != null ? getSpec(): item);
}
// Equality is based on the two stored builders.
public boolean equals(java.lang.Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
V1beta1JobTemplateSpecFluentImpl that = (V1beta1JobTemplateSpecFluentImpl) o;
if (metadata != null ? !metadata.equals(that.metadata) :that.metadata != null) return false;
if (spec != null ? !spec.equals(that.spec) :that.spec != null) return false;
return true;
}
public int hashCode() {
return java.util.Objects.hash(metadata, spec, super.hashCode());
}
// Nested fluent for the metadata field; and()/endMetadata() return control
// to the enclosing fluent, committing the built V1ObjectMeta.
public class MetadataNestedImpl<N> extends io.kubernetes.client.openapi.models.V1ObjectMetaFluentImpl<io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.MetadataNested<N>> implements io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.MetadataNested<N>,io.kubernetes.client.fluent.Nested<N>{
MetadataNestedImpl(io.kubernetes.client.openapi.models.V1ObjectMeta item) {
this.builder = new io.kubernetes.client.openapi.models.V1ObjectMetaBuilder(this, item);
}
MetadataNestedImpl() {
this.builder = new io.kubernetes.client.openapi.models.V1ObjectMetaBuilder(this);
}
io.kubernetes.client.openapi.models.V1ObjectMetaBuilder builder;
public N and() {
return (N) V1beta1JobTemplateSpecFluentImpl.this.withMetadata(builder.build());
}
public N endMetadata() {
return and();
}
}
// Nested fluent for the spec field; and()/endSpec() return control to the
// enclosing fluent, committing the built V1JobSpec.
public class SpecNestedImpl<N> extends io.kubernetes.client.openapi.models.V1JobSpecFluentImpl<io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.SpecNested<N>> implements io.kubernetes.client.openapi.models.V1beta1JobTemplateSpecFluent.SpecNested<N>,io.kubernetes.client.fluent.Nested<N>{
SpecNestedImpl(io.kubernetes.client.openapi.models.V1JobSpec item) {
this.builder = new io.kubernetes.client.openapi.models.V1JobSpecBuilder(this, item);
}
SpecNestedImpl() {
this.builder = new io.kubernetes.client.openapi.models.V1JobSpecBuilder(this);
}
io.kubernetes.client.openapi.models.V1JobSpecBuilder builder;
public N and() {
return (N) V1beta1JobTemplateSpecFluentImpl.this.withSpec(builder.build());
}
public N endSpec() {
return and();
}
}
}
|
package uk.gov.register.exceptions;
/**
 * Used if there is a problem with the definition of a particular field.
 * For example, if the field's definition does not match the rest of the
 * environment.
 */
public class FieldDefinitionException extends RuntimeException {
/**
 * @param message description of the field-definition problem, shown to the caller
 */
public FieldDefinitionException(String message) {
super(message);
}
}
|
package io.digitalstate.stix.coo.extension.types;
import com.fasterxml.jackson.annotation.*;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import io.digitalstate.stix.coo.extension.CyberObservableExtension;
import io.digitalstate.stix.coo.objects.NetworkTrafficCoo;
import io.digitalstate.stix.validation.contraints.businessrule.BusinessRule;
import io.digitalstate.stix.validation.contraints.coo.allowedparents.AllowedParents;
import io.digitalstate.stix.validation.contraints.defaulttypevalue.DefaultTypeValue;
import io.digitalstate.stix.validation.groups.DefaultValuesProcessor;
import org.immutables.serial.Serial;
import org.immutables.value.Value;
import javax.validation.constraints.Pattern;
import java.util.Optional;
import static com.fasterxml.jackson.annotation.JsonInclude.Include.NON_EMPTY;
/**
 * tcp-ext
 * <p>
 * The TCP extension specifies a default extension for capturing network traffic
 * properties specific to TCP.
 * <p>
 * Only valid inside a network-traffic observable (see {@code @AllowedParents}),
 * and at least one of the two hex flag properties MUST be present (enforced by
 * the {@code @BusinessRule} below). The concrete immutable class
 * {@code TcpExtension} and its builder are generated by the Immutables processor.
 */
@Value.Immutable @Serial.Version(1L)
@DefaultTypeValue(value = "tcp-ext", groups = {DefaultValuesProcessor.class})
@Value.Style(typeAbstract="*Ext", typeImmutable="*", validationMethod = Value.Style.ValidationMethod.NONE, additionalJsonAnnotations = {JsonTypeName.class}, passAnnotations = {AllowedParents.class}, depluralize = true)
@JsonSerialize(as = TcpExtension.class) @JsonDeserialize(builder = TcpExtension.Builder.class)
@JsonInclude(value = NON_EMPTY, content= NON_EMPTY)
@JsonPropertyOrder({ "src_flags_hex", "dst_flags_hex" })
@JsonTypeName("tcp-ext")
@AllowedParents({NetworkTrafficCoo.class})
@BusinessRule(ifExp = "true", thenExp = "getSrcFlagsHex().isPresent() == true || getDstFlagsHex().isPresent() == true", errorMessage = "TCP Extension MUST contain at least one property from this extension")
public interface TcpExtensionExt extends CyberObservableExtension {
/**
 * Specifies the source TCP flags, as the union of all TCP flags observed
 * between the start of the traffic (as defined by the start property) and
 * the end of the traffic (as defined by the end property).
 * The value must be an even-length hex string (regex-validated).
 */
@JsonProperty("src_flags_hex")
@JsonPropertyDescription("Specifies the source TCP flags, as the union of all TCP flags observed between the start of the traffic (as defined by the start property) and the end of the traffic (as defined by the end property). ")
Optional<@Pattern(regexp = "^([a-fA-F0-9]{2})+$")
String> getSrcFlagsHex();
/**
 * Specifies the destination TCP flags, as the union of all TCP flags
 * observed between the start of the traffic (as defined by the start
 * property) and the end of the traffic (as defined by the end property).
 * The value must be an even-length hex string (regex-validated).
 */
@JsonProperty("dst_flags_hex")
@JsonPropertyDescription("Specifies the destination TCP flags, as the union of all TCP flags observed between the start of the traffic (as defined by the start property) and the end of the traffic (as defined by the end property).")
Optional<@Pattern(regexp = "^([a-fA-F0-9]{2})+$")
String> getDstFlagsHex();
}
|
package com.fangdd.tp.helper;
import java.io.*;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
/**
* @author xuwenzhen
* @date 18/1/2
*/
public class GzipHelper {
public static byte[] compress(final String str) throws IOException {
if ((str == null) || (str.length() == 0)) {
return null;
}
ByteArrayOutputStream obj = new ByteArrayOutputStream();
GZIPOutputStream gzip = new GZIPOutputStream(obj);
gzip.write(str.getBytes("UTF-8"));
gzip.flush();
gzip.close();
return obj.toByteArray();
}
public static String decompress(final byte[] compressed) throws IOException {
final StringBuilder outStr = new StringBuilder();
if ((compressed == null) || (compressed.length == 0)) {
return "";
}
if (isCompressed(compressed)) {
final GZIPInputStream gis = new GZIPInputStream(new ByteArrayInputStream(compressed));
final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(gis, "UTF-8"));
String line;
while ((line = bufferedReader.readLine()) != null) {
outStr.append(line);
}
} else {
outStr.append(compressed);
}
return outStr.toString();
}
private static boolean isCompressed(final byte[] compressed) {
return (compressed[0] == (byte) (GZIPInputStream.GZIP_MAGIC)) && (compressed[1] == (byte) (GZIPInputStream.GZIP_MAGIC >> 8));
}
}
|
package com.samsung.esrenderengine;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumentation test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {

    /** Verifies the instrumentation targets the expected application package. */
    @Test
    public void useAppContext() throws Exception {
        // Context of the app under test, supplied by the instrumentation runner.
        final Context targetContext = InstrumentationRegistry.getTargetContext();
        assertEquals("com.samsung.esrenderengine", targetContext.getPackageName());
    }
}
|
/*
* Copyright 2011-2016 by the original author(s).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.mapping.model;
import org.springframework.data.mapping.Association;
import org.springframework.data.mapping.PersistentEntity;
import org.springframework.data.mapping.PersistentProperty;
import org.springframework.data.mapping.PersistentPropertyAccessor;
/**
 * Interface capturing mutator methods for {@link PersistentEntity}s.
 *
 * @author Oliver Gierke
 * @author Mark Paluch
 */
public interface MutablePersistentEntity<T, P extends PersistentProperty<P>> extends PersistentEntity<T, P> {
    /**
     * Adds a {@link PersistentProperty} to the entity.
     *
     * @param property the property to register with this entity.
     */
    void addPersistentProperty(P property);
    /**
     * Adds an {@link Association} to the entity.
     *
     * @param association the association to register with this entity.
     */
    void addAssociation(Association<P> association);
    /**
     * Callback method to trigger validation of the {@link PersistentEntity}. As {@link MutablePersistentEntity} is not
     * immutable there might be some verification steps necessary after the object has reached its final state.
     *
     * @throws MappingException in case the entity is invalid
     */
    void verify() throws MappingException;
    /**
     * Sets the {@link PersistentPropertyAccessorFactory} for the entity. A {@link PersistentPropertyAccessorFactory}
     * creates {@link PersistentPropertyAccessor}s for instances of this entity.
     *
     * @param factory must not be {@literal null}.
     */
    void setPersistentPropertyAccessorFactory(PersistentPropertyAccessorFactory factory);
}
|
package org.nesc.ec.bigdata.service;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import org.nesc.ec.bigdata.cache.HomeCache;
import org.nesc.ec.bigdata.common.model.BrokerInfo;
import org.nesc.ec.bigdata.common.model.MeterMetric;
import org.nesc.ec.bigdata.common.util.ElasticSearchQuery;
import org.nesc.ec.bigdata.common.util.JmxCollector;
import org.nesc.ec.bigdata.constant.BrokerConfig;
import org.nesc.ec.bigdata.constant.Constants;
import org.nesc.ec.bigdata.model.ClusterInfo;
import org.apache.kafka.clients.admin.DescribeClusterResult;
import org.apache.kafka.common.Node;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
@Service
public class HomeService {

    private static final Logger LOG = LoggerFactory.getLogger(HomeService.class);

    @Autowired
    ClusterService clusterService;
    @Autowired
    KafkaAdminService kafkaAdminService;
    @Autowired
    MonitorService monitorService;
    @Autowired
    AlertService alertService;
    @Autowired
    ElasticsearchService elasticsearchService;
    @Autowired
    ZKService zkService;

    /**
     * Builds a per-cluster status summary for the home page: version, name, id,
     * controller host, broker hosts, broker count and a health status derived
     * from the configured vs. observed broker count.
     * <p>
     * Clusters that cannot be described (admin call fails or times out) are
     * skipped and logged rather than failing the whole summary.
     *
     * @return one JSON object per reachable cluster
     */
    public List<JSONObject> clusterStatistical() {
        List<ClusterInfo> clusters = clusterService.getTotalData();
        List<JSONObject> result = new ArrayList<>();
        for (ClusterInfo cluster : clusters) {
            JSONObject json = new JSONObject();
            json.put(BrokerConfig.VERSION, cluster.getKafkaVersion());
            List<String> nodeHosts = new ArrayList<>();
            json.put(Constants.JsonObject.NAME, cluster.getName());
            json.put(Constants.JsonObject.ID, cluster.getId());
            try {
                DescribeClusterResult describeClusterResult = kafkaAdminService
                        .getKafkaAdmins(String.valueOf(cluster.getId())).descCluster();
                // Bound the controller lookup so one dead cluster cannot stall the page.
                Node controller = describeClusterResult.controller().get(5, TimeUnit.SECONDS);
                json.put(BrokerConfig.CONTROLLER, controller.host());
                describeClusterResult.nodes().get().forEach(node -> nodeHosts.add(node.host()));
                json.put(BrokerConfig.NODES, nodeHosts);
                int brokers = describeClusterResult.nodes().get().size();
                json.put(BrokerConfig.BROKER, brokers);
                if (brokers == 0) {
                    json.put(Constants.Status.STATUS, Constants.Status.BAD);
                }
                if (cluster.isEnable()) {
                    // Fewer live brokers than configured => partially degraded cluster.
                    if (cluster.getBrokerSize() > brokers) {
                        json.put(Constants.Status.STATUS, Constants.Status.WARN);
                    } else if (cluster.getBrokerSize() == brokers) {
                        json.put(Constants.Status.STATUS, Constants.Status.OK);
                    }
                } else {
                    json.put(Constants.Status.STATUS, Constants.Status.OK);
                }
            } catch (Exception e) {
                // Was swallowed silently before; still skip the cluster but leave a trace.
                LOG.error("Failed to describe cluster {}", cluster.getId(), e);
                continue;
            }
            result.add(json);
        }
        return result;
    }

    /**
     * Returns the cached home-page counters (alerts, clusters, topics, consumer
     * groups). Each counter is recomputed only while its cached value is zero.
     *
     * @return the shared {@link HomeCache.HomePageCache} instance
     */
    public HomeCache.HomePageCache clusterInfo() {
        HomeCache.HomePageCache pageCache = HomeCache.getConfigCache();
        try {
            List<ClusterInfo> clusterInfoList = clusterService.getTotalData();
            pageCache.setAlertSize(pageCache.getAlertSize() == 0 ? alertService.countData() : pageCache.getAlertSize());
            pageCache.setClusterSize(pageCache.getClusterSize() == 0 ? clusterInfoList.size() : pageCache.getClusterSize());
            pageCache.setTopicSize(pageCache.getTopicSize() == 0 ? this.getTopicList(clusterInfoList) : pageCache.getTopicSize());
            pageCache.setGroupSize(pageCache.getGroupSize() == 0 ? calcGroup(clusterInfoList) : pageCache.getGroupSize());
        } catch (Exception e) {
            // Log the full stack trace; the old code logged only e.getMessage().
            LOG.error("Failed to load home page cluster data", e);
        }
        return pageCache;
    }

    /**
     * Sums consumer-group counts across all clusters, combining the groups
     * reported by the Kafka admin API with those registered in ZooKeeper.
     * The first failing cluster aborts the scan; the partial sum is returned.
     */
    private int calcGroup(List<ClusterInfo> clusterInfos) {
        int kafkaGroups = 0;
        int zkGroups = 0;
        try {
            for (ClusterInfo cluster : clusterInfos) {
                kafkaGroups += kafkaAdminService.getKafkaAdmins(cluster.getId().toString()).listConsumerGroups().size();
                zkGroups += zkService.getZK(cluster.getId().toString()).listConsumerGroups().size();
            }
        } catch (Exception e) {
            LOG.error("Failed to count consumer groups across clusters", e);
        }
        return (zkGroups + kafkaGroups);
    }

    /**
     * Counts topics across the given clusters. The first failing cluster aborts
     * the scan; the partial count is returned.
     *
     * @param clusters clusters to inspect
     * @return total topic count over the clusters scanned so far
     */
    public int getTopicList(List<ClusterInfo> clusters) {
        int count = 0;
        try {
            for (ClusterInfo cluster : clusters) {
                Set<String> topics = kafkaAdminService.getKafkaAdmins(cluster.getId().toString()).listTopics();
                count += topics.size();
            }
        } catch (Exception e) {
            LOG.error("Failed to count topics across clusters", e);
        }
        return count;
    }

    /**
     * Collects JMX meter metrics from every broker of the given cluster and
     * stamps each metric with cluster id, name, location and broker host.
     *
     * @param clusterInfo cluster whose brokers are polled via JMX
     * @return the collected, annotated metrics
     * @throws Exception if ZooKeeper lookup or JMX collection fails
     */
    public Set<MeterMetric> brokerMetric(ClusterInfo clusterInfo) throws Exception {
        Set<MeterMetric> metricSet = new HashSet<>();
        List<BrokerInfo> brokers = zkService.getZK(clusterInfo.getId().toString()).getBrokers();
        Map<String, Set<MeterMetric>> metricsByBroker = JmxCollector.getInstance().metricEveryBroker(brokers);
        metricsByBroker.forEach((host, brokerMetrics) ->
            brokerMetrics.forEach(meterMetric -> {
                meterMetric.setClusterID(clusterInfo.getId().toString());
                meterMetric.setClusterName(clusterInfo.getName());
                meterMetric.setLocation(clusterInfo.getLocation());
                meterMetric.setBroker(host);
                metricSet.add(meterMetric);
            })
        );
        return metricSet;
    }

    /**
     * Fetches cluster trend data from Elasticsearch for the given time window.
     *
     * @return the trend series keyed by metric name, or {@code null} when the
     *         query fails (kept for backward compatibility with callers)
     */
    public Map<String, JSONArray> trendClusterData(long start, long end, long clientId) {
        Map<String, JSONArray> map = null;
        try {
            map = elasticsearchService.clusterTrendData(start, end, clientId);
        } catch (Exception e) {
            LOG.error("Failed to load cluster trend data", e);
        }
        return map;
    }

    /**
     * Builds and runs the summary-metric trend query for the given window.
     * (Method name "summatTrend" is kept as-is: it is public API.)
     */
    public Map<String, JSONArray> summatTrend(long start, long end, String interval) {
        String searchQuery = ElasticSearchQuery.summaryMetricTrendQuery(interval, start, end);
        return elasticsearchService.summaryMetricTrend(searchQuery, start, end);
    }

    /**
     * Converts the summary metrics for the window into a JSON array of
     * {metricName, value} objects.
     */
    public JSONArray summaryData(long start, long end) {
        Map<String, Long> metrics = elasticsearchService.summaryMetric(start, end);
        JSONArray array = new JSONArray();
        metrics.forEach((metricName, value) -> {
            JSONObject obj = new JSONObject();
            obj.put(BrokerConfig.METRICNAME, metricName);
            obj.put(Constants.JsonObject.VALUE, value);
            array.add(obj);
        });
        return array;
    }

    /** Simple value holder for per-broker metric rows. */
    static class MetricVo {
        private String broker;
        private String port;
        private String jmxPort;
        private String byteIn;
        private String byteOut;
        private String messageIn;
        private Double byteInOneMin;
        private Double byteOutOneMin;
        private String msgInOneMin;

        public String getBroker() {
            return broker;
        }
        public void setBroker(String broker) {
            this.broker = broker;
        }
        public String getPort() {
            return port;
        }
        public void setPort(String port) {
            this.port = port;
        }
        public String getJmxPort() {
            return jmxPort;
        }
        void setJmxPort(String jmxPort) {
            this.jmxPort = jmxPort;
        }
        public String getByteIn() {
            return byteIn;
        }
        void setByteIn(String byteIn) {
            this.byteIn = byteIn;
        }
        public String getByteOut() {
            return byteOut;
        }
        void setByteOut(String byteOut) {
            this.byteOut = byteOut;
        }
        public String getMessageIn() {
            return messageIn;
        }
        void setMessageIn(String messageIn) {
            this.messageIn = messageIn;
        }
        public Double getByteInOneMin() {
            return byteInOneMin;
        }
        void setByteInOneMin(Double byteInOneMin) {
            this.byteInOneMin = byteInOneMin;
        }
        public Double getByteOutOneMin() {
            return byteOutOneMin;
        }
        void setByteOutOneMin(Double byteOutOneMin) {
            this.byteOutOneMin = byteOutOneMin;
        }
        public String getMsgInOneMin() {
            return msgInOneMin;
        }
        void setMsgInOneMin(String msgInOneMin) {
            this.msgInOneMin = msgInOneMin;
        }
    }
}
|
// Copyright (c) YugaByte, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations
// under the License.
//
package org.yb.cql;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row;
import org.junit.Test;
import static org.yb.AssertionWrappers.assertEquals;
import static org.yb.AssertionWrappers.assertFalse;
import static org.yb.AssertionWrappers.assertNull;
import static org.yb.AssertionWrappers.assertTrue;
import java.math.BigDecimal;
import java.util.Iterator;
import java.util.Random;
import java.util.TreeSet;
import org.yb.YBTestRunner;
import org.junit.runner.RunWith;
@RunWith(value=YBTestRunner.class)
public class TestDecimalDataType extends BaseCQLTest {
private String getRandomVarInt(boolean withSign, int length) {
String digits = "0123456789";
final Random random = new Random();
String s = "";
for (int j = 0; j < length; j++) {
s += digits.charAt(random.nextInt(digits.length() - 1));
}
if (withSign) {
int i = random.nextInt(4);
if (i < 2) {
// Half of the time make this a negative number.
s = "-" + s;
} else if (i == 3) {
// 25% of the time append a '+' sign to test parsing.
s = "+" + s;
}
}
return s;
}
private String getRandomVarInt(boolean withSign) {
final Random random = new Random();
int length = random.nextInt(100) + 20;
return getRandomVarInt(withSign, length);
}
private String getRandomDecimal() {
String decimal = getRandomVarInt(true) + "." + getRandomVarInt(false);
final Random random = new Random();
int r = random.nextInt(10);
if (r < 3) {
return decimal + "E" + getRandomVarInt(true, 3);
} else if (r < 5) {
return decimal + "e" + getRandomVarInt(true, 3);
}
return decimal;
}
@Test
public void testDecimalDataTypeInHash() throws Exception {
BigDecimal hash = new BigDecimal("-0.2");
TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
decimals.add(new BigDecimal("-100.02"));
decimals.add(new BigDecimal("-43.030016"));
decimals.add(new BigDecimal("-6.00001"));
decimals.add(new BigDecimal("-6.000001"));
decimals.add(new BigDecimal("-6"));
decimals.add(new BigDecimal("-5.99999956"));
decimals.add(new BigDecimal("-5.8999999"));
decimals.add(new BigDecimal("-1.2"));
decimals.add(new BigDecimal("-1.15"));
decimals.add(new BigDecimal("-.05"));
decimals.add(new BigDecimal("0"));
decimals.add(new BigDecimal("0.05"));
decimals.add(new BigDecimal("1.05"));
decimals.add(new BigDecimal("1.15"));
decimals.add(new BigDecimal("1.2"));
testDecimalDataTypeInHash(hash, decimals);
}
@Test
public void testDecimalDataTypeInHashRandom() throws Exception {
final Random random = new Random();
BigDecimal hashDecimal = new BigDecimal(getRandomDecimal());
TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
for (int i = 0; i < 100; i++) {
BigDecimal decimal;
do {
decimal = new BigDecimal(getRandomDecimal());
} while (!decimals.add(decimal));
}
testDecimalDataTypeInHash(hashDecimal, decimals);
}
  /**
   * Exercises decimal columns in hash, range and value positions: inserts one
   * row per range decimal under the same hash key, then verifies SELECT
   * ordering, UPDATE and DELETE addressed by the decimal keys.
   *
   * @param hashDecimal the decimal hash-key value shared by all rows
   * @param decimals    distinct decimals used as range keys (naturally sorted)
   */
  private void testDecimalDataTypeInHash(BigDecimal hashDecimal, TreeSet<BigDecimal> decimals)
      throws Exception {
    LOG.info("TEST CQL DECIMAL TYPE IN HASH - Start");
    // Create table
    String tableName = "test_decimal";
    String createStmt = String.format("CREATE TABLE %s " +
        "(h1 decimal, h2 int, r1 decimal, r2 int, v1 decimal, v2 int, " +
        "primary key((h1, h2), r1, r2));", tableName);
    session.execute(createStmt);
    for (BigDecimal decimal : decimals) {
      // Insert one row. Deliberately insert with same hash key but different range column values.
      final String insertStmt =
          String.format("INSERT INTO %s (h1, h2, r1, r2, v1, v2) VALUES (%s, 1, %s, %d, %s, 2);",
              tableName, hashDecimal.toString(), decimal.toString(), decimal.intValue(),
              decimal.toString());
      LOG.info("insertStmt: " + insertStmt);
      session.execute(insertStmt);
    }
    // Select row by the hash key. Results should come sorted by range keys in ascending order.
    final String selectStmt = String.format("SELECT h1, h2, r1, r2, v1, v2 FROM %s " +
        "WHERE h1 = %s AND h2 = 1;", tableName, hashDecimal.toString());
    LOG.info("selectStmt: " + selectStmt);
    ResultSet rs = session.execute(selectStmt);
    assertEquals(decimals.size(), rs.getAvailableWithoutFetching());
    // The TreeSet iterates in ascending order, matching the expected row order.
    for (Iterator<BigDecimal> iter = decimals.iterator(); iter.hasNext();) {
      Row row = rs.one();
      BigDecimal decimal = iter.next();
      // compareTo (not equals) ignores scale differences like 1.0 vs 1.00.
      assertEquals(0, row.getDecimal("h1").compareTo(hashDecimal));
      assertEquals(1, row.getInt("h2"));
      assertEquals(0, row.getDecimal("r1").compareTo(decimal));
      assertEquals(decimal.intValue(), row.getInt("r2"));
      assertEquals(0, row.getDecimal("v1").compareTo(decimal));
      assertEquals(2, row.getInt("v2"));
    }
    // Test UPDATE with hash and range decimal keys.
    for (Iterator<BigDecimal> iter = decimals.iterator(); iter.hasNext();) {
      BigDecimal rangeDecimal = iter.next();
      BigDecimal newDecimalValue = new BigDecimal(getRandomDecimal());
      final String updateStmt =
          String.format("UPDATE %s SET v1 = %s WHERE h1 = %s AND h2 = 1 and r1 = %s and r2 = %d",
              tableName, newDecimalValue.toString(), hashDecimal.toString(),
              rangeDecimal.toString(), rangeDecimal.intValue());
      rs = session.execute(updateStmt);
      // Re-read the updated row and verify only v1 changed.
      final String selectStmt3 = String.format("SELECT h1, h2, r1, r2, v1, v2 FROM %s " +
          "WHERE h1 = %s AND h2 = 1 AND r1 = %s and r2 = %d;", tableName, hashDecimal.toString(),
          rangeDecimal.toString(), rangeDecimal.intValue());
      rs = session.execute(selectStmt3);
      assertEquals(1, rs.getAvailableWithoutFetching());
      Row row = rs.one();
      assertEquals(0, row.getDecimal("h1").compareTo(hashDecimal));
      assertEquals(1, row.getInt("h2"));
      assertEquals(0, row.getDecimal("r1").compareTo(rangeDecimal));
      assertEquals(rangeDecimal.intValue(), row.getInt("r2"));
      assertEquals(0, row.getDecimal("v1").compareTo(newDecimalValue));
      assertEquals(2, row.getInt("v2"));
    }
    // Test DELETE with hash and range decimal keys.
    for (Iterator<BigDecimal> iter = decimals.iterator(); iter.hasNext();) {
      BigDecimal rangeDecimal = iter.next();
      final String deleteStmt =
          String.format("DELETE FROM %s WHERE h1 = %s AND h2 = 1 and r1 = %s and r2 = %d",
              tableName, hashDecimal.toString(), rangeDecimal.toString(),
              rangeDecimal.intValue());
      rs = session.execute(deleteStmt);
      // The deleted row must no longer be selectable.
      final String selectStmt3 = String.format("SELECT h1, h2, r1, r2, v1, v2 FROM %s " +
          "WHERE h1 = %s AND h2 = 1 AND r1 = %s and r2 = %d;", tableName, hashDecimal.toString(),
          rangeDecimal.toString(), rangeDecimal.intValue());
      rs = session.execute(selectStmt3);
      assertEquals(0, rs.getAvailableWithoutFetching());
    }
    final String dropStmt = "DROP TABLE test_decimal;";
    session.execute(dropStmt);
    LOG.info("TEST CQL DECIMAL TYPE IN HASH - End");
  }
  /**
   * Verifies that different textual spellings of the same decimal value
   * (exponent notation variants, explicit '+' exponents, leading-zero forms)
   * hash to the same row, i.e. decimals are canonicalized before hashing.
   */
  @Test
  public void testCanonicalDecimalInHash() throws Exception {
    LOG.info("TEST CQL CANONICAL DECIMAL IN HASH - Start");
    // Create table
    String createStmt =
        "CREATE TABLE test_decimal(h1 decimal, h2 int, r1 decimal, primary key((h1, h2), r1));";
    session.execute(createStmt);
    // Seed exactly one row for 10.1 and one for -10.1.
    String insertStmt = "INSERT INTO test_decimal (h1, h2, r1) VALUES (10.1, 1, 2.0);";
    LOG.info("insertStmt: " + insertStmt);
    session.execute(insertStmt);
    insertStmt = "INSERT INTO test_decimal (h1, h2, r1) VALUES (-10.1, 1, 2.0);";
    LOG.info("insertStmt: " + insertStmt);
    session.execute(insertStmt);
    // Every spelling below denotes exactly 10.1 (resp. -10.1).
    final String[] positiveDecimals = { "10.1",
                                        ".101E2",
                                        "1.01E1",
                                        ".101E+2",
                                        "1.01E+1",
                                        ".101e2",
                                        "1.01e1",
                                        ".101e+2",
                                        "1.01e+1",
                                        "0.101E2",
                                        "0.101E+2",
                                        "0.101e2",
                                        "0.101e+2" };
    final String[] negativeDecimals = { "-10.1",
                                        "-.101E2",
                                        "-1.01E1",
                                        "-.101E+2",
                                        "-1.01E+1",
                                        "-.101e2",
                                        "-1.01e1",
                                        "-.101e+2",
                                        "-1.01e+1",
                                        "-0.101E2",
                                        "-0.101E+2",
                                        "-0.101e2",
                                        "-0.101e+2" };
    // Test that we can query by using different representations of the same decimal value.
    for (String dec : positiveDecimals) {
      final String selectStmt2 = String.format("SELECT h1, h2, r1 FROM test_decimal " +
          "WHERE h1 = %s AND h2 = 1;", dec);
      LOG.info("selectStmt: " + selectStmt2);
      ResultSet rs = session.execute(selectStmt2);
      if (rs.getAvailableWithoutFetching() != 1) {
        LOG.info("Failed select: " + selectStmt2);
      }
      // Each representation must find exactly the one seeded row.
      assertEquals(1, rs.getAvailableWithoutFetching());
    }
    for (String dec : negativeDecimals) {
      final String selectStmt2 = String.format("SELECT h1, h2, r1 FROM test_decimal " +
          "WHERE h1 = %s AND h2 = 1;", dec);
      LOG.info("selectStmt: " + selectStmt2);
      ResultSet rs = session.execute(selectStmt2);
      if (rs.getAvailableWithoutFetching() != 1) {
        LOG.info("Failed select: " + selectStmt2);
      }
      assertEquals(1, rs.getAvailableWithoutFetching());
    }
    final String dropStmt = "DROP TABLE test_decimal;";
    session.execute(dropStmt);
    LOG.info("TEST CQL CANONICAL DECIMAL IN HASH - End");
  }
  /**
   * Inserts the given decimals as range keys under one hash key using the
   * requested CLUSTERING ORDER, then verifies rows come back in that order.
   *
   * @param decimals        distinct range-key values (naturally sorted set)
   * @param sortIsAscending true for ASC clustering order, false for DESC
   */
  private void decimalDataTypeInRange(TreeSet<BigDecimal> decimals,
      boolean sortIsAscending) throws Exception {
    String sortOrder = "ASC";
    if (!sortIsAscending) {
      sortOrder = "DESC";
    }
    // Create table
    String createStmt = String.format("CREATE TABLE test_decimal " +
        "(h1 varchar, r1 decimal, v1 int, primary key(h1, r1)) WITH CLUSTERING ORDER BY (r1 %s);",
        sortOrder);
    session.execute(createStmt);
    for (BigDecimal decimal : decimals) {
      // Insert one row. Deliberately insert with same hash key but different range column values.
      final String insertStmt = String.format("INSERT INTO test_decimal (h1, r1, v1) " +
          "VALUES ('bob', %s, 1);", decimal.toString());
      LOG.info("insertStmt: " + insertStmt);
      session.execute(insertStmt);
    }
    final String selectStmt = "SELECT h1, r1, v1 FROM test_decimal WHERE h1 = 'bob';";
    ResultSet rs = session.execute(selectStmt);
    assertEquals(decimals.size(), rs.getAvailableWithoutFetching());
    // Verify data is sorted as expected.
    Iterator<BigDecimal> iter;
    if (sortIsAscending) {
      iter = decimals.iterator();
    } else {
      iter = decimals.descendingIterator();
    }
    while (iter.hasNext()) {
      Row row = rs.one();
      BigDecimal nextDecimal = iter.next();
      // compareTo (not equals) ignores scale differences like 1.0 vs 1.00.
      assertEquals(0, row.getDecimal("r1").compareTo(nextDecimal));
    }
    final String dropStmt = "DROP TABLE test_decimal;";
    session.execute(dropStmt);
  }
private TreeSet<BigDecimal> getRandomDecimalSet() {
final Random random = new Random();
TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
for (int i = 0; i < 100; i++) {
BigDecimal decimal;
do {
decimal = new BigDecimal(getRandomDecimal());
} while (!decimals.add(decimal));
}
return decimals;
}
@Test
public void testAscendingDecimalDataTypeInRangeRandom() throws Exception {
LOG.info("TEST CQL RANDOM ASCENDING DECIMAL TYPE IN RANGE - Start");
decimalDataTypeInRange(getRandomDecimalSet(), true);
LOG.info("TEST CQL RANDOM ASCENDING DECIMAL TYPE IN RANGE - End");
}
@Test
public void testDescendingDecimalDataTypeInRangeRandom() throws Exception {
LOG.info("TEST CQL RANDOM DESCENDING DECIMAL TYPE IN RANGE - Start");
decimalDataTypeInRange(getRandomDecimalSet(), false);
LOG.info("TEST CQL RANDOM DESCENDING DECIMAL TYPE IN RANGE - End");
}
private TreeSet<BigDecimal> getDecimalSet() {
TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
decimals.add(new BigDecimal("-100.1"));
decimals.add(new BigDecimal("-83.21"));
decimals.add(new BigDecimal("-83.2"));
decimals.add(new BigDecimal("-83.1999"));
decimals.add(new BigDecimal("-27.9"));
decimals.add(new BigDecimal("-1.2"));
decimals.add(new BigDecimal("-1.199999"));
decimals.add(new BigDecimal("-1.15"));
decimals.add(new BigDecimal("-1.0"));
decimals.add(new BigDecimal("-0.99"));
decimals.add(new BigDecimal("0"));
decimals.add(new BigDecimal("0.005"));
decimals.add(new BigDecimal("0.05"));
decimals.add(new BigDecimal("0.5"));
decimals.add(new BigDecimal("0.75"));
decimals.add(new BigDecimal("0.99"));
decimals.add(new BigDecimal("1.0"));
decimals.add(new BigDecimal("1.15"));
decimals.add(new BigDecimal("1.2"));
decimals.add(new BigDecimal("1.200000001"));
decimals.add(new BigDecimal("3.2"));
decimals.add(new BigDecimal("12.7"));
decimals.add(new BigDecimal("55.13435"));
decimals.add(new BigDecimal("189.327"));
return decimals;
}
@Test
public void testAscendingDecimalDataTypeInRange() throws Exception {
LOG.info("TEST CQL ASCENDING DECIMAL TYPE IN RANGE - Start");
decimalDataTypeInRange(getDecimalSet(), false);
LOG.info("TEST CQL ASCENDING DECIMAL TYPE IN RANGE - End");
}
@Test
public void testDescendingDecimalDataTypeInRange() throws Exception {
LOG.info("TEST CQL DESCENDING DECIMAL TYPE IN RANGE - Start");
decimalDataTypeInRange(getDecimalSet(), true);
LOG.info("TEST CQL DESCENDING DECIMAL TYPE IN RANGE - End");
}
@Test
public void testDecimalComparisonInRange() throws Exception {
LOG.info("TEST CQL DECIMAL TYPE IN RANGE - Start");
// Create table
String createStmt = "CREATE TABLE test_decimal" +
"(h1 varchar, r1 decimal, r2 decimal, v1 int, primary key(h1, r1, r2));";
session.execute(createStmt);
final Random random = new Random();
TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
for (int i = 0; i < 100; i++) {
BigDecimal decimal;
do {
decimal = new BigDecimal(getRandomDecimal());
} while (!decimals.add(decimal));
// Insert one row. Deliberately insert with same hash key but different range column values.
final String insertStmt =
String.format("INSERT INTO test_decimal (h1, r1, r2, v1) " +
"VALUES ('bob', %s, 1.1, 1);", decimal.toString());
LOG.info("insertStmt: " + insertStmt);
session.execute(insertStmt);
}
int i = 1;
for (Iterator<BigDecimal> iter = decimals.iterator(); iter.hasNext(); i++) {
BigDecimal decimal = iter.next();
// Select rows that are greater than a specific decimal.
final String selectStmt =
String.format("SELECT h1, r1, v1 FROM test_decimal " +
"WHERE h1 = 'bob' AND r1 > %s;", decimal.toString());
LOG.info("selectStmt: " + selectStmt);
ResultSet rs = session.execute(selectStmt);
LOG.info("got " + rs.getAvailableWithoutFetching() + " results");
assertEquals(decimals.size() - i, rs.getAvailableWithoutFetching());
}
i = 1;
for (Iterator<BigDecimal> iter = decimals.descendingIterator(); iter.hasNext(); i++) {
BigDecimal decimal = iter.next();
// Select rows that are greater than a specific decimal.
final String selectStmt =
String.format("SELECT h1, r1, v1 FROM test_decimal " +
"WHERE h1 = 'bob' AND r1 < %s;", decimal.toString());
LOG.info("selectStmt: " + selectStmt);
ResultSet rs = session.execute(selectStmt);
LOG.info("got " + rs.getAvailableWithoutFetching() + " results");
assertEquals(decimals.size() - i, rs.getAvailableWithoutFetching());
}
final String dropStmt = "DROP TABLE test_decimal;";
session.execute(dropStmt);
LOG.info("TEST CQL DECIMAL TYPE IN RANGE - End");
}
@Test
public void testDecimalMultipleComparisonInRange() throws Exception {
BigDecimal decimal1 = new BigDecimal("1.2");
BigDecimal decimal2 = new BigDecimal("3.4");
BigDecimal delta = new BigDecimal(".05");
LOG.info("TEST CQL DECIMAL TYPE IN RANGE - Start");
testDecimalMultipleComparisonInRange(decimal1, decimal2, delta);
LOG.info("TEST CQL DECIMAL TYPE IN RANGE - End");
}
@Test
public void testDecimalMultipleComparisonInRangeRandom() throws Exception {
final Random random = new Random();
BigDecimal decimal1 = new BigDecimal(getRandomDecimal());
BigDecimal decimal2 = new BigDecimal(getRandomDecimal());
BigDecimal delta = new BigDecimal(".05");
LOG.info("TEST CQL DECIMAL TYPE IN RANGE RANDOM - Start");
testDecimalMultipleComparisonInRange(decimal1, decimal2, delta);
LOG.info("TEST CQL DECIMAL TYPE IN RANGE RANDOM - End");
}
  /**
   * Inserts a single row with range keys (decimal1, decimal2) and checks all
   * four combinations of '>'/'<' bounds offset by {@code delta}: the two
   * bracketing combinations match the row, the two excluding ones do not.
   *
   * @param decimal1 value for range column r1
   * @param decimal2 value for range column r2
   * @param delta    offset used to construct the comparison bounds
   */
  private void testDecimalMultipleComparisonInRange(BigDecimal decimal1, BigDecimal decimal2,
      BigDecimal delta) throws Exception {
    // Create table
    String createStmt = "CREATE TABLE test_decimal" +
        "(h1 varchar, r1 decimal, r2 decimal, v1 int, primary key(h1, r1, r2));";
    session.execute(createStmt);
    final String insertStmt =
        String.format("INSERT INTO test_decimal (h1, r1, r2, v1) " +
            "VALUES ('bob', %s, %s, 1);", decimal1.toString(), decimal2.toString());
    LOG.info("insertStmt: " + insertStmt);
    session.execute(insertStmt);
    BigDecimal smallerDecimal1 = decimal1.subtract(delta);
    BigDecimal smallerDecimal2 = decimal2.subtract(delta);
    BigDecimal largerDecimal1 = decimal1.add(delta);
    BigDecimal largerDecimal2 = decimal2.add(delta);
    // r1 > (decimal1 - delta) AND r2 < (decimal2 + delta) brackets the row.
    String selectStmt = String.format("SELECT h1, r1, r2, v1 FROM test_decimal " +
        "WHERE h1 = 'bob' AND r1 > %s AND r2 < %s;", smallerDecimal1.toString(),
        largerDecimal2.toString());
    ResultSet rs = session.execute(selectStmt);
    assertEquals(1, rs.getAvailableWithoutFetching());
    // r1 < (decimal1 + delta) AND r2 > (decimal2 - delta) also brackets it.
    selectStmt = String.format("SELECT h1, r1, r2, v1 FROM test_decimal " +
        "WHERE h1 = 'bob' AND r1 < %s AND r2 > %s;", largerDecimal1.toString(),
        smallerDecimal2.toString());
    rs = session.execute(selectStmt);
    assertEquals(1, rs.getAvailableWithoutFetching());
    // r1 > (decimal1 + delta) excludes the row regardless of r2.
    selectStmt = String.format("SELECT h1, r1, r2, v1 FROM test_decimal " +
        "WHERE h1 = 'bob' AND r1 > %s AND r2 < %s;", largerDecimal1.toString(),
        smallerDecimal2.toString());
    rs = session.execute(selectStmt);
    assertEquals(0, rs.getAvailableWithoutFetching());
    // r1 < (decimal1 - delta) excludes the row regardless of r2.
    selectStmt = String.format("SELECT h1, r1, r2, v1 FROM test_decimal " +
        "WHERE h1 = 'bob' AND r1 < %s AND r2 > %s;", smallerDecimal1.toString(),
        largerDecimal2.toString());
    rs = session.execute(selectStmt);
    assertEquals(0, rs.getAvailableWithoutFetching());
    final String dropStmt = "DROP TABLE test_decimal;";
    session.execute(dropStmt);
  }
  @Test
  public void testConversionsRandom() throws Exception {
    // Test the conversions from varint -> (tinyint, smallint, int, bigint, decimal, double, float)
    // and decimal -> (double, float).
    LOG.info("TEST CQL CONVERSIONS RANDOM - Start");
    String createStmt = "CREATE TABLE test_decimal" +
        "(h1 decimal, r1 decimal, v1 tinyint, v2 smallint, v3 int, v4 bigint, " +
        "v5 float, v6 double, v7 float, v8 double, primary key(h1, r1));";
    session.execute(createStmt);
    TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
    BigDecimal decimalHash;
    for (int i = 0; i < 100; i++) {
      // Create a unique decimal hash.
      do {
        decimalHash = new BigDecimal(getRandomDecimal());
      } while (!decimals.add(decimalHash));
      // NOTE(review): a fresh Random per iteration is wasteful but harmless here.
      final Random random = new Random();
      // Bounded random values within each integer type's range.
      final int yqlTinyInt = random.nextInt(255) - 128;
      final int yqlSmallInt = random.nextInt(65535) - 32768;
      final int yqlInt = random.nextInt();
      final long yqlBigInt = random.nextLong();
      final float yqlFloat = random.nextFloat() * random.nextLong();
      final double yqlDouble = random.nextDouble() * random.nextLong();
      // Insert a very large integer in a decimal column.
      String yqlVarInt = getRandomVarInt(true);
      // v5/v6 deliberately receive int/bigint literals (via %d) to test
      // integer -> float/double conversion; v7/v8 receive real float literals.
      final String insertStmt =
          String.format("INSERT INTO test_decimal (h1, r1, v1, v2, v3, v4, v5, v6, v7, v8) " +
              "VALUES (%s, %s, %d, %d, %d, %d, %d, %d, %f, %f);", decimalHash.toString(), yqlVarInt,
              yqlTinyInt, yqlSmallInt, yqlInt, yqlBigInt, yqlInt, yqlBigInt, yqlFloat, yqlDouble);
      LOG.info("Insert statement: " + insertStmt);
      session.execute(insertStmt);
      final String selectStmt = String.format("SELECT h1, r1, v1, v2, v3, v4, v5, v6, v7, v8 " +
          "FROM test_decimal WHERE h1 = %s;", decimalHash.toString());
      ResultSet rs = session.execute(selectStmt);
      assertEquals(1, rs.getAvailableWithoutFetching());
      Row row = rs.one();
      BigDecimal decimal = new BigDecimal(yqlVarInt);
      assertEquals(0, row.getDecimal("h1").compareTo(decimalHash));
      assertEquals(0, row.getDecimal("r1").compareTo(decimal));
      assertEquals(yqlTinyInt, row.getByte("v1"));
      assertEquals(yqlSmallInt, row.getShort("v2"));
      assertEquals(yqlInt, row.getInt("v3"));
      assertEquals(yqlBigInt, row.getLong("v4"));
      // Integer-sourced floats compare exactly; float-sourced ones need tolerance.
      assertEquals(yqlInt, row.getFloat("v5"), 0);
      assertEquals(yqlBigInt, row.getDouble("v6"), 0);
      assertEquals(yqlFloat, row.getFloat("v7"), 1e-5);
      assertEquals(yqlDouble, row.getDouble("v8"), 1e-5);
    }
    final String dropStmt = "DROP TABLE test_decimal;";
    session.execute(dropStmt);
    LOG.info("TEST CQL CONVERSIONS RANDOM - End");
  }
@Test
public void testConversionsLimits() throws Exception {
// Test the numeric data types limits. This process includes conversions from varint ->
// (tinyint, smallint, int, bigint, decimal, double, float) and decimal -> (double, float).
LOG.info("TEST CQL CONVERSIONS LIMITS - Start");
String createStmt = "CREATE TABLE test_decimal" +
"(h1 decimal, r1 decimal, v1 tinyint, v2 smallint, v3 int, v4 bigint, " +
"v5 float, v6 double, primary key(h1, r1));";
session.execute(createStmt);
TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
BigDecimal decimalHash;
// Create a unique decimal hash.
do {
decimalHash = new BigDecimal(getRandomDecimal());
} while (!decimals.add(decimalHash));
// Test the minimum values allowed for each integer type.
final String insertStmtFmt =
"INSERT INTO test_decimal (h1, r1, v1, v2, v3, v4, v5, v6) " +
"VALUES (%s, 1, %d, %d, %d, %d, %e, %e);";
String insertStmt = String.format(insertStmtFmt, decimalHash.toString(), -128, -32768,
Integer.MIN_VALUE, Long.MIN_VALUE, Float.MIN_VALUE,
Double.MIN_VALUE);
session.execute(insertStmt);
final String selectStmtFmt =
"SELECT h1, v1, v2, v3, v4, v5, v6 FROM test_decimal WHERE h1 = %s;";
String selectStmt = String.format(selectStmtFmt, decimalHash.toString());
LOG.info("selectStmt: " + selectStmt);
ResultSet rs = session.execute(selectStmt);
assertEquals(1, rs.getAvailableWithoutFetching());
Row row = rs.one();
assertEquals(0, row.getDecimal("h1").compareTo(decimalHash));
assertEquals(-128, row.getByte("v1"));
assertEquals(-32768, row.getShort("v2"));
assertEquals(Integer.MIN_VALUE, row.getInt("v3"));
assertEquals(Long.MIN_VALUE, row.getLong("v4"));
assertEquals(Float.MIN_VALUE, row.getFloat("v5"), Float.MIN_VALUE);
assertEquals(Double.MIN_VALUE, row.getDouble("v6"), Double.MIN_VALUE);
// Test the maximum values allowed for each integer type.
insertStmt = String.format(insertStmtFmt, decimalHash.toString(), 127, 32767, Integer.MAX_VALUE,
Long.MAX_VALUE, Float.MAX_VALUE, Double.MAX_VALUE);
session.execute(insertStmt);
selectStmt = String.format(selectStmtFmt, decimalHash.toString());
rs = session.execute(selectStmt);
assertEquals(1, rs.getAvailableWithoutFetching());
row = rs.one();
assertEquals(0, row.getDecimal("h1").compareTo(decimalHash));
assertEquals(127, row.getByte("v1"));
assertEquals(32767, row.getShort("v2"));
assertEquals(Integer.MAX_VALUE, row.getInt("v3"));
assertEquals(Long.MAX_VALUE, row.getLong("v4"));
assertEquals(Float.MAX_VALUE, row.getFloat("v5"), Float.MAX_VALUE / 1e5);
assertEquals(Double.MAX_VALUE, row.getDouble("v6"), Double.MAX_VALUE / 1e5);
final String dropStmt = "DROP TABLE test_decimal;";
session.execute(dropStmt);
LOG.info("TEST CQL CONVERSIONS LIMITS - End");
}
@Test
public void testDecimalDataTypeSum() throws Exception {
BigDecimal hash = new BigDecimal("-0.2");
TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
decimals.add(new BigDecimal("-100.02"));
decimals.add(new BigDecimal("-43.030016"));
decimals.add(new BigDecimal("-6.00001"));
decimals.add(new BigDecimal("-6.000001"));
decimals.add(new BigDecimal("-6"));
decimals.add(new BigDecimal("-5.99999956"));
decimals.add(new BigDecimal("-5.8999999"));
decimals.add(new BigDecimal("-1.2"));
decimals.add(new BigDecimal("-1.15"));
decimals.add(new BigDecimal("-.05"));
decimals.add(new BigDecimal("-1.01E+2"));
decimals.add(new BigDecimal("0"));
decimals.add(new BigDecimal("0.05"));
decimals.add(new BigDecimal("1.05"));
decimals.add(new BigDecimal("1.15"));
decimals.add(new BigDecimal("1.2"));
decimals.add(new BigDecimal("1.01E+5"));
testDecimalDataTypeSum(hash, decimals);
}
@Test
public void testDecimalDataTypeSumRandom() throws Exception {
final Random random = new Random();
BigDecimal hashDecimal = new BigDecimal(getRandomDecimal());
TreeSet<BigDecimal> decimals = new TreeSet<BigDecimal>();
for (int i = 0; i < 100; i++) {
BigDecimal decimal;
do {
decimal = new BigDecimal(getRandomDecimal());
} while (!decimals.add(decimal));
}
testDecimalDataTypeSum(hashDecimal, decimals);
}
private void testDecimalDataTypeSum(BigDecimal hashDecimal, TreeSet<BigDecimal> decimals)
throws Exception {
LOG.info("TEST CQL DECIMAL TYPE IN HASH - Start");
// Create table
String tableName = "test_decimal";
String createStmt = String.format("CREATE TABLE %s " +
"(h1 decimal, h2 int, r1 decimal, r2 int, v1 decimal, v2 int, " +
"primary key((h1, h2), r1, r2));", tableName);
session.execute(createStmt);
BigDecimal sumDecimal = new BigDecimal("0");
for (BigDecimal decimal : decimals) {
// Insert one row. Deliberately insert with same hash key but different range column values.
final String insertStmt =
String.format("INSERT INTO %s (h1, h2, r1, r2, v1, v2) VALUES (%s, 1, %s, %d, %s, 2);",
tableName, hashDecimal.toString(), decimal.toString(), decimal.intValue(),
decimal.toString());
LOG.info("insertStmt: " + insertStmt);
session.execute(insertStmt);
sumDecimal = sumDecimal.add(decimal);
}
// Select sum of rows by the hash key. Should be 1 result row.
final String selectSumStmt = String.format("SELECT sum(v1) FROM %s " +
"WHERE h1 = %s AND h2 = 1;", tableName, hashDecimal.toString());
LOG.info("selectSumStmt: " + selectSumStmt);
ResultSet rsSum = session.execute(selectSumStmt);
assertEquals(1, rsSum.getAvailableWithoutFetching());
Row rowSum = rsSum.one();
assertEquals(0, rowSum.getDecimal(0).compareTo(sumDecimal));
final String dropStmt = "DROP TABLE test_decimal;";
session.execute(dropStmt);
LOG.info("TEST CQL DECIMAL TYPE SUM - End");
}
}
|
package com.rashwan.reactive_popular_movies.feature.movieDetails.movieReviews;
import com.rashwan.reactive_popular_movies.common.MvpView;
import com.rashwan.reactive_popular_movies.data.model.Review;
import java.util.List;
/**
* Created by rashwan on 4/20/17.
*/
/**
 * MVP view contract for the movie-reviews screen: the presenter calls these methods
 * to render review data, toggle the offline layout, and show the empty state.
 */
public interface MovieReviewsView extends MvpView{
    /** Renders the given reviews. */
    void showReviews(List<Review> reviews);
    /** Shows the offline/no-connectivity layout. */
    void showOfflineLayout();
    /** Hides the offline/no-connectivity layout. */
    void hideOfflineLayout();
    /** Shows the "no reviews available" message (empty state). */
    void showNoReviewsMsg();
}
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.jdbc;
import com.google.common.collect.ImmutableSet;
import io.airlift.log.Logger;
import io.trino.plugin.base.aggregation.AggregateFunctionRewriter;
import io.trino.plugin.jdbc.aggregation.ImplementCountAll;
import io.trino.plugin.jdbc.mapping.DefaultIdentifierMapping;
import io.trino.plugin.jdbc.mapping.IdentifierMapping;
import io.trino.spi.TrinoException;
import io.trino.spi.connector.AggregateFunction;
import io.trino.spi.connector.ColumnHandle;
import io.trino.spi.connector.ConnectorSession;
import io.trino.spi.connector.SchemaTableName;
import io.trino.spi.type.CharType;
import io.trino.spi.type.TimestampType;
import io.trino.spi.type.Type;
import io.trino.spi.type.VarcharType;
import net.jodah.failsafe.Failsafe;
import net.jodah.failsafe.RetryPolicy;
import java.sql.Connection;
import java.sql.Types;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static io.trino.plugin.jdbc.StandardColumnMappings.bigintColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.bigintWriteFunction;
import static io.trino.plugin.jdbc.StandardColumnMappings.booleanColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.charWriteFunction;
import static io.trino.plugin.jdbc.StandardColumnMappings.dateColumnMappingUsingSqlDate;
import static io.trino.plugin.jdbc.StandardColumnMappings.dateWriteFunctionUsingSqlDate;
import static io.trino.plugin.jdbc.StandardColumnMappings.defaultCharColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.defaultVarcharColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.doubleColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.doubleWriteFunction;
import static io.trino.plugin.jdbc.StandardColumnMappings.integerColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.integerWriteFunction;
import static io.trino.plugin.jdbc.StandardColumnMappings.realColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.realWriteFunction;
import static io.trino.plugin.jdbc.StandardColumnMappings.smallintColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.smallintWriteFunction;
import static io.trino.plugin.jdbc.StandardColumnMappings.timeColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.timestampColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintColumnMapping;
import static io.trino.plugin.jdbc.StandardColumnMappings.tinyintWriteFunction;
import static io.trino.plugin.jdbc.StandardColumnMappings.varcharWriteFunction;
import static io.trino.plugin.jdbc.TypeHandlingJdbcSessionProperties.getUnsupportedTypeHandling;
import static io.trino.plugin.jdbc.UnsupportedTypeHandling.CONVERT_TO_VARCHAR;
import static io.trino.spi.StandardErrorCode.NOT_SUPPORTED;
import static io.trino.spi.type.BigintType.BIGINT;
import static io.trino.spi.type.DateType.DATE;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.IntegerType.INTEGER;
import static io.trino.spi.type.RealType.REAL;
import static io.trino.spi.type.SmallintType.SMALLINT;
import static io.trino.spi.type.TimeType.TIME_MILLIS;
import static io.trino.spi.type.TimestampType.TIMESTAMP_MILLIS;
import static io.trino.spi.type.TinyintType.TINYINT;
/**
 * {@link BaseJdbcClient} backed by an H2 database, used as the JDBC connector under test.
 * Maps the JDBC types the tests exercise to Trino types and restricts a few capabilities
 * (no GROUPING SETS pushdown, only count(*) aggregation, no cross-schema rename).
 */
class TestingH2JdbcClient
        extends BaseJdbcClient
{
    private static final Logger log = Logger.get(TestingH2JdbcClient.class);
    // Type handle used to express the bigint result of a rewritten count(*) aggregation.
    private static final JdbcTypeHandle BIGINT_TYPE_HANDLE = new JdbcTypeHandle(Types.BIGINT, Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty(), Optional.empty());
    public TestingH2JdbcClient(BaseJdbcConfig config, ConnectionFactory connectionFactory)
    {
        this(config, connectionFactory, new DefaultIdentifierMapping());
    }
    public TestingH2JdbcClient(BaseJdbcConfig config, ConnectionFactory connectionFactory, IdentifierMapping identifierMapping)
    {
        // "\"" — H2 quotes identifiers with double quotes.
        super(config, "\"", connectionFactory, identifierMapping);
    }
    @Override
    public Collection<String> listSchemas(Connection connection)
    {
        // listing schemas in H2 may fail with NullPointerException when a schema is concurrently dropped
        return Failsafe.with(new RetryPolicy<Collection<String>>()
                .withMaxAttempts(100)
                .onRetry(event -> log.warn(event.getLastFailure(), "Failed to list schemas, retrying")))
                .get(() -> super.listSchemas(connection));
    }
    @Override
    public boolean supportsAggregationPushdown(ConnectorSession session, JdbcTableHandle table, List<AggregateFunction> aggregates, Map<String, ColumnHandle> assignments, List<List<ColumnHandle>> groupingSets)
    {
        // GROUP BY with GROUPING SETS is not supported
        return groupingSets.size() == 1;
    }
    @Override
    public Optional<JdbcExpression> implementAggregation(ConnectorSession session, AggregateFunction aggregate, Map<String, ColumnHandle> assignments)
    {
        // Only count(*) is rewritten into a pushed-down expression; everything else is rejected
        // by the rewriter and stays in Trino.
        return new AggregateFunctionRewriter<>(this::quoted, ImmutableSet.of(new ImplementCountAll(BIGINT_TYPE_HANDLE)))
                .rewrite(session, aggregate, assignments);
    }
    @Override
    public Optional<ColumnMapping> toColumnMapping(ConnectorSession session, Connection connection, JdbcTypeHandle typeHandle)
    {
        // Session-level forced varchar mappings take precedence over the type switch below.
        Optional<ColumnMapping> mapping = getForcedMappingToVarchar(typeHandle);
        if (mapping.isPresent()) {
            return mapping;
        }
        switch (typeHandle.getJdbcType()) {
            case Types.BOOLEAN:
                return Optional.of(booleanColumnMapping());
            case Types.TINYINT:
                return Optional.of(tinyintColumnMapping());
            case Types.SMALLINT:
                return Optional.of(smallintColumnMapping());
            case Types.INTEGER:
                return Optional.of(integerColumnMapping());
            case Types.BIGINT:
                return Optional.of(bigintColumnMapping());
            case Types.REAL:
                return Optional.of(realColumnMapping());
            case Types.DOUBLE:
                return Optional.of(doubleColumnMapping());
            case Types.CHAR:
                return Optional.of(defaultCharColumnMapping(typeHandle.getRequiredColumnSize(), true));
            case Types.VARCHAR:
                return Optional.of(defaultVarcharColumnMapping(typeHandle.getRequiredColumnSize(), true));
            case Types.DATE:
                return Optional.of(dateColumnMappingUsingSqlDate());
            case Types.TIME:
                return Optional.of(timeColumnMapping(TIME_MILLIS));
            case Types.TIMESTAMP:
                // Use the column's decimal digits as the timestamp precision when present,
                // falling back to millisecond precision.
                TimestampType timestampType = typeHandle.getDecimalDigits()
                        .map(TimestampType::createTimestampType)
                        .orElse(TIMESTAMP_MILLIS);
                return Optional.of(timestampColumnMapping(timestampType));
        }
        // Unhandled JDBC types are either coerced to varchar or dropped, per session setting.
        if (getUnsupportedTypeHandling(session) == CONVERT_TO_VARCHAR) {
            return mapToUnboundedVarchar(typeHandle);
        }
        return Optional.empty();
    }
    @Override
    public WriteMapping toWriteMapping(ConnectorSession session, Type type)
    {
        if (type == TINYINT) {
            return WriteMapping.longMapping("tinyint", tinyintWriteFunction());
        }
        if (type == SMALLINT) {
            return WriteMapping.longMapping("smallint", smallintWriteFunction());
        }
        if (type == INTEGER) {
            return WriteMapping.longMapping("integer", integerWriteFunction());
        }
        if (type == BIGINT) {
            return WriteMapping.longMapping("bigint", bigintWriteFunction());
        }
        if (type == REAL) {
            // Trino REAL is written as H2 "float".
            return WriteMapping.longMapping("float", realWriteFunction());
        }
        if (type == DOUBLE) {
            return WriteMapping.doubleMapping("double precision", doubleWriteFunction());
        }
        if (type instanceof VarcharType) {
            VarcharType varcharType = (VarcharType) type;
            String dataType = varcharType.isUnbounded() ? "varchar" : "varchar(" + varcharType.getBoundedLength() + ")";
            return WriteMapping.sliceMapping(dataType, varcharWriteFunction());
        }
        if (type instanceof CharType) {
            CharType charType = (CharType) type;
            String dataType = "char(" + charType.getLength() + ")";
            return WriteMapping.sliceMapping(dataType, charWriteFunction());
        }
        if (type == DATE) {
            return WriteMapping.longMapping("date", dateWriteFunctionUsingSqlDate());
        }
        throw new TrinoException(NOT_SUPPORTED, "Unsupported column type: " + type.getDisplayName());
    }
    @Override
    protected void renameTable(ConnectorSession session, String catalogName, String schemaName, String tableName, SchemaTableName newTable)
    {
        // Rename is only supported within the same schema.
        if (!schemaName.equalsIgnoreCase(newTable.getSchemaName())) {
            throw new TrinoException(NOT_SUPPORTED, "This connector does not support renaming tables across schemas");
        }
        super.renameTable(session, catalogName, schemaName, tableName, newTable);
    }
}
|
/*
* Copyright (c) 2015-2020, www.dibo.ltd (service@dibo.ltd).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
* <p>
* https://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.diboot.core.binding.binder;
import com.baomidou.mybatisplus.extension.service.IService;
import com.diboot.core.binding.annotation.BindFieldList;
import com.diboot.core.binding.helper.ResultAssembler;
import com.diboot.core.config.Cons;
import com.diboot.core.exception.InvalidUsageException;
import com.diboot.core.util.BeanUtils;
import com.diboot.core.util.V;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.*;
/**
* 关联字段绑定
* @author mazc@dibo.ltd
* @version v2.0
* @date 2019/1/19
*/
/**
 * Binder for list-valued field binding ({@code @BindFieldList}): populates one or more
 * list properties on each VO from fields of a referenced entity, either via a direct
 * join or through a middle (junction) table.
 */
public class FieldListBinder<T> extends FieldBinder<T> {
    private static final Logger log = LoggerFactory.getLogger(FieldListBinder.class);
    /***
     * Constructor.
     * @param serviceInstance service used to query the referenced entity
     * @param voList the VO list whose annotated fields will be populated
     * @param annotation the {@link BindFieldList} annotation carrying splitBy/orderBy options
     */
    public FieldListBinder(IService<T> serviceInstance, List voList, BindFieldList annotation) {
        super(serviceInstance, voList);
        if(V.notEmpty(annotation.splitBy())){
            this.splitBy = annotation.splitBy();
        }
        if(V.notEmpty(annotation.orderBy())){
            this.orderBy = annotation.orderBy();
        }
    }
    @Override
    public void bind() {
        if(V.isEmpty(annoObjectList)){
            return;
        }
        if(V.isEmpty(refObjJoinCols)){
            throw new InvalidUsageException("调用错误:无法从condition中解析出字段关联.");
        }
        if(referencedGetterFieldNameList == null){
            throw new InvalidUsageException("调用错误:字段绑定必须指定字段field.");
        }
        Map<String, List> valueEntityListMap = new HashMap<>();
        // Direct join (no middle table)
        if(middleTable == null){
            super.simplifySelectColumns();
            super.buildQueryWrapperJoinOn();
            // Append orderBy clause, if configured
            this.appendOrderBy();
            // Query the entity list, e.g. List<Role>
            List<T> list = getEntityList(queryWrapper);
            if(V.notEmpty(list)){
                valueEntityListMap = this.buildMatchKey2FieldListMap(list);
            }
            // Iterate the VO list and assign the bound values
            ResultAssembler.bindFieldListPropValue(annoObjectList, getAnnoObjJoinFlds(), valueEntityListMap,
                    annoObjectSetterPropNameList, referencedGetterFieldNameList, this.splitBy);
        }
        // Join through a middle table
        else{
            if(refObjJoinCols.size() > 1){
                throw new InvalidUsageException(NOT_SUPPORT_MSG);
            }
            // Extract the column->values map specified by the annotation condition
            Map<String, List> trunkObjCol2ValuesMap = super.buildTrunkObjCol2ValuesMap();
            // Run the middle-table query and convert the result into a map
            Map<String, List> middleTableResultMap = middleTable.executeOneToManyQuery(trunkObjCol2ValuesMap);
            if(V.isEmpty(middleTableResultMap)){
                return;
            }
            super.simplifySelectColumns();
            // Append orderBy clause, if configured
            this.appendOrderBy();
            // Collect the referenced-entity id values from the middle-table result
            List entityIdList = extractIdValueFromMap(middleTableResultMap);
            if(V.notEmpty(this.splitBy)){
                entityIdList = ResultAssembler.unpackValueList(entityIdList, this.splitBy);
            }
            // Build the IN condition on the referenced join column
            queryWrapper.in(refObjJoinCols.get(0), entityIdList);
            // Query the entity list, e.g. List<Role>
            List<T> list = getEntityList(queryWrapper);
            if(V.isEmpty(list)){
                return;
            }
            String refObjJoinOnField = toRefObjField(refObjJoinCols.get(0));
            // Convert the entity list to Map<ID, Entity>
            Map<String, T> entityMap = BeanUtils.convertToStringKeyObjectMap(list, refObjJoinOnField);
            for(Map.Entry<String, List> entry : middleTableResultMap.entrySet()){
                // Foreign-key values for this trunk object, e.g. List<roleId>
                List annoObjFKList = entry.getValue();
                if(V.isEmpty(annoObjFKList)){
                    continue;
                }
                List valueList = new ArrayList();
                for(Object obj : annoObjFKList){
                    String valStr = String.valueOf(obj);
                    T ent = entityMap.get(valStr);
                    if(ent != null){
                        valueList.add(ent);
                    }
                    // A packed value like "1,2,3": split and resolve each key individually
                    else if(V.notEmpty(splitBy) && valStr.contains(splitBy)){
                        for(String key : valStr.split(splitBy)){
                            ent = entityMap.get(key);
                            if(ent != null){
                                valueList.add(ent);
                            }
                        }
                    }
                }
                valueEntityListMap.put(entry.getKey(), valueList);
            }
            // Iterate the VO list and assign the bound values
            bindPropValue(annoObjectList, middleTable.getTrunkObjColMapping(), valueEntityListMap);
        }
    }
    /***
     * Assigns matched entity-field values onto each source object.
     * @param fromList the VO objects to populate
     * @param trunkObjColMapping mapping of trunk-object columns used to build the match key
     * @param valueMatchMap match key -> list of matched entities
     * @param <E> element type of the VO list
     */
    private <E> void bindPropValue(List<E> fromList, Map<String, String> trunkObjColMapping, Map<String, List> valueMatchMap){
        if(V.isEmpty(fromList) || V.isEmpty(valueMatchMap)){
            return;
        }
        StringBuilder sb = new StringBuilder();
        try{
            for(E object : fromList){
                boolean appendComma = false;
                sb.setLength(0);
                // Build the comma-separated match key from the trunk-object columns
                for(Map.Entry<String, String> entry :trunkObjColMapping.entrySet()){
                    String getterField = toAnnoObjField(entry.getKey());
                    String fieldValue = BeanUtils.getStringProperty(object, getterField);
                    if(appendComma){
                        sb.append(Cons.SEPARATOR_COMMA);
                    }
                    sb.append(fieldValue);
                    appendComma = true;
                }
                // Look up the matching entities for this key
                List entityList = valueMatchMap.get(sb.toString());
                if(entityList != null){
                    // Assign each configured target property from the referenced getter field
                    for(int i = 0; i< annoObjectSetterPropNameList.size(); i++){
                        List valObjList = BeanUtils.collectToList(entityList, referencedGetterFieldNameList.get(i));
                        BeanUtils.setProperty(object, annoObjectSetterPropNameList.get(i), valObjList);
                    }
                }
            }
        }
        catch (Exception e){
            log.warn("设置属性值异常", e);
        }
    }
    /**
     * Builds the match-key -> entity-list map for the direct-join case.
     * The key is the comma-joined values of all referenced join columns.
     * @param list queried referenced entities
     * @return map of match key to the entities sharing that key
     */
    private Map<String, List> buildMatchKey2FieldListMap(List<T> list){
        Map<String, List> key2TargetListMap = new HashMap<>(list.size());
        StringBuilder sb = new StringBuilder();
        for(T entity : list){
            sb.setLength(0);
            for(int i=0; i<refObjJoinCols.size(); i++){
                String refObjJoinOnCol = refObjJoinCols.get(i);
                String fldValue = BeanUtils.getStringProperty(entity, toRefObjField(refObjJoinOnCol));
                if(i > 0){
                    sb.append(Cons.SEPARATOR_COMMA);
                }
                sb.append(fldValue);
            }
            String matchKey = sb.toString();
            // Group entities by match key (replaces the manual get/put-if-absent pattern)
            key2TargetListMap.computeIfAbsent(matchKey, k -> new ArrayList<>()).add(entity);
        }
        return key2TargetListMap;
    }
}
|
package com.remondis.remap;
import com.googlecode.openbeans.BeanInfo;
import com.googlecode.openbeans.IntrospectionException;
import com.googlecode.openbeans.Introspector;
import com.googlecode.openbeans.PropertyDescriptor;
import static java.util.Objects.isNull;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;
/**
* Util class to get a list of all properties of a class.
*
* @author schuettec
*/
/**
 * Util class to get a list of all properties of a class.
 *
 * @author schuettec
 */
class Properties {
  /**
   * A readable string representation for a {@link PropertyDescriptor}.
   *
   * @param pd The pd
   * @return Returns a readable string.
   */
  static String asStringWithType(PropertyDescriptor pd) {
    return asStringWithType(pd, false);
  }

  /**
   * A readable string representation for a {@link PropertyDescriptor}.
   *
   * @param pd The pd
   * @param detailed If <code>false</code> simple names should be used. If <code>true</code> fully qualified names
   *        should be used.
   * @return Returns a readable string.
   */
  static String asStringWithType(PropertyDescriptor pd, boolean detailed) {
    Class<?> clazz = Properties.getPropertyClass(pd);
    return String.format("Property '%s' (%s) in %s", pd.getName(), pd.getPropertyType()
        .getName(), (detailed ? clazz.getName() : clazz.getSimpleName()));
  }

  /**
   * A readable string representation for a {@link PropertyDescriptor}.
   *
   * @param pd The pd
   * @return Returns a readable string.
   */
  static String asString(PropertyDescriptor pd) {
    return asString(pd, false);
  }

  /**
   * A readable string representation for a {@link PropertyDescriptor}.
   *
   * @param pd The pd
   * @param detailed If <code>false</code> simple names should be used. If <code>true</code> fully qualified names
   *        should be used.
   * @return Returns a readable string.
   */
  static String asString(PropertyDescriptor pd, boolean detailed) {
    Class<?> clazz = Properties.getPropertyClass(pd);
    return String.format("Property '%s' in %s", pd.getName(), (detailed ? clazz.getName() : clazz.getSimpleName()));
  }

  /**
   * Returns the class declaring the property.
   *
   * @param propertyDescriptor the {@link PropertyDescriptor}
   * @return Returns the declaring class.
   */
  static Class<?> getPropertyClass(PropertyDescriptor propertyDescriptor) {
    return propertyDescriptor.getReadMethod()
        .getDeclaringClass();
  }

  /**
   * Creates a message showing all currently unmapped properties.
   *
   * @param unmapped The set of unmapped properties.
   * @return Returns the message.
   */
  static String createUnmappedMessage(Set<PropertyDescriptor> unmapped) {
    StringBuilder msg = new StringBuilder("The following properties are unmapped:\n");
    for (PropertyDescriptor pd : unmapped) {
      String getter = pd.getReadMethod()
          .getName();
      Method writeMethod = pd.getWriteMethod();
      String setter = isNull(writeMethod) ? "none" : writeMethod.getName();
      msg.append("- ")
          .append(asString(pd))
          .append("\n\taccess methods: ")
          .append(getter)
          .append("() / ")
          .append(setter)
          .append("()\n");
    }
    return msg.toString();
  }

  /**
   * Returns a {@link Set} of properties with read and write access.
   *
   * @param inspectType The type to inspect.
   * @param targetType The type of mapping target.
   * @return Returns the list of {@link PropertyDescriptor}s that grant read and write access.
   * @throws MappingException Thrown on any introspection error.
   */
  static Set<PropertyDescriptor> getProperties(Class<?> inspectType, Target targetType) {
    try {
      BeanInfo beanInfo = Introspector.getBeanInfo(inspectType);
      PropertyDescriptor[] propertyDescriptors = beanInfo.getPropertyDescriptors();
      // Simplified: stream the array directly and collect straight into a Set instead of
      // copying through Arrays.asList(...) and an intermediate List.
      return Arrays.stream(propertyDescriptors)
          .filter(pd -> !"class".equals(pd.getName()))
          .filter(Properties::hasGetter)
          // For mapping sources a getter is sufficient; targets additionally need a setter.
          .filter(pd -> Target.SOURCE.equals(targetType) || hasSetter(pd))
          .collect(Collectors.toSet());
    } catch (IntrospectionException e) {
      // NOTE(review): the original drops the cause; consider chaining 'e' if
      // MappingException exposes a (String, Throwable) constructor.
      throw new MappingException(String.format("Cannot introspect the type %s.", inspectType.getName()));
    }
  }

  private static boolean hasGetter(PropertyDescriptor pd) {
    return pd.getReadMethod() != null;
  }

  private static boolean hasSetter(PropertyDescriptor pd) {
    return pd.getWriteMethod() != null;
  }
}
|
/*
* Copyright (c) 2018 J.S. Boellaard
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.Bluefix.Prodosia.DataHandler;
import com.Bluefix.Prodosia.Prefix.CommandPrefix;
import com.Bluefix.Prodosia.SQLite.SqlDatabase;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
/**
 * Local storage handler for {@link CommandPrefix} entries, backed by the SQLite database.
 * Singleton: obtain via {@link #handler()}.
 */
public class CommandPrefixStorage extends LocalStorageHandler<CommandPrefix>
{
    //region Singleton and Constructor
    private static CommandPrefixStorage me;

    /** Returns the lazily-created singleton instance. */
    public static CommandPrefixStorage handler()
    {
        if (me == null)
            me = new CommandPrefixStorage();
        return me;
    }

    private CommandPrefixStorage()
    {
        super(true);
    }
    //endregion

    //region Local Storage Handler implementation
    /**
     * Store (upsert) a command prefix in the database.
     *
     * @param commandPrefix the prefix to store; may be null (no-op)
     * @return the previously stored prefix of the same type, or null if none existed
     */
    @Override
    CommandPrefix setItem(CommandPrefix commandPrefix) throws SQLException
    {
        return dbSetCPrefix(commandPrefix);
    }

    /**
     * Remove an item from the storage.
     *
     * @param commandPrefix the prefix to remove; may be null (no-op)
     */
    @Override
    void removeItem(CommandPrefix commandPrefix) throws SQLException
    {
        dbRemoveCPrefix(commandPrefix);
    }

    /**
     * Retrieve all items from the storage in no particular order.
     *
     * @return all stored command prefixes
     */
    @Override
    ArrayList<CommandPrefix> getAllItems() throws SQLException
    {
        return dbGetCPrefixes();
    }
    //endregion

    //region Database management
    private static CommandPrefix dbSetCPrefix(CommandPrefix cp) throws SQLException
    {
        // skip if the command prefix is null
        if (cp == null)
            return null;

        // retrieve the old prefix item.
        CommandPrefix oldPrefix = dbGetCPrefix(cp.getType());

        // remove the old prefix if it existed.
        dbRemoveCPrefix(oldPrefix);

        String query =
                "INSERT INTO CommandPrefix " +
                "(type, regex) VALUES (?,?);";
        PreparedStatement prep = SqlDatabase.getStatement(query);
        prep.setInt(1, cp.getType().getValue());
        prep.setString(2, cp.getRegex());
        SqlDatabase.execute(prep);
        assert(prep.isClosed());
        return oldPrefix;
    }

    private static void dbRemoveCPrefix(CommandPrefix cp) throws SQLException
    {
        // skip if the command prefix is null
        if (cp == null)
            return;

        String query =
                "DELETE FROM CommandPrefix " +
                "WHERE type = ?;";
        PreparedStatement prep = SqlDatabase.getStatement(query);
        prep.setInt(1, cp.getType().getValue());
        SqlDatabase.execute(prep);
        assert(prep.isClosed());
    }

    private static CommandPrefix dbGetCPrefix(CommandPrefix.Type type) throws SQLException
    {
        String query =
                "SELECT type, regex " +
                "FROM CommandPrefix " +
                "WHERE type = ?;";
        PreparedStatement prep = SqlDatabase.getStatement(query);
        // Close the statement even if binding, querying or parsing throws
        // (previously it leaked on any exception after getStatement).
        try
        {
            prep.setInt(1, type.getValue());
            ArrayList<ResultSet> result = SqlDatabase.query(prep);
            if (result.size() != 1)
                throw new SQLException("SqlDatabase exception: Expected result size did not match (was " + result.size() + ")");

            // parse the query and return the result
            ArrayList<CommandPrefix> parseResult = parsePrefixes(result.get(0));
            if (parseResult.isEmpty())
                return null;
            return parseResult.get(0);
        }
        finally
        {
            prep.close();
        }
    }

    private static ArrayList<CommandPrefix> dbGetCPrefixes() throws SQLException
    {
        String query =
                "SELECT type, regex " +
                "FROM CommandPrefix;";
        PreparedStatement prep = SqlDatabase.getStatement(query);
        // Close the statement even if the query or parsing throws.
        try
        {
            ArrayList<ResultSet> result = SqlDatabase.query(prep);
            if (result.size() != 1)
                throw new SQLException("SqlDatabase exception: Expected result size did not match (was " + result.size() + ")");

            return parsePrefixes(result.get(0));
        }
        finally
        {
            prep.close();
        }
    }

    private static ArrayList<CommandPrefix> parsePrefixes(ResultSet rs) throws SQLException
    {
        ArrayList<CommandPrefix> output = new ArrayList<>();
        while (rs.next())
        {
            CommandPrefix.Type type = CommandPrefix.Type.parseType(rs.getInt(1));
            String regex = rs.getString(2);
            output.add(new CommandPrefix(type, regex));
        }
        // close the resultset.
        rs.close();
        return output;
    }
    //endregion

    //region Helper methods
    /**
     * Get the command-prefix for the specified type, or null if it did not exist.
     * @param type The type for which to fetch the command-prefix.
     * @return The commandprefix corresponding to the type if it existed, or null otherwise.
     * @throws SQLException on database access failure
     */
    public static CommandPrefix getPrefixForType(CommandPrefix.Type type) throws SQLException
    {
        ArrayList<CommandPrefix> items = handler().getAll();
        for (CommandPrefix i : items)
        {
            if (i.getType() == type)
                return i;
        }
        return null;
    }
    //endregion
}
|
package com.spark.platform.gateway.controller;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.Map;
/**
* @author: wangdingfeng
* @ProjectName: spark-platform
* @Package: com.spark.platform.gateway.controller
* @ClassName: FallbackController
* @Description: 通用熔断器
* @Version: 1.0
*/
@RestController
@RequestMapping("common")
public class FallbackController {
    // Logger made private static final (was a mutable package-visible instance field).
    private static final Logger logger = LoggerFactory.getLogger(FallbackController.class);

    /**
     * Generic circuit-breaker fallback endpoint: logs the timeout and returns a
     * 504-style payload. Raw {@code Map} replaced with {@code Map<String, Object>}
     * (same erasure, so callers are unaffected).
     *
     * @return response body with "code" (504) and "msg" entries
     */
    @RequestMapping("fallback")
    public Map<String, Object> fallback() {
        logger.error("Hystrix请求超时");
        Map<String, Object> map = new HashMap<>();
        map.put("code", 504);
        map.put("msg", "请求超时,请稍后再试");
        return map;
    }
}
|
package com.mars.citizen.model;
import io.swagger.annotations.ApiModelProperty;
import java.io.Serializable;
import java.util.Date;
/**
 * Menu entity: a node in a hierarchical navigation menu (parent reference via
 * {@code parentId}, ordering via {@code sort}, depth via {@code level}).
 * Plain serializable POJO with generated-style accessors and a debug toString().
 */
public class UmsMenu implements Serializable {
    // Primary key.
    private Long id;
    @ApiModelProperty(value = "父级ID")
    private Long parentId;
    @ApiModelProperty(value = "创建时间")
    private Date createTime;
    @ApiModelProperty(value = "菜单名称")
    private String title;
    @ApiModelProperty(value = "菜单级数")
    private Integer level;
    @ApiModelProperty(value = "菜单排序")
    private Integer sort;
    @ApiModelProperty(value = "前端名称")
    private String name;
    @ApiModelProperty(value = "前端图标")
    private String icon;
    @ApiModelProperty(value = "前端隐藏")
    private Integer hidden;
    private static final long serialVersionUID = 1L;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public Long getParentId() {
        return parentId;
    }
    public void setParentId(Long parentId) {
        this.parentId = parentId;
    }
    public Date getCreateTime() {
        return createTime;
    }
    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    public Integer getLevel() {
        return level;
    }
    public void setLevel(Integer level) {
        this.level = level;
    }
    public Integer getSort() {
        return sort;
    }
    public void setSort(Integer sort) {
        this.sort = sort;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getIcon() {
        return icon;
    }
    public void setIcon(String icon) {
        this.icon = icon;
    }
    public Integer getHidden() {
        return hidden;
    }
    public void setHidden(Integer hidden) {
        this.hidden = hidden;
    }
    // Debug representation including identity hash and all fields.
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(getClass().getSimpleName());
        sb.append(" [");
        sb.append("Hash = ").append(hashCode());
        sb.append(", id=").append(id);
        sb.append(", parentId=").append(parentId);
        sb.append(", createTime=").append(createTime);
        sb.append(", title=").append(title);
        sb.append(", level=").append(level);
        sb.append(", sort=").append(sort);
        sb.append(", name=").append(name);
        sb.append(", icon=").append(icon);
        sb.append(", hidden=").append(hidden);
        sb.append(", serialVersionUID=").append(serialVersionUID);
        sb.append("]");
        return sb.toString();
    }
}
|
/*
* Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tencentcloudapi.bmlb.v20180625.models;
import com.tencentcloudapi.common.AbstractModel;
import com.google.gson.annotations.SerializedName;
import com.google.gson.annotations.Expose;
import java.util.HashMap;
public class UnbindL4BackendsResponse extends AbstractModel{

    /**
    * Task ID. This API is asynchronous; pass this ID to DescribeLoadBalancerTaskResult to query the result of the operation.
    */
    @SerializedName("TaskId")
    @Expose
    private String TaskId;

    /**
    * Unique request ID, returned with every response. Provide this RequestId when reporting a problem.
    */
    @SerializedName("RequestId")
    @Expose
    private String RequestId;

    /**
     * Get the task ID. This API is asynchronous; pass this ID to DescribeLoadBalancerTaskResult to query the result of the operation.
     * @return TaskId the asynchronous task ID
     */
    public String getTaskId() {
        return this.TaskId;
    }

    /**
     * Set the task ID. This API is asynchronous; pass this ID to DescribeLoadBalancerTaskResult to query the result of the operation.
     * @param TaskId the asynchronous task ID
     */
    public void setTaskId(String TaskId) {
        this.TaskId = TaskId;
    }

    /**
     * Get the unique request ID, returned with every response. Provide this RequestId when reporting a problem.
     * @return RequestId the unique request ID
     */
    public String getRequestId() {
        return this.RequestId;
    }

    /**
     * Set the unique request ID, returned with every response. Provide this RequestId when reporting a problem.
     * @param RequestId the unique request ID
     */
    public void setRequestId(String RequestId) {
        this.RequestId = RequestId;
    }

    /**
     * Internal implementation, normal users should not use it.
     */
    public void toMap(HashMap<String, String> map, String prefix) {
        this.setParamSimple(map, prefix + "TaskId", this.TaskId);
        this.setParamSimple(map, prefix + "RequestId", this.RequestId);
    }
}
|
/**
*
*/
package services.dao;
import java.util.List;
import models.CellStyle;
/**
* @author Eva
* Interface to provide data access to CellStyle objects
*/
public interface CellStyleDAO {

    /**
     * Gets all the cell styles that exist in the backing store.
     * @return a java.util.List of CellStyle objects (all persisted styles)
     */
    public List<CellStyle> findAll();

    /**
     * Updates an existing CellStyle object in the backing store.
     * @param style CellStyle object to persist
     * @return true if the update was successful; false otherwise
     */
    public boolean update(CellStyle style);

    /**
     * Adds a new CellStyle object to the backing store.
     * @param style CellStyle object to persist
     * @return true if the add was successful; false otherwise
     */
    public boolean add(CellStyle style);

    /**
     * Removes the cell style identified by the given row and column.
     * @param rowId the row id (interpreted as an unsigned short value)
     * @param columnIndex the column number (interpreted as an unsigned byte value)
     * @return true if the removal was successful; false otherwise
     */
    public boolean remove(short rowId, byte columnIndex);

    /**
     * Gets a cell style, as specified by its row and column.
     * @param rowID the row id of the cell (interpreted as an unsigned short value)
     * @param columnIndex the column index of the cell (interpreted as an unsigned byte value)
     * @return the CellStyle object corresponding to this row and column
     */
    public CellStyle getByRowCol(short rowID, byte columnIndex);
}
|
/*
* (c) Copyright 2018 Palantir Technologies Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.palantir.atlasdb.util;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.codahale.metrics.MetricRegistry;
import com.google.common.cache.Cache;
import com.palantir.tritium.event.InvocationContext;
import com.palantir.tritium.event.log.LoggingInvocationEventHandler;
import com.palantir.tritium.event.log.LoggingLevel;
import com.palantir.tritium.metrics.MetricRegistries;
import com.palantir.tritium.metrics.caffeine.CaffeineCacheStats;
import com.palantir.tritium.metrics.registry.TaggedMetricRegistry;
import com.palantir.tritium.proxy.Instrumentation;
/**
 * Static helpers for wrapping services with metric/logging instrumentation and
 * for registering cache metrics without double-registration.
 */
public final class AtlasDbMetrics {
    private static final Logger log = LoggerFactory.getLogger(AtlasDbMetrics.class);

    private AtlasDbMetrics() {}

    /**
     * Instruments {@code service} under the interface's fully-qualified name.
     *
     * @return a proxy implementing {@code serviceInterface} that records metrics per call
     */
    public static <T, U extends T> T instrument(
            MetricRegistry metricRegistry, Class<T> serviceInterface, U service) {
        return instrument(metricRegistry, serviceInterface, service, serviceInterface.getName());
    }

    /**
     * Instruments {@code service} with sliding-window metrics under {@code name}, plus
     * TRACE-level duration logging on the "performance.&lt;name&gt;" logger.
     */
    public static <T, U extends T> T instrument(
            MetricRegistry metricRegistry, Class<T> serviceInterface, U service, String name) {
        return Instrumentation.builder(serviceInterface, service)
                .withHandler(new SlidingWindowMetricsInvocationHandler(metricRegistry, name))
                .withLogging(
                        LoggerFactory.getLogger("performance." + name),
                        LoggingLevel.TRACE,
                        LoggingInvocationEventHandler.LOG_DURATIONS_GREATER_THAN_1_MICROSECOND)
                .build();
    }

    /**
     * Like {@link #instrument(MetricRegistry, Class, Object, String)} but emits tagged
     * metrics, with per-invocation tags derived from {@code tagFunction}.
     */
    public static <T, U extends T> T instrumentWithTaggedMetrics(
            TaggedMetricRegistry taggedMetrics,
            Class<T> serviceInterface,
            U service,
            String name,
            Function<InvocationContext, Map<String, String>> tagFunction) {
        return Instrumentation.builder(serviceInterface, service)
                .withHandler(new TaggedMetricsInvocationEventHandler(taggedMetrics, name, tagFunction))
                .withLogging(
                        LoggerFactory.getLogger("performance." + name),
                        LoggingLevel.TRACE,
                        LoggingInvocationEventHandler.LOG_DURATIONS_GREATER_THAN_1_MICROSECOND)
                .build();
    }

    /**
     * Registers metrics for a Guava cache under {@code metricsPrefix}, unless metrics with
     * that prefix already exist (in which case this is a logged no-op).
     */
    public static void registerCache(MetricRegistry metricRegistry, Cache<?, ?> cache, String metricsPrefix) {
        Set<String> existingMetrics = metricsWithPrefix(metricRegistry, metricsPrefix);
        if (existingMetrics.isEmpty()) {
            MetricRegistries.registerCache(metricRegistry, cache, metricsPrefix);
        } else {
            log.info("Not registering cache with prefix '{}' as metric registry already contains metrics: {}",
                    metricsPrefix, existingMetrics);
        }
    }

    /**
     * Registers metrics for a Caffeine cache under {@code metricsPrefix}, unless metrics with
     * that prefix already exist (in which case this is a logged no-op).
     */
    public static void registerCache(MetricRegistry metricRegistry,
            com.github.benmanes.caffeine.cache.Cache<?, ?> cache, String metricsPrefix) {
        Set<String> existingMetrics = metricsWithPrefix(metricRegistry, metricsPrefix);
        if (existingMetrics.isEmpty()) {
            CaffeineCacheStats.registerCache(metricRegistry, cache, metricsPrefix);
        } else {
            log.info("Not registering cache with prefix '{}' as metric registry already contains metrics: {}",
                    metricsPrefix, existingMetrics);
        }
    }

    /** Names of already-registered metrics starting with {@code prefix} (dedupes the scan both registerCache overloads shared). */
    private static Set<String> metricsWithPrefix(MetricRegistry metricRegistry, String prefix) {
        return metricRegistry.getMetrics().keySet().stream()
                .filter(name -> name.startsWith(prefix))
                .collect(Collectors.toSet());
    }
}
|
/*
* Copyright 2020 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.bookkeeper.proto;
import org.apache.bookkeeper.net.BookieId;
import org.apache.bookkeeper.net.BookieSocketAddress;
/**
 * Maps a logical {@link BookieId} to the {@link BookieSocketAddress}
 * (network address) it currently resolves to.
 */
public interface BookieAddressResolver {

    /**
     * Maps a logical address to a network address.
     * @param bookieId the logical id of the bookie
     * @return the resolved network address.
     * @throws BookieIdNotResolvedException if it is not possible to resolve the address of the BookieId
     */
    BookieSocketAddress resolve(BookieId bookieId) throws BookieIdNotResolvedException;

    /**
     * This error happens when there is not enough information to resolve a BookieId
     * to a BookieSocketAddress, this can happen when the Bookie is down
     * and it is not publishing its EndpointInfo.
     */
    class BookieIdNotResolvedException extends RuntimeException {
        // The id that failed to resolve; kept so callers can report/retry it.
        private final BookieId bookieId;

        public BookieIdNotResolvedException(BookieId bookieId, Throwable cause) {
            super("Cannot resolve bookieId " + bookieId + ", bookie does not exist or it is not running", cause);
            this.bookieId = bookieId;
        }

        /** @return the BookieId whose resolution failed */
        public BookieId getBookieId() {
            return bookieId;
        }
    }
}
|
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.projectView;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.extensions.ExtensionPointName;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
/**
 * Allows a plugin to modify the structure of a project as displayed in the project view.
 */
public interface TreeStructureProvider {
  /** Extension point under which implementations of this interface are registered. */
  ExtensionPointName<TreeStructureProvider> EP_NAME = ExtensionPointName.create("com.intellij.treeStructureProvider");

  /**
   * Allows a plugin to modify the list of children displayed for the specified node in the
   * project view.
   *
   * @param parent   the parent node.
   * @param children the list of child nodes according to the default project structure.
   *                 Elements of the collection are of type {@link ProjectViewNode}.
   * @param settings the current project view settings.
   * @return the modified collection of child nodes, or <code>children</code> if no modifications
   *         are required. Must not return null.
   */
  @NotNull
  Collection<AbstractTreeNode> modify(@NotNull AbstractTreeNode parent, @NotNull Collection<AbstractTreeNode> children, ViewSettings settings);

  /**
   * Returns a user data object of the specified type for the specified selection in the
   * project view.
   *
   * @param selected the list of nodes currently selected in the project view.
   * @param dataName the identifier of the requested data object (for example, as defined in
   *                 {@link com.intellij.openapi.actionSystem.PlatformDataKeys})
   * @return the data object, or null if no data object can be returned by this provider.
   * @see com.intellij.openapi.actionSystem.DataProvider
   */
  @Nullable
  Object getData(Collection<AbstractTreeNode> selected, String dataName);
}
|
package creos.simsg.api.loadapproximator.uncertain.multisubs.brainstorm;
import creos.simsg.api.model.Cabinet;
import java.util.*;
/**
* A bag of paths is a collection of paths (cf. {@link Path}) that share cabinets and fuses. Between two random
* bags, the intersection of cabinets and the intersection of fuses equal the empty set.
*/
public class BagOfPaths {

    private BagOfPaths() {}

    /**
     * Computes the bag partition of each path list independently.
     *
     * @param allPaths one list of paths per scenario
     * @return one bag-id → paths map per input list, in iteration order
     */
    public static List<Map<Integer, List<Path>>> getAllBags(Collection<List<Path>> allPaths) {
        var res = new ArrayList<Map<Integer, List<Path>>>(allPaths.size());
        for (List<Path> paths : allPaths) {
            res.add(getBagOfPaths(paths));
        }
        return res;
    }

    /**
     * Partitions {@code paths} into bags such that two paths land in the same bag
     * iff they are (transitively) connected through a shared cabinet.
     * Grouping is by cabinets only; fuses are not inspected here.
     *
     * @param paths the paths to partition
     * @return map from bag id to the paths in that bag
     */
    public static Map<Integer, List<Path>> getBagOfPaths(Collection<Path> paths) {
        int nextBagId = 0;
        var mapCabBag = new HashMap<Cabinet, Integer>();
        var mapBagPath = new HashMap<Integer, List<Path>>();
        for (Path path : paths) {
            // All distinct bags already touching one of this path's cabinets.
            var touched = new TreeSet<Integer>();
            for (Cabinet c : path.getCabinets()) {
                Integer bag = mapCabBag.get(c);
                if (bag != null) {
                    touched.add(bag);
                }
            }
            final int bagId = touched.isEmpty() ? nextBagId++ : touched.first();
            // BUGFIX: a path bridging two or more existing bags used to be attached only to
            // the first bag found, leaving separate bags that shared cabinets and violating
            // the class invariant (distinct bags have disjoint cabinet sets). Merge every
            // other touched bag into bagId before appending the new path.
            for (int other : touched.tailSet(bagId, false)) {
                List<Path> moved = mapBagPath.remove(other);
                if (moved != null) {
                    mapBagPath.computeIfAbsent(bagId, k -> new ArrayList<>()).addAll(moved);
                }
            }
            if (touched.size() > 1) {
                // Re-point every cabinet of the absorbed bags at the surviving bag.
                mapCabBag.replaceAll((c, b) -> touched.contains(b) ? bagId : b);
            }
            // computeIfAbsent avoids the original compute() which copied the whole
            // list on every insertion (accidental O(n^2)).
            mapBagPath.computeIfAbsent(bagId, k -> new ArrayList<>()).add(path);
            for (Cabinet c : path.getCabinets()) {
                mapCabBag.put(c, bagId);
            }
        }
        return mapBagPath;
    }
}
|
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simplesystemsmanagement.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
 * Request object for retrieving the patch baseline associated with a patch group.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ssm-2014-11-06/GetPatchBaselineForPatchGroup" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetPatchBaselineForPatchGroupRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The name of the patch group whose patch baseline should be retrieved.
     * </p>
     */
    private String patchGroup;

    /**
     * <p>
     * The operating system rule specified for patch groups using the patch baseline.
     * </p>
     */
    private String operatingSystem;

    /**
     * <p>
     * The name of the patch group whose patch baseline should be retrieved.
     * </p>
     *
     * @param patchGroup
     *        The name of the patch group whose patch baseline should be retrieved.
     */
    public void setPatchGroup(String patchGroup) {
        this.patchGroup = patchGroup;
    }

    /**
     * <p>
     * The name of the patch group whose patch baseline should be retrieved.
     * </p>
     *
     * @return The name of the patch group whose patch baseline should be retrieved.
     */
    public String getPatchGroup() {
        return this.patchGroup;
    }

    /**
     * <p>
     * The name of the patch group whose patch baseline should be retrieved.
     * </p>
     *
     * @param patchGroup
     *        The name of the patch group whose patch baseline should be retrieved.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public GetPatchBaselineForPatchGroupRequest withPatchGroup(String patchGroup) {
        setPatchGroup(patchGroup);
        return this;
    }

    /**
     * <p>
     * The operating system rule specified for patch groups using the patch baseline.
     * </p>
     *
     * @param operatingSystem
     *        The operating system rule specified for patch groups using the patch baseline.
     * @see OperatingSystem
     */
    public void setOperatingSystem(String operatingSystem) {
        this.operatingSystem = operatingSystem;
    }

    /**
     * <p>
     * The operating system rule specified for patch groups using the patch baseline.
     * </p>
     *
     * @return The operating system rule specified for patch groups using the patch baseline.
     * @see OperatingSystem
     */
    public String getOperatingSystem() {
        return this.operatingSystem;
    }

    /**
     * <p>
     * The operating system rule specified for patch groups using the patch baseline.
     * </p>
     *
     * @param operatingSystem
     *        The operating system rule specified for patch groups using the patch baseline.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see OperatingSystem
     */
    public GetPatchBaselineForPatchGroupRequest withOperatingSystem(String operatingSystem) {
        setOperatingSystem(operatingSystem);
        return this;
    }

    /**
     * <p>
     * The operating system rule specified for patch groups using the patch baseline.
     * </p>
     *
     * @param operatingSystem
     *        The operating system rule specified for patch groups using the patch baseline.
     * @see OperatingSystem
     */
    public void setOperatingSystem(OperatingSystem operatingSystem) {
        withOperatingSystem(operatingSystem);
    }

    /**
     * <p>
     * The operating system rule specified for patch groups using the patch baseline.
     * </p>
     *
     * @param operatingSystem
     *        The operating system rule specified for patch groups using the patch baseline.
     * @return Returns a reference to this object so that method calls can be chained together.
     * @see OperatingSystem
     */
    public GetPatchBaselineForPatchGroupRequest withOperatingSystem(OperatingSystem operatingSystem) {
        this.operatingSystem = operatingSystem.toString();
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getPatchGroup() != null)
            sb.append("PatchGroup: ").append(getPatchGroup()).append(",");
        if (getOperatingSystem() != null)
            sb.append("OperatingSystem: ").append(getOperatingSystem());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof GetPatchBaselineForPatchGroupRequest == false)
            return false;
        GetPatchBaselineForPatchGroupRequest other = (GetPatchBaselineForPatchGroupRequest) obj;
        if (other.getPatchGroup() == null ^ this.getPatchGroup() == null)
            return false;
        if (other.getPatchGroup() != null && other.getPatchGroup().equals(this.getPatchGroup()) == false)
            return false;
        if (other.getOperatingSystem() == null ^ this.getOperatingSystem() == null)
            return false;
        if (other.getOperatingSystem() != null && other.getOperatingSystem().equals(this.getOperatingSystem()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getPatchGroup() == null) ? 0 : getPatchGroup().hashCode());
        hashCode = prime * hashCode + ((getOperatingSystem() == null) ? 0 : getOperatingSystem().hashCode());
        return hashCode;
    }

    @Override
    public GetPatchBaselineForPatchGroupRequest clone() {
        return (GetPatchBaselineForPatchGroupRequest) super.clone();
    }
}
|
package org.jdraft.pattern;
import com.github.javaparser.ast.body.BodyDeclaration;
import com.github.javaparser.ast.comments.BlockComment;
import com.github.javaparser.ast.expr.ObjectCreationExpr;
import com.github.javaparser.ast.Node;
import com.github.javaparser.ast.expr.Expression;
import com.github.javaparser.ast.stmt.*;
import com.github.javaparser.printer.PrettyPrintVisitor;
import com.github.javaparser.printer.PrettyPrinterConfiguration;
import java.util.*;
import java.util.function.*;
import org.jdraft.*;
import org.jdraft.Expr.QuadConsumer;
import org.jdraft.Expr.TriConsumer;
import org.jdraft.text.*;
import org.jdraft.walk.Walk;
/**
* Pattern of a Java {@link Statement} that provides operations for
* constructing, analyzing, matching, extracting, removing, replacing / etc. Statement
* type nodes within the AST.
* <PRE>
* NOTE: In the future I might create individual implementations for each Statement type
* i.e. $assertStmt, $blockStmt, $continueStmt...
* rather than:
* $stmt<AssertStmt>, $stmt<BlockStmt>, $stmt<ContinueStmt>...
*
* </PRE>
* <PRE>
* $stmt
* CONSTRUCT
* .construct([Translator], Tokens) build & return
* .fill([Translator], values)
* PARAMETERIZE
* .$(Tokens)
* .hardcode$([translator], target, value)
* MATCH
* .constraint(Predicate<T>) //set the matching constraint
* .matches(Statement)
* .select(Statement)
* .deconstruct( Statement )
* QUERY
* .first/.firstIn(_node, proto) find the first matching statement in
* .list/.listIn(_node, proto, Predicate<>) list all matches in
* .selectFirst/.selectFirstIn(_node, proto) return the first "selection" match
* .selectList/.selectListIn(_node, proto) return a list of selection matches
* MODIFY
* .remove/.removeIn(_node, proto)
* .replace/.replaceIn(_node, protoTarget, protoReplacement)
* .forIn(_node, Consumer<T>)
* .forSelectedIn(_node, Consumer<T>)
*</PRE>
* @param <S> AST Statement implementation type
* @param <_S> _java._domain implementation type
*/
public class $stmt<S extends Statement, _S extends _stmt>
implements Template<_S>, $pattern.$java<_S, $stmt<S, _S>>, $body.$part, $method.$part, $constructor.$part {
/** @return the jdraft domain type this pattern works against (the {@link _stmt} wrapper). */
public Class<_S> _modelType(){
return (Class<_S>) _stmt.class;
}
/**
* This allows Statements to be commented out or uncommented in a conventional way.
* the "convention" is
* the "comment" MUST START with the text "<code&rt;" (preliminary line feeds and white space allowed)
* the "comment" MUST END with the text "</code&rt;" (line feeds and white space allowed after the end)
*
* Specifically, if we have:<PRE>
* class MyClass{
* void m(){
* /*<code>System.out.println(123);</code>* /
* //<code>assert(1==1);</code>
* /**<code>System.exit(-1);</code>* /
* }
* }
*
* // uncomment ALL Statements in code (that follow the convention)
* _class _c = $stmt.of().unComment(MyClass.class);
* // since all of the commented out Statements match $stmt.of(), they are all uncommented
* // RESULTS:
* class MyClass{
* void m(){
* System.out.println(123);
* assert(1==1);
* System.exit(-1);
* }
* }
* // UNcomment Statements that are System.out.println() Statements
* _class _c = $stmt.of("System.out.println($any$);").unComment(MyClass.class);
*
* //RESULTS : (ONLY uncomment System.out.println() statements )
* class MyClass{
* void m(){
* System.out.println(123);
* //<code>assert(1==1);</code>
* /**<code>System.exit(-1);</code>* /
* }
* }
* </PRE>
public static $comment STATEMENT_COMMENT = $comment.STATEMENT_COMMENT;
*/
/**
 * Builds a $stmt pattern from the source line identified by a stack trace element
 * (used by the lambda-based of(...) factories to read the caller's source).
 *
 * @param ste the stack trace element pointing at the caller's source line
 * @return a pattern built from the statement found at that source location
 */
private static <S extends Statement, _S extends _stmt> $stmt<S, _S> from(StackTraceElement ste ){
Statement st = Stmt.from( ste );
return new $stmt( (S)st );
}
/**
*
* @param stmtClass
* @param pattern
* @param <S>
* @return
*/
public static <S extends Statement, _S extends _stmt> $stmt<S, _S> of(Class<S> stmtClass, String...pattern){
return new $stmt<S, _S>(stmtClass, Text.combine(pattern));
}
/**
*
* @param <S>
* @param proto
* @return
*/
public static <S extends Statement, _S extends _stmt> $stmt<S, _S> of(Expr.Command proto ){
StackTraceElement ste = Thread.currentThread().getStackTrace()[2];
return from( ste );
}
/**
*
* @param <T>
* @param <S>
* @param proto
* @return
*/
public static <T extends Object, S extends Statement, _S extends _stmt> $stmt<S, _S> of(Consumer<T> proto ){
StackTraceElement ste = Thread.currentThread().getStackTrace()[2];
return from( ste );
}
/**
*
* @param <T>
* @param <U>
* @param <S>
* @param proto
* @return
*/
public static <T extends Object, U extends Object, S extends Statement, _S extends _stmt> $stmt<S, _S> of(BiConsumer<T,U> proto ){
StackTraceElement ste = Thread.currentThread().getStackTrace()[2];
return from( ste );
}
/**
*
* @param <T>
* @param <U>
* @param <V>
* @param <S>
* @param proto
* @return
*/
public static <T extends Object, U extends Object, V extends Object, S extends Statement, _S extends _stmt> $stmt<S, _S> of(TriConsumer<T, U, V> proto ){
StackTraceElement ste = Thread.currentThread().getStackTrace()[2];
return from( ste );
}
/**
*
* @param <T>
* @param <U>
* @param <V>
* @param <X>
* @param <S>
* @param proto
* @return
*/
public static <T extends Object, U extends Object, V extends Object, X extends Object, S extends Statement, _S extends _stmt> $stmt<S, _S> of(QuadConsumer<T, U, V, X> proto ){
StackTraceElement ste = Thread.currentThread().getStackTrace()[2];
return from( ste );
}
/**
 * Matches any statement whose concrete type is NOT one of the given classes.
 * (Note: the previous Javadoc here was a stale copy of {@link #of()}'s.)
 * @param stmtClasses the statement classes to EXCLUDE from matching
 * @return a pattern rejecting statements of the given classes
 */
public static $stmt<Statement, _stmt> not(Class<? extends Statement>... stmtClasses){
Set<Class<? extends Statement>> notClasses = new HashSet<>();
Arrays.stream(stmtClasses).forEach( s -> notClasses.add(s));
Predicate<Statement> ps = s-> notClasses.contains(s.getClass());
return ($stmt<Statement, _stmt>) new $stmt( Statement.class, t-> true )
.$not( ps );
}
/**
* A Statement that contains this expression
* @param $ex the expression contained within the Statement
* @return the new Statement matching only Statements that contain this expression
*/
public static $stmt<Statement, _stmt> of($ex $ex ){
return $stmt.of().$and( s-> $ex.isIn(s) );
}
/**
* A Statement
* @param $exprs
* @return
*/
public static $stmt<Statement, _stmt> hasAny($ex...$exprs ){
return $stmt.of().$and( s-> Arrays.stream($exprs).anyMatch( e -> e.isIn(s) ));
}
/**
* A Statement
* @param $exprs
* @return
*/
public static $stmt<Statement, _stmt> hasAll($ex...$exprs ){
return $stmt.of().$and( s-> Arrays.stream($exprs).allMatch( e -> e.isIn(s) ));
}
/**
 * Match ONLY statements of these classes.
 * @param stmtClasses the statement classes a candidate must be an instance of
 * @return a pattern matching only statements whose concrete type is one of {@code stmtClasses}
 */
public static $stmt<Statement, _stmt> of(Class<? extends Statement>... stmtClasses){
    // Renamed local from "notClasses" (a misleading copy/paste from not(...)):
    // here these are the ACCEPTED classes, not the excluded ones.
    Set<Class<? extends Statement>> acceptedClasses = new HashSet<>(Arrays.asList(stmtClasses));
    Predicate<Statement> ps = s -> acceptedClasses.contains(s.getClass());
    return ($stmt<Statement, _stmt>) new $stmt( Statement.class, t-> true )
            .$and( ps );
}
/**
 * Will match ANY statement, including the empty statement.
 * @param <S> the AST statement type
 * @return a pattern accepting every statement
 */
public static <S extends Statement, _S extends _stmt> $stmt<S, _S> of(){
return new $stmt( Statement.class, t-> true );
}
/**
 * Builds a pattern from the FIRST statement of the body written inline in an
 * anonymous object at the call site, e.g.:
 * <PRE>
 * $stmt.of( new Object(){
 *     if( $a$ != $b$){
 *         $then$:{}
 *     }else{
 *         $else$:{}
 *     }
 * });
 * </PRE>
 * @param anonymousObjectWithStatement anonymous object whose body carries the statement
 * @param <S> the AST statement type
 * @return a pattern built from the first statement of the anonymous body
 */
public static <S extends Statement, _S extends _stmt> $stmt<S,_S> of(Object anonymousObjectWithStatement ){
// Reads the caller's source via the stack trace to recover the anonymous body.
ObjectCreationExpr oce = Expr.newExpr( Thread.currentThread().getStackTrace()[2]);
BlockStmt bs = oce.findFirst(com.github.javaparser.ast.stmt.BlockStmt.class).get();
//?? do I want to do anything with $label$:{} ?
//return new $stmt<S>( );
return of( bs.getStatement(0) );
}
public static <S extends Statement, _S extends _stmt> $stmt<S, _S> of(String prototypePattern ){
return of( new String[] {prototypePattern});
}
/**
*
* @param pattern
* @return
*/
public static <S extends Statement, _S extends _stmt> $stmt<S,_S> of(String...pattern ){
S st = (S) Stmt.of(pattern);
return new $stmt<S, _S>(st);
}
/**
*
* @param astProto
* @return
*/
public static $stmt of(Statement astProto ){
return new $stmt<>(astProto);
}
/**
 * A pattern matching when ANY of the given prototype statements matches.
 * @param _protos prototype statements, each converted to its own $stmt pattern
 * @return an Or pattern over the given prototypes
 */
public static $stmt.Or or( Statement... _protos ){
$stmt[] arr = new $stmt[_protos.length];
for(int i=0;i<_protos.length;i++){
arr[i] = $stmt.of( _protos[i]);
}
return or(arr);
}
public static $stmt.Or or( $stmt...$tps ){
return new $stmt.Or($tps);
}
/**
* Returns a prototype that matches ANY assertStmt
* @return
*/
public static $stmt<AssertStmt, _assertStmt> assertStmt(){
return new $stmt( AssertStmt.class, "$assertStmt$" );
}
/**
* i.e."assert(1==1);"
* @param pattern
* @return and AssertStmt with the code
*/
public static $stmt<AssertStmt, _assertStmt> assertStmt(String... pattern ) {
return new $stmt( Stmt.assertStmt(pattern));
}
/**
* i.e."assert(1==1);"
* @param pattern
* @param constraint
* @return and AssertStmt with the code
*/
public static $stmt<AssertStmt, _assertStmt> assertStmt(String pattern, Predicate<AssertStmt> constraint) {
return new $stmt( Stmt.assertStmt(pattern) ).$and(constraint);
}
/**
* Returns a prototype that matches ANY assertStmt
* @return
*/
public static $stmt<BlockStmt, _blockStmt> blockStmt(){
return new $stmt( BlockStmt.class, "$blockStmt$" );
}
/**
*
* @param block
* @return
*/
public static $stmt<BlockStmt, _blockStmt> blockStmt(BlockStmt block){
return new $stmt( block );
}
/**
* NOTE: If you omit the opening and closing braces { }, they will be added
*
* i.e."{ int i=1; return i;}"
* @param pattern the code making up the blockStmt
* @return the BlockStmt
*/
public static $stmt<BlockStmt, _blockStmt> blockStmt(String... pattern ) {
return new $stmt( Stmt.blockStmt(pattern));
}
/**
* NOTE: If you omit the opening and closing braces { }, they will be added
*
* i.e."{ int i=1; return i;}"
* @param pattern the code making up the blockStmt
* @param constraint
* @return the BlockStmt
*/
public static $stmt<BlockStmt, _blockStmt> blockStmt(String pattern, Predicate<BlockStmt> constraint) {
return new $stmt( Stmt.blockStmt(pattern)).$and(constraint);
}
/**
* Returns a prototype that matches ANY assertStmt
* @return
*/
public static $stmt<BreakStmt, _breakStmt> breakStmt(){
return new $stmt( BreakStmt.class, "$breakStmt$" );
}
/**
* i.e."break;" or "break outer;"
* @param pattern String representing the break of
* @return the breakStmt
*/
public static $stmt<BreakStmt, _breakStmt> breakStmt(String... pattern ) {
return new $stmt( Stmt.breakStmt(pattern));
}
/**
* i.e."break;" or "break outer;"
* @param constraint
* @return the breakStmt
*/
public static $stmt<BreakStmt, _breakStmt> breakStmt(Predicate<_breakStmt> constraint) {
return breakStmt().$and(constraint);
}
/**
* i.e."break;" or "break outer;"
* @param pattern String representing the break of
* @param constraint
* @return the breakStmt
*/
public static $stmt<BreakStmt, _breakStmt> breakStmt(String pattern, Predicate<BreakStmt> constraint) {
return new $stmt( Stmt.breakStmt(pattern)).$and(constraint);
}
/**
* Returns a prototype that matches ANY continueStmt
* @return
*/
public static $stmt<ContinueStmt, _continueStmt> continueStmt(){
return new $stmt( ContinueStmt.class, "$continueStmt$" );
}
/**
* i.e."continue outer;"
* @param pattern
* @return
*/
public static $stmt<ContinueStmt, _continueStmt> continueStmt(String... pattern ) {
return new $stmt( Stmt.continueStmt(pattern));
}
/**
* i.e."continue outer;"
* @param pattern
* @param constraint
* @return
*/
public static $stmt<ContinueStmt, _continueStmt> continueStmt(String pattern, Predicate<ContinueStmt> constraint) {
return new $stmt( Stmt.continueStmt(pattern)).$and(constraint);
}
/**
* Returns a prototype that matches ANY assertStmt
* @return
*/
public static $doStmt doStmt(){
return $doStmt.of();
}
/**
* i.e."do{ System.out.println(1); }while( a < 100 );"
* @param ds
* @return
*/
public static $stmt<DoStmt, _doStmt> doStmt( DoStmt ds) {
return new $stmt( ds);
}
/**
* i.e."do{ System.out.println(1); }while( a < 100 );"
* @param pattern
* @return
*/
public static $stmt<DoStmt, _doStmt> doStmt(String... pattern ) {
return new $stmt( Stmt.doStmt(pattern));
}
/**
* i.e."do{ System.out.println(1); }while( a < 100 );"
* @param pattern
* @param constraint
* @return
*/
public static $stmt<DoStmt, _doStmt> doStmt(String pattern, Predicate<DoStmt> constraint) {
return new $stmt( Stmt.doStmt(pattern)).$and(constraint);
}
/**
*
* @return
*/
public static $stmt<EmptyStmt, _emptyStmt> emptyStmt(){
return new $stmt( new EmptyStmt() );
}
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> constructorCallStmt(){
return new $stmt( ExplicitConstructorInvocationStmt.class, "$constructorCallStmt$" );
}
/**
 * i.e. "this(100,2900);" or "super(1);"
 * @param pattern source of the explicit constructor invocation
 * @return a pattern matching constructor-call statements of this shape
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> constructorCallStmt(String... pattern ) {
    // CONSISTENCY FIX: parse with Stmt.constructorCallStmt(...) — as the sibling
    // thisCallStmt(String...)/superCallStmt(String...) overloads do — instead of the
    // generic Stmt.of(...), so the parsed node is guaranteed to be an
    // ExplicitConstructorInvocationStmt rather than whatever statement the text parses as.
    return new $stmt( Stmt.constructorCallStmt(pattern));
}
/**
* i.e."this(100,2900);"
* @param cts
* @return
*/
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> constructorCallStmt(ExplicitConstructorInvocationStmt cts) {
return new $stmt( cts );
}
/**
 * Returns a pattern that matches ANY explicit this(...) constructor call statement
 * @return the $stmt pattern
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> thisCallStmt(){
return new $stmt( ExplicitConstructorInvocationStmt.class, "$thisCallStmt$" );
}
/**
 * i.e."this(100,2900);"
 * @param pattern the source form(s) of the this(...) call to match
 * @return the $stmt pattern
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> thisCallStmt(String... pattern ) {
return new $stmt( Stmt.constructorCallStmt(pattern));
}
/**
 * i.e."this(100,2900);"
 * @param cts the explicit constructor invocation AST node to match
 * @return the $stmt pattern
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> thisCallStmt(ExplicitConstructorInvocationStmt cts) {
return new $stmt( cts );
}
/**
 * i.e."this(100,2900);"
 * @param pattern the source form of the this(...) call to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> thisCallStmt(String pattern, Predicate<ExplicitConstructorInvocationStmt> constraint) {
return new $stmt( Stmt.constructorCallStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY explicit super(...) constructor call statement
 * @return the $stmt pattern
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> superCallStmt(){
return new $stmt( ExplicitConstructorInvocationStmt.class, "$superCallStmt$" );
}
/**
 * i.e."super(100,2900);"
 * @param pattern the source form(s) of the super(...) call to match
 * @return the $stmt pattern
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> superCallStmt(String... pattern ) {
return new $stmt( Stmt.constructorCallStmt(pattern));
}
/**
 * i.e."super(100,2900);"
 * @param cts the explicit constructor invocation AST node to match
 * @return the $stmt pattern
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> superCallStmt(ExplicitConstructorInvocationStmt cts) {
return new $stmt( cts );
}
/**
 * i.e."super(100,2900);"
 * @param pattern the source form of the super(...) call to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<ExplicitConstructorInvocationStmt, _constructorCallStmt> superCallStmt(String pattern, Predicate<ExplicitConstructorInvocationStmt> constraint) {
return new $stmt( Stmt.constructorCallStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY expression statement, i.e."s += t;"
 * @return the $stmt pattern
 */
public static $stmt<ExpressionStmt, _exprStmt> expressionStmt() {
return new $stmt( ExpressionStmt.class, "$expressionStmt$");
}
/**
 * i.e."s += t;"
 * @param es the expression statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<ExpressionStmt, _exprStmt> expressionStmt(ExpressionStmt es) {
return new $stmt( es);
}
/**
 * i.e."s += t;"
 * @param pattern the source form(s) of the expression statement to match
 * @return the $stmt pattern
 */
public static $stmt<ExpressionStmt, _exprStmt> expressionStmt(String... pattern ) {
return new $stmt( Stmt.expressionStmt(pattern));
}
/**
 * i.e."s += t;"
 * @param pattern the source form of the expression statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<ExpressionStmt, _exprStmt> expressionStmt(String pattern, Predicate<ExpressionStmt> constraint) {
return new $stmt( Stmt.expressionStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY for statement, i.e."for(int i=0; i<100;i++) {...}"
 * @return the $stmt pattern
 */
public static $stmt<ForStmt, _forStmt> forStmt( ) {
return new $stmt( ForStmt.class, "$forStmt$");
}
/**
 * i.e."for(int i=0; i<100;i++) {...}"
 * @param pattern the source form(s) of the for statement to match
 * @return the $stmt pattern
 */
public static $stmt<ForStmt, _forStmt> forStmt( String... pattern ) {
return new $stmt( Stmt.forStmt(pattern));
}
/**
 * i.e."for(int i=0; i<100;i++) {...}"
 * @param pattern the source form of the for statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<ForStmt, _forStmt> forStmt( String pattern, Predicate<ForStmt> constraint ) {
return new $stmt( Stmt.forStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY for-each statement, i.e."for(String element:arr){...}"
 * @return the $stmt pattern
 */
public static $stmt<ForEachStmt, _forEachStmt> forEachStmt() {
return new $stmt( ForEachStmt.class, "$forEachStmt$");
}
/**
 * i.e."for(String element:arr){...}"
 * @param fes the for-each statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<ForEachStmt, _forEachStmt> forEachStmt(ForEachStmt fes) {
return new $stmt( fes);
}
/**
 * i.e."for(String element:arr){...}"
 * @param pattern the source form(s) of the for-each statement to match
 * @return the $stmt pattern
 */
public static $stmt<ForEachStmt, _forEachStmt> forEachStmt( String... pattern ) {
return new $stmt( Stmt.forEachStmt(pattern));
}
/**
 * i.e."for(String element:arr){...}"
 * @param pattern the source form of the for-each statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<ForEachStmt, _forEachStmt> forEachStmt( String pattern, Predicate<ForEachStmt> constraint) {
return new $stmt( Stmt.forEachStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY if statement, i.e."if(a==1){...}"
 * @return the $stmt pattern
 */
public static $stmt<IfStmt, _ifStmt> ifStmt( ) {
return new $stmt( IfStmt.class, "$ifStmt$");
}
/**
 * i.e."if(a==1){...}"
 * @param is the if statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<IfStmt, _ifStmt> ifStmt( IfStmt is) {
return new $stmt( is );
}
/**
 * i.e."if(a==1){...}"
 * @param pattern the source form(s) of the if statement to match
 * @return the $stmt pattern
 */
public static $stmt<IfStmt, _ifStmt> ifStmt( String... pattern ) {
return new $stmt( Stmt.ifStmt(pattern));
}
/**
 * i.e."if(a==1){...}"
 * @param pattern the source form of the if statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<IfStmt, _ifStmt> ifStmt( String pattern, Predicate<IfStmt> constraint) {
return new $stmt( Stmt.ifStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY labeled statement, i.e."outer: start = getValue();"
 * @return the $stmt pattern
 */
public static $stmt<LabeledStmt, _labeledStmt> labeledStmt( ) {
return new $stmt( LabeledStmt.class, "$labeledStmt$");
}
/**
 * i.e."outer: start = getValue();"
 * @param ls the labeled statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<LabeledStmt, _labeledStmt> labeledStmt( LabeledStmt ls) {
return new $stmt( ls );
}
/**
 * i.e."outer: start = getValue();"
 * @param pattern the source form(s) of the labeled statement to match
 * @return the $stmt pattern
 */
public static $stmt<LabeledStmt, _labeledStmt> labeledStmt( String... pattern ) {
return new $stmt( Stmt.labeledStmt(pattern));
}
/**
 * i.e."outer: start = getValue();"
 * @param pattern the source form of the labeled statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<LabeledStmt, _labeledStmt> labeledStmt( String pattern, Predicate<LabeledStmt> constraint) {
return new $stmt( Stmt.labeledStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY local class declaration statement, i.e."class C{ int a, b; }"
 * @return the $stmt pattern
 */
public static $stmt<LocalClassDeclarationStmt, _localClassStmt> localClassStmt() {
return new $stmt( LocalClassDeclarationStmt.class, "$localClass$");
}
/**
 * i.e."class C{ int a, b; }"
 * @param lcs the local class declaration statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<LocalClassDeclarationStmt, _localClassStmt> localClassStmt(LocalClassDeclarationStmt lcs) {
return new $stmt( lcs );
}
/**
 * Converts from a String to a LocalClass
 * i.e. "class C{ int a, b; }"
 * @param pattern the code that represents a local class
 * @return the $stmt pattern
 */
public static $stmt<LocalClassDeclarationStmt, _localClassStmt> localClassStmt( String... pattern ) {
return new $stmt( Stmt.localClassDeclarationStmt(pattern));
}
/**
 * Converts from a String to a LocalClass
 * i.e."class C{ int a, b; }"
 * @param pattern the code that represents a local class
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<LocalClassDeclarationStmt, _localClassStmt> localClassStmt( String pattern, Predicate<LocalClassDeclarationStmt> constraint) {
return new $stmt( Stmt.localClassDeclarationStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY return statement, i.e."return VALUE;"
 * @return the $stmt pattern
 */
public static $stmt<ReturnStmt, _returnStmt> returnStmt() {
return new $stmt(ReturnStmt.class, "$returnStmt$");
}
/**
 * i.e."return VALUE;"
 * @param rs the return statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<ReturnStmt, _returnStmt> returnStmt(ReturnStmt rs){
return new $stmt(rs);
}
/**
 * i.e."return VALUE;"
 * @param pattern the source form(s) of the return statement to match
 * @return the $stmt pattern
 */
public static $stmt<ReturnStmt, _returnStmt> returnStmt( String... pattern ) {
return new $stmt( Stmt.returnStmt(pattern));
}
/**
 * i.e."return VALUE;"
 * @param pattern the source form of the return statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<ReturnStmt, _returnStmt> returnStmt( String pattern, Predicate<ReturnStmt> constraint ) {
return new $stmt( Stmt.returnStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY switch statement
 * @return the $stmt pattern
 */
public static $stmt<SwitchStmt, _switchStmt> switchStmt() {
return new $stmt(SwitchStmt.class, "$switchStmt$");
}
/**
 * Pattern matching a specific switch statement
 * @param ss the switch statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<SwitchStmt, _switchStmt> switchStmt(SwitchStmt ss) {
return new $stmt(ss);
}
/**
 * Pattern matching a switch statement built from the source pattern
 * @param pattern the source form(s) of the switch statement to match
 * @return the $stmt pattern
 */
public static $stmt<SwitchStmt, _switchStmt> switchStmt( String... pattern ) {
return new $stmt( Stmt.switchStmt(pattern));
}
/**
 * Pattern matching a switch statement built from the source pattern and constraint
 * @param pattern the source form of the switch statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<SwitchStmt, _switchStmt> switchStmt( String pattern, Predicate<SwitchStmt> constraint) {
return new $stmt( Stmt.switchStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY synchronized statement
 * @return the $stmt pattern
 */
public static $stmt<SynchronizedStmt, _synchronizedStmt> synchronizedStmt() {
return new $stmt(SynchronizedStmt.class, "$synchronizedStmt$" );
}
/**
 * Pattern matching a synchronized statement built from the source pattern
 * @param pattern the source form(s) of the synchronized statement to match
 * @return the $stmt pattern
 */
public static $stmt<SynchronizedStmt, _synchronizedStmt> synchronizedStmt( String... pattern ) {
return new $stmt( Stmt.synchronizedStmt(pattern));
}
/**
 * Pattern matching a synchronized statement built from the source pattern and constraint
 * @param pattern the source form of the synchronized statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<SynchronizedStmt, _synchronizedStmt> synchronizedStmt( String pattern, Predicate<SynchronizedStmt> constraint ) {
return new $stmt( Stmt.synchronizedStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY throw statement
 * @return the $stmt pattern
 */
public static $stmt<ThrowStmt, _throwStmt> throwStmt( ) {
return new $stmt(ThrowStmt.class, "$throwStmt$");
}
/**
 * Pattern matching a specific throw statement
 * @param ts the throw statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<ThrowStmt, _throwStmt> throwStmt( ThrowStmt ts ){
return new $stmt( ts);
}
/**
 * Pattern matching a throw statement built from the source pattern
 * @param pattern the source form(s) of the throw statement to match
 * @return the $stmt pattern
 */
public static $stmt<ThrowStmt, _throwStmt> throwStmt( String... pattern ) {
return new $stmt( Stmt.throwStmt(pattern));
}
/**
 * Pattern matching a throw statement built from the source pattern and constraint
 * @param pattern the source form of the throw statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<ThrowStmt, _throwStmt> throwStmt( String pattern, Predicate<ThrowStmt> constraint) {
return new $stmt( Stmt.throwStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY try statement
 * @return the $stmt pattern
 */
public static $stmt<TryStmt, _tryStmt> tryStmt( ) {
return new $stmt(TryStmt.class, "$tryStmt$" );
}
/**
 * Pattern matching a specific try statement
 * @param ts the try statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<TryStmt, _tryStmt> tryStmt(TryStmt ts ){
return new $stmt(ts);
}
/**
 * i.e."try{ clazz.getMethod("fieldName"); }"
 * @param pattern the source form(s) of the try statement to match
 * @return the $stmt pattern
 */
public static $stmt<TryStmt, _tryStmt> tryStmt( String... pattern ) {
return new $stmt( Stmt.tryStmt(pattern));
}
/**
 * i.e."try{ clazz.getMethod("fieldName"); }"
 * @param pattern the source form of the try statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<TryStmt, _tryStmt> tryStmt( String pattern, Predicate<TryStmt> constraint ) {
return new $stmt( Stmt.tryStmt(pattern)).$and(constraint);
}
/**
 * Returns a pattern that matches ANY while statement
 * @return the $stmt pattern
 */
public static $stmt<WhileStmt, _whileStmt> whileStmt( ) {
return new $stmt(WhileStmt.class, "$whileStmt$");
}
/**
 * i.e."while(i< 1) { ...}"
 * @param pattern the source form(s) of the while statement to match
 * @return the $stmt pattern
 */
public static $stmt<WhileStmt, _whileStmt> whileStmt( String... pattern ) {
return new $stmt( Stmt.whileStmt(pattern));
}
/**
 * Pattern matching a specific while statement
 * @param ws the while statement AST node to match
 * @return the $stmt pattern
 */
public static $stmt<WhileStmt, _whileStmt> whileStmt( WhileStmt ws){
return new $stmt( ws);
}
/**
 * i.e."while(i< 1) { ...}"
 * @param pattern the source form of the while statement to match
 * @param constraint additional predicate a candidate must satisfy
 * @return the $stmt pattern
 */
public static $stmt<WhileStmt, _whileStmt> whileStmt( String pattern, Predicate<WhileStmt> constraint ) {
return new $stmt( Stmt.whileStmt(pattern)).$and(constraint);
}
/**
 * Match predicate applied to each candidate; note that despite the name it is
 * tested against the {@link _stmt} domain wrapper of the AST {@link Statement}
 * (select() calls astMatch.test((_S) _stmt.of(s)), not astMatch.test(s))
 */
public Predicate<_S> astMatch = s -> true;
/*
 * commented-out alternative field, kept for reference:
 * "Match predicate tested against the domain _stmt implementation"
 * public Predicate<_S> domainMatch = _s -> true;
 */
/** The stencil (text pattern containing $parameter$s) representing the statement */
public Stencil stmtStencil;
/** the AST class of statements this pattern can match */
public Class<S> statementClass;
/**
 * Builds a match-anything pattern for the given statement class.
 * @param statementClass the AST statement class to match
 */
protected $stmt( Class<S> statementClass ){
this( statementClass, "$any$");
}
/**
 * Builds a pattern for the given statement class from a text pattern.
 * @param statementClass the AST statement class to match
 * @param pattern the stencil text (may contain $parameter$s)
 */
protected $stmt(Class<S> statementClass, String pattern){
this.astMatch = s ->true;
this.statementClass = statementClass;
this.stmtStencil = Stencil.of(pattern);
}
/**
 * Builds a match-anything pattern for the given statement class with a predicate constraint.
 * @param statementClass the AST statement class to match
 * @param _matchFn predicate a candidate (as a _stmt wrapper) must satisfy
 */
protected $stmt(Class<S> statementClass, Predicate<_S> _matchFn){
this.statementClass = statementClass;
this.stmtStencil = Stencil.of("$any$");
this.astMatch = _matchFn;
}
/**
 * Builds a pattern from a concrete statement: the statement's source text
 * (printed with labeled statements rendered as embeds) becomes the stencil.
 * @param st the statement whose text forms the pattern
 */
public $stmt( S st ){
this.statementClass = (Class<S>)st.getClass();
this.stmtStencil = Stencil.of( st.toString(PRINT_$LABELED_AS_EMBED) );
//Hmm, I could just have a specific visitor that transforms
// $label: assert($value$);
// TO
/** New... for Embeds (sketch of a future feature, intentionally disabled)
st.walk(LabeledStmt.class, ls-> {
String labelName = ls.getLabel().asString();
if( labelName.startsWith("$") ){
//this is MEANT to be a Stencil.Embed
Stencil embedBody = ls.getStatement()
}
});
*/
}
/**
 * ANDs an additional matching constraint onto this pattern.
 * @param constraint predicate (over the _stmt domain wrapper) that must also hold
 * @return this pattern (for chaining)
 */
public $stmt<S, _S> $and(Predicate<_S> constraint ){
this.astMatch = this.astMatch.and(constraint);
return this;
}
/* commented-out overload, kept for reference (an AST-typed variant of $and):
public $stmt<S, _S> $and(Predicate<S> constraint ){
this.astMatch = this.astMatch.and(constraint);
return this;
}
*/
/**
 * Parameterizes the stencil: occurrences of the target text become a $paramName$ parameter.
 * @param target the literal text to parameterize
 * @param $paramName the parameter name to substitute for it
 * @return this pattern (for chaining)
 */
@Override
public $stmt $(String target, String $paramName) {
this.stmtStencil = this.stmtStencil.$(target, $paramName);
return this;
}
/**
 * NOTE(review): unimplemented stub — always returns null, so callers will get an
 * NPE if they use the result. The Tokens-based overload below IS implemented;
 * presumably this should delegate to it — TODO confirm and implement.
 */
@Override
public Template<_S> $hardcode(Translator translator, Map<String, Object> keyValues) {
return null;
}
/**
 * Convert (normalize) the expr String to an Expression, then to a String,
 * and parameterize occurrences of that normalized text.
 * @param expr source of an expression to parameterize
 * @param $name the parameter name to substitute for it
 * @return this pattern (for chaining)
 */
public $stmt $expr( String expr, String $name){
return $(Expr.of(expr).toString(), $name);
}
/**
 * Parameterize occurrences of the expression's source text in the stencil.
 * @param expr the expression whose text to parameterize
 * @param $name the parameter name to substitute for it
 * @return this pattern (for chaining)
 */
public $stmt $(Expression expr, String $name ){
String exprString = expr.toString();
return $(exprString, $name);
}
/**
 * Parameterize a whole statement WITHIN this pattern's stencil (i.e. the body of
 * an if/loop). The surrounding whitespace/indentation of each occurrence is
 * folded into the parameter, so a template like:<PRE>
 * if(a){$name$}
 * </PRE>
 * matches all of:<PRE>
 * if(a){}
 * if(a){ singleStatement(); }
 * if(a){ multi(); statement(); }
 * </PRE>
 * If the whitespace were NOT folded in, the template would retain the original
 * line breaks/indent and would only match bodies with that exact layout.
 *
 * @param stmt the statement to parameterize
 * @param $name the name used for the statement parameter
 * @return the modified $stmt
 */
public $stmt $(Statement stmt, String $name ){
    String stmtString = stmt.toString( Print.PRINT_NO_COMMENTS );
    List<String> stringsToReplace = new ArrayList<>();
    String fixedText = this.stmtStencil.getTextForm().getFixedText();
    // collect every occurrence of the statement text, widened to include its
    // surrounding padding (whitespace/indent), so the padding joins the parameter
    int nextInd = fixedText.indexOf(stmtString);
    while( nextInd >= 0 ){
        String padded = Text.matchNextPaddedTarget(fixedText, stmtString, nextInd );
        stringsToReplace.add( padded );
        nextInd = fixedText.indexOf(stmtString, nextInd + stmtString.length() );
    }
    // parameterize each padded occurrence
    // (fix: removed an unused local that recomputed each occurrence's index
    // in the fixed text without ever using it)
    for( String toReplace : stringsToReplace ){
        this.stmtStencil = this.stmtStencil.$( toReplace, $name);
    }
    return this;
}
@Override
// Fills the stencil's parameters positionally (default translator) and parses
// the result into the domain statement wrapper.
public _S fill(Object...values){
String str = stmtStencil.fill(Translator.DEFAULT_TRANSLATOR, values);
return (_S) _stmt.of((S) Stmt.of( str));
}
/**
 * Fills the stencil's parameters positionally: the i-th value is bound to the
 * i-th (normalized) parameter name, then drafts the statement.
 * @param t the translator used to convert values to text
 * @param values positional values for the stencil's parameters
 * @return the drafted domain statement
 * @throws _jdraftException if fewer values than parameters are supplied
 */
@Override
public _S fill(Translator t, Object...values){
    List<String> keys = $listNormalized();
    if( values.length < keys.size() ){
        throw new _jdraftException("not enough values("+values.length+") to fill ("+keys.size()+") variables "+ keys);
    }
    Map<String,Object> kvs = new HashMap<>();
    // fix: iterate over the keys, not the values — iterating values.length threw
    // IndexOutOfBoundsException on keys.get(i) when MORE values than parameters
    // were passed; surplus trailing values are now ignored
    for(int i=0;i<keys.size();i++){
        kvs.put( keys.get(i), values[i]);
    }
    return draft( t, kvs );
}
@Override
// Drafts a statement from alternating key/value pairs (default translator),
// then post-processes any $-labeled statements with the same tokens.
public _S draft(Object...keyValues ){
Tokens tokens = Tokens.of(keyValues);
return (_S) _stmt.of( parameterize$LabeledStmt( Stmt.of(stmtStencil.draft( tokens )), tokens ));
}
@Override
// Drafts a statement from alternating key/value pairs with an explicit translator.
public _S draft(Translator t, Object...keyValues ){
Tokens tokens = Tokens.of(keyValues);
return (_S) _stmt.of(parameterize$LabeledStmt( Stmt.of(stmtStencil.draft( t, tokens )), tokens ));
}
@Override
// Drafts a statement from a token map (default translator).
public _S draft(Map<String,Object> tokens ){
return (_S) _stmt.of((S) parameterize$LabeledStmt( Stmt.of(stmtStencil.draft( tokens )), tokens ));
}
@Override
// Drafts a statement from a token map with an explicit translator.
public _S draft(Translator t, Map<String,Object> tokens ){
return (_S) _stmt.of(parameterize$LabeledStmt( Stmt.of(stmtStencil.draft( t, tokens )), tokens ));
}
/**
 * Does this pattern match the given AST node?
 * (only Statement nodes can match; any other node type returns false)
 * @param node any AST node
 * @return true if the node is a Statement matching this pattern
 */
public boolean match( Node node ) {
    return node instanceof Statement && matches((Statement) node);
}
/**
 * Does this pattern match the statement parsed from the given source line(s)?
 * @param stmt source line(s) forming a single statement
 * @return true if the parsed statement matches this pattern
 */
public boolean matches( String...stmt ){
    return matches( Stmt.of(stmt));
}
/**
 * Does this pattern match the given AST statement?
 * @param astStmt the statement to test
 * @return true if a Select can be made from the statement (i.e. it matches)
 */
public boolean matches( Statement astStmt ){
    return select(astStmt) != null;
}
@Override
// Lists the stencil's parameter names, in order (may contain duplicates).
public List<String> $list(){
return this.stmtStencil.$list();
}
@Override
// Lists the stencil's normalized (deduplicated) parameter names.
public List<String> $listNormalized(){
return this.stmtStencil.$listNormalized();
}
/**
 * Hardcodes (permanently substitutes) the given token values into the stencil.
 * @param translator the translator used to convert values to text
 * @param kvs the key/value tokens to hardcode
 * @return this pattern (for chaining)
 */
public $stmt $hardcode(Translator translator, Tokens kvs ) {
this.stmtStencil = this.stmtStencil.$hardcode(translator, kvs);
return this;
}
// Selects against a statement parsed from a single source line.
public Select<S, _S> select(String s){
return select( new String[]{s});
}
/**
 * Selects against a statement parsed from the given source line(s).
 * @param stmt source line(s) forming a single statement
 * @return the Select, or null if the source does not parse or does not match
 */
public Select<S, _S> select(String...stmt){
try{
return select(Stmt.of(stmt));
}catch(Exception e){
// unparseable source simply doesn't match
return null;
}
}
// True if this pattern matches ANY statement: a pass-through predicate
// (probed with null — a predicate that inspects its argument will throw,
// which is caught and reported as false), the base Statement class, and a
// match-anything stencil.
public boolean isMatchAny(){
try{
return this.astMatch.test(null)
&& this.statementClass == Statement.class
&& this.stmtStencil.isMatchAny();
}catch(Exception e){
return false;
}
}
/**
 * Selects against a domain statement wrapper: checks statement class,
 * the astMatch predicate, then parses the stencil against the source text.
 * @param _s the domain statement (may be null)
 * @return the Select with parsed tokens, or null if it does not match
 */
public Select<S, _S> select(_stmt _s){
if( _s == null ){
return null;
}
if( !statementClass.isAssignableFrom(_s.node().getClass())){
return null;
}
S s = (S)_s.node();
if( ! astMatch.test((_S) _stmt.of(s))){
return null;
}
// compare text with comments stripped so comments never affect matching
Tokens st = this.stmtStencil.parse(_s.node().toString(NO_COMMENTS));
if( st == null ){
return null;
}
return new Select( _s.node(), $tokens.of(st) );
}
/**
 * Selects against an AST statement: checks statement class, the astMatch
 * predicate, then parses the stencil against the (comment-free) source text.
 * @param astStmt the statement to test (may be null)
 * @return the Select with parsed tokens, or null if it does not match
 */
public Select<S, _S> select(Statement astStmt ){
if( astStmt == null ){
return null;
}
if( !statementClass.isAssignableFrom(astStmt.getClass())){
return null;
}
S s = (S)astStmt;
if( ! astMatch.test((_S) _stmt.of(s))){
return null;
}
Tokens st = this.stmtStencil.parse(astStmt.toString(NO_COMMENTS));
if( st == null ){
return null;
}
return new Select( astStmt, $tokens.of(st) );
}
/**
 * Returns the Select for the first Statement matching this pattern within the
 * domain entity (unwraps compilation units, types, and bodies to an AST node).
 * @param _j the domain entity to search
 * @return the first matching Select, or null if none match
 */
@Override
public Select<S, _S> selectFirstIn(_java._domain _j){
if( _j instanceof _codeUnit){
_codeUnit _c = (_codeUnit) _j;
if( _c.isTopLevel() ){
// search the whole compilation unit (including package/imports scope)
return selectFirstIn(_c.astCompilationUnit());
}
_type _t = (_type) _j; //only possible
return selectFirstIn(_t.node());
}
if( _j instanceof _body ){
return selectFirstIn( ((_body)_j).ast() );
}
return selectFirstIn( ((_tree._node) _j).node() );
}
/**
 * Returns the Select for the first Statement (in AST pre-order) under the given
 * node that matches this pattern.
 * @param astNode the root node to search under
 * @return a Select containing the Statement and the key value pairs from the pattern,
 *         or null if nothing matches
 */
@Override
public Select<S, _S> selectFirstIn(Node astNode ){
    return astNode.findFirst(this.statementClass, s -> this.matches(s) )
        .map(this::select)
        .orElse(null);
}
/**
 * Returns the Select for the first matching Statement whose Select also
 * satisfies the given constraint, within the domain entity.
 * NOTE(review): unlike the unconstrained selectFirstIn(_java._domain) overload,
 * this one has no special branch for _body instances — confirm whether _body
 * should be unwrapped here too.
 * @param _n the domain entity to search
 * @param selectConstraint predicate the Select must satisfy
 * @return the first matching Select, or null if none match
 */
public Select<S, _S> selectFirstIn(_java._domain _n, Predicate<Select<S, _S>> selectConstraint ){
if( _n instanceof _codeUnit){
if( ((_codeUnit) _n).isTopLevel()){
return selectFirstIn( ((_codeUnit) _n).astCompilationUnit(), selectConstraint );
}
return selectFirstIn( ((_type)_n).node(), selectConstraint);
}
return selectFirstIn( ((_tree._node)_n).node(), selectConstraint );
}
/**
 * Returns the Select for the first Statement under astNode that matches this
 * pattern AND whose Select satisfies the given constraint.
 * @param astNode the root node to search under
 * @param selectConstraint predicate the Select must satisfy
 * @return a Select containing the Statement and the key value pairs from the pattern,
 *         or null if nothing matches
 */
public Select<S, _S> selectFirstIn(Node astNode, Predicate<Select<S, _S>> selectConstraint ){
Optional<S> f = astNode.findFirst(this.statementClass, s -> {
Select<S, _S> sel = this.select(s);
return sel != null && selectConstraint.test(sel);
});
//s -> this.matches(s) );
if( f.isPresent()){
// re-select to rebuild the Select for the found statement
return this.select(f.get());
}
return null;
}
/**
 * Returns the first matching Statement (as a domain wrapper) under astStartNode
 * that also satisfies the given domain-level predicate.
 * @param astStartNode the root node to search under
 * @param statementMatchFn predicate the matched domain statement must satisfy
 * @return the first matching domain statement, or null if none match
 */
@Override
public _S firstIn(Node astStartNode, Predicate<_S> statementMatchFn ){
Optional<S> f = astStartNode.findFirst(this.statementClass, s ->{
Select sel = select(s);
return sel != null && statementMatchFn.test((_S)sel._s);
});
if( f.isPresent()){
return (_S) _stmt.of(f.get());
}
return null;
}
@Override
// Walks all statements under astNode; for each one matching this pattern and
// the given predicate, applies the action. Returns astNode for chaining.
public <N extends Node> N forEachIn(N astNode, Predicate<_S> statementMatchFn, Consumer<_S> statementActionFn){
astNode.walk(this.statementClass, e-> {
Select sel = select(e);
if( sel != null && statementMatchFn.test((_S)sel._s) ) {
statementActionFn.accept((_S)sel._s);
}
});
return astNode;
}
/**
 * Walks all statements under astNode; applies the action to each matching Select.
 * @param <N> the AST node type
 * @param astNode the root node to walk
 * @param selectedActionFn action applied to each matching Select
 * @return astNode (for chaining)
 */
public <N extends Node> N forSelectedIn(N astNode, Consumer<Select<S, _S>> selectedActionFn){
astNode.walk(this.statementClass, e-> {
Select<S,_S> sel = select( e );
if( sel != null ){
selectedActionFn.accept( sel );
}
});
return astNode;
}
/**
 * Loads the class's source as a _type and applies the action to each matching Select.
 * @param clazz the class whose source to walk
 * @param selectedActionFn action applied to each matching Select
 * @return the modified _type
 */
public <_CT extends _type> _CT forSelectedIn(Class clazz, Consumer<Select<S, _S>> selectedActionFn){
return (_CT)forSelectedIn((_type) _type.of(clazz), selectedActionFn);
}
/**
 * Walks all statements in the domain entity; applies the action to each matching Select.
 * @param <_J> the domain entity type
 * @param _j the domain entity to walk
 * @param selectedActionFn action applied to each matching Select
 * @return _j (for chaining)
 */
public <_J extends _java._domain> _J forSelectedIn(_J _j, Consumer<Select<S, _S>> selectedActionFn){
Walk.in(_j, this.statementClass, e->{
Select<S, _S> sel = select( e );
if( sel != null ){
selectedActionFn.accept( sel );
}
});
return _j;
}
/**
 * Walks all statements under astNode; applies the action to each matching Select
 * that also satisfies the constraint.
 * @param <N> the AST node type
 * @param astNode the root node to walk
 * @param selectConstraint predicate a Select must satisfy
 * @param selectedActionFn action applied to each qualifying Select
 * @return astNode (for chaining)
 */
public <N extends Node> N forSelectedIn(N astNode, Predicate<Select<S, _S>> selectConstraint, Consumer<Select<S, _S>> selectedActionFn){
astNode.walk(this.statementClass, e-> {
Select<S, _S> sel = select( e );
if( sel != null && selectConstraint.test(sel) ){
selectedActionFn.accept( sel );
}
});
return astNode;
}
/**
 * Loads the class's source and applies the action to each matching, constrained Select.
 * @param clazz the class whose source to walk
 * @param selectConstraint predicate a Select must satisfy
 * @param selectedActionFn action applied to each qualifying Select
 * @return the modified _type
 */
public <CT extends _type> CT forSelectedIn(Class clazz, Predicate<Select<S, _S>> selectConstraint, Consumer<Select<S, _S>> selectedActionFn){
return forSelectedIn((CT) _type.of(clazz), selectConstraint, selectedActionFn);
}
/**
 * Walks all statements in the domain entity; applies the action to each matching
 * Select that also satisfies the constraint.
 * @param <_J> the domain entity type
 * @param _j the domain entity to walk
 * @param selectConstraint predicate a Select must satisfy
 * @param selectedActionFn action applied to each qualifying Select
 * @return _j (for chaining)
 */
public <_J extends _java._domain> _J forSelectedIn(_J _j, Predicate<Select<S, _S>> selectConstraint, Consumer<Select<S,_S>> selectedActionFn){
Walk.in(_j, this.statementClass, e->{
Select<S, _S> sel = select( e );
if( sel != null && selectConstraint.test(sel)){
selectedActionFn.accept( sel );
}
});
return _j;
}
/** Write the Statements without comments (used by select() so comments never
 * affect matching/comparison) */
public static final PrettyPrinterConfiguration NO_COMMENTS =
new PrettyPrinterConfiguration()
.setPrintComments(false).setPrintJavadoc(false);
/**
 * Lists all matching Selects in the class's source.
 * @param clazz the class whose source to search
 * @return the list of matching Selects (possibly empty)
 */
@Override
public List<Select<S, _S>> listSelectedIn(Class clazz){
return listSelectedIn((_type) _type.of(clazz));
}
@Override
// Walks all statements under astNode and collects each matching Select.
public List<Select<S, _S>> listSelectedIn(Node astNode ){
List<Select<S, _S>>sts = new ArrayList<>();
astNode.walk(this.statementClass, st-> {
Select sel = select(st);
if( sel != null ){
sts.add( sel); //new Select( (T)st, tokens) );
}
});
return sts;
}
/**
 * Lists all matching Selects in the class's source that satisfy the constraint.
 * @param clazz the class whose source to search
 * @param selectConstraint predicate a Select must satisfy to be listed
 * @return the list of qualifying Selects (possibly empty)
 */
public List<Select<S, _S>> listSelectedIn(Class clazz, Predicate<Select<S, _S>> selectConstraint ){
return listSelectedIn( (_type) _type.of(clazz), selectConstraint);
}
/**
 * Lists all matching Selects under astNode that satisfy the constraint.
 * @param astNode the root node to search under
 * @param selectConstraint predicate a Select must satisfy to be listed
 * @return the list of qualifying Selects (possibly empty)
 */
public List<Select<S, _S>> listSelectedIn(Node astNode, Predicate<Select<S, _S>> selectConstraint ){
List<Select<S, _S>>sts = new ArrayList<>();
astNode.walk(this.statementClass, st-> {
Select sel = select(st);
if( sel != null && selectConstraint.test(sel)){
sts.add( sel); //new Select( (T)st, tokens) );
}
});
return sts;
}
/**
 * Lists all matching Selects in the domain entity that satisfy the constraint.
 * @param _j the domain entity to search
 * @param selectConstraint predicate a Select must satisfy to be listed
 * @return the list of qualifying Selects (possibly empty)
 */
public List<Select<S, _S>> listSelectedIn(_java._domain _j, Predicate<Select<S, _S>> selectConstraint ){
List<Select<S, _S>>sts = new ArrayList<>();
Walk.in(_j, this.statementClass, st->{
Select sel = select(st);
if (sel != null && selectConstraint.test(sel)){
sts.add(sel);
}
});
return sts;
}
/**
 * Replaces all matching statements in the class's source with the replacement pattern.
 * @param clazz the class whose source to modify
 * @param $repl the replacement statement pattern
 * @return the modified _type
 */
public <_CT extends _type> _CT replaceIn(Class clazz, $stmt $repl){
return (_CT)replaceIn( (_type) _type.of(clazz), $repl);
}
/**
 * Replaces all matching statements in the class's source with the replacement source.
 * @param clazz the class whose source to modify
 * @param replacement source line(s) of the replacement statement(s)
 * @return the modified _type
 */
public <_CT extends _type> _CT replaceIn( Class clazz, String...replacement){
return (_CT)replaceIn( (_type) _type.of(clazz), replacement);
}
/**
 * Replaces all matching statements in the domain entity with the replacement pattern.
 * @param <_J> the domain entity type
 * @param _j the domain entity to modify
 * @param $repl the replacement statement pattern
 * @return the modified entity
 */
public <_J extends _java._domain> _J replaceIn(_J _j, $stmt $repl ){
$stmts $sn = new $stmts($repl);
return replaceIn(_j, $sn);
}
/**
 * Replaces all matching statements in the domain entity with the replacement source.
 * @param <_J> the domain entity type
 * @param _j the domain entity to modify
 * @param statements source line(s) of the replacement statement(s)
 * @return the modified entity
 */
public <_J extends _java._domain> _J replaceIn(_J _j, String... statements ){
$stmts $sn = $stmts.of(statements);
return replaceIn(_j, $sn);
}
// Replaces a matched statement with a block comment containing its source,
// wrapped in a block statement ("{/*<code>...</code>*/}").
// The "*" + "/" split below keeps the literal from terminating THIS comment.
// NOTE(review): if the statement's parent is not a plain (non-body) BlockStmt,
// the outer if fails and the consumer silently does nothing — confirm whether
// that no-op is intentional.
public static final Consumer<_stmt> REPLACE_WITH_EMPTY_COMMENT_BLOCK = (st)->{
BlockStmt bs = Ast.blockStmt("{/*<code>"+st.toString(Print.PRINT_NO_COMMENTS)+"</code>*" + "/}");
/**
 * Check if you are in this situation (replacing System.out.println()) which is already in an empty block
 * <PRE>
 * void m() {
 *     {
 *     System.out.println(1);
 *     }
 * }
 * </PRE>
 * (then the enclosing block itself is replaced rather than nesting another block)
 */
if( st.node().getParentNode().isPresent()
&& st.node().getParentNode().get() instanceof BlockStmt
&& ((BlockStmt)st.node().getParentNode().get()).getParentNode().isPresent()
&& !(((BlockStmt)st.node().getParentNode().get()).getParentNode().get() instanceof BodyDeclaration) ){
BlockStmt par = ((BlockStmt)st.node().getParentNode().get());
if( par.getStatements().size() == 1 ){
bs = Ast.blockStmt("{/*<code>"+st.toString(Print.PRINT_NO_COMMENTS)+"</code>" + "*/}");
par.replace( bs );
} else{
st.node().replace(bs);
}
}
};
// Replaces a matched statement with an EmptyStmt (";") carrying a block comment
// that preserves the statement's source.
public static final Consumer<_stmt> REPLACE_WITH_EMPTY_STMT_COMMENT = (st)->{
Statement es = new EmptyStmt(); //create a new empty statement
es.setComment( new BlockComment("<code>"+st.toString(Print.PRINT_NO_COMMENTS)+"</code>") );
st.node().replace( es );
};
/** comments out the matching code, replacing each match with a commented copy,
 * i.e.
 * //comment out all assertStmts
 * class A{
 *     void m(){
 *         assert(1==1);
 *     }
 * }
 * _class _c = $.assertStmt().commentOut(A.class);
 * class A{
 *     void m(){
 *         { /* assert(1==1); * / }
 *     }
 * }
 */
public <N extends Node> N commentOut( N ast ){
return commentOut(ast, REPLACE_WITH_EMPTY_STMT_COMMENT);
}
//comments out the matching code in the class's source
public <_CT extends _type> _CT commentOut( Class clazz){
return (_CT)commentOut( _class.of(clazz), REPLACE_WITH_EMPTY_STMT_COMMENT);
}
// comments out the matching code across a whole project
public _project commentOut(_project _codeProvider){
return commentOut(_codeProvider, REPLACE_WITH_EMPTY_STMT_COMMENT);
}
// comments out the matching code across a whole project using a custom commenter
public _project commentOut(_project _codeProvider, Consumer<_stmt> commenter){
forEachIn(_codeProvider, n-> commenter.accept(n));
return _codeProvider;
}
//comments out the matching code in a domain entity
public <_J extends _java._domain> _J commentOut(_J _j){
return commentOut(_j, REPLACE_WITH_EMPTY_STMT_COMMENT);
}
// comments out the matching code under an AST node using a custom commenter
public <N extends Node> N commentOut( N ast, Consumer<_stmt> commenter){
return forEachIn(ast, n-> commenter.accept(n));
}
// comments out the matching code in the class's source using a custom commenter
public <_CT extends _type> _CT commentOut( Class clazz, Consumer<_stmt> commenter){
return (_CT)commentOut( _class.of(clazz), commenter);
}
/** comments out the matching code in a domain entity using a custom commenter */
public <_J extends _java._domain> _J commentOut(_J _j, Consumer<_stmt>commenter){
return forEachIn(_j, s-> commenter.accept(s) );
}
// Pattern recognizing comments written by the commentOut consumers above:
// a comment whose body is "<code>...statement source...</code>"
public static final $comment<com.github.javaparser.ast.comments.Comment> $COMMENTED_STATEMENT = $comment.as("<code>$statement$</code>");
/**
 * Reverses commentOut: finds "<code>...</code>" comments whose contained source
 * parses to a statement matching THIS pattern, and restores the statement.
 * @param ast the root node to un-comment under
 * @param <N> the AST node type
 * @return ast (for chaining)
 */
public <N extends Node> N unComment( N ast ){
$COMMENTED_STATEMENT.forSelectedIn( ast, ($comment.Select sel)-> {
try {
Statement st = Stmt.of( sel.get("statement").toString() );
Select ssel = this.select(st);
if( ssel != null ){
//if it's a comment on an EmptyStmt ";", lets replace the statement
Optional<Node> oc = sel.comment.getCommentedNode();
if( oc.isPresent() && ( oc.get() instanceof EmptyStmt)) {
//System.out.println("Empty Stmt");
oc.get().replace(st);
} else { //TODO handle Empty Block Comments
/* disabled sketch for orphaned comments (no commented node):
if( !oc.isPresent() ){
Optional<Node> oparent = sel.comment.getParentNode();
if( oparent.isPresent() && )
oparent.get().is
}
*/
Comments.replace(sel.comment, st);
}
}
} catch( Exception e ){
//couldnt parse comment statement -- leave the comment as-is
}
} );
return ast;
}
/**
 * Reverses commentOut within the class's source.
 * @param clazz the class whose source to un-comment
 * @param <_CT> the _type subtype
 * @return the modified _type
 */
public <_CT extends _type> _CT unComment( Class clazz){
return (_CT)unComment( _class.of(clazz));
}
/**
 * Reverses commentOut within a domain node.
 * @param _n a node to be uncommented
 * @param <_N> the node type
 * @return the modified node
 */
public <_N extends _tree._node> _N unComment(_N _n){
unComment( _n.node() );
return _n;
}
// Replaces each match under node with the single-statement replacement pattern.
public <N extends Node> N replaceIn( N node, $stmt $pat){
    return replaceIn(node, $stmts.of($pat));
}
/**
 * Replaces every statement under node matching this pattern with statements
 * drafted from the replacement pattern (using the match's tokens).
 * @param node the AST root to modify
 * @param $pat the replacement statements pattern
 * @return the (modified) node
 */
public <N extends Node> N replaceIn( N node, $stmts $pat){
    Walk.in(node, this.statementClass, st-> replaceSelected($pat, st));
    // flatten the temporary "$replacement$:" labels inserted by replaceSelected
    Walk.deLabel(node, "$replacement$");
    return node;
}
/**
 * Replaces every statement in the domain entity matching this pattern with
 * statements drafted from the replacement pattern (using the match's tokens).
 * @param <_J> the domain entity type
 * @param _j the domain entity to modify
 * @param $protoReplacement the replacement statements pattern
 * @return the (modified) entity
 */
public <_J extends _java._domain> _J replaceIn(_J _j, $stmts $protoReplacement ){
    Walk.in(_j, this.statementClass, st-> replaceSelected($protoReplacement, st));
    if( _j instanceof _tree._node){
        Walk.deLabel( ((_tree._node) _j).node(), "$replacement$");
    }
    return (_J) _j;
}
/**
 * Shared replacement step (extracted: the two replaceIn overloads above had
 * duplicated ~25-line bodies). If st matches this pattern, drafts the
 * replacement statements from the match's tokens and swaps them in, wrapped in
 * a single "$replacement$:" labeled block. We wrap in a labeled statement
 * because, if we inserted the replacement INLINE, we could end up in an
 * infinite loop: searching the tree up to a cursor, adding code AT the cursor,
 * finding a match within the added code, adding more code, etc. This way we add
 * ONE labeled statement at the match location (containing multiple statements)
 * and move on; the caller de-labels afterwards.
 * @param $replacement the replacement statements pattern
 * @param st the candidate statement
 */
private void replaceSelected( $stmts $replacement, Statement st ){
    $stmt.Select sel = select( st );
    if( sel == null ){
        return;
    }
    //construct the replacement snippet from the match's tokens
    List<Statement> replacements = $replacement.draft(sel.tokens);
    LabeledStmt ls = Stmt.labeledStmt("$replacement$:{}");
    for(int i=0;i<replacements.size(); i++){
        ls.getStatement().asBlockStmt().addStatement( replacements.get(i) );
    }
    sel._s.node().replace( ls );
}
/**
* Adds a constraint that the beforeExpression occurs in the same context/block before the target Expression
* @param patternsOccurringBeforeThisNode
* @return
*/
public $stmt<S, _S> $isAfter($pattern... patternsOccurringBeforeThisNode ){
Predicate<_S> prev = e -> $pattern.BodyScope.findPrevious(e.node(), patternsOccurringBeforeThisNode) != null;
return $and(prev);
}
/**
 * Adds a constraint that NONE of the given patterns occur in the same
 * context/block before the target statement.
 *
 * @param patternsOccurringBeforeThisNode patterns that must NOT appear earlier in the block
 * @return this $stmt with the added constraint
 */
public $stmt<S, _S> $isNotAfter($pattern... patternsOccurringBeforeThisNode ){
    Predicate<_S> hasEarlierMatch =
        _s -> $pattern.BodyScope.findPrevious(_s.node(), patternsOccurringBeforeThisNode) != null;
    return $not(hasEarlierMatch);
}
/**
 * Adds a constraint that at least one of the given patterns occurs in the same
 * context/block AFTER the target statement.
 *
 * @param patternsOccurringAfterThisNode patterns that must appear later in the block
 * @return this $stmt with the added constraint
 */
public $stmt<S, _S> $isBefore($pattern... patternsOccurringAfterThisNode ){
    Predicate<_S> hasLaterMatch =
        _s -> $pattern.BodyScope.findNext(_s.node(), patternsOccurringAfterThisNode) != null;
    return $and(hasLaterMatch);
}
/**
 * Adds a constraint that NONE of the given patterns occur in the same
 * context/block after the target statement.
 *
 * @param patternsOccurringAfterThisNode patterns that must NOT appear later in the block
 * @return this $stmt with the added constraint
 */
public $stmt<S, _S> $isNotBefore($pattern... patternsOccurringAfterThisNode ){
    Predicate<_S> hasLaterMatch =
        _s -> $pattern.BodyScope.findNext(_s.node(), patternsOccurringAfterThisNode) != null;
    return $not(hasLaterMatch);
}
/** Renders this pattern as: {@code $stmt{ (StatementType) : "stencil" } }. */
@Override
public String toString(){
    StringBuilder sb = new StringBuilder();
    sb.append("$stmt{ (")
      .append(this.statementClass.getSimpleName())
      .append(") : \"")
      .append(this.stmtStencil)
      .append("\" }");
    return sb.toString();
}
/**
 * Post-processes a statement drafted from a String-based Stencil, resolving any
 * labeled statements whose label starts with {@code $}, i.e.<PRE>
 *
 * $callSuperEquals: eq = super.typesEqual(proxy) &amp;&amp; eq;
 * </PRE>
 *
 * For a labeled statement like
 * <PRE>$callSuperEquals: eq = super.typesEqual($b$) &amp;&amp; eq;</PRE>
 * the parameter "callSuperEquals" is looked up in {@code tokens}:
 * <UL>
 *   <LI>if the value is null or FALSE, the statement leaves no trace in the output</LI>
 *   <LI>if the value is a Statement (or a String/$stmt that drafts one), it replaces
 *       the labeled statement's contents</LI>
 *   <LI>any other value keeps the statement and simply removes/flattens the label</LI>
 * </UL>
 *
 * @param stmt the drafted statement to post-process
 * @param tokens the parameter values controlling each $-labeled statement
 * @return the (potentially modified) statement
 */
public static Statement construct$LabelStmt( Statement stmt, Map<String,Object> tokens ){
    boolean is$Labeled = stmt instanceof LabeledStmt
        && stmt.asLabeledStmt().getLabel().asString().startsWith("$");
    if( is$Labeled ){
        // the statement itself is the $-labeled statement: resolve it directly
        return labelStmtReplacement(stmt.asLabeledStmt(), tokens);
    }
    // otherwise resolve any $-labeled statements nested inside it
    return parameterize$LabeledStmt(stmt, tokens);
}
/**
 * Walks the AST under {@code node} looking for $-labeled statements and replaces
 * each with the value resolved from {@code tokens}
 * (via {@link #labelStmtReplacement(LabeledStmt, Map)}).
 *
 * @param <N> the node type
 * @param node the root to search and mutate
 * @param tokens parameter values controlling each $-labeled statement
 * @return the (mutated) node
 */
public static <N extends Node> N parameterize$LabeledStmt(N node, Map<String,Object> tokens ){
    // two phases: collect first, THEN mutate (never walk and mutate at the same time)
    List<LabeledStmt> labeled =
        Walk.list(node, LabeledStmt.class, ls -> ls.getLabel().asString().startsWith("$"));
    for( LabeledStmt ls : labeled ){
        Statement replacement = labelStmtReplacement(ls, tokens);
        boolean isEmpty = replacement.isEmptyStmt()
            || (replacement.isBlockStmt() && replacement.asBlockStmt().isEmpty());
        if( isEmpty ){
            // hidden: drop the labeled statement entirely; if the parent refuses
            // removal, fall back to swapping in the (empty) replacement
            if( !ls.remove() ){
                ls.replace( replacement );
            }
        } else {
            // swap in via a temporary label, then flatten it so only the
            // replacement statements remain in the body
            LabeledStmt marker = Stmt.labeledStmt("$TO_REPLACE: {}");
            marker.setStatement(replacement);
            ls.replace( marker );
            Walk.deLabel(node, "$TO_REPLACE");
        }
    }
    return node;
}
/**
 * Resolves the replacement for a $-labeled statement from the bound token value:
 * null/FALSE hides it, a Statement/String/$stmt value overrides its contents, and
 * any other value keeps the statement body (only the label is dropped).
 *
 * @param ls the $-labeled statement to resolve
 * @param tokens parameter values, keyed by label name without the leading '$'
 * @return the statement to substitute in place of the labeled statement
 */
public static Statement labelStmtReplacement(LabeledStmt ls, Map<String,Object> tokens){
    String name = ls.getLabel().asString().substring(1); // strip the leading '$'
    Object value = tokens.get(name);
    if( value == null || Boolean.FALSE == value ){
        return new EmptyStmt(); // HIDE: no trace in the output
    }
    if( value instanceof Statement ){
        return (Statement) value; // OVERRIDE with a fixed Statement
    }
    if( value instanceof String ){
        return Stmt.of( (String) value ); // OVERRIDE with a String statement
    }
    if( value instanceof $stmt ){
        return (($stmt) value).draft(tokens).node(); // OVERRIDE with a proto Statement
    }
    return ls.getStatement(); // SHOW: just remove the "$label:" prefix
}
/**
 * An Or entity that can match against any of the $pattern instances provided.
 * NOTE: template features (draft/fill) are suppressed.
 */
public static class Or extends $stmt {

    /** the alternative patterns, tried in order */
    final List<$stmt> ors = new ArrayList<>();

    public Or($stmt...$as){
        super(Statement.class);
        ors.addAll(Arrays.asList($as));
    }

    /** Hardcodes the given tokens into every alternative pattern. */
    @Override
    public $stmt $hardcode(Translator translator, Tokens kvs) {
        for( $stmt $a : ors ){
            $a.$hardcode(translator, kvs);
        }
        return this;
    }

    @Override
    public String toString(){
        StringBuilder sb = new StringBuilder();
        sb.append("$stmt.Or{");
        sb.append(System.lineSeparator());
        for( $stmt $a : ors ){
            sb.append( Text.indent($a.toString()) );
        }
        sb.append("}");
        return sb.toString();
    }

    /**
     * Selects using the first alternative that matches the statement.
     *
     * @param n the statement to select against
     * @return the Select from the first matching alternative, or null if none match
     */
    public $stmt.Select select(Statement n){
        $stmt $matching = whichMatch(n);
        return $matching == null ? null : $matching.select(n);
    }

    /** An Or is never a match-anything pattern. */
    public boolean isMatchAny(){
        return false;
    }

    /**
     * Returns the underlying $stmt that matches the Statement, or null if none match.
     *
     * @param stmt the statement to test
     * @return the first matching alternative, or null
     */
    public $stmt whichMatch(Statement stmt){
        if( !this.astMatch.test(stmt) ){
            return null; // fails the shared AST-level precondition
        }
        for( $stmt $candidate : this.ors ){
            if( $candidate.matches(stmt) ){
                return $candidate;
            }
        }
        return null;
    }
}
/**
 * Pretty-print configuration that renders $-labeled statements in Stencil "embed" form
 * ($$name:...:$$) via {@link Print$LabeledStatementsAsStencilEmbed}, with comments and
 * Javadoc suppressed.
 */
public static final PrettyPrinterConfiguration PRINT_$LABELED_AS_EMBED = new PrettyPrinterConfiguration()
.setPrintComments(false).setPrintJavadoc(false) //MED ADDED
.setVisitorFactory(Print$LabeledStatementsAsStencilEmbed::new);
/**
 * PrettyPrint visitor that renders statements labeled with a "$" prefix in the
 * Stencil embed form {@code $$name:<statement>:$$} instead of normal Java
 * labeled-statement syntax; all other labeled statements print normally.
 */
public static class Print$LabeledStatementsAsStencilEmbed extends PrettyPrintVisitor {

    //MAKE THIS A STENCIL??
    /** decides whether a label should be treated as a stencil parameter */
    public Predicate<String> labelMatcher = l -> l.startsWith("$");

    /** maps a "$name" label to "name"; returns null for labels that are not $-labels */
    public Function<String,String> labelToName =
        l -> l.startsWith("$") ? l.substring(1) : null;

    public Print$LabeledStatementsAsStencilEmbed(PrettyPrinterConfiguration prettyPrinterConfiguration) {
        super(prettyPrinterConfiguration);
    }

    public Print$LabeledStatementsAsStencilEmbed(Function<String,String> labelToName, PrettyPrinterConfiguration prettyPrinterConfiguration) {
        super(prettyPrinterConfiguration);
        this.labelToName = labelToName;
    }

    // converts "$label: assert($condition$);" to "$$label: assert($condition$);:$$"
    public void visit(LabeledStmt ls, Void arg){
        String name = labelToName.apply(ls.getLabel().asString());
        if( name == null ){
            super.visit(ls, arg); // not a $-label: print as a normal labeled statement
        } else {
            printer.print("$$"+name+":"+ls.getStatement().toString(Print.PRINT_NO_COMMENTS)+":$$");
        }
    }
}
/**
 * The result of matching a Statement against a $stmt pattern: the selected
 * statement (wrapped as a {@code _stmt}) plus the tokens extracted by the match.
 *
 * @param <T> the AST statement type selected
 * @param <_S> the _stmt wrapper type
 */
public static class Select<T extends Statement, _S extends _stmt> implements $pattern.selected,
    selectAst<T>, select_java<_S> {

    /** wrapper over the selected AST statement */
    public _S _s;
    /** tokens captured during the match */
    public $tokens tokens;

    public Select( T astStatement, $tokens tokens){
        this._s = (_S) _stmt.of( astStatement);
        this.tokens = tokens;
    }

    @Override
    public $tokens tokens(){
        return this.tokens;
    }

    @Override
    public String toString(){
        StringBuilder sb = new StringBuilder();
        sb.append("$stmt.Select{").append(System.lineSeparator());
        sb.append(Text.indent(_s.toString() )).append(System.lineSeparator());
        sb.append(Text.indent("$tokens : " + tokens)).append(System.lineSeparator());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public T ast() {
        return (T) _s.node();
    }

    @Override
    public _S _node() {
        return this._s;
    }
}
}
|
/*
* Copyright (c) 2011-2020, hubin (jobob@qq.com).
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.baomidou.mybatisplus.extension.injector.methods;
import com.baomidou.mybatisplus.core.enums.SqlMethod;
import com.baomidou.mybatisplus.core.metadata.TableInfo;
import com.baomidou.mybatisplus.core.toolkit.Constants;
import com.baomidou.mybatisplus.core.toolkit.StringPool;
import com.baomidou.mybatisplus.core.toolkit.sql.SqlScriptUtils;
import com.baomidou.mybatisplus.extension.injector.AbstractLogicMethod;
import org.apache.ibatis.mapping.MappedStatement;
import org.apache.ibatis.mapping.SqlSource;
/**
 * Injects the "select batch by ids" mapped statement for logic-delete aware entities
 * (rows soft-deleted via the logic-delete column are excluded).
 *
 * @author hubin
 * @since 2018-06-13
 */
public class LogicSelectBatchByIds extends AbstractLogicMethod {

    @Override
    public MappedStatement injectMappedStatement(Class<?> mapperClass, Class<?> modelClass, TableInfo tableInfo) {
        SqlMethod sqlMethod = SqlMethod.LOGIC_SELECT_BATCH_BY_IDS;
        // <foreach> fragment expanding the id collection into "#{item}, #{item}, ..."
        String idsForeach =
            SqlScriptUtils.convertForeach("#{item}", Constants.COLLECTION, null, "item", StringPool.COMMA);
        String sql = String.format(sqlMethod.getSql(),
            sqlSelectColumns(tableInfo, false),
            tableInfo.getTableName(),
            tableInfo.getKeyColumn(),
            idsForeach,
            tableInfo.getLogicDeleteSql(true, false));
        SqlSource sqlSource = languageDriver.createSqlSource(configuration, sql, modelClass);
        return addSelectMappedStatement(mapperClass, sqlMethod.getMethod(), sqlSource, modelClass, tableInfo);
    }
}
|
package com.sequenceiq.cloudbreak.domain;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.UniqueConstraint;
import com.sequenceiq.cloudbreak.authorization.WorkspaceResource;
import com.sequenceiq.cloudbreak.domain.workspace.Workspace;
import com.sequenceiq.cloudbreak.domain.workspace.WorkspaceAwareResource;
/**
 * Workspace-scoped entity referencing an image catalog by URL.
 * The catalog name is unique within a workspace (table-level unique constraint).
 */
@Entity
@Table(uniqueConstraints = @UniqueConstraint(columnNames = {"workspace_id", "name"}))
public class ImageCatalog implements ProvisionEntity, WorkspaceAwareResource {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO, generator = "imagecatalog_generator")
    @SequenceGenerator(name = "imagecatalog_generator", sequenceName = "imagecatalog_id_seq", allocationSize = 1)
    private Long id;

    @Column(nullable = false)
    private String name;

    @Column(length = 1000000, columnDefinition = "TEXT")
    private String description;

    @Column(name = "url", nullable = false)
    private String imageCatalogUrl;

    @Column(columnDefinition = "boolean default false")
    private boolean archived;

    @ManyToOne
    private Workspace workspace;

    public Long getId() {
        return this.id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    @Override
    public String getName() {
        return this.name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDescription() {
        return this.description;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    public String getImageCatalogUrl() {
        return this.imageCatalogUrl;
    }

    public void setImageCatalogUrl(String imageCatalogUrl) {
        this.imageCatalogUrl = imageCatalogUrl;
    }

    public boolean isArchived() {
        return this.archived;
    }

    public void setArchived(boolean archived) {
        this.archived = archived;
    }

    public Workspace getWorkspace() {
        return this.workspace;
    }

    public void setWorkspace(Workspace workspace) {
        this.workspace = workspace;
    }

    /** Identifies this entity as the IMAGECATALOG workspace resource type. */
    @Override
    public WorkspaceResource getResource() {
        return WorkspaceResource.IMAGECATALOG;
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import java.util.ArrayList;
import org.antlr.runtime.ANTLRStringStream;
import org.antlr.runtime.CharStream;
import org.antlr.runtime.CommonToken;
import org.antlr.runtime.NoViableAltException;
import org.antlr.runtime.RecognitionException;
import org.antlr.runtime.Token;
import org.antlr.runtime.TokenRewriteStream;
import org.antlr.runtime.TokenStream;
import org.antlr.runtime.tree.CommonTree;
import org.antlr.runtime.tree.CommonTreeAdaptor;
import org.antlr.runtime.tree.TreeAdaptor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.hive.ql.Context;
/**
 * ParseDriver.
 *
 * Entry point for turning HiveQL strings into {@link ASTNode} trees: wires up the
 * case-insensitive lexer, the ANTLR parsers and the ASTNode-producing tree adaptor,
 * and exposes parse methods for full statements, hints, select lists, expressions
 * and resource-plan trigger/action expressions.
 */
public class ParseDriver {
  private static final Logger LOG = LoggerFactory.getLogger("hive.ql.parse.ParseDriver");

  /**
   * ANTLRNoCaseStringStream.
   *
   * Case-insensitive char stream for the lexical analysis part of antlr: LA()
   * (the lookahead function, used purely for matching lexical rules) upper-cases
   * each character so the lexical rules need only match upper-case tokens, while
   * the actual token text (generated by consume() in ANTLRStringStream) keeps the
   * user's input unchanged. This also means the grammar will only accept
   * capitalized tokens when run from tools (e.g. antlrworks) that lack this stream.
   */
  public class ANTLRNoCaseStringStream extends ANTLRStringStream {

    public ANTLRNoCaseStringStream(String input) {
      super(input);
    }

    @Override
    public int LA(int i) {
      int returnChar = super.LA(i);
      // EOF and 0 are sentinels, not characters: pass them through untouched
      if (returnChar == CharStream.EOF || returnChar == 0) {
        return returnChar;
      }
      return Character.toUpperCase((char) returnChar);
    }
  }

  /**
   * HiveLexerX.
   *
   * Lexer subclass that collects recognition errors (instead of printing them)
   * so callers can surface them as {@link ParseException}s.
   */
  public class HiveLexerX extends HiveLexer {

    private final ArrayList<ParseError> errors;

    public HiveLexerX() {
      super();
      errors = new ArrayList<ParseError>();
    }

    public HiveLexerX(CharStream input) {
      super(input);
      errors = new ArrayList<ParseError>();
    }

    @Override
    public void displayRecognitionError(String[] tokenNames,
        RecognitionException e) {
      // collect rather than print; exposed via getErrors()
      errors.add(new ParseError(this, e, tokenNames));
    }

    @Override
    public String getErrorMessage(RecognitionException e, String[] tokenNames) {
      if (e instanceof NoViableAltException) {
        // for development, can add "decision=<<"+nvae.grammarDecisionDescription+">>"
        // and "(decision="+nvae.decisionNumber+") and "state "+nvae.stateNumber
        return "character " + getCharErrorDisplay(e.c) + " not supported here";
      }
      return super.getErrorMessage(e, tokenNames);
    }

    public ArrayList<ParseError> getErrors() {
      return errors;
    }
  }

  /**
   * Tree adaptor for making antlr return ASTNodes instead of CommonTree nodes
   * so that the graph walking algorithms and the rules framework defined in
   * ql.lib can be used with the AST Nodes.
   */
  public static final TreeAdaptor adaptor = new CommonTreeAdaptor() {
    /**
     * Creates an ASTNode for the given token. The ASTNode is a wrapper around
     * antlr's CommonTree class that implements the Node interface.
     *
     * @param payload
     *          The token.
     * @return Object (which is actually an ASTNode) for the token.
     */
    @Override
    public Object create(Token payload) {
      return new ASTNode(payload);
    }

    @Override
    public Token createToken(int tokenType, String text) {
      if (tokenType == HiveParser.TOK_SETCOLREF) {
        // ParseUtils.processSetColsNode() can change type of TOK_SETCOLREF nodes later
        return new CommonToken(tokenType, text);
      } else {
        return new ImmutableCommonToken(tokenType, text);
      }
    }

    @Override
    public Object dupNode(Object t) {
      return create(((CommonTree)t).token);
    }

    @Override
    public Object dupTree(Object t, Object parent) {
      // Overriden to copy start index / end index, that is needed through optimization,
      // e.g., for masking/filtering
      ASTNode astNode = (ASTNode) t;
      ASTNode astNodeCopy = (ASTNode) super.dupTree(t, parent);
      astNodeCopy.setTokenStartIndex(astNode.getTokenStartIndex());
      astNodeCopy.setTokenStopIndex(astNode.getTokenStopIndex());
      return astNodeCopy;
    }

    @Override
    public Object errorNode(TokenStream input, Token start, Token stop, RecognitionException e) {
      return new ASTErrorNode(input, start, stop, e);
    }
  };

  /**
   * Throws a ParseException if the lexer or the parser recorded any errors.
   * Lexer errors take precedence, matching the original reporting order.
   *
   * @param lexer the lexer whose collected errors to check
   * @param parserErrors the parser's collected errors
   * @throws ParseException if either error list is non-empty
   */
  private static void throwOnErrors(HiveLexerX lexer, ArrayList<ParseError> parserErrors)
      throws ParseException {
    if (!lexer.getErrors().isEmpty()) {
      throw new ParseException(lexer.getErrors());
    }
    if (!parserErrors.isEmpty()) {
      throw new ParseException(parserErrors);
    }
  }

  public ASTNode parse(String command) throws ParseException {
    return parse(command, null);
  }

  public ASTNode parse(String command, Context ctx)
      throws ParseException {
    return parse(command, ctx, null);
  }

  /**
   * Parses a command, optionally assigning the parser's token stream to the
   * given context.
   *
   * @param command
   *          command to parse
   *
   * @param ctx
   *          context with which to associate this parser's token stream, or
   *          null if either no context is available or the context already has
   *          an existing stream
   *
   * @return parsed AST
   * @throws ParseException if the lexer or parser reports errors
   */
  public ASTNode parse(String command, Context ctx, String viewFullyQualifiedName)
      throws ParseException {
    LOG.debug("Parsing command: {}", command);

    HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    if (ctx != null) {
      if (viewFullyQualifiedName == null) {
        // Top level query
        ctx.setTokenRewriteStream(tokens);
      } else {
        // It is a view
        ctx.addViewTokenRewriteStream(viewFullyQualifiedName, tokens);
      }
      lexer.setHiveConf(ctx.getConf());
    }

    HiveParser parser = new HiveParser(tokens);
    if (ctx != null) {
      parser.setHiveConf(ctx.getConf());
    }
    parser.setTreeAdaptor(adaptor);
    HiveParser.statement_return r;
    try {
      r = parser.statement();
    } catch (RecognitionException e) {
      // log with the cause instead of printStackTrace(); collected errors carry the details
      LOG.error("Failed to parse command: {}", command, e);
      throw new ParseException(parser.errors);
    }
    throwOnErrors(lexer, parser.errors);
    LOG.debug("Parse Completed");

    ASTNode tree = (ASTNode) r.getTree();
    tree.setUnknownTokenBoundaries();
    return tree;
  }

  /*
   * Parse a string as a query hint.
   */
  public ASTNode parseHint(String command) throws ParseException {
    LOG.info("Parsing hint: {}", command);

    HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    HintParser parser = new HintParser(tokens);
    parser.setTreeAdaptor(adaptor);
    HintParser.hint_return r;
    try {
      r = parser.hint();
    } catch (RecognitionException e) {
      LOG.error("Failed to parse hint: {}", command, e);
      throw new ParseException(parser.errors);
    }
    throwOnErrors(lexer, parser.errors);
    LOG.info("Parse Completed");

    return (ASTNode) r.getTree();
  }

  /*
   * parse a String as a Select List. This allows table functions to be passed expression Strings
   * that are translated in
   * the context they define at invocation time. Currently used by NPath to allow users to specify
   * what output they want.
   * NPath allows expressions n 'tpath' a column that represents the matched set of rows. This
   * column doesn't exist in
   * the input schema and hence the Result Expression cannot be analyzed by the regular Hive
   * translation process.
   */
  public ASTNode parseSelect(String command, Context ctx) throws ParseException {
    LOG.debug("Parsing command: {}", command);

    HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    if (ctx != null) {
      ctx.setTokenRewriteStream(tokens);
    }
    HiveParser parser = new HiveParser(tokens);
    parser.setTreeAdaptor(adaptor);
    HiveParser_SelectClauseParser.selectClause_return r;
    try {
      r = parser.selectClause();
    } catch (RecognitionException e) {
      LOG.error("Failed to parse select clause: {}", command, e);
      throw new ParseException(parser.errors);
    }
    throwOnErrors(lexer, parser.errors);
    LOG.debug("Parse Completed");

    return r.getTree();
  }

  public ASTNode parseExpression(String command) throws ParseException {
    LOG.info("Parsing expression: {}", command);

    HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    HiveParser parser = new HiveParser(tokens);
    parser.setTreeAdaptor(adaptor);
    HiveParser_IdentifiersParser.expression_return r;
    try {
      r = parser.expression();
    } catch (RecognitionException e) {
      LOG.error("Failed to parse expression: {}", command, e);
      throw new ParseException(parser.errors);
    }
    throwOnErrors(lexer, parser.errors);
    LOG.info("Parse Completed");

    return (ASTNode) r.getTree();
  }

  public ASTNode parseTriggerExpression(String command) throws ParseException {
    HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    HiveParser parser = new HiveParser(tokens);
    parser.setTreeAdaptor(adaptor);
    HiveParser_ResourcePlanParser.triggerExpressionStandalone_return r;
    try {
      r = parser.gResourcePlanParser.triggerExpressionStandalone();
    } catch (RecognitionException e) {
      LOG.error("Failed to parse trigger expression: {}", command, e);
      throw new ParseException(parser.errors);
    }
    throwOnErrors(lexer, parser.errors);
    return r.getTree();
  }

  public ASTNode parseTriggerActionExpression(String command) throws ParseException {
    HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
    TokenRewriteStream tokens = new TokenRewriteStream(lexer);
    HiveParser parser = new HiveParser(tokens);
    parser.setTreeAdaptor(adaptor);
    HiveParser_ResourcePlanParser.triggerActionExpressionStandalone_return r;
    try {
      r = parser.gResourcePlanParser.triggerActionExpressionStandalone();
    } catch (RecognitionException e) {
      LOG.error("Failed to parse trigger action expression: {}", command, e);
      throw new ParseException(parser.errors);
    }
    throwOnErrors(lexer, parser.errors);
    return r.getTree();
  }
}
|
/**
 * Message ("Comunicado") announcing the winning player and their final score.
 */
public class Vitoria extends Comunicado{

    private String nomeJogador; // winner's name
    private int pontuacao;      // winner's score

    public Vitoria(String jogador, int pontuacao) {
        this.nomeJogador = jogador;
        this.pontuacao = pontuacao;
    }

    /** @return the winner's name */
    public String getVencedor(){
        return nomeJogador;
    }

    /** @return the winner's score */
    public int getPontuacao(){
        return pontuacao;
    }
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.connector.catalog;
import org.apache.spark.annotation.Evolving;
import org.apache.spark.sql.catalyst.analysis.NamespaceAlreadyExistsException;
import org.apache.spark.sql.catalyst.analysis.NoSuchNamespaceException;
import java.util.Map;
/**
 * Catalog methods for working with namespaces.
 * <p>
 * If an object such as a table, view, or function exists, then its parent namespaces must
 * also exist and must be returned by the discovery methods {@link #listNamespaces()} and
 * {@link #listNamespaces(String[])}.
 * <p>
 * Catalog implementations are not required to maintain namespace existence independently of
 * the objects in a namespace. For example, a function catalog that loads functions via
 * reflection and uses Java packages as namespaces need not support creating, altering, or
 * dropping a namespace. Implementations may discover objects or namespaces without throwing
 * {@link NoSuchNamespaceException} when no namespace is found.
 *
 * @since 3.0.0
 */
@Evolving
public interface SupportsNamespaces extends CatalogPlugin {

  /**
   * A reserved property to specify the location of the namespace. If the namespace
   * needs to store files, it should be under this location.
   */
  String PROP_LOCATION = "location";

  /**
   * A reserved property to specify the description of the namespace. The description
   * will be returned in the result of "DESCRIBE NAMESPACE" command.
   */
  String PROP_COMMENT = "comment";

  /**
   * A reserved property to specify the owner of the namespace.
   */
  String PROP_OWNER = "owner";

  /**
   * List top-level namespaces from the catalog.
   * <p>
   * If table a.b.t exists, this method must return ["a"] in the result array, because
   * the parent namespaces of any existing object must be discoverable.
   *
   * @return an array of multi-part namespace names
   */
  String[][] listNamespaces() throws NoSuchNamespaceException;

  /**
   * List namespaces in a namespace.
   * <p>
   * If table a.b.t exists, this method invoked as listNamespaces(["a"]) must return
   * ["a", "b"] in the result array, because the parent namespaces of any existing
   * object must be discoverable.
   *
   * @param namespace a multi-part namespace
   * @return an array of multi-part namespace names
   * @throws NoSuchNamespaceException If the namespace does not exist (optional)
   */
  String[][] listNamespaces(String[] namespace) throws NoSuchNamespaceException;

  /**
   * Test whether a namespace exists.
   * <p>
   * If table a.b.t exists, this method invoked as namespaceExists(["a"]) or
   * namespaceExists(["a", "b"]) must return true, because parent namespaces of
   * existing objects must exist.
   *
   * @param namespace a multi-part namespace
   * @return true if the namespace exists, false otherwise
   */
  default boolean namespaceExists(String[] namespace) {
    // existence is probed by attempting to load metadata
    try {
      loadNamespaceMetadata(namespace);
    } catch (NoSuchNamespaceException e) {
      return false;
    }
    return true;
  }

  /**
   * Load metadata properties for a namespace.
   *
   * @param namespace a multi-part namespace
   * @return a string map of properties for the given namespace
   * @throws NoSuchNamespaceException If the namespace does not exist (optional)
   * @throws UnsupportedOperationException If namespace properties are not supported
   */
  Map<String, String> loadNamespaceMetadata(String[] namespace) throws NoSuchNamespaceException;

  /**
   * Create a namespace in the catalog.
   *
   * @param namespace a multi-part namespace
   * @param metadata a string map of properties for the given namespace
   * @throws NamespaceAlreadyExistsException If the namespace already exists
   * @throws UnsupportedOperationException If create is not a supported operation
   */
  void createNamespace(
      String[] namespace,
      Map<String, String> metadata) throws NamespaceAlreadyExistsException;

  /**
   * Apply a set of metadata changes to a namespace in the catalog.
   *
   * @param namespace a multi-part namespace
   * @param changes a collection of changes to apply to the namespace
   * @throws NoSuchNamespaceException If the namespace does not exist (optional)
   * @throws UnsupportedOperationException If namespace properties are not supported
   */
  void alterNamespace(
      String[] namespace,
      NamespaceChange... changes) throws NoSuchNamespaceException;

  /**
   * Drop a namespace from the catalog, recursively dropping all objects within the namespace.
   * <p>
   * If the catalog implementation does not support this operation, it may throw
   * {@link UnsupportedOperationException}.
   *
   * @param namespace a multi-part namespace
   * @return true if the namespace was dropped
   * @throws NoSuchNamespaceException If the namespace does not exist (optional)
   * @throws UnsupportedOperationException If drop is not a supported operation
   */
  boolean dropNamespace(String[] namespace) throws NoSuchNamespaceException;
}
|
package io.katharsis.core.internal.jackson.mock.models;
import java.util.Collections;
import java.util.List;
import io.katharsis.resource.annotations.JsonApiId;
import io.katharsis.resource.annotations.JsonApiIncludeByDefault;
import io.katharsis.resource.annotations.JsonApiResource;
import io.katharsis.resource.annotations.JsonApiToMany;
import io.katharsis.resource.annotations.JsonApiToOne;
/**
 * Test resource with to-one and to-many relationships to ClassC and an
 * included-by-default to-one relationship to ClassA.
 */
@JsonApiResource(type = "classBs")
public class ClassB {

    @JsonApiId
    private Long id;

    @JsonApiToMany(lazy = false)
    private final List<ClassC> classCs;

    @JsonApiToOne
    private final ClassC classC;

    @JsonApiToOne
    @JsonApiIncludeByDefault
    private final ClassA classA;

    public ClassB() {
        // all relationships absent
        this((ClassA) null);
    }

    public ClassB(ClassC classCs, ClassC classC) {
        this(classCs, classC, null);
    }

    public ClassB(ClassA classA) {
        this.classA = classA;
        this.classC = null;
        this.classCs = null;
    }

    public ClassB(ClassC classCs, ClassC classC, ClassA classA) {
        // the to-many side always holds exactly the one element given
        this.classCs = Collections.singletonList(classCs);
        this.classC = classC;
        this.classA = classA;
    }

    public Long getId() {
        return id;
    }

    /** @return this, for fluent chaining */
    public ClassB setId(Long id) {
        this.id = id;
        return this;
    }

    public List<ClassC> getClassCs() {
        return classCs;
    }

    public ClassC getClassC() {
        return classC;
    }

    public ClassA getClassA() {
        return classA;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.